[Lex] Don't assert when decoding invalid UCNs.
Currently, if a lexically valid UCN encodes an invalid code point, we diagnose that and then hit an assertion while trying to decode it. Since nothing prevents us from reaching this state, remove the assertion and simply skip the code point that cannot be encoded: expandUCNs("X\UAAAAAAAAY") will produce "XY".

Differential Revision: https://reviews.llvm.org/D125059
commit 817550919e (parent e2ed3fd71e)
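To make the new behavior concrete, here is a minimal standalone sketch, not Clang's actual code: encodeUTF8 below is a hypothetical stand-in for llvm::ConvertCodePointToUTF8, and the loop plays the role of the patched appendCodePoint. A code point that fails to convert contributes no bytes, which is why "X\UAAAAAAAAY" expands to "XY".

// Minimal sketch (not Clang's implementation): mimic the patched behavior with
// a hand-rolled UTF-8 encoder. Code points that cannot be encoded (surrogates,
// values above U+10FFFF) are skipped instead of asserting.
#include <cstdint>
#include <cstdio>
#include <string>

// Hypothetical stand-in for llvm::ConvertCodePointToUTF8: returns false for
// values that are not valid Unicode scalar values, true after appending the
// UTF-8 bytes to Out.
static bool encodeUTF8(uint32_t CP, std::string &Out) {
  if (CP >= 0xD800 && CP <= 0xDFFF) return false; // surrogate
  if (CP > 0x10FFFF) return false;                // out of Unicode range
  if (CP < 0x80) {
    Out += char(CP);
  } else if (CP < 0x800) {
    Out += char(0xC0 | (CP >> 6));
    Out += char(0x80 | (CP & 0x3F));
  } else if (CP < 0x10000) {
    Out += char(0xE0 | (CP >> 12));
    Out += char(0x80 | ((CP >> 6) & 0x3F));
    Out += char(0x80 | (CP & 0x3F));
  } else {
    Out += char(0xF0 | (CP >> 18));
    Out += char(0x80 | ((CP >> 12) & 0x3F));
    Out += char(0x80 | ((CP >> 6) & 0x3F));
    Out += char(0x80 | (CP & 0x3F));
  }
  return true;
}

int main() {
  std::string Buf;
  // "X", then the invalid U+AAAAAAAA, then "Y": the middle code point is dropped.
  for (uint32_t CP : {0x58u, 0xAAAAAAAAu, 0x59u})
    encodeUTF8(CP, Buf); // a failed conversion appends nothing, as in the patch
  std::printf("%s\n", Buf.c_str()); // prints "XY"
}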
@@ -320,10 +320,8 @@ static void appendCodePoint(unsigned Codepoint,
                              llvm::SmallVectorImpl<char> &Str) {
   char ResultBuf[4];
   char *ResultPtr = ResultBuf;
-  bool Res = llvm::ConvertCodePointToUTF8(Codepoint, ResultPtr);
-  (void)Res;
-  assert(Res && "Unexpected conversion failure");
-  Str.append(ResultBuf, ResultPtr);
+  if (llvm::ConvertCodePointToUTF8(Codepoint, ResultPtr))
+    Str.append(ResultBuf, ResultPtr);
 }
 
 void clang::expandUCNs(SmallVectorImpl<char> &Buf, StringRef Input) {
@@ -28,6 +28,9 @@ CHECK: The preprocessor should not complain about Unicode characters like ©.
 int _;
 
+extern int X\UAAAAAAAA; // expected-error {{not allowed in an identifier}}
+int Y = '\UAAAAAAAA'; // expected-error {{invalid universal character}}
+
 #ifdef __cplusplus
 
 extern int ༀ;
 