forked from OSchip/llvm-project
Revert r373888 "[IA] Recognize hexadecimal escape sequences"
It broke MC/AsmParser/directive_ascii.s on all bots: Assertion failed: (Index < Length && "Invalid index!"), function operator[], file ../../llvm/include/llvm/ADT/StringRef.h, line 243. llvm-svn: 373898
This commit is contained in:
parent
77c97002dc
commit
0fedc26a0d
|
@@ -2914,26 +2914,11 @@ bool AsmParser::parseEscapedString(std::string &Data) {
     }

     // Recognize escaped characters. Note that this escape semantics currently
-    // loosely follows Darwin 'as'.
+    // loosely follows Darwin 'as'. Notably, it doesn't support hex escapes.
     ++i;
     if (i == e)
       return TokError("unexpected backslash at end of string");

-    // Recognize hex sequences similarly to GNU 'as'.
-    if (Str[i] == 'x' || Str[i] == 'X') {
-      if (!isHexDigit(Str[i + 1]))
-        return TokError("invalid hexadecimal escape sequence");
-
-      // Consume hex characters. GNU 'as' reads all hexadecimal characters and
-      // then truncates to the lower 16 bits. Seems reasonable.
-      unsigned Value = 0;
-      while (isHexDigit(Str[i + 1]))
-        Value = Value * 16 + hexDigitValue(Str[++i]);
-
-      Data += (unsigned char)(Value & 0xFF);
-      continue;
-    }
-
     // Recognize octal sequences.
     if ((unsigned)(Str[i] - '0') <= 7) {
       // Consume up to three octal characters.
@@ -39,8 +39,3 @@ TEST5:
 # CHECK: .byte 0
 TEST6:
         .string "B", "C"
-
-# CHECK: TEST7:
-# CHECK: .ascii "dk"
-TEST7:
-        .ascii "\x64\Xa6B"
Loading…
Reference in New Issue