[flang] a little more refinement
Original-commit: flang-compiler/f18@5754d426e8
Reviewed-on: https://github.com/flang-compiler/f18/pull/87
Tree-same-pre-rewrite: false

parent 42964c090c
commit f651e74244
@@ -355,19 +355,9 @@ TokenSequence Preprocessor::ReplaceMacros(
   return tokens;
 }
 
-static std::size_t SkipBlanks(
-    const TokenSequence &tokens, std::size_t at, std::size_t lastToken) {
-  for (; at < lastToken; ++at) {
-    if (!tokens.TokenAt(at).IsBlank()) {
-      break;
-    }
-  }
-  return std::min(at, lastToken);
-}
-
 void Preprocessor::Directive(const TokenSequence &dir, Prescanner *prescanner) {
   std::size_t tokens{dir.SizeInTokens()};
-  std::size_t j{SkipBlanks(dir, 0, tokens)};
+  std::size_t j{dir.SkipBlanks(0)};
   if (j == tokens) {
     return;
   }
@@ -375,7 +365,7 @@ void Preprocessor::Directive(const TokenSequence &dir, Prescanner *prescanner) {
     prescanner->Say("missing '#'"_err_en_US, dir.GetTokenProvenanceRange(j));
     return;
   }
-  j = SkipBlanks(dir, j + 1, tokens);
+  j = dir.SkipBlanks(j + 1);
   if (j == tokens) {
     return;
   }
@@ -384,7 +374,7 @@ void Preprocessor::Directive(const TokenSequence &dir, Prescanner *prescanner) {
   }
   std::size_t dirOffset{j};
   std::string dirName{ToLowerCaseLetters(dir.TokenAt(dirOffset).ToString())};
-  j = SkipBlanks(dir, j + 1, tokens);
+  j = dir.SkipBlanks(j + 1);
   CharBlock nameToken;
   if (j < tokens && IsLegalIdentifierStart(dir.TokenAt(j)[0])) {
     nameToken = dir.TokenAt(j);
@@ -401,7 +391,7 @@ void Preprocessor::Directive(const TokenSequence &dir, Prescanner *prescanner) {
     definitions_.erase(nameToken);
     if (++j < tokens && dir.TokenAt(j).size() == 1 &&
         dir.TokenAt(j)[0] == '(') {
-      j = SkipBlanks(dir, j + 1, tokens);
+      j = dir.SkipBlanks(j + 1);
       std::vector<std::string> argName;
       bool isVariadic{false};
       if (dir.TokenAt(j).ToString() != ")") {
@@ -418,7 +408,7 @@ void Preprocessor::Directive(const TokenSequence &dir, Prescanner *prescanner) {
           }
           argName.push_back(an);
         }
-        j = SkipBlanks(dir, j + 1, tokens);
+        j = dir.SkipBlanks(j + 1);
         if (j == tokens) {
           prescanner->Say("#define: malformed argument list"_err_en_US,
               dir.GetTokenProvenanceRange(tokens - 1));
@@ -433,7 +423,7 @@ void Preprocessor::Directive(const TokenSequence &dir, Prescanner *prescanner) {
               dir.GetTokenProvenanceRange(j));
           return;
         }
-        j = SkipBlanks(dir, j + 1, tokens);
+        j = dir.SkipBlanks(j + 1);
         if (j == tokens) {
           prescanner->Say("#define: malformed argument list"_err_en_US,
               dir.GetTokenProvenanceRange(tokens - 1));
@@ -447,11 +437,11 @@ void Preprocessor::Directive(const TokenSequence &dir, Prescanner *prescanner) {
           return;
         }
       }
-      j = SkipBlanks(dir, j + 1, tokens);
+      j = dir.SkipBlanks(j + 1);
       definitions_.emplace(std::make_pair(
           nameToken, Definition{argName, dir, j, tokens - j, isVariadic}));
     } else {
-      j = SkipBlanks(dir, j, tokens);
+      j = dir.SkipBlanks(j + 1);
       definitions_.emplace(
          std::make_pair(nameToken, Definition{dir, j, tokens - j}));
     }
@@ -460,7 +450,7 @@ void Preprocessor::Directive(const TokenSequence &dir, Prescanner *prescanner) {
      prescanner->Say("# missing or invalid name"_err_en_US,
          dir.GetIntervalProvenanceRange(dirOffset, tokens - dirOffset));
    } else {
-      j = SkipBlanks(dir, j + 1, tokens);
+      j = dir.SkipBlanks(j + 1);
      if (j != tokens) {
        prescanner->Say("#undef: excess tokens at end of directive"_err_en_US,
            dir.GetIntervalProvenanceRange(j, tokens - j));
@@ -475,7 +465,7 @@ void Preprocessor::Directive(const TokenSequence &dir, Prescanner *prescanner) {
          dir.GetIntervalProvenanceRange(dirOffset, tokens - dirOffset));
      return;
    }
-    j = SkipBlanks(dir, j + 1, tokens);
+    j = dir.SkipBlanks(j + 1);
    if (j != tokens) {
      prescanner->Say(MessageFormattedText(
          "#%s: excess tokens at end of directive"_err_en_US,
@@ -603,17 +593,17 @@ bool Preprocessor::IsNameDefined(const CharBlock &token) {
 static std::string GetDirectiveName(
     const TokenSequence &line, std::size_t *rest) {
   std::size_t tokens{line.SizeInTokens()};
-  std::size_t j{SkipBlanks(line, 0, tokens)};
+  std::size_t j{line.SkipBlanks(0)};
   if (j == tokens || line.TokenAt(j).ToString() != "#") {
     *rest = tokens;
     return "";
   }
-  j = SkipBlanks(line, j + 1, tokens);
+  j = line.SkipBlanks(j + 1);
   if (j == tokens) {
     *rest = tokens;
     return "";
   }
-  *rest = SkipBlanks(line, j + 1, tokens);
+  *rest = line.SkipBlanks(j + 1);
   return ToLowerCaseLetters(line.TokenAt(j).ToString());
 }
 
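The hunks above (apparently the preprocessor implementation, judging by the Preprocessor::Directive and GetDirectiveName context lines) delete a file-local SkipBlanks helper that had to be handed both the token sequence and its length, and rewrite every call to use a TokenSequence member that knows its own size. A minimal sketch of the two call shapes, using a hypothetical stand-in class rather than the real f18 types:

#include <algorithm>
#include <cstddef>
#include <iostream>
#include <string>
#include <utility>
#include <vector>

// Stand-in sequence type; only the SkipBlanks call shapes mirror the diff.
class Seq {
public:
  explicit Seq(std::vector<std::string> tokens) : tokens_(std::move(tokens)) {}
  std::size_t SizeInTokens() const { return tokens_.size(); }
  const std::string &TokenAt(std::size_t j) const { return tokens_.at(j); }
  // New shape: the sequence knows its own length, so the caller passes only
  // a starting index, as in "dir.SkipBlanks(j + 1)".
  std::size_t SkipBlanks(std::size_t at) const {
    for (; at < tokens_.size() && tokens_[at] == " "; ++at) {
    }
    return std::min(at, tokens_.size());
  }

private:
  std::vector<std::string> tokens_;
};

// Old shape: a file-local helper given both the sequence and its length,
// as in "SkipBlanks(dir, j + 1, tokens)".
static std::size_t SkipBlanks(
    const Seq &tokens, std::size_t at, std::size_t lastToken) {
  for (; at < lastToken && tokens.TokenAt(at) == " "; ++at) {
  }
  return std::min(at, lastToken);
}

int main() {
  Seq dir{{" ", " ", "#", "define"}};
  std::size_t tokens{dir.SizeInTokens()};
  std::cout << SkipBlanks(dir, 0, tokens) << '\n';  // old call shape: prints 2
  std::cout << dir.SkipBlanks(0) << '\n';           // new call shape: prints 2
}

Only the call shape changes; the blank-skipping result is intended to be the same.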
@@ -51,6 +51,19 @@ void TokenSequence::swap(TokenSequence &that) {
   provenances_.swap(that.provenances_);
 }
 
+std::size_t TokenSequence::SkipBlanks(std::size_t at) const {
+  std::size_t tokens{start_.size()};
+  if (at > tokens) {
+    return tokens;  // j = ts.SkipBlanks(j+1) shouldn't go past tokens
+  }
+  for (std::size_t tokens{start_.size()}; at < tokens; ++at) {
+    if (!TokenAt(at).IsBlank()) {
+      break;
+    }
+  }
+  return at;
+}
+
 void TokenSequence::Put(const TokenSequence &that) {
   if (nextStart_ < char_.size()) {
     start_.push_back(nextStart_);
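As its inline comment notes, the new member (added above in what appears to be the TokenSequence implementation file) tolerates a start index that is already past the end, returning the token count so call sites can write j = dir.SkipBlanks(j + 1) without a prior bounds check. A small self-contained sketch of that clamping behavior, using plain strings as stand-in tokens rather than the real f18 API:

#include <cstddef>
#include <iostream>
#include <string>
#include <vector>

// Stand-in for the new member, over a plain vector of token strings;
// the real code tests CharBlock::IsBlank() instead of comparing with " ".
std::size_t SkipBlanks(const std::vector<std::string> &toks, std::size_t at) {
  std::size_t tokens{toks.size()};
  if (at > tokens) {
    return tokens;  // clamp, mirroring the early return in the diff
  }
  for (; at < tokens; ++at) {
    if (toks[at] != " ") {
      break;
    }
  }
  return at;
}

int main() {
  std::vector<std::string> toks{"#", " ", "undef", " ", "X"};
  std::cout << SkipBlanks(toks, 1) << '\n';   // 2: lands on "undef"
  std::cout << SkipBlanks(toks, 5) << '\n';   // 5: at == size, nothing to skip
  std::cout << SkipBlanks(toks, 99) << '\n';  // 5: past-the-end index is clamped
}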
@@ -67,9 +67,10 @@ public:
   CharBlock TokenAt(std::size_t token) const {
     return {&char_[start_.at(token)], TokenBytes(token)};
   }
 
   char CharAt(std::size_t j) const { return char_.at(j); }
+  std::size_t SkipBlanks(std::size_t) const;
 
   void PutNextTokenChar(char ch, Provenance provenance) {
     char_.emplace_back(ch);
     provenances_.Put({provenance, 1});
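With the declaration added to the class above (apparently the TokenSequence header), directive scanning reduces to the advance-then-check chain seen in Preprocessor::Directive. A simplified, hypothetical illustration of that usage pattern over stand-in string tokens, not the real preprocessor code:

#include <cstddef>
#include <iostream>
#include <string>
#include <vector>

// String-based stand-in for the token sequence's blank-skipping member.
static std::size_t SkipBlanks(
    const std::vector<std::string> &toks, std::size_t at) {
  for (; at < toks.size() && toks[at] == " "; ++at) {
  }
  return at;
}

int main() {
  std::vector<std::string> dir{" ", "#", " ", "define", " ", "FOO"};
  std::size_t tokens{dir.size()};
  std::size_t j{SkipBlanks(dir, 0)};  // find the first non-blank token
  if (j == tokens || dir[j] != "#") {
    return 0;  // not a preprocessing directive
  }
  j = SkipBlanks(dir, j + 1);  // step past '#', then skip blanks again
  if (j == tokens) {
    return 0;  // '#' with nothing after it
  }
  std::cout << "directive name: " << dir[j] << '\n';  // prints "define"
}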