2008-08-10 21:15:22 +08:00
|
|
|
//===--- PPCaching.cpp - Handle caching lexed tokens ----------------------===//
|
|
|
|
//
|
|
|
|
// The LLVM Compiler Infrastructure
|
|
|
|
//
|
|
|
|
// This file is distributed under the University of Illinois Open Source
|
|
|
|
// License. See LICENSE.TXT for details.
|
|
|
|
//
|
|
|
|
//===----------------------------------------------------------------------===//
|
|
|
|
//
|
|
|
|
// This file implements pieces of the Preprocessor interface that manage the
|
|
|
|
// caching of lexed tokens.
|
|
|
|
//
|
|
|
|
//===----------------------------------------------------------------------===//
|
|
|
|
|
|
|
|
#include "clang/Lex/Preprocessor.h"
|
|
|
|
using namespace clang;
|
|
|
|
|
2013-11-27 09:27:40 +08:00
|
|
|
// EnableBacktrackAtThisPos - From the point that this method is called, and
|
|
|
|
// until CommitBacktrackedTokens() or Backtrack() is called, the Preprocessor
|
|
|
|
// keeps track of the lexed tokens so that a subsequent Backtrack() call will
|
|
|
|
// make the Preprocessor re-lex the same tokens.
|
|
|
|
//
|
|
|
|
// Nested backtracks are allowed, meaning that EnableBacktrackAtThisPos can
|
|
|
|
// be called multiple times and CommitBacktrackedTokens/Backtrack calls will
|
|
|
|
// be combined with the EnableBacktrackAtThisPos calls in reverse order.
|
2008-08-23 20:05:53 +08:00
|
|
|
void Preprocessor::EnableBacktrackAtThisPos() {
|
|
|
|
BacktrackPositions.push_back(CachedLexPos);
|
|
|
|
EnterCachingLexMode();
|
|
|
|
}
|
2008-08-23 05:27:50 +08:00
|
|
|
|
2013-11-27 09:27:40 +08:00
|
|
|
// Disable the last EnableBacktrackAtThisPos call.
|
2008-08-24 20:29:43 +08:00
|
|
|
void Preprocessor::CommitBacktrackedTokens() {
|
2008-08-23 20:05:53 +08:00
|
|
|
assert(!BacktrackPositions.empty()
|
|
|
|
&& "EnableBacktrackAtThisPos was not called!");
|
|
|
|
BacktrackPositions.pop_back();
|
|
|
|
}
|
|
|
|
|
[Preprocessor] Fix incorrect token caching that occurs when lexing _Pragma
in macro argument pre-expansion mode when skipping a function body
This commit fixes a token caching problem that currently occurs when clang is
skipping a function body (e.g. when looking for a code completion token) and at
the same time caching the tokens for _Pragma when lexing it in macro argument
pre-expansion mode.
When _Pragma is being lexed in macro argument pre-expansion mode, it caches the
tokens so that it can avoid interpreting the pragma immediately (as the macro
argument may not be used in the macro body), and then either backtracks over or
commits these tokens. The problem is that, when we're backtracking/committing in
such a scenario, there's already a previous backtracking position stored in
BacktrackPositions (as we're skipping the function body), and this leads to a
situation where the cached tokens from the pragma (like '(' 'string_literal'
and ')') will remain in the cached tokens array incorrectly even after they're
consumed (in the case of backtracking) or just ignored (in the case when they're
committed). Furthermore, what makes it even worse, is that because of a previous
backtracking position, the logic that deals with when should we call
ExitCachingLexMode in CachingLex no longer works for us in this situation, and
more tokens in the macro argument get cached, to the point where the EOF token
that corresponds to the macro argument EOF is cached. This problem leads to all
sorts of issues in code completion mode, where incorrect errors get presented
and code completion completely fails to produce completion results.
rdar://28523863
Differential Revision: https://reviews.llvm.org/D28772
llvm-svn: 296140
2017-02-25 01:45:16 +08:00
|
|
|
// Returns the range of tokens cached since the most recent
// EnableBacktrackAtThisPos() call, ending at the current cache position.
Preprocessor::CachedTokensRange Preprocessor::LastCachedTokenRange() {
  assert(isBacktrackEnabled());
  return CachedTokensRange{BacktrackPositions.back(), CachedLexPos};
}
|
|
|
|
|
|
|
|
// Removes the given range of tokens from the cache. If the range is still in
// front of the current position (we backtracked over it so the tokens will be
// consumed again), the erasure is deferred until CachingLex has handed out
// the last token of the range; otherwise the tokens are dropped immediately.
void Preprocessor::EraseCachedTokens(CachedTokensRange TokenRange) {
  assert(TokenRange.Begin <= TokenRange.End);
  const bool RangeIsEmpty = TokenRange.Begin == TokenRange.End;
  if (!RangeIsEmpty && TokenRange.Begin == CachedLexPos) {
    // We have backtracked to the start of the token range as we want to
    // consume the tokens again. Remember the range so it can be erased after
    // its tokens have been consumed.
    assert(!CachedTokenRangeToErase);
    CachedTokenRangeToErase = TokenRange;
    return;
  }
  // The cached tokens were committed, so they should be erased now.
  assert(TokenRange.End == CachedLexPos);
  CachedTokens.erase(CachedTokens.begin() + TokenRange.Begin,
                     CachedTokens.begin() + TokenRange.End);
  CachedLexPos = TokenRange.Begin;
  ExitCachingLexMode();
}
|
|
|
|
|
2013-11-27 09:27:40 +08:00
|
|
|
// Make Preprocessor re-lex the tokens that were lexed since
|
|
|
|
// EnableBacktrackAtThisPos() was previously called.
|
2008-08-23 20:05:53 +08:00
|
|
|
void Preprocessor::Backtrack() {
|
|
|
|
assert(!BacktrackPositions.empty()
|
|
|
|
&& "EnableBacktrackAtThisPos was not called!");
|
|
|
|
CachedLexPos = BacktrackPositions.back();
|
|
|
|
BacktrackPositions.pop_back();
|
2012-01-04 14:20:15 +08:00
|
|
|
recomputeCurLexerKind();
|
2008-08-23 20:05:53 +08:00
|
|
|
}
|
2008-08-23 05:27:50 +08:00
|
|
|
|
2008-08-10 21:15:22 +08:00
|
|
|
// CachingLex - Lex a token while in caching mode: replay a cached token if
// one is pending, otherwise lex a fresh token and, if backtracking is
// enabled, record it in the cache.
void Preprocessor::CachingLex(Token &Result) {
  // Nothing to replay if caching mode has already been exited.
  if (!InCachingLexMode())
    return;

  // Replay a previously cached token, if one is still pending.
  if (CachedLexPos < CachedTokens.size()) {
    Result = CachedTokens[CachedLexPos++];
    // Erase some of the cached tokens after they are consumed, when asked to
    // do so (EraseCachedTokens defers the erase until the last token of the
    // range has been replayed).
    if (CachedTokenRangeToErase &&
        CachedTokenRangeToErase->End == CachedLexPos) {
      EraseCachedTokens(*CachedTokenRangeToErase);
      CachedTokenRangeToErase = None;
    }
    return;
  }

  // All cached tokens have been consumed; lex a fresh token from the
  // underlying lexer.
  ExitCachingLexMode();
  Lex(Result);

  if (isBacktrackEnabled()) {
    // Cache the lexed token so a later Backtrack() can replay it.
    EnterCachingLexMode();
    CachedTokens.push_back(Result);
    ++CachedLexPos;
    return;
  }

  if (CachedLexPos < CachedTokens.size()) {
    // There are unconsumed cached tokens again — presumably the Lex() call
    // above re-entered caching and appended to the cache (TODO confirm) —
    // so stay in caching mode to replay them.
    EnterCachingLexMode();
  } else {
    // All cached tokens were consumed.
    CachedTokens.clear();
    CachedLexPos = 0;
  }
}
|
|
|
|
|
|
|
|
void Preprocessor::EnterCachingLexMode() {
|
2018-01-20 07:41:47 +08:00
|
|
|
if (InCachingLexMode()) {
|
|
|
|
assert(CurLexerKind == CLK_CachingLexer && "Unexpected lexer kind");
|
2008-08-10 21:15:22 +08:00
|
|
|
return;
|
2018-01-20 07:41:47 +08:00
|
|
|
}
|
2008-08-10 21:15:22 +08:00
|
|
|
|
2008-11-13 06:21:57 +08:00
|
|
|
PushIncludeMacroStack();
|
2012-01-04 14:20:15 +08:00
|
|
|
CurLexerKind = CLK_CachingLexer;
|
2008-08-10 21:15:22 +08:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
// Returns the token N positions ahead of the current cache position, lexing
// and caching as many additional tokens as needed to reach it.
const Token &Preprocessor::PeekAhead(unsigned N) {
  assert(CachedLexPos + N > CachedTokens.size() && "Confused caching.");
  ExitCachingLexMode();
  // Lex exactly enough new tokens to make the requested one available.
  size_t NumToLex = CachedLexPos + N - CachedTokens.size();
  while (NumToLex--) {
    CachedTokens.push_back(Token());
    Lex(CachedTokens.back());
  }
  EnterCachingLexMode();
  return CachedTokens.back();
}
|
2008-11-09 00:17:04 +08:00
|
|
|
|
|
|
|
void Preprocessor::AnnotatePreviousCachedTokens(const Token &Tok) {
|
2009-01-27 03:29:26 +08:00
|
|
|
assert(Tok.isAnnotation() && "Expected annotation token");
|
2008-11-09 00:17:04 +08:00
|
|
|
assert(CachedLexPos != 0 && "Expected to have some cached tokens");
|
2010-02-09 03:35:18 +08:00
|
|
|
assert(CachedTokens[CachedLexPos-1].getLastLoc() == Tok.getAnnotationEndLoc()
|
2008-11-09 00:17:04 +08:00
|
|
|
&& "The annotation should be until the most recent cached token");
|
|
|
|
|
|
|
|
// Start from the end of the cached tokens list and look for the token
|
|
|
|
// that is the beginning of the annotation token.
|
|
|
|
for (CachedTokensTy::size_type i = CachedLexPos; i != 0; --i) {
|
|
|
|
CachedTokensTy::iterator AnnotBegin = CachedTokens.begin() + i-1;
|
|
|
|
if (AnnotBegin->getLocation() == Tok.getLocation()) {
|
2016-10-21 04:53:20 +08:00
|
|
|
assert((BacktrackPositions.empty() || BacktrackPositions.back() <= i) &&
|
2008-11-09 00:17:04 +08:00
|
|
|
"The backtrack pos points inside the annotated tokens!");
|
|
|
|
// Replace the cached tokens with the single annotation token.
|
2009-07-27 00:36:45 +08:00
|
|
|
if (i < CachedLexPos)
|
|
|
|
CachedTokens.erase(AnnotBegin + 1, CachedTokens.begin() + CachedLexPos);
|
2008-11-09 00:17:04 +08:00
|
|
|
*AnnotBegin = Tok;
|
|
|
|
CachedLexPos = i;
|
|
|
|
return;
|
|
|
|
}
|
2013-11-27 09:40:12 +08:00
|
|
|
}
|
|
|
|
}
|
2016-01-31 08:47:51 +08:00
|
|
|
|
|
|
|
bool Preprocessor::IsPreviousCachedToken(const Token &Tok) const {
|
|
|
|
// There's currently no cached token...
|
|
|
|
if (!CachedLexPos)
|
|
|
|
return false;
|
|
|
|
|
|
|
|
const Token LastCachedTok = CachedTokens[CachedLexPos - 1];
|
|
|
|
if (LastCachedTok.getKind() != Tok.getKind())
|
|
|
|
return false;
|
|
|
|
|
|
|
|
int RelOffset = 0;
|
|
|
|
if ((!getSourceManager().isInSameSLocAddrSpace(
|
|
|
|
Tok.getLocation(), getLastCachedTokenLocation(), &RelOffset)) ||
|
|
|
|
RelOffset)
|
|
|
|
return false;
|
|
|
|
|
|
|
|
return true;
|
|
|
|
}
|
|
|
|
|
|
|
|
// Replaces the most recently consumed cached token with the sequence
// \p NewToks, leaving the cache positioned just past the replacement.
void Preprocessor::ReplacePreviousCachedToken(ArrayRef<Token> NewToks) {
  assert(CachedLexPos != 0 && "Expected to have some cached tokens");
  const CachedTokensTy::size_type ReplacePos = CachedLexPos - 1;
  // Insert the replacements in front of the token being replaced, then remove
  // that token (now sitting immediately after the inserted run).
  CachedTokens.insert(CachedTokens.begin() + ReplacePos, NewToks.begin(),
                      NewToks.end());
  CachedTokens.erase(CachedTokens.begin() + ReplacePos + NewToks.size());
  CachedLexPos = ReplacePos + NewToks.size();
}
|