PTHLexer now owns the Token vector.

llvm-svn: 60136
Ted Kremenek 2008-11-27 00:38:24 +00:00
parent d1dda5339d
commit 1f50dc899f
3 changed files with 27 additions and 36 deletions
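As a minimal before/after sketch of the ownership change this commit describes (simplified, hypothetical stand-ins, not clang's real Token or PTHLexer classes): previously the lexer borrowed a caller-owned token array and tracked the index of its trailing eof token; now it owns a std::vector<Token> that callers populate through an accessor.

// Sketch only: simplified stand-ins illustrating the ownership change.
#include <cassert>
#include <vector>

struct Token { bool IsEOF = false; };

// Before: the lexer borrowed a caller-owned array plus its length.
class BorrowingLexer {
  const Token *Tokens;     // not owned; the caller must keep it alive
  unsigned LastTokenIdx;   // index of the trailing eof token
  unsigned CurTokenIdx = 0;
public:
  BorrowingLexer(const Token *TokArray, unsigned NumToks)
      : Tokens(TokArray), LastTokenIdx(NumToks - 1) {
    assert(NumToks >= 1 && Tokens[LastTokenIdx].IsEOF);
  }
  bool AtLastToken() const { return CurTokenIdx == LastTokenIdx; }
};

// After: the lexer owns the vector and the caller fills it in place.
class OwningLexer {
  std::vector<Token> Tokens;   // owned storage
  unsigned CurTokenIdx = 0;
public:
  std::vector<Token> &getTokens() { return Tokens; }
  bool AtLastToken() const { return CurTokenIdx + 1 == Tokens.size(); }
};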

@@ -15,30 +15,24 @@
#define LLVM_CLANG_PTHLEXER_H
#include "clang/Lex/PreprocessorLexer.h"
#include <vector>
namespace clang {
class PTHLexer : public PreprocessorLexer {
/// Tokens - This is the pointer to an array of tokens that the macro is
/// defined to, with arguments expanded for function-like macros. If this is
/// a token stream, these are the tokens we are returning.
const Token *Tokens;
/// LastTokenIdx - The index of the last token in Tokens. This token
/// will be an eof token.
unsigned LastTokenIdx;
/// Tokens - Vector of raw tokens.
std::vector<Token> Tokens;
/// CurTokenIdx - This is the index of the next token that Lex will return.
unsigned CurTokenIdx;
PTHLexer(const PTHLexer&); // DO NOT IMPLEMENT
void operator=(const PTHLexer&); // DO NOT IMPLEMENT
public:
public:
/// Create a PTHLexer for the specified token stream.
PTHLexer(Preprocessor& pp, SourceLocation fileloc,
const Token *TokArray, unsigned NumToks);
PTHLexer(Preprocessor& pp, SourceLocation fileloc);
~PTHLexer() {}
/// Lex - Return the next token.
@@ -46,6 +40,8 @@ public:
void setEOF(Token &Tok);
std::vector<Token>& getTokens() { return Tokens; }
/// DiscardToEndOfLine - Read the rest of the current preprocessor line as an
/// uninterpreted string. This switches the lexer out of directive mode.
void DiscardToEndOfLine();
@@ -64,7 +60,7 @@ public:
private:
/// AtLastToken - Returns true if the PTHLexer is at the last token.
bool AtLastToken() const { return CurTokenIdx == LastTokenIdx; }
bool AtLastToken() const { return CurTokenIdx+1 == Tokens.size(); }
/// GetToken - Returns the next token. This method does not advance the
/// PTHLexer to the next token.

@@ -78,6 +78,16 @@ void Preprocessor::EnterSourceFile(unsigned FileID,
Lexer *TheLexer = new Lexer(SourceLocation::getFileLoc(FileID, 0), *this);
EnterSourceFileWithLexer(TheLexer, CurDir);
#else
if (CurPPLexer || CurTokenLexer)
PushIncludeMacroStack();
CurDirLookup = CurDir;
SourceLocation Loc = SourceLocation::getFileLoc(FileID, 0);
CurPTHLexer.reset(new PTHLexer(*this, Loc));
CurPPLexer = CurPTHLexer.get();
// Generate the tokens.
const llvm::MemoryBuffer* B = getSourceManager().getBuffer(FileID);
// Create a raw lexer.
@@ -89,7 +99,7 @@ void Preprocessor::EnterSourceFile(unsigned FileID,
L.SetCommentRetentionState(false);
// Lex the file, populating our data structures.
std::vector<Token>* Tokens = new std::vector<Token>();
std::vector<Token>& Tokens = CurPTHLexer->getTokens();
Token Tok;
do {
@@ -101,7 +111,7 @@ void Preprocessor::EnterSourceFile(unsigned FileID,
else if (Tok.is(tok::hash) && Tok.isAtStartOfLine()) {
// Special processing for #include. Store the '#' token and lex
// the next token.
Tokens->push_back(Tok);
Tokens.push_back(Tok);
L.LexFromRawLexer(Tok);
// Did we see 'include'/'import'/'include_next'?
@@ -116,7 +126,7 @@ void Preprocessor::EnterSourceFile(unsigned FileID,
K == tok::pp_include_next) {
// Save the 'include' token.
Tokens->push_back(Tok);
Tokens.push_back(Tok);
// Lex the next token as an include string.
L.ParsingPreprocessorDirective = true;
@@ -128,15 +138,7 @@ void Preprocessor::EnterSourceFile(unsigned FileID,
}
}
}
while (Tokens->push_back(Tok), Tok.isNot(tok::eof));
if (CurPPLexer || CurTokenLexer)
PushIncludeMacroStack();
CurDirLookup = CurDir;
SourceLocation Loc = SourceLocation::getFileLoc(FileID, 0);
CurPTHLexer.reset(new PTHLexer(*this, Loc, &(*Tokens)[0], Tokens->size()));
CurPPLexer = CurPTHLexer.get();
while (Tokens.push_back(Tok), Tok.isNot(tok::eof));
// Notify the client, if desired, that we are in a new source file.
if (Callbacks) {

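A hedged sketch of the reordering in the EnterSourceFile hunks above (the Preprocessor, the raw Lexer, and the include-stack bookkeeping are stubbed out with hypothetical placeholders): the PTHLexer is now constructed before raw lexing, and tokens are pushed straight into the vector it owns via getTokens(), replacing the old heap-allocated temporary std::vector<Token> that was later handed to the constructor.

// Sketch only: hypothetical stand-ins mirroring the reordered flow.
#include <memory>
#include <vector>

struct Token { bool IsEOF = false; };

class PTHLexerSketch {
  std::vector<Token> Tokens;   // owned by the lexer
public:
  std::vector<Token> &getTokens() { return Tokens; }
};

// Trivial stand-in for raw lexing: emits a few tokens, then eof.
static Token lexRawToken() {
  static int Count = 0;
  Token Tok;
  Tok.IsEOF = (++Count >= 4);
  return Tok;
}

static void enterSourceFileSketch(std::unique_ptr<PTHLexerSketch> &CurPTHLexer) {
  // New order: construct the owning lexer first...
  CurPTHLexer.reset(new PTHLexerSketch());
  // ...then lex straight into the vector it owns, as the loop above does.
  std::vector<Token> &Tokens = CurPTHLexer->getTokens();
  Token Tok;
  do {
    Tok = lexRawToken();
    Tokens.push_back(Tok);
  } while (!Tok.IsEOF);
}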
@@ -16,16 +16,8 @@
#include "clang/Basic/TokenKinds.h"
using namespace clang;
PTHLexer::PTHLexer(Preprocessor& pp, SourceLocation fileloc,
const Token *TokArray, unsigned NumTokens)
: PreprocessorLexer(&pp, fileloc),
Tokens(TokArray),
LastTokenIdx(NumTokens - 1),
CurTokenIdx(0) {
assert(NumTokens >= 1);
assert(Tokens[LastTokenIdx].is(tok::eof));
}
PTHLexer::PTHLexer(Preprocessor& pp, SourceLocation fileloc)
: PreprocessorLexer(&pp, fileloc), CurTokenIdx(0) {}
Token PTHLexer::GetToken() {
Token Tok = Tokens[CurTokenIdx];
@@ -104,7 +96,8 @@ bool PTHLexer::LexEndOfFile(Token &Tok) {
}
void PTHLexer::setEOF(Token& Tok) {
Tok = Tokens[LastTokenIdx];
assert(!Tokens.empty());
Tok = Tokens[Tokens.size()-1];
}
void PTHLexer::DiscardToEndOfLine() {
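And, for the last hunk, a tiny sketch of setEOF against the owned vector (same simplified stand-ins as above; the assert mirrors the one added in the hunk): with no separately tracked LastTokenIdx, the eof token is simply the last element the raw-lex loop pushed.

// Sketch only: setEOF reading the trailing eof token from the owned vector.
#include <cassert>
#include <vector>

struct Token { bool IsEOF = false; };

struct OwningLexerSketch {
  std::vector<Token> Tokens;
  void setEOF(Token &Tok) {
    assert(!Tokens.empty() && "vector must at least contain the eof token");
    Tok = Tokens[Tokens.size() - 1];   // equivalent to Tokens.back()
  }
};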