PTHLexer now owns the Token vector.

llvm-svn: 60136
Ted Kremenek 2008-11-27 00:38:24 +00:00
parent d1dda5339d
commit 1f50dc899f
3 changed files with 27 additions and 36 deletions
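In short: PTHLexer previously borrowed a caller-owned token array (a raw pointer plus the index of the trailing eof token) and required the caller to keep that storage alive; after this change the lexer owns a std::vector<Token> that callers populate through getTokens(). The following is a minimal stand-alone sketch of the two member layouts, using a trivial Token placeholder rather than the real clang::Token, purely to illustrate the ownership shift:

// Sketch only: 'Token' here is a placeholder, not clang::Token.
#include <vector>

struct Token {};

// Before: the lexer borrows externally owned storage.
class PTHLexerBefore {
  const Token *Tokens;    // not owned; the caller keeps the array alive
  unsigned LastTokenIdx;  // index of the trailing eof token
  unsigned CurTokenIdx;
};

// After: the lexer owns its storage; eof is simply the last element.
class PTHLexerAfter {
  std::vector<Token> Tokens;
  unsigned CurTokenIdx;
public:
  std::vector<Token>& getTokens() { return Tokens; }
  bool AtLastToken() const { return CurTokenIdx + 1 == Tokens.size(); }
};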


@@ -15,30 +15,24 @@
 #define LLVM_CLANG_PTHLEXER_H
 
 #include "clang/Lex/PreprocessorLexer.h"
+#include <vector>
 
 namespace clang {
 
 class PTHLexer : public PreprocessorLexer {
-  /// Tokens - This is the pointer to an array of tokens that the macro is
-  /// defined to, with arguments expanded for function-like macros. If this is
-  /// a token stream, these are the tokens we are returning.
-  const Token *Tokens;
-
-  /// LastTokenIdx - The index of the last token in Tokens. This token
-  /// will be an eof token.
-  unsigned LastTokenIdx;
+  /// Tokens - Vector of raw tokens.
+  std::vector<Token> Tokens;
 
   /// CurTokenIdx - This is the index of the next token that Lex will return.
   unsigned CurTokenIdx;
 
   PTHLexer(const PTHLexer&);        // DO NOT IMPLEMENT
   void operator=(const PTHLexer&);  // DO NOT IMPLEMENT
 
 public:
 
   /// Create a PTHLexer for the specified token stream.
-  PTHLexer(Preprocessor& pp, SourceLocation fileloc,
-           const Token *TokArray, unsigned NumToks);
+  PTHLexer(Preprocessor& pp, SourceLocation fileloc);
   ~PTHLexer() {}
 
   /// Lex - Return the next token.
@@ -46,6 +40,8 @@ public:
   void setEOF(Token &Tok);
 
+  std::vector<Token>& getTokens() { return Tokens; }
+
   /// DiscardToEndOfLine - Read the rest of the current preprocessor line as an
   /// uninterpreted string. This switches the lexer out of directive mode.
   void DiscardToEndOfLine();
@@ -64,7 +60,7 @@ public:
 private:
 
   /// AtLastToken - Returns true if the PTHLexer is at the last token.
-  bool AtLastToken() const { return CurTokenIdx == LastTokenIdx; }
+  bool AtLastToken() const { return CurTokenIdx+1 == Tokens.size(); }
 
   /// GetToken - Returns the next token. This method does not advance the
   /// PTHLexer to the next token.

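Note the invariant implied by the new header: AtLastToken() and setEOF() both assume the eof token is the last element of the vector, so whoever fills the vector must push eof last. A hypothetical caller of the new interface (PP, Loc, and fillTokens below are illustrative names, not Clang API) might look like:

// Illustrative only; fillTokens() stands in for whatever produces the raw tokens.
PTHLexer* PL = new PTHLexer(PP, Loc);
std::vector<Token>& Toks = PL->getTokens();
fillTokens(Toks);  // must leave an eof token as the last element
assert(!Toks.empty() && Toks.back().is(tok::eof));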

@@ -78,6 +78,16 @@ void Preprocessor::EnterSourceFile(unsigned FileID,
   Lexer *TheLexer = new Lexer(SourceLocation::getFileLoc(FileID, 0), *this);
   EnterSourceFileWithLexer(TheLexer, CurDir);
 #else
+  if (CurPPLexer || CurTokenLexer)
+    PushIncludeMacroStack();
+
+  CurDirLookup = CurDir;
+  SourceLocation Loc = SourceLocation::getFileLoc(FileID, 0);
+  CurPTHLexer.reset(new PTHLexer(*this, Loc));
+  CurPPLexer = CurPTHLexer.get();
+
+  // Generate the tokens.
+
   const llvm::MemoryBuffer* B = getSourceManager().getBuffer(FileID);
 
   // Create a raw lexer.
@@ -89,7 +99,7 @@ void Preprocessor::EnterSourceFile(unsigned FileID,
   L.SetCommentRetentionState(false);
 
   // Lex the file, populating our data structures.
-  std::vector<Token>* Tokens = new std::vector<Token>();
+  std::vector<Token>& Tokens = CurPTHLexer->getTokens();
   Token Tok;
 
   do {
@@ -101,7 +111,7 @@ void Preprocessor::EnterSourceFile(unsigned FileID,
     else if (Tok.is(tok::hash) && Tok.isAtStartOfLine()) {
       // Special processing for #include. Store the '#' token and lex
       // the next token.
-      Tokens->push_back(Tok);
+      Tokens.push_back(Tok);
       L.LexFromRawLexer(Tok);
 
       // Did we see 'include'/'import'/'include_next'?
@@ -116,7 +126,7 @@ void Preprocessor::EnterSourceFile(unsigned FileID,
           K == tok::pp_include_next) {
 
         // Save the 'include' token.
-        Tokens->push_back(Tok);
+        Tokens.push_back(Tok);
 
         // Lex the next token as an include string.
         L.ParsingPreprocessorDirective = true;
@@ -128,15 +138,7 @@ void Preprocessor::EnterSourceFile(unsigned FileID,
       }
     }
   }
-  while (Tokens->push_back(Tok), Tok.isNot(tok::eof));
-
-  if (CurPPLexer || CurTokenLexer)
-    PushIncludeMacroStack();
-
-  CurDirLookup = CurDir;
-  SourceLocation Loc = SourceLocation::getFileLoc(FileID, 0);
-  CurPTHLexer.reset(new PTHLexer(*this, Loc, &(*Tokens)[0], Tokens->size()));
-  CurPPLexer = CurPTHLexer.get();
+  while (Tokens.push_back(Tok), Tok.isNot(tok::eof));
 
   // Notify the client, if desired, that we are in a new source file.
   if (Callbacks) {

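The ordering change above is the crux of the ownership transfer: the PTHLexer is now constructed before the raw-lexing loop, so the loop appends directly into the lexer-owned vector instead of a heap-allocated temporary that later had to be handed over as a pointer/length pair. Stripped of the '#include' handling, the populate loop has roughly this shape (lexOne() is a stand-in for LexFromRawLexer on the raw lexer L, for illustration only):

// Simplified shape of the populate loop.
std::vector<Token>& Tokens = CurPTHLexer->getTokens();
Token Tok;
do {
  lexOne(Tok);  // produce the next raw token
  // ... special handling for '#' directives elided ...
} while (Tokens.push_back(Tok), Tok.isNot(tok::eof));  // push every token, eof included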

@@ -16,16 +16,8 @@
 #include "clang/Basic/TokenKinds.h"
 using namespace clang;
 
-PTHLexer::PTHLexer(Preprocessor& pp, SourceLocation fileloc,
-                   const Token *TokArray, unsigned NumTokens)
-  : PreprocessorLexer(&pp, fileloc),
-    Tokens(TokArray),
-    LastTokenIdx(NumTokens - 1),
-    CurTokenIdx(0) {
-
-  assert(NumTokens >= 1);
-  assert(Tokens[LastTokenIdx].is(tok::eof));
-}
+PTHLexer::PTHLexer(Preprocessor& pp, SourceLocation fileloc)
+  : PreprocessorLexer(&pp, fileloc), CurTokenIdx(0) {}
 
 Token PTHLexer::GetToken() {
   Token Tok = Tokens[CurTokenIdx];
@@ -104,7 +96,8 @@ bool PTHLexer::LexEndOfFile(Token &Tok) {
 }
 
 void PTHLexer::setEOF(Token& Tok) {
-  Tok = Tokens[LastTokenIdx];
+  assert(!Tokens.empty());
+  Tok = Tokens[Tokens.size()-1];
 }
 
 void PTHLexer::DiscardToEndOfLine() {
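With the vector owned by the lexer, the constructor no longer needs the NumTokens/eof assertions up front; the non-empty check moves to setEOF(), the point where the last token is actually read. An equivalent, slightly more idiomatic spelling of that method (a sketch, not the committed code) would use back():

void PTHLexer::setEOF(Token& Tok) {
  assert(!Tokens.empty() && "no eof token in PTHLexer");
  Tok = Tokens.back();  // the last token is expected to be eof
}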