//===- TokenRewriter.cpp - Token-based code rewriting interface -----------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file implements the TokenRewriter class, which is used for code
// transformations.
//
//===----------------------------------------------------------------------===//
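// A minimal usage sketch (illustrative only, not part of the original file):
// a client constructs a TokenRewriter over a FileID and iterates the raw
// token stream it builds.  The names SourceMgr and Opts are assumed for the
// example.
//
//   TokenRewriter Rewriter(FID, SourceMgr, Opts);
//   for (TokenRewriter::token_iterator I = Rewriter.token_begin(),
//                                      E = Rewriter.token_end();
//        I != E; ++I) {
//     // Each raw token, including comments and whitespace, is visible here.
//   }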
#include "clang/Rewrite/Core/TokenRewriter.h"
#include "clang/Basic/SourceManager.h"
#include "clang/Lex/Lexer.h"
#include "clang/Lex/ScratchBuffer.h"
#include "clang/Lex/Token.h"
#include <cassert>
#include <cstring>
#include <map>
#include <utility>

using namespace clang;

TokenRewriter::TokenRewriter(FileID FID, SourceManager &SM,
                             const LangOptions &LangOpts) {
  ScratchBuf.reset(new ScratchBuffer(SM));

  // Create a lexer to lex all the tokens of the main file in raw mode.
  const llvm::MemoryBuffer *FromFile = SM.getBuffer(FID);
  Lexer RawLex(FID, FromFile, SM, LangOpts);

  // Return all comments and whitespace as tokens.
  RawLex.SetKeepWhitespaceMode(true);

  // Lex the file, populating our data structures.
  Token RawTok;
  RawLex.LexFromRawLexer(RawTok);
  while (RawTok.isNot(tok::eof)) {
#if 0
    if (RawTok.is(tok::raw_identifier)) {
      // Look up the identifier info for the token. This should use
      // IdentifierTable directly instead of PP.
      PP.LookUpIdentifierInfo(RawTok);
    }
#endif

    AddToken(RawTok, TokenList.end());
    RawLex.LexFromRawLexer(RawTok);
  }
}

TokenRewriter::~TokenRewriter() = default;

/// RemapIterator - Convert from token_iterator (a const iterator) to
/// TokenRefTy (a non-const iterator).
TokenRewriter::TokenRefTy TokenRewriter::RemapIterator(token_iterator I) {
  if (I == token_end()) return TokenList.end();

  // FIXME: This is horrible, we should use our own list or something to avoid
  // this.
  std::map<SourceLocation, TokenRefTy>::iterator MapIt =
      TokenAtLoc.find(I->getLocation());
  assert(MapIt != TokenAtLoc.end() && "iterator not in rewriter?");
  return MapIt->second;
}

/// AddToken - Add the specified token into the Rewriter before the given
/// position.
TokenRewriter::TokenRefTy
TokenRewriter::AddToken(const Token &T, TokenRefTy Where) {
  Where = TokenList.insert(Where, T);

  bool InsertSuccess = TokenAtLoc.insert(std::make_pair(T.getLocation(),
                                                        Where)).second;
  assert(InsertSuccess && "Token location already in rewriter!");
  (void)InsertSuccess;
  return Where;
}

TokenRewriter::token_iterator
TokenRewriter::AddTokenBefore(token_iterator I, const char *Val) {
  unsigned Len = strlen(Val);

  // Plop the string into the scratch buffer, then create a token for this
  // string.
  Token Tok;
  Tok.startToken();
  const char *Spelling;
  Tok.setLocation(ScratchBuf->getToken(Val, Len, Spelling));
  Tok.setLength(Len);

  // TODO: Form a whole lexer around this and relex the token! For now, just
  // set kind to tok::unknown.
  Tok.setKind(tok::unknown);

  return AddToken(Tok, RemapIterator(I));
}
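// Illustrative sketch (an assumption, not part of the original file): new
// text is typically introduced through AddTokenBefore, which stores the
// string in the scratch buffer as a tok::unknown token and links it into the
// token list ahead of the given position.
//
//   TokenRewriter::token_iterator I = Rewriter.token_begin();
//   Rewriter.AddTokenBefore(I, "/* inserted */");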