//===--- CommentParser.cpp - Doxygen comment parser -----------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//

#include "clang/AST/CommentParser.h"
|
2012-08-09 08:03:17 +08:00
|
|
|
#include "clang/AST/CommentCommandTraits.h"
|
2012-12-04 17:13:33 +08:00
|
|
|
#include "clang/AST/CommentDiagnostic.h"
|
|
|
|
#include "clang/AST/CommentSema.h"
|
2013-02-09 23:16:58 +08:00
|
|
|
#include "clang/Basic/CharInfo.h"
|
2012-07-12 05:38:39 +08:00
|
|
|
#include "clang/Basic/SourceManager.h"
|
2012-07-06 08:28:32 +08:00
|
|
|
#include "llvm/Support/ErrorHandling.h"
|
|
|
|
|
|
|
|
namespace clang {
|
2013-08-24 02:03:40 +08:00
|
|
|
|
|
|
|
/// Returns true if \p S consists entirely of whitespace characters
/// (as classified by the char overload of isWhitespace); an empty
/// string is considered all-whitespace.
static inline bool isWhitespace(llvm::StringRef S) {
  for (const char C : S)
    if (!isWhitespace(C))
      return false;
  return true;
}
|
|
|
|
|
2012-07-06 08:28:32 +08:00
|
|
|
namespace comments {
|
|
|
|
|
2012-07-25 01:43:18 +08:00
|
|
|
/// Re-lexes a sequence of tok::text tokens.
///
/// Block-command arguments live inside ordinary text tokens, so this class
/// pulls text tokens from the parser on demand and re-lexes their characters
/// into words and delimited sequences.  Unconsumed characters/tokens can be
/// handed back to the parser via putBackLeftoverTokens().
class TextTokenRetokenizer {
  llvm::BumpPtrAllocator &Allocator;
  Parser &P;

  /// This flag is set when there are no more tokens we can fetch from lexer.
  bool NoMoreInterestingTokens;

  /// Token buffer: tokens we have processed and lookahead.
  SmallVector<Token, 16> Toks;

  /// A position in \c Toks.
  struct Position {
    const char *BufferStart;
    const char *BufferEnd;
    const char *BufferPtr;
    SourceLocation BufferStartLoc;
    unsigned CurToken;
  };

  /// Current position in Toks.
  Position Pos;

  bool isEnd() const {
    return Pos.CurToken >= Toks.size();
  }

  /// Sets up the buffer pointers to point to current token.
  void setupBuffer() {
    assert(!isEnd());
    const Token &Tok = Toks[Pos.CurToken];

    Pos.BufferStart = Tok.getText().begin();
    Pos.BufferEnd = Tok.getText().end();
    Pos.BufferPtr = Pos.BufferStart;
    Pos.BufferStartLoc = Tok.getLocation();
  }

  /// Source location of the character at the current buffer position,
  /// derived as an offset from the current token's start location.
  SourceLocation getSourceLocation() const {
    const unsigned CharNo = Pos.BufferPtr - Pos.BufferStart;
    return Pos.BufferStartLoc.getLocWithOffset(CharNo);
  }

  /// Look at the current character without consuming it.
  char peek() const {
    assert(!isEnd());
    assert(Pos.BufferPtr != Pos.BufferEnd);
    return *Pos.BufferPtr;
  }

  /// Consume one character; when the current token is exhausted, advance to
  /// the next buffered token, fetching a new one from the parser if needed.
  void consumeChar() {
    assert(!isEnd());
    assert(Pos.BufferPtr != Pos.BufferEnd);
    Pos.BufferPtr++;
    if (Pos.BufferPtr == Pos.BufferEnd) {
      Pos.CurToken++;
      // If the buffer ran dry and the lexer has nothing interesting left,
      // leave Pos at end; callers check isEnd() before peeking again.
      if (isEnd() && !addToken())
        return;

      assert(!isEnd());
      setupBuffer();
    }
  }

  /// Add a token.
  /// Returns true on success, false if there are no interesting tokens to
  /// fetch from lexer.
  bool addToken() {
    if (NoMoreInterestingTokens)
      return false;

    if (P.Tok.is(tok::newline)) {
      // If we see a single newline token between text tokens, skip it.
      Token Newline = P.Tok;
      P.consumeToken();
      if (P.Tok.isNot(tok::text)) {
        P.putBack(Newline);
        NoMoreInterestingTokens = true;
        return false;
      }
    }
    if (P.Tok.isNot(tok::text)) {
      NoMoreInterestingTokens = true;
      return false;
    }

    Toks.push_back(P.Tok);
    P.consumeToken();
    // First token added: initialize the character buffer over it.
    if (Toks.size() == 1)
      setupBuffer();
    return true;
  }

  /// Skip over any run of whitespace characters at the current position.
  void consumeWhitespace() {
    while (!isEnd()) {
      if (isWhitespace(peek()))
        consumeChar();
      else
        break;
    }
  }

  /// Build a tok::text token covering [TokBegin, TokBegin + TokLength) with
  /// the given (allocator-owned) text.
  void formTokenWithChars(Token &Result,
                          SourceLocation Loc,
                          const char *TokBegin,
                          unsigned TokLength,
                          StringRef Text) {
    Result.setLocation(Loc);
    Result.setKind(tok::text);
    Result.setLength(TokLength);
#ifndef NDEBUG
    // Poison the token payload in debug builds so stale reads are obvious.
    Result.TextPtr = "<UNSET>";
    Result.IntVal = 7;
#endif
    Result.setText(Text);
  }

public:
  TextTokenRetokenizer(llvm::BumpPtrAllocator &Allocator, Parser &P):
      Allocator(Allocator), P(P), NoMoreInterestingTokens(false) {
    Pos.CurToken = 0;
    addToken();
  }

  /// Extract a word -- sequence of non-whitespace characters.
  bool lexWord(Token &Tok) {
    if (isEnd())
      return false;

    Position SavedPos = Pos;

    consumeWhitespace();
    SmallString<32> WordText;
    const char *WordBegin = Pos.BufferPtr;
    SourceLocation Loc = getSourceLocation();
    while (!isEnd()) {
      const char C = peek();
      if (!isWhitespace(C)) {
        WordText.push_back(C);
        consumeChar();
      } else
        break;
    }
    const unsigned Length = WordText.size();
    // No word found: restore the position so nothing is consumed.
    if (Length == 0) {
      Pos = SavedPos;
      return false;
    }

    // Copy the word into allocator-owned storage (the SmallString is local).
    char *TextPtr = Allocator.Allocate<char>(Length + 1);

    memcpy(TextPtr, WordText.c_str(), Length + 1);
    StringRef Text = StringRef(TextPtr, Length);

    formTokenWithChars(Tok, Loc, WordBegin, Length, Text);
    return true;
  }

  /// Extract a sequence delimited by OpenDelim/CloseDelim (e.g. "[in]").
  /// On failure the position is restored and false is returned.
  bool lexDelimitedSeq(Token &Tok, char OpenDelim, char CloseDelim) {
    if (isEnd())
      return false;

    Position SavedPos = Pos;

    consumeWhitespace();
    SmallString<32> WordText;
    const char *WordBegin = Pos.BufferPtr;
    SourceLocation Loc = getSourceLocation();
    bool Error = false;
    // Require the opening delimiter to be the very next character.
    if (!isEnd()) {
      const char C = peek();
      if (C == OpenDelim) {
        WordText.push_back(C);
        consumeChar();
      } else
        Error = true;
    }
    // Consume up to and including the closing delimiter.
    char C = '\0';
    while (!Error && !isEnd()) {
      C = peek();
      WordText.push_back(C);
      consumeChar();
      if (C == CloseDelim)
        break;
    }
    // Ran out of characters without seeing the closing delimiter.
    if (!Error && C != CloseDelim)
      Error = true;

    if (Error) {
      Pos = SavedPos;
      return false;
    }

    const unsigned Length = WordText.size();
    char *TextPtr = Allocator.Allocate<char>(Length + 1);

    memcpy(TextPtr, WordText.c_str(), Length + 1);
    StringRef Text = StringRef(TextPtr, Length);

    formTokenWithChars(Tok, Loc, WordBegin,
                       Pos.BufferPtr - WordBegin, Text);
    return true;
  }

  /// Put back tokens that we didn't consume.
  void putBackLeftoverTokens() {
    if (isEnd())
      return;

    bool HavePartialTok = false;
    Token PartialTok;
    // If we stopped mid-token, synthesize a text token for the remainder.
    if (Pos.BufferPtr != Pos.BufferStart) {
      formTokenWithChars(PartialTok, getSourceLocation(),
                         Pos.BufferPtr, Pos.BufferEnd - Pos.BufferPtr,
                         StringRef(Pos.BufferPtr,
                                   Pos.BufferEnd - Pos.BufferPtr));
      HavePartialTok = true;
      Pos.CurToken++;
    }

    // Return whole unconsumed tokens first, then the partial remainder so it
    // is the next token the parser sees.
    P.putBack(llvm::makeArrayRef(Toks.begin() + Pos.CurToken, Toks.end()));
    Pos.CurToken = Toks.size();

    if (HavePartialTok)
      P.putBack(PartialTok);
  }
};
|
|
|
|
|
2012-07-12 05:38:39 +08:00
|
|
|
Parser::Parser(Lexer &L, Sema &S, llvm::BumpPtrAllocator &Allocator,
               const SourceManager &SourceMgr, DiagnosticsEngine &Diags,
               const CommandTraits &Traits):
    L(L), S(S), Allocator(Allocator), SourceMgr(SourceMgr), Diags(Diags),
    Traits(Traits) {
  // Prime Tok with the first token so parse* entry points can look ahead.
  consumeToken();
}
|
|
|
|
|
2012-08-07 03:03:12 +08:00
|
|
|
/// Parse the arguments of a \\param-like command: an optional direction
/// specification followed by the parameter name, both reported to Sema.
void Parser::parseParamCommandArgs(ParamCommandComment *PC,
                                   TextTokenRetokenizer &Retokenizer) {
  Token Arg;
  // Check if argument looks like direction specification: [dir]
  // e.g., [in], [out], [in,out]
  if (Retokenizer.lexDelimitedSeq(Arg, '[', ']'))
    S.actOnParamCommandDirectionArg(PC,
                                    Arg.getLocation(),
                                    Arg.getEndLocation(),
                                    Arg.getText());

  // The next word, if any, is the parameter name.
  if (Retokenizer.lexWord(Arg))
    S.actOnParamCommandParamNameArg(PC,
                                    Arg.getLocation(),
                                    Arg.getEndLocation(),
                                    Arg.getText());
}
|
|
|
|
|
2012-08-07 03:03:12 +08:00
|
|
|
/// Parse the single argument of a \\tparam command: the template
/// parameter name, reported to Sema if present.
void Parser::parseTParamCommandArgs(TParamCommandComment *TPC,
                                    TextTokenRetokenizer &Retokenizer) {
  Token Arg;
  if (Retokenizer.lexWord(Arg))
    S.actOnTParamCommandParamNameArg(TPC,
                                     Arg.getLocation(),
                                     Arg.getEndLocation(),
                                     Arg.getText());
}
|
|
|
|
|
2012-08-07 03:03:12 +08:00
|
|
|
/// Parse up to \p NumArgs word arguments for a generic block command and
/// report them to Sema.  Fewer words than NumArgs is allowed; only the
/// successfully parsed prefix is passed on.
void Parser::parseBlockCommandArgs(BlockCommandComment *BC,
                                   TextTokenRetokenizer &Retokenizer,
                                   unsigned NumArgs) {
  typedef BlockCommandComment::Argument Argument;
  Argument *Args =
      new (Allocator.Allocate<Argument>(NumArgs)) Argument[NumArgs];
  unsigned ParsedArgs = 0;
  Token Arg;
  while (ParsedArgs < NumArgs && Retokenizer.lexWord(Arg)) {
    Args[ParsedArgs] = Argument(SourceRange(Arg.getLocation(),
                                            Arg.getEndLocation()),
                                Arg.getText());
    ParsedArgs++;
  }

  S.actOnBlockCommandArgs(BC, llvm::makeArrayRef(Args, ParsedArgs));
}
|
|
|
|
|
|
|
|
/// Parse a block command (\\brief, \\param, \\tparam, ...) together with its
/// arguments and the paragraph that forms its body.  Exactly one of the
/// PC/TPC/BC comment nodes is created depending on the command kind.
BlockCommandComment *Parser::parseBlockCommand() {
  assert(Tok.is(tok::backslash_command) || Tok.is(tok::at_command));

  ParamCommandComment *PC = nullptr;
  TParamCommandComment *TPC = nullptr;
  BlockCommandComment *BC = nullptr;
  const CommandInfo *Info = Traits.getCommandInfo(Tok.getCommandID());
  CommandMarkerKind CommandMarker =
      Tok.is(tok::backslash_command) ? CMK_Backslash : CMK_At;
  if (Info->IsParamCommand) {
    PC = S.actOnParamCommandStart(Tok.getLocation(),
                                  Tok.getEndLocation(),
                                  Tok.getCommandID(),
                                  CommandMarker);
  } else if (Info->IsTParamCommand) {
    TPC = S.actOnTParamCommandStart(Tok.getLocation(),
                                    Tok.getEndLocation(),
                                    Tok.getCommandID(),
                                    CommandMarker);
  } else {
    BC = S.actOnBlockCommandStart(Tok.getLocation(),
                                  Tok.getEndLocation(),
                                  Tok.getCommandID(),
                                  CommandMarker);
  }
  consumeToken();

  if (isTokBlockCommand()) {
    // Block command ahead. We can't nest block commands, so pretend that this
    // command has an empty argument.
    ParagraphComment *Paragraph = S.actOnParagraphComment(None);
    if (PC) {
      S.actOnParamCommandFinish(PC, Paragraph);
      return PC;
    } else if (TPC) {
      S.actOnTParamCommandFinish(TPC, Paragraph);
      return TPC;
    } else {
      S.actOnBlockCommandFinish(BC, Paragraph);
      return BC;
    }
  }

  if (PC || TPC || Info->NumArgs > 0) {
    // In order to parse command arguments we need to retokenize a few
    // following text tokens.
    TextTokenRetokenizer Retokenizer(Allocator, *this);

    if (PC)
      parseParamCommandArgs(PC, Retokenizer);
    else if (TPC)
      parseTParamCommandArgs(TPC, Retokenizer);
    else
      parseBlockCommandArgs(BC, Retokenizer, Info->NumArgs);

    // Return any characters/tokens the retokenizer did not consume.
    Retokenizer.putBackLeftoverTokens();
  }

  // If there's a block command ahead, we will attach an empty paragraph to
  // this command.
  bool EmptyParagraph = false;
  if (isTokBlockCommand())
    EmptyParagraph = true;
  else if (Tok.is(tok::newline)) {
    // One-token lookahead past the newline to see if a block command follows.
    Token PrevTok = Tok;
    consumeToken();
    EmptyParagraph = isTokBlockCommand();
    putBack(PrevTok);
  }

  ParagraphComment *Paragraph;
  if (EmptyParagraph)
    Paragraph = S.actOnParagraphComment(None);
  else {
    BlockContentComment *Block = parseParagraphOrBlockCommand();
    // Since we have checked for a block command, we should have parsed a
    // paragraph.
    Paragraph = cast<ParagraphComment>(Block);
  }

  if (PC) {
    S.actOnParamCommandFinish(PC, Paragraph);
    return PC;
  } else if (TPC) {
    S.actOnTParamCommandFinish(TPC, Paragraph);
    return TPC;
  } else {
    S.actOnBlockCommandFinish(BC, Paragraph);
    return BC;
  }
}
|
|
|
|
|
|
|
|
/// Parse an inline command (e.g. \\c, \\p) and its single optional word
/// argument; unused retokenized text is returned to the token stream.
InlineCommandComment *Parser::parseInlineCommand() {
  assert(Tok.is(tok::backslash_command) || Tok.is(tok::at_command));

  const Token CommandTok = Tok;
  consumeToken();

  TextTokenRetokenizer Retokenizer(Allocator, *this);

  Token ArgTok;
  bool ArgTokValid = Retokenizer.lexWord(ArgTok);

  InlineCommandComment *IC;
  if (ArgTokValid) {
    IC = S.actOnInlineCommand(CommandTok.getLocation(),
                              CommandTok.getEndLocation(),
                              CommandTok.getCommandID(),
                              ArgTok.getLocation(),
                              ArgTok.getEndLocation(),
                              ArgTok.getText());
  } else {
    // No argument word found: build the command node without an argument.
    IC = S.actOnInlineCommand(CommandTok.getLocation(),
                              CommandTok.getEndLocation(),
                              CommandTok.getCommandID());
  }

  Retokenizer.putBackLeftoverTokens();

  return IC;
}
|
|
|
|
|
2012-07-13 08:44:24 +08:00
|
|
|
/// Parse an HTML start tag: attributes (ident, or ident=quoted-string) up to
/// a closing '>' or '/>'.  Malformed input is diagnosed and recovered from;
/// a (possibly incomplete) HTMLStartTagComment is always returned.
HTMLStartTagComment *Parser::parseHTMLStartTag() {
  assert(Tok.is(tok::html_start_tag));
  HTMLStartTagComment *HST =
      S.actOnHTMLStartTagStart(Tok.getLocation(),
                               Tok.getHTMLTagStartName());
  consumeToken();

  SmallVector<HTMLStartTagComment::Attribute, 2> Attrs;
  while (true) {
    switch (Tok.getKind()) {
    case tok::html_ident: {
      Token Ident = Tok;
      consumeToken();
      if (Tok.isNot(tok::html_equals)) {
        // Bare attribute without a value.
        Attrs.push_back(HTMLStartTagComment::Attribute(Ident.getLocation(),
                                                       Ident.getHTMLIdent()));
        continue;
      }
      Token Equals = Tok;
      consumeToken();
      if (Tok.isNot(tok::html_quoted_string)) {
        // '=' without a quoted value: diagnose, keep the bare attribute, and
        // skip stray '='/string tokens to resynchronize.
        Diag(Tok.getLocation(),
             diag::warn_doc_html_start_tag_expected_quoted_string)
          << SourceRange(Equals.getLocation());
        Attrs.push_back(HTMLStartTagComment::Attribute(Ident.getLocation(),
                                                       Ident.getHTMLIdent()));
        while (Tok.is(tok::html_equals) ||
               Tok.is(tok::html_quoted_string))
          consumeToken();
        continue;
      }
      // Well-formed ident="value" attribute.
      Attrs.push_back(HTMLStartTagComment::Attribute(
                              Ident.getLocation(),
                              Ident.getHTMLIdent(),
                              Equals.getLocation(),
                              SourceRange(Tok.getLocation(),
                                          Tok.getEndLocation()),
                              Tok.getHTMLQuotedString()));
      consumeToken();
      continue;
    }

    case tok::html_greater:
      S.actOnHTMLStartTagFinish(HST,
                                S.copyArray(llvm::makeArrayRef(Attrs)),
                                Tok.getLocation(),
                                /* IsSelfClosing = */ false);
      consumeToken();
      return HST;

    case tok::html_slash_greater:
      S.actOnHTMLStartTagFinish(HST,
                                S.copyArray(llvm::makeArrayRef(Attrs)),
                                Tok.getLocation(),
                                /* IsSelfClosing = */ true);
      consumeToken();
      return HST;

    case tok::html_equals:
    case tok::html_quoted_string:
      // '=' or string with no preceding identifier: diagnose and skip them.
      Diag(Tok.getLocation(),
           diag::warn_doc_html_start_tag_expected_ident_or_greater);
      while (Tok.is(tok::html_equals) ||
             Tok.is(tok::html_quoted_string))
        consumeToken();
      if (Tok.is(tok::html_ident) ||
          Tok.is(tok::html_greater) ||
          Tok.is(tok::html_slash_greater))
        continue;

      // Could not resynchronize: finish the tag without a closing location.
      S.actOnHTMLStartTagFinish(HST,
                                S.copyArray(llvm::makeArrayRef(Attrs)),
                                SourceLocation(),
                                /* IsSelfClosing = */ false);
      return HST;

    default:
      // Not a token from an HTML start tag. Thus HTML tag prematurely ended.
      S.actOnHTMLStartTagFinish(HST,
                                S.copyArray(llvm::makeArrayRef(Attrs)),
                                SourceLocation(),
                                /* IsSelfClosing = */ false);
      bool StartLineInvalid;
      const unsigned StartLine = SourceMgr.getPresumedLineNumber(
                                                  HST->getLocation(),
                                                  &StartLineInvalid);
      bool EndLineInvalid;
      const unsigned EndLine = SourceMgr.getPresumedLineNumber(
                                                  Tok.getLocation(),
                                                  &EndLineInvalid);
      // Emit an extra "started here" note only when the tag spans lines, so
      // the user can find the opening of a multi-line tag.
      if (StartLineInvalid || EndLineInvalid || StartLine == EndLine)
        Diag(Tok.getLocation(),
             diag::warn_doc_html_start_tag_expected_ident_or_greater)
          << HST->getSourceRange();
      else {
        Diag(Tok.getLocation(),
             diag::warn_doc_html_start_tag_expected_ident_or_greater);
        Diag(HST->getLocation(), diag::note_doc_html_tag_started_here)
          << HST->getSourceRange();
      }
      return HST;
    }
  }
}
|
|
|
|
|
2012-07-13 08:44:24 +08:00
|
|
|
/// Parse an HTML end tag (</name>, with an optionally missing '>').
HTMLEndTagComment *Parser::parseHTMLEndTag() {
  assert(Tok.is(tok::html_end_tag));

  const Token EndTagTok = Tok;
  consumeToken();

  // The closing '>' may be absent; in that case GreaterLoc stays invalid.
  SourceLocation GreaterLoc;
  if (Tok.is(tok::html_greater)) {
    GreaterLoc = Tok.getLocation();
    consumeToken();
  }

  return S.actOnHTMLEndTag(EndTagTok.getLocation(),
                           GreaterLoc,
                           EndTagTok.getHTMLTagEndName());
}
|
|
|
|
|
|
|
|
/// Parse a paragraph of inline content, or -- if a block command is the very
/// first token -- delegate to parseBlockCommand().  A paragraph ends at two
/// consecutive newlines (possibly separated by whitespace-only text), at a
/// following block command, or at EOF.
BlockContentComment *Parser::parseParagraphOrBlockCommand() {
  SmallVector<InlineContentComment *, 8> Content;

  while (true) {
    switch (Tok.getKind()) {
    case tok::verbatim_block_begin:
    case tok::verbatim_line_name:
    case tok::eof:
      assert(Content.size() != 0);
      break; // Block content or EOF ahead, finish this paragraph.

    case tok::unknown_command:
      Content.push_back(S.actOnUnknownCommand(Tok.getLocation(),
                                              Tok.getEndLocation(),
                                              Tok.getUnknownCommandName()));
      consumeToken();
      continue;

    case tok::backslash_command:
    case tok::at_command: {
      const CommandInfo *Info = Traits.getCommandInfo(Tok.getCommandID());
      if (Info->IsBlockCommand) {
        // A block command at the start means this is not a plain paragraph.
        if (Content.size() == 0)
          return parseBlockCommand();
        break; // Block command ahead, finish this paragraph.
      }
      if (Info->IsVerbatimBlockEndCommand) {
        // Stray end-of-verbatim command with no matching start: diagnose,
        // drop it, and keep parsing the paragraph.
        Diag(Tok.getLocation(),
             diag::warn_verbatim_block_end_without_start)
          << Tok.is(tok::at_command)
          << Info->Name
          << SourceRange(Tok.getLocation(), Tok.getEndLocation());
        consumeToken();
        continue;
      }
      if (Info->IsUnknownCommand) {
        Content.push_back(S.actOnUnknownCommand(Tok.getLocation(),
                                                Tok.getEndLocation(),
                                                Info->getID()));
        consumeToken();
        continue;
      }
      assert(Info->IsInlineCommand);
      Content.push_back(parseInlineCommand());
      continue;
    }

    case tok::newline: {
      consumeToken();
      if (Tok.is(tok::newline) || Tok.is(tok::eof)) {
        consumeToken();
        break; // Two newlines -- end of paragraph.
      }
      // Also allow [tok::newline, tok::text, tok::newline] if the middle
      // tok::text is just whitespace.
      if (Tok.is(tok::text) && isWhitespace(Tok.getText())) {
        Token WhitespaceTok = Tok;
        consumeToken();
        if (Tok.is(tok::newline) || Tok.is(tok::eof)) {
          consumeToken();
          break;
        }
        // We have [tok::newline, tok::text, non-newline]. Put back tok::text.
        putBack(WhitespaceTok);
      }
      if (Content.size() > 0)
        Content.back()->addTrailingNewline();
      continue;
    }

    // Don't deal with HTML tag soup now.
    case tok::html_start_tag:
      Content.push_back(parseHTMLStartTag());
      continue;

    case tok::html_end_tag:
      Content.push_back(parseHTMLEndTag());
      continue;

    case tok::text:
      Content.push_back(S.actOnText(Tok.getLocation(),
                                    Tok.getEndLocation(),
                                    Tok.getText()));
      consumeToken();
      continue;

    case tok::verbatim_block_line:
    case tok::verbatim_block_end:
    case tok::verbatim_line_text:
    case tok::html_ident:
    case tok::html_equals:
    case tok::html_quoted_string:
    case tok::html_greater:
    case tok::html_slash_greater:
      llvm_unreachable("should not see this token");
    }
    // A 'break' inside the switch lands here and terminates the paragraph.
    break;
  }

  return S.actOnParagraphComment(S.copyArray(llvm::makeArrayRef(Content)));
}
|
|
|
|
|
|
|
|
/// Parse a verbatim block: the opening command, a sequence of verbatim lines
/// (including empty ones), and -- if present -- the closing command.  An
/// unterminated block is finished with an invalid end location.
VerbatimBlockComment *Parser::parseVerbatimBlock() {
  assert(Tok.is(tok::verbatim_block_begin));

  VerbatimBlockComment *VB =
      S.actOnVerbatimBlockStart(Tok.getLocation(),
                                Tok.getVerbatimBlockID());
  consumeToken();

  // Don't create an empty line if verbatim opening command is followed
  // by a newline.
  if (Tok.is(tok::newline))
    consumeToken();

  SmallVector<VerbatimBlockLineComment *, 8> Lines;
  while (Tok.is(tok::verbatim_block_line) ||
         Tok.is(tok::newline)) {
    VerbatimBlockLineComment *Line;
    if (Tok.is(tok::verbatim_block_line)) {
      Line = S.actOnVerbatimBlockLine(Tok.getLocation(),
                                      Tok.getVerbatimBlockText());
      consumeToken();
      // Swallow the newline terminating this verbatim line, if any.
      if (Tok.is(tok::newline)) {
        consumeToken();
      }
    } else {
      // Empty line, just a tok::newline.
      Line = S.actOnVerbatimBlockLine(Tok.getLocation(), "");
      consumeToken();
    }
    Lines.push_back(Line);
  }

  if (Tok.is(tok::verbatim_block_end)) {
    const CommandInfo *Info = Traits.getCommandInfo(Tok.getVerbatimBlockID());
    S.actOnVerbatimBlockFinish(VB, Tok.getLocation(),
                               Info->Name,
                               S.copyArray(llvm::makeArrayRef(Lines)));
    consumeToken();
  } else {
    // Unterminated \\verbatim block
    S.actOnVerbatimBlockFinish(VB, SourceLocation(), "",
                               S.copyArray(llvm::makeArrayRef(Lines)));
  }

  return VB;
}
|
|
|
|
|
|
|
|
/// Parse a verbatim-line command: the command name token followed by the
/// rest-of-line text (which may be absent at a newline or comment end).
VerbatimLineComment *Parser::parseVerbatimLine() {
  assert(Tok.is(tok::verbatim_line_name));

  const Token NameTok = Tok;
  consumeToken();

  // Next token might not be a tok::verbatim_line_text if verbatim line
  // starting command comes just before a newline or comment end.
  const bool HasText = Tok.is(tok::verbatim_line_text);
  const SourceLocation TextBegin =
      HasText ? Tok.getLocation() : NameTok.getEndLocation();
  const StringRef Text = HasText ? Tok.getVerbatimLineText() : StringRef("");

  VerbatimLineComment *VL = S.actOnVerbatimLine(NameTok.getLocation(),
                                                NameTok.getVerbatimLineID(),
                                                TextBegin,
                                                Text);
  consumeToken();
  return VL;
}
|
|
|
|
|
|
|
|
/// Dispatch on the current token to parse one block-level piece of content:
/// a paragraph (or block command), a verbatim block, or a verbatim line.
BlockContentComment *Parser::parseBlockContent() {
  switch (Tok.getKind()) {
  case tok::text:
  case tok::unknown_command:
  case tok::backslash_command:
  case tok::at_command:
  case tok::html_start_tag:
  case tok::html_end_tag:
    return parseParagraphOrBlockCommand();

  case tok::verbatim_block_begin:
    return parseVerbatimBlock();

  case tok::verbatim_line_name:
    return parseVerbatimLine();

  // These tokens are consumed by the specific sub-parsers above and must
  // never reach block-content dispatch.
  case tok::eof:
  case tok::newline:
  case tok::verbatim_block_line:
  case tok::verbatim_block_end:
  case tok::verbatim_line_text:
  case tok::html_ident:
  case tok::html_equals:
  case tok::html_quoted_string:
  case tok::html_greater:
  case tok::html_slash_greater:
    llvm_unreachable("should not see this token");
  }
  llvm_unreachable("bogus token kind");
}
|
|
|
|
|
|
|
|
/// Parse the entire comment: a sequence of block-content pieces separated
/// (and optionally preceded) by newline tokens, terminated by EOF.
FullComment *Parser::parseFullComment() {
  // Consume any run of newline tokens (leading, or between paragraphs).
  auto SkipNewlines = [this] {
    while (Tok.is(tok::newline))
      consumeToken();
  };

  SkipNewlines();

  SmallVector<BlockContentComment *, 8> Blocks;
  while (Tok.isNot(tok::eof)) {
    Blocks.push_back(parseBlockContent());
    SkipNewlines();
  }

  return S.actOnFullComment(S.copyArray(llvm::makeArrayRef(Blocks)));
}
|
|
|
|
|
|
|
|
} // end namespace comments
|
|
|
|
} // end namespace clang
|