2018-03-22 08:53:26 +08:00
|
|
|
//===- VerifyDiagnosticConsumer.cpp - Verifying Diagnostic Client ---------===//
|
2009-11-14 11:23:19 +08:00
|
|
|
//
|
2019-01-19 16:50:56 +08:00
|
|
|
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
|
|
|
|
// See https://llvm.org/LICENSE.txt for license information.
|
|
|
|
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
|
2009-11-14 11:23:19 +08:00
|
|
|
//
|
|
|
|
//===----------------------------------------------------------------------===//
|
|
|
|
//
|
|
|
|
// This is a concrete diagnostic client, which buffers the diagnostic messages.
|
|
|
|
//
|
|
|
|
//===----------------------------------------------------------------------===//
|
|
|
|
|
2011-09-26 08:38:03 +08:00
|
|
|
#include "clang/Frontend/VerifyDiagnosticConsumer.h"
|
2013-02-09 06:30:41 +08:00
|
|
|
#include "clang/Basic/CharInfo.h"
|
2018-03-22 08:53:26 +08:00
|
|
|
#include "clang/Basic/Diagnostic.h"
|
|
|
|
#include "clang/Basic/DiagnosticOptions.h"
|
2012-12-04 17:13:33 +08:00
|
|
|
#include "clang/Basic/FileManager.h"
|
2018-03-22 08:53:26 +08:00
|
|
|
#include "clang/Basic/LLVM.h"
|
|
|
|
#include "clang/Basic/SourceLocation.h"
|
|
|
|
#include "clang/Basic/SourceManager.h"
|
|
|
|
#include "clang/Basic/TokenKinds.h"
|
2009-11-14 11:23:19 +08:00
|
|
|
#include "clang/Frontend/FrontendDiagnostic.h"
|
|
|
|
#include "clang/Frontend/TextDiagnosticBuffer.h"
|
2012-08-10 09:06:16 +08:00
|
|
|
#include "clang/Lex/HeaderSearch.h"
|
2018-03-22 08:53:26 +08:00
|
|
|
#include "clang/Lex/Lexer.h"
|
|
|
|
#include "clang/Lex/PPCallbacks.h"
|
2009-11-14 11:23:19 +08:00
|
|
|
#include "clang/Lex/Preprocessor.h"
|
2018-03-22 08:53:26 +08:00
|
|
|
#include "clang/Lex/Token.h"
|
|
|
|
#include "llvm/ADT/STLExtras.h"
|
|
|
|
#include "llvm/ADT/SmallPtrSet.h"
|
2009-11-14 11:23:19 +08:00
|
|
|
#include "llvm/ADT/SmallString.h"
|
2018-03-22 08:53:26 +08:00
|
|
|
#include "llvm/ADT/StringRef.h"
|
|
|
|
#include "llvm/ADT/Twine.h"
|
|
|
|
#include "llvm/Support/ErrorHandling.h"
|
2010-04-29 04:02:30 +08:00
|
|
|
#include "llvm/Support/Regex.h"
|
2009-11-14 11:23:19 +08:00
|
|
|
#include "llvm/Support/raw_ostream.h"
|
2018-03-22 08:53:26 +08:00
|
|
|
#include <algorithm>
|
|
|
|
#include <cassert>
|
|
|
|
#include <cstddef>
|
|
|
|
#include <cstring>
|
|
|
|
#include <iterator>
|
|
|
|
#include <memory>
|
|
|
|
#include <string>
|
|
|
|
#include <utility>
|
|
|
|
#include <vector>
|
2011-12-15 10:58:00 +08:00
|
|
|
|
2009-11-14 11:23:19 +08:00
|
|
|
using namespace clang;
|
2018-03-22 08:53:26 +08:00
|
|
|
|
|
|
|
using Directive = VerifyDiagnosticConsumer::Directive;
|
|
|
|
using DirectiveList = VerifyDiagnosticConsumer::DirectiveList;
|
|
|
|
using ExpectedData = VerifyDiagnosticConsumer::ExpectedData;
|
2009-11-14 11:23:19 +08:00
|
|
|
|
2014-10-16 14:00:55 +08:00
|
|
|
VerifyDiagnosticConsumer::VerifyDiagnosticConsumer(DiagnosticsEngine &Diags_)
    : Diags(Diags_), PrimaryClient(Diags.getClient()),
      PrimaryClientOwner(Diags.takeClient()),
      Buffer(new TextDiagnosticBuffer()), Status(HasNoDirectives) {
  // Take over as the diagnostic client, keeping the previous client around so
  // unmatched diagnostics can be forwarded to it later. If the engine already
  // has a source manager, remember it now; otherwise it is picked up lazily.
  if (Diags.hasSourceManager())
    setSourceManager(Diags.getSourceManager());
}
|
|
|
|
|
2011-09-26 08:38:03 +08:00
|
|
|
VerifyDiagnosticConsumer::~VerifyDiagnosticConsumer() {
  // All BeginSourceFile calls must have been balanced by EndSourceFile before
  // destruction; otherwise directive bookkeeping is incomplete.
  assert(!ActiveSourceFiles && "Incomplete parsing of source files!");
  assert(!CurrentPreprocessor && "CurrentPreprocessor should be invalid!");
  SrcManager = nullptr;
  // Run a final check in case EndSourceFile was never reached (e.g. an early
  // frontend error); with no source manager this only reports buffered
  // diagnostics as unexpected.
  CheckDiagnostics();
  assert(!Diags.ownsClient() &&
         "The VerifyDiagnosticConsumer takes over ownership of the client!");
}
|
|
|
|
|
2012-08-10 09:06:16 +08:00
|
|
|
#ifndef NDEBUG
|
2018-03-22 08:53:26 +08:00
|
|
|
|
2012-08-10 09:06:16 +08:00
|
|
|
namespace {
|
2018-03-22 08:53:26 +08:00
|
|
|
|
2012-08-10 09:06:16 +08:00
|
|
|
class VerifyFileTracker : public PPCallbacks {
|
2012-08-19 00:58:52 +08:00
|
|
|
VerifyDiagnosticConsumer &Verify;
|
2012-08-10 09:06:16 +08:00
|
|
|
SourceManager &SM;
|
|
|
|
|
|
|
|
public:
|
2012-08-19 00:58:52 +08:00
|
|
|
VerifyFileTracker(VerifyDiagnosticConsumer &Verify, SourceManager &SM)
|
2018-03-22 08:53:26 +08:00
|
|
|
: Verify(Verify), SM(SM) {}
|
2012-08-10 09:06:16 +08:00
|
|
|
|
2018-05-09 09:00:01 +08:00
|
|
|
/// Hook into the preprocessor and update the list of parsed
|
2012-08-10 09:06:16 +08:00
|
|
|
/// files when the preprocessor indicates a new file is entered.
|
2015-04-11 10:00:23 +08:00
|
|
|
void FileChanged(SourceLocation Loc, FileChangeReason Reason,
|
|
|
|
SrcMgr::CharacteristicKind FileType,
|
|
|
|
FileID PrevFID) override {
|
2012-08-19 00:58:52 +08:00
|
|
|
Verify.UpdateParsedFileStatus(SM, SM.getFileID(Loc),
|
|
|
|
VerifyDiagnosticConsumer::IsParsed);
|
2012-08-10 09:06:16 +08:00
|
|
|
}
|
|
|
|
};
|
2018-03-22 08:53:26 +08:00
|
|
|
|
|
|
|
} // namespace
|
|
|
|
|
2012-08-10 09:06:16 +08:00
|
|
|
#endif
|
|
|
|
|
2011-09-26 07:39:51 +08:00
|
|
|
// DiagnosticConsumer interface.
|
2009-11-14 11:23:19 +08:00
|
|
|
|
2011-09-26 08:38:03 +08:00
|
|
|
void VerifyDiagnosticConsumer::BeginSourceFile(const LangOptions &LangOpts,
|
2012-01-21 00:28:04 +08:00
|
|
|
const Preprocessor *PP) {
|
2012-08-10 09:06:16 +08:00
|
|
|
// Attach comment handler on first invocation.
|
|
|
|
if (++ActiveSourceFiles == 1) {
|
|
|
|
if (PP) {
|
|
|
|
CurrentPreprocessor = PP;
|
2012-08-19 00:58:52 +08:00
|
|
|
this->LangOpts = &LangOpts;
|
|
|
|
setSourceManager(PP->getSourceManager());
|
2018-03-22 08:53:26 +08:00
|
|
|
const_cast<Preprocessor *>(PP)->addCommentHandler(this);
|
2012-08-10 09:06:16 +08:00
|
|
|
#ifndef NDEBUG
|
2012-08-19 00:58:52 +08:00
|
|
|
// Debug build tracks parsed files.
|
2018-03-22 08:53:26 +08:00
|
|
|
const_cast<Preprocessor *>(PP)->addPPCallbacks(
|
2014-09-10 12:53:53 +08:00
|
|
|
llvm::make_unique<VerifyFileTracker>(*this, *SrcManager));
|
2012-08-10 09:06:16 +08:00
|
|
|
#endif
|
|
|
|
}
|
|
|
|
}
|
2009-11-14 11:23:19 +08:00
|
|
|
|
2012-08-10 09:06:16 +08:00
|
|
|
assert((!PP || CurrentPreprocessor == PP) && "Preprocessor changed!");
|
2009-11-14 11:23:19 +08:00
|
|
|
PrimaryClient->BeginSourceFile(LangOpts, PP);
|
|
|
|
}
|
|
|
|
|
2011-09-26 08:38:03 +08:00
|
|
|
void VerifyDiagnosticConsumer::EndSourceFile() {
|
2012-08-10 09:06:16 +08:00
|
|
|
assert(ActiveSourceFiles && "No active source files!");
|
2009-11-14 11:23:19 +08:00
|
|
|
PrimaryClient->EndSourceFile();
|
|
|
|
|
2012-08-10 09:06:16 +08:00
|
|
|
// Detach comment handler once last active source file completed.
|
|
|
|
if (--ActiveSourceFiles == 0) {
|
|
|
|
if (CurrentPreprocessor)
|
2018-03-22 08:53:26 +08:00
|
|
|
const_cast<Preprocessor *>(CurrentPreprocessor)->
|
|
|
|
removeCommentHandler(this);
|
2012-08-10 09:06:16 +08:00
|
|
|
|
|
|
|
// Check diagnostics once last file completed.
|
|
|
|
CheckDiagnostics();
|
2014-05-22 12:46:25 +08:00
|
|
|
CurrentPreprocessor = nullptr;
|
|
|
|
LangOpts = nullptr;
|
2012-08-10 09:06:16 +08:00
|
|
|
}
|
2009-11-14 11:23:19 +08:00
|
|
|
}
|
2009-11-14 15:53:24 +08:00
|
|
|
|
2011-09-26 08:38:03 +08:00
|
|
|
void VerifyDiagnosticConsumer::HandleDiagnostic(
|
2011-09-26 09:18:08 +08:00
|
|
|
DiagnosticsEngine::Level DiagLevel, const Diagnostic &Info) {
|
2013-05-04 06:58:43 +08:00
|
|
|
if (Info.hasSourceManager()) {
|
|
|
|
// If this diagnostic is for a different source manager, ignore it.
|
|
|
|
if (SrcManager && &Info.getSourceManager() != SrcManager)
|
|
|
|
return;
|
|
|
|
|
2012-08-19 00:58:52 +08:00
|
|
|
setSourceManager(Info.getSourceManager());
|
2013-05-04 06:58:43 +08:00
|
|
|
}
|
2012-08-19 00:58:52 +08:00
|
|
|
|
2012-08-10 09:06:16 +08:00
|
|
|
#ifndef NDEBUG
|
2012-08-19 00:58:52 +08:00
|
|
|
// Debug build tracks unparsed files for possible
|
|
|
|
// unparsed expected-* directives.
|
|
|
|
if (SrcManager) {
|
|
|
|
SourceLocation Loc = Info.getLocation();
|
|
|
|
if (Loc.isValid()) {
|
|
|
|
ParsedStatus PS = IsUnparsed;
|
|
|
|
|
|
|
|
Loc = SrcManager->getExpansionLoc(Loc);
|
|
|
|
FileID FID = SrcManager->getFileID(Loc);
|
|
|
|
|
|
|
|
const FileEntry *FE = SrcManager->getFileEntryForID(FID);
|
|
|
|
if (FE && CurrentPreprocessor && SrcManager->isLoadedFileID(FID)) {
|
|
|
|
// If the file is a modules header file it shall not be parsed
|
|
|
|
// for expected-* directives.
|
|
|
|
HeaderSearch &HS = CurrentPreprocessor->getHeaderSearchInfo();
|
|
|
|
if (HS.findModuleForHeader(FE))
|
|
|
|
PS = IsUnparsedNoDirectives;
|
|
|
|
}
|
|
|
|
|
|
|
|
UpdateParsedFileStatus(*SrcManager, FID, PS);
|
|
|
|
}
|
2011-07-26 03:18:12 +08:00
|
|
|
}
|
2012-08-10 09:06:16 +08:00
|
|
|
#endif
|
2012-08-19 00:58:52 +08:00
|
|
|
|
2009-11-14 11:23:19 +08:00
|
|
|
// Send the diagnostic to the buffer, we will check it once we reach the end
|
|
|
|
// of the source file (or are destructed).
|
|
|
|
Buffer->HandleDiagnostic(DiagLevel, Info);
|
|
|
|
}
|
|
|
|
|
|
|
|
//===----------------------------------------------------------------------===//
|
|
|
|
// Checking diagnostics implementation.
|
|
|
|
//===----------------------------------------------------------------------===//
|
|
|
|
|
2018-03-22 08:53:26 +08:00
|
|
|
using DiagList = TextDiagnosticBuffer::DiagList;
|
|
|
|
using const_diag_iterator = TextDiagnosticBuffer::const_iterator;
|
2009-11-14 11:23:19 +08:00
|
|
|
|
2010-04-29 04:02:30 +08:00
|
|
|
namespace {
|
|
|
|
|
|
|
|
/// StandardDirective - Directive with string matching.
|
|
|
|
class StandardDirective : public Directive {
|
|
|
|
public:
|
2012-07-10 10:57:03 +08:00
|
|
|
StandardDirective(SourceLocation DirectiveLoc, SourceLocation DiagnosticLoc,
|
2014-07-11 00:43:29 +08:00
|
|
|
bool MatchAnyLine, StringRef Text, unsigned Min,
|
|
|
|
unsigned Max)
|
2018-03-22 08:53:26 +08:00
|
|
|
: Directive(DirectiveLoc, DiagnosticLoc, MatchAnyLine, Text, Min, Max) {}
|
2010-04-29 04:02:30 +08:00
|
|
|
|
2014-03-13 14:07:04 +08:00
|
|
|
bool isValid(std::string &Error) override {
|
2010-04-29 04:02:30 +08:00
|
|
|
// all strings are considered valid; even empty ones
|
|
|
|
return true;
|
|
|
|
}
|
|
|
|
|
2014-03-13 14:07:04 +08:00
|
|
|
bool match(StringRef S) override {
|
2012-07-10 10:56:15 +08:00
|
|
|
return S.find(Text) != StringRef::npos;
|
2010-04-29 04:02:30 +08:00
|
|
|
}
|
|
|
|
};
|
|
|
|
|
|
|
|
/// RegexDirective - Directive with regular-expression matching.
|
|
|
|
class RegexDirective : public Directive {
|
|
|
|
public:
|
2012-07-10 10:57:03 +08:00
|
|
|
RegexDirective(SourceLocation DirectiveLoc, SourceLocation DiagnosticLoc,
|
2014-07-11 00:43:29 +08:00
|
|
|
bool MatchAnyLine, StringRef Text, unsigned Min, unsigned Max,
|
|
|
|
StringRef RegexStr)
|
2018-03-22 08:53:26 +08:00
|
|
|
: Directive(DirectiveLoc, DiagnosticLoc, MatchAnyLine, Text, Min, Max),
|
|
|
|
Regex(RegexStr) {}
|
2010-04-29 04:02:30 +08:00
|
|
|
|
2014-03-13 14:07:04 +08:00
|
|
|
bool isValid(std::string &Error) override {
|
2015-12-28 23:15:16 +08:00
|
|
|
return Regex.isValid(Error);
|
2010-04-29 04:02:30 +08:00
|
|
|
}
|
|
|
|
|
2014-03-13 14:07:04 +08:00
|
|
|
bool match(StringRef S) override {
|
2010-04-29 04:02:30 +08:00
|
|
|
return Regex.match(S);
|
|
|
|
}
|
2009-11-14 11:23:19 +08:00
|
|
|
|
2010-04-29 04:02:30 +08:00
|
|
|
private:
|
|
|
|
llvm::Regex Regex;
|
|
|
|
};
|
|
|
|
|
|
|
|
class ParseHelper
|
|
|
|
{
|
|
|
|
public:
|
2012-07-12 03:58:23 +08:00
|
|
|
ParseHelper(StringRef S)
|
2018-03-22 08:53:26 +08:00
|
|
|
: Begin(S.begin()), End(S.end()), C(Begin), P(Begin) {}
|
2010-04-29 04:02:30 +08:00
|
|
|
|
|
|
|
// Return true if string literal is next.
|
2011-07-23 18:55:15 +08:00
|
|
|
bool Next(StringRef S) {
|
2010-04-29 04:02:30 +08:00
|
|
|
P = C;
|
2010-09-02 01:28:48 +08:00
|
|
|
PEnd = C + S.size();
|
2010-04-29 04:02:30 +08:00
|
|
|
if (PEnd > End)
|
|
|
|
return false;
|
2018-03-22 08:53:26 +08:00
|
|
|
return memcmp(P, S.data(), S.size()) == 0;
|
2010-04-29 04:02:30 +08:00
|
|
|
}
|
|
|
|
|
|
|
|
// Return true if number is next.
|
|
|
|
// Output N only if number is next.
|
|
|
|
bool Next(unsigned &N) {
|
|
|
|
unsigned TMP = 0;
|
|
|
|
P = C;
|
|
|
|
for (; P < End && P[0] >= '0' && P[0] <= '9'; ++P) {
|
|
|
|
TMP *= 10;
|
|
|
|
TMP += P[0] - '0';
|
2009-11-14 11:23:19 +08:00
|
|
|
}
|
2010-04-29 04:02:30 +08:00
|
|
|
if (P == C)
|
|
|
|
return false;
|
|
|
|
PEnd = P;
|
|
|
|
N = TMP;
|
|
|
|
return true;
|
|
|
|
}
|
|
|
|
|
2017-12-16 10:23:22 +08:00
|
|
|
// Return true if string literal S is matched in content.
|
|
|
|
// When true, P marks begin-position of the match, and calling Advance sets C
|
|
|
|
// to end-position of the match.
|
|
|
|
// If S is the empty string, then search for any letter instead (makes sense
|
|
|
|
// with FinishDirectiveToken=true).
|
|
|
|
// If EnsureStartOfWord, then skip matches that don't start a new word.
|
|
|
|
// If FinishDirectiveToken, then assume the match is the start of a comment
|
|
|
|
// directive for -verify, and extend the match to include the entire first
|
|
|
|
// token of that directive.
|
|
|
|
bool Search(StringRef S, bool EnsureStartOfWord = false,
|
|
|
|
bool FinishDirectiveToken = false) {
|
2012-10-19 20:36:49 +08:00
|
|
|
do {
|
2017-12-16 10:23:22 +08:00
|
|
|
if (!S.empty()) {
|
|
|
|
P = std::search(C, End, S.begin(), S.end());
|
|
|
|
PEnd = P + S.size();
|
|
|
|
}
|
|
|
|
else {
|
|
|
|
P = C;
|
|
|
|
while (P != End && !isLetter(*P))
|
|
|
|
++P;
|
|
|
|
PEnd = P + 1;
|
|
|
|
}
|
2012-10-19 20:36:49 +08:00
|
|
|
if (P == End)
|
|
|
|
break;
|
2017-12-16 10:23:22 +08:00
|
|
|
// If not start of word but required, skip and search again.
|
|
|
|
if (EnsureStartOfWord
|
|
|
|
// Check if string literal starts a new word.
|
|
|
|
&& !(P == Begin || isWhitespace(P[-1])
|
|
|
|
// Or it could be preceded by the start of a comment.
|
|
|
|
|| (P > (Begin + 1) && (P[-1] == '/' || P[-1] == '*')
|
|
|
|
&& P[-2] == '/')))
|
|
|
|
continue;
|
|
|
|
if (FinishDirectiveToken) {
|
|
|
|
while (PEnd != End && (isAlphanumeric(*PEnd)
|
|
|
|
|| *PEnd == '-' || *PEnd == '_'))
|
|
|
|
++PEnd;
|
|
|
|
// Put back trailing digits and hyphens to be parsed later as a count
|
|
|
|
// or count range. Because -verify prefixes must start with letters,
|
|
|
|
// we know the actual directive we found starts with a letter, so
|
|
|
|
// we won't put back the entire directive word and thus record an empty
|
|
|
|
// string.
|
|
|
|
assert(isLetter(*P) && "-verify prefix must start with a letter");
|
|
|
|
while (isDigit(PEnd[-1]) || PEnd[-1] == '-')
|
|
|
|
--PEnd;
|
|
|
|
}
|
|
|
|
return true;
|
2012-10-19 20:36:49 +08:00
|
|
|
} while (Advance());
|
|
|
|
return false;
|
2010-04-29 04:02:30 +08:00
|
|
|
}
|
|
|
|
|
2013-12-12 07:40:50 +08:00
|
|
|
// Return true if a CloseBrace that closes the OpenBrace at the current nest
|
|
|
|
// level is found. When true, P marks begin-position of CloseBrace.
|
|
|
|
bool SearchClosingBrace(StringRef OpenBrace, StringRef CloseBrace) {
|
|
|
|
unsigned Depth = 1;
|
|
|
|
P = C;
|
|
|
|
while (P < End) {
|
|
|
|
StringRef S(P, End - P);
|
|
|
|
if (S.startswith(OpenBrace)) {
|
|
|
|
++Depth;
|
|
|
|
P += OpenBrace.size();
|
|
|
|
} else if (S.startswith(CloseBrace)) {
|
|
|
|
--Depth;
|
|
|
|
if (Depth == 0) {
|
|
|
|
PEnd = P + CloseBrace.size();
|
|
|
|
return true;
|
|
|
|
}
|
|
|
|
P += CloseBrace.size();
|
|
|
|
} else {
|
|
|
|
++P;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return false;
|
|
|
|
}
|
|
|
|
|
2010-04-29 04:02:30 +08:00
|
|
|
// Advance 1-past previous next/search.
|
|
|
|
// Behavior is undefined if previous next/search failed.
|
|
|
|
bool Advance() {
|
|
|
|
C = PEnd;
|
|
|
|
return C < End;
|
|
|
|
}
|
|
|
|
|
|
|
|
// Skip zero or more whitespace.
|
|
|
|
void SkipWhitespace() {
|
2013-02-09 06:30:41 +08:00
|
|
|
for (; C < End && isWhitespace(*C); ++C)
|
2010-04-29 04:02:30 +08:00
|
|
|
;
|
|
|
|
}
|
|
|
|
|
|
|
|
// Return true if EOF reached.
|
|
|
|
bool Done() {
|
|
|
|
return !(C < End);
|
|
|
|
}
|
|
|
|
|
2018-03-22 08:53:26 +08:00
|
|
|
// Beginning of expected content.
|
|
|
|
const char * const Begin;
|
|
|
|
|
|
|
|
// End of expected content (1-past).
|
|
|
|
const char * const End;
|
|
|
|
|
|
|
|
// Position of next char in content.
|
|
|
|
const char *C;
|
|
|
|
|
2010-04-29 04:02:30 +08:00
|
|
|
const char *P;
|
|
|
|
|
|
|
|
private:
|
2018-03-22 08:53:26 +08:00
|
|
|
// Previous next/search subject end (1-past).
|
|
|
|
const char *PEnd = nullptr;
|
2010-04-29 04:02:30 +08:00
|
|
|
};
|
|
|
|
|
2018-03-22 08:53:26 +08:00
|
|
|
} // anonymous
|
2010-04-29 04:02:30 +08:00
|
|
|
|
|
|
|
/// ParseDirective - Go through the comment and see if it indicates expected
|
|
|
|
/// diagnostics. If so, then put them in the appropriate directive list.
|
|
|
|
///
|
2012-07-12 03:58:23 +08:00
|
|
|
/// Returns true if any valid directives were found.
|
2012-08-10 09:06:16 +08:00
|
|
|
static bool ParseDirective(StringRef S, ExpectedData *ED, SourceManager &SM,
|
2013-04-17 16:06:46 +08:00
|
|
|
Preprocessor *PP, SourceLocation Pos,
|
2012-10-19 20:49:32 +08:00
|
|
|
VerifyDiagnosticConsumer::DirectiveStatus &Status) {
|
2013-04-17 16:06:46 +08:00
|
|
|
DiagnosticsEngine &Diags = PP ? PP->getDiagnostics() : SM.getDiagnostics();
|
|
|
|
|
2010-04-29 04:02:30 +08:00
|
|
|
// A single comment may contain multiple directives.
|
2012-07-12 03:58:23 +08:00
|
|
|
bool FoundDirective = false;
|
|
|
|
for (ParseHelper PH(S); !PH.Done();) {
|
2017-12-16 10:23:22 +08:00
|
|
|
// Search for the initial directive token.
|
|
|
|
// If one prefix, save time by searching only for its directives.
|
|
|
|
// Otherwise, search for any potential directive token and check it later.
|
|
|
|
const auto &Prefixes = Diags.getDiagnosticOptions().VerifyPrefixes;
|
|
|
|
if (!(Prefixes.size() == 1 ? PH.Search(*Prefixes.begin(), true, true)
|
|
|
|
: PH.Search("", true, true)))
|
2010-04-29 04:02:30 +08:00
|
|
|
break;
|
|
|
|
PH.Advance();
|
|
|
|
|
2017-12-16 10:23:22 +08:00
|
|
|
// Default directive kind.
|
|
|
|
bool RegexKind = false;
|
|
|
|
const char* KindStr = "string";
|
|
|
|
|
|
|
|
// Parse the initial directive token in reverse so we can easily determine
|
|
|
|
// its exact actual prefix. If we were to parse it from the front instead,
|
|
|
|
// it would be harder to determine where the prefix ends because there
|
|
|
|
// might be multiple matching -verify prefixes because some might prefix
|
|
|
|
// others.
|
|
|
|
StringRef DToken(PH.P, PH.C - PH.P);
|
|
|
|
|
|
|
|
// Regex in initial directive token: -re
|
|
|
|
if (DToken.endswith("-re")) {
|
|
|
|
RegexKind = true;
|
|
|
|
KindStr = "regex";
|
|
|
|
DToken = DToken.substr(0, DToken.size()-3);
|
|
|
|
}
|
2010-04-29 04:02:30 +08:00
|
|
|
|
2017-12-16 10:23:22 +08:00
|
|
|
// Type in initial directive token: -{error|warning|note|no-diagnostics}
|
2014-05-22 12:46:25 +08:00
|
|
|
DirectiveList *DL = nullptr;
|
2017-12-16 10:23:22 +08:00
|
|
|
bool NoDiag = false;
|
|
|
|
StringRef DType;
|
|
|
|
if (DToken.endswith(DType="-error"))
|
2014-05-22 12:46:25 +08:00
|
|
|
DL = ED ? &ED->Errors : nullptr;
|
2017-12-16 10:23:22 +08:00
|
|
|
else if (DToken.endswith(DType="-warning"))
|
2014-05-22 12:46:25 +08:00
|
|
|
DL = ED ? &ED->Warnings : nullptr;
|
2017-12-16 10:23:22 +08:00
|
|
|
else if (DToken.endswith(DType="-remark"))
|
2014-05-22 12:46:25 +08:00
|
|
|
DL = ED ? &ED->Remarks : nullptr;
|
2017-12-16 10:23:22 +08:00
|
|
|
else if (DToken.endswith(DType="-note"))
|
2014-05-22 12:46:25 +08:00
|
|
|
DL = ED ? &ED->Notes : nullptr;
|
2017-12-16 10:23:22 +08:00
|
|
|
else if (DToken.endswith(DType="-no-diagnostics")) {
|
|
|
|
NoDiag = true;
|
|
|
|
if (RegexKind)
|
|
|
|
continue;
|
|
|
|
}
|
|
|
|
else
|
|
|
|
continue;
|
|
|
|
DToken = DToken.substr(0, DToken.size()-DType.size());
|
|
|
|
|
|
|
|
// What's left in DToken is the actual prefix. That might not be a -verify
|
|
|
|
// prefix even if there is only one -verify prefix (for example, the full
|
|
|
|
// DToken is foo-bar-warning, but foo is the only -verify prefix).
|
|
|
|
if (!std::binary_search(Prefixes.begin(), Prefixes.end(), DToken))
|
|
|
|
continue;
|
|
|
|
|
|
|
|
if (NoDiag) {
|
2012-10-19 20:49:32 +08:00
|
|
|
if (Status == VerifyDiagnosticConsumer::HasOtherExpectedDirectives)
|
|
|
|
Diags.Report(Pos, diag::err_verify_invalid_no_diags)
|
|
|
|
<< /*IsExpectedNoDiagnostics=*/true;
|
|
|
|
else
|
|
|
|
Status = VerifyDiagnosticConsumer::HasExpectedNoDiagnostics;
|
|
|
|
continue;
|
2017-12-16 10:23:22 +08:00
|
|
|
}
|
2012-10-19 20:49:32 +08:00
|
|
|
if (Status == VerifyDiagnosticConsumer::HasExpectedNoDiagnostics) {
|
|
|
|
Diags.Report(Pos, diag::err_verify_invalid_no_diags)
|
|
|
|
<< /*IsExpectedNoDiagnostics=*/false;
|
|
|
|
continue;
|
|
|
|
}
|
|
|
|
Status = VerifyDiagnosticConsumer::HasOtherExpectedDirectives;
|
|
|
|
|
2012-08-10 09:06:16 +08:00
|
|
|
// If a directive has been found but we're not interested
|
|
|
|
// in storing the directive information, return now.
|
|
|
|
if (!DL)
|
|
|
|
return true;
|
|
|
|
|
2012-07-10 10:57:03 +08:00
|
|
|
// Next optional token: @
|
|
|
|
SourceLocation ExpectedLoc;
|
2014-07-11 00:43:29 +08:00
|
|
|
bool MatchAnyLine = false;
|
2012-07-10 10:57:03 +08:00
|
|
|
if (!PH.Next("@")) {
|
|
|
|
ExpectedLoc = Pos;
|
|
|
|
} else {
|
|
|
|
PH.Advance();
|
|
|
|
unsigned Line = 0;
|
|
|
|
bool FoundPlus = PH.Next("+");
|
|
|
|
if (FoundPlus || PH.Next("-")) {
|
|
|
|
// Relative to current line.
|
|
|
|
PH.Advance();
|
|
|
|
bool Invalid = false;
|
|
|
|
unsigned ExpectedLine = SM.getSpellingLineNumber(Pos, &Invalid);
|
|
|
|
if (!Invalid && PH.Next(Line) && (FoundPlus || Line < ExpectedLine)) {
|
|
|
|
if (FoundPlus) ExpectedLine += Line;
|
|
|
|
else ExpectedLine -= Line;
|
|
|
|
ExpectedLoc = SM.translateLineCol(SM.getFileID(Pos), ExpectedLine, 1);
|
|
|
|
}
|
2013-04-17 16:06:46 +08:00
|
|
|
} else if (PH.Next(Line)) {
|
2012-07-10 10:57:03 +08:00
|
|
|
// Absolute line number.
|
2013-04-17 16:06:46 +08:00
|
|
|
if (Line > 0)
|
2012-07-10 10:57:03 +08:00
|
|
|
ExpectedLoc = SM.translateLineCol(SM.getFileID(Pos), Line, 1);
|
2013-04-17 16:06:46 +08:00
|
|
|
} else if (PP && PH.Search(":")) {
|
|
|
|
// Specific source file.
|
|
|
|
StringRef Filename(PH.C, PH.P-PH.C);
|
|
|
|
PH.Advance();
|
|
|
|
|
|
|
|
// Lookup file via Preprocessor, like a #include.
|
|
|
|
const DirectoryLookup *CurDir;
|
2014-10-20 08:15:49 +08:00
|
|
|
const FileEntry *FE =
|
|
|
|
PP->LookupFile(Pos, Filename, false, nullptr, nullptr, CurDir,
|
Preprocessor: Suppress -Wnonportable-include-path for header maps
If a file search involves a header map, suppress
-Wnonportable-include-path. It's firing lots of false positives for
framework authors internally, and it's not trivial to fix.
Consider a framework called "Foo" with a main (installed) framework header
"Foo/Foo.h". It's atypical for "Foo.h" to actually live inside a
directory called "Foo" in the source repository. Instead, the
build system generates a header map while building the framework.
If Foo.h lives at the top-level of the source repository (common), and
the git repo is called ssh://some.url/foo.git, then the header map will
have something like:
Foo/Foo.h -> /Users/myname/code/foo/Foo.h
where "/Users/myname/code/foo" is the clone of ssh://some.url/foo.git.
After #import <Foo/Foo.h>, the current implementation of
-Wnonportable-include-path will falsely assume that Foo.h was found in a
nonportable way, because of the name of the git clone (.../foo/Foo.h).
However, that directory name was not involved in the header search at
all.
This commit adds an extra parameter to Preprocessor::LookupFile and
HeaderSearch::LookupFile to track if the search used a header map,
making it easy to suppress the warning. Longer term, once we find a way
to avoid the false positive, we should turn the warning back on.
rdar://problem/28863903
llvm-svn: 301592
2017-04-28 05:41:51 +08:00
|
|
|
nullptr, nullptr, nullptr, nullptr);
|
2013-04-17 16:06:46 +08:00
|
|
|
if (!FE) {
|
|
|
|
Diags.Report(Pos.getLocWithOffset(PH.C-PH.Begin),
|
|
|
|
diag::err_verify_missing_file) << Filename << KindStr;
|
|
|
|
continue;
|
|
|
|
}
|
|
|
|
|
|
|
|
if (SM.translateFile(FE).isInvalid())
|
|
|
|
SM.createFileID(FE, Pos, SrcMgr::C_User);
|
|
|
|
|
|
|
|
if (PH.Next(Line) && Line > 0)
|
|
|
|
ExpectedLoc = SM.translateFileLineCol(FE, Line, 1);
|
2014-07-11 00:43:29 +08:00
|
|
|
else if (PH.Next("*")) {
|
|
|
|
MatchAnyLine = true;
|
|
|
|
ExpectedLoc = SM.translateFileLineCol(FE, 1, 1);
|
|
|
|
}
|
2017-10-18 09:41:38 +08:00
|
|
|
} else if (PH.Next("*")) {
|
|
|
|
MatchAnyLine = true;
|
|
|
|
ExpectedLoc = SourceLocation();
|
2012-07-10 10:57:03 +08:00
|
|
|
}
|
|
|
|
|
2017-10-18 09:41:38 +08:00
|
|
|
if (ExpectedLoc.isInvalid() && !MatchAnyLine) {
|
2012-07-10 10:57:03 +08:00
|
|
|
Diags.Report(Pos.getLocWithOffset(PH.C-PH.Begin),
|
|
|
|
diag::err_verify_missing_line) << KindStr;
|
|
|
|
continue;
|
|
|
|
}
|
|
|
|
PH.Advance();
|
|
|
|
}
|
|
|
|
|
|
|
|
// Skip optional whitespace.
|
2010-04-29 04:02:30 +08:00
|
|
|
PH.SkipWhitespace();
|
|
|
|
|
2012-07-10 10:57:03 +08:00
|
|
|
// Next optional token: positive integer or a '+'.
|
2012-07-10 10:57:26 +08:00
|
|
|
unsigned Min = 1;
|
|
|
|
unsigned Max = 1;
|
|
|
|
if (PH.Next(Min)) {
|
2010-04-29 04:02:30 +08:00
|
|
|
PH.Advance();
|
2012-07-10 10:57:26 +08:00
|
|
|
// A positive integer can be followed by a '+' meaning min
|
|
|
|
// or more, or by a '-' meaning a range from min to max.
|
|
|
|
if (PH.Next("+")) {
|
|
|
|
Max = Directive::MaxCount;
|
|
|
|
PH.Advance();
|
|
|
|
} else if (PH.Next("-")) {
|
|
|
|
PH.Advance();
|
|
|
|
if (!PH.Next(Max) || Max < Min) {
|
|
|
|
Diags.Report(Pos.getLocWithOffset(PH.C-PH.Begin),
|
|
|
|
diag::err_verify_invalid_range) << KindStr;
|
|
|
|
continue;
|
|
|
|
}
|
|
|
|
PH.Advance();
|
|
|
|
} else {
|
|
|
|
Max = Min;
|
|
|
|
}
|
|
|
|
} else if (PH.Next("+")) {
|
|
|
|
// '+' on its own means "1 or more".
|
|
|
|
Max = Directive::MaxCount;
|
2011-12-15 10:28:16 +08:00
|
|
|
PH.Advance();
|
|
|
|
}
|
2010-04-29 04:02:30 +08:00
|
|
|
|
2012-07-10 10:57:03 +08:00
|
|
|
// Skip optional whitespace.
|
2010-04-29 04:02:30 +08:00
|
|
|
PH.SkipWhitespace();
|
|
|
|
|
2012-07-10 10:57:03 +08:00
|
|
|
// Next token: {{
|
2010-04-29 04:02:30 +08:00
|
|
|
if (!PH.Next("{{")) {
|
2012-07-10 10:56:15 +08:00
|
|
|
Diags.Report(Pos.getLocWithOffset(PH.C-PH.Begin),
|
|
|
|
diag::err_verify_missing_start) << KindStr;
|
2010-04-29 04:02:30 +08:00
|
|
|
continue;
|
|
|
|
}
|
|
|
|
PH.Advance();
|
|
|
|
const char* const ContentBegin = PH.C; // mark content begin
|
2009-11-14 11:23:19 +08:00
|
|
|
|
2012-07-10 10:57:03 +08:00
|
|
|
// Search for token: }}
|
2013-12-12 07:40:50 +08:00
|
|
|
if (!PH.SearchClosingBrace("{{", "}}")) {
|
2012-07-10 10:56:15 +08:00
|
|
|
Diags.Report(Pos.getLocWithOffset(PH.C-PH.Begin),
|
|
|
|
diag::err_verify_missing_end) << KindStr;
|
2010-04-29 04:02:30 +08:00
|
|
|
continue;
|
|
|
|
}
|
|
|
|
const char* const ContentEnd = PH.P; // mark content end
|
|
|
|
PH.Advance();
|
|
|
|
|
2012-07-10 10:57:03 +08:00
|
|
|
// Build directive text; convert \n to newlines.
|
2010-04-29 04:02:30 +08:00
|
|
|
std::string Text;
|
2011-07-23 18:55:15 +08:00
|
|
|
StringRef NewlineStr = "\\n";
|
|
|
|
StringRef Content(ContentBegin, ContentEnd-ContentBegin);
|
2010-04-29 04:02:30 +08:00
|
|
|
size_t CPos = 0;
|
|
|
|
size_t FPos;
|
2011-07-23 18:55:15 +08:00
|
|
|
while ((FPos = Content.find(NewlineStr, CPos)) != StringRef::npos) {
|
2010-04-29 04:02:30 +08:00
|
|
|
Text += Content.substr(CPos, FPos-CPos);
|
|
|
|
Text += '\n';
|
|
|
|
CPos = FPos + NewlineStr.size();
|
|
|
|
}
|
|
|
|
if (Text.empty())
|
|
|
|
Text.assign(ContentBegin, ContentEnd);
|
|
|
|
|
2013-12-14 09:07:05 +08:00
|
|
|
// Check that regex directives contain at least one regex.
|
|
|
|
if (RegexKind && Text.find("{{") == StringRef::npos) {
|
|
|
|
Diags.Report(Pos.getLocWithOffset(ContentBegin-PH.Begin),
|
|
|
|
diag::err_verify_missing_regex) << Text;
|
|
|
|
return false;
|
|
|
|
}
|
|
|
|
|
2012-07-10 10:57:03 +08:00
|
|
|
// Construct new directive.
|
2014-08-30 00:30:23 +08:00
|
|
|
std::unique_ptr<Directive> D = Directive::create(
|
|
|
|
RegexKind, Pos, ExpectedLoc, MatchAnyLine, Text, Min, Max);
|
2014-04-24 13:32:03 +08:00
|
|
|
|
2010-04-29 04:02:30 +08:00
|
|
|
std::string Error;
|
2012-07-12 03:58:23 +08:00
|
|
|
if (D->isValid(Error)) {
|
2014-08-30 00:30:23 +08:00
|
|
|
DL->push_back(std::move(D));
|
2012-07-12 03:58:23 +08:00
|
|
|
FoundDirective = true;
|
|
|
|
} else {
|
2012-07-10 10:56:15 +08:00
|
|
|
Diags.Report(Pos.getLocWithOffset(ContentBegin-PH.Begin),
|
|
|
|
diag::err_verify_invalid_content)
|
2010-04-29 04:02:30 +08:00
|
|
|
<< KindStr << Error;
|
|
|
|
}
|
2009-11-14 11:23:19 +08:00
|
|
|
}
|
2012-07-12 03:58:23 +08:00
|
|
|
|
|
|
|
return FoundDirective;
|
|
|
|
}
|
|
|
|
|
|
|
|
/// HandleComment - Hook into the preprocessor and extract comments containing
|
|
|
|
/// expected errors and warnings.
|
|
|
|
bool VerifyDiagnosticConsumer::HandleComment(Preprocessor &PP,
|
|
|
|
SourceRange Comment) {
|
|
|
|
SourceManager &SM = PP.getSourceManager();
|
2013-05-04 06:58:43 +08:00
|
|
|
|
|
|
|
// If this comment is for a different source manager, ignore it.
|
|
|
|
if (SrcManager && &SM != SrcManager)
|
|
|
|
return false;
|
|
|
|
|
2012-07-12 03:58:23 +08:00
|
|
|
SourceLocation CommentBegin = Comment.getBegin();
|
|
|
|
|
|
|
|
const char *CommentRaw = SM.getCharacterData(CommentBegin);
|
|
|
|
StringRef C(CommentRaw, SM.getCharacterData(Comment.getEnd()) - CommentRaw);
|
|
|
|
|
|
|
|
if (C.empty())
|
|
|
|
return false;
|
|
|
|
|
|
|
|
// Fold any "\<EOL>" sequences
|
|
|
|
size_t loc = C.find('\\');
|
|
|
|
if (loc == StringRef::npos) {
|
2013-04-17 16:06:46 +08:00
|
|
|
ParseDirective(C, &ED, SM, &PP, CommentBegin, Status);
|
2012-07-12 03:58:23 +08:00
|
|
|
return false;
|
|
|
|
}
|
|
|
|
|
|
|
|
std::string C2;
|
|
|
|
C2.reserve(C.size());
|
|
|
|
|
|
|
|
for (size_t last = 0;; loc = C.find('\\', last)) {
|
|
|
|
if (loc == StringRef::npos || loc == C.size()) {
|
|
|
|
C2 += C.substr(last);
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
C2 += C.substr(last, loc-last);
|
|
|
|
last = loc + 1;
|
|
|
|
|
|
|
|
if (C[last] == '\n' || C[last] == '\r') {
|
|
|
|
++last;
|
|
|
|
|
|
|
|
// Escape \r\n or \n\r, but not \n\n.
|
|
|
|
if (last < C.size())
|
|
|
|
if (C[last] == '\n' || C[last] == '\r')
|
|
|
|
if (C[last] != C[last-1])
|
|
|
|
++last;
|
|
|
|
} else {
|
|
|
|
// This was just a normal backslash.
|
|
|
|
C2 += '\\';
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
if (!C2.empty())
|
2013-04-17 16:06:46 +08:00
|
|
|
ParseDirective(C2, &ED, SM, &PP, CommentBegin, Status);
|
2012-07-12 03:58:23 +08:00
|
|
|
return false;
|
2009-11-14 11:23:19 +08:00
|
|
|
}
|
|
|
|
|
2012-08-10 09:06:16 +08:00
|
|
|
#ifndef NDEBUG
|
2018-05-09 09:00:01 +08:00
|
|
|
/// Lex the specified source file to determine whether it contains
|
2012-08-10 09:06:16 +08:00
|
|
|
/// any expected-* directives. As a Lexer is used rather than a full-blown
|
|
|
|
/// Preprocessor, directives inside skipped #if blocks will still be found.
|
|
|
|
///
|
|
|
|
/// \return true if any directives were found.
|
2012-08-19 00:58:52 +08:00
|
|
|
static bool findDirectives(SourceManager &SM, FileID FID,
                           const LangOptions &LangOpts) {
  // An invalid FileID has no content to scan.
  if (FID.isInvalid())
    return false;

  // Lex the file in raw mode but keep comments: expected-* directives only
  // ever appear inside comments, so comments are all we need to inspect.
  const llvm::MemoryBuffer *Buf = SM.getBuffer(FID);
  Lexer CommentLexer(FID, Buf, SM, LangOpts);
  CommentLexer.SetCommentRetentionState(true);

  VerifyDiagnosticConsumer::DirectiveStatus Status =
      VerifyDiagnosticConsumer::HasNoDirectives;

  // Seed the token kind so the loop condition is true on the first pass.
  Token Tok;
  Tok.setKind(tok::comment);
  while (Tok.isNot(tok::eof)) {
    CommentLexer.LexFromRawLexer(Tok);
    if (Tok.isNot(tok::comment))
      continue;

    std::string Spelling = CommentLexer.getSpelling(Tok, SM, LangOpts);
    if (Spelling.empty())
      continue;

    // Passing null ExpectedData/Preprocessor asks ParseDirective to only
    // report whether a directive is present, not to record it.
    if (ParseDirective(Spelling, nullptr, SM, nullptr, Tok.getLocation(),
                       Status))
      return true;
  }
  return false;
}
|
2012-08-10 09:06:16 +08:00
|
|
|
#endif // !NDEBUG
|
2009-11-14 11:23:19 +08:00
|
|
|
|
2018-05-09 09:00:01 +08:00
|
|
|
/// Takes a list of diagnostics that have been generated but not matched
|
2012-07-10 10:57:03 +08:00
|
|
|
/// by an expected-* directive and produces a diagnostic to the user from this.
|
|
|
|
static unsigned PrintUnexpected(DiagnosticsEngine &Diags, SourceManager *SourceMgr,
                                const_diag_iterator diag_begin,
                                const_diag_iterator diag_end,
                                const char *Kind) {
  if (diag_begin == diag_end) return 0;

  // Accumulate one multi-line summary listing every unmatched diagnostic.
  SmallString<256> Fmt;
  llvm::raw_svector_ostream OS(Fmt);
  for (const_diag_iterator I = diag_begin, E = diag_end; I != E; ++I) {
    const bool HasLocation = SourceMgr && !I->first.isInvalid();
    if (!HasLocation) {
      // No usable location: the diagnostic came from the frontend itself
      // rather than from a position in a source file.
      OS << "\n (frontend)";
    } else {
      OS << "\n ";
      FileID FID = SourceMgr->getFileID(I->first);
      if (const FileEntry *File = SourceMgr->getFileEntryForID(FID))
        OS << " File " << File->getName();
      OS << " Line " << SourceMgr->getPresumedLineNumber(I->first);
    }
    OS << ": " << I->second;
  }

  // Force emission so the report survives even if diagnostics are suppressed.
  Diags.Report(diag::err_verify_inconsistent_diags).setForceEmit()
      << Kind << /*Unexpected=*/true << OS.str();
  return std::distance(diag_begin, diag_end);
}
|
|
|
|
|
2018-05-09 09:00:01 +08:00
|
|
|
/// Takes a list of diagnostics that were expected to have been generated
|
2012-07-10 10:57:03 +08:00
|
|
|
/// but were not and produces a diagnostic to the user from this.
|
2014-08-30 00:30:23 +08:00
|
|
|
static unsigned PrintExpected(DiagnosticsEngine &Diags,
                              SourceManager &SourceMgr,
                              std::vector<Directive *> &DL, const char *Kind) {
  if (DL.empty())
    return 0;

  // Build one multi-line summary of every directive that went unmatched.
  SmallString<256> Fmt;
  llvm::raw_svector_ostream OS(Fmt);
  for (const auto *D : DL) {
    // '*' stands for "any file" / "any line" wildcards.
    OS << "\n File ";
    if (D->DiagnosticLoc.isInvalid())
      OS << '*';
    else
      OS << SourceMgr.getFilename(D->DiagnosticLoc);
    OS << " Line ";
    if (D->MatchAnyLine)
      OS << '*';
    else
      OS << SourceMgr.getPresumedLineNumber(D->DiagnosticLoc);
    // When the directive was written somewhere other than where the
    // diagnostic was expected, show the directive's own position too.
    if (D->DirectiveLoc != D->DiagnosticLoc)
      OS << " (directive at " << SourceMgr.getFilename(D->DirectiveLoc) << ':'
         << SourceMgr.getPresumedLineNumber(D->DirectiveLoc) << ')';
    OS << ": " << D->Text;
  }

  // Force emission so the report survives even if diagnostics are suppressed.
  Diags.Report(diag::err_verify_inconsistent_diags).setForceEmit()
      << Kind << /*Unexpected=*/false << OS.str();
  return DL.size();
}
|
|
|
|
|
2018-05-09 09:00:01 +08:00
|
|
|
/// Determine whether two source locations come from the same file.
|
2013-04-17 16:06:46 +08:00
|
|
|
static bool IsFromSameFile(SourceManager &SM, SourceLocation DirectiveLoc,
                           SourceLocation DiagnosticLoc) {
  // Walk the diagnostic out of any macro expansions to where it was
  // actually written by the user.
  for (; DiagnosticLoc.isMacroID();
       DiagnosticLoc = SM.getImmediateMacroCallerLoc(DiagnosticLoc))
    ;

  if (SM.isWrittenInSameFile(DirectiveLoc, DiagnosticLoc))
    return true;

  // Fall back to comparing underlying file entries, which also handles a
  // file reached through multiple FileIDs (e.g. re-entered headers).
  const FileEntry *DiagEntry =
      SM.getFileEntryForID(SM.getFileID(DiagnosticLoc));
  // A diagnostic with no backing file entry (e.g. a built-in buffer) is
  // treated as matching a directive written in the main file.
  if (!DiagEntry && SM.isWrittenInMainFile(DirectiveLoc))
    return true;

  return DiagEntry == SM.getFileEntryForID(SM.getFileID(DirectiveLoc));
}
|
|
|
|
|
2010-04-29 04:02:30 +08:00
|
|
|
/// CheckLists - Compare expected to seen diagnostic lists and return the
|
|
|
|
/// the difference between them.
|
2011-09-26 07:23:43 +08:00
|
|
|
static unsigned CheckLists(DiagnosticsEngine &Diags, SourceManager &SourceMgr,
                           const char *Label,
                           DirectiveList &Left,
                           const_diag_iterator d2_begin,
                           const_diag_iterator d2_end,
                           bool IgnoreUnexpected) {
  // Directives that could not be matched the required number of times.
  std::vector<Directive *> LeftOnly;
  // Working copy of the seen diagnostics; matched entries are erased below so
  // each seen diagnostic can satisfy at most one expected occurrence.
  DiagList Right(d2_begin, d2_end);

  for (auto &Owner : Left) {
    Directive &D = *Owner;
    unsigned LineNo1 = SourceMgr.getPresumedLineNumber(D.DiagnosticLoc);

    // A directive expects between D.Min and D.Max occurrences; try to find
    // a distinct seen diagnostic for each one up to the maximum.
    for (unsigned i = 0; i < D.Max; ++i) {
      DiagList::iterator II, IE;
      for (II = Right.begin(), IE = Right.end(); II != IE; ++II) {
        // Unless the directive matches any line, the seen diagnostic must
        // be on the expected (presumed) line.
        if (!D.MatchAnyLine) {
          unsigned LineNo2 = SourceMgr.getPresumedLineNumber(II->first);
          if (LineNo1 != LineNo2)
            continue;
        }

        // An invalid DiagnosticLoc matches any file; otherwise the seen
        // diagnostic must originate from the directive's target file.
        if (!D.DiagnosticLoc.isInvalid() &&
            !IsFromSameFile(SourceMgr, D.DiagnosticLoc, II->first))
          continue;

        const std::string &RightText = II->second;
        if (D.match(RightText))
          break;
      }
      if (II == IE) {
        // Not found.
        // Occurrences beyond D.Min are optional, so stop quietly; below the
        // minimum this is a missing expected diagnostic.
        if (i >= D.Min) break;
        LeftOnly.push_back(&D);
      } else {
        // Found. The same cannot be found twice.
        Right.erase(II);
      }
    }
  }
  // Now all that's left in Right are those that were not matched.
  unsigned num = PrintExpected(Diags, SourceMgr, LeftOnly, Label);
  if (!IgnoreUnexpected)
    num += PrintUnexpected(Diags, &SourceMgr, Right.begin(), Right.end(), Label);
  return num;
}
|
|
|
|
|
|
|
|
/// CheckResults - This compares the expected results to those that
|
|
|
|
/// were actually reported. It emits any discrepencies. Return "true" if there
|
|
|
|
/// were problems. Return "false" otherwise.
|
2011-09-26 07:23:43 +08:00
|
|
|
static unsigned CheckResults(DiagnosticsEngine &Diags, SourceManager &SourceMgr,
|
2009-11-14 11:23:19 +08:00
|
|
|
const TextDiagnosticBuffer &Buffer,
|
2010-04-29 04:02:30 +08:00
|
|
|
ExpectedData &ED) {
|
2009-11-14 11:23:19 +08:00
|
|
|
// We want to capture the delta between what was expected and what was
|
|
|
|
// seen.
|
|
|
|
//
|
|
|
|
// Expected \ Seen - set expected but not seen
|
|
|
|
// Seen \ Expected - set seen but not expected
|
|
|
|
unsigned NumProblems = 0;
|
|
|
|
|
2015-06-13 15:11:40 +08:00
|
|
|
const DiagnosticLevelMask DiagMask =
|
|
|
|
Diags.getDiagnosticOptions().getVerifyIgnoreUnexpected();
|
|
|
|
|
2009-11-14 11:23:19 +08:00
|
|
|
// See if there are error mismatches.
|
2010-04-29 04:02:30 +08:00
|
|
|
NumProblems += CheckLists(Diags, SourceMgr, "error", ED.Errors,
|
2015-06-13 15:11:40 +08:00
|
|
|
Buffer.err_begin(), Buffer.err_end(),
|
|
|
|
bool(DiagnosticLevelMask::Error & DiagMask));
|
2009-11-14 15:53:24 +08:00
|
|
|
|
2009-11-14 11:23:19 +08:00
|
|
|
// See if there are warning mismatches.
|
2010-04-29 04:02:30 +08:00
|
|
|
NumProblems += CheckLists(Diags, SourceMgr, "warning", ED.Warnings,
|
2015-06-13 15:11:40 +08:00
|
|
|
Buffer.warn_begin(), Buffer.warn_end(),
|
|
|
|
bool(DiagnosticLevelMask::Warning & DiagMask));
|
2009-11-14 11:23:19 +08:00
|
|
|
|
2014-05-01 22:06:01 +08:00
|
|
|
// See if there are remark mismatches.
|
|
|
|
NumProblems += CheckLists(Diags, SourceMgr, "remark", ED.Remarks,
|
2015-06-13 15:11:40 +08:00
|
|
|
Buffer.remark_begin(), Buffer.remark_end(),
|
|
|
|
bool(DiagnosticLevelMask::Remark & DiagMask));
|
2014-05-01 22:06:01 +08:00
|
|
|
|
2009-11-14 11:23:19 +08:00
|
|
|
// See if there are note mismatches.
|
2010-04-29 04:02:30 +08:00
|
|
|
NumProblems += CheckLists(Diags, SourceMgr, "note", ED.Notes,
|
2015-06-13 15:11:40 +08:00
|
|
|
Buffer.note_begin(), Buffer.note_end(),
|
|
|
|
bool(DiagnosticLevelMask::Note & DiagMask));
|
2009-11-14 11:23:19 +08:00
|
|
|
|
|
|
|
return NumProblems;
|
|
|
|
}
|
|
|
|
|
2012-08-19 00:58:52 +08:00
|
|
|
void VerifyDiagnosticConsumer::UpdateParsedFileStatus(SourceManager &SM,
                                                      FileID FID,
                                                      ParsedStatus PS) {
  // Record (and sanity-check) the SourceManager in use.
  setSourceManager(SM);

#ifndef NDEBUG
  // The bookkeeping below only exists to support the debug-build check in
  // CheckDiagnostics() for directives in files that were never parsed.
  if (FID.isInvalid())
    return;

  const FileEntry *Entry = SM.getFileEntryForID(FID);

  if (PS == IsParsed) {
    // A parsed file is no longer a candidate for the "unparsed" check.
    UnparsedFiles.erase(FID);
    ParsedFiles.insert(std::make_pair(FID, Entry));
    return;
  }

  // Only track an unparsed file the first time we see it.
  if (ParsedFiles.count(FID) || UnparsedFiles.count(FID))
    return;

  // Determine whether the file contains any expected-* directives; without
  // language options we conservatively assume it does.
  bool FoundDirectives =
      PS != IsUnparsedNoDirectives &&
      (!LangOpts || findDirectives(SM, FID, *LangOpts));

  UnparsedFiles.insert(
      std::make_pair(FID, UnparsedFileStatus(Entry, FoundDirectives)));
#endif
}
|
|
|
|
|
2011-09-26 08:38:03 +08:00
|
|
|
void VerifyDiagnosticConsumer::CheckDiagnostics() {
|
2009-11-14 11:23:19 +08:00
|
|
|
// Ensure any diagnostics go to the primary client.
|
2014-11-18 07:46:02 +08:00
|
|
|
DiagnosticConsumer *CurClient = Diags.getClient();
|
|
|
|
std::unique_ptr<DiagnosticConsumer> Owner = Diags.takeClient();
|
2011-09-13 09:26:44 +08:00
|
|
|
Diags.setClient(PrimaryClient, false);
|
2009-11-14 11:23:19 +08:00
|
|
|
|
2012-08-10 09:06:16 +08:00
|
|
|
#ifndef NDEBUG
|
2012-08-19 00:58:52 +08:00
|
|
|
// In a debug build, scan through any files that may have been missed
|
|
|
|
// during parsing and issue a fatal error if directives are contained
|
|
|
|
// within these files. If a fatal error occurs, this suggests that
|
|
|
|
// this file is being parsed separately from the main file, in which
|
|
|
|
// case consider moving the directives to the correct place, if this
|
|
|
|
// is applicable.
|
2018-03-22 08:53:26 +08:00
|
|
|
if (!UnparsedFiles.empty()) {
|
2012-08-19 00:58:52 +08:00
|
|
|
// Generate a cache of parsed FileEntry pointers for alias lookups.
|
|
|
|
llvm::SmallPtrSet<const FileEntry *, 8> ParsedFileCache;
|
2018-03-22 08:53:26 +08:00
|
|
|
for (const auto &I : ParsedFiles)
|
|
|
|
if (const FileEntry *FE = I.second)
|
2012-08-19 00:58:52 +08:00
|
|
|
ParsedFileCache.insert(FE);
|
|
|
|
|
|
|
|
// Iterate through list of unparsed files.
|
2018-03-22 08:53:26 +08:00
|
|
|
for (const auto &I : UnparsedFiles) {
|
|
|
|
const UnparsedFileStatus &Status = I.second;
|
2012-08-19 00:58:52 +08:00
|
|
|
const FileEntry *FE = Status.getFile();
|
|
|
|
|
|
|
|
// Skip files that have been parsed via an alias.
|
|
|
|
if (FE && ParsedFileCache.count(FE))
|
2012-08-10 09:06:16 +08:00
|
|
|
continue;
|
|
|
|
|
2012-08-19 00:58:52 +08:00
|
|
|
// Report a fatal error if this file contained directives.
|
|
|
|
if (Status.foundDirectives()) {
|
2012-08-10 09:06:16 +08:00
|
|
|
llvm::report_fatal_error(Twine("-verify directives found after rather"
|
|
|
|
" than during normal parsing of ",
|
2012-08-19 00:58:52 +08:00
|
|
|
StringRef(FE ? FE->getName() : "(unknown)")));
|
|
|
|
}
|
2011-08-24 21:36:19 +08:00
|
|
|
}
|
2009-11-14 11:23:19 +08:00
|
|
|
|
2012-08-19 00:58:52 +08:00
|
|
|
// UnparsedFiles has been processed now, so clear it.
|
|
|
|
UnparsedFiles.clear();
|
|
|
|
}
|
|
|
|
#endif // !NDEBUG
|
|
|
|
|
|
|
|
if (SrcManager) {
|
2012-10-19 20:49:32 +08:00
|
|
|
// Produce an error if no expected-* directives could be found in the
|
|
|
|
// source file(s) processed.
|
|
|
|
if (Status == HasNoDirectives) {
|
|
|
|
Diags.Report(diag::err_verify_no_directives).setForceEmit();
|
|
|
|
++NumErrors;
|
|
|
|
Status = HasNoDirectivesReported;
|
|
|
|
}
|
|
|
|
|
2009-11-14 15:53:24 +08:00
|
|
|
// Check that the expected diagnostics occurred.
|
2012-08-19 00:58:52 +08:00
|
|
|
NumErrors += CheckResults(Diags, *SrcManager, *Buffer, ED);
|
2009-11-14 15:53:24 +08:00
|
|
|
} else {
|
2015-06-13 15:11:40 +08:00
|
|
|
const DiagnosticLevelMask DiagMask =
|
|
|
|
~Diags.getDiagnosticOptions().getVerifyIgnoreUnexpected();
|
|
|
|
if (bool(DiagnosticLevelMask::Error & DiagMask))
|
|
|
|
NumErrors += PrintUnexpected(Diags, nullptr, Buffer->err_begin(),
|
|
|
|
Buffer->err_end(), "error");
|
|
|
|
if (bool(DiagnosticLevelMask::Warning & DiagMask))
|
|
|
|
NumErrors += PrintUnexpected(Diags, nullptr, Buffer->warn_begin(),
|
|
|
|
Buffer->warn_end(), "warn");
|
|
|
|
if (bool(DiagnosticLevelMask::Remark & DiagMask))
|
|
|
|
NumErrors += PrintUnexpected(Diags, nullptr, Buffer->remark_begin(),
|
|
|
|
Buffer->remark_end(), "remark");
|
|
|
|
if (bool(DiagnosticLevelMask::Note & DiagMask))
|
|
|
|
NumErrors += PrintUnexpected(Diags, nullptr, Buffer->note_begin(),
|
|
|
|
Buffer->note_end(), "note");
|
2009-11-14 15:53:24 +08:00
|
|
|
}
|
2009-11-14 11:23:19 +08:00
|
|
|
|
2014-11-18 07:46:02 +08:00
|
|
|
Diags.setClient(CurClient, Owner.release() != nullptr);
|
2009-11-14 11:23:19 +08:00
|
|
|
|
|
|
|
// Reset the buffer, we have processed all the diagnostics in it.
|
|
|
|
Buffer.reset(new TextDiagnosticBuffer());
|
2014-04-24 13:39:55 +08:00
|
|
|
ED.Reset();
|
2009-11-14 11:23:19 +08:00
|
|
|
}
|
2010-04-29 04:02:30 +08:00
|
|
|
|
2014-08-30 00:30:23 +08:00
|
|
|
std::unique_ptr<Directive> Directive::create(bool RegexKind,
|
|
|
|
SourceLocation DirectiveLoc,
|
|
|
|
SourceLocation DiagnosticLoc,
|
|
|
|
bool MatchAnyLine, StringRef Text,
|
|
|
|
unsigned Min, unsigned Max) {
|
2013-12-14 09:07:05 +08:00
|
|
|
if (!RegexKind)
|
2014-08-30 00:30:23 +08:00
|
|
|
return llvm::make_unique<StandardDirective>(DirectiveLoc, DiagnosticLoc,
|
|
|
|
MatchAnyLine, Text, Min, Max);
|
2013-12-12 07:40:50 +08:00
|
|
|
|
|
|
|
// Parse the directive into a regular expression.
|
|
|
|
std::string RegexStr;
|
|
|
|
StringRef S = Text;
|
|
|
|
while (!S.empty()) {
|
|
|
|
if (S.startswith("{{")) {
|
|
|
|
S = S.drop_front(2);
|
|
|
|
size_t RegexMatchLength = S.find("}}");
|
|
|
|
assert(RegexMatchLength != StringRef::npos);
|
|
|
|
// Append the regex, enclosed in parentheses.
|
|
|
|
RegexStr += "(";
|
|
|
|
RegexStr.append(S.data(), RegexMatchLength);
|
|
|
|
RegexStr += ")";
|
|
|
|
S = S.drop_front(RegexMatchLength + 2);
|
|
|
|
} else {
|
|
|
|
size_t VerbatimMatchLength = S.find("{{");
|
|
|
|
if (VerbatimMatchLength == StringRef::npos)
|
|
|
|
VerbatimMatchLength = S.size();
|
|
|
|
// Escape and append the fixed string.
|
2013-12-12 08:27:31 +08:00
|
|
|
RegexStr += llvm::Regex::escape(S.substr(0, VerbatimMatchLength));
|
2013-12-12 07:40:50 +08:00
|
|
|
S = S.drop_front(VerbatimMatchLength);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2014-08-30 00:30:23 +08:00
|
|
|
return llvm::make_unique<RegexDirective>(
|
|
|
|
DirectiveLoc, DiagnosticLoc, MatchAnyLine, Text, Min, Max, RegexStr);
|
2010-04-29 04:02:30 +08:00
|
|
|
}
|