2018-03-22 08:53:26 +08:00
|
|
|
//===- VerifyDiagnosticConsumer.cpp - Verifying Diagnostic Client ---------===//
|
2009-11-14 11:23:19 +08:00
|
|
|
//
|
2019-01-19 16:50:56 +08:00
|
|
|
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
|
|
|
|
// See https://llvm.org/LICENSE.txt for license information.
|
|
|
|
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
|
2009-11-14 11:23:19 +08:00
|
|
|
//
|
|
|
|
//===----------------------------------------------------------------------===//
|
|
|
|
//
|
|
|
|
// This is a concrete diagnostic client, which buffers the diagnostic messages.
|
|
|
|
//
|
|
|
|
//===----------------------------------------------------------------------===//
|
|
|
|
|
2011-09-26 08:38:03 +08:00
|
|
|
#include "clang/Frontend/VerifyDiagnosticConsumer.h"
|
2013-02-09 06:30:41 +08:00
|
|
|
#include "clang/Basic/CharInfo.h"
|
2018-03-22 08:53:26 +08:00
|
|
|
#include "clang/Basic/Diagnostic.h"
|
|
|
|
#include "clang/Basic/DiagnosticOptions.h"
|
2012-12-04 17:13:33 +08:00
|
|
|
#include "clang/Basic/FileManager.h"
|
2018-03-22 08:53:26 +08:00
|
|
|
#include "clang/Basic/LLVM.h"
|
|
|
|
#include "clang/Basic/SourceLocation.h"
|
|
|
|
#include "clang/Basic/SourceManager.h"
|
|
|
|
#include "clang/Basic/TokenKinds.h"
|
2009-11-14 11:23:19 +08:00
|
|
|
#include "clang/Frontend/FrontendDiagnostic.h"
|
|
|
|
#include "clang/Frontend/TextDiagnosticBuffer.h"
|
2012-08-10 09:06:16 +08:00
|
|
|
#include "clang/Lex/HeaderSearch.h"
|
2018-03-22 08:53:26 +08:00
|
|
|
#include "clang/Lex/Lexer.h"
|
|
|
|
#include "clang/Lex/PPCallbacks.h"
|
2009-11-14 11:23:19 +08:00
|
|
|
#include "clang/Lex/Preprocessor.h"
|
2018-03-22 08:53:26 +08:00
|
|
|
#include "clang/Lex/Token.h"
|
|
|
|
#include "llvm/ADT/STLExtras.h"
|
|
|
|
#include "llvm/ADT/SmallPtrSet.h"
|
2009-11-14 11:23:19 +08:00
|
|
|
#include "llvm/ADT/SmallString.h"
|
2018-03-22 08:53:26 +08:00
|
|
|
#include "llvm/ADT/StringRef.h"
|
|
|
|
#include "llvm/ADT/Twine.h"
|
|
|
|
#include "llvm/Support/ErrorHandling.h"
|
2010-04-29 04:02:30 +08:00
|
|
|
#include "llvm/Support/Regex.h"
|
2009-11-14 11:23:19 +08:00
|
|
|
#include "llvm/Support/raw_ostream.h"
|
2018-03-22 08:53:26 +08:00
|
|
|
#include <algorithm>
|
|
|
|
#include <cassert>
|
|
|
|
#include <cstddef>
|
|
|
|
#include <cstring>
|
|
|
|
#include <iterator>
|
|
|
|
#include <memory>
|
|
|
|
#include <string>
|
|
|
|
#include <utility>
|
|
|
|
#include <vector>
|
2011-12-15 10:58:00 +08:00
|
|
|
|
2009-11-14 11:23:19 +08:00
|
|
|
using namespace clang;
|
2018-03-22 08:53:26 +08:00
|
|
|
|
|
|
|
using Directive = VerifyDiagnosticConsumer::Directive;
|
|
|
|
using DirectiveList = VerifyDiagnosticConsumer::DirectiveList;
|
|
|
|
using ExpectedData = VerifyDiagnosticConsumer::ExpectedData;
|
2009-11-14 11:23:19 +08:00
|
|
|
|
2012-08-10 09:06:16 +08:00
|
|
|
#ifndef NDEBUG
|
2018-03-22 08:53:26 +08:00
|
|
|
|
2012-08-10 09:06:16 +08:00
|
|
|
namespace {
|
2018-03-22 08:53:26 +08:00
|
|
|
|
2012-08-10 09:06:16 +08:00
|
|
|
/// Debug-build-only PPCallbacks hook that records, via
/// VerifyDiagnosticConsumer::UpdateParsedFileStatus, every file the
/// preprocessor actually enters, marking it as parsed.
class VerifyFileTracker : public PPCallbacks {
  // Consumer whose per-file parse status we keep up to date.
  VerifyDiagnosticConsumer &Verify;
  // Source manager used to map locations back to FileIDs.
  SourceManager &SM;

public:
  VerifyFileTracker(VerifyDiagnosticConsumer &Verify, SourceManager &SM)
      : Verify(Verify), SM(SM) {}

  /// Hook into the preprocessor and update the list of parsed
  /// files when the preprocessor indicates a new file is entered.
  void FileChanged(SourceLocation Loc, FileChangeReason Reason,
                   SrcMgr::CharacteristicKind FileType,
                   FileID PrevFID) override {
    // Mark the file containing Loc as parsed, regardless of the reason
    // for the change (enter/exit/rename are all evidence of parsing).
    Verify.UpdateParsedFileStatus(SM, SM.getFileID(Loc),
                                  VerifyDiagnosticConsumer::IsParsed);
  }
};
|
2018-03-22 08:53:26 +08:00
|
|
|
|
|
|
|
} // namespace
|
|
|
|
|
2012-08-10 09:06:16 +08:00
|
|
|
#endif
|
|
|
|
|
2009-11-14 11:23:19 +08:00
|
|
|
//===----------------------------------------------------------------------===//
|
|
|
|
// Checking diagnostics implementation.
|
|
|
|
//===----------------------------------------------------------------------===//
|
|
|
|
|
2018-03-22 08:53:26 +08:00
|
|
|
using DiagList = TextDiagnosticBuffer::DiagList;
|
|
|
|
using const_diag_iterator = TextDiagnosticBuffer::const_iterator;
|
2009-11-14 11:23:19 +08:00
|
|
|
|
2010-04-29 04:02:30 +08:00
|
|
|
namespace {
|
|
|
|
|
|
|
|
/// StandardDirective - Directive with string matching.
|
|
|
|
class StandardDirective : public Directive {
|
|
|
|
public:
|
2012-07-10 10:57:03 +08:00
|
|
|
StandardDirective(SourceLocation DirectiveLoc, SourceLocation DiagnosticLoc,
|
2014-07-11 00:43:29 +08:00
|
|
|
bool MatchAnyLine, StringRef Text, unsigned Min,
|
|
|
|
unsigned Max)
|
2018-03-22 08:53:26 +08:00
|
|
|
: Directive(DirectiveLoc, DiagnosticLoc, MatchAnyLine, Text, Min, Max) {}
|
2010-04-29 04:02:30 +08:00
|
|
|
|
2014-03-13 14:07:04 +08:00
|
|
|
bool isValid(std::string &Error) override {
|
2010-04-29 04:02:30 +08:00
|
|
|
// all strings are considered valid; even empty ones
|
|
|
|
return true;
|
|
|
|
}
|
|
|
|
|
2014-03-13 14:07:04 +08:00
|
|
|
bool match(StringRef S) override {
|
2012-07-10 10:56:15 +08:00
|
|
|
return S.find(Text) != StringRef::npos;
|
2010-04-29 04:02:30 +08:00
|
|
|
}
|
|
|
|
};
|
|
|
|
|
|
|
|
/// RegexDirective - Directive with regular-expression matching.
|
|
|
|
class RegexDirective : public Directive {
|
|
|
|
public:
|
2012-07-10 10:57:03 +08:00
|
|
|
RegexDirective(SourceLocation DirectiveLoc, SourceLocation DiagnosticLoc,
|
2014-07-11 00:43:29 +08:00
|
|
|
bool MatchAnyLine, StringRef Text, unsigned Min, unsigned Max,
|
|
|
|
StringRef RegexStr)
|
2018-03-22 08:53:26 +08:00
|
|
|
: Directive(DirectiveLoc, DiagnosticLoc, MatchAnyLine, Text, Min, Max),
|
|
|
|
Regex(RegexStr) {}
|
2010-04-29 04:02:30 +08:00
|
|
|
|
2014-03-13 14:07:04 +08:00
|
|
|
bool isValid(std::string &Error) override {
|
2015-12-28 23:15:16 +08:00
|
|
|
return Regex.isValid(Error);
|
2010-04-29 04:02:30 +08:00
|
|
|
}
|
|
|
|
|
2014-03-13 14:07:04 +08:00
|
|
|
bool match(StringRef S) override {
|
2010-04-29 04:02:30 +08:00
|
|
|
return Regex.match(S);
|
|
|
|
}
|
2009-11-14 11:23:19 +08:00
|
|
|
|
2010-04-29 04:02:30 +08:00
|
|
|
private:
|
|
|
|
llvm::Regex Regex;
|
|
|
|
};
|
|
|
|
|
|
|
|
class ParseHelper
|
|
|
|
{
|
|
|
|
public:
|
2012-07-12 03:58:23 +08:00
|
|
|
ParseHelper(StringRef S)
|
2018-03-22 08:53:26 +08:00
|
|
|
: Begin(S.begin()), End(S.end()), C(Begin), P(Begin) {}
|
2010-04-29 04:02:30 +08:00
|
|
|
|
|
|
|
// Return true if string literal is next.
|
2011-07-23 18:55:15 +08:00
|
|
|
bool Next(StringRef S) {
|
2010-04-29 04:02:30 +08:00
|
|
|
P = C;
|
2010-09-02 01:28:48 +08:00
|
|
|
PEnd = C + S.size();
|
2010-04-29 04:02:30 +08:00
|
|
|
if (PEnd > End)
|
|
|
|
return false;
|
2018-03-22 08:53:26 +08:00
|
|
|
return memcmp(P, S.data(), S.size()) == 0;
|
2010-04-29 04:02:30 +08:00
|
|
|
}
|
|
|
|
|
|
|
|
// Return true if number is next.
|
|
|
|
// Output N only if number is next.
|
|
|
|
bool Next(unsigned &N) {
|
|
|
|
unsigned TMP = 0;
|
|
|
|
P = C;
|
2019-04-13 12:33:39 +08:00
|
|
|
PEnd = P;
|
|
|
|
for (; PEnd < End && *PEnd >= '0' && *PEnd <= '9'; ++PEnd) {
|
2010-04-29 04:02:30 +08:00
|
|
|
TMP *= 10;
|
2019-04-13 12:33:39 +08:00
|
|
|
TMP += *PEnd - '0';
|
2009-11-14 11:23:19 +08:00
|
|
|
}
|
2019-04-13 12:33:39 +08:00
|
|
|
if (PEnd == C)
|
2010-04-29 04:02:30 +08:00
|
|
|
return false;
|
|
|
|
N = TMP;
|
|
|
|
return true;
|
|
|
|
}
|
|
|
|
|
2019-04-13 12:33:39 +08:00
|
|
|
// Return true if a marker is next.
|
|
|
|
// A marker is the longest match for /#[A-Za-z0-9_-]+/.
|
|
|
|
bool NextMarker() {
|
|
|
|
P = C;
|
|
|
|
if (P == End || *P != '#')
|
|
|
|
return false;
|
|
|
|
PEnd = P;
|
|
|
|
++PEnd;
|
|
|
|
while ((isAlphanumeric(*PEnd) || *PEnd == '-' || *PEnd == '_') &&
|
|
|
|
PEnd < End)
|
|
|
|
++PEnd;
|
|
|
|
return PEnd > P + 1;
|
|
|
|
}
|
|
|
|
|
2017-12-16 10:23:22 +08:00
|
|
|
// Return true if string literal S is matched in content.
|
|
|
|
// When true, P marks begin-position of the match, and calling Advance sets C
|
|
|
|
// to end-position of the match.
|
|
|
|
// If S is the empty string, then search for any letter instead (makes sense
|
|
|
|
// with FinishDirectiveToken=true).
|
|
|
|
// If EnsureStartOfWord, then skip matches that don't start a new word.
|
|
|
|
// If FinishDirectiveToken, then assume the match is the start of a comment
|
|
|
|
// directive for -verify, and extend the match to include the entire first
|
|
|
|
// token of that directive.
|
|
|
|
bool Search(StringRef S, bool EnsureStartOfWord = false,
|
|
|
|
bool FinishDirectiveToken = false) {
|
2012-10-19 20:36:49 +08:00
|
|
|
do {
|
2017-12-16 10:23:22 +08:00
|
|
|
if (!S.empty()) {
|
|
|
|
P = std::search(C, End, S.begin(), S.end());
|
|
|
|
PEnd = P + S.size();
|
|
|
|
}
|
|
|
|
else {
|
|
|
|
P = C;
|
|
|
|
while (P != End && !isLetter(*P))
|
|
|
|
++P;
|
|
|
|
PEnd = P + 1;
|
|
|
|
}
|
2012-10-19 20:36:49 +08:00
|
|
|
if (P == End)
|
|
|
|
break;
|
2017-12-16 10:23:22 +08:00
|
|
|
// If not start of word but required, skip and search again.
|
|
|
|
if (EnsureStartOfWord
|
|
|
|
// Check if string literal starts a new word.
|
|
|
|
&& !(P == Begin || isWhitespace(P[-1])
|
|
|
|
// Or it could be preceded by the start of a comment.
|
|
|
|
|| (P > (Begin + 1) && (P[-1] == '/' || P[-1] == '*')
|
|
|
|
&& P[-2] == '/')))
|
|
|
|
continue;
|
|
|
|
if (FinishDirectiveToken) {
|
|
|
|
while (PEnd != End && (isAlphanumeric(*PEnd)
|
|
|
|
|| *PEnd == '-' || *PEnd == '_'))
|
|
|
|
++PEnd;
|
|
|
|
// Put back trailing digits and hyphens to be parsed later as a count
|
|
|
|
// or count range. Because -verify prefixes must start with letters,
|
|
|
|
// we know the actual directive we found starts with a letter, so
|
|
|
|
// we won't put back the entire directive word and thus record an empty
|
|
|
|
// string.
|
|
|
|
assert(isLetter(*P) && "-verify prefix must start with a letter");
|
|
|
|
while (isDigit(PEnd[-1]) || PEnd[-1] == '-')
|
|
|
|
--PEnd;
|
|
|
|
}
|
|
|
|
return true;
|
2012-10-19 20:36:49 +08:00
|
|
|
} while (Advance());
|
|
|
|
return false;
|
2010-04-29 04:02:30 +08:00
|
|
|
}
|
|
|
|
|
2013-12-12 07:40:50 +08:00
|
|
|
// Return true if a CloseBrace that closes the OpenBrace at the current nest
|
|
|
|
// level is found. When true, P marks begin-position of CloseBrace.
|
|
|
|
bool SearchClosingBrace(StringRef OpenBrace, StringRef CloseBrace) {
|
|
|
|
unsigned Depth = 1;
|
|
|
|
P = C;
|
|
|
|
while (P < End) {
|
|
|
|
StringRef S(P, End - P);
|
|
|
|
if (S.startswith(OpenBrace)) {
|
|
|
|
++Depth;
|
|
|
|
P += OpenBrace.size();
|
|
|
|
} else if (S.startswith(CloseBrace)) {
|
|
|
|
--Depth;
|
|
|
|
if (Depth == 0) {
|
|
|
|
PEnd = P + CloseBrace.size();
|
|
|
|
return true;
|
|
|
|
}
|
|
|
|
P += CloseBrace.size();
|
|
|
|
} else {
|
|
|
|
++P;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return false;
|
|
|
|
}
|
|
|
|
|
2010-04-29 04:02:30 +08:00
|
|
|
// Advance 1-past previous next/search.
|
|
|
|
// Behavior is undefined if previous next/search failed.
|
|
|
|
bool Advance() {
|
|
|
|
C = PEnd;
|
|
|
|
return C < End;
|
|
|
|
}
|
|
|
|
|
2019-04-13 12:33:39 +08:00
|
|
|
// Return the text matched by the previous next/search.
|
|
|
|
// Behavior is undefined if previous next/search failed.
|
|
|
|
StringRef Match() { return StringRef(P, PEnd - P); }
|
|
|
|
|
2010-04-29 04:02:30 +08:00
|
|
|
// Skip zero or more whitespace.
|
|
|
|
void SkipWhitespace() {
|
2013-02-09 06:30:41 +08:00
|
|
|
for (; C < End && isWhitespace(*C); ++C)
|
2010-04-29 04:02:30 +08:00
|
|
|
;
|
|
|
|
}
|
|
|
|
|
|
|
|
// Return true if EOF reached.
|
|
|
|
bool Done() {
|
|
|
|
return !(C < End);
|
|
|
|
}
|
|
|
|
|
2018-03-22 08:53:26 +08:00
|
|
|
// Beginning of expected content.
|
|
|
|
const char * const Begin;
|
|
|
|
|
|
|
|
// End of expected content (1-past).
|
|
|
|
const char * const End;
|
|
|
|
|
|
|
|
// Position of next char in content.
|
|
|
|
const char *C;
|
|
|
|
|
2019-04-13 12:33:39 +08:00
|
|
|
// Previous next/search subject start.
|
2010-04-29 04:02:30 +08:00
|
|
|
const char *P;
|
|
|
|
|
|
|
|
private:
|
2018-03-22 08:53:26 +08:00
|
|
|
// Previous next/search subject end (1-past).
|
|
|
|
const char *PEnd = nullptr;
|
2010-04-29 04:02:30 +08:00
|
|
|
};
|
|
|
|
|
2019-04-13 12:33:39 +08:00
|
|
|
// The information necessary to create a directive. A directive parsed from a
// comment is first collected into this aggregate; it becomes a real Directive
// (via attachDirective) only once its expected source line is known — which
// may be deferred when the directive refers to a not-yet-seen marker.
struct UnattachedDirective {
  // Destination list (errors/warnings/remarks/notes); null when the caller
  // is not interested in recording directives.
  DirectiveList *DL = nullptr;
  // True for expected-*-re directives (regex matching).
  bool RegexKind = false;
  // Location of the directive itself and of its {{...}} content.
  SourceLocation DirectivePos, ContentBegin;
  // Expected diagnostic text (with \n sequences already expanded).
  std::string Text;
  // Expected occurrence count range; defaults to exactly one.
  unsigned Min = 1, Max = 1;
};
|
|
|
|
|
|
|
|
// Attach the specified directive to the line of code indicated by
|
|
|
|
// \p ExpectedLoc.
|
|
|
|
void attachDirective(DiagnosticsEngine &Diags, const UnattachedDirective &UD,
|
|
|
|
SourceLocation ExpectedLoc, bool MatchAnyLine = false) {
|
|
|
|
// Construct new directive.
|
|
|
|
std::unique_ptr<Directive> D =
|
|
|
|
Directive::create(UD.RegexKind, UD.DirectivePos, ExpectedLoc,
|
|
|
|
MatchAnyLine, UD.Text, UD.Min, UD.Max);
|
|
|
|
|
|
|
|
std::string Error;
|
|
|
|
if (!D->isValid(Error)) {
|
|
|
|
Diags.Report(UD.ContentBegin, diag::err_verify_invalid_content)
|
|
|
|
<< (UD.RegexKind ? "regex" : "string") << Error;
|
|
|
|
}
|
|
|
|
|
|
|
|
UD.DL->push_back(std::move(D));
|
|
|
|
}
|
|
|
|
|
2018-03-22 08:53:26 +08:00
|
|
|
} // anonymous
|
2010-04-29 04:02:30 +08:00
|
|
|
|
2019-04-13 12:33:39 +08:00
|
|
|
// Tracker for markers in the input files. A marker is a comment of the form
//
//   n = 123; // #123
//
// ... that can be referred to by a later expected-* directive:
//
//   // expected-error@#123 {{undeclared identifier 'n'}}
//
// Marker declarations must be at the start of a comment or preceded by
// whitespace to distinguish them from uses of markers in directives.
class VerifyDiagnosticConsumer::MarkerTracker {
  DiagnosticsEngine &Diags;

  // Everything we know about one marker name: where it was (re)defined and
  // where it was first used by a directive.
  struct Marker {
    SourceLocation DefLoc;
    SourceLocation RedefLoc;
    SourceLocation UseLoc;
  };
  llvm::StringMap<Marker> Markers;

  // Directives that couldn't be created yet because they name an unknown
  // marker.
  llvm::StringMap<llvm::SmallVector<UnattachedDirective, 2>> DeferredDirectives;

public:
  MarkerTracker(DiagnosticsEngine &Diags) : Diags(Diags) {}

  // Register a marker.
  void addMarker(StringRef MarkerName, SourceLocation Pos) {
    // insert() is a no-op if the name already exists; the bool in the result
    // distinguishes first definition from redefinition.
    auto InsertResult = Markers.insert(
        {MarkerName, Marker{Pos, SourceLocation(), SourceLocation()}});

    Marker &M = InsertResult.first->second;
    if (!InsertResult.second) {
      // Marker was redefined. Remember where, so finalize() can diagnose it
      // if the marker is ever used (ambiguous reference).
      M.RedefLoc = Pos;
    } else {
      // First definition: build any deferred directives that were waiting
      // for this marker to appear.
      auto Deferred = DeferredDirectives.find(MarkerName);
      if (Deferred != DeferredDirectives.end()) {
        for (auto &UD : Deferred->second) {
          if (M.UseLoc.isInvalid())
            M.UseLoc = UD.DirectivePos;
          attachDirective(Diags, UD, Pos);
        }
        DeferredDirectives.erase(Deferred);
      }
    }
  }

  // Register a directive at the specified marker.
  void addDirective(StringRef MarkerName, const UnattachedDirective &UD) {
    auto MarkerIt = Markers.find(MarkerName);
    if (MarkerIt != Markers.end()) {
      // Marker already defined: attach the directive to its definition line
      // immediately, recording the first use location for diagnostics.
      Marker &M = MarkerIt->second;
      if (M.UseLoc.isInvalid())
        M.UseLoc = UD.DirectivePos;
      return attachDirective(Diags, UD, M.DefLoc);
    }
    // Marker not seen yet (forward reference): defer until addMarker().
    DeferredDirectives[MarkerName].push_back(UD);
  }

  // Ensure we have no remaining deferred directives, and no
  // multiply-defined-and-used markers.
  void finalize() {
    // A marker that was defined more than once is only an error if some
    // directive actually referred to it.
    for (auto &MarkerInfo : Markers) {
      StringRef Name = MarkerInfo.first();
      Marker &M = MarkerInfo.second;
      if (M.RedefLoc.isValid() && M.UseLoc.isValid()) {
        Diags.Report(M.UseLoc, diag::err_verify_ambiguous_marker) << Name;
        Diags.Report(M.DefLoc, diag::note_verify_ambiguous_marker) << Name;
        Diags.Report(M.RedefLoc, diag::note_verify_ambiguous_marker) << Name;
      }
    }

    // Any directive still deferred names a marker that never appeared.
    for (auto &DeferredPair : DeferredDirectives) {
      Diags.Report(DeferredPair.second.front().DirectivePos,
                   diag::err_verify_no_such_marker)
          << DeferredPair.first();
    }
  }
};
|
|
|
|
|
2010-04-29 04:02:30 +08:00
|
|
|
/// ParseDirective - Go through the comment and see if it indicates expected
/// diagnostics. If so, then put them in the appropriate directive list.
///
/// \param S       the comment text to scan.
/// \param ED      destination lists for parsed directives; may be null when
///                the caller only wants to know whether directives exist.
/// \param SM      source manager for resolving line/file references.
/// \param PP      preprocessor, needed only for "@file:line" lookups; may be
///                null.
/// \param Pos     location of the comment in the source.
/// \param Status  updated to reflect whether expected-no-diagnostics or other
///                expected-* directives were seen (the two are exclusive).
/// \param Markers tracker used for "#name" marker definitions and references.
///
/// Returns true if any valid directives were found.
static bool ParseDirective(StringRef S, ExpectedData *ED, SourceManager &SM,
                           Preprocessor *PP, SourceLocation Pos,
                           VerifyDiagnosticConsumer::DirectiveStatus &Status,
                           VerifyDiagnosticConsumer::MarkerTracker &Markers) {
  DiagnosticsEngine &Diags = PP ? PP->getDiagnostics() : SM.getDiagnostics();

  // First, scan the comment looking for markers.
  for (ParseHelper PH(S); !PH.Done();) {
    if (!PH.Search("#", true))
      break;
    PH.C = PH.P;
    if (!PH.NextMarker()) {
      // A lone '#' that isn't a marker: consume it and keep scanning.
      PH.Next("#");
      PH.Advance();
      continue;
    }
    PH.Advance();
    Markers.addMarker(PH.Match(), Pos);
  }

  // A single comment may contain multiple directives.
  bool FoundDirective = false;
  for (ParseHelper PH(S); !PH.Done();) {
    // Search for the initial directive token.
    // If one prefix, save time by searching only for its directives.
    // Otherwise, search for any potential directive token and check it later.
    const auto &Prefixes = Diags.getDiagnosticOptions().VerifyPrefixes;
    if (!(Prefixes.size() == 1 ? PH.Search(*Prefixes.begin(), true, true)
                               : PH.Search("", true, true)))
      break;

    StringRef DToken = PH.Match();
    PH.Advance();

    // Default directive kind.
    UnattachedDirective D;
    const char *KindStr = "string";

    // Parse the initial directive token in reverse so we can easily determine
    // its exact actual prefix. If we were to parse it from the front instead,
    // it would be harder to determine where the prefix ends because there
    // might be multiple matching -verify prefixes because some might prefix
    // others.

    // Regex in initial directive token: -re
    if (DToken.endswith("-re")) {
      D.RegexKind = true;
      KindStr = "regex";
      DToken = DToken.substr(0, DToken.size()-3);
    }

    // Type in initial directive token: -{error|warning|note|no-diagnostics}
    bool NoDiag = false;
    StringRef DType;
    if (DToken.endswith(DType="-error"))
      D.DL = ED ? &ED->Errors : nullptr;
    else if (DToken.endswith(DType="-warning"))
      D.DL = ED ? &ED->Warnings : nullptr;
    else if (DToken.endswith(DType="-remark"))
      D.DL = ED ? &ED->Remarks : nullptr;
    else if (DToken.endswith(DType="-note"))
      D.DL = ED ? &ED->Notes : nullptr;
    else if (DToken.endswith(DType="-no-diagnostics")) {
      NoDiag = true;
      // "-no-diagnostics-re" makes no sense; skip the token entirely.
      if (D.RegexKind)
        continue;
    }
    else
      continue;
    DToken = DToken.substr(0, DToken.size()-DType.size());

    // What's left in DToken is the actual prefix. That might not be a -verify
    // prefix even if there is only one -verify prefix (for example, the full
    // DToken is foo-bar-warning, but foo is the only -verify prefix).
    if (!std::binary_search(Prefixes.begin(), Prefixes.end(), DToken))
      continue;

    if (NoDiag) {
      // expected-no-diagnostics conflicts with any other expected-* directive
      // seen so far; otherwise record that no diagnostics are expected.
      if (Status == VerifyDiagnosticConsumer::HasOtherExpectedDirectives)
        Diags.Report(Pos, diag::err_verify_invalid_no_diags)
          << /*IsExpectedNoDiagnostics=*/true;
      else
        Status = VerifyDiagnosticConsumer::HasExpectedNoDiagnostics;
      continue;
    }
    if (Status == VerifyDiagnosticConsumer::HasExpectedNoDiagnostics) {
      // Conversely, a normal directive after expected-no-diagnostics.
      Diags.Report(Pos, diag::err_verify_invalid_no_diags)
        << /*IsExpectedNoDiagnostics=*/false;
      continue;
    }
    Status = VerifyDiagnosticConsumer::HasOtherExpectedDirectives;

    // If a directive has been found but we're not interested
    // in storing the directive information, return now.
    if (!D.DL)
      return true;

    // Next optional token: @
    SourceLocation ExpectedLoc;
    StringRef Marker;
    bool MatchAnyLine = false;
    if (!PH.Next("@")) {
      // No location qualifier: the diagnostic is expected on this line.
      ExpectedLoc = Pos;
    } else {
      PH.Advance();
      unsigned Line = 0;
      bool FoundPlus = PH.Next("+");
      if (FoundPlus || PH.Next("-")) {
        // Relative to current line.
        PH.Advance();
        bool Invalid = false;
        unsigned ExpectedLine = SM.getSpellingLineNumber(Pos, &Invalid);
        // '-N' below line N would underflow; reject it via the Line <
        // ExpectedLine guard.
        if (!Invalid && PH.Next(Line) && (FoundPlus || Line < ExpectedLine)) {
          if (FoundPlus) ExpectedLine += Line;
          else ExpectedLine -= Line;
          ExpectedLoc = SM.translateLineCol(SM.getFileID(Pos), ExpectedLine, 1);
        }
      } else if (PH.Next(Line)) {
        // Absolute line number.
        if (Line > 0)
          ExpectedLoc = SM.translateLineCol(SM.getFileID(Pos), Line, 1);
      } else if (PH.NextMarker()) {
        // "@#name": resolved later through the MarkerTracker.
        Marker = PH.Match();
      } else if (PP && PH.Search(":")) {
        // Specific source file.
        StringRef Filename(PH.C, PH.P-PH.C);
        PH.Advance();

        // Lookup file via Preprocessor, like a #include.
        const DirectoryLookup *CurDir;
        Optional<FileEntryRef> File =
            PP->LookupFile(Pos, Filename, false, nullptr, nullptr, CurDir,
                           nullptr, nullptr, nullptr, nullptr, nullptr);
        if (!File) {
          Diags.Report(Pos.getLocWithOffset(PH.C-PH.Begin),
                       diag::err_verify_missing_file) << Filename << KindStr;
          continue;
        }

        // Make sure the referenced file has a FileID so line/col can be
        // translated into a location within it.
        const FileEntry *FE = &File->getFileEntry();
        if (SM.translateFile(FE).isInvalid())
          SM.createFileID(FE, Pos, SrcMgr::C_User);

        if (PH.Next(Line) && Line > 0)
          ExpectedLoc = SM.translateFileLineCol(FE, Line, 1);
        else if (PH.Next("*")) {
          // "@file:*": any line in that file.
          MatchAnyLine = true;
          ExpectedLoc = SM.translateFileLineCol(FE, 1, 1);
        }
      } else if (PH.Next("*")) {
        // "@*": any line in any file.
        MatchAnyLine = true;
        ExpectedLoc = SourceLocation();
      }

      if (ExpectedLoc.isInvalid() && !MatchAnyLine && Marker.empty()) {
        Diags.Report(Pos.getLocWithOffset(PH.C-PH.Begin),
                     diag::err_verify_missing_line) << KindStr;
        continue;
      }
      PH.Advance();
    }

    // Skip optional whitespace.
    PH.SkipWhitespace();

    // Next optional token: positive integer or a '+'.
    if (PH.Next(D.Min)) {
      PH.Advance();
      // A positive integer can be followed by a '+' meaning min
      // or more, or by a '-' meaning a range from min to max.
      if (PH.Next("+")) {
        D.Max = Directive::MaxCount;
        PH.Advance();
      } else if (PH.Next("-")) {
        PH.Advance();
        if (!PH.Next(D.Max) || D.Max < D.Min) {
          Diags.Report(Pos.getLocWithOffset(PH.C-PH.Begin),
                       diag::err_verify_invalid_range) << KindStr;
          continue;
        }
        PH.Advance();
      } else {
        // Bare integer: expect exactly that many occurrences.
        D.Max = D.Min;
      }
    } else if (PH.Next("+")) {
      // '+' on its own means "1 or more".
      D.Max = Directive::MaxCount;
      PH.Advance();
    }

    // Skip optional whitespace.
    PH.SkipWhitespace();

    // Next token: {{
    if (!PH.Next("{{")) {
      Diags.Report(Pos.getLocWithOffset(PH.C-PH.Begin),
                   diag::err_verify_missing_start) << KindStr;
      continue;
    }
    PH.Advance();
    const char* const ContentBegin = PH.C; // mark content begin
    // Search for token: }}
    if (!PH.SearchClosingBrace("{{", "}}")) {
      Diags.Report(Pos.getLocWithOffset(PH.C-PH.Begin),
                   diag::err_verify_missing_end) << KindStr;
      continue;
    }
    const char* const ContentEnd = PH.P; // mark content end
    PH.Advance();

    D.DirectivePos = Pos;
    D.ContentBegin = Pos.getLocWithOffset(ContentBegin - PH.Begin);

    // Build directive text; convert \n to newlines.
    StringRef NewlineStr = "\\n";
    StringRef Content(ContentBegin, ContentEnd-ContentBegin);
    size_t CPos = 0;
    size_t FPos;
    while ((FPos = Content.find(NewlineStr, CPos)) != StringRef::npos) {
      D.Text += Content.substr(CPos, FPos-CPos);
      D.Text += '\n';
      CPos = FPos + NewlineStr.size();
    }
    // No \n sequences found: take the content verbatim.
    if (D.Text.empty())
      D.Text.assign(ContentBegin, ContentEnd);

    // Check that regex directives contain at least one regex.
    if (D.RegexKind && D.Text.find("{{") == StringRef::npos) {
      Diags.Report(D.ContentBegin, diag::err_verify_missing_regex) << D.Text;
      return false;
    }

    // Attach now if the location is known; otherwise defer via the marker.
    if (Marker.empty())
      attachDirective(Diags, D, ExpectedLoc, MatchAnyLine);
    else
      Markers.addDirective(Marker, D);
    FoundDirective = true;
  }

  return FoundDirective;
}
|
|
|
|
|
|
|
|
// Construct a verifier that interposes on the engine's current client: all
// diagnostics are buffered locally and only verification results are
// forwarded to the original ("primary") client.
VerifyDiagnosticConsumer::VerifyDiagnosticConsumer(DiagnosticsEngine &Diags_)
    : Diags(Diags_), PrimaryClient(Diags.getClient()),
      // Take ownership of the previous client, if the engine owned it.
      PrimaryClientOwner(Diags.takeClient()),
      Buffer(new TextDiagnosticBuffer()), Markers(new MarkerTracker(Diags)),
      Status(HasNoDirectives) {
  // Bind to the engine's SourceManager now if one is already attached;
  // otherwise it is picked up later via setSourceManager().
  if (Diags.hasSourceManager())
    setSourceManager(Diags.getSourceManager());
}
|
|
|
|
|
|
|
|
VerifyDiagnosticConsumer::~VerifyDiagnosticConsumer() {
  assert(!ActiveSourceFiles && "Incomplete parsing of source files!");
  assert(!CurrentPreprocessor && "CurrentPreprocessor should be invalid!");
  // Drop the SourceManager first so the final CheckDiagnostics() call takes
  // the "no source manager" path for any still-buffered diagnostics.
  SrcManager = nullptr;
  // Verify whatever was buffered after the last EndSourceFile().
  CheckDiagnostics();
  assert(!Diags.ownsClient() &&
         "The VerifyDiagnosticConsumer takes over ownership of the client!");
}
|
|
|
|
|
|
|
|
// DiagnosticConsumer interface.
|
|
|
|
|
|
|
|
// Called once per source file; nested invocations are counted via
// ActiveSourceFiles so the comment handler is installed exactly once.
void VerifyDiagnosticConsumer::BeginSourceFile(const LangOptions &LangOpts,
                                               const Preprocessor *PP) {
  // Attach comment handler on first invocation.
  if (++ActiveSourceFiles == 1) {
    if (PP) {
      CurrentPreprocessor = PP;
      this->LangOpts = &LangOpts;
      setSourceManager(PP->getSourceManager());
      // The handler is how expected-* directives in comments reach us.
      const_cast<Preprocessor *>(PP)->addCommentHandler(this);
#ifndef NDEBUG
      // Debug build tracks parsed files.
      const_cast<Preprocessor *>(PP)->addPPCallbacks(
                      std::make_unique<VerifyFileTracker>(*this, *SrcManager));
#endif
    }
  }

  // All nested invocations must come from the same preprocessor.
  assert((!PP || CurrentPreprocessor == PP) && "Preprocessor changed!");
  PrimaryClient->BeginSourceFile(LangOpts, PP);
}
|
|
|
|
|
|
|
|
// Mirror of BeginSourceFile: tears down the comment handler and runs the
// actual diagnostic verification once the outermost file finishes.
void VerifyDiagnosticConsumer::EndSourceFile() {
  assert(ActiveSourceFiles && "No active source files!");
  PrimaryClient->EndSourceFile();

  // Detach comment handler once last active source file completed.
  if (--ActiveSourceFiles == 0) {
    if (CurrentPreprocessor)
      const_cast<Preprocessor *>(CurrentPreprocessor)->
          removeCommentHandler(this);

    // Diagnose any used-but-not-defined markers.
    Markers->finalize();

    // Check diagnostics once last file completed.
    CheckDiagnostics();
    CurrentPreprocessor = nullptr;
    LangOpts = nullptr;
  }
}
|
|
|
|
|
|
|
|
// Receives every emitted diagnostic. Nothing is forwarded to the primary
// client here; diagnostics are buffered and compared against the expected-*
// directives later in CheckDiagnostics().
void VerifyDiagnosticConsumer::HandleDiagnostic(
      DiagnosticsEngine::Level DiagLevel, const Diagnostic &Info) {
  if (Info.hasSourceManager()) {
    // If this diagnostic is for a different source manager, ignore it.
    if (SrcManager && &Info.getSourceManager() != SrcManager)
      return;

    setSourceManager(Info.getSourceManager());
  }

#ifndef NDEBUG
  // Debug build tracks unparsed files for possible
  // unparsed expected-* directives.
  if (SrcManager) {
    SourceLocation Loc = Info.getLocation();
    if (Loc.isValid()) {
      ParsedStatus PS = IsUnparsed;

      // Record the file containing the expansion point of the diagnostic.
      Loc = SrcManager->getExpansionLoc(Loc);
      FileID FID = SrcManager->getFileID(Loc);

      const FileEntry *FE = SrcManager->getFileEntryForID(FID);
      if (FE && CurrentPreprocessor && SrcManager->isLoadedFileID(FID)) {
        // If the file is a modules header file it shall not be parsed
        // for expected-* directives.
        HeaderSearch &HS = CurrentPreprocessor->getHeaderSearchInfo();
        if (HS.findModuleForHeader(FE))
          PS = IsUnparsedNoDirectives;
      }

      UpdateParsedFileStatus(*SrcManager, FID, PS);
    }
  }
#endif

  // Send the diagnostic to the buffer, we will check it once we reach the end
  // of the source file (or are destructed).
  Buffer->HandleDiagnostic(DiagLevel, Info);
}
|
|
|
|
|
|
|
|
/// HandleComment - Hook into the preprocessor and extract comments containing
|
|
|
|
/// expected errors and warnings.
|
|
|
|
/// HandleComment - Hook into the preprocessor and extract comments containing
/// expected errors and warnings.
///
/// Folds "\<EOL>" escape sequences out of the comment text, then hands the
/// result to ParseDirective. Always returns false (the comment is never
/// consumed on behalf of the preprocessor).
bool VerifyDiagnosticConsumer::HandleComment(Preprocessor &PP,
                                             SourceRange Comment) {
  SourceManager &SM = PP.getSourceManager();

  // If this comment is for a different source manager, ignore it.
  if (SrcManager && &SM != SrcManager)
    return false;

  SourceLocation CommentBegin = Comment.getBegin();

  const char *CommentRaw = SM.getCharacterData(CommentBegin);
  StringRef C(CommentRaw, SM.getCharacterData(Comment.getEnd()) - CommentRaw);

  if (C.empty())
    return false;

  // Fast path: no backslash at all, parse the comment text verbatim.
  size_t loc = C.find('\\');
  if (loc == StringRef::npos) {
    ParseDirective(C, &ED, SM, &PP, CommentBegin, Status, *Markers);
    return false;
  }

  // Fold any "\<EOL>" sequences into C2.
  std::string C2;
  C2.reserve(C.size());

  for (size_t last = 0;; loc = C.find('\\', last)) {
    if (loc == StringRef::npos) {
      C2 += C.substr(last);
      break;
    }
    C2 += C.substr(last, loc-last);
    last = loc + 1;

    // A backslash as the very last character cannot begin an escaped
    // newline; keep it verbatim. This also prevents reading C[last] one
    // past the end below. (The previous "loc == C.size()" guard could never
    // fire, since find() never returns C.size().)
    if (last == C.size()) {
      C2 += '\\';
      break;
    }

    if (C[last] == '\n' || C[last] == '\r') {
      ++last;

      // Escape \r\n or \n\r, but not \n\n.
      if (last < C.size())
        if (C[last] == '\n' || C[last] == '\r')
          if (C[last] != C[last-1])
            ++last;
    } else {
      // This was just a normal backslash.
      C2 += '\\';
    }
  }

  if (!C2.empty())
    ParseDirective(C2, &ED, SM, &PP, CommentBegin, Status, *Markers);
  return false;
}
|
|
|
|
|
2012-08-10 09:06:16 +08:00
|
|
|
#ifndef NDEBUG
|
2018-05-09 09:00:01 +08:00
|
|
|
/// Lex the specified source file to determine whether it contains
|
2012-08-10 09:06:16 +08:00
|
|
|
/// any expected-* directives. As a Lexer is used rather than a full-blown
|
|
|
|
/// Preprocessor, directives inside skipped #if blocks will still be found.
|
|
|
|
///
|
|
|
|
/// \return true if any directives were found.
|
2012-08-19 00:58:52 +08:00
|
|
|
// Raw-lex FID, retaining comments, and probe each comment for a directive.
// Debug-build helper used by UpdateParsedFileStatus (see the NDEBUG region).
// \return true as soon as any expected-* directive is found.
static bool findDirectives(SourceManager &SM, FileID FID,
                           const LangOptions &LangOpts) {
  // Create a raw lexer to pull all the comments out of FID.
  if (FID.isInvalid())
    return false;

  // Create a lexer to lex all the tokens of the main file in raw mode.
  const llvm::MemoryBuffer *FromFile = SM.getBuffer(FID);
  Lexer RawLex(FID, FromFile, SM, LangOpts);

  // Return comments as tokens, this is how we find expected diagnostics.
  RawLex.SetCommentRetentionState(true);

  Token Tok;
  Tok.setKind(tok::comment);
  // Local status sink: we only care whether a directive exists, not about
  // updating the consumer's own directive state.
  VerifyDiagnosticConsumer::DirectiveStatus Status =
    VerifyDiagnosticConsumer::HasNoDirectives;
  while (Tok.isNot(tok::eof)) {
    RawLex.LexFromRawLexer(Tok);
    if (!Tok.is(tok::comment)) continue;

    std::string Comment = RawLex.getSpelling(Tok, SM, LangOpts);
    if (Comment.empty()) continue;

    // We don't care about tracking markers for this phase.
    VerifyDiagnosticConsumer::MarkerTracker Markers(SM.getDiagnostics());

    // Find first directive.
    if (ParseDirective(Comment, nullptr, SM, nullptr, Tok.getLocation(),
                       Status, Markers))
      return true;
  }
  return false;
}
|
2012-08-10 09:06:16 +08:00
|
|
|
#endif // !NDEBUG
|
2009-11-14 11:23:19 +08:00
|
|
|
|
2018-05-09 09:00:01 +08:00
|
|
|
/// Takes a list of diagnostics that have been generated but not matched
|
2012-07-10 10:57:03 +08:00
|
|
|
/// by an expected-* directive and produces a diagnostic to the user from this.
|
|
|
|
/// Takes a list of diagnostics that have been generated but not matched
/// by an expected-* directive and produces a diagnostic to the user from this.
/// \param SourceMgr may be null; then every location prints as "(frontend)".
/// \return the number of unexpected diagnostics reported.
static unsigned PrintUnexpected(DiagnosticsEngine &Diags, SourceManager *SourceMgr,
                                const_diag_iterator diag_begin,
                                const_diag_iterator diag_end,
                                const char *Kind) {
  if (diag_begin == diag_end) return 0;

  // Accumulate one "File/Line: text" entry per diagnostic, then emit a
  // single err_verify_inconsistent_diags carrying the whole list.
  SmallString<256> Fmt;
  llvm::raw_svector_ostream OS(Fmt);
  for (const_diag_iterator I = diag_begin, E = diag_end; I != E; ++I) {
    if (I->first.isInvalid() || !SourceMgr)
      OS << "\n (frontend)";
    else {
      OS << "\n ";
      if (const FileEntry *File = SourceMgr->getFileEntryForID(
                                                SourceMgr->getFileID(I->first)))
        OS << " File " << File->getName();
      OS << " Line " << SourceMgr->getPresumedLineNumber(I->first);
    }
    OS << ": " << I->second;
  }

  // Force emission: this must surface even when diagnostics are suppressed.
  Diags.Report(diag::err_verify_inconsistent_diags).setForceEmit()
    << Kind << /*Unexpected=*/true << OS.str();
  return std::distance(diag_begin, diag_end);
}
|
|
|
|
|
2018-05-09 09:00:01 +08:00
|
|
|
/// Takes a list of diagnostics that were expected to have been generated
|
2012-07-10 10:57:03 +08:00
|
|
|
/// but were not and produces a diagnostic to the user from this.
|
2014-08-30 00:30:23 +08:00
|
|
|
/// Takes a list of diagnostics that were expected to have been generated
/// but were not and produces a diagnostic to the user from this.
/// \return the number of missing expected diagnostics reported.
static unsigned PrintExpected(DiagnosticsEngine &Diags,
                              SourceManager &SourceMgr,
                              std::vector<Directive *> &DL, const char *Kind) {
  if (DL.empty())
    return 0;

  SmallString<256> Fmt;
  llvm::raw_svector_ostream OS(Fmt);
  for (const auto *D : DL) {
    // "*" stands for "any file" / "any line" wildcards in the listing.
    if (D->DiagnosticLoc.isInvalid())
      OS << "\n File *";
    else
      OS << "\n File " << SourceMgr.getFilename(D->DiagnosticLoc);
    if (D->MatchAnyLine)
      OS << " Line *";
    else
      OS << " Line " << SourceMgr.getPresumedLineNumber(D->DiagnosticLoc);
    // Mention the directive's own position when it differs from where the
    // diagnostic was expected (e.g. expected-foo@line directives).
    if (D->DirectiveLoc != D->DiagnosticLoc)
      OS << " (directive at "
         << SourceMgr.getFilename(D->DirectiveLoc) << ':'
         << SourceMgr.getPresumedLineNumber(D->DirectiveLoc) << ')';
    OS << ": " << D->Text;
  }

  // Force emission so the mismatch is visible even under suppression.
  Diags.Report(diag::err_verify_inconsistent_diags).setForceEmit()
    << Kind << /*Unexpected=*/false << OS.str();
  return DL.size();
}
|
|
|
|
|
2018-05-09 09:00:01 +08:00
|
|
|
/// Determine whether two source locations come from the same file.
|
2013-04-17 16:06:46 +08:00
|
|
|
/// Determine whether two source locations come from the same file.
///
/// The diagnostic location is first unwound out of any macro expansions so
/// the comparison happens at the spelling level the directive was written at.
static bool IsFromSameFile(SourceManager &SM, SourceLocation DirectiveLoc,
                           SourceLocation DiagnosticLoc) {
  // Walk up to the location that ultimately triggered the expansion.
  for (; DiagnosticLoc.isMacroID();
       DiagnosticLoc = SM.getImmediateMacroCallerLoc(DiagnosticLoc)) {
  }

  // Cheapest check first: both locations written in the same FileID.
  if (SM.isWrittenInSameFile(DirectiveLoc, DiagnosticLoc))
    return true;

  // A diagnostic without a backing FileEntry is treated as belonging to the
  // main file, so a directive written there matches it.
  const FileEntry *DiagEntry =
      SM.getFileEntryForID(SM.getFileID(DiagnosticLoc));
  if (!DiagEntry && SM.isWrittenInMainFile(DirectiveLoc))
    return true;

  // Fall back to comparing the underlying file entries, which also matches
  // the same file reached through different FileIDs.
  return DiagEntry == SM.getFileEntryForID(SM.getFileID(DirectiveLoc));
}
|
|
|
|
|
2010-04-29 04:02:30 +08:00
|
|
|
/// CheckLists - Compare expected to seen diagnostic lists and return the
|
|
|
|
/// the difference between them.
|
2011-09-26 07:23:43 +08:00
|
|
|
static unsigned CheckLists(DiagnosticsEngine &Diags, SourceManager &SourceMgr,
|
2010-04-29 04:02:30 +08:00
|
|
|
const char *Label,
|
|
|
|
DirectiveList &Left,
|
|
|
|
const_diag_iterator d2_begin,
|
2015-06-13 15:11:40 +08:00
|
|
|
const_diag_iterator d2_end,
|
|
|
|
bool IgnoreUnexpected) {
|
2014-08-30 00:30:23 +08:00
|
|
|
std::vector<Directive *> LeftOnly;
|
2009-11-14 11:23:19 +08:00
|
|
|
DiagList Right(d2_begin, d2_end);
|
|
|
|
|
2014-08-30 00:30:23 +08:00
|
|
|
for (auto &Owner : Left) {
|
|
|
|
Directive &D = *Owner;
|
2012-07-10 10:57:03 +08:00
|
|
|
unsigned LineNo1 = SourceMgr.getPresumedLineNumber(D.DiagnosticLoc);
|
2009-11-14 11:23:19 +08:00
|
|
|
|
2012-07-10 10:57:26 +08:00
|
|
|
for (unsigned i = 0; i < D.Max; ++i) {
|
2010-04-29 04:02:30 +08:00
|
|
|
DiagList::iterator II, IE;
|
|
|
|
for (II = Right.begin(), IE = Right.end(); II != IE; ++II) {
|
2014-07-11 00:43:29 +08:00
|
|
|
if (!D.MatchAnyLine) {
|
|
|
|
unsigned LineNo2 = SourceMgr.getPresumedLineNumber(II->first);
|
|
|
|
if (LineNo1 != LineNo2)
|
|
|
|
continue;
|
|
|
|
}
|
2009-11-14 11:23:19 +08:00
|
|
|
|
2017-10-18 09:41:38 +08:00
|
|
|
if (!D.DiagnosticLoc.isInvalid() &&
|
|
|
|
!IsFromSameFile(SourceMgr, D.DiagnosticLoc, II->first))
|
2013-04-17 16:06:46 +08:00
|
|
|
continue;
|
|
|
|
|
2010-04-29 04:02:30 +08:00
|
|
|
const std::string &RightText = II->second;
|
2012-07-10 10:56:15 +08:00
|
|
|
if (D.match(RightText))
|
2010-04-29 04:02:30 +08:00
|
|
|
break;
|
|
|
|
}
|
|
|
|
if (II == IE) {
|
|
|
|
// Not found.
|
2012-07-10 10:57:26 +08:00
|
|
|
if (i >= D.Min) break;
|
2014-08-30 00:30:23 +08:00
|
|
|
LeftOnly.push_back(&D);
|
2010-04-29 04:02:30 +08:00
|
|
|
} else {
|
|
|
|
// Found. The same cannot be found twice.
|
|
|
|
Right.erase(II);
|
2009-11-14 11:23:19 +08:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
// Now all that's left in Right are those that were not matched.
|
2012-07-10 10:57:03 +08:00
|
|
|
unsigned num = PrintExpected(Diags, SourceMgr, LeftOnly, Label);
|
2015-06-13 15:11:40 +08:00
|
|
|
if (!IgnoreUnexpected)
|
|
|
|
num += PrintUnexpected(Diags, &SourceMgr, Right.begin(), Right.end(), Label);
|
2011-12-17 21:00:31 +08:00
|
|
|
return num;
|
2009-11-14 11:23:19 +08:00
|
|
|
}
|
|
|
|
|
|
|
|
/// CheckResults - This compares the expected results to those that
|
|
|
|
/// were actually reported. It emits any discrepencies. Return "true" if there
|
|
|
|
/// were problems. Return "false" otherwise.
|
2011-09-26 07:23:43 +08:00
|
|
|
/// CheckResults - This compares the expected results to those that
/// were actually reported. It emits any discrepancies. Returns the number
/// of mismatches found (0 means everything matched).
static unsigned CheckResults(DiagnosticsEngine &Diags, SourceManager &SourceMgr,
                             const TextDiagnosticBuffer &Buffer,
                             ExpectedData &ED) {
  // We want to capture the delta between what was expected and what was
  // seen.
  //
  //   Expected \ Seen - set expected but not seen
  //   Seen \ Expected - set seen but not expected
  unsigned NumProblems = 0;

  // Levels the user asked -verify to ignore when they show up unexpectedly.
  const DiagnosticLevelMask DiagMask =
    Diags.getDiagnosticOptions().getVerifyIgnoreUnexpected();

  // See if there are error mismatches.
  NumProblems += CheckLists(Diags, SourceMgr, "error", ED.Errors,
                            Buffer.err_begin(), Buffer.err_end(),
                            bool(DiagnosticLevelMask::Error & DiagMask));

  // See if there are warning mismatches.
  NumProblems += CheckLists(Diags, SourceMgr, "warning", ED.Warnings,
                            Buffer.warn_begin(), Buffer.warn_end(),
                            bool(DiagnosticLevelMask::Warning & DiagMask));

  // See if there are remark mismatches.
  NumProblems += CheckLists(Diags, SourceMgr, "remark", ED.Remarks,
                            Buffer.remark_begin(), Buffer.remark_end(),
                            bool(DiagnosticLevelMask::Remark & DiagMask));

  // See if there are note mismatches.
  NumProblems += CheckLists(Diags, SourceMgr, "note", ED.Notes,
                            Buffer.note_begin(), Buffer.note_end(),
                            bool(DiagnosticLevelMask::Note & DiagMask));

  return NumProblems;
}
|
|
|
|
|
2012-08-19 00:58:52 +08:00
|
|
|
// Record whether FID has been parsed for expected-* directives. Outside of
// debug builds this only (re)binds the SourceManager; the parsed/unparsed
// bookkeeping below exists purely for the NDEBUG consistency check in
// CheckDiagnostics().
void VerifyDiagnosticConsumer::UpdateParsedFileStatus(SourceManager &SM,
                                                      FileID FID,
                                                      ParsedStatus PS) {
  // Check SourceManager hasn't changed.
  setSourceManager(SM);

#ifndef NDEBUG
  if (FID.isInvalid())
    return;

  const FileEntry *FE = SM.getFileEntryForID(FID);

  if (PS == IsParsed) {
    // Move the FileID from the unparsed set to the parsed set.
    UnparsedFiles.erase(FID);
    ParsedFiles.insert(std::make_pair(FID, FE));
  } else if (!ParsedFiles.count(FID) && !UnparsedFiles.count(FID)) {
    // Add the FileID to the unparsed set if we haven't seen it before.

    // Check for directives.
    bool FoundDirectives;
    if (PS == IsUnparsedNoDirectives)
      FoundDirectives = false;
    else
      // No LangOpts yet means we can't lex the file; conservatively assume
      // it contains directives.
      FoundDirectives = !LangOpts || findDirectives(SM, FID, *LangOpts);

    // Add the FileID to the unparsed set.
    UnparsedFiles.insert(std::make_pair(FID,
                                      UnparsedFileStatus(FE, FoundDirectives)));
  }
#endif
}
|
|
|
|
|
2011-09-26 08:38:03 +08:00
|
|
|
// Compare buffered diagnostics against the collected expected-* directives,
// report mismatches through the primary client, and reset the buffer and
// directive state for the next round.
void VerifyDiagnosticConsumer::CheckDiagnostics() {
  // Ensure any diagnostics go to the primary client.
  DiagnosticConsumer *CurClient = Diags.getClient();
  std::unique_ptr<DiagnosticConsumer> Owner = Diags.takeClient();
  Diags.setClient(PrimaryClient, false);

#ifndef NDEBUG
  // In a debug build, scan through any files that may have been missed
  // during parsing and issue a fatal error if directives are contained
  // within these files. If a fatal error occurs, this suggests that
  // this file is being parsed separately from the main file, in which
  // case consider moving the directives to the correct place, if this
  // is applicable.
  if (!UnparsedFiles.empty()) {
    // Generate a cache of parsed FileEntry pointers for alias lookups.
    llvm::SmallPtrSet<const FileEntry *, 8> ParsedFileCache;
    for (const auto &I : ParsedFiles)
      if (const FileEntry *FE = I.second)
        ParsedFileCache.insert(FE);

    // Iterate through list of unparsed files.
    for (const auto &I : UnparsedFiles) {
      const UnparsedFileStatus &Status = I.second;
      const FileEntry *FE = Status.getFile();

      // Skip files that have been parsed via an alias.
      if (FE && ParsedFileCache.count(FE))
        continue;

      // Report a fatal error if this file contained directives.
      if (Status.foundDirectives()) {
        llvm::report_fatal_error(Twine("-verify directives found after rather"
                                       " than during normal parsing of ",
                                 StringRef(FE ? FE->getName() : "(unknown)")));
      }
    }

    // UnparsedFiles has been processed now, so clear it.
    UnparsedFiles.clear();
  }
#endif // !NDEBUG

  if (SrcManager) {
    // Produce an error if no expected-* directives could be found in the
    // source file(s) processed.
    if (Status == HasNoDirectives) {
      Diags.Report(diag::err_verify_no_directives).setForceEmit();
      ++NumErrors;
      Status = HasNoDirectivesReported;
    }

    // Check that the expected diagnostics occurred.
    NumErrors += CheckResults(Diags, *SrcManager, *Buffer, ED);
  } else {
    // No SourceManager (e.g. only frontend diagnostics): everything buffered
    // is "unexpected" unless its level is in the ignore mask.
    const DiagnosticLevelMask DiagMask =
        ~Diags.getDiagnosticOptions().getVerifyIgnoreUnexpected();
    if (bool(DiagnosticLevelMask::Error & DiagMask))
      NumErrors += PrintUnexpected(Diags, nullptr, Buffer->err_begin(),
                                   Buffer->err_end(), "error");
    if (bool(DiagnosticLevelMask::Warning & DiagMask))
      NumErrors += PrintUnexpected(Diags, nullptr, Buffer->warn_begin(),
                                   Buffer->warn_end(), "warn");
    if (bool(DiagnosticLevelMask::Remark & DiagMask))
      NumErrors += PrintUnexpected(Diags, nullptr, Buffer->remark_begin(),
                                   Buffer->remark_end(), "remark");
    if (bool(DiagnosticLevelMask::Note & DiagMask))
      NumErrors += PrintUnexpected(Diags, nullptr, Buffer->note_begin(),
                                   Buffer->note_end(), "note");
  }

  // Restore the interposed client, handing ownership back if we had it.
  Diags.setClient(CurClient, Owner.release() != nullptr);

  // Reset the buffer, we have processed all the diagnostics in it.
  Buffer.reset(new TextDiagnosticBuffer());
  ED.Reset();
}
|
2010-04-29 04:02:30 +08:00
|
|
|
|
2014-08-30 00:30:23 +08:00
|
|
|
// Factory for directive matchers. Plain directives compare text verbatim;
// regex directives translate the "{{regex}}" / literal-text alternation in
// Text into one combined regular expression.
std::unique_ptr<Directive> Directive::create(bool RegexKind,
                                             SourceLocation DirectiveLoc,
                                             SourceLocation DiagnosticLoc,
                                             bool MatchAnyLine, StringRef Text,
                                             unsigned Min, unsigned Max) {
  if (!RegexKind)
    return std::make_unique<StandardDirective>(DirectiveLoc, DiagnosticLoc,
                                               MatchAnyLine, Text, Min, Max);

  // Parse the directive into a regular expression.
  std::string RegexStr;
  StringRef S = Text;
  while (!S.empty()) {
    if (S.startswith("{{")) {
      S = S.drop_front(2);
      size_t RegexMatchLength = S.find("}}");
      // ParseDirective already rejected unterminated {{...}} blocks.
      assert(RegexMatchLength != StringRef::npos);
      // Append the regex, enclosed in parentheses.
      RegexStr += "(";
      RegexStr.append(S.data(), RegexMatchLength);
      RegexStr += ")";
      S = S.drop_front(RegexMatchLength + 2);
    } else {
      size_t VerbatimMatchLength = S.find("{{");
      if (VerbatimMatchLength == StringRef::npos)
        VerbatimMatchLength = S.size();
      // Escape and append the fixed string.
      RegexStr += llvm::Regex::escape(S.substr(0, VerbatimMatchLength));
      S = S.drop_front(VerbatimMatchLength);
    }
  }

  return std::make_unique<RegexDirective>(
      DirectiveLoc, DiagnosticLoc, MatchAnyLine, Text, Min, Max, RegexStr);
}
|