//===--- SourceManager.cpp - Track and cache source files -----------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements the SourceManager interface.
//
//===----------------------------------------------------------------------===//

#include "clang/Basic/SourceManager.h"
#include "clang/Basic/SourceManagerInternals.h"
#include "clang/Basic/Diagnostic.h"
#include "clang/Basic/FileManager.h"
#include "llvm/ADT/StringSwitch.h"
#include "llvm/ADT/Optional.h"
#include "llvm/Support/Compiler.h"
#include "llvm/Support/MemoryBuffer.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Support/Path.h"
#include <algorithm>
#include <string>
#include <cstring>
#include <sys/stat.h>

using namespace clang;
using namespace SrcMgr;
using llvm::MemoryBuffer;

//===----------------------------------------------------------------------===//
// SourceManager Helper Classes
//===----------------------------------------------------------------------===//

ContentCache::~ContentCache() {
  if (shouldFreeBuffer())
    delete Buffer.getPointer();
}

/// getSizeBytesMapped - Returns the number of bytes actually mapped for
/// this ContentCache. This can be 0 if the MemBuffer was not actually
/// instantiated.
unsigned ContentCache::getSizeBytesMapped() const {
  return Buffer.getPointer() ? Buffer.getPointer()->getBufferSize() : 0;
}

/// Returns the kind of memory used to back the memory buffer for
/// this content cache.  This is used for performance analysis.
llvm::MemoryBuffer::BufferKind ContentCache::getMemoryBufferKind() const {
  assert(Buffer.getPointer());

  // Should be unreachable, but keep for sanity.
  if (!Buffer.getPointer())
    return llvm::MemoryBuffer::MemoryBuffer_Malloc;

  const llvm::MemoryBuffer *buf = Buffer.getPointer();
  return buf->getBufferKind();
}

/// getSize - Returns the size of the content encapsulated by this
/// ContentCache.  This can be the size of the source file or the size of an
/// arbitrary scratch buffer.  If the ContentCache encapsulates a source file,
/// that file is not lazily brought in from disk to satisfy this query.
unsigned ContentCache::getSize() const {
  return Buffer.getPointer() ? (unsigned) Buffer.getPointer()->getBufferSize()
                             : (unsigned) ContentsEntry->getSize();
}
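
/// replaceBuffer - Replace the existing buffer (which will be deleted) with
/// the given buffer.  If DoNotFree is set, the new buffer is not owned by
/// this ContentCache and will not be deleted when the cache goes away.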
void ContentCache::replaceBuffer(const llvm::MemoryBuffer *B,
                                 bool DoNotFree) {
  assert(B != Buffer.getPointer());

  if (shouldFreeBuffer())
    delete Buffer.getPointer();
  Buffer.setPointer(B);
  Buffer.setInt(DoNotFree? DoNotFreeFlag : 0);
}
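
/// getBuffer - Return the buffer for this ContentCache, loading it from the
/// associated file on first use.  If the file cannot be loaded or has changed
/// size since it was stat'd, a diagnostic is emitted through Diag, a
/// placeholder buffer is installed, and *Invalid is set when non-null.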
const llvm::MemoryBuffer *ContentCache::getBuffer(Diagnostic &Diag,
                                                  const SourceManager &SM,
                                                  SourceLocation Loc,
                                                  bool *Invalid) const {
  // Lazily create the Buffer for ContentCaches that wrap files.  If we already
  // computed it, just return what we have.
  if (Buffer.getPointer() || ContentsEntry == 0) {
    if (Invalid)
      *Invalid = isBufferInvalid();

    return Buffer.getPointer();
  }

  std::string ErrorStr;
  Buffer.setPointer(SM.getFileManager().getBufferForFile(ContentsEntry,
                                                         &ErrorStr));

  // If we were unable to open the file, then we are in an inconsistent
  // situation where the content cache referenced a file which no longer
  // exists. Most likely, we were using a stat cache with an invalid entry but
  // the file could also have been removed during processing. Since we can't
  // really deal with this situation, just create an empty buffer.
  //
  // FIXME: This is definitely not ideal, but our immediate clients can't
  // currently handle returning a null entry here. Ideally we should detect
  // that we are in an inconsistent situation and error out as quickly as
  // possible.
  if (!Buffer.getPointer()) {
    const llvm::StringRef FillStr("<<<MISSING SOURCE FILE>>>\n");
    Buffer.setPointer(MemoryBuffer::getNewMemBuffer(ContentsEntry->getSize(),
                                                    "<invalid>"));
    char *Ptr = const_cast<char*>(Buffer.getPointer()->getBufferStart());
    for (unsigned i = 0, e = ContentsEntry->getSize(); i != e; ++i)
      Ptr[i] = FillStr[i % FillStr.size()];

    if (Diag.isDiagnosticInFlight())
      Diag.SetDelayedDiagnostic(diag::err_cannot_open_file,
                                ContentsEntry->getName(), ErrorStr);
    else
      Diag.Report(Loc, diag::err_cannot_open_file)
        << ContentsEntry->getName() << ErrorStr;

    Buffer.setInt(Buffer.getInt() | InvalidFlag);

    if (Invalid) *Invalid = true;
    return Buffer.getPointer();
  }

  // Check that the file's size is the same as in the file entry (which may
  // have come from a stat cache).
  if (getRawBuffer()->getBufferSize() != (size_t)ContentsEntry->getSize()) {
    if (Diag.isDiagnosticInFlight())
      Diag.SetDelayedDiagnostic(diag::err_file_modified,
                                ContentsEntry->getName());
    else
      Diag.Report(Loc, diag::err_file_modified)
        << ContentsEntry->getName();

    Buffer.setInt(Buffer.getInt() | InvalidFlag);
    if (Invalid) *Invalid = true;
    return Buffer.getPointer();
  }

  // If the buffer is valid, check to see if it has a UTF Byte Order Mark
  // (BOM).  We only support UTF-8 with and without a BOM right now.  See
  // http://en.wikipedia.org/wiki/Byte_order_mark for more information.
  llvm::StringRef BufStr = Buffer.getPointer()->getBuffer();
  const char *InvalidBOM = llvm::StringSwitch<const char *>(BufStr)
    .StartsWith("\xFE\xFF", "UTF-16 (BE)")
    .StartsWith("\xFF\xFE", "UTF-16 (LE)")
    .StartsWith("\x00\x00\xFE\xFF", "UTF-32 (BE)")
    .StartsWith("\xFF\xFE\x00\x00", "UTF-32 (LE)")
    .StartsWith("\x2B\x2F\x76", "UTF-7")
    .StartsWith("\xF7\x64\x4C", "UTF-1")
    .StartsWith("\xDD\x73\x66\x73", "UTF-EBCDIC")
    .StartsWith("\x0E\xFE\xFF", "SDSU")
    .StartsWith("\xFB\xEE\x28", "BOCU-1")
    .StartsWith("\x84\x31\x95\x33", "GB-18030")
    .Default(0);

  if (InvalidBOM) {
    Diag.Report(Loc, diag::err_unsupported_bom)
      << InvalidBOM << ContentsEntry->getName();
    Buffer.setInt(Buffer.getInt() | InvalidFlag);
  }

  if (Invalid)
    *Invalid = isBufferInvalid();

  return Buffer.getPointer();
}
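
/// getLineTableFilenameID - Return the uniqued ID for the specified filename,
/// assigning the next available ID if the filename has not been seen before.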
unsigned LineTableInfo::getLineTableFilenameID(llvm::StringRef Name) {
  // Look up the filename in the string table, returning the pre-existing value
  // if it exists.
  llvm::StringMapEntry<unsigned> &Entry =
    FilenameIDs.GetOrCreateValue(Name, ~0U);
  if (Entry.getValue() != ~0U)
    return Entry.getValue();

  // Otherwise, assign this the next available ID.
  Entry.setValue(FilenamesByID.size());
  FilenamesByID.push_back(&Entry);
  return FilenamesByID.size()-1;
}

/// AddLineNote - Add a line note to the line table that indicates that there
/// is a #line at the specified FID/Offset location which changes the presumed
/// location to LineNo/FilenameID.
void LineTableInfo::AddLineNote(unsigned FID, unsigned Offset,
                                unsigned LineNo, int FilenameID) {
  std::vector<LineEntry> &Entries = LineEntries[FID];

  assert((Entries.empty() || Entries.back().FileOffset < Offset) &&
         "Adding line entries out of order!");

  SrcMgr::CharacteristicKind Kind = SrcMgr::C_User;
  unsigned IncludeOffset = 0;

  if (!Entries.empty()) {
    // If this is a '#line 4' after '#line 42 "foo.h"', make sure to remember
    // that we are still in "foo.h".
    if (FilenameID == -1)
      FilenameID = Entries.back().FilenameID;

    // If we are after a line marker that switched us to system header mode, or
    // that set #include information, preserve it.
    Kind = Entries.back().FileKind;
    IncludeOffset = Entries.back().IncludeOffset;
  }

  Entries.push_back(LineEntry::get(Offset, LineNo, FilenameID, Kind,
                                   IncludeOffset));
}

/// AddLineNote - This is the same as the previous version of AddLineNote, but
/// is used for GNU line markers.  If EntryExit is 0, then this doesn't change
/// the presumed #include stack.  If it is 1, this is a file entry, if it is 2
/// then this is a file exit.  FileKind specifies whether this is a system
/// header or extern C system header.
void LineTableInfo::AddLineNote(unsigned FID, unsigned Offset,
                                unsigned LineNo, int FilenameID,
                                unsigned EntryExit,
                                SrcMgr::CharacteristicKind FileKind) {
  assert(FilenameID != -1 && "Unspecified filename should use other accessor");

  std::vector<LineEntry> &Entries = LineEntries[FID];

  assert((Entries.empty() || Entries.back().FileOffset < Offset) &&
         "Adding line entries out of order!");

  unsigned IncludeOffset = 0;
  if (EntryExit == 0) {  // No #include stack change.
    IncludeOffset = Entries.empty() ? 0 : Entries.back().IncludeOffset;
  } else if (EntryExit == 1) {
    IncludeOffset = Offset-1;
  } else if (EntryExit == 2) {
    assert(!Entries.empty() && Entries.back().IncludeOffset &&
       "PPDirectives should have caught case when popping empty include stack");

    // Use the include loc of the last entry's include loc as our include loc.
    IncludeOffset = 0;
    if (const LineEntry *PrevEntry =
          FindNearestLineEntry(FID, Entries.back().IncludeOffset))
      IncludeOffset = PrevEntry->IncludeOffset;
  }

  Entries.push_back(LineEntry::get(Offset, LineNo, FilenameID, FileKind,
                                   IncludeOffset));
}

/// FindNearestLineEntry - Find the line entry nearest to Offset that is before
/// it.  If there is no line entry before Offset in FID, return null.
const LineEntry *LineTableInfo::FindNearestLineEntry(unsigned FID,
                                                     unsigned Offset) {
  const std::vector<LineEntry> &Entries = LineEntries[FID];
  assert(!Entries.empty() && "No #line entries for this FID after all!");

  // It is very common for the query to be after the last #line, check this
  // first.
  if (Entries.back().FileOffset <= Offset)
    return &Entries.back();

  // Do a binary search to find the maximal element that is still before
  // Offset.
  std::vector<LineEntry>::const_iterator I =
    std::upper_bound(Entries.begin(), Entries.end(), Offset);
  if (I == Entries.begin()) return 0;
  return &*--I;
}

/// \brief Add a new line entry that has already been encoded into
/// the internal representation of the line table.
void LineTableInfo::AddEntry(unsigned FID,
                             const std::vector<LineEntry> &Entries) {
  LineEntries[FID] = Entries;
}

/// getLineTableFilenameID - Return the uniqued ID for the specified filename.
///
unsigned SourceManager::getLineTableFilenameID(llvm::StringRef Name) {
  if (LineTable == 0)
    LineTable = new LineTableInfo();
  return LineTable->getLineTableFilenameID(Name);
}

/// AddLineNote - Add a line note to the line table for the FileID and offset
/// specified by Loc.  If FilenameID is -1, it is considered to be
/// unspecified.
void SourceManager::AddLineNote(SourceLocation Loc, unsigned LineNo,
                                int FilenameID) {
  std::pair<FileID, unsigned> LocInfo = getDecomposedInstantiationLoc(Loc);

  bool Invalid = false;
  const SLocEntry &Entry = getSLocEntry(LocInfo.first, &Invalid);
  if (!Entry.isFile() || Invalid)
    return;

  const SrcMgr::FileInfo &FileInfo = Entry.getFile();

  // Remember that this file has #line directives now if it doesn't already.
  const_cast<SrcMgr::FileInfo&>(FileInfo).setHasLineDirectives();

  if (LineTable == 0)
    LineTable = new LineTableInfo();
  LineTable->AddLineNote(LocInfo.first.ID, LocInfo.second, LineNo, FilenameID);
}

/// AddLineNote - Add a GNU line marker to the line table.
void SourceManager::AddLineNote(SourceLocation Loc, unsigned LineNo,
                                int FilenameID, bool IsFileEntry,
                                bool IsFileExit, bool IsSystemHeader,
                                bool IsExternCHeader) {
  // If there is no filename and no flags, this is treated just like a #line,
  // which does not change the flags of the previous line marker.
  if (FilenameID == -1) {
    assert(!IsFileEntry && !IsFileExit && !IsSystemHeader && !IsExternCHeader &&
           "Can't set flags without setting the filename!");
    return AddLineNote(Loc, LineNo, FilenameID);
  }

  std::pair<FileID, unsigned> LocInfo = getDecomposedInstantiationLoc(Loc);

  bool Invalid = false;
  const SLocEntry &Entry = getSLocEntry(LocInfo.first, &Invalid);
  if (!Entry.isFile() || Invalid)
    return;

  const SrcMgr::FileInfo &FileInfo = Entry.getFile();

  // Remember that this file has #line directives now if it doesn't already.
  const_cast<SrcMgr::FileInfo&>(FileInfo).setHasLineDirectives();

  if (LineTable == 0)
    LineTable = new LineTableInfo();

  SrcMgr::CharacteristicKind FileKind;
  if (IsExternCHeader)
    FileKind = SrcMgr::C_ExternCSystem;
  else if (IsSystemHeader)
    FileKind = SrcMgr::C_System;
  else
    FileKind = SrcMgr::C_User;

  unsigned EntryExit = 0;
  if (IsFileEntry)
    EntryExit = 1;
  else if (IsFileExit)
    EntryExit = 2;

  LineTable->AddLineNote(LocInfo.first.ID, LocInfo.second, LineNo, FilenameID,
                         EntryExit, FileKind);
}
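
/// getLineTable - Return the line table, creating it if it does not already
/// exist.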
LineTableInfo &SourceManager::getLineTable() {
  if (LineTable == 0)
    LineTable = new LineTableInfo();
  return *LineTable;
}

//===----------------------------------------------------------------------===//
// Private 'Create' methods.
//===----------------------------------------------------------------------===//

SourceManager::SourceManager(Diagnostic &Diag, FileManager &FileMgr)
  : Diag(Diag), FileMgr(FileMgr), OverridenFilesKeepOriginalName(true),
    ExternalSLocEntries(0), LineTable(0), NumLinearScans(0),
    NumBinaryProbes(0), FakeBufferForRecovery(0) {
  clearIDTables();
  Diag.setSourceManager(this);
}

SourceManager::~SourceManager() {
  delete LineTable;

  // Destroy the ContentCache objects for files and memory buffers.  Since the
  // actual content cache objects are bump pointer allocated, we just have to
  // run the dtors, but we call the deallocate method for completeness.
  for (unsigned i = 0, e = MemBufferInfos.size(); i != e; ++i) {
    MemBufferInfos[i]->~ContentCache();
    ContentCacheAlloc.Deallocate(MemBufferInfos[i]);
  }
  for (llvm::DenseMap<const FileEntry*, SrcMgr::ContentCache*>::iterator
       I = FileInfos.begin(), E = FileInfos.end(); I != E; ++I) {
    I->second->~ContentCache();
    ContentCacheAlloc.Deallocate(I->second);
  }

  delete FakeBufferForRecovery;
}
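
/// clearIDTables - Reset the FileID and source location tables along with the
/// per-query caches, reserving FileID #0 as an invalid instantiation.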
void SourceManager::clearIDTables() {
  MainFileID = FileID();
  SLocEntryTable.clear();
  LastLineNoFileIDQuery = FileID();
  LastLineNoContentCache = 0;
  LastFileIDLookup = FileID();

  if (LineTable)
    LineTable->clear();

  // Use up FileID #0 as an invalid instantiation.
  NextOffset = 0;
  createInstantiationLoc(SourceLocation(),SourceLocation(),SourceLocation(), 1);
}

/// getOrCreateContentCache - Create or return a cached ContentCache for the
/// specified file.
const ContentCache *
SourceManager::getOrCreateContentCache(const FileEntry *FileEnt) {
  assert(FileEnt && "Didn't specify a file entry to use?");

  // Do we already have information about this file?
  ContentCache *&Entry = FileInfos[FileEnt];
  if (Entry) return Entry;

  // Nope, create a new Cache entry.  Make sure it is at least 8-byte aligned
  // so that FileInfo can use the low 3 bits of the pointer for its own
  // nefarious purposes.
  unsigned EntryAlign = llvm::AlignOf<ContentCache>::Alignment;
  EntryAlign = std::max(8U, EntryAlign);
  Entry = ContentCacheAlloc.Allocate<ContentCache>(1, EntryAlign);

  // If the file contents are overridden with contents from another file,
  // pass that file to ContentCache.
  llvm::DenseMap<const FileEntry *, const FileEntry *>::iterator
      overI = OverriddenFiles.find(FileEnt);
  if (overI == OverriddenFiles.end())
    new (Entry) ContentCache(FileEnt);
  else
    new (Entry) ContentCache(OverridenFilesKeepOriginalName ? FileEnt
                                                            : overI->second,
                             overI->second);

  return Entry;
}

/// createMemBufferContentCache - Create a new ContentCache for the specified
/// memory buffer.  This does no caching.
const ContentCache*
SourceManager::createMemBufferContentCache(const MemoryBuffer *Buffer) {
  // Add a new ContentCache to the MemBufferInfos list and return it.  Make
  // sure it is at least 8-byte aligned so that FileInfo can use the low 3 bits
  // of the pointer for its own nefarious purposes.
  unsigned EntryAlign = llvm::AlignOf<ContentCache>::Alignment;
  EntryAlign = std::max(8U, EntryAlign);
  ContentCache *Entry = ContentCacheAlloc.Allocate<ContentCache>(1, EntryAlign);
  new (Entry) ContentCache();
  MemBufferInfos.push_back(Entry);
  Entry->setBuffer(Buffer);
  return Entry;
}
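
/// PreallocateSLocEntries - Reserve NumSLocEntries source location entries to
/// be filled in lazily from the given external source, and set NextOffset to
/// the first offset after the preallocated range.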
void SourceManager::PreallocateSLocEntries(ExternalSLocEntrySource *Source,
                                           unsigned NumSLocEntries,
                                           unsigned NextOffset) {
  ExternalSLocEntries = Source;
  this->NextOffset = NextOffset;
  unsigned CurPrealloc = SLocEntryLoaded.size();
  // If we've ever preallocated, we must not count the dummy entry.
  if (CurPrealloc) --CurPrealloc;
  SLocEntryLoaded.resize(NumSLocEntries + 1);
  SLocEntryLoaded[0] = true;
  SLocEntryTable.resize(SLocEntryTable.size() + NumSLocEntries - CurPrealloc);
}
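
/// ClearPreallocatedSLocEntries - Discard any preallocated source location
/// entries that have not been loaded yet and detach from the external source.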
void SourceManager::ClearPreallocatedSLocEntries() {
  unsigned I = 0;
  for (unsigned N = SLocEntryLoaded.size(); I != N; ++I)
    if (!SLocEntryLoaded[I])
      break;

  // We've already loaded all preallocated source location entries.
  if (I == SLocEntryLoaded.size())
    return;

  // Remove everything from location I onward.
  SLocEntryTable.resize(I);
  SLocEntryLoaded.clear();
  ExternalSLocEntries = 0;
}

/// \brief As part of recovering from missing or changed content, produce a
/// fake, non-empty buffer.
const llvm::MemoryBuffer *SourceManager::getFakeBufferForRecovery() const {
  if (!FakeBufferForRecovery)
    FakeBufferForRecovery
      = llvm::MemoryBuffer::getMemBuffer("<<<INVALID BUFFER>>");

  return FakeBufferForRecovery;
}

//===----------------------------------------------------------------------===//
// Methods to create new FileID's and instantiations.
//===----------------------------------------------------------------------===//

/// createFileID - Create a new FileID for the specified ContentCache and
/// include position.  This works regardless of whether the ContentCache
/// corresponds to a file or some other input source.
FileID SourceManager::createFileID(const ContentCache *File,
                                   SourceLocation IncludePos,
                                   SrcMgr::CharacteristicKind FileCharacter,
                                   unsigned PreallocatedID,
                                   unsigned Offset) {
  if (PreallocatedID) {
    // If we're filling in a preallocated ID, just load in the file
    // entry and return.
    assert(PreallocatedID < SLocEntryLoaded.size() &&
           "Preallocate ID out-of-range");
    assert(!SLocEntryLoaded[PreallocatedID] &&
           "Source location entry already loaded");
    assert(Offset && "Preallocate source location cannot have zero offset");
    SLocEntryTable[PreallocatedID]
      = SLocEntry::get(Offset, FileInfo::get(IncludePos, File, FileCharacter));
    SLocEntryLoaded[PreallocatedID] = true;
    FileID FID = FileID::get(PreallocatedID);
    return FID;
  }

  SLocEntryTable.push_back(SLocEntry::get(NextOffset,
                                          FileInfo::get(IncludePos, File,
                                                        FileCharacter)));
  unsigned FileSize = File->getSize();
  assert(NextOffset+FileSize+1 > NextOffset && "Ran out of source locations!");
  NextOffset += FileSize+1;

  // Set LastFileIDLookup to the newly created file.  The next getFileID call
  // is almost guaranteed to be from that file.
  FileID FID = FileID::get(SLocEntryTable.size()-1);
  return LastFileIDLookup = FID;
}
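
/// createMacroArgInstantiationLoc - Create a new SourceLocation which encodes
/// that a token spelled at SpellingLoc is the instantiation of a macro
/// argument at the instantiation location ILoc.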
SourceLocation
SourceManager::createMacroArgInstantiationLoc(SourceLocation SpellingLoc,
                                              SourceLocation ILoc,
                                              unsigned TokLength) {
  InstantiationInfo II =
    InstantiationInfo::createForMacroArg(SpellingLoc, ILoc);
  return createInstantiationLocImpl(II, TokLength);
}

SourceLocation SourceManager::createInstantiationLoc(SourceLocation SpellingLoc,
                                                     SourceLocation ILocStart,
                                                     SourceLocation ILocEnd,
                                                     unsigned TokLength,
                                                     unsigned PreallocatedID,
                                                     unsigned Offset) {
  InstantiationInfo II =
    InstantiationInfo::create(SpellingLoc, ILocStart, ILocEnd);
  return createInstantiationLocImpl(II, TokLength, PreallocatedID, Offset);
}
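
/// createInstantiationLocImpl - Common implementation for the instantiation
/// location creation methods: record the InstantiationInfo in the SLocEntry
/// table (or in a preallocated slot) and return the resulting macro location.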
SourceLocation
SourceManager::createInstantiationLocImpl(const InstantiationInfo &II,
                                          unsigned TokLength,
                                          unsigned PreallocatedID,
                                          unsigned Offset) {
  if (PreallocatedID) {
    // If we're filling in a preallocated ID, just load in the
    // instantiation entry and return.
    assert(PreallocatedID < SLocEntryLoaded.size() &&
           "Preallocate ID out-of-range");
    assert(!SLocEntryLoaded[PreallocatedID] &&
           "Source location entry already loaded");
    assert(Offset && "Preallocate source location cannot have zero offset");
    SLocEntryTable[PreallocatedID] = SLocEntry::get(Offset, II);
    SLocEntryLoaded[PreallocatedID] = true;
    return SourceLocation::getMacroLoc(Offset);
  }
  SLocEntryTable.push_back(SLocEntry::get(NextOffset, II));
  assert(NextOffset+TokLength+1 > NextOffset && "Ran out of source locations!");
  NextOffset += TokLength+1;
  return SourceLocation::getMacroLoc(NextOffset-(TokLength+1));
}
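
/// getMemoryBufferForFile - Return the memory buffer for the given file,
/// creating its ContentCache on demand; *Invalid is set if the buffer could
/// not be loaded.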
const llvm::MemoryBuffer *
SourceManager::getMemoryBufferForFile(const FileEntry *File,
                                      bool *Invalid) {
  const SrcMgr::ContentCache *IR = getOrCreateContentCache(File);
  assert(IR && "getOrCreateContentCache() cannot return NULL");
  return IR->getBuffer(Diag, *this, SourceLocation(), Invalid);
}
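
/// overrideFileContents - Override the contents of the given source file with
/// the contents of an already-created memory buffer.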
void SourceManager::overrideFileContents(const FileEntry *SourceFile,
                                         const llvm::MemoryBuffer *Buffer,
                                         bool DoNotFree) {
  const SrcMgr::ContentCache *IR = getOrCreateContentCache(SourceFile);
  assert(IR && "getOrCreateContentCache() cannot return NULL");

  const_cast<SrcMgr::ContentCache *>(IR)->replaceBuffer(Buffer, DoNotFree);
}
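
/// overrideFileContents - Override the given source file with another file of
/// the same size.  This must be done before any ContentCache for SourceFile
/// has been created.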
void SourceManager::overrideFileContents(const FileEntry *SourceFile,
                                         const FileEntry *NewFile) {
  assert(SourceFile->getSize() == NewFile->getSize() &&
         "Different sizes, use the FileManager to create a virtual file with "
         "the correct size");
  assert(FileInfos.count(SourceFile) == 0 &&
         "This function should be called at the initialization stage, before "
         "any parsing occurs.");
  OverriddenFiles[SourceFile] = NewFile;
}
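
/// getBufferData - Return the contents of the buffer for the given FileID as
/// a StringRef, or a placeholder string (setting *Invalid) if the entry is
/// not a valid file.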
llvm::StringRef SourceManager::getBufferData(FileID FID, bool *Invalid) const {
  bool MyInvalid = false;
  const SLocEntry &SLoc = getSLocEntry(FID.ID, &MyInvalid);
  if (!SLoc.isFile() || MyInvalid) {
    if (Invalid)
      *Invalid = true;
    return "<<<<<INVALID SOURCE LOCATION>>>>>";
  }

  const llvm::MemoryBuffer *Buf
    = SLoc.getFile().getContentCache()->getBuffer(Diag, *this, SourceLocation(),
                                                  &MyInvalid);
  if (Invalid)
    *Invalid = MyInvalid;

  if (MyInvalid)
    return "<<<<<INVALID SOURCE LOCATION>>>>>";

  return Buf->getBuffer();
}

//===----------------------------------------------------------------------===//
// SourceLocation manipulation methods.
//===----------------------------------------------------------------------===//

/// getFileIDSlow - Return the FileID for a SourceLocation.  This is a very hot
/// method that is used for all SourceManager queries that start with a
/// SourceLocation object.  It is responsible for finding the entry in
/// SLocEntryTable which contains the specified location.
///
FileID SourceManager::getFileIDSlow(unsigned SLocOffset) const {
  if (!SLocOffset)
    return FileID::get(0);

  // After the first and second level caches, I see two common sorts of
  // behavior: 1) a lot of searched FileID's are "near" the cached file
  // location or are "near" the cached instantiation location.  2) others are
  // just completely random and may be a very long way away.
  //
  // To handle this, we do a linear search for up to 8 steps to catch #1
  // quickly, then we fall back to a less cache efficient, but more scalable,
  // binary search to find the location.

  // See if this is near the file point - worst case we start scanning from the
  // most newly created FileID.
  std::vector<SrcMgr::SLocEntry>::const_iterator I;

  if (SLocEntryTable[LastFileIDLookup.ID].getOffset() < SLocOffset) {
    // Neither loc prunes our search.
    I = SLocEntryTable.end();
  } else {
    // Perhaps it is near the file point.
    I = SLocEntryTable.begin()+LastFileIDLookup.ID;
  }

  // Find the FileID that contains this.  "I" is an iterator that points to a
  // FileID whose offset is known to be larger than SLocOffset.
  unsigned NumProbes = 0;
  while (1) {
    --I;
    if (ExternalSLocEntries) {
      bool Invalid = false;
      getSLocEntry(FileID::get(I - SLocEntryTable.begin()), &Invalid);
      if (Invalid)
        return FileID::get(0);
    }

    if (I->getOffset() <= SLocOffset) {
#if 0
      printf("lin %d -> %d [%s] %d %d\n", SLocOffset,
             I-SLocEntryTable.begin(),
             I->isInstantiation() ? "inst" : "file",
             LastFileIDLookup.ID, int(SLocEntryTable.end()-I));
#endif
      FileID Res = FileID::get(I-SLocEntryTable.begin());

      // If this isn't an instantiation, remember it.  We have good locality
      // across FileID lookups.
      if (!I->isInstantiation())
        LastFileIDLookup = Res;
      NumLinearScans += NumProbes+1;
      return Res;
    }
    if (++NumProbes == 8)
      break;
  }

  // Convert "I" back into an index.  We know that it is an entry whose index
  // is larger than the offset we are looking for.
  unsigned GreaterIndex = I-SLocEntryTable.begin();
  // LessIndex - This is the lower bound of the range that we're searching.
  // We know that the offset corresponding to the FileID is less than
  // SLocOffset.
  unsigned LessIndex = 0;
  NumProbes = 0;
  while (1) {
    bool Invalid = false;
    unsigned MiddleIndex = (GreaterIndex-LessIndex)/2+LessIndex;
    unsigned MidOffset = getSLocEntry(FileID::get(MiddleIndex), &Invalid)
                            .getOffset();
    if (Invalid)
      return FileID::get(0);

    ++NumProbes;

    // If the offset of the midpoint is too large, chop the high side of the
    // range to the midpoint.
    if (MidOffset > SLocOffset) {
      GreaterIndex = MiddleIndex;
      continue;
    }

    // If the middle index contains the value, succeed and return.
    if (isOffsetInFileID(FileID::get(MiddleIndex), SLocOffset)) {
#if 0
      printf("bin %d -> %d [%s] %d %d\n", SLocOffset,
             I-SLocEntryTable.begin(),
             I->isInstantiation() ? "inst" : "file",
             LastFileIDLookup.ID, int(SLocEntryTable.end()-I));
#endif
      FileID Res = FileID::get(MiddleIndex);

      // If this isn't an instantiation, remember it.  We have good locality
      // across FileID lookups.
      if (!I->isInstantiation())
        LastFileIDLookup = Res;
      NumBinaryProbes += NumProbes;
      return Res;
    }

    // Otherwise, move the low-side up to the middle index.
    LessIndex = MiddleIndex;
  }
}

SourceLocation SourceManager::
getInstantiationLocSlowCase(SourceLocation Loc) const {
  do {
    // Note: If Loc indicates an offset into a token that came from a macro
    // expansion (e.g. the 5th character of the token) we do not want to add
    // this offset when going to the instantiation location.  The instantiation
    // location is the macro invocation, which the offset has nothing to do
    // with.  This is unlike when we get the spelling loc, because the offset
    // directly corresponds to the token whose spelling we're inspecting.
    Loc = getSLocEntry(getFileID(Loc)).getInstantiation()
                       .getInstantiationLocStart();
  } while (!Loc.isFileID());

  return Loc;
}
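
/// getSpellingLocSlowCase - Walk the instantiation chain for Loc down to the
/// file location where the token's characters were actually spelled.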
SourceLocation SourceManager::getSpellingLocSlowCase(SourceLocation Loc) const {
  do {
    std::pair<FileID, unsigned> LocInfo = getDecomposedLoc(Loc);
    Loc = getSLocEntry(LocInfo.first).getInstantiation().getSpellingLoc();
    Loc = Loc.getFileLocWithOffset(LocInfo.second);
  } while (!Loc.isFileID());
  return Loc;
}

std::pair<FileID, unsigned>
SourceManager::getDecomposedInstantiationLocSlowCase(
                                             const SrcMgr::SLocEntry *E) const {
  // If this is an instantiation record, walk through all the instantiation
  // points.
  FileID FID;
  SourceLocation Loc;
  unsigned Offset;
  do {
    Loc = E->getInstantiation().getInstantiationLocStart();

    FID = getFileID(Loc);
    E = &getSLocEntry(FID);
    Offset = Loc.getOffset()-E->getOffset();
  } while (!Loc.isFileID());

  return std::make_pair(FID, Offset);
}

std::pair<FileID, unsigned>
SourceManager::getDecomposedSpellingLocSlowCase(const SrcMgr::SLocEntry *E,
                                                unsigned Offset) const {
  // If this is an instantiation record, walk through all the instantiation
  // points.
  FileID FID;
  SourceLocation Loc;
  do {
    Loc = E->getInstantiation().getSpellingLoc();

    FID = getFileID(Loc);
    E = &getSLocEntry(FID);
    Offset += Loc.getOffset()-E->getOffset();
  } while (!Loc.isFileID());

  return std::make_pair(FID, Offset);
}

/// getImmediateSpellingLoc - Given a SourceLocation object, return the
/// spelling location referenced by the ID.  This is the first level down
/// towards the place where the characters that make up the lexed token can be
/// found.  This should not generally be used by clients.
SourceLocation SourceManager::getImmediateSpellingLoc(SourceLocation Loc) const {
  if (Loc.isFileID()) return Loc;
  std::pair<FileID, unsigned> LocInfo = getDecomposedLoc(Loc);
  Loc = getSLocEntry(LocInfo.first).getInstantiation().getSpellingLoc();
  return Loc.getFileLocWithOffset(LocInfo.second);
}

/// getImmediateInstantiationRange - Loc is required to be an instantiation
/// location.  Return the start/end of the instantiation information.
std::pair<SourceLocation,SourceLocation>
SourceManager::getImmediateInstantiationRange(SourceLocation Loc) const {
  assert(Loc.isMacroID() && "Not an instantiation loc!");
  const InstantiationInfo &II = getSLocEntry(getFileID(Loc)).getInstantiation();
  return II.getInstantiationLocRange();
}

/// getInstantiationRange - Given a SourceLocation object, return the range of
/// tokens covered by the instantiation in the ultimate file.
std::pair<SourceLocation,SourceLocation>
SourceManager::getInstantiationRange(SourceLocation Loc) const {
  if (Loc.isFileID()) return std::make_pair(Loc, Loc);

  std::pair<SourceLocation,SourceLocation> Res =
    getImmediateInstantiationRange(Loc);

  // Fully resolve the start and end locations to their ultimate instantiation
  // points.
  while (!Res.first.isFileID())
    Res.first = getImmediateInstantiationRange(Res.first).first;
  while (!Res.second.isFileID())
    Res.second = getImmediateInstantiationRange(Res.second).second;
  return Res;
}
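
/// isMacroArgInstantiation - Return true if Loc is a macro location that
/// corresponds to the instantiation of a macro argument.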
bool SourceManager::isMacroArgInstantiation(SourceLocation Loc) const {
  if (!Loc.isMacroID()) return false;

  FileID FID = getFileID(Loc);
  const SrcMgr::SLocEntry *E = &getSLocEntry(FID);
  const SrcMgr::InstantiationInfo &II = E->getInstantiation();
  return II.isMacroArgInstantiation();
}

//===----------------------------------------------------------------------===//
// Queries about the code at a SourceLocation.
//===----------------------------------------------------------------------===//

/// getCharacterData - Return a pointer to the start of the specified location
/// in the appropriate MemoryBuffer.
const char *SourceManager::getCharacterData(SourceLocation SL,
                                            bool *Invalid) const {
  // Note that this is a hot function in the getSpelling() path, which is
  // heavily used by -E mode.
  std::pair<FileID, unsigned> LocInfo = getDecomposedSpellingLoc(SL);

  // Note that calling 'getBuffer()' may lazily page in a source file.
  bool CharDataInvalid = false;
  const SLocEntry &Entry = getSLocEntry(LocInfo.first, &CharDataInvalid);
  if (CharDataInvalid || !Entry.isFile()) {
    if (Invalid)
      *Invalid = true;

    return "<<<<INVALID BUFFER>>>>";
  }
  const llvm::MemoryBuffer *Buffer
    = Entry.getFile().getContentCache()
                  ->getBuffer(Diag, *this, SourceLocation(), &CharDataInvalid);
  if (Invalid)
    *Invalid = CharDataInvalid;
  return Buffer->getBufferStart() + (CharDataInvalid? 0 : LocInfo.second);
}

/// getColumnNumber - Return the column # for the specified file position.
/// This is significantly cheaper to compute than the line number.
unsigned SourceManager::getColumnNumber(FileID FID, unsigned FilePos,
                                        bool *Invalid) const {
  bool MyInvalid = false;
  const char *Buf = getBuffer(FID, &MyInvalid)->getBufferStart();
  if (Invalid)
    *Invalid = MyInvalid;

  if (MyInvalid)
    return 1;

  unsigned LineStart = FilePos;
  while (LineStart && Buf[LineStart-1] != '\n' && Buf[LineStart-1] != '\r')
    --LineStart;
  return FilePos-LineStart+1;
}

// isInvalid - Return the result of calling loc.isInvalid(), and
// if Invalid is not null, set its value to same.
static bool isInvalid(SourceLocation Loc, bool *Invalid) {
  bool MyInvalid = Loc.isInvalid();
  if (Invalid)
    *Invalid = MyInvalid;
  return MyInvalid;
}

unsigned SourceManager::getSpellingColumnNumber(SourceLocation Loc,
                                                bool *Invalid) const {
  if (isInvalid(Loc, Invalid)) return 0;
  std::pair<FileID, unsigned> LocInfo = getDecomposedSpellingLoc(Loc);
  return getColumnNumber(LocInfo.first, LocInfo.second, Invalid);
}

unsigned SourceManager::getInstantiationColumnNumber(SourceLocation Loc,
                                                     bool *Invalid) const {
  if (isInvalid(Loc, Invalid)) return 0;
  std::pair<FileID, unsigned> LocInfo = getDecomposedInstantiationLoc(Loc);
  return getColumnNumber(LocInfo.first, LocInfo.second, Invalid);
}

unsigned SourceManager::getPresumedColumnNumber(SourceLocation Loc,
                                                bool *Invalid) const {
  if (isInvalid(Loc, Invalid)) return 0;
  return getPresumedLoc(Loc).getColumn();
}
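
/// ComputeLineNumbers - Scan the buffer for FI and record the byte offset at
/// which every physical source line begins in FI->SourceLineCache.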
static LLVM_ATTRIBUTE_NOINLINE void
ComputeLineNumbers(Diagnostic &Diag, ContentCache *FI,
                   llvm::BumpPtrAllocator &Alloc,
                   const SourceManager &SM, bool &Invalid);
static void ComputeLineNumbers(Diagnostic &Diag, ContentCache *FI,
                               llvm::BumpPtrAllocator &Alloc,
                               const SourceManager &SM, bool &Invalid) {
  // Note that calling 'getBuffer()' may lazily page in the file.
  const MemoryBuffer *Buffer = FI->getBuffer(Diag, SM, SourceLocation(),
                                             &Invalid);
  if (Invalid)
    return;

  // Find the file offsets of all of the *physical* source lines.  This does
  // not look at trigraphs, escaped newlines, or anything else tricky.
  llvm::SmallVector<unsigned, 256> LineOffsets;

  // Line #1 starts at char 0.
  LineOffsets.push_back(0);

  const unsigned char *Buf = (const unsigned char *)Buffer->getBufferStart();
  const unsigned char *End = (const unsigned char *)Buffer->getBufferEnd();
  unsigned Offs = 0;
  while (1) {
    // Skip over the contents of the line.
    // TODO: Vectorize this?  This is very performance sensitive for programs
    // with lots of diagnostics and in -E mode.
    const unsigned char *NextBuf = (const unsigned char *)Buf;
    while (*NextBuf != '\n' && *NextBuf != '\r' && *NextBuf != '\0')
      ++NextBuf;
    Offs += NextBuf-Buf;
    Buf = NextBuf;

    if (Buf[0] == '\n' || Buf[0] == '\r') {
      // If this is \n\r or \r\n, skip both characters.
      if ((Buf[1] == '\n' || Buf[1] == '\r') && Buf[0] != Buf[1])
        ++Offs, ++Buf;
      ++Offs, ++Buf;
      LineOffsets.push_back(Offs);
    } else {
      // Otherwise, this is a null.  If end of file, exit.
      if (Buf == End) break;
      // Otherwise, skip the null.
      ++Offs, ++Buf;
    }
  }

  // Copy the offsets into the FileInfo structure.
  FI->NumLines = LineOffsets.size();
  FI->SourceLineCache = Alloc.Allocate<unsigned>(LineOffsets.size());
  std::copy(LineOffsets.begin(), LineOffsets.end(), FI->SourceLineCache);
}

/// getLineNumber - Given a SourceLocation, return the spelling line number
/// for the position indicated.  This requires building and caching a table of
/// line offsets for the MemoryBuffer, so this is not cheap: use only when
/// about to emit a diagnostic.
unsigned SourceManager::getLineNumber(FileID FID, unsigned FilePos,
                                      bool *Invalid) const {
  if (FID.isInvalid()) {
    if (Invalid)
      *Invalid = true;
    return 1;
  }

  ContentCache *Content;
  if (LastLineNoFileIDQuery == FID)
    Content = LastLineNoContentCache;
  else {
    bool MyInvalid = false;
    const SLocEntry &Entry = getSLocEntry(FID, &MyInvalid);
    if (MyInvalid || !Entry.isFile()) {
      if (Invalid)
        *Invalid = true;
      return 1;
    }

    Content = const_cast<ContentCache*>(Entry.getFile().getContentCache());
|
|
|
|
}
|
|
|
|
|
2006-06-18 13:43:12 +08:00
|
|
|
// If this is the first use of line information for this buffer, compute the
|
2007-07-24 13:57:19 +08:00
|
|
|
// SourceLineCache for it on demand.
|
2010-03-16 13:20:39 +08:00
|
|
|
if (Content->SourceLineCache == 0) {
|
|
|
|
bool MyInvalid = false;
|
2010-04-21 04:35:58 +08:00
|
|
|
ComputeLineNumbers(Diag, Content, ContentCacheAlloc, *this, MyInvalid);
|
2010-03-16 13:20:39 +08:00
|
|
|
if (Invalid)
|
|
|
|
*Invalid = MyInvalid;
|
|
|
|
if (MyInvalid)
|
|
|
|
return 1;
|
|
|
|
} else if (Invalid)
|
|
|
|
*Invalid = false;
|
2006-06-18 13:43:12 +08:00
|
|
|
|
|
|
|
// Okay, we know we have a line number table. Do a binary search to find the
|
|
|
|
// line number that this character position lands on.
|
2007-10-31 05:08:08 +08:00
|
|
|
unsigned *SourceLineCache = Content->SourceLineCache;
|
2007-07-24 13:57:19 +08:00
|
|
|
unsigned *SourceLineCacheStart = SourceLineCache;
|
2007-10-31 05:08:08 +08:00
|
|
|
unsigned *SourceLineCacheEnd = SourceLineCache + Content->NumLines;
|
2009-09-09 23:08:12 +08:00
|
|
|
|
2009-02-04 09:06:56 +08:00
|
|
|
unsigned QueriedFilePos = FilePos+1;
|
2007-07-24 13:57:19 +08:00
|
|
|
|
2009-05-19 01:30:52 +08:00
|
|
|
// FIXME: I would like to be convinced that this code is worth being as
|
2009-09-09 23:08:12 +08:00
|
|
|
// complicated as it is; binary search isn't that slow.
|
2009-05-19 01:30:52 +08:00
|
|
|
//
|
|
|
|
// If it is worth being optimized, then in my opinion it could be more
|
|
|
|
// performant, simpler, and more obviously correct by just "galloping" outward
|
|
|
|
// from the queried file position. In fact, this could be incorporated into a
|
|
|
|
// generic algorithm such as lower_bound_with_hint.
|
|
|
|
//
|
|
|
|
// If someone gives me a test case where this matters, I will do it! - DWD
|
|
|
|
|
2007-07-24 13:57:19 +08:00
|
|
|
// If the previous query was to the same file, we know both the file pos from
|
|
|
|
// that query and the line number returned. This allows us to narrow the
|
|
|
|
// search space from the entire file to something near the match.
|
2009-02-04 09:06:56 +08:00
|
|
|
if (LastLineNoFileIDQuery == FID) {
|
2007-07-24 13:57:19 +08:00
|
|
|
if (QueriedFilePos >= LastLineNoFilePos) {
|
2009-05-19 01:30:52 +08:00
|
|
|
// FIXME: Potential overflow?
|
2007-07-24 13:57:19 +08:00
|
|
|
SourceLineCache = SourceLineCache+LastLineNoResult-1;
|
2009-09-09 23:08:12 +08:00
|
|
|
|
2007-07-24 13:57:19 +08:00
|
|
|
// The query is likely to be nearby the previous one. Here we check to
|
|
|
|
// see if it is within 5, 10 or 20 lines. It can be far away in cases
|
|
|
|
// where big comment blocks and vertical whitespace eat up lines but
|
|
|
|
// contribute no tokens.
|
|
|
|
if (SourceLineCache+5 < SourceLineCacheEnd) {
|
|
|
|
if (SourceLineCache[5] > QueriedFilePos)
|
|
|
|
SourceLineCacheEnd = SourceLineCache+5;
|
|
|
|
else if (SourceLineCache+10 < SourceLineCacheEnd) {
|
|
|
|
if (SourceLineCache[10] > QueriedFilePos)
|
|
|
|
SourceLineCacheEnd = SourceLineCache+10;
|
|
|
|
else if (SourceLineCache+20 < SourceLineCacheEnd) {
|
|
|
|
if (SourceLineCache[20] > QueriedFilePos)
|
|
|
|
SourceLineCacheEnd = SourceLineCache+20;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
} else {
|
2009-05-19 01:30:52 +08:00
|
|
|
if (LastLineNoResult < Content->NumLines)
|
|
|
|
SourceLineCacheEnd = SourceLineCache+LastLineNoResult+1;
|
2007-07-24 13:57:19 +08:00
|
|
|
}
|
|
|
|
}
|
2009-09-09 23:08:12 +08:00
|
|
|
|
2007-07-24 14:43:46 +08:00
|
|
|
// If the spread is large, do a "radix" test as our initial guess, based on
|
|
|
|
// the assumption that lines average to approximately the same length.
|
|
|
|
// NOTE: This is currently disabled, as it does not appear to be profitable in
|
|
|
|
// initial measurements.
|
|
|
|
if (0 && SourceLineCacheEnd-SourceLineCache > 20) {
|
2007-10-31 05:08:08 +08:00
|
|
|
unsigned FileLen = Content->SourceLineCache[Content->NumLines-1];
|
2009-09-09 23:08:12 +08:00
|
|
|
|
2007-07-24 14:43:46 +08:00
|
|
|
// Take a stab at guessing where it is.
|
2007-10-31 05:08:08 +08:00
|
|
|
unsigned ApproxPos = Content->NumLines*QueriedFilePos / FileLen;
|
2009-09-09 23:08:12 +08:00
|
|
|
|
2007-07-24 14:43:46 +08:00
|
|
|
// Check for -10 and +10 lines.
|
|
|
|
unsigned LowerBound = std::max(int(ApproxPos-10), 0);
|
|
|
|
unsigned UpperBound = std::min(ApproxPos+10, FileLen);
|
|
|
|
|
|
|
|
// If the computed lower bound is less than the query location, move it in.
|
|
|
|
if (SourceLineCache < SourceLineCacheStart+LowerBound &&
|
|
|
|
SourceLineCacheStart[LowerBound] < QueriedFilePos)
|
|
|
|
SourceLineCache = SourceLineCacheStart+LowerBound;
|
2009-09-09 23:08:12 +08:00
|
|
|
|
2007-07-24 14:43:46 +08:00
|
|
|
// If the computed upper bound is greater than the query location, move it.
|
|
|
|
if (SourceLineCacheEnd > SourceLineCacheStart+UpperBound &&
|
|
|
|
SourceLineCacheStart[UpperBound] >= QueriedFilePos)
|
|
|
|
SourceLineCacheEnd = SourceLineCacheStart+UpperBound;
|
|
|
|
}
|
2009-09-09 23:08:12 +08:00
|
|
|
|
2007-07-24 14:43:46 +08:00
|
|
|
unsigned *Pos
|
|
|
|
= std::lower_bound(SourceLineCache, SourceLineCacheEnd, QueriedFilePos);
|
2007-07-24 13:57:19 +08:00
|
|
|
unsigned LineNo = Pos-SourceLineCacheStart;
|
2009-09-09 23:08:12 +08:00
|
|
|
|
2009-02-04 09:06:56 +08:00
|
|
|
LastLineNoFileIDQuery = FID;
|
2007-10-31 05:08:08 +08:00
|
|
|
LastLineNoContentCache = Content;
|
2007-07-24 13:57:19 +08:00
|
|
|
LastLineNoFilePos = QueriedFilePos;
|
|
|
|
LastLineNoResult = LineNo;
|
|
|
|
return LineNo;
|
2006-06-18 13:43:12 +08:00
|
|
|
}
|
|
|
|
|
2011-02-23 08:47:48 +08:00
|
|
|
unsigned SourceManager::getSpellingLineNumber(SourceLocation Loc,
|
|
|
|
bool *Invalid) const {
|
|
|
|
if (isInvalid(Loc, Invalid)) return 0;
|
|
|
|
std::pair<FileID, unsigned> LocInfo = getDecomposedSpellingLoc(Loc);
|
|
|
|
return getLineNumber(LocInfo.first, LocInfo.second, Invalid);
|
|
|
|
}
|
2010-03-16 13:20:39 +08:00
|
|
|
unsigned SourceManager::getInstantiationLineNumber(SourceLocation Loc,
|
|
|
|
bool *Invalid) const {
|
2010-10-06 01:56:33 +08:00
|
|
|
if (isInvalid(Loc, Invalid)) return 0;
|
2009-02-04 09:06:56 +08:00
|
|
|
std::pair<FileID, unsigned> LocInfo = getDecomposedInstantiationLoc(Loc);
|
|
|
|
return getLineNumber(LocInfo.first, LocInfo.second, Invalid);
|
|
|
|
}
|
2011-02-23 08:47:48 +08:00
|
|
|
unsigned SourceManager::getPresumedLineNumber(SourceLocation Loc,
|
2010-03-16 13:20:39 +08:00
|
|
|
bool *Invalid) const {
|
2010-10-06 01:56:33 +08:00
|
|
|
if (isInvalid(Loc, Invalid)) return 0;
|
2011-02-23 08:47:48 +08:00
|
|
|
return getPresumedLoc(Loc).getLine();
|
2009-02-04 09:06:56 +08:00
|
|
|
}
|
|
|
|
|
2009-02-04 13:33:01 +08:00
|
|
|
/// getFileCharacteristic - return the file characteristic of the specified
|
2009-09-09 23:08:12 +08:00
|
|
|
/// source location, indicating whether this is a normal file, a system
|
2009-02-04 13:33:01 +08:00
|
|
|
/// header, or an "implicit extern C" system header.
|
|
|
|
///
|
|
|
|
/// This state can be modified with flags on GNU linemarker directives like:
|
|
|
|
/// # 4 "foo.h" 3
|
|
|
|
/// which changes all source locations in the current file after that to be
|
|
|
|
/// considered to be from a system header.
|
2009-09-09 23:08:12 +08:00
|
|
|
SrcMgr::CharacteristicKind
|
2009-02-04 13:33:01 +08:00
|
|
|
SourceManager::getFileCharacteristic(SourceLocation Loc) const {
|
|
|
|
assert(!Loc.isInvalid() && "Can't get file characteristic of invalid loc!");
|
|
|
|
std::pair<FileID, unsigned> LocInfo = getDecomposedInstantiationLoc(Loc);
|
2011-04-20 08:21:03 +08:00
|
|
|
bool Invalid = false;
|
|
|
|
const SLocEntry &SEntry = getSLocEntry(LocInfo.first, &Invalid);
|
|
|
|
if (Invalid || !SEntry.isFile())
|
|
|
|
return C_User;
|
|
|
|
|
|
|
|
const SrcMgr::FileInfo &FI = SEntry.getFile();
|
2009-02-04 13:33:01 +08:00
|
|
|
|
|
|
|
// If there are no #line directives in this file, just return the whole-file
|
|
|
|
// state.
|
|
|
|
if (!FI.hasLineDirectives())
|
|
|
|
return FI.getFileCharacteristic();
|
2009-09-09 23:08:12 +08:00
|
|
|
|
2009-02-04 13:33:01 +08:00
|
|
|
assert(LineTable && "Can't have linetable entries without a LineTable!");
|
|
|
|
// See if there is a #line directive before the location.
|
|
|
|
const LineEntry *Entry =
|
|
|
|
LineTable->FindNearestLineEntry(LocInfo.first.ID, LocInfo.second);
|
2009-09-09 23:08:12 +08:00
|
|
|
|
2009-02-04 13:33:01 +08:00
|
|
|
// If this is before the first line marker, use the file characteristic.
|
|
|
|
if (!Entry)
|
|
|
|
return FI.getFileCharacteristic();
|
|
|
|
|
|
|
|
return Entry->FileKind;
|
|
|
|
}
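
// Illustrative sketch (not part of the original source): a client might use
// getFileCharacteristic() to decide whether a location lies in a system
// header, e.g. to suppress certain warnings there. The helper name is
// hypothetical.
static bool locIsInSystemHeader(const SourceManager &SM, SourceLocation Loc) {
  if (Loc.isInvalid())
    return false;
  return SM.getFileCharacteristic(Loc) != SrcMgr::C_User;
}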
|
|
|
|
|
2009-02-17 16:39:06 +08:00
|
|
|
/// Return the filename or buffer identifier of the buffer the location is in.
|
|
|
|
/// Note that this name does not respect #line directives. Use getPresumedLoc
|
|
|
|
/// for normal clients.
|
2010-03-16 13:20:39 +08:00
|
|
|
const char *SourceManager::getBufferName(SourceLocation Loc,
|
|
|
|
bool *Invalid) const {
|
2010-10-06 01:56:33 +08:00
|
|
|
if (isInvalid(Loc, Invalid)) return "<invalid loc>";
|
2009-09-09 23:08:12 +08:00
|
|
|
|
2010-03-16 13:20:39 +08:00
|
|
|
return getBuffer(getFileID(Loc), Invalid)->getBufferIdentifier();
|
2009-02-17 16:39:06 +08:00
|
|
|
}
|
|
|
|
|
2009-02-04 09:06:56 +08:00
|
|
|
|
2009-01-27 15:57:44 +08:00
|
|
|
/// getPresumedLoc - This method returns the "presumed" location that a
|
|
|
|
/// SourceLocation specifies. A "presumed location" can be modified by #line
|
|
|
|
/// or GNU line marker directives. This provides a view on the data that a
|
|
|
|
/// user should see in diagnostics, for example.
|
|
|
|
///
|
|
|
|
/// Note that a presumed location is always given as the instantiation point
|
|
|
|
/// of an instantiation location, not the spelling location.
|
|
|
|
PresumedLoc SourceManager::getPresumedLoc(SourceLocation Loc) const {
|
|
|
|
if (Loc.isInvalid()) return PresumedLoc();
|
2009-09-09 23:08:12 +08:00
|
|
|
|
2009-01-27 15:57:44 +08:00
|
|
|
// Presumed locations are always for instantiation points.
|
2009-02-04 08:55:58 +08:00
|
|
|
std::pair<FileID, unsigned> LocInfo = getDecomposedInstantiationLoc(Loc);
|
2009-09-09 23:08:12 +08:00
|
|
|
|
2011-04-20 08:21:03 +08:00
|
|
|
bool Invalid = false;
|
|
|
|
const SLocEntry &Entry = getSLocEntry(LocInfo.first, &Invalid);
|
|
|
|
if (Invalid || !Entry.isFile())
|
|
|
|
return PresumedLoc();
|
|
|
|
|
|
|
|
const SrcMgr::FileInfo &FI = Entry.getFile();
|
2009-01-27 15:57:44 +08:00
|
|
|
const SrcMgr::ContentCache *C = FI.getContentCache();
|
2009-09-09 23:08:12 +08:00
|
|
|
|
2009-02-04 09:55:42 +08:00
|
|
|
// To get the source name, first consult the FileEntry (if one exists)
|
|
|
|
// before the MemBuffer as this will avoid unnecessarily paging in the
|
|
|
|
// MemBuffer.
|
2010-04-21 04:35:58 +08:00
|
|
|
const char *Filename;
|
2011-03-05 09:03:53 +08:00
|
|
|
if (C->OrigEntry)
|
|
|
|
Filename = C->OrigEntry->getName();
|
2010-04-21 04:35:58 +08:00
|
|
|
else
|
|
|
|
Filename = C->getBuffer(Diag, *this)->getBufferIdentifier();
|
2011-04-20 08:21:03 +08:00
|
|
|
|
2010-11-02 08:39:22 +08:00
|
|
|
unsigned LineNo = getLineNumber(LocInfo.first, LocInfo.second, &Invalid);
|
|
|
|
if (Invalid)
|
|
|
|
return PresumedLoc();
|
|
|
|
unsigned ColNo = getColumnNumber(LocInfo.first, LocInfo.second, &Invalid);
|
|
|
|
if (Invalid)
|
|
|
|
return PresumedLoc();
|
|
|
|
|
2009-02-04 09:55:42 +08:00
|
|
|
SourceLocation IncludeLoc = FI.getIncludeLoc();
|
2009-09-09 23:08:12 +08:00
|
|
|
|
2009-02-04 09:55:42 +08:00
|
|
|
// If we have #line directives in this file, update and overwrite the physical
|
|
|
|
// location info if appropriate.
|
|
|
|
if (FI.hasLineDirectives()) {
|
|
|
|
assert(LineTable && "Can't have linetable entries without a LineTable!");
|
|
|
|
// See if there is a #line directive before this. If so, get it.
|
|
|
|
if (const LineEntry *Entry =
|
|
|
|
LineTable->FindNearestLineEntry(LocInfo.first.ID, LocInfo.second)) {
|
2009-02-04 10:00:59 +08:00
|
|
|
// If the LineEntry indicates a filename, use it.
|
2009-02-04 09:55:42 +08:00
|
|
|
if (Entry->FilenameID != -1)
|
|
|
|
Filename = LineTable->getFilename(Entry->FilenameID);
|
2009-02-04 10:00:59 +08:00
|
|
|
|
|
|
|
// Use the line number specified by the LineEntry. This line number may
|
|
|
|
// be multiple lines down from the line entry. Add the difference in
|
|
|
|
// physical line numbers between the query point and the line marker to the
|
|
|
|
// total.
|
|
|
|
unsigned MarkerLineNo = getLineNumber(LocInfo.first, Entry->FileOffset);
|
|
|
|
LineNo = Entry->LineNo + (LineNo-MarkerLineNo-1);
|
2009-09-09 23:08:12 +08:00
|
|
|
|
2009-02-04 10:15:40 +08:00
|
|
|
// Note that column numbers are not affected by line markers.
|
2009-09-09 23:08:12 +08:00
|
|
|
|
2009-02-04 14:25:26 +08:00
|
|
|
// Handle virtual #include manipulation.
|
|
|
|
if (Entry->IncludeOffset) {
|
|
|
|
IncludeLoc = getLocForStartOfFile(LocInfo.first);
|
|
|
|
IncludeLoc = IncludeLoc.getFileLocWithOffset(Entry->IncludeOffset);
|
|
|
|
}
|
2009-02-04 09:55:42 +08:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
return PresumedLoc(Filename, LineNo, ColNo, IncludeLoc);
|
2009-01-26 08:43:02 +08:00
|
|
|
}
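
// Illustrative sketch (not part of the original source): after a directive
// such as '#line 100 "renamed.c"', the presumed location reports the remapped
// file name and line, while the spelling line number still reflects the
// physical position in the buffer. The helper name is hypothetical.
static void dumpPresumedLoc(const SourceManager &SM, SourceLocation Loc) {
  PresumedLoc PLoc = SM.getPresumedLoc(Loc);
  if (PLoc.isInvalid())
    return;
  llvm::errs() << PLoc.getFilename() << ':' << PLoc.getLine() << ':'
               << PLoc.getColumn() << '\n';
}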
|
|
|
|
|
|
|
|
//===----------------------------------------------------------------------===//
|
|
|
|
// Other miscellaneous methods.
|
|
|
|
//===----------------------------------------------------------------------===//
|
|
|
|
|
2011-02-04 01:17:35 +08:00
|
|
|
/// \brief Retrieve the inode for the given file entry, if possible.
|
|
|
|
///
|
|
|
|
/// This routine involves a system call, and therefore should only be used
|
|
|
|
/// in non-performance-critical code.
|
|
|
|
static llvm::Optional<ino_t> getActualFileInode(const FileEntry *File) {
|
|
|
|
if (!File)
|
|
|
|
return llvm::Optional<ino_t>();
|
|
|
|
|
|
|
|
struct stat StatBuf;
|
|
|
|
if (::stat(File->getName(), &StatBuf))
|
|
|
|
return llvm::Optional<ino_t>();
|
|
|
|
|
|
|
|
return StatBuf.st_ino;
|
|
|
|
}
|
|
|
|
|
2009-06-20 16:09:57 +08:00
|
|
|
/// \brief Get the source location for the given file:line:col triplet.
|
|
|
|
///
|
|
|
|
/// If the source file is included multiple times, the source location will
|
|
|
|
/// be based upon the first inclusion.
|
|
|
|
SourceLocation SourceManager::getLocation(const FileEntry *SourceFile,
|
2011-02-04 01:17:35 +08:00
|
|
|
unsigned Line, unsigned Col) {
|
2009-06-20 16:09:57 +08:00
|
|
|
assert(SourceFile && "Null source file!");
|
|
|
|
assert(Line && Col && "Line and column should start from 1!");
|
|
|
|
|
2009-12-02 13:34:39 +08:00
|
|
|
// Find the first file ID that corresponds to the given file.
|
|
|
|
FileID FirstFID;
|
|
|
|
|
|
|
|
// First, check the main file ID, since it is common to look for a
|
|
|
|
// location in the main file.
|
2011-02-04 01:17:35 +08:00
|
|
|
llvm::Optional<ino_t> SourceFileInode;
|
|
|
|
llvm::Optional<llvm::StringRef> SourceFileName;
|
2009-12-02 13:34:39 +08:00
|
|
|
if (!MainFileID.isInvalid()) {
|
2011-04-20 08:21:03 +08:00
|
|
|
bool Invalid = false;
|
|
|
|
const SLocEntry &MainSLoc = getSLocEntry(MainFileID, &Invalid);
|
|
|
|
if (Invalid)
|
|
|
|
return SourceLocation();
|
|
|
|
|
2011-02-04 01:17:35 +08:00
|
|
|
if (MainSLoc.isFile()) {
|
|
|
|
const ContentCache *MainContentCache
|
|
|
|
= MainSLoc.getFile().getContentCache();
|
2011-02-12 02:08:15 +08:00
|
|
|
if (!MainContentCache) {
|
|
|
|
// Can't do anything
|
2011-03-05 09:03:53 +08:00
|
|
|
} else if (MainContentCache->OrigEntry == SourceFile) {
|
2011-02-04 01:17:35 +08:00
|
|
|
FirstFID = MainFileID;
|
2011-02-12 02:08:15 +08:00
|
|
|
} else {
|
2011-02-04 01:17:35 +08:00
|
|
|
// Fall back: check whether we have the same base name and inode
|
|
|
|
// as the main file.
|
2011-03-05 09:03:53 +08:00
|
|
|
const FileEntry *MainFile = MainContentCache->OrigEntry;
|
2011-02-04 01:17:35 +08:00
|
|
|
SourceFileName = llvm::sys::path::filename(SourceFile->getName());
|
|
|
|
if (*SourceFileName == llvm::sys::path::filename(MainFile->getName())) {
|
|
|
|
SourceFileInode = getActualFileInode(SourceFile);
|
2011-02-17 03:09:24 +08:00
|
|
|
if (SourceFileInode) {
|
|
|
|
if (llvm::Optional<ino_t> MainFileInode
|
|
|
|
= getActualFileInode(MainFile)) {
|
|
|
|
if (*SourceFileInode == *MainFileInode) {
|
|
|
|
FirstFID = MainFileID;
|
|
|
|
SourceFile = MainFile;
|
|
|
|
}
|
|
|
|
}
|
2011-02-04 01:17:35 +08:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
2009-12-02 13:34:39 +08:00
|
|
|
}
|
|
|
|
|
|
|
|
if (FirstFID.isInvalid()) {
|
|
|
|
// The location we're looking for isn't in the main file; look
|
|
|
|
// through all of the source locations.
|
|
|
|
for (unsigned I = 0, N = sloc_entry_size(); I != N; ++I) {
|
2011-04-20 08:21:03 +08:00
|
|
|
bool Invalid = false;
|
|
|
|
const SLocEntry &SLoc = getSLocEntry(I, &Invalid);
|
|
|
|
if (Invalid)
|
|
|
|
return SourceLocation();
|
|
|
|
|
2011-02-04 01:17:35 +08:00
|
|
|
if (SLoc.isFile() &&
|
|
|
|
SLoc.getFile().getContentCache() &&
|
2011-03-05 09:03:53 +08:00
|
|
|
SLoc.getFile().getContentCache()->OrigEntry == SourceFile) {
|
2009-12-02 13:34:39 +08:00
|
|
|
FirstFID = FileID::get(I);
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
2011-02-04 01:17:35 +08:00
|
|
|
|
|
|
|
// If we haven't found what we want yet, try again, but this time stat()
|
|
|
|
// each of the files in case the files have changed since we originally
|
|
|
|
// parsed the file.
|
|
|
|
if (FirstFID.isInvalid() &&
|
|
|
|
(SourceFileName ||
|
|
|
|
(SourceFileName = llvm::sys::path::filename(SourceFile->getName()))) &&
|
|
|
|
(SourceFileInode ||
|
|
|
|
(SourceFileInode = getActualFileInode(SourceFile)))) {
|
2011-04-20 08:21:03 +08:00
|
|
|
bool Invalid = false;
|
2011-02-04 01:17:35 +08:00
|
|
|
for (unsigned I = 0, N = sloc_entry_size(); I != N; ++I) {
|
2011-04-20 08:21:03 +08:00
|
|
|
const SLocEntry &SLoc = getSLocEntry(I, &Invalid);
|
|
|
|
if (Invalid)
|
|
|
|
return SourceLocation();
|
|
|
|
|
2011-02-04 01:17:35 +08:00
|
|
|
if (SLoc.isFile()) {
|
|
|
|
const ContentCache *FileContentCache
|
|
|
|
= SLoc.getFile().getContentCache();
|
2011-03-05 09:03:53 +08:00
|
|
|
const FileEntry *Entry = FileContentCache ? FileContentCache->OrigEntry : 0;
|
2011-02-04 01:17:35 +08:00
|
|
|
if (Entry &&
|
2011-02-12 02:08:15 +08:00
|
|
|
*SourceFileName == llvm::sys::path::filename(Entry->getName())) {
|
|
|
|
if (llvm::Optional<ino_t> EntryInode = getActualFileInode(Entry)) {
|
|
|
|
if (*SourceFileInode == *EntryInode) {
|
|
|
|
FirstFID = FileID::get(I);
|
|
|
|
SourceFile = Entry;
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
}
|
2011-02-04 01:17:35 +08:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
2009-12-02 13:34:39 +08:00
|
|
|
|
|
|
|
if (FirstFID.isInvalid())
|
|
|
|
return SourceLocation();
|
|
|
|
|
2011-02-04 01:17:35 +08:00
|
|
|
if (Line == 1 && Col == 1)
|
|
|
|
return getLocForStartOfFile(FirstFID);
|
|
|
|
|
|
|
|
ContentCache *Content
|
|
|
|
= const_cast<ContentCache *>(getOrCreateContentCache(SourceFile));
|
|
|
|
if (!Content)
|
|
|
|
return SourceLocation();
|
|
|
|
|
|
|
|
// If this is the first use of line information for this buffer, compute the
|
|
|
|
// SourceLineCache for it on demand.
|
|
|
|
if (Content->SourceLineCache == 0) {
|
|
|
|
bool MyInvalid = false;
|
|
|
|
ComputeLineNumbers(Diag, Content, ContentCacheAlloc, *this, MyInvalid);
|
|
|
|
if (MyInvalid)
|
|
|
|
return SourceLocation();
|
|
|
|
}
|
|
|
|
|
2010-02-27 10:42:25 +08:00
|
|
|
if (Line > Content->NumLines) {
|
2010-04-21 04:35:58 +08:00
|
|
|
unsigned Size = Content->getBuffer(Diag, *this)->getBufferSize();
|
2010-02-27 10:42:25 +08:00
|
|
|
if (Size > 0)
|
|
|
|
--Size;
|
|
|
|
return getLocForStartOfFile(FirstFID).getFileLocWithOffset(Size);
|
|
|
|
}
|
|
|
|
|
|
|
|
unsigned FilePos = Content->SourceLineCache[Line - 1];
|
2010-04-21 04:35:58 +08:00
|
|
|
const char *Buf = Content->getBuffer(Diag, *this)->getBufferStart() + FilePos;
|
|
|
|
unsigned BufLength = Content->getBuffer(Diag, *this)->getBufferEnd() - Buf;
|
2010-02-27 10:42:25 +08:00
|
|
|
unsigned i = 0;
|
|
|
|
|
|
|
|
// Check that the given column is valid.
|
|
|
|
while (i < BufLength-1 && i < Col-1 && Buf[i] != '\n' && Buf[i] != '\r')
|
|
|
|
++i;
|
|
|
|
if (i < Col-1)
|
|
|
|
return getLocForStartOfFile(FirstFID).getFileLocWithOffset(FilePos + i);
|
|
|
|
|
2009-12-02 13:34:39 +08:00
|
|
|
return getLocForStartOfFile(FirstFID).getFileLocWithOffset(FilePos + Col - 1);
|
2009-06-20 16:09:57 +08:00
|
|
|
}
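
// Illustrative sketch (not part of the original source): translating a
// user-supplied file:line:col triplet back into a SourceLocation, e.g. for a
// "jump to position" feature. The FileEntry would normally come from the
// FileManager; the helper name is hypothetical.
static SourceLocation locationForPosition(SourceManager &SM,
                                          const FileEntry *File,
                                          unsigned Line, unsigned Col) {
  if (!File || Line == 0 || Col == 0)
    return SourceLocation();
  return SM.getLocation(File, Line, Col);
}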
|
|
|
|
|
2010-05-08 04:35:24 +08:00
|
|
|
/// Given a decomposed source location, move it up the include/instantiation
|
|
|
|
/// stack to the parent source location. If this is possible, return the
|
|
|
|
/// decomposed version of the parent in Loc and return false. If Loc is the
|
|
|
|
/// top-level entry, return true and don't modify it.
|
|
|
|
static bool MoveUpIncludeHierarchy(std::pair<FileID, unsigned> &Loc,
|
|
|
|
const SourceManager &SM) {
|
|
|
|
SourceLocation UpperLoc;
|
|
|
|
const SrcMgr::SLocEntry &Entry = SM.getSLocEntry(Loc.first);
|
|
|
|
if (Entry.isInstantiation())
|
|
|
|
UpperLoc = Entry.getInstantiation().getInstantiationLocStart();
|
|
|
|
else
|
|
|
|
UpperLoc = Entry.getFile().getIncludeLoc();
|
|
|
|
|
|
|
|
if (UpperLoc.isInvalid())
|
|
|
|
return true; // We reached the top.
|
|
|
|
|
|
|
|
Loc = SM.getDecomposedLoc(UpperLoc);
|
|
|
|
return false;
|
|
|
|
}
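
// Illustrative sketch (not part of the original source): counting how deeply
// a location is nested in the include/instantiation stack by repeatedly
// calling MoveUpIncludeHierarchy() until it reports the top level. The helper
// name is hypothetical.
static unsigned getIncludeDepth(const SourceManager &SM, SourceLocation Loc) {
  std::pair<FileID, unsigned> Decomposed = SM.getDecomposedLoc(Loc);
  unsigned Depth = 0;
  while (!MoveUpIncludeHierarchy(Decomposed, SM))
    ++Depth;
  return Depth;
}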
|
|
|
|
|
|
|
|
|
2009-06-24 06:01:48 +08:00
|
|
|
/// \brief Determines the order of 2 source locations in the translation unit.
|
|
|
|
///
|
|
|
|
/// \returns true if LHS source location comes before RHS, false otherwise.
|
|
|
|
bool SourceManager::isBeforeInTranslationUnit(SourceLocation LHS,
|
|
|
|
SourceLocation RHS) const {
|
|
|
|
assert(LHS.isValid() && RHS.isValid() && "Passed invalid source location!");
|
|
|
|
if (LHS == RHS)
|
|
|
|
return false;
|
2009-09-09 23:08:12 +08:00
|
|
|
|
2010-12-24 10:53:53 +08:00
|
|
|
// If both locations are macro instantiations, the order of their offsets
|
|
|
|
// reflects the order in which the tokens they point to were instantiated
|
|
|
|
// (during parsing, each token that is instantiated by a macro expands the
|
|
|
|
// SLocEntries).
|
|
|
|
|
2009-06-24 06:01:48 +08:00
|
|
|
std::pair<FileID, unsigned> LOffs = getDecomposedLoc(LHS);
|
|
|
|
std::pair<FileID, unsigned> ROffs = getDecomposedLoc(RHS);
|
2009-09-09 23:08:12 +08:00
|
|
|
|
2009-06-24 06:01:48 +08:00
|
|
|
// If the source locations are in the same file, just compare offsets.
|
|
|
|
if (LOffs.first == ROffs.first)
|
|
|
|
return LOffs.second < ROffs.second;
|
|
|
|
|
|
|
|
// If we are comparing a source location with multiple locations in the same
|
|
|
|
// file, we get a big win by caching the result.
|
2010-05-07 13:10:46 +08:00
|
|
|
if (IsBeforeInTUCache.isCacheValid(LOffs.first, ROffs.first))
|
|
|
|
return IsBeforeInTUCache.getCachedResult(LOffs.second, ROffs.second);
|
2009-09-09 23:08:12 +08:00
|
|
|
|
2010-05-07 09:17:07 +08:00
|
|
|
// Okay, we missed in the cache, start updating the cache for this query.
|
|
|
|
IsBeforeInTUCache.setQueryFIDs(LOffs.first, ROffs.first);
|
2009-09-09 23:08:12 +08:00
|
|
|
|
2009-06-24 06:01:48 +08:00
|
|
|
// "Traverse" the include/instantiation stacks of both locations and try to
|
2010-05-07 13:51:13 +08:00
|
|
|
// find a common "ancestor". FileIDs build a tree-like structure that
|
|
|
|
// reflects the #include hierarchy, and this algorithm needs to find the
|
|
|
|
// nearest common ancestor between the two locations. For example, if you
|
|
|
|
// have a.c that includes b.h and c.h, and are comparing a location in b.h to
|
|
|
|
// a location in c.h, we need to find that their nearest common ancestor is
|
|
|
|
// a.c, and compare the locations of the two #includes to find their relative
|
|
|
|
// ordering.
|
2009-06-24 06:01:48 +08:00
|
|
|
//
|
2010-05-07 13:51:13 +08:00
|
|
|
// SourceManager assigns FileIDs in order of parsing. This means that an
|
|
|
|
// includee always has a larger FileID than an includer. While you might
|
|
|
|
// think that we could just compare the FileID's here, that doesn't work to
|
|
|
|
// compare a point at the end of a.c with a point within c.h. Though c.h has
|
|
|
|
// a larger FileID, we have to compare the include point of c.h to the
|
|
|
|
// location in a.c.
|
|
|
|
//
|
|
|
|
// Despite not being able to directly compare FileID's, we can tell that a
|
|
|
|
// larger FileID is necessarily more deeply nested than a lower one and use
|
|
|
|
// this information to walk up the tree to the nearest common ancestor.
|
|
|
|
do {
|
|
|
|
// If LOffs is larger than ROffs, then LOffs must be more deeply nested than
|
|
|
|
// ROffs, walk up the #include chain.
|
|
|
|
if (LOffs.first.ID > ROffs.first.ID) {
|
2010-05-08 04:35:24 +08:00
|
|
|
if (MoveUpIncludeHierarchy(LOffs, *this))
|
2010-05-07 13:51:13 +08:00
|
|
|
break; // We reached the top.
|
|
|
|
|
|
|
|
} else {
|
|
|
|
// Otherwise, ROffs is larger than LOffs, so ROffs must be more deeply
|
|
|
|
// nested than LOffs, walk up the #include chain.
|
2010-05-08 04:35:24 +08:00
|
|
|
if (MoveUpIncludeHierarchy(ROffs, *this))
|
2010-05-07 13:51:13 +08:00
|
|
|
break; // We reached the top.
|
2010-05-07 09:17:07 +08:00
|
|
|
}
|
2010-05-07 13:51:13 +08:00
|
|
|
} while (LOffs.first != ROffs.first);
|
2009-09-09 23:08:12 +08:00
|
|
|
|
2010-05-07 13:51:13 +08:00
|
|
|
// If we exited because we found a nearest common ancestor, compare the
|
|
|
|
// locations within the common file and cache them.
|
|
|
|
if (LOffs.first == ROffs.first) {
|
|
|
|
IsBeforeInTUCache.setCommonLoc(LOffs.first, LOffs.second, ROffs.second);
|
|
|
|
return IsBeforeInTUCache.getCachedResult(LOffs.second, ROffs.second);
|
2009-06-24 06:01:48 +08:00
|
|
|
}
|
2009-09-09 23:08:12 +08:00
|
|
|
|
2009-12-02 07:07:57 +08:00
|
|
|
// There is no common ancestor, most probably because one location is in the
|
2010-08-19 07:57:06 +08:00
|
|
|
// predefines buffer or an AST file.
|
2009-12-02 07:07:57 +08:00
|
|
|
// FIXME: We should rearrange the external interface so this simply never
|
|
|
|
// happens; it can't conceptually happen. Also see PR5662.
|
2010-05-08 04:35:24 +08:00
|
|
|
IsBeforeInTUCache.setQueryFIDs(FileID(), FileID()); // Don't try caching.
|
|
|
|
|
|
|
|
// Zip both entries up to the top level record.
|
|
|
|
while (!MoveUpIncludeHierarchy(LOffs, *this)) /*empty*/;
|
|
|
|
while (!MoveUpIncludeHierarchy(ROffs, *this)) /*empty*/;
|
2010-05-07 13:51:13 +08:00
|
|
|
|
2011-04-15 13:22:18 +08:00
|
|
|
// If exactly one location is a memory buffer, assume it precedes the other.
|
2010-05-08 04:35:24 +08:00
|
|
|
|
|
|
|
// Strip off macro instantiation locations, going up to the top-level File
|
|
|
|
// SLocEntry.
|
|
|
|
bool LIsMB = getFileEntryForID(LOffs.first) == 0;
|
|
|
|
bool RIsMB = getFileEntryForID(ROffs.first) == 0;
|
2010-05-07 13:51:13 +08:00
|
|
|
if (LIsMB != RIsMB)
|
2010-05-07 09:17:07 +08:00
|
|
|
return LIsMB;
|
2009-12-02 07:07:57 +08:00
|
|
|
|
|
|
|
// Otherwise, just assume FileIDs were created in order.
|
2010-05-07 09:17:07 +08:00
|
|
|
return LOffs.first < ROffs.first;
|
2009-06-24 06:01:48 +08:00
|
|
|
}
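
// Illustrative sketch (not part of the original source):
// isBeforeInTranslationUnit() provides a strict weak ordering over valid
// locations, so it can back a comparison functor, e.g. for sorting
// diagnostics by their position in the translation unit. The functor name is
// hypothetical.
namespace {
  struct BeforeInTranslationUnit {
    const SourceManager *SM;
    explicit BeforeInTranslationUnit(const SourceManager *SM) : SM(SM) {}
    bool operator()(SourceLocation LHS, SourceLocation RHS) const {
      return SM->isBeforeInTranslationUnit(LHS, RHS);
    }
  };
}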
|
2009-01-26 08:43:02 +08:00
|
|
|
|
2006-06-18 13:43:12 +08:00
|
|
|
/// PrintStats - Print statistics to stderr.
|
|
|
|
///
|
|
|
|
void SourceManager::PrintStats() const {
|
2009-08-23 20:08:50 +08:00
|
|
|
llvm::errs() << "\n*** Source Manager Stats:\n";
|
|
|
|
llvm::errs() << FileInfos.size() << " files mapped, " << MemBufferInfos.size()
|
|
|
|
<< " mem buffers mapped.\n";
|
2011-07-07 11:40:24 +08:00
|
|
|
llvm::errs() << SLocEntryTable.size() << " SLocEntry's allocated ("
|
|
|
|
<< SLocEntryTable.capacity()*sizeof(SrcMgr::SLocEntry)
|
|
|
|
<< " bytes of capacity), "
|
2009-08-23 20:08:50 +08:00
|
|
|
<< NextOffset << "B of Sloc address space used.\n";
|
2009-09-09 23:08:12 +08:00
|
|
|
|
2006-06-18 13:43:12 +08:00
|
|
|
unsigned NumLineNumsComputed = 0;
|
|
|
|
unsigned NumFileBytesMapped = 0;
|
2009-02-03 15:30:45 +08:00
|
|
|
for (fileinfo_iterator I = fileinfo_begin(), E = fileinfo_end(); I != E; ++I){
|
|
|
|
NumLineNumsComputed += I->second->SourceLineCache != 0;
|
|
|
|
NumFileBytesMapped += I->second->getSizeBytesMapped();
|
2006-06-18 13:43:12 +08:00
|
|
|
}
|
2009-09-09 23:08:12 +08:00
|
|
|
|
2009-08-23 20:08:50 +08:00
|
|
|
llvm::errs() << NumFileBytesMapped << " bytes of files mapped, "
|
|
|
|
<< NumLineNumsComputed << " files with line #'s computed.\n";
|
|
|
|
llvm::errs() << "FileID scans: " << NumLinearScans << " linear, "
|
|
|
|
<< NumBinaryProbes << " binary.\n";
|
2006-06-18 13:43:12 +08:00
|
|
|
}
|
2009-04-27 14:38:32 +08:00
|
|
|
|
|
|
|
ExternalSLocEntrySource::~ExternalSLocEntrySource() { }
|
2011-04-29 04:36:42 +08:00
|
|
|
|
|
|
|
/// Return the amount of memory used by memory buffers, breaking down
|
|
|
|
/// by heap-backed versus mmap'ed memory.
|
|
|
|
SourceManager::MemoryBufferSizes SourceManager::getMemoryBufferSizes() const {
|
|
|
|
size_t malloc_bytes = 0;
|
|
|
|
size_t mmap_bytes = 0;
|
|
|
|
|
|
|
|
for (unsigned i = 0, e = MemBufferInfos.size(); i != e; ++i)
|
|
|
|
if (size_t sized_mapped = MemBufferInfos[i]->getSizeBytesMapped())
|
|
|
|
switch (MemBufferInfos[i]->getMemoryBufferKind()) {
|
|
|
|
case llvm::MemoryBuffer::MemoryBuffer_MMap:
|
|
|
|
mmap_bytes += sized_mapped;
|
|
|
|
break;
|
|
|
|
case llvm::MemoryBuffer::MemoryBuffer_Malloc:
|
|
|
|
malloc_bytes += sized_mapped;
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
|
|
|
|
return MemoryBufferSizes(malloc_bytes, mmap_bytes);
|
|
|
|
}
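
// Illustrative sketch (not part of the original source): reporting the
// malloc'd versus mmap'ed buffer totals, assuming MemoryBufferSizes exposes
// malloc_bytes and mmap_bytes members matching the constructor call above.
// The helper name is hypothetical.
static void dumpBufferSizes(const SourceManager &SM) {
  SourceManager::MemoryBufferSizes Sizes = SM.getMemoryBufferSizes();
  llvm::errs() << Sizes.malloc_bytes << " bytes malloc'd, "
               << Sizes.mmap_bytes << " bytes mmap'ed by SourceManager.\n";
}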
|
|
|
|
|