//===--- SourceManager.cpp - Track and cache source files -----------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements the SourceManager interface.
//
//===----------------------------------------------------------------------===//

#include "clang/Basic/SourceManager.h"
#include "clang/Basic/SourceManagerInternals.h"
#include "clang/Basic/Diagnostic.h"
#include "clang/Basic/FileManager.h"
#include "llvm/Support/Compiler.h"
#include "llvm/Support/MemoryBuffer.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/System/Path.h"
#include <algorithm>
#include <string>
#include <cstring>

using namespace clang;
using namespace SrcMgr;
using llvm::MemoryBuffer;

//===----------------------------------------------------------------------===//
// SourceManager Helper Classes
//===----------------------------------------------------------------------===//

ContentCache::~ContentCache() {
  delete Buffer.getPointer();
}

/// getSizeBytesMapped - Returns the number of bytes actually mapped for
/// this ContentCache. This can be 0 if the MemBuffer was not actually
/// instantiated.
unsigned ContentCache::getSizeBytesMapped() const {
  return Buffer.getPointer() ? Buffer.getPointer()->getBufferSize() : 0;
}

/// getSize - Returns the size of the content encapsulated by this ContentCache.
/// This can be the size of the source file or the size of an arbitrary
/// scratch buffer. If the ContentCache encapsulates a source file, that
/// file is not lazily brought in from disk to satisfy this query.
unsigned ContentCache::getSize() const {
  return Buffer.getPointer() ? (unsigned) Buffer.getPointer()->getBufferSize()
                             : (unsigned) Entry->getSize();
}

void ContentCache::replaceBuffer(const llvm::MemoryBuffer *B) {
  assert(B != Buffer.getPointer());

  delete Buffer.getPointer();
  Buffer.setPointer(B);
  Buffer.setInt(false);
}

const llvm::MemoryBuffer *ContentCache::getBuffer(Diagnostic &Diag,
                                                  const SourceManager &SM,
                                                  SourceLocation Loc,
                                                  bool *Invalid) const {
  if (Invalid)
    *Invalid = false;

  // Lazily create the Buffer for ContentCaches that wrap files.
  if (!Buffer.getPointer() && Entry) {
    std::string ErrorStr;
    struct stat FileInfo;
    Buffer.setPointer(MemoryBuffer::getFile(Entry->getName(), &ErrorStr,
                                            Entry->getSize(), &FileInfo));
    Buffer.setInt(false);

    // If we were unable to open the file, then we are in an inconsistent
    // situation where the content cache referenced a file which no longer
    // exists. Most likely, we were using a stat cache with an invalid entry but
    // the file could also have been removed during processing. Since we can't
    // really deal with this situation, just create an empty buffer.
    //
    // FIXME: This is definitely not ideal, but our immediate clients can't
    // currently handle returning a null entry here. Ideally we should detect
    // that we are in an inconsistent situation and error out as quickly as
    // possible.
    if (!Buffer.getPointer()) {
      const llvm::StringRef FillStr("<<<MISSING SOURCE FILE>>>\n");
      Buffer.setPointer(MemoryBuffer::getNewMemBuffer(Entry->getSize(),
                                                      "<invalid>"));
      char *Ptr = const_cast<char*>(Buffer.getPointer()->getBufferStart());
      for (unsigned i = 0, e = Entry->getSize(); i != e; ++i)
        Ptr[i] = FillStr[i % FillStr.size()];

      if (Diag.isDiagnosticInFlight())
        Diag.SetDelayedDiagnostic(diag::err_cannot_open_file,
                                  Entry->getName(), ErrorStr);
      else
        Diag.Report(FullSourceLoc(Loc, SM), diag::err_cannot_open_file)
          << Entry->getName() << ErrorStr;

      Buffer.setInt(true);

    // FIXME: This conditionalization is horrible, but we see spurious failures
    // in the test suite due to this warning and no one has had time to hunt it
    // down. So for now, we just don't emit this diagnostic on Win32, and hope
    // nothing bad happens.
    //
    // PR6812.
#if !defined(LLVM_ON_WIN32)
    } else if (FileInfo.st_size != Entry->getSize() ||
               FileInfo.st_mtime != Entry->getModificationTime()) {
      // Check that the file's size and modification time are the same
      // as in the file entry (which may have come from a stat cache).
      if (Diag.isDiagnosticInFlight())
        Diag.SetDelayedDiagnostic(diag::err_file_modified,
                                  Entry->getName());
      else
        Diag.Report(FullSourceLoc(Loc, SM), diag::err_file_modified)
          << Entry->getName();

      Buffer.setInt(true);
#endif
    }

    // If the buffer is valid, check to see if it has a UTF Byte Order Mark
    // (BOM). We only support UTF-8 without a BOM right now. See
    // http://en.wikipedia.org/wiki/Byte_order_mark for more information.
    if (!Buffer.getInt()) {
      llvm::StringRef BufStr = Buffer.getPointer()->getBuffer();
      const char *BOM = 0;
      if (BufStr.startswith("\xEF\xBB\xBF"))
        BOM = "UTF-8";
      else if (BufStr.startswith("\xFE\xFF"))
        BOM = "UTF-16 (BE)";
      else if (BufStr.startswith("\xFF\xFE"))
        BOM = "UTF-16 (LE)";
      else if (BufStr.startswith(llvm::StringRef("\x00\x00\xFE\xFF", 4)))
        BOM = "UTF-32 (BE)";
      else if (BufStr.startswith(llvm::StringRef("\xFF\xFE\x00\x00", 4)))
        BOM = "UTF-32 (LE)";
      else if (BufStr.startswith("\x2B\x2F\x76"))
        BOM = "UTF-7";
      else if (BufStr.startswith("\xF7\x64\x4C"))
        BOM = "UTF-1";
      else if (BufStr.startswith("\xDD\x73\x66\x73"))
        BOM = "UTF-EBCDIC";
      else if (BufStr.startswith("\x0E\xFE\xFF"))
        BOM = "SCSU";
      else if (BufStr.startswith("\xFB\xEE\x28"))
        BOM = "BOCU-1";
      else if (BufStr.startswith("\x84\x31\x95\x33"))
        BOM = "GB-18030";

      if (BOM) {
        Diag.Report(FullSourceLoc(Loc, SM), diag::err_unsupported_bom)
          << BOM << Entry->getName();
        Buffer.setInt(1);
      }
    }
  }

  if (Invalid)
    *Invalid = Buffer.getInt();

  return Buffer.getPointer();
}

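// Caller-side sketch (illustrative; SourceManager::getMemoryBufferForFile
// below follows the same pattern). The Invalid flag is how callers tell a
// real buffer apart from the "<<<MISSING SOURCE FILE>>>" placeholder built
// above:
//
//   bool Invalid = false;
//   const llvm::MemoryBuffer *Buf = Cache->getBuffer(Diag, SM, Loc, &Invalid);
//   if (Invalid) {
//     // A diagnostic has already been emitted or delayed; treat the buffer
//     // contents as unreliable.
//   }
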
unsigned LineTableInfo::getLineTableFilenameID(const char *Ptr, unsigned Len) {
  // Look up the filename in the string table, returning the pre-existing value
  // if it exists.
  llvm::StringMapEntry<unsigned> &Entry =
    FilenameIDs.GetOrCreateValue(Ptr, Ptr+Len, ~0U);
  if (Entry.getValue() != ~0U)
    return Entry.getValue();

  // Otherwise, assign this the next available ID.
  Entry.setValue(FilenamesByID.size());
  FilenamesByID.push_back(&Entry);
  return FilenamesByID.size()-1;
}

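// Usage sketch (hypothetical caller; in practice the preprocessor's #line
// handling drives this): the spelled filename is interned once, and the small
// integer ID is what every LineEntry stores afterwards.
//
//   unsigned FilenameID = LineTable.getLineTableFilenameID(Name, NameLen);
//   LineTable.AddLineNote(FID, Offset, PresumedLineNo, FilenameID);
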
/// AddLineNote - Add a line note to the line table that indicates that there
/// is a #line at the specified FID/Offset location which changes the presumed
/// location to LineNo/FilenameID.
void LineTableInfo::AddLineNote(unsigned FID, unsigned Offset,
                                unsigned LineNo, int FilenameID) {
  std::vector<LineEntry> &Entries = LineEntries[FID];

  assert((Entries.empty() || Entries.back().FileOffset < Offset) &&
         "Adding line entries out of order!");

  SrcMgr::CharacteristicKind Kind = SrcMgr::C_User;
  unsigned IncludeOffset = 0;

  if (!Entries.empty()) {
    // If this is a '#line 4' after '#line 42 "foo.h"', make sure to remember
    // that we are still in "foo.h".
    if (FilenameID == -1)
      FilenameID = Entries.back().FilenameID;

    // If we are after a line marker that switched us to system header mode, or
    // that set #include information, preserve it.
    Kind = Entries.back().FileKind;
    IncludeOffset = Entries.back().IncludeOffset;
  }

  Entries.push_back(LineEntry::get(Offset, LineNo, FilenameID, Kind,
                                   IncludeOffset));
}

/// AddLineNote - This is the same as the previous version of AddLineNote, but
/// is used for GNU line markers. If EntryExit is 0, then this doesn't change
/// the presumed #include stack. If it is 1, this is a file entry; if it is 2,
/// this is a file exit. FileKind specifies whether this is a system header or
/// extern C system header.
void LineTableInfo::AddLineNote(unsigned FID, unsigned Offset,
                                unsigned LineNo, int FilenameID,
                                unsigned EntryExit,
                                SrcMgr::CharacteristicKind FileKind) {
  assert(FilenameID != -1 && "Unspecified filename should use other accessor");

  std::vector<LineEntry> &Entries = LineEntries[FID];

  assert((Entries.empty() || Entries.back().FileOffset < Offset) &&
         "Adding line entries out of order!");

  unsigned IncludeOffset = 0;
  if (EntryExit == 0) {  // No #include stack change.
    IncludeOffset = Entries.empty() ? 0 : Entries.back().IncludeOffset;
  } else if (EntryExit == 1) {
    IncludeOffset = Offset-1;
  } else if (EntryExit == 2) {
    assert(!Entries.empty() && Entries.back().IncludeOffset &&
       "PPDirectives should have caught case when popping empty include stack");

    // Get the include loc of the last entry's include loc as our include loc.
    IncludeOffset = 0;
    if (const LineEntry *PrevEntry =
          FindNearestLineEntry(FID, Entries.back().IncludeOffset))
      IncludeOffset = PrevEntry->IncludeOffset;
  }

  Entries.push_back(LineEntry::get(Offset, LineNo, FilenameID, FileKind,
                                   IncludeOffset));
}

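// Worked example (assuming the usual GNU linemarker flag meanings: 1 = enter
// a file, 2 = return to a file, 3 = system header). A directive such as
//
//   # 10 "sys/ioctl.h" 1 3
//
// would reach this function as LineNo = 10, FilenameID = the interned ID of
// "sys/ioctl.h", EntryExit = 1 and FileKind = SrcMgr::C_System, so
// IncludeOffset is set to Offset-1, a location just before the marker in the
// including file.
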
/// FindNearestLineEntry - Find the line entry nearest to FID that is before
/// it. If there is no line entry before Offset in FID, return null.
const LineEntry *LineTableInfo::FindNearestLineEntry(unsigned FID,
                                                     unsigned Offset) {
  const std::vector<LineEntry> &Entries = LineEntries[FID];
  assert(!Entries.empty() && "No #line entries for this FID after all!");

  // It is very common for the query to be after the last #line, check this
  // first.
  if (Entries.back().FileOffset <= Offset)
    return &Entries.back();

  // Do a binary search to find the maximal element that is still before Offset.
  std::vector<LineEntry>::const_iterator I =
    std::upper_bound(Entries.begin(), Entries.end(), Offset);
  if (I == Entries.begin()) return 0;
  return &*--I;
}

/// \brief Add a new line entry that has already been encoded into
/// the internal representation of the line table.
void LineTableInfo::AddEntry(unsigned FID,
                             const std::vector<LineEntry> &Entries) {
  LineEntries[FID] = Entries;
}

/// getLineTableFilenameID - Return the uniqued ID for the specified filename.
///
unsigned SourceManager::getLineTableFilenameID(const char *Ptr, unsigned Len) {
  if (LineTable == 0)
    LineTable = new LineTableInfo();
  return LineTable->getLineTableFilenameID(Ptr, Len);
}

/// AddLineNote - Add a line note to the line table for the FileID and offset
/// specified by Loc. If FilenameID is -1, it is considered to be
/// unspecified.
void SourceManager::AddLineNote(SourceLocation Loc, unsigned LineNo,
                                int FilenameID) {
  std::pair<FileID, unsigned> LocInfo = getDecomposedInstantiationLoc(Loc);

  const SrcMgr::FileInfo &FileInfo = getSLocEntry(LocInfo.first).getFile();

  // Remember that this file has #line directives now if it doesn't already.
  const_cast<SrcMgr::FileInfo&>(FileInfo).setHasLineDirectives();

  if (LineTable == 0)
    LineTable = new LineTableInfo();
  LineTable->AddLineNote(LocInfo.first.ID, LocInfo.second, LineNo, FilenameID);
}

/// AddLineNote - Add a GNU line marker to the line table.
void SourceManager::AddLineNote(SourceLocation Loc, unsigned LineNo,
                                int FilenameID, bool IsFileEntry,
                                bool IsFileExit, bool IsSystemHeader,
                                bool IsExternCHeader) {
  // If there is no filename and no flags, this is treated just like a #line,
  // which does not change the flags of the previous line marker.
  if (FilenameID == -1) {
    assert(!IsFileEntry && !IsFileExit && !IsSystemHeader && !IsExternCHeader &&
           "Can't set flags without setting the filename!");
    return AddLineNote(Loc, LineNo, FilenameID);
  }

  std::pair<FileID, unsigned> LocInfo = getDecomposedInstantiationLoc(Loc);
  const SrcMgr::FileInfo &FileInfo = getSLocEntry(LocInfo.first).getFile();

  // Remember that this file has #line directives now if it doesn't already.
  const_cast<SrcMgr::FileInfo&>(FileInfo).setHasLineDirectives();

  if (LineTable == 0)
    LineTable = new LineTableInfo();

  SrcMgr::CharacteristicKind FileKind;
  if (IsExternCHeader)
    FileKind = SrcMgr::C_ExternCSystem;
  else if (IsSystemHeader)
    FileKind = SrcMgr::C_System;
  else
    FileKind = SrcMgr::C_User;

  unsigned EntryExit = 0;
  if (IsFileEntry)
    EntryExit = 1;
  else if (IsFileExit)
    EntryExit = 2;

  LineTable->AddLineNote(LocInfo.first.ID, LocInfo.second, LineNo, FilenameID,
                         EntryExit, FileKind);
}

LineTableInfo &SourceManager::getLineTable() {
  if (LineTable == 0)
    LineTable = new LineTableInfo();
  return *LineTable;
}

//===----------------------------------------------------------------------===//
// Private 'Create' methods.
//===----------------------------------------------------------------------===//

SourceManager::~SourceManager() {
  delete LineTable;

  // Delete FileEntry objects corresponding to content caches. Since the actual
  // content cache objects are bump pointer allocated, we just have to run the
  // dtors, but we call the deallocate method for completeness.
  for (unsigned i = 0, e = MemBufferInfos.size(); i != e; ++i) {
    MemBufferInfos[i]->~ContentCache();
    ContentCacheAlloc.Deallocate(MemBufferInfos[i]);
  }
  for (llvm::DenseMap<const FileEntry*, SrcMgr::ContentCache*>::iterator
       I = FileInfos.begin(), E = FileInfos.end(); I != E; ++I) {
    I->second->~ContentCache();
    ContentCacheAlloc.Deallocate(I->second);
  }
}

void SourceManager::clearIDTables() {
  MainFileID = FileID();
  SLocEntryTable.clear();
  LastLineNoFileIDQuery = FileID();
  LastLineNoContentCache = 0;
  LastFileIDLookup = FileID();

  if (LineTable)
    LineTable->clear();

  // Use up FileID #0 as an invalid instantiation.
  NextOffset = 0;
  createInstantiationLoc(SourceLocation(),SourceLocation(),SourceLocation(), 1);
}

/// getOrCreateContentCache - Create or return a cached ContentCache for the
/// specified file.
const ContentCache *
SourceManager::getOrCreateContentCache(const FileEntry *FileEnt) {
  assert(FileEnt && "Didn't specify a file entry to use?");

  // Do we already have information about this file?
  ContentCache *&Entry = FileInfos[FileEnt];
  if (Entry) return Entry;

  // Nope, create a new Cache entry. Make sure it is at least 8-byte aligned
  // so that FileInfo can use the low 3 bits of the pointer for its own
  // nefarious purposes.
  unsigned EntryAlign = llvm::AlignOf<ContentCache>::Alignment;
  EntryAlign = std::max(8U, EntryAlign);
  Entry = ContentCacheAlloc.Allocate<ContentCache>(1, EntryAlign);
  new (Entry) ContentCache(FileEnt);
  return Entry;
}

/// createMemBufferContentCache - Create a new ContentCache for the specified
/// memory buffer. This does no caching.
const ContentCache*
SourceManager::createMemBufferContentCache(const MemoryBuffer *Buffer) {
  // Add a new ContentCache to the MemBufferInfos list and return it. Make sure
  // it is at least 8-byte aligned so that FileInfo can use the low 3 bits of
  // the pointer for its own nefarious purposes.
  unsigned EntryAlign = llvm::AlignOf<ContentCache>::Alignment;
  EntryAlign = std::max(8U, EntryAlign);
  ContentCache *Entry = ContentCacheAlloc.Allocate<ContentCache>(1, EntryAlign);
  new (Entry) ContentCache();
  MemBufferInfos.push_back(Entry);
  Entry->setBuffer(Buffer);
  return Entry;
}

void SourceManager::PreallocateSLocEntries(ExternalSLocEntrySource *Source,
                                           unsigned NumSLocEntries,
                                           unsigned NextOffset) {
  ExternalSLocEntries = Source;
  this->NextOffset = NextOffset;
  SLocEntryLoaded.resize(NumSLocEntries + 1);
  SLocEntryLoaded[0] = true;
  SLocEntryTable.resize(SLocEntryTable.size() + NumSLocEntries);
}

void SourceManager::ClearPreallocatedSLocEntries() {
  unsigned I = 0;
  for (unsigned N = SLocEntryLoaded.size(); I != N; ++I)
    if (!SLocEntryLoaded[I])
      break;

  // We've already loaded all preallocated source location entries.
  if (I == SLocEntryLoaded.size())
    return;

  // Remove everything from location I onward.
  SLocEntryTable.resize(I);
  SLocEntryLoaded.clear();
  ExternalSLocEntries = 0;
}

//===----------------------------------------------------------------------===//
// Methods to create new FileID's and instantiations.
//===----------------------------------------------------------------------===//

/// createFileID - Create a new fileID for the specified ContentCache and
/// include position. This works regardless of whether the ContentCache
/// corresponds to a file or some other input source.
FileID SourceManager::createFileID(const ContentCache *File,
                                   SourceLocation IncludePos,
                                   SrcMgr::CharacteristicKind FileCharacter,
                                   unsigned PreallocatedID,
                                   unsigned Offset) {
  if (PreallocatedID) {
    // If we're filling in a preallocated ID, just load in the file
    // entry and return.
    assert(PreallocatedID < SLocEntryLoaded.size() &&
           "Preallocate ID out-of-range");
    assert(!SLocEntryLoaded[PreallocatedID] &&
           "Source location entry already loaded");
    assert(Offset && "Preallocate source location cannot have zero offset");
    SLocEntryTable[PreallocatedID]
      = SLocEntry::get(Offset, FileInfo::get(IncludePos, File, FileCharacter));
    SLocEntryLoaded[PreallocatedID] = true;
    FileID FID = FileID::get(PreallocatedID);
    return FID;
  }

  SLocEntryTable.push_back(SLocEntry::get(NextOffset,
                                          FileInfo::get(IncludePos, File,
                                                        FileCharacter)));
  unsigned FileSize = File->getSize();
  assert(NextOffset+FileSize+1 > NextOffset && "Ran out of source locations!");
  NextOffset += FileSize+1;

  // Set LastFileIDLookup to the newly created file. The next getFileID call is
  // almost guaranteed to be from that file.
  FileID FID = FileID::get(SLocEntryTable.size()-1);
  return LastFileIDLookup = FID;
}

/// createInstantiationLoc - Return a new SourceLocation that encodes the fact
/// that a token from SpellingLoc should actually be referenced from
/// InstantiationLoc.
SourceLocation SourceManager::createInstantiationLoc(SourceLocation SpellingLoc,
                                                     SourceLocation ILocStart,
                                                     SourceLocation ILocEnd,
                                                     unsigned TokLength,
                                                     unsigned PreallocatedID,
                                                     unsigned Offset) {
  InstantiationInfo II = InstantiationInfo::get(ILocStart,ILocEnd, SpellingLoc);
  if (PreallocatedID) {
    // If we're filling in a preallocated ID, just load in the
    // instantiation entry and return.
    assert(PreallocatedID < SLocEntryLoaded.size() &&
           "Preallocate ID out-of-range");
    assert(!SLocEntryLoaded[PreallocatedID] &&
           "Source location entry already loaded");
    assert(Offset && "Preallocate source location cannot have zero offset");
    SLocEntryTable[PreallocatedID] = SLocEntry::get(Offset, II);
    SLocEntryLoaded[PreallocatedID] = true;
    return SourceLocation::getMacroLoc(Offset);
  }
  SLocEntryTable.push_back(SLocEntry::get(NextOffset, II));
  assert(NextOffset+TokLength+1 > NextOffset && "Ran out of source locations!");
  NextOffset += TokLength+1;
  return SourceLocation::getMacroLoc(NextOffset-(TokLength+1));
}

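// Encoding note: each instantiation reserves TokLength+1 offsets, mirroring
// the FileSize+1 reservation in createFileID, and the returned "macro"
// SourceLocation points at the first of them. For a 5-character token this
// claims offsets NextOffset..NextOffset+5 before NextOffset is advanced
// (illustrative arithmetic derived from the code above).
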
const llvm::MemoryBuffer *
SourceManager::getMemoryBufferForFile(const FileEntry *File,
                                      bool *Invalid) {
  const SrcMgr::ContentCache *IR = getOrCreateContentCache(File);
  assert(IR && "getOrCreateContentCache() cannot return NULL");
  return IR->getBuffer(Diag, *this, SourceLocation(), Invalid);
}

bool SourceManager::overrideFileContents(const FileEntry *SourceFile,
                                         const llvm::MemoryBuffer *Buffer) {
  const SrcMgr::ContentCache *IR = getOrCreateContentCache(SourceFile);
  if (IR == 0)
    return true;

  const_cast<SrcMgr::ContentCache *>(IR)->replaceBuffer(Buffer);
  return false;
}

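// Usage sketch (hypothetical client code; names are illustrative): substitute
// an in-memory buffer, e.g. unsaved editor contents, for the on-disk file
// before it is read.
//
//   const FileEntry *FE = FileMgr.getFile("foo.c");
//   const llvm::MemoryBuffer *Buf =
//       llvm::MemoryBuffer::getMemBufferCopy(UnsavedText, "foo.c");
//   if (SM.overrideFileContents(FE, Buf)) {
//     // No content cache could be created for the file.
//   }
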
llvm::StringRef SourceManager::getBufferData(FileID FID, bool *Invalid) const {
  bool MyInvalid = false;
  const llvm::MemoryBuffer *Buf = getBuffer(FID, &MyInvalid);
  if (Invalid)
    *Invalid = MyInvalid;

  if (MyInvalid)
    return "";

  return Buf->getBuffer();
}

//===----------------------------------------------------------------------===//
// SourceLocation manipulation methods.
//===----------------------------------------------------------------------===//

/// getFileIDSlow - Return the FileID for a SourceLocation. This is a very hot
/// method that is used for all SourceManager queries that start with a
/// SourceLocation object. It is responsible for finding the entry in
/// SLocEntryTable which contains the specified location.
///
FileID SourceManager::getFileIDSlow(unsigned SLocOffset) const {
  assert(SLocOffset && "Invalid FileID");

  // After the first and second level caches, I see two common sorts of
  // behavior: 1) a lot of searched FileID's are "near" the cached file location
  // or are "near" the cached instantiation location. 2) others are just
  // completely random and may be a very long way away.
  //
  // To handle this, we do a linear search for up to 8 steps to catch #1 quickly
  // then we fall back to a less cache efficient, but more scalable, binary
  // search to find the location.

  // See if this is near the file point - worst case we start scanning from the
  // most newly created FileID.
  std::vector<SrcMgr::SLocEntry>::const_iterator I;

  if (SLocEntryTable[LastFileIDLookup.ID].getOffset() < SLocOffset) {
    // Neither loc prunes our search.
    I = SLocEntryTable.end();
  } else {
    // Perhaps it is near the file point.
    I = SLocEntryTable.begin()+LastFileIDLookup.ID;
  }

  // Find the FileID that contains this. "I" is an iterator that points to a
  // FileID whose offset is known to be larger than SLocOffset.
  unsigned NumProbes = 0;
  while (1) {
    --I;
    if (ExternalSLocEntries)
      getSLocEntry(FileID::get(I - SLocEntryTable.begin()));
    if (I->getOffset() <= SLocOffset) {
#if 0
      printf("lin %d -> %d [%s] %d %d\n", SLocOffset,
             I-SLocEntryTable.begin(),
             I->isInstantiation() ? "inst" : "file",
             LastFileIDLookup.ID, int(SLocEntryTable.end()-I));
#endif
      FileID Res = FileID::get(I-SLocEntryTable.begin());

      // If this isn't an instantiation, remember it. We have good locality
      // across FileID lookups.
      if (!I->isInstantiation())
        LastFileIDLookup = Res;
      NumLinearScans += NumProbes+1;
      return Res;
    }
    if (++NumProbes == 8)
      break;
  }

  // Convert "I" back into an index. We know that it is an entry whose index is
  // larger than the offset we are looking for.
  unsigned GreaterIndex = I-SLocEntryTable.begin();
  // LessIndex - This is the lower bound of the range that we're searching.
  // We know that the offset corresponding to the FileID is less than
  // SLocOffset.
  unsigned LessIndex = 0;
  NumProbes = 0;
  while (1) {
    unsigned MiddleIndex = (GreaterIndex-LessIndex)/2+LessIndex;
    unsigned MidOffset = getSLocEntry(FileID::get(MiddleIndex)).getOffset();

    ++NumProbes;

    // If the offset of the midpoint is too large, chop the high side of the
    // range to the midpoint.
    if (MidOffset > SLocOffset) {
      GreaterIndex = MiddleIndex;
      continue;
    }

    // If the middle index contains the value, succeed and return.
    if (isOffsetInFileID(FileID::get(MiddleIndex), SLocOffset)) {
#if 0
      printf("bin %d -> %d [%s] %d %d\n", SLocOffset,
             I-SLocEntryTable.begin(),
             I->isInstantiation() ? "inst" : "file",
             LastFileIDLookup.ID, int(SLocEntryTable.end()-I));
#endif
      FileID Res = FileID::get(MiddleIndex);

      // If this isn't an instantiation, remember it. We have good locality
      // across FileID lookups.
      if (!I->isInstantiation())
        LastFileIDLookup = Res;
      NumBinaryProbes += NumProbes;
      return Res;
    }

    // Otherwise, move the low-side up to the middle index.
    LessIndex = MiddleIndex;
  }
}

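// Offset layout note (illustrative numbers): createFileID above reserves
// FileSize+1 contiguous offsets per file, so entry i covers the half-open
// range [Offset_i, Offset_{i+1}). For example, a 100-byte file whose entry
// begins at offset 2 owns offsets 2..102 and the next entry starts at 103;
// a query offset of 50 then resolves to that file because 2 <= 50 < 103,
// which is exactly the containment test the probes above perform.
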
SourceLocation SourceManager::
getInstantiationLocSlowCase(SourceLocation Loc) const {
  do {
    // Note: If Loc indicates an offset into a token that came from a macro
    // expansion (e.g. the 5th character of the token) we do not want to add
    // this offset when going to the instantiation location. The instantiation
    // location is the macro invocation, which the offset has nothing to do
    // with. This is unlike when we get the spelling loc, because the offset
    // directly corresponds to the token whose spelling we're inspecting.
    Loc = getSLocEntry(getFileID(Loc)).getInstantiation()
                       .getInstantiationLocStart();
  } while (!Loc.isFileID());

  return Loc;
}

SourceLocation SourceManager::getSpellingLocSlowCase(SourceLocation Loc) const {
  do {
    std::pair<FileID, unsigned> LocInfo = getDecomposedLoc(Loc);
    Loc = getSLocEntry(LocInfo.first).getInstantiation().getSpellingLoc();
    Loc = Loc.getFileLocWithOffset(LocInfo.second);
  } while (!Loc.isFileID());
  return Loc;
}

std::pair<FileID, unsigned>
SourceManager::getDecomposedInstantiationLocSlowCase(const SrcMgr::SLocEntry *E,
                                                     unsigned Offset) const {
  // If this is an instantiation record, walk through all the instantiation
  // points.
  FileID FID;
  SourceLocation Loc;
  do {
    Loc = E->getInstantiation().getInstantiationLocStart();

    FID = getFileID(Loc);
    E = &getSLocEntry(FID);
    Offset += Loc.getOffset()-E->getOffset();
  } while (!Loc.isFileID());

  return std::make_pair(FID, Offset);
}

std::pair<FileID, unsigned>
SourceManager::getDecomposedSpellingLocSlowCase(const SrcMgr::SLocEntry *E,
                                                unsigned Offset) const {
  // If this is an instantiation record, walk through all the instantiation
  // points.
  FileID FID;
  SourceLocation Loc;
  do {
    Loc = E->getInstantiation().getSpellingLoc();

    FID = getFileID(Loc);
    E = &getSLocEntry(FID);
    Offset += Loc.getOffset()-E->getOffset();
  } while (!Loc.isFileID());

  return std::make_pair(FID, Offset);
}

/// getImmediateSpellingLoc - Given a SourceLocation object, return the
/// spelling location referenced by the ID. This is the first level down
/// towards the place where the characters that make up the lexed token can be
/// found. This should not generally be used by clients.
SourceLocation SourceManager::getImmediateSpellingLoc(SourceLocation Loc) const{
  if (Loc.isFileID()) return Loc;
  std::pair<FileID, unsigned> LocInfo = getDecomposedLoc(Loc);
  Loc = getSLocEntry(LocInfo.first).getInstantiation().getSpellingLoc();
  return Loc.getFileLocWithOffset(LocInfo.second);
}

/// getImmediateInstantiationRange - Loc is required to be an instantiation
/// location. Return the start/end of the instantiation information.
std::pair<SourceLocation,SourceLocation>
SourceManager::getImmediateInstantiationRange(SourceLocation Loc) const {
  assert(Loc.isMacroID() && "Not an instantiation loc!");
  const InstantiationInfo &II = getSLocEntry(getFileID(Loc)).getInstantiation();
  return II.getInstantiationLocRange();
}

/// getInstantiationRange - Given a SourceLocation object, return the
/// range of tokens covered by the instantiation in the ultimate file.
std::pair<SourceLocation,SourceLocation>
SourceManager::getInstantiationRange(SourceLocation Loc) const {
  if (Loc.isFileID()) return std::make_pair(Loc, Loc);

  std::pair<SourceLocation,SourceLocation> Res =
    getImmediateInstantiationRange(Loc);

  // Fully resolve the start and end locations to their ultimate instantiation
  // points.
  while (!Res.first.isFileID())
    Res.first = getImmediateInstantiationRange(Res.first).first;
  while (!Res.second.isFileID())
    Res.second = getImmediateInstantiationRange(Res.second).second;
  return Res;
}

//===----------------------------------------------------------------------===//
// Queries about the code at a SourceLocation.
//===----------------------------------------------------------------------===//

/// getCharacterData - Return a pointer to the start of the specified location
/// in the appropriate MemoryBuffer.
const char *SourceManager::getCharacterData(SourceLocation SL,
                                            bool *Invalid) const {
  // Note that this is a hot function in the getSpelling() path, which is
  // heavily used by -E mode.
  std::pair<FileID, unsigned> LocInfo = getDecomposedSpellingLoc(SL);

  // Note that calling 'getBuffer()' may lazily page in a source file.
  bool CharDataInvalid = false;
  const llvm::MemoryBuffer *Buffer
    = getSLocEntry(LocInfo.first).getFile().getContentCache()
        ->getBuffer(Diag, *this, SourceLocation(), &CharDataInvalid);
  if (Invalid)
    *Invalid = CharDataInvalid;
  return Buffer->getBufferStart() + (CharDataInvalid? 0 : LocInfo.second);
}

/// getColumnNumber - Return the column # for the specified file position.
/// This is significantly cheaper to compute than the line number.
unsigned SourceManager::getColumnNumber(FileID FID, unsigned FilePos,
                                        bool *Invalid) const {
  bool MyInvalid = false;
  const char *Buf = getBuffer(FID, &MyInvalid)->getBufferStart();
  if (Invalid)
    *Invalid = MyInvalid;

  if (MyInvalid)
    return 1;

  unsigned LineStart = FilePos;
  while (LineStart && Buf[LineStart-1] != '\n' && Buf[LineStart-1] != '\r')
    --LineStart;
  return FilePos-LineStart+1;
}

unsigned SourceManager::getSpellingColumnNumber(SourceLocation Loc,
                                                bool *Invalid) const {
  if (Loc.isInvalid()) return 0;
  std::pair<FileID, unsigned> LocInfo = getDecomposedSpellingLoc(Loc);
  return getColumnNumber(LocInfo.first, LocInfo.second, Invalid);
}

unsigned SourceManager::getInstantiationColumnNumber(SourceLocation Loc,
                                                     bool *Invalid) const {
  if (Loc.isInvalid()) return 0;
  std::pair<FileID, unsigned> LocInfo = getDecomposedInstantiationLoc(Loc);
  return getColumnNumber(LocInfo.first, LocInfo.second, Invalid);
}

static DISABLE_INLINE void
ComputeLineNumbers(Diagnostic &Diag, ContentCache *FI,
                   llvm::BumpPtrAllocator &Alloc,
                   const SourceManager &SM, bool &Invalid);
static void ComputeLineNumbers(Diagnostic &Diag, ContentCache *FI,
                               llvm::BumpPtrAllocator &Alloc,
                               const SourceManager &SM, bool &Invalid) {
  // Note that calling 'getBuffer()' may lazily page in the file.
  const MemoryBuffer *Buffer = FI->getBuffer(Diag, SM, SourceLocation(),
                                             &Invalid);
  if (Invalid)
    return;

  // Find the file offsets of all of the *physical* source lines. This does
  // not look at trigraphs, escaped newlines, or anything else tricky.
  std::vector<unsigned> LineOffsets;

  // Line #1 starts at char 0.
  LineOffsets.push_back(0);

  const unsigned char *Buf = (const unsigned char *)Buffer->getBufferStart();
  const unsigned char *End = (const unsigned char *)Buffer->getBufferEnd();
  unsigned Offs = 0;
  while (1) {
    // Skip over the contents of the line.
    // TODO: Vectorize this? This is very performance sensitive for programs
    // with lots of diagnostics and in -E mode.
    const unsigned char *NextBuf = (const unsigned char *)Buf;
    while (*NextBuf != '\n' && *NextBuf != '\r' && *NextBuf != '\0')
      ++NextBuf;
    Offs += NextBuf-Buf;
    Buf = NextBuf;

    if (Buf[0] == '\n' || Buf[0] == '\r') {
      // If this is \n\r or \r\n, skip both characters.
      if ((Buf[1] == '\n' || Buf[1] == '\r') && Buf[0] != Buf[1])
        ++Offs, ++Buf;
      ++Offs, ++Buf;
      LineOffsets.push_back(Offs);
    } else {
      // Otherwise, this is a null. If end of file, exit.
      if (Buf == End) break;
      // Otherwise, skip the null.
      ++Offs, ++Buf;
    }
  }

  // Copy the offsets into the FileInfo structure.
  FI->NumLines = LineOffsets.size();
  FI->SourceLineCache = Alloc.Allocate<unsigned>(LineOffsets.size());
  std::copy(LineOffsets.begin(), LineOffsets.end(), FI->SourceLineCache);
}

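// Note on the scan above: the inner loop has no explicit end-of-buffer check
// because llvm::MemoryBuffer provides a NUL-terminated buffer, so '\0' doubles
// as a sentinel; an embedded NUL inside the file is distinguished from end of
// file by the explicit "Buf == End" test and is simply skipped.
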
/// getLineNumber - Given a SourceLocation, return the spelling line number
/// for the position indicated. This requires building and caching a table of
/// line offsets for the MemoryBuffer, so this is not cheap: use only when
/// about to emit a diagnostic.
unsigned SourceManager::getLineNumber(FileID FID, unsigned FilePos,
                                      bool *Invalid) const {
  ContentCache *Content;
  if (LastLineNoFileIDQuery == FID)
    Content = LastLineNoContentCache;
  else
    Content = const_cast<ContentCache*>(getSLocEntry(FID)
                                        .getFile().getContentCache());

  // If this is the first use of line information for this buffer, compute the
  // SourceLineCache for it on demand.
  if (Content->SourceLineCache == 0) {
    bool MyInvalid = false;
    ComputeLineNumbers(Diag, Content, ContentCacheAlloc, *this, MyInvalid);
    if (Invalid)
      *Invalid = MyInvalid;
    if (MyInvalid)
      return 1;
  } else if (Invalid)
    *Invalid = false;

  // Okay, we know we have a line number table. Do a binary search to find the
  // line number that this character position lands on.
  unsigned *SourceLineCache = Content->SourceLineCache;
  unsigned *SourceLineCacheStart = SourceLineCache;
  unsigned *SourceLineCacheEnd = SourceLineCache + Content->NumLines;

  unsigned QueriedFilePos = FilePos+1;

  // FIXME: I would like to be convinced that this code is worth being as
  // complicated as it is, binary search isn't that slow.
  //
  // If it is worth being optimized, then in my opinion it could be more
  // performant, simpler, and more obviously correct by just "galloping" outward
  // from the queried file position. In fact, this could be incorporated into a
  // generic algorithm such as lower_bound_with_hint.
  //
  // If someone gives me a test case where this matters, I will do it! - DWD

  // If the previous query was to the same file, we know both the file pos from
  // that query and the line number returned. This allows us to narrow the
  // search space from the entire file to something near the match.
  if (LastLineNoFileIDQuery == FID) {
    if (QueriedFilePos >= LastLineNoFilePos) {
      // FIXME: Potential overflow?
      SourceLineCache = SourceLineCache+LastLineNoResult-1;

      // The query is likely to be nearby the previous one. Here we check to
      // see if it is within 5, 10 or 20 lines. It can be far away in cases
      // where big comment blocks and vertical whitespace eat up lines but
      // contribute no tokens.
      if (SourceLineCache+5 < SourceLineCacheEnd) {
        if (SourceLineCache[5] > QueriedFilePos)
          SourceLineCacheEnd = SourceLineCache+5;
        else if (SourceLineCache+10 < SourceLineCacheEnd) {
          if (SourceLineCache[10] > QueriedFilePos)
            SourceLineCacheEnd = SourceLineCache+10;
          else if (SourceLineCache+20 < SourceLineCacheEnd) {
            if (SourceLineCache[20] > QueriedFilePos)
              SourceLineCacheEnd = SourceLineCache+20;
          }
        }
      }
    } else {
      if (LastLineNoResult < Content->NumLines)
        SourceLineCacheEnd = SourceLineCache+LastLineNoResult+1;
    }
  }

  // If the spread is large, do a "radix" test as our initial guess, based on
  // the assumption that lines average to approximately the same length.
  // NOTE: This is currently disabled, as it does not appear to be profitable in
  // initial measurements.
  if (0 && SourceLineCacheEnd-SourceLineCache > 20) {
    unsigned FileLen = Content->SourceLineCache[Content->NumLines-1];

    // Take a stab at guessing where it is.
    unsigned ApproxPos = Content->NumLines*QueriedFilePos / FileLen;

    // Check for -10 and +10 lines.
    unsigned LowerBound = std::max(int(ApproxPos-10), 0);
    unsigned UpperBound = std::min(ApproxPos+10, FileLen);

    // If the computed lower bound is less than the query location, move it in.
    if (SourceLineCache < SourceLineCacheStart+LowerBound &&
        SourceLineCacheStart[LowerBound] < QueriedFilePos)
      SourceLineCache = SourceLineCacheStart+LowerBound;

    // If the computed upper bound is greater than the query location, move it.
    if (SourceLineCacheEnd > SourceLineCacheStart+UpperBound &&
        SourceLineCacheStart[UpperBound] >= QueriedFilePos)
      SourceLineCacheEnd = SourceLineCacheStart+UpperBound;
  }

  unsigned *Pos
    = std::lower_bound(SourceLineCache, SourceLineCacheEnd, QueriedFilePos);
  unsigned LineNo = Pos-SourceLineCacheStart;

  LastLineNoFileIDQuery = FID;
  LastLineNoContentCache = Content;
  LastLineNoFilePos = QueriedFilePos;
  LastLineNoResult = LineNo;
  return LineNo;
}

unsigned SourceManager::getInstantiationLineNumber(SourceLocation Loc,
                                                   bool *Invalid) const {
  if (Loc.isInvalid()) return 0;
  std::pair<FileID, unsigned> LocInfo = getDecomposedInstantiationLoc(Loc);
  return getLineNumber(LocInfo.first, LocInfo.second);
}
unsigned SourceManager::getSpellingLineNumber(SourceLocation Loc,
                                              bool *Invalid) const {
  if (Loc.isInvalid()) return 0;
  std::pair<FileID, unsigned> LocInfo = getDecomposedSpellingLoc(Loc);
  return getLineNumber(LocInfo.first, LocInfo.second);
}

/// getFileCharacteristic - return the file characteristic of the specified
/// source location, indicating whether this is a normal file, a system
/// header, or an "implicit extern C" system header.
///
/// This state can be modified with flags on GNU linemarker directives like:
///   # 4 "foo.h" 3
/// which changes all source locations in the current file after that to be
/// considered to be from a system header.
SrcMgr::CharacteristicKind
SourceManager::getFileCharacteristic(SourceLocation Loc) const {
  assert(!Loc.isInvalid() && "Can't get file characteristic of invalid loc!");
  std::pair<FileID, unsigned> LocInfo = getDecomposedInstantiationLoc(Loc);
  const SrcMgr::FileInfo &FI = getSLocEntry(LocInfo.first).getFile();

  // If there are no #line directives in this file, just return the whole-file
  // state.
  if (!FI.hasLineDirectives())
    return FI.getFileCharacteristic();

  assert(LineTable && "Can't have linetable entries without a LineTable!");
  // See if there is a #line directive before the location.
  const LineEntry *Entry =
    LineTable->FindNearestLineEntry(LocInfo.first.ID, LocInfo.second);

  // If this is before the first line marker, use the file characteristic.
  if (!Entry)
    return FI.getFileCharacteristic();

  return Entry->FileKind;
}

/// Return the filename or buffer identifier of the buffer the location is in.
/// Note that this name does not respect #line directives. Use getPresumedLoc
/// for normal clients.
const char *SourceManager::getBufferName(SourceLocation Loc,
                                         bool *Invalid) const {
  if (Loc.isInvalid()) return "<invalid loc>";

  return getBuffer(getFileID(Loc), Invalid)->getBufferIdentifier();
}

/// getPresumedLoc - This method returns the "presumed" location of a
|
|
|
|
/// SourceLocation specifies. A "presumed location" can be modified by #line
|
|
|
|
/// or GNU line marker directives. This provides a view on the data that a
|
|
|
|
/// user should see in diagnostics, for example.
|
|
|
|
///
|
|
|
|
/// Note that a presumed location is always given as the instantiation point
|
|
|
|
/// of an instantiation location, not at the spelling location.
|
|
|
|
PresumedLoc SourceManager::getPresumedLoc(SourceLocation Loc) const {
|
|
|
|
if (Loc.isInvalid()) return PresumedLoc();
|
2009-09-09 23:08:12 +08:00
|
|
|
|
2009-01-27 15:57:44 +08:00
|
|
|
// Presumed locations are always for instantiation points.
|
2009-02-04 08:55:58 +08:00
|
|
|
std::pair<FileID, unsigned> LocInfo = getDecomposedInstantiationLoc(Loc);
|
2009-09-09 23:08:12 +08:00
|
|
|
|
2009-02-04 09:06:56 +08:00
|
|
|
const SrcMgr::FileInfo &FI = getSLocEntry(LocInfo.first).getFile();
|
2009-01-27 15:57:44 +08:00
|
|
|
const SrcMgr::ContentCache *C = FI.getContentCache();
|
2009-09-09 23:08:12 +08:00
|
|
|
|
2009-02-04 09:55:42 +08:00
|
|
|
// To get the source name, first consult the FileEntry (if one exists)
|
|
|
|
// before the MemBuffer as this will avoid unnecessarily paging in the
|
|
|
|
// MemBuffer.
|
2010-04-21 04:35:58 +08:00
|
|
|
const char *Filename;
|
|
|
|
if (C->Entry)
|
|
|
|
Filename = C->Entry->getName();
|
|
|
|
else
|
|
|
|
Filename = C->getBuffer(Diag, *this)->getBufferIdentifier();
|
2009-02-04 09:55:42 +08:00
|
|
|
unsigned LineNo = getLineNumber(LocInfo.first, LocInfo.second);
|
|
|
|
unsigned ColNo = getColumnNumber(LocInfo.first, LocInfo.second);
|
|
|
|
SourceLocation IncludeLoc = FI.getIncludeLoc();
|
2009-09-09 23:08:12 +08:00
|
|
|
|
2009-02-04 09:55:42 +08:00
|
|
|
// If we have #line directives in this file, update and overwrite the physical
|
|
|
|
// location info if appropriate.
|
|
|
|
if (FI.hasLineDirectives()) {
|
|
|
|
assert(LineTable && "Can't have linetable entries without a LineTable!");
|
|
|
|
// See if there is a #line directive before this. If so, get it.
|
|
|
|
if (const LineEntry *Entry =
|
|
|
|
LineTable->FindNearestLineEntry(LocInfo.first.ID, LocInfo.second)) {
|
2009-02-04 10:00:59 +08:00
|
|
|
// If the LineEntry indicates a filename, use it.
|
2009-02-04 09:55:42 +08:00
|
|
|
if (Entry->FilenameID != -1)
|
|
|
|
Filename = LineTable->getFilename(Entry->FilenameID);
|
2009-02-04 10:00:59 +08:00
|
|
|
|
|
|
|
// Use the line number specified by the LineEntry. This line number may
|
|
|
|
// be multiple lines down from the line entry. Add the difference in
|
|
|
|
// physical line numbers from the query point and the line marker to the
|
|
|
|
// total.
|
|
|
|
unsigned MarkerLineNo = getLineNumber(LocInfo.first, Entry->FileOffset);
|
|
|
|
LineNo = Entry->LineNo + (LineNo-MarkerLineNo-1);
|
2009-09-09 23:08:12 +08:00
|
|
|
|
2009-02-04 10:15:40 +08:00
|
|
|
// Note that column numbers are not molested by line markers.
|
2009-09-09 23:08:12 +08:00
|
|
|
|
2009-02-04 14:25:26 +08:00
|
|
|
// Handle virtual #include manipulation.
|
|
|
|
if (Entry->IncludeOffset) {
|
|
|
|
IncludeLoc = getLocForStartOfFile(LocInfo.first);
|
|
|
|
IncludeLoc = IncludeLoc.getFileLocWithOffset(Entry->IncludeOffset);
|
|
|
|
}
|
2009-02-04 09:55:42 +08:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
return PresumedLoc(Filename, LineNo, ColNo, IncludeLoc);
|
2009-01-26 08:43:02 +08:00
|
|
|
}
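
// Illustrative sketch (assumed names SM and Loc, illustration only): if Loc
// sits on the physical line immediately after a directive such as
// '#line 100 "remapped.c"', the presumed location reports the remapped file
// and line rather than the physical ones:
//
//   PresumedLoc PLoc = SM.getPresumedLoc(Loc);
//   llvm::errs() << PLoc.getFilename() << ':' << PLoc.getLine() << ':'
//                << PLoc.getColumn() << '\n';  // prints "remapped.c:100:..."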

//===----------------------------------------------------------------------===//
// Other miscellaneous methods.
//===----------------------------------------------------------------------===//

/// \brief Get the source location for the given file:line:col triplet.
///
/// If the source file is included multiple times, the source location will
/// be based upon the first inclusion.
SourceLocation SourceManager::getLocation(const FileEntry *SourceFile,
                                          unsigned Line, unsigned Col) const {
  assert(SourceFile && "Null source file!");
  assert(Line && Col && "Line and column should start from 1!");

  fileinfo_iterator FI = FileInfos.find(SourceFile);
  if (FI == FileInfos.end())
    return SourceLocation();
  ContentCache *Content = FI->second;

  // If this is the first use of line information for this buffer, compute the
  // SourceLineCache for it on demand.
  if (Content->SourceLineCache == 0) {
    bool MyInvalid = false;
    ComputeLineNumbers(Diag, Content, ContentCacheAlloc, *this, MyInvalid);
    if (MyInvalid)
      return SourceLocation();
  }

  // Find the first file ID that corresponds to the given file.
  FileID FirstFID;

  // First, check the main file ID, since it is common to look for a
  // location in the main file.
  if (!MainFileID.isInvalid()) {
    const SLocEntry &MainSLoc = getSLocEntry(MainFileID);
    if (MainSLoc.isFile() && MainSLoc.getFile().getContentCache() == Content)
      FirstFID = MainFileID;
  }

  if (FirstFID.isInvalid()) {
    // The location we're looking for isn't in the main file; look
    // through all of the source locations.
    for (unsigned I = 0, N = sloc_entry_size(); I != N; ++I) {
      const SLocEntry &SLoc = getSLocEntry(I);
      if (SLoc.isFile() && SLoc.getFile().getContentCache() == Content) {
        FirstFID = FileID::get(I);
        break;
      }
    }
  }

  if (FirstFID.isInvalid())
    return SourceLocation();

  if (Line > Content->NumLines) {
    unsigned Size = Content->getBuffer(Diag, *this)->getBufferSize();
    if (Size > 0)
      --Size;
    return getLocForStartOfFile(FirstFID).getFileLocWithOffset(Size);
  }

  unsigned FilePos = Content->SourceLineCache[Line - 1];
  const char *Buf = Content->getBuffer(Diag, *this)->getBufferStart() + FilePos;
  unsigned BufLength = Content->getBuffer(Diag, *this)->getBufferEnd() - Buf;
  unsigned i = 0;

  // Check that the given column is valid; if the line is too short, return
  // the location where the line ends.
  while (i < BufLength-1 && i < Col-1 && Buf[i] != '\n' && Buf[i] != '\r')
    ++i;
  if (i < Col-1)
    return getLocForStartOfFile(FirstFID).getFileLocWithOffset(FilePos + i);

  return getLocForStartOfFile(FirstFID).getFileLocWithOffset(FilePos + Col - 1);
}
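
// Illustrative usage sketch (assumed names SM and FileMgr, illustration only):
// translate a file/line/column triple back into a SourceLocation, e.g. when
// honoring a position supplied on the command line:
//
//   if (const FileEntry *FE = FileMgr.getFile("foo.c")) {
//     SourceLocation Loc = SM.getLocation(FE, /*Line=*/42, /*Col=*/7);
//     if (Loc.isValid())
//       ; // use Loc
//   }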

/// Given a decomposed source location, move it up the include/instantiation
/// stack to the parent source location.  If this is possible, return the
/// decomposed version of the parent in Loc and return false.  If Loc is the
/// top-level entry, return true and don't modify it.
static bool MoveUpIncludeHierarchy(std::pair<FileID, unsigned> &Loc,
                                   const SourceManager &SM) {
  SourceLocation UpperLoc;
  const SrcMgr::SLocEntry &Entry = SM.getSLocEntry(Loc.first);
  if (Entry.isInstantiation())
    UpperLoc = Entry.getInstantiation().getInstantiationLocStart();
  else
    UpperLoc = Entry.getFile().getIncludeLoc();

  if (UpperLoc.isInvalid())
    return true; // We reached the top.

  Loc = SM.getDecomposedLoc(UpperLoc);
  return false;
}
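
// Illustrative sketch (assumed names SM, Loc, LOffs; illustration only):
// callers in this file walk a decomposed location all the way up its include
// and instantiation stack by looping until the helper reports the top:
//
//   std::pair<FileID, unsigned> LOffs = SM.getDecomposedLoc(Loc);
//   while (!MoveUpIncludeHierarchy(LOffs, SM))
//     ; // LOffs now names the parent entry at each step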

/// \brief Determines the order of two source locations in the translation
/// unit.
///
/// \returns true if LHS source location comes before RHS, false otherwise.
bool SourceManager::isBeforeInTranslationUnit(SourceLocation LHS,
                                              SourceLocation RHS) const {
  assert(LHS.isValid() && RHS.isValid() && "Passed invalid source location!");
  if (LHS == RHS)
    return false;

  std::pair<FileID, unsigned> LOffs = getDecomposedLoc(LHS);
  std::pair<FileID, unsigned> ROffs = getDecomposedLoc(RHS);

  // If the source locations are in the same file, just compare offsets.
  if (LOffs.first == ROffs.first)
    return LOffs.second < ROffs.second;

  // If we are comparing a source location with multiple locations in the same
  // file, we get a big win by caching the result.
  if (IsBeforeInTUCache.isCacheValid(LOffs.first, ROffs.first))
    return IsBeforeInTUCache.getCachedResult(LOffs.second, ROffs.second);

  // Okay, we missed in the cache, start updating the cache for this query.
  IsBeforeInTUCache.setQueryFIDs(LOffs.first, ROffs.first);

  // "Traverse" the include/instantiation stacks of both locations and try to
  // find a common "ancestor".  FileIDs build a tree-like structure that
  // reflects the #include hierarchy, and this algorithm needs to find the
  // nearest common ancestor between the two locations.  For example, if you
  // have a.c that includes b.h and c.h, and are comparing a location in b.h to
  // a location in c.h, we need to find that their nearest common ancestor is
  // a.c, and compare the locations of the two #includes to find their relative
  // ordering.
  //
  // SourceManager assigns FileIDs in order of parsing.  This means that an
  // includee always has a larger FileID than an includer.  While you might
  // think that we could just compare the FileIDs here, that doesn't work to
  // compare a point at the end of a.c with a point within c.h.  Though c.h has
  // a larger FileID, we have to compare the include point of c.h to the
  // location in a.c.
  //
  // Despite not being able to directly compare FileIDs, we can tell that a
  // larger FileID is necessarily more deeply nested than a lower one and use
  // this information to walk up the tree to the nearest common ancestor.
  do {
    // If LOffs is larger than ROffs, then LOffs must be more deeply nested
    // than ROffs; walk up the #include chain.
    if (LOffs.first.ID > ROffs.first.ID) {
      if (MoveUpIncludeHierarchy(LOffs, *this))
        break; // We reached the top.

    } else {
      // Otherwise, ROffs is larger than LOffs, so ROffs must be more deeply
      // nested than LOffs; walk up the #include chain.
      if (MoveUpIncludeHierarchy(ROffs, *this))
        break; // We reached the top.
    }
  } while (LOffs.first != ROffs.first);

  // If we exited because we found a nearest common ancestor, compare the
  // locations within the common file and cache them.
  if (LOffs.first == ROffs.first) {
    IsBeforeInTUCache.setCommonLoc(LOffs.first, LOffs.second, ROffs.second);
    return IsBeforeInTUCache.getCachedResult(LOffs.second, ROffs.second);
  }

  // There is no common ancestor, most probably because one location is in the
  // predefines buffer or a PCH file.
  // FIXME: We should rearrange the external interface so this simply never
  // happens; it can't conceptually happen.  Also see PR5662.
  IsBeforeInTUCache.setQueryFIDs(FileID(), FileID()); // Don't try caching.

  // Zip both entries up to the top level record, stripping off macro
  // instantiation locations along the way so each ends at a top-level File
  // SLocEntry.
  while (!MoveUpIncludeHierarchy(LOffs, *this)) /*empty*/;
  while (!MoveUpIncludeHierarchy(ROffs, *this)) /*empty*/;

  // If exactly one location is a memory buffer, assume it precedes the other.
  bool LIsMB = getFileEntryForID(LOffs.first) == 0;
  bool RIsMB = getFileEntryForID(ROffs.first) == 0;
  if (LIsMB != RIsMB)
    return LIsMB;

  // Otherwise, just assume FileIDs were created in order.
  return LOffs.first < ROffs.first;
}
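
// Illustrative usage sketch (assumed names SM, LHS, RHS; illustration only):
// order two locations that may live in different #included files, e.g. to
// sort diagnostics into source order:
//
//   if (SM.isBeforeInTranslationUnit(LHS, RHS))
//     ; // LHS was reached first while parsing the translation unit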

/// PrintStats - Print statistics to stderr.
///
void SourceManager::PrintStats() const {
  llvm::errs() << "\n*** Source Manager Stats:\n";
  llvm::errs() << FileInfos.size() << " files mapped, " << MemBufferInfos.size()
               << " mem buffers mapped.\n";
  llvm::errs() << SLocEntryTable.size() << " SLocEntry's allocated, "
               << NextOffset << "B of Sloc address space used.\n";

  unsigned NumLineNumsComputed = 0;
  unsigned NumFileBytesMapped = 0;
  for (fileinfo_iterator I = fileinfo_begin(), E = fileinfo_end(); I != E; ++I){
    NumLineNumsComputed += I->second->SourceLineCache != 0;
    NumFileBytesMapped  += I->second->getSizeBytesMapped();
  }

  llvm::errs() << NumFileBytesMapped << " bytes of files mapped, "
               << NumLineNumsComputed << " files with line #'s computed.\n";
  llvm::errs() << "FileID scans: " << NumLinearScans << " linear, "
               << NumBinaryProbes << " binary.\n";
}

ExternalSLocEntrySource::~ExternalSLocEntrySource() { }