[asan] Move lsan_disabled out of thread context.
Fix for the case where disabler is used in pthread key destructor.

llvm-svn: 184553
commit b94d5e2d1c (parent 91add7dfbf)
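For orientation before the diff: the change replaces the disable counter stored in the ASan thread context (AsanThread::lsan_disabled_) with a plain thread-local counter owned by __lsan, which stays valid even while pthread TSD destructors run. The following is a minimal standalone sketch of that counter-based disabler, not the sanitizer sources; the names disable_counter and DisabledInThisThread mirror what the diff introduces, and the ScopedDisabler-style RAII wrapper is an illustrative analogue of __lsan::ScopedDisabler.

// Standalone sketch (not the sanitizer sources): a counter-based disabler
// kept in a plain thread-local variable, so it does not depend on the ASan
// thread context being alive (e.g. inside pthread TSD destructors).
#include <cstdio>
#include <cstdlib>

namespace lsan_sketch {

thread_local int disable_counter = 0;  // mirrors __lsan::disable_counter

bool DisabledInThisThread() { return disable_counter > 0; }

void Disable() { ++disable_counter; }  // analogous to __lsan_disable()

void Enable() {                        // analogous to __lsan_enable()
  if (!disable_counter) {
    std::fprintf(stderr, "Unmatched call to __lsan_enable().\n");
    std::abort();
  }
  --disable_counter;
}

// RAII helper in the spirit of __lsan::ScopedDisabler.
struct ScopedDisabler {
  ScopedDisabler() { Disable(); }
  ~ScopedDisabler() { Enable(); }
};

}  // namespace lsan_sketch

int main() {
  {
    lsan_sketch::ScopedDisabler d;
    // While disabled, the allocator would tag new chunks as kIgnored.
    std::printf("disabled: %d\n", lsan_sketch::DisabledInThisThread());
  }
  std::printf("disabled: %d\n", lsan_sketch::DisabledInThisThread());
  return 0;
}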
@@ -423,10 +423,8 @@ static void *Allocate(uptr size, uptr alignment, StackTrace *stack,
     uptr fill_size = Min(size, (uptr)fl.max_malloc_fill_size);
     REAL(memset)(res, fl.malloc_fill_byte, fill_size);
   }
-  if (t && t->lsan_disabled())
-    m->lsan_tag = __lsan::kIgnored;
-  else
-    m->lsan_tag = __lsan::kDirectlyLeaked;
+  m->lsan_tag = __lsan::DisabledInThisThread() ? __lsan::kIgnored
+                                               : __lsan::kDirectlyLeaked;
   // Must be the last mutation of metadata in this function.
   atomic_store((atomic_uint8_t *)m, CHUNK_ALLOCATED, memory_order_release);
   ASAN_MALLOC_HOOK(res, size);
@@ -775,8 +773,8 @@ template void ForEachChunk<PrintLeakedCb>(PrintLeakedCb const &callback);
 template void ForEachChunk<CollectLeaksCb>(CollectLeaksCb const &callback);
 template void ForEachChunk<MarkIndirectlyLeakedCb>(
     MarkIndirectlyLeakedCb const &callback);
-template void ForEachChunk<CollectSuppressedCb>(
-    CollectSuppressedCb const &callback);
+template void ForEachChunk<CollectIgnoredCb>(
+    CollectIgnoredCb const &callback);
 #endif  // CAN_SANITIZE_LEAKS
 
 IgnoreObjectResult IgnoreObjectLocked(const void *p) {
@@ -794,28 +792,6 @@ IgnoreObjectResult IgnoreObjectLocked(const void *p) {
 }
 }  // namespace __lsan
 
-extern "C" {
-SANITIZER_INTERFACE_ATTRIBUTE
-void __lsan_disable() {
-#if CAN_SANITIZE_LEAKS
-  __asan_init();
-  __asan::AsanThread *t = __asan::GetCurrentThread();
-  CHECK(t);
-  t->disable_lsan();
-#endif  // CAN_SANITIZE_LEAKS
-}
-
-SANITIZER_INTERFACE_ATTRIBUTE
-void __lsan_enable() {
-#if CAN_SANITIZE_LEAKS
-  __asan_init();
-  __asan::AsanThread *t = __asan::GetCurrentThread();
-  CHECK(t);
-  t->enable_lsan();
-#endif  // CAN_SANITIZE_LEAKS
-}
-}  // extern "C"
-
 // ---------------------- Interface ---------------- {{{1
 using namespace __asan;  // NOLINT
 
@@ -109,7 +109,6 @@ void AsanThread::Destroy() {
 
 void AsanThread::Init() {
   SetThreadStackAndTls();
-  lsan_disabled_ = 0;
   CHECK(AddrIsInMem(stack_bottom_));
   CHECK(AddrIsInMem(stack_top_ - 1));
   ClearShadowForThreadStackAndTLS();
@@ -65,15 +65,6 @@ class AsanThread {
   uptr stack_size() { return stack_top_ - stack_bottom_; }
   uptr tls_begin() { return tls_begin_; }
   uptr tls_end() { return tls_end_; }
-  uptr lsan_disabled() { return lsan_disabled_; }
-  void disable_lsan() { lsan_disabled_++; }
-  void enable_lsan() {
-    if (!lsan_disabled_) {
-      Report("Unmatched call to __lsan_enable().\n");
-      Die();
-    }
-    lsan_disabled_--;
-  }
   u32 tid() { return context_->tid; }
   AsanThreadContext *context() { return context_; }
   void set_context(AsanThreadContext *context) { context_ = context; }
@@ -99,7 +90,6 @@ class AsanThread {
   uptr stack_bottom_;
   uptr tls_begin_;
   uptr tls_end_;
-  uptr lsan_disabled_;
 
   FakeStack fake_stack_;
   AsanThreadLocalMallocStorage malloc_storage_;
@@ -0,0 +1,38 @@
+// Regression test. Disabler should not depend on TSD validity.
+// RUN: LSAN_BASE="report_objects=1:use_registers=0:use_stacks=0:use_globals=0:use_tls=1"
+// RUN: %clangxx_lsan %s -o %t
+// RUN: LSAN_OPTIONS=$LSAN_BASE %t
+
+#include <assert.h>
+#include <pthread.h>
+#include <stdio.h>
+#include <stdlib.h>
+
+#include "sanitizer/lsan_interface.h"
+
+pthread_key_t key;
+
+void key_destructor(void *) {
+  __lsan::ScopedDisabler d;
+  void *p = malloc(1337);
+  // Break optimization.
+  fprintf(stderr, "Test alloc: %p.\n", p);
+  pthread_setspecific(key, 0);
+}
+
+void *thread_func(void *arg) {
+  int res = pthread_setspecific(key, (void*)1);
+  assert(res == 0);
+  return 0;
+}
+
+int main() {
+  int res = pthread_key_create(&key, &key_destructor);
+  assert(res == 0);
+  pthread_t thread_id;
+  res = pthread_create(&thread_id, 0, thread_func, 0);
+  assert(res == 0);
+  res = pthread_join(thread_id, 0);
+  assert(res == 0);
+  return 0;
+}
@@ -44,8 +44,6 @@ typedef CombinedAllocator<PrimaryAllocator, AllocatorCache,
 
 static Allocator allocator;
 static THREADLOCAL AllocatorCache cache;
-// All allocations made while this is > 0 will be treated as non-leaks.
-static THREADLOCAL uptr lsan_disabled;
 
 void InitializeAllocator() {
   allocator.Init();
@@ -63,7 +61,7 @@ static void RegisterAllocation(const StackTrace &stack, void *p, uptr size) {
   if (!p) return;
   ChunkMetadata *m = Metadata(p);
   CHECK(m);
-  m->tag = lsan_disabled ? kIgnored : kDirectlyLeaked;
+  m->tag = DisabledInThisThread() ? kIgnored : kDirectlyLeaked;
   m->stack_trace_id = StackDepotPut(stack.trace, stack.size);
   m->requested_size = size;
   atomic_store((atomic_uint8_t*)m, 1, memory_order_relaxed);
@@ -188,8 +186,8 @@ template void ForEachChunk<PrintLeakedCb>(PrintLeakedCb const &callback);
 template void ForEachChunk<CollectLeaksCb>(CollectLeaksCb const &callback);
 template void ForEachChunk<MarkIndirectlyLeakedCb>(
     MarkIndirectlyLeakedCb const &callback);
-template void ForEachChunk<CollectSuppressedCb>(
-    CollectSuppressedCb const &callback);
+template void ForEachChunk<CollectIgnoredCb>(
+    CollectIgnoredCb const &callback);
 
 IgnoreObjectResult IgnoreObjectLocked(const void *p) {
   void *chunk = allocator.GetBlockBegin(p);
@@ -206,20 +204,3 @@ IgnoreObjectResult IgnoreObjectLocked(const void *p) {
   }
 }
 }  // namespace __lsan
-
-extern "C" {
-SANITIZER_INTERFACE_ATTRIBUTE
-void __lsan_disable() {
-  __lsan::lsan_disabled++;
-}
-
-SANITIZER_INTERFACE_ATTRIBUTE
-void __lsan_enable() {
-  if (!__lsan::lsan_disabled) {
-    Report("Unmatched call to __lsan_enable().\n");
-    Die();
-  }
-  __lsan::lsan_disabled--;
-}
-}  // extern "C"
-
@@ -26,6 +26,9 @@ namespace __lsan {
 // This mutex is used to prevent races between DoLeakCheck and SuppressObject.
 BlockingMutex global_mutex(LINKER_INITIALIZED);
 
+THREADLOCAL int disable_counter;
+bool DisabledInThisThread() { return disable_counter > 0; }
+
 Flags lsan_flags;
 
 static void InitializeFlags() {
@@ -83,7 +86,7 @@ static inline bool CanBeAHeapPointer(uptr p) {
 
 // Scan the memory range, looking for byte patterns that point into allocator
 // chunks. Mark those chunks with tag and add them to the frontier.
-// There are two usage modes for this function: finding reachable or suppressed
+// There are two usage modes for this function: finding reachable or ignored
 // chunks (tag = kReachable or kIgnored) and finding indirectly leaked chunks
 // (tag = kIndirectlyLeaked). In the second case, there's no flood fill,
 // so frontier = 0.
@@ -102,7 +105,7 @@ void ScanRangeForPointers(uptr begin, uptr end,
     void *chunk = PointsIntoChunk(p);
     if (!chunk) continue;
     LsanMetadata m(chunk);
-    // Reachable beats suppressed beats leaked.
+    // Reachable beats ignored beats leaked.
     if (m.tag() == kReachable) continue;
     if (m.tag() == kIgnored && tag != kReachable) continue;
     m.set_tag(tag);
@@ -205,7 +208,7 @@ void MarkIndirectlyLeakedCb::operator()(void *p) const {
   }
 }
 
-void CollectSuppressedCb::operator()(void *p) const {
+void CollectIgnoredCb::operator()(void *p) const {
   p = GetUserBegin(p);
   LsanMetadata m(p);
   if (m.allocated() && m.tag() == kIgnored)
@@ -230,7 +233,7 @@ static void ClassifyAllChunks(SuspendedThreadsList const &suspended_threads) {
   if (flags()->log_pointers)
     Report("Scanning ignored chunks.\n");
   CHECK_EQ(0, frontier.size());
-  ForEachChunk(CollectSuppressedCb(&frontier));
+  ForEachChunk(CollectIgnoredCb(&frontier));
   FloodFillTag(&frontier, kIgnored);
 
   // Iterate over leaked chunks and mark those that are reachable from other
@@ -394,8 +397,9 @@ void LeakReport::PrintSummary() {
     bytes += leaks_[i].total_size;
     allocations += leaks_[i].hit_count;
   }
-  Printf("SUMMARY: LeakSanitizer: %llu byte(s) leaked in %llu allocation(s).\n\n",
-         bytes, allocations);
+  Printf(
+      "SUMMARY: LeakSanitizer: %llu byte(s) leaked in %llu allocation(s).\n\n",
+      bytes, allocations);
 }
 
 }  // namespace __lsan
@@ -420,4 +424,22 @@ void __lsan_ignore_object(const void *p) {
     Report("__lsan_ignore_object(): ignoring heap object at %p\n", p);
 #endif  // CAN_SANITIZE_LEAKS
 }
+
+SANITIZER_INTERFACE_ATTRIBUTE
+void __lsan_disable() {
+#if CAN_SANITIZE_LEAKS
+  __lsan::disable_counter++;
+#endif
+}
+
+SANITIZER_INTERFACE_ATTRIBUTE
+void __lsan_enable() {
+#if CAN_SANITIZE_LEAKS
+  if (!__lsan::disable_counter) {
+    Report("Unmatched call to __lsan_enable().\n");
+    Die();
+  }
+  __lsan::disable_counter--;
+#endif
+}
 }  // extern "C"
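The hunk above moves the exported __lsan_disable()/__lsan_enable() pair onto the new thread-local counter. As a usage note, calls must stay balanced per thread, otherwise the run dies with "Unmatched call to __lsan_enable()". Below is a hedged client-side sketch, assuming only the public sanitizer/lsan_interface.h header that the new regression test also includes; it is an illustration, not part of the commit.

// Client-side sketch: bracket allocations that should not be reported as
// leaks. Build with leak detection enabled (e.g. -fsanitize=leak or an ASan
// build with leak checking); the chunk allocated while disabled is tagged
// kIgnored and skipped by the end-of-process leak check.
#include <stdio.h>
#include <stdlib.h>

#include <sanitizer/lsan_interface.h>

int main() {
  __lsan_disable();
  void *intentionally_leaked = malloc(64);  // tagged kIgnored at allocation
  __lsan_enable();
  // Break optimization so the allocation is not elided.
  fprintf(stderr, "leaked on purpose: %p\n", intentionally_leaked);
  return 0;
}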
@@ -75,12 +75,6 @@ struct Flags {
 extern Flags lsan_flags;
 inline Flags *flags() { return &lsan_flags; }
 
-void InitCommonLsan();
-// Testing interface. Find leaked chunks and dump their addresses to vector.
-void ReportLeaked(InternalMmapVector<void *> *leaked, uptr sources);
-// Normal leak check. Find leaks and print a report according to flags.
-void DoLeakCheck();
-
 struct Leak {
   uptr hit_count;
   uptr total_size;
@@ -151,9 +145,9 @@ class MarkIndirectlyLeakedCb {
 };
 
 // Finds all chunk marked as kIgnored and adds their addresses to frontier.
-class CollectSuppressedCb {
+class CollectIgnoredCb {
  public:
-  explicit CollectSuppressedCb(Frontier *frontier)
+  explicit CollectIgnoredCb(Frontier *frontier)
       : frontier_(frontier) {}
  void operator()(void *p) const;
 private:
@@ -166,6 +160,11 @@ enum IgnoreObjectResult {
   kIgnoreObjectInvalid
 };
 
+// Functions called from the parent tool.
+void InitCommonLsan();
+void DoLeakCheck();
+bool DisabledInThisThread();
+
 // The following must be implemented in the parent tool.
 
 template<typename Callable> void ForEachChunk(Callable const &callback);