Make InternalAlloc/InternalFree in sanitizer runtimes libc-free by switching to a custom allocator.
llvm-svn: 182836
commit c30e2d6b3a
parent 08f662845d
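
A minimal usage sketch of the new cache-aware interface (illustrative only, based on the declarations added in sanitizer_allocator_internal.h below; the helper name is hypothetical and not part of this commit). Callers may pass a per-thread InternalAllocatorCache to skip the global spin lock, or pass nothing and take the locked fallback path.

#include "sanitizer_common/sanitizer_allocator_internal.h"

namespace __sanitizer {

// Hypothetical helper (not in the commit) showing both calling conventions.
void ExampleInternalAllocUsage() {
  // No cache supplied: the allocation goes through a single global
  // InternalAllocatorCache guarded by a spin mutex.
  void *p = InternalAlloc(128);
  InternalFree(p);

  // Caller-owned cache (TSan keeps one per thread in
  // ThreadState::internal_alloc_cache), avoiding the global lock.
  InternalAllocatorCache cache = {};  // zero-initialized, like TSan's member
  internal_allocator()->InitCache(&cache);
  void *q = InternalAlloc(256, &cache);
  InternalFree(q, &cache);
  internal_allocator()->DestroyCache(&cache);
}

}  // namespace __sanitizer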
@@ -13,6 +13,7 @@
//===----------------------------------------------------------------------===//

#include "msan.h"
#include "sanitizer_common/sanitizer_allocator_internal.h"
#include "sanitizer_common/sanitizer_common.h"
#include "sanitizer_common/sanitizer_flags.h"
#include "sanitizer_common/sanitizer_mutex.h"

@@ -34,6 +34,7 @@ set(SANITIZER_LIBCDEP_SOURCES
# headers when building our custom unit tests.
set(SANITIZER_HEADERS
  sanitizer_allocator.h
  sanitizer_allocator_internal.h
  sanitizer_atomic_clang.h
  sanitizer_atomic_msvc.h
  sanitizer_atomic.h

@@ -9,44 +9,102 @@
//
// This file is shared between AddressSanitizer and ThreadSanitizer
// run-time libraries.
// This allocator that is used inside run-times.
// This allocator is used inside run-times.
//===----------------------------------------------------------------------===//
#include "sanitizer_allocator.h"
#include "sanitizer_allocator_internal.h"
#include "sanitizer_common.h"

// FIXME: We should probably use more low-level allocator that would
// mmap some pages and split them into chunks to fulfill requests.
#if SANITIZER_LINUX && !SANITIZER_ANDROID
extern "C" void *__libc_malloc(__sanitizer::uptr size);
extern "C" void __libc_free(void *ptr);
# define LIBC_MALLOC __libc_malloc
# define LIBC_FREE __libc_free
#else // SANITIZER_LINUX && !SANITIZER_ANDROID
# include <stdlib.h>
# define LIBC_MALLOC malloc
# define LIBC_FREE free
#endif // SANITIZER_LINUX && !SANITIZER_ANDROID

namespace __sanitizer {

// ThreadSanitizer for Go uses libc malloc/free.
#if defined(SANITIZER_GO)
# if SANITIZER_LINUX && !SANITIZER_ANDROID
extern "C" void *__libc_malloc(uptr size);
extern "C" void __libc_free(void *ptr);
# define LIBC_MALLOC __libc_malloc
# define LIBC_FREE __libc_free
# else
# include <stdlib.h>
# define LIBC_MALLOC malloc
# define LIBC_FREE free
# endif

static void *RawInternalAlloc(uptr size, InternalAllocatorCache *cache) {
  (void)cache;
  return LIBC_MALLOC(size);
}

static void RawInternalFree(void *ptr, InternalAllocatorCache *cache) {
  (void)cache;
  LIBC_FREE(ptr);
}

InternalAllocator *internal_allocator() {
  return 0;
}

#else // SANITIZER_GO

static char internal_alloc_placeholder[sizeof(InternalAllocator)] ALIGNED(64);
static atomic_uint8_t internal_allocator_initialized;
static StaticSpinMutex internal_alloc_init_mu;

static InternalAllocatorCache internal_allocator_cache;
static StaticSpinMutex internal_allocator_cache_mu;

InternalAllocator *internal_allocator() {
  InternalAllocator *internal_allocator_instance =
      reinterpret_cast<InternalAllocator *>(&internal_alloc_placeholder);
  if (atomic_load(&internal_allocator_initialized, memory_order_acquire) == 0) {
    SpinMutexLock l(&internal_alloc_init_mu);
    if (atomic_load(&internal_allocator_initialized, memory_order_relaxed) ==
        0) {
      internal_allocator_instance->Init();
      atomic_store(&internal_allocator_initialized, 1, memory_order_release);
    }
  }
  return internal_allocator_instance;
}

static void *RawInternalAlloc(uptr size, InternalAllocatorCache *cache) {
  if (cache == 0) {
    SpinMutexLock l(&internal_allocator_cache_mu);
    return internal_allocator()->Allocate(&internal_allocator_cache, size, 8,
                                          false);
  }
  return internal_allocator()->Allocate(cache, size, 8, false);
}

static void RawInternalFree(void *ptr, InternalAllocatorCache *cache) {
  if (cache == 0) {
    SpinMutexLock l(&internal_allocator_cache_mu);
    return internal_allocator()->Deallocate(&internal_allocator_cache, ptr);
  }
  internal_allocator()->Deallocate(cache, ptr);
}

#endif // SANITIZER_GO

const u64 kBlockMagic = 0x6A6CB03ABCEBC041ull;

void *InternalAlloc(uptr size) {
void *InternalAlloc(uptr size, InternalAllocatorCache *cache) {
  if (size + sizeof(u64) < size)
    return 0;
  void *p = LIBC_MALLOC(size + sizeof(u64));
  void *p = RawInternalAlloc(size + sizeof(u64), cache);
  if (p == 0)
    return 0;
  ((u64*)p)[0] = kBlockMagic;
  return (char*)p + sizeof(u64);
}

void InternalFree(void *addr) {
void InternalFree(void *addr, InternalAllocatorCache *cache) {
  if (addr == 0)
    return;
  addr = (char*)addr - sizeof(u64);
  CHECK_EQ(((u64*)addr)[0], kBlockMagic);
  CHECK_EQ(kBlockMagic, ((u64*)addr)[0]);
  ((u64*)addr)[0] = 0;
  LIBC_FREE(addr);
  RawInternalFree(addr, cache);
}

// LowLevelAllocator

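The internal_allocator() accessor above lazily initializes an object placed in static storage: an acquire load on the fast path, then a mutex plus a relaxed re-check on the slow path, with a release store publishing the initialized state. A minimal standalone sketch of the same idiom in portable C++11 follows (illustration only; std::atomic and std::mutex stand in for the sanitizer's atomic_uint8_t and StaticSpinMutex, and unlike the commit, which calls Init() directly on zero-initialized storage, the sketch placement-constructs the object).

#include <atomic>
#include <mutex>
#include <new>

struct Thing {
  void Init() {}
};

static alignas(64) char storage[sizeof(Thing)];
static std::atomic<bool> initialized{false};
static std::mutex init_mu;

Thing *GetThing() {
  // Fast path: the acquire load pairs with the release store made after Init.
  if (!initialized.load(std::memory_order_acquire)) {
    std::lock_guard<std::mutex> lock(init_mu);
    // Re-check under the lock; relaxed is enough because the mutex orders us.
    if (!initialized.load(std::memory_order_relaxed)) {
      new (storage) Thing();  // construct the object in static storage
      initialized.store(true, std::memory_order_release);
    }
  }
  return reinterpret_cast<Thing *>(storage);
}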
@@ -0,0 +1,64 @@
//===-- sanitizer_allocator_internal.h -------------------------- C++ -----===//
//
// The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This allocator is used inside run-times.
//
//===----------------------------------------------------------------------===//

#ifndef SANITIZER_ALLOCATOR_INTERNAL_H
#define SANITIZER_ALLOCATOR_INTERNAL_H

#include "sanitizer_allocator.h"
#include "sanitizer_internal_defs.h"

namespace __sanitizer {

// TODO: Check if we may use even more compact size class map for internal
// purposes.
typedef CompactSizeClassMap InternalSizeClassMap;

static const uptr kInternalAllocatorSpace = 0;
#if SANITIZER_WORDSIZE == 32
static const u64 kInternalAllocatorSize = (1ULL << 32);
static const uptr kInternalAllocatorRegionSizeLog = 20;
#else
static const u64 kInternalAllocatorSize = (1ULL << 47);
static const uptr kInternalAllocatorRegionSizeLog = 24;
#endif
static const uptr kInternalAllocatorFlatByteMapSize =
    kInternalAllocatorSize >> kInternalAllocatorRegionSizeLog;
typedef SizeClassAllocator32<
    kInternalAllocatorSpace, kInternalAllocatorSize, 16, InternalSizeClassMap,
    kInternalAllocatorRegionSizeLog,
    FlatByteMap<kInternalAllocatorFlatByteMapSize> > PrimaryInternalAllocator;

typedef SizeClassAllocatorLocalCache<PrimaryInternalAllocator>
    InternalAllocatorCache;

// We don't want our internal allocator to do any map/unmap operations.
struct CrashOnMapUnmap {
  void OnMap(uptr p, uptr size) const {
    CHECK(0 && "Unexpected mmap in InternalAllocator!");
  }
  void OnUnmap(uptr p, uptr size) const {
    CHECK(0 && "Unexpected unmap in InternalAllocator!");
  }
};

typedef CombinedAllocator<PrimaryInternalAllocator, InternalAllocatorCache,
                          LargeMmapAllocator<CrashOnMapUnmap> >
    InternalAllocator;

void *InternalAlloc(uptr size, InternalAllocatorCache *cache = 0);
void InternalFree(void *p, InternalAllocatorCache *cache = 0);
InternalAllocator *internal_allocator();

} // namespace __sanitizer

#endif // SANITIZER_ALLOCATOR_INTERNAL_H

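A quick sanity check on the constants in this new header (illustrative arithmetic, not part of the commit): the flat byte map keeps one byte per region, so the 64-bit configuration needs (1ULL << 47) >> 24 = 2^23 entries (roughly 8 MB of metadata) and the 32-bit configuration needs (1ULL << 32) >> 20 = 4096 entries.

// Standalone check of the byte-map sizing implied by the constants above.
static_assert(((1ULL << 47) >> 24) == (1ULL << 23),
              "64-bit: 2^23 one-byte map entries, roughly 8 MB");
static_assert(((1ULL << 32) >> 20) == 4096ULL,
              "32-bit: 4096 one-byte map entries");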
@@ -59,10 +59,6 @@ void *MmapAlignedOrDie(uptr size, uptr alignment, const char *mem_type);
bool MemoryRangeIsAvailable(uptr range_start, uptr range_end);
void FlushUnneededShadowMemory(uptr addr, uptr size);

// Internal allocator
void *InternalAlloc(uptr size);
void InternalFree(void *p);

// InternalScopedBuffer can be used instead of large stack arrays to
// keep frame size low.
// FIXME: use InternalAlloc instead of MmapOrDie once

@@ -10,6 +10,7 @@
// This file is shared between AddressSanitizer and ThreadSanitizer
// run-time libraries. See sanitizer_libc.h for details.
//===----------------------------------------------------------------------===//
#include "sanitizer_allocator_internal.h"
#include "sanitizer_common.h"
#include "sanitizer_libc.h"

@@ -11,6 +11,7 @@
// run-time libraries. See sanitizer_symbolizer.h for details.
//===----------------------------------------------------------------------===//

#include "sanitizer_allocator_internal.h"
#include "sanitizer_common.h"
#include "sanitizer_placement_new.h"
#include "sanitizer_procmaps.h"

@@ -12,6 +12,7 @@
//
//===----------------------------------------------------------------------===//
#include "sanitizer_common/sanitizer_allocator.h"
#include "sanitizer_common/sanitizer_allocator_internal.h"
#include "sanitizer_common/sanitizer_common.h"

#include "sanitizer_test_utils.h"

@@ -67,6 +68,10 @@ TEST(SanitizerCommon, CompactSizeClassMap) {
  TestSizeClassMap<CompactSizeClassMap>();
}

TEST(SanitizerCommon, InternalSizeClassMap) {
  TestSizeClassMap<InternalSizeClassMap>();
}

template <class Allocator>
void TestSizeClassAllocator() {
  Allocator *a = new Allocator;

@@ -611,6 +616,11 @@ TEST(Allocator, Stress) {
  }
}

TEST(Allocator, InternalAllocFailure) {
  EXPECT_DEATH(Ident(InternalAlloc(10 << 20)),
               "Unexpected mmap in InternalAllocator!");
}

TEST(Allocator, ScopedBuffer) {
  const int kSize = 512;
  {

@@ -75,10 +75,12 @@ void InitializeAllocator() {

void AllocatorThreadStart(ThreadState *thr) {
  allocator()->InitCache(&thr->alloc_cache);
  internal_allocator()->InitCache(&thr->internal_alloc_cache);
}

void AllocatorThreadFinish(ThreadState *thr) {
  allocator()->DestroyCache(&thr->alloc_cache);
  internal_allocator()->DestroyCache(&thr->internal_alloc_cache);
}

void AllocatorPrintStats() {

@@ -194,11 +196,12 @@ void invoke_free_hook(void *ptr) {
void *internal_alloc(MBlockType typ, uptr sz) {
  ThreadState *thr = cur_thread();
  CHECK_GT(thr->in_rtl, 0);
  CHECK_LE(sz, InternalSizeClassMap::kMaxSize);
  if (thr->nomalloc) {
    thr->nomalloc = 0; // CHECK calls internal_malloc().
    CHECK(0);
  }
  return InternalAlloc(sz);
  return InternalAlloc(sz, &thr->internal_alloc_cache);
}

void internal_free(void *p) {

@@ -208,7 +211,7 @@ void internal_free(void *p) {
    thr->nomalloc = 0; // CHECK calls internal_malloc().
    CHECK(0);
  }
  InternalFree(p);
  InternalFree(p, &thr->internal_alloc_cache);
}

} // namespace __tsan

@@ -261,5 +264,6 @@ uptr __tsan_get_allocated_size(void *p) {
void __tsan_on_thread_idle() {
  ThreadState *thr = cur_thread();
  allocator()->SwallowCache(&thr->alloc_cache);
  internal_allocator()->SwallowCache(&thr->internal_alloc_cache);
}
} // extern "C"

@@ -27,6 +27,7 @@
#define TSAN_RTL_H

#include "sanitizer_common/sanitizer_allocator.h"
#include "sanitizer_common/sanitizer_allocator_internal.h"
#include "sanitizer_common/sanitizer_common.h"
#include "sanitizer_common/sanitizer_thread_registry.h"
#include "tsan_clock.h"

@@ -424,6 +425,7 @@ struct ThreadState {
  ThreadClock clock;
#ifndef TSAN_GO
  AllocatorCache alloc_cache;
  InternalAllocatorCache internal_alloc_cache;
  Vector<JmpBuf> jmp_bufs;
#endif
  u64 stat[StatCnt];