[sanitizer] factor out ByteMap from SizeClassAllocator32 so that it can later be replaced with a more memory-efficient implementation on 64-bit.
llvm-svn: 182234
This commit is contained in:
parent 29699c6365
commit 3469375e4f
@@ -69,8 +69,12 @@ typedef SizeClassAllocator64<kAllocatorSpace, kAllocatorSize, 0 /*metadata*/,
 #elif SANITIZER_WORDSIZE == 32
 static const u64 kAddressSpaceSize = 1ULL << 32;
 typedef CompactSizeClassMap SizeClassMap;
+static const uptr kRegionSizeLog = 20;
+static const uptr kFlatByteMapSize = kAddressSpaceSize >> kRegionSizeLog;
 typedef SizeClassAllocator32<0, kAddressSpaceSize, 16,
-  SizeClassMap, AsanMapUnmapCallback> PrimaryAllocator;
+  SizeClassMap, kRegionSizeLog,
+  FlatByteMap<kFlatByteMapSize>,
+  AsanMapUnmapCallback> PrimaryAllocator;
 #endif

 typedef SizeClassAllocatorLocalCache<PrimaryAllocator> AllocatorCache;
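For context on the two constants introduced above: with 2^20-byte (1 MiB) regions, a flat one-byte-per-region map covering the full 32-bit address space needs only 4096 entries. A standalone sketch of the arithmetic (mine, not part of the patch):

#include <cassert>
#include <cstdint>

int main() {
  const uint64_t kAddressSpaceSize = 1ULL << 32;  // every 32-bit address
  const uint64_t kRegionSizeLog = 20;             // 2^20 = 1 MiB per region
  const uint64_t kFlatByteMapSize = kAddressSpaceSize >> kRegionSizeLog;
  assert(kFlatByteMapSize == 4096);  // one u8 per region: a 4 KiB array
  return 0;
}

On a 64-bit target the same flat scheme balloons (for example, a 2^47-byte address space with 2^24-byte regions would need a 2^23-entry, 8 MiB table), which is what the commit message means by wanting a more memory-efficient implementation on 64-bit.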
@@ -561,6 +561,30 @@ class SizeClassAllocator64 {
   }
 };

+// Maps integers in range [0, kSize) to u8 values.
+template<u64 kSize>
+class FlatByteMap {
+ public:
+  void TestOnlyInit() {
+    internal_memset(map_, 0, sizeof(map_));
+  }
+
+  void set(uptr idx, u8 val) {
+    CHECK_LT(idx, kSize);
+    CHECK_EQ(0U, map_[idx]);
+    map_[idx] = val;
+  }
+  u8 operator[] (uptr idx) {
+    CHECK_LT(idx, kSize);
+    // FIXME: CHECK may be too expensive here.
+    return map_[idx];
+  }
+ private:
+  u8 map_[kSize];
+};
+
+// FIXME: Also implement TwoLevelByteMap.
+
 // SizeClassAllocator32 -- allocator for 32-bit address space.
 // This allocator can theoretically be used on 64-bit arch, but there it is less
 // efficient than SizeClassAllocator64.
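The FIXME above names the planned 64-bit follow-up: keep a small first-level array of pointers and allocate second-level byte tables lazily, so a mostly-empty map stays cheap. A minimal sketch of what such a TwoLevelByteMap could look like — the shape and names are my assumptions, not this patch's code, and it is not thread-safe (a real version would need atomics or a mutex):

// Maps integers in range [0, kSize1 * kSize2) to u8 values; each kSize2-byte
// second-level table is allocated on the first write into its slice.
template<u64 kSize1, u64 kSize2>
class TwoLevelByteMap {
 public:
  void TestOnlyInit() {
    internal_memset(map1_, 0, sizeof(map1_));
  }
  void set(uptr idx, u8 val) {
    CHECK_LT(idx, kSize1 * kSize2);
    u8 *map2 = GetOrCreate(idx / kSize2);
    CHECK_EQ(0U, map2[idx % kSize2]);
    map2[idx % kSize2] = val;
  }
  u8 operator[] (uptr idx) const {
    CHECK_LT(idx, kSize1 * kSize2);
    u8 *map2 = map1_[idx / kSize2];
    if (!map2) return 0;  // an untouched table reads as "region unused"
    return map2[idx % kSize2];
  }
 private:
  u8 *GetOrCreate(uptr idx) {
    if (!map1_[idx])  // anonymous mmap returns zero-initialized memory
      map1_[idx] = reinterpret_cast<u8*>(MmapOrDie(kSize2, "TwoLevelByteMap"));
    return map1_[idx];
  }
  u8 *map1_[kSize1];
};

Only the kSize1 first-level pointers are resident up front; each populated slice of the index space then costs one kSize2-byte table on first use.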
@@ -572,7 +596,7 @@ class SizeClassAllocator64 {
 // a result of a single call to MmapAlignedOrDie(kRegionSize, kRegionSize).
 // Since the regions are aligned by kRegionSize, there are exactly
 // kNumPossibleRegions possible regions in the address space and so we keep
-// an u8 array possible_regions[kNumPossibleRegions] to store the size classes.
+// a ByteMap possible_regions to store the size classes of each Region.
 // 0 size class means the region is not used by the allocator.
 //
 // One Region is used to allocate chunks of a single size class.
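The pointer-to-entry mapping relies on ComputeRegionId, whose body is not shown in this diff; since every Region is kRegionSize-aligned, it is presumably just a shift. A sketch under that assumption (hypothetical body, not the patch's code):

  uptr ComputeRegionId(uptr mem) {
    uptr res = mem >> kRegionSizeLog;
    CHECK_LT(res, kNumPossibleRegions);  // mem must lie inside the space
    return res;
  }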
@@ -583,16 +607,19 @@ class SizeClassAllocator64 {
 // cache-line aligned.
 template <const uptr kSpaceBeg, const u64 kSpaceSize,
           const uptr kMetadataSize, class SizeClassMap,
+          const uptr kRegionSizeLog,
+          class ByteMap,
           class MapUnmapCallback = NoOpMapUnmapCallback>
 class SizeClassAllocator32 {
  public:
   typedef typename SizeClassMap::TransferBatch Batch;
   typedef SizeClassAllocator32<kSpaceBeg, kSpaceSize, kMetadataSize,
-      SizeClassMap, MapUnmapCallback> ThisT;
+      SizeClassMap, kRegionSizeLog, ByteMap, MapUnmapCallback> ThisT;
   typedef SizeClassAllocatorLocalCache<ThisT> AllocatorCache;

   void Init() {
-    state_ = reinterpret_cast<State *>(MapWithCallback(sizeof(State)));
+    possible_regions.TestOnlyInit();
+    internal_memset(size_class_info_array, 0, sizeof(size_class_info_array));
   }

   void *MapWithCallback(uptr size) {
@@ -649,7 +676,7 @@ class SizeClassAllocator32 {
   }

   uptr GetSizeClass(void *p) {
-    return state_->possible_regions[ComputeRegionId(reinterpret_cast<uptr>(p))];
+    return possible_regions[ComputeRegionId(reinterpret_cast<uptr>(p))];
   }

   void *GetBlockBegin(void *p) {
@@ -674,16 +701,15 @@ class SizeClassAllocator32 {
     // No need to lock here.
     uptr res = 0;
     for (uptr i = 0; i < kNumPossibleRegions; i++)
-      if (state_->possible_regions[i])
+      if (possible_regions[i])
         res += kRegionSize;
     return res;
   }

   void TestOnlyUnmap() {
     for (uptr i = 0; i < kNumPossibleRegions; i++)
-      if (state_->possible_regions[i])
+      if (possible_regions[i])
         UnmapWithCallback((i * kRegionSize), kRegionSize);
-    UnmapWithCallback(reinterpret_cast<uptr>(state_), sizeof(State));
   }

   // ForceLock() and ForceUnlock() are needed to implement Darwin malloc zone
@@ -706,8 +732,8 @@ class SizeClassAllocator32 {
   template<typename Callable>
   void ForEachChunk(const Callable &callback) {
     for (uptr region = 0; region < kNumPossibleRegions; region++)
-      if (state_->possible_regions[region]) {
-        uptr chunk_size = SizeClassMap::Size(state_->possible_regions[region]);
+      if (possible_regions[region]) {
+        uptr chunk_size = SizeClassMap::Size(possible_regions[region]);
         uptr max_chunks_in_region = kRegionSize / (chunk_size + kMetadataSize);
         uptr region_beg = region * kRegionSize;
         for (uptr p = region_beg;
@@ -726,7 +752,6 @@ class SizeClassAllocator32 {
   static const uptr kNumClasses = SizeClassMap::kNumClasses;

  private:
-  static const uptr kRegionSizeLog = SANITIZER_WORDSIZE == 64 ? 24 : 20;
   static const uptr kRegionSize = 1 << kRegionSizeLog;
   static const uptr kNumPossibleRegions = kSpaceSize / kRegionSize;

@@ -754,14 +779,13 @@ class SizeClassAllocator32 {
     MapUnmapCallback().OnMap(res, kRegionSize);
     stat->Add(AllocatorStatMmapped, kRegionSize);
     CHECK_EQ(0U, (res & (kRegionSize - 1)));
-    CHECK_EQ(0U, state_->possible_regions[ComputeRegionId(res)]);
-    state_->possible_regions[ComputeRegionId(res)] = class_id;
+    possible_regions.set(ComputeRegionId(res), class_id);
     return res;
   }

   SizeClassInfo *GetSizeClassInfo(uptr class_id) {
     CHECK_LT(class_id, kNumClasses);
-    return &state_->size_class_info_array[class_id];
+    return &size_class_info_array[class_id];
   }

   void PopulateFreeList(AllocatorStats *stat, AllocatorCache *c,
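Nothing is lost by collapsing the CHECK_EQ-plus-assignment pair into possible_regions.set(): FlatByteMap::set (defined above) performs the same "old value must be zero" check internally before writing.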
@@ -792,11 +816,8 @@ class SizeClassAllocator32 {
     }
   }

-  struct State {
-    u8 possible_regions[kNumPossibleRegions];
-    SizeClassInfo size_class_info_array[kNumClasses];
-  };
-  State *state_;
+  ByteMap possible_regions;
+  SizeClassInfo size_class_info_array[kNumClasses];
 };

 // Objects of this type should be used as local caches for SizeClassAllocator64
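Note the knock-on effect of deleting State: Init() no longer maps anything and TestOnlyUnmap() no longer unmaps the allocator state itself, which is why each map_count/unmap_count expectation in the updated test below drops by one.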
@@ -41,8 +41,16 @@ typedef SizeClassAllocator64<
 static const u64 kAddressSpaceSize = 1ULL << 32;
 #endif

+static const uptr kRegionSizeLog = FIRST_32_SECOND_64(20, 24);
+static const uptr kFlatByteMapSize = kAddressSpaceSize >> kRegionSizeLog;
+
 typedef SizeClassAllocator32<
-  0, kAddressSpaceSize, 16, CompactSizeClassMap> Allocator32Compact;
+  0, kAddressSpaceSize,
+  /*kMetadataSize*/16,
+  CompactSizeClassMap,
+  kRegionSizeLog,
+  FlatByteMap<kFlatByteMapSize> >
+  Allocator32Compact;

 template <class SizeClassMap>
 void TestSizeClassMap() {
@@ -254,20 +262,25 @@ TEST(SanitizerCommon, SizeClassAllocator32MapUnmapCallback) {
   TestMapUnmapCallback::map_count = 0;
   TestMapUnmapCallback::unmap_count = 0;
   typedef SizeClassAllocator32<
-      0, kAddressSpaceSize, 16, CompactSizeClassMap,
-      TestMapUnmapCallback> Allocator32WithCallBack;
+      0, kAddressSpaceSize,
+      /*kMetadataSize*/16,
+      CompactSizeClassMap,
+      kRegionSizeLog,
+      FlatByteMap<kFlatByteMapSize>,
+      TestMapUnmapCallback>
+      Allocator32WithCallBack;
   Allocator32WithCallBack *a = new Allocator32WithCallBack;
   a->Init();
-  EXPECT_EQ(TestMapUnmapCallback::map_count, 1);  // Allocator state.
+  EXPECT_EQ(TestMapUnmapCallback::map_count, 0);
   SizeClassAllocatorLocalCache<Allocator32WithCallBack> cache;
   memset(&cache, 0, sizeof(cache));
   cache.Init(0);
   AllocatorStats stats;
   stats.Init();
   a->AllocateBatch(&stats, &cache, 32);
-  EXPECT_EQ(TestMapUnmapCallback::map_count, 2);  // alloc.
+  EXPECT_EQ(TestMapUnmapCallback::map_count, 1);
   a->TestOnlyUnmap();
-  EXPECT_EQ(TestMapUnmapCallback::unmap_count, 2);  // The whole thing + alloc.
+  EXPECT_EQ(TestMapUnmapCallback::unmap_count, 1);
   delete a;
   // fprintf(stderr, "Map: %d Unmap: %d\n",
   //         TestMapUnmapCallback::map_count,