Repoison the entire ASan chunk if it is not in active use.

Summary:
Repoisoning just the minimal redzones can leave an unpoisoned gap equal to
the actual redzone size minus the minimal redzone size. After ASan
activation the actual redzone can be larger than the minimal one, and the
ASan allocator assumes that a chunk returned by the common allocator is
either entirely poisoned or entirely unpoisoned (it is too expensive to
check the entire chunk or to always poison it).
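
For illustration, a minimal standalone sketch of that gap; the redzone sizes
below are made-up example values, not the allocator's actual ones:

  #include <cstdio>

  int main() {
    // Hypothetical numbers: while deactivated the allocator poisons only a
    // minimal redzone; after activation the actual redzone can be larger.
    unsigned minimal_redzone = 16;   // assumed pre-activation redzone size
    unsigned actual_redzone = 2048;  // assumed post-activation redzone size
    // Repoisoning only the minimal redzone would leave this many bytes of a
    // recycled chunk unpoisoned, breaking the "entirely poisoned or entirely
    // unpoisoned" assumption.
    std::printf("unpoisoned gap: %u bytes\n", actual_redzone - minimal_redzone);
    return 0;
  }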

Reviewers: eugenis

Subscribers: kubabrecka, llvm-commits

Differential Revision: https://reviews.llvm.org/D28577

llvm-svn: 291714
Alex Shlyapnikov 2017-01-11 22:10:35 +00:00
parent da710362c1
commit c83efa85e2
2 changed files with 38 additions and 11 deletions

View File

@@ -269,24 +269,24 @@ struct Allocator {
}
void RePoisonChunk(uptr chunk) {
// This could a user-facing chunk (with redzones), or some internal
// This could be a user-facing chunk (with redzones), or some internal
// housekeeping chunk, like TransferBatch. Start by assuming the former.
AsanChunk *ac = GetAsanChunk((void *)chunk);
uptr allocated_size = allocator.GetActuallyAllocatedSize((void *)ac);
uptr beg = ac->Beg();
uptr end = ac->Beg() + ac->UsedSize(true);
uptr chunk_end = chunk + allocated_size;
if (chunk < beg && beg < end && end <= chunk_end) {
// Looks like a valid AsanChunk. Or maybe not. Be conservative and only
// poison the redzones.
if (chunk < beg && beg < end && end <= chunk_end &&
ac->chunk_state == CHUNK_ALLOCATED) {
// Looks like a valid AsanChunk in use, poison redzones only.
PoisonShadow(chunk, beg - chunk, kAsanHeapLeftRedzoneMagic);
uptr end_aligned_down = RoundDownTo(end, SHADOW_GRANULARITY);
FastPoisonShadowPartialRightRedzone(
end_aligned_down, end - end_aligned_down,
chunk_end - end_aligned_down, kAsanHeapLeftRedzoneMagic);
} else {
// This can not be an AsanChunk. Poison everything. It may be reused as
// AsanChunk later.
// This is either not an AsanChunk or freed or quarantined AsanChunk.
// In either case, poison everything.
PoisonShadow(chunk, allocated_size, kAsanHeapLeftRedzoneMagic);
}
}
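
As a rough standalone sketch of the shadow-granularity rounding used in the
in-use branch above (illustration only, assuming the usual 8-byte shadow
granularity; the addresses and sizes are made up, and RoundDownTo is
re-implemented here rather than taken from the sanitizer sources):

  #include <cstdio>

  // Illustration only: ASan shadow granularity is 8 bytes on typical targets.
  static const unsigned long kShadowGranularity = 8;

  static unsigned long RoundDownTo(unsigned long x, unsigned long boundary) {
    return x & ~(boundary - 1);
  }

  int main() {
    // A chunk whose user region ends at `end` and whose storage ends at
    // `chunk_end`; the right redzone is [end, chunk_end).
    unsigned long end = 0x1000 + 100;        // user region end, not granule-aligned
    unsigned long chunk_end = 0x1000 + 128;  // end of the underlying chunk

    // The granule containing `end` is only partially addressable, so it keeps
    // a partial shadow value; the granules after it are poisoned entirely.
    unsigned long end_aligned_down = RoundDownTo(end, kShadowGranularity);
    std::printf("partial granule: [%#lx, %#lx), fully poisoned: [%#lx, %#lx)\n",
                end_aligned_down, end_aligned_down + kShadowGranularity,
                end_aligned_down + kShadowGranularity, chunk_end);
    return 0;
  }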

View File

@@ -21,6 +21,7 @@
// XFAIL: arm-linux-gnueabi
#if !defined(SHARED_LIB)
#include <assert.h>
#include <dlfcn.h>
#include <stdio.h>
@@ -32,13 +33,13 @@
#include "sanitizer/asan_interface.h"
constexpr unsigned nPtrs = 200;
char *ptrs[nPtrs];
void test_malloc_shadow(char *p, size_t sz, bool expect_redzones) {
// Last byte of the left redzone, if present.
assert((char *)__asan_region_is_poisoned(p - 1, sz + 1) ==
(expect_redzones ? p - 1 : nullptr));
// The user memory.
assert((char *)__asan_region_is_poisoned(p, sz) == nullptr);
// First byte of the right redzone, if present.
assert((char *)__asan_region_is_poisoned(p, sz + 1) ==
(expect_redzones ? p + sz : nullptr));
}
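
For reference, __asan_region_is_poisoned(beg, size) returns the address of the
first poisoned byte in [beg, beg + size), or null if no byte in the region is
poisoned. A minimal standalone usage sketch, assuming an ASan-instrumented
build with default allocator options (heap redzones enabled):

  // Compile with: clang++ -fsanitize=address example.cpp
  #include <sanitizer/asan_interface.h>
  #include <assert.h>
  #include <stdlib.h>

  int main() {
    char *p = (char *)malloc(32);
    // The user region itself is fully addressable.
    assert(__asan_region_is_poisoned(p, 32) == nullptr);
    // Extending the query one byte into the right redzone reports the first
    // poisoned byte, which is p + 32.
    assert((char *)__asan_region_is_poisoned(p, 33) == p + 32);
    free(p);
    return 0;
  }
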
@@ -46,12 +47,29 @@ void test_malloc_shadow(char *p, size_t sz, bool expect_redzones)
typedef void (*Fn)();
int main(int argc, char *argv[]) {
constexpr unsigned nPtrs = 200;
char *ptrs[nPtrs];
// Before activation: no redzones.
for (size_t sz = 1; sz < nPtrs; ++sz) {
ptrs[sz] = (char *)malloc(sz);
test_malloc_shadow(ptrs[sz], sz, false);
}
// Create a honey pot for the future instrumented allocations. Since the
// quarantine is disabled, chunks are going to be recycled right away and
// reused for the new allocations. New allocations must get the proper
// redzones either way, whether the chunk is fresh or reused.
constexpr size_t HoneyPotBlockSize = 4096;
constexpr int HoneyPotSize = 200;
char *honeyPot[HoneyPotSize];
for (int i = 1; i < HoneyPotSize; ++i) {
honeyPot[i] = (char *)malloc(HoneyPotBlockSize);
test_malloc_shadow(honeyPot[i], HoneyPotBlockSize, false);
}
for (int i = 1; i < HoneyPotSize; ++i)
free(honeyPot[i]);
std::string path = std::string(argv[0]) + "-so.so";
void *dso = dlopen(path.c_str(), RTLD_NOW);
if (!dso) {
@@ -67,11 +85,17 @@ int main(int argc, char *argv[]) {
}
// After activation: redzones.
for (int i = 1; i < HoneyPotSize; ++i) {
honeyPot[i] = (char *)malloc(HoneyPotBlockSize);
test_malloc_shadow(honeyPot[i], HoneyPotBlockSize, true);
}
{
char *p = (char *)malloc(100);
test_malloc_shadow(p, 100, true);
char *p = (char *)malloc(HoneyPotBlockSize);
test_malloc_shadow(p, HoneyPotBlockSize, true);
free(p);
}
for (int i = 1; i < HoneyPotSize; ++i)
free(honeyPot[i]);
// Pre-existing allocations got redzones, too.
for (size_t sz = 1; sz < nPtrs; ++sz) {
@@ -93,7 +117,9 @@ int main(int argc, char *argv[]) {
return 0;
}
#else // SHARED_LIB
#include <stdio.h>
#include <stdlib.h>
@@ -101,6 +127,7 @@ extern "C" void do_another_bad_thing() {
char *volatile p = (char *)malloc(100);
printf("%hhx\n", p[105]);
}
#endif // SHARED_LIB
// help=1 in activation flags lists only the flags that are supported at activation