[msan] Better use-after-free reports.
By attaching an extra integer tag to heap origins, we are able to
distinguish between uninits
 - created by heap allocation,
 - created by heap deallocation (i.e. use-after-free),
 - created by __msan_allocated_memory call,
 - etc.

See https://code.google.com/p/memory-sanitizer/issues/detail?id=35.

llvm-svn: 226821
parent a713fce539
commit d38af30b74
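In miniature, the change works like this: the allocator path sets an integer tag on the stack trace it is about to record (allocation, deallocation, or explicit poisoning), the tag is stored next to the heap origin, and the report switches on it instead of printing one generic line. The sketch below is a self-contained illustration of that flow, not MSan source; the tag constants, HeapOrigin and DescribeHeapOrigin are hypothetical stand-ins for the real StackTrace/Origin machinery shown in the diff that follows.

#include <cstdio>

// Hypothetical stand-ins for StackTrace::TAG_* and STACK_TRACE_TAG_POISON;
// the real values are added in sanitizer_stacktrace.h and msan.h below.
enum : unsigned {
  TAG_UNKNOWN = 0,
  TAG_ALLOC = 1,    // recorded when a chunk is allocated
  TAG_DEALLOC = 2,  // recorded when a chunk is freed (use-after-free)
  TAG_CUSTOM = 100, // tool-specific tags start here
};
const unsigned TAG_POISON = TAG_CUSTOM + 1;  // e.g. __msan_allocated_memory

// Stand-in for a heap Origin: in MSan the tag is stored with the stack
// trace in the depot; here it is just a field.
struct HeapOrigin {
  unsigned tag;
};

// The report switches on the tag instead of printing one generic message.
void DescribeHeapOrigin(const HeapOrigin &o) {
  switch (o.tag) {
    case TAG_ALLOC:
      std::puts("Uninitialized value was created by a heap allocation");
      break;
    case TAG_DEALLOC:
      std::puts("Uninitialized value was created by a heap deallocation");
      break;
    case TAG_POISON:
      std::puts("Memory was marked as uninitialized");
      break;
    default:
      std::puts("Uninitialized value was created");
      break;
  }
}

int main() {
  DescribeHeapOrigin({TAG_DEALLOC});  // the use-after-free wording
  DescribeHeapOrigin({TAG_POISON});   // the __msan_allocated_memory wording
}

Because the tag travels with the origin, the use-after-free test at the bottom of this diff can expect the "heap deallocation" wording together with the free() frame.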
@@ -81,7 +81,7 @@ void AsanLocateAddress(uptr addr, AddressDescription *descr) {
   GetInfoForHeapAddress(addr, descr);
 }
 
-uptr AsanGetStack(uptr addr, uptr *trace, uptr size, u32 *thread_id,
+static uptr AsanGetStack(uptr addr, uptr *trace, u32 size, u32 *thread_id,
                   bool alloc_stack) {
   AsanChunkView chunk = FindHeapChunkByAddress(addr);
   if (!chunk.IsValid()) return 0;
@@ -366,7 +366,7 @@ static void CollectLeaksCb(uptr chunk, void *arg) {
   LsanMetadata m(chunk);
   if (!m.allocated()) return;
   if (m.tag() == kDirectlyLeaked || m.tag() == kIndirectlyLeaked) {
-    uptr resolution = flags()->resolution;
+    u32 resolution = flags()->resolution;
     u32 stack_trace_id = 0;
     if (resolution > 0) {
       StackTrace stack = StackDepotGet(m.stack_trace_id());
@@ -273,6 +273,7 @@ u32 ChainOrigin(u32 id, StackTrace *stack) {
     return id;
 
   Origin o = Origin::FromRawId(id);
+  stack->tag = StackTrace::TAG_UNKNOWN;
   Origin chained = Origin::CreateChainedOrigin(o, stack);
   return chained.raw_id();
 }
@@ -167,6 +167,8 @@ void UnpoisonThreadLocalState();
 // the previous origin id.
 u32 ChainOrigin(u32 id, StackTrace *stack);
 
+const int STACK_TRACE_TAG_POISON = StackTrace::TAG_CUSTOM + 1;
+
 #define GET_MALLOC_STACK_TRACE \
   BufferedStackTrace stack; \
   if (__msan_get_track_origins() && msan_inited) \
@@ -113,6 +113,7 @@ static void *MsanAllocate(StackTrace *stack, uptr size, uptr alignment,
   } else if (flags()->poison_in_malloc) {
     __msan_poison(allocated, size);
     if (__msan_get_track_origins()) {
+      stack->tag = StackTrace::TAG_ALLOC;
       Origin o = Origin::CreateHeapOrigin(stack);
       __msan_set_origin(allocated, size, o.raw_id());
     }
@@ -133,6 +134,7 @@ void MsanDeallocate(StackTrace *stack, void *p) {
   if (flags()->poison_in_free) {
     __msan_poison(p, size);
     if (__msan_get_track_origins()) {
+      stack->tag = StackTrace::TAG_DEALLOC;
       Origin o = Origin::CreateHeapOrigin(stack);
       __msan_set_origin(p, size, o.raw_id());
     }
@@ -174,6 +176,7 @@ void *MsanReallocate(StackTrace *stack, void *old_p, uptr new_size,
       __msan_clear_and_unpoison((char *)old_p + old_size,
                                 new_size - old_size);
     } else if (flags()->poison_in_malloc) {
+      stack->tag = StackTrace::TAG_ALLOC;
      PoisonMemory((char *)old_p + old_size, new_size - old_size, stack);
     }
   }
@@ -986,11 +986,9 @@ INTERCEPTOR(void *, malloc, SIZE_T size) {
 
 void __msan_allocated_memory(const void *data, uptr size) {
   GET_MALLOC_STACK_TRACE;
-  if (flags()->poison_in_malloc)
-    __msan_poison(data, size);
-  if (__msan_get_track_origins()) {
-    Origin o = Origin::CreateHeapOrigin(&stack);
-    __msan_set_origin(data, size, o.raw_id());
+  if (flags()->poison_in_malloc) {
+    stack.tag = STACK_TRACE_TAG_POISON;
+    PoisonMemory(data, size, &stack);
   }
 }
 
@@ -75,8 +75,23 @@ static void DescribeOrigin(u32 id) {
     DescribeStackOrigin(so, pc);
   } else {
     StackTrace stack = o.getStackTraceForHeapOrigin();
+    switch (stack.tag) {
+      case StackTrace::TAG_ALLOC:
         Printf("  %sUninitialized value was created by a heap allocation%s\n",
                d.Origin(), d.End());
+        break;
+      case StackTrace::TAG_DEALLOC:
+        Printf("  %sUninitialized value was created by a heap deallocation%s\n",
+               d.Origin(), d.End());
+        break;
+      case STACK_TRACE_TAG_POISON:
+        Printf("  %sMemory was marked as uninitialized%s\n", d.Origin(),
+               d.End());
+        break;
+      default:
+        Printf("  %sUninitialized value was created%s\n", d.Origin(), d.End());
+        break;
+    }
     stack.Print();
   }
 }
@@ -22,7 +22,8 @@ struct StackDepotNode {
   StackDepotNode *link;
   u32 id;
   atomic_uint32_t hash_and_use_count; // hash_bits : 12; use_count : 20;
-  uptr size;
+  u32 size;
+  u32 tag;
   uptr stack[1]; // [size]
 
   static const u32 kTabSizeLog = 20;
@@ -37,7 +38,8 @@ struct StackDepotNode {
   bool eq(u32 hash, const args_type &args) const {
     u32 hash_bits =
         atomic_load(&hash_and_use_count, memory_order_relaxed) & kHashMask;
-    if ((hash & kHashMask) != hash_bits || args.size != size) return false;
+    if ((hash & kHashMask) != hash_bits || args.size != size || args.tag != tag)
+      return false;
     uptr i = 0;
     for (; i < size; i++) {
       if (stack[i] != args.trace[i]) return false;
@@ -72,10 +74,11 @@ struct StackDepotNode {
   void store(const args_type &args, u32 hash) {
     atomic_store(&hash_and_use_count, hash & kHashMask, memory_order_relaxed);
     size = args.size;
+    tag = args.tag;
     internal_memcpy(stack, args.trace, size * sizeof(uptr));
   }
   args_type load() const {
-    return args_type(&stack[0], size);
+    return args_type(&stack[0], size, tag);
   }
   StackDepotHandle get_handle() { return StackDepotHandle(this); }
 
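A side effect of the StackDepotNode changes above is that the tag becomes part of a node's identity: store() records it, eq() compares args.tag against it, and load() hands it back, so the same call stack recorded under different tags produces distinct depot entries and hence distinct origin ids. A rough, self-contained sketch of that keying idea (a toy std::map, not the lock-free depot):

#include <cstdint>
#include <cstdio>
#include <map>
#include <vector>

// Toy illustration only: identical traces stored under different tags get
// different ids, mirroring the "args.tag != tag" check added to eq().
struct Key {
  std::vector<std::uintptr_t> trace;
  unsigned tag;
  bool operator<(const Key &other) const {
    if (tag != other.tag) return tag < other.tag;
    return trace < other.trace;
  }
};

class ToyDepot {
  std::map<Key, unsigned> ids_;
  unsigned next_id_ = 1;

 public:
  // Returns a stable id for (trace, tag); a new tag forces a new id even
  // when the program counters are identical.
  unsigned Put(const std::vector<std::uintptr_t> &trace, unsigned tag) {
    auto res = ids_.emplace(Key{trace, tag}, next_id_);
    if (res.second) ++next_id_;
    return res.first->second;
  }
};

int main() {
  ToyDepot depot;
  std::vector<std::uintptr_t> pcs = {0x401000, 0x401234};
  std::printf("alloc id:   %u\n", depot.Put(pcs, /*TAG_ALLOC*/ 1));
  std::printf("dealloc id: %u\n", depot.Put(pcs, /*TAG_DEALLOC*/ 2));  // new id
  std::printf("alloc id:   %u\n", depot.Put(pcs, 1));                  // reused
}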
@@ -68,7 +68,7 @@ static inline uhwptr *GetCanonicFrame(uptr bp,
 }
 
 void BufferedStackTrace::FastUnwindStack(uptr pc, uptr bp, uptr stack_top,
-                                         uptr stack_bottom, uptr max_depth) {
+                                         uptr stack_bottom, u32 max_depth) {
   CHECK_GE(max_depth, 2);
   trace_buffer[0] = pc;
   size = 1;
@@ -17,7 +17,7 @@
 
 namespace __sanitizer {
 
-static const uptr kStackTraceMax = 256;
+static const u32 kStackTraceMax = 256;
 
 #if SANITIZER_LINUX && (defined(__aarch64__) || defined(__powerpc__) || \
                         defined(__powerpc64__) || defined(__sparc__) || \
@@ -40,10 +40,18 @@ static const uptr kStackTraceMax = 256;
 
 struct StackTrace {
   const uptr *trace;
-  uptr size;
+  u32 size;
+  u32 tag;
 
-  StackTrace() : trace(nullptr), size(0) {}
-  StackTrace(const uptr *trace, uptr size) : trace(trace), size(size) {}
+  static const int TAG_UNKNOWN = 0;
+  static const int TAG_ALLOC = 1;
+  static const int TAG_DEALLOC = 2;
+  static const int TAG_CUSTOM = 100; // Tool specific tags start here.
+
+  StackTrace() : trace(nullptr), size(0), tag(0) {}
+  StackTrace(const uptr *trace, u32 size) : trace(trace), size(size), tag(0) {}
+  StackTrace(const uptr *trace, u32 size, u32 tag)
+      : trace(trace), size(size), tag(tag) {}
 
   // Prints a symbolized stacktrace, followed by an empty line.
   void Print() const;
@@ -88,15 +96,15 @@ struct BufferedStackTrace : public StackTrace {
   BufferedStackTrace() : StackTrace(trace_buffer, 0), top_frame_bp(0) {}
 
   void Init(const uptr *pcs, uptr cnt, uptr extra_top_pc = 0);
-  void Unwind(uptr max_depth, uptr pc, uptr bp, void *context, uptr stack_top,
+  void Unwind(u32 max_depth, uptr pc, uptr bp, void *context, uptr stack_top,
               uptr stack_bottom, bool request_fast_unwind);
 
  private:
   void FastUnwindStack(uptr pc, uptr bp, uptr stack_top, uptr stack_bottom,
-                       uptr max_depth);
-  void SlowUnwindStack(uptr pc, uptr max_depth);
+                       u32 max_depth);
+  void SlowUnwindStack(uptr pc, u32 max_depth);
   void SlowUnwindStackWithContext(uptr pc, void *context,
-                                  uptr max_depth);
+                                  u32 max_depth);
   void PopStackFrames(uptr count);
   uptr LocatePcInTrace(uptr pc);
 
@@ -44,7 +44,7 @@ void StackTrace::Print() const {
   Printf("\n");
 }
 
-void BufferedStackTrace::Unwind(uptr max_depth, uptr pc, uptr bp, void *context,
+void BufferedStackTrace::Unwind(u32 max_depth, uptr pc, uptr bp, void *context,
                                 uptr stack_top, uptr stack_bottom,
                                 bool request_fast_unwind) {
   top_frame_bp = (max_depth > 0) ? bp : 0;
@@ -96,7 +96,7 @@ uptr Unwind_GetIP(struct _Unwind_Context *ctx) {
 
 struct UnwindTraceArg {
   BufferedStackTrace *stack;
-  uptr max_depth;
+  u32 max_depth;
 };
 
 _Unwind_Reason_Code Unwind_Trace(struct _Unwind_Context *ctx, void *param) {
@@ -108,7 +108,7 @@ _Unwind_Reason_Code Unwind_Trace(struct _Unwind_Context *ctx, void *param) {
   return UNWIND_CONTINUE;
 }
 
-void BufferedStackTrace::SlowUnwindStack(uptr pc, uptr max_depth) {
+void BufferedStackTrace::SlowUnwindStack(uptr pc, u32 max_depth) {
   CHECK_GE(max_depth, 2);
   size = 0;
   UnwindTraceArg arg = {this, Min(max_depth + 1, kStackTraceMax)};
@@ -128,7 +128,7 @@ void BufferedStackTrace::SlowUnwindStack(uptr pc, uptr max_depth) {
 }
 
 void BufferedStackTrace::SlowUnwindStackWithContext(uptr pc, void *context,
-                                                    uptr max_depth) {
+                                                    u32 max_depth) {
   CHECK_GE(max_depth, 2);
   if (!unwind_backtrace_signal_arch) {
     SlowUnwindStack(pc, max_depth);
@@ -99,7 +99,7 @@ int main(void) {
   // CHECK-ORIGINS: #1 {{.*}} in main{{.*}}msan_print_shadow.cc:14
 
   // CHECK-ORIGINS: Origin B (origin_id {{.*}}):
-  // CHECK-ORIGINS: Uninitialized value was created by a heap allocation
+  // CHECK-ORIGINS: Memory was marked as uninitialized
   // CHECK-ORIGINS: #0 {{.*}} in __msan_allocated_memory
   // CHECK-ORIGINS: #1 {{.*}} in main{{.*}}msan_print_shadow.cc:18
 
@@ -110,13 +110,13 @@ int main(void) {
   // CHECK-ORIGINS: #0 {{.*}} in main{{.*}}msan_print_shadow.cc:12
 
   // CHECK-ORIGINS: Origin D (origin_id {{.*}}):
-  // CHECK-ORIGINS: Uninitialized value was created by a heap allocation
+  // CHECK-ORIGINS: Memory was marked as uninitialized
   // CHECK-ORIGINS: #0 {{.*}} in __msan_allocated_memory
   // CHECK-ORIGINS: #1 {{.*}} in main{{.*}}msan_print_shadow.cc:20
 
   // ...
 
   // CHECK-ORIGINS: Origin Z (origin_id {{.*}}):
-  // CHECK-ORIGINS: Uninitialized value was created by a heap allocation
+  // CHECK-ORIGINS: Memory was marked as uninitialized
   // CHECK-ORIGINS: #0 {{.*}} in __msan_allocated_memory
   // CHECK-ORIGINS: #1 {{.*}} in main{{.*}}msan_print_shadow.cc:42
@@ -17,7 +17,7 @@ int main() {
   __msan_allocated_memory(&y, sizeof(y));
   float z = b ? x : y;
   if (z > 0) printf(".\n");
-  // CHECK: Uninitialized value was created by a heap allocation
+  // CHECK: Memory was marked as uninitialized
   // CHECK: {{#0 0x.* in .*__msan_allocated_memory}}
   // CHECK: {{#1 0x.* in main .*select_float_origin.cc:}}[[@LINE-6]]
   return 0;
@@ -27,7 +27,7 @@ int main(int argc, char **argv) {
   // CHECK: WARNING: MemorySanitizer: use-of-uninitialized-value
   // CHECK: {{#0 0x.* in main .*use-after-free.cc:}}[[@LINE-3]]
 
-  // CHECK-ORIGINS: Uninitialized value was created by a heap allocation
+  // CHECK-ORIGINS: Uninitialized value was created by a heap deallocation
   // CHECK-ORIGINS: {{#0 0x.* in .*free}}
   // CHECK-ORIGINS: {{#1 0x.* in main .*use-after-free.cc:}}[[@LINE-9]]
   return 0;