scudo: Interleave odd and even tags for adjacent blocks.
This guarantees that we will detect a buffer overflow or underflow
that overwrites an adjacent block.

This spatial guarantee is similar to the temporal guarantee that we
provide for immediate use-after-free.

Enabling odd/even tags involves a tradeoff between use-after-free
detection and buffer overflow detection. Odd/even tags make it more
likely for buffer overflows to be detected by increasing the size of
the guaranteed "red zone" around the allocation, but on the other hand
use-after-free is less likely to be detected because the tag space for
any particular chunk is cut in half. Therefore we introduce a tuning
setting to control whether odd/even tags are enabled.

Differential Revision: https://reviews.llvm.org/D84361
commit b83417aa7e
parent 3ac828b8f7
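To make the spatial guarantee above concrete, here is a brief illustrative sketch (not part of the patch; it assumes an MTE-capable device running a Scudo-backed malloc with odd/even tags enabled):

#include <cstdlib>

int main() {
  // Hypothetical example: two allocations from the same size class that
  // happen to land in adjacent primary blocks. With odd/even tags, the tags
  // of adjacent blocks always differ in parity, so an overflow from one
  // block into the other can never carry a matching tag.
  char *A = static_cast<char *>(malloc(64));
  char *B = static_cast<char *>(malloc(64));
  // A write far enough past the end of A to reach B's data would now raise a
  // tag-check fault instead of silently corrupting B, e.g.:
  //   memset(A, 0xff, 256);
  free(B);
  free(A);
  return 0;
}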
compiler-rt/lib/scudo/standalone/combined.h

@@ -41,7 +41,7 @@ extern "C" size_t android_unsafe_frame_pointer_chase(scudo::uptr *buf,
 
 namespace scudo {
 
-enum class Option { ReleaseInterval };
+enum class Option { ReleaseInterval, MemtagTuning };
 
 template <class Params, void (*PostInitCallback)(void) = EmptyCallback>
 class Allocator {
@@ -154,6 +154,7 @@ public:
     Options.DeallocTypeMismatch = getFlags()->dealloc_type_mismatch;
     Options.DeleteSizeMismatch = getFlags()->delete_size_mismatch;
     Options.TrackAllocationStacks = false;
+    Options.UseOddEvenTags = true;
     Options.QuarantineMaxChunkSize =
         static_cast<u32>(getFlags()->quarantine_max_chunk_size);
 
@@ -251,6 +252,19 @@ public:
 #endif
   }
 
+  uptr computeOddEvenMaskForPointerMaybe(uptr Ptr, uptr Size) {
+    if (!Options.UseOddEvenTags)
+      return 0;
+
+    // If a chunk's tag is odd, we want the tags of the surrounding blocks to be
+    // even, and vice versa. Blocks are laid out Size bytes apart, and adding
+    // Size to Ptr will flip the least significant set bit of Size in Ptr, so
+    // that bit will have the pattern 010101... for consecutive blocks, which we
+    // can use to determine which tag mask to use.
+    return (Ptr & (1ULL << getLeastSignificantSetBitIndex(Size))) ? 0xaaaa
+                                                                  : 0x5555;
+  }
+
   NOINLINE void *allocate(uptr Size, Chunk::Origin Origin,
                           uptr Alignment = MinAlignment,
                           bool ZeroContents = false) {
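The comment in computeOddEvenMaskForPointerMaybe() is the heart of the change; a small standalone sketch (illustrative only, with __builtin_ctzll standing in for scudo's getLeastSignificantSetBitIndex() and hypothetical addresses) shows the mask alternating for consecutive blocks:

#include <cstdint>
#include <cstdio>

// Mirrors the expression above. Bits set in the returned mask are tag values
// that setRandomTag() must not pick: 0x5555 excludes the even tags (so the
// block gets an odd tag), 0xaaaa excludes the odd ones.
static uint64_t oddEvenMask(uint64_t Ptr, uint64_t Size) {
  return (Ptr & (1ULL << __builtin_ctzll(Size))) ? 0xaaaa : 0x5555;
}

int main() {
  const uint64_t Base = 0x1000, Size = 0x60; // hypothetical region base / class size
  for (int I = 0; I < 4; ++I) {
    const uint64_t Block = Base + I * Size;
    // Adding Size toggles bit ctz(Size) of the address each time, so the
    // output alternates: 0x5555, 0xaaaa, 0x5555, 0xaaaa.
    printf("block %#llx -> exclude mask %#llx\n",
           static_cast<unsigned long long>(Block),
           static_cast<unsigned long long>(oddEvenMask(Block, Size)));
  }
  return 0;
}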
@@ -350,7 +364,8 @@ public:
       if (UNLIKELY(useMemoryTagging())) {
         uptr PrevUserPtr;
         Chunk::UnpackedHeader Header;
-        const uptr BlockEnd = BlockUptr + PrimaryT::getSizeByClassId(ClassId);
+        const uptr BlockSize = PrimaryT::getSizeByClassId(ClassId);
+        const uptr BlockEnd = BlockUptr + BlockSize;
         // If possible, try to reuse the UAF tag that was set by deallocate().
         // For simplicity, only reuse tags if we have the same start address as
         // the previous allocation. This handles the majority of cases since
@@ -400,7 +415,9 @@ public:
             memset(TaggedPtr, 0, archMemoryTagGranuleSize());
         }
       } else {
-        TaggedPtr = prepareTaggedChunk(Ptr, Size, BlockEnd);
+        const uptr OddEvenMask =
+            computeOddEvenMaskForPointerMaybe(BlockUptr, BlockSize);
+        TaggedPtr = prepareTaggedChunk(Ptr, Size, OddEvenMask, BlockEnd);
       }
       storeAllocationStackMaybe(Ptr);
     } else if (UNLIKELY(FillContents != NoFill)) {
@@ -680,6 +697,23 @@ public:
       Secondary.setReleaseToOsIntervalMs(static_cast<s32>(Value));
       return true;
     }
+    if (O == Option::MemtagTuning) {
+      // Enabling odd/even tags involves a tradeoff between use-after-free
+      // detection and buffer overflow detection. Odd/even tags make it more
+      // likely for buffer overflows to be detected by increasing the size of
+      // the guaranteed "red zone" around the allocation, but on the other hand
+      // use-after-free is less likely to be detected because the tag space for
+      // any particular chunk is cut in half. Therefore we use this tuning
+      // setting to control whether odd/even tags are enabled.
+      if (Value == M_MEMTAG_TUNING_BUFFER_OVERFLOW) {
+        Options.UseOddEvenTags = true;
+        return true;
+      }
+      if (Value == M_MEMTAG_TUNING_UAF) {
+        Options.UseOddEvenTags = false;
+        return true;
+      }
+    }
     return false;
   }
 
@@ -921,6 +955,7 @@ private:
     u8 DeallocTypeMismatch : 1; // dealloc_type_mismatch
     u8 DeleteSizeMismatch : 1;  // delete_size_mismatch
     u8 TrackAllocationStacks : 1;
+    u8 UseOddEvenTags : 1;
     u32 QuarantineMaxChunkSize; // quarantine_max_chunk_size
   } Options;
 
@@ -987,9 +1022,13 @@ private:
     if (UNLIKELY(NewHeader.ClassId && useMemoryTagging())) {
       u8 PrevTag = extractTag(loadTag(reinterpret_cast<uptr>(Ptr)));
       uptr TaggedBegin, TaggedEnd;
+      const uptr OddEvenMask = computeOddEvenMaskForPointerMaybe(
+          reinterpret_cast<uptr>(getBlockBegin(Ptr, &NewHeader)),
+          SizeClassMap::getSizeByClassId(NewHeader.ClassId));
       // Exclude the previous tag so that immediate use after free is detected
       // 100% of the time.
-      setRandomTag(Ptr, Size, 1UL << PrevTag, &TaggedBegin, &TaggedEnd);
+      setRandomTag(Ptr, Size, OddEvenMask | (1UL << PrevTag), &TaggedBegin,
+                   &TaggedEnd);
       storeDeallocationStackMaybe(Ptr, PrevTag);
     }
     // If the quarantine is disabled, the actual size of a chunk is 0 or larger
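A quick back-of-the-envelope sketch of what the combined exclude mask does to the tag space (illustrative numbers only; it assumes MTE's 16 possible 4-bit tags, which is what "cut in half" in the commit message refers to):

#include <cstdint>
#include <cstdio>

int main() {
  const int NumTags = 16;               // MTE tags are 4 bits wide
  const uint64_t PrevTag = 5;           // hypothetical tag of the freed chunk
  const uint64_t OddEvenMask = 0x5555;  // hypothetical mask for this block
  // Bits set in the exclude mask are tags the retag may not use.
  const uint64_t UafOnly = 1ULL << PrevTag;            // odd/even disabled
  const uint64_t WithOddEven = OddEvenMask | UafOnly;  // odd/even enabled
  printf("candidate tags, UAF tuning:      %d\n",
         NumTags - __builtin_popcountll(UafOnly));     // prints 15
  printf("candidate tags, overflow tuning: %d\n",
         NumTags - __builtin_popcountll(WithOddEven)); // prints 7
  return 0;
}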
compiler-rt/lib/scudo/standalone/include/scudo/interface.h

@@ -10,6 +10,7 @@
 #define SCUDO_INTERFACE_H_
 
 #include <stddef.h>
+#include <stdint.h>
 
 extern "C" {
 
@@ -105,6 +106,29 @@ size_t __scudo_get_stack_depot_size();
 const char *__scudo_get_region_info_addr();
 size_t __scudo_get_region_info_size();
 
+#ifndef M_DECAY_TIME
+#define M_DECAY_TIME -100
+#endif
+
+#ifndef M_PURGE
+#define M_PURGE -101
+#endif
+
+// Tune the allocator's choice of memory tags to make it more likely that
+// a certain class of memory errors will be detected. The value argument should
+// be one of the enumerators of the scudo_memtag_tuning enum below.
+#ifndef M_MEMTAG_TUNING
+#define M_MEMTAG_TUNING -102
+#endif
+
+enum scudo_memtag_tuning {
+  // Tune for buffer overflows.
+  M_MEMTAG_TUNING_BUFFER_OVERFLOW,
+
+  // Tune for use-after-free.
+  M_MEMTAG_TUNING_UAF,
+};
+
 } // extern "C"
 
 #endif // SCUDO_INTERFACE_H_
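With the constants above in place, a client selects the tuning at run time through mallopt(); a minimal usage sketch (hypothetical caller code, assuming a Scudo-backed malloc that provides the mallopt wrapper shown at the end of this diff):

#include <malloc.h>

#include "scudo/interface.h" // M_MEMTAG_TUNING and scudo_memtag_tuning

int main() {
  // Favour buffer-overflow detection: keep odd/even tags enabled (this is
  // also the default chosen in initLinkerInitialized()).
  mallopt(M_MEMTAG_TUNING, M_MEMTAG_TUNING_BUFFER_OVERFLOW);

  // Or favour use-after-free detection: disable odd/even tags so each chunk
  // can draw from the full tag space on every (re)tag.
  mallopt(M_MEMTAG_TUNING, M_MEMTAG_TUNING_UAF);
  return 0;
}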
compiler-rt/lib/scudo/standalone/memtag.h

@@ -126,7 +126,8 @@ inline void setRandomTag(void *Ptr, uptr Size, uptr ExcludeMask,
       : "memory");
 }
 
-inline void *prepareTaggedChunk(void *Ptr, uptr Size, uptr BlockEnd) {
+inline void *prepareTaggedChunk(void *Ptr, uptr Size, uptr ExcludeMask,
+                                uptr BlockEnd) {
   // Prepare the granule before the chunk to store the chunk header by setting
   // its tag to 0. Normally its tag will already be 0, but in the case where a
   // chunk holding a low alignment allocation is reused for a higher alignment
@@ -138,7 +139,7 @@ inline void *prepareTaggedChunk(void *Ptr, uptr Size, uptr BlockEnd) {
       : "memory");
 
   uptr TaggedBegin, TaggedEnd;
-  setRandomTag(Ptr, Size, 0, &TaggedBegin, &TaggedEnd);
+  setRandomTag(Ptr, Size, ExcludeMask, &TaggedBegin, &TaggedEnd);
 
   // Finally, set the tag of the granule past the end of the allocation to 0,
   // to catch linear overflows even if a previous larger allocation used the
@@ -235,9 +236,11 @@ inline void setRandomTag(void *Ptr, uptr Size, uptr ExcludeMask,
   UNREACHABLE("memory tagging not supported");
 }
 
-inline void *prepareTaggedChunk(void *Ptr, uptr Size, uptr BlockEnd) {
+inline void *prepareTaggedChunk(void *Ptr, uptr Size, uptr ExcludeMask,
+                                uptr BlockEnd) {
   (void)Ptr;
   (void)Size;
+  (void)ExcludeMask;
   (void)BlockEnd;
   UNREACHABLE("memory tagging not supported");
 }
compiler-rt/lib/scudo/standalone/tests/combined_test.cpp

@@ -469,3 +469,42 @@ TEST(ScudoCombinedTest, FullRegion) {
   }
   EXPECT_EQ(FailedAllocationsCount, 0U);
 }
+
+TEST(ScudoCombinedTest, OddEven) {
+  using AllocatorT = TestAllocator<scudo::AndroidConfig>;
+  using SizeClassMap = AllocatorT::PrimaryT::SizeClassMap;
+  auto Allocator = std::make_unique<AllocatorT>();
+
+  if (!Allocator->useMemoryTagging())
+    return;
+
+  auto CheckOddEven = [](scudo::uptr P1, scudo::uptr P2) {
+    scudo::uptr Tag1 = scudo::extractTag(scudo::loadTag(P1));
+    scudo::uptr Tag2 = scudo::extractTag(scudo::loadTag(P2));
+    EXPECT_NE(Tag1 % 2, Tag2 % 2);
+  };
+
+  for (scudo::uptr ClassId = 1U; ClassId <= SizeClassMap::LargestClassId;
+       ClassId++) {
+    const scudo::uptr Size = SizeClassMap::getSizeByClassId(ClassId);
+
+    std::set<scudo::uptr> Ptrs;
+    bool Found = false;
+    for (unsigned I = 0; I != 65536; ++I) {
+      scudo::uptr P = scudo::untagPointer(reinterpret_cast<scudo::uptr>(
+          Allocator->allocate(Size - scudo::Chunk::getHeaderSize(), Origin)));
+      if (Ptrs.count(P - Size)) {
+        Found = true;
+        CheckOddEven(P, P - Size);
+        break;
+      }
+      if (Ptrs.count(P + Size)) {
+        Found = true;
+        CheckOddEven(P, P + Size);
+        break;
+      }
+      Ptrs.insert(P);
+    }
+    EXPECT_TRUE(Found);
+  }
+}
compiler-rt/lib/scudo/standalone/tests/wrappers_c_test.cpp

@@ -6,6 +6,7 @@
 //
 //===----------------------------------------------------------------------===//
 
+#include "scudo/interface.h"
 #include "tests/scudo_unit_test.h"
 
 #include <errno.h>
@@ -188,14 +189,6 @@ TEST(ScudoWrappersCTest, Realloc) {
   }
 }
 
-#ifndef M_DECAY_TIME
-#define M_DECAY_TIME -100
-#endif
-
-#ifndef M_PURGE
-#define M_PURGE -101
-#endif
-
 #if !SCUDO_FUCHSIA
 TEST(ScudoWrappersCTest, MallOpt) {
   errno = 0;
compiler-rt/lib/scudo/standalone/wrappers_c.h

@@ -41,12 +41,4 @@ struct __scudo_mallinfo {
 #define SCUDO_MALLINFO __scudo_mallinfo
 #endif
 
-#ifndef M_DECAY_TIME
-#define M_DECAY_TIME -100
-#endif
-
-#ifndef M_PURGE
-#define M_PURGE -101
-#endif
-
 #endif // SCUDO_WRAPPERS_C_H_
compiler-rt/lib/scudo/standalone/wrappers_c.inc

@@ -173,6 +173,9 @@ INTERFACE WEAK int SCUDO_PREFIX(mallopt)(int param, UNUSED int value) {
   } else if (param == M_PURGE) {
     SCUDO_ALLOCATOR.releaseToOS();
     return 1;
+  } else if (param == M_MEMTAG_TUNING) {
+    return SCUDO_ALLOCATOR.setOption(scudo::Option::MemtagTuning,
+                                     static_cast<scudo::sptr>(value));
   }
   return 0;
 }