[libFuzzer] apply changes lost during the migration to compiler-rt

llvm-svn: 311420
Kostya Serebryany 2017-08-22 01:28:32 +00:00
parent e4c5322992
commit f65cf64fa2
6 changed files with 108 additions and 32 deletions

CMakeLists.txt

@@ -1,4 +1,5 @@
set(LIBFUZZER_SOURCES
FuzzerClangCounters.cpp
FuzzerCrossOver.cpp
FuzzerDriver.cpp
FuzzerExtFunctionsDlsym.cpp

FuzzerClangCounters.cpp

@@ -0,0 +1,49 @@
//===- FuzzerClangCounters.cpp - Coverage counters from Clang -------------===//
//
// The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
// Coverage counters from Clang's SourceBasedCodeCoverage.
//===----------------------------------------------------------------------===//
// Support for SourceBasedCodeCoverage is experimental:
// * Works only for the main binary, not DSOs yet.
// * Works only on Linux.
// * Does not implement print_pcs/print_coverage yet.
// * Is not fully evaluated for performance and sensitivity.
// We expect a large performance drop due to 64-bit counters,
// and *maybe* better sensitivity due to more fine-grained counters.
// Preliminary comparison on a single benchmark (RE2) shows
// a bit worse sensitivity though.
#include "FuzzerDefs.h"
#if LIBFUZZER_LINUX
__attribute__((weak)) extern uint64_t __start___llvm_prf_cnts;
__attribute__((weak)) extern uint64_t __stop___llvm_prf_cnts;
namespace fuzzer {
uint64_t *ClangCountersBegin() { return &__start___llvm_prf_cnts; }
uint64_t *ClangCountersEnd() { return &__stop___llvm_prf_cnts; }
} // namespace fuzzer
#else
// TODO: Implement on Mac (if the data shows it's worth it).
//__attribute__((visibility("hidden")))
//extern uint64_t CountersStart __asm("section$start$__DATA$__llvm_prf_cnts");
//__attribute__((visibility("hidden")))
//extern uint64_t CountersEnd __asm("section$end$__DATA$__llvm_prf_cnts");
namespace fuzzer {
uint64_t *ClangCountersBegin() { return nullptr; }
uint64_t *ClangCountersEnd() { return nullptr; }
} // namespace fuzzer
#endif
namespace fuzzer {
ATTRIBUTE_NO_SANITIZE_ALL
void ClearClangCounters() { // hand-written memset, don't asan-ify.
for (auto P = ClangCountersBegin(); P < ClangCountersEnd(); P++)
*P = 0;
}
} // namespace fuzzer
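
The ClangCountersBegin()/ClangCountersEnd() pair above leans on the
__start_<section>/__stop_<section> symbols that a GNU-compatible linker
synthesizes for any section whose name is a valid C identifier. A minimal
standalone sketch of the same trick, assuming Linux; the section name
my_cnts and the two counters are invented for illustration:

#include <cstdint>
#include <cstdio>

// Place two 64-bit counters into a custom section named "my_cnts".
__attribute__((section("my_cnts"), used)) static uint64_t CounterA = 1;
__attribute__((section("my_cnts"), used)) static uint64_t CounterB = 2;

// Weak declarations, as in FuzzerClangCounters.cpp above: if nothing ends up
// in the section, the symbols stay undefined and the range is empty.
__attribute__((weak)) extern uint64_t __start_my_cnts;
__attribute__((weak)) extern uint64_t __stop_my_cnts;

int main() {
  // Walk the whole section without knowing its size at compile time,
  // the same way ClearClangCounters() walks __llvm_prf_cnts.
  for (uint64_t *P = &__start_my_cnts; P < &__stop_my_cnts; P++)
    std::printf("counter = %llu\n", (unsigned long long)*P);
  return 0;
}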

FuzzerDefs.h

@@ -123,6 +123,10 @@ uint8_t *ExtraCountersBegin();
uint8_t *ExtraCountersEnd();
void ClearExtraCounters();
uint64_t *ClangCountersBegin();
uint64_t *ClangCountersEnd();
void ClearClangCounters();
} // namespace fuzzer
#endif // LLVM_FUZZER_DEFS_H

FuzzerLoop.cpp

@@ -388,11 +388,12 @@ void Fuzzer::ShuffleAndMinimize(UnitVector *InitialCorpus) {
uint8_t dummy;
ExecuteCallback(&dummy, 0);
for (const auto &U : *InitialCorpus) {
for (auto &U : *InitialCorpus) {
RunOne(U.data(), U.size());
CheckExitOnSrcPosOrItem();
TryDetectingAMemoryLeak(U.data(), U.size(),
/*DuringInitialCorpusExecution*/ true);
U.clear();
}
PrintStats("INITED");
if (Corpus.empty()) {

FuzzerTracePC.cpp

@@ -31,6 +31,9 @@ uint8_t __sancov_trace_pc_guard_8bit_counters[fuzzer::TracePC::kNumPCs];
ATTRIBUTE_INTERFACE
uintptr_t __sancov_trace_pc_pcs[fuzzer::TracePC::kNumPCs];
// Used by -fsanitize-coverage=stack-depth to track stack depth
ATTRIBUTE_INTERFACE thread_local uintptr_t __sancov_lowest_stack;
namespace fuzzer {
TracePC TPC;
@@ -126,6 +129,8 @@ void TracePC::PrintModuleInfo() {
_Exit(1);
}
}
if (size_t NumClangCounters = ClangCountersEnd() - ClangCountersBegin())
Printf("INFO: %zd Clang Coverage Counters\n", NumClangCounters);
}
ATTRIBUTE_NO_SANITIZE_ALL
@@ -137,13 +142,12 @@ void TracePC::HandleCallerCallee(uintptr_t Caller, uintptr_t Callee) {
}
void TracePC::UpdateObservedPCs() {
auto Observe = [&](uintptr_t PC) {
bool Inserted = ObservedPCs.insert(PC).second;
if (Inserted && DoPrintNewPCs)
PrintPC("\tNEW_PC: %p %F %L\n", "\tNEW_PC: %p\n", PC + 1);
};
if (NumPCsInPCTables) {
auto Observe = [&](uintptr_t PC) {
bool Inserted = ObservedPCs.insert(PC).second;
if (Inserted && DoPrintNewPCs)
PrintPC("\tNEW_PC: %p %F %L\n", "\tNEW_PC: %p\n", PC + 1);
};
if (NumInline8bitCounters == NumPCsInPCTables) {
for (size_t i = 0; i < NumModulesWithInline8bitCounters; i++) {
uint8_t *Beg = ModuleCounters[i].Start;
@@ -167,6 +171,13 @@ void TracePC::UpdateObservedPCs() {
}
}
}
if (size_t NumClangCounters =
ClangCountersEnd() - ClangCountersBegin()) {
auto P = ClangCountersBegin();
for (size_t Idx = 0; Idx < NumClangCounters; Idx++)
if (P[Idx])
Observe((uintptr_t)Idx);
}
}
inline ALWAYS_INLINE uintptr_t GetPreviousInstructionPc(uintptr_t PC) {
@@ -332,6 +343,14 @@ void TracePC::ClearInlineCounters() {
}
}
void TracePC::RecordInitialStack() {
InitialStack = __sancov_lowest_stack;
}
uintptr_t TracePC::GetMaxStackOffset() const {
return InitialStack - __sancov_lowest_stack; // Stack grows down
}
} // namespace fuzzer
extern "C" {
@@ -342,8 +361,6 @@ void __sanitizer_cov_trace_pc_guard(uint32_t *Guard) {
uint32_t Idx = *Guard;
__sancov_trace_pc_pcs[Idx] = PC;
__sancov_trace_pc_guard_8bit_counters[Idx]++;
// Uncomment the following line to get stack-depth profiling.
// fuzzer::TPC.RecordCurrentStack();
}
// Best-effort support for -fsanitize-coverage=trace-pc, which is available
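
For context on the __sancov_lowest_stack / RecordInitialStack /
GetMaxStackOffset pieces above: -fsanitize-coverage=stack-depth makes the
compiler update the thread-local __sancov_lowest_stack at function entry
whenever the current frame lies below the recorded minimum, and the fuzzer
reports the distance from the initial stack as the maximum depth reached.
A rough hand-written approximation of that behavior follows; the
TRACK_STACK_DEPTH macro and variable names are invented, and the real
updates are emitted by the compiler rather than written by hand:

#include <cstdint>
#include <cstdio>

// Stand-in for the TLS slot declared in FuzzerTracePC.cpp.
thread_local uintptr_t LowestStack = UINTPTR_MAX;

// Roughly what -fsanitize-coverage=stack-depth inserts at every function
// entry: remember the lowest frame address seen (the stack grows down).
#define TRACK_STACK_DEPTH()                                                    \
  do {                                                                         \
    uintptr_t Frame =                                                          \
        reinterpret_cast<uintptr_t>(__builtin_frame_address(0));               \
    if (Frame < LowestStack)                                                   \
      LowestStack = Frame;                                                     \
  } while (0)

static void Recurse(int N) {
  TRACK_STACK_DEPTH();
  if (N > 0)
    Recurse(N - 1);
}

int main() {
  TRACK_STACK_DEPTH();
  uintptr_t InitialStack = LowestStack;  // analogue of RecordInitialStack()
  Recurse(100);
  // Analogue of GetMaxStackOffset(): how far below the initial frame we got.
  std::printf("max stack offset: %zu bytes\n",
              static_cast<size_t>(InitialStack - LowestStack));
  return 0;
}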

FuzzerTracePC.h

@@ -91,6 +91,7 @@ class TracePC {
memset(Counters(), 0, GetNumPCs());
ClearExtraCounters();
ClearInlineCounters();
ClearClangCounters();
}
void ClearInlineCounters();
@@ -119,19 +120,8 @@
return PCs()[Idx];
}
void RecordCurrentStack() {
uintptr_t Stack = GetCurrentStack();
if (Stack < LowestStack)
LowestStack = Stack;
}
void RecordInitialStack() {
InitialStack = GetCurrentStack();
LowestStack = InitialStack;
}
uintptr_t GetCurrentStack() const {
return reinterpret_cast<uintptr_t>(__builtin_frame_address(0));
}
uintptr_t GetMaxStackOffset() const { return InitialStack - LowestStack; }
void RecordInitialStack();
uintptr_t GetMaxStackOffset() const;
template<class CallBack>
void ForEachObservedPC(CallBack CB) {
@@ -166,7 +156,7 @@ private:
std::set<uintptr_t> ObservedPCs;
ValueBitMap ValueProfileMap;
uintptr_t InitialStack, LowestStack; // Assume stack grows down.
uintptr_t InitialStack;
};
template <class Callback>
@@ -196,14 +186,9 @@ void ForEachNonZeroByte(const uint8_t *Begin, const uint8_t *End,
Handle8bitCounter(FirstFeature, P - Begin, V);
}
template <class Callback> // bool Callback(size_t Feature)
ATTRIBUTE_NO_SANITIZE_ADDRESS
__attribute__((noinline))
void TracePC::CollectFeatures(Callback HandleFeature) const {
uint8_t *Counters = this->Counters();
size_t N = GetNumPCs();
auto Handle8bitCounter = [&](size_t FirstFeature,
size_t Idx, uint8_t Counter) {
// Given a non-zero Counter, returns a number in [0,7].
template<class T>
unsigned CounterToFeature(T Counter) {
assert(Counter);
unsigned Bit = 0;
/**/ if (Counter >= 128) Bit = 7;
@@ -213,7 +198,18 @@ void TracePC::CollectFeatures(Callback HandleFeature) const {
else if (Counter >= 4) Bit = 3;
else if (Counter >= 3) Bit = 2;
else if (Counter >= 2) Bit = 1;
HandleFeature(FirstFeature + Idx * 8 + Bit);
return Bit;
}
template <class Callback> // bool Callback(size_t Feature)
ATTRIBUTE_NO_SANITIZE_ADDRESS
__attribute__((noinline))
void TracePC::CollectFeatures(Callback HandleFeature) const {
uint8_t *Counters = this->Counters();
size_t N = GetNumPCs();
auto Handle8bitCounter = [&](size_t FirstFeature,
size_t Idx, uint8_t Counter) {
HandleFeature(FirstFeature + Idx * 8 + CounterToFeature(Counter));
};
size_t FirstFeature = 0;
@@ -231,6 +227,14 @@ void TracePC::CollectFeatures(Callback HandleFeature) const {
}
}
if (size_t NumClangCounters = ClangCountersEnd() - ClangCountersBegin()) {
auto P = ClangCountersBegin();
for (size_t Idx = 0; Idx < NumClangCounters; Idx++)
if (auto Cnt = P[Idx])
HandleFeature(FirstFeature + Idx * 8 + CounterToFeature(Cnt));
FirstFeature += NumClangCounters;
}
ForEachNonZeroByte(ExtraCountersBegin(), ExtraCountersEnd(), FirstFeature,
Handle8bitCounter);
FirstFeature += (ExtraCountersEnd() - ExtraCountersBegin()) * 8;
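
To make the new feature mapping concrete: CounterToFeature collapses a
non-zero execution counter into one of eight roughly logarithmic buckets,
and CollectFeatures then forms a feature ID as FirstFeature + Idx * 8 + Bit,
so every counter slot (inline 8-bit, Clang, or extra) owns eight consecutive
feature IDs. A small self-contained check of that mapping; the 8/16/32
thresholds are not visible in the hunks above and are assumed here:

#include <cassert>
#include <cstddef>
#include <cstdio>

// Same bucketing as CounterToFeature in FuzzerTracePC.h: map a non-zero
// counter to a bit index 0..7 on a roughly logarithmic scale.
template <class T> unsigned CounterToBucket(T Counter) {
  assert(Counter);
  unsigned Bit = 0;
  /**/ if (Counter >= 128) Bit = 7;
  else if (Counter >= 32)  Bit = 6;   // assumed, elided from the diff
  else if (Counter >= 16)  Bit = 5;   // assumed, elided from the diff
  else if (Counter >= 8)   Bit = 4;   // assumed, elided from the diff
  else if (Counter >= 4)   Bit = 3;
  else if (Counter >= 3)   Bit = 2;
  else if (Counter >= 2)   Bit = 1;
  return Bit;
}

int main() {
  // Counter values 1,2,3,4,8,16,32,128 land in buckets 0..7 respectively.
  unsigned Values[] = {1, 2, 3, 4, 8, 16, 32, 128};
  for (unsigned i = 0; i < 8; i++)
    assert(CounterToBucket(Values[i]) == i);

  // Feature-ID layout used by CollectFeatures: counter slot Idx owns the
  // eight IDs starting at FirstFeature + Idx * 8.
  size_t FirstFeature = 0, Idx = 5;
  unsigned Cnt = 9;  // falls into bucket 4
  std::printf("feature = %zu\n", FirstFeature + Idx * 8 + CounterToBucket(Cnt));
  return 0;
}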