[lsan] Implement __lsan_ignore_object().

Leak annotation similar to HeapChecker's IgnoreObject().

llvm-svn: 183412

commit ecc4f5ba8e (parent e674320ade)
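
As a quick orientation (illustrative, not part of the commit itself): the annotation marks a single heap block as an intentional leak, much like HeapChecker's IgnoreObject(). A minimal client-side sketch, assuming only the __lsan_ignore_object() declaration and header path added below:

// Illustrative client program; only __lsan_ignore_object() and the
// "sanitizer/lsan_interface.h" header come from this commit.
#include <stdlib.h>

#include "sanitizer/lsan_interface.h"

int main() {
  void *intentional = malloc(64);     // Never freed, by design.
  __lsan_ignore_object(intentional);  // Tag the block as a non-leak.
  return 0;  // At exit, LSan's report omits this 64-byte block.
}

With verbosity=2 in LSAN_OPTIONS, such a run also prints the "ignoring heap object at ..." message added further down in this commit.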
@@ -23,7 +23,8 @@ extern "C" {
   // be treated as non-leaks. Disable/enable pairs can be nested.
   void __lsan_disable();
   void __lsan_enable();

+  // The heap object into which p points will be treated as a non-leak.
+  void __lsan_ignore_object(const void *p);
 #ifdef __cplusplus
 }  // extern "C"
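
For contrast with the pre-existing region-based API declared above, a rough sketch (the function name is invented for illustration): __lsan_disable()/__lsan_enable() hide every allocation made between them and may nest, whereas the new call targets one object.

#include <stdlib.h>

#include "sanitizer/lsan_interface.h"

void hypothetical_warmup() {
  __lsan_disable();  // Hide all allocations from here on...
  __lsan_disable();  // ...and, per the comment above, pairs can nest.
  malloc(128);       // Not reported even though it is never freed.
  __lsan_enable();
  __lsan_enable();   // Outermost enable re-arms leak reporting.
}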
@@ -792,6 +792,20 @@ template void ForEachChunk<MarkIndirectlyLeakedCb>(
 template void ForEachChunk<CollectSuppressedCb>(
     CollectSuppressedCb const &callback);
 #endif  // CAN_SANITIZE_LEAKS

+IgnoreObjectResult IgnoreObjectLocked(const void *p) {
+  uptr addr = reinterpret_cast<uptr>(p);
+  __asan::AsanChunk *m = __asan::GetAsanChunkByAddr(addr);
+  if (!m) return kIgnoreObjectInvalid;
+  if ((m->chunk_state == __asan::CHUNK_ALLOCATED) && m->AddrIsInside(addr)) {
+    if (m->lsan_tag == kSuppressed)
+      return kIgnoreObjectAlreadyIgnored;
+    m->lsan_tag = __lsan::kSuppressed;
+    return kIgnoreObjectSuccess;
+  } else {
+    return kIgnoreObjectInvalid;
+  }
+}
 }  // namespace __lsan

 extern "C" {
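
Note the division of labor: each tool implements IgnoreObjectLocked() over its own heap metadata. The ASan-side version above resolves the address with __asan::GetAsanChunkByAddr() and tags the AsanChunk, while the stand-alone LSan allocator (in a later hunk) resolves it with GetBlockBegin() and tags its ChunkMetadata.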
compiler-rt/lib/lsan/lit_tests/TestCases/ignore_object.cc (new file, 30 lines)
@@ -0,0 +1,30 @@
+// Test for __lsan_ignore_object().
+// RUN: LSAN_BASE="report_blocks=1:use_registers=0:use_stacks=0:use_globals=0:use_tls=0:verbosity=2"
+// RUN: %clangxx_lsan -I %p/../../../../include %s -o %t
+// RUN: LSAN_OPTIONS=$LSAN_BASE %t 2>&1 | FileCheck %s
+
+#include <stdio.h>
+#include <stdlib.h>
+
+#include "sanitizer/lsan_interface.h"
+
+int main() {
+  {
+    // The first malloc call can cause an allocation in libdl. Ignore it here
+    // so it doesn't show up in our output.
+    __lsan::ScopedDisabler d;
+    malloc(1);
+  }
+  // Explicitly ignored block.
+  void **p = new void *;
+  // Transitively ignored block.
+  *p = malloc(666);
+  // Non-ignored block.
+  volatile void *q = malloc(1337);
+  fprintf(stderr, "Test alloc: %p.\n", p);
+  __lsan_ignore_object(p);
+  return 0;
+}
+// CHECK: Test alloc: [[ADDR:.*]].
+// CHECK: ignoring heap object at [[ADDR]]
+// CHECK: SUMMARY: LeakSanitizer: 1337 byte(s) leaked in 1 allocation(s)
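
The 666-byte block is never passed to __lsan_ignore_object() directly; it is only reachable through the explicitly ignored block, so it is suppressed transitively, and the summary counts only the un-ignored 1337-byte allocation. The next hunk adds a second new test covering incorrect use.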
@@ -0,0 +1,22 @@
+// Test for incorrect use of __lsan_ignore_object().
+// RUN: LSAN_BASE="verbosity=1"
+// RUN: %clangxx_lsan -I %p/../../../../include %s -o %t
+// RUN: LSAN_OPTIONS=$LSAN_BASE %t 2>&1 | FileCheck %s
+
+#include <stdio.h>
+#include <stdlib.h>
+
+#include "sanitizer/lsan_interface.h"
+
+int main() {
+  void *p = malloc(1337);
+  fprintf(stderr, "Test alloc: %p.\n", p);
+  __lsan_ignore_object(p);
+  __lsan_ignore_object(p);
+  free(p);
+  __lsan_ignore_object(p);
+  return 0;
+}
+// CHECK: Test alloc: [[ADDR:.*]].
+// CHECK: heap object at [[ADDR]] is already being ignored
+// CHECK: no heap object found at [[ADDR]]
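
These two diagnostics exercise the non-success values of IgnoreObjectResult introduced below: the repeated call on a live block yields kIgnoreObjectAlreadyIgnored, and the call on freed memory yields kIgnoreObjectInvalid.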
@@ -190,6 +190,21 @@ template void ForEachChunk<MarkIndirectlyLeakedCb>(
     MarkIndirectlyLeakedCb const &callback);
 template void ForEachChunk<CollectSuppressedCb>(
     CollectSuppressedCb const &callback);

+IgnoreObjectResult IgnoreObjectLocked(const void *p) {
+  void *chunk = allocator.GetBlockBegin(p);
+  if (!chunk || p < chunk) return kIgnoreObjectInvalid;
+  ChunkMetadata *m = Metadata(chunk);
+  CHECK(m);
+  if (m->allocated && (uptr)p < (uptr)chunk + m->requested_size) {
+    if (m->tag == kSuppressed)
+      return kIgnoreObjectAlreadyIgnored;
+    m->tag = kSuppressed;
+    return kIgnoreObjectSuccess;
+  } else {
+    return kIgnoreObjectInvalid;
+  }
+}
 }  // namespace __lsan

 extern "C" {
@@ -23,6 +23,9 @@
 #if CAN_SANITIZE_LEAKS
 namespace __lsan {

+// This mutex is used to prevent races between DoLeakCheck and SuppressObject.
+BlockingMutex global_mutex(LINKER_INITIALIZED);
+
 Flags lsan_flags;

 static void InitializeFlags() {
@@ -37,6 +40,7 @@ static void InitializeFlags() {
   f->use_stacks = true;
   f->use_tls = true;
   f->use_unaligned = false;
+  f->verbosity = 0;
   f->log_pointers = false;
   f->log_threads = false;
@@ -52,6 +56,7 @@ static void InitializeFlags() {
   CHECK_GE(&f->resolution, 0);
   ParseFlag(options, &f->max_leaks, "max_leaks");
   CHECK_GE(&f->max_leaks, 0);
+  ParseFlag(options, &f->verbosity, "verbosity");
   ParseFlag(options, &f->log_pointers, "log_pointers");
   ParseFlag(options, &f->log_threads, "log_threads");
   ParseFlag(options, &f->exitcode, "exitcode");
@@ -311,12 +316,13 @@ static void DoLeakCheckCallback(const SuspendedThreadsList &suspended_threads,
 }

 void DoLeakCheck() {
+  BlockingMutexLock l(&global_mutex);
   static bool already_done;
+  CHECK(!already_done);
+  already_done = true;
   LeakCheckResult result = kFatalError;
   LockThreadRegistry();
   LockAllocator();
-  CHECK(!already_done);
-  already_done = true;
   StopTheWorld(DoLeakCheckCallback, &result);
   UnlockAllocator();
   UnlockThreadRegistry();
@@ -394,4 +400,22 @@ void LeakReport::PrintSummary() {
 }

 }  // namespace __lsan
+
+using namespace __lsan;  // NOLINT
+
+extern "C" {
+void __lsan_ignore_object(const void *p) {
+  // Cannot use PointsIntoChunk or LsanMetadata here, since the allocator is not
+  // locked.
+  BlockingMutexLock l(&global_mutex);
+  IgnoreObjectResult res = IgnoreObjectLocked(p);
+  if (res == kIgnoreObjectInvalid && flags()->verbosity >= 1)
+    Report("__lsan_ignore_object(): no heap object found at %p\n", p);
+  if (res == kIgnoreObjectAlreadyIgnored && flags()->verbosity >= 1)
+    Report("__lsan_ignore_object(): "
+           "heap object at %p is already being ignored\n", p);
+  if (res == kIgnoreObjectSuccess && flags()->verbosity >= 2)
+    Report("__lsan_ignore_object(): ignoring heap object at %p\n", p);
+}
+}  // extern "C"
 #endif  // CAN_SANITIZE_LEAKS
@@ -64,6 +64,9 @@ struct Flags {
   // Consider unaligned pointers valid.
   bool use_unaligned;

+  // User-visible verbosity.
+  int verbosity;
+
   // Debug logging.
   bool log_pointers;
   bool log_threads;
@@ -153,6 +156,12 @@ class CollectSuppressedCb {
   InternalVector<uptr> *frontier_;
 };

+enum IgnoreObjectResult {
+  kIgnoreObjectSuccess,
+  kIgnoreObjectAlreadyIgnored,
+  kIgnoreObjectInvalid
+};
+
 // The following must be implemented in the parent tool.

 template<typename Callable> void ForEachChunk(Callable const &callback);
@@ -172,6 +181,8 @@ bool GetThreadRangesLocked(uptr os_id, uptr *stack_begin, uptr *stack_end,
 void *PointsIntoChunk(void *p);
 // Return address of user-visible chunk contained in this allocator chunk.
 void *GetUserBegin(void *p);
+// Helper for __lsan_ignore_object().
+IgnoreObjectResult IgnoreObjectLocked(const void *p);
 // Wrapper for chunk metadata operations.
 class LsanMetadata {
  public:
@@ -344,15 +344,15 @@ class SizeClassAllocator64 {
     region->n_freed += b->count;
   }

-  static bool PointerIsMine(void *p) {
+  static bool PointerIsMine(const void *p) {
     return reinterpret_cast<uptr>(p) / kSpaceSize == kSpaceBeg / kSpaceSize;
   }

-  static uptr GetSizeClass(void *p) {
+  static uptr GetSizeClass(const void *p) {
     return (reinterpret_cast<uptr>(p) / kRegionSize) % kNumClassesRounded;
   }

-  void *GetBlockBegin(void *p) {
+  void *GetBlockBegin(const void *p) {
     uptr class_id = GetSizeClass(p);
     uptr size = SizeClassMap::Size(class_id);
     if (!size) return 0;
@@ -671,15 +671,15 @@ class SizeClassAllocator32 {
     sci->free_list.push_front(b);
   }

-  bool PointerIsMine(void *p) {
+  bool PointerIsMine(const void *p) {
     return GetSizeClass(p) != 0;
   }

-  uptr GetSizeClass(void *p) {
+  uptr GetSizeClass(const void *p) {
     return possible_regions[ComputeRegionId(reinterpret_cast<uptr>(p))];
   }

-  void *GetBlockBegin(void *p) {
+  void *GetBlockBegin(const void *p) {
     CHECK(PointerIsMine(p));
     uptr mem = reinterpret_cast<uptr>(p);
     uptr beg = ComputeRegionBeg(mem);
@@ -1006,7 +1006,7 @@ class LargeMmapAllocator {
     return res;
   }

-  bool PointerIsMine(void *p) {
+  bool PointerIsMine(const void *p) {
     return GetBlockBegin(p) != 0;
   }

@@ -1021,7 +1021,7 @@ class LargeMmapAllocator {
     return GetHeader(p) + 1;
   }

-  void *GetBlockBegin(void *ptr) {
+  void *GetBlockBegin(const void *ptr) {
     uptr p = reinterpret_cast<uptr>(ptr);
     SpinMutexLock l(&mutex_);
     uptr nearest_chunk = 0;
@@ -1231,7 +1231,7 @@ class CombinedAllocator {
     return secondary_.GetMetaData(p);
   }

-  void *GetBlockBegin(void *p) {
+  void *GetBlockBegin(const void *p) {
     if (primary_.PointerIsMine(p))
       return primary_.GetBlockBegin(p);
     return secondary_.GetBlockBegin(p);
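
The const qualifiers added in these allocator hunks are what allow IgnoreObjectLocked() to pass its const void *p straight into GetBlockBegin() and PointerIsMine() without a const_cast.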