mirror of
https://github.com/RPCSX/llvm.git
synced 2024-12-22 20:18:38 +00:00
Add red zones to BumpPtrAllocator under ASan
To help catch buffer overruns, this patch changes BumpPtrAllocator to insert an extra unused byte between allocations when building with ASan. SpecificBumpPtrAllocator opts out of this behavior, since it needs to destroy its items later by walking the allocated memory. Reviewed by Pete Cooper. git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@297310 91177308-0d34-0410-b5e6-96231b3b80d8
This commit is contained in:
parent
d52e1fa55c
commit
7b285dee52
@@ -155,7 +155,7 @@ public:
|
|||||||
BumpPtrAllocatorImpl(BumpPtrAllocatorImpl &&Old)
|
BumpPtrAllocatorImpl(BumpPtrAllocatorImpl &&Old)
|
||||||
: CurPtr(Old.CurPtr), End(Old.End), Slabs(std::move(Old.Slabs)),
|
: CurPtr(Old.CurPtr), End(Old.End), Slabs(std::move(Old.Slabs)),
|
||||||
CustomSizedSlabs(std::move(Old.CustomSizedSlabs)),
|
CustomSizedSlabs(std::move(Old.CustomSizedSlabs)),
|
||||||
BytesAllocated(Old.BytesAllocated),
|
BytesAllocated(Old.BytesAllocated), RedZoneSize(Old.RedZoneSize),
|
||||||
Allocator(std::move(Old.Allocator)) {
|
Allocator(std::move(Old.Allocator)) {
|
||||||
Old.CurPtr = Old.End = nullptr;
|
Old.CurPtr = Old.End = nullptr;
|
||||||
Old.BytesAllocated = 0;
|
Old.BytesAllocated = 0;
|
||||||
@@ -175,6 +175,7 @@ public:
|
|||||||
CurPtr = RHS.CurPtr;
|
CurPtr = RHS.CurPtr;
|
||||||
End = RHS.End;
|
End = RHS.End;
|
||||||
BytesAllocated = RHS.BytesAllocated;
|
BytesAllocated = RHS.BytesAllocated;
|
||||||
|
RedZoneSize = RHS.RedZoneSize;
|
||||||
Slabs = std::move(RHS.Slabs);
|
Slabs = std::move(RHS.Slabs);
|
||||||
CustomSizedSlabs = std::move(RHS.CustomSizedSlabs);
|
CustomSizedSlabs = std::move(RHS.CustomSizedSlabs);
|
||||||
Allocator = std::move(RHS.Allocator);
|
Allocator = std::move(RHS.Allocator);
|
||||||
@@ -217,10 +218,16 @@ public:
|
|||||||
size_t Adjustment = alignmentAdjustment(CurPtr, Alignment);
|
size_t Adjustment = alignmentAdjustment(CurPtr, Alignment);
|
||||||
assert(Adjustment + Size >= Size && "Adjustment + Size must not overflow");
|
assert(Adjustment + Size >= Size && "Adjustment + Size must not overflow");
|
||||||
|
|
||||||
|
size_t SizeToAllocate = Size;
|
||||||
|
#if LLVM_ADDRESS_SANITIZER_BUILD
|
||||||
|
// Add trailing bytes as a "red zone" under ASan.
|
||||||
|
SizeToAllocate += RedZoneSize;
|
||||||
|
#endif
|
||||||
|
|
||||||
// Check if we have enough space.
|
// Check if we have enough space.
|
||||||
if (Adjustment + Size <= size_t(End - CurPtr)) {
|
if (Adjustment + SizeToAllocate <= size_t(End - CurPtr)) {
|
||||||
char *AlignedPtr = CurPtr + Adjustment;
|
char *AlignedPtr = CurPtr + Adjustment;
|
||||||
CurPtr = AlignedPtr + Size;
|
CurPtr = AlignedPtr + SizeToAllocate;
|
||||||
// Update the allocation point of this memory block in MemorySanitizer.
|
// Update the allocation point of this memory block in MemorySanitizer.
|
||||||
// Without this, MemorySanitizer messages for values originated from here
|
// Without this, MemorySanitizer messages for values originated from here
|
||||||
// will point to the allocation of the entire slab.
|
// will point to the allocation of the entire slab.
|
||||||
@@ -231,7 +238,7 @@ public:
|
|||||||
}
|
}
|
||||||
|
|
||||||
// If Size is really big, allocate a separate slab for it.
|
// If Size is really big, allocate a separate slab for it.
|
||||||
size_t PaddedSize = Size + Alignment - 1;
|
size_t PaddedSize = SizeToAllocate + Alignment - 1;
|
||||||
if (PaddedSize > SizeThreshold) {
|
if (PaddedSize > SizeThreshold) {
|
||||||
void *NewSlab = Allocator.Allocate(PaddedSize, 0);
|
void *NewSlab = Allocator.Allocate(PaddedSize, 0);
|
||||||
// We own the new slab and don't want anyone reading anything other than
|
// We own the new slab and don't want anyone reading anything other than
|
||||||
@@ -250,10 +257,10 @@ public:
|
|||||||
// Otherwise, start a new slab and try again.
|
// Otherwise, start a new slab and try again.
|
||||||
StartNewSlab();
|
StartNewSlab();
|
||||||
uintptr_t AlignedAddr = alignAddr(CurPtr, Alignment);
|
uintptr_t AlignedAddr = alignAddr(CurPtr, Alignment);
|
||||||
assert(AlignedAddr + Size <= (uintptr_t)End &&
|
assert(AlignedAddr + SizeToAllocate <= (uintptr_t)End &&
|
||||||
"Unable to allocate memory!");
|
"Unable to allocate memory!");
|
||||||
char *AlignedPtr = (char*)AlignedAddr;
|
char *AlignedPtr = (char*)AlignedAddr;
|
||||||
CurPtr = AlignedPtr + Size;
|
CurPtr = AlignedPtr + SizeToAllocate;
|
||||||
__msan_allocated_memory(AlignedPtr, Size);
|
__msan_allocated_memory(AlignedPtr, Size);
|
||||||
__asan_unpoison_memory_region(AlignedPtr, Size);
|
__asan_unpoison_memory_region(AlignedPtr, Size);
|
||||||
return AlignedPtr;
|
return AlignedPtr;
|
||||||
@@ -282,6 +289,10 @@ public:
|
|||||||
|
|
||||||
size_t getBytesAllocated() const { return BytesAllocated; }
|
size_t getBytesAllocated() const { return BytesAllocated; }
|
||||||
|
|
||||||
|
void setRedZoneSize(size_t NewSize) {
|
||||||
|
RedZoneSize = NewSize;
|
||||||
|
}
|
||||||
|
|
||||||
void PrintStats() const {
|
void PrintStats() const {
|
||||||
detail::printBumpPtrAllocatorStats(Slabs.size(), BytesAllocated,
|
detail::printBumpPtrAllocatorStats(Slabs.size(), BytesAllocated,
|
||||||
getTotalMemory());
|
getTotalMemory());
|
||||||
@@ -307,6 +318,10 @@ private:
|
|||||||
/// Used so that we can compute how much space was wasted.
|
/// Used so that we can compute how much space was wasted.
|
||||||
size_t BytesAllocated = 0;
|
size_t BytesAllocated = 0;
|
||||||
|
|
||||||
|
/// \brief The number of bytes to put between allocations when running under
|
||||||
|
/// a sanitizer.
|
||||||
|
size_t RedZoneSize = 1;
|
||||||
|
|
||||||
/// \brief The allocator instance we use to get slabs of memory.
|
/// \brief The allocator instance we use to get slabs of memory.
|
||||||
AllocatorT Allocator;
|
AllocatorT Allocator;
|
||||||
|
|
||||||
@@ -368,7 +383,11 @@ template <typename T> class SpecificBumpPtrAllocator {
|
|||||||
BumpPtrAllocator Allocator;
|
BumpPtrAllocator Allocator;
|
||||||
|
|
||||||
public:
|
public:
|
||||||
SpecificBumpPtrAllocator() = default;
|
SpecificBumpPtrAllocator() {
|
||||||
|
// Because SpecificBumpPtrAllocator walks the memory to call destructors,
|
||||||
|
// it can't have red zones between allocations.
|
||||||
|
Allocator.setRedZoneSize(0);
|
||||||
|
}
|
||||||
SpecificBumpPtrAllocator(SpecificBumpPtrAllocator &&Old)
|
SpecificBumpPtrAllocator(SpecificBumpPtrAllocator &&Old)
|
||||||
: Allocator(std::move(Old.Allocator)) {}
|
: Allocator(std::move(Old.Allocator)) {}
|
||||||
~SpecificBumpPtrAllocator() { DestroyAll(); }
|
~SpecificBumpPtrAllocator() { DestroyAll(); }
|
||||||
|
Loading…
Reference in New Issue
Block a user