[Allocator] Make the underlying allocator a template instead of an abstract interface.

The only user of this functionality is the JIT memory manager, and it is quite happy to have a custom type here. This removes a virtual function call and a lot of unnecessary abstraction from the common case, where this is just a *very* thin veneer around a call to malloc. Hopefully still no functionality changed here. =]

llvm-svn: 206149
commit 0ecbcadf5d
parent 50bea3222c
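In rough terms, the change trades a runtime interface for a compile-time one. The sketch below illustrates only that pattern; it is not the LLVM code itself, and all names in it are made up. With an abstract slab allocator every slab request goes through a virtual call, while with a template parameter the concrete allocator is a plain member and the calls can be inlined.

    #include <cstddef>
    #include <cstdlib>

    // Before: slab allocation goes through an abstract interface, so every
    // Allocate/Deallocate is a virtual call through a reference.
    struct AbstractSlabAllocator {
      virtual ~AbstractSlabAllocator() {}
      virtual void *Allocate(size_t Size) = 0;
      virtual void Deallocate(void *Slab, size_t Size) = 0;
    };

    struct OldStyleBumpAlloc {
      AbstractSlabAllocator &Slabs; // indirection + vtable dispatch per slab
      explicit OldStyleBumpAlloc(AbstractSlabAllocator &S) : Slabs(S) {}
      void *NewSlab(size_t Size) { return Slabs.Allocate(Size); }
    };

    // After: the slab allocator is a template parameter held by value, so the
    // concrete type is known at compile time and the calls can be inlined.
    struct MallocSlabs {
      void *Allocate(size_t Size) { return std::malloc(Size); }
      void Deallocate(void *Slab, size_t) { std::free(Slab); }
    };

    template <typename AllocatorT = MallocSlabs> struct NewStyleBumpAlloc {
      AllocatorT Slabs; // plain member, no virtual call
      void *NewSlab(size_t Size) { return Slabs.Allocate(Size); }
    };

    int main() {
      MallocSlabs MS;
      OldStyleBumpAlloc Old(MS);
      NewStyleBumpAlloc<> New;
      void *A = Old.NewSlab(64);
      void *B = New.NewSlab(64);
      MS.Deallocate(A, 64);
      New.Slabs.Deallocate(B, 64);
      return 0;
    }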
include/llvm/Support/Allocator.h

@@ -54,31 +54,17 @@ public:
   void PrintStats() const {}
 };
 
-/// SlabAllocator - This class can be used to parameterize the underlying
-/// allocation strategy for the bump allocator. In particular, this is used
-/// by the JIT to allocate contiguous swathes of executable memory. The
-/// interface uses MemSlab's instead of void *'s so that the allocator
-/// doesn't have to remember the size of the pointer it allocated.
-class SlabAllocator {
-public:
-  virtual ~SlabAllocator();
-  virtual void *Allocate(size_t Size) = 0;
-  virtual void Deallocate(void *Slab, size_t Size) = 0;
-};
-
 /// MallocSlabAllocator - The default slab allocator for the bump allocator
 /// is an adapter class for MallocAllocator that just forwards the method
 /// calls and translates the arguments.
-class MallocSlabAllocator : public SlabAllocator {
+class MallocSlabAllocator {
   /// Allocator - The underlying allocator that we forward to.
   ///
   MallocAllocator Allocator;
 
 public:
-  MallocSlabAllocator() : Allocator() {}
-  virtual ~MallocSlabAllocator();
-  void *Allocate(size_t Size) override;
-  void Deallocate(void *Slab, size_t Size) override;
+  void *Allocate(size_t Size) { return Allocator.Allocate(Size, 0); }
+  void Deallocate(void *Slab, size_t Size) { Allocator.Deallocate(Slab); }
 };
 
 /// \brief Allocate memory in an ever growing pool, as if by bump-pointer.
@@ -91,7 +77,12 @@ public:
 ///
 /// Note that this also has a threshold for forcing allocations above a certain
 /// size into their own slab.
-template <size_t SlabSize = 4096, size_t SizeThreshold = SlabSize>
+///
+/// The BumpPtrAllocatorImpl template defaults to using a MallocSlabAllocator
+/// object, which wraps malloc, to allocate memory, but it can be changed to
+/// use a custom allocator.
+template <typename AllocatorT = MallocSlabAllocator, size_t SlabSize = 4096,
+          size_t SizeThreshold = SlabSize>
 class BumpPtrAllocatorImpl {
   BumpPtrAllocatorImpl(const BumpPtrAllocatorImpl &) LLVM_DELETED_FUNCTION;
   void operator=(const BumpPtrAllocatorImpl &) LLVM_DELETED_FUNCTION;
@@ -103,11 +94,11 @@ public:
                 "allocation.");
 
   BumpPtrAllocatorImpl()
+      : CurPtr(nullptr), End(nullptr), BytesAllocated(0), Allocator() {}
+  template <typename T>
+  BumpPtrAllocatorImpl(T &&Allocator)
       : CurPtr(nullptr), End(nullptr), BytesAllocated(0),
-        Allocator(DefaultSlabAllocator) {}
-  BumpPtrAllocatorImpl(SlabAllocator &Allocator)
-      : CurPtr(nullptr), End(nullptr), BytesAllocated(0), Allocator(Allocator) {
-  }
+        Allocator(std::forward<T &&>(Allocator)) {}
   ~BumpPtrAllocatorImpl() {
     DeallocateSlabs(Slabs.begin(), Slabs.end());
     DeallocateCustomSizedSlabs();
@@ -237,14 +228,8 @@ private:
   /// Used so that we can compute how much space was wasted.
   size_t BytesAllocated;
 
-  /// \brief The default allocator used if one is not provided.
-  MallocSlabAllocator DefaultSlabAllocator;
-
-  /// \brief The underlying allocator we use to get slabs of memory.
-  ///
-  /// This defaults to MallocSlabAllocator, which wraps malloc, but it could be
-  /// changed to use a custom allocator.
-  SlabAllocator &Allocator;
+  /// \brief The allocator instance we use to get slabs of memory.
+  AllocatorT Allocator;
 
   static size_t computeSlabSize(unsigned SlabIdx) {
     // Scale the actual allocated slab size based on the number of slabs
@@ -313,7 +298,6 @@ template <typename T> class SpecificBumpPtrAllocator {
 
 public:
   SpecificBumpPtrAllocator() : Allocator() {}
-  SpecificBumpPtrAllocator(SlabAllocator &allocator) : Allocator(allocator) {}
 
   ~SpecificBumpPtrAllocator() { DestroyAll(); }
 
@@ -355,10 +339,10 @@ private:
 
 } // end namespace llvm
 
-template <size_t SlabSize, size_t SizeThreshold>
-void *
-operator new(size_t Size,
-             llvm::BumpPtrAllocatorImpl<SlabSize, SizeThreshold> &Allocator) {
+template <typename AllocatorT, size_t SlabSize, size_t SizeThreshold>
+void *operator new(size_t Size,
+                   llvm::BumpPtrAllocatorImpl<AllocatorT, SlabSize,
+                                              SizeThreshold> &Allocator) {
   struct S {
     char c;
     union {
@@ -372,8 +356,9 @@ operator new(size_t Size,
       Size, std::min((size_t)llvm::NextPowerOf2(Size), offsetof(S, x)));
 }
 
-template <size_t SlabSize, size_t SizeThreshold>
-void operator delete(void *,
-                     llvm::BumpPtrAllocatorImpl<SlabSize, SizeThreshold> &) {}
+template <typename AllocatorT, size_t SlabSize, size_t SizeThreshold>
+void operator delete(
+    void *, llvm::BumpPtrAllocatorImpl<AllocatorT, SlabSize, SizeThreshold> &) {
+}
 
 #endif // LLVM_SUPPORT_ALLOCATOR_H
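With the header changes above, a slab allocation strategy is chosen by instantiating the template rather than by passing a SlabAllocator reference. Below is a minimal usage sketch, assuming the BumpPtrAllocatorImpl interface exactly as shown in this diff; CountingSlabAllocator is a hypothetical custom allocator, not part of LLVM.

    #include "llvm/Support/Allocator.h"
    #include <cstddef>
    #include <cstdlib>

    // Hypothetical custom slab allocator; it only has to provide Allocate and
    // Deallocate with the shapes the bump allocator calls.
    class CountingSlabAllocator {
      unsigned NumSlabs;

    public:
      CountingSlabAllocator() : NumSlabs(0) {}
      void *Allocate(size_t Size) {
        ++NumSlabs;
        return std::malloc(Size);
      }
      void Deallocate(void *Slab, size_t) { std::free(Slab); }
      unsigned numSlabs() const { return NumSlabs; }
    };

    void example() {
      // Default parameters: MallocSlabAllocator and 4096-byte slabs.
      llvm::BumpPtrAllocatorImpl<> Simple;
      void *P = Simple.Allocate(16, 8);

      // Custom allocator chosen as a template argument; the forwarding
      // constructor accepts an initial allocator value. Note the allocator is
      // stored by value, so Counted holds its own copy of Counter.
      CountingSlabAllocator Counter;
      llvm::BumpPtrAllocatorImpl<CountingSlabAllocator> Counted(Counter);
      void *Q = Counted.Allocate(16, 8);

      (void)P;
      (void)Q; // slabs are released when the allocators are destroyed
    }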
lib/ExecutionEngine/JIT/JITMemoryManager.cpp

@@ -269,13 +269,12 @@ namespace {
 
   class DefaultJITMemoryManager;
 
-  class JITSlabAllocator : public SlabAllocator {
+  class JITSlabAllocator {
     DefaultJITMemoryManager &JMM;
   public:
     JITSlabAllocator(DefaultJITMemoryManager &jmm) : JMM(jmm) { }
-    virtual ~JITSlabAllocator() { }
-    void *Allocate(size_t Size) override;
-    void Deallocate(void *Slab, size_t Size) override;
+    void *Allocate(size_t Size);
+    void Deallocate(void *Slab, size_t Size);
   };
 
   /// DefaultJITMemoryManager - Manage memory for the JIT code generation.
@@ -313,9 +312,10 @@ namespace {
     // Memory slabs allocated by the JIT. We refer to them as slabs so we don't
     // confuse them with the blocks of memory described above.
     std::vector<sys::MemoryBlock> CodeSlabs;
-    JITSlabAllocator BumpSlabAllocator;
-    BumpPtrAllocatorImpl<DefaultSlabSize, DefaultSizeThreshold> StubAllocator;
-    BumpPtrAllocatorImpl<DefaultSlabSize, DefaultSizeThreshold> DataAllocator;
+    BumpPtrAllocatorImpl<JITSlabAllocator, DefaultSlabSize,
+                         DefaultSizeThreshold> StubAllocator;
+    BumpPtrAllocatorImpl<JITSlabAllocator, DefaultSlabSize,
+                         DefaultSizeThreshold> DataAllocator;
 
     // Circular list of free blocks.
     FreeRangeHeader *FreeMemoryList;
@@ -579,16 +579,13 @@ void JITSlabAllocator::Deallocate(void *Slab, size_t Size) {
 }
 
 DefaultJITMemoryManager::DefaultJITMemoryManager()
-  :
+    :
 #ifdef NDEBUG
-  PoisonMemory(false),
+      PoisonMemory(false),
 #else
-  PoisonMemory(true),
+      PoisonMemory(true),
 #endif
-  LastSlab(0, 0),
-  BumpSlabAllocator(*this),
-  StubAllocator(BumpSlabAllocator),
-  DataAllocator(BumpSlabAllocator) {
+      LastSlab(0, 0), StubAllocator(*this), DataAllocator(*this) {
 
   // Allocate space for code.
   sys::MemoryBlock MemBlock = allocateNewSlab(DefaultCodeSlabSize);
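The new initializer list works because JITSlabAllocator is implicitly constructible from a DefaultJITMemoryManager reference, and BumpPtrAllocatorImpl's forwarding constructor hands that argument straight to its embedded allocator member. Below is a reduced sketch of that pattern only; Owner, OwnerSlabAllocator, and Bump are hypothetical stand-ins for the real classes.

    #include <cstddef>
    #include <cstdlib>
    #include <utility>

    // Stand-in for BumpPtrAllocatorImpl: default-constructs its allocator, or
    // forwards a constructor argument to it.
    template <typename AllocatorT> struct Bump {
      AllocatorT Allocator;
      Bump() : Allocator() {}
      template <typename T> Bump(T &&A) : Allocator(std::forward<T &&>(A)) {}
    };

    struct Owner; // stand-in for DefaultJITMemoryManager

    struct OwnerSlabAllocator { // stand-in for JITSlabAllocator
      Owner &O;
      OwnerSlabAllocator(Owner &o) : O(o) {} // non-explicit, like the real one
      void *Allocate(size_t Size) { return std::malloc(Size); }
      void Deallocate(void *Slab, size_t) { std::free(Slab); }
    };

    struct Owner {
      Bump<OwnerSlabAllocator> Stub;
      Bump<OwnerSlabAllocator> Data;
      // Passing *this works because OwnerSlabAllocator is implicitly
      // constructible from Owner&, and Bump forwards the argument straight to
      // its embedded allocator member.
      Owner() : Stub(*this), Data(*this) {}
    };

    int main() {
      Owner O;
      void *P = O.Stub.Allocator.Allocate(32);
      O.Stub.Allocator.Deallocate(P, 32);
      return 0;
    }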
lib/Support/Allocator.cpp

@@ -21,18 +21,6 @@
 
 namespace llvm {
 
-SlabAllocator::~SlabAllocator() { }
-
-MallocSlabAllocator::~MallocSlabAllocator() { }
-
-void *MallocSlabAllocator::Allocate(size_t Size) {
-  return Allocator.Allocate(Size, 0);
-}
-
-void MallocSlabAllocator::Deallocate(void *Slab, size_t Size) {
-  Allocator.Deallocate(Slab);
-}
-
 void printBumpPtrAllocatorStats(unsigned NumSlabs, size_t BytesAllocated,
                                 size_t TotalMemory) {
   errs() << "\nNumber of memory regions: " << NumSlabs << '\n'
unittests/Support/AllocatorTest.cpp

@@ -102,13 +102,13 @@ TEST(AllocatorTest, TestSmallSlabSize) {
 
 // Mock slab allocator that returns slabs aligned on 4096 bytes. There is no
 // easy portable way to do this, so this is kind of a hack.
-class MockSlabAllocator : public SlabAllocator {
-  size_t LastSlabSize;
+class MockSlabAllocator {
+  static size_t LastSlabSize;
 
 public:
-  virtual ~MockSlabAllocator() { }
+  ~MockSlabAllocator() { }
 
-  virtual void *Allocate(size_t Size) {
+  void *Allocate(size_t Size) {
     // Allocate space for the alignment, the slab, and a void* that goes right
     // before the slab.
     size_t Alignment = 4096;
@@ -124,19 +124,20 @@ public:
     return Slab;
   }
 
-  virtual void Deallocate(void *Slab, size_t Size) {
+  void Deallocate(void *Slab, size_t Size) {
     free(((void**)Slab)[-1]);
   }
 
-  size_t GetLastSlabSize() { return LastSlabSize; }
+  static size_t GetLastSlabSize() { return LastSlabSize; }
 };
 
+size_t MockSlabAllocator::LastSlabSize = 0;
+
 // Allocate a large-ish block with a really large alignment so that the
 // allocator will think that it has space, but after it does the alignment it
 // will not.
 TEST(AllocatorTest, TestBigAlignment) {
-  MockSlabAllocator SlabAlloc;
-  BumpPtrAllocator Alloc(SlabAlloc);
+  BumpPtrAllocatorImpl<MockSlabAllocator> Alloc;
 
   // First allocate a tiny bit to ensure we have to re-align things.
   (void)Alloc.Allocate(1, 0);
@@ -146,7 +147,7 @@ TEST(AllocatorTest, TestBigAlignment) {
 
   // We test that the last slab size is not the default 4096 byte slab, but
   // rather a custom sized slab that is larger.
-  EXPECT_GT(SlabAlloc.GetLastSlabSize(), 4096u);
+  EXPECT_GT(MockSlabAllocator::GetLastSlabSize(), 4096u);
 }
 
 } // anonymous namespace
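The mock moves its bookkeeping into a static member because BumpPtrAllocatorImpl now owns and default-constructs its allocator as a private member, so the test can no longer hold on to the instance it wants to inspect. A reduced sketch of that pattern, with hypothetical names (BumpLike, RecordingSlabAllocator); this is not the LLVM code itself.

    #include <cstddef>
    #include <cstdlib>

    // Stand-in for BumpPtrAllocatorImpl: the allocator is a private member
    // that the class default-constructs itself.
    template <typename AllocatorT> class BumpLike {
      AllocatorT Allocator;

    public:
      void *Grab(size_t Size) { return Allocator.Allocate(Size); }
    };

    class RecordingSlabAllocator {
      static size_t LastSlabSize; // observations go through static state

    public:
      void *Allocate(size_t Size) {
        LastSlabSize = Size;
        return std::malloc(Size);
      }
      void Deallocate(void *Slab, size_t) { std::free(Slab); }
      static size_t GetLastSlabSize() { return LastSlabSize; }
    };
    size_t RecordingSlabAllocator::LastSlabSize = 0;

    int main() {
      BumpLike<RecordingSlabAllocator> Alloc;
      void *P = Alloc.Grab(4096);
      std::free(P); // sketch only; a real bump allocator frees its own slabs
      return RecordingSlabAllocator::GetLastSlabSize() == 4096 ? 0 : 1;
    }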