ppsspp/Core/Util/BlockAllocator.cpp


// Copyright (c) 2012- PPSSPP Project.
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, version 2.0 or later versions.
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License 2.0 for more details.
// A copy of the GPL 2.0 should have been included with the program.
// If not, see http://www.gnu.org/licenses/
// Official git repository and contact information can be found at
// https://github.com/hrydgard/ppsspp and http://www.ppsspp.org/.
#include <cstring>
#include "Common/Log.h"
#include "Common/Serialize/Serializer.h"
#include "Common/Serialize/SerializeFuncs.h"
#include "Common/StringUtils.h"
#include "Core/Util/BlockAllocator.h"
#include "Core/Reporting.h"
// Slow freaking thing but works (eventually) :)
BlockAllocator::BlockAllocator(int grain) : bottom_(NULL), top_(NULL), grain_(grain)
{
}
BlockAllocator::~BlockAllocator()
{
Shutdown();
}
void BlockAllocator::Init(u32 rangeStart, u32 rangeSize)
{
Shutdown();
rangeStart_ = rangeStart;
rangeSize_ = rangeSize;
//Initial block, covering everything
top_ = new Block(rangeStart_, rangeSize_, false, NULL, NULL);
bottom_ = top_;
}
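// Example usage (a minimal sketch; the grain, range, and tag below are
// illustrative values, not taken from any actual caller):
//
//   BlockAllocator alloc(32);
//   alloc.Init(0x10000000, 0x01000000);        // manage a 16 MiB range
//   u32 size = 0x1234;                         // Alloc() rounds this up to the grain
//   u32 addr = alloc.Alloc(size, false, "demo");
//   if (addr != (u32)-1) {
//       // ... use [addr, addr + size) ...
//       alloc.Free(addr);
//   }
//   alloc.Shutdown();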
void BlockAllocator::Shutdown()
{
while (bottom_ != NULL)
{
Block *next = bottom_->next;
delete bottom_;
bottom_ = next;
}
top_ = NULL;
}
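// First-fit scan over the doubly-linked block list.
// - "size" is in/out: it is rounded up to sizeGrain before searching.
// - "grain" controls the alignment of the returned start address.
// - fromTop == false walks from bottom_ upward and places the allocation at the
//   low end of the first fitting free block; fromTop == true walks from top_
//   downward and places it at the high end, down-aligned to "grain".
// Leftover space is split off as new free blocks.
// Returns the start address, or -1 (0xFFFFFFFF) on failure.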
u32 BlockAllocator::AllocAligned(u32 &size, u32 sizeGrain, u32 grain, bool fromTop, const char *tag)
{
// Sanity check
if (size == 0 || size > rangeSize_) {
ERROR_LOG(SCEKERNEL, "Clearly bogus size: %08x - failing allocation", size);
return -1;
}
// It could be off step, but the grain should generally be a power of 2.
if (grain < grain_)
grain = grain_;
if (sizeGrain < grain_)
sizeGrain = grain_;
// upalign size to grain
size = (size + sizeGrain - 1) & ~(sizeGrain - 1);
if (!fromTop)
{
//Allocate from bottom of mem
for (Block *bp = bottom_; bp != NULL; bp = bp->next)
{
Block &b = *bp;
u32 offset = b.start % grain;
if (offset != 0)
offset = grain - offset;
u32 needed = offset + size;
if (b.taken == false && b.size >= needed)
{
if (b.size == needed)
{
if (offset >= grain_)
InsertFreeBefore(&b, offset);
b.taken = true;
b.SetTag(tag);
return b.start;
}
else
{
InsertFreeAfter(&b, b.size - needed);
if (offset >= grain_)
InsertFreeBefore(&b, offset);
b.taken = true;
b.SetTag(tag);
return b.start;
}
}
}
}
else
{
// Allocate from top of mem.
for (Block *bp = top_; bp != NULL; bp = bp->prev)
{
Block &b = *bp;
u32 offset = (b.start + b.size - size) % grain;
u32 needed = offset + size;
if (b.taken == false && b.size >= needed)
{
if (b.size == needed)
{
if (offset >= grain_)
InsertFreeAfter(&b, offset);
b.taken = true;
b.SetTag(tag);
return b.start;
}
else
{
InsertFreeBefore(&b, b.size - needed);
if (offset >= grain_)
InsertFreeAfter(&b, offset);
b.taken = true;
b.SetTag(tag);
return b.start;
}
}
}
}
//Out of memory :(
ListBlocks();
ERROR_LOG(SCEKERNEL, "Block Allocator (%08x-%08x) failed to allocate %i (%08x) bytes of contiguous memory", rangeStart_, rangeStart_ + rangeSize_, size, size);
return -1;
}
u32 BlockAllocator::Alloc(u32 &size, bool fromTop, const char *tag)
{
// We want to make sure it's aligned in case AllocAt() was used.
return AllocAligned(size, grain_, grain_, fromTop, tag);
}
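// Allocation at a caller-chosen address. The position is down-aligned and the
// size up-aligned to the allocator grain so that whole grain-sized blocks are
// claimed; "size" is updated to the usable size measured from the original position.
// Fails (returns -1) if the containing block is already taken or too small.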
u32 BlockAllocator::AllocAt(u32 position, u32 size, const char *tag)
{
CheckBlocks();
if (size > rangeSize_) {
ERROR_LOG(SCEKERNEL, "Clearly bogus size: %08x - failing allocation", size);
return -1;
}
// Downalign the position so we're allocating full blocks.
u32 alignedPosition = position;
u32 alignedSize = size;
if (position & (grain_ - 1)) {
DEBUG_LOG(SCEKERNEL, "Position %08x does not align to grain.", position);
alignedPosition &= ~(grain_ - 1);
// Since the position was decreased, size must increase.
alignedSize += position - alignedPosition;
}
// Upalign size to grain.
alignedSize = (alignedSize + grain_ - 1) & ~(grain_ - 1);
// Tell the caller the allocated size from their requested starting position.
size = alignedSize - (position - alignedPosition);
Block *bp = GetBlockFromAddress(alignedPosition);
if (bp != NULL)
{
Block &b = *bp;
if (b.taken)
{
ERROR_LOG(SCEKERNEL, "Block allocator AllocAt failed, block taken! %08x, %i", position, size);
return -1;
}
else
{
// Make sure the block is big enough to split.
if (b.start + b.size < alignedPosition + alignedSize)
{
ERROR_LOG(SCEKERNEL, "Block allocator AllocAt failed, not enough contiguous space %08x, %i", position, size);
return -1;
}
//good to go
else if (b.start == alignedPosition)
{
if (b.size != alignedSize)
InsertFreeAfter(&b, b.size - alignedSize);
b.taken = true;
b.SetTag(tag);
CheckBlocks();
return position;
}
else
{
InsertFreeBefore(&b, alignedPosition - b.start);
if (b.size > alignedSize)
InsertFreeAfter(&b, b.size - alignedSize);
b.taken = true;
b.SetTag(tag);
return position;
}
}
}
else
{
ERROR_LOG(SCEKERNEL, "Block allocator AllocAt failed :( %08x, %i", position, size);
}
//Out of memory :(
ListBlocks();
ERROR_LOG(SCEKERNEL, "Block Allocator (%08x-%08x) failed to allocate %i (%08x) bytes of contiguous memory", rangeStart_, rangeStart_ + rangeSize_, alignedSize, alignedSize);
return -1;
}
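// Coalesces the given free block with any adjacent free blocks, first walking
// toward bottom_ and then toward top_, deleting merged blocks and keeping the
// bottom_/top_ pointers consistent.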
void BlockAllocator::MergeFreeBlocks(Block *fromBlock)
{
DEBUG_LOG(SCEKERNEL, "Merging Blocks");
Block *prev = fromBlock->prev;
while (prev != NULL && prev->taken == false)
{
DEBUG_LOG(SCEKERNEL, "Block Alloc found adjacent free blocks - merging");
prev->size += fromBlock->size;
if (fromBlock->next == NULL)
top_ = prev;
else
fromBlock->next->prev = prev;
prev->next = fromBlock->next;
delete fromBlock;
fromBlock = prev;
prev = fromBlock->prev;
}
if (prev == NULL)
bottom_ = fromBlock;
else
prev->next = fromBlock;
Block *next = fromBlock->next;
while (next != NULL && next->taken == false)
{
DEBUG_LOG(SCEKERNEL, "Block Alloc found adjacent free blocks - merging");
fromBlock->size += next->size;
fromBlock->next = next->next;
delete next;
next = fromBlock->next;
}
if (next == NULL)
top_ = fromBlock;
else
next->prev = fromBlock;
}
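// Frees the block containing "position" and merges it with free neighbours.
// FreeExact (below) additionally requires "position" to be the exact start of
// a taken block, which catches mismatched frees.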
bool BlockAllocator::Free(u32 position)
{
Block *b = GetBlockFromAddress(position);
if (b && b->taken)
{
b->taken = false;
MergeFreeBlocks(b);
return true;
}
else
{
ERROR_LOG(SCEKERNEL, "BlockAllocator : invalid free %08x", position);
return false;
}
}
bool BlockAllocator::FreeExact(u32 position)
{
Block *b = GetBlockFromAddress(position);
if (b && b->taken && b->start == position)
{
b->taken = false;
MergeFreeBlocks(b);
return true;
}
else
{
ERROR_LOG(SCEKERNEL, "BlockAllocator : invalid free %08x", position);
return false;
}
}
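// Splits "size" bytes off the front of block b into a new free block linked in
// before it; b's start moves up and its size shrinks accordingly.
// InsertFreeAfter does the same at the tail end of b.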
BlockAllocator::Block *BlockAllocator::InsertFreeBefore(Block *b, u32 size)
{
Block *inserted = new Block(b->start, size, false, b->prev, b);
b->prev = inserted;
if (inserted->prev == NULL)
bottom_ = inserted;
else
inserted->prev->next = inserted;
b->start += size;
b->size -= size;
return inserted;
}
BlockAllocator::Block *BlockAllocator::InsertFreeAfter(Block *b, u32 size)
{
Block *inserted = new Block(b->start + b->size - size, size, false, b, b->next);
b->next = inserted;
if (inserted->next == NULL)
top_ = inserted;
else
inserted->next->prev = inserted;
b->size -= size;
return inserted;
}
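// Debug sanity pass: reports blocks whose range falls outside the managed
// region, or whose start looks like a freed-memory poison value.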
void BlockAllocator::CheckBlocks() const
{
for (const Block *bp = bottom_; bp != NULL; bp = bp->next)
{
const Block &b = *bp;
if (b.start > 0xc0000000) { // probably free'd debug values
ERROR_LOG_REPORT(HLE, "Bogus block in allocator");
}
// Outside the valid range, probably logic bug in allocation.
if (b.start + b.size > rangeStart_ + rangeSize_ || b.start < rangeStart_) {
ERROR_LOG_REPORT(HLE, "Bogus block in allocator");
}
}
}
const char *BlockAllocator::GetBlockTag(u32 addr) const {
const Block *b = GetBlockFromAddress(addr);
// Guard against lookups outside the managed range.
return b ? b->tag : NULL;
}
inline BlockAllocator::Block *BlockAllocator::GetBlockFromAddress(u32 addr)
{
for (Block *bp = bottom_; bp != NULL; bp = bp->next)
{
Block &b = *bp;
if (b.start <= addr && b.start + b.size > addr)
{
// Got one!
return bp;
}
}
return NULL;
}
const BlockAllocator::Block *BlockAllocator::GetBlockFromAddress(u32 addr) const
{
for (const Block *bp = bottom_; bp != NULL; bp = bp->next)
{
const Block &b = *bp;
if (b.start <= addr && b.start + b.size > addr)
{
// Got one!
return bp;
}
}
return NULL;
}
u32 BlockAllocator::GetBlockStartFromAddress(u32 addr) const
{
const Block *b = GetBlockFromAddress(addr);
if (b)
return b->start;
else
return -1;
}
u32 BlockAllocator::GetBlockSizeFromAddress(u32 addr) const
{
const Block *b = GetBlockFromAddress(addr);
if (b)
return b->size;
else
return -1;
}
void BlockAllocator::ListBlocks() const
{
INFO_LOG(SCEKERNEL,"-----------");
for (const Block *bp = bottom_; bp != NULL; bp = bp->next)
{
const Block &b = *bp;
INFO_LOG(SCEKERNEL, "Block: %08x - %08x size %08x taken=%i tag=%s", b.start, b.start+b.size, b.size, b.taken ? 1:0, b.tag);
}
INFO_LOG(SCEKERNEL,"-----------");
}
u32 BlockAllocator::GetLargestFreeBlockSize() const
{
u32 maxFreeBlock = 0;
for (const Block *bp = bottom_; bp != NULL; bp = bp->next)
{
const Block &b = *bp;
if (!b.taken)
{
if (b.size > maxFreeBlock)
maxFreeBlock = b.size;
}
}
if (maxFreeBlock & (grain_ - 1))
WARN_LOG_REPORT(HLE, "GetLargestFreeBlockSize: free size %08x does not align to grain %08x.", maxFreeBlock, grain_);
return maxFreeBlock;
}
u32 BlockAllocator::GetTotalFreeBytes() const
{
u32 sum = 0;
for (const Block *bp = bottom_; bp != NULL; bp = bp->next)
{
const Block &b = *bp;
if (!b.taken)
{
sum += b.size;
}
}
if (sum & (grain_ - 1))
WARN_LOG_REPORT(HLE, "GetTotalFreeBytes: free size %08x does not align to grain %08x.", sum, grain_);
return sum;
}
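// Savestate serialization: stores the block count followed by each block in
// list order, then the managed range and grain. On load the whole list is
// rebuilt from scratch.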
void BlockAllocator::DoState(PointerWrap &p)
{
auto s = p.Section("BlockAllocator", 1);
if (!s)
return;
int count = 0;
if (p.mode == p.MODE_READ)
{
Shutdown();
Do(p, count);
bottom_ = new Block(0, 0, false, NULL, NULL);
bottom_->DoState(p);
--count;
top_ = bottom_;
for (int i = 0; i < count; ++i)
{
top_->next = new Block(0, 0, false, top_, NULL);
top_->next->DoState(p);
top_ = top_->next;
}
}
else
{
for (const Block *bp = bottom_; bp != NULL; bp = bp->next)
++count;
Do(p, count);
bottom_->DoState(p);
--count;
Block *last = bottom_;
for (int i = 0; i < count; ++i)
{
last->next->DoState(p);
last = last->next;
}
}
Do(p, rangeStart_);
Do(p, rangeSize_);
Do(p, grain_);
}
BlockAllocator::Block::Block(u32 _start, u32 _size, bool _taken, Block *_prev, Block *_next)
: start(_start), size(_size), taken(_taken), prev(_prev), next(_next)
{
truncate_cpy(tag, "(untitled)");
}
void BlockAllocator::Block::SetTag(const char *_tag)
{
if (_tag)
truncate_cpy(tag, _tag);
else
truncate_cpy(tag, "---");
}
void BlockAllocator::Block::DoState(PointerWrap &p)
{
auto s = p.Section("Block", 1);
if (!s)
return;
Do(p, start);
Do(p, size);
Do(p, taken);
// Since we use truncate_cpy, the empty space is not zeroed. Zero it now.
// This avoids saving uninitialized memory.
size_t tagLen = strlen(tag);
if (tagLen != sizeof(tag))
memset(tag + tagLen, 0, sizeof(tag) - tagLen);
DoArray(p, tag, sizeof(tag));
}