Merge backout of bug 497495 part four due to leaks.
commit 20475eada4

@@ -55,7 +55,6 @@
 #define nsIPresShell_h___
 
 #include "nsISupports.h"
-#include "nsQueryFrame.h"
 #include "nsCoord.h"
 #include "nsRect.h"
 #include "nsColor.h"
@@ -122,10 +121,10 @@ typedef struct CapturingContentInfo {
     mAllowed(PR_FALSE), mRetargetToElement(PR_FALSE), mContent(nsnull) { }
 } CapturingContentInfo;
 
-// eed2ef56-133f-4696-9eee-5fc45d816be8
+// eba51d41-68db-4dab-a57b-dc1a2704de87
 #define NS_IPRESSHELL_IID \
-{ 0xeed2ef56, 0x133f, 0x4696, \
-  { 0x9e, 0xee, 0x5f, 0xc4, 0x5d, 0x81, 0x6b, 0xe8 } }
+{ 0xeba51d41, 0x68db, 0x4dab, \
+  { 0xa5, 0x7b, 0xdc, 0x1a, 0x27, 0x04, 0xde, 0x87 } }
 
 // Constants for ScrollContentIntoView() function
 #define NS_PRESSHELL_SCROLL_TOP      0
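
The new interface ID in this hunk is just the UUID from the comment split into the ID structure's fields: the first three initializers are the 32/16/16-bit groups and the byte array holds the remaining eight octets. A self-contained sketch that reassembles the string form from the initializer, with MiniID standing in for the real nsID type (which is assumed here, not included):

#include <cstdio>

// Stand-in for nsID, only for this illustration.
struct MiniID {
  unsigned int   m0;
  unsigned short m1, m2;
  unsigned char  m3[8];
};

// Same literal as in the hunk above.
static const MiniID kPresShellIID =
  { 0xeba51d41, 0x68db, 0x4dab,
    { 0xa5, 0x7b, 0xdc, 0x1a, 0x27, 0x04, 0xde, 0x87 } };

int main() {
  const MiniID& id = kPresShellIID;
  // Prints eba51d41-68db-4dab-a57b-dc1a2704de87, matching the comment in the hunk.
  std::printf("%08x-%04x-%04x-%02x%02x-%02x%02x%02x%02x%02x%02x\n",
              id.m0, id.m1, id.m2,
              id.m3[0], id.m3[1], id.m3[2], id.m3[3],
              id.m3[4], id.m3[5], id.m3[6], id.m3[7]);
  return 0;
}
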
@@ -190,11 +189,10 @@ public:
 
   // All frames owned by the shell are allocated from an arena.  They
   // are also recycled using free lists.  Separate free lists are
-  // maintained for each frame type (aCode), which must always
-  // correspond to the same aSize value.  AllocateFrame clears the
-  // memory that it returns.
-  virtual void* AllocateFrame(nsQueryFrame::FrameIID aCode, size_t aSize) = 0;
-  virtual void FreeFrame(nsQueryFrame::FrameIID aCode, void* aChunk) = 0;
+  // maintained for each combination of aSize and aCode.  AllocateFrame
+  // clears the memory that it returns.
+  virtual void* AllocateFrame(size_t aSize, unsigned int aCode) = 0;
+  virtual void FreeFrame(size_t aSize, unsigned int aCode, void* aChunk) = 0;
 
   // Objects closely related to the frame tree, but that are not
   // actual frames (subclasses of nsFrame) are also allocated from the
@@ -47,10 +47,12 @@
 #include "nsPresArena.h"
 #include "nsCRT.h"
 #include "nsDebug.h"
-#include "nsTArray.h"
-#include "nsTHashtable.h"
 #include "prmem.h"
 
+// Uncomment this to disable arenas, instead forwarding to
+// malloc for every allocation.
+//#define DEBUG_TRACEMALLOC_PRESARENA 1
+
 #ifndef DEBUG_TRACEMALLOC_PRESARENA
 
 // Even on 32-bit systems, we allocate objects from the frame arena
@@ -61,72 +63,24 @@
 #define PL_ARENA_CONST_ALIGN_MASK ((PRUword(1) << ALIGN_SHIFT) - 1)
 #include "plarena.h"
 
+// Largest chunk size we recycle
+static const size_t MAX_RECYCLED_SIZE = 400;
+
+// Recycler array entry N (0 <= N < NUM_RECYCLERS) holds chunks of
+// size (N+1) << ALIGN_SHIFT, thus we need this many array entries.
+static const size_t NUM_RECYCLERS = MAX_RECYCLED_SIZE >> ALIGN_SHIFT;
+
 // Size to use for PLArena block allocations.
 static const size_t ARENA_PAGE_SIZE = 4096;
 
-// Freed memory is filled with a poison value, which is believed to
-// form a pointer to an always-unmapped region of the address space on
-// all platforms of interest.  The low 12 bits of this number are
-// chosen to fall in the middle of the typical 4096-byte page, and
-// make the address odd.
-//
-// With the possible exception of PPC64, current 64-bit CPUs permit
-// only a subset (2^48 to 2^56, depending) of the full virtual address
-// space to be used.  x86-64 has the inaccessible region in the
-// *middle* of the address space, whereas all others are believed to
-// have it at the highest addresses.  Use an address in this region if
-// we possibly can; if the hardware doesn't let anyone use it, we
-// needn't worry about the OS.
-//
-// TODO: Confirm that this value is a pointer to an always-unmapped
-// address space region on (at least) Win32, Win64, WinCE, ARM Linux,
-// MacOSX, and add #ifdefs below as necessary. (Bug 507294.)
-
-#if defined(__x86_64__) || defined(_M_AMD64)
-const PRUword ARENA_POISON = 0x7FFFFFFFF0DEA7FF;
-#else
-// This evaluates to 0xF0DE_A7FF when PRUword is 32 bits long, but to
-// 0xFFFF_FFFF_F0DE_A7FF when it's 64 bits.
-const PRUword ARENA_POISON = (~PRUword(0x0FFFFF00) | PRUword(0x0DEA700));
-#endif
-
-// All keys to this hash table fit in 32 bits (see below) so we do not
-// bother actually hashing them.
-class FreeList : public PLDHashEntryHdr
-{
-public:
-  typedef PRUint32 KeyType;
-  nsTArray<void *> mEntries;
-  size_t mEntrySize;
-
-protected:
-  typedef const void* KeyTypePointer;
-  KeyTypePointer mKey;
-
-  FreeList(KeyTypePointer aKey) : mEntrySize(0), mKey(aKey) {}
-  // Default copy constructor and destructor are ok.
-
-  PRBool KeyEquals(KeyTypePointer const aKey) const
-  { return mKey == aKey; }
-
-  static KeyTypePointer KeyToPointer(KeyType aKey)
-  { return NS_INT32_TO_PTR(aKey); }
-
-  static PLDHashNumber HashKey(KeyTypePointer aKey)
-  { return NS_PTR_TO_INT32(aKey); }
-
-  enum { ALLOW_MEMMOVE = PR_FALSE };
-  friend class nsTHashtable<FreeList>;
-};
-
 struct nsPresArena::State {
-  nsTHashtable<FreeList> mFreeLists;
+  void* mRecyclers[NUM_RECYCLERS];
   PLArenaPool mPool;
 
   State()
   {
-    mFreeLists.Init();
     PL_INIT_ARENA_POOL(&mPool, "PresArena", ARENA_PAGE_SIZE);
+    memset(mRecyclers, 0, sizeof(mRecyclers));
   }
 
   ~State()
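
The recycler comments above pin down the bucket arithmetic: a request is rounded up to the arena's alignment, and the bucket index is the aligned size shifted down by ALIGN_SHIFT, minus one. A self-contained sketch of that arithmetic follows; the 8-byte alignment (ALIGN_SHIFT == 3) is assumed purely for illustration, since the real definition sits outside this hunk.

#include <cstddef>
#include <cstdio>

// Assumed values for the worked example only.
static const size_t ALIGN_SHIFT = 3;
static const size_t MAX_RECYCLED_SIZE = 400;
static const size_t NUM_RECYCLERS = MAX_RECYCLED_SIZE >> ALIGN_SHIFT;  // 50 buckets

static size_t AlignUp(size_t aSize) {
  const size_t mask = (size_t(1) << ALIGN_SHIFT) - 1;
  return (aSize + mask) & ~mask;
}

int main() {
  // A 36-byte request rounds up to 40 bytes and lands in bucket (40 >> 3) - 1 = 4,
  // i.e. the bucket that holds chunks of size (4 + 1) << 3 = 40.
  size_t aligned = AlignUp(36);
  size_t index = (aligned >> ALIGN_SHIFT) - 1;
  std::printf("aligned=%zu bucket=%zu buckets=%zu\n", aligned, index, NUM_RECYCLERS);
  return 0;
}
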
@@ -134,81 +88,65 @@ struct nsPresArena::State {
     PL_FinishArenaPool(&mPool);
   }
 
-  void* Allocate(PRUint32 aCode, size_t aSize)
+  void* Allocate(size_t aSize)
   {
-    NS_ABORT_IF_FALSE(aSize > 0, "PresArena cannot allocate zero bytes");
+    void* result = nsnull;
 
-    // We only hand out aligned sizes
+    // Recycler lists are indexed by aligned size
     aSize = PL_ARENA_ALIGN(&mPool, aSize);
 
-    // If there is no free-list entry for this type already, we have
-    // to create one now, to record its size.
-    FreeList* list = mFreeLists.PutEntry(aCode);
-    if (!list) {
-      return nsnull;
-    }
-    nsTArray<void*>::index_type len = list->mEntries.Length();
-    if (list->mEntrySize == 0) {
-      NS_ABORT_IF_FALSE(len == 0, "list with entries but no recorded size");
-      list->mEntrySize = aSize;
-    } else {
-      NS_ABORT_IF_FALSE(list->mEntrySize != aSize,
-                        "different sizes for same object type code");
-    }
-
-    void* result;
-    if (len > 0) {
-      // LIFO behavior for best cache utilization
-      result = list->mEntries.ElementAt(len - 1);
-      list->mEntries.RemoveElementAt(len - 1);
-#ifdef DEBUG
-      {
-        char* p = reinterpret_cast<char*>(result);
-        char* limit = p + list->mEntrySize;
-        for (; p < limit; p += sizeof(PRUword)) {
-          NS_ABORT_IF_FALSE(*reinterpret_cast<PRUword*>(p) == ARENA_POISON,
-                            "PresArena: poison overwritten");
-        }
+    // Check recyclers first
+    if (aSize <= MAX_RECYCLED_SIZE) {
+      const size_t index = (aSize >> ALIGN_SHIFT) - 1;
+      result = mRecyclers[index];
+      if (result) {
+        // Need to move to the next object
+        void* next = *((void**)result);
+        mRecyclers[index] = next;
       }
-#endif
-      return result;
     }
 
-    // Allocate a new chunk from the arena
-    PL_ARENA_ALLOCATE(result, &mPool, aSize);
+    if (!result) {
+      // Allocate a new chunk from the arena
+      PL_ARENA_ALLOCATE(result, &mPool, aSize);
+    }
 
     return result;
   }
 
-  void Free(PRUint32 aCode, void* aPtr)
+  void Free(size_t aSize, void* aPtr)
   {
-    // Try to recycle this entry.
-    FreeList* list = mFreeLists.GetEntry(aCode);
-    NS_ABORT_IF_FALSE(list, "no free list for pres arena object");
-    NS_ABORT_IF_FALSE(list->mEntrySize > 0, "PresArena cannot free zero bytes");
+    // Recycler lists are indexed by aligned size
+    aSize = PL_ARENA_ALIGN(&mPool, aSize);
 
-    char* p = reinterpret_cast<char*>(aPtr);
-    char* limit = p + list->mEntrySize;
-    for (; p < limit; p += sizeof(PRUword)) {
-      *reinterpret_cast<PRUword*>(p) = ARENA_POISON;
+    // See if it's a size that we recycle
+    if (aSize <= MAX_RECYCLED_SIZE) {
+      const size_t index = (aSize >> ALIGN_SHIFT) - 1;
+      void* currentTop = mRecyclers[index];
+      mRecyclers[index] = aPtr;
+      *((void**)aPtr) = currentTop;
     }
-    list->mEntries.AppendElement(aPtr);
+#if defined DEBUG_dbaron || defined DEBUG_zack
+    else {
+      fprintf(stderr,
+              "WARNING: nsPresArena::FreeFrame leaking chunk of %lu bytes.\n",
+              aSize);
+    }
+#endif
   }
 };
 
 #else
-// Stub implementation that forwards everything to malloc and does not
-// poison.
+// Stub implementation that just forwards everything to malloc.
 
 struct nsPresArena::State
 {
-  void* Allocate(PRUnit32 /* unused */, size_t aSize)
+  void* Allocate(size_t aSize)
   {
     return PR_Malloc(aSize);
   }
 
-  void Free(PRUint32 /* unused */, void* aPtr)
+  void Free(size_t /*unused*/, void* aPtr)
   {
     PR_Free(aPtr);
   }
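
The rewritten Allocate and Free above implement an intrusive free list: when a chunk is freed, its first word is overwritten with a pointer to the previously freed chunk of the same bucket, so recycling needs no side storage and allocation pops the most recently freed chunk first. The standalone sketch below (invented names, plain malloc backing) shows just that push/pop mechanic.

#include <cstddef>
#include <cstdlib>
#include <cassert>

// One recycler bucket: a LIFO list threaded through the freed chunks themselves.
struct Recycler {
  void* mHead = nullptr;

  void Push(void* aChunk) {
    *static_cast<void**>(aChunk) = mHead;  // link the old head behind the new chunk
    mHead = aChunk;
  }

  void* Pop() {
    void* result = mHead;
    if (result) {
      mHead = *static_cast<void**>(result);  // advance to the next free chunk
    }
    return result;
  }
};

int main() {
  Recycler bucket;
  void* a = std::malloc(40);
  void* b = std::malloc(40);
  bucket.Push(a);
  bucket.Push(b);
  assert(bucket.Pop() == b);        // LIFO: most recently freed chunk comes back first
  assert(bucket.Pop() == a);
  assert(bucket.Pop() == nullptr);  // empty bucket falls through to the arena
  std::free(a);
  std::free(b);
  return 0;
}
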
@@ -219,36 +157,41 @@ struct nsPresArena::State
 // Public interface
 nsPresArena::nsPresArena()
   : mState(new nsPresArena::State())
+#ifdef DEBUG
+  , mAllocCount(0)
+#endif
 {}
 
 nsPresArena::~nsPresArena()
 {
+#ifdef DEBUG
+  NS_ASSERTION(mAllocCount == 0,
+               "Some PresArena objects were not freed");
+#endif
   delete mState;
 }
 
 void*
-nsPresArena::AllocateBySize(size_t aSize)
+nsPresArena::Allocate(size_t aSize)
 {
-  return mState->Allocate(PRUint32(aSize) |
-                          PRUint32(nsQueryFrame::NON_FRAME_MARKER),
-                          aSize);
+  NS_ABORT_IF_FALSE(aSize > 0, "PresArena cannot allocate zero bytes");
+  void* result = mState->Allocate(aSize);
+#ifdef DEBUG
+  if (result)
+    mAllocCount++;
+#endif
+  return result;
 }
 
 void
-nsPresArena::FreeBySize(size_t aSize, void* aPtr)
+nsPresArena::Free(size_t aSize, void* aPtr)
 {
-  mState->Free(PRUint32(aSize) |
-               PRUint32(nsQueryFrame::NON_FRAME_MARKER), aPtr);
-}
-
-void*
-nsPresArena::AllocateByCode(nsQueryFrame::FrameIID aCode, size_t aSize)
-{
-  return mState->Allocate(aCode, aSize);
-}
-
-void
-nsPresArena::FreeByCode(nsQueryFrame::FrameIID aCode, void* aPtr)
-{
-  mState->Free(aCode, aPtr);
+  NS_ABORT_IF_FALSE(aSize > 0, "PresArena cannot free zero bytes");
+#ifdef DEBUG
+  // Mark the memory with 0xdd in DEBUG builds so that there will be
+  // problems if someone tries to access memory that they've freed.
+  memset(aPtr, 0xdd, aSize);
+  mAllocCount--;
+#endif
+  mState->Free(aSize, aPtr);
 }
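
The DEBUG additions above pair a live-allocation counter with a 0xdd fill on free, so leaks assert in the destructor and use-after-free reads a recognizable byte pattern. A minimal standalone imitation of that bookkeeping, not the Mozilla class itself:

#include <cstdlib>
#include <cstring>
#include <cstdint>
#include <cassert>

// Hypothetical stand-in that mirrors only the debug bookkeeping pattern.
class CountingArena {
public:
  ~CountingArena() { assert(mAllocCount == 0 && "some objects were not freed"); }

  void* Allocate(size_t aSize) {
    void* result = std::malloc(aSize);
    if (result) {
      ++mAllocCount;               // count only successful allocations
    }
    return result;
  }

  void Free(size_t aSize, void* aPtr) {
    std::memset(aPtr, 0xdd, aSize);  // scribble over the dead object
    --mAllocCount;
    std::free(aPtr);
  }

private:
  uint32_t mAllocCount = 0;
};

int main() {
  CountingArena arena;
  void* p = arena.Allocate(64);
  arena.Free(64, p);  // caller must pass back the same size it allocated
  return 0;
}
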
@@ -46,37 +46,22 @@
 #define nsPresArena_h___
 
 #include "nscore.h"
-#include "nsQueryFrame.h"
 
-// Uncomment this to disable arenas, instead forwarding to
-// malloc for every allocation.
-//#define DEBUG_TRACEMALLOC_PRESARENA 1
-
-// The debugging version of nsPresArena does not free all the memory it
-// allocated when the arena itself is destroyed.
-#ifdef DEBUG_TRACEMALLOC_PRESARENA
-#define PRESARENA_MUST_FREE_DURING_DESTROY PR_TRUE
-#else
-#define PRESARENA_MUST_FREE_DURING_DESTROY PR_FALSE
-#endif
-
 class nsPresArena {
 public:
   nsPresArena();
   ~nsPresArena();
 
-  // Pool allocation with recycler lists indexed by object size.
-  NS_HIDDEN_(void*) AllocateBySize(size_t aSize);
-  NS_HIDDEN_(void) FreeBySize(size_t aSize, void* aPtr);
-
-  // Pool allocation with recycler lists indexed by object-type code.
-  // Every type code must always be used with the same object size.
-  NS_HIDDEN_(void*) AllocateByCode(nsQueryFrame::FrameIID aCode, size_t aSize);
-  NS_HIDDEN_(void) FreeByCode(nsQueryFrame::FrameIID aCode, void* aPtr);
+  // Memory management functions
+  NS_HIDDEN_(void*) Allocate(size_t aSize);
+  NS_HIDDEN_(void) Free(size_t aSize, void* aPtr);
 
 private:
   struct State;
   State* mState;
+#ifdef DEBUG
+  PRUint32 mAllocCount;
+#endif
 };
 
 #endif
@@ -656,9 +656,8 @@ public:
                         nsCompatibility aCompatMode);
   NS_IMETHOD Destroy();
 
-  virtual NS_HIDDEN_(void*) AllocateFrame(nsQueryFrame::FrameIID aCode,
-                                          size_t aSize);
-  virtual NS_HIDDEN_(void) FreeFrame(nsQueryFrame::FrameIID aCode,
+  virtual NS_HIDDEN_(void*) AllocateFrame(size_t aSize, unsigned int aCode);
+  virtual NS_HIDDEN_(void) FreeFrame(size_t aSize, unsigned int aCode,
                                      void* aChunk);
 
   virtual NS_HIDDEN_(void*) AllocateMisc(size_t aSize);
@@ -1273,11 +1272,6 @@ private:
    * only visible if the contents of the view as a whole are translucent.
    */
   nscolor ComputeBackstopColor(nsIView* aView);
-
-#ifdef DEBUG
-  // Ensure that every allocation from the PresArena is eventually freed.
-  PRUint32 mPresArenaAllocCount;
-#endif
 };
 
 class nsAutoCauseReflowNotifier
@@ -1611,9 +1605,6 @@ PresShell::PresShell()
 #endif
   mSelectionFlags = nsISelectionDisplay::DISPLAY_TEXT | nsISelectionDisplay::DISPLAY_IMAGES;
   mIsThemeSupportDisabled = PR_FALSE;
-#ifdef DEBUG
-  mPresArenaAllocCount = 0;
-#endif
 
   new (this) nsFrameManager();
 }
@@ -1635,12 +1626,7 @@ PresShell::~PresShell()
   NS_ASSERTION(mFirstCallbackEventRequest == nsnull &&
                mLastCallbackEventRequest == nsnull,
                "post-reflow queues not empty.  This means we're leaking");
 
-#ifdef DEBUG
-  NS_ASSERTION(mPresArenaAllocCount == 0,
-               "Some pres arena objects were not freed");
-#endif
-
   delete mStyleSet;
   delete mFrameConstructor;
 
@@ -1975,22 +1961,15 @@ PresShell::AllocateStackMemory(size_t aSize)
 }
 
 void
-PresShell::FreeFrame(nsQueryFrame::FrameIID aCode, void* aPtr)
+PresShell::FreeFrame(size_t aSize, unsigned int /*unused*/, void* aPtr)
 {
-#ifdef DEBUG
-  mPresArenaAllocCount--;
-#endif
-  if (PRESARENA_MUST_FREE_DURING_DESTROY || !mIsDestroying)
-    mFrameArena.FreeByCode(aCode, aPtr);
+  mFrameArena.Free(aSize, aPtr);
 }
 
 void*
-PresShell::AllocateFrame(nsQueryFrame::FrameIID aCode, size_t aSize)
+PresShell::AllocateFrame(size_t aSize, unsigned int /*unused*/)
 {
-#ifdef DEBUG
-  mPresArenaAllocCount++;
-#endif
-  void* result = mFrameArena.AllocateByCode(aCode, aSize);
+  void* result = mFrameArena.Allocate(aSize);
 
   if (result) {
     memset(result, 0, aSize);
@@ -2001,20 +1980,13 @@ PresShell::AllocateFrame(nsQueryFrame::FrameIID aCode, size_t aSize)
 void
 PresShell::FreeMisc(size_t aSize, void* aPtr)
 {
-#ifdef DEBUG
-  mPresArenaAllocCount--;
-#endif
-  if (PRESARENA_MUST_FREE_DURING_DESTROY || !mIsDestroying)
-    mFrameArena.FreeBySize(aSize, aPtr);
+  mFrameArena.Free(aSize, aPtr);
 }
 
 void*
 PresShell::AllocateMisc(size_t aSize)
 {
-#ifdef DEBUG
-  mPresArenaAllocCount++;
-#endif
-  return mFrameArena.AllocateBySize(aSize);
+  return mFrameArena.Allocate(aSize);
 }
 
 void
@@ -457,7 +457,7 @@ nsFrame::Destroy()
     view->Destroy();
   }
 
-  // Must retrieve the object ID before calling destructors, so the
+  // Must retrieve the object size before calling destructors, so the
   // vtable is still valid.
   //
   // Note to future tweakers: having the method that returns the
@@ -465,12 +465,12 @@ nsFrame::Destroy()
   // the compiler cannot devirtualize the call to the destructor even
   // if it's from a method defined in the same class.
 
-  nsQueryFrame::FrameIID id = GetFrameId();
+  size_t sz = GetAllocatedSize();
   this->~nsFrame();
 
   // Now that we're totally cleaned out, we need to add ourselves to
   // the presshell's recycler.
-  shell->FreeFrame(id, this);
+  shell->FreeFrame(sz, 0 /* dummy */, (void*)this);
 }
 
 NS_IMETHODIMP
@@ -118,13 +118,13 @@
 
 #define NS_DECL_FRAMEARENA_HELPERS \
   NS_MUST_OVERRIDE void* operator new(size_t, nsIPresShell*); \
-  virtual NS_MUST_OVERRIDE nsQueryFrame::FrameIID GetFrameId();
+  virtual NS_MUST_OVERRIDE size_t GetAllocatedSize();
 
 #define NS_IMPL_FRAMEARENA_HELPERS(class) \
   void* class::operator new(size_t sz, nsIPresShell* aShell) \
-  { return aShell->AllocateFrame(nsQueryFrame::class##_id, sz); } \
-  nsQueryFrame::FrameIID class::GetFrameId() \
-  { return nsQueryFrame::class##_id; }
+  { return aShell->AllocateFrame(sz, nsQueryFrame::class##_id); } \
+  size_t class::GetAllocatedSize() \
+  { return sizeof(class); }
 
 //----------------------------------------------------------------------
 
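
After this hunk the helper macros hand the allocation size, rather than a frame type code, to the shell, and GetAllocatedSize() simply reports sizeof(the class) so nsFrame::Destroy() can return exactly that many bytes to the arena. The self-contained miniature below imitates that pattern with invented MiniShell/MiniFrame types; it illustrates the expansion and the destroy-then-recycle call order, and is not the Gecko macros themselves.

#include <cstddef>
#include <cstdio>
#include <new>

// Invented stand-in for nsIPresShell's frame allocator.
struct MiniShell {
  void* AllocateFrame(size_t aSize, unsigned int /*aCode*/) { return ::operator new(aSize); }
  void FreeFrame(size_t /*aSize*/, unsigned int /*aCode*/, void* aChunk) { ::operator delete(aChunk); }
};

// Mirrors NS_DECL_FRAMEARENA_HELPERS / NS_IMPL_FRAMEARENA_HELPERS after the patch.
#define MINI_DECL_FRAMEARENA_HELPERS \
  void* operator new(size_t, MiniShell*); \
  virtual size_t GetAllocatedSize();

#define MINI_IMPL_FRAMEARENA_HELPERS(cls, code) \
  void* cls::operator new(size_t sz, MiniShell* aShell) \
  { return aShell->AllocateFrame(sz, code); } \
  size_t cls::GetAllocatedSize() \
  { return sizeof(cls); }

struct MiniFrame {
  MINI_DECL_FRAMEARENA_HELPERS
  virtual ~MiniFrame() {}
  char mPayload[24];
};

MINI_IMPL_FRAMEARENA_HELPERS(MiniFrame, 1)

int main() {
  MiniShell shell;
  MiniFrame* frame = new (&shell) MiniFrame();
  size_t sz = frame->GetAllocatedSize();  // virtual call: do it while the vtable is still valid
  frame->~MiniFrame();                    // run the destructor by hand
  shell.FreeFrame(sz, 1, frame);          // then hand the raw bytes back to the shell
  std::printf("recycled %zu bytes\n", sz);
  return 0;
}
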
@@ -251,14 +251,7 @@ public:
     nsXULLabelFrame_id,
     nsXULScrollFrame_id,
     SpacerFrame_id,
-    ViewportFrame_id,
-
-    // The PresArena implementation uses this bit to distinguish
-    // objects allocated by size (that is, non-frames) from objects
-    // allocated by code (that is, frames).  It should not collide
-    // with any frame ID.  It is not 0x80000000 to avoid the question
-    // of whether enumeration constants are signed.
-    NON_FRAME_MARKER = 0x40000000
+    ViewportFrame_id
   };
 
   virtual void* QueryFrame(FrameIID id) = 0;