mirror of https://github.com/mozilla/gecko-dev.git
Merge backout of bug 497495 part four due to leaks.
commit 20475eada4
@@ -55,7 +55,6 @@
#define nsIPresShell_h___

#include "nsISupports.h"
#include "nsQueryFrame.h"
#include "nsCoord.h"
#include "nsRect.h"
#include "nsColor.h"
@@ -122,10 +121,10 @@ typedef struct CapturingContentInfo {
mAllowed(PR_FALSE), mRetargetToElement(PR_FALSE), mContent(nsnull) { }
} CapturingContentInfo;

// eed2ef56-133f-4696-9eee-5fc45d816be8
// eba51d41-68db-4dab-a57b-dc1a2704de87
#define NS_IPRESSHELL_IID \
{ 0xeed2ef56, 0x133f, 0x4696, \
{ 0x9e, 0xee, 0x5f, 0xc4, 0x5d, 0x81, 0x6b, 0xe8 } }
{ 0xeba51d41, 0x68db, 0x4dab, \
{ 0xa5, 0x7b, 0xdc, 0x1a, 0x27, 0x04, 0xde, 0x87 } }

// Constants for ScrollContentIntoView() function
#define NS_PRESSHELL_SCROLL_TOP 0
@@ -190,11 +189,10 @@ public:

// All frames owned by the shell are allocated from an arena. They
// are also recycled using free lists. Separate free lists are
// maintained for each frame type (aCode), which must always
// correspond to the same aSize value. AllocateFrame clears the
// memory that it returns.
virtual void* AllocateFrame(nsQueryFrame::FrameIID aCode, size_t aSize) = 0;
virtual void FreeFrame(nsQueryFrame::FrameIID aCode, void* aChunk) = 0;
// maintained for each combination of aSize and aCode. AllocateFrame
// clears the memory that it returns.
virtual void* AllocateFrame(size_t aSize, unsigned int aCode) = 0;
virtual void FreeFrame(size_t aSize, unsigned int aCode, void* aChunk) = 0;

// Objects closely related to the frame tree, but that are not
// actual frames (subclasses of nsFrame) are also allocated from the
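The comment block above captures the invariant behind the backed-out design: when free lists are keyed by a frame type code, every allocation made with a given code has to use one fixed size, or a recycled chunk could be too small for its next user. A minimal standalone sketch of that invariant in standard C++; std::unordered_map stands in for the nsTHashtable the backed-out code used, and TypeKeyedFreeLists/NoteAllocation are illustrative names, not part of the tree:

#include <cassert>
#include <cstddef>
#include <unordered_map>
#include <vector>

// Sketch only: one free list per type code, each list permanently bound
// to the first size it ever sees.
struct TypeKeyedFreeLists {
  struct List { size_t mEntrySize = 0; std::vector<void*> mEntries; };
  std::unordered_map<unsigned, List> mLists;

  void NoteAllocation(unsigned aCode, size_t aSize) {
    List& list = mLists[aCode];
    if (list.mEntrySize == 0)
      list.mEntrySize = aSize;                  // first use records the size
    else
      assert(list.mEntrySize == aSize &&
             "different sizes for same object type code");
  }
};

int main() {
  TypeKeyedFreeLists lists;
  lists.NoteAllocation(/*aCode=*/7, 48);
  lists.NoteAllocation(/*aCode=*/7, 48);        // same code, same size: OK
  // lists.NoteAllocation(7, 64);               // would trip the assertion
}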
@@ -47,10 +47,12 @@
#include "nsPresArena.h"
#include "nsCRT.h"
#include "nsDebug.h"
#include "nsTArray.h"
#include "nsTHashtable.h"
#include "prmem.h"

// Uncomment this to disable arenas, instead forwarding to
// malloc for every allocation.
//#define DEBUG_TRACEMALLOC_PRESARENA 1

#ifndef DEBUG_TRACEMALLOC_PRESARENA

// Even on 32-bit systems, we allocate objects from the frame arena
@@ -61,72 +63,24 @@
#define PL_ARENA_CONST_ALIGN_MASK ((PRUword(1) << ALIGN_SHIFT) - 1)
#include "plarena.h"

// Largest chunk size we recycle
static const size_t MAX_RECYCLED_SIZE = 400;

// Recycler array entry N (0 <= N < NUM_RECYCLERS) holds chunks of
// size (N+1) << ALIGN_SHIFT, thus we need this many array entries.
static const size_t NUM_RECYCLERS = MAX_RECYCLED_SIZE >> ALIGN_SHIFT;

// Size to use for PLArena block allocations.
static const size_t ARENA_PAGE_SIZE = 4096;

// Freed memory is filled with a poison value, which is believed to
// form a pointer to an always-unmapped region of the address space on
// all platforms of interest. The low 12 bits of this number are
// chosen to fall in the middle of the typical 4096-byte page, and
// make the address odd.
//
// With the possible exception of PPC64, current 64-bit CPUs permit
// only a subset (2^48 to 2^56, depending) of the full virtual address
// space to be used. x86-64 has the inaccessible region in the
// *middle* of the address space, whereas all others are believed to
// have it at the highest addresses. Use an address in this region if
// we possibly can; if the hardware doesn't let anyone use it, we
// needn't worry about the OS.
//
// TODO: Confirm that this value is a pointer to an always-unmapped
// address space region on (at least) Win32, Win64, WinCE, ARM Linux,
// MacOSX, and add #ifdefs below as necessary. (Bug 507294.)

#if defined(__x86_64__) || defined(_M_AMD64)
const PRUword ARENA_POISON = 0x7FFFFFFFF0DEA7FF;
#else
// This evaluates to 0xF0DE_A7FF when PRUword is 32 bits long, but to
// 0xFFFF_FFFF_F0DE_A7FF when it's 64 bits.
const PRUword ARENA_POISON = (~PRUword(0x0FFFFF00) | PRUword(0x0DEA700));
#endif

// All keys to this hash table fit in 32 bits (see below) so we do not
// bother actually hashing them.
class FreeList : public PLDHashEntryHdr
{
public:
typedef PRUint32 KeyType;
nsTArray<void *> mEntries;
size_t mEntrySize;

protected:
typedef const void* KeyTypePointer;
KeyTypePointer mKey;

FreeList(KeyTypePointer aKey) : mEntrySize(0), mKey(aKey) {}
// Default copy constructor and destructor are ok.

PRBool KeyEquals(KeyTypePointer const aKey) const
{ return mKey == aKey; }

static KeyTypePointer KeyToPointer(KeyType aKey)
{ return NS_INT32_TO_PTR(aKey); }

static PLDHashNumber HashKey(KeyTypePointer aKey)
{ return NS_PTR_TO_INT32(aKey); }

enum { ALLOW_MEMMOVE = PR_FALSE };
friend class nsTHashtable<FreeList>;
};

struct nsPresArena::State {
nsTHashtable<FreeList> mFreeLists;
void* mRecyclers[NUM_RECYCLERS];
PLArenaPool mPool;

State()
{
mFreeLists.Init();
PL_INIT_ARENA_POOL(&mPool, "PresArena", ARENA_PAGE_SIZE);
memset(mRecyclers, 0, sizeof(mRecyclers));
}

~State()
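The poison machinery being removed here works by overwriting every freed chunk with a word that should never be a dereferenceable pointer, then checking that the poison is still intact when a chunk comes back off a free list. A small self-contained sketch of that idea, using uintptr_t in place of PRUword and standard malloc for the storage; it mirrors the backed-out logic rather than the exact Mozilla code:

#include <cassert>
#include <cstddef>
#include <cstdint>
#include <cstdlib>

// Illustrative poison word; the real value is chosen to point into an
// always-unmapped region of the address space (see the comment above).
static const uintptr_t kPoison = ~uintptr_t(0x0FFFFF00) | uintptr_t(0x0DEA700);

// Fill a freed chunk with the poison word so use-after-free reads become
// obvious, and so reuse can verify nothing scribbled on the chunk while
// it sat on the free list.
static void PoisonFill(void* aPtr, size_t aSize) {
  uintptr_t* p = static_cast<uintptr_t*>(aPtr);
  for (size_t i = 0; i < aSize / sizeof(uintptr_t); ++i)
    p[i] = kPoison;
}

static bool PoisonIntact(const void* aPtr, size_t aSize) {
  const uintptr_t* p = static_cast<const uintptr_t*>(aPtr);
  for (size_t i = 0; i < aSize / sizeof(uintptr_t); ++i)
    if (p[i] != kPoison) return false;
  return true;
}

int main() {
  const size_t sz = 64;
  void* chunk = std::malloc(sz);
  PoisonFill(chunk, sz);              // done at Free() time in the arena
  assert(PoisonIntact(chunk, sz));    // checked at Allocate() time (DEBUG)
  std::free(chunk);
}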
@@ -134,81 +88,65 @@ struct nsPresArena::State {
PL_FinishArenaPool(&mPool);
}

void* Allocate(PRUint32 aCode, size_t aSize)
void* Allocate(size_t aSize)
{
NS_ABORT_IF_FALSE(aSize > 0, "PresArena cannot allocate zero bytes");
void* result = nsnull;

// We only hand out aligned sizes
// Recycler lists are indexed by aligned size
aSize = PL_ARENA_ALIGN(&mPool, aSize);

// If there is no free-list entry for this type already, we have
// to create one now, to record its size.
FreeList* list = mFreeLists.PutEntry(aCode);
if (!list) {
return nsnull;
}

nsTArray<void*>::index_type len = list->mEntries.Length();
if (list->mEntrySize == 0) {
NS_ABORT_IF_FALSE(len == 0, "list with entries but no recorded size");
list->mEntrySize = aSize;
} else {
NS_ABORT_IF_FALSE(list->mEntrySize == aSize,
"different sizes for same object type code");
}

void* result;
if (len > 0) {
// LIFO behavior for best cache utilization
result = list->mEntries.ElementAt(len - 1);
list->mEntries.RemoveElementAt(len - 1);
#ifdef DEBUG
{
char* p = reinterpret_cast<char*>(result);
char* limit = p + list->mEntrySize;
for (; p < limit; p += sizeof(PRUword)) {
NS_ABORT_IF_FALSE(*reinterpret_cast<PRUword*>(p) == ARENA_POISON,
"PresArena: poison overwritten");
}
// Check recyclers first
if (aSize <= MAX_RECYCLED_SIZE) {
const size_t index = (aSize >> ALIGN_SHIFT) - 1;
result = mRecyclers[index];
if (result) {
// Need to move to the next object
void* next = *((void**)result);
mRecyclers[index] = next;
}
#endif
return result;
}

// Allocate a new chunk from the arena
PL_ARENA_ALLOCATE(result, &mPool, aSize);
if (!result) {
// Allocate a new chunk from the arena
PL_ARENA_ALLOCATE(result, &mPool, aSize);
}

return result;
}

void Free(PRUint32 aCode, void* aPtr)
void Free(size_t aSize, void* aPtr)
{
// Try to recycle this entry.
FreeList* list = mFreeLists.GetEntry(aCode);
NS_ABORT_IF_FALSE(list, "no free list for pres arena object");
NS_ABORT_IF_FALSE(list->mEntrySize > 0, "PresArena cannot free zero bytes");
// Recycler lists are indexed by aligned size
aSize = PL_ARENA_ALIGN(&mPool, aSize);

char* p = reinterpret_cast<char*>(aPtr);
char* limit = p + list->mEntrySize;
for (; p < limit; p += sizeof(PRUword)) {
*reinterpret_cast<PRUword*>(p) = ARENA_POISON;
// See if it's a size that we recycle
if (aSize <= MAX_RECYCLED_SIZE) {
const size_t index = (aSize >> ALIGN_SHIFT) - 1;
void* currentTop = mRecyclers[index];
mRecyclers[index] = aPtr;
*((void**)aPtr) = currentTop;
}

list->mEntries.AppendElement(aPtr);
#if defined DEBUG_dbaron || defined DEBUG_zack
else {
fprintf(stderr,
"WARNING: nsPresArena::FreeFrame leaking chunk of %lu bytes.\n",
aSize);
}
#endif
}
};

#else
// Stub implementation that forwards everything to malloc and does not
// poison.
// Stub implementation that just forwards everything to malloc.

struct nsPresArena::State
{
void* Allocate(PRUint32 /* unused */, size_t aSize)
void* Allocate(size_t aSize)
{
return PR_Malloc(aSize);
}

void Free(PRUint32 /* unused */, void* aPtr)
void Free(size_t /*unused*/, void* aPtr)
{
PR_Free(aPtr);
}
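The restored Allocate/Free pair above recycles chunks through per-size-class LIFO lists, threading each list through the first word of the free chunks themselves. A compact runnable sketch of that scheme in standard C++; ToyRecycler, the malloc fallback, and the ALIGN_SHIFT value are illustrative stand-ins for the PLArena-backed implementation:

#include <cassert>
#include <cstddef>
#include <cstdlib>

// Stand-ins for the arena constants above (illustrative values).
static const size_t ALIGN_SHIFT = 3;                       // 8-byte alignment
static const size_t MAX_RECYCLED_SIZE = 400;
static const size_t NUM_RECYCLERS = MAX_RECYCLED_SIZE >> ALIGN_SHIFT;

// One LIFO free list per aligned size class, threaded through the first
// word of each free chunk.
struct ToyRecycler {
  void* mRecyclers[NUM_RECYCLERS] = {};

  static size_t Align(size_t aSize) {
    const size_t mask = (size_t(1) << ALIGN_SHIFT) - 1;
    return (aSize + mask) & ~mask;
  }

  void Free(size_t aSize, void* aPtr) {
    aSize = Align(aSize);
    if (aSize <= MAX_RECYCLED_SIZE) {
      const size_t index = (aSize >> ALIGN_SHIFT) - 1;
      *static_cast<void**>(aPtr) = mRecyclers[index];      // link to old top
      mRecyclers[index] = aPtr;                             // new top of list
    }
    // Oversized chunks are simply abandoned to the arena until the whole
    // arena is torn down, as in the real code.
  }

  void* Allocate(size_t aSize) {
    aSize = Align(aSize);
    if (aSize <= MAX_RECYCLED_SIZE) {
      const size_t index = (aSize >> ALIGN_SHIFT) - 1;
      if (void* result = mRecyclers[index]) {
        mRecyclers[index] = *static_cast<void**>(result);   // pop the top
        return result;
      }
    }
    return std::malloc(aSize);   // the real code calls PL_ARENA_ALLOCATE here
  }
};

int main() {
  ToyRecycler r;
  void* a = r.Allocate(40);
  r.Free(40, a);
  void* b = r.Allocate(40);      // LIFO: gets back the chunk just freed
  assert(a == b);
  std::free(b);                  // only because this toy fell back to malloc
}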
@@ -219,36 +157,41 @@ struct nsPresArena::State
// Public interface
nsPresArena::nsPresArena()
: mState(new nsPresArena::State())
#ifdef DEBUG
, mAllocCount(0)
#endif
{}

nsPresArena::~nsPresArena()
{
#ifdef DEBUG
NS_ASSERTION(mAllocCount == 0,
"Some PresArena objects were not freed");
#endif
delete mState;
}

void*
nsPresArena::AllocateBySize(size_t aSize)
nsPresArena::Allocate(size_t aSize)
{
return mState->Allocate(PRUint32(aSize) |
PRUint32(nsQueryFrame::NON_FRAME_MARKER),
aSize);
NS_ABORT_IF_FALSE(aSize > 0, "PresArena cannot allocate zero bytes");
void* result = mState->Allocate(aSize);
#ifdef DEBUG
if (result)
mAllocCount++;
#endif
return result;
}

void
nsPresArena::FreeBySize(size_t aSize, void* aPtr)
nsPresArena::Free(size_t aSize, void* aPtr)
{
mState->Free(PRUint32(aSize) |
PRUint32(nsQueryFrame::NON_FRAME_MARKER), aPtr);
}

void*
nsPresArena::AllocateByCode(nsQueryFrame::FrameIID aCode, size_t aSize)
{
return mState->Allocate(aCode, aSize);
}

void
nsPresArena::FreeByCode(nsQueryFrame::FrameIID aCode, void* aPtr)
{
mState->Free(aCode, aPtr);
NS_ABORT_IF_FALSE(aSize > 0, "PresArena cannot free zero bytes");
#ifdef DEBUG
// Mark the memory with 0xdd in DEBUG builds so that there will be
// problems if someone tries to access memory that they've freed.
memset(aPtr, 0xdd, aSize);
mAllocCount--;
#endif
mState->Free(aSize, aPtr);
}
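The restored public Allocate/Free adds two pieces of debug bookkeeping: a live-allocation counter asserted against zero in the destructor, and a 0xdd scribble over freed memory. A minimal sketch of the same pattern with standard C++ asserts in place of NS_ASSERTION/NS_ABORT_IF_FALSE; ToyArena and its malloc backing are illustrative stand-ins:

#include <cassert>
#include <cstdlib>
#include <cstring>

// Count live allocations so the destructor can assert nothing leaked, and
// scribble over freed memory so stale pointers stand out in a debugger.
class ToyArena {
public:
  ~ToyArena() { assert(mAllocCount == 0 && "some arena objects were not freed"); }

  void* Allocate(size_t aSize) {
    assert(aSize > 0 && "cannot allocate zero bytes");
    void* result = std::malloc(aSize);
    if (result) ++mAllocCount;
    return result;
  }

  void Free(size_t aSize, void* aPtr) {
    assert(aSize > 0 && "cannot free zero bytes");
    std::memset(aPtr, 0xdd, aSize);   // make use-after-free obvious
    --mAllocCount;
    std::free(aPtr);
  }

private:
  unsigned mAllocCount = 0;
};

int main() {
  ToyArena arena;
  void* p = arena.Allocate(32);
  arena.Free(32, p);                   // balanced, so the destructor assert passes
}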
@@ -46,37 +46,22 @@
#define nsPresArena_h___

#include "nscore.h"
#include "nsQueryFrame.h"

// Uncomment this to disable arenas, instead forwarding to
// malloc for every allocation.
//#define DEBUG_TRACEMALLOC_PRESARENA 1

// The debugging version of nsPresArena does not free all the memory it
// allocated when the arena itself is destroyed.
#ifdef DEBUG_TRACEMALLOC_PRESARENA
#define PRESARENA_MUST_FREE_DURING_DESTROY PR_TRUE
#else
#define PRESARENA_MUST_FREE_DURING_DESTROY PR_FALSE
#endif

class nsPresArena {
public:
nsPresArena();
~nsPresArena();

// Pool allocation with recycler lists indexed by object size.
NS_HIDDEN_(void*) AllocateBySize(size_t aSize);
NS_HIDDEN_(void) FreeBySize(size_t aSize, void* aPtr);

// Pool allocation with recycler lists indexed by object-type code.
// Every type code must always be used with the same object size.
NS_HIDDEN_(void*) AllocateByCode(nsQueryFrame::FrameIID aCode, size_t aSize);
NS_HIDDEN_(void) FreeByCode(nsQueryFrame::FrameIID aCode, void* aPtr);
// Memory management functions
NS_HIDDEN_(void*) Allocate(size_t aSize);
NS_HIDDEN_(void) Free(size_t aSize, void* aPtr);

private:
struct State;
State* mState;
#ifdef DEBUG
PRUint32 mAllocCount;
#endif
};

#endif
@@ -656,9 +656,8 @@ public:
nsCompatibility aCompatMode);
NS_IMETHOD Destroy();

virtual NS_HIDDEN_(void*) AllocateFrame(nsQueryFrame::FrameIID aCode,
size_t aSize);
virtual NS_HIDDEN_(void) FreeFrame(nsQueryFrame::FrameIID aCode,
virtual NS_HIDDEN_(void*) AllocateFrame(size_t aSize, unsigned int aCode);
virtual NS_HIDDEN_(void) FreeFrame(size_t aSize, unsigned int aCode,
void* aChunk);

virtual NS_HIDDEN_(void*) AllocateMisc(size_t aSize);
@@ -1273,11 +1272,6 @@ private:
* only visible if the contents of the view as a whole are translucent.
*/
nscolor ComputeBackstopColor(nsIView* aView);

#ifdef DEBUG
// Ensure that every allocation from the PresArena is eventually freed.
PRUint32 mPresArenaAllocCount;
#endif
};

class nsAutoCauseReflowNotifier
@@ -1611,9 +1605,6 @@ PresShell::PresShell()
#endif
mSelectionFlags = nsISelectionDisplay::DISPLAY_TEXT | nsISelectionDisplay::DISPLAY_IMAGES;
mIsThemeSupportDisabled = PR_FALSE;
#ifdef DEBUG
mPresArenaAllocCount = 0;
#endif

new (this) nsFrameManager();
}
@@ -1635,12 +1626,7 @@ PresShell::~PresShell()
NS_ASSERTION(mFirstCallbackEventRequest == nsnull &&
mLastCallbackEventRequest == nsnull,
"post-reflow queues not empty. This means we're leaking");

#ifdef DEBUG
NS_ASSERTION(mPresArenaAllocCount == 0,
"Some pres arena objects were not freed");
#endif

delete mStyleSet;
delete mFrameConstructor;

@@ -1975,22 +1961,15 @@ PresShell::AllocateStackMemory(size_t aSize)
}

void
PresShell::FreeFrame(nsQueryFrame::FrameIID aCode, void* aPtr)
PresShell::FreeFrame(size_t aSize, unsigned int /*unused*/, void* aPtr)
{
#ifdef DEBUG
mPresArenaAllocCount--;
#endif
if (PRESARENA_MUST_FREE_DURING_DESTROY || !mIsDestroying)
mFrameArena.FreeByCode(aCode, aPtr);
mFrameArena.Free(aSize, aPtr);
}

void*
PresShell::AllocateFrame(nsQueryFrame::FrameIID aCode, size_t aSize)
PresShell::AllocateFrame(size_t aSize, unsigned int /*unused*/)
{
#ifdef DEBUG
mPresArenaAllocCount++;
#endif
void* result = mFrameArena.AllocateByCode(aCode, aSize);
void* result = mFrameArena.Allocate(aSize);

if (result) {
memset(result, 0, aSize);
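The lines removed from PresShell::FreeFrame show the guard the backed-out version used: while the shell is being destroyed, individual frees are skipped because the whole arena is about to be torn down in one shot, except in the malloc-forwarding debug mode where every chunk really must be released. A rough sketch of that guard; ToyShell, ToyArena, and kMustFreeDuringDestroy are illustrative names, not the real types:

#include <cstddef>
#include <cstdlib>

// Forced on when the arena is compiled in its malloc-forwarding debug mode,
// since then every chunk has to be handed back to malloc individually.
static const bool kMustFreeDuringDestroy = false;

struct ToyArena {
  void Free(size_t, void* aPtr) { std::free(aPtr); }
};

struct ToyShell {
  ToyArena mArena;
  bool mIsDestroying = false;

  void FreeFrame(size_t aSize, void* aPtr) {
    if (kMustFreeDuringDestroy || !mIsDestroying)
      mArena.Free(aSize, aPtr);
    // else: let the arena reclaim everything at once in its destructor
  }
};

int main() {
  ToyShell shell;
  void* p = std::malloc(16);
  shell.FreeFrame(16, p);        // normal path: chunk goes back to the arena
}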
@@ -2001,20 +1980,13 @@ PresShell::AllocateFrame(nsQueryFrame::FrameIID aCode, size_t aSize)
void
PresShell::FreeMisc(size_t aSize, void* aPtr)
{
#ifdef DEBUG
mPresArenaAllocCount--;
#endif
if (PRESARENA_MUST_FREE_DURING_DESTROY || !mIsDestroying)
mFrameArena.FreeBySize(aSize, aPtr);
mFrameArena.Free(aSize, aPtr);
}

void*
PresShell::AllocateMisc(size_t aSize)
{
#ifdef DEBUG
mPresArenaAllocCount++;
#endif
return mFrameArena.AllocateBySize(aSize);
return mFrameArena.Allocate(aSize);
}

void
@@ -457,7 +457,7 @@ nsFrame::Destroy()
view->Destroy();
}

// Must retrieve the object ID before calling destructors, so the
// Must retrieve the object size before calling destructors, so the
// vtable is still valid.
//
// Note to future tweakers: having the method that returns the
@@ -465,12 +465,12 @@ nsFrame::Destroy()
// the compiler cannot devirtualize the call to the destructor even
// if it's from a method defined in the same class.

nsQueryFrame::FrameIID id = GetFrameId();
size_t sz = GetAllocatedSize();
this->~nsFrame();

// Now that we're totally cleaned out, we need to add ourselves to
// the presshell's recycler.
shell->FreeFrame(id, this);
shell->FreeFrame(sz, 0 /* dummy */, (void*)this);
}

NS_IMETHODIMP
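The comment in nsFrame::Destroy explains why the size (or frame ID) has to be read before the destructor runs: it comes from a virtual call, and the vtable is only guaranteed valid on a live object. A small runnable illustration of the same pattern, with Base/Derived standing in for the frame classes and global operator new/delete standing in for the shell's recycler:

#include <cstddef>
#include <cstdio>
#include <new>

struct Base {
  virtual size_t GetAllocatedSize() = 0;   // analogue of GetAllocatedSize()/GetFrameId()
  virtual ~Base() {}

  void Destroy() {
    size_t sz = GetAllocatedSize();        // must happen while the object is alive
    this->~Base();                         // run destructors explicitly (virtual dtor)
    // Now hand the raw memory back, keyed by the size we saved earlier.
    std::printf("returning %zu bytes to the recycler\n", sz);
    ::operator delete(static_cast<void*>(this));
  }
};

struct Derived : Base {
  size_t GetAllocatedSize() override { return sizeof(Derived); }
};

int main() {
  Base* obj = new (::operator new(sizeof(Derived))) Derived;
  obj->Destroy();                          // prints the derived size, then frees
}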
@@ -118,13 +118,13 @@

#define NS_DECL_FRAMEARENA_HELPERS \
NS_MUST_OVERRIDE void* operator new(size_t, nsIPresShell*); \
virtual NS_MUST_OVERRIDE nsQueryFrame::FrameIID GetFrameId();
virtual NS_MUST_OVERRIDE size_t GetAllocatedSize();

#define NS_IMPL_FRAMEARENA_HELPERS(class) \
void* class::operator new(size_t sz, nsIPresShell* aShell) \
{ return aShell->AllocateFrame(nsQueryFrame::class##_id, sz); } \
nsQueryFrame::FrameIID class::GetFrameId() \
{ return nsQueryFrame::class##_id; }
{ return aShell->AllocateFrame(sz, nsQueryFrame::class##_id); } \
size_t class::GetAllocatedSize() \
{ return sizeof(class); }

//----------------------------------------------------------------------

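For readers unfamiliar with the helper macros, the restored NS_IMPL_FRAMEARENA_HELPERS boils down to a shell-routed placement operator new plus a virtual GetAllocatedSize() that returns sizeof of the concrete class. A runnable toy version of that expansion; ToyShell, ToyBlockFrame, and the numeric type code are illustrative stand-ins for the real nsIPresShell and frame classes:

#include <cstddef>
#include <cstdlib>
#include <new>

struct ToyShell {
  // calloc stands in for the arena: memory comes back zero-filled.
  void* AllocateFrame(size_t aSize, unsigned /*aCode*/) { return std::calloc(1, aSize); }
  void FreeFrame(size_t /*aSize*/, unsigned /*aCode*/, void* aChunk) { std::free(aChunk); }
};

struct ToyFrameBase {
  virtual size_t GetAllocatedSize() = 0;   // what NS_DECL_FRAMEARENA_HELPERS declares
  virtual ~ToyFrameBase() {}
};

// Hand-written equivalent of NS_IMPL_FRAMEARENA_HELPERS(ToyBlockFrame):
struct ToyBlockFrame : ToyFrameBase {
  static void* operator new(size_t sz, ToyShell* aShell) {
    return aShell->AllocateFrame(sz, /*ToyBlockFrame_id=*/1);
  }
  static void operator delete(void*, ToyShell*) {}   // matching placement delete
  size_t GetAllocatedSize() override { return sizeof(ToyBlockFrame); }
};

int main() {
  ToyShell shell;
  ToyBlockFrame* f = new (&shell) ToyBlockFrame;   // routed through the shell
  size_t sz = f->GetAllocatedSize();               // fetched before destruction
  f->~ToyBlockFrame();
  shell.FreeFrame(sz, 1, f);
}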
@@ -251,14 +251,7 @@ public:
nsXULLabelFrame_id,
nsXULScrollFrame_id,
SpacerFrame_id,
ViewportFrame_id,

// The PresArena implementation uses this bit to distinguish
// objects allocated by size (that is, non-frames) from objects
// allocated by code (that is, frames). It should not collide
// with any frame ID. It is not 0x80000000 to avoid the question
// of whether enumeration constants are signed.
NON_FRAME_MARKER = 0x40000000
ViewportFrame_id
};

virtual void* QueryFrame(FrameIID id) = 0;
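The removed NON_FRAME_MARKER comment describes how the backed-out arena kept its two key spaces apart: frame allocations were keyed by their FrameIID enum value, while by-size (non-frame) allocations were keyed by the size with a high marker bit OR'd in, so the two kinds of key could never collide. A tiny sketch of that keying; the helper functions are illustrative, only the 0x40000000 constant comes from the diff:

#include <cassert>
#include <cstdint>

// Not 0x80000000, to avoid the question of whether enumeration constants
// are signed (per the removed comment above).
static const uint32_t NON_FRAME_MARKER = 0x40000000;

static uint32_t KeyForFrame(uint32_t aFrameId) { return aFrameId; }
static uint32_t KeyForSize(uint32_t aSize)     { return aSize | NON_FRAME_MARKER; }

int main() {
  // Frame IDs are small enum values and sizes stay well below the marker
  // bit, so the two kinds of key land in disjoint ranges.
  assert((KeyForFrame(42) & NON_FRAME_MARKER) == 0);
  assert((KeyForSize(128) & NON_FRAME_MARKER) != 0);
  assert(KeyForFrame(42) != KeyForSize(42));
}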