bug 681884 - faster slow path of GC allocations. r=wmccloskey

This commit is contained in:
Igor Bukanov 2011-08-18 09:16:08 +02:00
parent 781496deb6
commit ca0e173755
25 changed files with 843 additions and 831 deletions

View File

@ -1068,7 +1068,7 @@ JSObject::makeDenseArraySlow(JSContext *cx)
js::Shape *oldMap = lastProp;
/* Create a native scope. */
js::gc::FinalizeKind kind = js::gc::FinalizeKind(arenaHeader()->getThingKind());
gc::AllocKind kind = getAllocKind();
if (!InitScopeForObject(cx, this, &js_SlowArrayClass, getProto()->getNewType(cx), kind))
return false;
@ -3256,7 +3256,7 @@ NewArray(JSContext *cx, jsuint length, JSObject *proto)
{
JS_ASSERT_IF(proto, proto->isArray());
gc::FinalizeKind kind = GuessObjectGCKind(length, true);
gc::AllocKind kind = GuessObjectGCKind(length, true);
JSObject *obj = detail::NewObject<WithProto::Class, false>(cx, &js_ArrayClass, proto, NULL, kind);
if (!obj)
return NULL;

View File

@ -50,6 +50,37 @@ namespace gc {
struct ArenaHeader;
struct Chunk;
/* The GC allocation kinds. */
enum AllocKind {
FINALIZE_OBJECT0,
FINALIZE_OBJECT0_BACKGROUND,
FINALIZE_OBJECT2,
FINALIZE_OBJECT2_BACKGROUND,
FINALIZE_OBJECT4,
FINALIZE_OBJECT4_BACKGROUND,
FINALIZE_OBJECT8,
FINALIZE_OBJECT8_BACKGROUND,
FINALIZE_OBJECT12,
FINALIZE_OBJECT12_BACKGROUND,
FINALIZE_OBJECT16,
FINALIZE_OBJECT16_BACKGROUND,
FINALIZE_OBJECT_LAST = FINALIZE_OBJECT16_BACKGROUND,
FINALIZE_FUNCTION,
FINALIZE_FUNCTION_AND_OBJECT_LAST = FINALIZE_FUNCTION,
FINALIZE_SCRIPT,
FINALIZE_SHAPE,
FINALIZE_TYPE_OBJECT,
#if JS_HAS_XML_SUPPORT
FINALIZE_XML,
#endif
FINALIZE_SHORT_STRING,
FINALIZE_STRING,
FINALIZE_EXTERNAL_STRING,
FINALIZE_LAST = FINALIZE_EXTERNAL_STRING
};
const size_t FINALIZE_LIMIT = FINALIZE_LAST + 1;
/*
* Live objects are marked black. How many other additional colors are available
* depends on the size of the GCThing.
@ -67,6 +98,7 @@ struct Cell {
inline uintptr_t address() const;
inline ArenaHeader *arenaHeader() const;
inline Chunk *chunk() const;
inline AllocKind getAllocKind() const;
JS_ALWAYS_INLINE bool isMarked(uint32 color = BLACK) const;
JS_ALWAYS_INLINE bool markIfUnmarked(uint32 color = BLACK) const;

View File

@ -427,7 +427,7 @@ struct JSRuntime {
int64 gcNextFullGCTime;
int64 gcJitReleaseTime;
JSGCMode gcMode;
volatile bool gcIsNeeded;
volatile jsuword gcIsNeeded;
js::WeakMapBase *gcWeakMapList;
/* Pre-allocated space for the GC mark stacks. Pointer type ensures alignment. */
@ -1794,17 +1794,33 @@ class AutoXMLRooter : private AutoGCRooter {
class AutoLockGC {
public:
explicit AutoLockGC(JSRuntime *rt
explicit AutoLockGC(JSRuntime *rt = NULL
JS_GUARD_OBJECT_NOTIFIER_PARAM)
: rt(rt)
: runtime(rt)
{
JS_GUARD_OBJECT_NOTIFIER_INIT;
if (rt)
JS_LOCK_GC(rt);
}
bool locked() const {
return !!runtime;
}
void lock(JSRuntime *rt) {
JS_ASSERT(rt);
JS_ASSERT(!runtime);
runtime = rt;
JS_LOCK_GC(rt);
}
~AutoLockGC() { JS_UNLOCK_GC(rt); }
~AutoLockGC() {
if (runtime)
JS_UNLOCK_GC(runtime);
}
private:
JSRuntime *rt;
JSRuntime *runtime;
JS_DECL_USE_GUARD_OBJECT_NOTIFIER
};

View File

@ -129,9 +129,6 @@ JSCompartment::~JSCompartment()
bool
JSCompartment::init(JSContext *cx)
{
for (unsigned i = 0; i < FINALIZE_LIMIT; i++)
arenas[i].init();
activeAnalysis = activeInference = false;
types.init(cx);
@ -140,7 +137,6 @@ JSCompartment::init(JSContext *cx)
JS_InitArenaPool(&pool, "analysis", 4096 - ARENA_HEADER_SIZE_HACK, 8);
freeLists.init();
if (!crossCompartmentWrappers.init())
return false;
@ -188,16 +184,6 @@ JSCompartment::getMjitCodeStats(size_t& method, size_t& regexp, size_t& unused)
}
#endif
bool
JSCompartment::arenaListsAreEmpty()
{
for (unsigned i = 0; i < FINALIZE_LIMIT; i++) {
if (!arenas[i].isEmpty())
return false;
}
return true;
}
static bool
IsCrossCompartmentWrapper(JSObject *wrapper)
{
@ -505,10 +491,10 @@ JSCompartment::markTypes(JSTracer *trc)
MarkScript(trc, script, "mark_types_script");
}
for (unsigned thingKind = FINALIZE_OBJECT0;
for (size_t thingKind = FINALIZE_OBJECT0;
thingKind <= FINALIZE_FUNCTION_AND_OBJECT_LAST;
thingKind++) {
for (CellIterUnderGC i(this, FinalizeKind(thingKind)); !i.done(); i.next()) {
for (CellIterUnderGC i(this, AllocKind(thingKind)); !i.done(); i.next()) {
JSObject *object = i.get<JSObject>();
if (!object->isNewborn() && object->hasSingletonType())
MarkObject(trc, *object, "mark_types_singleton");
@ -652,7 +638,7 @@ JSCompartment::sweep(JSContext *cx, uint32 releaseInterval)
void
JSCompartment::purge(JSContext *cx)
{
freeLists.purge();
arenas.purge();
dtoaCache.purge();
/*

View File

@ -394,8 +394,7 @@ struct JS_FRIEND_API(JSCompartment) {
JSRuntime *rt;
JSPrincipals *principals;
js::gc::ArenaList arenas[js::gc::FINALIZE_LIMIT];
js::gc::FreeLists freeLists;
js::gc::ArenaLists arenas;
uint32 gcBytes;
uint32 gcTriggerBytes;
@ -535,12 +534,6 @@ struct JS_FRIEND_API(JSCompartment) {
void markTypes(JSTracer *trc);
void sweep(JSContext *cx, uint32 releaseInterval);
void purge(JSContext *cx);
void finishArenaLists();
void finalizeObjectArenaLists(JSContext *cx);
void finalizeStringArenaLists(JSContext *cx);
void finalizeShapeArenaLists(JSContext *cx);
void finalizeScriptArenaLists(JSContext *cx);
bool arenaListsAreEmpty();
void setGCLastBytes(size_t lastBytes, JSGCInvocationKind gckind);
void reduceGCTriggerBytes(uint32 amount);

View File

@ -4891,7 +4891,7 @@ JSParseNode::getConstantValue(JSContext *cx, bool strictChecks, Value *vp)
case TOK_RC: {
JS_ASSERT((pn_op == JSOP_NEWINIT) && !(pn_xflags & PNX_NONCONST));
gc::FinalizeKind kind = GuessObjectGCKind(pn_count, false);
gc::AllocKind kind = GuessObjectGCKind(pn_count, false);
JSObject *obj = NewBuiltinClassInstance(cx, &js_ObjectClass, kind);
if (!obj)
return false;
@ -7084,7 +7084,7 @@ js_EmitTree(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn)
*/
JSObject *obj = NULL;
if (!cg->hasSharps() && cg->compileAndGo()) {
gc::FinalizeKind kind = GuessObjectGCKind(pn->pn_count, false);
gc::AllocKind kind = GuessObjectGCKind(pn->pn_count, false);
obj = NewBuiltinClassInstance(cx, &js_ObjectClass, kind);
if (!obj)
return JS_FALSE;

View File

@ -773,7 +773,7 @@ NewCallObject(JSContext *cx, JSScript *script, JSObject &scopeChain, JSObject *c
Bindings &bindings = script->bindings;
size_t argsVars = bindings.countArgsAndVars();
size_t slots = JSObject::CALL_RESERVED_SLOTS + argsVars;
gc::FinalizeKind kind = gc::GetGCObjectKind(slots);
gc::AllocKind kind = gc::GetGCObjectKind(slots);
JSObject *callobj = js_NewGCObject(cx, kind);
if (!callobj)

File diff suppressed because it is too large Load Diff

View File

@ -80,43 +80,12 @@ namespace gc {
struct Arena;
struct MarkingDelay;
/* The kind of GC thing with a finalizer. */
enum FinalizeKind {
FINALIZE_OBJECT0,
FINALIZE_OBJECT0_BACKGROUND,
FINALIZE_OBJECT2,
FINALIZE_OBJECT2_BACKGROUND,
FINALIZE_OBJECT4,
FINALIZE_OBJECT4_BACKGROUND,
FINALIZE_OBJECT8,
FINALIZE_OBJECT8_BACKGROUND,
FINALIZE_OBJECT12,
FINALIZE_OBJECT12_BACKGROUND,
FINALIZE_OBJECT16,
FINALIZE_OBJECT16_BACKGROUND,
FINALIZE_OBJECT_LAST = FINALIZE_OBJECT16_BACKGROUND,
FINALIZE_FUNCTION,
FINALIZE_FUNCTION_AND_OBJECT_LAST = FINALIZE_FUNCTION,
FINALIZE_SCRIPT,
FINALIZE_SHAPE,
FINALIZE_TYPE_OBJECT,
#if JS_HAS_XML_SUPPORT
FINALIZE_XML,
#endif
FINALIZE_SHORT_STRING,
FINALIZE_STRING,
FINALIZE_EXTERNAL_STRING,
FINALIZE_LIMIT
};
/*
* This must be an upper bound, but we do not need the least upper bound, so
* we just exclude non-background objects.
*/
const size_t MAX_BACKGROUND_FINALIZE_KINDS = FINALIZE_LIMIT - (FINALIZE_OBJECT_LAST + 1) / 2;
extern JS_FRIEND_DATA(const uint8) GCThingSizeMap[];
const size_t ArenaShift = 12;
const size_t ArenaSize = size_t(1) << ArenaShift;
const size_t ArenaMask = ArenaSize - 1;
@ -174,7 +143,7 @@ struct FreeSpan {
* To minimize the size of the arena header the first span is encoded
* there as offsets from the arena start.
*/
static size_t encodeOffsets(size_t firstOffset, size_t lastOffset = ArenaSize - 1) {
static size_t encodeOffsets(size_t firstOffset, size_t lastOffset) {
/* Check that we can pack the offsets into uint16. */
JS_STATIC_ASSERT(ArenaShift < 16);
JS_ASSERT(firstOffset <= ArenaSize);
@ -183,7 +152,11 @@ struct FreeSpan {
return firstOffset | (lastOffset << 16);
}
static const size_t EmptyOffsets = ArenaSize | ((ArenaSize - 1) << 16);
/*
* Encoded offsets for a full arena when its first span is the last one
* and empty.
*/
static const size_t FullArenaOffsets = ArenaSize | ((ArenaSize - 1) << 16);
static FreeSpan decodeOffsets(uintptr_t arenaAddr, size_t offsets) {
JS_ASSERT(!(arenaAddr & ArenaMask));
@ -287,6 +260,37 @@ struct FreeSpan {
return reinterpret_cast<void *>(thing);
}
/* A version of allocate when we know that the span is not empty. */
JS_ALWAYS_INLINE void *infallibleAllocate(size_t thingSize) {
JS_ASSERT(thingSize % Cell::CellSize == 0);
checkSpan();
uintptr_t thing = first;
if (thing < last) {
first = thing + thingSize;
} else {
JS_ASSERT(thing == last);
*this = *reinterpret_cast<FreeSpan *>(thing);
}
checkSpan();
return reinterpret_cast<void *>(thing);
}
/*
* Allocate from a newly allocated arena. We do not move the free list
* from the arena. Rather we set the arena up as fully used during the
* initialization so to allocate we simply return the first thing in the
* arena and set the free list to point to the second.
*/
JS_ALWAYS_INLINE void *allocateFromNewArena(uintptr_t arenaAddr, size_t firstThingOffset,
size_t thingSize) {
JS_ASSERT(!(arenaAddr & ArenaMask));
uintptr_t thing = arenaAddr | firstThingOffset;
first = thing + thingSize;
last = arenaAddr | ArenaMask;
checkSpan();
return reinterpret_cast<void *>(thing);
}
void checkSpan() const {
#ifdef DEBUG
/* We do not allow spans at the end of the address space. */
@ -365,13 +369,13 @@ struct ArenaHeader {
size_t firstFreeSpanOffsets;
/*
* One of FinalizeKind constants or FINALIZE_LIMIT when the arena does not
* One of AllocKind constants or FINALIZE_LIMIT when the arena does not
* contain any GC things and is on the list of empty arenas in the GC
* chunk. The later allows to quickly check if the arena is allocated
* chunk. The latter allows to quickly check if the arena is allocated
* during the conservative GC scanning without searching the arena in the
* list.
*/
unsigned thingKind;
unsigned allocKind;
friend struct FreeLists;
@ -380,14 +384,15 @@ struct ArenaHeader {
inline Chunk *chunk() const;
void setAsNotAllocated() {
thingKind = FINALIZE_LIMIT;
allocKind = FINALIZE_LIMIT;
}
bool allocated() const {
return thingKind < FINALIZE_LIMIT;
JS_ASSERT(allocKind <= FINALIZE_LIMIT);
return allocKind < FINALIZE_LIMIT;
}
inline void init(JSCompartment *comp, unsigned thingKind, size_t thingSize);
inline void init(JSCompartment *comp, AllocKind kind);
uintptr_t arenaAddress() const {
return address();
@ -397,17 +402,21 @@ struct ArenaHeader {
return reinterpret_cast<Arena *>(arenaAddress());
}
unsigned getThingKind() const {
AllocKind getAllocKind() const {
JS_ASSERT(allocated());
return thingKind;
return AllocKind(allocKind);
}
inline size_t getThingSize() const;
bool hasFreeThings() const {
return firstFreeSpanOffsets != FreeSpan::EmptyOffsets;
return firstFreeSpanOffsets != FreeSpan::FullArenaOffsets;
}
inline bool isEmpty() const;
void setAsFullyUsed() {
firstFreeSpanOffsets = FreeSpan::EmptyOffsets;
firstFreeSpanOffsets = FreeSpan::FullArenaOffsets;
}
FreeSpan getFirstFreeSpan() const {
@ -424,10 +433,6 @@ struct ArenaHeader {
inline MarkingDelay *getMarkingDelay() const;
size_t getThingSize() const {
return GCThingSizeMap[getThingKind()];
}
#ifdef DEBUG
void checkSynchronizedWithFreeList() const;
#endif
@ -446,13 +451,24 @@ struct Arena {
* +-------------+-----+----+----+-----+----+
*
* <----------------------------------------> = ArenaSize bytes
* <-------------------> = thingsStartOffset
* <-------------------> = first thing offset
*/
ArenaHeader aheader;
uint8_t data[ArenaSize - sizeof(ArenaHeader)];
static void staticAsserts() {
JS_STATIC_ASSERT(sizeof(Arena) == ArenaSize);
private:
static JS_FRIEND_DATA(const uint32) ThingSizes[];
static JS_FRIEND_DATA(const uint32) FirstThingOffsets[];
public:
static void staticAsserts();
static size_t thingSize(AllocKind kind) {
return ThingSizes[kind];
}
static size_t firstThingOffset(AllocKind kind) {
return FirstThingOffsets[kind];
}
static size_t thingsPerArena(size_t thingSize) {
@ -461,9 +477,6 @@ struct Arena {
/* We should be able to fit FreeSpan in any GC thing. */
JS_ASSERT(thingSize >= sizeof(FreeSpan));
/* GCThingSizeMap assumes that any thing fits uint8. */
JS_ASSERT(thingSize < 256);
return (ArenaSize - sizeof(ArenaHeader)) / thingSize;
}
@ -471,10 +484,6 @@ struct Arena {
return thingsPerArena(thingSize) * thingSize;
}
static size_t thingsStartOffset(size_t thingSize) {
return ArenaSize - thingsSpan(thingSize);
}
static bool isAligned(uintptr_t thing, size_t thingSize) {
/* Things end at the arena end. */
uintptr_t tailOffset = (ArenaSize - thing) & ArenaMask;
@ -485,8 +494,8 @@ struct Arena {
return aheader.address();
}
uintptr_t thingsStart(size_t thingSize) {
return address() | thingsStartOffset(thingSize);
uintptr_t thingsStart(AllocKind thingKind) {
return address() | firstThingOffset(thingKind);
}
uintptr_t thingsEnd() {
@ -494,7 +503,7 @@ struct Arena {
}
template <typename T>
bool finalize(JSContext *cx);
bool finalize(JSContext *cx, AllocKind thingKind, size_t thingSize);
};
/*
@ -641,8 +650,7 @@ struct Chunk {
inline void addToAvailableList(JSCompartment *compartment);
inline void removeFromAvailableList();
template <size_t thingSize>
ArenaHeader *allocateArena(JSContext *cx, unsigned thingKind);
ArenaHeader *allocateArena(JSContext *cx, AllocKind kind);
void releaseArena(ArenaHeader *aheader);
};
@ -676,6 +684,12 @@ Cell::chunk() const
return reinterpret_cast<Chunk *>(addr);
}
AllocKind
Cell::getAllocKind() const
{
return arenaHeader()->getAllocKind();
}
#ifdef DEBUG
inline bool
Cell::isAligned() const
@ -685,13 +699,15 @@ Cell::isAligned() const
#endif
inline void
ArenaHeader::init(JSCompartment *comp, unsigned kind, size_t thingSize)
ArenaHeader::init(JSCompartment *comp, AllocKind kind)
{
JS_ASSERT(!allocated());
JS_ASSERT(!getMarkingDelay()->link);
compartment = comp;
thingKind = kind;
firstFreeSpanOffsets = FreeSpan::encodeOffsets(Arena::thingsStartOffset(thingSize));
allocKind = kind;
/* See comments in FreeSpan::allocateFromNewArena. */
firstFreeSpanOffsets = FreeSpan::FullArenaOffsets;
}
inline uintptr_t
@ -709,6 +725,22 @@ ArenaHeader::chunk() const
return Chunk::fromAddress(address());
}
inline bool
ArenaHeader::isEmpty() const
{
/* Arena is empty if its first span covers the whole arena. */
JS_ASSERT(allocated());
size_t firstThingOffset = Arena::firstThingOffset(getAllocKind());
return firstFreeSpanOffsets == FreeSpan::encodeOffsets(firstThingOffset, ArenaMask);
}
inline size_t
ArenaHeader::getThingSize() const
{
JS_ASSERT(allocated());
return Arena::thingSize(getAllocKind());
}
JS_ALWAYS_INLINE void
ChunkBitmap::getMarkWordAndMask(const Cell *cell, uint32 color,
uintptr_t **wordp, uintptr_t *maskp)
@ -779,7 +811,7 @@ const float GC_HEAP_GROWTH_FACTOR = 3.0f;
static const int64 GC_IDLE_FULL_SPAN = 20 * 1000 * 1000;
static inline JSGCTraceKind
GetFinalizableTraceKind(size_t thingKind)
MapAllocToTraceKind(AllocKind thingKind)
{
static const JSGCTraceKind map[FINALIZE_LIMIT] = {
JSTRACE_OBJECT, /* FINALIZE_OBJECT0 */
@ -805,8 +837,6 @@ GetFinalizableTraceKind(size_t thingKind)
JSTRACE_STRING, /* FINALIZE_STRING */
JSTRACE_STRING, /* FINALIZE_EXTERNAL_STRING */
};
JS_ASSERT(thingKind < FINALIZE_LIMIT);
return map[thingKind];
}
@ -819,11 +849,46 @@ GetGCThingRuntime(void *thing)
return reinterpret_cast<Cell *>(thing)->chunk()->info.runtime;
}
/* The arenas in a list have uniform kind. */
class ArenaList {
struct ArenaLists {
/*
* ArenaList::head points to the start of the list. Normally cursor points
* to the first arena in the list with some free things and all arenas
* before cursor are fully allocated. However, as the arena currently being
* allocated from is considered full while its list of free spans is moved
* into the freeList, during the GC or cell enumeration, when an
* unallocated freeList is moved back to the arena, we can see an arena
* with some free cells before the cursor. The cursor is an indirect
* pointer to allow for efficient list insertion at the cursor point and
* other list manipulations.
*/
struct ArenaList {
ArenaHeader *head;
ArenaHeader **cursor;
ArenaList() {
clear();
}
void clear() {
head = NULL;
cursor = &head;
}
};
private:
ArenaHeader *head; /* list start */
ArenaHeader **cursor; /* arena with free things */
/*
* For each arena kind its free list is represented as the first span with
* free things. Initially all the spans are initialized as empty. After we
* find a new arena with available things we move its first free span into
* the list and set the arena as fully allocated. This way we do not need to
* update the arena header after the initial allocation. When starting the
* GC we only move the head of the list of spans back to the arena
* for the arena that was not fully allocated.
*/
FreeSpan freeLists[FINALIZE_LIMIT];
ArenaList arenaLists[FINALIZE_LIMIT];
#ifdef JS_THREADSAFE
/*
@ -848,116 +913,95 @@ class ArenaList {
BFS_JUST_FINISHED
};
volatile BackgroundFinalizeState backgroundFinalizeState;
volatile uintptr_t backgroundFinalizeState[FINALIZE_LIMIT];
#endif
public:
void init() {
head = NULL;
cursor = &head;
ArenaLists() {
for (size_t i = 0; i != FINALIZE_LIMIT; ++i)
freeLists[i].initAsEmpty();
#ifdef JS_THREADSAFE
backgroundFinalizeState = BFS_DONE;
for (size_t i = 0; i != FINALIZE_LIMIT; ++i)
backgroundFinalizeState[i] = BFS_DONE;
#endif
}
ArenaHeader *getHead() { return head; }
inline ArenaHeader *searchForFreeArena();
template <size_t thingSize>
inline ArenaHeader *getArenaWithFreeList(JSContext *cx, unsigned thingKind);
template<typename T>
void finalizeNow(JSContext *cx);
~ArenaLists() {
for (size_t i = 0; i != FINALIZE_LIMIT; ++i) {
#ifdef JS_THREADSAFE
template<typename T>
inline void finalizeLater(JSContext *cx);
static void backgroundFinalize(JSContext *cx, ArenaHeader *listHead);
bool willBeFinalizedLater() const {
return backgroundFinalizeState == BFS_RUN;
}
bool doneBackgroundFinalize() const {
return backgroundFinalizeState == BFS_DONE;
}
/*
* We can only call this during the shutdown after the last GC when
* the background finalization is disabled.
*/
JS_ASSERT(backgroundFinalizeState[i] == BFS_DONE);
#endif
ArenaHeader **headp = &arenaLists[i].head;
while (ArenaHeader *aheader = *headp) {
*headp = aheader->next;
aheader->chunk()->releaseArena(aheader);
}
}
}
const FreeSpan *getFreeList(AllocKind thingKind) const {
return &freeLists[thingKind];
}
ArenaHeader *getFirstArena(AllocKind thingKind) const {
return arenaLists[thingKind].head;
}
bool arenaListsAreEmpty() const {
for (size_t i = 0; i != FINALIZE_LIMIT; ++i) {
#ifdef JS_THREADSAFE
/*
* The arena cannot be empty if the background finalization is not yet
* done.
*/
if (backgroundFinalizeState[i] != BFS_DONE)
return false;
#endif
if (arenaLists[i].head)
return false;
}
return true;
}
#ifdef DEBUG
bool markedThingsInArenaList() {
bool checkArenaListAllUnmarked() const {
for (size_t i = 0; i != FINALIZE_LIMIT; ++i) {
# ifdef JS_THREADSAFE
/* The background finalization must have stopped at this point. */
JS_ASSERT(backgroundFinalizeState == BFS_DONE ||
backgroundFinalizeState == BFS_JUST_FINISHED);
/* The background finalization must have stopped at this point. */
JS_ASSERT(backgroundFinalizeState[i] == BFS_DONE ||
backgroundFinalizeState[i] == BFS_JUST_FINISHED);
# endif
for (ArenaHeader *aheader = head; aheader; aheader = aheader->next) {
if (!aheader->chunk()->bitmap.noBitsSet(aheader))
return true;
for (ArenaHeader *aheader = arenaLists[i].head; aheader; aheader = aheader->next) {
if (!aheader->chunk()->bitmap.noBitsSet(aheader))
return false;
}
}
return false;
return true;
}
#endif /* DEBUG */
void releaseAll(unsigned thingKind) {
# ifdef JS_THREADSAFE
/*
* We can only call this during the shutdown after the last GC when
* the background finalization is disabled.
*/
JS_ASSERT(backgroundFinalizeState == BFS_DONE);
# endif
while (ArenaHeader *aheader = head) {
head = aheader->next;
aheader->chunk()->releaseArena(aheader);
}
cursor = &head;
}
bool isEmpty() const {
#ifdef JS_THREADSAFE
/*
* The arena cannot be empty if the background finalization is not yet
* done.
*/
if (backgroundFinalizeState != BFS_DONE)
return false;
#endif
return !head;
}
};
struct FreeLists {
/*
* For each arena kind its free list is represented as the first span with
* free things. Initially all the spans are zeroed to be treated as empty
* spans by the allocation code. After we find a new arena with available
* things we copy its first free span into the list and set the arena as
* if it has no free things. This way we do not need to update the arena
* header after the initial allocation. When starting the GC We only move
* the head of the of the list of spans back to the arena only for the
* arena that was not fully allocated.
*/
FreeSpan lists[FINALIZE_LIMIT];
void init() {
for (size_t i = 0; i != JS_ARRAY_LENGTH(lists); ++i)
lists[i].initAsEmpty();
#ifdef JS_THREADSAFE
bool doneBackgroundFinalize(AllocKind kind) const {
return backgroundFinalizeState[kind] == BFS_DONE;
}
#endif
/*
* Return the free list back to the arena so the GC finalization will not
* run the finalizers over uninitialized bytes from free things.
*/
void purge() {
for (size_t i = 0; i != size_t(FINALIZE_LIMIT); ++i) {
FreeSpan *list = &lists[i];
if (!list->isEmpty()) {
ArenaHeader *aheader = list->arenaHeader();
for (size_t i = 0; i != FINALIZE_LIMIT; ++i) {
FreeSpan *headSpan = &freeLists[i];
if (!headSpan->isEmpty()) {
ArenaHeader *aheader = headSpan->arenaHeader();
JS_ASSERT(!aheader->hasFreeThings());
aheader->setFirstFreeSpan(list);
list->initAsEmpty();
aheader->setFirstFreeSpan(headSpan);
headSpan->initAsEmpty();
}
}
}
@ -967,17 +1011,17 @@ struct FreeLists {
* the proper value in ArenaHeader::freeList when accessing the latter
* outside the GC.
*/
void copyToArenas() {
for (size_t i = 0; i != size_t(FINALIZE_LIMIT); ++i)
copyToArena(FinalizeKind(i));
void copyFreeListsToArenas() {
for (size_t i = 0; i != FINALIZE_LIMIT; ++i)
copyFreeListToArena(AllocKind(i));
}
void copyToArena(FinalizeKind thingKind) {
FreeSpan *list = &lists[thingKind];
if (!list->isEmpty()) {
ArenaHeader *aheader = list->arenaHeader();
void copyFreeListToArena(AllocKind thingKind) {
FreeSpan *headSpan = &freeLists[thingKind];
if (!headSpan->isEmpty()) {
ArenaHeader *aheader = headSpan->arenaHeader();
JS_ASSERT(!aheader->hasFreeThings());
aheader->setFirstFreeSpan(list);
aheader->setFirstFreeSpan(headSpan);
}
}
@ -985,17 +1029,17 @@ struct FreeLists {
* Clear the free lists in arenas that were temporarily set there using
* copyFreeListsToArenas.
*/
void clearInArenas() {
for (size_t i = 0; i != size_t(FINALIZE_LIMIT); ++i)
clearInArena(FinalizeKind(i));
void clearFreeListsInArenas() {
for (size_t i = 0; i != FINALIZE_LIMIT; ++i)
clearFreeListInArena(AllocKind(i));
}
void clearInArena(FinalizeKind thingKind) {
FreeSpan *list = &lists[thingKind];
if (!list->isEmpty()) {
ArenaHeader *aheader = list->arenaHeader();
JS_ASSERT(aheader->getFirstFreeSpan().isSameNonEmptySpan(list));
void clearFreeListInArena(AllocKind kind) {
FreeSpan *headSpan = &freeLists[kind];
if (!headSpan->isEmpty()) {
ArenaHeader *aheader = headSpan->arenaHeader();
JS_ASSERT(aheader->getFirstFreeSpan().isSameNonEmptySpan(headSpan));
aheader->setAsFullyUsed();
}
}
@ -1004,45 +1048,54 @@ struct FreeLists {
* Check that the free list is either empty or was synchronized with the
* arena using copyFreeListToArena().
*/
bool isSynchronizedWithArena(FinalizeKind thingKind) {
FreeSpan *list = &lists[thingKind];
if (list->isEmpty())
bool isSynchronizedFreeList(AllocKind kind) {
FreeSpan *headSpan = &freeLists[kind];
if (headSpan->isEmpty())
return true;
ArenaHeader *aheader = list->arenaHeader();
ArenaHeader *aheader = headSpan->arenaHeader();
if (aheader->hasFreeThings()) {
/*
* If the arena has a free list, it must be the same as one in
* freeLists.
*/
JS_ASSERT(aheader->getFirstFreeSpan().isSameNonEmptySpan(list));
*/
JS_ASSERT(aheader->getFirstFreeSpan().isSameNonEmptySpan(headSpan));
return true;
}
return false;
}
JS_ALWAYS_INLINE void *getNext(unsigned thingKind, size_t thingSize) {
return lists[thingKind].allocate(thingSize);
JS_ALWAYS_INLINE void *allocateFromFreeList(AllocKind thingKind, size_t thingSize) {
return freeLists[thingKind].allocate(thingSize);
}
void *populate(ArenaHeader *aheader, unsigned thingKind, size_t thingSize) {
FreeSpan *list = &lists[thingKind];
*list = aheader->getFirstFreeSpan();
aheader->setAsFullyUsed();
void *t = list->allocate(thingSize);
JS_ASSERT(t);
return t;
}
static void *refillFreeList(JSContext *cx, AllocKind thingKind);
void checkEmpty() {
void checkEmptyFreeLists() {
#ifdef DEBUG
for (size_t i = 0; i != JS_ARRAY_LENGTH(lists); ++i)
JS_ASSERT(lists[i].isEmpty());
for (size_t i = 0; i != JS_ARRAY_LENGTH(freeLists); ++i)
JS_ASSERT(freeLists[i].isEmpty());
#endif
}
};
extern void *
RefillFinalizableFreeList(JSContext *cx, unsigned thingKind);
void checkEmptyFreeList(AllocKind kind) {
JS_ASSERT(freeLists[kind].isEmpty());
}
void finalizeObjects(JSContext *cx);
void finalizeStrings(JSContext *cx);
void finalizeShapes(JSContext *cx);
void finalizeScripts(JSContext *cx);
#ifdef JS_THREADSAFE
static void backgroundFinalize(JSContext *cx, ArenaHeader *listHead);
private:
inline void finalizeNow(JSContext *cx, AllocKind thingKind);
inline void finalizeLater(JSContext *cx, AllocKind thingKind);
inline void *allocateFromArena(JSContext *cx, AllocKind thingKind);
#endif
};
/*
* Initial allocation size for data structures holding chunks is set to hold
@ -1254,7 +1307,7 @@ class GCHelperThread {
Vector<js::gc::ArenaHeader *, 64, js::SystemAllocPolicy> finalizeVector;
friend class js::gc::ArenaList;
friend struct js::gc::ArenaLists;
JS_FRIEND_API(void)
replenishAndFreeLater(void *ptr);
@ -1520,7 +1573,7 @@ IterateCompartmentsArenasCells(JSContext *cx, void *data,
* the given compartment or for all compartments if it is null.
*/
extern JS_FRIEND_API(void)
IterateCells(JSContext *cx, JSCompartment *compartment, gc::FinalizeKind thingKind,
IterateCells(JSContext *cx, JSCompartment *compartment, gc::AllocKind thingKind,
void *data, IterateCellCallback cellCallback);
} /* namespace js */

View File

@ -119,17 +119,17 @@ GetGCThingTraceKind(const void *thing)
if (JSAtom::isStatic(thing))
return JSTRACE_STRING;
const Cell *cell = reinterpret_cast<const Cell *>(thing);
return GetFinalizableTraceKind(cell->arenaHeader()->getThingKind());
return MapAllocToTraceKind(cell->getAllocKind());
}
/* Capacity for slotsToThingKind */
const size_t SLOTS_TO_THING_KIND_LIMIT = 17;
/* Get the best kind to use when making an object with the given slot count. */
static inline FinalizeKind
static inline AllocKind
GetGCObjectKind(size_t numSlots, bool isArray = false)
{
extern FinalizeKind slotsToThingKind[];
extern AllocKind slotsToThingKind[];
if (numSlots >= SLOTS_TO_THING_KIND_LIMIT) {
/*
@ -144,37 +144,36 @@ GetGCObjectKind(size_t numSlots, bool isArray = false)
}
static inline bool
IsBackgroundFinalizeKind(FinalizeKind kind)
IsBackgroundAllocKind(AllocKind kind)
{
JS_ASSERT(kind <= FINALIZE_OBJECT_LAST);
return kind % 2 == 1;
}
static inline FinalizeKind
GetBackgroundFinalizeKind(FinalizeKind kind)
static inline AllocKind
GetBackgroundAllocKind(AllocKind kind)
{
JS_ASSERT(!IsBackgroundFinalizeKind(kind));
return (FinalizeKind) (kind + 1);
JS_ASSERT(!IsBackgroundAllocKind(kind));
return (AllocKind) (kind + 1);
}
/*
* Try to get the next larger size for an object, keeping BACKGROUND
* consistent.
*/
static inline bool
CanBumpFinalizeKind(FinalizeKind kind)
TryIncrementAllocKind(AllocKind *kindp)
{
JS_ASSERT(kind <= FINALIZE_OBJECT_LAST);
return (kind + 2) <= FINALIZE_OBJECT_LAST;
}
/* Get the next larger size for an object, keeping BACKGROUND consistent. */
static inline FinalizeKind
BumpFinalizeKind(FinalizeKind kind)
{
JS_ASSERT(CanBumpFinalizeKind(kind));
return (FinalizeKind) (kind + 2);
size_t next = size_t(*kindp) + 2;
if (next > size_t(FINALIZE_OBJECT_LAST))
return false;
*kindp = AllocKind(next);
return true;
}
/* Get the number of fixed slots and initial capacity associated with a kind. */
static inline size_t
GetGCKindSlots(FinalizeKind thingKind)
GetGCKindSlots(AllocKind thingKind)
{
/* Using a switch in hopes that thingKind will usually be a compile-time constant. */
switch (thingKind) {
@ -229,11 +228,11 @@ GCPoke(JSContext *cx, Value oldval)
*/
template <class ArenaOp, class CellOp>
void
ForEachArenaAndCell(JSCompartment *compartment, FinalizeKind thingKind,
ForEachArenaAndCell(JSCompartment *compartment, AllocKind thingKind,
ArenaOp arenaOp, CellOp cellOp)
{
size_t thingSize = GCThingSizeMap[thingKind];
ArenaHeader *aheader = compartment->arenas[thingKind].getHead();
size_t thingSize = Arena::thingSize(thingKind);
ArenaHeader *aheader = compartment->arenas.getFirstArena(thingKind);
for (; aheader; aheader = aheader->next) {
Arena *arena = aheader->getArena();
@ -241,7 +240,7 @@ ForEachArenaAndCell(JSCompartment *compartment, FinalizeKind thingKind,
FreeSpan firstSpan(aheader->getFirstFreeSpan());
const FreeSpan *span = &firstSpan;
for (uintptr_t thing = arena->thingsStart(thingSize); ; thing += thingSize) {
for (uintptr_t thing = arena->thingsStart(thingKind); ; thing += thingSize) {
JS_ASSERT(thing <= arena->thingsEnd());
if (thing == span->first) {
if (!span->hasNext())
@ -258,6 +257,7 @@ ForEachArenaAndCell(JSCompartment *compartment, FinalizeKind thingKind,
class CellIterImpl
{
size_t firstThingOffset;
size_t thingSize;
ArenaHeader *aheader;
FreeSpan firstSpan;
@ -269,9 +269,10 @@ class CellIterImpl
CellIterImpl() {
}
void init(JSCompartment *comp, FinalizeKind thingKind) {
thingSize = GCThingSizeMap[thingKind];
aheader = comp->arenas[thingKind].getHead();
void init(JSCompartment *comp, AllocKind kind) {
firstThingOffset = Arena::firstThingOffset(kind);
thingSize = Arena::thingSize(kind);
aheader = comp->arenas.getFirstArena(kind);
firstSpan.initAsEmpty();
span = &firstSpan;
thing = span->first;
@ -308,7 +309,7 @@ class CellIterImpl
}
firstSpan = aheader->getFirstFreeSpan();
span = &firstSpan;
thing = aheader->getArena()->thingsStart(thingSize);
thing = aheader->arenaAddress() | firstThingOffset;
aheader = aheader->next;
}
cell = reinterpret_cast<Cell *>(thing);
@ -319,10 +320,10 @@ class CellIterImpl
class CellIterUnderGC : public CellIterImpl {
public:
CellIterUnderGC(JSCompartment *comp, FinalizeKind thingKind) {
CellIterUnderGC(JSCompartment *comp, AllocKind kind) {
JS_ASSERT(comp->rt->gcRunning);
JS_ASSERT(comp->freeLists.lists[thingKind].isEmpty());
init(comp, thingKind);
comp->arenas.checkEmptyFreeList(kind);
init(comp, kind);
}
};
@ -333,29 +334,29 @@ class CellIterUnderGC : public CellIterImpl {
*/
class CellIter: public CellIterImpl
{
FreeLists *lists;
FinalizeKind thingKind;
ArenaLists *lists;
AllocKind kind;
#ifdef DEBUG
size_t *counter;
#endif
public:
CellIter(JSContext *cx, JSCompartment *comp, FinalizeKind thingKind)
: lists(&comp->freeLists),
thingKind(thingKind) {
CellIter(JSContext *cx, JSCompartment *comp, AllocKind kind)
: lists(&comp->arenas),
kind(kind) {
#ifdef JS_THREADSAFE
JS_ASSERT(comp->arenas[thingKind].doneBackgroundFinalize());
JS_ASSERT(comp->arenas.doneBackgroundFinalize(kind));
#endif
if (lists->isSynchronizedWithArena(thingKind)) {
if (lists->isSynchronizedFreeList(kind)) {
lists = NULL;
} else {
JS_ASSERT(!comp->rt->gcRunning);
lists->copyToArena(thingKind);
lists->copyFreeListToArena(kind);
}
#ifdef DEBUG
counter = &JS_THREAD_DATA(cx)->noGCOrAllocationCheck;
++*counter;
#endif
init(comp, thingKind);
init(comp, kind);
}
~CellIter() {
@ -364,7 +365,7 @@ class CellIter: public CellIterImpl
--*counter;
#endif
if (lists)
lists->clearInArena(thingKind);
lists->clearFreeListInArena(kind);
}
};
@ -385,14 +386,12 @@ inline void EmptyCellOp(Cell *t) {}
template <typename T>
inline T *
NewGCThing(JSContext *cx, unsigned thingKind, size_t thingSize)
NewGCThing(JSContext *cx, js::gc::AllocKind kind, size_t thingSize)
{
JS_ASSERT(thingKind < js::gc::FINALIZE_LIMIT);
JS_ASSERT(thingSize == js::gc::GCThingSizeMap[thingKind]);
JS_ASSERT(thingSize == js::gc::Arena::thingSize(kind));
#ifdef JS_THREADSAFE
JS_ASSERT_IF((cx->compartment == cx->runtime->atomsCompartment),
(thingKind == js::gc::FINALIZE_STRING) ||
(thingKind == js::gc::FINALIZE_SHORT_STRING));
kind == js::gc::FINALIZE_STRING || kind == js::gc::FINALIZE_SHORT_STRING);
#endif
JS_ASSERT(!cx->runtime->gcRunning);
JS_ASSERT(!JS_THREAD_DATA(cx)->noGCOrAllocationCheck);
@ -402,15 +401,15 @@ NewGCThing(JSContext *cx, unsigned thingKind, size_t thingSize)
js::gc::RunDebugGC(cx);
#endif
void *t = cx->compartment->freeLists.getNext(thingKind, thingSize);
return static_cast<T *>(t ? t : js::gc::RefillFinalizableFreeList(cx, thingKind));
void *t = cx->compartment->arenas.allocateFromFreeList(kind, thingSize);
return static_cast<T *>(t ? t : js::gc::ArenaLists::refillFreeList(cx, kind));
}
inline JSObject *
js_NewGCObject(JSContext *cx, js::gc::FinalizeKind kind)
js_NewGCObject(JSContext *cx, js::gc::AllocKind kind)
{
JS_ASSERT(kind >= js::gc::FINALIZE_OBJECT0 && kind <= js::gc::FINALIZE_OBJECT_LAST);
JSObject *obj = NewGCThing<JSObject>(cx, kind, js::gc::GCThingSizeMap[kind]);
JSObject *obj = NewGCThing<JSObject>(cx, kind, js::gc::Arena::thingSize(kind));
if (obj)
obj->earlyInit(js::gc::GetGCKindSlots(kind));
return obj;

View File

@ -71,7 +71,6 @@ ConservativeGCStats::dump(FILE *fp)
fprintf(fp, " not withing a chunk: %lu\n", ULSTAT(counter[CGCT_NOTCHUNK]));
fprintf(fp, " not within arena range: %lu\n", ULSTAT(counter[CGCT_NOTARENA]));
fprintf(fp, " points to free arena: %lu\n", ULSTAT(counter[CGCT_FREEARENA]));
fprintf(fp, " excluded, wrong tag: %lu\n", ULSTAT(counter[CGCT_WRONGTAG]));
fprintf(fp, " excluded, not live: %lu\n", ULSTAT(counter[CGCT_NOTLIVE]));
fprintf(fp, " valid GC things: %lu\n", ULSTAT(counter[CGCT_VALID]));
fprintf(fp, " valid but not aligned: %lu\n", ULSTAT(unaligned));
@ -204,7 +203,7 @@ GCMarker::dumpConservativeRoots()
volatile GCTimer::JSGCReason gcReason = GCTimer::NOREASON;
const char *gcReasons[] = {" API", "Maybe", "LastC", "DestC", "Compa", "LastD",
"Malloc", "Alloc", "Chunk", "Shape", " None"};
"Malloc", "Refill", "Chunk", "Shape", " None"};
jsrefcount newChunkCount = 0;
jsrefcount destroyChunkCount = 0;

View File

@ -99,7 +99,6 @@ enum ConservativeGCTest
CGCT_NOTARENA, /* not within arena range in a chunk */
CGCT_NOTCHUNK, /* not within a valid chunk */
CGCT_FREEARENA, /* within arena containing only free things */
CGCT_WRONGTAG, /* tagged pointer but wrong type */
CGCT_NOTLIVE, /* gcthing is not allocated */
CGCT_END
};
@ -162,7 +161,7 @@ struct GCTimer
LASTDITCH,
TOOMUCHMALLOC,
ALLOCTRIGGER,
CHUNK,
REFILL,
SHAPE,
NOREASON
};

View File

@ -2042,9 +2042,9 @@ TypeCompartment::nukeTypes(JSContext *cx)
*/
#ifdef JS_THREADSAFE
Maybe<AutoLockGC> maybeLock;
AutoLockGC maybeLock;
if (!cx->runtime->gcMarkAndSweep)
maybeLock.construct(cx->runtime);
maybeLock.lock(cx->runtime);
#endif
inferenceEnabled = false;
@ -4411,7 +4411,7 @@ CheckNewScriptProperties(JSContext *cx, TypeObject *type, JSScript *script)
return;
}
gc::FinalizeKind kind = gc::GetGCObjectKind(baseobj->slotSpan());
gc::AllocKind kind = gc::GetGCObjectKind(baseobj->slotSpan());
/* We should not have overflowed the maximum number of fixed slots for an object. */
JS_ASSERT(gc::GetGCKindSlots(kind) >= baseobj->slotSpan());
@ -4441,7 +4441,7 @@ CheckNewScriptProperties(JSContext *cx, TypeObject *type, JSScript *script)
}
type->newScript->script = script;
type->newScript->finalizeKind = unsigned(kind);
type->newScript->allocKind = kind;
type->newScript->shape = baseobj->lastProperty();
type->newScript->initializerList = (TypeNewScript::Initializer *)

View File

@ -646,8 +646,8 @@ struct TypeNewScript
{
JSScript *script;
/* Finalize kind to use for newly constructed objects. */
/* gc::FinalizeKind */ unsigned finalizeKind;
/* Allocation kind to use for newly constructed objects. */
gc::AllocKind allocKind;
/*
* Shape to use for newly constructed objects. Reflects all definite
@ -806,8 +806,7 @@ struct TypeObject : gc::Cell
* used as the scope of a new object whose prototype is |proto|.
*/
inline bool canProvideEmptyShape(js::Class *clasp);
inline js::EmptyShape *getEmptyShape(JSContext *cx, js::Class *aclasp,
/* gc::FinalizeKind */ unsigned kind);
inline js::EmptyShape *getEmptyShape(JSContext *cx, js::Class *aclasp, gc::AllocKind kind);
/*
* Get or create a property of this object. Only call this for properties which

View File

@ -5238,7 +5238,7 @@ BEGIN_CASE(JSOP_NEWINIT)
if (i == JSProto_Array) {
obj = NewDenseEmptyArray(cx);
} else {
gc::FinalizeKind kind = GuessObjectGCKind(0, false);
gc::AllocKind kind = GuessObjectGCKind(0, false);
obj = NewBuiltinClassInstance(cx, &js_ObjectClass, kind);
}

View File

@ -2913,7 +2913,7 @@ js_Object(JSContext *cx, uintN argc, Value *vp)
if (!obj) {
/* Make an object whether this was called with 'new' or not. */
JS_ASSERT(!argc || vp[2].isNull() || vp[2].isUndefined());
gc::FinalizeKind kind = NewObjectGCKind(cx, &js_ObjectClass);
gc::AllocKind kind = NewObjectGCKind(cx, &js_ObjectClass);
obj = NewBuiltinClassInstance(cx, &js_ObjectClass, kind);
if (!obj)
return JS_FALSE;
@ -2928,7 +2928,7 @@ js_Object(JSContext *cx, uintN argc, Value *vp)
JSObject *
js::NewReshapedObject(JSContext *cx, TypeObject *type, JSObject *parent,
gc::FinalizeKind kind, const Shape *shape)
gc::AllocKind kind, const Shape *shape)
{
JSObject *res = NewObjectWithType(cx, type, parent, kind);
if (!res)
@ -2979,7 +2979,7 @@ js_CreateThis(JSContext *cx, JSObject *callee)
JSObject *proto = protov.isObjectOrNull() ? protov.toObjectOrNull() : NULL;
JSObject *parent = callee->getParent();
gc::FinalizeKind kind = NewObjectGCKind(cx, newclasp);
gc::AllocKind kind = NewObjectGCKind(cx, newclasp);
JSObject *obj = NewObject<WithProto::Class>(cx, newclasp, proto, parent, kind);
if (obj)
obj->syncSpecialEquality();
@ -2995,14 +2995,14 @@ CreateThisForFunctionWithType(JSContext *cx, types::TypeObject *type, JSObject *
* which reflects any properties that will definitely be added to the
* object before it is read from.
*/
gc::FinalizeKind kind = gc::FinalizeKind(type->newScript->finalizeKind);
gc::AllocKind kind = type->newScript->allocKind;
JSObject *res = NewObjectWithType(cx, type, parent, kind);
if (res)
res->setMap((Shape *) type->newScript->shape);
return res;
}
gc::FinalizeKind kind = NewObjectGCKind(cx, &js_ObjectClass);
gc::AllocKind kind = NewObjectGCKind(cx, &js_ObjectClass);
return NewObjectWithType(cx, type, parent, kind);
}
@ -3018,7 +3018,7 @@ js_CreateThisForFunctionWithProto(JSContext *cx, JSObject *callee, JSObject *pro
return NULL;
res = CreateThisForFunctionWithType(cx, type, callee->getParent());
} else {
gc::FinalizeKind kind = NewObjectGCKind(cx, &js_ObjectClass);
gc::AllocKind kind = NewObjectGCKind(cx, &js_ObjectClass);
res = NewNonFunction<WithProto::Class>(cx, &js_ObjectClass, proto, callee->getParent(), kind);
}
@ -3077,7 +3077,7 @@ JSObject* FASTCALL
js_InitializerObject(JSContext* cx, JSObject *proto, JSObject *baseobj)
{
if (!baseobj) {
gc::FinalizeKind kind = GuessObjectGCKind(0, false);
gc::AllocKind kind = GuessObjectGCKind(0, false);
return NewObjectWithClassProto(cx, &js_ObjectClass, proto, kind);
}
@ -3129,7 +3129,7 @@ js_CreateThisFromTrace(JSContext *cx, JSObject *ctor, uintN protoSlot)
return NULL;
}
gc::FinalizeKind kind = NewObjectGCKind(cx, &js_ObjectClass);
gc::AllocKind kind = NewObjectGCKind(cx, &js_ObjectClass);
return NewNativeClassInstance(cx, &js_ObjectClass, proto, parent, kind);
}
JS_DEFINE_CALLINFO_3(extern, CONSTRUCTOR_RETRY, js_CreateThisFromTrace, CONTEXT, OBJECT, UINTN, 0,
@ -3405,7 +3405,7 @@ js_CloneBlockObject(JSContext *cx, JSObject *proto, StackFrame *fp)
JS_ASSERT(proto->isStaticBlock());
size_t count = OBJ_BLOCK_COUNT(cx, proto);
gc::FinalizeKind kind = gc::GetGCObjectKind(count + 1);
gc::AllocKind kind = gc::GetGCObjectKind(count + 1);
TypeObject *type = proto->getNewType(cx);
if (!type)
@ -3615,9 +3615,7 @@ JSObject::clone(JSContext *cx, JSObject *proto, JSObject *parent)
return NULL;
}
}
JSObject *clone = NewObject<WithProto::Given>(cx, getClass(),
proto, parent,
gc::FinalizeKind(finalizeKind()));
JSObject *clone = NewObject<WithProto::Given>(cx, getClass(), proto, parent, getAllocKind());
if (!clone)
return NULL;
if (isNative()) {
@ -4364,16 +4362,15 @@ JSObject::allocSlots(JSContext *cx, size_t newcap)
* objects are constructed.
*/
if (!hasLazyType() && type()->newScript) {
gc::FinalizeKind kind = gc::FinalizeKind(type()->newScript->finalizeKind);
gc::AllocKind kind = type()->newScript->allocKind;
unsigned newScriptSlots = gc::GetGCKindSlots(kind);
if (newScriptSlots == numFixedSlots() && gc::CanBumpFinalizeKind(kind)) {
kind = gc::BumpFinalizeKind(kind);
if (newScriptSlots == numFixedSlots() && gc::TryIncrementAllocKind(&kind)) {
JSObject *obj = NewReshapedObject(cx, type(), getParent(), kind,
type()->newScript->shape);
if (!obj)
return false;
type()->newScript->finalizeKind = kind;
type()->newScript->allocKind = kind;
type()->newScript->shape = obj->lastProperty();
type()->markStateChange(cx);
}

View File

@ -664,8 +664,6 @@ struct JSObject : js::gc::Cell {
inline bool hasPropertyTable() const;
/* gc::FinalizeKind */ unsigned finalizeKind() const;
uint32 numSlots() const { return uint32(capacity); }
inline size_t structSize() const;
@ -1279,7 +1277,7 @@ struct JSObject : js::gc::Cell {
js::types::TypeObject *type,
JSObject *parent,
void *priv,
/* gc::FinalizeKind */ unsigned kind);
js::gc::AllocKind kind);
inline bool hasProperty(JSContext *cx, jsid id, bool *foundp, uintN flags = 0);

View File

@ -402,12 +402,6 @@ JSObject::setPrimitiveThis(const js::Value &pthis)
setFixedSlot(JSSLOT_PRIMITIVE_THIS, pthis);
}
inline /* gc::FinalizeKind */ unsigned
JSObject::finalizeKind() const
{
return js::gc::FinalizeKind(arenaHeader()->getThingKind());
}
inline bool
JSObject::hasSlotsArray() const
{
@ -964,7 +958,7 @@ JSObject::initSharingEmptyShape(JSContext *cx,
js::types::TypeObject *type,
JSObject *parent,
void *privateValue,
/* js::gc::FinalizeKind */ unsigned kind)
js::gc::AllocKind kind)
{
init(cx, aclasp, type, parent, privateValue, false);
@ -1245,7 +1239,7 @@ class AutoPropertyDescriptorRooter : private AutoGCRooter, public PropertyDescri
static inline bool
InitScopeForObject(JSContext* cx, JSObject* obj, js::Class *clasp, js::types::TypeObject *type,
gc::FinalizeKind kind)
gc::AllocKind kind)
{
JS_ASSERT(clasp->isNative());
@ -1273,7 +1267,7 @@ InitScopeForObject(JSContext* cx, JSObject* obj, js::Class *clasp, js::types::Ty
}
static inline bool
CanBeFinalizedInBackground(gc::FinalizeKind kind, Class *clasp)
CanBeFinalizedInBackground(gc::AllocKind kind, Class *clasp)
{
#ifdef JS_THREADSAFE
JS_ASSERT(kind <= gc::FINALIZE_OBJECT_LAST);
@ -1281,10 +1275,10 @@ CanBeFinalizedInBackground(gc::FinalizeKind kind, Class *clasp)
* a different thread, we change the finalize kind. For example,
* FINALIZE_OBJECT0 calls the finalizer on the main thread,
* FINALIZE_OBJECT0_BACKGROUND calls the finalizer on the gcHelperThread.
* IsBackgroundFinalizeKind is called to prevent recursively incrementing
* IsBackgroundAllocKind is called to prevent recursively incrementing
* the finalize kind; kind may already be a background finalize kind.
*/
if (!gc::IsBackgroundFinalizeKind(kind) &&
if (!gc::IsBackgroundAllocKind(kind) &&
(!clasp->finalize || clasp->flags & JSCLASS_CONCURRENT_FINALIZER)) {
return true;
}
@ -1300,7 +1294,7 @@ CanBeFinalizedInBackground(gc::FinalizeKind kind, Class *clasp)
*/
static inline JSObject *
NewNativeClassInstance(JSContext *cx, Class *clasp, JSObject *proto,
JSObject *parent, gc::FinalizeKind kind)
JSObject *parent, gc::AllocKind kind)
{
JS_ASSERT(proto);
JS_ASSERT(parent);
@ -1316,7 +1310,7 @@ NewNativeClassInstance(JSContext *cx, Class *clasp, JSObject *proto,
*/
if (CanBeFinalizedInBackground(kind, clasp))
kind = GetBackgroundFinalizeKind(kind);
kind = GetBackgroundAllocKind(kind);
JSObject* obj = js_NewGCObject(cx, kind);
@ -1343,7 +1337,7 @@ NewNativeClassInstance(JSContext *cx, Class *clasp, JSObject *proto,
static inline JSObject *
NewNativeClassInstance(JSContext *cx, Class *clasp, JSObject *proto, JSObject *parent)
{
gc::FinalizeKind kind = gc::GetGCObjectKind(JSCLASS_RESERVED_SLOTS(clasp));
gc::AllocKind kind = gc::GetGCObjectKind(JSCLASS_RESERVED_SLOTS(clasp));
return NewNativeClassInstance(cx, clasp, proto, parent, kind);
}
@ -1358,7 +1352,7 @@ FindClassPrototype(JSContext *cx, JSObject *scope, JSProtoKey protoKey, JSObject
* right default proto and parent for clasp in cx.
*/
static inline JSObject *
NewBuiltinClassInstance(JSContext *cx, Class *clasp, gc::FinalizeKind kind)
NewBuiltinClassInstance(JSContext *cx, Class *clasp, gc::AllocKind kind)
{
VOUCH_DOES_NOT_REQUIRE_STACK();
@ -1392,7 +1386,7 @@ NewBuiltinClassInstance(JSContext *cx, Class *clasp, gc::FinalizeKind kind)
static inline JSObject *
NewBuiltinClassInstance(JSContext *cx, Class *clasp)
{
gc::FinalizeKind kind = gc::GetGCObjectKind(JSCLASS_RESERVED_SLOTS(clasp));
gc::AllocKind kind = gc::GetGCObjectKind(JSCLASS_RESERVED_SLOTS(clasp));
return NewBuiltinClassInstance(cx, clasp, kind);
}
@ -1457,7 +1451,7 @@ namespace detail
template <bool withProto, bool isFunction>
static JS_ALWAYS_INLINE JSObject *
NewObject(JSContext *cx, js::Class *clasp, JSObject *proto, JSObject *parent,
gc::FinalizeKind kind)
gc::AllocKind kind)
{
/* Bootstrap the ur-object, and make it the default prototype object. */
if (withProto == WithProto::Class && !proto) {
@ -1478,7 +1472,7 @@ NewObject(JSContext *cx, js::Class *clasp, JSObject *proto, JSObject *parent,
*/
if (!isFunction && CanBeFinalizedInBackground(kind, clasp))
kind = GetBackgroundFinalizeKind(kind);
kind = GetBackgroundAllocKind(kind);
JSObject* obj = isFunction ? js_NewGCFunction(cx) : js_NewGCObject(cx, kind);
if (!obj)
@ -1530,7 +1524,7 @@ NewFunction(JSContext *cx, JSObject *parent)
template <WithProto::e withProto>
static JS_ALWAYS_INLINE JSObject *
NewNonFunction(JSContext *cx, js::Class *clasp, JSObject *proto, JSObject *parent,
gc::FinalizeKind kind)
gc::AllocKind kind)
{
return detail::NewObject<withProto, false>(cx, clasp, proto, parent, kind);
}
@ -1539,14 +1533,14 @@ template <WithProto::e withProto>
static JS_ALWAYS_INLINE JSObject *
NewNonFunction(JSContext *cx, js::Class *clasp, JSObject *proto, JSObject *parent)
{
gc::FinalizeKind kind = gc::GetGCObjectKind(JSCLASS_RESERVED_SLOTS(clasp));
gc::AllocKind kind = gc::GetGCObjectKind(JSCLASS_RESERVED_SLOTS(clasp));
return detail::NewObject<withProto, false>(cx, clasp, proto, parent, kind);
}
template <WithProto::e withProto>
static JS_ALWAYS_INLINE JSObject *
NewObject(JSContext *cx, js::Class *clasp, JSObject *proto, JSObject *parent,
gc::FinalizeKind kind)
gc::AllocKind kind)
{
if (clasp == &js_FunctionClass)
return detail::NewObject<withProto, true>(cx, clasp, proto, parent, kind);
@ -1557,7 +1551,7 @@ template <WithProto::e withProto>
static JS_ALWAYS_INLINE JSObject *
NewObject(JSContext *cx, js::Class *clasp, JSObject *proto, JSObject *parent)
{
gc::FinalizeKind kind = gc::GetGCObjectKind(JSCLASS_RESERVED_SLOTS(clasp));
gc::AllocKind kind = gc::GetGCObjectKind(JSCLASS_RESERVED_SLOTS(clasp));
return NewObject<withProto>(cx, clasp, proto, parent, kind);
}
@ -1566,12 +1560,12 @@ NewObject(JSContext *cx, js::Class *clasp, JSObject *proto, JSObject *parent)
* avoid losing creation site information for objects made by scripted 'new'.
*/
static JS_ALWAYS_INLINE JSObject *
NewObjectWithType(JSContext *cx, types::TypeObject *type, JSObject *parent, gc::FinalizeKind kind)
NewObjectWithType(JSContext *cx, types::TypeObject *type, JSObject *parent, gc::AllocKind kind)
{
JS_ASSERT(type == type->proto->newType);
if (CanBeFinalizedInBackground(kind, &js_ObjectClass))
kind = GetBackgroundFinalizeKind(kind);
kind = GetBackgroundAllocKind(kind);
JSObject* obj = js_NewGCObject(cx, kind);
if (!obj)
@ -1597,14 +1591,14 @@ out:
extern JSObject *
NewReshapedObject(JSContext *cx, js::types::TypeObject *type, JSObject *parent,
gc::FinalizeKind kind, const Shape *shape);
gc::AllocKind kind, const Shape *shape);
/*
* As for gc::GetGCObjectKind, where numSlots is a guess at the final size of
* the object, zero if the final size is unknown. This should only be used for
* objects that do not require any fixed slots.
*/
static inline gc::FinalizeKind
static inline gc::AllocKind
GuessObjectGCKind(size_t numSlots, bool isArray)
{
if (numSlots)
@ -1616,7 +1610,7 @@ GuessObjectGCKind(size_t numSlots, bool isArray)
* Get the GC kind to use for scripted 'new' on the given class.
* FIXME bug 547327: estimate the size from the allocation site.
*/
static inline gc::FinalizeKind
static inline gc::AllocKind
NewObjectGCKind(JSContext *cx, js::Class *clasp)
{
if (clasp == &js_ArrayClass || clasp == &js_SlowArrayClass)
@ -1628,17 +1622,16 @@ NewObjectGCKind(JSContext *cx, js::Class *clasp)
static JS_ALWAYS_INLINE JSObject*
NewObjectWithClassProto(JSContext *cx, Class *clasp, JSObject *proto,
/*gc::FinalizeKind*/ unsigned _kind)
gc::AllocKind kind)
{
JS_ASSERT(clasp->isNative());
gc::FinalizeKind kind = gc::FinalizeKind(_kind);
types::TypeObject *type = proto->getNewType(cx);
if (!type)
return NULL;
if (CanBeFinalizedInBackground(kind, clasp))
kind = GetBackgroundFinalizeKind(kind);
kind = GetBackgroundAllocKind(kind);
JSObject* obj = js_NewGCObject(cx, kind);
if (!obj)
@ -1656,8 +1649,7 @@ CopyInitializerObject(JSContext *cx, JSObject *baseobj, types::TypeObject *type)
JS_ASSERT(baseobj->getClass() == &js_ObjectClass);
JS_ASSERT(!baseobj->inDictionaryMode());
gc::FinalizeKind kind = gc::FinalizeKind(baseobj->finalizeKind());
JSObject *obj = NewBuiltinClassInstance(cx, &js_ObjectClass, kind);
JSObject *obj = NewBuiltinClassInstance(cx, &js_ObjectClass, baseobj->getAllocKind());
if (!obj || !obj->ensureSlots(cx, baseobj->numSlots()))
return NULL;

View File

@ -1421,7 +1421,7 @@ FixProxy(JSContext *cx, JSObject *proxy, JSBool *bp)
* Make a blank object from the recipe fix provided to us. This must have
* number of fixed slots as the proxy so that we can swap their contents.
*/
gc::FinalizeKind kind = gc::FinalizeKind(proxy->arenaHeader()->getThingKind());
gc::AllocKind kind = proxy->getAllocKind();
JSObject *newborn = NewNonFunction<WithProto::Given>(cx, clasp, proto, parent, kind);
if (!newborn)
return false;

View File

@ -68,7 +68,7 @@ js::Shape::freeTable(JSContext *cx)
inline js::EmptyShape *
js::types::TypeObject::getEmptyShape(JSContext *cx, js::Class *aclasp,
/* gc::FinalizeKind */ unsigned kind)
gc::AllocKind kind)
{
JS_ASSERT(!singleton);

View File

@ -1236,10 +1236,10 @@ static const JSC::MacroAssembler::RegisterID JSParamReg_Argc = JSC::SparcRegist
*/
Jump getNewObject(JSContext *cx, RegisterID result, JSObject *templateObject)
{
unsigned thingKind = templateObject->arenaHeader()->getThingKind();
gc::AllocKind allocKind = templateObject->getAllocKind();
JS_ASSERT(thingKind >= gc::FINALIZE_OBJECT0 && thingKind <= gc::FINALIZE_OBJECT_LAST);
size_t thingSize = gc::GCThingSizeMap[thingKind];
JS_ASSERT(allocKind >= gc::FINALIZE_OBJECT0 && allocKind <= gc::FINALIZE_OBJECT_LAST);
size_t thingSize = gc::Arena::thingSize(allocKind);
JS_ASSERT(cx->typeInferenceEnabled());
JS_ASSERT(!templateObject->hasSlotsArray());
@ -1253,7 +1253,8 @@ static const JSC::MacroAssembler::RegisterID JSParamReg_Argc = JSC::SparcRegist
* Inline FreeSpan::allocate. Only the case where the current freelist
* span is not empty is handled.
*/
gc::FreeSpan *list = &cx->compartment->freeLists.lists[thingKind];
gc::FreeSpan *list = const_cast<gc::FreeSpan *>
(cx->compartment->arenas.getFreeList(allocKind));
loadPtr(&list->first, result);
Jump jump = branchPtr(Assembler::BelowOrEqual, AbsoluteAddress(&list->last), result);

View File

@ -1350,7 +1350,7 @@ stubs::NewInitObject(VMFrame &f, JSObject *baseobj)
TypeObject *type = (TypeObject *) f.scratch;
if (!baseobj) {
gc::FinalizeKind kind = GuessObjectGCKind(0, false);
gc::AllocKind kind = GuessObjectGCKind(0, false);
JSObject *obj = NewBuiltinClassInstance(cx, &js_ObjectClass, kind);
if (!obj)
THROW();

View File

@ -416,10 +416,10 @@ inline void
JSAtom::finalize(JSRuntime *rt)
{
JS_ASSERT(isAtom());
if (arenaHeader()->getThingKind() == js::gc::FINALIZE_STRING)
if (getAllocKind() == js::gc::FINALIZE_STRING)
asFlat().finalize(rt);
else
JS_ASSERT(arenaHeader()->getThingKind() == js::gc::FINALIZE_SHORT_STRING);
JS_ASSERT(getAllocKind() == js::gc::FINALIZE_SHORT_STRING);
}
inline void

View File

@ -1,4 +1,4 @@
/* -*- Mode: C; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*-
/* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*-
* vim: set ts=4 sw=4 et tw=79 ft=cpp:
*
* ***** BEGIN LICENSE BLOCK *****
@ -49,7 +49,7 @@ using namespace js;
bool
JSString::isShort() const
{
bool is_short = arenaHeader()->getThingKind() == gc::FINALIZE_SHORT_STRING;
bool is_short = (getAllocKind() == gc::FINALIZE_SHORT_STRING);
JS_ASSERT_IF(is_short, isFlat());
return is_short;
}
@ -69,7 +69,7 @@ JSString::isInline() const
bool
JSString::isExternal() const
{
bool is_external = arenaHeader()->getThingKind() == gc::FINALIZE_EXTERNAL_STRING;
bool is_external = (getAllocKind() == gc::FINALIZE_EXTERNAL_STRING);
JS_ASSERT_IF(is_external, isFixed());
return is_external;
}

View File

@ -1324,13 +1324,14 @@ ArenaCallback(JSContext *cx, void *vdata, js::gc::Arena *arena,
IterateData *data = static_cast<IterateData *>(vdata);
data->currCompartmentStats->gcHeapArenaHeaders +=
sizeof(js::gc::ArenaHeader);
size_t allocationSpace = arena->thingsSpan(thingSize);
data->currCompartmentStats->gcHeapArenaPadding +=
arena->thingsStartOffset(thingSize) - sizeof(js::gc::ArenaHeader);
js::gc::ArenaSize - allocationSpace - sizeof(js::gc::ArenaHeader);
// We don't call the callback on unused things. So we compute the
// unused space like this: arenaUnused = maxArenaUnused - arenaUsed.
// We do this by setting arenaUnused to maxArenaUnused here, and then
// subtracting thingSize for every used cell, in CellCallback().
data->currCompartmentStats->gcHeapArenaUnused += arena->thingsSpan(thingSize);
data->currCompartmentStats->gcHeapArenaUnused += allocationSpace;
}
void