Bug 988486 - Make more GCRuntime state private and add accessors r=terrence

Jon Coppeard 2014-06-16 10:40:44 +01:00
parent 38d8b6c291
commit 6140e80291
15 changed files with 307 additions and 290 deletions
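Editor's note: the change is mechanical but wide. GCRuntime fields that jsapi.cpp, jsfriendapi.cpp, helper functions in jsgc.cpp, and the jsapi-tests poked directly (incrementalState, incrementalEnabled, manipulatingDeadZones, heapState, and friends) become private, and callers go through small inline accessors instead. A minimal sketch of the shape, with simplified declarations rather than the real ones:

    // Illustrative only -- not the actual GCRuntime declaration.
    class GCRuntime {
      public:
        // Read-only views of formerly public fields.
        js::gc::State state() { return incrementalState; }
        bool isIncrementalGCEnabled() { return incrementalEnabled; }
        // Mutations now funnel through named operations.
        void disableIncrementalGC() { incrementalEnabled = false; }
        void setFoundBlackGrayEdges() { foundBlackGrayEdges = true; }
      private:
        js::gc::State incrementalState;
        bool incrementalEnabled;
        bool foundBlackGrayEdges;
    };

So `rt->gc.incrementalState == gc::MARK` becomes `rt->gc.state() == gc::MARK`, and `rt->gc.incrementalEnabled = false` becomes `rt->gc.disableIncrementalGC()`, as the hunks below show.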

View File

@@ -571,7 +571,7 @@ GCState(JSContext *cx, unsigned argc, jsval *vp)
}
const char *state;
- gc::State globalState = cx->runtime()->gc.incrementalState;
+ gc::State globalState = cx->runtime()->gc.state();
if (globalState == gc::NO_INCREMENTAL)
state = "none";
else if (globalState == gc::MARK)

View File

@@ -232,7 +232,7 @@ ForkJoinNursery::pjsCollection(int op)
TIME_START(pjsCollection);
- rt->incFJMinorCollecting();
+ rt->gc.incFJMinorCollecting();
if (evacuate) {
isEvacuating_ = true;
evacuationZone_ = shared_->zone();
@@ -265,7 +265,7 @@ ForkJoinNursery::pjsCollection(int op)
tail_ = &head_;
movedSize_ = 0;
- rt->decFJMinorCollecting();
+ rt->gc.decFJMinorCollecting();
TIME_END(pjsCollection);

View File

@@ -32,6 +32,7 @@ typedef Vector<JS::Zone *, 4, SystemAllocPolicy> ZoneVector;
class MarkingValidator;
class AutoPrepareForTracing;
+ class AutoTraceSession;
struct ConservativeGCData
{
@@ -104,11 +105,23 @@ class GCRuntime
void removeRoot(void *rp);
void setMarkStackLimit(size_t limit);
+ void setParameter(JSGCParamKey key, uint32_t value);
+ uint32_t getParameter(JSGCParamKey key);
bool isHeapBusy() { return heapState != js::Idle; }
bool isHeapMajorCollecting() { return heapState == js::MajorCollecting; }
bool isHeapMinorCollecting() { return heapState == js::MinorCollecting; }
bool isHeapCollecting() { return isHeapMajorCollecting() || isHeapMinorCollecting(); }
+ // Performance note: if isFJMinorCollecting turns out to be slow because
+ // reading the counter is slow then we may be able to augment the counter
+ // with a volatile flag that is set iff the counter is greater than
+ // zero. (It will require some care to make sure the two variables stay in
+ // sync.)
+ bool isFJMinorCollecting() { return fjCollectionCounter > 0; }
+ void incFJMinorCollecting() { fjCollectionCounter++; }
+ void decFJMinorCollecting() { fjCollectionCounter--; }
bool triggerGC(JS::gcreason::Reason reason);
bool triggerZoneGC(Zone *zone, JS::gcreason::Reason reason);
void maybeGC(Zone *zone);
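Editor's note: the performance note in the hunk above leaves the counter-plus-flag idea as a sketch. One way it could look, with a mirror flag beside the counter (illustrative only; std::atomic stands in for mozilla::Atomic, and the names are hypothetical):

    #include <atomic>
    #include <cstdint>

    struct FJCollectionCount {
        std::atomic<uint32_t> counter{0};
        std::atomic<bool> active{false};  // mirrors counter > 0, cheap to read

        void inc() {
            if (counter.fetch_add(1) == 0)
                active.store(true);       // first worker in: raise the flag
        }
        void dec() {
            if (counter.fetch_sub(1) == 1)
                active.store(false);      // last worker out: clear the flag
        }
        // The fast path readers would use instead of loading the counter:
        bool isCollecting() const { return active.load(std::memory_order_acquire); }
    };

As the note warns, keeping the two in sync takes care: between fetch_add returning 0 and the store(true), a reader can still see the flag lag the counter.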
@@ -117,12 +130,15 @@ class GCRuntime
void gcIfNeeded(JSContext *cx);
void collect(bool incremental, int64_t budget, JSGCInvocationKind gckind,
JS::gcreason::Reason reason);
- void gcSlice(JSGCInvocationKind gckind, JS::gcreason::Reason reason, int64_t millis);
+ void gcSlice(JSGCInvocationKind gckind, JS::gcreason::Reason reason, int64_t millis = 0);
void runDebugGC();
inline void poke();
void markRuntime(JSTracer *trc, bool useSavedRoots = false);
+ void notifyDidPaint();
+ void shrinkBuffers();
#ifdef JS_GC_ZEAL
const void *addressOfZealMode() { return &zealMode; }
void setZeal(uint8_t zeal, uint32_t frequency);
@@ -138,6 +154,7 @@ class GCRuntime
public:
// Internal public interface
+ js::gc::State state() { return incrementalState; }
void recordNativeStackTop();
#ifdef JS_THREADSAFE
void notifyRequestEnd() { conservativeGC.updateForRequestEnd(); }
@@ -145,7 +162,6 @@ class GCRuntime
bool isBackgroundSweeping() { return helperState.isBackgroundSweeping(); }
void waitBackgroundSweepEnd() { helperState.waitBackgroundSweepEnd(); }
void waitBackgroundSweepOrAllocEnd() { helperState.waitBackgroundSweepOrAllocEnd(); }
- void startBackgroundShrink() { helperState.startBackgroundShrink(); }
void startBackgroundAllocationIfIdle() { helperState.startBackgroundAllocationIfIdle(); }
void freeLater(void *p) { helperState.freeLater(p); }
@@ -205,6 +221,9 @@ class GCRuntime
void setAlwaysPreserveCode() { alwaysPreserveCode = true; }
+ bool isIncrementalGCEnabled() { return incrementalEnabled; }
+ void disableIncrementalGC() { incrementalEnabled = false; }
bool isGenerationalGCEnabled() { return generationalDisabled == 0; }
void disableGenerationalGC();
void enableGenerationalGC();
@@ -227,6 +246,17 @@ class GCRuntime
void setValidate(bool enable);
void setFullCompartmentChecks(bool enable);
+ bool isManipulatingDeadZones() { return manipulatingDeadZones; }
+ void setManipulatingDeadZones(bool value) { manipulatingDeadZones = value; }
+ unsigned objectsMarkedInDeadZonesCount() { return objectsMarkedInDeadZones; }
+ void incObjectsMarkedInDeadZone() {
+ JS_ASSERT(manipulatingDeadZones);
+ ++objectsMarkedInDeadZones;
+ }
+ JS::Zone *getCurrentZoneGroup() { return currentZoneGroup; }
+ void setFoundBlackGrayEdges() { foundBlackGrayEdges = true; }
#ifdef JS_GC_ZEAL
void startVerifyPreBarriers();
bool endVerifyPreBarriers();
@@ -239,6 +269,7 @@ class GCRuntime
// For ArenaLists::allocateFromArenaInline()
friend class ArenaLists;
Chunk *pickChunk(Zone *zone, AutoMaybeStartBackgroundAllocation &maybeStartBackgroundAllocation);
+ inline void arenaAllocatedDuringGC(JS::Zone *zone, ArenaHeader *arena);
inline bool wantBackgroundAllocation() const;
@@ -270,6 +301,11 @@ class GCRuntime
bool sweepPhase(SliceBudget &sliceBudget);
void endSweepPhase(JSGCInvocationKind gckind, bool lastGC);
void sweepZones(FreeOp *fop, bool lastGC);
+ void decommitArenasFromAvailableList(Chunk **availableListHeadp);
+ void decommitArenas();
+ void expireChunksAndArenas(bool shouldShrink);
+ void sweepBackgroundThings(bool onBackgroundThread);
+ void assertBackgroundSweepingFinished();
void computeNonIncrementalMarkingForValidation();
void validateIncrementalMarking();
@@ -312,6 +348,11 @@ class GCRuntime
js::gc::Chunk *userAvailableChunkListHead;
js::gc::ChunkPool chunkPool;
+ #ifdef JSGC_GENERATIONAL
+ js::Nursery nursery;
+ js::gc::StoreBuffer storeBuffer;
+ #endif
js::RootedValueMap rootsHash;
/* This is updated by both the main and GC helper threads. */
@@ -397,6 +438,7 @@ class GCRuntime
uintptr_t unused1;
#endif
+ private:
/*
* The current incremental GC phase. This is also used internally in
* non-incremental GC.
@@ -479,11 +521,6 @@ class GCRuntime
volatile js::HeapState heapState;
- #ifdef JSGC_GENERATIONAL
- js::Nursery nursery;
- js::gc::StoreBuffer storeBuffer;
- #endif
/*
* ForkJoin workers enter and leave GC independently; this counter
* tracks the number that are currently in GC.
@@ -494,7 +531,6 @@ class GCRuntime
*/
mozilla::Atomic<uint32_t, mozilla::ReleaseAcquire> fjCollectionCounter;
- private:
/*
* These options control the zealousness of the GC. The fundamental values
* are nextScheduled and gcDebugCompartmentGC. At every allocation,
@@ -583,6 +619,7 @@ class GCRuntime
friend class js::GCHelperState;
friend class js::gc::MarkingValidator;
+ friend class js::gc::AutoTraceSession;
};
#ifdef JS_GC_ZEAL
@@ -616,7 +653,6 @@ inline bool GCRuntime::upcomingZealousGC() { return false; }
inline bool GCRuntime::needZealousGC() { return false; }
#endif
} /* namespace gc */
} /* namespace js */

View File

@@ -198,7 +198,7 @@ CheckMarkedThing(JSTracer *trc, T **thingp)
DebugOnly<JSRuntime *> rt = trc->runtime();
- JS_ASSERT_IF(IS_GC_MARKING_TRACER(trc) && rt->gc.manipulatingDeadZones,
+ JS_ASSERT_IF(IS_GC_MARKING_TRACER(trc) && rt->gc.isManipulatingDeadZones(),
!thing->zone()->scheduledForDestruction);
JS_ASSERT(CurrentThreadCanAccessRuntime(rt));
@@ -285,8 +285,8 @@ MarkInternal(JSTracer *trc, T **thingp)
#define JS_ROOT_MARKING_ASSERT(trc) \
JS_ASSERT_IF(IS_GC_MARKING_TRACER(trc), \
- trc->runtime()->gc.incrementalState == NO_INCREMENTAL || \
- trc->runtime()->gc.incrementalState == MARK_ROOTS);
+ trc->runtime()->gc.state() == NO_INCREMENTAL || \
+ trc->runtime()->gc.state() == MARK_ROOTS);
namespace js {
namespace gc {
@@ -859,7 +859,7 @@ ShouldMarkCrossCompartment(JSTracer *trc, JSObject *src, Cell *cell)
*/
if (cell->isMarked(GRAY)) {
JS_ASSERT(!zone->isCollecting());
- trc->runtime()->gc.foundBlackGrayEdges = true;
+ trc->runtime()->gc.setFoundBlackGrayEdges();
}
return zone->isGCMarking();
} else {
@@ -1611,7 +1611,7 @@ GCMarker::processMarkStackTop(SliceBudget &budget)
// if the global has no custom trace hook of its own, or has been moved to a different
// compartment, and so can't have one.
JS_ASSERT_IF(runtime()->gcMode() == JSGC_MODE_INCREMENTAL &&
- runtime()->gc.incrementalEnabled &&
+ runtime()->gc.isIncrementalGCEnabled() &&
!(clasp->trace == JS_GlobalObjectTraceHook &&
(!obj->compartment()->options().getTrace() ||
!obj->isOwnGlobal())),

View File

@@ -350,7 +350,7 @@ class MinorCollectionTracer : public JSTracer
* sweep their dead views. Incremental collection also uses these lists,
* so we may need to save and restore their contents here.
*/
- if (rt->gc.incrementalState != NO_INCREMENTAL) {
+ if (rt->gc.state() != NO_INCREMENTAL) {
for (GCCompartmentsIter c(rt); !c.done(); c.next()) {
if (!ArrayBufferObject::saveArrayBufferList(c, liveArrayBuffers))
CrashAtUnhandlableOOM("OOM while saving live array buffers");
@@ -361,7 +361,7 @@ class MinorCollectionTracer : public JSTracer
~MinorCollectionTracer() {
runtime()->setNeedsBarrier(savedRuntimeNeedBarrier);
- if (runtime()->gc.incrementalState != NO_INCREMENTAL)
+ if (runtime()->gc.state() != NO_INCREMENTAL)
ArrayBufferObject::restoreArrayBufferLists(liveArrayBuffers);
}
};

View File

@@ -266,7 +266,7 @@ MarkIfGCThingWord(JSTracer *trc, uintptr_t w)
JS_ASSERT(tmp == thing);
#ifdef DEBUG
- if (trc->runtime()->gc.incrementalState == MARK_ROOTS)
+ if (trc->runtime()->gc.state() == MARK_ROOTS)
trc->runtime()->mainThread.gcSavedRoots.append(
PerThreadData::SavedGCRoot(thing, traceKind));
#endif

View File

@@ -556,7 +556,7 @@ Statistics::endGC()
(*cb)(JS_TELEMETRY_GC_MARK_ROOTS_MS, t(phaseTimes[PHASE_MARK_ROOTS]));
(*cb)(JS_TELEMETRY_GC_MARK_GRAY_MS, t(phaseTimes[PHASE_SWEEP_MARK_GRAY]));
(*cb)(JS_TELEMETRY_GC_NON_INCREMENTAL, !!nonincrementalReason);
- (*cb)(JS_TELEMETRY_GC_INCREMENTAL_DISABLED, !runtime->gc.incrementalEnabled);
+ (*cb)(JS_TELEMETRY_GC_INCREMENTAL_DISABLED, !runtime->gc.isIncrementalGCEnabled());
(*cb)(JS_TELEMETRY_GC_SCC_SWEEP_TOTAL_MS, t(sccTotal));
(*cb)(JS_TELEMETRY_GC_SCC_SWEEP_MAX_PAUSE_MS, t(sccLongest));
@@ -576,7 +576,7 @@ Statistics::beginSlice(int collectedCount, int zoneCount, int compartmentCount,
this->zoneCount = zoneCount;
this->compartmentCount = compartmentCount;
- bool first = runtime->gc.incrementalState == gc::NO_INCREMENTAL;
+ bool first = runtime->gc.state() == gc::NO_INCREMENTAL;
if (first)
beginGC();
@@ -606,7 +606,7 @@ Statistics::endSlice()
(*cb)(JS_TELEMETRY_GC_RESET, !!slices.back().resetReason);
}
- bool last = runtime->gc.incrementalState == gc::NO_INCREMENTAL;
+ bool last = runtime->gc.state() == gc::NO_INCREMENTAL;
if (last)
endGC();

View File

@@ -531,7 +531,7 @@ bool
GCMarker::markDelayedChildren(SliceBudget &budget)
{
gcstats::MaybeAutoPhase ap;
- if (runtime()->gc.incrementalState == MARK)
+ if (runtime()->gc.state() == MARK)
ap.construct(runtime()->gc.stats, gcstats::PHASE_MARK_DELAYED);
JS_ASSERT(unmarkedArenaStackTop);

View File

@@ -26,7 +26,7 @@ BEGIN_TEST(testGCFinalizeCallback)
FinalizeCalls = 0;
JS::PrepareForFullGC(rt);
JS::IncrementalGC(rt, JS::gcreason::API, 1000000);
- CHECK(rt->gc.incrementalState == js::gc::NO_INCREMENTAL);
+ CHECK(rt->gc.state() == js::gc::NO_INCREMENTAL);
CHECK(rt->gc.isFull);
CHECK(checkMultipleGroups());
CHECK(checkFinalizeStatus());
@@ -63,7 +63,7 @@ BEGIN_TEST(testGCFinalizeCallback)
FinalizeCalls = 0;
JS::PrepareZoneForGC(global1->zone());
JS::IncrementalGC(rt, JS::gcreason::API, 1000000);
- CHECK(rt->gc.incrementalState == js::gc::NO_INCREMENTAL);
+ CHECK(rt->gc.state() == js::gc::NO_INCREMENTAL);
CHECK(!rt->gc.isFull);
CHECK(checkSingleGroup());
CHECK(checkFinalizeStatus());
@@ -75,7 +75,7 @@ BEGIN_TEST(testGCFinalizeCallback)
JS::PrepareZoneForGC(global2->zone());
JS::PrepareZoneForGC(global3->zone());
JS::IncrementalGC(rt, JS::gcreason::API, 1000000);
- CHECK(rt->gc.incrementalState == js::gc::NO_INCREMENTAL);
+ CHECK(rt->gc.state() == js::gc::NO_INCREMENTAL);
CHECK(!rt->gc.isFull);
CHECK(checkMultipleGroups());
CHECK(checkFinalizeStatus());
@@ -89,12 +89,12 @@ BEGIN_TEST(testGCFinalizeCallback)
JS_SetGCZeal(cx, 9, 1000000);
JS::PrepareForFullGC(rt);
js::GCDebugSlice(rt, true, 1);
- CHECK(rt->gc.incrementalState == js::gc::MARK);
+ CHECK(rt->gc.state() == js::gc::MARK);
CHECK(rt->gc.isFull);
JS::RootedObject global4(cx, createGlobal());
js::GCDebugSlice(rt, true, 1);
- CHECK(rt->gc.incrementalState == js::gc::NO_INCREMENTAL);
+ CHECK(rt->gc.state() == js::gc::NO_INCREMENTAL);
CHECK(!rt->gc.isFull);
CHECK(checkMultipleGroups());
CHECK(checkFinalizeStatus());

View File

@@ -154,7 +154,7 @@ namespace js {
void
AssertHeapIsIdle(JSRuntime *rt)
{
- JS_ASSERT(rt->gc.heapState == js::Idle);
+ JS_ASSERT(!rt->isHeapBusy());
}
void
@@ -1926,106 +1926,13 @@ JS_IsAboutToBeFinalizedUnbarriered(JSObject **objp)
JS_PUBLIC_API(void)
JS_SetGCParameter(JSRuntime *rt, JSGCParamKey key, uint32_t value)
{
- switch (key) {
- case JSGC_MAX_BYTES: {
- JS_ASSERT(value >= rt->gc.bytes);
- rt->gc.maxBytes = value;
- break;
- }
- case JSGC_MAX_MALLOC_BYTES:
- rt->gc.setMaxMallocBytes(value);
- break;
- case JSGC_SLICE_TIME_BUDGET:
- rt->gc.sliceBudget = SliceBudget::TimeBudget(value);
- break;
- case JSGC_MARK_STACK_LIMIT:
- js::SetMarkStackLimit(rt, value);
- break;
- case JSGC_HIGH_FREQUENCY_TIME_LIMIT:
- rt->gc.highFrequencyTimeThreshold = value;
- break;
- case JSGC_HIGH_FREQUENCY_LOW_LIMIT:
- rt->gc.highFrequencyLowLimitBytes = value * 1024 * 1024;
- break;
- case JSGC_HIGH_FREQUENCY_HIGH_LIMIT:
- rt->gc.highFrequencyHighLimitBytes = value * 1024 * 1024;
- break;
- case JSGC_HIGH_FREQUENCY_HEAP_GROWTH_MAX:
- rt->gc.highFrequencyHeapGrowthMax = value / 100.0;
- MOZ_ASSERT(rt->gc.highFrequencyHeapGrowthMax / 0.85 > 1.0);
- break;
- case JSGC_HIGH_FREQUENCY_HEAP_GROWTH_MIN:
- rt->gc.highFrequencyHeapGrowthMin = value / 100.0;
- MOZ_ASSERT(rt->gc.highFrequencyHeapGrowthMin / 0.85 > 1.0);
- break;
- case JSGC_LOW_FREQUENCY_HEAP_GROWTH:
- rt->gc.lowFrequencyHeapGrowth = value / 100.0;
- MOZ_ASSERT(rt->gc.lowFrequencyHeapGrowth / 0.9 > 1.0);
- break;
- case JSGC_DYNAMIC_HEAP_GROWTH:
- rt->gc.dynamicHeapGrowth = value;
- break;
- case JSGC_DYNAMIC_MARK_SLICE:
- rt->gc.dynamicMarkSlice = value;
- break;
- case JSGC_ALLOCATION_THRESHOLD:
- rt->gc.allocationThreshold = value * 1024 * 1024;
- break;
- case JSGC_DECOMMIT_THRESHOLD:
- rt->gc.decommitThreshold = value * 1024 * 1024;
- break;
- default:
- JS_ASSERT(key == JSGC_MODE);
- rt->setGCMode(JSGCMode(value));
- JS_ASSERT(rt->gcMode() == JSGC_MODE_GLOBAL ||
- rt->gcMode() == JSGC_MODE_COMPARTMENT ||
- rt->gcMode() == JSGC_MODE_INCREMENTAL);
- return;
- }
+ rt->gc.setParameter(key, value);
}
JS_PUBLIC_API(uint32_t)
JS_GetGCParameter(JSRuntime *rt, JSGCParamKey key)
{
- switch (key) {
- case JSGC_MAX_BYTES:
- return uint32_t(rt->gc.maxBytes);
- case JSGC_MAX_MALLOC_BYTES:
- return rt->gc.maxMallocBytes;
- case JSGC_BYTES:
- return uint32_t(rt->gc.bytes);
- case JSGC_MODE:
- return uint32_t(rt->gcMode());
- case JSGC_UNUSED_CHUNKS:
- return uint32_t(rt->gc.chunkPool.getEmptyCount());
- case JSGC_TOTAL_CHUNKS:
- return uint32_t(rt->gc.chunkSet.count() + rt->gc.chunkPool.getEmptyCount());
- case JSGC_SLICE_TIME_BUDGET:
- return uint32_t(rt->gc.sliceBudget > 0 ? rt->gc.sliceBudget / PRMJ_USEC_PER_MSEC : 0);
- case JSGC_MARK_STACK_LIMIT:
- return rt->gc.marker.maxCapacity();
- case JSGC_HIGH_FREQUENCY_TIME_LIMIT:
- return rt->gc.highFrequencyTimeThreshold;
- case JSGC_HIGH_FREQUENCY_LOW_LIMIT:
- return rt->gc.highFrequencyLowLimitBytes / 1024 / 1024;
- case JSGC_HIGH_FREQUENCY_HIGH_LIMIT:
- return rt->gc.highFrequencyHighLimitBytes / 1024 / 1024;
- case JSGC_HIGH_FREQUENCY_HEAP_GROWTH_MAX:
- return uint32_t(rt->gc.highFrequencyHeapGrowthMax * 100);
- case JSGC_HIGH_FREQUENCY_HEAP_GROWTH_MIN:
- return uint32_t(rt->gc.highFrequencyHeapGrowthMin * 100);
- case JSGC_LOW_FREQUENCY_HEAP_GROWTH:
- return uint32_t(rt->gc.lowFrequencyHeapGrowth * 100);
- case JSGC_DYNAMIC_HEAP_GROWTH:
- return rt->gc.dynamicHeapGrowth;
- case JSGC_DYNAMIC_MARK_SLICE:
- return rt->gc.dynamicMarkSlice;
- case JSGC_ALLOCATION_THRESHOLD:
- return rt->gc.allocationThreshold / 1024 / 1024;
- default:
- JS_ASSERT(key == JSGC_NUMBER);
- return uint32_t(rt->gc.number);
- }
+ return rt->gc.getParameter(key);
}
JS_PUBLIC_API(void)

View File

@@ -887,46 +887,25 @@ GCDescription::formatJSON(JSRuntime *rt, uint64_t timestamp) const
JS_FRIEND_API(void)
JS::NotifyDidPaint(JSRuntime *rt)
{
- if (rt->gcZeal() == gc::ZealFrameVerifierPreValue) {
- gc::VerifyBarriers(rt, gc::PreBarrierVerifier);
- return;
- }
- if (rt->gcZeal() == gc::ZealFrameVerifierPostValue) {
- gc::VerifyBarriers(rt, gc::PostBarrierVerifier);
- return;
- }
- if (rt->gcZeal() == gc::ZealFrameGCValue) {
- PrepareForFullGC(rt);
- GCSlice(rt, GC_NORMAL, gcreason::REFRESH_FRAME);
- return;
- }
- if (JS::IsIncrementalGCInProgress(rt) && !rt->gc.interFrameGC) {
- JS::PrepareForIncrementalGC(rt);
- GCSlice(rt, GC_NORMAL, gcreason::REFRESH_FRAME);
- }
- rt->gc.interFrameGC = false;
+ rt->gc.notifyDidPaint();
}
JS_FRIEND_API(bool)
JS::IsIncrementalGCEnabled(JSRuntime *rt)
{
- return rt->gc.incrementalEnabled && rt->gcMode() == JSGC_MODE_INCREMENTAL;
+ return rt->gc.isIncrementalGCEnabled() && rt->gcMode() == JSGC_MODE_INCREMENTAL;
}
JS_FRIEND_API(bool)
JS::IsIncrementalGCInProgress(JSRuntime *rt)
{
- return rt->gc.incrementalState != gc::NO_INCREMENTAL && !rt->gc.verifyPreData;
+ return rt->gc.state() != gc::NO_INCREMENTAL && !rt->gc.verifyPreData;
}
JS_FRIEND_API(void)
JS::DisableIncrementalGC(JSRuntime *rt)
{
- rt->gc.incrementalEnabled = false;
+ rt->gc.disableIncrementalGC();
}
JS::AutoDisableGenerationalGC::AutoDisableGenerationalGC(JSRuntime *rt)
@@ -961,7 +940,7 @@ JS::IsGenerationalGCEnabled(JSRuntime *rt)
JS_FRIEND_API(bool)
JS::IsIncrementalBarrierNeeded(JSRuntime *rt)
{
- return rt->gc.incrementalState == gc::MARK && !rt->isHeapBusy();
+ return rt->gc.state() == gc::MARK && !rt->isHeapBusy();
}
JS_FRIEND_API(bool)
@@ -1194,7 +1173,7 @@ js_DefineOwnProperty(JSContext *cx, JSObject *objArg, jsid idArg,
{
RootedObject obj(cx, objArg);
RootedId id(cx, idArg);
- JS_ASSERT(cx->runtime()->gc.heapState == js::Idle);
+ js::AssertHeapIsIdle(cx);
CHECK_REQUEST(cx);
assertSameCompartment(cx, obj, id, descriptor.value());
if (descriptor.hasGetterObject())

View File

@@ -1333,6 +1333,111 @@ js::gc::FinishPersistentRootedChains(JSRuntime *rt)
rt->valuePersistentRooteds.clear();
}
+ void
+ GCRuntime::setParameter(JSGCParamKey key, uint32_t value)
+ {
+ switch (key) {
+ case JSGC_MAX_BYTES: {
+ JS_ASSERT(value >= bytes);
+ maxBytes = value;
+ break;
+ }
+ case JSGC_MAX_MALLOC_BYTES:
+ setMaxMallocBytes(value);
+ break;
+ case JSGC_SLICE_TIME_BUDGET:
+ sliceBudget = SliceBudget::TimeBudget(value);
+ break;
+ case JSGC_MARK_STACK_LIMIT:
+ js::SetMarkStackLimit(rt, value);
+ break;
+ case JSGC_HIGH_FREQUENCY_TIME_LIMIT:
+ highFrequencyTimeThreshold = value;
+ break;
+ case JSGC_HIGH_FREQUENCY_LOW_LIMIT:
+ highFrequencyLowLimitBytes = value * 1024 * 1024;
+ break;
+ case JSGC_HIGH_FREQUENCY_HIGH_LIMIT:
+ highFrequencyHighLimitBytes = value * 1024 * 1024;
+ break;
+ case JSGC_HIGH_FREQUENCY_HEAP_GROWTH_MAX:
+ highFrequencyHeapGrowthMax = value / 100.0;
+ MOZ_ASSERT(highFrequencyHeapGrowthMax / 0.85 > 1.0);
+ break;
+ case JSGC_HIGH_FREQUENCY_HEAP_GROWTH_MIN:
+ highFrequencyHeapGrowthMin = value / 100.0;
+ MOZ_ASSERT(highFrequencyHeapGrowthMin / 0.85 > 1.0);
+ break;
+ case JSGC_LOW_FREQUENCY_HEAP_GROWTH:
+ lowFrequencyHeapGrowth = value / 100.0;
+ MOZ_ASSERT(lowFrequencyHeapGrowth / 0.9 > 1.0);
+ break;
+ case JSGC_DYNAMIC_HEAP_GROWTH:
+ dynamicHeapGrowth = value;
+ break;
+ case JSGC_DYNAMIC_MARK_SLICE:
+ dynamicMarkSlice = value;
+ break;
+ case JSGC_ALLOCATION_THRESHOLD:
+ allocationThreshold = value * 1024 * 1024;
+ break;
+ case JSGC_DECOMMIT_THRESHOLD:
+ decommitThreshold = value * 1024 * 1024;
+ break;
+ default:
+ JS_ASSERT(key == JSGC_MODE);
+ mode = JSGCMode(value);
+ JS_ASSERT(mode == JSGC_MODE_GLOBAL ||
+ mode == JSGC_MODE_COMPARTMENT ||
+ mode == JSGC_MODE_INCREMENTAL);
+ return;
+ }
+ }
+ uint32_t
+ GCRuntime::getParameter(JSGCParamKey key)
+ {
+ switch (key) {
+ case JSGC_MAX_BYTES:
+ return uint32_t(maxBytes);
+ case JSGC_MAX_MALLOC_BYTES:
+ return maxMallocBytes;
+ case JSGC_BYTES:
+ return uint32_t(bytes);
+ case JSGC_MODE:
+ return uint32_t(rt->gcMode());
+ case JSGC_UNUSED_CHUNKS:
+ return uint32_t(chunkPool.getEmptyCount());
+ case JSGC_TOTAL_CHUNKS:
+ return uint32_t(chunkSet.count() + chunkPool.getEmptyCount());
+ case JSGC_SLICE_TIME_BUDGET:
+ return uint32_t(sliceBudget > 0 ? sliceBudget / PRMJ_USEC_PER_MSEC : 0);
+ case JSGC_MARK_STACK_LIMIT:
+ return marker.maxCapacity();
+ case JSGC_HIGH_FREQUENCY_TIME_LIMIT:
+ return highFrequencyTimeThreshold;
+ case JSGC_HIGH_FREQUENCY_LOW_LIMIT:
+ return highFrequencyLowLimitBytes / 1024 / 1024;
+ case JSGC_HIGH_FREQUENCY_HIGH_LIMIT:
+ return highFrequencyHighLimitBytes / 1024 / 1024;
+ case JSGC_HIGH_FREQUENCY_HEAP_GROWTH_MAX:
+ return uint32_t(highFrequencyHeapGrowthMax * 100);
+ case JSGC_HIGH_FREQUENCY_HEAP_GROWTH_MIN:
+ return uint32_t(highFrequencyHeapGrowthMin * 100);
+ case JSGC_LOW_FREQUENCY_HEAP_GROWTH:
+ return uint32_t(lowFrequencyHeapGrowth * 100);
+ case JSGC_DYNAMIC_HEAP_GROWTH:
+ return dynamicHeapGrowth;
+ case JSGC_DYNAMIC_MARK_SLICE:
+ return dynamicMarkSlice;
+ case JSGC_ALLOCATION_THRESHOLD:
+ return allocationThreshold / 1024 / 1024;
+ default:
+ JS_ASSERT(key == JSGC_NUMBER);
+ return uint32_t(number);
+ }
+ }
template <typename T> struct BarrierOwner {};
template <typename T> struct BarrierOwner<T *> { typedef T result; };
template <> struct BarrierOwner<Value> { typedef HeapValue result; };
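Editor's note: the setParameter/getParameter pair above also centralizes the unit conversions that were previously scattered through jsapi.cpp; limits arrive in MB and growth factors in percent, but are stored as bytes and fractions. A hedged round-trip illustration (not actual test code from the patch):

    // Illustrative round trip through the public API:
    JS_SetGCParameter(rt, JSGC_HIGH_FREQUENCY_LOW_LIMIT, 100);       // caller passes MB
    // internally: highFrequencyLowLimitBytes = 100 * 1024 * 1024    // stored as bytes
    uint32_t mb = JS_GetGCParameter(rt, JSGC_HIGH_FREQUENCY_LOW_LIMIT);
    // mb == 100 again: getParameter divides the byte count back down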
@@ -1622,11 +1727,16 @@ ArenaLists::prepareForIncrementalGC(JSRuntime *rt)
}
}
- static inline void
- PushArenaAllocatedDuringSweep(JSRuntime *runtime, ArenaHeader *arena)
+ inline void
+ GCRuntime::arenaAllocatedDuringGC(JS::Zone *zone, ArenaHeader *arena)
{
- arena->setNextAllocDuringSweep(runtime->gc.arenasAllocatedDuringSweep);
- runtime->gc.arenasAllocatedDuringSweep = arena;
+ if (zone->needsBarrier()) {
+ arena->allocatedDuringIncremental = true;
+ marker.delayMarkingArena(arena);
+ } else if (zone->isGCSweeping()) {
+ arena->setNextAllocDuringSweep(arenasAllocatedDuringSweep);
+ arenasAllocatedDuringSweep = arena;
+ }
}
inline void *
@@ -1687,14 +1797,8 @@ ArenaLists::allocateFromArenaInline(Zone *zone, AllocKind thingKind,
FreeSpan firstFreeSpan = aheader->getFirstFreeSpan();
freeLists[thingKind].setHead(&firstFreeSpan);
aheader->setAsFullyUsed();
- if (MOZ_UNLIKELY(zone->wasGCStarted())) {
- if (zone->needsBarrier()) {
- aheader->allocatedDuringIncremental = true;
- zone->runtimeFromMainThread()->gc.marker.delayMarkingArena(aheader);
- } else if (zone->isGCSweeping()) {
- PushArenaAllocatedDuringSweep(zone->runtimeFromMainThread(), aheader);
- }
- }
+ if (MOZ_UNLIKELY(zone->wasGCStarted()))
+ zone->runtimeFromMainThread()->gc.arenaAllocatedDuringGC(zone, aheader);
void *thing = freeLists[thingKind].allocate(Arena::thingSize(thingKind));
JS_ASSERT(thing); // This allocation is infallible.
return thing;
@@ -1722,14 +1826,8 @@
if (!aheader)
return nullptr;
- if (MOZ_UNLIKELY(zone->wasGCStarted())) {
- if (zone->needsBarrier()) {
- aheader->allocatedDuringIncremental = true;
- zone->runtimeFromMainThread()->gc.marker.delayMarkingArena(aheader);
- } else if (zone->isGCSweeping()) {
- PushArenaAllocatedDuringSweep(zone->runtimeFromMainThread(), aheader);
- }
- }
+ if (MOZ_UNLIKELY(zone->wasGCStarted()))
+ rt->gc.arenaAllocatedDuringGC(zone, aheader);
al->insertAtStart(aheader);
/*
@@ -2268,8 +2366,8 @@ GCRuntime::maybeGC(Zone *zone)
#endif
}
- static void
- DecommitArenasFromAvailableList(JSRuntime *rt, Chunk **availableListHeadp)
+ void
+ GCRuntime::decommitArenasFromAvailableList(Chunk **availableListHeadp)
{
Chunk *chunk = *availableListHeadp;
if (!chunk)
@@ -2320,9 +2418,9 @@ DecommitArenasFromAvailableList(JSRuntime *rt, Chunk **availableListHeadp)
* lock.
*/
Maybe<AutoUnlockGC> maybeUnlock;
- if (!rt->isHeapBusy())
+ if (!isHeapBusy())
maybeUnlock.construct(rt);
- ok = rt->gc.pageAllocator.markPagesUnused(aheader->getArena(), ArenaSize);
+ ok = pageAllocator.markPagesUnused(aheader->getArena(), ArenaSize);
}
if (ok) {
@@ -2352,7 +2450,7 @@ DecommitArenasFromAvailableList(JSRuntime *rt, Chunk **availableListHeadp)
JS_ASSERT(chunk->info.prevp);
}
- if (rt->gc.chunkAllocationSinceLastGC || !ok) {
+ if (chunkAllocationSinceLastGC || !ok) {
/*
* The allocator thread has started to get new chunks. We should stop
* to avoid decommitting arenas in just allocated chunks.
@@ -2377,32 +2475,32 @@ DecommitArenasFromAvailableList(JSRuntime *rt, Chunk **availableListHeadp)
}
}
- static void
- DecommitArenas(JSRuntime *rt)
+ void
+ GCRuntime::decommitArenas()
{
- DecommitArenasFromAvailableList(rt, &rt->gc.systemAvailableChunkListHead);
- DecommitArenasFromAvailableList(rt, &rt->gc.userAvailableChunkListHead);
+ decommitArenasFromAvailableList(&systemAvailableChunkListHead);
+ decommitArenasFromAvailableList(&userAvailableChunkListHead);
}
/* Must be called with the GC lock taken. */
- static void
- ExpireChunksAndArenas(JSRuntime *rt, bool shouldShrink)
+ void
+ GCRuntime::expireChunksAndArenas(bool shouldShrink)
{
#ifdef JSGC_FJGENERATIONAL
rt->threadPool.pruneChunkCache();
#endif
- if (Chunk *toFree = rt->gc.chunkPool.expire(rt, shouldShrink)) {
+ if (Chunk *toFree = chunkPool.expire(rt, shouldShrink)) {
AutoUnlockGC unlock(rt);
FreeChunkList(rt, toFree);
}
if (shouldShrink)
- DecommitArenas(rt);
+ decommitArenas();
}
- static void
- SweepBackgroundThings(JSRuntime* rt, bool onBackgroundThread)
+ void
+ GCRuntime::sweepBackgroundThings(bool onBackgroundThread)
{
/*
* We must finalize in the correct order, see comments in
@@ -2410,7 +2508,7 @@ SweepBackgroundThings(JSRuntime* rt, bool onBackgroundThread)
*/
FreeOp fop(rt, false);
for (int phase = 0 ; phase < BackgroundPhaseCount ; ++phase) {
- for (Zone *zone = rt->gc.sweepingZones; zone; zone = zone->gcNextGraphNode) {
+ for (Zone *zone = sweepingZones; zone; zone = zone->gcNextGraphNode) {
for (int index = 0 ; index < BackgroundPhaseLength[phase] ; ++index) {
AllocKind kind = BackgroundPhases[phase][index];
ArenaHeader *arenas = zone->allocator.arenas.arenaListsToSweep[kind];
@@ -2420,15 +2518,14 @@ SweepBackgroundThings(JSRuntime* rt, bool onBackgroundThread)
}
}
- rt->gc.sweepingZones = nullptr;
+ sweepingZones = nullptr;
}
- #ifdef JS_THREADSAFE
- static void
- AssertBackgroundSweepingFinished(JSRuntime *rt)
+ void
+ GCRuntime::assertBackgroundSweepingFinished()
{
- #ifdef DEBUG
- JS_ASSERT(!rt->gc.sweepingZones);
+ #if defined(JS_THREADSAFE) && defined(DEBUG)
+ JS_ASSERT(!sweepingZones);
for (ZonesIter zone(rt, WithAtoms); !zone.done(); zone.next()) {
for (unsigned i = 0; i < FINALIZE_LIMIT; ++i) {
JS_ASSERT(!zone->allocator.arenas.arenaListsToSweep[i]);
@@ -2438,6 +2535,7 @@ AssertBackgroundSweepingFinished(JSRuntime *rt)
#endif
}
+ #ifdef JS_THREADSAFE
unsigned
js::GetCPUCount()
{
@@ -2641,7 +2739,7 @@ GCHelperState::waitBackgroundSweepEnd()
while (state() == SWEEPING)
waitForBackgroundThread();
if (rt->gc.incrementalState == NO_INCREMENTAL)
- AssertBackgroundSweepingFinished(rt);
+ rt->gc.assertBackgroundSweepingFinished();
#endif /* JS_THREADSAFE */
}
@@ -2655,7 +2753,7 @@ GCHelperState::waitBackgroundSweepOrAllocEnd()
while (state() == SWEEPING || state() == CANCEL_ALLOCATION)
waitForBackgroundThread();
if (rt->gc.incrementalState == NO_INCREMENTAL)
- AssertBackgroundSweepingFinished(rt);
+ rt->gc.assertBackgroundSweepingFinished();
#endif /* JS_THREADSAFE */
}
@@ -2697,7 +2795,7 @@ GCHelperState::doSweep()
sweepFlag = false;
AutoUnlockGC unlock(rt);
- SweepBackgroundThings(rt, true);
+ rt->gc.sweepBackgroundThings(true);
if (freeCursor) {
void **array = freeCursorEnd - FREE_ARRAY_LENGTH;
@@ -2716,7 +2814,7 @@ GCHelperState::doSweep()
}
bool shrinking = shrinkFlag;
- ExpireChunksAndArenas(rt, shrinking);
+ rt->gc.expireChunksAndArenas(shrinking);
/*
* The main thread may have called ShrinkGCBuffers while
@@ -2725,7 +2823,7 @@ GCHelperState::doSweep()
*/
if (!shrinking && shrinkFlag) {
shrinkFlag = false;
- ExpireChunksAndArenas(rt, true);
+ rt->gc.expireChunksAndArenas(true);
}
}
#endif /* JS_THREADSAFE */
@@ -4276,7 +4374,7 @@ GCRuntime::endSweepPhase(JSGCInvocationKind gckind, bool lastGC)
* Expire needs to unlock it for other callers.
*/
AutoLockGC lock(rt);
- ExpireChunksAndArenas(rt, gckind == GC_SHRINK);
+ expireChunksAndArenas(gckind == GC_SHRINK);
}
}
@@ -4305,7 +4403,7 @@ GCRuntime::endSweepPhase(JSGCInvocationKind gckind, bool lastGC)
if (!sweepOnBackgroundThread) {
gcstats::AutoPhase ap(stats, gcstats::PHASE_DESTROY);
- SweepBackgroundThings(rt, false);
+ sweepBackgroundThings(false);
rt->freeLifoAlloc.freeAll();
@@ -4350,24 +4448,6 @@ GCRuntime::endSweepPhase(JSGCInvocationKind gckind, bool lastGC)
lastGCTime = PRMJ_Now();
}
- namespace {
- /* ...while this class is to be used only for garbage collection. */
- class AutoGCSession
- {
- GCRuntime *gc;
- AutoTraceSession session;
- bool canceled;
- public:
- explicit AutoGCSession(GCRuntime *gc);
- ~AutoGCSession();
- void cancel() { canceled = true; }
- };
- } /* anonymous namespace */
/* Start a new heap session. */
AutoTraceSession::AutoTraceSession(JSRuntime *rt, js::HeapState heapState)
: lock(rt),
@@ -4420,49 +4500,6 @@ AutoTraceSession::~AutoTraceSession()
}
}
- AutoGCSession::AutoGCSession(GCRuntime *gc)
- : gc(gc),
- session(gc->rt, MajorCollecting),
- canceled(false)
- {
- gc->isNeeded = false;
- gc->interFrameGC = true;
- gc->number++;
- // It's ok if threads other than the main thread have suppressGC set, as
- // they are operating on zones which will not be collected from here.
- JS_ASSERT(!gc->rt->mainThread.suppressGC);
- // Assert if this is a GC unsafe region.
- JS::AutoAssertOnGC::VerifyIsSafeToGC(gc->rt);
- }
- AutoGCSession::~AutoGCSession()
- {
- if (canceled)
- return;
- #ifndef JS_MORE_DETERMINISTIC
- gc->nextFullGCTime = PRMJ_Now() + GC_IDLE_FULL_SPAN;
- #endif
- gc->chunkAllocationSinceLastGC = false;
- #ifdef JS_GC_ZEAL
- /* Keeping these around after a GC is dangerous. */
- gc->clearSelectedForMarking();
- #endif
- /* Clear gcMallocBytes for all compartments */
- for (ZonesIter zone(gc->rt, WithAtoms); !zone.done(); zone.next()) {
- zone->resetGCMallocBytes();
- zone->unscheduleGC();
- }
- gc->resetMallocBytes();
- }
AutoCopyFreeListToArenas::AutoCopyFreeListToArenas(JSRuntime *rt, ZoneSelector selector)
: runtime(rt),
selector(selector)
@@ -4769,7 +4806,7 @@ gc::IsIncrementalGCSafe(JSRuntime *rt)
if (rt->keepAtoms())
return IncrementalSafety::Unsafe("keepAtoms set");
- if (!rt->gc.incrementalEnabled)
+ if (!rt->gc.isIncrementalGCEnabled())
return IncrementalSafety::Unsafe("incremental permanently disabled");
return IncrementalSafety::Safe();
@@ -4834,7 +4871,19 @@ MOZ_NEVER_INLINE bool
GCRuntime::gcCycle(bool incremental, int64_t budget, JSGCInvocationKind gckind,
JS::gcreason::Reason reason)
{
- AutoGCSession gcsession(this);
+ AutoTraceSession session(rt, MajorCollecting);
+ isNeeded = false;
+ interFrameGC = true;
+ number++;
+ // It's ok if threads other than the main thread have suppressGC set, as
+ // they are operating on zones which will not be collected from here.
+ JS_ASSERT(!rt->mainThread.suppressGC);
+ // Assert if this is a GC unsafe region.
+ JS::AutoAssertOnGC::VerifyIsSafeToGC(rt);
/*
* As we are about to purge caches and clear the mark bits we must wait for
@@ -4859,12 +4908,30 @@ GCRuntime::gcCycle(bool incremental, int64_t budget, JSGCInvocationKind gckind,
}
/* The GC was reset, so we need a do-over. */
- if (prevState != NO_INCREMENTAL && incrementalState == NO_INCREMENTAL) {
- gcsession.cancel();
+ if (prevState != NO_INCREMENTAL && incrementalState == NO_INCREMENTAL)
return true;
- }
incrementalCollectSlice(budget, reason, gckind);
+ #ifndef JS_MORE_DETERMINISTIC
+ nextFullGCTime = PRMJ_Now() + GC_IDLE_FULL_SPAN;
+ #endif
+ chunkAllocationSinceLastGC = false;
+ #ifdef JS_GC_ZEAL
+ /* Keeping these around after a GC is dangerous. */
+ clearSelectedForMarking();
+ #endif
+ /* Clear gcMallocBytes for all compartments */
+ for (ZonesIter zone(rt, WithAtoms); !zone.done(); zone.next()) {
+ zone->resetGCMallocBytes();
+ zone->unscheduleGC();
+ }
+ resetMallocBytes();
return false;
}
@@ -5057,6 +5124,35 @@ js::GCFinalSlice(JSRuntime *rt, JSGCInvocationKind gckind, JS::gcreason::Reason
rt->gc.collect(true, SliceBudget::Unlimited, gckind, reason);
}
+ void
+ GCRuntime::notifyDidPaint()
+ {
+ #ifdef JS_GC_ZEAL
+ if (zealMode == ZealFrameVerifierPreValue) {
+ verifyPreBarriers();
+ return;
+ }
+ if (zealMode == ZealFrameVerifierPostValue) {
+ verifyPostBarriers();
+ return;
+ }
+ if (zealMode == ZealFrameGCValue) {
+ JS::PrepareForFullGC(rt);
+ gcSlice(GC_NORMAL, JS::gcreason::REFRESH_FRAME);
+ return;
+ }
+ #endif
+ if (JS::IsIncrementalGCInProgress(rt) && !interFrameGC) {
+ JS::PrepareForIncrementalGC(rt);
+ gcSlice(GC_NORMAL, JS::gcreason::REFRESH_FRAME);
+ }
+ interFrameGC = false;
+ }
static bool
ZonesSelected(JSRuntime *rt)
{
@@ -5090,15 +5186,21 @@ js::PrepareForDebugGC(JSRuntime *rt)
JS_FRIEND_API(void)
JS::ShrinkGCBuffers(JSRuntime *rt)
{
+ rt->gc.shrinkBuffers();
+ }
+ void
+ GCRuntime::shrinkBuffers()
+ {
AutoLockHelperThreadState helperLock;
AutoLockGC lock(rt);
JS_ASSERT(!rt->isHeapBusy());
#ifdef JS_THREADSAFE
- rt->gc.startBackgroundShrink();
+ helperState.startBackgroundShrink();
#else
- ExpireChunksAndArenas(rt, true);
+ expireChunksAndArenas(true);
#endif
}
@@ -5554,27 +5656,27 @@ ArenaLists::containsArena(JSRuntime *rt, ArenaHeader *needle)
AutoMaybeTouchDeadZones::AutoMaybeTouchDeadZones(JSContext *cx)
: runtime(cx->runtime()),
- markCount(runtime->gc.objectsMarkedInDeadZones),
+ markCount(runtime->gc.objectsMarkedInDeadZonesCount()),
inIncremental(JS::IsIncrementalGCInProgress(runtime)),
- manipulatingDeadZones(runtime->gc.manipulatingDeadZones)
+ manipulatingDeadZones(runtime->gc.isManipulatingDeadZones())
{
- runtime->gc.manipulatingDeadZones = true;
+ runtime->gc.setManipulatingDeadZones(true);
}
AutoMaybeTouchDeadZones::AutoMaybeTouchDeadZones(JSObject *obj)
: runtime(obj->compartment()->runtimeFromMainThread()),
- markCount(runtime->gc.objectsMarkedInDeadZones),
+ markCount(runtime->gc.objectsMarkedInDeadZonesCount()),
inIncremental(JS::IsIncrementalGCInProgress(runtime)),
- manipulatingDeadZones(runtime->gc.manipulatingDeadZones)
+ manipulatingDeadZones(runtime->gc.isManipulatingDeadZones())
{
- runtime->gc.manipulatingDeadZones = true;
+ runtime->gc.setManipulatingDeadZones(true);
}
AutoMaybeTouchDeadZones::~AutoMaybeTouchDeadZones()
{
- runtime->gc.manipulatingDeadZones = manipulatingDeadZones;
+ runtime->gc.setManipulatingDeadZones(manipulatingDeadZones);
- if (inIncremental && runtime->gc.objectsMarkedInDeadZones != markCount) {
+ if (inIncremental && runtime->gc.objectsMarkedInDeadZonesCount() != markCount) {
JS::PrepareForFullGC(runtime);
js::GC(runtime, GC_NORMAL, JS::gcreason::TRANSPLANT);
}

View File

@@ -27,9 +27,9 @@ struct AutoMarkInDeadZone
: zone(zone),
scheduled(zone->scheduledForDestruction)
{
- JSRuntime *rt = zone->runtimeFromMainThread();
- if (rt->gc.manipulatingDeadZones && zone->scheduledForDestruction) {
- rt->gc.objectsMarkedInDeadZones++;
+ gc::GCRuntime &gc = zone->runtimeFromMainThread()->gc;
+ if (gc.isManipulatingDeadZones() && zone->scheduledForDestruction) {
+ gc.incObjectsMarkedInDeadZone();
zone->scheduledForDestruction = false;
}
}
@@ -453,7 +453,7 @@ class GCZoneGroupIter {
public:
explicit GCZoneGroupIter(JSRuntime *rt) {
JS_ASSERT(rt->isHeapBusy());
- current = rt->gc.currentZoneGroup;
+ current = rt->gc.getCurrentZoneGroup();
}
bool done() const { return !current; }

View File

@@ -1427,7 +1427,7 @@ NewObject(ExclusiveContext *cx, types::TypeObject *type_, JSObject *parent, gc::
if (!cx->shouldBeJSContext())
return nullptr;
JSRuntime *rt = cx->asJSContext()->runtime();
- rt->gc.incrementalEnabled = false;
+ rt->gc.disableIncrementalGC();
#ifdef DEBUG
if (rt->gcMode() == JSGC_MODE_INCREMENTAL) {

View File

@@ -958,14 +958,7 @@ struct JSRuntime : public JS::shadow::Runtime,
bool isHeapMinorCollecting() { return gc.isHeapMinorCollecting(); }
bool isHeapCollecting() { return gc.isHeapCollecting(); }
- // Performance note: if isFJMinorCollecting turns out to be slow
- // because reading the counter is slow then we may be able to
- // augment the counter with a volatile flag that is set iff the
- // counter is greater than zero. (It will require some care to
- // make sure the two variables stay in sync.)
- bool isFJMinorCollecting() { return gc.fjCollectionCounter > 0; }
- void incFJMinorCollecting() { gc.fjCollectionCounter++; }
- void decFJMinorCollecting() { gc.fjCollectionCounter--; }
+ bool isFJMinorCollecting() { return gc.isFJMinorCollecting(); }
int gcZeal() { return gc.zeal(); }
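Editor's note: JSRuntime keeps only thin forwarding shims like the isFJMinorCollecting line above, so existing call sites compile unchanged while the state itself moves behind GCRuntime's accessors. A hypothetical caller, for illustration only:

    // Unchanged call site: the JSRuntime shim forwards to gc.isFJMinorCollecting().
    if (rt->isFJMinorCollecting())
        return;  // hypothetical: bail out while ForkJoin workers are collecting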