Bug 1677765 - Perform arena unmarking concurrently r=sfink

This adds a new background task for unmarking which gets kicked off in the
first GC slice. There's a new explicit 'prepare' state for both the GCRuntime
and zones. The GC proper starts in a second (or later) slice once unmarking
has finished. Because of this we have to be careful when checking GC state,
because the 'prepare' state is not part of the GC from the point of view of
snapshot-at-the-beginning.
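
For example, code that consults mark bits now has to bail out while a zone is
preparing, along these lines (condensed from the CanCheckGrayBits change
below):

    // If the zone's mark bits are being cleared concurrently we can't depend
    // on their contents.
    if (tc->zone()->isGCPreparing()) {
      return false;
    }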

I had to update a bunch of test code that assumed that GC started in the mark
state.
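
The typical fix, as in the jsapi-tests changes below, is to drive slices until
the collector has left the Prepare state:

    rt->gc.startDebugGC(GC_NORMAL, budget);
    while (rt->gc.state() == gc::State::Prepare) {
      rt->gc.debugGCSlice(budget);
    }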

This fixes cancelling a parallel task while the task is idle, so that
cancellation no longer leaves the cancel flag set.

It also stops us sending telemetry about how much was collected for GCs that
were reset, since we don't always have the original heap size data at that
point if the reset happened during the prepare phase.

Finally, there's a new zeal mode to test this, named YieldBeforeRootMarking
(slightly confusing, but in line with the other mode names).

Differential Revision: https://phabricator.services.mozilla.com/D98481
Jon Coppeard 2020-12-10 10:14:01 +00:00
parent 4efdce1a1d
commit ebbac14d74
31 changed files with 556 additions and 208 deletions

View File

@ -556,21 +556,12 @@ namespace gc {
extern JS_PUBLIC_API void PerformIncrementalReadBarrier(JS::GCCellPtr thing);
static MOZ_ALWAYS_INLINE bool IsIncrementalBarrierNeededOnTenuredGCThing(
const JS::GCCellPtr thing) {
MOZ_ASSERT(thing);
MOZ_ASSERT(!js::gc::IsInsideNursery(thing.asCell()));
static MOZ_ALWAYS_INLINE void ExposeGCThingToActiveJS(JS::GCCellPtr thing) {
// TODO: I'd like to assert !RuntimeHeapIsBusy() here but this gets
// called while we are tracing the heap, e.g. during memory reporting
// (see bug 1313318).
MOZ_ASSERT(!JS::RuntimeHeapIsCollecting());
JS::Zone* zone = JS::GetTenuredGCThingZone(thing);
return JS::shadow::Zone::from(zone)->needsIncrementalBarrier();
}
static MOZ_ALWAYS_INLINE void ExposeGCThingToActiveJS(JS::GCCellPtr thing) {
// GC things residing in the nursery cannot be gray: they have no mark bits.
// All live objects in the nursery are moved to tenured at the beginning of
// each GC slice, so the gray marker never sees nursery things.
@ -584,13 +575,16 @@ static MOZ_ALWAYS_INLINE void ExposeGCThingToActiveJS(JS::GCCellPtr thing) {
return;
}
if (IsIncrementalBarrierNeededOnTenuredGCThing(thing)) {
auto* zone = JS::shadow::Zone::from(JS::GetTenuredGCThingZone(thing));
if (zone->needsIncrementalBarrier()) {
PerformIncrementalReadBarrier(thing);
} else if (detail::TenuredCellIsMarkedGray(thing.asCell())) {
JS::UnmarkGrayGCThingRecursively(thing);
} else if (!zone->isGCPreparing() &&
detail::TenuredCellIsMarkedGray(thing.asCell())) {
MOZ_ALWAYS_TRUE(JS::UnmarkGrayGCThingRecursively(thing));
}
MOZ_ASSERT(!detail::TenuredCellIsMarkedGray(thing.asCell()));
MOZ_ASSERT_IF(!zone->isGCPreparing(),
!detail::TenuredCellIsMarkedGray(thing.asCell()));
}
template <typename T>

View File

@ -74,6 +74,9 @@ struct Zone {
bool isGCMarking() const {
return isGCMarkingBlackOnly() || isGCMarkingBlackAndGray();
}
bool isGCMarkingOrSweeping() const {
return gcState_ >= MarkBlackOnly && gcState_ <= Sweep;
}
bool isGCSweepingOrCompacting() const {
return gcState_ == Sweep || gcState_ == Compact;
}
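
These range checks depend on the declaration order of the zone GC states. With
the new state added, the ordering implied by this patch is roughly (a sketch,
not the verbatim declaration):

    enum GCState : uint8_t {
      NoGC,
      Prepare,  // new: mark state is being cleared concurrently
      MarkBlackOnly,
      MarkBlackAndGray,
      Sweep,
      Finished,
      Compact
    };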

View File

@ -3639,6 +3639,8 @@ void Debugger::traceCrossCompartmentEdges(JSTracer* trc) {
*/
/* static */
void DebugAPI::traceCrossCompartmentEdges(JSTracer* trc) {
MOZ_ASSERT(JS::RuntimeHeapIsMajorCollecting());
JSRuntime* rt = trc->runtime();
gc::State state = rt->gc.state();

View File

@ -465,7 +465,7 @@ void GCRuntime::checkIncrementalZoneState(JSContext* cx, T* t) {
TenuredCell* cell = &t->asTenured();
Zone* zone = cell->zone();
if (zone->isGCMarking() || zone->isGCSweeping()) {
if (zone->isGCMarkingOrSweeping()) {
MOZ_ASSERT(cell->isMarkedBlack());
} else {
MOZ_ASSERT(!cell->isMarkedAny());
@ -610,7 +610,8 @@ inline TenuredCell* FreeLists::setArenaAndAllocate(Arena* arena,
FreeSpan* span = arena->getFirstFreeSpan();
freeLists_[kind] = span;
if (MOZ_UNLIKELY(arena->zone->wasGCStarted())) {
Zone* zone = arena->zone;
if (MOZ_UNLIKELY(zone->isGCMarkingOrSweeping())) {
arena->arenaAllocatedDuringGC();
}
@ -625,12 +626,10 @@ void Arena::arenaAllocatedDuringGC() {
// incremental GC will be marked black by pre-marking all free cells in the
// arena we are about to allocate from.
if (zone->needsIncrementalBarrier() || zone->isGCSweeping()) {
for (ArenaFreeCellIter iter(this); !iter.done(); iter.next()) {
TenuredCell* cell = iter.getCell();
MOZ_ASSERT(!cell->isMarkedAny());
cell->markBlack();
}
MOZ_ASSERT(zone->isGCMarkingOrSweeping());
for (ArenaFreeCellIter cell(this); !cell.done(); cell.next()) {
MOZ_ASSERT(!cell->isMarkedAny());
cell->markBlack();
}
}
@ -654,6 +653,25 @@ void ArenaLists::setParallelAllocEnabled(bool enabled) {
}
}
void GCRuntime::setParallelUnmarkEnabled(bool enabled) {
// This can only be changed on the main thread; otherwise we could race.
MOZ_ASSERT(CurrentThreadCanAccessRuntime(rt));
MOZ_ASSERT(JS::RuntimeHeapIsMajorCollecting());
for (GCZonesIter zone(this); !zone.done(); zone.next()) {
zone->arenas.setParallelUnmarkEnabled(enabled);
}
}
void ArenaLists::setParallelUnmarkEnabled(bool enabled) {
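// Flip every alloc kind between None and ParallelUnmark, asserting that each
// kind is currently in the opposite state.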
static const ConcurrentUse states[2] = {ConcurrentUse::None,
ConcurrentUse::ParallelUnmark};
for (auto kind : AllAllocKinds()) {
MOZ_ASSERT(concurrentUse(kind) == states[!enabled]);
concurrentUse(kind) = states[enabled];
}
}
// /////////// Chunk -> Arena Allocator //////////////////////////////////////
bool GCRuntime::wantBackgroundAllocation(const AutoLockGC& lock) const {
@ -808,7 +826,7 @@ void BackgroundAllocTask::run(AutoLockHelperThreadState& lock) {
AutoTraceLog logAllocation(logger, TraceLogger_GCAllocation);
AutoLockGC gcLock(gc);
while (!cancel_ && gc->wantBackgroundAllocation(gcLock)) {
while (!isCancelled() && gc->wantBackgroundAllocation(gcLock)) {
Chunk* chunk;
{
AutoUnlockGC unlock(gcLock);

View File

@ -153,6 +153,10 @@ class ArenaList {
Arena* relocateArenas(Arena* toRelocate, Arena* relocated,
js::SliceBudget& sliceBudget,
gcstats::Statistics& stats);
#ifdef DEBUG
void dump();
#endif
};
/*
@ -254,7 +258,8 @@ class ArenaLists {
enum class ConcurrentUse : uint32_t {
None,
BackgroundFinalize,
ParallelAlloc
ParallelAlloc,
ParallelUnmark
};
using ConcurrentUseState =
@ -343,6 +348,7 @@ class ArenaLists {
static void backgroundFinalize(JSFreeOp* fop, Arena* listHead, Arena** empty);
void setParallelAllocEnabled(bool enabled);
void setParallelUnmarkEnabled(bool enabled);
inline void mergeNewArenasInMarkPhase();

View File

@ -48,6 +48,7 @@
* The collector proceeds through the following states, the current state being
* held in JSRuntime::gcIncrementalState:
*
* - Prepare - unmarks GC things, discards JIT code and other setup
* - MarkRoots - marks the stack and other roots
* - Mark - incrementally marks reachable things
* - Sweep - sweeps zones in groups and continues marking unswept zones
@ -55,18 +56,22 @@
* - Compact - incrementally compacts by zone
* - Decommit - performs background decommit and chunk removal
*
* The MarkRoots activity always takes place in the first slice. The next two
* states can take place over one or more slices.
* Roots are marked in the first MarkRoots slice; this is the start of the GC
* proper. The following states can take place over one or more slices.
*
* In other words an incremental collection proceeds like this:
*
* Slice 1: MarkRoots: Roots pushed onto the mark stack.
* Slice 1: Prepare: Starts background task to unmark GC things
*
* ... JS code runs, background unmarking finishes ...
*
* Slice 2: MarkRoots: Roots are pushed onto the mark stack.
* Mark: The mark stack is processed by popping an element,
* marking it, and pushing its children.
*
* ... JS code runs ...
*
* Slice 2: Mark: More mark stack processing.
* Slice 3: Mark: More mark stack processing.
*
* ... JS code runs ...
*
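
A condensed sketch of how the new state threads through the slice state
machine (simplified from the GCRuntime::incrementalSlice changes below; zeal
checks and failure paths are elided):

    switch (incrementalState) {
      case State::NotActive:
        // First slice: kick off the background unmark task.
        beginPreparePhase(reason, session);
        incrementalState = State::Prepare;
        [[fallthrough]];
      case State::Prepare:
        // Yield until background unmarking has finished; a later slice
        // resumes here. No follow-up slice is triggered on completion
        // because the next step (root marking) may itself take a long time.
        if (waitForBackgroundTask(unmarkTask, budget,
                                  DontTriggerSliceWhenFinished) ==
            NotFinished) {
          break;
        }
        endPreparePhase(reason);
        incrementalState = State::MarkRoots;
        [[fallthrough]];
      case State::MarkRoots:
        beginMarkPhase(session);  // The GC proper starts here.
        // ... marking, sweeping and compacting proceed as before ...
    }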
@ -409,17 +414,32 @@ void Arena::unmarkAll() {
}
void Arena::unmarkPreMarkedFreeCells() {
for (ArenaFreeCellIter iter(this); !iter.done(); iter.next()) {
TenuredCell* cell = iter.getCell();
for (ArenaFreeCellIter cell(this); !cell.done(); cell.next()) {
MOZ_ASSERT(cell->isMarkedBlack());
cell->unmark();
}
}
#ifdef DEBUG
void Arena::checkNoMarkedFreeCells() {
for (ArenaFreeCellIter iter(this); !iter.done(); iter.next()) {
MOZ_ASSERT(!iter.getCell()->isMarkedAny());
for (ArenaFreeCellIter cell(this); !cell.done(); cell.next()) {
MOZ_ASSERT(!cell->isMarkedAny());
}
}
void Arena::checkAllCellsMarkedBlack() {
for (ArenaCellIter cell(this); !cell.done(); cell.next()) {
MOZ_ASSERT(cell->isMarkedBlack());
}
}
#endif
#if defined(DEBUG) || defined(JS_GC_ZEAL)
void Arena::checkNoMarkedCells() {
for (ArenaCellIter cell(this); !cell.done(); cell.next()) {
MOZ_ASSERT(!cell->isMarkedAny());
}
}
#endif
@ -949,6 +969,7 @@ GCRuntime::GCRuntime(JSRuntime* rt)
lowMemoryState(false),
lock(mutexid::GCLock),
allocTask(this, emptyChunks_.ref()),
unmarkTask(this),
sweepTask(this),
freeTask(this),
decommitTask(this),
@ -979,6 +1000,8 @@ const char gc::ZealModeHelpText[] =
" 1: (RootsChange) Collect when roots are added or removed\n"
" 2: (Alloc) Collect when every N allocations (default: 100)\n"
" 4: (VerifierPre) Verify pre write barriers between instructions\n"
" 6: (YieldBeforeRootMarking) Incremental GC in two slices that yields "
"before root marking\n"
" 7: (GenerationalGC) Collect the nursery every N nursery allocations\n"
" 8: (YieldBeforeMarking) Incremental GC in two slices that yields "
"between\n"
@ -1019,6 +1042,7 @@ const char gc::ZealModeHelpText[] =
// The set of zeal modes that control incremental slices. These modes are
// mutually exclusive.
static const mozilla::EnumSet<ZealMode> IncrementalSliceZealModes = {
ZealMode::YieldBeforeRootMarking,
ZealMode::YieldBeforeMarking,
ZealMode::YieldBeforeSweeping,
ZealMode::IncrementalMultipleSlices,
@ -1748,8 +1772,10 @@ bool GCRuntime::addRoot(Value* vp, const char* name) {
* or ModifyBusyCount in workers). We need a read barrier to cover these
* cases.
*/
if (isIncrementalGCInProgress()) {
GCPtrValue::preWriteBarrier(*vp);
MOZ_ASSERT(vp);
Value value = *vp;
if (value.isGCThing()) {
ValuePreWriteBarrier(value);
}
return rootsHash.ref().put(vp, name);
@ -1834,6 +1860,22 @@ bool GCRuntime::canRelocateZone(Zone* zone) const {
return true;
}
#ifdef DEBUG
void js::gc::ArenaList::dump() {
fprintf(stderr, "ArenaList %p:", this);
if (cursorp_ == &head_) {
fprintf(stderr, " *");
}
for (Arena* arena = head(); arena; arena = arena->next) {
fprintf(stderr, " %p", arena);
if (cursorp_ == &arena->next) {
fprintf(stderr, " *");
}
}
fprintf(stderr, "\n");
}
#endif
Arena* ArenaList::removeRemainingArenas(Arena** arenap) {
// This is only ever called to remove arenas that are after the cursor, so
// we don't need to update it.
@ -3091,8 +3133,9 @@ TriggerResult GCRuntime::checkHeapThreshold(
MOZ_ASSERT_IF(heapThreshold.hasSliceThreshold(), zone->wasGCStarted());
size_t usedBytes = heapSize.bytes();
size_t thresholdBytes = zone->wasGCStarted() ? heapThreshold.sliceBytes()
: heapThreshold.startBytes();
size_t thresholdBytes = zone->gcState() > Zone::Prepare
? heapThreshold.sliceBytes()
: heapThreshold.startBytes();
size_t niThreshold = heapThreshold.incrementalLimitBytes();
MOZ_ASSERT(niThreshold >= thresholdBytes);
@ -3948,8 +3991,6 @@ bool GCRuntime::prepareZonesForCollection(JS::GCReason reason,
*isFullOut = true;
bool any = false;
auto currentTime = ReallyNow();
for (ZonesIter zone(this, WithAtoms); !zone.done(); zone.next()) {
/* Set up which zones will be collected. */
bool shouldCollect = ShouldCollectZone(zone, reason);
@ -3962,35 +4003,6 @@ bool GCRuntime::prepareZonesForCollection(JS::GCReason reason,
}
zone->setWasCollected(shouldCollect);
zone->setPreservingCode(false);
}
// Discard JIT code more aggressively if the process is approaching its
// executable code limit.
bool canAllocateMoreCode = jit::CanLikelyAllocateMoreExecutableMemory();
for (CompartmentsIter c(rt); !c.done(); c.next()) {
c->gcState.scheduledForDestruction = false;
c->gcState.maybeAlive = false;
c->gcState.hasEnteredRealm = false;
for (RealmsInCompartmentIter r(c); !r.done(); r.next()) {
if (r->shouldTraceGlobal() || !r->zone()->isGCScheduled()) {
c->gcState.maybeAlive = true;
}
if (shouldPreserveJITCode(r, currentTime, reason, canAllocateMoreCode)) {
r->zone()->setPreservingCode(true);
}
if (r->hasBeenEnteredIgnoringJit()) {
c->gcState.hasEnteredRealm = true;
}
}
}
if (!cleanUpEverything && canAllocateMoreCode) {
jit::JitActivationIterator activation(rt->mainContextFromOwnThread());
if (!activation.done()) {
activation->compartment()->zone()->setPreservingCode(true);
}
}
/*
@ -4057,20 +4069,6 @@ void GCRuntime::purgeSourceURLsForShrinkingGC() {
}
}
using ArenasToUnmark = NestedIterator<GCZonesIter, ArenasToUpdate>;
static size_t UnmarkArenaListSegment(GCRuntime* gc,
const ArenaListSegment& arenas) {
MOZ_ASSERT(arenas.begin);
MovingTracer trc(gc->rt);
size_t count = 0;
for (Arena* arena = arenas.begin; arena != arenas.end; arena = arena->next) {
arena->unmarkAll();
count++;
}
return count * 256;
}
void GCRuntime::unmarkWeakMaps() {
for (GCZonesIter zone(this); !zone.done(); zone.next()) {
/* Unmark all weak maps in the zones being collected. */
@ -4078,7 +4076,9 @@ void GCRuntime::unmarkWeakMaps() {
}
}
bool GCRuntime::beginMarkPhase(JS::GCReason reason, AutoGCSession& session) {
bool GCRuntime::beginPreparePhase(JS::GCReason reason, AutoGCSession& session) {
gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::PREPARE);
#ifdef DEBUG
if (fullCompartmentChecks) {
checkForCompartmentMismatches();
@ -4095,37 +4095,136 @@ bool GCRuntime::beginMarkPhase(JS::GCReason reason, AutoGCSession& session) {
}
/*
* In an incremental GC, clear the arena free lists to ensure that subsequent
* allocations refill them and end up marking new cells black. See
* arenaAllocatedDuringGC().
* Start a parallel task to clear all mark state for the zones we are
* collecting. This is linear in the size of the heap we are collecting and so
* can be slow. It happens concurrently with the mutator, and the GC proper
* does not start until it is complete.
*/
setParallelUnmarkEnabled(true);
unmarkTask.initZones();
unmarkTask.start();
/*
* Process any queued source compressions during the start of a major
* GC.
*/
if (!IsShutdownReason(reason) && reason != JS::GCReason::ROOTS_REMOVED) {
StartHandlingCompressionsOnGC(rt);
}
return true;
}
void BackgroundUnmarkTask::initZones() {
MOZ_ASSERT(isIdle());
MOZ_ASSERT(zones.empty());
MOZ_ASSERT(!isCancelled());
// We can't safely iterate the zones vector from another thread so we copy the
// zones to be collected into another vector.
AutoEnterOOMUnsafeRegion oomUnsafe;
for (GCZonesIter zone(gc); !zone.done(); zone.next()) {
if (!zones.append(zone.get())) {
oomUnsafe.crash("BackgroundUnmarkTask::initZones");
}
}
}
void BackgroundUnmarkTask::run(AutoLockHelperThreadState& helperThreadLock) {
AutoUnlockHelperThreadState unlock(helperThreadLock);
AutoTraceLog log(TraceLoggerForCurrentThread(), TraceLogger_GCUnmarking);
// We need to hold the GC lock while traversing the arena lists.
AutoLockGC gcLock(gc);
unmarkZones(gcLock);
zones.clear();
}
void BackgroundUnmarkTask::unmarkZones(AutoLockGC& lock) {
for (Zone* zone : zones) {
for (auto kind : AllAllocKinds()) {
for (ArenaIter arena(zone, kind); !arena.done(); arena.next()) {
AutoUnlockGC unlock(lock);
arena->unmarkAll();
if (isCancelled()) {
return;
}
}
}
}
}
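
For orientation, the main-thread side of this task's lifecycle, condensed from
beginPreparePhase, endPreparePhase and resetIncrementalGC in this patch:

    // First slice (beginPreparePhase): snapshot the zone list, start the task.
    setParallelUnmarkEnabled(true);
    unmarkTask.initZones();
    unmarkTask.start();

    // A later slice (endPreparePhase): the task has finished by this point.
    MOZ_ASSERT(unmarkTask.isIdle());
    setParallelUnmarkEnabled(false);

    // Reset while still preparing (resetIncrementalGC): cancel and clean up.
    unmarkTask.cancelAndWait();
    setParallelUnmarkEnabled(false);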
void GCRuntime::endPreparePhase(JS::GCReason reason) {
MOZ_ASSERT(unmarkTask.isIdle());
setParallelUnmarkEnabled(false);
for (GCZonesIter zone(this); !zone.done(); zone.next()) {
/*
* In an incremental GC, clear the arena free lists to ensure that subsequent
* allocations refill them and end up marking new cells black. See
* arenaAllocatedDuringGC().
*/
zone->arenas.clearFreeLists();
zone->arenas.checkGCStateNotInUse();
zone->markedStrings = 0;
zone->finalizedStrings = 0;
zone->setPreservingCode(false);
#ifdef JS_GC_ZEAL
if (hasZealMode(ZealMode::YieldBeforeRootMarking)) {
for (auto kind : AllAllocKinds()) {
for (ArenaIter arena(zone, kind); !arena.done(); arena.next()) {
arena->checkNoMarkedCells();
}
}
}
#endif
}
marker.start();
GCMarker* gcmarker = &marker;
gcmarker->clearMarkCount();
// Discard JIT code more aggressively if the process is approaching its
// executable code limit.
bool canAllocateMoreCode = jit::CanLikelyAllocateMoreExecutableMemory();
auto currentTime = ReallyNow();
for (CompartmentsIter c(rt); !c.done(); c.next()) {
c->gcState.scheduledForDestruction = false;
c->gcState.maybeAlive = false;
c->gcState.hasEnteredRealm = false;
for (RealmsInCompartmentIter r(c); !r.done(); r.next()) {
if (r->shouldTraceGlobal() || !r->zone()->isGCScheduled()) {
c->gcState.maybeAlive = true;
}
if (shouldPreserveJITCode(r, currentTime, reason, canAllocateMoreCode)) {
r->zone()->setPreservingCode(true);
}
if (r->hasBeenEnteredIgnoringJit()) {
c->gcState.hasEnteredRealm = true;
}
}
}
if (!cleanUpEverything && canAllocateMoreCode) {
jit::JitActivationIterator activation(rt->mainContextFromOwnThread());
if (!activation.done()) {
activation->compartment()->zone()->setPreservingCode(true);
}
}
/*
* Perform remaining preparation work that must take place in the first true
* GC slice.
*/
{
gcstats::AutoPhase ap1(stats(), gcstats::PhaseKind::PREPARE);
AutoLockHelperThreadState helperLock;
/*
* Clear all mark state for the zones we are collecting. This is linear in
* the size of the heap we are collecting and so can be slow. Do this in
* parallel across multiple helper threads.
*/
ArenasToUnmark unmarkingWork(this);
AutoRunParallelWork unmarkCollectedZones(
this, UnmarkArenaListSegment, gcstats::PhaseKind::UNMARK, unmarkingWork,
SliceBudget::unlimited(), helperLock);
/* Clear mark state for WeakMaps in parallel with other work. */
AutoRunParallelTask unmarkWeakMaps(this, &GCRuntime::unmarkWeakMaps,
gcstats::PhaseKind::UNMARK_WEAKMAPS,
@ -4181,13 +4280,23 @@ bool GCRuntime::beginMarkPhase(JS::GCReason reason, AutoGCSession& session) {
}
}
}
}
void GCRuntime::beginMarkPhase(AutoGCSession& session) {
/*
* Mark phase.
*/
gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::MARK);
// This is the slice in which we actually start collecting. The number can be
// used to check whether a major GC has started, so we must not increment it
// until we get here.
incMajorGcNumber();
marker.start();
GCMarker* gcmarker = &marker;
gcmarker->clearMarkCount();
for (GCZonesIter zone(this); !zone.done(); zone.next()) {
// Incremental marking barriers are enabled at this point.
zone->changeGCState(Zone::Prepare, Zone::MarkBlackOnly);
@ -4205,16 +4314,6 @@ bool GCRuntime::beginMarkPhase(JS::GCReason reason, AutoGCSession& session) {
updateMemoryCountersOnGCStart();
stats().measureInitialHeapSize();
/*
* Process any queued source compressions during the start of a major
* GC.
*/
if (!IsShutdownReason(reason) && reason != JS::GCReason::ROOTS_REMOVED) {
StartHandlingCompressionsOnGC(rt);
}
return true;
}
void GCRuntime::findDeadCompartments() {
@ -6318,29 +6417,40 @@ void GCRuntime::endCompactPhase() { startedCompacting = false; }
void GCRuntime::finishCollection() {
assertBackgroundSweepingFinished();
MOZ_ASSERT(marker.isDrained());
marker.stop();
clearBufferedGrayRoots();
auto currentTime = ReallyNow();
schedulingState.updateHighFrequencyMode(lastGCEndTime_, currentTime,
tunables);
clearBufferedGrayRoots();
maybeStopStringPretenuring();
{
AutoLockGC lock(this);
updateGCThresholdsAfterCollection(lock);
for (GCZonesIter zone(this); !zone.done(); zone.next()) {
zone->changeGCState(Zone::Finished, Zone::NoGC);
zone->notifyObservingDebuggers();
zone->arenas.checkGCStateNotInUse();
}
}
for (GCZonesIter zone(this); !zone.done(); zone.next()) {
zone->changeGCState(Zone::Finished, Zone::NoGC);
zone->notifyObservingDebuggers();
}
auto currentTime = ReallyNow();
schedulingState.updateHighFrequencyMode(lastGCEndTime_, currentTime,
tunables);
lastGCEndTime_ = currentTime;
checkGCStateNotInUse();
}
void GCRuntime::checkGCStateNotInUse() {
#ifdef DEBUG
MOZ_ASSERT(!marker.isActive());
for (ZonesIter zone(this, WithAtoms); !zone.done(); zone.next()) {
if (zone->wasCollected()) {
zone->arenas.checkGCStateNotInUse();
}
MOZ_ASSERT(!zone->wasGCStarted());
MOZ_ASSERT(!zone->needsIncrementalBarrier());
MOZ_ASSERT(!zone->isOnList());
@ -6349,14 +6459,9 @@ void GCRuntime::finishCollection() {
MOZ_ASSERT(zonesToMaybeCompact.ref().isEmpty());
MOZ_ASSERT(cellsToAssertNotGray.ref().empty());
#ifdef DEBUG
{
AutoLockHelperThreadState lock;
MOZ_ASSERT(!requestSliceAfterBackgroundTask);
}
AutoLockHelperThreadState lock;
MOZ_ASSERT(!requestSliceAfterBackgroundTask);
#endif
lastGCEndTime_ = currentTime;
}
void GCRuntime::maybeStopStringPretenuring() {
@ -6500,6 +6605,18 @@ GCRuntime::IncrementalResult GCRuntime::resetIncrementalGC(
MOZ_CRASH("Unexpected GC state in resetIncrementalGC");
break;
case State::Prepare:
unmarkTask.cancelAndWait();
setParallelUnmarkEnabled(false);
for (GCZonesIter zone(this); !zone.done(); zone.next()) {
zone->changeGCState(Zone::Prepare, Zone::NoGC);
}
incrementalState = State::NotActive;
checkGCStateNotInUse();
break;
case State::Mark: {
// Cancel any ongoing marking.
marker.reset();
@ -6654,7 +6771,6 @@ void GCRuntime::incrementalSlice(SliceBudget& budget,
switch (incrementalState) {
case State::NotActive:
incMajorGcNumber();
invocationKind = gckind.valueOr(GC_NORMAL);
initialReason = reason;
cleanUpEverything = ShouldCleanUpEverything(reason, invocationKind);
@ -6670,16 +6786,32 @@ void GCRuntime::incrementalSlice(SliceBudget& budget,
}
#endif
incrementalState = State::MarkRoots;
[[fallthrough]];
case State::MarkRoots:
if (!beginMarkPhase(reason, session)) {
incrementalState = State::Prepare;
if (!beginPreparePhase(reason, session)) {
incrementalState = State::NotActive;
return;
}
if (isIncremental && useZeal &&
hasZealMode(ZealMode::YieldBeforeRootMarking)) {
break;
}
[[fallthrough]];
case State::Prepare:
if (waitForBackgroundTask(unmarkTask, budget,
DontTriggerSliceWhenFinished) == NotFinished) {
break;
}
endPreparePhase(reason);
incrementalState = State::MarkRoots;
[[fallthrough]];
case State::MarkRoots:
beginMarkPhase(session);
/* If we needed delayed marking for gray roots, then collect until done.
*/
if (isIncremental && !hasValidGrayRootsBuffer()) {
@ -6764,7 +6896,8 @@ void GCRuntime::incrementalSlice(SliceBudget& budget,
[[fallthrough]];
case State::Finalize:
if (waitForBackgroundTask(sweepTask, budget) == NotFinished) {
if (waitForBackgroundTask(sweepTask, budget, TriggerSliceWhenFinished) ==
NotFinished) {
break;
}
@ -6810,7 +6943,8 @@ void GCRuntime::incrementalSlice(SliceBudget& budget,
[[fallthrough]];
case State::Decommit:
if (waitForBackgroundTask(decommitTask, budget) == NotFinished) {
if (waitForBackgroundTask(decommitTask, budget,
TriggerSliceWhenFinished) == NotFinished) {
break;
}
@ -6845,14 +6979,17 @@ bool GCRuntime::hasForegroundWork() const {
}
}
IncrementalProgress GCRuntime::waitForBackgroundTask(GCParallelTask& task,
SliceBudget& budget) {
IncrementalProgress GCRuntime::waitForBackgroundTask(
GCParallelTask& task, const SliceBudget& budget,
ShouldTriggerSliceWhenFinished triggerSlice) {
// In incremental collections, yield if the task has not finished and request
// a slice to notify us when this happens.
if (!budget.isUnlimited()) {
AutoLockHelperThreadState lock;
if (task.wasStarted(lock)) {
requestSliceAfterBackgroundTask = true;
if (triggerSlice) {
requestSliceAfterBackgroundTask = true;
}
return NotFinished;
}
}
@ -6860,7 +6997,9 @@ IncrementalProgress GCRuntime::waitForBackgroundTask(GCParallelTask& task,
// Otherwise in non-incremental collections, wait here.
gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::WAIT_BACKGROUND_THREAD);
task.join();
cancelRequestedGCAfterBackgroundTask();
if (triggerSlice) {
cancelRequestedGCAfterBackgroundTask();
}
return Finished;
}
@ -7122,6 +7261,12 @@ MOZ_NEVER_INLINE GCRuntime::IncrementalResult GCRuntime::gcCycle(
auto result = budgetIncrementalGC(nonincrementalByAPI, reason, budget);
if (result == IncrementalResult::ResetIncremental) {
if (incrementalState == State::NotActive) {
// The collection was reset and has finished.
return result;
}
// The collection was reset but we must finish up some remaining work.
reason = JS::GCReason::RESET;
}
@ -7193,6 +7338,8 @@ bool GCRuntime::shouldCollectNurseryForSlice(bool nonincrementalByAPI,
switch (incrementalState) {
case State::NotActive:
return true;
case State::Prepare:
return true;
case State::Mark:
return (mightSweepInThisSlice(nonIncremental) &&
shouldCollectForSweeping) ||
@ -7339,9 +7486,9 @@ struct MOZ_RAII AutoSetZoneSliceThresholds {
// On entry, zones that are already collecting should have a slice threshold
// set.
for (ZonesIter zone(gc, WithAtoms); !zone.done(); zone.next()) {
MOZ_ASSERT(zone->wasGCStarted() ==
MOZ_ASSERT((zone->gcState() > Zone::Prepare) ==
zone->gcHeapThreshold.hasSliceThreshold());
MOZ_ASSERT(zone->wasGCStarted() ==
MOZ_ASSERT((zone->gcState() > Zone::Prepare) ==
zone->mallocHeapThreshold.hasSliceThreshold());
}
}
@ -7349,7 +7496,7 @@ struct MOZ_RAII AutoSetZoneSliceThresholds {
~AutoSetZoneSliceThresholds() {
// On exit, update the thresholds for all collecting zones.
for (ZonesIter zone(gc, WithAtoms); !zone.done(); zone.next()) {
if (zone->wasGCStarted()) {
if (zone->gcState() > Zone::Prepare) {
zone->setGCSliceThresholds(*gc);
} else {
MOZ_ASSERT(!zone->gcHeapThreshold.hasSliceThreshold());
@ -7887,8 +8034,7 @@ void GCRuntime::mergeRealms(Realm* source, Realm* target) {
// Fixup zone pointers in source's zone to refer to target's zone.
bool targetZoneIsCollecting =
isIncrementalGCInProgress() && target->zone()->wasGCStarted();
bool targetZoneIsCollecting = target->zone()->gcState() > Zone::Prepare;
for (auto thingKind : AllAllocKinds()) {
for (ArenaIter aiter(source->zone(), thingKind); !aiter.done();
aiter.next()) {
@ -8067,7 +8213,14 @@ void ArenaLists::adoptArenas(ArenaLists* fromArenaLists,
// Copy fromArena->next before releasing/reinserting.
next = fromArena->next;
#ifdef DEBUG
MOZ_ASSERT(!fromArena->isEmpty());
if (targetZoneIsCollecting) {
fromArena->checkAllCellsMarkedBlack();
} else {
fromArena->checkNoMarkedCells();
}
#endif
// If the target zone is being collected then we need to add the
// arenas before the cursor because the collector assumes that the
@ -8748,7 +8901,13 @@ static inline bool CanCheckGrayBits(const Cell* cell) {
auto tc = &cell->asTenured();
auto rt = tc->runtimeFromAnyThread();
return CurrentThreadCanAccessRuntime(rt) && rt->gc.areGrayBitsValid();
if (!CurrentThreadCanAccessRuntime(rt) || !rt->gc.areGrayBitsValid()) {
return false;
}
// If the zone's mark bits are being cleared concurrently we can't depend on
// the contents.
return !tc->zone()->isGCPreparing();
}
JS_PUBLIC_API bool js::gc::detail::CellIsMarkedGrayIfKnown(const Cell* cell) {

View File

@ -25,6 +25,7 @@ enum class MarkColor : uint8_t { Gray = 1, Black = 2 };
// The phases of an incremental GC.
#define GCSTATES(D) \
D(NotActive) \
D(Prepare) \
D(MarkRoots) \
D(Mark) \
D(Sweep) \
@ -42,6 +43,7 @@ enum class State {
D(RootsChange, 1) \
D(Alloc, 2) \
D(VerifierPre, 4) \
D(YieldBeforeRootMarking, 6) \
D(GenerationalGC, 7) \
D(YieldBeforeMarking, 8) \
D(YieldBeforeSweeping, 9) \

View File

@ -265,6 +265,8 @@ class GCMarker : public JSTracer {
void setMaxCapacity(size_t maxCap) { stack.setMaxCapacity(maxCap); }
size_t maxCapacity() const { return stack.maxCapacity(); }
bool isActive() const { return state != MarkingState::NotActive; }
void start();
void stop();
void reset();

View File

@ -37,6 +37,11 @@ void js::GCParallelTask::startWithLockHeld(AutoLockHelperThreadState& lock) {
}
void js::GCParallelTask::start() {
if (!CanUseExtraThreads()) {
runFromMainThread();
return;
}
AutoLockHelperThreadState lock;
startWithLockHeld(lock);
}
@ -59,6 +64,13 @@ void js::GCParallelTask::startOrRunIfIdle(AutoLockHelperThreadState& lock) {
startWithLockHeld(lock);
}
void js::GCParallelTask::cancelAndWait() {
MOZ_ASSERT(!isCancelled());
cancel_ = true;
join();
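// Clear the flag after joining so that it is left unset even if the task was
// idle and never ran to observe the cancellation.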
cancel_ = false;
}
void js::GCParallelTask::join() {
AutoLockHelperThreadState lock;
joinWithLockHeld(lock);
@ -93,7 +105,6 @@ void js::GCParallelTask::joinRunningOrFinishedTask(
}
setIdle(lock);
cancel_ = false;
}
void js::GCParallelTask::cancelDispatchedTask(AutoLockHelperThreadState& lock) {

View File

@ -106,10 +106,7 @@ class GCParallelTask : public mozilla::LinkedListElement<GCParallelTask>,
void cancelDispatchedTask(AutoLockHelperThreadState& lock);
// Set the cancel flag and wait for the task to finish.
void cancelAndWait() {
cancel_ = true;
join();
}
void cancelAndWait();
// Report whether the task is idle. This means either before start() has been
// called or after join() has been called.
@ -134,6 +131,8 @@ class GCParallelTask : public mozilla::LinkedListElement<GCParallelTask>,
// Override this method to provide the task's functionality.
virtual void run(AutoLockHelperThreadState& lock) = 0;
bool isCancelled() const { return cancel_; }
private:
void assertIdle() const {
// Don't lock here because that adds extra synchronization in debug

View File

@ -130,6 +130,18 @@ class ChunkPool {
};
};
class BackgroundUnmarkTask : public GCParallelTask {
public:
explicit BackgroundUnmarkTask(GCRuntime* gc) : GCParallelTask(gc) {}
void initZones();
void run(AutoLockHelperThreadState& lock) override;
private:
void unmarkZones(AutoLockGC& lock);
ZoneVector zones;
};
class BackgroundSweepTask : public GCParallelTask {
public:
explicit BackgroundSweepTask(GCRuntime* gc) : GCParallelTask(gc) {}
@ -570,6 +582,7 @@ class GCRuntime {
static TenuredCell* refillFreeListInGC(Zone* zone, AllocKind thingKind);
void setParallelAtomsAllocEnabled(bool enabled);
void setParallelUnmarkEnabled(bool enabled);
/*
* Concurrent sweep infrastructure.
@ -687,10 +700,13 @@ class GCRuntime {
void maybeCallGCCallback(JSGCStatus status, JS::GCReason reason);
void purgeRuntime();
MOZ_MUST_USE bool beginMarkPhase(JS::GCReason reason, AutoGCSession& session);
MOZ_MUST_USE bool beginPreparePhase(JS::GCReason reason,
AutoGCSession& session);
bool prepareZonesForCollection(JS::GCReason reason, bool* isFullOut);
void bufferGrayRoots();
void unmarkWeakMaps();
void endPreparePhase(JS::GCReason reason);
void beginMarkPhase(AutoGCSession& session);
bool shouldPreserveJITCode(JS::Realm* realm,
const mozilla::TimeStamp& currentTime,
JS::GCReason reason, bool canAllocateMoreCode);
@ -792,12 +808,26 @@ class GCRuntime {
void releaseRelocatedArenas(Arena* arenaList);
void releaseRelocatedArenasWithoutUnlocking(Arena* arenaList,
const AutoLockGC& lock);
IncrementalProgress waitForBackgroundTask(GCParallelTask& task,
SliceBudget& budget);
/*
* Whether to immediately trigger a slice after a background task
* finishes. This may not happen at a convenient time, so the consideration is
* whether the slice will run quickly or may take a long time.
*/
enum ShouldTriggerSliceWhenFinished : bool {
DontTriggerSliceWhenFinished = false,
TriggerSliceWhenFinished = true
};
IncrementalProgress waitForBackgroundTask(
GCParallelTask& task, const SliceBudget& budget,
ShouldTriggerSliceWhenFinished triggerSlice);
void maybeRequestGCAfterBackgroundTask(const AutoLockHelperThreadState& lock);
void cancelRequestedGCAfterBackgroundTask();
void finishCollection();
void maybeStopStringPretenuring();
void checkGCStateNotInUse();
IncrementalProgress joinSweepMarkTask();
#ifdef JS_GC_ZEAL
@ -1166,6 +1196,7 @@ class GCRuntime {
friend class BackgroundFreeTask;
BackgroundAllocTask allocTask;
BackgroundUnmarkTask unmarkTask;
BackgroundSweepTask sweepTask;
BackgroundFreeTask freeTask;
BackgroundDecommitTask decommitTask;
@ -1233,7 +1264,8 @@ inline bool GCRuntime::needZealousGC() {
}
inline bool GCRuntime::hasIncrementalTwoSliceZealMode() {
return hasZealMode(ZealMode::YieldBeforeMarking) ||
return hasZealMode(ZealMode::YieldBeforeRootMarking) ||
hasZealMode(ZealMode::YieldBeforeMarking) ||
hasZealMode(ZealMode::YieldBeforeSweeping) ||
hasZealMode(ZealMode::YieldBeforeSweepingAtoms) ||
hasZealMode(ZealMode::YieldBeforeSweepingCaches) ||

View File

@ -443,6 +443,11 @@ class Arena {
#ifdef DEBUG
void checkNoMarkedFreeCells();
void checkAllCellsMarkedBlack();
#endif
#if defined(DEBUG) || defined(JS_GC_ZEAL)
void checkNoMarkedCells();
#endif
};

View File

@ -2429,7 +2429,10 @@ void GCMarker::stop() {
MOZ_ASSERT(isDrained());
MOZ_ASSERT(!delayedMarkingList);
MOZ_ASSERT(markLaterArenas == 0);
MOZ_ASSERT(state != MarkingState::NotActive);
if (state == MarkingState::NotActive) {
return;
}
state = MarkingState::NotActive;
stack.clear();
@ -3729,7 +3732,8 @@ static inline bool ShouldCheckMarkState(JSRuntime* rt, T** thingp) {
TenuredCell& thing = (*thingp)->asTenured();
Zone* zone = thing.zoneFromAnyThread();
if (!zone->isCollectingFromAnyThread() || zone->isGCFinished()) {
if (zone->gcState() <= Zone::Prepare || zone->isGCFinished()) {
return false;
}
@ -3996,12 +4000,18 @@ void UnmarkGrayTracer::onChild(const JS::GCCellPtr& thing) {
}
TenuredCell& tenured = cell->asTenured();
Zone* zone = tenured.zone();
// If the cell is in a zone whose mark bits are being cleared, then it will
// end up white.
if (zone->isGCPreparing()) {
return;
}
// If the cell is in a zone that we're currently marking, then it's possible
// that it is currently white but will end up gray. To handle this case, push
// any cells in zones that are currently being marked onto the mark stack and
// they will eventually get marked black.
Zone* zone = tenured.zone();
if (zone->isGCMarking()) {
if (!cell->isMarkedBlack()) {
Cell* tmp = cell;
@ -4061,6 +4071,11 @@ JS_FRIEND_API bool JS::UnmarkGrayGCThingRecursively(JS::GCCellPtr thing) {
MOZ_ASSERT(!JS::RuntimeHeapIsCycleCollecting());
JSRuntime* rt = thing.asCell()->runtimeFromMainThread();
if (thing.asCell()->zone()->isGCPreparing()) {
// Mark bits are being cleared in preparation for GC.
return false;
}
gcstats::AutoPhase outerPhase(rt->gc.stats(), gcstats::PhaseKind::BARRIER);
gcstats::AutoPhase innerPhase(rt->gc.stats(),
gcstats::PhaseKind::UNMARK_GRAY);

View File

@ -55,7 +55,7 @@ class GCZonesIter {
MOZ_ASSERT_IF(gc->atomsZone->wasGCStarted(),
!gc->rt->hasHelperThreadZones());
if (!done() && !zone->isCollectingFromAnyThread()) {
if (!done() && !zone->wasGCStarted()) {
next();
}
}
@ -67,7 +67,7 @@ class GCZonesIter {
MOZ_ASSERT(!done());
do {
zone.next();
} while (!zone.done() && !zone->isCollectingFromAnyThread());
} while (!zone.done() && !zone->wasGCStarted());
}
JS::Zone* get() const {
@ -138,7 +138,7 @@ class ArenaFreeCellIter {
return !thing;
}
TenuredCell* getCell() const {
TenuredCell* get() const {
MOZ_ASSERT(!done());
return reinterpret_cast<TenuredCell*>(uintptr_t(arena) + thing);
}
@ -156,6 +156,9 @@ class ArenaFreeCellIter {
MOZ_ASSERT(thing < ArenaSize);
}
operator TenuredCell*() const { return get(); }
TenuredCell* operator->() const { return get(); }
};
} // namespace gc

View File

@ -1091,28 +1091,30 @@ void Statistics::sendGCTelemetry() {
}
}
size_t bytesSurvived = 0;
for (ZonesIter zone(runtime, WithAtoms); !zone.done(); zone.next()) {
if (zone->wasCollected()) {
bytesSurvived += zone->gcHeapSize.retainedBytes();
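// The pre-collection heap sizes may not have been recorded if the GC was
// reset during the prepare phase, so skip these measurements for reset
// collections.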
if (!lastSlice.wasReset()) {
size_t bytesSurvived = 0;
for (ZonesIter zone(runtime, WithAtoms); !zone.done(); zone.next()) {
if (zone->wasCollected()) {
bytesSurvived += zone->gcHeapSize.retainedBytes();
}
}
}
MOZ_ASSERT(preCollectedHeapBytes >= bytesSurvived);
double survivalRate =
100.0 * double(bytesSurvived) / double(preCollectedHeapBytes);
runtime->addTelemetry(JS_TELEMETRY_GC_TENURED_SURVIVAL_RATE,
uint32_t(survivalRate));
MOZ_ASSERT(preCollectedHeapBytes >= bytesSurvived);
double survivalRate =
100.0 * double(bytesSurvived) / double(preCollectedHeapBytes);
runtime->addTelemetry(JS_TELEMETRY_GC_TENURED_SURVIVAL_RATE,
uint32_t(survivalRate));
// Calculate 'effectiveness' in MB / second, on main thread only for now.
if (!runtime->parentRuntime) {
size_t bytesFreed = preCollectedHeapBytes - bytesSurvived;
TimeDuration clampedTotal =
TimeDuration::Max(total, TimeDuration::FromMilliseconds(1));
double effectiveness =
(double(bytesFreed) / BYTES_PER_MB) / clampedTotal.ToSeconds();
runtime->addTelemetry(JS_TELEMETRY_GC_EFFECTIVENESS,
uint32_t(effectiveness));
// Calculate 'effectiveness' in MB / second, on main thread only for now.
if (!runtime->parentRuntime) {
size_t bytesFreed = preCollectedHeapBytes - bytesSurvived;
TimeDuration clampedTotal =
TimeDuration::Max(total, TimeDuration::FromMilliseconds(1));
double effectiveness =
(double(bytesFreed) / BYTES_PER_MB) / clampedTotal.ToSeconds();
runtime->addTelemetry(JS_TELEMETRY_GC_EFFECTIVENESS,
uint32_t(effectiveness));
}
}
}

View File

@ -1051,7 +1051,7 @@ bool js::gc::CheckWeakMapEntryMarking(const WeakMapBase* map, Cell* key,
if (cell->runtimeFromAnyThread() != mapRuntime) {
return CellColor::Black;
}
if (cellZone->isGCMarking() || cellZone->isGCSweeping()) {
if (cellZone->isGCMarkingOrSweeping()) {
return cell->color();
}
return CellColor::Black;

View File

@ -113,7 +113,7 @@ WeakMap<K, V>::WeakMap(JSContext* cx, JSObject* memOf)
"Object's TraceKind should be added to CC graph.");
zone()->gcWeakMapList().insertFront(this);
if (zone()->wasGCStarted()) {
if (zone()->gcState() > Zone::Prepare) {
mapColor = CellColor::Black;
}
}

View File

@ -39,17 +39,20 @@ function test() {
// Check that pointers without associated length get truncated to strlen().
const shortU8cs = ctypes.unsigned_char.array(10)("abc\0\0\0").addressOfElement(0).readTypedArray();
const unsignedCharArray = ctypes.unsigned_char.array(10)("abc\0\0\0");
const shortU8cs = unsignedCharArray.addressOfElement(0).readTypedArray();
assertEq(shortU8cs instanceof Uint8Array, true);
assertEq(shortU8cs.length, 3);
assertEq(typedArrayMatchesString(shortU8cs, "abc", 'stop at NUL, please'), true);
const shortI8cs = ctypes.signed_char.array(10)("abc\0\0\0").addressOfElement(0).readTypedArray();
const signedCharArray = ctypes.signed_char.array(10)("abc\0\0\0");
const shortI8cs = signedCharArray.addressOfElement(0).readTypedArray();
assertEq(shortI8cs instanceof Int8Array, true);
assertEq(shortI8cs.length, 3);
assertEq(typedArrayMatchesString(shortI8cs, "abc", 'stop at NUL, please'), true);
const shortU16cs = ctypes.char16_t.array(10)("千\0").addressOfElement(0).readTypedArray();
const char16Array = ctypes.char16_t.array(10)("千\0");
const shortU16cs = char16Array.addressOfElement(0).readTypedArray();
assertEq(shortU16cs instanceof Uint16Array, true);
assertEq(shortU16cs.length, 1);
assertEq(typedArrayMatchesString(shortU16cs, "千", 'ignore zero-padding, please'), true);

View File

@ -20,7 +20,11 @@ dbg.onEnterFrame = frame => {
// slices) and use long slices, to make sure that the debuggee removal occurs
// during a slice.
gczeal(10, 0);
gcslice(1000000);
gcslice(1);
while (gcstate() !== "NotActive" && gcstate() !== "Sweep") {
gcslice(1000);
}
let genObj = g.f();
genObj.return();
assertEq(gcstate(), "Sweep");

View File

@ -1,5 +1,7 @@
// Test class field initializers have reasonable lineno/column values
gczeal(0);
// Use the Debugger API to introspect the line / column.
let d = new Debugger();
let g = newGlobal({newCompartment: true})

View File

@ -9,7 +9,8 @@ var g = newGlobal();
// Start an off thread compilation that will not run until GC has finished
if ("gcstate" in this)
assertEq(gcstate(), "Mark");
assertEq(gcstate() === "NotActive", false);
g.offThreadCompileScript('23;', {});
// Wait for the compilation to finish, which must finish the GC first

View File

@ -16,6 +16,13 @@ function createOtherCompartment() {
return g;
}
function startGCMarking() {
startgc(1);
while (gcstate() === "Prepare") {
gcslice(1);
}
}
gczeal(0);
let g = createOtherCompartment();
@ -31,7 +38,7 @@ assertEq(getMarks()[0], "gray");
// If a barrier marks the gray wrapper black after the start of the
// GC, the target ends up black.
schedulezone(this);
startgc(1);
startGCMarking();
assertEq(getMarks()[0], "unmarked");
g.eval(`grayRoot()`); // Barrier marks gray roots black.
assertEq(getMarks()[0], "black");
@ -45,7 +52,7 @@ assertEq(getMarks()[0], "gray");
// already been marked gray, the target ends up black.
gczeal(25); // Yield during gray marking.
schedulezone(this);
startgc(1);
startGCMarking();
assertEq(getMarks()[0], "gray");
g.eval(`grayRoot()`); // Barrier marks gray roots black.
assertEq(getMarks()[0], "black");

View File

@ -1,5 +1,11 @@
// Test that a zone GC collects the selected zones.
function waitForState(state) {
while (gcstate() !== state && gcstate() !== "NotActive") {
gcslice(100);
}
}
gczeal(0);
gc();
@ -15,6 +21,10 @@ assertEq(gcstate(z2), "NoGC");
// No zones selected => full GC.
startgc(1);
// It's non-deterministic whether we see the prepare state or not.
waitForState("Mark");
assertEq(gcstate(), "Mark");
assertEq(gcstate(z1), "MarkBlackOnly");
assertEq(gcstate(z2), "MarkBlackOnly");
@ -24,6 +34,7 @@ finishgc();
schedulezone(z1);
startgc(1);
waitForState("Mark");
assertEq(gcstate(), "Mark");
assertEq(gcstate(z1), "MarkBlackOnly");
assertEq(gcstate(z2), "NoGC");
@ -31,6 +42,7 @@ finishgc();
schedulezone(z2);
startgc(1);
waitForState("Mark");
assertEq(gcstate(), "Mark");
assertEq(gcstate(z1), "NoGC");
assertEq(gcstate(z2), "MarkBlackOnly");
@ -39,6 +51,7 @@ finishgc();
schedulezone(z1);
schedulezone(z2);
startgc(1);
waitForState("Mark");
assertEq(gcstate(), "Mark");
assertEq(gcstate(z1), "MarkBlackOnly");
assertEq(gcstate(z2), "MarkBlackOnly");

View File

@ -4,6 +4,12 @@ function assert(x) {
assertEq(true, x);
}
function waitForState(state) {
while (gcstate() !== state && gcstate() !== "NotActive") {
gcslice(100);
}
}
// Test expected state changes during collection.
gczeal(0);
@ -22,6 +28,7 @@ assertEq(gcstate(), "NotActive");
// we yield before we start sweeping.
gczeal(0);
gcslice(1);
waitForState("Mark");
assertEq(gcstate(), "Mark");
gcslice(1000000);
assertEq(gcstate(), "Mark");
@ -29,8 +36,17 @@ gcslice(1000000);
assert(gcstate() !== "Mark");
finishgc();
// Zeal mode 6: Incremental GC in two slices:
// 1) prepare
// 2) mark roots, mark and sweep
gczeal(6, 0);
gcslice(1);
assertEq(gcstate(), "Prepare");
gcslice(1);
assertEq(gcstate(), "NotActive");
// Zeal mode 8: Incremental GC in two slices:
// 1) mark roots
// 1) prepare and mark roots
// 2) mark and sweep
gczeal(8, 0);
gcslice(1);
@ -39,7 +55,7 @@ gcslice(1);
assertEq(gcstate(), "NotActive");
// Zeal mode 9: Incremental GC in two slices:
// 1) mark roots and marking
// 1) prepare, mark roots and marking
// 2) new marking and sweeping
gczeal(9, 0);
gcslice(1);
@ -52,6 +68,9 @@ assertEq(gcstate(), "NotActive");
// in sweeping, where normal IGC (above) does not.
gczeal(10, 0);
gcslice(1000000);
while (gcstate() === "Prepare") {
gcslice(1000000);
}
assertEq(gcstate(), "Sweep");
gcslice(1000000);
assert(gcstate() !== "Sweep");

View File

@ -28,6 +28,13 @@ function reportMarks(prefix = "") {
return markstr;
}
function startGCMarking() {
startgc(100000);
while (gcstate() === "Prepare") {
gcslice(100000);
}
}
function purgeKey() {
const m = new WeakMap();
const vals = {};
@ -47,7 +54,7 @@ function purgeKey() {
vals.key = vals.val = null;
startgc(100000);
startGCMarking();
// getMarks() returns map/key/value
assertEq(getMarks().join("/"), "black/unmarked/unmarked",
"marked the map black");
@ -87,7 +94,7 @@ function removeKey() {
enqueueMark(m);
enqueueMark("yield");
startgc(100000);
startGCMarking();
reportMarks("first: ");
var marks = getMarks();
assertEq(marks[0], "black", "map is black");
@ -106,6 +113,9 @@ function removeKey() {
m.set(vals.key, vals.val);
vals.key = vals.val = null;
startgc(10000);
while (gcstate() !== "Mark") {
gcslice(100000);
}
marks = getMarks();
assertEq(marks[0], "black", "map is black");
assertEq(marks[1], "unmarked", "key not marked yet");
@ -161,6 +171,9 @@ function nukeMarking() {
// Okay, run through the GC now.
startgc(1000000);
while (gcstate() !== "Mark") {
gcslice(100000);
}
assertEq(gcstate(), "Mark", "expected to yield after marking map");
// We should have marked the map and then yielded back here.
nukeCCW(vals.key);
@ -195,6 +208,9 @@ function transplantMarking() {
// Okay, run through the GC now.
startgc(1000000);
while (gcstate() !== "Mark") {
gcslice(100000);
}
assertEq(gcstate(), "Mark", "expected to yield after marking map");
// We should have marked the map and then yielded back here.
transplant(g1);
@ -248,7 +264,7 @@ function grayMarkingMapFirst() {
};
print("Starting incremental GC");
startgc(100000);
startGCMarking();
// Checkpoint 1, after marking map
showmarks();
var marks = getMarks();
@ -340,7 +356,7 @@ function grayMarkingMapLast() {
};
print("Starting incremental GC");
startgc(100000);
startGCMarking();
// Checkpoint 1, after marking key
showmarks();
var marks = labeledMarks();
@ -405,7 +421,7 @@ function grayMapKey() {
vals.key = vals.val = null;
startgc(100000);
startGCMarking();
assertEq(getMarks().join("/"), "gray/unmarked/unmarked",
"marked the map gray");
@ -457,7 +473,7 @@ function grayKeyMap() {
// created additional zones.
schedulezone(vals);
startgc(100000);
startGCMarking();
// getMarks() returns map/key/value
reportMarks("1: ");
assertEq(getMarks().join("/"), "unmarked/black/unmarked",
@ -541,7 +557,7 @@ function blackDuringGray() {
};
print("Starting incremental GC");
startgc(100000);
startGCMarking();
// Checkpoint 1, after marking delegate black
showmarks();
var marks = getMarks();

View File

@ -13,7 +13,7 @@
var g7 = newGlobal({newCompartment: true});
g7.parent = this;
g7.eval("Debugger(parent)");
g7.eval("var dbg = Debugger(parent)");
assertEq(typeof WebAssembly, "object");
// Test that validation works even if compilers are not available.

View File

@ -497,10 +497,14 @@ bool TestCCWs() {
CHECK(IsMarkedGray(wrapper));
CHECK(IsMarkedBlack(target));
JSRuntime* rt = cx->runtime();
JS_SetGCParameter(cx, JSGC_MODE, JSGC_MODE_ZONE_INCREMENTAL);
JS::PrepareForFullGC(cx);
js::SliceBudget budget(js::WorkBudget(1));
cx->runtime()->gc.startDebugGC(GC_NORMAL, budget);
rt->gc.startDebugGC(GC_NORMAL, budget);
while (rt->gc.state() == gc::State::Prepare) {
rt->gc.debugGCSlice(budget);
}
CHECK(JS::IsIncrementalGCInProgress(cx));
CHECK(!IsMarkedBlack(wrapper));
@ -526,7 +530,10 @@ bool TestCCWs() {
JS_SetGCParameter(cx, JSGC_MODE, JSGC_MODE_ZONE_INCREMENTAL);
JS::PrepareZoneForGC(wrapper->zone());
budget = js::SliceBudget(js::WorkBudget(1));
cx->runtime()->gc.startDebugGC(GC_NORMAL, budget);
rt->gc.startDebugGC(GC_NORMAL, budget);
while (rt->gc.state() == gc::State::Prepare) {
rt->gc.debugGCSlice(budget);
}
CHECK(JS::IsIncrementalGCInProgress(cx));
CHECK(wrapper->zone()->isGCMarkingBlackOnly());
CHECK(!target->zone()->wasGCStarted());

View File

@ -460,6 +460,9 @@ BEGIN_TEST(testGCHeapPreBarriers) {
SliceBudget budget(WorkBudget(1));
gc::GCRuntime* gc = &cx->runtime()->gc;
gc->startDebugGC(GC_NORMAL, budget);
while (gc->state() != gc::State::Mark) {
gc->debugGCSlice(budget);
}
MOZ_ASSERT(cx->zone()->needsIncrementalBarrier());
TestWrapper<HeapPtr<JSObject*>>(obj1, obj2);

View File

@ -12,6 +12,8 @@
#include "jsapi-tests/tests.h"
#include "vm/Realm.h"
using namespace js;
static bool ConstructCCW(JSContext* cx, const JSClass* globalClasp,
JS::HandleObject global1,
JS::MutableHandleObject wrapper,
@ -374,6 +376,9 @@ BEGIN_TEST(testIncrementalRoots) {
js::SliceBudget budget(js::WorkBudget(1000));
JS_SetGCParameter(cx, JSGC_MODE, JSGC_MODE_ZONE_INCREMENTAL);
rt->gc.startDebugGC(GC_NORMAL, budget);
while (rt->gc.state() != gc::State::Mark) {
rt->gc.debugGCSlice(budget);
}
// We'd better be between iGC slices now. There's always a risk that
// something will decide that we need to do a full GC (such as gczeal, but

View File

@ -10,6 +10,8 @@
#include "jsapi-tests/tests.h"
#include "vm/Realm.h"
using namespace js;
JSObject* keyDelegate = nullptr;
BEGIN_TEST(testWeakMap_basicOperations) {
@ -95,11 +97,17 @@ BEGIN_TEST(testWeakMap_keyDelegates) {
* Perform an incremental GC, introducing an unmarked CCW to force the map
* zone to finish marking before the delegate zone.
*/
JSRuntime* rt = cx->runtime();
CHECK(newCCW(map, delegateRoot));
js::SliceBudget budget(js::WorkBudget(1000000));
cx->runtime()->gc.startDebugGC(GC_NORMAL, budget);
js::SliceBudget budget(js::WorkBudget(1000));
rt->gc.startDebugGC(GC_NORMAL, budget);
if (JS::IsIncrementalGCInProgress(cx)) {
cx->runtime()->gc.finishGC(JS::GCReason::DEBUG_GC);
// Wait until we've started marking before finishing the GC
// non-incrementally.
while (rt->gc.state() == gc::State::Prepare) {
rt->gc.debugGCSlice(budget);
}
rt->gc.finishGC(JS::GCReason::DEBUG_GC);
}
#ifdef DEBUG
CHECK(map->zone()->lastSweepGroupIndex() <
@ -111,14 +119,20 @@ BEGIN_TEST(testWeakMap_keyDelegates) {
CHECK(SetWeakMapEntry(cx, map, key, val));
CHECK(checkSize(map, 1));
/* Check the delegate keeps the entry alive even if the key is not reachable.
/*
* Check the delegate keeps the entry alive even if the key is not reachable.
*/
key = nullptr;
CHECK(newCCW(map, delegateRoot));
budget = js::SliceBudget(js::WorkBudget(100000));
cx->runtime()->gc.startDebugGC(GC_NORMAL, budget);
budget = js::SliceBudget(js::WorkBudget(1000));
rt->gc.startDebugGC(GC_NORMAL, budget);
if (JS::IsIncrementalGCInProgress(cx)) {
cx->runtime()->gc.finishGC(JS::GCReason::DEBUG_GC);
// Wait until we've started marking before finishing the GC
// non-incrementally.
while (rt->gc.state() == gc::State::Prepare) {
rt->gc.debugGCSlice(budget);
}
rt->gc.finishGC(JS::GCReason::DEBUG_GC);
}
CHECK(checkSize(map, 1));

View File

@ -27,6 +27,7 @@ class JSLinearString;
_(Engine) \
_(GC) \
_(GCAllocation) \
_(GCUnmarking) \
_(GCSweeping) \
_(GCFree) \
_(Interpreter) \