Mirror of https://github.com/mozilla/gecko-dev.git (synced 2025-02-27 04:38:02 +00:00)

Bug 1468867 - Rename heap state checking functions r=sfink

This commit is contained in:
parent 2e9d9b18a8
commit 5aa3632b14
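In summary: this patch appears to be a purely mechanical rename of the JS::CurrentThread* heap-state query functions to JS::RuntimeHeap*, with no behavior change. The state they report is stored on the JSRuntime (the renamed accessor still reads it via TlsContext.get()->runtime()->heapState(), as one of the hunks below shows), so the new names describe what is actually being queried. The full mapping, collected from the API hunk:

    JS::CurrentThreadHeapState()             -> JS::RuntimeHeapState()
    JS::CurrentThreadIsHeapBusy()            -> JS::RuntimeHeapIsBusy()
    JS::CurrentThreadIsHeapTracing()         -> JS::RuntimeHeapIsTracing()
    JS::CurrentThreadIsHeapMajorCollecting() -> JS::RuntimeHeapIsMajorCollecting()
    JS::CurrentThreadIsHeapMinorCollecting() -> JS::RuntimeHeapIsMinorCollecting()
    JS::CurrentThreadIsHeapCollecting()      -> JS::RuntimeHeapIsCollecting()
    JS::CurrentThreadIsHeapCycleCollecting() -> JS::RuntimeHeapIsCycleCollecting()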
@@ -591,10 +591,10 @@ IsIncrementalBarrierNeededOnTenuredGCThing(const JS::GCCellPtr thing)
     MOZ_ASSERT(thing);
     MOZ_ASSERT(!js::gc::IsInsideNursery(thing.asCell()));
 
-    // TODO: I'd like to assert !CurrentThreadIsHeapBusy() here but this gets
+    // TODO: I'd like to assert !RuntimeHeapIsBusy() here but this gets
     // called while we are tracing the heap, e.g. during memory reporting
     // (see bug 1313318).
-    MOZ_ASSERT(!JS::CurrentThreadIsHeapCollecting());
+    MOZ_ASSERT(!JS::RuntimeHeapIsCollecting());
 
     JS::Zone* zone = JS::GetTenuredGCThingZone(thing);
     return JS::shadow::Zone::asShadowZone(zone)->needsIncrementalBarrier();
@@ -632,7 +632,7 @@ EdgeNeedsSweepUnbarriered(JSObject** objp)
     // This function does not handle updating nursery pointers. Raw JSObject
     // pointers should be updated separately or replaced with
     // JS::Heap<JSObject*> which handles this automatically.
-    MOZ_ASSERT(!JS::CurrentThreadIsHeapMinorCollecting());
+    MOZ_ASSERT(!JS::RuntimeHeapIsMinorCollecting());
     if (IsInsideNursery(reinterpret_cast<Cell*>(*objp)))
         return false;
 
@@ -403,7 +403,7 @@ void
 js::intl::SharedIntlData::trace(JSTracer* trc)
 {
     // Atoms are always tenured.
-    if (!JS::CurrentThreadIsHeapMinorCollecting()) {
+    if (!JS::RuntimeHeapIsMinorCollecting()) {
         availableTimeZones.trace(trc);
         ianaZonesTreatedAsLinksByICU.trace(trc);
         ianaLinksCanonicalizedDifferentlyByICU.trace(trc);
@@ -284,7 +284,7 @@ GCRuntime::checkAllocatorState(JSContext* cx, AllocKind kind)
     MOZ_ASSERT_IF(!cx->zone()->isAtomsZone(),
                   kind != AllocKind::ATOM &&
                   kind != AllocKind::FAT_INLINE_ATOM);
-    MOZ_ASSERT(!JS::CurrentThreadIsHeapBusy());
+    MOZ_ASSERT(!JS::RuntimeHeapIsBusy());
     MOZ_ASSERT(cx->isAllocAllowed());
 #endif
 
@@ -380,7 +380,7 @@ GCRuntime::refillFreeListFromMainThread(JSContext* cx, AllocKind thingKind)
     // It should not be possible to allocate on the main thread while we are
     // inside a GC.
     Zone *zone = cx->zone();
-    MOZ_ASSERT(!JS::CurrentThreadIsHeapBusy(), "allocating while under GC");
+    MOZ_ASSERT(!JS::RuntimeHeapIsBusy(), "allocating while under GC");
 
     return cx->arenas()->allocateFromArena(zone, thingKind, ShouldCheckThresholds::CheckThresholds);
 }
@@ -405,8 +405,8 @@ GCRuntime::refillFreeListInGC(Zone* zone, AllocKind thingKind)
 
     zone->arenas.checkEmptyFreeList(thingKind);
     mozilla::DebugOnly<JSRuntime*> rt = zone->runtimeFromMainThread();
-    MOZ_ASSERT(JS::CurrentThreadIsHeapCollecting());
-    MOZ_ASSERT_IF(!JS::CurrentThreadIsHeapMinorCollecting(), !rt->gc.isBackgroundSweeping());
+    MOZ_ASSERT(JS::RuntimeHeapIsCollecting());
+    MOZ_ASSERT_IF(!JS::RuntimeHeapIsMinorCollecting(), !rt->gc.isBackgroundSweeping());
 
     return zone->arenas.allocateFromArena(zone, thingKind, ShouldCheckThresholds::DontCheckThresholds);
 }
@@ -24,7 +24,7 @@ bool
 RuntimeFromMainThreadIsHeapMajorCollecting(JS::shadow::Zone* shadowZone)
 {
     MOZ_ASSERT(CurrentThreadCanAccessRuntime(shadowZone->runtimeFromMainThread()));
-    return JS::CurrentThreadIsHeapMajorCollecting();
+    return JS::RuntimeHeapIsMajorCollecting();
 }
 
 #ifdef DEBUG
@@ -393,7 +393,7 @@ TenuredCell::readBarrier(TenuredCell* thing)
     if (thing->isMarkedGray()) {
         // There shouldn't be anything marked grey unless we're on the main thread.
         MOZ_ASSERT(CurrentThreadCanAccessRuntime(thing->runtimeFromAnyThread()));
-        if (!JS::CurrentThreadIsHeapCollecting())
+        if (!JS::RuntimeHeapIsCollecting())
             JS::UnmarkGrayGCThingRecursively(JS::GCCellPtr(thing, thing->getTraceKind()));
     }
 }
@@ -74,7 +74,7 @@ struct GCManagedDeletePolicy
     void operator()(const T* constPtr) {
         if (constPtr) {
            auto ptr = const_cast<T*>(constPtr);
-            if (JS::CurrentThreadIsHeapCollecting()) {
+            if (JS::RuntimeHeapIsCollecting()) {
                 MOZ_ASSERT(js::CurrentThreadIsGCSweeping());
                 // Do not attempt to clear out storebuffer edges.
             } else {
@@ -130,7 +130,7 @@ class ArenaCellIterImpl
         firstThingOffset = Arena::firstThingOffset(kind);
         thingSize = Arena::thingSize(kind);
         traceKind = MapAllocToTraceKind(kind);
-        needsBarrier = mayNeedBarrier && !JS::CurrentThreadIsHeapCollecting();
+        needsBarrier = mayNeedBarrier && !JS::RuntimeHeapIsCollecting();
         reset(arena);
     }
 
@@ -188,7 +188,7 @@ class ArenaCellIter : public ArenaCellIterImpl
     explicit ArenaCellIter(Arena* arena)
       : ArenaCellIterImpl(arena, CellIterMayNeedBarrier)
     {
-        MOZ_ASSERT(JS::CurrentThreadIsHeapTracing());
+        MOZ_ASSERT(JS::RuntimeHeapIsTracing());
     }
 };
 
@@ -217,7 +217,7 @@ class ZoneCellIter<TenuredCell> {
 
         // If called from outside a GC, ensure that the heap is in a state
        // that allows us to iterate.
-        if (!JS::CurrentThreadIsHeapBusy()) {
+        if (!JS::RuntimeHeapIsBusy()) {
            // Assert that no GCs can occur while a ZoneCellIter is live.
            nogc.emplace();
        }
@@ -1734,7 +1734,7 @@ GCRuntime::getParameter(JSGCParamKey key, const AutoLockGC& lock)
 void
 GCRuntime::setMarkStackLimit(size_t limit, AutoLockGC& lock)
 {
-    MOZ_ASSERT(!JS::CurrentThreadIsHeapBusy());
+    MOZ_ASSERT(!JS::RuntimeHeapIsBusy());
     AutoUnlockGC unlock(lock);
     AutoStopVerifyingBarriers pauseVerification(rt, false);
     marker.setMaxCapacity(limit);
@@ -3330,7 +3330,7 @@ GCRuntime::triggerGC(JS::gcreason::Reason reason)
         return false;
 
     /* GC is already running. */
-    if (JS::CurrentThreadIsHeapCollecting())
+    if (JS::RuntimeHeapIsCollecting())
         return false;
 
     JS::PrepareForFullGC(rt->mainContextFromOwnThread());
@@ -3347,7 +3347,7 @@ GCRuntime::maybeAllocTriggerZoneGC(Zone* zone, const AutoLockGC& lock)
         return;
     }
 
-    MOZ_ASSERT(!JS::CurrentThreadIsHeapCollecting());
+    MOZ_ASSERT(!JS::RuntimeHeapIsCollecting());
 
     size_t usedBytes = zone->usage.gcBytes();
     size_t thresholdBytes = zone->threshold.gcTriggerBytes();
@@ -3394,7 +3394,7 @@ GCRuntime::triggerZoneGC(Zone* zone, JS::gcreason::Reason reason, size_t used, s
     MOZ_ASSERT(CurrentThreadCanAccessRuntime(rt));
 
     /* GC is already running. */
-    if (JS::CurrentThreadIsHeapBusy())
+    if (JS::RuntimeHeapIsBusy())
         return false;
 
 #ifdef JS_GC_ZEAL
@@ -3454,7 +3454,7 @@ GCRuntime::triggerFullGCForAtoms(JSContext* cx)
 {
     MOZ_ASSERT(fullGCForAtomsRequested_);
     MOZ_ASSERT(CurrentThreadCanAccessRuntime(rt));
-    MOZ_ASSERT(!JS::CurrentThreadIsHeapCollecting());
+    MOZ_ASSERT(!JS::RuntimeHeapIsCollecting());
     MOZ_ASSERT(cx->canCollectAtoms());
     fullGCForAtomsRequested_ = false;
     MOZ_RELEASE_ASSERT(triggerGC(JS::gcreason::DELAYED_ATOMS_GC));
@@ -3705,7 +3705,7 @@ GCRuntime::queueZonesForBackgroundSweep(ZoneList& zones)
 void
 GCRuntime::freeUnusedLifoBlocksAfterSweeping(LifoAlloc* lifo)
 {
-    MOZ_ASSERT(JS::CurrentThreadIsHeapBusy());
+    MOZ_ASSERT(JS::RuntimeHeapIsBusy());
     AutoLockGC lock(rt);
     blocksToFreeAfterSweeping.ref().transferUnusedFrom(lifo);
 }
@@ -3713,7 +3713,7 @@ GCRuntime::freeUnusedLifoBlocksAfterSweeping(LifoAlloc* lifo)
 void
 GCRuntime::freeAllLifoBlocksAfterSweeping(LifoAlloc* lifo)
 {
-    MOZ_ASSERT(JS::CurrentThreadIsHeapBusy());
+    MOZ_ASSERT(JS::RuntimeHeapIsBusy());
     AutoLockGC lock(rt);
     blocksToFreeAfterSweeping.ref().transferFrom(lifo);
 }
@@ -5893,7 +5893,7 @@ GCRuntime::beginSweepPhase(JS::gcreason::Reason reason, AutoTraceSession& sessio
     /*
      * Sweep phase.
      *
-     * Finalize as we sweep, outside of lock but with CurrentThreadIsHeapBusy()
+     * Finalize as we sweep, outside of lock but with RuntimeHeapIsBusy()
      * true so that any attempt to allocate a GC-thing from a finalizer will
      * fail, rather than nest badly and leave the unmarked newborn to be swept.
     */
@@ -6893,12 +6893,12 @@ AutoTraceSession::AutoTraceSession(JSRuntime* rt, JS::HeapState heapState)
 
 AutoTraceSession::~AutoTraceSession()
 {
-    MOZ_ASSERT(JS::CurrentThreadIsHeapBusy());
+    MOZ_ASSERT(JS::RuntimeHeapIsBusy());
     runtime->heapState_ = prevState;
 }
 
 JS_PUBLIC_API(JS::HeapState)
-JS::CurrentThreadHeapState()
+JS::RuntimeHeapState()
 {
     return TlsContext.get()->runtime()->heapState();
 }
@@ -7638,7 +7638,7 @@ GCRuntime::checkCanCallAPI()
     MOZ_RELEASE_ASSERT(CurrentThreadCanAccessRuntime(rt));
 
     /* If we attempt to invoke the GC while we are running in the GC, assert. */
-    MOZ_RELEASE_ASSERT(!JS::CurrentThreadIsHeapBusy());
+    MOZ_RELEASE_ASSERT(!JS::RuntimeHeapIsBusy());
 
     MOZ_ASSERT(rt->mainContextFromOwnThread()->isAllocAllowed());
 }
@@ -7895,7 +7895,7 @@ GCRuntime::onOutOfMallocMemory(const AutoLockGC& lock)
 void
 GCRuntime::minorGC(JS::gcreason::Reason reason, gcstats::PhaseKind phase)
 {
-    MOZ_ASSERT(!JS::CurrentThreadIsHeapBusy());
+    MOZ_ASSERT(!JS::RuntimeHeapIsBusy());
 
     MOZ_ASSERT_IF(reason == JS::gcreason::EVICT_NURSERY,
                   !rt->mainContextFromOwnThread()->suppressGC);
@@ -8294,7 +8294,7 @@ GCRuntime::runDebugGC()
 void
 GCRuntime::setFullCompartmentChecks(bool enabled)
 {
-    MOZ_ASSERT(!JS::CurrentThreadIsHeapMajorCollecting());
+    MOZ_ASSERT(!JS::RuntimeHeapIsMajorCollecting());
     fullCompartmentChecks = enabled;
 }
 
@@ -8314,7 +8314,7 @@ GCRuntime::notifyRootsRemoved()
 bool
 GCRuntime::selectForMarking(JSObject* object)
 {
-    MOZ_ASSERT(!JS::CurrentThreadIsHeapMajorCollecting());
+    MOZ_ASSERT(!JS::RuntimeHeapIsMajorCollecting());
     return selectedForMarking.ref().append(object);
 }
 
@@ -8327,7 +8327,7 @@ GCRuntime::clearSelectedForMarking()
 void
 GCRuntime::setDeterministic(bool enabled)
 {
-    MOZ_ASSERT(!JS::CurrentThreadIsHeapMajorCollecting());
+    MOZ_ASSERT(!JS::RuntimeHeapIsMajorCollecting());
     deterministicOnly = enabled;
 }
 #endif
@@ -8502,20 +8502,20 @@ JS::AutoEnterCycleCollection::AutoEnterCycleCollection(JSRuntime* rt)
   : runtime_(rt)
 {
     MOZ_ASSERT(CurrentThreadCanAccessRuntime(rt));
-    MOZ_ASSERT(!JS::CurrentThreadIsHeapBusy());
+    MOZ_ASSERT(!JS::RuntimeHeapIsBusy());
     runtime_->heapState_ = HeapState::CycleCollecting;
 }
 
 JS::AutoEnterCycleCollection::~AutoEnterCycleCollection()
 {
-    MOZ_ASSERT(JS::CurrentThreadIsHeapCycleCollecting());
+    MOZ_ASSERT(JS::RuntimeHeapIsCycleCollecting());
     runtime_->heapState_ = HeapState::Idle;
 }
 
 JS::AutoAssertGCCallback::AutoAssertGCCallback()
   : AutoSuppressGCAnalysis()
 {
-    MOZ_ASSERT(JS::CurrentThreadIsHeapCollecting());
+    MOZ_ASSERT(JS::RuntimeHeapIsCollecting());
 }
 
 #endif // DEBUG
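A minimal usage sketch for the RAII class in the hunk above (the call site is invented for illustration and is not part of the patch):

    void
    RunCycleCollection(JSRuntime* rt)
    {
        // Constructor asserts the heap is idle, then enters CycleCollecting.
        JS::AutoEnterCycleCollection autocc(rt);
        // Here JS::RuntimeHeapIsCycleCollecting() returns true, which is what
        // the renamed assertions elsewhere in this patch check.
        // ... traverse the cycle-collector graph ...
    }   // Destructor asserts CycleCollecting and restores HeapState::Idle.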
@@ -8810,7 +8810,7 @@ JS::IsIncrementalGCInProgress(JSRuntime* rt)
 JS_PUBLIC_API(bool)
 JS::IsIncrementalBarrierNeeded(JSContext* cx)
 {
-    if (JS::CurrentThreadIsHeapBusy())
+    if (JS::RuntimeHeapIsBusy())
         return false;
 
     auto state = cx->runtime()->gc.state();
@@ -8823,7 +8823,7 @@ JS::IncrementalPreWriteBarrier(JSObject* obj)
     if (!obj)
         return;
 
-    MOZ_ASSERT(!JS::CurrentThreadIsHeapMajorCollecting());
+    MOZ_ASSERT(!JS::RuntimeHeapIsMajorCollecting());
     JSObject::writeBarrierPre(obj);
 }
 
@@ -8837,7 +8837,7 @@ JS::IncrementalReadBarrier(GCCellPtr thing)
     if (!thing)
         return;
 
-    MOZ_ASSERT(!JS::CurrentThreadIsHeapMajorCollecting());
+    MOZ_ASSERT(!JS::RuntimeHeapIsMajorCollecting());
     DispatchTyped(IncrementalReadBarrierFunctor(), thing);
 }
 
@@ -9205,7 +9205,7 @@ js::gc::detail::CellIsNotGray(const Cell* cell)
 
     // TODO: I'd like to AssertHeapIsIdle() here, but this ends up getting
     // called during GC and while iterating the heap for memory reporting.
-    MOZ_ASSERT(!JS::CurrentThreadIsHeapCycleCollecting());
+    MOZ_ASSERT(!JS::RuntimeHeapIsCycleCollecting());
 
     auto tc = &cell->asTenured();
     if (!detail::CellIsMarkedGray(tc))
@@ -268,7 +268,7 @@ js::CheckTracedThing(JSTracer* trc, T* thing)
      * thread during compacting GC and reading the contents of the thing by
      * IsThingPoisoned would be racy in this case.
      */
-    MOZ_ASSERT_IF(JS::CurrentThreadIsHeapBusy() &&
+    MOZ_ASSERT_IF(JS::RuntimeHeapIsBusy() &&
                   !zone->isGCCompacting() &&
                   !rt->gc.isBackgroundSweeping(),
                   !IsThingPoisoned(thing) || !InFreeList(thing->asTenured().arena(), thing));
@@ -2575,7 +2575,7 @@ GCMarker::sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf,
 Zone*
 GCMarker::stackContainsCrossZonePointerTo(const Cell* target) const
 {
-    MOZ_ASSERT(!JS::CurrentThreadIsHeapCollecting());
+    MOZ_ASSERT(!JS::RuntimeHeapIsCollecting());
 
     Zone* targetZone = target->asTenured().zone();
 
@@ -3214,7 +3214,7 @@ CheckIsMarkedThing(T* thingp)
     MOZ_ASSERT_IF(!ThingIsPermanentAtomOrWellKnownSymbol(*thingp),
                   CurrentThreadCanAccessRuntime(rt) ||
                   CurrentThreadCanAccessZone((*thingp)->zoneFromAnyThread()) ||
-                  (JS::CurrentThreadIsHeapCollecting() && rt->gc.state() == State::Sweep));
+                  (JS::RuntimeHeapIsCollecting() && rt->gc.state() == State::Sweep));
 #endif
 }
 
@@ -3299,7 +3299,7 @@ js::gc::IsAboutToBeFinalizedInternal(T** thingp)
         return false;
 
     if (IsInsideNursery(thing)) {
-        return JS::CurrentThreadIsHeapMinorCollecting() &&
+        return JS::RuntimeHeapIsMinorCollecting() &&
                !Nursery::getForwardedPointer(reinterpret_cast<Cell**>(thingp));
     }
 
@@ -3514,8 +3514,8 @@ UnmarkGrayGCThing(JSRuntime* rt, JS::GCCellPtr thing)
 JS_FRIEND_API(bool)
 JS::UnmarkGrayGCThingRecursively(JS::GCCellPtr thing)
 {
-    MOZ_ASSERT(!JS::CurrentThreadIsHeapCollecting());
-    MOZ_ASSERT(!JS::CurrentThreadIsHeapCycleCollecting());
+    MOZ_ASSERT(!JS::RuntimeHeapIsCollecting());
+    MOZ_ASSERT(!JS::RuntimeHeapIsCycleCollecting());
 
     JSRuntime* rt = thing.asCell()->runtimeFromMainThread();
     gcstats::AutoPhase outerPhase(rt->gc.stats(), gcstats::PhaseKind::BARRIER);
@@ -354,7 +354,7 @@ void*
 js::Nursery::allocate(size_t size)
 {
     MOZ_ASSERT(isEnabled());
-    MOZ_ASSERT(!JS::CurrentThreadIsHeapBusy());
+    MOZ_ASSERT(!JS::RuntimeHeapIsBusy());
     MOZ_ASSERT(CurrentThreadCanAccessRuntime(runtime()));
     MOZ_ASSERT_IF(currentChunk_ == currentStartChunk_, position() >= currentStartPosition_);
     MOZ_ASSERT(position() % CellAlignBytes == 0);
@@ -63,7 +63,7 @@ class GCZonesIter
 
   public:
     explicit GCZonesIter(JSRuntime* rt, ZoneSelector selector = WithAtoms) : zone(rt, selector) {
-        MOZ_ASSERT(JS::CurrentThreadIsHeapBusy());
+        MOZ_ASSERT(JS::RuntimeHeapIsBusy());
         MOZ_ASSERT_IF(rt->gc.atomsZone->isCollectingFromAnyThread(),
                       !rt->hasHelperThreadZones());
 
@@ -120,7 +120,7 @@ IterateGrayObjects(Zone* zone, GCThingCallback cellCallback, void* data)
 void
 js::IterateGrayObjects(Zone* zone, GCThingCallback cellCallback, void* data)
 {
-    MOZ_ASSERT(!JS::CurrentThreadIsHeapBusy());
+    MOZ_ASSERT(!JS::RuntimeHeapIsBusy());
     AutoPrepareForTracing prep(TlsContext.get());
     ::IterateGrayObjects(zone, cellCallback, data);
 }
@@ -129,7 +129,7 @@ void
 js::IterateGrayObjectsUnderCC(Zone* zone, GCThingCallback cellCallback, void* data)
 {
     mozilla::DebugOnly<JSRuntime*> rt = zone->runtimeFromMainThread();
-    MOZ_ASSERT(JS::CurrentThreadIsHeapCycleCollecting());
+    MOZ_ASSERT(JS::RuntimeHeapIsCycleCollecting());
     MOZ_ASSERT(!rt->gc.isIncrementalGCInProgress());
     ::IterateGrayObjects(zone, cellCallback, data);
 }
@@ -389,7 +389,7 @@ js::gc::GCRuntime::traceRuntimeCommon(JSTracer* trc, TraceOrMarkRuntime traceOrM
     HelperThreadState().trace(trc, session);
 
     // Trace the embedding's black and gray roots.
-    if (!JS::CurrentThreadIsHeapMinorCollecting()) {
+    if (!JS::RuntimeHeapIsMinorCollecting()) {
         gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::MARK_EMBEDDING);
 
         /*
@@ -528,7 +528,7 @@ template <typename T>
 inline void
 BufferGrayRootsTracer::bufferRoot(T* thing)
 {
-    MOZ_ASSERT(JS::CurrentThreadIsHeapBusy());
+    MOZ_ASSERT(JS::RuntimeHeapIsBusy());
     MOZ_ASSERT(thing);
     // Check if |thing| is corrupt by calling a method that touches the heap.
     MOZ_ASSERT(thing->getTraceKind() <= JS::TraceKind::Null);
@@ -406,7 +406,7 @@ class StoreBuffer
 
     template <typename Buffer, typename Edge>
     void unput(Buffer& buffer, const Edge& edge) {
-        MOZ_ASSERT(!JS::CurrentThreadIsHeapBusy());
+        MOZ_ASSERT(!JS::RuntimeHeapIsBusy());
         MOZ_ASSERT(CurrentThreadCanAccessRuntime(runtime_));
         if (!isEnabled())
             return;
@@ -416,7 +416,7 @@ class StoreBuffer
 
     template <typename Buffer, typename Edge>
     void put(Buffer& buffer, const Edge& edge) {
-        MOZ_ASSERT(!JS::CurrentThreadIsHeapBusy());
+        MOZ_ASSERT(!JS::RuntimeHeapIsBusy());
         MOZ_ASSERT(CurrentThreadCanAccessRuntime(runtime_));
         if (!isEnabled())
             return;
@@ -718,7 +718,7 @@ CheckGrayMarkingTracer::check(AutoTraceSession& session)
 JS_FRIEND_API(bool)
 js::CheckGrayMarkingState(JSRuntime* rt)
 {
-    MOZ_ASSERT(!JS::CurrentThreadIsHeapCollecting());
+    MOZ_ASSERT(!JS::RuntimeHeapIsCollecting());
     MOZ_ASSERT(!rt->gc.isIncrementalGCInProgress());
     if (!rt->gc.areGrayBitsValid())
         return true;
@@ -200,7 +200,7 @@ class WeakMap : public HashMap<Key, Value, HashPolicy, ZoneAllocPolicy>,
     }
 
     void trace(JSTracer* trc) override {
-        MOZ_ASSERT_IF(JS::CurrentThreadIsHeapBusy(), isInList());
+        MOZ_ASSERT_IF(JS::RuntimeHeapIsBusy(), isInList());
 
         TraceNullableEdge(trc, &memberOf, "WeakMap owner");
 
@@ -226,7 +226,7 @@ class Zone : public JS::shadow::Zone,
 
     bool hasMarkedRealms();
 
-    void scheduleGC() { MOZ_ASSERT(!CurrentThreadIsHeapBusy()); gcScheduled_ = true; }
+    void scheduleGC() { MOZ_ASSERT(!RuntimeHeapIsBusy()); gcScheduled_ = true; }
     void unscheduleGC() { gcScheduled_ = false; }
     bool isGCScheduled() { return gcScheduled_; }
 
@@ -238,7 +238,7 @@ class Zone : public JS::shadow::Zone,
     bool canCollect();
 
     void changeGCState(GCState prev, GCState next) {
-        MOZ_ASSERT(CurrentThreadIsHeapBusy());
+        MOZ_ASSERT(RuntimeHeapIsBusy());
         MOZ_ASSERT(gcState() == prev);
         MOZ_ASSERT_IF(next != NoGC, canCollect());
         gcState_ = next;
@@ -250,7 +250,7 @@ class Zone : public JS::shadow::Zone,
     }
 
     bool isCollectingFromAnyThread() const {
-        if (CurrentThreadIsHeapCollecting())
+        if (RuntimeHeapIsCollecting())
             return gcState_ != NoGC;
         else
             return needsIncrementalBarrier();
@@ -260,7 +260,7 @@ class Zone : public JS::shadow::Zone,
     // tracer.
     bool requireGCTracer() const {
         JSRuntime* rt = runtimeFromAnyThread();
-        return CurrentThreadIsHeapMajorCollecting() && !rt->gc.isHeapCompacting() && gcState_ != NoGC;
+        return RuntimeHeapIsMajorCollecting() && !rt->gc.isHeapCompacting() && gcState_ != NoGC;
     }
 
     bool shouldMarkInZone() const {
@@ -584,7 +584,7 @@ jit::LazyLinkTopActivation(JSContext* cx, LazyLinkExitFrameLayout* frame)
 /* static */ void
 JitRuntime::Trace(JSTracer* trc, AutoLockForExclusiveAccess& lock)
 {
-    MOZ_ASSERT(!JS::CurrentThreadIsHeapMinorCollecting());
+    MOZ_ASSERT(!JS::RuntimeHeapIsMinorCollecting());
 
     // Shared stubs are allocated in the atoms zone, so do not iterate
     // them after the atoms heap after it has been "finished."
@@ -778,7 +778,7 @@ JitCode::traceChildren(JSTracer* trc)
     }
     if (dataRelocTableBytes_) {
         // If we're moving objects, we need writable JIT code.
-        bool movingObjects = JS::CurrentThreadIsHeapMinorCollecting() || zone()->isGCCompacting();
+        bool movingObjects = JS::RuntimeHeapIsMinorCollecting() || zone()->isGCCompacting();
         MaybeAutoWritableJitCode awjc(this, movingObjects ? Reprotect : DontReprotect);
 
         uint8_t* start = code_ + dataRelocTableOffset();
@@ -409,7 +409,7 @@ JSJitFrameIter::verifyReturnAddressUsingNativeToBytecodeMap()
     if (!TlsContext.get()->isProfilerSamplingEnabled())
         return true;
 
-    if (JS::CurrentThreadIsHeapMinorCollecting())
+    if (JS::RuntimeHeapIsMinorCollecting())
         return true;
 
     JitRuntime* jitrt = rt->jitRuntime();
@@ -1340,7 +1340,7 @@ TraceJitActivations(JSContext* cx, JSTracer* trc)
 void
 UpdateJitActivationsForMinorGC(JSRuntime* rt)
 {
-    MOZ_ASSERT(JS::CurrentThreadIsHeapMinorCollecting());
+    MOZ_ASSERT(JS::RuntimeHeapIsMinorCollecting());
     JSContext* cx = rt->mainContextFromOwnThread();
     for (JitActivationIterator activations(cx); !activations.done(); ++activations) {
         for (OnlyJSJitFrameIter iter(activations); !iter.done(); ++iter) {
@@ -740,7 +740,7 @@ JitcodeGlobalTable::traceForMinorGC(JSTracer* trc)
     // Trace only entries that can directly contain nursery pointers.
 
     MOZ_ASSERT(trc->runtime()->geckoProfiler().enabled());
-    MOZ_ASSERT(JS::CurrentThreadIsHeapMinorCollecting());
+    MOZ_ASSERT(JS::RuntimeHeapIsMinorCollecting());
 
     JSContext* cx = trc->runtime()->mainContextFromOwnThread();
     AutoSuppressProfilerSampling suppressSampling(cx);
@@ -790,7 +790,7 @@ JitcodeGlobalTable::markIteratively(GCMarker* marker)
     // The approach above obviates the need for read barriers. The assumption
     // above is checked in JitcodeGlobalTable::lookupForSampler.
 
-    MOZ_ASSERT(!JS::CurrentThreadIsHeapMinorCollecting());
+    MOZ_ASSERT(!JS::RuntimeHeapIsMinorCollecting());
 
     AutoSuppressProfilerSampling suppressSampling(TlsContext.get());
 
@@ -323,7 +323,7 @@ namespace js {
 void
 AssertHeapIsIdle()
 {
-    MOZ_ASSERT(!JS::CurrentThreadIsHeapBusy());
+    MOZ_ASSERT(!JS::RuntimeHeapIsBusy());
 }
 
 } // namespace js
@@ -331,7 +331,7 @@ AssertHeapIsIdle()
 static void
 AssertHeapIsIdleOrIterating()
 {
-    MOZ_ASSERT(!JS::CurrentThreadIsHeapCollecting());
+    MOZ_ASSERT(!JS::RuntimeHeapIsCollecting());
 }
 
 static void
@@ -341,7 +341,7 @@ AssertHeapIsIdleOrStringIsFlat(JSString* str)
      * We allow some functions to be called during a GC as long as the argument
     * is a flat string, since that will not cause allocation.
     */
-    MOZ_ASSERT_IF(JS::CurrentThreadIsHeapBusy(), str->isFlat());
+    MOZ_ASSERT_IF(JS::RuntimeHeapIsBusy(), str->isFlat());
 }
 
 JS_PUBLIC_API(bool)
@@ -118,43 +118,43 @@ enum class HeapState {
 };
 
 JS_PUBLIC_API(HeapState)
-CurrentThreadHeapState();
+RuntimeHeapState();
 
 static inline bool
-CurrentThreadIsHeapBusy()
+RuntimeHeapIsBusy()
 {
-    return CurrentThreadHeapState() != HeapState::Idle;
+    return RuntimeHeapState() != HeapState::Idle;
 }
 
 static inline bool
-CurrentThreadIsHeapTracing()
+RuntimeHeapIsTracing()
 {
-    return CurrentThreadHeapState() == HeapState::Tracing;
+    return RuntimeHeapState() == HeapState::Tracing;
 }
 
 static inline bool
-CurrentThreadIsHeapMajorCollecting()
+RuntimeHeapIsMajorCollecting()
 {
-    return CurrentThreadHeapState() == HeapState::MajorCollecting;
+    return RuntimeHeapState() == HeapState::MajorCollecting;
 }
 
 static inline bool
-CurrentThreadIsHeapMinorCollecting()
+RuntimeHeapIsMinorCollecting()
 {
-    return CurrentThreadHeapState() == HeapState::MinorCollecting;
+    return RuntimeHeapState() == HeapState::MinorCollecting;
 }
 
 static inline bool
-CurrentThreadIsHeapCollecting()
+RuntimeHeapIsCollecting()
 {
-    HeapState state = CurrentThreadHeapState();
+    HeapState state = RuntimeHeapState();
     return state == HeapState::MajorCollecting || state == HeapState::MinorCollecting;
 }
 
 static inline bool
-CurrentThreadIsHeapCycleCollecting()
+RuntimeHeapIsCycleCollecting()
 {
-    return CurrentThreadHeapState() == HeapState::CycleCollecting;
+    return RuntimeHeapState() == HeapState::CycleCollecting;
 }
 
 // Decorates the Unlinking phase of CycleCollection so that accidental use
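As a hedged caller-side sketch (function names invented for illustration; the patterns mirror TypeNewScript::writeBarrierPre and AssertHeapIsIdle elsewhere in this patch), the renamed predicates are used exactly like the old ones:

    static void
    ExampleWriteBarrierPre(JSObject* obj)
    {
        // Skip barrier work while the runtime's heap is being collected;
        // the GC itself must not re-enter barrier code.
        if (JS::RuntimeHeapIsCollecting())
            return;
        // ... record |obj| for incremental marking ...
    }

    static void
    ExampleAssertCanAllocate()
    {
        // Allocation is forbidden whenever the heap is busy for any reason
        // (tracing, major/minor GC, or cycle collection).
        MOZ_ASSERT(!JS::RuntimeHeapIsBusy());
    }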
@@ -375,7 +375,7 @@ js::UncheckedUnwrapWithoutExpose(JSObject* wrapped)
 JS_FRIEND_API(JSObject*)
 js::UncheckedUnwrap(JSObject* wrapped, bool stopAtWindowProxy, unsigned* flagsp)
 {
-    MOZ_ASSERT(!JS::CurrentThreadIsHeapCollecting());
+    MOZ_ASSERT(!JS::RuntimeHeapIsCollecting());
     MOZ_ASSERT(CurrentThreadCanAccessRuntime(wrapped->runtimeFromAnyThread()));
 
     unsigned flags = 0;
@@ -407,7 +407,7 @@ js::CheckedUnwrap(JSObject* obj, bool stopAtWindowProxy)
 JS_FRIEND_API(JSObject*)
 js::UnwrapOneChecked(JSObject* obj, bool stopAtWindowProxy)
 {
-    MOZ_ASSERT(!JS::CurrentThreadIsHeapCollecting());
+    MOZ_ASSERT(!JS::RuntimeHeapIsCollecting());
     MOZ_ASSERT(CurrentThreadCanAccessRuntime(obj->runtimeFromAnyThread()));
 
     if (!obj->is<WrapperObject>() ||
@@ -1663,7 +1663,7 @@ my_LargeAllocFailCallback()
     if (!cx || cx->helperThread())
         return;
 
-    MOZ_ASSERT(!JS::CurrentThreadIsHeapBusy());
+    MOZ_ASSERT(!JS::RuntimeHeapIsBusy());
 
     JS::PrepareForFullGC(cx);
     cx->runtime()->gc.gc(GC_NORMAL, JS::gcreason::SHARED_MEMORY_LIMIT);
@@ -1149,7 +1149,7 @@ ToDisassemblySource(JSContext* cx, HandleValue v, JSAutoByteString* bytes)
         return true;
     }
 
-    if (JS::CurrentThreadIsHeapBusy() || !cx->isAllocAllowed()) {
+    if (JS::RuntimeHeapIsBusy() || !cx->isAllocAllowed()) {
         UniqueChars source = JS_smprintf("<value>");
         if (!source) {
             ReportOutOfMemory(cx);
@@ -385,7 +385,7 @@ Compartment::wrap(JSContext* cx, MutableHandle<GCVector<Value>> vec)
 void
 Compartment::traceOutgoingCrossCompartmentWrappers(JSTracer* trc)
 {
-    MOZ_ASSERT(JS::CurrentThreadIsHeapMajorCollecting());
+    MOZ_ASSERT(JS::RuntimeHeapIsMajorCollecting());
     MOZ_ASSERT(!zone()->isCollectingFromAnyThread() || trc->runtime()->gc.isHeapCompacting());
 
     for (NonStringWrapperEnum e(this); !e.empty(); e.popFront()) {
@@ -406,7 +406,7 @@ Compartment::traceOutgoingCrossCompartmentWrappers(JSTracer* trc)
 Compartment::traceIncomingCrossCompartmentEdgesForZoneGC(JSTracer* trc)
 {
     gcstats::AutoPhase ap(trc->runtime()->gc.stats(), gcstats::PhaseKind::MARK_CCWS);
-    MOZ_ASSERT(JS::CurrentThreadIsHeapMajorCollecting());
+    MOZ_ASSERT(JS::RuntimeHeapIsMajorCollecting());
     for (CompartmentsIter c(trc->runtime()); !c.done(); c.next()) {
         if (!c->zone()->isCollecting())
             c->traceOutgoingCrossCompartmentWrappers(trc);
@@ -181,7 +181,7 @@ class CompartmentChecker
  * depends on other objects not having been swept yet.
 */
 #define START_ASSERT_SAME_COMPARTMENT() \
-    if (JS::CurrentThreadIsHeapCollecting()) \
+    if (JS::RuntimeHeapIsCollecting()) \
         return; \
     CompartmentChecker c(cx)
 
@@ -1549,7 +1549,7 @@ JS::AutoCheckRequestDepth::AutoCheckRequestDepth(JSContext* cxArg)
   : cx(cxArg->helperThread() ? nullptr : cxArg)
 {
     if (cx) {
-        MOZ_ASSERT(cx->requestDepth || JS::CurrentThreadIsHeapBusy());
+        MOZ_ASSERT(cx->requestDepth || JS::RuntimeHeapIsBusy());
         MOZ_ASSERT(CurrentThreadCanAccessRuntime(cx->runtime()));
         cx->checkRequestDepth++;
     }
@@ -282,7 +282,7 @@ Realm::traceGlobal(JSTracer* trc)
     savedStacks_.trace(trc);
 
     // Atoms are always tenured.
-    if (!JS::CurrentThreadIsHeapMinorCollecting())
+    if (!JS::RuntimeHeapIsMinorCollecting())
         varNames_.trace(trc);
 }
 
@@ -308,7 +308,7 @@ Realm::traceRoots(JSTracer* trc, js::gc::GCRuntime::TraceOrMarkRuntime traceOrMa
                          "on-stack object pending metadata");
     }
 
-    if (!JS::CurrentThreadIsHeapMinorCollecting()) {
+    if (!JS::RuntimeHeapIsMinorCollecting()) {
         // The global is never nursery allocated, so we don't need to
         // trace it when doing a minor collection.
        //
@@ -343,7 +343,7 @@ Realm::traceRoots(JSTracer* trc, js::gc::GCRuntime::TraceOrMarkRuntime traceOrMa
     // keys of the map to hold the JSScript alive.
     if (scriptCountsMap &&
         trc->runtime()->profilingScripts &&
-        !JS::CurrentThreadIsHeapMinorCollecting())
+        !JS::RuntimeHeapIsMinorCollecting())
     {
         MOZ_ASSERT_IF(!trc->runtime()->isBeingDestroyed(), collectCoverage());
         for (ScriptCountsMap::Range r = scriptCountsMap->all(); !r.empty(); r.popFront()) {
@@ -144,12 +144,12 @@ IsMarkingTrace(JSTracer* trc)
     // Determine whether tracing is happening during normal marking. We need to
     // test all the following conditions, since:
     //
-    // 1. During TraceRuntime, CurrentThreadIsHeapBusy() is true, but the
+    // 1. During TraceRuntime, RuntimeHeapIsBusy() is true, but the
     //    tracer might not be a marking tracer.
     // 2. When a write barrier executes, IsMarkingTracer is true, but
-    //    CurrentThreadIsHeapBusy() will be false.
+    //    RuntimeHeapIsBusy() will be false.
 
-    return JS::CurrentThreadIsHeapCollecting() && trc->isMarkingTracer();
+    return JS::RuntimeHeapIsCollecting() && trc->isMarkingTracer();
 }
 
 void
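Reassembled for readability, the predicate the hunk above rewrites (qualifiers outside the hunk are assumed):

    static bool
    IsMarkingTrace(JSTracer* trc)
    {
        // Both tests are required: TraceRuntime makes RuntimeHeapIsBusy()
        // true without a marking tracer (condition 1 above), and a write
        // barrier runs a marking tracer while the heap is not busy
        // (condition 2 above).
        return JS::RuntimeHeapIsCollecting() && trc->isMarkingTracer();
    }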
@@ -256,7 +256,7 @@ JSRuntime::init(JSContext* cx, uint32_t maxbytes, uint32_t maxNurseryBytes)
 void
 JSRuntime::destroyRuntime()
 {
-    MOZ_ASSERT(!JS::CurrentThreadIsHeapBusy());
+    MOZ_ASSERT(!JS::RuntimeHeapIsBusy());
     MOZ_ASSERT(childRuntimeCount == 0);
     MOZ_ASSERT(initialized_);
 
@@ -727,7 +727,7 @@ JSRuntime::onOutOfMemory(AllocFunction allocFunc, size_t nbytes, void* reallocPt
 {
     MOZ_ASSERT_IF(allocFunc != AllocFunction::Realloc, !reallocPtr);
 
-    if (JS::CurrentThreadIsHeapBusy())
+    if (JS::RuntimeHeapIsBusy())
         return nullptr;
 
     if (!oom::IsSimulatedOOMAllocation()) {
@@ -778,7 +778,7 @@ JSRuntime::activeGCInAtomsZone()
 bool
 JSRuntime::createAtomsAddedWhileSweepingTable()
 {
-    MOZ_ASSERT(JS::CurrentThreadIsHeapCollecting());
+    MOZ_ASSERT(JS::RuntimeHeapIsCollecting());
     MOZ_ASSERT(!atomsAddedWhileSweeping_);
 
     atomsAddedWhileSweeping_ = js_new<AtomSet>();
@@ -796,7 +796,7 @@ JSRuntime::createAtomsAddedWhileSweepingTable()
 void
 JSRuntime::destroyAtomsAddedWhileSweepingTable()
 {
-    MOZ_ASSERT(JS::CurrentThreadIsHeapCollecting());
+    MOZ_ASSERT(JS::RuntimeHeapIsCollecting());
     MOZ_ASSERT(atomsAddedWhileSweeping_);
 
     js_delete(atomsAddedWhileSweeping_.ref());
@@ -719,7 +719,7 @@ struct JSRuntime : public js::MallocProvider<JSRuntime>
     bool atomsAreFinished() const { return !atoms_; }
 
     js::AtomSet* atomsForSweeping() {
-        MOZ_ASSERT(JS::CurrentThreadIsHeapCollecting());
+        MOZ_ASSERT(JS::RuntimeHeapIsCollecting());
         return atoms_;
     }
 
@@ -4145,7 +4145,7 @@ TypeNewScript::trace(JSTracer* trc)
 /* static */ void
 TypeNewScript::writeBarrierPre(TypeNewScript* newScript)
 {
-    if (JS::CurrentThreadIsHeapCollecting())
+    if (JS::RuntimeHeapIsCollecting())
         return;
 
     JS::Zone* zone = newScript->function()->zoneFromAnyThread();
@@ -4185,7 +4185,7 @@ ConstraintTypeSet::trace(Zone* zone, JSTracer* trc)
     checkMagic();
 
     // ConstraintTypeSets only hold strong references during minor collections.
-    MOZ_ASSERT(JS::CurrentThreadIsHeapMinorCollecting());
+    MOZ_ASSERT(JS::RuntimeHeapIsMinorCollecting());
 
     unsigned objectCount = baseObjectCount();
     if (objectCount >= 2) {
@@ -4263,7 +4263,7 @@ AssertGCStateForSweep(Zone* zone)
 
     // IsAboutToBeFinalized doesn't work right on tenured objects when called
     // during a minor collection.
-    MOZ_ASSERT(!JS::CurrentThreadIsHeapMinorCollecting());
+    MOZ_ASSERT(!JS::RuntimeHeapIsMinorCollecting());
 }
 
 void