Backed out changeset 36f2908f6650 (bug 1361458) for crashing [@ js::GCMarker::drainMarkStack], e.g. in devtools' devtools/client/debugger/new/test/mochitest/browser_dbg-sourcemaps.js. r=backout

Sebastian Hengst 2017-05-17 12:53:58 +02:00
parent 845d4f803d
commit 0c5a87d59f
14 changed files with 645 additions and 692 deletions

View File

@ -64,8 +64,6 @@ included_inclnames_to_ignore = set([
'double-conversion.h', # strange MFBT case
'javascript-trace.h', # generated in $OBJDIR if HAVE_DTRACE is defined
'frontend/ReservedWordsGenerated.h', # generated in $OBJDIR
'gc/StatsPhasesGenerated.h', # generated in $OBJDIR
'gc/StatsPhasesGenerated.cpp', # generated in $OBJDIR
'jscustomallocator.h', # provided by embedders; allowed to be missing
'js-config.h', # generated in $OBJDIR
'fdlibm.h', # fdlibm
@ -106,8 +104,6 @@ included_inclnames_to_ignore = set([
oddly_ordered_inclnames = set([
'ctypes/typedefs.h', # Included multiple times in the body of ctypes/CTypes.h
'frontend/ReservedWordsGenerated.h', # Included in the body of frontend/TokenStream.h
'gc/StatsPhasesGenerated.h', # Included in the body of gc/Statistics.h
'gc/StatsPhasesGenerated.cpp', # Included in the body of gc/Statistics.cpp
'jswin.h', # Must be #included before <psapi.h>
'machine/endian.h', # Must be included after <sys/types.h> on BSD
'winbase.h', # Must precede other system headers(?)

View File

@ -85,15 +85,15 @@ class MOZ_RAII AutoStopVerifyingBarriers
// inside of an outer minor GC. This is not allowed by the
// gc::Statistics phase tree. So we pause the "real" GC, if in fact one
// is in progress.
gcstats::PhaseKind outer = gc->stats().currentPhaseKind();
if (outer != gcstats::PhaseKind::NONE)
gcstats::Phase outer = gc->stats().currentPhase();
if (outer != gcstats::PHASE_NONE)
gc->stats().endPhase(outer);
MOZ_ASSERT(gc->stats().currentPhaseKind() == gcstats::PhaseKind::NONE);
MOZ_ASSERT(gc->stats().currentPhase() == gcstats::PHASE_NONE);
if (restartPreVerifier)
gc->startVerifyPreBarriers();
if (outer != gcstats::PhaseKind::NONE)
if (outer != gcstats::PHASE_NONE)
gc->stats().beginPhase(outer);
}
};
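The hunk above restores the trick described in the comment: a verifier (minor) GC cannot nest under another phase in the gc::Statistics phase tree, so the current phase is ended before the nested collection and re-begun afterwards. The following Python sketch is an editorial illustration of that pause/resume pattern only; the Stats class and method names are stand-ins, not the SpiderMonkey API.

    # Illustrative sketch (not part of the patch): end the current stats phase,
    # run nested work, then re-enter the same phase, as AutoStopVerifyingBarriers does.
    class Stats:
        def __init__(self):
            self.stack = []                  # currently open phases, innermost last

        def begin_phase(self, phase):
            self.stack.append(phase)

        def end_phase(self, phase):
            assert self.stack and self.stack[-1] == phase
            self.stack.pop()

        def current_phase(self):
            return self.stack[-1] if self.stack else None

    def run_nested_verifier_gc(stats, nested_work):
        outer = stats.current_phase()
        if outer is not None:
            stats.end_phase(outer)           # pause the "real" GC phase
        assert stats.current_phase() is None
        nested_work(stats)                   # e.g. the verifier's own trace-heap phase
        if outer is not None:
            stats.begin_phase(outer)         # resume where we left off

    s = Stats()
    s.begin_phase("MARK")
    run_nested_verifier_gc(s, lambda st: None)
    assert s.current_phase() == "MARK"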

View File

@ -973,14 +973,14 @@ class GCRuntime
void bufferGrayRoots();
void maybeDoCycleCollection();
void markCompartments();
IncrementalProgress drainMarkStack(SliceBudget& sliceBudget, gcstats::PhaseKind phase);
template <class CompartmentIterT> void markWeakReferences(gcstats::PhaseKind phase);
void markWeakReferencesInCurrentGroup(gcstats::PhaseKind phase);
template <class ZoneIterT, class CompartmentIterT> void markGrayReferences(gcstats::PhaseKind phase);
IncrementalProgress drainMarkStack(SliceBudget& sliceBudget, gcstats::Phase phase);
template <class CompartmentIterT> void markWeakReferences(gcstats::Phase phase);
void markWeakReferencesInCurrentGroup(gcstats::Phase phase);
template <class ZoneIterT, class CompartmentIterT> void markGrayReferences(gcstats::Phase phase);
void markBufferedGrayRoots(JS::Zone* zone);
void markGrayReferencesInCurrentGroup(gcstats::PhaseKind phase);
void markAllWeakReferences(gcstats::PhaseKind phase);
void markAllGrayReferences(gcstats::PhaseKind phase);
void markGrayReferencesInCurrentGroup(gcstats::Phase phase);
void markAllWeakReferences(gcstats::Phase phase);
void markAllGrayReferences(gcstats::Phase phase);
void beginSweepPhase(JS::gcreason::Reason reason, AutoLockForExclusiveAccess& lock);
void groupZonesForSweeping(JS::gcreason::Reason reason, AutoLockForExclusiveAccess& lock);
@ -1228,8 +1228,8 @@ class GCRuntime
/*
* Concurrent sweep infrastructure.
*/
void startTask(GCParallelTask& task, gcstats::PhaseKind phase, AutoLockHelperThreadState& locked);
void joinTask(GCParallelTask& task, gcstats::PhaseKind phase, AutoLockHelperThreadState& locked);
void startTask(GCParallelTask& task, gcstats::Phase phase, AutoLockHelperThreadState& locked);
void joinTask(GCParallelTask& task, gcstats::Phase phase, AutoLockHelperThreadState& locked);
friend class AutoRunParallelTask;
/*
@ -1366,9 +1366,9 @@ class GCRuntime
}
void minorGC(JS::gcreason::Reason reason,
gcstats::PhaseKind phase = gcstats::PhaseKind::MINOR_GC) JS_HAZ_GC_CALL;
gcstats::Phase phase = gcstats::PHASE_MINOR_GC) JS_HAZ_GC_CALL;
void evictNursery(JS::gcreason::Reason reason = JS::gcreason::EVICT_NURSERY) {
minorGC(reason, gcstats::PhaseKind::EVICT_NURSERY);
minorGC(reason, gcstats::PHASE_EVICT_NURSERY);
}
void freeAllLifoBlocksAfterMinorGC(LifoAlloc* lifo);

View File

@ -1,292 +0,0 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# Generate graph structures for GC statistics recording.
#
# Stats phases are nested and form a directed acyclic graph starting
# from a set of root phases. Importantly, a phase may appear under more
# than one parent phase.
#
# For example, the following arrangement is possible:
#
# +---+
# | A |
# +---+
# |
# +-------+-------+
# | | |
# v v v
# +---+ +---+ +---+
# | B | | C | | D |
# +---+ +---+ +---+
# | |
# +---+---+
# |
# v
# +---+
# | E |
# +---+
#
# This graph is expanded into a tree (or really a forest) and phases
# with multiple parents are duplicated.
#
# For example, the input example above would be expanded to:
#
# +---+
# | A |
# +---+
# |
# +-------+-------+
# | | |
# v v v
# +---+ +---+ +---+
# | B | | C | | D |
# +---+ +---+ +---+
# | |
# v v
# +---+ +---+
# | E | | E'|
# +---+ +---+
import sys
import collections
class PhaseKind():
def __init__(self, name, descr, bucket, children = []):
self.name = name
self.descr = descr
self.bucket = bucket
self.children = children
# The root marking phase appears in several places in the graph.
MarkRootsPhaseKind = PhaseKind("MARK_ROOTS", "Mark Roots", 48, [
PhaseKind("BUFFER_GRAY_ROOTS", "Buffer Gray Roots", 49),
PhaseKind("MARK_CCWS", "Mark Cross Compartment Wrappers", 50),
PhaseKind("MARK_STACK", "Mark C and JS stacks", 51),
PhaseKind("MARK_RUNTIME_DATA", "Mark Runtime-wide Data", 52),
PhaseKind("MARK_EMBEDDING", "Mark Embedding", 53),
PhaseKind("MARK_COMPARTMENTS", "Mark Compartments", 54),
])
PhaseKindGraphRoots = [
PhaseKind("MUTATOR", "Mutator Running", 0),
PhaseKind("GC_BEGIN", "Begin Callback", 1),
PhaseKind("WAIT_BACKGROUND_THREAD", "Wait Background Thread", 2),
PhaseKind("MARK_DISCARD_CODE", "Mark Discard Code", 3),
PhaseKind("RELAZIFY_FUNCTIONS", "Relazify Functions", 4),
PhaseKind("PURGE", "Purge", 5),
PhaseKind("MARK", "Mark", 6, [
PhaseKind("UNMARK", "Unmark", 7),
MarkRootsPhaseKind,
PhaseKind("MARK_DELAYED", "Mark Delayed", 8),
]),
PhaseKind("SWEEP", "Sweep", 9, [
PhaseKind("SWEEP_MARK", "Mark During Sweeping", 10, [
PhaseKind("SWEEP_MARK_TYPES", "Mark Types During Sweeping", 11),
PhaseKind("SWEEP_MARK_INCOMING_BLACK", "Mark Incoming Black Pointers", 12),
PhaseKind("SWEEP_MARK_WEAK", "Mark Weak", 13),
PhaseKind("SWEEP_MARK_INCOMING_GRAY", "Mark Incoming Gray Pointers", 14),
PhaseKind("SWEEP_MARK_GRAY", "Mark Gray", 15),
PhaseKind("SWEEP_MARK_GRAY_WEAK", "Mark Gray and Weak", 16)
]),
PhaseKind("FINALIZE_START", "Finalize Start Callbacks", 17, [
PhaseKind("WEAK_ZONES_CALLBACK", "Per-Slice Weak Callback", 57),
PhaseKind("WEAK_COMPARTMENT_CALLBACK", "Per-Compartment Weak Callback", 58)
]),
PhaseKind("SWEEP_ATOMS", "Sweep Atoms", 18),
PhaseKind("SWEEP_COMPARTMENTS", "Sweep Compartments", 20, [
PhaseKind("SWEEP_DISCARD_CODE", "Sweep Discard Code", 21),
PhaseKind("SWEEP_INNER_VIEWS", "Sweep Inner Views", 22),
PhaseKind("SWEEP_CC_WRAPPER", "Sweep Cross Compartment Wrappers", 23),
PhaseKind("SWEEP_BASE_SHAPE", "Sweep Base Shapes", 24),
PhaseKind("SWEEP_INITIAL_SHAPE", "Sweep Initial Shapes", 25),
PhaseKind("SWEEP_TYPE_OBJECT", "Sweep Type Objects", 26),
PhaseKind("SWEEP_BREAKPOINT", "Sweep Breakpoints", 27),
PhaseKind("SWEEP_REGEXP", "Sweep Regexps", 28),
PhaseKind("SWEEP_COMPRESSION", "Sweep Compression Tasks", 62),
PhaseKind("SWEEP_WEAKMAPS", "Sweep WeakMaps", 63),
PhaseKind("SWEEP_UNIQUEIDS", "Sweep Unique IDs", 64),
PhaseKind("SWEEP_JIT_DATA", "Sweep JIT Data", 65),
PhaseKind("SWEEP_WEAK_CACHES", "Sweep Weak Caches", 66),
PhaseKind("SWEEP_MISC", "Sweep Miscellaneous", 29),
PhaseKind("SWEEP_TYPES", "Sweep type information", 30, [
PhaseKind("SWEEP_TYPES_BEGIN", "Sweep type tables and compilations", 31),
PhaseKind("SWEEP_TYPES_END", "Free type arena", 32),
]),
]),
PhaseKind("SWEEP_OBJECT", "Sweep Object", 33),
PhaseKind("SWEEP_STRING", "Sweep String", 34),
PhaseKind("SWEEP_SCRIPT", "Sweep Script", 35),
PhaseKind("SWEEP_SCOPE", "Sweep Scope", 59),
PhaseKind("SWEEP_REGEXP_SHARED", "Sweep RegExpShared", 61),
PhaseKind("SWEEP_SHAPE", "Sweep Shape", 36),
PhaseKind("SWEEP_JITCODE", "Sweep JIT code", 37),
PhaseKind("FINALIZE_END", "Finalize End Callback", 38),
PhaseKind("DESTROY", "Deallocate", 39)
]),
PhaseKind("COMPACT", "Compact", 40, [
PhaseKind("COMPACT_MOVE", "Compact Move", 41),
PhaseKind("COMPACT_UPDATE", "Compact Update", 42, [
MarkRootsPhaseKind,
PhaseKind("COMPACT_UPDATE_CELLS", "Compact Update Cells", 43),
]),
]),
PhaseKind("GC_END", "End Callback", 44),
PhaseKind("MINOR_GC", "All Minor GCs", 45, [
MarkRootsPhaseKind,
]),
PhaseKind("EVICT_NURSERY", "Minor GCs to Evict Nursery", 46, [
MarkRootsPhaseKind,
]),
PhaseKind("TRACE_HEAP", "Trace Heap", 47, [
MarkRootsPhaseKind,
]),
PhaseKind("BARRIER", "Barriers", 55, [
PhaseKind("UNMARK_GRAY", "Unmark gray", 56),
]),
PhaseKind("PURGE_SHAPE_TABLES", "Purge ShapeTables", 60)
]
# Make a linear list of all unique phases by performing a depth first
# search on the phase graph starting at the roots. This will be used to
# generate the PhaseKind enum.
def findAllPhaseKinds():
phases = []
seen = set()
def dfs(phase):
if phase in seen:
return
phases.append(phase)
seen.add(phase)
for child in phase.children:
dfs(child)
for phase in PhaseKindGraphRoots:
dfs(phase)
return phases
AllPhaseKinds = findAllPhaseKinds()
# Expand the DAG into a tree, duplicating phases which have more than
# one parent.
class Phase:
def __init__(self, phaseKind, parent, depth):
self.phaseKind = phaseKind
self.parent = parent
self.depth = depth
self.children = []
self.nextSibling = None
self.nextInPhaseKind = None
def expandPhases():
phases = []
phasesForPhase = collections.defaultdict(list)
def traverse(phaseKind, parent, depth):
ep = Phase(phaseKind, parent, depth)
phases.append(ep)
# Update list of expanded phases for this phase kind.
if phasesForPhase[phaseKind]:
phasesForPhase[phaseKind][-1].nextInPhaseKind = ep
phasesForPhase[phaseKind].append(ep)
# Recurse over children.
for child in phaseKind.children:
child_ep = traverse(child, ep, depth + 1)
if ep.children:
ep.children[-1].nextSibling = child_ep
ep.children.append(child_ep)
return ep
for phaseKind in PhaseKindGraphRoots:
traverse(phaseKind, None, 0)
return phases, phasesForPhase
AllPhases, PhasesForPhaseKind = expandPhases()
# Name expanded phases based on phase kind name and index if there are
# multiple expanded phases corresponding to a single phase kind.
for phaseKind in AllPhaseKinds:
phases = PhasesForPhaseKind[phaseKind]
if len(phases) == 1:
phases[0].name = "%s" % phaseKind.name
else:
for index, xphase in enumerate(phases):
xphase.name = "%s_%d" % (phaseKind.name, index + 1)
# Generate code.
def writeList(out, items):
if items:
out.write(",\n".join(" " + item for item in items) + "\n")
def writeEnumClass(out, name, type, items, extraItems):
items = [ "FIRST" ] + items + [ "LIMIT" ] + extraItems
items[1] += " = " + items[0]
out.write("enum class %s : %s {\n" % (name, type));
writeList(out, items)
out.write("};\n")
def generateHeader(out):
#
# Generate PhaseKind enum.
#
phaseKindNames = map(lambda phaseKind: phaseKind.name, AllPhaseKinds)
extraPhaseKinds = [
"NONE = LIMIT",
"EXPLICIT_SUSPENSION = LIMIT",
"IMPLICIT_SUSPENSION"
]
writeEnumClass(out, "PhaseKind", "uint8_t", phaseKindNames, extraPhaseKinds)
out.write("\n")
#
# Generate Phase enum.
#
expandedPhaseNames = map(lambda xphase: xphase.name, AllPhases)
extraPhases = [
"NONE = LIMIT",
"EXPLICIT_SUSPENSION = LIMIT",
"IMPLICIT_SUSPENSION"
]
writeEnumClass(out, "Phase", "uint8_t", expandedPhaseNames, extraPhases)
def generateCpp(out):
#
# Generate the PhaseKindInfo table.
#
out.write("static const PhaseKindTable phaseKinds = {\n")
for phaseKind in AllPhaseKinds:
xPhase = PhasesForPhaseKind[phaseKind][0]
out.write(" /* PhaseKind::%s */ PhaseKindInfo { Phase::%s, %d },\n" %
(phaseKind.name, xPhase.name, phaseKind.bucket))
out.write("};\n")
out.write("\n")
#
# Generate the PhaseInfo tree.
#
def name(xphase):
return "Phase::" + xphase.name if xphase else "Phase::NONE"
out.write("static const PhaseTable phases = {\n")
for xphase in AllPhases:
firstChild = xphase.children[0] if xphase.children else None
phaseKind = xphase.phaseKind
out.write(" /* %s */ PhaseInfo { %s, %s, %s, %s, PhaseKind::%s, %d, \"%s\" },\n" %
(name(xphase),
name(xphase.parent),
name(firstChild),
name(xphase.nextSibling),
name(xphase.nextInPhaseKind),
phaseKind.name,
xphase.depth,
phaseKind.descr))
out.write("};\n")
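The deleted GenerateStatsPhases.py above expands the phase DAG into a forest by duplicating any phase kind that has more than one parent (MARK_ROOTS in the real data). As a hedged illustration, here is a small standalone Python sketch, not part of the build, that reproduces the A-E example from the file's header comment and prints E twice, once per parent.

    # Standalone sketch of the DAG-to-tree expansion documented above.
    class Node:
        def __init__(self, name, children=None):
            self.name = name
            self.children = children or []

    E = Node("E")
    B = Node("B", [E])
    C = Node("C", [E])                      # E is multi-parented: B and C
    D = Node("D")
    A = Node("A", [B, C, D])

    def expand(node, copies, indent=""):
        # Duplicate a node each time it is reached through a different parent.
        copies[node.name] = copies.get(node.name, 0) + 1
        label = node.name + "'" * (copies[node.name] - 1)
        print(indent + label)
        for child in node.children:
            expand(child, copies, indent + "  ")

    expand(A, {})
    # Output: A, B, E, C, E', D -- one copy of E per parent, which is how the
    # generator above names duplicated phases (e.g. MARK_ROOTS_1, MARK_ROOTS_2).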

View File

@ -2523,7 +2523,7 @@ bool
GCMarker::markDelayedChildren(SliceBudget& budget)
{
GCRuntime& gc = runtime()->gc;
gcstats::AutoPhase ap(gc.stats(), gc.state() == State::Mark, gcstats::PhaseKind::MARK_DELAYED);
gcstats::AutoPhase ap(gc.stats(), gc.state() == State::Mark, gcstats::PHASE_MARK_DELAYED);
MOZ_ASSERT(unmarkedArenaStackTop);
do {
@ -3406,8 +3406,8 @@ TypedUnmarkGrayCellRecursively(T* t)
MOZ_ASSERT(!JS::CurrentThreadIsHeapCycleCollecting());
UnmarkGrayTracer unmarker(rt);
gcstats::AutoPhase outerPhase(rt->gc.stats(), gcstats::PhaseKind::BARRIER);
gcstats::AutoPhase innerPhase(rt->gc.stats(), gcstats::PhaseKind::UNMARK_GRAY);
gcstats::AutoPhase outerPhase(rt->gc.stats(), gcstats::PHASE_BARRIER);
gcstats::AutoPhase innerPhase(rt->gc.stats(), gcstats::PHASE_UNMARK_GRAY);
unmarker.unmark(JS::GCCellPtr(t, MapTypeToTraceKind<T>::kind));
return unmarker.unmarkedAny;
}

View File

@ -738,7 +738,7 @@ js::Nursery::doCollection(JS::gcreason::Reason reason,
maybeStartProfile(ProfileKey::MarkDebugger);
{
gcstats::AutoPhase ap(rt->gc.stats(), gcstats::PhaseKind::MARK_ROOTS);
gcstats::AutoPhase ap(rt->gc.stats(), gcstats::PHASE_MARK_ROOTS);
Debugger::traceAllForMovingGC(&mover);
}
maybeEndProfile(ProfileKey::MarkDebugger);

View File

@ -259,7 +259,7 @@ js::gc::GCRuntime::traceRuntimeForMajorGC(JSTracer* trc, AutoLockForExclusiveAcc
if (rt->isBeingDestroyed())
return;
gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::MARK_ROOTS);
gcstats::AutoPhase ap(stats(), gcstats::PHASE_MARK_ROOTS);
if (rt->atomsCompartment(lock)->zone()->isCollecting())
traceRuntimeAtoms(trc, lock);
JSCompartment::traceIncomingCrossCompartmentEdgesForZoneGC(trc);
@ -276,7 +276,7 @@ js::gc::GCRuntime::traceRuntimeForMinorGC(JSTracer* trc, AutoLockForExclusiveAcc
// the map. And we can reach its trace function despite having finished the
// roots via the edges stored by the pre-barrier verifier when we finish
// the verifier for the last time.
gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::MARK_ROOTS);
gcstats::AutoPhase ap(stats(), gcstats::PHASE_MARK_ROOTS);
jit::JitRuntime::TraceJitcodeGlobalTableForMinorGC(trc);
@ -291,7 +291,7 @@ js::TraceRuntime(JSTracer* trc)
JSRuntime* rt = trc->runtime();
EvictAllNurseries(rt);
AutoPrepareForTracing prep(TlsContext.get(), WithAtoms);
gcstats::AutoPhase ap(rt->gc.stats(), gcstats::PhaseKind::TRACE_HEAP);
gcstats::AutoPhase ap(rt->gc.stats(), gcstats::PHASE_TRACE_HEAP);
rt->gc.traceRuntime(trc, prep.session().lock);
}
@ -300,7 +300,7 @@ js::gc::GCRuntime::traceRuntime(JSTracer* trc, AutoLockForExclusiveAccess& lock)
{
MOZ_ASSERT(!rt->isBeingDestroyed());
gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::MARK_ROOTS);
gcstats::AutoPhase ap(stats(), gcstats::PHASE_MARK_ROOTS);
traceRuntimeAtoms(trc, lock);
traceRuntimeCommon(trc, TraceRuntime, lock);
}
@ -308,7 +308,7 @@ js::gc::GCRuntime::traceRuntime(JSTracer* trc, AutoLockForExclusiveAccess& lock)
void
js::gc::GCRuntime::traceRuntimeAtoms(JSTracer* trc, AutoLockForExclusiveAccess& lock)
{
gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::MARK_RUNTIME_DATA);
gcstats::AutoPhase ap(stats(), gcstats::PHASE_MARK_RUNTIME_DATA);
TracePermanentAtoms(trc);
TraceAtoms(trc, lock);
TraceWellKnownSymbols(trc);
@ -322,7 +322,7 @@ js::gc::GCRuntime::traceRuntimeCommon(JSTracer* trc, TraceOrMarkRuntime traceOrM
MOZ_ASSERT(!TlsContext.get()->suppressGC);
{
gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::MARK_STACK);
gcstats::AutoPhase ap(stats(), gcstats::PHASE_MARK_STACK);
JSContext* cx = TlsContext.get();
for (const CooperatingContext& target : rt->cooperatingContexts()) {
@ -370,7 +370,7 @@ js::gc::GCRuntime::traceRuntimeCommon(JSTracer* trc, TraceOrMarkRuntime traceOrM
// Trace the embedding's black and gray roots.
if (!JS::CurrentThreadIsHeapMinorCollecting()) {
gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::MARK_EMBEDDING);
gcstats::AutoPhase ap(stats(), gcstats::PHASE_MARK_EMBEDDING);
/*
* The embedding can register additional roots here.
@ -431,7 +431,7 @@ js::gc::GCRuntime::finishRoots()
AssertNoRootsTracer trc(rt, TraceWeakMapKeysValues);
AutoPrepareForTracing prep(TlsContext.get(), WithAtoms);
gcstats::AutoPhase ap(rt->gc.stats(), gcstats::PhaseKind::TRACE_HEAP);
gcstats::AutoPhase ap(rt->gc.stats(), gcstats::PHASE_TRACE_HEAP);
traceRuntime(&trc, prep.session().lock);
// Restore the wrapper tracing so that we leak instead of leaving dangling
@ -481,7 +481,7 @@ js::gc::GCRuntime::bufferGrayRoots()
for (GCZonesIter zone(rt); !zone.done(); zone.next())
MOZ_ASSERT(zone->gcGrayRoots().empty());
gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::BUFFER_GRAY_ROOTS);
gcstats::AutoPhase ap(stats(), gcstats::PHASE_BUFFER_GRAY_ROOTS);
BufferGrayRootsTracer grayBufferer(rt);
if (JSTraceDataOp op = grayRootTracer.op)

View File

@ -31,7 +31,6 @@ using namespace js::gc;
using namespace js::gcstats;
using mozilla::DebugOnly;
using mozilla::EnumeratedArray;
using mozilla::IntegerRange;
using mozilla::PodArrayZero;
using mozilla::PodZero;
@ -45,10 +44,10 @@ using mozilla::TimeDuration;
*/
JS_STATIC_ASSERT(JS::gcreason::NUM_TELEMETRY_REASONS >= JS::gcreason::NUM_REASONS);
static inline decltype(mozilla::MakeEnumeratedRange(PhaseKind::FIRST, PhaseKind::LIMIT))
AllPhaseKinds()
static inline decltype(mozilla::MakeEnumeratedRange(PHASE_FIRST, PHASE_LIMIT))
AllPhases()
{
return mozilla::MakeEnumeratedRange(PhaseKind::FIRST, PhaseKind::LIMIT);
return mozilla::MakeEnumeratedRange(PHASE_FIRST, PHASE_LIMIT);
}
const char*
@ -91,92 +90,219 @@ js::gcstats::ExplainAbortReason(gc::AbortReason reason)
}
}
struct PhaseKindInfo
{
Phase firstPhase;
uint8_t telemetryBucket;
};
// PhaseInfo objects form a tree.
struct PhaseInfo
{
Phase parent;
Phase firstChild;
Phase nextSibling;
Phase nextInPhase;
PhaseKind phaseKind;
uint8_t depth;
const char* name;
};
// A table of PhaseInfo indexed by Phase.
using PhaseTable = EnumeratedArray<Phase, Phase::LIMIT, PhaseInfo>;
// A table of PhaseKindInfo indexed by PhaseKind.
using PhaseKindTable = EnumeratedArray<PhaseKind, PhaseKind::LIMIT, PhaseKindInfo>;
#include "gc/StatsPhasesGenerated.cpp"
static double
t(TimeDuration duration)
{
return duration.ToMilliseconds();
}
Phase
Statistics::currentPhase() const
struct PhaseInfo
{
return phaseNestingDepth ? phaseNesting[phaseNestingDepth - 1] : Phase::NONE;
}
Phase index;
const char* name;
Phase parent;
uint8_t telemetryBucket;
};
PhaseKind
Statistics::currentPhaseKind() const
// The zeroth entry in the timing arrays is used for phases that have a
// unique lineage.
static const size_t PHASE_DAG_NONE = 0;
// These are really just fields of PhaseInfo, but I have to initialize them
// programmatically, which prevents making phases[] const. (And marking these
// fields mutable does not work on Windows; the whole thing gets created in
// read-only memory anyway.)
struct ExtraPhaseInfo
{
// Public API to get the current phase. Return the current phase,
// suppressing the synthetic PhaseKind::MUTATOR phase.
// Depth in the tree of each phase type
size_t depth;
Phase phase = currentPhase();
MOZ_ASSERT_IF(phase == Phase::MUTATOR, phaseNestingDepth == 1);
if (phase == Phase::NONE || phase == Phase::MUTATOR)
return PhaseKind::NONE;
// Index into the set of parallel arrays of timing data, for parents with
// at least one multi-parented child
size_t dagSlot;
return phases[phase].phaseKind;
}
ExtraPhaseInfo() : depth(0), dagSlot(0) {}
};
Phase
Statistics::lookupChildPhase(PhaseKind phaseKind) const
{
if (phaseKind == PhaseKind::IMPLICIT_SUSPENSION)
return Phase::IMPLICIT_SUSPENSION;
if (phaseKind == PhaseKind::EXPLICIT_SUSPENSION)
return Phase::EXPLICIT_SUSPENSION;
static const Phase PHASE_NO_PARENT = PHASE_LIMIT;
MOZ_ASSERT(phaseKind < PhaseKind::LIMIT);
struct DagChildEdge {
Phase parent;
Phase child;
} dagChildEdges[] = {
{ PHASE_MARK, PHASE_MARK_ROOTS },
{ PHASE_MINOR_GC, PHASE_MARK_ROOTS },
{ PHASE_TRACE_HEAP, PHASE_MARK_ROOTS },
{ PHASE_EVICT_NURSERY, PHASE_MARK_ROOTS },
{ PHASE_COMPACT_UPDATE, PHASE_MARK_ROOTS }
};
// Most phases only correspond to a single expanded phase so check for that
// first.
Phase phase = phaseKinds[phaseKind].firstPhase;
if (phases[phase].nextInPhase == Phase::NONE) {
MOZ_ASSERT(phases[phase].parent == currentPhase());
return phase;
/*
* Note that PHASE_MUTATOR never has any child phases. If beginPhase is called
* while PHASE_MUTATOR is active, it will automatically be suspended and
* resumed when the phase stack is next empty. Timings for these phases are
* thus exclusive of any other phase.
*/
static const PhaseInfo phases[] = {
{ PHASE_MUTATOR, "Mutator Running", PHASE_NO_PARENT, 0 },
{ PHASE_GC_BEGIN, "Begin Callback", PHASE_NO_PARENT, 1 },
{ PHASE_WAIT_BACKGROUND_THREAD, "Wait Background Thread", PHASE_NO_PARENT, 2 },
{ PHASE_MARK_DISCARD_CODE, "Mark Discard Code", PHASE_NO_PARENT, 3 },
{ PHASE_RELAZIFY_FUNCTIONS, "Relazify Functions", PHASE_NO_PARENT, 4 },
{ PHASE_PURGE, "Purge", PHASE_NO_PARENT, 5 },
{ PHASE_MARK, "Mark", PHASE_NO_PARENT, 6 },
{ PHASE_UNMARK, "Unmark", PHASE_MARK, 7 },
/* PHASE_MARK_ROOTS */
{ PHASE_MARK_DELAYED, "Mark Delayed", PHASE_MARK, 8 },
{ PHASE_SWEEP, "Sweep", PHASE_NO_PARENT, 9 },
{ PHASE_SWEEP_MARK, "Mark During Sweeping", PHASE_SWEEP, 10 },
{ PHASE_SWEEP_MARK_TYPES, "Mark Types During Sweeping", PHASE_SWEEP_MARK, 11 },
{ PHASE_SWEEP_MARK_INCOMING_BLACK, "Mark Incoming Black Pointers", PHASE_SWEEP_MARK, 12 },
{ PHASE_SWEEP_MARK_WEAK, "Mark Weak", PHASE_SWEEP_MARK, 13 },
{ PHASE_SWEEP_MARK_INCOMING_GRAY, "Mark Incoming Gray Pointers", PHASE_SWEEP_MARK, 14 },
{ PHASE_SWEEP_MARK_GRAY, "Mark Gray", PHASE_SWEEP_MARK, 15 },
{ PHASE_SWEEP_MARK_GRAY_WEAK, "Mark Gray and Weak", PHASE_SWEEP_MARK, 16 },
{ PHASE_FINALIZE_START, "Finalize Start Callbacks", PHASE_SWEEP, 17 },
{ PHASE_WEAK_ZONES_CALLBACK, "Per-Slice Weak Callback", PHASE_FINALIZE_START, 57 },
{ PHASE_WEAK_COMPARTMENT_CALLBACK, "Per-Compartment Weak Callback", PHASE_FINALIZE_START, 58 },
{ PHASE_SWEEP_ATOMS, "Sweep Atoms", PHASE_SWEEP, 18 },
{ PHASE_SWEEP_COMPARTMENTS, "Sweep Compartments", PHASE_SWEEP, 20 },
{ PHASE_SWEEP_DISCARD_CODE, "Sweep Discard Code", PHASE_SWEEP_COMPARTMENTS, 21 },
{ PHASE_SWEEP_INNER_VIEWS, "Sweep Inner Views", PHASE_SWEEP_COMPARTMENTS, 22 },
{ PHASE_SWEEP_CC_WRAPPER, "Sweep Cross Compartment Wrappers", PHASE_SWEEP_COMPARTMENTS, 23 },
{ PHASE_SWEEP_BASE_SHAPE, "Sweep Base Shapes", PHASE_SWEEP_COMPARTMENTS, 24 },
{ PHASE_SWEEP_INITIAL_SHAPE, "Sweep Initial Shapes", PHASE_SWEEP_COMPARTMENTS, 25 },
{ PHASE_SWEEP_TYPE_OBJECT, "Sweep Type Objects", PHASE_SWEEP_COMPARTMENTS, 26 },
{ PHASE_SWEEP_BREAKPOINT, "Sweep Breakpoints", PHASE_SWEEP_COMPARTMENTS, 27 },
{ PHASE_SWEEP_REGEXP, "Sweep Regexps", PHASE_SWEEP_COMPARTMENTS, 28 },
{ PHASE_SWEEP_COMPRESSION, "Sweep Compression Tasks", PHASE_SWEEP_COMPARTMENTS, 62 },
{ PHASE_SWEEP_WEAKMAPS, "Sweep WeakMaps", PHASE_SWEEP_COMPARTMENTS, 63 },
{ PHASE_SWEEP_UNIQUEIDS, "Sweep Unique IDs", PHASE_SWEEP_COMPARTMENTS, 64 },
{ PHASE_SWEEP_JIT_DATA, "Sweep JIT Data", PHASE_SWEEP_COMPARTMENTS, 65 },
{ PHASE_SWEEP_WEAK_CACHES, "Sweep Weak Caches", PHASE_SWEEP_COMPARTMENTS, 66 },
{ PHASE_SWEEP_MISC, "Sweep Miscellaneous", PHASE_SWEEP_COMPARTMENTS, 29 },
{ PHASE_SWEEP_TYPES, "Sweep type information", PHASE_SWEEP_COMPARTMENTS, 30 },
{ PHASE_SWEEP_TYPES_BEGIN, "Sweep type tables and compilations", PHASE_SWEEP_TYPES, 31 },
{ PHASE_SWEEP_TYPES_END, "Free type arena", PHASE_SWEEP_TYPES, 32 },
{ PHASE_SWEEP_OBJECT, "Sweep Object", PHASE_SWEEP, 33 },
{ PHASE_SWEEP_STRING, "Sweep String", PHASE_SWEEP, 34 },
{ PHASE_SWEEP_SCRIPT, "Sweep Script", PHASE_SWEEP, 35 },
{ PHASE_SWEEP_SCOPE, "Sweep Scope", PHASE_SWEEP, 59 },
{ PHASE_SWEEP_REGEXP_SHARED, "Sweep RegExpShared", PHASE_SWEEP, 61 },
{ PHASE_SWEEP_SHAPE, "Sweep Shape", PHASE_SWEEP, 36 },
{ PHASE_SWEEP_JITCODE, "Sweep JIT code", PHASE_SWEEP, 37 },
{ PHASE_FINALIZE_END, "Finalize End Callback", PHASE_SWEEP, 38 },
{ PHASE_DESTROY, "Deallocate", PHASE_SWEEP, 39 },
{ PHASE_COMPACT, "Compact", PHASE_NO_PARENT, 40 },
{ PHASE_COMPACT_MOVE, "Compact Move", PHASE_COMPACT, 41 },
{ PHASE_COMPACT_UPDATE, "Compact Update", PHASE_COMPACT, 42 },
/* PHASE_MARK_ROOTS */
{ PHASE_COMPACT_UPDATE_CELLS, "Compact Update Cells", PHASE_COMPACT_UPDATE, 43 },
{ PHASE_GC_END, "End Callback", PHASE_NO_PARENT, 44 },
{ PHASE_MINOR_GC, "All Minor GCs", PHASE_NO_PARENT, 45 },
/* PHASE_MARK_ROOTS */
{ PHASE_EVICT_NURSERY, "Minor GCs to Evict Nursery", PHASE_NO_PARENT, 46 },
/* PHASE_MARK_ROOTS */
{ PHASE_TRACE_HEAP, "Trace Heap", PHASE_NO_PARENT, 47 },
/* PHASE_MARK_ROOTS */
{ PHASE_BARRIER, "Barriers", PHASE_NO_PARENT, 55 },
{ PHASE_UNMARK_GRAY, "Unmark gray", PHASE_BARRIER, 56 },
{ PHASE_MARK_ROOTS, "Mark Roots", PHASE_MULTI_PARENTS, 48 },
{ PHASE_BUFFER_GRAY_ROOTS, "Buffer Gray Roots", PHASE_MARK_ROOTS, 49 },
{ PHASE_MARK_CCWS, "Mark Cross Compartment Wrappers", PHASE_MARK_ROOTS, 50 },
{ PHASE_MARK_STACK, "Mark C and JS stacks", PHASE_MARK_ROOTS, 51 },
{ PHASE_MARK_RUNTIME_DATA, "Mark Runtime-wide Data", PHASE_MARK_ROOTS, 52 },
{ PHASE_MARK_EMBEDDING, "Mark Embedding", PHASE_MARK_ROOTS, 53 },
{ PHASE_MARK_COMPARTMENTS, "Mark Compartments", PHASE_MARK_ROOTS, 54 },
{ PHASE_PURGE_SHAPE_TABLES, "Purge ShapeTables", PHASE_NO_PARENT, 60 },
{ PHASE_LIMIT, nullptr, PHASE_NO_PARENT, 66 }
// The current number of telemetryBuckets is equal to the value for
// PHASE_LIMIT. If you insert new phases somewhere, start at that number and
// count up. Do not change any existing numbers.
};
static mozilla::EnumeratedArray<Phase, PHASE_LIMIT, ExtraPhaseInfo> phaseExtra;
// Mapping from all nodes with a multi-parented child to a Vector of all
// multi-parented children and their descendants. (Single-parented children will
// not show up in this list.)
static mozilla::Vector<Phase, 0, SystemAllocPolicy> dagDescendants[Statistics::NumTimingArrays];
// Preorder iterator over all phases in the expanded tree. Positions are
// returned as <phase,dagSlot> pairs (dagSlot will be zero aka PHASE_DAG_NONE
// for the top nodes with a single path from the parent, and 1 or more for
// nodes in multiparented subtrees).
struct AllPhaseIterator {
// If 'descendants' is empty, the current Phase position.
int current;
// The depth of the current multiparented node that we are processing, or
// zero if we are pointing to the top portion of the tree.
int baseLevel;
// When looking at multiparented descendants, the dag slot (index into
// PhaseTimeTables) containing the entries for the current parent.
size_t activeSlot;
// When iterating over a multiparented subtree, the list of (remaining)
// subtree nodes.
mozilla::Vector<Phase, 0, SystemAllocPolicy>::Range descendants;
explicit AllPhaseIterator()
: current(0)
, baseLevel(0)
, activeSlot(PHASE_DAG_NONE)
, descendants(dagDescendants[PHASE_DAG_NONE].all()) /* empty range */
{
}
// Otherwise search all expanded phases that correspond to the required
// phase to find the one whose parent is the current expanded phase.
Phase parent = currentPhase();
while (phases[phase].parent != parent) {
phase = phases[phase].nextInPhase;
MOZ_ASSERT(phase != Phase::NONE);
void get(Phase* phase, size_t* dagSlot, int* level = nullptr) {
MOZ_ASSERT(!done());
*dagSlot = activeSlot;
*phase = descendants.empty() ? Phase(current) : descendants.front();
if (level)
*level = phaseExtra[*phase].depth + baseLevel;
}
return phase;
}
void advance() {
MOZ_ASSERT(!done());
inline decltype(mozilla::MakeEnumeratedRange(Phase::FIRST, Phase::LIMIT))
AllPhases()
{
return mozilla::MakeEnumeratedRange(Phase::FIRST, Phase::LIMIT);
}
if (!descendants.empty()) {
// Currently iterating over a multiparented subtree.
descendants.popFront();
if (!descendants.empty())
return;
// Just before leaving the last child, reset the iterator to look
// at "main" phases (in PHASE_DAG_NONE) instead of multiparented
// subtree phases.
++current;
activeSlot = PHASE_DAG_NONE;
baseLevel = 0;
return;
}
auto phase = Phase(current);
if (phaseExtra[phase].dagSlot != PHASE_DAG_NONE) {
// The current phase has a shared subtree. Load them up into
// 'descendants' and advance to the first child.
activeSlot = phaseExtra[phase].dagSlot;
descendants = dagDescendants[activeSlot].all();
MOZ_ASSERT(!descendants.empty());
baseLevel += phaseExtra[phase].depth + 1;
return;
}
++current;
}
bool done() const {
return phases[current].parent == PHASE_MULTI_PARENTS;
}
};
void
Statistics::gcDuration(TimeDuration* total, TimeDuration* maxPause) const
@ -204,8 +330,7 @@ Statistics::sccDurations(TimeDuration* total, TimeDuration* maxPause) const
typedef Vector<UniqueChars, 8, SystemAllocPolicy> FragmentVector;
static UniqueChars
Join(const FragmentVector& fragments, const char* separator = "")
{
Join(const FragmentVector& fragments, const char* separator = "") {
const size_t separatorLength = strlen(separator);
size_t length = 0;
for (size_t i = 0; i < fragments.length(); ++i) {
@ -215,10 +340,8 @@ Join(const FragmentVector& fragments, const char* separator = "")
}
char* joined = js_pod_malloc<char>(length + 1);
if (!joined)
return UniqueChars();
joined[length] = '\0';
char* cursor = joined;
for (size_t i = 0; i < fragments.length(); ++i) {
if (fragments[i])
@ -235,14 +358,24 @@ Join(const FragmentVector& fragments, const char* separator = "")
}
static TimeDuration
SumChildTimes(Phase phase, const Statistics::PhaseTimeTable& phaseTimes)
SumChildTimes(size_t phaseSlot, Phase phase, const Statistics::PhaseTimeTable& phaseTimes)
{
// Sum the contributions from single-parented children.
TimeDuration total = 0;
for (phase = phases[phase].firstChild;
phase != Phase::NONE;
phase = phases[phase].nextSibling)
{
total += phaseTimes[phase];
size_t depth = phaseExtra[phase].depth;
for (unsigned i = phase + 1; i < PHASE_LIMIT && phaseExtra[Phase(i)].depth > depth; i++) {
if (phases[i].parent == phase)
total += phaseTimes[phaseSlot][Phase(i)];
}
// Sum the contributions from multi-parented children.
size_t dagSlot = phaseExtra[phase].dagSlot;
MOZ_ASSERT(dagSlot <= Statistics::MaxMultiparentPhases - 1);
if (dagSlot != PHASE_DAG_NONE) {
for (auto edge : dagChildEdges) {
if (edge.parent == phase)
total += phaseTimes[dagSlot][edge.child];
}
}
return total;
}
@ -337,12 +470,15 @@ Statistics::formatCompactSlicePhaseTimes(const PhaseTimeTable& phaseTimes) const
FragmentVector fragments;
char buffer[128];
for (auto phase : AllPhases()) {
DebugOnly<uint8_t> level = phases[phase].depth;
for (AllPhaseIterator iter; !iter.done(); iter.advance()) {
Phase phase;
size_t dagSlot;
int level;
iter.get(&phase, &dagSlot, &level);
MOZ_ASSERT(level < 4);
TimeDuration ownTime = phaseTimes[phase];
TimeDuration childTime = SumChildTimes(phase, phaseTimes);
TimeDuration ownTime = phaseTimes[dagSlot][phase];
TimeDuration childTime = SumChildTimes(dagSlot, phase, phaseTimes);
if (ownTime > MaxUnaccountedTime) {
SprintfLiteral(buffer, "%s: %.3fms", phases[phase].name, t(ownTime));
if (!fragments.append(DuplicateString(buffer)))
@ -461,10 +597,14 @@ Statistics::formatDetailedPhaseTimes(const PhaseTimeTable& phaseTimes) const
FragmentVector fragments;
char buffer[128];
for (auto phase : AllPhases()) {
uint8_t level = phases[phase].depth;
TimeDuration ownTime = phaseTimes[phase];
TimeDuration childTime = SumChildTimes(phase, phaseTimes);
for (AllPhaseIterator iter; !iter.done(); iter.advance()) {
Phase phase;
size_t dagSlot;
int level;
iter.get(&phase, &dagSlot, &level);
TimeDuration ownTime = phaseTimes[dagSlot][phase];
TimeDuration childTime = SumChildTimes(dagSlot, phase, phaseTimes);
if (!ownTime.IsZero()) {
SprintfLiteral(buffer, " %*s: %.3fms\n",
level * 2, phases[phase].name, t(ownTime));
@ -642,11 +782,15 @@ SanitizeJsonKey(const char* const buffer)
void
Statistics::formatJsonPhaseTimes(const PhaseTimeTable& phaseTimes, JSONPrinter& json) const
{
for (auto phase : AllPhases()) {
for (AllPhaseIterator iter; !iter.done(); iter.advance()) {
Phase phase;
size_t dagSlot;
iter.get(&phase, &dagSlot);
UniqueChars name = SanitizeJsonKey(phases[phase].name);
if (!name)
json.outOfMemory();
TimeDuration ownTime = phaseTimes[phase];
TimeDuration ownTime = phaseTimes[dagSlot][phase];
if (!ownTime.IsZero())
json.property(name.get(), ownTime, JSONPrinter::MILLISECONDS);
}
@ -659,6 +803,7 @@ Statistics::Statistics(JSRuntime* rt)
preBytes(0),
maxPauseInInterval(0),
phaseNestingDepth(0),
activeDagSlot(PHASE_DAG_NONE),
suspended(0),
sliceCallback(nullptr),
nurseryCollectionCallback(nullptr),
@ -708,37 +853,54 @@ Statistics::~Statistics()
Statistics::initialize()
{
#ifdef DEBUG
// Sanity check generated tables.
for (auto i : AllPhases()) {
auto parent = phases[i].parent;
if (parent != Phase::NONE) {
MOZ_ASSERT(phases[i].depth == phases[parent].depth + 1);
}
auto firstChild = phases[i].firstChild;
if (firstChild != Phase::NONE) {
MOZ_ASSERT(i == phases[firstChild].parent);
MOZ_ASSERT(phases[i].depth == phases[firstChild].depth - 1);
}
auto nextSibling = phases[i].nextSibling;
if (nextSibling != Phase::NONE) {
MOZ_ASSERT(parent == phases[nextSibling].parent);
MOZ_ASSERT(phases[i].depth == phases[nextSibling].depth);
}
auto nextInPhase = phases[i].nextInPhase;
if (nextInPhase != Phase::NONE) {
MOZ_ASSERT(phases[i].phaseKind == phases[nextInPhase].phaseKind);
MOZ_ASSERT(parent != phases[nextInPhase].parent);
}
}
for (auto i : AllPhaseKinds()) {
MOZ_ASSERT(phases[phaseKinds[i].firstPhase].phaseKind == i);
for (auto j : AllPhaseKinds()) {
MOZ_ASSERT_IF(i != j,
phaseKinds[i].telemetryBucket != phaseKinds[j].telemetryBucket);
}
MOZ_ASSERT(phases[i].index == i);
for (auto j : AllPhases())
MOZ_ASSERT_IF(i != j, phases[i].telemetryBucket != phases[j].telemetryBucket);
}
#endif
// Create a static table of descendants for every phase with multiple
// children. This assumes that all descendants come linearly in the
// list, which is reasonable since full dags are not supported; any
// path from the leaf to the root must encounter at most one node with
// multiple parents.
size_t dagSlot = 0;
for (size_t i = 0; i < mozilla::ArrayLength(dagChildEdges); i++) {
Phase parent = dagChildEdges[i].parent;
if (!phaseExtra[parent].dagSlot)
phaseExtra[parent].dagSlot = ++dagSlot;
Phase child = dagChildEdges[i].child;
MOZ_ASSERT(phases[child].parent == PHASE_MULTI_PARENTS);
int j = child;
do {
if (!dagDescendants[phaseExtra[parent].dagSlot].append(Phase(j)))
return false;
j++;
} while (j != PHASE_LIMIT && phases[j].parent != PHASE_MULTI_PARENTS);
}
MOZ_ASSERT(dagSlot <= MaxMultiparentPhases - 1);
// Fill in the depth of each node in the tree. Multi-parented nodes
// have depth 0.
mozilla::Vector<Phase, 0, SystemAllocPolicy> stack;
if (!stack.append(PHASE_LIMIT)) // Dummy entry to avoid special-casing the first node
return false;
for (auto i : AllPhases()) {
if (phases[i].parent == PHASE_NO_PARENT ||
phases[i].parent == PHASE_MULTI_PARENTS)
{
stack.clear();
} else {
while (stack.back() != phases[i].parent)
stack.popBack();
}
phaseExtra[i].depth = stack.length();
if (!stack.append(i))
return false;
}
return true;
}
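The restored Statistics::initialize() above fills in phaseExtra[i].depth by walking the flat phases[] table with an explicit stack, relying on every child appearing after its parent in declaration order. The Python sketch below shows the same depth computation on a made-up phase table; the names and layout are illustrative only.

    # Sketch of the depth computation: walk a flat (phase, parent) list where
    # children directly follow their parents, using a stack of ancestors.
    NO_PARENT = None

    toy_phases = [                      # (name, parent) in declaration order
        ("MARK", NO_PARENT),
        ("UNMARK", "MARK"),
        ("MARK_DELAYED", "MARK"),
        ("SWEEP", NO_PARENT),
        ("SWEEP_MARK", "SWEEP"),
        ("SWEEP_MARK_GRAY", "SWEEP_MARK"),
    ]

    def compute_depths(phases):
        depths = {}
        stack = []                      # ancestors of the current phase
        for name, parent in phases:
            if parent is NO_PARENT:
                stack.clear()
            else:
                while stack and stack[-1] != parent:
                    stack.pop()         # pop finished sibling subtrees
            depths[name] = len(stack)
            stack.append(name)
        return depths

    assert compute_depths(toy_phases) == {
        "MARK": 0, "UNMARK": 1, "MARK_DELAYED": 1,
        "SWEEP": 0, "SWEEP_MARK": 1, "SWEEP_MARK_GRAY": 2,
    }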
@ -775,65 +937,68 @@ Statistics::getMaxGCPauseSinceClear()
// Sum up the time for a phase, including instances of the phase with different
// parents.
static TimeDuration
SumPhase(PhaseKind phaseKind, const Statistics::PhaseTimeTable& times)
SumPhase(Phase phase, const Statistics::PhaseTimeTable& times)
{
TimeDuration sum = 0;
for (Phase phase = phaseKinds[phaseKind].firstPhase;
phase != Phase::NONE;
phase = phases[phase].nextInPhase)
{
sum += times[phase];
}
for (const auto& phaseTimes : times)
sum += phaseTimes[phase];
return sum;
}
static void
CheckSelfTime(Phase parent,
Phase child,
const Statistics::PhaseTimeTable& times,
const Statistics::PhaseTimeTable& selfTimes,
TimeDuration childTime)
CheckSelfTime(Phase parent, Phase child, const Statistics::PhaseTimeTable& times, TimeDuration selfTimes[PHASE_LIMIT], TimeDuration childTime)
{
if (selfTimes[parent] < childTime) {
fprintf(stderr,
"Parent %s time = %.3fms with %.3fms remaining, child %s time %.3fms\n",
phases[parent].name,
times[parent].ToMilliseconds(),
"Parent %s time = %.3fms"
" with %.3fms remaining, "
"child %s time %.3fms\n",
phases[parent].name, SumPhase(parent, times).ToMilliseconds(),
selfTimes[parent].ToMilliseconds(),
phases[child].name,
childTime.ToMilliseconds());
MOZ_CRASH();
phases[child].name, childTime.ToMilliseconds());
}
}
static PhaseKind
static Phase
LongestPhaseSelfTime(const Statistics::PhaseTimeTable& times)
{
// Start with total times per expanded phase, including children's times.
Statistics::PhaseTimeTable selfTimes(times);
TimeDuration selfTimes[PHASE_LIMIT];
// Start with total times, including children's times.
for (auto i : AllPhases())
selfTimes[i] = SumPhase(i, times);
// We have the total time spent in each phase, including descendant times.
// Loop over the children and subtract their times from their parent's self
// time.
for (auto i : AllPhases()) {
Phase parent = phases[i].parent;
if (parent != Phase::NONE) {
CheckSelfTime(parent, i, times, selfTimes, times[i]);
selfTimes[parent] -= times[i];
if (parent == PHASE_MULTI_PARENTS) {
// Current phase i has multiple parents. Each "instance" of this
// phase is in a parallel array of times indexed by 'dagSlot', so
// subtract only the dagSlot-specific child's time from the parent.
for (auto edge : dagChildEdges) {
if (edge.parent == i) {
size_t dagSlot = phaseExtra[edge.parent].dagSlot;
MOZ_ASSERT(dagSlot <= Statistics::MaxMultiparentPhases - 1);
CheckSelfTime(edge.parent, edge.child, times,
selfTimes, times[dagSlot][edge.child]);
MOZ_ASSERT(selfTimes[edge.parent] >= times[dagSlot][edge.child]);
selfTimes[edge.parent] -= times[dagSlot][edge.child];
}
}
} else if (parent != PHASE_NO_PARENT) {
CheckSelfTime(parent, i, times, selfTimes, selfTimes[i]);
MOZ_ASSERT(selfTimes[parent] >= selfTimes[i]);
selfTimes[parent] -= selfTimes[i];
}
}
// Sum expanded phases corresponding to the same phase kind.
EnumeratedArray<PhaseKind, PhaseKind::LIMIT, TimeDuration> phaseTimes;
for (auto i : AllPhaseKinds())
phaseTimes[i] = SumPhase(i, selfTimes);
// Loop over this table to find the longest phase.
TimeDuration longestTime = 0;
PhaseKind longestPhase = PhaseKind::NONE;
for (auto i : AllPhaseKinds()) {
if (phaseTimes[i] > longestTime) {
longestTime = phaseTimes[i];
Phase longestPhase = PHASE_NONE;
for (auto i : AllPhases()) {
if (selfTimes[i] > longestTime) {
longestTime = selfTimes[i];
longestPhase = i;
}
}
@ -872,20 +1037,25 @@ Statistics::beginGC(JSGCInvocationKind kind)
void
Statistics::endGC()
{
for (auto j : IntegerRange(NumTimingArrays)) {
for (auto i : AllPhases())
phaseTotals[j][i] += phaseTimes[j][i];
}
TimeDuration sccTotal, sccLongest;
sccDurations(&sccTotal, &sccLongest);
runtime->addTelemetry(JS_TELEMETRY_GC_IS_ZONE_GC, !zoneStats.isCollectingAllZones());
TimeDuration markTotal = SumPhase(PhaseKind::MARK, phaseTimes);
TimeDuration markRootsTotal = SumPhase(PhaseKind::MARK_ROOTS, phaseTimes);
TimeDuration markTotal = SumPhase(PHASE_MARK, phaseTimes);
TimeDuration markRootsTotal = SumPhase(PHASE_MARK_ROOTS, phaseTimes);
runtime->addTelemetry(JS_TELEMETRY_GC_MARK_MS, t(markTotal));
runtime->addTelemetry(JS_TELEMETRY_GC_SWEEP_MS, t(phaseTimes[Phase::SWEEP]));
runtime->addTelemetry(JS_TELEMETRY_GC_SWEEP_MS, t(phaseTimes[PHASE_DAG_NONE][PHASE_SWEEP]));
if (runtime->gc.isCompactingGc()) {
runtime->addTelemetry(JS_TELEMETRY_GC_COMPACT_MS,
t(phaseTimes[Phase::COMPACT]));
t(phaseTimes[PHASE_DAG_NONE][PHASE_COMPACT]));
}
runtime->addTelemetry(JS_TELEMETRY_GC_MARK_ROOTS_MS, t(markRootsTotal));
runtime->addTelemetry(JS_TELEMETRY_GC_MARK_GRAY_MS, t(phaseTimes[Phase::SWEEP_MARK_GRAY]));
runtime->addTelemetry(JS_TELEMETRY_GC_MARK_GRAY_MS, t(phaseTimes[PHASE_DAG_NONE][PHASE_SWEEP_MARK_GRAY]));
runtime->addTelemetry(JS_TELEMETRY_GC_NON_INCREMENTAL, nonincremental());
if (nonincremental())
runtime->addTelemetry(JS_TELEMETRY_GC_NON_INCREMENTAL_REASON, uint32_t(nonincrementalReason_));
@ -986,9 +1156,8 @@ Statistics::endSlice()
// Record any phase that goes more than 2x over its budget.
if (sliceTime.ToMilliseconds() > 2 * budget_ms) {
PhaseKind longest = LongestPhaseSelfTime(slices_.back().phaseTimes);
uint8_t bucket = phaseKinds[longest].telemetryBucket;
runtime->addTelemetry(JS_TELEMETRY_GC_SLOW_PHASE, bucket);
Phase longest = LongestPhaseSelfTime(slices_.back().phaseTimes);
runtime->addTelemetry(JS_TELEMETRY_GC_SLOW_PHASE, phases[longest].telemetryBucket);
}
}
@ -1016,13 +1185,11 @@ Statistics::endSlice()
for (auto& count : counts)
count = 0;
// Clear the timers at the end of a GC, preserving the data for PhaseKind::MUTATOR.
auto mutatorStartTime = phaseStartTimes[Phase::MUTATOR];
auto mutatorTime = phaseTimes[Phase::MUTATOR];
PodZero(&phaseStartTimes);
PodZero(&phaseTimes);
phaseStartTimes[Phase::MUTATOR] = mutatorStartTime;
phaseTimes[Phase::MUTATOR] = mutatorTime;
// Clear the timers at the end of a GC because we accumulate time in
// between GCs for some (which come before PHASE_GC_BEGIN in the list.)
PodZero(&phaseStartTimes[PHASE_GC_BEGIN], PHASE_LIMIT - PHASE_GC_BEGIN);
for (size_t d = PHASE_DAG_NONE; d < NumTimingArrays; d++)
PodZero(&phaseTimes[d][PHASE_GC_BEGIN], PHASE_LIMIT - PHASE_GC_BEGIN);
}
}
@ -1032,18 +1199,18 @@ Statistics::startTimingMutator()
if (phaseNestingDepth != 0) {
// Should only be called from outside of GC.
MOZ_ASSERT(phaseNestingDepth == 1);
MOZ_ASSERT(phaseNesting[0] == Phase::MUTATOR);
MOZ_ASSERT(phaseNesting[0] == PHASE_MUTATOR);
return false;
}
MOZ_ASSERT(suspended == 0);
timedGCTime = 0;
phaseStartTimes[Phase::MUTATOR] = TimeStamp();
phaseTimes[Phase::MUTATOR] = 0;
phaseStartTimes[PHASE_MUTATOR] = TimeStamp();
phaseTimes[PHASE_DAG_NONE][PHASE_MUTATOR] = 0;
timedGCStart = TimeStamp();
beginPhase(PhaseKind::MUTATOR);
beginPhase(PHASE_MUTATOR);
return true;
}
@ -1051,76 +1218,75 @@ bool
Statistics::stopTimingMutator(double& mutator_ms, double& gc_ms)
{
// This should only be called from outside of GC, while timing the mutator.
if (phaseNestingDepth != 1 || phaseNesting[0] != Phase::MUTATOR)
if (phaseNestingDepth != 1 || phaseNesting[0] != PHASE_MUTATOR)
return false;
endPhase(PhaseKind::MUTATOR);
mutator_ms = t(phaseTimes[Phase::MUTATOR]);
endPhase(PHASE_MUTATOR);
mutator_ms = t(phaseTimes[PHASE_DAG_NONE][PHASE_MUTATOR]);
gc_ms = t(timedGCTime);
return true;
}
void
Statistics::suspendPhases(PhaseKind suspension)
Statistics::suspendPhases(Phase suspension)
{
MOZ_ASSERT(suspension == PhaseKind::EXPLICIT_SUSPENSION ||
suspension == PhaseKind::IMPLICIT_SUSPENSION);
MOZ_ASSERT(suspension == PHASE_EXPLICIT_SUSPENSION || suspension == PHASE_IMPLICIT_SUSPENSION);
while (phaseNestingDepth) {
MOZ_ASSERT(suspended < mozilla::ArrayLength(suspendedPhases));
Phase parent = phaseNesting[phaseNestingDepth - 1];
suspendedPhases[suspended++] = parent;
recordPhaseEnd(parent);
}
suspendedPhases[suspended++] = lookupChildPhase(suspension);
suspendedPhases[suspended++] = suspension;
}
void
Statistics::resumePhases()
{
#ifdef DEBUG
Phase popped = suspendedPhases[--suspended];
MOZ_ASSERT(popped == Phase::EXPLICIT_SUSPENSION ||
popped == Phase::IMPLICIT_SUSPENSION);
#endif
DebugOnly<Phase> popped = suspendedPhases[--suspended];
MOZ_ASSERT(popped == PHASE_EXPLICIT_SUSPENSION || popped == PHASE_IMPLICIT_SUSPENSION);
while (suspended &&
suspendedPhases[suspended - 1] != Phase::EXPLICIT_SUSPENSION &&
suspendedPhases[suspended - 1] != Phase::IMPLICIT_SUSPENSION)
suspendedPhases[suspended - 1] != PHASE_EXPLICIT_SUSPENSION &&
suspendedPhases[suspended - 1] != PHASE_IMPLICIT_SUSPENSION)
{
Phase resumePhase = suspendedPhases[--suspended];
if (resumePhase == Phase::MUTATOR)
if (resumePhase == PHASE_MUTATOR)
timedGCTime += TimeStamp::Now() - timedGCStart;
recordPhaseBegin(resumePhase);
beginPhase(resumePhase);
}
}
void
Statistics::beginPhase(PhaseKind phaseKind)
Statistics::beginPhase(Phase phase)
{
// No longer timing these phases. We should never see these.
MOZ_ASSERT(phaseKind != PhaseKind::GC_BEGIN && phaseKind != PhaseKind::GC_END);
MOZ_ASSERT(phase != PHASE_GC_BEGIN && phase != PHASE_GC_END);
// PhaseKind::MUTATOR is suspended while performing GC.
if (currentPhase() == Phase::MUTATOR) {
suspendPhases(PhaseKind::IMPLICIT_SUSPENSION);
Phase parent = phaseNestingDepth ? phaseNesting[phaseNestingDepth - 1] : PHASE_NO_PARENT;
// PHASE_MUTATOR is suspended while performing GC.
if (parent == PHASE_MUTATOR) {
suspendPhases(PHASE_IMPLICIT_SUSPENSION);
parent = phaseNestingDepth ? phaseNesting[phaseNestingDepth - 1] : PHASE_NO_PARENT;
}
recordPhaseBegin(lookupChildPhase(phaseKind));
}
void
Statistics::recordPhaseBegin(Phase phase)
{
// Guard against any other re-entry.
MOZ_ASSERT(!phaseStartTimes[phase]);
MOZ_ASSERT(phases[phase].index == phase);
MOZ_ASSERT(phaseNestingDepth < MAX_NESTING);
MOZ_ASSERT(phases[phase].parent == currentPhase());
MOZ_ASSERT(phases[phase].parent == parent || phases[phase].parent == PHASE_MULTI_PARENTS);
phaseNesting[phaseNestingDepth] = phase;
phaseNestingDepth++;
if (phases[phase].parent == PHASE_MULTI_PARENTS) {
MOZ_ASSERT(parent != PHASE_NO_PARENT);
activeDagSlot = phaseExtra[parent].dagSlot;
}
MOZ_ASSERT(activeDagSlot <= MaxMultiparentPhases - 1);
phaseStartTimes[phase] = TimeStamp::Now();
}
@ -1129,46 +1295,40 @@ Statistics::recordPhaseEnd(Phase phase)
{
TimeStamp now = TimeStamp::Now();
if (phase == Phase::MUTATOR)
if (phase == PHASE_MUTATOR)
timedGCStart = now;
phaseNestingDepth--;
TimeDuration t = now - phaseStartTimes[phase];
if (!slices_.empty())
slices_.back().phaseTimes[phase] += t;
phaseTimes[phase] += t;
slices_.back().phaseTimes[activeDagSlot][phase] += t;
phaseTimes[activeDagSlot][phase] += t;
phaseStartTimes[phase] = TimeStamp();
}
void
Statistics::endPhase(PhaseKind phaseKind)
Statistics::endPhase(Phase phase)
{
Phase phase = currentPhase();
MOZ_ASSERT(phase != Phase::NONE);
MOZ_ASSERT(phases[phase].phaseKind == phaseKind);
recordPhaseEnd(phase);
if (phases[phase].parent == PHASE_MULTI_PARENTS)
activeDagSlot = PHASE_DAG_NONE;
// When emptying the stack, we may need to return to timing the mutator
// (PhaseKind::MUTATOR).
if (phaseNestingDepth == 0 &&
suspended > 0 &&
suspendedPhases[suspended - 1] == Phase::IMPLICIT_SUSPENSION)
{
// (PHASE_MUTATOR).
if (phaseNestingDepth == 0 && suspended > 0 && suspendedPhases[suspended - 1] == PHASE_IMPLICIT_SUSPENSION)
resumePhases();
}
}
void
Statistics::endParallelPhase(PhaseKind phaseKind, const GCParallelTask* task)
Statistics::endParallelPhase(Phase phase, const GCParallelTask* task)
{
Phase phase = lookupChildPhase(phaseKind);
phaseNestingDepth--;
if (!slices_.empty())
slices_.back().phaseTimes[phase] += task->duration();
phaseTimes[phase] += task->duration();
slices_.back().phaseTimes[PHASE_DAG_NONE][phase] += task->duration();
phaseTimes[PHASE_DAG_NONE][phase] += task->duration();
phaseStartTimes[phase] = TimeStamp();
}
@ -1281,7 +1441,7 @@ Statistics::printSliceProfile()
totalTimes_[ProfileKey::Total] += times[ProfileKey::Total];
#define GET_PROFILE_TIME(name, text, phase) \
times[ProfileKey::name] = SumPhase(phase, slice.phaseTimes); \
times[ProfileKey::name] = slice.phaseTimes[PHASE_DAG_NONE][phase]; \
totalTimes_[ProfileKey::name] += times[ProfileKey::name];
FOR_EACH_GC_PROFILE_TIME(GET_PROFILE_TIME)
#undef GET_PROFILE_TIME
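The restored LongestPhaseSelfTime() starts from each phase's total time (which includes its children), subtracts the children's contributions from their parent, and reports the phase with the largest remainder to the JS_TELEMETRY_GC_SLOW_PHASE probe when a slice overruns twice its budget. The Python sketch below is a simplified model of that arithmetic; the phase names and timings are invented for the example.

    # Simplified model of the self-time calculation in LongestPhaseSelfTime().
    parents = {
        "MARK": None,
        "MARK_ROOTS": "MARK",
        "MARK_DELAYED": "MARK",
        "SWEEP": None,
        "SWEEP_MARK": "SWEEP",
    }
    totals_ms = {                       # total time per phase, children included
        "MARK": 30.0,
        "MARK_ROOTS": 8.0,
        "MARK_DELAYED": 2.0,
        "SWEEP": 25.0,
        "SWEEP_MARK": 24.0,
    }

    self_ms = dict(totals_ms)
    for phase, parent in parents.items():
        if parent is not None:
            # CheckSelfTime's invariant: a parent never has less time remaining
            # than the child being subtracted.
            assert self_ms[parent] >= totals_ms[phase]
            self_ms[parent] -= totals_ms[phase]

    longest = max(self_ms, key=self_ms.get)
    assert longest == "SWEEP_MARK"      # 24ms of self time beats MARK's 20ms
    # The C++ code then records the telemetry bucket of the longest phase.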

View File

@ -30,10 +30,82 @@ class GCParallelTask;
namespace gcstats {
// Phase data is generated by a script. If you need to add phases, edit
// js/src/gc/GenerateStatsPhases.py
enum Phase : uint8_t {
PHASE_FIRST,
#include "gc/StatsPhasesGenerated.h"
PHASE_MUTATOR = PHASE_FIRST,
PHASE_GC_BEGIN,
PHASE_WAIT_BACKGROUND_THREAD,
PHASE_MARK_DISCARD_CODE,
PHASE_RELAZIFY_FUNCTIONS,
PHASE_PURGE,
PHASE_MARK,
PHASE_UNMARK,
PHASE_MARK_DELAYED,
PHASE_SWEEP,
PHASE_SWEEP_MARK,
PHASE_SWEEP_MARK_TYPES,
PHASE_SWEEP_MARK_INCOMING_BLACK,
PHASE_SWEEP_MARK_WEAK,
PHASE_SWEEP_MARK_INCOMING_GRAY,
PHASE_SWEEP_MARK_GRAY,
PHASE_SWEEP_MARK_GRAY_WEAK,
PHASE_FINALIZE_START,
PHASE_WEAK_ZONES_CALLBACK,
PHASE_WEAK_COMPARTMENT_CALLBACK,
PHASE_SWEEP_ATOMS,
PHASE_SWEEP_COMPARTMENTS,
PHASE_SWEEP_DISCARD_CODE,
PHASE_SWEEP_INNER_VIEWS,
PHASE_SWEEP_CC_WRAPPER,
PHASE_SWEEP_BASE_SHAPE,
PHASE_SWEEP_INITIAL_SHAPE,
PHASE_SWEEP_TYPE_OBJECT,
PHASE_SWEEP_BREAKPOINT,
PHASE_SWEEP_REGEXP,
PHASE_SWEEP_COMPRESSION,
PHASE_SWEEP_WEAKMAPS,
PHASE_SWEEP_UNIQUEIDS,
PHASE_SWEEP_JIT_DATA,
PHASE_SWEEP_WEAK_CACHES,
PHASE_SWEEP_MISC,
PHASE_SWEEP_TYPES,
PHASE_SWEEP_TYPES_BEGIN,
PHASE_SWEEP_TYPES_END,
PHASE_SWEEP_OBJECT,
PHASE_SWEEP_STRING,
PHASE_SWEEP_SCRIPT,
PHASE_SWEEP_SCOPE,
PHASE_SWEEP_REGEXP_SHARED,
PHASE_SWEEP_SHAPE,
PHASE_SWEEP_JITCODE,
PHASE_FINALIZE_END,
PHASE_DESTROY,
PHASE_COMPACT,
PHASE_COMPACT_MOVE,
PHASE_COMPACT_UPDATE,
PHASE_COMPACT_UPDATE_CELLS,
PHASE_GC_END,
PHASE_MINOR_GC,
PHASE_EVICT_NURSERY,
PHASE_TRACE_HEAP,
PHASE_BARRIER,
PHASE_UNMARK_GRAY,
PHASE_MARK_ROOTS,
PHASE_BUFFER_GRAY_ROOTS,
PHASE_MARK_CCWS,
PHASE_MARK_STACK,
PHASE_MARK_RUNTIME_DATA,
PHASE_MARK_EMBEDDING,
PHASE_MARK_COMPARTMENTS,
PHASE_PURGE_SHAPE_TABLES,
PHASE_LIMIT,
PHASE_NONE = PHASE_LIMIT,
PHASE_EXPLICIT_SUSPENSION = PHASE_LIMIT,
PHASE_IMPLICIT_SUSPENSION,
PHASE_MULTI_PARENTS
};
enum Stat {
STAT_NEW_CHUNK,
@ -79,17 +151,17 @@ struct ZoneGCStats
};
#define FOR_EACH_GC_PROFILE_TIME(_) \
_(BeginCallback, "bgnCB", PhaseKind::GC_BEGIN) \
_(WaitBgThread, "waitBG", PhaseKind::WAIT_BACKGROUND_THREAD) \
_(DiscardCode, "discrd", PhaseKind::MARK_DISCARD_CODE) \
_(RelazifyFunc, "relzfy", PhaseKind::RELAZIFY_FUNCTIONS) \
_(PurgeTables, "prgTbl", PhaseKind::PURGE_SHAPE_TABLES) \
_(Purge, "purge", PhaseKind::PURGE) \
_(Mark, "mark", PhaseKind::MARK) \
_(Sweep, "sweep", PhaseKind::SWEEP) \
_(Compact, "cmpct", PhaseKind::COMPACT) \
_(EndCallback, "endCB", PhaseKind::GC_END) \
_(Barriers, "brrier", PhaseKind::BARRIER)
_(BeginCallback, "bgnCB", PHASE_GC_BEGIN) \
_(WaitBgThread, "waitBG", PHASE_WAIT_BACKGROUND_THREAD) \
_(DiscardCode, "discrd", PHASE_MARK_DISCARD_CODE) \
_(RelazifyFunc, "relzfy", PHASE_RELAZIFY_FUNCTIONS) \
_(PurgeTables, "prgTbl", PHASE_PURGE_SHAPE_TABLES) \
_(Purge, "purge", PHASE_PURGE) \
_(Mark, "mark", PHASE_MARK) \
_(Sweep, "sweep", PHASE_SWEEP) \
_(Compact, "cmpct", PHASE_COMPACT) \
_(EndCallback, "endCB", PHASE_GC_END) \
_(Barriers, "brrier", PHASE_BARRIER)
const char* ExplainAbortReason(gc::AbortReason reason);
const char* ExplainInvocationKind(JSGCInvocationKind gckind);
@ -102,7 +174,7 @@ const char* ExplainInvocationKind(JSGCInvocationKind gckind);
*
* During execution, a child phase can be activated multiple times, and the
* total time will be accumulated. (So for example, you can start and end
* PhaseKind::MARK_ROOTS multiple times before completing the parent phase.)
* PHASE_MARK_ROOTS multiple times before completing the parent phase.)
*
* Incremental GC is represented by recording separate timing results for each
* slice within the overall GC.
@ -118,8 +190,26 @@ struct Statistics
using TimeDuration = mozilla::TimeDuration;
using TimeStamp = mozilla::TimeStamp;
// Create a convenient type for referring to tables of phase times.
using PhaseTimeTable = EnumeratedArray<Phase, Phase::LIMIT, TimeDuration>;
/*
* Phases are allowed to have multiple parents, though any path from root
* to leaf is allowed at most one multi-parented phase. We keep a full set
* of timings for each of the multi-parented phases, to be able to record
* all the timings in the expanded tree induced by our dag.
*
* Note that this wastes quite a bit of space, since we have a whole
* separate array of timing data containing all the phases. We could be
* more clever and keep an array of pointers biased by the offset of the
* multi-parented phase, and thereby preserve the simple
* timings[slot][PHASE_*] indexing. But the complexity doesn't seem worth
* the few hundred bytes of savings. If we want to extend things to full
* DAGs, this decision should be reconsidered.
*/
static const size_t MaxMultiparentPhases = 6;
static const size_t NumTimingArrays = MaxMultiparentPhases + 1;
/* Create a convenient type for referring to tables of phase times. */
using PhaseTimeTable =
Array<EnumeratedArray<Phase, PHASE_LIMIT, TimeDuration>, NumTimingArrays>;
static MOZ_MUST_USE bool initialize();
@ -129,9 +219,9 @@ struct Statistics
Statistics(const Statistics&) = delete;
Statistics& operator=(const Statistics&) = delete;
void beginPhase(PhaseKind phaseKind);
void endPhase(PhaseKind phaseKind);
void endParallelPhase(PhaseKind phaseKind, const GCParallelTask* task);
void beginPhase(Phase phase);
void endPhase(Phase phase);
void endParallelPhase(Phase phase, const GCParallelTask* task);
// Occasionally, we may be in the middle of something that is tracked by
// this class, and we need to do something unusual (eg evict the nursery)
@ -139,11 +229,11 @@ struct Statistics
// currently tracked phase stack, at which time the caller is free to do
// other tracked operations.
//
// This also happens internally with the PhaseKind::MUTATOR "phase". While in
// This also happens internally with the PHASE_MUTATOR "phase". While in
// this phase, any beginPhase will automatically suspend the non-GC phase,
// until that inner stack is complete, at which time it will automatically
// resume the non-GC phase. Explicit suspensions do not get auto-resumed.
void suspendPhases(PhaseKind suspension = PhaseKind::EXPLICIT_SUSPENSION);
void suspendPhases(Phase suspension = PHASE_EXPLICIT_SUSPENSION);
// Resume a suspended stack of phases.
void resumePhases();
@ -203,7 +293,14 @@ struct Statistics
TimeDuration clearMaxGCPauseAccumulator();
TimeDuration getMaxGCPauseSinceClear();
PhaseKind currentPhaseKind() const;
// Return the current phase, suppressing the synthetic PHASE_MUTATOR phase.
Phase currentPhase() {
if (phaseNestingDepth == 0)
return PHASE_NONE;
if (phaseNestingDepth == 1)
return phaseNesting[0] == PHASE_MUTATOR ? PHASE_NONE : phaseNesting[0];
return phaseNesting[phaseNestingDepth - 1];
}
static const size_t MAX_NESTING = 20;
@ -276,7 +373,7 @@ struct Statistics
SliceDataVector slices_;
/* Most recent time when the given phase started. */
EnumeratedArray<Phase, Phase::LIMIT, TimeStamp> phaseStartTimes;
EnumeratedArray<Phase, PHASE_LIMIT, TimeStamp> phaseStartTimes;
/* Bookkeeping for GC timings when timingMutator is true */
TimeStamp timedGCStart;
@ -285,6 +382,9 @@ struct Statistics
/* Total time in a given phase for this GC. */
PhaseTimeTable phaseTimes;
/* Total time in a given phase over all GCs. */
PhaseTimeTable phaseTotals;
/* Number of events of this type for this GC. */
EnumeratedArray<Stat,
STAT_LIMIT,
@ -303,12 +403,13 @@ struct Statistics
/* Phases that are currently on stack. */
Array<Phase, MAX_NESTING> phaseNesting;
size_t phaseNestingDepth;
size_t activeDagSlot;
/*
* Certain phases can interrupt the phase stack, eg callback phases. When
* this happens, we move the suspended phases over to a separate list,
* terminated by a dummy PhaseKind::SUSPENSION phase (so that we can nest
* suspensions by suspending multiple stacks with a PhaseKind::SUSPENSION in
* terminated by a dummy PHASE_SUSPENSION phase (so that we can nest
* suspensions by suspending multiple stacks with a PHASE_SUSPENSION in
* between).
*/
Array<Phase, MAX_NESTING * 3> suspendedPhases;
@ -345,13 +446,9 @@ FOR_EACH_GC_PROFILE_TIME(DEFINE_TIME_KEY)
ProfileDurations totalTimes_;
uint64_t sliceCount_;
Phase currentPhase() const;
Phase lookupChildPhase(PhaseKind phaseKind) const;
void beginGC(JSGCInvocationKind kind);
void endGC();
void recordPhaseBegin(Phase phase);
void recordPhaseEnd(Phase phase);
void gcDuration(TimeDuration* total, TimeDuration* maxPause) const;
@ -391,24 +488,24 @@ struct MOZ_RAII AutoGCSlice
struct MOZ_RAII AutoPhase
{
AutoPhase(Statistics& stats, PhaseKind phaseKind)
: stats(stats), task(nullptr), phaseKind(phaseKind), enabled(true)
AutoPhase(Statistics& stats, Phase phase)
: stats(stats), task(nullptr), phase(phase), enabled(true)
{
stats.beginPhase(phaseKind);
stats.beginPhase(phase);
}
AutoPhase(Statistics& stats, bool condition, PhaseKind phaseKind)
: stats(stats), task(nullptr), phaseKind(phaseKind), enabled(condition)
AutoPhase(Statistics& stats, bool condition, Phase phase)
: stats(stats), task(nullptr), phase(phase), enabled(condition)
{
if (enabled)
stats.beginPhase(phaseKind);
stats.beginPhase(phase);
}
AutoPhase(Statistics& stats, const GCParallelTask& task, PhaseKind phaseKind)
: stats(stats), task(&task), phaseKind(phaseKind), enabled(true)
AutoPhase(Statistics& stats, const GCParallelTask& task, Phase phase)
: stats(stats), task(&task), phase(phase), enabled(true)
{
if (enabled)
stats.beginPhase(phaseKind);
stats.beginPhase(phase);
}
~AutoPhase() {
@ -417,13 +514,13 @@ struct MOZ_RAII AutoPhase
// spent waiting to join with helper threads), but should start
// recording total work on helper threads sometime by calling
// endParallelPhase here if task is nonnull.
stats.endPhase(phaseKind);
stats.endPhase(phase);
}
}
Statistics& stats;
const GCParallelTask* task;
PhaseKind phaseKind;
Phase phase;
bool enabled;
};
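For illustration only (not from this patch): the three constructors above support unconditional, conditional, and task-scoped timing. A rough usage sketch, assuming a stats() accessor, a needMisc flag, and a hypothetical doSweepWork() helper:

{
    gcstats::AutoPhase ap(stats(), gcstats::PHASE_SWEEP);                     // always recorded
    gcstats::AutoPhase apMisc(stats(), needMisc, gcstats::PHASE_SWEEP_MISC);  // only if needMisc
    doSweepWork();  // hypothetical; both phases end when the AutoPhases go out of scope
}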


@ -199,7 +199,7 @@ gc::GCRuntime::startVerifyPreBarriers()
for (auto chunk = allNonEmptyChunks(); !chunk.done(); chunk.next())
chunk->bitmap.clear();
gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::TRACE_HEAP);
gcstats::AutoPhase ap(stats(), gcstats::PHASE_TRACE_HEAP);
const size_t size = 64 * 1024 * 1024;
trc->root = (VerifyNode*)js_malloc(size);
@ -680,7 +680,7 @@ js::CheckGrayMarkingState(JSRuntime* rt)
if (!rt->gc.areGrayBitsValid())
return true;
gcstats::AutoPhase ap(rt->gc.stats(), gcstats::PhaseKind::TRACE_HEAP);
gcstats::AutoPhase ap(rt->gc.stats(), gcstats::PHASE_TRACE_HEAP);
AutoTraceSession session(rt, JS::HeapState::Tracing);
CheckGrayMarkingTracer tracer(rt);
if (!tracer.init())


@ -698,7 +698,7 @@ JSCompartment::traceOutgoingCrossCompartmentWrappers(JSTracer* trc)
/* static */ void
JSCompartment::traceIncomingCrossCompartmentEdgesForZoneGC(JSTracer* trc)
{
gcstats::AutoPhase ap(trc->runtime()->gc.stats(), gcstats::PhaseKind::MARK_CCWS);
gcstats::AutoPhase ap(trc->runtime()->gc.stats(), gcstats::PHASE_MARK_CCWS);
MOZ_ASSERT(JS::CurrentThreadIsHeapMajorCollecting());
for (CompartmentsIter c(trc->runtime(), SkipAtoms); !c.done(); c.next()) {
if (!c->zone()->isCollecting())


@ -1189,7 +1189,7 @@ js::DumpHeap(JSContext* cx, FILE* fp, js::DumpHeapNurseryBehaviour nurseryBehavi
{
JSRuntime* rt = cx->runtime();
js::gc::AutoPrepareForTracing prep(cx, WithAtoms);
gcstats::AutoPhase ap(rt->gc.stats(), gcstats::PhaseKind::TRACE_HEAP);
gcstats::AutoPhase ap(rt->gc.stats(), gcstats::PHASE_TRACE_HEAP);
rt->gc.traceRuntime(&dtrc, prep.session().lock);
}


@ -323,7 +323,7 @@ FOR_EACH_ALLOCKIND(EXPAND_THINGS_PER_ARENA)
struct js::gc::FinalizePhase
{
gcstats::PhaseKind statsPhase;
gcstats::Phase statsPhase;
AllocKinds kinds;
};
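For illustration (not part of the patch): each entry pairs a stats phase with the alloc kinds finalized under it, so a sweep loop over one of the tables below might look roughly like this; the per-kind helper is hypothetical.

// Illustrative sketch: open the paired stats phase, then finalize every
// alloc kind grouped under it.
for (const FinalizePhase& phase : BackgroundFinalizePhases) {
    gcstats::AutoPhase ap(rt->gc.stats(), phase.statsPhase);
    for (auto kind : phase.kinds)
        finalizeArenasOfKind(fop, kind);  // hypothetical per-kind finalization
}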
@ -331,7 +331,7 @@ struct js::gc::FinalizePhase
* Finalization order for objects swept incrementally on the active thread.
*/
static const FinalizePhase ForegroundObjectFinalizePhase = {
gcstats::PhaseKind::SWEEP_OBJECT, {
gcstats::PHASE_SWEEP_OBJECT, {
AllocKind::OBJECT0,
AllocKind::OBJECT2,
AllocKind::OBJECT4,
@ -346,17 +346,17 @@ static const FinalizePhase ForegroundObjectFinalizePhase = {
*/
static const FinalizePhase IncrementalFinalizePhases[] = {
{
gcstats::PhaseKind::SWEEP_STRING, {
gcstats::PHASE_SWEEP_STRING, {
AllocKind::EXTERNAL_STRING
}
},
{
gcstats::PhaseKind::SWEEP_SCRIPT, {
gcstats::PHASE_SWEEP_SCRIPT, {
AllocKind::SCRIPT
}
},
{
gcstats::PhaseKind::SWEEP_JITCODE, {
gcstats::PHASE_SWEEP_JITCODE, {
AllocKind::JITCODE
}
}
@ -367,12 +367,12 @@ static const FinalizePhase IncrementalFinalizePhases[] = {
*/
static const FinalizePhase BackgroundFinalizePhases[] = {
{
gcstats::PhaseKind::SWEEP_SCRIPT, {
gcstats::PHASE_SWEEP_SCRIPT, {
AllocKind::LAZY_SCRIPT
}
},
{
gcstats::PhaseKind::SWEEP_OBJECT, {
gcstats::PHASE_SWEEP_OBJECT, {
AllocKind::FUNCTION,
AllocKind::FUNCTION_EXTENDED,
AllocKind::OBJECT0_BACKGROUND,
@ -384,17 +384,17 @@ static const FinalizePhase BackgroundFinalizePhases[] = {
}
},
{
gcstats::PhaseKind::SWEEP_SCOPE, {
gcstats::PHASE_SWEEP_SCOPE, {
AllocKind::SCOPE,
}
},
{
gcstats::PhaseKind::SWEEP_REGEXP_SHARED, {
gcstats::PHASE_SWEEP_REGEXP_SHARED, {
AllocKind::REGEXP_SHARED,
}
},
{
gcstats::PhaseKind::SWEEP_STRING, {
gcstats::PHASE_SWEEP_STRING, {
AllocKind::FAT_INLINE_STRING,
AllocKind::STRING,
AllocKind::FAT_INLINE_ATOM,
@ -403,7 +403,7 @@ static const FinalizePhase BackgroundFinalizePhases[] = {
}
},
{
gcstats::PhaseKind::SWEEP_SHAPE, {
gcstats::PHASE_SWEEP_SHAPE, {
AllocKind::SHAPE,
AllocKind::ACCESSOR_SHAPE,
AllocKind::BASE_SHAPE,
@ -2137,7 +2137,7 @@ bool
GCRuntime::relocateArenas(Zone* zone, JS::gcreason::Reason reason, Arena*& relocatedListOut,
SliceBudget& sliceBudget)
{
gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::COMPACT_MOVE);
gcstats::AutoPhase ap(stats(), gcstats::PHASE_COMPACT_MOVE);
MOZ_ASSERT(!zone->isPreservingCode());
MOZ_ASSERT(CanRelocateZone(zone));
@ -2465,7 +2465,7 @@ GCRuntime::updateCellPointers(MovingTracer* trc, Zone* zone, AllocKinds kinds, s
for (size_t i = 0; i < bgTaskCount && !bgArenas.done(); i++) {
bgTasks[i].emplace(rt, &bgArenas, lock);
startTask(*bgTasks[i], gcstats::PhaseKind::COMPACT_UPDATE_CELLS, lock);
startTask(*bgTasks[i], gcstats::PHASE_COMPACT_UPDATE_CELLS, lock);
tasksStarted = i;
}
}
@ -2476,7 +2476,7 @@ GCRuntime::updateCellPointers(MovingTracer* trc, Zone* zone, AllocKinds kinds, s
AutoLockHelperThreadState lock;
for (size_t i = 0; i < tasksStarted; i++)
joinTask(*bgTasks[i], gcstats::PhaseKind::COMPACT_UPDATE_CELLS, lock);
joinTask(*bgTasks[i], gcstats::PHASE_COMPACT_UPDATE_CELLS, lock);
}
}
@ -2560,7 +2560,7 @@ GCRuntime::updateZonePointersToRelocatedCells(Zone* zone, AutoLockForExclusiveAc
MOZ_ASSERT(!rt->isBeingDestroyed());
MOZ_ASSERT(zone->isGCCompacting());
gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::COMPACT_UPDATE);
gcstats::AutoPhase ap(stats(), gcstats::PHASE_COMPACT_UPDATE);
MovingTracer trc(rt);
zone->fixupAfterMovingGC();
@ -2578,7 +2578,7 @@ GCRuntime::updateZonePointersToRelocatedCells(Zone* zone, AutoLockForExclusiveAc
// Mark roots to update them.
{
gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::MARK_ROOTS);
gcstats::AutoPhase ap(stats(), gcstats::PHASE_MARK_ROOTS);
WeakMapBase::traceZone(zone, &trc);
for (CompartmentsInZoneIter c(zone); !c.done(); c.next()) {
@ -2603,7 +2603,7 @@ GCRuntime::updateRuntimePointersToRelocatedCells(AutoLockForExclusiveAccess& loc
{
MOZ_ASSERT(!rt->isBeingDestroyed());
gcstats::AutoPhase ap1(stats(), gcstats::PhaseKind::COMPACT_UPDATE);
gcstats::AutoPhase ap1(stats(), gcstats::PHASE_COMPACT_UPDATE);
MovingTracer trc(rt);
JSCompartment::fixupCrossCompartmentWrappersAfterMovingGC(&trc);
@ -2614,7 +2614,7 @@ GCRuntime::updateRuntimePointersToRelocatedCells(AutoLockForExclusiveAccess& loc
// Mark roots to update them.
{
gcstats::AutoPhase ap2(stats(), gcstats::PhaseKind::MARK_ROOTS);
gcstats::AutoPhase ap2(stats(), gcstats::PHASE_MARK_ROOTS);
Debugger::traceAllForMovingGC(&trc);
Debugger::traceIncomingCrossCompartmentEdges(&trc);
@ -3884,7 +3884,7 @@ GCRuntime::beginMarkPhase(JS::gcreason::Reason reason, AutoLockForExclusiveAcces
if (isIncremental) {
js::CancelOffThreadIonCompile(rt, JS::Zone::Mark);
for (GCZonesIter zone(rt); !zone.done(); zone.next()) {
gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::MARK_DISCARD_CODE);
gcstats::AutoPhase ap(stats(), gcstats::PHASE_MARK_DISCARD_CODE);
zone->discardJitCode(rt->defaultFreeOp());
}
}
@ -3899,7 +3899,7 @@ GCRuntime::beginMarkPhase(JS::gcreason::Reason reason, AutoLockForExclusiveAcces
*/
if (invocationKind == GC_SHRINK) {
{
gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::RELAZIFY_FUNCTIONS);
gcstats::AutoPhase ap(stats(), gcstats::PHASE_RELAZIFY_FUNCTIONS);
for (GCZonesIter zone(rt); !zone.done(); zone.next()) {
if (zone->isSelfHostingZone())
continue;
@ -3909,7 +3909,7 @@ GCRuntime::beginMarkPhase(JS::gcreason::Reason reason, AutoLockForExclusiveAcces
}
/* Purge ShapeTables. */
gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::PURGE_SHAPE_TABLES);
gcstats::AutoPhase ap(stats(), gcstats::PHASE_PURGE_SHAPE_TABLES);
for (GCZonesIter zone(rt); !zone.done(); zone.next()) {
if (zone->keepShapeTables() || zone->isSelfHostingZone())
continue;
@ -3939,17 +3939,17 @@ GCRuntime::beginMarkPhase(JS::gcreason::Reason reason, AutoLockForExclusiveAcces
* a GC hazard would exist.
*/
{
gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::PURGE);
gcstats::AutoPhase ap(stats(), gcstats::PHASE_PURGE);
purgeRuntime(lock);
}
/*
* Mark phase.
*/
gcstats::AutoPhase ap1(stats(), gcstats::PhaseKind::MARK);
gcstats::AutoPhase ap1(stats(), gcstats::PHASE_MARK);
{
gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::UNMARK);
gcstats::AutoPhase ap(stats(), gcstats::PHASE_UNMARK);
for (GCZonesIter zone(rt); !zone.done(); zone.next()) {
/* Unmark everything in the zones being collected. */
@ -3964,7 +3964,7 @@ GCRuntime::beginMarkPhase(JS::gcreason::Reason reason, AutoLockForExclusiveAcces
traceRuntimeForMajorGC(gcmarker, lock);
gcstats::AutoPhase ap2(stats(), gcstats::PhaseKind::MARK_ROOTS);
gcstats::AutoPhase ap2(stats(), gcstats::PHASE_MARK_ROOTS);
if (isIncremental) {
bufferGrayRoots();
@ -3977,7 +3977,7 @@ GCRuntime::beginMarkPhase(JS::gcreason::Reason reason, AutoLockForExclusiveAcces
void
GCRuntime::markCompartments()
{
gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::MARK_COMPARTMENTS);
gcstats::AutoPhase ap(stats(), gcstats::PHASE_MARK_COMPARTMENTS);
/*
* This code ensures that if a compartment is "dead", then it will be
@ -4045,7 +4045,7 @@ GCRuntime::markCompartments()
template <class ZoneIterT>
void
GCRuntime::markWeakReferences(gcstats::PhaseKind phase)
GCRuntime::markWeakReferences(gcstats::Phase phase)
{
MOZ_ASSERT(marker.isDrained());
@ -4082,14 +4082,14 @@ GCRuntime::markWeakReferences(gcstats::PhaseKind phase)
}
void
GCRuntime::markWeakReferencesInCurrentGroup(gcstats::PhaseKind phase)
GCRuntime::markWeakReferencesInCurrentGroup(gcstats::Phase phase)
{
markWeakReferences<GCSweepGroupIter>(phase);
}
template <class ZoneIterT, class CompartmentIterT>
void
GCRuntime::markGrayReferences(gcstats::PhaseKind phase)
GCRuntime::markGrayReferences(gcstats::Phase phase)
{
gcstats::AutoPhase ap(stats(), phase);
if (hasBufferedGrayRoots()) {
@ -4105,19 +4105,19 @@ GCRuntime::markGrayReferences(gcstats::PhaseKind phase)
}
void
GCRuntime::markGrayReferencesInCurrentGroup(gcstats::PhaseKind phase)
GCRuntime::markGrayReferencesInCurrentGroup(gcstats::Phase phase)
{
markGrayReferences<GCSweepGroupIter, GCCompartmentGroupIter>(phase);
}
void
GCRuntime::markAllWeakReferences(gcstats::PhaseKind phase)
GCRuntime::markAllWeakReferences(gcstats::Phase phase)
{
markWeakReferences<GCZonesIter>(phase);
}
void
GCRuntime::markAllGrayReferences(gcstats::PhaseKind phase)
GCRuntime::markAllGrayReferences(gcstats::Phase phase)
{
markGrayReferences<GCZonesIter, GCCompartmentsIter>(phase);
}
@ -4245,10 +4245,10 @@ js::gc::MarkingValidator::nonIncrementalMark(AutoLockForExclusiveAccess& lock)
gc->incrementalState = State::MarkRoots;
{
gcstats::AutoPhase ap(gc->stats(), gcstats::PhaseKind::MARK);
gcstats::AutoPhase ap(gc->stats(), gcstats::PHASE_MARK);
{
gcstats::AutoPhase ap(gc->stats(), gcstats::PhaseKind::UNMARK);
gcstats::AutoPhase ap(gc->stats(), gcstats::PHASE_UNMARK);
for (GCZonesIter zone(runtime); !zone.done(); zone.next())
WeakMapBase::unmarkZone(zone);
@ -4269,10 +4269,10 @@ js::gc::MarkingValidator::nonIncrementalMark(AutoLockForExclusiveAccess& lock)
gc->incrementalState = State::Sweep;
{
gcstats::AutoPhase ap1(gc->stats(), gcstats::PhaseKind::SWEEP);
gcstats::AutoPhase ap2(gc->stats(), gcstats::PhaseKind::SWEEP_MARK);
gcstats::AutoPhase ap1(gc->stats(), gcstats::PHASE_SWEEP);
gcstats::AutoPhase ap2(gc->stats(), gcstats::PHASE_SWEEP_MARK);
gc->markAllWeakReferences(gcstats::PhaseKind::SWEEP_MARK_WEAK);
gc->markAllWeakReferences(gcstats::PHASE_SWEEP_MARK_WEAK);
/* Update zone state for gray marking. */
for (GCZonesIter zone(runtime); !zone.done(); zone.next()) {
@ -4281,8 +4281,8 @@ js::gc::MarkingValidator::nonIncrementalMark(AutoLockForExclusiveAccess& lock)
}
gc->marker.setMarkColorGray();
gc->markAllGrayReferences(gcstats::PhaseKind::SWEEP_MARK_GRAY);
gc->markAllWeakReferences(gcstats::PhaseKind::SWEEP_MARK_GRAY_WEAK);
gc->markAllGrayReferences(gcstats::PHASE_SWEEP_MARK_GRAY);
gc->markAllWeakReferences(gcstats::PHASE_SWEEP_MARK_GRAY_WEAK);
/* Restore zone state. */
for (GCZonesIter zone(runtime); !zone.done(); zone.next()) {
@ -4770,9 +4770,9 @@ MarkIncomingCrossCompartmentPointers(JSRuntime* rt, const uint32_t color)
{
MOZ_ASSERT(color == BLACK || color == GRAY);
static const gcstats::PhaseKind statsPhases[] = {
gcstats::PhaseKind::SWEEP_MARK_INCOMING_BLACK,
gcstats::PhaseKind::SWEEP_MARK_INCOMING_GRAY
static const gcstats::Phase statsPhases[] = {
gcstats::PHASE_SWEEP_MARK_INCOMING_BLACK,
gcstats::PHASE_SWEEP_MARK_INCOMING_GRAY
};
gcstats::AutoPhase ap1(rt->gc.stats(), statsPhases[color]);
@ -4896,7 +4896,7 @@ js::NotifyGCPostSwap(JSObject* a, JSObject* b, unsigned removedFlags)
void
GCRuntime::endMarkingSweepGroup()
{
gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::SWEEP_MARK);
gcstats::AutoPhase ap(stats(), gcstats::PHASE_SWEEP_MARK);
/*
* Mark any incoming black pointers from previously swept compartments
@ -4904,7 +4904,7 @@ GCRuntime::endMarkingSweepGroup()
* black by the action of UnmarkGray.
*/
MarkIncomingCrossCompartmentPointers(rt, BLACK);
markWeakReferencesInCurrentGroup(gcstats::PhaseKind::SWEEP_MARK_WEAK);
markWeakReferencesInCurrentGroup(gcstats::PHASE_SWEEP_MARK_WEAK);
/*
* Change state of current group to MarkGray to restrict marking to this
@ -4922,8 +4922,8 @@ GCRuntime::endMarkingSweepGroup()
MarkIncomingCrossCompartmentPointers(rt, GRAY);
/* Mark gray roots and mark transitively inside the current compartment group. */
markGrayReferencesInCurrentGroup(gcstats::PhaseKind::SWEEP_MARK_GRAY);
markWeakReferencesInCurrentGroup(gcstats::PhaseKind::SWEEP_MARK_GRAY_WEAK);
markGrayReferencesInCurrentGroup(gcstats::PHASE_SWEEP_MARK_GRAY);
markWeakReferencesInCurrentGroup(gcstats::PHASE_SWEEP_MARK_GRAY_WEAK);
/* Restore marking state. */
for (GCSweepGroupIter zone(rt); !zone.done(); zone.next()) {
@ -5063,7 +5063,7 @@ SweepUniqueIds(JSRuntime* runtime)
}
void
GCRuntime::startTask(GCParallelTask& task, gcstats::PhaseKind phase, AutoLockHelperThreadState& locked)
GCRuntime::startTask(GCParallelTask& task, gcstats::Phase phase, AutoLockHelperThreadState& locked)
{
if (!task.startWithLockHeld(locked)) {
AutoUnlockHelperThreadState unlock(locked);
@ -5073,7 +5073,7 @@ GCRuntime::startTask(GCParallelTask& task, gcstats::PhaseKind phase, AutoLockHel
}
void
GCRuntime::joinTask(GCParallelTask& task, gcstats::PhaseKind phase, AutoLockHelperThreadState& locked)
GCRuntime::joinTask(GCParallelTask& task, gcstats::Phase phase, AutoLockHelperThreadState& locked)
{
gcstats::AutoPhase ap(stats(), task, phase);
task.joinWithLockHeld(locked);
@ -5086,13 +5086,13 @@ GCRuntime::sweepDebuggerOnMainThread(FreeOp* fop)
// This can modify weakmaps and so must happen before weakmap sweeping.
Debugger::sweepAll(fop);
gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::SWEEP_COMPARTMENTS);
gcstats::AutoPhase ap(stats(), gcstats::PHASE_SWEEP_COMPARTMENTS);
// Sweep debug environment information. This performs lookups in the Zone's
// unique IDs table and so must not happen in parallel with sweeping that
// table.
{
gcstats::AutoPhase ap2(stats(), gcstats::PhaseKind::SWEEP_MISC);
gcstats::AutoPhase ap2(stats(), gcstats::PHASE_SWEEP_MISC);
for (GCCompartmentGroupIter c(rt); !c.done(); c.next())
c->sweepDebugEnvironments();
}
@ -5100,7 +5100,7 @@ GCRuntime::sweepDebuggerOnMainThread(FreeOp* fop)
// Sweep breakpoints. This is done here to be with the other debug sweeping,
// although note that it can cause JIT code to be patched.
{
gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::SWEEP_BREAKPOINT);
gcstats::AutoPhase ap(stats(), gcstats::PHASE_SWEEP_BREAKPOINT);
for (GCSweepGroupIter zone(rt); !zone.done(); zone.next())
zone->sweepBreakpoints(fop);
}
@ -5110,7 +5110,7 @@ void
GCRuntime::sweepJitDataOnMainThread(FreeOp* fop)
{
{
gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::SWEEP_JIT_DATA);
gcstats::AutoPhase ap(stats(), gcstats::PHASE_SWEEP_JIT_DATA);
// Cancel any active or pending off thread compilations.
js::CancelOffThreadIonCompile(rt, JS::Zone::Sweep);
@ -5132,14 +5132,14 @@ GCRuntime::sweepJitDataOnMainThread(FreeOp* fop)
}
{
gcstats::AutoPhase apdc(stats(), gcstats::PhaseKind::SWEEP_DISCARD_CODE);
gcstats::AutoPhase apdc(stats(), gcstats::PHASE_SWEEP_DISCARD_CODE);
for (GCSweepGroupIter zone(rt); !zone.done(); zone.next())
zone->discardJitCode(fop);
}
{
gcstats::AutoPhase ap1(stats(), gcstats::PhaseKind::SWEEP_TYPES);
gcstats::AutoPhase ap2(stats(), gcstats::PhaseKind::SWEEP_TYPES_BEGIN);
gcstats::AutoPhase ap1(stats(), gcstats::PHASE_SWEEP_TYPES);
gcstats::AutoPhase ap2(stats(), gcstats::PHASE_SWEEP_TYPES_BEGIN);
for (GCSweepGroupIter zone(rt); !zone.done(); zone.next())
zone->beginSweepTypes(fop, releaseObservedTypes && !zone->isPreservingCode());
}
@ -5193,11 +5193,11 @@ class MOZ_RAII js::gc::AutoRunParallelTask : public GCParallelTask
using Func = void (*)(JSRuntime*);
Func func_;
gcstats::PhaseKind phase_;
gcstats::Phase phase_;
AutoLockHelperThreadState& lock_;
public:
AutoRunParallelTask(JSRuntime* rt, Func func, gcstats::PhaseKind phase,
AutoRunParallelTask(JSRuntime* rt, Func func, gcstats::Phase phase,
AutoLockHelperThreadState& lock)
: GCParallelTask(rt),
func_(func),
@ -5248,14 +5248,14 @@ GCRuntime::beginSweepingSweepGroup()
FreeOp fop(rt);
{
AutoPhase ap(stats(), PhaseKind::FINALIZE_START);
AutoPhase ap(stats(), PHASE_FINALIZE_START);
callFinalizeCallbacks(&fop, JSFINALIZE_GROUP_PREPARE);
{
AutoPhase ap2(stats(), PhaseKind::WEAK_ZONES_CALLBACK);
AutoPhase ap2(stats(), PHASE_WEAK_ZONES_CALLBACK);
callWeakPointerZonesCallbacks();
}
{
AutoPhase ap2(stats(), PhaseKind::WEAK_COMPARTMENT_CALLBACK);
AutoPhase ap2(stats(), PHASE_WEAK_COMPARTMENT_CALLBACK);
for (GCSweepGroupIter zone(rt); !zone.done(); zone.next()) {
for (CompartmentsInZoneIter comp(zone); !comp.done(); comp.next())
callWeakPointerCompartmentCallbacks(comp);
@ -5271,22 +5271,22 @@ GCRuntime::beginSweepingSweepGroup()
Maybe<AutoRunParallelTask> sweepAtoms;
if (sweepingAtoms)
sweepAtoms.emplace(rt, SweepAtoms, PhaseKind::SWEEP_ATOMS, lock);
sweepAtoms.emplace(rt, SweepAtoms, PHASE_SWEEP_ATOMS, lock);
AutoPhase ap(stats(), PhaseKind::SWEEP_COMPARTMENTS);
AutoPhase ap(stats(), PHASE_SWEEP_COMPARTMENTS);
AutoSCC scc(stats(), sweepGroupIndex);
AutoRunParallelTask sweepCCWrappers(rt, SweepCCWrappers, PhaseKind::SWEEP_CC_WRAPPER, lock);
AutoRunParallelTask sweepObjectGroups(rt, SweepObjectGroups, PhaseKind::SWEEP_TYPE_OBJECT, lock);
AutoRunParallelTask sweepRegExps(rt, SweepRegExps, PhaseKind::SWEEP_REGEXP, lock);
AutoRunParallelTask sweepMisc(rt, SweepMisc, PhaseKind::SWEEP_MISC, lock);
AutoRunParallelTask sweepCompTasks(rt, SweepCompressionTasks, PhaseKind::SWEEP_COMPRESSION, lock);
AutoRunParallelTask sweepWeakMaps(rt, SweepWeakMaps, PhaseKind::SWEEP_WEAKMAPS, lock);
AutoRunParallelTask sweepUniqueIds(rt, SweepUniqueIds, PhaseKind::SWEEP_UNIQUEIDS, lock);
AutoRunParallelTask sweepCCWrappers(rt, SweepCCWrappers, PHASE_SWEEP_CC_WRAPPER, lock);
AutoRunParallelTask sweepObjectGroups(rt, SweepObjectGroups, PHASE_SWEEP_TYPE_OBJECT, lock);
AutoRunParallelTask sweepRegExps(rt, SweepRegExps, PHASE_SWEEP_REGEXP, lock);
AutoRunParallelTask sweepMisc(rt, SweepMisc, PHASE_SWEEP_MISC, lock);
AutoRunParallelTask sweepCompTasks(rt, SweepCompressionTasks, PHASE_SWEEP_COMPRESSION, lock);
AutoRunParallelTask sweepWeakMaps(rt, SweepWeakMaps, PHASE_SWEEP_WEAKMAPS, lock);
AutoRunParallelTask sweepUniqueIds(rt, SweepUniqueIds, PHASE_SWEEP_UNIQUEIDS, lock);
WeakCacheTaskVector sweepCacheTasks = PrepareWeakCacheTasks(rt);
for (auto& task : sweepCacheTasks)
startTask(task, PhaseKind::SWEEP_WEAK_CACHES, lock);
startTask(task, PHASE_SWEEP_WEAK_CACHES, lock);
{
AutoUnlockHelperThreadState unlock(lock);
@ -5294,7 +5294,7 @@ GCRuntime::beginSweepingSweepGroup()
}
for (auto& task : sweepCacheTasks)
joinTask(task, PhaseKind::SWEEP_WEAK_CACHES, lock);
joinTask(task, PHASE_SWEEP_WEAK_CACHES, lock);
}
// Queue all GC things in all zones for sweeping, either on the foreground
@ -5322,7 +5322,7 @@ void
GCRuntime::endSweepingSweepGroup()
{
{
gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::FINALIZE_END);
gcstats::AutoPhase ap(stats(), gcstats::PHASE_FINALIZE_END);
FreeOp fop(rt);
callFinalizeCallbacks(&fop, JSFINALIZE_GROUP_END);
}
@ -5371,7 +5371,7 @@ GCRuntime::beginSweepPhase(JS::gcreason::Reason reason, AutoLockForExclusiveAcce
computeNonIncrementalMarkingForValidation(lock);
gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::SWEEP);
gcstats::AutoPhase ap(stats(), gcstats::PHASE_SWEEP);
sweepOnBackgroundThread =
reason != JS::gcreason::DESTROY_RUNTIME && !TraceEnabled() && CanUseExtraThreads();
@ -5422,7 +5422,7 @@ ArenaLists::foregroundFinalize(FreeOp* fop, AllocKind thingKind, SliceBudget& sl
}
IncrementalProgress
GCRuntime::drainMarkStack(SliceBudget& sliceBudget, gcstats::PhaseKind phase)
GCRuntime::drainMarkStack(SliceBudget& sliceBudget, gcstats::Phase phase)
{
/* Run a marking slice and return whether the stack is now empty. */
gcstats::AutoPhase ap(stats(), phase);
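The rest of the body lies outside this hunk; as a hedged sketch of the pattern the comment describes (not necessarily the exact code), the helper would forward the budget to the marker and translate the boolean result:

// Illustrative sketch only: drain under the slice budget and map the result
// onto IncrementalProgress.
return marker.drainMarkStack(sliceBudget) ? Finished : NotFinished;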
@ -5480,8 +5480,8 @@ GCRuntime::sweepTypeInformation(GCRuntime* gc, FreeOp* fop, Zone* zone, SliceBud
MOZ_ASSERT(kind == AllocKind::LIMIT);
gcstats::AutoPhase ap1(gc->stats(), gcstats::PhaseKind::SWEEP_COMPARTMENTS);
gcstats::AutoPhase ap2(gc->stats(), gcstats::PhaseKind::SWEEP_TYPES);
gcstats::AutoPhase ap1(gc->stats(), gcstats::PHASE_SWEEP_COMPARTMENTS);
gcstats::AutoPhase ap2(gc->stats(), gcstats::PHASE_SWEEP_TYPES);
ArenaLists& al = zone->arenas;
@ -5495,7 +5495,7 @@ GCRuntime::sweepTypeInformation(GCRuntime* gc, FreeOp* fop, Zone* zone, SliceBud
// Finish sweeping type information in the zone.
{
gcstats::AutoPhase ap(gc->stats(), gcstats::PhaseKind::SWEEP_TYPES_END);
gcstats::AutoPhase ap(gc->stats(), gcstats::PHASE_SWEEP_TYPES_END);
zone->types.endSweep(gc->rt);
}
@ -5541,7 +5541,7 @@ GCRuntime::sweepShapeTree(GCRuntime* gc, FreeOp* fop, Zone* zone, SliceBudget& b
MOZ_ASSERT(kind == AllocKind::LIMIT);
gcstats::AutoPhase ap(gc->stats(), gcstats::PhaseKind::SWEEP_SHAPE);
gcstats::AutoPhase ap(gc->stats(), gcstats::PHASE_SWEEP_SHAPE);
ArenaLists& al = zone->arenas;
@ -5598,10 +5598,10 @@ GCRuntime::performSweepActions(SliceBudget& budget, AutoLockForExclusiveAccess&
{
AutoSetThreadIsSweeping threadIsSweeping;
gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::SWEEP);
gcstats::AutoPhase ap(stats(), gcstats::PHASE_SWEEP);
FreeOp fop(rt);
if (drainMarkStack(budget, gcstats::PhaseKind::SWEEP_MARK) == NotFinished)
if (drainMarkStack(budget, gcstats::PHASE_SWEEP_MARK) == NotFinished)
return NotFinished;
for (;;) {
@ -5664,7 +5664,7 @@ GCRuntime::endSweepPhase(bool destroyingRuntime, AutoLockForExclusiveAccess& loc
{
AutoSetThreadIsSweeping threadIsSweeping;
gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::SWEEP);
gcstats::AutoPhase ap(stats(), gcstats::PHASE_SWEEP);
FreeOp fop(rt);
MOZ_ASSERT_IF(destroyingRuntime, !sweepOnBackgroundThread);
@ -5683,7 +5683,7 @@ GCRuntime::endSweepPhase(bool destroyingRuntime, AutoLockForExclusiveAccess& loc
}
{
gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::DESTROY);
gcstats::AutoPhase ap(stats(), gcstats::PHASE_DESTROY);
/*
* Sweep script filenames after sweeping functions in the generic loop
@ -5701,7 +5701,7 @@ GCRuntime::endSweepPhase(bool destroyingRuntime, AutoLockForExclusiveAccess& loc
}
{
gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::FINALIZE_END);
gcstats::AutoPhase ap(stats(), gcstats::PHASE_FINALIZE_END);
callFinalizeCallbacks(&fop, JSFINALIZE_COLLECTION_END);
if (allCCVisibleZonesWereCollected())
@ -5728,7 +5728,7 @@ GCRuntime::beginCompactPhase()
{
MOZ_ASSERT(!isBackgroundSweeping());
gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::COMPACT);
gcstats::AutoPhase ap(stats(), gcstats::PHASE_COMPACT);
MOZ_ASSERT(zonesToMaybeCompact.ref().isEmpty());
for (GCZonesIter zone(rt); !zone.done(); zone.next()) {
@ -5747,7 +5747,7 @@ GCRuntime::compactPhase(JS::gcreason::Reason reason, SliceBudget& sliceBudget,
assertBackgroundSweepingFinished();
MOZ_ASSERT(startedCompacting);
gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::COMPACT);
gcstats::AutoPhase ap(stats(), gcstats::PHASE_COMPACT);
// TODO: JSScripts can move. If the sampler interrupts the GC in the
// middle of relocating an arena, invalid JSScript pointers may be
@ -5965,7 +5965,7 @@ GCRuntime::resetIncrementalGC(gc::AbortReason reason, AutoLockForExclusiveAccess
isCompacting = wasCompacting;
{
gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::WAIT_BACKGROUND_THREAD);
gcstats::AutoPhase ap(stats(), gcstats::PHASE_WAIT_BACKGROUND_THREAD);
rt->gc.waitBackgroundSweepOrAllocEnd();
}
break;
@ -5973,7 +5973,7 @@ GCRuntime::resetIncrementalGC(gc::AbortReason reason, AutoLockForExclusiveAccess
case State::Finalize: {
{
gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::WAIT_BACKGROUND_THREAD);
gcstats::AutoPhase ap(stats(), gcstats::PHASE_WAIT_BACKGROUND_THREAD);
rt->gc.waitBackgroundSweepOrAllocEnd();
}
@ -6169,7 +6169,7 @@ GCRuntime::incrementalCollectSlice(SliceBudget& budget, JS::gcreason::Reason rea
isIncremental = false;
}
if (drainMarkStack(budget, gcstats::PhaseKind::MARK) == NotFinished)
if (drainMarkStack(budget, gcstats::PHASE_MARK) == NotFinished)
break;
MOZ_ASSERT(marker.isDrained());
@ -6230,7 +6230,7 @@ GCRuntime::incrementalCollectSlice(SliceBudget& budget, JS::gcreason::Reason rea
case State::Finalize:
{
gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::WAIT_BACKGROUND_THREAD);
gcstats::AutoPhase ap(stats(), gcstats::PHASE_WAIT_BACKGROUND_THREAD);
// Yield until background finalization is done.
if (!budget.isUnlimited()) {
@ -6246,8 +6246,8 @@ GCRuntime::incrementalCollectSlice(SliceBudget& budget, JS::gcreason::Reason rea
{
// Re-sweep the zones list, now that background finalization is
// finished to actually remove and free dead zones.
gcstats::AutoPhase ap1(stats(), gcstats::PhaseKind::SWEEP);
gcstats::AutoPhase ap2(stats(), gcstats::PhaseKind::DESTROY);
gcstats::AutoPhase ap1(stats(), gcstats::PHASE_SWEEP);
gcstats::AutoPhase ap2(stats(), gcstats::PHASE_DESTROY);
AutoSetThreadIsSweeping threadIsSweeping;
FreeOp fop(rt);
sweepZoneGroups(&fop, destroyingRuntime);
@ -6280,7 +6280,7 @@ GCRuntime::incrementalCollectSlice(SliceBudget& budget, JS::gcreason::Reason rea
case State::Decommit:
{
gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::WAIT_BACKGROUND_THREAD);
gcstats::AutoPhase ap(stats(), gcstats::PHASE_WAIT_BACKGROUND_THREAD);
// Yield until background decommit is done.
if (!budget.isUnlimited() && decommitTask.isRunning())
@ -6474,7 +6474,7 @@ GCRuntime::gcCycle(bool nonincrementalByAPI, SliceBudget& budget, JS::gcreason::
TlsContext.get()->verifyIsSafeToGC();
{
gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::WAIT_BACKGROUND_THREAD);
gcstats::AutoPhase ap(stats(), gcstats::PHASE_WAIT_BACKGROUND_THREAD);
// Background finalization and decommit are finished by definition
// before we can start a new GC session.
@ -6688,7 +6688,7 @@ GCRuntime::collect(bool nonincrementalByAPI, SliceBudget budget, JS::gcreason::R
#ifdef JS_GC_ZEAL
if (rt->hasZealMode(ZealMode::CheckHeapAfterGC)) {
gcstats::AutoPhase ap(rt->gc.stats(), gcstats::PhaseKind::TRACE_HEAP);
gcstats::AutoPhase ap(rt->gc.stats(), gcstats::PHASE_TRACE_HEAP);
CheckHeapAfterGC(rt);
}
#endif
@ -6866,7 +6866,7 @@ GCRuntime::onOutOfMallocMemory(const AutoLockGC& lock)
}
void
GCRuntime::minorGC(JS::gcreason::Reason reason, gcstats::PhaseKind phase)
GCRuntime::minorGC(JS::gcreason::Reason reason, gcstats::Phase phase)
{
MOZ_ASSERT(!JS::CurrentThreadIsHeapBusy());


@ -739,11 +739,3 @@ if CONFIG['CLANG_CXX']:
if CONFIG['OS_ARCH'] == 'WINNT':
DEFINES['NOMINMAX'] = True
# Generate GC statistics phase data.
GENERATED_FILES += ['gc/StatsPhasesGenerated.h']
StatsPhasesGeneratedHeader = GENERATED_FILES['gc/StatsPhasesGenerated.h']
StatsPhasesGeneratedHeader.script = 'gc/GenerateStatsPhases.py:generateHeader'
GENERATED_FILES += ['gc/StatsPhasesGenerated.cpp']
StatsPhasesGeneratedCpp = GENERATED_FILES['gc/StatsPhasesGenerated.cpp']
StatsPhasesGeneratedCpp.script = 'gc/GenerateStatsPhases.py:generateCpp'