Backout CGC (Bug 650161, ae2eec4a74ea) to deal with a couple crashes that cropped up with wider exposure.

Terrence Cole 2015-01-16 14:25:58 -08:00
parent 64793c37ee
commit 180ec33276
18 changed files with 128 additions and 4 deletions

View File

@@ -3054,6 +3054,18 @@ MOZ_ARG_WITH_STRING(wrap-malloc,
[ --with-wrap-malloc=DIR Location of malloc wrapper library],
WRAP_LDFLAGS="${WRAP_LDFLAGS} $withval")
dnl ========================================================
dnl = Use compacting GC
dnl ========================================================
dnl Compact the heap by moving GC things when doing a shrinking collection.
MOZ_ARG_ENABLE_BOOL(gccompacting,
[ --enable-gccompacting Compact the heap by moving GC things],
JSGC_COMPACTING=1,
JSGC_COMPACTING= )
if test -n "$JSGC_COMPACTING"; then
AC_DEFINE(JSGC_COMPACTING)
fi
dnl ========================================================
dnl = Use a smaller chunk size for GC chunks
dnl ========================================================

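For orientation: this configure block is what every hunk below keys off. Passing --enable-gccompacting triggers AC_DEFINE(JSGC_COMPACTING), and the backout re-wraps all compacting-GC code in that macro, leaving the feature compiled out by default. A standalone sketch of the gate, compilable with or without -DJSGC_COMPACTING (toy program, not SpiderMonkey code):

    #include <cstdio>

    int main() {
    #ifdef JSGC_COMPACTING
        std::puts("compacting GC compiled in");   // --enable-gccompacting builds
    #else
        std::puts("compacting GC compiled out");  // the default unless the flag is passed
    #endif
        return 0;
    }
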
View File

@@ -143,6 +143,7 @@ void
CheckHashTablesAfterMovingGC(JSRuntime *rt);
#endif
#ifdef JSGC_COMPACTING
struct MovingTracer : JSTracer {
explicit MovingTracer(JSRuntime *rt) : JSTracer(rt, Visit, TraceWeakMapKeysValues) {}
@@ -151,6 +152,7 @@ struct MovingTracer : JSTracer {
return trc->callback == Visit;
}
};
#endif
} /* namespace gc */
} /* namespace js */

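MovingTracer identifies itself by comparing the tracer's callback pointer against its own Visit function, sidestepping RTTI. A minimal self-contained model of that identification trick (ToyTracer and friends are hypothetical stand-ins, not the JSTracer API):

    struct ToyTracer {
        using Callback = void (*)(ToyTracer *, void **);
        Callback callback;
        explicit ToyTracer(Callback cb) : callback(cb) {}
    };

    struct MovingToyTracer : ToyTracer {
        MovingToyTracer() : ToyTracer(Visit) {}
        static void Visit(ToyTracer *, void **) { /* would update a moved pointer */ }
        // Identity test by callback comparison, mirroring IsMovingTracer above.
        static bool IsMovingTracer(ToyTracer *trc) { return trc->callback == Visit; }
    };

    int main() {
        MovingToyTracer moving;
        ToyTracer other(nullptr);
        return (MovingToyTracer::IsMovingTracer(&moving) &&
                !MovingToyTracer::IsMovingTracer(&other)) ? 0 : 1;
    }
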
View File

@@ -32,8 +32,11 @@ struct FinalizePhase;
class MarkingValidator;
struct AutoPrepareForTracing;
class AutoTraceSession;
#ifdef JSGC_COMPACTING
struct ArenasToUpdate;
struct MovingTracer;
#endif
class ChunkPool
{
@@ -292,7 +295,11 @@ class GCRuntime
bool isHeapMajorCollecting() { return heapState == js::MajorCollecting; }
bool isHeapMinorCollecting() { return heapState == js::MinorCollecting; }
bool isHeapCollecting() { return isHeapMajorCollecting() || isHeapMinorCollecting(); }
#ifdef JSGC_COMPACTING
bool isHeapCompacting() { return isHeapMajorCollecting() && state() == COMPACT; }
#else
bool isHeapCompacting() { return false; }
#endif
bool triggerGC(JS::gcreason::Reason reason);
void maybeAllocTriggerZoneGC(Zone *zone, const AutoLockGC &lock);
@@ -428,9 +435,11 @@ class GCRuntime
void disableGenerationalGC();
void enableGenerationalGC();
#ifdef JSGC_COMPACTING
void disableCompactingGC();
void enableCompactingGC();
bool isCompactingGCEnabled();
#endif
void setGrayRootsTracer(JSTraceDataOp traceOp, void *data);
bool addBlackRootsTracer(JSTraceDataOp traceOp, void *data);
@@ -592,6 +601,7 @@ class GCRuntime
void assertBackgroundSweepingFinished();
bool shouldCompact();
bool compactPhase(bool lastGC);
#ifdef JSGC_COMPACTING
void sweepTypesAfterCompacting(Zone *zone);
void sweepZoneAfterCompacting(Zone *zone);
ArenaHeader *relocateArenas();
@@ -603,6 +613,7 @@ class GCRuntime
#ifdef DEBUG
void protectRelocatedArenas(ArenaHeader *relocatedList);
void unprotectRelocatedArenas(ArenaHeader *relocatedList);
#endif
#endif
void finishCollection();
@@ -799,11 +810,13 @@ class GCRuntime
*/
unsigned generationalDisabled;
#ifdef JSGC_COMPACTING
/*
* Some code cannot tolerate compacting GC so it can be disabled with this
* counter.
*/
unsigned compactingDisabled;
#endif
/*
* This is true if we are in the middle of a brain transplant (e.g.,
@@ -904,7 +917,9 @@ class GCRuntime
size_t noGCOrAllocationCheck;
#ifdef JSGC_COMPACTING
ArenaHeader* relocatedArenasToRelease;
#endif
#endif

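Worth noting the fallback style in this header: rather than #ifdef-ing every caller, non-compacting builds get an isHeapCompacting() that is a constant false, so guarded branches fold away at compile time. A self-contained illustration of the technique (ToyGC is a hypothetical stand-in, not GCRuntime):

    #include <cstdio>

    struct ToyGC {
        bool majorCollecting = false;
    #ifdef JSGC_COMPACTING
        bool compacting = false;
        bool isHeapCompacting() const { return majorCollecting && compacting; }
    #else
        // Constant false: "if (gc.isHeapCompacting())" branches in callers
        // are folded away entirely in non-compacting builds.
        bool isHeapCompacting() const { return false; }
    #endif
    };

    int main() {
        ToyGC gc;
        if (gc.isHeapCompacting())
            std::puts("updating pointers to moved cells");
        else
            std::puts("no compaction in progress");
        return 0;
    }
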
View File

@@ -642,8 +642,10 @@ struct ArenaHeader
void unmarkAll();
#ifdef JSGC_COMPACTING
size_t countUsedCells();
size_t countFreeCells();
#endif
};
static_assert(ArenaZoneOffset == offsetof(ArenaHeader, zone),
"The hardcoded API zone offset must match the actual offset.");

View File

@@ -161,14 +161,18 @@ CheckMarkedThing(JSTracer *trc, T **thingp)
T *thing = *thingp;
MOZ_ASSERT(*thingp);
#ifdef JSGC_COMPACTING
thing = MaybeForwarded(thing);
#endif
/* This function uses data that's not available in the nursery. */
if (IsInsideNursery(thing))
return;
#ifdef JSGC_COMPACTING
MOZ_ASSERT_IF(!MovingTracer::IsMovingTracer(trc) && !Nursery::IsMinorCollectionTracer(trc),
!IsForwarded(*thingp));
#endif
/*
* Permanent atoms are not associated with this runtime, but will be ignored
@@ -180,8 +184,13 @@ CheckMarkedThing(JSTracer *trc, T **thingp)
Zone *zone = thing->zoneFromAnyThread();
JSRuntime *rt = trc->runtime();
#ifdef JSGC_COMPACTING
MOZ_ASSERT_IF(!MovingTracer::IsMovingTracer(trc), CurrentThreadCanAccessZone(zone));
MOZ_ASSERT_IF(!MovingTracer::IsMovingTracer(trc), CurrentThreadCanAccessRuntime(rt));
#else
MOZ_ASSERT(CurrentThreadCanAccessZone(zone));
MOZ_ASSERT(CurrentThreadCanAccessRuntime(rt));
#endif
MOZ_ASSERT(zone->runtimeFromAnyThread() == trc->runtime());
MOZ_ASSERT(trc->hasTracingDetails());
@@ -428,8 +437,10 @@ IsMarkedFromAnyThread(T **thingp)
Zone *zone = (*thingp)->asTenured().zoneFromAnyThread();
if (!zone->isCollectingFromAnyThread() || zone->isGCFinished())
return true;
#ifdef JSGC_COMPACTING
if (zone->isGCCompacting() && IsForwarded(*thingp))
*thingp = Forwarded(*thingp);
#endif
return (*thingp)->asTenured().isMarked();
}
@@ -470,10 +481,12 @@ IsAboutToBeFinalizedFromAnyThread(T **thingp)
return false;
return !thing->asTenured().isMarked();
}
#ifdef JSGC_COMPACTING
else if (zone->isGCCompacting() && IsForwarded(thing)) {
*thingp = Forwarded(thing);
return false;
}
#endif
return false;
}
@@ -491,10 +504,11 @@ UpdateIfRelocated(JSRuntime *rt, T **thingp)
return *thingp;
}
#ifdef JSGC_COMPACTING
Zone *zone = (*thingp)->zone();
if (zone->isGCCompacting() && IsForwarded(*thingp))
*thingp = Forwarded(*thingp);
#endif
return *thingp;
}

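The IsForwarded/Forwarded/MaybeForwarded calls in this file rely on the standard moving-GC trick: when a cell is relocated, a forwarding pointer is written into the old location, and readers chase it to the new one. A self-contained toy of that idiom (Cell's layout here is illustrative; SpiderMonkey's real relocation overlay differs):

    #include <cassert>
    #include <cstdint>

    struct Cell {
        uintptr_t forward = 0;   // nonzero => cell moved; value is the new address
        bool isForwarded() const { return forward != 0; }
        Cell *forwarded() const { return reinterpret_cast<Cell *>(forward); }
    };

    // Follow the forwarding pointer if one was installed, as MaybeForwarded does.
    Cell *maybeForwarded(Cell *c) {
        return c->isForwarded() ? c->forwarded() : c;
    }

    int main() {
        Cell oldCell, newCell;
        assert(maybeForwarded(&oldCell) == &oldCell);          // not moved yet
        oldCell.forward = reinterpret_cast<uintptr_t>(&newCell);
        assert(maybeForwarded(&oldCell) == &newCell);          // moved: chase it
        return 0;
    }
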
View File

@@ -31,6 +31,9 @@
/* Define to 1 if SpiderMonkey should use small chunks. */
#undef JS_GC_SMALL_CHUNK_SIZE
/* Define to 1 if SpiderMonkey should use Compacting GC. */
#undef JSGC_COMPACTING
/* Define to 1 if the <endian.h> header is present and
useable. See jscpucfg.h. */
#undef JS_HAVE_ENDIAN_H

View File

@@ -66,6 +66,10 @@ checkSize(JS::HandleObject map, uint32_t expected)
}
END_TEST(testWeakMap_basicOperations)
// TODO: this test stores object pointers in a private slot which is not marked
// and so doesn't work with compacting GC.
#ifndef JSGC_COMPACTING
BEGIN_TEST(testWeakMap_keyDelegates)
{
JS_SetGCParameter(rt, JSGC_MODE, JSGC_MODE_INCREMENTAL);
@@ -249,3 +253,5 @@ checkSize(JS::HandleObject map, uint32_t expected)
return true;
}
END_TEST(testWeakMap_keyDelegates)
#endif

View File

@@ -649,6 +649,8 @@ JSCompartment::sweepCrossCompartmentWrappers()
}
}
#ifdef JSGC_COMPACTING
void JSCompartment::fixupAfterMovingGC()
{
fixupGlobal();
@@ -665,6 +667,8 @@ JSCompartment::fixupGlobal()
global_.set(MaybeForwarded(global));
}
#endif // JSGC_COMPACTING
void
JSCompartment::purge()
{

View File

@@ -398,10 +398,12 @@ struct JSCompartment
void purge();
void clearTables();
#ifdef JSGC_COMPACTING
void fixupInitialShapeTable();
void fixupNewTypeObjectTable(js::types::NewTypeObjectTable &table);
void fixupAfterMovingGC();
void fixupGlobal();
#endif
bool hasObjectMetadataCallback() const { return objectMetadataCallback; }
void setObjectMetadataCallback(js::ObjectMetadataCallback callback);

View File

@@ -1129,7 +1129,9 @@ GCRuntime::GCRuntime(JSRuntime *rt) :
sliceBudget(SliceBudget::Unlimited),
incrementalAllowed(true),
generationalDisabled(0),
#ifdef JSGC_COMPACTING
compactingDisabled(0),
#endif
manipulatingDeadZones(false),
objectsMarkedInDeadZones(0),
poked(false),
@@ -1149,7 +1151,9 @@ GCRuntime::GCRuntime(JSRuntime *rt) :
#ifdef DEBUG
inUnsafeRegion(0),
noGCOrAllocationCheck(0),
#ifdef JSGC_COMPACTING
relocatedArenasToRelease(nullptr),
#endif
#endif
lock(nullptr),
lockOwner(nullptr),
@@ -1943,9 +1947,15 @@ ArenaLists::allocateFromArenaInner(JS::Zone *zone, ArenaHeader *aheader, AllocKi
bool
GCRuntime::shouldCompact()
{
#ifdef JSGC_COMPACTING
return invocationKind == GC_SHRINK && isCompactingGCEnabled();
#else
return false;
#endif
}
#ifdef JSGC_COMPACTING
void
GCRuntime::disableCompactingGC()
{
@@ -2704,10 +2714,12 @@ GCRuntime::releaseRelocatedArenasWithoutUnlocking(ArenaHeader *relocatedList, co
}
}
#endif // JSGC_COMPACTING
void
GCRuntime::releaseHeldRelocatedArenas()
{
#ifdef DEBUG
#if defined(JSGC_COMPACTING) && defined(DEBUG)
// In debug mode we don't release relocated arenas straight away. Instead
// we protect them and hold onto them until the next GC sweep phase to catch
// any pointers to them that didn't get forwarded.
@@ -5479,6 +5491,9 @@ GCRuntime::endSweepPhase(bool lastGC)
bool
GCRuntime::compactPhase(bool lastGC)
{
#ifndef JSGC_COMPACTING
MOZ_CRASH();
#else
gcstats::AutoPhase ap(stats, gcstats::PHASE_COMPACT);
if (isIncremental) {
@@ -5543,6 +5558,8 @@ GCRuntime::compactPhase(bool lastGC)
}
}
#endif
#endif // JSGC_COMPACTING
return true;
}
@@ -5702,6 +5719,7 @@ GCRuntime::resetIncrementalGC(const char *reason)
break;
}
#ifdef JSGC_COMPACTING
case COMPACT: {
{
gcstats::AutoPhase ap(stats, gcstats::PHASE_WAIT_BACKGROUND_THREAD);
@@ -5717,6 +5735,7 @@ GCRuntime::resetIncrementalGC(const char *reason)
invocationKind = oldInvocationKind;
break;
}
#endif
default:
MOZ_CRASH("Invalid incremental GC state");
@@ -6404,7 +6423,7 @@ GCRuntime::onOutOfMallocMemory(const AutoLockGC &lock)
{
// Release any relocated arenas we may be holding on to, without releasing
// the GC lock.
#ifdef DEBUG
#if defined(JSGC_COMPACTING) && defined(DEBUG)
unprotectRelocatedArenas(relocatedArenasToRelease);
releaseRelocatedArenasWithoutUnlocking(relocatedArenasToRelease, lock);
relocatedArenasToRelease = nullptr;
@@ -7125,13 +7144,19 @@ JS::IsIncrementalGCEnabled(JSRuntime *rt)
JS_PUBLIC_API(void)
JS::DisableCompactingGC(JSRuntime *rt)
{
#ifdef JSGC_COMPACTING
rt->gc.disableCompactingGC();
#endif
}
JS_PUBLIC_API(bool)
JS::IsCompactingGCEnabled(JSRuntime *rt)
{
#ifdef JSGC_COMPACTING
return rt->gc.isCompactingGCEnabled();
#else
return false;
#endif
}
JS_PUBLIC_API(bool)

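Two hunks in this file pair up: shouldCompact() is hard-wired false in non-compacting builds, so compactPhase() keeps only a MOZ_CRASH() body there, turning an impossible call into a loud failure instead of a silent no-op. A toy model of that defensive pairing (abort stands in for MOZ_CRASH; all names are illustrative, not the GCRuntime API):

    #include <cstdlib>

    enum InvocationKind { GC_NORMAL, GC_SHRINK };

    static bool shouldCompact(InvocationKind kind) {
    #ifdef JSGC_COMPACTING
        return kind == GC_SHRINK;   // compaction only piggybacks on shrinking GCs
    #else
        return false;               // feature compiled out: never compact
    #endif
    }

    static bool compactPhase() {
    #ifndef JSGC_COMPACTING
        std::abort();               // unreachable if callers honor shouldCompact()
    #else
        // relocate arenas, update all pointers, then release the old arenas
        return true;
    #endif
    }

    int main() {
        if (shouldCompact(GC_SHRINK))
            compactPhase();
        return 0;
    }
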
View File

@@ -454,9 +454,11 @@ class ArenaList {
return *this;
}
#ifdef JSGC_COMPACTING
ArenaHeader *removeRemainingArenas(ArenaHeader **arenap, const AutoLockGC &lock);
ArenaHeader *pickArenasToRelocate(JSRuntime *runtime);
ArenaHeader *relocateArenas(ArenaHeader *toRelocate, ArenaHeader *relocated);
#endif
};
/*
@@ -783,7 +785,9 @@ class ArenaLists
MOZ_ASSERT(freeLists[kind].isEmpty());
}
#ifdef JSGC_COMPACTING
ArenaHeader *relocateArenas(ArenaHeader *relocatedList);
#endif
void queueForegroundObjectsForSweep(FreeOp *fop);
void queueForegroundThingsForSweep(FreeOp *fop);
@@ -1266,7 +1270,9 @@ inline void
CheckGCThingAfterMovingGC(T *t)
{
MOZ_ASSERT_IF(t, !IsInsideNursery(t));
#ifdef JSGC_COMPACTING
MOZ_ASSERT_IF(t, !IsForwarded(t));
#endif
}
inline void
@@ -1423,11 +1429,16 @@ struct AutoDisableProxyCheck
struct AutoDisableCompactingGC
{
#ifdef JSGC_COMPACTING
explicit AutoDisableCompactingGC(JSRuntime *rt);
~AutoDisableCompactingGC();
private:
gc::GCRuntime &gc;
#else
explicit AutoDisableCompactingGC(JSRuntime *rt) {}
~AutoDisableCompactingGC() {}
#endif
};
void

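AutoDisableCompactingGC shows the usual RAII-plus-counter shape: a real guard in compacting builds, empty inline bodies otherwise, so callers never need their own #ifdefs, and nested guards compose because disabling is a count, not a flag. A self-contained model under those assumptions (toy GC type, not the GCRuntime API):

    struct ToyGC {
        unsigned compactingDisabled = 0;   // mirrors GCRuntime::compactingDisabled
        bool isCompactingEnabled() const { return compactingDisabled == 0; }
    };

    class AutoDisableCompacting {
      public:
        explicit AutoDisableCompacting(ToyGC &gc) : gc(gc) { ++gc.compactingDisabled; }
        ~AutoDisableCompacting() { --gc.compactingDisabled; }
      private:
        ToyGC &gc;
    };

    int main() {
        ToyGC gc;
        {
            AutoDisableCompacting outer(gc);   // compaction off in this scope
            AutoDisableCompacting inner(gc);   // nesting is safe: it's a counter
        }
        return gc.isCompactingEnabled() ? 0 : 1;  // counter back to zero here
    }
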
View File

@@ -4891,6 +4891,8 @@ JSCompartment::sweepNewTypeObjectTable(NewTypeObjectTable &table)
}
}
#ifdef JSGC_COMPACTING
void
JSCompartment::fixupNewTypeObjectTable(NewTypeObjectTable &table)
{
@@ -4964,6 +4966,8 @@ TypeObject::fixupAfterMovingGC()
}
}
#endif // JSGC_COMPACTING
#ifdef JSGC_HASH_TABLE_CHECKS
void

View File

@@ -925,7 +925,10 @@ class TypeNewScript
void trace(JSTracer *trc);
void sweep();
#ifdef JSGC_COMPACTING
void fixupAfterMovingGC();
#endif
void registerNewObject(PlainObject *res);
void unregisterNewObject(PlainObject *res);
@@ -1238,7 +1241,9 @@ struct TypeObject : public gc::TenuredCell
flags_ |= generation << OBJECT_FLAG_GENERATION_SHIFT;
}
#ifdef JSGC_COMPACTING
void fixupAfterMovingGC();
#endif
size_t sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) const;

View File

@@ -235,6 +235,8 @@ Shape::finalize(FreeOp *fop)
fop->delete_(kids.toHash());
}
#ifdef JSGC_COMPACTING
void
Shape::fixupDictionaryShapeAfterMovingGC()
{
@@ -320,6 +322,8 @@ Shape::fixupAfterMovingGC()
fixupShapeTreeAfterMovingGC();
}
#endif // JSGC_COMPACTING
void
ShapeGetterSetterRef::mark(JSTracer *trc)
{

View File

@@ -20,7 +20,8 @@
#include "js/TypeDecls.h"
#if (defined(JS_GC_ZEAL)) || defined(DEBUG)
#if (defined(JS_GC_ZEAL)) || \
(defined(JSGC_COMPACTING) && defined(DEBUG))
# define JSGC_HASH_TABLE_CHECKS
#endif

View File

@@ -224,12 +224,14 @@ GetShapeAttributes(JSObject *obj, Shape *shape)
return shape->attributes();
}
#ifdef JSGC_COMPACTING
inline void
BaseShape::fixupAfterMovingGC()
{
if (hasTable())
table().fixupAfterMovingGC();
}
#endif
} /* namespace js */

View File

@@ -256,6 +256,7 @@ ShapeTable::search(jsid id, bool adding)
MOZ_CRASH("Shape::search failed to find an expected entry.");
}
#ifdef JSGC_COMPACTING
void
ShapeTable::fixupAfterMovingGC()
{
@@ -267,6 +268,7 @@ ShapeTable::fixupAfterMovingGC()
entry.setPreservingCollision(Forwarded(shape));
}
}
#endif
bool
ShapeTable::change(int log2Delta, ExclusiveContext *cx)
@@ -1691,6 +1693,7 @@ JSCompartment::sweepInitialShapeTable()
}
}
#ifdef JSGC_COMPACTING
void
JSCompartment::fixupInitialShapeTable()
{
@@ -1729,6 +1732,7 @@ JSCompartment::fixupInitialShapeTable()
}
}
}
#endif // JSGC_COMPACTING
void
AutoRooterGetterSetter::Inner::trace(JSTracer *trc)

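The fixup*Table routines in this file share one loop shape: walk each entry, and if its shape was relocated, chase the forwarding pointer and store the entry back (preserving the collision bit in the real ShapeTable). A self-contained toy of that sweep (flat vector instead of a hash table; every name here is hypothetical):

    #include <vector>

    struct ToyShape {
        ToyShape *forward = nullptr;   // set by the collector when the cell moves
    };

    static void fixupTableAfterMovingGC(std::vector<ToyShape *> &table) {
        for (ToyShape *&entry : table) {
            if (entry && entry->forward)
                entry = entry->forward;   // rewrite the stale pointer in place
        }
    }

    int main() {
        ToyShape oldShape, newShape;
        oldShape.forward = &newShape;
        std::vector<ToyShape *> table = { &oldShape, nullptr };
        fixupTableAfterMovingGC(table);
        return table[0] == &newShape ? 0 : 1;
    }
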
View File

@@ -227,8 +227,10 @@ class ShapeTable {
bool change(int log2Delta, ExclusiveContext *cx);
Entry &search(jsid id, bool adding);
#ifdef JSGC_COMPACTING
/* Update entries whose shapes have been moved */
void fixupAfterMovingGC();
#endif
private:
Entry &getEntry(uint32_t i) const {
@@ -528,7 +530,9 @@ class BaseShape : public gc::TenuredCell
gc::MarkObject(trc, &metadata, "metadata");
}
#ifdef JSGC_COMPACTING
void fixupAfterMovingGC();
#endif
private:
static void staticAsserts() {
@@ -1057,7 +1061,9 @@ class Shape : public gc::TenuredCell
inline Shape *search(ExclusiveContext *cx, jsid id);
inline Shape *searchLinear(jsid id);
#ifdef JSGC_COMPACTING
void fixupAfterMovingGC();
#endif
/* For JIT usage */
static inline size_t offsetOfBase() { return offsetof(Shape, base_); }
@@ -1065,8 +1071,10 @@ class Shape : public gc::TenuredCell
static inline uint32_t fixedSlotsMask() { return FIXED_SLOTS_MASK; }
private:
#ifdef JSGC_COMPACTING
void fixupDictionaryShapeAfterMovingGC();
void fixupShapeTreeAfterMovingGC();
#endif
static void staticAsserts() {
JS_STATIC_ASSERT(offsetof(Shape, base_) == offsetof(js::shadow::Shape, base));