Bug 1107349 - Always build in generational GC support; r=jonco

Terrence Cole 2014-12-04 09:45:05 -08:00
parent 10cb7f0ea7
commit d0e0d193b8
85 changed files with 26 additions and 496 deletions
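
Every hunk below follows the same pattern: code that was previously compiled only when JSGC_GENERATIONAL was defined becomes unconditional, and the #else fallbacks for non-generational builds are deleted. A minimal before/after sketch of that pattern (illustrative only, not copied verbatim from any single file; HeapCellPostBarrier is the barrier entry point declared in js/RootingAPI.h, as shown in the hunk for that header below):

#include "js/RootingAPI.h"

// Before this patch: the post barrier compiled away entirely in
// non-generational builds.
static void WriteBarrierPostOld(js::gc::Cell **cellp)
{
#ifdef JSGC_GENERATIONAL
    JS::HeapCellPostBarrier(cellp);
#endif
}

// After this patch: the barrier is always built; whether anything is recorded
// is decided at runtime by checking whether the cell lives in the nursery.
static void WriteBarrierPostNew(js::gc::Cell **cellp)
{
    JS::HeapCellPostBarrier(cellp);
}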

View File

@ -677,11 +677,7 @@ pref("javascript.options.mem.gc_low_frequency_heap_growth", 120);
pref("javascript.options.mem.high_water_mark", 6);
pref("javascript.options.mem.gc_allocation_threshold_mb", 1);
pref("javascript.options.mem.gc_decommit_threshold_mb", 1);
#ifdef JSGC_GENERATIONAL
pref("javascript.options.mem.gc_min_empty_chunk_count", 1);
#else
pref("javascript.options.mem.gc_min_empty_chunk_count", 0);
#endif
pref("javascript.options.mem.gc_max_empty_chunk_count", 2);
// Show/Hide scrollbars when active/inactive

View File

@ -65,5 +65,4 @@ MOZ_JSDOWNLOADS=1
MOZ_BUNDLED_FONTS=1
export JSGC_GENERATIONAL=1
export JS_GC_SMALL_CHUNK_SIZE=1

View File

@ -62,5 +62,3 @@ MOZ_PAY=1
MOZ_ACTIVITIES=1
MOZ_JSDOWNLOADS=1
MOZ_WEBM_ENCODER=1
# Enable generational GC on desktop.
export JSGC_GENERATIONAL=1

View File

@ -101,7 +101,6 @@ struct GCMethods<nsXBLMaybeCompiled<UncompiledT> >
return function.IsCompiled() && Base::needsPostBarrier(function.GetJSFunction());
}
#ifdef JSGC_GENERATIONAL
static void postBarrier(nsXBLMaybeCompiled<UncompiledT>* functionp)
{
Base::postBarrier(&functionp->UnsafeGetJSFunction());
@ -111,7 +110,6 @@ struct GCMethods<nsXBLMaybeCompiled<UncompiledT> >
{
Base::relocate(&functionp->UnsafeGetJSFunction());
}
#endif
};
template <class UncompiledT>

View File

@ -349,7 +349,7 @@ WasIncrementalGC(JSRuntime *rt);
class JS_FRIEND_API(AutoDisableGenerationalGC)
{
js::gc::GCRuntime *gc;
#if defined(JSGC_GENERATIONAL) && defined(JS_GC_ZEAL)
#ifdef JS_GC_ZEAL
bool restartVerifier;
#endif
@ -499,7 +499,6 @@ ExposeGCThingToActiveJS(JS::GCCellPtr thing)
MOZ_ASSERT(thing.kind() != JSTRACE_SHAPE);
JS::shadow::Runtime *rt = GetGCThingRuntime(thing.asCell());
#ifdef JSGC_GENERATIONAL
/*
* GC things residing in the nursery cannot be gray: they have no mark bits.
* All live objects in the nursery are moved to tenured at the beginning of
@ -507,7 +506,6 @@ ExposeGCThingToActiveJS(JS::GCCellPtr thing)
*/
if (IsInsideNursery(thing.asCell()))
return;
#endif
if (IsIncrementalBarrierNeededOnTenuredGCThing(rt, thing))
JS::IncrementalReferenceBarrier(thing);
else if (JS::GCThingIsMarkedGray(thing.asCell()))
@ -518,13 +516,11 @@ static MOZ_ALWAYS_INLINE void
MarkGCThingAsLive(JSRuntime *aRt, JS::GCCellPtr thing)
{
JS::shadow::Runtime *rt = JS::shadow::Runtime::asShadowRuntime(aRt);
#ifdef JSGC_GENERATIONAL
/*
* Any object in the nursery will not be freed during any GC running at that time.
*/
if (IsInsideNursery(thing.asCell()))
return;
#endif
if (IsIncrementalBarrierNeededOnTenuredGCThing(rt, thing))
JS::IncrementalReferenceBarrier(thing);
}
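
With the guard gone, JS::AutoDisableGenerationalGC (from the GC API hunk above) is always available, and only its post-barrier-verifier bookkeeping remains behind JS_GC_ZEAL. A hypothetical embedder-side sketch of its RAII use; the helper function and its purpose are invented for illustration:

#include "js/GCAPI.h"

// Hypothetical helper: run code with the nursery disabled so freshly
// allocated objects are tenured and will not move underneath raw pointers.
static void RunWithoutNursery(JSRuntime *rt)
{
    JS::AutoDisableGenerationalGC noGGC(rt);  // evicts and disables the nursery
    // ... allocate objects and hold raw pointers to them safely here ...
}                                             // destructor re-enables generational GC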

View File

@ -341,7 +341,6 @@ GetGCThingArena(void *thing)
MOZ_ALWAYS_INLINE bool
IsInsideNursery(const js::gc::Cell *cell)
{
#ifdef JSGC_GENERATIONAL
if (!cell)
return false;
uintptr_t addr = uintptr_t(cell);
@ -350,9 +349,6 @@ IsInsideNursery(const js::gc::Cell *cell)
uint32_t location = *reinterpret_cast<uint32_t *>(addr);
MOZ_ASSERT(location != 0);
return location & ChunkLocationAnyNursery;
#else
return false;
#endif
}
} /* namespace gc */
@ -364,9 +360,7 @@ static MOZ_ALWAYS_INLINE Zone *
GetTenuredGCThingZone(void *thing)
{
MOZ_ASSERT(thing);
#ifdef JSGC_GENERATIONAL
MOZ_ASSERT(!js::gc::IsInsideNursery((js::gc::Cell *)thing));
#endif
return js::gc::GetGCThingArena(thing)->zone;
}
@ -377,7 +371,6 @@ static MOZ_ALWAYS_INLINE bool
GCThingIsMarkedGray(void *thing)
{
MOZ_ASSERT(thing);
#ifdef JSGC_GENERATIONAL
/*
* GC things residing in the nursery cannot be gray: they have no mark bits.
* All live objects in the nursery are moved to tenured at the beginning of
@ -385,7 +378,6 @@ GCThingIsMarkedGray(void *thing)
*/
if (js::gc::IsInsideNursery((js::gc::Cell *)thing))
return false;
#endif
uintptr_t *word, mask;
js::gc::GetGCThingMarkWordAndMask(thing, js::gc::GRAY, &word, &mask);
return *word & mask;
@ -400,9 +392,7 @@ static MOZ_ALWAYS_INLINE bool
IsIncrementalBarrierNeededOnTenuredGCThing(JS::shadow::Runtime *rt, const JS::GCCellPtr thing)
{
MOZ_ASSERT(thing);
#ifdef JSGC_GENERATIONAL
MOZ_ASSERT(!js::gc::IsInsideNursery(thing.asCell()));
#endif
if (!rt->needsIncrementalBarrier())
return false;
JS::Zone *zone = JS::GetTenuredGCThingZone(thing.asCell());

View File

@ -182,10 +182,8 @@ template <> struct GCMethods<jsid>
static jsid initial() { return JSID_VOID; }
static bool poisoned(jsid id) { return IsPoisonedId(id); }
static bool needsPostBarrier(jsid id) { return false; }
#ifdef JSGC_GENERATIONAL
static void postBarrier(jsid *idp) {}
static void relocate(jsid *idp) {}
#endif
};
#undef id

View File

@ -167,10 +167,8 @@ struct JS_PUBLIC_API(NullPtr)
static void * const constNullValue;
};
#ifdef JSGC_GENERATIONAL
JS_FRIEND_API(void) HeapCellPostBarrier(js::gc::Cell **cellp);
JS_FRIEND_API(void) HeapCellRelocate(js::gc::Cell **cellp);
#endif
#ifdef JS_DEBUG
/*
@ -284,16 +282,12 @@ class Heap : public js::HeapBase<T>
}
void post() {
#ifdef JSGC_GENERATIONAL
MOZ_ASSERT(js::GCMethods<T>::needsPostBarrier(ptr));
js::GCMethods<T>::postBarrier(&ptr);
#endif
}
void relocate() {
#ifdef JSGC_GENERATIONAL
js::GCMethods<T>::relocate(&ptr);
#endif
}
enum {
@ -656,10 +650,8 @@ struct GCMethods<T *>
static T *initial() { return nullptr; }
static bool poisoned(T *v) { return JS::IsPoisonedPtr(v); }
static bool needsPostBarrier(T *v) { return false; }
#ifdef JSGC_GENERATIONAL
static void postBarrier(T **vp) {}
static void relocate(T **vp) {}
#endif
};
template <>
@ -676,14 +668,12 @@ struct GCMethods<JSObject *>
static bool needsPostBarrier(JSObject *v) {
return v != nullptr && gc::IsInsideNursery(reinterpret_cast<gc::Cell *>(v));
}
#ifdef JSGC_GENERATIONAL
static void postBarrier(JSObject **vp) {
JS::HeapCellPostBarrier(reinterpret_cast<js::gc::Cell **>(vp));
}
static void relocate(JSObject **vp) {
JS::HeapCellRelocate(reinterpret_cast<js::gc::Cell **>(vp));
}
#endif
};
template <>
@ -694,14 +684,12 @@ struct GCMethods<JSFunction *>
static bool needsPostBarrier(JSFunction *v) {
return v != nullptr && gc::IsInsideNursery(reinterpret_cast<gc::Cell *>(v));
}
#ifdef JSGC_GENERATIONAL
static void postBarrier(JSFunction **vp) {
JS::HeapCellPostBarrier(reinterpret_cast<js::gc::Cell **>(vp));
}
static void relocate(JSFunction **vp) {
JS::HeapCellRelocate(reinterpret_cast<js::gc::Cell **>(vp));
}
#endif
};
#ifdef JS_DEBUG

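One visible effect of the RootingAPI.h changes above: JS::Heap<T> now always routes writes through GCMethods<T>::postBarrier instead of compiling the barrier out. A hypothetical usage sketch; the struct and function are invented for illustration:

#include "js/RootingAPI.h"

// Hypothetical embedder-side structure holding a GC-visible JSObject pointer.
struct ObjectCache
{
    JS::Heap<JSObject*> obj;  // assignment runs GCMethods<JSObject*>::postBarrier
};

static void Remember(ObjectCache &cache, JSObject *newObj)
{
    // If newObj is nursery-allocated, HeapCellPostBarrier records &cache.obj in
    // the store buffer so a minor GC can update it when newObj is tenured.
    cache.obj = newObj;
}
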
View File

@ -1630,12 +1630,10 @@ SameType(const Value &lhs, const Value &rhs)
/************************************************************************/
#ifdef JSGC_GENERATIONAL
namespace JS {
JS_PUBLIC_API(void) HeapValuePostBarrier(Value *valuep);
JS_PUBLIC_API(void) HeapValueRelocate(Value *valuep);
}
#endif
namespace js {
@ -1659,10 +1657,8 @@ template <> struct GCMethods<JS::Value>
static bool needsPostBarrier(const JS::Value &v) {
return v.isObject() && gc::IsInsideNursery(reinterpret_cast<gc::Cell*>(&v.toObject()));
}
#ifdef JSGC_GENERATIONAL
static void postBarrier(JS::Value *v) { JS::HeapValuePostBarrier(v); }
static void relocate(JS::Value *v) { JS::HeapValueRelocate(v); }
#endif
};
template <class Outer> class MutableValueOperations;

View File

@ -1113,7 +1113,6 @@ MapObject::mark(JSTracer *trc, JSObject *obj)
}
}
#ifdef JSGC_GENERATIONAL
struct UnbarrieredHashPolicy {
typedef Value Lookup;
static HashNumber hash(const Lookup &v) { return v.asRawBits(); }
@ -1139,30 +1138,25 @@ class OrderedHashTableRef : public gc::BufferableRef
table->rekeyOneEntry(prior, key);
}
};
#endif
inline static void
WriteBarrierPost(JSRuntime *rt, ValueMap *map, const Value &key)
{
#ifdef JSGC_GENERATIONAL
typedef OrderedHashMap<Value, Value, UnbarrieredHashPolicy, RuntimeAllocPolicy> UnbarrieredMap;
if (MOZ_UNLIKELY(key.isObject() && IsInsideNursery(&key.toObject()))) {
rt->gc.storeBuffer.putGeneric(OrderedHashTableRef<UnbarrieredMap>(
reinterpret_cast<UnbarrieredMap *>(map), key));
}
#endif
}
inline static void
WriteBarrierPost(JSRuntime *rt, ValueSet *set, const Value &key)
{
#ifdef JSGC_GENERATIONAL
typedef OrderedHashSet<Value, UnbarrieredHashPolicy, RuntimeAllocPolicy> UnbarrieredSet;
if (MOZ_UNLIKELY(key.isObject() && IsInsideNursery(&key.toObject()))) {
rt->gc.storeBuffer.putGeneric(OrderedHashTableRef<UnbarrieredSet>(
reinterpret_cast<UnbarrieredSet *>(set), key));
}
#endif
}
bool

View File

@ -69,6 +69,9 @@ GetBuildConfiguration(JSContext *cx, unsigned argc, jsval *vp)
if (!JS_SetProperty(cx, info, "incremental-gc", TrueHandleValue))
return false;
if (!JS_SetProperty(cx, info, "generational-gc", TrueHandleValue))
return false;
RootedValue value(cx);
#ifdef DEBUG
value = BooleanValue(true);
@ -150,14 +153,6 @@ GetBuildConfiguration(JSContext *cx, unsigned argc, jsval *vp)
if (!JS_SetProperty(cx, info, "dtrace", value))
return false;
#ifdef JSGC_GENERATIONAL
value = BooleanValue(true);
#else
value = BooleanValue(false);
#endif
if (!JS_SetProperty(cx, info, "generational-gc", value))
return false;
#ifdef MOZ_VALGRIND
value = BooleanValue(true);
#else
@ -264,12 +259,10 @@ static bool
MinorGC(JSContext *cx, unsigned argc, jsval *vp)
{
CallArgs args = CallArgsFromVp(argc, vp);
#ifdef JSGC_GENERATIONAL
if (args.get(0) == BooleanValue(true))
cx->runtime()->gc.storeBuffer.setAboutToOverflow();
cx->minorGC(JS::gcreason::API);
#endif
args.rval().setUndefined();
return true;
}

View File

@ -2398,7 +2398,6 @@ LazyArrayBufferTable::addBuffer(JSContext *cx, InlineTransparentTypedObject *obj
return false;
}
#ifdef JSGC_GENERATIONAL
MOZ_ASSERT(!IsInsideNursery(buffer));
if (IsInsideNursery(obj)) {
// Strip the barriers from the type before inserting into the store
@ -2415,7 +2414,6 @@ LazyArrayBufferTable::addBuffer(JSContext *cx, InlineTransparentTypedObject *obj
// updated after the typed object moves.
cx->runtime()->gc.storeBuffer.putWholeCellFromMainThread(buffer);
}
#endif
return true;
}

View File

@ -3132,23 +3132,6 @@ MOZ_ARG_WITH_STRING(wrap-malloc,
[ --with-wrap-malloc=DIR Location of malloc wrapper library],
WRAP_LDFLAGS="${WRAP_LDFLAGS} $withval")
dnl ========================================================
dnl = Use generational GC
dnl ========================================================
dnl Use generational GC by default in all shell builds. The top-level mozilla
dnl configure.in will configure SpiderMonkey with --disable-gcgenerational as
dnl needed on a per-platform basis.
JSGC_GENERATIONAL=1
MOZ_ARG_DISABLE_BOOL(gcgenerational,
[ --disable-gcgenerational Disable generational GC],
JSGC_GENERATIONAL= ,
JSGC_GENERATIONAL=1 )
if test -n "$JSGC_GENERATIONAL"; then
AC_DEFINE(JSGC_GENERATIONAL)
fi
JSGC_GENERATIONAL_CONFIGURED=$JSGC_GENERATIONAL
AC_SUBST(JSGC_GENERATIONAL_CONFIGURED)
dnl ========================================================
dnl = Use compacting GC
dnl ========================================================

View File

@ -10,9 +10,7 @@
#include "NamespaceImports.h"
#include "gc/Heap.h"
#ifdef JSGC_GENERATIONAL
# include "gc/StoreBuffer.h"
#endif
#include "gc/StoreBuffer.h"
#include "js/HashTable.h"
#include "js/Id.h"
#include "js/RootingAPI.h"
@ -349,36 +347,30 @@ struct InternalGCMethods<Value>
}
static void postBarrier(Value *vp) {
#ifdef JSGC_GENERATIONAL
MOZ_ASSERT(!CurrentThreadIsIonCompiling());
if (vp->isObject()) {
gc::StoreBuffer *sb = reinterpret_cast<gc::Cell *>(&vp->toObject())->storeBuffer();
if (sb)
sb->putValueFromAnyThread(vp);
}
#endif
}
static void postBarrierRelocate(Value *vp) {
#ifdef JSGC_GENERATIONAL
MOZ_ASSERT(!CurrentThreadIsIonCompiling());
if (vp->isObject()) {
gc::StoreBuffer *sb = reinterpret_cast<gc::Cell *>(&vp->toObject())->storeBuffer();
if (sb)
sb->putRelocatableValueFromAnyThread(vp);
}
#endif
}
static void postBarrierRemove(Value *vp) {
#ifdef JSGC_GENERATIONAL
MOZ_ASSERT(vp);
MOZ_ASSERT(vp->isMarkable());
MOZ_ASSERT(!CurrentThreadIsIonCompiling());
JSRuntime *rt = static_cast<js::gc::Cell *>(vp->toGCThing())->runtimeFromAnyThread();
JS::shadow::Runtime *shadowRuntime = JS::shadow::Runtime::asShadowRuntime(rt);
shadowRuntime->gcStoreBufferPtr()->removeRelocatableValueFromAnyThread(vp);
#endif
}
static void readBarrier(const Value &v) { ValueReadBarrier(v); }
@ -672,17 +664,13 @@ class RelocatablePtr : public BarrieredBase<T>
protected:
void post() {
#ifdef JSGC_GENERATIONAL
MOZ_ASSERT(GCMethods<T>::needsPostBarrier(this->value));
InternalGCMethods<T>::postBarrierRelocate(&this->value);
#endif
}
void relocate() {
#ifdef JSGC_GENERATIONAL
MOZ_ASSERT(GCMethods<T>::needsPostBarrier(this->value));
InternalGCMethods<T>::postBarrierRemove(&this->value);
#endif
}
};
@ -916,13 +904,11 @@ class HeapSlot : public BarrieredBase<Value>
private:
void post(NativeObject *owner, Kind kind, uint32_t slot, const Value &target) {
MOZ_ASSERT(preconditionForWriteBarrierPost(owner, kind, slot, target));
#ifdef JSGC_GENERATIONAL
if (this->value.isObject()) {
gc::Cell *cell = reinterpret_cast<gc::Cell *>(&this->value.toObject());
if (cell->storeBuffer())
cell->storeBuffer()->putSlotFromAnyThread(owner, kind, slot, 1);
}
#endif
}
};

View File

@ -10,10 +10,6 @@
#ifdef JSGC_FJGENERATIONAL
#ifndef JSGC_GENERATIONAL
#error "JSGC_GENERATIONAL is required for the ForkJoinNursery"
#endif
#include "jsalloc.h"
#include "jspubtd.h"

View File

@ -10,13 +10,9 @@
#include "jsgc.h"
#include "gc/Heap.h"
#ifdef JSGC_GENERATIONAL
# include "gc/Nursery.h"
#endif
#include "gc/Nursery.h"
#include "gc/Statistics.h"
#ifdef JSGC_GENERATIONAL
# include "gc/StoreBuffer.h"
#endif
#include "gc/StoreBuffer.h"
#include "gc/Tracer.h"
/* Perform validation of incremental marking in debug builds but not on B2G. */
@ -374,9 +370,7 @@ class GCRuntime
allocTask.cancel(GCParallelTask::CancelAndWait);
}
#ifdef JSGC_GENERATIONAL
void requestMinorGC(JS::gcreason::Reason reason);
#endif
#ifdef DEBUG
@ -643,10 +637,8 @@ class GCRuntime
/* List of compartments and zones (protected by the GC lock). */
js::gc::ZoneVector zones;
#ifdef JSGC_GENERATIONAL
js::Nursery nursery;
js::gc::StoreBuffer storeBuffer;
#endif
js::gcstats::Statistics stats;
@ -708,10 +700,8 @@ class GCRuntime
volatile uintptr_t majorGCRequested;
JS::gcreason::Reason majorGCTriggerReason;
#ifdef JSGC_GENERATIONAL
bool minorGCRequested;
JS::gcreason::Reason minorGCTriggerReason;
#endif
/* Incremented at the start of every major GC. */
uint64_t majorGCNumber;

View File

@ -23,9 +23,7 @@
#include "jsinferinlines.h"
#include "jsobjinlines.h"
#ifdef JSGC_GENERATIONAL
# include "gc/Nursery-inl.h"
#endif
#include "gc/Nursery-inl.h"
#include "vm/String-inl.h"
#include "vm/Symbol-inl.h"
@ -439,10 +437,9 @@ IsMarked(T **thingp)
{
MOZ_ASSERT(thingp);
MOZ_ASSERT(*thingp);
#ifdef JSGC_GENERATIONAL
JSRuntime* rt = (*thingp)->runtimeFromAnyThread();
#ifdef JSGC_FJGENERATIONAL
// Must precede the case for JSGC_GENERATIONAL because IsInsideNursery()
// Must precede the case for GGC because IsInsideNursery()
// will also be true for the ForkJoinNursery.
if (rt->isFJMinorCollecting()) {
ForkJoinContext *ctx = ForkJoinContext::current();
@ -458,7 +455,7 @@ IsMarked(T **thingp)
return nursery.getForwardedPointer(thingp);
}
}
#endif // JSGC_GENERATIONAL
Zone *zone = (*thingp)->asTenured().zone();
if (!zone->isCollecting() || zone->isGCFinished())
return true;
@ -492,7 +489,6 @@ IsAboutToBeFinalizedFromAnyThread(T **thingp)
if (ThingIsPermanentAtom(thing) && !TlsPerThreadData.get()->associatedWith(rt))
return false;
#ifdef JSGC_GENERATIONAL
#ifdef JSGC_FJGENERATIONAL
if (rt->isFJMinorCollecting()) {
ForkJoinContext *ctx = ForkJoinContext::current();
@ -511,7 +507,6 @@ IsAboutToBeFinalizedFromAnyThread(T **thingp)
return false;
}
}
#endif // JSGC_GENERATIONAL
Zone *zone = thing->asTenured().zoneFromAnyThread();
if (zone->isGCSweeping()) {
@ -537,8 +532,6 @@ UpdateIfRelocated(JSRuntime *rt, T **thingp)
if (!*thingp)
return nullptr;
#ifdef JSGC_GENERATIONAL
#ifdef JSGC_FJGENERATIONAL
if (rt->isFJMinorCollecting()) {
ForkJoinContext *ctx = ForkJoinContext::current();
@ -553,7 +546,6 @@ UpdateIfRelocated(JSRuntime *rt, T **thingp)
rt->gc.nursery.getForwardedPointer(thingp);
return *thingp;
}
#endif // JSGC_GENERATIONAL
#ifdef JSGC_COMPACTING
Zone *zone = (*thingp)->zone();

View File

@ -8,8 +8,6 @@
#ifndef gc_Nursery_inl_h
#define gc_Nursery_inl_h
#ifdef JSGC_GENERATIONAL
#include "gc/Nursery.h"
#include "gc/Heap.h"
@ -36,6 +34,4 @@ js::Nursery::forwardBufferPointer(JSTracer* trc, HeapSlot **pSlotElems)
trc->runtime()->gc.nursery.forwardBufferPointer(pSlotElems);
}
#endif /* JSGC_GENERATIONAL */
#endif /* gc_Nursery_inl_h */

View File

@ -5,8 +5,6 @@
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
#ifdef JSGC_GENERATIONAL
#include "gc/Nursery-inl.h"
#include "mozilla/IntegerPrintfMacros.h"
@ -978,5 +976,3 @@ js::Nursery::shrinkAllocableSpace()
numActiveChunks_ = Max(numActiveChunks_ - 1, 1);
updateDecommittedRegion();
}
#endif /* JSGC_GENERATIONAL */

View File

@ -8,8 +8,6 @@
#ifndef gc_Nursery_h
#define gc_Nursery_h
#ifdef JSGC_GENERATIONAL
#include "jsalloc.h"
#include "jspubtd.h"
@ -339,5 +337,4 @@ class Nursery
} /* namespace js */
#endif /* JSGC_GENERATIONAL */
#endif /* gc_Nursery_h */

View File

@ -4,8 +4,6 @@
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#ifdef JSGC_GENERATIONAL
#include "gc/StoreBuffer.h"
#include "mozilla/Assertions.h"
@ -256,5 +254,3 @@ template struct StoreBuffer::MonoTypeBuffer<StoreBuffer::ValueEdge>;
template struct StoreBuffer::MonoTypeBuffer<StoreBuffer::CellPtrEdge>;
template struct StoreBuffer::MonoTypeBuffer<StoreBuffer::SlotsEdge>;
template struct StoreBuffer::MonoTypeBuffer<StoreBuffer::WholeCellEdges>;
#endif /* JSGC_GENERATIONAL */

View File

@ -7,8 +7,6 @@
#ifndef gc_StoreBuffer_h
#define gc_StoreBuffer_h
#ifdef JSGC_GENERATIONAL
#include "mozilla/Attributes.h"
#include "mozilla/DebugOnly.h"
#include "mozilla/ReentrancyGuard.h"
@ -503,6 +501,4 @@ class StoreBuffer
} /* namespace gc */
} /* namespace js */
#endif /* JSGC_GENERATIONAL */
#endif /* gc_StoreBuffer_h */

View File

@ -403,7 +403,6 @@ struct VerifyPostTracer : JSTracer
void
gc::GCRuntime::startVerifyPostBarriers()
{
#ifdef JSGC_GENERATIONAL
if (verifyPostData ||
incrementalState != NO_INCREMENTAL)
{
@ -419,10 +418,8 @@ gc::GCRuntime::startVerifyPostBarriers()
return;
verifyPostData = trc;
#endif
}
#ifdef JSGC_GENERATIONAL
void
PostVerifierCollectStoreBufferEdges(JSTracer *jstrc, void **thingp, JSGCTraceKind kind)
{
@ -485,12 +482,10 @@ PostVerifierVisitEdge(JSTracer *jstrc, void **thingp, JSGCTraceKind kind)
AssertStoreBufferContainsEdge(trc->edges, loc, dst);
}
#endif
bool
js::gc::GCRuntime::endVerifyPostBarriers()
{
#ifdef JSGC_GENERATIONAL
VerifyPostTracer *trc = (VerifyPostTracer *)verifyPostData;
if (!trc)
return false;
@ -523,9 +518,6 @@ oom:
js_delete(trc);
verifyPostData = nullptr;
return true;
#else
return false;
#endif
}
/*** Barrier Verifier Scheduling ***/
@ -579,7 +571,6 @@ gc::GCRuntime::maybeVerifyPreBarriers(bool always)
void
gc::GCRuntime::maybeVerifyPostBarriers(bool always)
{
#ifdef JSGC_GENERATIONAL
if (zealMode != ZealVerifierPostValue)
return;
@ -593,7 +584,6 @@ gc::GCRuntime::maybeVerifyPostBarriers(bool always)
endVerifyPostBarriers();
}
startVerifyPostBarriers();
#endif
}
void
@ -611,12 +601,10 @@ js::gc::GCRuntime::finishVerifier()
js_delete(trc);
verifyPreData = nullptr;
}
#ifdef JSGC_GENERATIONAL
if (VerifyPostTracer *trc = (VerifyPostTracer *)verifyPostData) {
js_delete(trc);
verifyPostData = nullptr;
}
#endif
}
#endif /* JS_GC_ZEAL */

View File

@ -103,10 +103,8 @@ BaselineCompiler::compile()
if (!emitEpilogue())
return Method_Error;
#ifdef JSGC_GENERATIONAL
if (!emitOutOfLinePostBarrierSlot())
return Method_Error;
#endif
if (masm.oom())
return Method_Error;
@ -448,7 +446,6 @@ BaselineCompiler::emitEpilogue()
return true;
}
#ifdef JSGC_GENERATIONAL
// On input:
// R2.scratchReg() contains object being written to.
// Called with the baseline stack synced, except for R0 which is preserved.
@ -485,7 +482,6 @@ BaselineCompiler::emitOutOfLinePostBarrierSlot()
masm.ret();
return true;
}
#endif // JSGC_GENERATIONAL
bool
BaselineCompiler::emitIC(ICStub *stub, ICEntry::Kind kind)
@ -2247,7 +2243,6 @@ BaselineCompiler::emit_JSOP_SETALIASEDVAR()
masm.storeValue(R0, address);
frame.push(R0);
#ifdef JSGC_GENERATIONAL
// Only R0 is live at this point.
// Scope coordinate object is already in R2.scratchReg().
Register temp = R1.scratchReg();
@ -2259,8 +2254,6 @@ BaselineCompiler::emit_JSOP_SETALIASEDVAR()
masm.call(&postBarrierSlot_); // Won't clobber R0
masm.bind(&skipBarrier);
#endif
return true;
}
@ -2587,7 +2580,6 @@ BaselineCompiler::emitFormalArgAccess(uint32_t arg, bool get)
masm.loadValue(frame.addressOfStackValue(frame.peek(-1)), R0);
masm.storeValue(R0, argAddr);
#ifdef JSGC_GENERATIONAL
MOZ_ASSERT(frame.numUnsyncedSlots() == 0);
Register temp = R1.scratchReg();
@ -2604,7 +2596,6 @@ BaselineCompiler::emitFormalArgAccess(uint32_t arg, bool get)
masm.call(&postBarrierSlot_);
masm.bind(&skipBarrier);
#endif
}
masm.bind(&done);
@ -3379,7 +3370,6 @@ BaselineCompiler::emit_JSOP_INITIALYIELD()
masm.patchableCallPreBarrier(scopeChainSlot, MIRType_Value);
masm.storeValue(JSVAL_TYPE_OBJECT, scopeObj, scopeChainSlot);
#ifdef JSGC_GENERATIONAL
Register temp = R1.scratchReg();
Label skipBarrier;
masm.branchPtrInNurseryRange(Assembler::Equal, genObj, temp, &skipBarrier);
@ -3389,7 +3379,6 @@ BaselineCompiler::emit_JSOP_INITIALYIELD()
masm.call(&postBarrierSlot_);
masm.pop(genObj);
masm.bind(&skipBarrier);
#endif
masm.tagValue(JSVAL_TYPE_OBJECT, genObj, JSReturnOperand);
return emitReturn();
@ -3438,7 +3427,6 @@ BaselineCompiler::emit_JSOP_YIELD()
masm.patchableCallPreBarrier(scopeChainSlot, MIRType_Value);
masm.storeValue(JSVAL_TYPE_OBJECT, scopeObj, scopeChainSlot);
#ifdef JSGC_GENERATIONAL
Register temp = R1.scratchReg();
Label skipBarrier;
masm.branchPtrInNurseryRange(Assembler::Equal, genObj, temp, &skipBarrier);
@ -3446,7 +3434,6 @@ BaselineCompiler::emit_JSOP_YIELD()
MOZ_ASSERT(genObj == R2.scratchReg());
masm.call(&postBarrierSlot_);
masm.bind(&skipBarrier);
#endif
} else {
masm.loadBaselineFramePtr(BaselineFrameReg, R1.scratchReg());

View File

@ -196,9 +196,7 @@ class BaselineCompiler : public BaselineCompilerSpecific
{
FixedList<Label> labels_;
NonAssertingLabel return_;
#ifdef JSGC_GENERATIONAL
NonAssertingLabel postBarrierSlot_;
#endif
// Native code offset right before the scope chain is initialized.
CodeOffsetLabel prologueOffset_;
@ -241,9 +239,7 @@ class BaselineCompiler : public BaselineCompilerSpecific
void emitInitializeLocals(size_t n, const Value &v);
bool emitPrologue();
bool emitEpilogue();
#ifdef JSGC_GENERATIONAL
bool emitOutOfLinePostBarrierSlot();
#endif
bool emitIC(ICStub *stub, ICEntry::Kind kind);
bool emitOpIC(ICStub *stub) {
return emitIC(stub, ICEntry::Kind_Op);

View File

@ -819,10 +819,8 @@ jit::RecompileOnStackBaselineScriptsForDebugMode(JSContext *cx,
if (entries.empty())
return true;
#ifdef JSGC_GENERATIONAL
// Scripts can entrain nursery things. See note in js::ReleaseAllJITCode.
cx->runtime()->gc.evictNursery();
#endif
// When the profiler is enabled, we need to have suppressed sampling,
// since the baseline jit scripts are in a state of flux.

View File

@ -774,7 +774,6 @@ ICStubCompiler::emitProfilingUpdate(MacroAssembler &masm, GeneralRegisterSet reg
emitProfilingUpdate(masm, regs.takeAny(), regs.takeAny(), stubPcOffset);
}
#ifdef JSGC_GENERATIONAL
inline bool
ICStubCompiler::emitPostWriteBarrierSlot(MacroAssembler &masm, Register obj, ValueOperand val,
Register scratch, GeneralRegisterSet saveRegs)
@ -799,7 +798,6 @@ ICStubCompiler::emitPostWriteBarrierSlot(MacroAssembler &masm, Register obj, Val
masm.bind(&skipBarrier);
return true;
}
#endif // JSGC_GENERATIONAL
//
// WarmUpCounter_Fallback
@ -3477,14 +3475,12 @@ IsCacheableGetPropCall(JSContext *cx, JSObject *obj, JSObject *holder, Shape *sh
JSFunction *func = &shape->getterObject()->as<JSFunction>();
#ifdef JSGC_GENERATIONAL
// Information from get prop call ICs may be used directly from Ion code,
// and should not be nursery allocated.
if (IsInsideNursery(holder) || IsInsideNursery(func)) {
*isTemporarilyUnoptimizable = true;
return false;
}
#endif
if (func->isNative()) {
*isScripted = false;
@ -3601,14 +3597,12 @@ IsCacheableSetPropCall(JSContext *cx, JSObject *obj, JSObject *holder, Shape *sh
JSFunction *func = &shape->setterObject()->as<JSFunction>();
#ifdef JSGC_GENERATIONAL
// Information from set prop call ICs may be used directly from Ion code,
// and should not be nursery allocated.
if (IsInsideNursery(holder) || IsInsideNursery(func)) {
*isTemporarilyUnoptimizable = true;
return false;
}
#endif
if (func->isNative()) {
*isScripted = false;
@ -5433,14 +5427,12 @@ ICSetElem_Dense::Compiler::generateStubCode(MacroAssembler &masm)
EmitPreBarrier(masm, element, MIRType_Value);
masm.storeValue(tmpVal, element);
regs.add(key);
#ifdef JSGC_GENERATIONAL
if (cx->runtime()->gc.nursery.exists()) {
Register r = regs.takeAny();
GeneralRegisterSet saveRegs;
emitPostWriteBarrierSlot(masm, obj, tmpVal, r, saveRegs);
regs.add(r);
}
#endif
EmitReturnFromIC(masm);
@ -5621,14 +5613,12 @@ ICSetElemDenseAddCompiler::generateStubCode(MacroAssembler &masm)
masm.loadValue(valueAddr, tmpVal);
masm.storeValue(tmpVal, element);
regs.add(key);
#ifdef JSGC_GENERATIONAL
if (cx->runtime()->gc.nursery.exists()) {
Register r = regs.takeAny();
GeneralRegisterSet saveRegs;
emitPostWriteBarrierSlot(masm, obj, tmpVal, r, saveRegs);
regs.add(r);
}
#endif
EmitReturnFromIC(masm);
// Failure case - fail but first unstow R0 and R1
@ -8414,7 +8404,6 @@ ICSetProp_Native::Compiler::generateStubCode(MacroAssembler &masm)
masm.storeValue(R1, BaseIndex(holderReg, scratch, TimesOne));
if (holderReg != objReg)
regs.add(holderReg);
#ifdef JSGC_GENERATIONAL
if (cx->runtime()->gc.nursery.exists()) {
Register scr = regs.takeAny();
GeneralRegisterSet saveRegs;
@ -8422,7 +8411,6 @@ ICSetProp_Native::Compiler::generateStubCode(MacroAssembler &masm)
emitPostWriteBarrierSlot(masm, objReg, R1, scr, saveRegs);
regs.add(scr);
}
#endif
// The RHS has to be in R0.
masm.moveValue(R1, R0);
@ -8560,14 +8548,12 @@ ICSetPropNativeAddCompiler::generateStubCode(MacroAssembler &masm)
if (holderReg != objReg)
regs.add(holderReg);
#ifdef JSGC_GENERATIONAL
if (cx->runtime()->gc.nursery.exists()) {
Register scr = regs.takeAny();
GeneralRegisterSet saveRegs;
saveRegs.add(R1);
emitPostWriteBarrierSlot(masm, objReg, R1, scr, saveRegs);
}
#endif
// The RHS has to be in R0.
masm.moveValue(R1, R0);

View File

@ -1152,10 +1152,8 @@ class ICStubCompiler
return regs;
}
#ifdef JSGC_GENERATIONAL
inline bool emitPostWriteBarrierSlot(MacroAssembler &masm, Register obj, ValueOperand val,
Register scratch, GeneralRegisterSet saveRegs);
#endif
public:
virtual ICStub *getStub(ICStubSpace *space) = 0;

View File

@ -429,7 +429,6 @@ BaselineScript::Trace(JSTracer *trc, BaselineScript *script)
void
BaselineScript::Destroy(FreeOp *fop, BaselineScript *script)
{
#ifdef JSGC_GENERATIONAL
/*
* When the script contains pointers to nursery things, the store buffer
* will contain entries referring to the referenced things. Since we can
@ -438,7 +437,6 @@ BaselineScript::Destroy(FreeOp *fop, BaselineScript *script)
* outside of a GC that we at least emptied the nursery first.
*/
MOZ_ASSERT(fop->runtime()->gc.nursery.isEmpty());
#endif
script->unlinkDependentAsmJSModules(fop);

View File

@ -19,9 +19,7 @@
#include "asmjs/AsmJSModule.h"
#include "builtin/Eval.h"
#include "builtin/TypedObject.h"
#ifdef JSGC_GENERATIONAL
# include "gc/Nursery.h"
#endif
#include "gc/Nursery.h"
#include "irregexp/NativeRegExpMacroAssembler.h"
#include "jit/BaselineCompiler.h"
#include "jit/IonBuilder.h"
@ -2541,7 +2539,6 @@ CodeGenerator::visitMonitorTypes(LMonitorTypes *lir)
bailoutFrom(&miss, lir->snapshot());
}
#ifdef JSGC_GENERATIONAL
// Out-of-line path to update the store buffer.
class OutOfLineCallPostWriteBarrier : public OutOfLineCodeBase<CodeGenerator>
{
@ -2599,12 +2596,10 @@ CodeGenerator::visitOutOfLineCallPostWriteBarrier(OutOfLineCallPostWriteBarrier
masm.jump(ool->rejoin());
}
#endif
void
CodeGenerator::visitPostWriteBarrierO(LPostWriteBarrierO *lir)
{
#ifdef JSGC_GENERATIONAL
OutOfLineCallPostWriteBarrier *ool = new(alloc()) OutOfLineCallPostWriteBarrier(lir, lir->object());
addOutOfLineCode(ool, lir->mir());
@ -2622,13 +2617,11 @@ CodeGenerator::visitPostWriteBarrierO(LPostWriteBarrierO *lir)
masm.branchPtrInNurseryRange(Assembler::Equal, ToRegister(lir->value()), temp, ool->entry());
masm.bind(ool->rejoin());
#endif
}
void
CodeGenerator::visitPostWriteBarrierV(LPostWriteBarrierV *lir)
{
#ifdef JSGC_GENERATIONAL
OutOfLineCallPostWriteBarrier *ool = new(alloc()) OutOfLineCallPostWriteBarrier(lir, lir->object());
addOutOfLineCode(ool, lir->mir());
@ -2647,7 +2640,6 @@ CodeGenerator::visitPostWriteBarrierV(LPostWriteBarrierV *lir)
masm.branchValueIsNurseryObject(Assembler::Equal, value, temp, ool->entry());
masm.bind(ool->rejoin());
#endif
}
void
@ -6961,21 +6953,11 @@ CodeGenerator::visitIteratorStart(LIteratorStart *lir)
// Write barrier for stores to the iterator. We only need to take a write
// barrier if NativeIterator::obj is actually going to change.
{
#ifdef JSGC_GENERATIONAL
// Bug 867815: When using a nursery, we unconditionally take this out-
// of-line so that we do not have to post-barrier the store to
// NativeIter::obj. This just needs JIT support for the Cell* buffer.
// Bug 867815: Unconditionally take this out-of-line so that we do not
// have to post-barrier the store to NativeIter::obj. This just needs
// JIT support for the Cell* buffer.
Address objAddr(niTemp, offsetof(NativeIterator, obj));
masm.branchPtr(Assembler::NotEqual, objAddr, obj, ool->entry());
#else
Label noBarrier;
masm.branchTestNeedsIncrementalBarrier(Assembler::Zero, &noBarrier);
Address objAddr(niTemp, offsetof(NativeIterator, obj));
masm.branchPtr(Assembler::NotEqual, objAddr, obj, ool->entry());
masm.bind(&noBarrier);
#endif // !JSGC_GENERATIONAL
}
// Mark iterator as active.

View File

@ -176,13 +176,11 @@ CompileRuntime::maybeGetMathCache()
return runtime()->maybeGetMathCache();
}
#ifdef JSGC_GENERATIONAL
const Nursery &
CompileRuntime::gcNursery()
{
return runtime()->gc.nursery;
}
#endif
Zone *
CompileZone::zone()

View File

@ -80,9 +80,7 @@ class CompileRuntime
const MathCache *maybeGetMathCache();
#ifdef JSGC_GENERATIONAL
const Nursery &gcNursery();
#endif
};
class CompileZone

View File

@ -7065,10 +7065,8 @@ jit::TypeSetIncludes(types::TypeSet *types, MIRType input, types::TypeSet *input
bool
jit::NeedsPostBarrier(CompileInfo &info, MDefinition *value)
{
#ifdef JSGC_GENERATIONAL
if (!GetJitContext()->runtime->gcNursery().exists())
return false;
#endif
return info.executionMode() != ParallelExecution && value->mightBeType(MIRType_Object);
}
@ -8049,11 +8047,7 @@ IonBuilder::addTypedArrayLengthAndData(MDefinition *obj,
void *data = AnyTypedArrayViewData(tarr);
// Bug 979449 - Optimistically embed the elements and use TI to
// invalidate if we move them.
#ifdef JSGC_GENERATIONAL
bool isTenured = !tarr->runtimeFromMainThread()->gc.nursery.isInside(data);
#else
bool isTenured = true;
#endif
if (isTenured && tarr->hasSingletonType()) {
// The 'data' pointer of TypedArrayObject can change in rare circumstances
// (ArrayBufferObject::changeContents).
@ -8397,10 +8391,8 @@ IonBuilder::setElemTryTypedStatic(bool *emitted, MDefinition *object,
if (!tarrObj)
return true;
#ifdef JSGC_GENERATIONAL
if (tarrObj->runtimeFromMainThread()->gc.nursery.isInside(AnyTypedArrayViewData(tarrObj)))
return true;
#endif
types::TypeObjectKey *tarrType = types::TypeObjectKey::get(tarrObj);
if (tarrType->unknownProperties())

View File

@ -1104,7 +1104,6 @@ MarkBailoutFrame(JSTracer *trc, const JitFrameIterator &frame)
}
#ifdef JSGC_GENERATIONAL
template <typename T>
void
UpdateIonJSFrameForMinorGC(JSTracer *trc, const JitFrameIterator &frame)
@ -1154,7 +1153,6 @@ UpdateIonJSFrameForMinorGC(JSTracer *trc, const JitFrameIterator &frame)
trc->runtime()->gc.nursery.forwardBufferPointer(slots);
}
}
#endif
static void
MarkBaselineStubFrame(JSTracer *trc, const JitFrameIterator &frame)
@ -1474,7 +1472,6 @@ TopmostIonActivationCompartment(JSRuntime *rt)
return nullptr;
}
#ifdef JSGC_GENERATIONAL
template <typename T>
void UpdateJitActivationsForMinorGC(PerThreadData *ptd, JSTracer *trc)
{
@ -1499,8 +1496,6 @@ template
void UpdateJitActivationsForMinorGC<gc::ForkJoinNursery>(PerThreadData *ptd, JSTracer *trc);
#endif
#endif
void
GetPcScript(JSContext *cx, JSScript **scriptRes, jsbytecode **pcRes)
{

View File

@ -279,10 +279,8 @@ void MarkIonCompilerRoots(JSTracer *trc);
JSCompartment *
TopmostIonActivationCompartment(JSRuntime *rt);
#ifdef JSGC_GENERATIONAL
template<typename T>
void UpdateJitActivationsForMinorGC(PerThreadData *ptd, JSTracer *trc);
#endif
static inline uint32_t
MakeFrameDescriptor(uint32_t frameSize, FrameType type)

View File

@ -62,10 +62,8 @@ class Linker
return fail(cx);
code->copyFrom(masm);
masm.link(code);
#ifdef JSGC_GENERATIONAL
if (masm.embedsNurseryPointers())
cx->runtime()->gc.storeBuffer.putWholeCellFromMainThread(code);
#endif
return code;
}

View File

@ -2493,7 +2493,6 @@ LIRGenerator::visitMonitorTypes(MMonitorTypes *ins)
void
LIRGenerator::visitPostWriteBarrier(MPostWriteBarrier *ins)
{
#ifdef JSGC_GENERATIONAL
switch (ins->value()->type()) {
case MIRType_Object:
case MIRType_ObjectOrNull: {
@ -2519,7 +2518,6 @@ LIRGenerator::visitPostWriteBarrier(MPostWriteBarrier *ins)
// types cannot hold nursery pointers.
break;
}
#endif // JSGC_GENERATIONAL
}
void

View File

@ -22,9 +22,7 @@
#include "vm/ForkJoin.h"
#include "vm/TraceLogging.h"
#ifdef JSGC_GENERATIONAL
# include "jsgcinlines.h"
#endif
#include "jsgcinlines.h"
#include "jsinferinlines.h"
#include "jsobjinlines.h"
#include "vm/Interpreter-inl.h"
@ -639,16 +637,12 @@ MacroAssembler::checkAllocatorState(Label *fail)
bool
MacroAssembler::shouldNurseryAllocate(gc::AllocKind allocKind, gc::InitialHeap initialHeap)
{
#ifdef JSGC_GENERATIONAL
// Note that Ion elides barriers on writes to objects known to be in the
// nursery, so any allocation that can be made into the nursery must be made
// into the nursery, even if the nursery is disabled. At runtime these will
// take the out-of-line path, which is required to insert a barrier for the
// initializing writes.
return IsNurseryAllocable(allocKind) && initialHeap != gc::TenuredHeap;
#else
return false;
#endif
}
// Inline version of Nursery::allocateObject.
@ -656,7 +650,6 @@ void
MacroAssembler::nurseryAllocate(Register result, Register slots, gc::AllocKind allocKind,
size_t nDynamicSlots, gc::InitialHeap initialHeap, Label *fail)
{
#ifdef JSGC_GENERATIONAL
MOZ_ASSERT(IsNurseryAllocable(allocKind));
MOZ_ASSERT(initialHeap != gc::TenuredHeap);
@ -681,7 +674,6 @@ MacroAssembler::nurseryAllocate(Register result, Register slots, gc::AllocKind a
if (nDynamicSlots)
computeEffectiveAddress(Address(result, thingSize), slots);
#endif // JSGC_GENERATIONAL
}
// Inlined version of FreeList::allocate.

View File

@ -543,13 +543,11 @@ NewCallObject(JSContext *cx, HandleShape shape, HandleTypeObject type, uint32_t
if (!obj)
return nullptr;
#ifdef JSGC_GENERATIONAL
// The JIT creates call objects in the nursery, so elides barriers for
// the initializing writes. The interpreter, however, may have allocated
// the call object tenured, so barrier as needed before re-entering.
if (!IsInsideNursery(obj))
cx->runtime()->gc.storeBuffer.putWholeCellFromMainThread(obj);
#endif
return obj;
}
@ -561,14 +559,12 @@ NewSingletonCallObject(JSContext *cx, HandleShape shape, uint32_t lexicalBegin)
if (!obj)
return nullptr;
#ifdef JSGC_GENERATIONAL
// The JIT creates call objects in the nursery, so elides barriers for
// the initializing writes. The interpreter, however, may have allocated
// the call object tenured, so barrier as needed before re-entering.
MOZ_ASSERT(!IsInsideNursery(obj),
"singletons are created in the tenured heap");
cx->runtime()->gc.storeBuffer.putWholeCellFromMainThread(obj);
#endif
return obj;
}
@ -704,7 +700,6 @@ FilterArgumentsOrEval(JSContext *cx, JSString *str)
!StringHasPattern(linear, eval, mozilla::ArrayLength(eval));
}
#ifdef JSGC_GENERATIONAL
void
PostWriteBarrier(JSRuntime *rt, JSObject *obj)
{
@ -721,7 +716,6 @@ PostGlobalWriteBarrier(JSRuntime *rt, JSObject *obj)
obj->compartment()->globalWriteBarriered = true;
}
}
#endif
uint32_t
GetIndexFromString(JSString *str)

View File

@ -726,10 +726,8 @@ void GetDynamicName(JSContext *cx, JSObject *scopeChain, JSString *str, Value *v
bool FilterArgumentsOrEval(JSContext *cx, JSString *str);
#ifdef JSGC_GENERATIONAL
void PostWriteBarrier(JSRuntime *rt, JSObject *obj);
void PostGlobalWriteBarrier(JSRuntime *rt, JSObject *obj);
#endif
uint32_t GetIndexFromString(JSString *str);

View File

@ -4687,8 +4687,6 @@ MacroAssemblerARMCompat::jumpWithPatch(RepatchLabel *label, Condition cond)
return ret;
}
#ifdef JSGC_GENERATIONAL
void
MacroAssemblerARMCompat::branchPtrInNurseryRange(Condition cond, Register ptr, Register temp,
Label *label)
@ -4995,5 +4993,3 @@ template void
js::jit::MacroAssemblerARMCompat::atomicFetchOp(int nbytes, bool signExtend, AtomicOp op,
const Register &value, const BaseIndex &mem,
Register temp, Register output);
#endif

View File

@ -1822,10 +1822,8 @@ class MacroAssemblerARMCompat : public MacroAssemblerARM
as_vmov(VFPRegister(dest).singleOverlay(), VFPRegister(src).singleOverlay());
}
#ifdef JSGC_GENERATIONAL
void branchPtrInNurseryRange(Condition cond, Register ptr, Register temp, Label *label);
void branchValueIsNurseryObject(Condition cond, ValueOperand value, Register temp, Label *label);
#endif
void loadAsmJSActivation(Register dest) {
loadPtr(Address(GlobalReg, AsmJSActivationGlobalDataOffset - AsmJSGlobalRegBias), dest);

View File

@ -3641,8 +3641,6 @@ MacroAssemblerMIPSCompat::toggledCall(JitCode *target, bool enabled)
return offset;
}
#ifdef JSGC_GENERATIONAL
void
MacroAssemblerMIPSCompat::branchPtrInNurseryRange(Condition cond, Register ptr, Register temp,
Label *label)
@ -3671,5 +3669,3 @@ MacroAssemblerMIPSCompat::branchValueIsNurseryObject(Condition cond, ValueOperan
bind(&done);
}
#endif

View File

@ -1453,11 +1453,9 @@ public:
as_movs(dest, src);
}
#ifdef JSGC_GENERATIONAL
void branchPtrInNurseryRange(Condition cond, Register ptr, Register temp, Label *label);
void branchValueIsNurseryObject(Condition cond, ValueOperand value, Register temp,
Label *label);
#endif
void loadAsmJSActivation(Register dest) {
loadPtr(Address(GlobalReg, AsmJSActivationGlobalDataOffset - AsmJSGlobalRegBias), dest);

View File

@ -413,10 +413,8 @@ class MacroAssemblerNone : public Assembler
void handleFailureWithHandler(void *) { MOZ_CRASH(); }
void makeFrameDescriptor(Register, FrameType) { MOZ_CRASH(); }
#ifdef JSGC_GENERATIONAL
void branchPtrInNurseryRange(Condition, Register, Register, Label *) { MOZ_CRASH(); }
void branchValueIsNurseryObject(Condition, ValueOperand, Register, Label *) { MOZ_CRASH(); }
#endif
void buildFakeExitFrame(Register, uint32_t *) { MOZ_CRASH(); }
bool buildOOLFakeExitFrame(void *) { MOZ_CRASH(); }

View File

@ -923,7 +923,6 @@ class AssemblerShared
}
ImmGCPtr noteMaybeNurseryPtr(ImmMaybeNurseryPtr ptr) {
#ifdef JSGC_GENERATIONAL
if (ptr.value && gc::IsInsideNursery(ptr.value)) {
// FIXME: Ideally we'd assert this in all cases, but PJS needs to
// compile IC's from off-main-thread; it will not touch
@ -931,7 +930,6 @@ class AssemblerShared
MOZ_ASSERT(GetJitContext()->runtime->onMainThread());
embedsNurseryPointers_ = true;
}
#endif
return ImmGCPtr(ptr);
}

View File

@ -497,8 +497,6 @@ template void
MacroAssemblerX64::storeUnboxedValue(ConstantOrRegister value, MIRType valueType, const BaseIndex &dest,
MIRType slotType);
#ifdef JSGC_GENERATIONAL
void
MacroAssemblerX64::branchPtrInNurseryRange(Condition cond, Register ptr, Register temp, Label *label)
{
@ -528,5 +526,3 @@ MacroAssemblerX64::branchValueIsNurseryObject(Condition cond, ValueOperand value
branchPtr(cond == Assembler::Equal ? Assembler::Below : Assembler::AboveOrEqual,
ScratchReg, Imm32(nursery.nurserySize()), label);
}
#endif

View File

@ -1433,10 +1433,8 @@ class MacroAssemblerX64 : public MacroAssemblerX86Shared
storeValue(JSVAL_TYPE_INT32, ScratchReg, Dest);
}
#ifdef JSGC_GENERATIONAL
void branchPtrInNurseryRange(Condition cond, Register ptr, Register temp, Label *label);
void branchValueIsNurseryObject(Condition cond, ValueOperand value, Register temp, Label *label);
#endif
};
typedef MacroAssemblerX64 MacroAssemblerSpecific;

View File

@ -493,8 +493,6 @@ template void
MacroAssemblerX86::storeUnboxedValue(ConstantOrRegister value, MIRType valueType, const BaseIndex &dest,
MIRType slotType);
#ifdef JSGC_GENERATIONAL
void
MacroAssemblerX86::branchPtrInNurseryRange(Condition cond, Register ptr, Register temp,
Label *label)
@ -523,5 +521,3 @@ MacroAssemblerX86::branchValueIsNurseryObject(Condition cond, ValueOperand value
bind(&done);
}
#endif

View File

@ -1168,10 +1168,8 @@ class MacroAssemblerX86 : public MacroAssemblerX86Shared
call(target);
}
#ifdef JSGC_GENERATIONAL
void branchPtrInNurseryRange(Condition cond, Register ptr, Register temp, Label *label);
void branchValueIsNurseryObject(Condition cond, ValueOperand value, Register temp, Label *label);
#endif
};
typedef MacroAssemblerX86 MacroAssemblerSpecific;

View File

@ -31,9 +31,6 @@
/* Define to 1 if SpiderMonkey should use small chunks. */
#undef JS_GC_SMALL_CHUNK_SIZE
/* Define to 1 if SpiderMonkey should use Generational GC. */
#undef JSGC_GENERATIONAL
/* Define to 1 if SpiderMonkey should use Compacting GC. */
#undef JSGC_COMPACTING

View File

@ -5,8 +5,6 @@
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#ifdef JSGC_GENERATIONAL
#include "js/RootingAPI.h"
#include "jsapi-tests/tests.h"
@ -85,5 +83,3 @@ JSFunction *NurseryFunction()
}
END_TEST(testGCHeapPostBarriers)
#endif

View File

@ -5,8 +5,6 @@
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#ifdef JSGC_GENERATIONAL
#include "gc/Barrier.h"
#include "jsapi-tests/tests.h"
@ -121,5 +119,3 @@ JSObject *NurseryObject()
return JS_NewObject(cx, nullptr, JS::NullPtr(), JS::NullPtr());
}
END_TEST(testGCStoreBufferRemoval)
#endif

View File

@ -7,8 +7,6 @@
#include "jsapi-tests/tests.h"
#ifdef JSGC_GENERATIONAL
BEGIN_TEST(testIsInsideNursery)
{
/* Non-GC things are never inside the nursery. */
@ -30,5 +28,3 @@ BEGIN_TEST(testIsInsideNursery)
return true;
}
END_TEST(testIsInsideNursery)
#endif

View File

@ -535,11 +535,9 @@ struct JSContext : public js::ExclusiveContext,
MaybeAllowCrossCompartment = DONT_ALLOW_CROSS_COMPARTMENT) const;
// The generational GC nursery may only be used on the main thread.
#ifdef JSGC_GENERATIONAL
inline js::Nursery &nursery() {
return runtime_->gc.nursery;
}
#endif
void minorGC(JS::gcreason::Reason reason) {
runtime_->gc.minorGC(this, reason);

View File

@ -182,8 +182,6 @@ JSCompartment::ensureJitCompartmentExists(JSContext *cx)
return true;
}
#ifdef JSGC_GENERATIONAL
/*
* This class is used to add a post barrier on the crossCompartmentWrappers map,
* as the key is calculated based on objects which may be moved by generational
@ -238,8 +236,6 @@ JSCompartment::checkWrapperMapAfterMovingGC()
}
#endif
#endif
bool
JSCompartment::putWrapper(JSContext *cx, const CrossCompartmentKey &wrapped, const js::Value &wrapper)
{
@ -251,7 +247,6 @@ JSCompartment::putWrapper(JSContext *cx, const CrossCompartmentKey &wrapped, con
MOZ_ASSERT_IF(wrapped.kind != CrossCompartmentKey::StringWrapper, wrapper.isObject());
bool success = crossCompartmentWrappers.put(wrapped, ReadBarriered<Value>(wrapper));
#ifdef JSGC_GENERATIONAL
/* There's no point allocating wrappers in the nursery since we will tenure them anyway. */
MOZ_ASSERT(!IsInsideNursery(static_cast<gc::Cell *>(wrapper.toGCThing())));
@ -259,7 +254,6 @@ JSCompartment::putWrapper(JSContext *cx, const CrossCompartmentKey &wrapped, con
WrapperMapRef ref(&crossCompartmentWrappers, wrapped);
cx->runtime()->gc.storeBuffer.putGeneric(ref);
}
#endif
return success;
}

View File

@ -1020,10 +1020,8 @@ DumpHeapVisitRoot(JSTracer *trc, void **thingp, JSGCTraceKind kind)
void
js::DumpHeapComplete(JSRuntime *rt, FILE *fp, js::DumpHeapNurseryBehaviour nurseryBehaviour)
{
#ifdef JSGC_GENERATIONAL
if (nurseryBehaviour == js::CollectNurseryBeforeDump)
rt->gc.evictNursery(JS::gcreason::API);
#endif
DumpHeapTracer dtrc(fp, rt, DumpHeapVisitRoot, TraceWeakMapKeysValues);
TraceRuntime(&dtrc);
@ -1145,11 +1143,11 @@ JS::IsCompactingGCEnabled(JSRuntime *rt)
JS::AutoDisableGenerationalGC::AutoDisableGenerationalGC(JSRuntime *rt)
: gc(&rt->gc)
#if defined(JSGC_GENERATIONAL) && defined(JS_GC_ZEAL)
#ifdef JS_GC_ZEAL
, restartVerifier(false)
#endif
{
#if defined(JSGC_GENERATIONAL) && defined(JS_GC_ZEAL)
#ifdef JS_GC_ZEAL
restartVerifier = gc->endVerifyPostBarriers();
#endif
gc->disableGenerationalGC();
@ -1158,7 +1156,7 @@ JS::AutoDisableGenerationalGC::AutoDisableGenerationalGC(JSRuntime *rt)
JS::AutoDisableGenerationalGC::~AutoDisableGenerationalGC()
{
gc->enableGenerationalGC();
#if defined(JSGC_GENERATIONAL) && defined(JS_GC_ZEAL)
#ifdef JS_GC_ZEAL
if (restartVerifier) {
MOZ_ASSERT(gc->isGenerationalGCEnabled());
gc->startVerifyPostBarriers();
@ -1447,7 +1445,6 @@ js::HasObjectMovedOp(JSObject *obj) {
}
#endif
#ifdef JSGC_GENERATIONAL
JS_FRIEND_API(void)
JS_StoreObjectPostBarrierCallback(JSContext* cx,
void (*callback)(JSTracer *trc, JSObject *key, void *data),
@ -1467,7 +1464,6 @@ JS_StoreStringPostBarrierCallback(JSContext* cx,
if (IsInsideNursery(key))
rt->gc.storeBuffer.putCallback(callback, key, data);
}
#endif /* JSGC_GENERATIONAL */
JS_FRIEND_API(bool)
js::ForwardToNative(JSContext *cx, JSNative native, const CallArgs &args)

View File

@ -2684,7 +2684,6 @@ js_DefineOwnProperty(JSContext *cx, JSObject *objArg, jsid idArg,
extern JS_FRIEND_API(bool)
js_ReportIsNotFunction(JSContext *cx, JS::HandleValue v);
#ifdef JSGC_GENERATIONAL
extern JS_FRIEND_API(void)
JS_StoreObjectPostBarrierCallback(JSContext* cx,
void (*callback)(JSTracer *trc, JSObject *key, void *data),
@ -2694,16 +2693,5 @@ extern JS_FRIEND_API(void)
JS_StoreStringPostBarrierCallback(JSContext* cx,
void (*callback)(JSTracer *trc, JSString *key, void *data),
JSString *key, void *data);
#else
inline void
JS_StoreObjectPostBarrierCallback(JSContext* cx,
void (*callback)(JSTracer *trc, JSObject *key, void *data),
JSObject *key, void *data) {}
inline void
JS_StoreStringPostBarrierCallback(JSContext* cx,
void (*callback)(JSTracer *trc, JSString *key, void *data),
JSString *key, void *data) {}
#endif /* JSGC_GENERATIONAL */
#endif /* jsfriendapi_h */

View File

@ -1106,10 +1106,8 @@ GCRuntime::releaseArena(ArenaHeader *aheader, const AutoLockGC &lock)
GCRuntime::GCRuntime(JSRuntime *rt) :
rt(rt),
systemZone(nullptr),
#ifdef JSGC_GENERATIONAL
nursery(rt),
storeBuffer(rt, nursery),
#endif
stats(rt),
marker(rt),
usage(nullptr),
@ -1126,10 +1124,8 @@ GCRuntime::GCRuntime(JSRuntime *rt) :
grayBitsValid(false),
majorGCRequested(0),
majorGCTriggerReason(JS::gcreason::NO_REASON),
#ifdef JSGC_GENERATIONAL
minorGCRequested(false),
minorGCTriggerReason(JS::gcreason::NO_REASON),
#endif
majorGCNumber(0),
jitReleaseNumber(0),
number(0),
@ -1219,7 +1215,6 @@ GCRuntime::setZeal(uint8_t zeal, uint32_t frequency)
if (verifyPostData)
VerifyBarriers(rt, PostBarrierVerifier);
#ifdef JSGC_GENERATIONAL
if (zealMode == ZealGenerationalGCValue) {
evictNursery(JS::gcreason::DEBUG_GC);
nursery.leaveZealMode();
@ -1227,7 +1222,6 @@ GCRuntime::setZeal(uint8_t zeal, uint32_t frequency)
if (zeal == ZealGenerationalGCValue)
nursery.enterZealMode();
#endif
bool schedule = zeal >= js::gc::ZealAllocValue;
zealMode = zeal;
@ -1299,7 +1293,6 @@ GCRuntime::init(uint32_t maxbytes, uint32_t maxNurseryBytes)
jitReleaseNumber = majorGCNumber + JIT_SCRIPT_RELEASE_TYPES_PERIOD;
#ifdef JSGC_GENERATIONAL
if (!nursery.init(maxNurseryBytes))
return false;
@ -1311,7 +1304,6 @@ GCRuntime::init(uint32_t maxbytes, uint32_t maxNurseryBytes)
if (!storeBuffer.enable())
return false;
}
#endif
#ifdef JS_GC_ZEAL
const char *zealSpec = getenv("JS_GC_ZEAL");
@ -5656,9 +5648,7 @@ AutoTraceSession::AutoTraceSession(JSRuntime *rt, js::HeapState heapState)
MOZ_ASSERT(rt->gc.isAllocAllowed());
MOZ_ASSERT(rt->gc.heapState == Idle);
MOZ_ASSERT(heapState != Idle);
#ifdef JSGC_GENERATIONAL
MOZ_ASSERT_IF(heapState == MajorCollecting, rt->gc.nursery.isEmpty());
#endif
// Threads with an exclusive context can hit refillFreeList while holding
// the exclusive access lock. To avoid deadlocking when we try to acquire
@ -6063,7 +6053,6 @@ GCRuntime::budgetIncrementalGC(SliceBudget &budget)
namespace {
#ifdef JSGC_GENERATIONAL
class AutoDisableStoreBuffer
{
StoreBuffer &sb;
@ -6079,12 +6068,6 @@ class AutoDisableStoreBuffer
sb.enable();
}
};
#else
struct AutoDisableStoreBuffer
{
AutoDisableStoreBuffer(GCRuntime *gc) {}
};
#endif
} /* anonymous namespace */
@ -6473,13 +6456,11 @@ GCRuntime::onOutOfMallocMemory(const AutoLockGC &lock)
void
GCRuntime::minorGC(JS::gcreason::Reason reason)
{
#ifdef JSGC_GENERATIONAL
minorGCRequested = false;
TraceLogger *logger = TraceLoggerForMainThread(rt);
AutoTraceLog logMinorGC(logger, TraceLogger::MinorGC);
nursery.collect(rt, reason, nullptr);
MOZ_ASSERT_IF(!rt->mainThread.suppressGC, nursery.isEmpty());
#endif
}
void
@ -6487,7 +6468,6 @@ GCRuntime::minorGC(JSContext *cx, JS::gcreason::Reason reason)
{
// Alternate to the runtime-taking form above which allows marking type
// objects as needing pretenuring.
#ifdef JSGC_GENERATIONAL
minorGCRequested = false;
TraceLogger *logger = TraceLoggerForMainThread(rt);
AutoTraceLog logMinorGC(logger, TraceLogger::MinorGC);
@ -6498,19 +6478,16 @@ GCRuntime::minorGC(JSContext *cx, JS::gcreason::Reason reason)
pretenureTypes[i]->setShouldPreTenure(cx);
}
MOZ_ASSERT_IF(!rt->mainThread.suppressGC, nursery.isEmpty());
#endif
}
void
GCRuntime::disableGenerationalGC()
{
#ifdef JSGC_GENERATIONAL
if (isGenerationalGCEnabled()) {
minorGC(JS::gcreason::API);
nursery.disable();
storeBuffer.disable();
}
#endif
++rt->gc.generationalDisabled;
}
@ -6519,12 +6496,10 @@ GCRuntime::enableGenerationalGC()
{
MOZ_ASSERT(generationalDisabled > 0);
--generationalDisabled;
#ifdef JSGC_GENERATIONAL
if (generationalDisabled == 0) {
nursery.enable();
storeBuffer.enable();
}
#endif
}
bool
@ -6532,14 +6507,12 @@ GCRuntime::gcIfNeeded(JSContext *cx /* = nullptr */)
{
// This method returns whether a major GC was performed.
#ifdef JSGC_GENERATIONAL
if (minorGCRequested) {
if (cx)
minorGC(cx, minorGCTriggerReason);
else
minorGC(minorGCTriggerReason);
}
#endif
if (majorGCRequested) {
gcSlice(GC_NORMAL, rt->gc.majorGCTriggerReason);
@ -6776,13 +6749,11 @@ void PreventGCDuringInteractiveDebug()
void
js::ReleaseAllJITCode(FreeOp *fop)
{
#ifdef JSGC_GENERATIONAL
/*
* Scripts can entrain nursery things, inserting references to the script
* into the store buffer. Clear the store buffer before discarding scripts.
*/
fop->runtime()->gc.evictNursery();
#endif
for (ZonesIter zone(fop->runtime(), SkipAtoms); !zone.done(); zone.next()) {
if (!zone->jitZone())

View File

@ -75,7 +75,6 @@ template <> struct MapTypeToFinalizeKind<JSExternalString> { static const Alloc
template <> struct MapTypeToFinalizeKind<JS::Symbol> { static const AllocKind kind = FINALIZE_SYMBOL; };
template <> struct MapTypeToFinalizeKind<jit::JitCode> { static const AllocKind kind = FINALIZE_JITCODE; };
#if defined(JSGC_GENERATIONAL) || defined(DEBUG)
static inline bool
IsNurseryAllocable(AllocKind kind)
{
@ -108,7 +107,6 @@ IsNurseryAllocable(AllocKind kind)
JS_STATIC_ASSERT(JS_ARRAY_LENGTH(map) == FINALIZE_LIMIT);
return map[kind];
}
#endif
#if defined(JSGC_FJGENERATIONAL)
// This is separate from IsNurseryAllocable() so that the latter can evolve
@ -1220,8 +1218,6 @@ namespace gc {
void
MergeCompartments(JSCompartment *source, JSCompartment *target);
#if defined(JSGC_GENERATIONAL) || defined(JSGC_COMPACTING)
/*
* This structure overlays a Cell in the Nursery and re-purposes its memory
* for managing the Nursery collection process.
@ -1329,14 +1325,6 @@ MaybeForwarded(T t)
return IsForwarded(t) ? Forwarded(t) : t;
}
#else
template <typename T> inline bool IsForwarded(T t) { return false; }
template <typename T> inline T Forwarded(T t) { return t; }
template <typename T> inline T MaybeForwarded(T t) { return t; }
#endif // JSGC_GENERATIONAL || JSGC_COMPACTING
#ifdef JSGC_HASH_TABLE_CHECKS
template <typename T>

View File

@ -67,13 +67,11 @@ GetGCObjectKind(const Class *clasp)
return GetGCObjectKind(nslots);
}
#ifdef JSGC_GENERATIONAL
inline bool
ShouldNurseryAllocate(const Nursery &nursery, AllocKind kind, InitialHeap heap)
{
return nursery.isEnabled() && IsNurseryAllocable(kind) && heap != TenuredHeap;
}
#endif
#ifdef JSGC_FJGENERATIONAL
inline bool
@ -88,10 +86,8 @@ GetGCThingTraceKind(const void *thing)
{
MOZ_ASSERT(thing);
const Cell *cell = static_cast<const Cell *>(thing);
#ifdef JSGC_GENERATIONAL
if (IsInsideNursery(cell))
return JSTRACE_OBJECT;
#endif
return MapAllocToTraceKind(cell->asTenured().getAllocKind());
}
@ -317,9 +313,7 @@ class ZoneCellIterUnderGC : public ZoneCellIterImpl
{
public:
ZoneCellIterUnderGC(JS::Zone *zone, AllocKind kind) {
#ifdef JSGC_GENERATIONAL
MOZ_ASSERT(zone->runtimeFromAnyThread()->gc.nursery.isEmpty());
#endif
MOZ_ASSERT(zone->runtimeFromAnyThread()->isHeapBusy());
init(zone, kind);
}
@ -348,11 +342,9 @@ class ZoneCellIter : public ZoneCellIterImpl
zone->runtimeFromMainThread()->gc.waitBackgroundSweepEnd();
}
#ifdef JSGC_GENERATIONAL
/* Evict the nursery before iterating so we can see all things. */
JSRuntime *rt = zone->runtimeFromMainThread();
rt->gc.evictNursery();
#endif
if (lists->isSynchronizedFreeList(kind)) {
lists = nullptr;
@ -435,7 +427,6 @@ class GCZoneGroupIter {
typedef CompartmentsIterT<GCZoneGroupIter> GCCompartmentGroupIter;
#ifdef JSGC_GENERATIONAL
/*
* Attempt to allocate a new GC thing out of the nursery. If there is not enough
* room in the nursery or there is an OOM, this method will return nullptr.
@ -462,7 +453,6 @@ TryNewNurseryObject(JSContext *cx, size_t thingSize, size_t nDynamicSlots)
}
return nullptr;
}
#endif /* JSGC_GENERATIONAL */
#ifdef JSGC_FJGENERATIONAL
template <AllowGC allowGC>
@ -573,14 +563,12 @@ AllocateObject(ThreadSafeContext *cx, AllocKind kind, size_t nDynamicSlots, Init
if (!CheckAllocatorState<allowGC>(cx, kind))
return nullptr;
#ifdef JSGC_GENERATIONAL
if (cx->isJSContext() &&
ShouldNurseryAllocate(cx->asJSContext()->nursery(), kind, heap)) {
JSObject *obj = TryNewNurseryObject<allowGC>(cx->asJSContext(), thingSize, nDynamicSlots);
if (obj)
return obj;
}
#endif
#ifdef JSGC_FJGENERATIONAL
if (cx->isForkJoinContext() &&
ShouldFJNurseryAllocate(cx->asForkJoinContext()->nursery(), kind, heap))
@ -654,7 +642,6 @@ template <AllowGC allowGC>
inline JSObject *
AllocateObjectForCacheHit(JSContext *cx, AllocKind kind, InitialHeap heap)
{
#ifdef JSGC_GENERATIONAL
if (ShouldNurseryAllocate(cx->nursery(), kind, heap)) {
size_t thingSize = Arena::thingSize(kind);
@ -669,7 +656,6 @@ AllocateObjectForCacheHit(JSContext *cx, AllocKind kind, InitialHeap heap)
}
return obj;
}
#endif
JSObject *obj = AllocateObject<NoGC>(cx, kind, 0, heap);
if (!obj && allowGC) {
@ -683,7 +669,6 @@ AllocateObjectForCacheHit(JSContext *cx, AllocKind kind, InitialHeap heap)
inline bool
IsInsideGGCNursery(const js::gc::Cell *cell)
{
#ifdef JSGC_GENERATIONAL
if (!cell)
return false;
uintptr_t addr = uintptr_t(cell);
@ -692,9 +677,6 @@ IsInsideGGCNursery(const js::gc::Cell *cell)
uint32_t location = *reinterpret_cast<uint32_t *>(addr);
MOZ_ASSERT(location != 0);
return location & js::gc::ChunkLocationBitNursery;
#else
return false;
#endif
}
} /* namespace gc */

View File

@ -25,18 +25,14 @@ struct DependentAddPtr
template <class Lookup>
DependentAddPtr(const ExclusiveContext *cx, const T &table, const Lookup &lookup)
: addPtr(table.lookupForAdd(lookup))
#ifdef JSGC_GENERATIONAL
, originalGcNumber(cx->zone()->gcNumber())
#endif
{}
{}
template <class KeyInput, class ValueInput>
bool add(const ExclusiveContext *cx, T &table, const KeyInput &key, const ValueInput &value) {
#ifdef JSGC_GENERATIONAL
bool gcHappened = originalGcNumber != cx->zone()->gcNumber();
if (gcHappened)
addPtr = table.lookupForAdd(key);
#endif
return table.relookupOrAdd(addPtr, key, value);
}
@ -50,9 +46,7 @@ struct DependentAddPtr
private:
AddPtr addPtr ;
#ifdef JSGC_GENERATIONAL
const uint64_t originalGcNumber;
#endif
DependentAddPtr() MOZ_DELETE;
DependentAddPtr(const DependentAddPtr&) MOZ_DELETE;

View File

@ -4373,7 +4373,6 @@ JSObject::setNewTypeUnknown(JSContext *cx, const Class *clasp, HandleObject obj)
return true;
}
#ifdef JSGC_GENERATIONAL
/*
* This class is used to add a post barrier on the newTypeObjects set, as the
* key is calculated from a prototype object which may be moved by generational
@ -4425,7 +4424,6 @@ TypeObjectTablePostBarrier(ExclusiveContext *cx, TypeObjectWithNewScriptSet *tab
sb.putGeneric(NewTypeObjectsSetRef(table, clasp, proto.toObject(), fun));
}
}
#endif
TypeObject *
ExclusiveContext::getNewType(const Class *clasp, TaggedProto proto, JSFunction *fun)
@ -4481,9 +4479,7 @@ ExclusiveContext::getNewType(const Class *clasp, TaggedProto proto, JSFunction *
if (!newTypeObjects.add(p, TypeObjectWithNewScriptEntry(type, fun)))
return nullptr;
#ifdef JSGC_GENERATIONAL
TypeObjectTablePostBarrier(this, &newTypeObjects, clasp, proto, fun);
#endif
if (proto.isObject()) {
RootedObject obj(this, proto.toObject());
@ -4549,9 +4545,7 @@ ExclusiveContext::getSingletonType(const Class *clasp, TaggedProto proto)
if (!table.add(p, TypeObjectWithNewScriptEntry(type, nullptr)))
return nullptr;
#ifdef JSGC_GENERATIONAL
TypeObjectTablePostBarrier(this, &table, clasp, proto, nullptr);
#endif
type->initSingleton((JSObject *) TypeObject::LAZY_SINGLETON);
MOZ_ASSERT(type->singleton(), "created type must be a proper singleton");
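The post barrier registered by TypeObjectTablePostBarrier exists because the set's hash is computed from a prototype object that a minor GC may move; the buffered ref runs after the collection and re-keys the affected entry. The following toy version, with invented names such as StoreBufferish, only illustrates that buffered-callback pattern and is not the SpiderMonkey implementation:

#include <functional>
#include <string>
#include <unordered_map>
#include <vector>

// Toy "store buffer": a list of fix-up callbacks the minor GC runs after it
// has moved nursery objects, so table entries keyed on moved objects can be
// re-inserted under their new key.
struct StoreBufferish {
    std::vector<std::function<void()>> refs;
    void putGeneric(std::function<void()> ref) { refs.push_back(std::move(ref)); }
    void traceAfterMinorGC() {
        for (auto &ref : refs)
            ref();
        refs.clear();
    }
};

struct Proto { std::string name; };   // stand-in for a movable key object

using TypeTable = std::unordered_map<std::string, int>;

// After adding an entry keyed on proto->name, buffer a callback that re-keys
// the entry if the collector renamed (i.e. "moved") the proto. The table must
// outlive the buffered callback.
void AddWithPostBarrier(TypeTable &table, StoreBufferish &sb,
                        Proto *proto, int typeId) {
    std::string originalKey = proto->name;
    table[originalKey] = typeId;
    sb.putGeneric([&table, proto, originalKey] {
        if (proto->name != originalKey) {        // key object "moved"
            int value = table[originalKey];
            table.erase(originalKey);
            table[proto->name] = value;          // rekey under new location
        }
    });
}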


@ -1333,7 +1333,6 @@ struct TypeObjectWithNewScriptEntry
: clasp(clasp), hashProto(proto), matchProto(proto), newFunction(newFunction)
{}
#ifdef JSGC_GENERATIONAL
/*
* For use by generational post barriers only. Look up an entry whose
* proto has been moved, but was hashed with the original value.
@ -1341,7 +1340,6 @@ struct TypeObjectWithNewScriptEntry
Lookup(const Class *clasp, TaggedProto hashProto, TaggedProto matchProto, JSFunction *newFunction)
: clasp(clasp), hashProto(hashProto), matchProto(matchProto), newFunction(newFunction)
{}
#endif
};
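The extra Lookup constructor above carries two prototypes because a re-keying post barrier must find the entry in the bucket chosen by the original (pre-move) pointer while comparing against the object's new location. A compact sketch of a hash policy that hashes and matches on different values, with invented names:

#include <cstddef>
#include <functional>

struct Obj { int payload; };

// Lookup key that hashes with the address the entry was originally inserted
// under, but matches entries against the object's current address after a
// moving GC.
struct MovedKeyLookup {
    const Obj *hashKey;    // original location: picks the right bucket
    const Obj *matchKey;   // current location: what stored entries now hold
};

struct MovedKeyHasher {
    static size_t hash(const MovedKeyLookup &l) {
        return std::hash<const Obj *>()(l.hashKey);
    }
    static bool match(const Obj *stored, const MovedKeyLookup &l) {
        return stored == l.matchKey;
    }
};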


@ -2294,11 +2294,9 @@ JSObject::swap(JSContext *cx, HandleObject a, HandleObject b)
* Neither object may be in the nursery, but ensure we update any embedded
* nursery pointers in either object.
*/
#ifdef JSGC_GENERATIONAL
MOZ_ASSERT(!IsInsideNursery(a) && !IsInsideNursery(b));
cx->runtime()->gc.storeBuffer.putWholeCellFromMainThread(a);
cx->runtime()->gc.storeBuffer.putWholeCellFromMainThread(b);
#endif
unsigned r = NotifyGCPreSwap(a, b);
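When many fields change at once, as in JSObject::swap, buffering each edge individually would be wasteful, so the whole cell is recorded and the minor GC simply re-traces every pointer it contains. A toy whole-cell buffer along those lines (invented names, not the StoreBuffer API):

#include <unordered_set>
#include <vector>

struct Cell { std::vector<Cell *> edges; };   // toy object with outgoing pointers

// "Whole cell" remembered set: instead of recording individual slots, record
// the object itself; after a minor GC every edge it holds is re-traced.
struct WholeCellBuffer {
    std::unordered_set<Cell *> cells;

    void putWholeCell(Cell *cell) { cells.insert(cell); }

    template <typename TraceEdge>
    void traceAfterMinorGC(TraceEdge traceEdge) {
        for (Cell *cell : cells)
            for (Cell *&edge : cell->edges)
                traceEdge(edge);              // update edge if its target moved
        cells.clear();
    }
};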


@ -828,14 +828,12 @@ JSObject::writeBarrierPre(JSObject *obj)
JSObject::writeBarrierPost(JSObject *obj, void *cellp)
{
MOZ_ASSERT(cellp);
#ifdef JSGC_GENERATIONAL
if (IsNullTaggedPointer(obj))
return;
MOZ_ASSERT(obj == *static_cast<JSObject **>(cellp));
js::gc::StoreBuffer *storeBuffer = obj->storeBuffer();
if (storeBuffer)
storeBuffer->putCellFromAnyThread(static_cast<js::gc::Cell **>(cellp));
#endif
}
/* static */ MOZ_ALWAYS_INLINE void
@ -844,11 +842,9 @@ JSObject::writeBarrierPostRelocate(JSObject *obj, void *cellp)
MOZ_ASSERT(cellp);
MOZ_ASSERT(obj);
MOZ_ASSERT(obj == *static_cast<JSObject **>(cellp));
#ifdef JSGC_GENERATIONAL
js::gc::StoreBuffer *storeBuffer = obj->storeBuffer();
if (storeBuffer)
storeBuffer->putRelocatableCellFromAnyThread(static_cast<js::gc::Cell **>(cellp));
#endif
}
/* static */ MOZ_ALWAYS_INLINE void
@ -857,10 +853,8 @@ JSObject::writeBarrierPostRemove(JSObject *obj, void *cellp)
MOZ_ASSERT(cellp);
MOZ_ASSERT(obj);
MOZ_ASSERT(obj == *static_cast<JSObject **>(cellp));
#ifdef JSGC_GENERATIONAL
obj->shadowRuntimeFromAnyThread()->gcStoreBufferPtr()->removeRelocatableCellFromAnyThread(
static_cast<js::gc::Cell **>(cellp));
#endif
}
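All of these post-barrier variants reduce to one idea: after writing a pointer into a tenured location, record the address of that slot in a remembered set so the next minor GC can find and fix up the edge without scanning the whole heap. A bare-bones sketch of such a remembered set, with invented names and a boolean standing in for the nursery check:

#include <unordered_set>

struct Cell;                                   // some GC-managed object

// Minimal remembered set: addresses of tenured slots that may point into the
// nursery. The minor GC scans only these instead of the whole heap. The
// "remove" barrier variants correspond to removeCell, used when a slot is
// about to go away.
struct RememberedSet {
    std::unordered_set<Cell **> slots;

    void putCell(Cell **slot)    { slots.insert(slot); }
    void removeCell(Cell **slot) { slots.erase(slot); }
};

// Post barrier: run after the store "*slot = target;" has happened.
inline void WriteBarrierPost(RememberedSet &rs, Cell **slot,
                             bool targetInNursery) {
    if (targetInNursery)
        rs.putCell(slot);   // minor GC will update *slot if the target moves
}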
namespace js {


@ -347,7 +347,6 @@ Shape::fixupAfterMovingGC()
#endif // JSGC_COMPACTING
#ifdef JSGC_GENERATIONAL
void
ShapeGetterSetterRef::mark(JSTracer *trc)
{
@ -376,7 +375,6 @@ ShapeGetterSetterRef::mark(JSTracer *trc)
*objp = obj;
MOZ_ALWAYS_TRUE(kh->putNew(StackShape(shape), shape));
}
#endif
#ifdef DEBUG


@ -21,7 +21,7 @@
#include "js/TypeDecls.h"
#if (defined(JSGC_GENERATIONAL) && defined(JS_GC_ZEAL)) || \
#if (defined(JS_GC_ZEAL)) || \
(defined(JSGC_COMPACTING) && defined(DEBUG))
# define JSGC_HASH_TABLE_CHECKS
#endif
@ -160,30 +160,20 @@ struct Runtime
/* Restrict zone access during Minor GC. */
bool needsIncrementalBarrier_;
#ifdef JSGC_GENERATIONAL
private:
js::gc::StoreBuffer *gcStoreBufferPtr_;
#endif
public:
explicit Runtime(
#ifdef JSGC_GENERATIONAL
js::gc::StoreBuffer *storeBuffer
#endif
)
explicit Runtime(js::gc::StoreBuffer *storeBuffer)
: needsIncrementalBarrier_(false)
#ifdef JSGC_GENERATIONAL
, gcStoreBufferPtr_(storeBuffer)
#endif
{}
bool needsIncrementalBarrier() const {
return needsIncrementalBarrier_;
}
#ifdef JSGC_GENERATIONAL
js::gc::StoreBuffer *gcStoreBufferPtr() { return gcStoreBufferPtr_; }
#endif
static JS::shadow::Runtime *asShadowRuntime(JSRuntime *rt) {
return reinterpret_cast<JS::shadow::Runtime*>(rt);


@ -353,12 +353,10 @@ TryPreserveReflector(JSContext *cx, HandleObject obj)
static inline void
WeakMapPostWriteBarrier(JSRuntime *rt, ObjectValueMap *weakMap, JSObject *key)
{
#ifdef JSGC_GENERATIONAL
// Strip the barriers from the type before inserting into the store buffer.
// This will automatically ensure that barriers do not fire during GC.
if (key && IsInsideNursery(key))
rt->gc.storeBuffer.putGeneric(UnbarrieredRef(weakMap, key));
#endif
}
static MOZ_ALWAYS_INLINE bool


@ -475,7 +475,7 @@ if CONFIG['NIGHTLY_BUILD']:
DEFINES['ENABLE_PARALLEL_JS'] = True
DEFINES['ENABLE_BINARYDATA'] = True
DEFINES['ENABLE_SHARED_ARRAY_BUFFER'] = True
if CONFIG['ENABLE_ION'] and CONFIG['JSGC_GENERATIONAL_CONFIGURED']:
if CONFIG['ENABLE_ION']:
DEFINES['JSGC_FJGENERATIONAL'] = True
DEFINES['EXPORT_JS_API'] = True


@ -5853,9 +5853,7 @@ main(int argc, char **argv, char **envp)
|| !op.addBoolOption('\0', "dump-entrained-variables", "Print variables which are "
"unnecessarily entrained by inner functions")
#endif
#ifdef JSGC_GENERATIONAL
|| !op.addBoolOption('\0', "no-ggc", "Disable Generational GC")
#endif
|| !op.addBoolOption('\0', "no-incremental-gc", "Disable Incremental GC")
|| !op.addIntOption('\0', "available-memory", "SIZE",
"Select GC settings based on available memory (MB)", 0)
@ -5878,9 +5876,7 @@ main(int argc, char **argv, char **envp)
|| !op.addIntOption('\0', "mips-sim-stop-at", "NUMBER", "Stop the MIPS simulator after the given "
"NUMBER of instructions.", -1)
#endif
#ifdef JSGC_GENERATIONAL
|| !op.addIntOption('\0', "nursery-size", "SIZE-MB", "Set the maximum nursery size in MB", 16)
#endif
#ifdef JS_GC_ZEAL
|| !op.addStringOption('z', "gc-zeal", "LEVEL[,N]",
"Specifies zealous garbage collection, overriding the environement "
@ -5951,9 +5947,7 @@ main(int argc, char **argv, char **envp)
return 1;
size_t nurseryBytes = JS::DefaultNurseryBytes;
#ifdef JSGC_GENERATIONAL
nurseryBytes = op.getIntOption("nursery-size") * 1024L * 1024L;
#endif
/* Use the same parameters as the browser in xpcjsruntime.cpp. */
rt = JS_NewRuntime(JS::DefaultHeapMaxBytes, nurseryBytes);
@ -5968,11 +5962,9 @@ main(int argc, char **argv, char **envp)
gInterruptFunc.emplace(rt, NullValue());
JS_SetGCParameter(rt, JSGC_MAX_BYTES, 0xffffffff);
#ifdef JSGC_GENERATIONAL
Maybe<JS::AutoDisableGenerationalGC> noggc;
if (op.getBoolOption("no-ggc"))
noggc.emplace(rt);
#endif
size_t availMem = op.getIntOption("available-memory");
if (availMem > 0)
@ -6036,9 +6028,7 @@ main(int argc, char **argv, char **envp)
for (size_t i = 0; i < workerThreads.length(); i++)
PR_JoinThread(workerThreads[i]);
#ifdef JSGC_GENERATIONAL
noggc.reset();
#endif
JS_DestroyRuntime(rt);
JS_ShutDown();
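The shell keeps its AutoDisableGenerationalGC guard inside a Maybe<> so the RAII object is constructed only when --no-ggc was passed and is explicitly torn down before the runtime is destroyed. The same conditional-RAII shape in standard C++, with std::optional playing the role of mozilla::Maybe and a toy guard in place of the real class:

#include <cstdio>
#include <optional>

// Toy stand-in for JS::AutoDisableGenerationalGC: disables a feature for the
// lifetime of the object and re-enables it in the destructor.
struct DisableFeatureGuard {
    bool *flag;
    explicit DisableFeatureGuard(bool *f) : flag(f) { *flag = false; }
    ~DisableFeatureGuard() { *flag = true; }
};

int main(int argc, char **argv) {
    (void)argv;
    bool featureEnabled = true;
    bool userDisabledIt = (argc > 1);            // pretend this is --no-ggc

    std::optional<DisableFeatureGuard> guard;    // analogous to Maybe<...> noggc
    if (userDisabledIt)
        guard.emplace(&featureEnabled);          // noggc.emplace(rt)

    std::printf("feature enabled: %d\n", featureEnabled);

    guard.reset();                               // noggc.reset() before teardown
    std::printf("feature enabled: %d\n", featureEnabled);
    return 0;
}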


@ -1888,10 +1888,8 @@ UpdateExecutionObservabilityOfScriptsInZone(JSContext *cx, Zone *zone,
{
using namespace js::jit;
#ifdef JSGC_GENERATIONAL
// See note in js::ReleaseAllJITCode.
cx->runtime()->gc.evictNursery();
#endif
AutoSuppressProfilerSampling suppressProfilerSampling(cx);


@ -296,7 +296,7 @@ NativeObject::initDenseElementsUnbarriered(uint32_t dstStart, const Value *src,
*/
MOZ_ASSERT(dstStart + count <= getDenseCapacity());
MOZ_ASSERT(!denseElementsAreCopyOnWrite());
#if defined(DEBUG) && defined(JSGC_GENERATIONAL)
#ifdef DEBUG
/*
* This asserts a global invariant: parallel code does not
* observe objects inside the generational GC's nursery.


@ -412,10 +412,8 @@ NativeObject::setSlotSpan(ThreadSafeContext *cx, HandleNativeObject obj, uint32_
static HeapSlot *
AllocateSlots(ThreadSafeContext *cx, JSObject *obj, uint32_t nslots)
{
#ifdef JSGC_GENERATIONAL
if (cx->isJSContext())
return cx->asJSContext()->runtime()->gc.nursery.allocateSlots(obj, nslots);
#endif
#ifdef JSGC_FJGENERATIONAL
if (cx->isForkJoinContext())
return cx->asForkJoinContext()->nursery().allocateSlots(obj, nslots);
@ -431,12 +429,10 @@ static HeapSlot *
ReallocateSlots(ThreadSafeContext *cx, JSObject *obj, HeapSlot *oldSlots,
uint32_t oldCount, uint32_t newCount)
{
#ifdef JSGC_GENERATIONAL
if (cx->isJSContext()) {
return cx->asJSContext()->runtime()->gc.nursery.reallocateSlots(obj, oldSlots,
oldCount, newCount);
}
#endif
#ifdef JSGC_FJGENERATIONAL
if (cx->isForkJoinContext()) {
return cx->asForkJoinContext()->nursery().reallocateSlots(obj, oldSlots,
@ -483,11 +479,9 @@ NativeObject::growSlots(ThreadSafeContext *cx, HandleNativeObject obj, uint32_t
static void
FreeSlots(ThreadSafeContext *cx, HeapSlot *slots)
{
#ifdef JSGC_GENERATIONAL
// Note: threads without a JSContext do not have access to GGC nursery allocated things.
if (cx->isJSContext())
return cx->asJSContext()->runtime()->gc.nursery.freeSlots(slots);
#endif
#ifdef JSGC_FJGENERATIONAL
if (cx->isForkJoinContext())
return cx->asForkJoinContext()->nursery().freeSlots(slots);
@ -722,10 +716,8 @@ NativeObject::maybeDensifySparseElements(js::ExclusiveContext *cx, HandleNativeO
static ObjectElements *
AllocateElements(ThreadSafeContext *cx, JSObject *obj, uint32_t nelems)
{
#ifdef JSGC_GENERATIONAL
if (cx->isJSContext())
return cx->asJSContext()->runtime()->gc.nursery.allocateElements(obj, nelems);
#endif
#ifdef JSGC_FJGENERATIONAL
if (cx->isForkJoinContext())
return cx->asForkJoinContext()->nursery().allocateElements(obj, nelems);
@ -740,12 +732,10 @@ static ObjectElements *
ReallocateElements(ThreadSafeContext *cx, JSObject *obj, ObjectElements *oldHeader,
uint32_t oldCount, uint32_t newCount)
{
#ifdef JSGC_GENERATIONAL
if (cx->isJSContext()) {
return cx->asJSContext()->runtime()->gc.nursery.reallocateElements(obj, oldHeader,
oldCount, newCount);
}
#endif
#ifdef JSGC_FJGENERATIONAL
if (cx->isForkJoinContext()) {
return cx->asForkJoinContext()->nursery().reallocateElements(obj, oldHeader,


@ -311,12 +311,10 @@ IsObjectValueInCompartment(Value v, JSCompartment *comp);
inline void
DenseRangeWriteBarrierPost(JSRuntime *rt, NativeObject *obj, uint32_t start, uint32_t count)
{
#ifdef JSGC_GENERATIONAL
if (count > 0) {
JS::shadow::Runtime *shadowRuntime = JS::shadow::Runtime::asShadowRuntime(rt);
shadowRuntime->gcStoreBufferPtr()->putSlotFromAnyThread(obj, HeapSlot::Element, start, count);
}
#endif
}
/*
@ -1164,14 +1162,12 @@ class NativeObject : public JSObject
inline void privateWriteBarrierPre(void **oldval);
void privateWriteBarrierPost(void **pprivate) {
#ifdef JSGC_GENERATIONAL
gc::Cell **cellp = reinterpret_cast<gc::Cell **>(pprivate);
MOZ_ASSERT(cellp);
MOZ_ASSERT(*cellp);
gc::StoreBuffer *storeBuffer = (*cellp)->storeBuffer();
if (storeBuffer)
storeBuffer->putCellFromAnyThread(cellp);
#endif
}
/* Private data accessors. */


@ -130,11 +130,7 @@ ReturnZeroSize(const void *p)
}
JSRuntime::JSRuntime(JSRuntime *parentRuntime)
: JS::shadow::Runtime(
#ifdef JSGC_GENERATIONAL
&gc.storeBuffer
#endif
),
: JS::shadow::Runtime(&gc.storeBuffer),
mainThread(this),
parentRuntime(parentRuntime),
interrupt_(false),
@ -438,10 +434,8 @@ JSRuntime::~JSRuntime()
js_delete(ionPcScriptCache);
#ifdef JSGC_GENERATIONAL
gc.storeBuffer.disable();
gc.nursery.disable();
#endif
#if defined(JS_ARM_SIMULATOR) || defined(JS_MIPS_SIMULATOR)
js::jit::DestroySimulatorRuntime(simulatorRuntime_);
@ -474,7 +468,6 @@ JSRuntime::setTelemetryCallback(JSRuntime *rt, JSAccumulateTelemetryDataCallback
void
NewObjectCache::clearNurseryObjects(JSRuntime *rt)
{
#ifdef JSGC_GENERATIONAL
for (unsigned i = 0; i < mozilla::ArrayLength(entries); ++i) {
Entry &e = entries[i];
NativeObject *obj = reinterpret_cast<NativeObject *>(&e.templateObject);
@ -485,7 +478,6 @@ NewObjectCache::clearNurseryObjects(JSRuntime *rt)
PodZero(&e);
}
}
#endif
}
void
@ -530,12 +522,10 @@ JSRuntime::addSizeOfIncludingThis(mozilla::MallocSizeOf mallocSizeOf, JS::Runtim
jitRuntime()->ionAlloc(this)->addSizeOfCode(&rtSizes->code);
rtSizes->gc.marker += gc.marker.sizeOfExcludingThis(mallocSizeOf);
#ifdef JSGC_GENERATIONAL
rtSizes->gc.nurseryCommitted += gc.nursery.sizeOfHeapCommitted();
rtSizes->gc.nurseryDecommitted += gc.nursery.sizeOfHeapDecommitted();
rtSizes->gc.nurseryHugeSlots += gc.nursery.sizeOfHugeSlots(mallocSizeOf);
gc.storeBuffer.addSizeOfExcludingThis(mallocSizeOf, &rtSizes->gc);
#endif
}
static bool


@ -347,10 +347,8 @@ class NewObjectCache
static void copyCachedToObject(JSObject *dst, JSObject *src, gc::AllocKind kind) {
js_memcpy(dst, src, gc::Arena::thingSize(kind));
#ifdef JSGC_GENERATIONAL
Shape::writeBarrierPost(dst->shape_, &dst->shape_);
types::TypeObject::writeBarrierPost(dst->type_, &dst->type_);
#endif
}
};


@ -1879,13 +1879,10 @@ js_IsDebugScopeSlow(ProxyObject *proxy)
DebugScopes::proxiedScopesPostWriteBarrier(JSRuntime *rt, ObjectWeakMap *map,
const PreBarrieredObject &key)
{
#ifdef JSGC_GENERATIONAL
if (key && IsInsideNursery(key))
rt->gc.storeBuffer.putGeneric(UnbarrieredRef(map, key.get()));
#endif
}
#ifdef JSGC_GENERATIONAL
class DebugScopes::MissingScopesRef : public gc::BufferableRef
{
MissingScopeMap *map;
@ -1904,22 +1901,18 @@ class DebugScopes::MissingScopesRef : public gc::BufferableRef
map->rekeyIfMoved(prior, key);
}
};
#endif
/* static */ MOZ_ALWAYS_INLINE void
DebugScopes::missingScopesPostWriteBarrier(JSRuntime *rt, MissingScopeMap *map,
const ScopeIterKey &key)
{
#ifdef JSGC_GENERATIONAL
if (key.enclosingScope() && IsInsideNursery(key.enclosingScope()))
rt->gc.storeBuffer.putGeneric(MissingScopesRef(map, key));
#endif
}
/* static */ MOZ_ALWAYS_INLINE void
DebugScopes::liveScopesPostWriteBarrier(JSRuntime *rt, LiveScopeMap *map, ScopeObject *key)
{
#ifdef JSGC_GENERATIONAL
// As above. Otherwise, barriers could fire during GC when moving the
// value.
typedef HashMap<ScopeObject *,
@ -1929,7 +1922,6 @@ DebugScopes::liveScopesPostWriteBarrier(JSRuntime *rt, LiveScopeMap *map, ScopeO
typedef gc::HashKeyRef<UnbarrieredLiveScopeMap, ScopeObject *> Ref;
if (key && IsInsideNursery(key))
rt->gc.storeBuffer.putGeneric(Ref(reinterpret_cast<UnbarrieredLiveScopeMap *>(map), key));
#endif
}
DebugScopes::DebugScopes(JSContext *cx)


@ -929,7 +929,7 @@ class DebugScopes
public:
void mark(JSTracer *trc);
void sweep(JSRuntime *rt);
#if defined(JSGC_GENERATIONAL) && defined(JS_GC_ZEAL)
#ifdef JS_GC_ZEAL
void checkHashTablesAfterMovingGC(JSRuntime *rt);
#endif
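checkHashTablesAfterMovingGC is a zeal-mode sanity pass: after a moving collection, every table entry must still be reachable by a fresh lookup of its own key, which fails if a key object moved without the entry being re-keyed. A generic sketch of such a check, assuming any map with find()/end() semantics (not the SpiderMonkey routine itself):

#include <cassert>

// Post-GC sanity check in the spirit of checkHashTablesAfterMovingGC:
// every entry must be found again under its own key, at the same slot.
// Usage: CheckTableAfterMovingGC(someUnorderedMap);
template <typename Map>
void CheckTableAfterMovingGC(const Map &table) {
    for (const auto &entry : table) {
        auto it = table.find(entry.first);
        assert(it != table.end() && &*it == &entry);
    }
}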


@ -1590,8 +1590,6 @@ InitialShapeEntry::match(const InitialShapeEntry &key, const Lookup &lookup)
&& lookup.baseFlags == shape->getObjectFlags();
}
#ifdef JSGC_GENERATIONAL
/*
* This class is used to add a post barrier on the initialShapes set, as the key
* is calculated based on several objects which may be moved by generational GC.
@ -1656,8 +1654,6 @@ class InitialShapeSetRef : public BufferableRef
}
};
#endif // JSGC_GENERATIONAL
#ifdef JSGC_HASH_TABLE_CHECKS
void
@ -1745,7 +1741,7 @@ EmptyShape::getInitialShape(ExclusiveContext *cx, const Class *clasp, TaggedProt
if (!p.add(cx, table, lookup, InitialShapeEntry(ReadBarrieredShape(shape), protoRoot)))
return nullptr;
#ifdef JSGC_GENERATIONAL
// Post-barrier for the initial shape table update.
if (cx->isJSContext()) {
if ((protoRoot.isObject() && IsInsideNursery(protoRoot.toObject())) ||
IsInsideNursery(parentRoot.get()) ||
@ -1756,7 +1752,6 @@ EmptyShape::getInitialShape(ExclusiveContext *cx, const Class *clasp, TaggedProt
cx->asJSContext()->runtime()->gc.storeBuffer.putGeneric(ref);
}
}
#endif
return shape;
}


@ -263,7 +263,6 @@ namespace gc {
void MergeCompartments(JSCompartment *source, JSCompartment *target);
}
#ifdef JSGC_GENERATIONAL
// This class is used to add a post barrier on the AccessorShape's getter/setter
// objects. It updates the shape's entry in the parent's KidsHash table.
class ShapeGetterSetterRef : public gc::BufferableRef
@ -278,28 +277,23 @@ class ShapeGetterSetterRef : public gc::BufferableRef
void mark(JSTracer *trc);
};
#endif
static inline void
GetterSetterWriteBarrierPost(AccessorShape *shape, JSObject **objp)
{
#ifdef JSGC_GENERATIONAL
MOZ_ASSERT(shape);
MOZ_ASSERT(objp);
MOZ_ASSERT(*objp);
gc::Cell **cellp = reinterpret_cast<gc::Cell **>(objp);
if (gc::StoreBuffer *sb = (*cellp)->storeBuffer())
sb->putGeneric(ShapeGetterSetterRef(shape, objp));
#endif
}
static inline void
GetterSetterWriteBarrierPostRemove(JSRuntime *rt, JSObject **objp)
{
#ifdef JSGC_GENERATIONAL
JS::shadow::Runtime *shadowRuntime = JS::shadow::Runtime::asShadowRuntime(rt);
shadowRuntime->gcStoreBufferPtr()->removeRelocatableCellFromAnyThread(reinterpret_cast<gc::Cell **>(objp));
#endif
}
class BaseShape : public gc::TenuredCell
@ -1176,7 +1170,6 @@ struct InitialShapeEntry
nfixed(nfixed), baseFlags(baseFlags)
{}
#ifdef JSGC_GENERATIONAL
/*
* For use by generational GC post barriers. Look up an entry whose
* parent and metadata fields may have been moved, but was hashed with
@ -1192,7 +1185,6 @@ struct InitialShapeEntry
hashMetadata(hashMetadata), matchMetadata(matchMetadata),
nfixed(nfixed), baseFlags(baseFlags)
{}
#endif
};
inline InitialShapeEntry();


@ -381,11 +381,7 @@ pref("javascript.options.mem.gc_low_frequency_heap_growth", 120);
pref("javascript.options.mem.high_water_mark", 16);
pref("javascript.options.mem.gc_allocation_threshold_mb", 3);
pref("javascript.options.mem.gc_decommit_threshold_mb", 1);
#ifdef JSGC_GENERATIONAL
pref("javascript.options.mem.gc_min_empty_chunk_count", 1);
#else
pref("javascript.options.mem.gc_min_empty_chunk_count", 0);
#endif
pref("javascript.options.mem.gc_max_empty_chunk_count", 2);
#else
pref("javascript.options.mem.high_water_mark", 32);


@ -90,8 +90,5 @@ if test ! "$RELEASE_BUILD"; then
MOZ_ANDROID_DOWNLOADS_INTEGRATION=1
fi
# Enable generational GC on mobile.
export JSGC_GENERATIONAL=1
# Use the low-memory GC tuning.
export JS_GC_SMALL_CHUNK_SIZE=1


@ -1041,11 +1041,7 @@ pref("javascript.options.mem.gc_dynamic_heap_growth", true);
pref("javascript.options.mem.gc_dynamic_mark_slice", true);
pref("javascript.options.mem.gc_allocation_threshold_mb", 30);
pref("javascript.options.mem.gc_decommit_threshold_mb", 32);
#ifdef JSGC_GENERATIONAL
pref("javascript.options.mem.gc_min_empty_chunk_count", 1);
#else
pref("javascript.options.mem.gc_min_empty_chunk_count", 0);
#endif
pref("javascript.options.mem.gc_max_empty_chunk_count", 30);
pref("javascript.options.showInConsole", false);


@ -55,11 +55,7 @@ RunTest(JSRuntime* rt, JSContext* cx, ArrayT* array)
const char* property = "foo";
for (size_t i = 0; i < ElementCount; ++i) {
RootedObject obj(cx, JS_NewObject(cx, nullptr, JS::NullPtr(), JS::NullPtr()));
#ifdef JSGC_GENERATIONAL
ASSERT_TRUE(js::gc::IsInsideNursery(AsCell(obj)));
#else
ASSERT_FALSE(js::gc::IsInsideNursery(AsCell(obj)));
#endif
value = Int32Value(i);
ASSERT_TRUE(JS_SetProperty(cx, obj, property, value));
array->AppendElement(obj);