Bug 1267551 (part 8) - Use MOZ_MUST_USE more in js/src/gc/. r=terrence.

--HG--
extra : rebase_source : bb87bee366dcc3c5f14f15532a902ac0b5e430a5
Nicholas Nethercote 2016-05-09 11:17:26 +10:00
parent f4698b0aac
commit a36723b8a8
15 changed files with 80 additions and 70 deletions
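
For readers unfamiliar with the annotation this patch spreads through js/src/gc/, here is a minimal, self-contained sketch of what MOZ_MUST_USE buys (assumption: it expands to the compiler's warn_unused_result attribute on GCC/Clang, as mfbt/Attributes.h did around this time; the addRootStandIn function below is hypothetical):

    #if defined(__GNUC__) || defined(__clang__)
    #  define MOZ_MUST_USE __attribute__((warn_unused_result))   // assumed expansion
    #else
    #  define MOZ_MUST_USE
    #endif

    MOZ_MUST_USE bool addRootStandIn(int** rootList, int* vp)
    {
        // Pretend registration can fail, like GCRuntime::addRoot().
        if (!vp)
            return false;
        *rootList = vp;
        return true;
    }

    int main()
    {
        int value = 0;
        int* root = nullptr;
        addRootStandIn(&root, &value);        // compiler warns: result ignored
        if (!addRootStandIn(&root, &value))   // fine: result is checked
            return 1;
        return 0;
    }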

View File

@ -586,7 +586,8 @@ class JS_PUBLIC_API(AutoCheckCannotGC) : public AutoAssertOnGC
/**
* Unsets the gray bit for anything reachable from |thing|. |kind| should not be
* JS::TraceKind::Shape. |thing| should be non-null.
* JS::TraceKind::Shape. |thing| should be non-null. The return value indicates
* if anything was unmarked.
*/
extern JS_FRIEND_API(bool)
UnmarkGrayGCThingRecursively(GCCellPtr thing);
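
Only the documentation changes here; the function already returned bool. A hypothetical caller (name and loop are illustrative, not part of the patch, and namespace qualification is omitted) showing why the documented return value is worth having:

    // Hypothetical: unmark a batch of cells and report whether any gray bit
    // was actually cleared.
    bool UnmarkGrayBatch(const JS::GCCellPtr* cells, size_t count)
    {
        bool unmarkedAny = false;
        for (size_t i = 0; i < count; i++)
            unmarkedAny |= UnmarkGrayGCThingRecursively(cells[i]);
        return unmarkedAny;
    }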

View File

@ -85,7 +85,12 @@ class MOZ_RAII AutoStopVerifyingBarriers
AutoStopVerifyingBarriers(JSRuntime* rt, bool isShutdown)
: gc(&rt->gc)
{
restartPreVerifier = gc->endVerifyPreBarriers() && !isShutdown;
if (gc->isVerifyPreBarriersEnabled()) {
gc->endVerifyPreBarriers();
restartPreVerifier = !isShutdown;
} else {
restartPreVerifier = false;
}
}
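
endVerifyPreBarriers() loses its return value later in this patch, so the constructor now asks the verifier explicitly whether it is running instead of inferring that from the call's result. A reduced sketch of the same transformation with hypothetical names:

    struct Verifier {                       // hypothetical stand-in
        bool enabled = false;
        bool isEnabled() const { return enabled; }
        void stop() { enabled = false; }    // like endVerifyPreBarriers(): void
    };

    bool stopAndRememberRestart(Verifier& v, bool isShutdown)
    {
        if (!v.isEnabled())
            return false;                   // nothing running, nothing to restart
        v.stop();
        return !isShutdown;                 // restart later unless shutting down
    }
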
~AutoStopVerifyingBarriers() {

View File

@ -194,7 +194,7 @@ class GCSchedulingTunables
unsigned minEmptyChunkCount(const AutoLockGC&) const { return minEmptyChunkCount_; }
unsigned maxEmptyChunkCount() const { return maxEmptyChunkCount_; }
bool setParameter(JSGCParamKey key, uint32_t value, const AutoLockGC& lock);
MOZ_MUST_USE bool setParameter(JSGCParamKey key, uint32_t value, const AutoLockGC& lock);
};
/*
@ -584,7 +584,7 @@ class GCRuntime
{
public:
explicit GCRuntime(JSRuntime* rt);
bool init(uint32_t maxbytes, uint32_t maxNurseryBytes);
MOZ_MUST_USE bool init(uint32_t maxbytes, uint32_t maxNurseryBytes);
void finishRoots();
void finish();
@ -593,17 +593,18 @@ class GCRuntime
inline bool upcomingZealousGC();
inline bool needZealousGC();
bool addRoot(Value* vp, const char* name);
MOZ_MUST_USE bool addRoot(Value* vp, const char* name);
void removeRoot(Value* vp);
void setMarkStackLimit(size_t limit, AutoLockGC& lock);
bool setParameter(JSGCParamKey key, uint32_t value, AutoLockGC& lock);
MOZ_MUST_USE bool setParameter(JSGCParamKey key, uint32_t value, AutoLockGC& lock);
uint32_t getParameter(JSGCParamKey key, const AutoLockGC& lock);
bool triggerGC(JS::gcreason::Reason reason);
MOZ_MUST_USE bool triggerGC(JS::gcreason::Reason reason);
void maybeAllocTriggerZoneGC(Zone* zone, const AutoLockGC& lock);
// The return value indicates if we were able to do the GC.
bool triggerZoneGC(Zone* zone, JS::gcreason::Reason reason);
bool maybeGC(Zone* zone);
MOZ_MUST_USE bool maybeGC(Zone* zone);
void maybePeriodicFullGC();
void minorGC(JS::gcreason::Reason reason) {
gcstats::AutoPhase ap(stats, gcstats::PHASE_MINOR_GC);
@ -614,6 +615,7 @@ class GCRuntime
gcstats::AutoPhase ap(stats, gcstats::PHASE_EVICT_NURSERY);
minorGCImpl(reason, nullptr);
}
// The return value indicates whether a major GC was performed.
bool gcIfRequested(JSContext* cx = nullptr);
void gc(JSGCInvocationKind gckind, JS::gcreason::Reason reason);
void startGC(JSGCInvocationKind gckind, JS::gcreason::Reason reason, int64_t millis = 0);
@ -626,7 +628,7 @@ class GCRuntime
void triggerFullGCForAtoms() {
MOZ_ASSERT(fullGCForAtomsRequested_);
fullGCForAtomsRequested_ = false;
triggerGC(JS::gcreason::ALLOC_TRIGGER);
MOZ_RELEASE_ASSERT(triggerGC(JS::gcreason::ALLOC_TRIGGER));
}
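
triggerGC() is now MOZ_MUST_USE, and triggerFullGCForAtoms() treats a failed trigger as impossible, so the result is consumed by MOZ_RELEASE_ASSERT rather than silently dropped. A stand-alone sketch of the idiom (the macro below is only a simplified stand-in for mfbt's MOZ_RELEASE_ASSERT, which is enabled in all build types):

    #include <cstdlib>

    // Simplified stand-in; the real macro lives in mozilla/Assertions.h.
    #define RELEASE_ASSERT(cond) do { if (!(cond)) std::abort(); } while (0)

    bool triggerWork() { return true; }     // imagine this is MOZ_MUST_USE

    void triggerWorkThatMustSucceed()
    {
        // Checking the value inside the assert both satisfies the annotation
        // and enforces "this cannot fail here" even in release builds.
        RELEASE_ASSERT(triggerWork());
    }
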
void runDebugGC();
@ -755,7 +757,7 @@ class GCRuntime
bool isCompactingGCEnabled() const;
void setGrayRootsTracer(JSTraceDataOp traceOp, void* data);
bool addBlackRootsTracer(JSTraceDataOp traceOp, void* data);
MOZ_MUST_USE bool addBlackRootsTracer(JSTraceDataOp traceOp, void* data);
void removeBlackRootsTracer(JSTraceDataOp traceOp, void* data);
void setMaxMallocBytes(size_t value);
@ -770,11 +772,13 @@ class GCRuntime
void setObjectsTenuredCallback(JSObjectsTenuredCallback callback,
void* data);
void callObjectsTenuredCallback();
bool addFinalizeCallback(JSFinalizeCallback callback, void* data);
MOZ_MUST_USE bool addFinalizeCallback(JSFinalizeCallback callback, void* data);
void removeFinalizeCallback(JSFinalizeCallback func);
bool addWeakPointerZoneGroupCallback(JSWeakPointerZoneGroupCallback callback, void* data);
MOZ_MUST_USE bool addWeakPointerZoneGroupCallback(JSWeakPointerZoneGroupCallback callback,
void* data);
void removeWeakPointerZoneGroupCallback(JSWeakPointerZoneGroupCallback callback);
bool addWeakPointerCompartmentCallback(JSWeakPointerCompartmentCallback callback, void* data);
MOZ_MUST_USE bool addWeakPointerCompartmentCallback(JSWeakPointerCompartmentCallback callback,
void* data);
void removeWeakPointerCompartmentCallback(JSWeakPointerCompartmentCallback callback);
JS::GCSliceCallback setSliceCallback(JS::GCSliceCallback callback);
JS::GCNurseryCollectionCallback setNurseryCollectionCallback(
@ -841,7 +845,7 @@ class GCRuntime
#ifdef JS_GC_ZEAL
void startVerifyPreBarriers();
bool endVerifyPreBarriers();
void endVerifyPreBarriers();
void finishVerifier();
bool isVerifyPreBarriersEnabled() const { return !!verifyPreData; }
#else
@ -860,7 +864,7 @@ class GCRuntime
// Allocator
template <AllowGC allowGC>
bool checkAllocatorState(JSContext* cx, AllocKind kind);
MOZ_MUST_USE bool checkAllocatorState(JSContext* cx, AllocKind kind);
template <AllowGC allowGC>
JSObject* tryNewNurseryObject(JSContext* cx, size_t thingSize, size_t nDynamicSlots,
const Class* clasp);
@ -888,7 +892,7 @@ class GCRuntime
void arenaAllocatedDuringGC(JS::Zone* zone, Arena* arena);
// Allocator internals
bool gcIfNeededPerAllocation(JSContext* cx);
MOZ_MUST_USE bool gcIfNeededPerAllocation(JSContext* cx);
template <typename T>
static void checkIncrementalZoneState(ExclusiveContext* cx, T* t);
static void* refillFreeListFromAnyThread(ExclusiveContext* cx, AllocKind thingKind,
@ -921,16 +925,17 @@ class GCRuntime
// Check if the system state is such that GC has been suppressed
// or otherwise delayed.
bool checkIfGCAllowedInCurrentState(JS::gcreason::Reason reason);
MOZ_MUST_USE bool checkIfGCAllowedInCurrentState(JS::gcreason::Reason reason);
gcstats::ZoneGCStats scanZonesBeforeGC();
void collect(bool nonincrementalByAPI, SliceBudget budget, JS::gcreason::Reason reason) JS_HAZ_GC_CALL;
bool gcCycle(bool nonincrementalByAPI, SliceBudget& budget, JS::gcreason::Reason reason);
MOZ_MUST_USE bool gcCycle(bool nonincrementalByAPI, SliceBudget& budget,
JS::gcreason::Reason reason);
void incrementalCollectSlice(SliceBudget& budget, JS::gcreason::Reason reason);
void pushZealSelectedObjects();
void purgeRuntime();
bool beginMarkPhase(JS::gcreason::Reason reason);
MOZ_MUST_USE bool beginMarkPhase(JS::gcreason::Reason reason);
bool shouldPreserveJITCode(JSCompartment* comp, int64_t currentTime,
JS::gcreason::Reason reason);
void bufferGrayRoots();
@ -946,7 +951,7 @@ class GCRuntime
void beginSweepPhase(bool lastGC);
void findZoneGroups();
bool findZoneEdgesForWeakMaps();
MOZ_MUST_USE bool findZoneEdgesForWeakMaps();
void getNextZoneGroup();
void endMarkingZoneGroup();
void beginSweepingZoneGroup();
@ -967,8 +972,8 @@ class GCRuntime
void endCompactPhase(JS::gcreason::Reason reason);
void sweepTypesAfterCompacting(Zone* zone);
void sweepZoneAfterCompacting(Zone* zone);
bool relocateArenas(Zone* zone, JS::gcreason::Reason reason, Arena*& relocatedListOut,
SliceBudget& sliceBudget);
MOZ_MUST_USE bool relocateArenas(Zone* zone, JS::gcreason::Reason reason,
Arena*& relocatedListOut, SliceBudget& sliceBudget);
void updateTypeDescrObjects(MovingTracer* trc, Zone* zone);
void updateCellPointers(MovingTracer* trc, Zone* zone, AllocKinds kinds, size_t bgTaskCount);
void updateAllCellPointers(MovingTracer* trc, Zone* zone);
@ -1047,6 +1052,8 @@ class GCRuntime
*/
mozilla::Atomic<uint32_t, mozilla::ReleaseAcquire> numArenasFreeCommitted;
VerifyPreTracer* verifyPreData;
private:
bool chunkAllocationSinceLastGC;
int64_t nextFullGCTime;
int64_t lastGCTime;

View File

@ -17,7 +17,7 @@ namespace gc {
#ifdef JS_GC_TRACE
extern bool InitTrace(GCRuntime& gc);
extern MOZ_MUST_USE bool InitTrace(GCRuntime& gc);
extern void FinishTrace();
extern bool TraceEnabled();
extern void TraceNurseryAlloc(Cell* thing, size_t size);
@ -33,7 +33,7 @@ extern void TraceTypeNewScript(js::ObjectGroup* group);
#else
inline bool InitTrace(GCRuntime& gc) { return true; }
inline MOZ_MUST_USE bool InitTrace(GCRuntime& gc) { return true; }
inline void FinishTrace() {}
inline bool TraceEnabled() { return false; }
inline void TraceNurseryAlloc(Cell* thing, size_t size) {}
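
Because the non-JS_GC_TRACE inline version still returns true, one MOZ_MUST_USE-friendly call site can serve both configurations. A hypothetical initialization sketch (the wrapper function is assumed, not shown in this patch):

    // Hypothetical wrapper: the check is real in JS_GC_TRACE builds and folds
    // away to "return true" otherwise.
    bool initGCTracing(js::gc::GCRuntime& gc)
    {
        if (!js::gc::InitTrace(gc))
            return false;
        return true;
    }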

View File

@ -53,6 +53,7 @@ extern bool
CurrentThreadIsIonCompiling();
#endif
// The return value indicates if anything was unmarked.
extern bool
UnmarkGrayCellRecursively(gc::Cell* cell, JS::TraceKind kind);
@ -295,6 +296,7 @@ class TenuredCell : public Cell
// Mark bit management.
MOZ_ALWAYS_INLINE bool isMarked(uint32_t color = BLACK) const;
// The return value indicates if the cell went from unmarked to marked.
MOZ_ALWAYS_INLINE bool markIfUnmarked(uint32_t color = BLACK) const;
MOZ_ALWAYS_INLINE void unmark(uint32_t color) const;
MOZ_ALWAYS_INLINE void copyMarkBitsFrom(const TenuredCell* src);
@ -877,6 +879,7 @@ struct ChunkBitmap
return *word & mask;
}
// The return value indicates if the cell went from unmarked to marked.
MOZ_ALWAYS_INLINE bool markIfUnmarked(const Cell* cell, uint32_t color) {
uintptr_t* word, mask;
getMarkWordAndMask(cell, BLACK, &word, &mask);
@ -995,7 +998,7 @@ struct Chunk
void releaseArena(JSRuntime* rt, Arena* arena, const AutoLockGC& lock);
void recycleArena(Arena* arena, SortedArenaList& dest, size_t thingsPerArena);
bool decommitOneFreeArena(JSRuntime* rt, AutoLockGC& lock);
MOZ_MUST_USE bool decommitOneFreeArena(JSRuntime* rt, AutoLockGC& lock);
void decommitAllArenasWithoutUnlocking(const AutoLockGC& lock);
static Chunk* allocate(JSRuntime* rt);
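
The new comments spell out that markIfUnmarked() returns true only on the unmarked-to-marked transition, which is what makes the usual marking idiom terminate. A hypothetical sketch (pushChildren is an assumed helper):

    // Hypothetical: children are pushed only on the unmarked -> marked
    // transition, so each reachable cell is traced at most once.
    void markAndMaybeTrace(js::GCMarker& marker, js::gc::TenuredCell* cell)
    {
        if (cell->markIfUnmarked(js::gc::BLACK))
            pushChildren(marker, cell);     // assumed helper
    }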

View File

@ -87,13 +87,13 @@ class MarkStack
end_ = stack + capacity;
}
bool init(JSGCMode gcMode);
MOZ_MUST_USE bool init(JSGCMode gcMode);
void setBaseCapacity(JSGCMode mode);
size_t maxCapacity() const { return maxCapacity_; }
void setMaxCapacity(size_t maxCapacity);
bool push(uintptr_t item) {
MOZ_MUST_USE bool push(uintptr_t item) {
if (tos_ == end_) {
if (!enlarge(1))
return false;
@ -103,7 +103,7 @@ class MarkStack
return true;
}
bool push(uintptr_t item1, uintptr_t item2, uintptr_t item3) {
MOZ_MUST_USE bool push(uintptr_t item1, uintptr_t item2, uintptr_t item3) {
uintptr_t* nextTos = tos_ + 3;
if (nextTos > end_) {
if (!enlarge(3))
@ -130,7 +130,7 @@ class MarkStack
void reset();
/* Grow the stack, ensuring there is space for at least count elements. */
bool enlarge(unsigned count);
MOZ_MUST_USE bool enlarge(unsigned count);
void setGCMode(JSGCMode gcMode);
@ -168,7 +168,7 @@ class GCMarker : public JSTracer
{
public:
explicit GCMarker(JSRuntime* rt);
bool init(JSGCMode gcMode);
MOZ_MUST_USE bool init(JSGCMode gcMode);
void setMaxCapacity(size_t maxCap) { stack.setMaxCapacity(maxCap); }
size_t maxCapacity() const { return stack.maxCapacity(); }
@ -216,7 +216,7 @@ class GCMarker : public JSTracer
void delayMarkingArena(gc::Arena* arena);
void delayMarkingChildren(const void* thing);
void markDelayedChildren(gc::Arena* arena);
bool markDelayedChildren(SliceBudget& budget);
MOZ_MUST_USE bool markDelayedChildren(SliceBudget& budget);
bool hasDelayedChildren() const {
return !!unmarkedArenaStackTop;
}
@ -225,7 +225,7 @@ class GCMarker : public JSTracer
return isMarkStackEmpty() && !unmarkedArenaStackTop;
}
bool drainMarkStack(SliceBudget& budget);
MOZ_MUST_USE bool drainMarkStack(SliceBudget& budget);
void setGCMode(JSGCMode mode) { stack.setGCMode(mode); }
@ -289,7 +289,7 @@ class GCMarker : public JSTracer
// Mark the given GC thing, but do not trace its children. Return true
// if the thing became marked.
template <typename T>
bool mark(T* thing);
MOZ_MUST_USE bool mark(T* thing);
void pushTaggedPtr(StackTag tag, void* ptr) {
checkZone(ptr);
@ -319,7 +319,7 @@ class GCMarker : public JSTracer
return stack.isEmpty();
}
bool restoreValueArray(JSObject* obj, void** vpp, void** endp);
MOZ_MUST_USE bool restoreValueArray(JSObject* obj, void** vpp, void** endp);
void saveValueRanges();
inline void processMarkStackTop(SliceBudget& budget);
@ -451,6 +451,7 @@ struct RewrapTaggedPointer<Value, T>
} /* namespace gc */
// The return value indicates if anything was unmarked.
bool
UnmarkGrayShapeRecursively(Shape* shape);
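
With MarkStack::push() and GCMarker::drainMarkStack() now MOZ_MUST_USE, an out-of-memory failure while growing the mark stack must be propagated or handled at every call site. A hypothetical helper using the push() declared above:

    // Hypothetical: either both items end up on the mark stack or the caller
    // learns that enlarging it failed.
    MOZ_MUST_USE bool pushPair(MarkStack& stack, uintptr_t a, uintptr_t b)
    {
        if (!stack.push(a))
            return false;
        return stack.push(b);
    }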

View File

@ -262,14 +262,13 @@ MarkPagesUnused(void* p, size_t size)
return p2 == p;
}
bool
void
MarkPagesInUse(void* p, size_t size)
{
if (!DecommitEnabled())
return true;
return;
MOZ_ASSERT(OffsetFromAligned(p, pageSize) == 0);
return true;
}
size_t
@ -320,7 +319,6 @@ bool
MarkPagesInUse(void* p, size_t size)
{
MOZ_ASSERT(OffsetFromAligned(p, pageSize) == 0);
return true;
}
size_t
@ -399,10 +397,9 @@ bool
MarkPagesInUse(void* p, size_t size)
{
if (!DecommitEnabled())
return true;
return;
MOZ_ASSERT(OffsetFromAligned(p, pageSize) == 0);
return true;
}
size_t
@ -667,14 +664,13 @@ MarkPagesUnused(void* p, size_t size)
return result != -1;
}
bool
void
MarkPagesInUse(void* p, size_t size)
{
if (!DecommitEnabled())
return true;
return;
MOZ_ASSERT(OffsetFromAligned(p, pageSize) == 0);
return true;
}
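
MarkPagesInUse() returned true on every path, so the bool carried no information; making it void across the platform implementations in this file is the flip side of adding MOZ_MUST_USE to calls that can actually fail. A hypothetical caller sketch contrasting it with MarkPagesUnused(), which keeps its meaningful bool:

    // Hypothetical callers: MarkPagesUnused() can genuinely fail and keeps its
    // bool; MarkPagesInUse() cannot, so there is no result left to check.
    bool decommitRange(void* p, size_t size)
    {
        return MarkPagesUnused(p, size);    // may legitimately report failure
    }

    void recommitRange(void* p, size_t size)
    {
        MarkPagesInUse(p, size);            // void: at worst a no-op
    }
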
size_t

View File

@ -29,7 +29,7 @@ bool MarkPagesUnused(void* p, size_t size);
// Undo |MarkPagesUnused|: tell the OS that the given pages are of interest
// and should be paged in and out normally. This may be a no-op on some
// platforms.
bool MarkPagesInUse(void* p, size_t size);
void MarkPagesInUse(void* p, size_t size);
// Returns #(hard faults) + #(soft faults)
size_t GetPageFaultCount();

View File

@ -110,7 +110,7 @@ class Nursery
{}
~Nursery();
bool init(uint32_t maxNurseryBytes);
MOZ_MUST_USE bool init(uint32_t maxNurseryBytes);
bool exists() const { return numNurseryChunks_ != 0; }
size_t numChunks() const { return numNurseryChunks_; }
@ -171,7 +171,7 @@ class Nursery
* sets |*ref| to the new location of the object and returns true. Otherwise
* returns false and leaves |*ref| unset.
*/
MOZ_ALWAYS_INLINE bool getForwardedPointer(JSObject** ref) const;
MOZ_ALWAYS_INLINE MOZ_MUST_USE bool getForwardedPointer(JSObject** ref) const;
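
The comment above already states the contract; MOZ_MUST_USE now makes the compiler insist that callers look at the answer before trusting |*ref|. A hypothetical edge-update sketch:

    // Hypothetical edge update during minor GC: only treat |*objp| as updated
    // when the call reports that the object really was forwarded.
    void updateEdge(const js::Nursery& nursery, JSObject** objp)
    {
        if (nursery.getForwardedPointer(objp)) {
            // *objp now points at the moved object.
        }
        // On false, *objp is left untouched.
    }
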
/* Forward a slots/elements pointer stored in an Ion frame. */
void forwardBufferPointer(HeapSlot** pSlotsElems);
@ -188,7 +188,7 @@ class Nursery
void waitBackgroundFreeEnd();
bool addedUniqueIdToCell(gc::Cell* cell) {
MOZ_MUST_USE bool addedUniqueIdToCell(gc::Cell* cell) {
if (!IsInsideNursery(cell) || !isEnabled())
return true;
MOZ_ASSERT(cellsWithUid_.initialized());

View File

@ -160,7 +160,7 @@ struct Statistics
/* Create a convenient type for referring to tables of phase times. */
using PhaseTimeTable = int64_t[NumTimingArrays][PHASE_LIMIT];
static bool initialize();
static MOZ_MUST_USE bool initialize();
explicit Statistics(JSRuntime* rt);
~Statistics();
@ -174,8 +174,8 @@ struct Statistics
void endSlice();
void setSliceCycleCount(unsigned cycleCount);
bool startTimingMutator();
bool stopTimingMutator(double& mutator_ms, double& gc_ms);
MOZ_MUST_USE bool startTimingMutator();
MOZ_MUST_USE bool stopTimingMutator(double& mutator_ms, double& gc_ms);
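
Both timing calls are fallible (presumably when mutator timing is already, or not yet, active), hence MOZ_MUST_USE. A hypothetical measurement helper:

    // Hypothetical: bracket a workload with the mutator timers and surface a
    // failure to start/stop instead of silently reporting bogus times.
    bool timeWorkload(js::gcstats::Statistics& stats, double& mutatorMs, double& gcMs)
    {
        if (!stats.startTimingMutator())
            return false;
        // ... run the workload under test ...
        return stats.stopTimingMutator(mutatorMs, gcMs);
    }
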
void reset(const char* reason) {
if (!aborted)

View File

@ -66,11 +66,11 @@ StoreBuffer::disable()
enabled_ = false;
}
bool
void
StoreBuffer::clear()
{
if (!enabled_)
return true;
return;
aboutToOverflow_ = false;
cancelIonCompilations_ = false;
@ -80,8 +80,6 @@ StoreBuffer::clear()
bufferSlot.clear();
bufferWholeCell.clear();
bufferGeneric.clear();
return true;
}
void

View File

@ -74,7 +74,7 @@ class StoreBuffer
explicit MonoTypeBuffer() : last_(T()) {}
~MonoTypeBuffer() { stores_.finish(); }
bool init() {
MOZ_MUST_USE bool init() {
if (!stores_.initialized() && !stores_.init())
return false;
clear();
@ -141,7 +141,7 @@ class StoreBuffer
explicit GenericBuffer() : storage_(nullptr) {}
~GenericBuffer() { js_delete(storage_); }
bool init() {
MOZ_MUST_USE bool init() {
if (!storage_)
storage_ = js_new<LifoAlloc>(LifoAllocBlockSize);
clear();
@ -410,7 +410,7 @@ class StoreBuffer
void disable();
bool isEnabled() const { return enabled_; }
bool clear();
void clear();
/* Get the overflowed status. */
bool isAboutToOverflow() const { return aboutToOverflow_; }
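
clear() never failed and becomes void, while the per-buffer init() calls can fail and become MOZ_MUST_USE. A hypothetical member-style sketch (initAll() is assumed; the buffer names are taken from the clear() hunk earlier in this commit):

    // Hypothetical: whatever enables the store buffer now has to propagate a
    // sub-buffer init() failure instead of ignoring it.
    bool StoreBuffer::initAll()
    {
        return bufferSlot.init() &&
               bufferWholeCell.init() &&
               bufferGeneric.init();
    }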

View File

@ -302,13 +302,13 @@ AssertMarkedOrAllocated(const EdgeValue& edge)
MOZ_CRASH();
}
bool
void
gc::GCRuntime::endVerifyPreBarriers()
{
VerifyPreTracer* trc = verifyPreData;
if (!trc)
return false;
return;
MOZ_ASSERT(!JS::IsGenerationalGCEnabled(rt));
@ -357,7 +357,6 @@ gc::GCRuntime::endVerifyPreBarriers()
marker.stop();
js_delete(trc);
return true;
}
/*** Barrier Verifier Scheduling ***/

View File

@ -672,7 +672,7 @@ class ZoneAllocPolicy
void free_(void* p) { js_free(p); }
void reportAllocOverflow() const {}
bool checkSimulatedOOM() const {
MOZ_MUST_USE bool checkSimulatedOOM() const {
return !js::oom::ShouldFailWithOOM();
}
};
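
checkSimulatedOOM() reports whether a fuzzer-injected OOM should be simulated at this point, so ignoring its answer would defeat the purpose. A hypothetical allocation wrapper (allocBytes is assumed):

    // Hypothetical: honour the simulated-OOM decision before touching the real
    // allocator, so fuzzing builds can force a failure at this call site.
    void* allocBytes(js::ZoneAllocPolicy& policy, size_t nbytes)
    {
        if (!policy.checkSimulatedOOM())
            return nullptr;
        return js_malloc(nbytes);
    }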

View File

@ -3382,7 +3382,7 @@ GCRuntime::triggerZoneGC(Zone* zone, JS::gcreason::Reason reason)
#ifdef JS_GC_ZEAL
if (hasZealMode(ZealMode::Alloc)) {
triggerGC(reason);
MOZ_RELEASE_ASSERT(triggerGC(reason));
return true;
}
#endif
@ -3395,7 +3395,7 @@ GCRuntime::triggerZoneGC(Zone* zone, JS::gcreason::Reason reason)
fullGCForAtomsRequested_ = true;
return false;
}
triggerGC(reason);
MOZ_RELEASE_ASSERT(triggerGC(reason));
return true;
}
@ -4360,8 +4360,8 @@ GCRuntime::markWeakReferences(gcstats::Phase phase)
marker.enterWeakMarkingMode();
// TODO bug 1167452: Make weak marking incremental
SliceBudget budget = SliceBudget::unlimited();
marker.drainMarkStack(budget);
auto unlimited = SliceBudget::unlimited();
MOZ_RELEASE_ASSERT(marker.drainMarkStack(unlimited));
for (;;) {
bool markedAny = false;
@ -4380,7 +4380,7 @@ GCRuntime::markWeakReferences(gcstats::Phase phase)
break;
auto unlimited = SliceBudget::unlimited();
marker.drainMarkStack(unlimited);
MOZ_RELEASE_ASSERT(marker.drainMarkStack(unlimited));
}
MOZ_ASSERT(marker.isDrained());
@ -4407,7 +4407,7 @@ GCRuntime::markGrayReferences(gcstats::Phase phase)
(*op)(&marker, grayRootTracer.data);
}
auto unlimited = SliceBudget::unlimited();
marker.drainMarkStack(unlimited);
MOZ_RELEASE_ASSERT(marker.drainMarkStack(unlimited));
}
void
@ -4568,9 +4568,9 @@ js::gc::MarkingValidator::nonIncrementalMark()
gc->markRuntime(gcmarker, GCRuntime::MarkRuntime);
auto unlimited = SliceBudget::unlimited();
gc->incrementalState = MARK;
gc->marker.drainMarkStack(unlimited);
auto unlimited = SliceBudget::unlimited();
MOZ_RELEASE_ASSERT(gc->marker.drainMarkStack(unlimited));
}
gc->incrementalState = SWEEP;
@ -5031,7 +5031,7 @@ MarkIncomingCrossCompartmentPointers(JSRuntime* rt, const uint32_t color)
}
auto unlimited = SliceBudget::unlimited();
rt->gc.marker.drainMarkStack(unlimited);
MOZ_RELEASE_ASSERT(rt->gc.marker.drainMarkStack(unlimited));
}
static bool
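
A closing note on the drainMarkStack() call sites above: with SliceBudget::unlimited() the drain presumably can only return false if something is badly wrong, so MOZ_RELEASE_ASSERT both consumes the MOZ_MUST_USE result and enforces that invariant. A reduced hypothetical sketch:

    // Hypothetical: a full, non-incremental drain must always complete.
    void drainFully(js::GCMarker& marker)
    {
        auto unlimited = js::SliceBudget::unlimited();
        MOZ_RELEASE_ASSERT(marker.drainMarkStack(unlimited));
        MOZ_ASSERT(marker.isDrained());
    }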