Bug 1276908 - Reimplement whole cell store buffer using a bit vector associated with the arena r=terrence

This commit is contained in:
Jon Coppeard 2016-06-21 15:30:35 +01:00
parent 926ea71286
commit 8beac5161e
11 changed files with 236 additions and 96 deletions

View File

@ -17,6 +17,8 @@
#include "jsobjinlines.h"
#include "gc/Heap-inl.h"
using namespace js;
using namespace gc;

29
js/src/gc/Heap-inl.h Normal file
View File

@ -0,0 +1,29 @@
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
* vim: set ts=8 sts=4 et sw=4 tw=99:
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#ifndef gc_Heap_inl_h
#define gc_Heap_inl_h
#include "gc/StoreBuffer.h"
// Initialize a freshly-acquired arena for use by |zoneArg| with cells of
// allocation kind |kind|. The arena must be in its released state: the
// asserts below check every field left behind by the previous release.
inline void
js::gc::Arena::init(JS::Zone* zoneArg, AllocKind kind)
{
MOZ_ASSERT(firstFreeSpan.isEmpty());
MOZ_ASSERT(!zone);
MOZ_ASSERT(!allocated());
MOZ_ASSERT(!hasDelayedMarking);
MOZ_ASSERT(!allocatedDuringIncremental);
MOZ_ASSERT(!markOverflow);
MOZ_ASSERT(!auxNextLink);
zone = zoneArg;
allocKind = size_t(kind);
setAsFullyUnused();
// Point at the shared empty sentinel rather than nullptr so callers can
// test bufferedCells->isEmpty() without a null check.
bufferedCells = &ArenaCellSet::Empty;
}
#endif

View File

@ -63,6 +63,7 @@ TraceManuallyBarrieredGenericPointerEdge(JSTracer* trc, gc::Cell** thingp, const
namespace gc {
class Arena;
class ArenaCellSet;
class ArenaList;
class SortedArenaList;
struct Chunk;
@ -328,16 +329,23 @@ class TenuredCell : public Cell
/* Cells are aligned to CellShift, so the largest tagged null pointer is: */
const uintptr_t LargestTaggedNullCellPointer = (1 << CellShift) - 1;
/*
 * Divide |numerator| by |divisor|, rounding up.
 *
 * Written as div + mod rather than (numerator + divisor - 1) / divisor so
 * that the computation cannot overflow when |numerator| is close to
 * SIZE_MAX. A single return expression keeps this valid under the C++11
 * constexpr rules MOZ_CONSTEXPR may expand to.
 */
MOZ_CONSTEXPR size_t
DivideAndRoundUp(size_t numerator, size_t divisor) {
return numerator / divisor + (numerator % divisor != 0);
}
const size_t ArenaCellCount = ArenaSize / CellSize;
static_assert(ArenaSize % CellSize == 0, "Arena size must be a multiple of cell size");
/*
The mark bitmap has one bit per GC cell. For multi-cell GC things this
wastes space but allows us to avoid expensive divisions by the thing's size
when accessing the bitmap. In addition this allows us to use some bits for colored
* marking during the cycle GC.
*/
const size_t ArenaCellCount = size_t(1) << (ArenaShift - CellShift);
const size_t ArenaBitmapBits = ArenaCellCount;
const size_t ArenaBitmapBytes = ArenaBitmapBits / 8;
const size_t ArenaBitmapWords = ArenaBitmapBits / JS_BITS_PER_WORD;
const size_t ArenaBitmapBytes = DivideAndRoundUp(ArenaBitmapBits, 8);
const size_t ArenaBitmapWords = DivideAndRoundUp(ArenaBitmapBits, JS_BITS_PER_WORD);
/*
* A FreeSpan represents a contiguous sequence of free cells in an Arena. It
@ -521,8 +529,11 @@ class Arena
"Arena::auxNextLink packing assumes that ArenaShift has "
"enough bits to cover allocKind and hasDelayedMarking.");
/* Extra field for content-specific data. */
void* extra;
/*
* If non-null, points to an ArenaCellSet that represents the set of cells
* in this arena that are in the nursery's store buffer.
*/
ArenaCellSet* bufferedCells;
/*
* The size of data should be |ArenaSize - offsetof(data)|, but the offset
@ -532,20 +543,7 @@ class Arena
*/
uint8_t data[ArenaSize - ArenaHeaderSize];
void init(JS::Zone* zoneArg, AllocKind kind) {
MOZ_ASSERT(firstFreeSpan.isEmpty());
MOZ_ASSERT(!zone);
MOZ_ASSERT(!allocated());
MOZ_ASSERT(!hasDelayedMarking);
MOZ_ASSERT(!allocatedDuringIncremental);
MOZ_ASSERT(!markOverflow);
MOZ_ASSERT(!auxNextLink);
zone = zoneArg;
allocKind = size_t(kind);
setAsFullyUnused();
extra = nullptr;
}
void init(JS::Zone* zoneArg, AllocKind kind);
// Sets |firstFreeSpan| to the Arena's entire valid range, and
// also sets the next span stored at |firstFreeSpan.last| as empty.
@ -565,6 +563,7 @@ class Arena
allocatedDuringIncremental = 0;
markOverflow = 0;
auxNextLink = 0;
bufferedCells = nullptr;
}
uintptr_t address() const {

View File

@ -2051,8 +2051,6 @@ js::gc::StoreBuffer::MonoTypeBuffer<T>::trace(StoreBuffer* owner, TenuringTracer
namespace js {
namespace gc {
template void
StoreBuffer::MonoTypeBuffer<StoreBuffer::WholeCellEdges>::trace(StoreBuffer*, TenuringTracer&);
template void
StoreBuffer::MonoTypeBuffer<StoreBuffer::ValueEdge>::trace(StoreBuffer*, TenuringTracer&);
template void
StoreBuffer::MonoTypeBuffer<StoreBuffer::SlotsEdge>::trace(StoreBuffer*, TenuringTracer&);
@ -2088,15 +2086,11 @@ js::gc::StoreBuffer::SlotsEdge::trace(TenuringTracer& mover) const
}
void
js::gc::StoreBuffer::WholeCellEdges::trace(TenuringTracer& mover) const
js::gc::StoreBuffer::traceWholeCell(TenuringTracer& mover, JS::TraceKind kind, Cell* edge)
{
MOZ_ASSERT(edge->isTenured());
JS::TraceKind kind = edge->getTraceKind();
if (kind == JS::TraceKind::Object) {
JSObject *object = static_cast<JSObject*>(edge);
if (object->is<NativeObject>())
object->as<NativeObject>().clearInWholeCellBuffer();
mover.traceObject(object);
// Additionally trace the expando object attached to any unboxed plain
@ -2120,6 +2114,27 @@ js::gc::StoreBuffer::WholeCellEdges::trace(TenuringTracer& mover) const
MOZ_CRASH();
}
// Trace every cell recorded in the whole-cell store buffer, then clear the
// buffer. The buffer is a linked list of per-arena ArenaCellSets.
void
js::gc::StoreBuffer::traceWholeCells(TenuringTracer& mover)
{
for (ArenaCellSet* cells = bufferWholeCell; cells; cells = cells->next) {
Arena* arena = cells->arena;
MOZ_ASSERT(arena->bufferedCells == cells);
// Detach the set from the arena before tracing — presumably so that a
// cell buffered again during tracing gets a fresh set; confirm against
// putWholeCell.
arena->bufferedCells = &ArenaCellSet::Empty;
// All cells in an arena share one alloc kind, so the trace kind is
// computed once per arena.
JS::TraceKind kind = MapAllocToTraceKind(arena->getAllocKind());
for (size_t i = 0; i < ArenaCellCount; i++) {
if (cells->hasCell(i)) {
// Recover the cell's address from its index within the arena.
auto cell = reinterpret_cast<Cell*>(uintptr_t(arena) + CellSize * i);
traceWholeCell(mover, kind, cell);
}
}
}
bufferWholeCell = nullptr;
}
void
js::gc::StoreBuffer::CellPtrEdge::trace(TenuringTracer& mover) const
{

View File

@ -218,6 +218,11 @@ class Nursery
return heapEnd_;
}
// Approximate free space remaining in the nursery; chunk trailers are
// not accounted for.
MOZ_ALWAYS_INLINE size_t approxFreeSpace() const {
    const size_t bytesRemaining = heapEnd_ - position_;
    return bytesRemaining;
}
#ifdef JS_GC_ZEAL
void enterZealMode();
void leaveZealMode();

View File

@ -7,21 +7,66 @@
#ifndef gc_StoreBuffer_inl_h
#define gc_StoreBuffer_inl_h
#include "gc/StoreBuffer.h"
#include "gc/Heap.h"
namespace js {
namespace gc {
// Map a tenured cell to its cell index within its arena.
//
// The original assert masked with ~ArenaMask, which yields the arena base
// address — always CellSize-aligned, so the check was vacuous. Assert on
// the offset within the arena instead, which actually catches pointers
// that are not cell-aligned.
inline /* static */ size_t
ArenaCellSet::getCellIndex(const TenuredCell* cell)
{
uintptr_t cellOffset = uintptr_t(cell) & ArenaMask;
MOZ_ASSERT(cellOffset % CellSize == 0);
return cellOffset / CellSize;
}
// Translate a cell index into the word index and bit mask used by the
// underlying BitArray storage.
inline /* static */ void
ArenaCellSet::getWordIndexAndMask(size_t cellIndex, size_t* wordp, uint32_t* maskp)
{
BitArray<ArenaCellCount>::getIndexAndMask(cellIndex, wordp, maskp);
}
// Test whether the cell at |cellIndex| within this set's arena has been
// recorded in the whole-cell buffer.
inline bool
ArenaCellSet::hasCell(size_t cellIndex) const
{
MOZ_ASSERT(cellIndex < ArenaCellCount);
return bits.get(cellIndex);
}
// Record the cell at |cellIndex| within this set's arena. Must not be
// called on the shared Empty sentinel (see check()).
inline void
ArenaCellSet::putCell(size_t cellIndex)
{
MOZ_ASSERT(cellIndex < ArenaCellCount);
bits.set(cellIndex);
}
// Debug-only consistency checks: the Empty sentinel has no bits set and a
// null arena; any non-sentinel set must be the one its arena points at.
inline void
ArenaCellSet::check() const
{
#ifdef DEBUG
bool bitsZero = bits.isAllClear();
MOZ_ASSERT(isEmpty() == bitsZero);
MOZ_ASSERT(isEmpty() == !arena);
MOZ_ASSERT_IF(!isEmpty(), arena->bufferedCells == this);
#endif
}
// Record that the tenured cell |cell| must be traced in its entirety at the
// next minor GC, by setting its bit in the owning arena's ArenaCellSet.
//
// NOTE(review): the pasted body mixed lines removed by this patch (the
// NativeObject IN_WHOLE_CELL_BUFFER flag path and the old
// put(bufferWholeCell, WholeCellEdges(cell)) call) with the new code,
// leaving unbalanced braces and references to deleted APIs. Reconstructed
// to the new ArenaCellSet-based implementation.
inline void
StoreBuffer::putWholeCell(Cell* cell)
{
MOZ_ASSERT(cell->isTenured());

Arena* arena = cell->asTenured().arena();
ArenaCellSet* cells = arena->bufferedCells;
if (cells->isEmpty()) {
// Lazily allocate a cell set for this arena. This fails only when the
// nursery is disabled, in which case there is nothing to record.
cells = AllocateWholeCellSet(arena);
if (!cells)
return;
}

cells->putCell(&cell->asTenured());
cells->check();
}
} // namespace gc

View File

@ -4,7 +4,7 @@
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "gc/StoreBuffer.h"
#include "gc/StoreBuffer-inl.h"
#include "mozilla/Assertions.h"
@ -45,7 +45,6 @@ StoreBuffer::enable()
if (!bufferVal.init() ||
!bufferCell.init() ||
!bufferSlot.init() ||
!bufferWholeCell.init() ||
!bufferGeneric.init())
{
return false;
@ -78,8 +77,11 @@ StoreBuffer::clear()
bufferVal.clear();
bufferCell.clear();
bufferSlot.clear();
bufferWholeCell.clear();
bufferGeneric.clear();
for (ArenaCellSet* set = bufferWholeCell; set; set = set->next)
set->arena->bufferedCells = nullptr;
bufferWholeCell = nullptr;
}
void
@ -99,11 +101,53 @@ StoreBuffer::addSizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf, JS::GCSi
sizes->storeBufferVals += bufferVal.sizeOfExcludingThis(mallocSizeOf);
sizes->storeBufferCells += bufferCell.sizeOfExcludingThis(mallocSizeOf);
sizes->storeBufferSlots += bufferSlot.sizeOfExcludingThis(mallocSizeOf);
sizes->storeBufferWholeCells += bufferWholeCell.sizeOfExcludingThis(mallocSizeOf);
sizes->storeBufferGenerics += bufferGeneric.sizeOfExcludingThis(mallocSizeOf);
for (ArenaCellSet* set = bufferWholeCell; set; set = set->next)
sizes->storeBufferWholeCells += sizeof(ArenaCellSet);
}
// Push |set| onto the head of the singly-linked whole-cell buffer list.
void
StoreBuffer::addToWholeCellBuffer(ArenaCellSet* set)
{
ArenaCellSet* oldHead = bufferWholeCell;
set->next = oldHead;
bufferWholeCell = set;
}
// The shared sentinel used by every arena with no buffered cells; tested
// via ArenaCellSet::isEmpty().
ArenaCellSet ArenaCellSet::Empty(nullptr);

ArenaCellSet::ArenaCellSet(Arena* arena)
: arena(arena), next(nullptr)
{
// Start with no cells recorded.
bits.clear(false);
}
// Allocate a new ArenaCellSet for |arena| in the nursery, attach it to the
// arena, and link it into the store buffer. Returns nullptr if the nursery
// is disabled (nothing needs recording in that case).
ArenaCellSet*
js::gc::AllocateWholeCellSet(Arena* arena)
{
Zone* zone = arena->zone;
JSRuntime* rt = zone->runtimeFromMainThread();
if (!rt->gc.nursery.isEnabled())
return nullptr;

// Allocation failure here cannot be recovered from, so crash rather than
// propagate OOM.
AutoEnterOOMUnsafeRegion oomUnsafe;
Nursery& nursery = rt->gc.nursery;
void* data = nursery.allocateBuffer(zone, sizeof(ArenaCellSet));
if (!data) {
oomUnsafe.crash("Failed to allocate WholeCellSet");
// NOTE(review): crash() presumably does not return; this return is
// defensive — confirm against AutoEnterOOMUnsafeRegion.
return nullptr;
}

// These sets live in the nursery; if its free space drops below the
// threshold, ask for a minor GC soon (see NurseryFreeThresholdBytes).
if (nursery.approxFreeSpace() < ArenaCellSet::NurseryFreeThresholdBytes)
rt->gc.storeBuffer.setAboutToOverflow();

auto cells = static_cast<ArenaCellSet*>(data);
new (cells) ArenaCellSet(arena);
arena->bufferedCells = cells;
rt->gc.storeBuffer.addToWholeCellBuffer(cells);
return cells;
}
template struct StoreBuffer::MonoTypeBuffer<StoreBuffer::ValueEdge>;
template struct StoreBuffer::MonoTypeBuffer<StoreBuffer::CellPtrEdge>;
template struct StoreBuffer::MonoTypeBuffer<StoreBuffer::SlotsEdge>;
template struct StoreBuffer::MonoTypeBuffer<StoreBuffer::WholeCellEdges>;

View File

@ -21,6 +21,8 @@
namespace js {
namespace gc {
class ArenaCellSet;
/*
* BufferableRef represents an abstract reference for use in the generational
* GC's remembered set. Entries in the store buffer that cannot be represented
@ -334,27 +336,6 @@ class StoreBuffer
} Hasher;
};
// Store buffer entry naming a single tenured cell whose contents must be
// traced in full during minor GC.
struct WholeCellEdges
{
Cell* edge;

WholeCellEdges() : edge(nullptr) {}
explicit WholeCellEdges(Cell* cell) : edge(cell) {
MOZ_ASSERT(edge->isTenured());
}

bool operator==(const WholeCellEdges& other) const { return edge == other.edge; }
bool operator!=(const WholeCellEdges& other) const { return edge != other.edge; }

// Tenured cells are unconditionally treated as in the remembered set.
bool maybeInRememberedSet(const Nursery&) const { return true; }

void trace(TenuringTracer& mover) const;

explicit operator bool() const { return edge != nullptr; }

typedef PointerEdgeHasher<WholeCellEdges> Hasher;
};
template <typename Buffer, typename Edge>
void unput(Buffer& buffer, const Edge& edge) {
MOZ_ASSERT(!JS::shadow::Runtime::asShadowRuntime(runtime_)->isHeapBusy());
@ -379,7 +360,7 @@ class StoreBuffer
MonoTypeBuffer<ValueEdge> bufferVal;
MonoTypeBuffer<CellPtrEdge> bufferCell;
MonoTypeBuffer<SlotsEdge> bufferSlot;
MonoTypeBuffer<WholeCellEdges> bufferWholeCell;
ArenaCellSet* bufferWholeCell;
GenericBuffer bufferGeneric;
bool cancelIonCompilations_;
@ -394,7 +375,7 @@ class StoreBuffer
public:
explicit StoreBuffer(JSRuntime* rt, const Nursery& nursery)
: bufferVal(), bufferCell(), bufferSlot(), bufferWholeCell(), bufferGeneric(),
: bufferVal(), bufferCell(), bufferSlot(), bufferWholeCell(nullptr), bufferGeneric(),
cancelIonCompilations_(false), runtime_(rt), nursery_(nursery), aboutToOverflow_(false),
enabled_(false)
#ifdef DEBUG
@ -440,15 +421,67 @@ class StoreBuffer
void traceValues(TenuringTracer& mover) { bufferVal.trace(this, mover); }
void traceCells(TenuringTracer& mover) { bufferCell.trace(this, mover); }
void traceSlots(TenuringTracer& mover) { bufferSlot.trace(this, mover); }
void traceWholeCells(TenuringTracer& mover) { bufferWholeCell.trace(this, mover); }
void traceGenericEntries(JSTracer *trc) { bufferGeneric.trace(this, trc); }
void traceWholeCells(TenuringTracer& mover);
void traceWholeCell(TenuringTracer& mover, JS::TraceKind kind, Cell* cell);
/* For use by our owned buffers and for testing. */
void setAboutToOverflow();
void addToWholeCellBuffer(ArenaCellSet* set);
void addSizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf, JS::GCSizes* sizes);
};
// A set of cells in an arena used to implement the whole cell store buffer.
// Each Arena points at one of these (or at the shared Empty sentinel); the
// in-use sets are chained through |next| to form the buffer itself.
class ArenaCellSet
{
    friend class StoreBuffer;

    // The arena this relates to.
    Arena* arena;

    // Pointer to next set forming a linked list.
    ArenaCellSet* next;

    // Bit vector with one bit for each possible cell start position.
    BitArray<ArenaCellCount> bits;

  public:
    // explicit: a raw Arena* should never convert implicitly to a cell set.
    explicit ArenaCellSet(Arena* arena);

    bool hasCell(const TenuredCell* cell) const {
        return hasCell(getCellIndex(cell));
    }

    void putCell(const TenuredCell* cell) {
        putCell(getCellIndex(cell));
    }

    // True iff this is the shared sentinel below.
    bool isEmpty() const {
        return this == &Empty;
    }

    bool hasCell(size_t cellIndex) const;
    void putCell(size_t cellIndex);
    void check() const;

    // Sentinel object used for all empty sets.
    static ArenaCellSet Empty;

    static size_t getCellIndex(const TenuredCell* cell);
    static void getWordIndexAndMask(size_t cellIndex, size_t* wordp, uint32_t* maskp);

    // Attempt to trigger a minor GC if free space in the nursery (where these
    // objects are allocated) falls below this threshold.
    static const size_t NurseryFreeThresholdBytes = 64 * 1024;
};
ArenaCellSet* AllocateWholeCellSet(Arena* arena);
} /* namespace gc */
} /* namespace js */

View File

@ -1970,6 +1970,7 @@ RelocateArena(Arena* arena, SliceBudget& sliceBudget)
MOZ_ASSERT(!arena->hasDelayedMarking);
MOZ_ASSERT(!arena->markOverflow);
MOZ_ASSERT(!arena->allocatedDuringIncremental);
MOZ_ASSERT(arena->bufferedCells->isEmpty());
Zone* zone = arena->zone;

View File

@ -92,12 +92,6 @@ ObjectElements::MakeElementsCopyOnWrite(ExclusiveContext* cx, NativeObject* obj)
ObjectElements* header = obj->getElementsHeader();
// As soon as we have (or may soon have) multiple objects referencing a
// single header, it isn't clear which object the "I'm already in the
// whole-cell store buffer" bit is describing, so just disable that
// optimization.
header->clearInWholeCellBuffer();
// Note: this method doesn't update type information to indicate that the
// elements might be copy on write. Handling this is left to the caller.
MOZ_ASSERT(!header->isCopyOnWrite());

View File

@ -181,14 +181,6 @@ class ObjectElements
// memory. This is a static property of the TypedArray, set when it
// is created and never changed.
SHARED_MEMORY = 0x8,
// Set if the object has already been added to the whole-cell store
// buffer, and therefore adding individual elements into the slots store
// buffer would be pointless. This is never set for the empty or shared
// elements headers, nor if the elements are copy on write; in such
// situations it isn't clear *which* object that references this
// elements header has already been put in the whole-cell store buffer.
IN_WHOLE_CELL_BUFFER = 0x10,
};
private:
@ -245,19 +237,6 @@ class ObjectElements
MOZ_ASSERT(isCopyOnWrite());
flags &= ~COPY_ON_WRITE;
}
// Whether the owning object has already been added to the whole-cell
// store buffer (see IN_WHOLE_CELL_BUFFER).
bool isInWholeCellBuffer() const {
return flags & IN_WHOLE_CELL_BUFFER;
}
// Mark the owning object as present in the whole-cell store buffer.
// Invalid for shared-memory or copy-on-write headers, where ownership of
// the flag would be ambiguous.
void setInWholeCellBuffer() {
MOZ_ASSERT(!isSharedMemory());
MOZ_ASSERT(!isCopyOnWrite());
flags |= IN_WHOLE_CELL_BUFFER;
}
// Clear the whole-cell-buffer flag; same header restrictions as
// setInWholeCellBuffer().
void clearInWholeCellBuffer() {
MOZ_ASSERT(!isSharedMemory());
MOZ_ASSERT(!isCopyOnWrite());
flags &= ~IN_WHOLE_CELL_BUFFER;
}
public:
MOZ_CONSTEXPR ObjectElements(uint32_t capacity, uint32_t length)
@ -481,15 +460,9 @@ class NativeObject : public JSObject
}
// An object is in the whole-cell store buffer iff the ArenaCellSet
// attached to its arena contains its cell.
//
// NOTE(review): the pasted span interleaved the bodies of the deleted
// setInWholeCellBuffer/clearInWholeCellBuffer (which call ObjectElements
// flag helpers this patch removes) with the new implementation;
// reconstructed to the single new method.
bool isInWholeCellBuffer() const {
const gc::TenuredCell* cell = &asTenured();
gc::ArenaCellSet* cells = cell->arena()->bufferedCells;
return cells && cells->hasCell(cell);
}
protected: