Bug 848374 - Have a single allocator per compartment for optimized stubs. r=djvj,njn

Jan de Mooij 2013-03-07 11:42:24 +01:00
parent ebc67de1cd
commit 08aea52e66
10 changed files with 115 additions and 67 deletions
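
In short, this patch gives optimized IC stubs a single allocator owned by the compartment's IonCompartment, while fallback stubs remain owned by each BaselineScript. A rough sketch of the ownership change, using simplified stand-in types rather than code from this commit:

// Sketch only: simplified stand-ins, not the real SpiderMonkey classes.
struct FallbackICStubSpaceSketch  { /* arena that lives as long as its BaselineScript */ };
struct OptimizedICStubSpaceSketch { /* arena purged whenever JIT code is discarded */ };

// Before this patch: every BaselineScript carried both arenas.
struct BaselineScriptBefore {
    FallbackICStubSpaceSketch  fallbackStubSpace_;
    OptimizedICStubSpaceSketch optimizedStubSpace_;   // one per script
};

// After this patch: optimized stubs share a single arena per compartment.
struct BaselineScriptAfter {
    FallbackICStubSpaceSketch fallbackStubSpace_;     // still one per script
};
struct IonCompartmentAfter {
    OptimizedICStubSpaceSketch optimizedStubSpace_;   // one per compartment
};

Because the optimized arena is per compartment, it can be freed in one shot whenever the compartment discards its JIT code; the per-script fallback arenas are left alone.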

View File

@ -170,7 +170,8 @@ struct CompartmentStats
       , scriptData(0)
       , jaegerData(0)
       , baselineData(0)
-      , baselineStubs(0)
+      , baselineFallbackStubs(0)
+      , baselineOptimizedStubs(0)
       , ionData(0)
       , compartmentObject(0)
       , crossCompartmentWrappersTable(0)
@ -208,7 +209,8 @@ struct CompartmentStats
       , scriptData(other.scriptData)
       , jaegerData(other.jaegerData)
       , baselineData(other.baselineData)
-      , baselineStubs(other.baselineStubs)
+      , baselineFallbackStubs(other.baselineFallbackStubs)
+      , baselineOptimizedStubs(other.baselineOptimizedStubs)
       , ionData(other.ionData)
       , compartmentObject(other.compartmentObject)
       , crossCompartmentWrappersTable(other.crossCompartmentWrappersTable)
@ -252,7 +254,8 @@ struct CompartmentStats
     size_t scriptData;
     size_t jaegerData;
     size_t baselineData;
-    size_t baselineStubs;
+    size_t baselineFallbackStubs;
+    size_t baselineOptimizedStubs;
     size_t ionData;
     size_t compartmentObject;
     size_t crossCompartmentWrappersTable;
@ -294,7 +297,8 @@ struct CompartmentStats
         ADD(scriptData);
         ADD(jaegerData);
         ADD(baselineData);
-        ADD(baselineStubs);
+        ADD(baselineFallbackStubs);
+        ADD(baselineOptimizedStubs);
         ADD(ionData);
         ADD(compartmentObject);
         ADD(crossCompartmentWrappersTable);

View File

@ -893,9 +893,9 @@ class ICStubCompiler
     virtual ICStub *getStub(ICStubSpace *space) = 0;
 
     ICStubSpace *getStubSpace(JSScript *script) {
-        return ICStub::CanMakeCalls(kind)
-               ? script->baselineScript()->fallbackStubSpace()
-               : script->baselineScript()->optimizedStubSpace();
+        if (ICStub::CanMakeCalls(kind))
+            return script->baselineScript()->fallbackStubSpace();
+        return script->compartment()->ionCompartment()->optimizedStubSpace();
     }
 };
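
The rule above decides lifetime: a stub kind that can make non-tail calls goes into the per-script fallback space, presumably because such a call can trigger GC or a JIT-code discard while the stub is still in use, and the shared optimized space is purged on exactly that discard. A self-contained restatement with made-up names (not SpiderMonkey API):

// Toy restatement of the selection rule; the enum and function are invented.
enum StubSpaceChoice { PerScriptFallbackSpace, PerCompartmentOptimizedSpace };

inline StubSpaceChoice
chooseStubSpace(bool stubCanMakeCalls)
{
    // Call-capable stubs must survive a JIT-code discard, so they keep
    // per-script storage; everything else shares the per-compartment arena.
    return stubCanMakeCalls ? PerScriptFallbackSpace : PerCompartmentOptimizedSpace;
}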

View File

@ -35,7 +35,6 @@ PCMappingSlotInfo::ToSlotLocation(const StackValue *stackVal)
 BaselineScript::BaselineScript(uint32_t prologueOffset)
   : method_(NULL),
     fallbackStubSpace_(),
-    optimizedStubSpace_(),
     prologueOffset_(prologueOffset),
     flags_(0)
 { }
@ -509,7 +508,7 @@ BaselineScript::copyICEntries(HandleScript script, const ICEntry *entries, Macro
 }
 
 void
-BaselineScript::adoptFallbackStubs(ICStubSpace *stubSpace)
+BaselineScript::adoptFallbackStubs(FallbackICStubSpace *stubSpace)
 {
     fallbackStubSpace_.adoptFrom(stubSpace);
 }
@ -680,8 +679,6 @@ BaselineScript::purgeOptimizedStubs(Zone *zone)
         }
     }
 #endif
-
-    optimizedStubSpace_.free();
 }
 
 void
@ -716,11 +713,11 @@ ion::IonCompartment::toggleBaselineStubBarriers(bool enabled)
 
 void
 ion::SizeOfBaselineData(JSScript *script, JSMallocSizeOfFun mallocSizeOf, size_t *data,
-                        size_t *stubs)
+                        size_t *fallbackStubs)
 {
     *data = 0;
-    *stubs = 0;
+    *fallbackStubs = 0;
 
     if (script->hasBaselineScript())
-        script->baseline->sizeOfIncludingThis(mallocSizeOf, data, stubs);
+        script->baseline->sizeOfIncludingThis(mallocSizeOf, data, fallbackStubs);
 }

View File

@ -24,35 +24,6 @@ class StackValue;
 struct ICEntry;
 class ICStub;
-// ICStubSpace is an abstraction for allocation policy and storage for stub data.
-struct ICStubSpace
-{
-  private:
-    const static size_t STUB_DEFAULT_CHUNK_SIZE = 256;
-    LifoAlloc allocator_;
-
-  public:
-    inline ICStubSpace()
-      : allocator_(STUB_DEFAULT_CHUNK_SIZE) {}
-
-    inline void *alloc(size_t size) {
-        return allocator_.alloc(size);
-    }
-
-    JS_DECLARE_NEW_METHODS(allocate, alloc, inline)
-
-    inline void adoptFrom(ICStubSpace *other) {
-        allocator_.steal(&(other->allocator_));
-    }
-
-    void free() {
-        allocator_.freeAll();
-    }
-
-    size_t sizeOfExcludingThis(JSMallocSizeOfFun mallocSizeOf) const {
-        return allocator_.sizeOfExcludingThis(mallocSizeOf);
-    }
-};
 
 class PCMappingSlotInfo
 {
     uint8_t slotInfo_;
@ -132,10 +103,7 @@ struct BaselineScript
     HeapPtr<IonCode> method_;
 
     // Allocated space for fallback stubs.
-    ICStubSpace fallbackStubSpace_;
-
-    // Allocated space for optimized stubs.
-    ICStubSpace optimizedStubSpace_;
+    FallbackICStubSpace fallbackStubSpace_;
 
     // Native code offset right before the scope chain is initialized.
     uint32_t prologueOffset_;
@ -181,13 +149,13 @@ struct BaselineScript
         return offsetof(BaselineScript, method_);
     }
 
-    void sizeOfIncludingThis(JSMallocSizeOfFun mallocSizeOf, size_t *data, size_t *stubs) const {
+    void sizeOfIncludingThis(JSMallocSizeOfFun mallocSizeOf, size_t *data,
+                             size_t *fallbackStubs) const {
         *data = mallocSizeOf(this);
 
         // data already includes the ICStubSpace itself, so use
         // sizeOfExcludingThis.
-        *stubs = fallbackStubSpace_.sizeOfExcludingThis(mallocSizeOf) +
-                 optimizedStubSpace_.sizeOfExcludingThis(mallocSizeOf);
+        *fallbackStubs = fallbackStubSpace_.sizeOfExcludingThis(mallocSizeOf);
     }
 
     bool active() const {
@ -220,14 +188,10 @@ struct BaselineScript
     uint8_t *pcMappingData() {
         return reinterpret_cast<uint8_t *>(this) + pcMappingOffset_;
     }
-    ICStubSpace *fallbackStubSpace() {
+    FallbackICStubSpace *fallbackStubSpace() {
         return &fallbackStubSpace_;
     }
-
-    ICStubSpace *optimizedStubSpace() {
-        return &optimizedStubSpace_;
-    }
     IonCode *method() const {
         return method_;
     }
@ -252,7 +216,7 @@ struct BaselineScript
     }
 
     void copyICEntries(HandleScript script, const ICEntry *entries, MacroAssembler &masm);
-    void adoptFallbackStubs(ICStubSpace *stubSpace);
+    void adoptFallbackStubs(FallbackICStubSpace *stubSpace);
 
     PCMappingIndexEntry &pcMappingIndexEntry(size_t index);
     CompactBufferReader pcMappingReader(size_t indexEntry);
@ -294,7 +258,8 @@ void
 FinishDiscardBaselineScript(FreeOp *fop, UnrootedScript script);
 
 void
-SizeOfBaselineData(JSScript *script, JSMallocSizeOfFun mallocSizeOf, size_t *data, size_t *stubs);
+SizeOfBaselineData(JSScript *script, JSMallocSizeOfFun mallocSizeOf, size_t *data,
+                   size_t *fallbackStubs);
 
 struct BaselineBailoutInfo
 {

View File

@ -33,6 +33,66 @@ class IonBuilder;
 
 typedef Vector<IonBuilder*, 0, SystemAllocPolicy> OffThreadCompilationVector;
 
+// ICStubSpace is an abstraction for allocation policy and storage for stub data.
+// There are two kinds of stubs: optimized stubs and fallback stubs (the latter
+// also includes stubs that can make non-tail calls that can GC).
+//
+// Optimized stubs are allocated per-compartment and are always purged when
+// JIT-code is discarded. Fallback stubs are allocated per BaselineScript and
+// are only destroyed when the BaselineScript is destroyed.
+struct ICStubSpace
+{
+  protected:
+    LifoAlloc allocator_;
+
+    explicit ICStubSpace(size_t chunkSize)
+      : allocator_(chunkSize)
+    {}
+
+  public:
+    inline void *alloc(size_t size) {
+        return allocator_.alloc(size);
+    }
+
+    JS_DECLARE_NEW_METHODS(allocate, alloc, inline)
+
+    size_t sizeOfExcludingThis(JSMallocSizeOfFun mallocSizeOf) const {
+        return allocator_.sizeOfExcludingThis(mallocSizeOf);
+    }
+};
+
+// Space for optimized stubs. Every IonCompartment has a single
+// OptimizedICStubSpace.
+struct OptimizedICStubSpace : public ICStubSpace
+{
+    const static size_t STUB_DEFAULT_CHUNK_SIZE = 4 * 1024;
+
+  public:
+    OptimizedICStubSpace()
+      : ICStubSpace(STUB_DEFAULT_CHUNK_SIZE)
+    {}
+
+    void free() {
+        allocator_.freeAll();
+    }
+};
+
+// Space for fallback stubs. Every BaselineScript has a
+// FallbackICStubSpace.
+struct FallbackICStubSpace : public ICStubSpace
+{
+    const static size_t STUB_DEFAULT_CHUNK_SIZE = 256;
+
+  public:
+    FallbackICStubSpace()
+      : ICStubSpace(STUB_DEFAULT_CHUNK_SIZE)
+    {}
+
+    inline void adoptFrom(FallbackICStubSpace *other) {
+        allocator_.steal(&(other->allocator_));
+    }
+};
+
 class IonRuntime
 {
     friend class IonCompartment;
@ -131,6 +191,9 @@ class IonCompartment
     // point from called script.
     void *baselineCallReturnAddr_;
 
+    // Allocated space for optimized baseline stubs.
+    OptimizedICStubSpace optimizedStubSpace_;
+
   public:
     IonCode *getVMWrapper(const VMFunction &f);
@ -221,6 +284,9 @@ class IonCompartment
         if (!flusher_ || !fl)
             flusher_ = fl;
     }
+    OptimizedICStubSpace *optimizedStubSpace() {
+        return &optimizedStubSpace_;
+    }
 };
 
 class BailoutClosure;
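
The three stub-space classes added above are thin wrappers over a LifoAlloc bump allocator. A standard-C++ toy model of the lifetime pattern they implement, with invented names (BumpArenaSketch stands in for LifoAlloc; none of this is SpiderMonkey API):

#include <cstdlib>
#include <vector>

// Stand-in for LifoAlloc: cheap allocation, bulk free, ownership transfer.
struct BumpArenaSketch {
    std::vector<void *> chunks;

    void *alloc(std::size_t bytes) {
        void *p = std::malloc(bytes);
        chunks.push_back(p);
        return p;
    }
    void freeAll() {                       // like LifoAlloc::freeAll()
        for (std::size_t i = 0; i < chunks.size(); i++)
            std::free(chunks[i]);
        chunks.clear();
    }
    void steal(BumpArenaSketch *other) {   // like LifoAlloc::steal(), used by adoptFrom()
        chunks.insert(chunks.end(), other->chunks.begin(), other->chunks.end());
        other->chunks.clear();
    }
    ~BumpArenaSketch() { freeAll(); }
};

// One optimized-stub arena per "compartment"; fallback arenas stay per "script".
struct CompartmentSketch {
    BumpArenaSketch optimizedStubSpace;

    void discardJitCode() {
        // Every optimized stub in the compartment disappears in one operation;
        // per-script fallback arenas are not touched.
        optimizedStubSpace.freeAll();
    }
};

FallbackICStubSpace::adoptFrom() corresponds to steal() here: when compilation finishes, the BaselineScript takes ownership of the fallback stubs the compiler allocated into its temporary space.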

View File

@ -29,7 +29,7 @@ class BaselineCompilerShared
     FrameInfo frame;
 
-    ICStubSpace stubSpace_;
+    FallbackICStubSpace stubSpace_;
     js::Vector<ICEntry, 16, SystemAllocPolicy> icEntries_;
 
     // Stores the native code offset for a bytecode pc.

View File

@ -634,6 +634,12 @@ JSCompartment::discardJitCode(FreeOp *fop, bool discardConstraints)
         script->resetUseCount();
     }
 
+#ifdef JS_ION
+    /* Free optimized baseline stubs. */
+    if (ionCompartment())
+        ionCompartment()->optimizedStubSpace()->free();
+#endif
+
     types.sweepCompilerOutputs(fop, discardConstraints);
 }
@ -1016,7 +1022,7 @@ void
 JSCompartment::sizeOfIncludingThis(JSMallocSizeOfFun mallocSizeOf, size_t *compartmentObject,
                                    TypeInferenceSizes *tiSizes, size_t *shapesCompartmentTables,
                                    size_t *crossCompartmentWrappersArg, size_t *regexpCompartment,
-                                   size_t *debuggeesSet)
+                                   size_t *debuggeesSet, size_t *baselineOptimizedStubs)
 {
     *compartmentObject = mallocSizeOf(this);
     sizeOfTypeInferenceData(tiSizes, mallocSizeOf);
@ -1027,6 +1033,9 @@ JSCompartment::sizeOfIncludingThis(JSMallocSizeOfFun mallocSizeOf, size_t *compa
     *crossCompartmentWrappersArg = crossCompartmentWrappers.sizeOfExcludingThis(mallocSizeOf);
     *regexpCompartment = regExps.sizeOfExcludingThis(mallocSizeOf);
     *debuggeesSet = debuggees.sizeOfExcludingThis(mallocSizeOf);
+    *baselineOptimizedStubs = ionCompartment()
+                              ? ionCompartment()->optimizedStubSpace()->sizeOfExcludingThis(mallocSizeOf)
+                              : 0;
 }
 
 void

View File

@ -364,7 +364,8 @@ struct JSCompartment : private JS::shadow::Zone, public js::gc::GraphNodeBase<JS
     void sizeOfIncludingThis(JSMallocSizeOfFun mallocSizeOf, size_t *compartmentObject,
                              JS::TypeInferenceSizes *tiSizes,
                              size_t *shapesCompartmentTables, size_t *crossCompartmentWrappers,
-                             size_t *regexpCompartment, size_t *debuggeesSet);
+                             size_t *regexpCompartment, size_t *debuggeesSet,
+                             size_t *baselineOptimizedStubs);
 
     /*
      * Shared scope property tree, and arena-pool for allocating its nodes.

View File

@ -96,7 +96,8 @@ StatsCompartmentCallback(JSRuntime *rt, void *data, JSCompartment *compartment)
                                      &cStats.shapesCompartmentTables,
                                      &cStats.crossCompartmentWrappersTable,
                                      &cStats.regexpCompartment,
-                                     &cStats.debuggeesSet);
+                                     &cStats.debuggeesSet,
+                                     &cStats.baselineOptimizedStubs);
 }
 
 static void
@ -218,10 +219,11 @@ StatsCellCallback(JSRuntime *rt, void *data, void *thing, JSGCTraceKind traceKin
 #ifdef JS_METHODJIT
         cStats->jaegerData += script->sizeOfJitScripts(rtStats->mallocSizeOf_);
 # ifdef JS_ION
-        size_t baselineData = 0, baselineStubs = 0;
-        ion::SizeOfBaselineData(script, rtStats->mallocSizeOf_, &baselineData, &baselineStubs);
+        size_t baselineData = 0, baselineFallbackStubs = 0;
+        ion::SizeOfBaselineData(script, rtStats->mallocSizeOf_, &baselineData,
+                                &baselineFallbackStubs);
         cStats->baselineData += baselineData;
-        cStats->baselineStubs += baselineStubs;
+        cStats->baselineFallbackStubs += baselineFallbackStubs;
         cStats->ionData += ion::SizeOfIonData(script, rtStats->mallocSizeOf_);
 # endif
 #endif
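
With the split above, the memory reporter collects the two totals from different places: fallback-stub bytes are summed per script via SizeOfBaselineData(), while optimized-stub bytes are read once per compartment from the shared arena. An illustrative aggregation with made-up names (not the real reporter code):

#include <cstddef>
#include <vector>

// Invented stand-ins for the reporting data flow.
struct ScriptSketch {
    std::size_t fallbackStubBytes;          // per script, cf. SizeOfBaselineData()
};

struct CompartmentReportSketch {
    std::vector<ScriptSketch> scripts;
    std::size_t optimizedStubBytes;         // one shared arena, measured once
};

inline void
addCompartmentStats(const CompartmentReportSketch &c, std::size_t *fallback, std::size_t *optimized)
{
    for (std::size_t i = 0; i < c.scripts.size(); i++)
        *fallback += c.scripts[i].fallbackStubBytes;   // like the per-cell callback
    *optimized += c.optimizedStubBytes;                // like the per-compartment callback
}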

View File

@ -1716,9 +1716,13 @@ ReportCompartmentStats(const JS::CompartmentStats &cStats,
"Memory used by the Baseline JIT for compilation data: "
"BaselineScripts.");
CREPORT_BYTES(cJSPathPrefix + NS_LITERAL_CSTRING("baseline-stubs"),
cStats.baselineStubs,
"Memory used by Baseline IC stubs (excluding code).");
CREPORT_BYTES(cJSPathPrefix + NS_LITERAL_CSTRING("baseline-fallback-stubs"),
cStats.baselineFallbackStubs,
"Memory used by Baseline fallback IC stubs (excluding code).");
CREPORT_BYTES(cJSPathPrefix + NS_LITERAL_CSTRING("baseline-optimized-stubs"),
cStats.baselineOptimizedStubs,
"Memory used by Baseline optimized IC stubs (excluding code).");
CREPORT_BYTES(cJSPathPrefix + NS_LITERAL_CSTRING("ion-data"),
cStats.ionData,