Bug 673158 - Separate regexp JIT code and normal mjit code in about:memory. code=sandervv,Bas.Weelinck, r=nnethercote.

Sander van Veen 2011-08-30 17:21:36 -07:00
parent 626d8d3053
commit 5bdf386f72
25 changed files with 123 additions and 71 deletions
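
At a glance, the change threads a new JSC::CodeKind tag (METHOD_CODE or REGEXP_CODE) from every LinkBuffer and assembler call site down into ExecutableAllocator, so each ExecutablePool can count method-JIT bytes and regexp-JIT bytes separately, and about:memory can report both plus the unused remainder of each pool. The following is a minimal standalone sketch of that bookkeeping idea, not the SpiderMonkey code itself; all names are illustrative.

    // Standalone sketch (illustrative names) of the per-kind accounting
    // this commit adds to ExecutablePool.
    #include <cstddef>

    enum CodeKind { METHOD_CODE, REGEXP_CODE };

    struct PoolAccounting {
        size_t capacity;      // total bytes backing the pool
        size_t methodBytes;   // bytes handed out as method-JIT code
        size_t regexpBytes;   // bytes handed out as regexp-JIT code

        explicit PoolAccounting(size_t cap)
          : capacity(cap), methodBytes(0), regexpBytes(0) {}

        void noteAlloc(size_t n, CodeKind kind) {
            if (kind == REGEXP_CODE)
                regexpBytes += n;
            else
                methodBytes += n;
        }
        size_t unusedBytes() const { return capacity - methodBytes - regexpBytes; }
    };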

View File

@ -110,7 +110,7 @@ void test1 ( void )
// constructor for LinkBuffer asks ep to allocate r-x memory,
// then copies it there.
JSC::LinkBuffer patchBuffer(am, ep);
JSC::LinkBuffer patchBuffer(am, ep, JSC::METHOD_CODE);
// finalize
JSC::MacroAssemblerCodeRef cr = patchBuffer.finalizeCode();
@ -266,7 +266,7 @@ void test2 ( void )
// constructor for LinkBuffer asks ep to allocate r-x memory,
// then copies it there.
JSC::LinkBuffer patchBuffer(am, ep);
JSC::LinkBuffer patchBuffer(am, ep, JSC::METHOD_CODE);
// finalize
JSC::MacroAssemblerCodeRef cr = patchBuffer.finalizeCode();
@ -453,7 +453,7 @@ void test3 ( void )
// constructor for LinkBuffer asks ep to allocate r-x memory,
// then copies it there.
JSC::LinkBuffer patchBuffer(am, ep);
JSC::LinkBuffer patchBuffer(am, ep, JSC::METHOD_CODE);
// finalize
JSC::MacroAssemblerCodeRef cr = patchBuffer.finalizeCode();
@ -663,7 +663,7 @@ void test4 ( void )
// constructor for LinkBuffer asks ep to allocate r-x memory,
// then copies it there.
JSC::LinkBuffer patchBuffer(am, ep);
JSC::LinkBuffer patchBuffer(am, ep, JSC::METHOD_CODE);
// now fix up any branches/calls
//JSC::FunctionPtr target = JSC::FunctionPtr::FunctionPtr( &cube );
@ -869,7 +869,7 @@ void test5 ( void )
// constructor for LinkBuffer asks ep to allocate r-x memory,
// then copies it there.
JSC::LinkBuffer patchBuffer(am, ep);
JSC::LinkBuffer patchBuffer(am, ep, JSC::METHOD_CODE);
// now fix up any branches/calls
JSC::FunctionPtr target = JSC::FunctionPtr::FunctionPtr( &cube );
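
Each of these test hunks follows the same pattern, now with an explicit code kind: assemble, hand the result to a LinkBuffer (which, per the comment above, asks the ExecutablePool for r-x memory and copies the code there), then finalize. A hedged sketch of that flow, reusing the am/ep names from the tests and assuming the three-argument constructor shown in this diff:

    // Sketch of the assemble -> link -> finalize flow used by these tests.
    // 'am' is the MacroAssembler and 'ep' the ExecutablePool, as above.
    JSC::LinkBuffer patchBuffer(am, ep, JSC::METHOD_CODE);      // allocates r-x memory, copies the code
    JSC::MacroAssemblerCodeRef cr = patchBuffer.finalizeCode(); // cr now refers to executable code
    // The METHOD_CODE tag means these bytes are counted by the pool's new
    // method-JIT counter rather than the regexp one.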

View File

@ -634,14 +634,14 @@ inline void ARMAssembler::fixUpOffsets(void * buffer)
}
}
void* ARMAssembler::executableAllocAndCopy(ExecutableAllocator* allocator, ExecutablePool **poolp)
void* ARMAssembler::executableAllocAndCopy(ExecutableAllocator* allocator, ExecutablePool **poolp, CodeKind kind)
{
// 64-bit alignment is required for next constant pool and JIT code as well
m_buffer.flushWithoutBarrier(true);
if (m_buffer.uncheckedSize() & 0x7)
bkpt(0);
void * data = m_buffer.executableAllocAndCopy(allocator, poolp);
void * data = m_buffer.executableAllocAndCopy(allocator, poolp, kind);
if (data)
fixUpOffsets(data);
return data;

View File

@ -987,7 +987,7 @@ namespace JSC {
return loadBranchTarget(ARMRegisters::pc, cc, useConstantPool);
}
void* executableAllocAndCopy(ExecutableAllocator* allocator, ExecutablePool **poolp);
void* executableAllocAndCopy(ExecutableAllocator* allocator, ExecutablePool **poolp, CodeKind kind);
void executableCopy(void* buffer);
void fixUpOffsets(void* buffer);

View File

@ -1558,9 +1558,9 @@ public:
return m_formatter.size();
}
void* executableAllocAndCopy(ExecutableAllocator* allocator, ExecutablePool** poolp)
void* executableAllocAndCopy(ExecutableAllocator* allocator, ExecutablePool** poolp, CodeKind kind)
{
void* copy = m_formatter.executableAllocAndCopy(allocator, poolp);
void* copy = m_formatter.executableAllocAndCopy(allocator, poolp, kind);
unsigned jumpCount = m_jumpsToLink.size();
for (unsigned i = 0; i < jumpCount; ++i) {
@ -1909,8 +1909,8 @@ private:
size_t size() const { return m_buffer.size(); }
bool isAligned(int alignment) const { return m_buffer.isAligned(alignment); }
void* data() const { return m_buffer.data(); }
void* executableAllocAndCopy(ExecutableAllocator* allocator, ExecutablePool** poolp) {
return m_buffer.executableAllocAndCopy(allocator, poolp);
void* executableAllocAndCopy(ExecutableAllocator* allocator, ExecutablePool** poolp, CodeKind kind) {
return m_buffer.executableAllocAndCopy(allocator, poolp, kind);
}
bool oom() const { return m_buffer.oom(); }

View File

@ -137,14 +137,14 @@ namespace JSC {
* The user must check for a NULL return value, which means
* no code was generated, or there was an OOM.
*/
void* executableAllocAndCopy(ExecutableAllocator* allocator, ExecutablePool** poolp)
void* executableAllocAndCopy(ExecutableAllocator* allocator, ExecutablePool** poolp, CodeKind kind)
{
if (m_oom || m_size == 0) {
*poolp = NULL;
return 0;
}
void* result = allocator->alloc(m_size, poolp);
void* result = allocator->alloc(m_size, poolp, kind);
if (!result) {
*poolp = NULL;
return 0;
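
As the comment above notes, a NULL result can mean either that no code was generated (size 0) or that the allocation failed; *poolp is set to NULL in both cases. A hedged caller sketch, where buffer, allocator, and the surrounding function are placeholder names:

    // Illustrative caller; 'buffer' and 'allocator' are placeholder names.
    JSC::ExecutablePool *pool = NULL;
    void *code = buffer.executableAllocAndCopy(allocator, &pool, JSC::METHOD_CODE);
    if (!code) {
        // Nothing was emitted, or the allocator hit OOM; pool is NULL either way.
        return false;
    }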

View File

@ -194,10 +194,10 @@ public:
return AssemblerBuffer::size();
}
void* executableAllocAndCopy(ExecutableAllocator* allocator, ExecutablePool** poolp)
void* executableAllocAndCopy(ExecutableAllocator* allocator, ExecutablePool** poolp, CodeKind kind)
{
flushConstantPool(false);
return AssemblerBuffer::executableAllocAndCopy(allocator, poolp);
return AssemblerBuffer::executableAllocAndCopy(allocator, poolp, kind);
}
void putIntWithConstantInt(uint32_t insn, uint32_t constant, bool isReusable = false)

View File

@ -64,8 +64,9 @@ class LinkBuffer {
public:
// 'ok' should be checked after this constructor is called; it's false if OOM occurred.
LinkBuffer(MacroAssembler* masm, ExecutableAllocator* executableAllocator,
ExecutablePool** poolp, bool* ok)
ExecutablePool** poolp, bool* ok, CodeKind codeKind)
{
m_codeKind = codeKind;
m_code = executableAllocAndCopy(*masm, executableAllocator, poolp);
m_executablePool = *poolp;
m_size = masm->m_assembler.size(); // must come after call to executableAllocAndCopy()!
@ -75,20 +76,22 @@ public:
*ok = !!m_code;
}
LinkBuffer()
LinkBuffer(CodeKind kind)
: m_executablePool(NULL)
, m_code(NULL)
, m_size(0)
, m_codeKind(kind)
#ifndef NDEBUG
, m_completed(false)
#endif
{
}
LinkBuffer(uint8* ncode, size_t size)
LinkBuffer(uint8* ncode, size_t size, CodeKind kind)
: m_executablePool(NULL)
, m_code(ncode)
, m_size(size)
, m_codeKind(kind)
#ifndef NDEBUG
, m_completed(false)
#endif
@ -200,7 +203,7 @@ protected:
void *executableAllocAndCopy(MacroAssembler &masm, ExecutableAllocator *allocator,
ExecutablePool **poolp)
{
return masm.m_assembler.executableAllocAndCopy(allocator, poolp);
return masm.m_assembler.executableAllocAndCopy(allocator, poolp, m_codeKind);
}
void performFinalization()
@ -217,6 +220,7 @@ protected:
ExecutablePool* m_executablePool;
void* m_code;
size_t m_size;
CodeKind m_codeKind;
#ifndef NDEBUG
bool m_completed;
#endif
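
A usage sketch of the fallible constructor: the caller passes an 'ok' flag and checks it before touching the buffer, which is exactly the pattern TrampolineCompiler::compileTrampoline uses further down. masm, execAlloc, and poolp are assumed to be in scope.

    // Sketch of the 'ok' pattern; mirrors TrampolineCompiler below.
    bool ok;
    JSC::LinkBuffer buffer(&masm, execAlloc, poolp, &ok, JSC::METHOD_CODE);
    if (!ok)
        return false;   // OOM while copying the code into executable memory
    masm.finalize(buffer);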

View File

@ -1045,9 +1045,9 @@ namespace JSC {
return reinterpret_cast<void*>(reinterpret_cast<ptrdiff_t>(code) + destination.m_offset);
}
void* executableAllocAndCopy(ExecutableAllocator* allocator, ExecutablePool **poolp)
void* executableAllocAndCopy(ExecutableAllocator* allocator, ExecutablePool **poolp, CodeKind kind)
{
return m_buffer.executableAllocAndCopy(allocator, poolp);
return m_buffer.executableAllocAndCopy(allocator, poolp, kind);
}
void* executableCopy(void* buffer)

View File

@ -2491,9 +2491,9 @@ public:
return dst.m_offset - src.m_offset;
}
void* executableAllocAndCopy(ExecutableAllocator* allocator, ExecutablePool **poolp)
void* executableAllocAndCopy(ExecutableAllocator* allocator, ExecutablePool **poolp, CodeKind kind)
{
return m_formatter.executableAllocAndCopy(allocator, poolp);
return m_formatter.executableAllocAndCopy(allocator, poolp, kind);
}
void executableCopy(void* buffer)
@ -2843,8 +2843,8 @@ private:
bool oom() const { return m_buffer.oom(); }
bool isAligned(int alignment) const { return m_buffer.isAligned(alignment); }
void* data() const { return m_buffer.data(); }
void* executableAllocAndCopy(ExecutableAllocator* allocator, ExecutablePool** poolp) {
return m_buffer.executableAllocAndCopy(allocator, poolp);
void* executableAllocAndCopy(ExecutableAllocator* allocator, ExecutablePool** poolp, CodeKind kind) {
return m_buffer.executableAllocAndCopy(allocator, poolp, kind);
}
private:

View File

@ -37,15 +37,19 @@ ExecutablePool::~ExecutablePool()
m_allocator->releasePoolPages(this);
}
size_t
ExecutableAllocator::getCodeSize() const
void
ExecutableAllocator::getCodeStats(size_t& method, size_t& regexp, size_t& unused) const
{
size_t n = 0;
method = 0;
regexp = 0;
unused = 0;
for (ExecPoolHashSet::Range r = m_pools.all(); !r.empty(); r.popFront()) {
ExecutablePool* pool = r.front();
n += pool->m_allocation.size;
method += pool->m_mjitCodeMethod;
regexp += pool->m_mjitCodeRegexp;
unused += pool->m_allocation.size - pool->m_mjitCodeMethod - pool->m_mjitCodeRegexp;
}
return n;
}
}
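
getCodeStats() replaces getCodeSize(): instead of a single total, it walks the pools and accumulates the two per-kind counters plus the slack left in each pool. A hedged usage sketch, where execAlloc stands for the compartment's ExecutableAllocator (as in JSCompartment::getMjitCodeStats below):

    size_t method, regexp, unused;
    execAlloc->getCodeStats(method, regexp, unused);
    // method + regexp + unused adds up to what getCodeSize() used to return,
    // i.e. the sum of pool->m_allocation.size over all live pools.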

View File

@ -81,6 +81,8 @@ namespace JSC {
class ExecutableAllocator;
enum CodeKind { METHOD_CODE, REGEXP_CODE };
// These are reference-counted. A new one starts with a count of 1.
class ExecutablePool {
@ -102,6 +104,10 @@ private:
// Reference count for automatic reclamation.
unsigned m_refCount;
// Number of bytes currently used for Method and Regexp JIT code.
size_t m_mjitCodeMethod;
size_t m_mjitCodeRegexp;
public:
// Flag for downstream use, whether to try to release references to this pool.
@ -133,16 +139,22 @@ private:
ExecutablePool(ExecutableAllocator* allocator, Allocation a)
: m_allocator(allocator), m_freePtr(a.pages), m_end(m_freePtr + a.size), m_allocation(a),
m_refCount(1), m_destroy(false), m_gcNumber(0)
m_refCount(1), m_mjitCodeMethod(0), m_mjitCodeRegexp(0), m_destroy(false), m_gcNumber(0)
{ }
~ExecutablePool();
void* alloc(size_t n)
void* alloc(size_t n, CodeKind kind)
{
JS_ASSERT(n <= available());
void *result = m_freePtr;
m_freePtr += n;
if ( kind == REGEXP_CODE )
m_mjitCodeRegexp += n;
else
m_mjitCodeMethod += n;
return result;
}
@ -185,7 +197,7 @@ public:
// alloc() returns a pointer to some memory, and also (by reference) a
// pointer to reference-counted pool. The caller owns a reference to the
// pool; i.e. alloc() increments the count before returning the object.
void* alloc(size_t n, ExecutablePool** poolp)
void* alloc(size_t n, ExecutablePool** poolp, CodeKind type)
{
// Round 'n' up to a multiple of word size; if all allocations are of
// word sized quantities, then all subsequent allocations will be
@ -202,7 +214,7 @@ public:
// This alloc is infallible because poolForSize() just obtained
// (found, or created if necessary) a pool that had enough space.
void *result = (*poolp)->alloc(n);
void *result = (*poolp)->alloc(n, type);
JS_ASSERT(result);
return result;
}
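
The rounding mentioned in the hunk above keeps the bump pointer word-aligned. A small illustrative helper, not the exact expression used in alloc():

    // Round n up to a multiple of the word size, e.g. 13 -> 16 on a 64-bit build.
    static size_t roundUpToWord(size_t n) {
        const size_t mask = sizeof(void *) - 1;
        return (n + mask) & ~mask;
    }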
@ -213,7 +225,7 @@ public:
m_pools.remove(m_pools.lookup(pool)); // this asserts if |pool| is not in m_pools
}
size_t getCodeSize() const;
void getCodeStats(size_t& method, size_t& regexp, size_t& unused) const;
private:
static size_t pageSize;
@ -358,7 +370,7 @@ public:
//
// Modify "start" and "end" to avoid GCC 4.3.0-4.4.2 bug in
// mips_expand_synci_loop that may execute synci one more time.
// "start" points to the fisrt byte of the cache line.
// "start" points to the first byte of the cache line.
// "end" points to the last byte of the line before the last cache line.
// Because size is always a multiple of 4, this is safe to set
// "end" to the last byte.

View File

@ -44,7 +44,7 @@ ExecutablePool::Allocation ExecutableAllocator::systemAlloc(size_t n)
if (DosAllocMem(&allocation, n, OBJ_ANY|PAG_COMMIT|PAG_READ|PAG_WRITE) &&
DosAllocMem(&allocation, n, PAG_COMMIT|PAG_READ|PAG_WRITE))
CRASH();
ExecutablePool::Allocation alloc = {reinterpret_cast<char*>(allocation), n};
ExecutablePool::Allocation alloc = { reinterpret_cast<char*>(allocation), n };
return alloc;
}

View File

@ -42,7 +42,7 @@ size_t ExecutableAllocator::determinePageSize()
ExecutablePool::Allocation ExecutableAllocator::systemAlloc(size_t n)
{
void *allocation = VirtualAlloc(0, n, MEM_COMMIT | MEM_RESERVE, PAGE_EXECUTE_READWRITE);
ExecutablePool::Allocation alloc = {reinterpret_cast<char*>(allocation), n};
ExecutablePool::Allocation alloc = { reinterpret_cast<char*>(allocation), n };
return alloc;
}

View File

@ -50,6 +50,7 @@
#include "jswatchpoint.h"
#include "jswrapper.h"
#include "assembler/wtf/Platform.h"
#include "assembler/jit/ExecutableAllocator.h"
#include "yarr/BumpPointerAllocator.h"
#include "methodjit/MethodJIT.h"
#include "methodjit/PolyIC.h"
@ -176,10 +177,16 @@ JSCompartment::ensureJaegerCompartmentExists(JSContext *cx)
return true;
}
size_t
JSCompartment::getMjitCodeSize() const
void
JSCompartment::getMjitCodeStats(size_t& method, size_t& regexp, size_t& unused) const
{
return jaegerCompartment_ ? jaegerCompartment_->execAlloc()->getCodeSize() : 0;
if (jaegerCompartment_) {
jaegerCompartment_->execAlloc()->getCodeStats(method, regexp, unused);
} else {
method = 0;
regexp = 0;
unused = 0;
}
}
#endif

View File

@ -455,7 +455,7 @@ struct JS_FRIEND_API(JSCompartment) {
bool ensureJaegerCompartmentExists(JSContext *cx);
size_t getMjitCodeSize() const;
void getMjitCodeStats(size_t& method, size_t& regexp, size_t& unused) const;
#endif
WTF::BumpPointerAllocator *regExpAllocator;

View File

@ -112,7 +112,8 @@ class LinkerHelper : public JSC::LinkBuffer
#endif
public:
LinkerHelper(Assembler &masm) : masm(masm)
LinkerHelper(Assembler &masm, JSC::CodeKind kind) : JSC::LinkBuffer(kind)
, masm(masm)
#ifdef DEBUG
, verifiedRange(false)
#endif

View File

@ -860,8 +860,8 @@ mjit::Compiler::finishThisUp(JITScript **jitp)
jumpTableOffsets.length() * sizeof(void *);
JSC::ExecutablePool *execPool;
uint8 *result =
(uint8 *)script->compartment->jaegerCompartment()->execAlloc()->alloc(codeSize, &execPool);
uint8 *result = (uint8 *)script->compartment->jaegerCompartment()->execAlloc()->
alloc(codeSize, &execPool, JSC::METHOD_CODE);
if (!result) {
js_ReportOutOfMemory(cx);
return Compile_Error;
@ -871,8 +871,8 @@ mjit::Compiler::finishThisUp(JITScript **jitp)
masm.executableCopy(result);
stubcc.masm.executableCopy(result + masm.size());
JSC::LinkBuffer fullCode(result, codeSize);
JSC::LinkBuffer stubCode(result + masm.size(), stubcc.size());
JSC::LinkBuffer fullCode(result, codeSize, JSC::METHOD_CODE);
JSC::LinkBuffer stubCode(result + masm.size(), stubcc.size(), JSC::METHOD_CODE);
size_t nNmapLive = loopEntries.length();
for (size_t i = 0; i < script->length; i++) {
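
For orientation, the single METHOD_CODE allocation made in finishThisUp() is carved up roughly as follows; this is an illustrative layout sketch, with the exact contents of the trailing tables elided:

    // result                        result + masm.size()           result + codeSize
    // |----- masm (fast paths) -----|----- stubcc (slow paths) ----|-- nmap/tables --|
    //
    // fullCode covers the whole block, stubCode only the stub region, and both
    // LinkBuffers carry METHOD_CODE so every byte counts toward the method-JIT bucket.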

View File

@ -224,7 +224,7 @@ AttachSetGlobalNameStub(VMFrame &f, ic::SetGlobalNameIC *ic, JSObject *obj, cons
Jump done = masm.jump();
JITScript *jit = f.jit();
LinkerHelper linker(masm);
LinkerHelper linker(masm, JSC::METHOD_CODE);
JSC::ExecutablePool *ep = linker.init(f.cx);
if (!ep)
return Lookup_Error;
@ -340,7 +340,7 @@ class EqualityICLinker : public LinkerHelper
public:
EqualityICLinker(Assembler &masm, VMFrame &f)
: LinkerHelper(masm), f(f)
: LinkerHelper(masm, JSC::METHOD_CODE), f(f)
{ }
bool init(JSContext *cx) {
@ -701,7 +701,7 @@ class CallCompiler : public BaseCompiler
masm.load32(FrameAddress(offsetof(VMFrame, u.call.dynamicArgc)), JSParamReg_Argc);
masm.jump(t0);
LinkerHelper linker(masm);
LinkerHelper linker(masm, JSC::METHOD_CODE);
JSC::ExecutablePool *ep = poolForSize(linker, CallICInfo::Pool_ScriptStub);
if (!ep)
return false;
@ -718,7 +718,7 @@ class CallCompiler : public BaseCompiler
(unsigned long) masm.size());
if (f.regs.inlined()) {
JSC::LinkBuffer code((uint8 *) cs.executableAddress(), masm.size());
JSC::LinkBuffer code((uint8 *) cs.executableAddress(), masm.size(), JSC::METHOD_CODE);
code.patch(inlined, f.regs.inlined());
}
@ -783,7 +783,7 @@ class CallCompiler : public BaseCompiler
Jump funGuard = masm.branchPtr(Assembler::NotEqual, t0, ImmPtr(fun));
Jump done = masm.jump();
LinkerHelper linker(masm);
LinkerHelper linker(masm, JSC::METHOD_CODE);
JSC::ExecutablePool *ep = poolForSize(linker, CallICInfo::Pool_ClosureStub);
if (!ep)
return false;
@ -1037,7 +1037,7 @@ class CallCompiler : public BaseCompiler
masm.storePtr(ImmPtr(NULL), FrameAddress(offsetof(VMFrame, stubRejoin)));
masm.throwInJIT();
LinkerHelper linker(masm);
LinkerHelper linker(masm, JSC::METHOD_CODE);
JSC::ExecutablePool *ep = poolForSize(linker, CallICInfo::Pool_NativeStub);
if (!ep)
THROWV(true);
@ -1336,7 +1336,7 @@ ic::GenerateArgumentCheckStub(VMFrame &f)
Jump done = masm.jump();
LinkerHelper linker(masm);
LinkerHelper linker(masm, JSC::METHOD_CODE);
JSC::ExecutablePool *ep = linker.init(f.cx);
if (!ep)
return;

View File

@ -122,7 +122,7 @@ class PICLinker : public LinkerHelper
public:
PICLinker(Assembler &masm, ic::BasePolyIC &ic)
: LinkerHelper(masm), ic(ic)
: LinkerHelper(masm, JSC::METHOD_CODE), ic(ic)
{ }
bool init(JSContext *cx) {
@ -2978,7 +2978,7 @@ SetElementIC::attachHoleStub(JSContext *cx, JSObject *obj, int32 keyval)
JS_ASSERT(!execPool);
JS_ASSERT(!inlineHoleGuardPatched);
LinkerHelper buffer(masm);
LinkerHelper buffer(masm, JSC::METHOD_CODE);
execPool = buffer.init(cx);
if (!execPool)
return error(cx);
@ -3059,7 +3059,7 @@ SetElementIC::attachTypedArray(JSContext *cx, JSObject *obj, int32 key)
// by a GC or shape regenerated GC. We let this stub live for the lifetime
// of the script.
JS_ASSERT(!execPool);
LinkerHelper buffer(masm);
LinkerHelper buffer(masm, JSC::METHOD_CODE);
execPool = buffer.init(cx);
if (!execPool)
return error(cx);

View File

@ -209,8 +209,8 @@ StubCompiler::emitStubCall(void *ptr, RejoinState rejoin, int32 slots)
void
StubCompiler::fixCrossJumps(uint8 *ncode, size_t offset, size_t total)
{
JSC::LinkBuffer fast(ncode, total);
JSC::LinkBuffer slow(ncode + offset, total - offset);
JSC::LinkBuffer fast(ncode, total, JSC::METHOD_CODE);
JSC::LinkBuffer slow(ncode + offset, total - offset, JSC::METHOD_CODE);
for (size_t i = 0; i < exits.length(); i++)
fast.link(exits[i].from, slow.locationOf(exits[i].to));

View File

@ -41,6 +41,7 @@
#include "TrampolineCompiler.h"
#include "StubCalls.h"
#include "assembler/assembler/LinkBuffer.h"
#include "assembler/jit/ExecutableAllocator.h"
namespace js {
namespace mjit {
@ -96,7 +97,7 @@ TrampolineCompiler::compileTrampoline(Trampolines::TrampolinePtr *where,
JS_ASSERT(entry.isSet());
bool ok;
JSC::LinkBuffer buffer(&masm, execAlloc, poolp, &ok);
JSC::LinkBuffer buffer(&masm, execAlloc, poolp, &ok, JSC::METHOD_CODE);
if (!ok)
return false;
masm.finalize(buffer);

View File

@ -4042,9 +4042,12 @@ MJitCodeStats(JSContext *cx, uintN argc, jsval *vp)
#ifdef JS_METHODJIT
JSRuntime *rt = cx->runtime;
AutoLockGC lock(rt);
size_t n = 0;
size_t n = 0, method, regexp, unused;
for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); ++c)
n += (*c)->getMjitCodeSize();
{
(*c)->getMjitCodeStats(method, regexp, unused);
n += method + regexp + unused;
}
JS_SET_RVAL(cx, vp, INT_TO_JSVAL(n));
#else
JS_SET_RVAL(cx, vp, JSVAL_VOID);

View File

@ -1265,10 +1265,11 @@ GetCompartmentScriptsSize(JSCompartment *c)
#ifdef JS_METHODJIT
PRInt64
GetCompartmentMjitCodeSize(JSCompartment *c)
void
GetCompartmentMjitCodeStats(JSCompartment *c, size_t& method, size_t& regexp,
size_t& unused)
{
return c->getMjitCodeSize();
c->getMjitCodeStats(method, regexp, unused);
}
PRInt64
@ -1339,7 +1340,11 @@ CompartmentCallback(JSContext *cx, void *vdata, JSCompartment *compartment)
// Get the compartment-level numbers.
curr->scripts = GetCompartmentScriptsSize(compartment);
#ifdef JS_METHODJIT
curr->mjitCode = GetCompartmentMjitCodeSize(compartment);
size_t method, regexp, unused;
GetCompartmentMjitCodeStats(compartment, method, regexp, unused);
curr->mjitCodeMethod = method;
curr->mjitCodeRegexp = regexp;
curr->mjitCodeUnused = unused;
curr->mjitData = GetCompartmentMjitDataSize(compartment);
#endif
#ifdef JS_TRACER
@ -1799,11 +1804,24 @@ ReportCompartmentStats(const CompartmentStats &stats,
#ifdef JS_METHODJIT
ReportMemoryBytes0(MakeMemoryReporterPath(pathPrefix, stats.name,
"mjit-code"),
nsIMemoryReporter::KIND_NONHEAP, stats.mjitCode,
"mjit-code/method"),
nsIMemoryReporter::KIND_NONHEAP, stats.mjitCodeMethod,
"Memory used by the method JIT to hold the compartment's generated code.",
callback, closure);
ReportMemoryBytes0(MakeMemoryReporterPath(pathPrefix, stats.name,
"mjit-code/regexp"),
nsIMemoryReporter::KIND_NONHEAP, stats.mjitCodeRegexp,
"Memory used by the regexp JIT to hold the compartment's generated code.",
callback, closure);
ReportMemoryBytes0(MakeMemoryReporterPath(pathPrefix, stats.name,
"mjit-code/unused"),
nsIMemoryReporter::KIND_NONHEAP, stats.mjitCodeUnused,
"Memory allocated by the method and/or regexp JIT to hold the "
"compartment's code, but which is currently unused.",
callback, closure);
ReportMemoryBytes0(MakeMemoryReporterPath(pathPrefix, stats.name,
"mjit-data"),
nsIMemoryReporter::KIND_HEAP, stats.mjitData,

View File

@ -213,7 +213,9 @@ struct CompartmentStats
PRInt64 scripts;
#ifdef JS_METHODJIT
PRInt64 mjitCode;
PRInt64 mjitCodeMethod;
PRInt64 mjitCodeRegexp;
PRInt64 mjitCodeUnused;
PRInt64 mjitData;
#endif
#ifdef JS_TRACER

View File

@ -2397,7 +2397,7 @@ public:
// XXX yarr-oom
ExecutablePool *pool;
bool ok;
LinkBuffer linkBuffer(this, globalData->regexAllocator, &pool, &ok);
LinkBuffer linkBuffer(this, globalData->regexAllocator, &pool, &ok, REGEXP_CODE);
m_backtrackingState.linkDataLabels(linkBuffer);
jitObject.set(linkBuffer.finalizeCode());
jitObject.setFallBack(m_shouldFallBack);