Bug 723313 - Stop using conservative stack scanner for VM stack marking (r=luke,bhackett)

This commit is contained in:
Bill McCloskey 2012-02-10 18:32:08 -08:00
parent 9265c12d77
commit 5401bf1ba6
5 changed files with 90 additions and 29 deletions

View File

@ -1891,26 +1891,6 @@ gc_lock_traversal(const GCLocks::Entry &entry, JSTracer *trc)
MarkRootGCThing(trc, entry.key, "locked object");
}
/*
 * Trace (mark) the GC things directly reachable from a stack frame: the
 * scope chain, the arguments object (if reified), the callee function or
 * the script, and the frame's return value.
 * NOTE(review): this is the pre-patch marker that this commit removes in
 * favor of the new StackFrame::mark() method.
 */
void
js_TraceStackFrame(JSTracer *trc, StackFrame *fp)
{
MarkRoot(trc, &fp->scopeChain(), "scope chain");
/* Dummy frames carry only a scope chain; nothing else to mark. */
if (fp->isDummyFrame())
return;
if (fp->hasArgsObj())
MarkRoot(trc, &fp->argsObj(), "arguments");
if (fp->isFunctionFrame()) {
MarkRoot(trc, fp->fun(), "fun");
/* An eval frame's script is distinct from its callee's script. */
if (fp->isEvalFrame()) {
MarkRoot(trc, fp->script(), "eval script");
}
} else {
MarkRoot(trc, fp->script(), "script");
}
/* The frame's script is on the stack, so its compartment is active. */
fp->script()->compartment()->active = true;
MarkRoot(trc, fp->returnValue(), "rval");
}
void
AutoIdArray::trace(JSTracer *trc)
{

View File

@ -1371,9 +1371,6 @@ IsAboutToBeFinalized(const js::gc::Cell *thing);
extern bool
IsAboutToBeFinalized(const js::Value &value);
extern void
js_TraceStackFrame(JSTracer *trc, js::StackFrame *fp);
extern bool
js_IsAddressableGCThing(JSRuntime *rt, uintptr_t w, js::gc::AllocKind *thingKind, void **thing);

View File

@ -1379,7 +1379,7 @@ MarkGenerator(JSTracer *trc, JSGenerator *gen)
* this code and save someone an hour later.
*/
MarkStackRangeConservatively(trc, gen->floatingStack, fp->formalArgsEnd());
js_TraceStackFrame(trc, fp);
fp->mark(trc);
MarkStackRangeConservatively(trc, fp->slots(), gen->regs.sp);
}

View File

@ -211,6 +211,31 @@ StackFrame::pcQuadratic(const ContextStack &stack, StackFrame *next, JSInlinedSi
return next->prevpc(pinlined);
}
/*
 * Mark the GC things directly held by this frame: scope chain, arguments
 * object (if present), callee function or script, and the return value.
 * Replaces the old free function js_TraceStackFrame.
 */
void
StackFrame::mark(JSTracer *trc)
{
/*
 * Normally we would use MarkRoot here, except that generators also take
 * this path. However, generators use a special write barrier when the stack
 * frame is copied to the floating frame. Therefore, no barrier is needed.
 */
gc::MarkObjectUnbarriered(trc, &scopeChain(), "scope chain");
/* Dummy frames have a scope chain but no args, callee, script, or rval. */
if (isDummyFrame())
return;
if (hasArgsObj())
gc::MarkObjectUnbarriered(trc, &argsObj(), "arguments");
if (isFunctionFrame()) {
gc::MarkObjectUnbarriered(trc, fun(), "fun");
/* An eval frame's script is distinct from its callee's script. */
if (isEvalFrame())
gc::MarkScriptUnbarriered(trc, script(), "eval script");
} else {
gc::MarkScriptUnbarriered(trc, script(), "script");
}
/*
 * Only a real GC marking pass should flip the compartment's active bit;
 * other tracers (e.g. heap dumps) must not mutate GC state.
 */
if (IS_GC_MARKING_TRACER(trc))
script()->compartment()->active = true;
gc::MarkValueUnbarriered(trc, returnValue(), "rval");
}
/*****************************************************************************/
bool
@ -381,6 +406,51 @@ StackSpace::containingSegment(const StackFrame *target) const
return *(StackSegment *)NULL;
}
/*
 * Exactly mark the value range [fp->slots(), slotsEnd) belonging to one
 * frame.  When the script has liveness-analysis results, dead tracked
 * slots are skipped (and overwritten with undefined so later GCs stay
 * safe); otherwise every slot in the range is marked.  pc is fp's current
 * bytecode location, used to query slot liveness.
 */
void
StackSpace::markFrameSlots(JSTracer *trc, StackFrame *fp, Value *slotsEnd, jsbytecode *pc)
{
Value *slotsBegin = fp->slots();
/* Non-scripted (dummy) frames have no script analysis; mark everything. */
if (!fp->isScriptFrame()) {
JS_ASSERT(fp->isDummyFrame());
gc::MarkRootRange(trc, slotsBegin, slotsEnd, "vm_stack");
return;
}
/* If it's a scripted frame, we should have a pc. */
JS_ASSERT(pc);
JSScript *script = fp->script();
/* Without lifetime analysis all slots must be assumed live. */
if (!script->hasAnalysis() || !script->analysis()->ranLifetimes()) {
gc::MarkRootRange(trc, slotsBegin, slotsEnd, "vm_stack");
return;
}
/*
 * If the JIT ran a lifetime analysis, then it may have left garbage in the
 * slots considered not live. We need to avoid marking them. Additionally,
 * in case the analysis information is thrown out later, we overwrite these
 * dead slots with valid values so that future GCs won't crash. Analysis
 * results are thrown away during the sweeping phase, so we always have at
 * least one GC to do this.
 */
analyze::AutoEnterAnalysis aea(script->compartment());
analyze::ScriptAnalysis *analysis = script->analysis();
uint32_t offset = pc - script->code;
Value *fixedEnd = slotsBegin + script->nfixed;
/* Fixed slots (locals): mark if live at |offset|, else poison with undefined. */
for (Value *vp = slotsBegin; vp < fixedEnd; vp++) {
uint32_t slot = analyze::LocalSlot(script, vp - slotsBegin);
/* Will this slot be synced by the JIT? */
if (!analysis->trackSlot(slot) || analysis->liveness(slot).live(offset))
gc::MarkRoot(trc, *vp, "vm_stack");
else
*vp = UndefinedValue();
}
/* Expression-stack slots past the fixed locals are always marked. */
gc::MarkRootRange(trc, fixedEnd, slotsEnd, "vm_stack");
}
void
StackSpace::mark(JSTracer *trc)
{
@ -401,15 +471,21 @@ StackSpace::mark(JSTracer *trc)
 * calls. Thus, marking can view the stack as the regex:
 * (segment slots (frame slots)*)*
 * which gets marked in reverse order.
 *
 */
/*
 * NOTE(review): this is a diff fragment whose +/- markers were dropped by
 * the viewer: the MarkStackRangeConservatively calls below are the removed
 * (pre-patch) lines, and markFrameSlots()/fp->mark()/MarkRootRange are
 * their exact-marking replacements added by this commit.
 */
Value *slotsEnd = nextSegEnd;
/* pc of the segment's youngest frame, if the segment has one. */
jsbytecode *pc = seg->maybepc();
for (StackFrame *fp = seg->maybefp(); (Value *)fp > (Value *)seg; fp = fp->prev()) {
MarkStackRangeConservatively(trc, fp->slots(), slotsEnd);
js_TraceStackFrame(trc, fp);
/* Mark from fp->slots() to slotsEnd. */
markFrameSlots(trc, fp, slotsEnd, pc);
fp->mark(trc);
slotsEnd = (Value *)fp;
/* Recover the pc of the calling frame for the next iteration. */
JSInlinedSite *site;
pc = fp->prevpc(&site);
JS_ASSERT_IF(fp->prev(), !site);
}
/* Finally, the segment's own slots below its oldest frame. */
MarkStackRangeConservatively(trc, seg->slotsBegin(), slotsEnd);
gc::MarkRootRange(trc, seg->slotsBegin(), slotsEnd, "vm_stack");
nextSegEnd = (Value *)seg;
}
}

View File

@ -1,4 +1,4 @@
/* -*- Mode: C; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*-
/* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*-
* vim: set ts=4 sw=4 et tw=79 ft=cpp:
*
* ***** BEGIN LICENSE BLOCK *****
@ -1193,6 +1193,9 @@ class StackFrame
#endif
void methodjitStaticAsserts();
public:
void mark(JSTracer *trc);
};
static const size_t VALUES_PER_STACK_FRAME = sizeof(StackFrame) / sizeof(Value);
@ -1365,6 +1368,10 @@ class StackSegment
return regs_ ? regs_->fp() : NULL;
}
jsbytecode *maybepc() const {
return regs_ ? regs_->pc : NULL;
}
CallArgsList &calls() const {
JS_ASSERT(calls_);
return *calls_;
@ -1535,6 +1542,7 @@ class StackSpace
/* Called during GC: mark segments, frames, and slots under firstUnused. */
void mark(JSTracer *trc);
void markFrameSlots(JSTracer *trc, StackFrame *fp, Value *slotsEnd, jsbytecode *pc);
/* We only report the committed size; uncommitted size is uninteresting. */
JS_FRIEND_API(size_t) sizeOfCommitted();