Mirror of https://github.com/mozilla/gecko-dev.git (synced 2025-01-12 06:52:25 +00:00)
Bug 927782 - Part 4: Generators allocate all locals on the scope chain. r=luke
commit 616863d982
parent 622f257d78
@@ -22,11 +22,6 @@ function testSteps()
   objectStore.add(Bob);
   yield undefined;
 
-  // This direct eval causes locals to be aliased, and thus allocated on
-  // the scope chain. Comment it out (and the workarounds below) and
-  // the test passes. Bug 943409.
-  eval('');
-
   db.transaction("foo", "readwrite").objectStore("foo")
     .index("name").openCursor().onsuccess = function(event) {
     event.target.transaction.oncomplete = continueToNextStep;
@@ -35,11 +35,6 @@ function testSteps()
   let objectStore = db.createObjectStore("foo", { keyPath: "ss" });
   objectStore.createIndex("name", "name", { unique: true });
 
-  // This direct eval causes locals to be aliased, and thus allocated on
-  // the scope chain. Comment it out (and the workarounds below) and
-  // the test passes. Bug 943409.
-  eval('');
-
   for (let i = 0; i < objectStoreData.length - 1; i++) {
     objectStore.add(objectStoreData[i]);
   }
@@ -34,9 +34,6 @@ function testSteps()
 
   event.target.onsuccess = continueToNextStep;
 
-  // Bug 943409.
-  eval('');
-
   for (let objectStoreIndex in objectStoreData) {
     const objectStoreInfo = objectStoreData[objectStoreIndex];
     let objectStore = db.createObjectStore(objectStoreInfo.name,
@@ -18,9 +18,6 @@ function testSteps()
   let db = event.target.result;
   db.onerror = errorHandler;
 
-  // Bug 943409.
-  eval('');
-
   for each (let autoIncrement in [false, true]) {
     let objectStore =
       db.createObjectStore(autoIncrement, { keyPath: "id",
@@ -1024,9 +1024,14 @@ BytecodeEmitter::isAliasedName(ParseNode *pn)
         /*
          * There are two ways to alias a let variable: nested functions and
          * dynamic scope operations. (This is overly conservative since the
-         * bindingsAccessedDynamically flag is function-wide.)
+         * bindingsAccessedDynamically flag, checked by allLocalsAliased, is
+         * function-wide.)
+         *
+         * In addition all locals in generators are marked as aliased, to ensure
+         * that they are allocated on scope chains instead of on the stack. See
+         * the definition of SharedContext::allLocalsAliased.
          */
-        return dn->isClosed() || sc->bindingsAccessedDynamically();
+        return dn->isClosed() || sc->allLocalsAliased();
       case Definition::ARG:
         /*
          * Consult the bindings, since they already record aliasing. We might
@@ -1035,12 +1040,13 @@ BytecodeEmitter::isAliasedName(ParseNode *pn)
          * a given name is aliased. This is necessary to avoid generating a
          * shape for the call object with more than one name for a given
          * slot (which violates internal engine invariants). All this means that
-         * the '|| sc->bindingsAccessedDynamically' disjunct is incorrect since
-         * it will mark both parameters in function(x,x) as aliased.
+         * the '|| sc->allLocalsAliased()' disjunct is incorrect since it will
+         * mark both parameters in function(x,x) as aliased.
          */
         return script->formalIsAliased(pn->pn_cookie.slot());
       case Definition::VAR:
       case Definition::CONST:
+        JS_ASSERT_IF(sc->allLocalsAliased(), script->varIsAliased(pn->pn_cookie.slot()));
         return script->varIsAliased(pn->pn_cookie.slot());
       case Definition::PLACEHOLDER:
       case Definition::NAMED_LAMBDA:
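For readers following the aliasing logic in the two hunks above, here is a minimal, self-contained model of the decision this patch changes. The types Context and Binding are made up for illustration; they are not SpiderMonkey's SharedContext or Definition classes. The sketch only restates what the patch itself says: a let binding is aliased if it is closed over, or if the whole function must alias its locals because of dynamic scope access or because it is a generator (see SharedContext::allLocalsAliased later in this patch).

// aliasing_sketch.cpp -- illustrative model only; not SpiderMonkey's real types.
#include <cstdio>

struct Context {
    bool bindingsAccessedDynamically;  // e.g. a direct eval or 'with' was seen
    bool isFunction;
    bool isGenerator;
};

struct Binding {
    const char *name;
    bool closedOver;  // captured by a nested function
};

// Mirrors SharedContext::allLocalsAliased() from this patch: dynamic scope
// access or being a generator forces every local onto the scope chain.
static bool allLocalsAliased(const Context &cx) {
    return cx.bindingsAccessedDynamically || (cx.isFunction && cx.isGenerator);
}

// Mirrors the LET arm of BytecodeEmitter::isAliasedName(): a binding lives on
// the scope chain if it is closed over, or if the whole function is forced to
// alias its locals.
static bool isAliased(const Binding &b, const Context &cx) {
    return b.closedOver || allLocalsAliased(cx);
}

int main() {
    Context generator = { false, true, true };
    Context plain     = { false, true, false };
    Binding x = { "x", false };

    std::printf("x aliased in plain function: %d\n", isAliased(x, plain));      // 0
    std::printf("x aliased in generator:      %d\n", isAliased(x, generator));  // 1
}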
@@ -1072,6 +1078,17 @@ AdjustBlockSlot(ExclusiveContext *cx, BytecodeEmitter *bce, int slot)
     return slot;
 }
 
+#ifdef DEBUG
+static bool
+AllLocalsAliased(StaticBlockObject &obj)
+{
+    for (unsigned i = 0; i < obj.slotCount(); i++)
+        if (!obj.isAliased(i))
+            return false;
+    return true;
+}
+#endif
+
 static bool
 EmitEnterBlock(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn, JSOp op)
 {
@@ -1108,7 +1125,7 @@ EmitEnterBlock(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn, JSOp op)
 
         /* Beware the empty destructuring dummy. */
         if (!dn) {
-            blockObj->setAliased(i, bce->sc->bindingsAccessedDynamically());
+            blockObj->setAliased(i, bce->sc->allLocalsAliased());
             continue;
         }
 
@@ -1129,6 +1146,8 @@ EmitEnterBlock(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn, JSOp op)
         blockObj->setAliased(i, bce->isAliasedName(dn));
     }
 
+    JS_ASSERT_IF(bce->sc->allLocalsAliased(), AllLocalsAliased(*blockObj));
+
     return true;
 }
 
@@ -278,7 +278,7 @@ AppendPackedBindings(const ParseContext<ParseHandler> *pc, const DeclVector &vec
          */
         JS_ASSERT_IF(dn->isClosed(), pc->decls().lookupFirst(name) == dn);
         bool aliased = dn->isClosed() ||
-                       (pc->sc->bindingsAccessedDynamically() &&
+                       (pc->sc->allLocalsAliased() &&
                         pc->decls().lookupFirst(name) == dn);
 
         *dst = Binding(name, kind, aliased);
@@ -200,6 +200,8 @@ class SharedContext
     void setBindingsAccessedDynamically() { anyCxFlags.bindingsAccessedDynamically = true; }
     void setHasDebuggerStatement() { anyCxFlags.hasDebuggerStatement = true; }
 
+    inline bool allLocalsAliased();
+
     // JSOPTION_EXTRA_WARNINGS warnings or strict mode errors.
     bool needStrictChecks() {
         return strict || extraWarnings;
@@ -307,7 +309,8 @@ class FunctionBox : public ObjectBox, public SharedContext
         // Note: this should be kept in sync with JSFunction::isHeavyweight().
         return bindings.hasAnyAliasedBindings() ||
                hasExtensibleScope() ||
-               needsDeclEnvObject();
+               needsDeclEnvObject() ||
+               isGenerator();
     }
 };
 
@@ -318,6 +321,18 @@ SharedContext::asFunctionBox()
     return static_cast<FunctionBox*>(this);
 }
 
+// In generators, we treat all locals as aliased so that they get stored on the
+// heap. This way there is less information to copy off the stack when
+// suspending, and back on when resuming. It also avoids the need to create and
+// invalidate DebugScope proxies for unaliased locals in a generator frame, as
+// the generator frame will be copied out to the heap and released only by GC.
+inline bool
+SharedContext::allLocalsAliased()
+{
+    return bindingsAccessedDynamically() || (isFunctionBox() && asFunctionBox()->isGenerator());
+}
+
+
 /*
  * NB: If you add a new type of statement that is a scope, add it between
  * STMT_WITH and STMT_CATCH, or you will break StmtInfoBase::linksScope. If you
@@ -100,7 +100,8 @@ class JSFunction : public JSObject
         // Note: this should be kept in sync with FunctionBox::isHeavyweight().
         return nonLazyScript()->bindings.hasAnyAliasedBindings() ||
                nonLazyScript()->funHasExtensibleScope() ||
-               nonLazyScript()->funNeedsDeclEnvObject();
+               nonLazyScript()->funNeedsDeclEnvObject() ||
+               isGenerator();
     }
 
     /* A function can be classified as either native (C++) or interpreted (JS): */
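The two isHeavyweight() predicates above (FunctionBox and JSFunction) must stay in sync, and both now also return true for generators. Below is a tiny illustrative model of that predicate, with a plain struct standing in for the real classes; the only point is that, after this patch, being a generator alone is enough to make a function heavyweight, which together with allLocalsAliased() keeps all of its locals on the scope chain.

// heavyweight_sketch.cpp -- illustrative model, not SpiderMonkey's real classes.
#include <cstdio>

struct FunctionInfo {
    bool hasAnyAliasedBindings;
    bool hasExtensibleScope;   // e.g. contains a direct eval
    bool needsDeclEnvObject;
    bool isGenerator;
};

// Mirrors FunctionBox::isHeavyweight() / JSFunction::isHeavyweight() after
// this patch: being a generator by itself now makes a function heavyweight.
static bool isHeavyweight(const FunctionInfo &f) {
    return f.hasAnyAliasedBindings ||
           f.hasExtensibleScope ||
           f.needsDeclEnvObject ||
           f.isGenerator;
}

int main() {
    FunctionInfo plainGenerator = { false, false, false, true };
    std::printf("generator heavyweight: %d\n", isHeavyweight(plainGenerator));  // 1
}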
@@ -885,18 +885,6 @@ ScopeIter::ScopeIter(AbstractFramePtr frame, JSContext *cx
     MOZ_GUARD_OBJECT_NOTIFIER_INIT;
 }
 
-ScopeIter::ScopeIter(const ScopeIter &si, AbstractFramePtr frame, JSContext *cx
-                     MOZ_GUARD_OBJECT_NOTIFIER_PARAM_IN_IMPL)
-  : cx(si.cx),
-    frame_(frame),
-    cur_(cx, si.cur_),
-    block_(cx, si.block_),
-    type_(si.type_),
-    hasScopeObject_(si.hasScopeObject_)
-{
-    MOZ_GUARD_OBJECT_NOTIFIER_INIT;
-}
-
 ScopeIter::ScopeIter(AbstractFramePtr frame, ScopeObject &scope, JSContext *cx
                      MOZ_GUARD_OBJECT_NOTIFIER_PARAM_IN_IMPL)
   : cx(cx),
@@ -1095,8 +1083,7 @@ class DebugScopeProxy : public BaseProxyHandler
      * the normal Call/BlockObject scope objects and thus must be recovered
      * from somewhere else:
      *  + if the invocation for which the scope was created is still executing,
-     *    there is a StackFrame (either live on the stack or floating in a
-     *    generator object) holding the values;
+     *    there is a StackFrame live on the stack holding the values;
      *  + if the invocation for which the scope was created finished executing:
      *    - and there was a DebugScopeObject associated with scope, then the
      *      DebugScopes::onPop(Call|Block) handler copied out the unaliased
@@ -1595,9 +1582,8 @@ void
 DebugScopes::sweep(JSRuntime *rt)
 {
     /*
-     * Note: missingScopes points to debug scopes weakly not just so that debug
-     * scopes can be released more eagerly, but, more importantly, to avoid
-     * creating an uncollectable cycle with suspended generator frames.
+     * missingScopes points to debug scopes weakly so that debug scopes can be
+     * released more eagerly.
      */
     for (MissingScopeMap::Enum e(missingScopes); !e.empty(); e.popFront()) {
         DebugScopeObject **debugScope = e.front().value().unsafeGet();
@@ -1626,7 +1612,6 @@ DebugScopes::sweep(JSRuntime *rt)
 
     for (LiveScopeMap::Enum e(liveScopes); !e.empty(); e.popFront()) {
         ScopeObject *scope = e.front().key();
-        AbstractFramePtr frame = e.front().value();
 
         /*
          * Scopes can be finalized when a debugger-synthesized ScopeObject is
@@ -1636,19 +1621,6 @@ DebugScopes::sweep(JSRuntime *rt)
             e.removeFront();
             continue;
         }
-
-        /*
-         * As explained in onGeneratorFrameChange, liveScopes includes
-         * suspended generator frames. Since a generator can be finalized while
-         * its scope is live, we must explicitly detect finalized generators.
-         */
-        if (JSGenerator *gen = frame.maybeSuspendedGenerator(rt)) {
-            JS_ASSERT(gen->state == JSGEN_NEWBORN || gen->state == JSGEN_OPEN);
-            if (IsObjectAboutToBeFinalized(&gen->obj)) {
-                e.removeFront();
-                continue;
-            }
-        }
     }
 }
 
@@ -1739,6 +1711,7 @@ DebugScopes::addDebugScope(JSContext *cx, const ScopeIter &si, DebugScopeObject
 {
     JS_ASSERT(!si.hasScopeObject());
     JS_ASSERT(cx->compartment() == debugScope.compartment());
+    JS_ASSERT_IF(si.frame().isFunctionFrame(), !si.frame().callee()->isGenerator());
 
     if (!CanUseDebugScopeMaps(cx))
         return true;
@@ -1890,43 +1863,6 @@ DebugScopes::onPopStrictEvalScope(AbstractFramePtr frame)
         scopes->liveScopes.remove(&frame.scopeChain()->as<CallObject>());
 }
 
-void
-DebugScopes::onGeneratorFrameChange(AbstractFramePtr from, AbstractFramePtr to, JSContext *cx)
-{
-    for (ScopeIter toIter(to, cx); !toIter.done(); ++toIter) {
-        DebugScopes *scopes = ensureCompartmentData(cx);
-        if (!scopes)
-            return;
-
-        if (toIter.hasScopeObject()) {
-            /*
-             * Not only must we correctly replace mappings [scope -> from] with
-             * mappings [scope -> to], but we must add [scope -> to] if it
-             * doesn't already exist so that if we need to proxy a generator's
-             * scope while it is suspended, we can find its frame (which would
-             * otherwise not be found by AllFramesIter).
-             */
-            JS_ASSERT(toIter.scope().compartment() == cx->compartment());
-            LiveScopeMap::AddPtr livePtr = scopes->liveScopes.lookupForAdd(&toIter.scope());
-            if (livePtr) {
-                livePtr->value() = to;
-            } else {
-                scopes->liveScopes.add(livePtr, &toIter.scope(), to);  // OOM here?
-                liveScopesPostWriteBarrier(cx->runtime(), &scopes->liveScopes, &toIter.scope());
-            }
-        } else {
-            ScopeIter si(toIter, from, cx);
-            JS_ASSERT(si.frame().scopeChain()->compartment() == cx->compartment());
-            if (MissingScopeMap::Ptr p = scopes->missingScopes.lookup(si)) {
-                DebugScopeObject &debugScope = *p->value();
-                scopes->liveScopes.lookup(&debugScope.scope())->value() = to;
-                scopes->missingScopes.remove(p);
-                scopes->missingScopes.put(toIter, &debugScope);  // OOM here?
-            }
-        }
-    }
-}
-
 void
 DebugScopes::onCompartmentLeaveDebugMode(JSCompartment *c)
 {
@@ -1966,6 +1902,9 @@ DebugScopes::updateLiveScopes(JSContext *cx)
         if (frame.scopeChain()->compartment() != cx->compartment())
             continue;
 
+        if (frame.isFunctionFrame() && frame.callee()->isGenerator())
+            continue;
+
         for (ScopeIter si(frame, cx); !si.done(); ++si) {
             if (si.hasScopeObject()) {
                 JS_ASSERT(si.scope().compartment() == cx->compartment());
@@ -1994,25 +1933,9 @@ DebugScopes::hasLiveFrame(ScopeObject &scope)
     if (!scopes)
         return NullFramePtr();
 
-    if (LiveScopeMap::Ptr p = scopes->liveScopes.lookup(&scope)) {
-        AbstractFramePtr frame = p->value();
-
-        /*
-         * Since liveScopes is effectively a weak pointer, we need a read
-         * barrier. The scenario where this is necessary is:
-         *   1. GC starts, a suspended generator is not live
-         *   2. hasLiveFrame returns a StackFrame* to the (soon to be dead)
-         *      suspended generator
-         *   3. stack frame values (which will never be marked) are read from the
-         *      StackFrame
-         *   4. GC completes, live objects may now point to values that weren't
-         *      marked and thus may point to swept GC things
-         */
-        if (JSGenerator *gen = frame.maybeSuspendedGenerator(scope.compartment()->runtimeFromMainThread()))
-            JSObject::readBarrier(gen->obj);
-
-        return frame;
-    }
+    if (LiveScopeMap::Ptr p = scopes->liveScopes.lookup(&scope))
+        return p->value();
+
     return NullFramePtr();
 }
 
@@ -2074,6 +1997,8 @@ GetDebugScopeForMissing(JSContext *cx, const ScopeIter &si)
     DebugScopeObject *debugScope = nullptr;
     switch (si.type()) {
       case ScopeIter::Call: {
+        // Generators should always reify their scopes.
+        JS_ASSERT(!si.frame().callee()->isGenerator());
         Rooted<CallObject*> callobj(cx, CallObject::createForFunction(cx, si.frame()));
         if (!callobj)
             return nullptr;
@@ -2090,6 +2015,8 @@ GetDebugScopeForMissing(JSContext *cx, const ScopeIter &si)
         break;
       }
       case ScopeIter::Block: {
+        // Generators should always reify their scopes.
+        JS_ASSERT_IF(si.frame().isFunctionFrame(), !si.frame().callee()->isGenerator());
         Rooted<StaticBlockObject *> staticBlock(cx, &si.staticBlock());
         ClonedBlockObject *block = ClonedBlockObject::create(cx, staticBlock, si.frame());
         if (!block)
@@ -548,13 +548,6 @@ class ScopeIter
     explicit ScopeIter(JSObject &enclosingScope, JSContext *cx
                        MOZ_GUARD_OBJECT_NOTIFIER_PARAM);
 
-    /*
-     * For the special case of generators, copy the given ScopeIter, with 'fp'
-     * as the StackFrame instead of si.fp(). Not for general use.
-     */
-    ScopeIter(const ScopeIter &si, AbstractFramePtr frame, JSContext *cx
-              MOZ_GUARD_OBJECT_NOTIFIER_PARAM);
-
     /* Like ScopeIter(StackFrame *) except start at 'scope'. */
     ScopeIter(AbstractFramePtr frame, ScopeObject &scope, JSContext *cx
               MOZ_GUARD_OBJECT_NOTIFIER_PARAM);
@@ -719,15 +712,12 @@ class DebugScopes
     static bool updateLiveScopes(JSContext *cx);
     static AbstractFramePtr hasLiveFrame(ScopeObject &scope);
 
-    /*
-     * In debug-mode, these must be called whenever exiting a call/block or
-     * when activating/yielding a generator.
-     */
+    // In debug-mode, these must be called whenever exiting a scope that might
+    // have stack-allocated locals.
     static void onPopCall(AbstractFramePtr frame, JSContext *cx);
     static void onPopBlock(JSContext *cx, AbstractFramePtr frame);
     static void onPopWith(AbstractFramePtr frame);
     static void onPopStrictEvalScope(AbstractFramePtr frame);
-    static void onGeneratorFrameChange(AbstractFramePtr from, AbstractFramePtr to, JSContext *cx);
     static void onCompartmentLeaveDebugMode(JSCompartment *c);
 };
 
@@ -516,14 +516,6 @@ AbstractFramePtr::unaliasedActual(unsigned i, MaybeCheckAliasing checkAliasing)
 #endif
 }
 
-inline JSGenerator *
-AbstractFramePtr::maybeSuspendedGenerator(JSRuntime *rt) const
-{
-    if (isStackFrame())
-        return asStackFrame()->maybeSuspendedGenerator(rt);
-    return nullptr;
-}
-
 inline StaticBlockObject *
 AbstractFramePtr::maybeBlockChain() const
 {
@@ -123,9 +123,6 @@ StackFrame::copyFrameAndValues(JSContext *cx, Value *vp, StackFrame *otherfp,
         if (doPostBarrier)
             HeapValue::writeBarrierPost(*dst, dst);
     }
-
-    if (JS_UNLIKELY(cx->compartment()->debugMode()))
-        DebugScopes::onGeneratorFrameChange(otherfp, this, cx);
 }
 
 /* Note: explicit instantiation for js_NewGenerator located in jsiter.cpp. */
@@ -155,27 +152,6 @@ StackFrame::writeBarrierPost()
         HeapValue::writeBarrierPost(rval_, &rval_);
 }
 
-JSGenerator *
-StackFrame::maybeSuspendedGenerator(JSRuntime *rt)
-{
-    /*
-     * A suspended generator's frame is embedded inside the JSGenerator object
-     * and is not currently running.
-     */
-    if (!isGeneratorFrame() || !isSuspended())
-        return nullptr;
-
-    /*
-     * Once we know we have a suspended generator frame, there is a static
-     * offset from the frame's snapshot to beginning of the JSGenerator.
-     */
-    char *vp = reinterpret_cast<char *>(generatorArgsSnapshotBegin());
-    char *p = vp - offsetof(JSGenerator, stackSnapshot);
-    JSGenerator *gen = reinterpret_cast<JSGenerator *>(p);
-    JS_ASSERT(gen->fp == this);
-    return gen;
-}
-
 bool
 StackFrame::copyRawFrameSlots(AutoValueVector *vec)
 {
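The StackFrame::maybeSuspendedGenerator() method removed above recovered the owning JSGenerator from a suspended frame by subtracting a compile-time offsetof(...) from the address of data embedded in the generator object. The sketch below shows that container-from-member pattern in isolation; the Generator and Frame structs are hypothetical stand-ins, not the real JSGenerator layout.

// container_from_member.cpp -- generic sketch of the offsetof trick used by the
// removed StackFrame::maybeSuspendedGenerator(); 'Generator' here is hypothetical.
#include <cassert>
#include <cstddef>
#include <cstdint>

struct Frame { int dummy; };

struct Generator {
    int      state;
    Frame    frame;       // embedded frame, like a suspended generator's
    uint64_t snapshot[4]; // stands in for the stack/args snapshot
};

// Given a pointer to the embedded member, step back by its fixed offset to
// recover a pointer to the enclosing object.
static Generator *generatorFromFrame(Frame *fp) {
    char *p = reinterpret_cast<char *>(fp) - offsetof(Generator, frame);
    return reinterpret_cast<Generator *>(p);
}

int main() {
    Generator gen = {};
    Frame *fp = &gen.frame;
    assert(generatorFromFrame(fp) == &gen);
}

With generator locals now always aliased (per the allLocalsAliased comment earlier in this patch), the debugger machinery no longer needs this back-mapping, which is why the helper and its callers could be deleted.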
@@ -172,8 +172,6 @@ class AbstractFramePtr
 
     operator bool() const { return !!ptr_; }
 
-    inline JSGenerator *maybeSuspendedGenerator(JSRuntime *rt) const;
-
     inline JSObject *scopeChain() const;
     inline CallObject &callObj() const;
     inline bool initFunctionScopeObjects(JSContext *cx);
@@ -863,8 +861,6 @@ class StackFrame
     void copyFrameAndValues(JSContext *cx, Value *vp, StackFrame *otherfp,
                             const Value *othervp, Value *othersp);
 
-    JSGenerator *maybeSuspendedGenerator(JSRuntime *rt);
-
     /*
      * js::Execute pushes both global and function frames (since eval() in a
      * function pushes a frame with isFunctionFrame() && isEvalFrame()). Most