Bug 868990 - rm CallArgsList, StackIter cleanup. r=luke

commit 9379257a6f
parent 70569bdd14
Author: Jan de Mooij
Date:   2013-05-07 09:00:24 +02:00

11 changed files with 136 additions and 442 deletions

@@ -2080,7 +2080,7 @@ ion::SideCannon(JSContext *cx, StackFrame *fp, jsbytecode *pc)
 }
 
 IonExecStatus
-ion::FastInvoke(JSContext *cx, HandleFunction fun, CallArgsList &args)
+ion::FastInvoke(JSContext *cx, HandleFunction fun, CallArgs &args)
 {
     JS_CHECK_RECURSION(cx, return IonExec_Error);
@@ -2122,10 +2122,8 @@ ion::FastInvoke(JSContext *cx, HandleFunction fun, CallArgsList &args)
     JS_ASSERT(args.length() >= fun->nargs);
 
     JSAutoResolveFlags rf(cx, RESOLVE_INFER);
-    args.setActive();
     enter(jitcode, args.length() + 1, args.array() - 1, fp, calleeToken,
           /* scopeChain = */ NULL, 0, result.address());
-    args.setInactive();
 
     if (clearCallingIntoIon)
         fp->clearCallingIntoIon();
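
Note: the surviving enter() call leans on the CallArgs memory layout: vp[0] holds the callee, vp[1] holds |this|, and the actual arguments follow, so args.array() - 1 points at the |this| slot and args.length() + 1 spans |this| plus the actuals. A standalone sketch of that arithmetic (an illustrative model, not engine code):

#include <cassert>

// Minimal stand-in for JS::Value; only the slot layout matters here.
struct Value {};

// Model of JS::CallArgs over a vp array: vp[0] = callee, vp[1] = this, vp[2..] = args.
struct CallArgsModel {
    Value *argv_;                 // points at the first argument, i.e. vp + 2
    unsigned argc_;
    Value *array() const { return argv_; }
    unsigned length() const { return argc_; }
};

int main() {
    Value vp[5];                                // callee, this, three actual args
    CallArgsModel args = { vp + 2, 3 };
    assert(args.array() - 1 == vp + 1);         // the |this| slot enter() receives
    assert(args.length() + 1 == 4);             // |this| plus the actual arguments
    return 0;
}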

@@ -313,7 +313,7 @@ IonExecStatus Cannon(JSContext *cx, StackFrame *fp);
 IonExecStatus SideCannon(JSContext *cx, StackFrame *fp, jsbytecode *pc);
 
 // Used to enter Ion from C++ natives like Array.map. Called from FastInvokeGuard.
-IonExecStatus FastInvoke(JSContext *cx, HandleFunction fun, CallArgsList &args);
+IonExecStatus FastInvoke(JSContext *cx, HandleFunction fun, CallArgs &args);
 
 // Walk the stack and invalidate active Ion frames for the invalid scripts.
 void Invalidate(types::TypeCompartment &types, FreeOp *fop,
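
For context, the caller side of this signature looks roughly like the following. The FastInvokeGuard, args(), pushed(), and invoke() names come from jsinterp.h of this era, but treat the sketch as illustrative rather than an exact quote:

// Hypothetical native that repeatedly invokes a callable, the way Array.map
// does. FastInvokeGuard caches the jitcode lookup and routes each call through
// ion::FastInvoke when possible; with this patch it can hand FastInvoke its
// CallArgs directly, with no setActive()/setInactive() pairing around the call.
static bool
CallManyTimes(JSContext *cx, HandleValue fval, uint32_t n)
{
    FastInvokeGuard fig(cx, fval);
    InvokeArgsGuard &args = fig.args();
    for (uint32_t i = 0; i < n; i++) {
        if (!args.pushed() && !cx->stack.pushInvokeArgs(cx, 1, &args))
            return false;
        args.setCallee(fval);
        args.setThis(UndefinedValue());
        args[0] = Int32Value(i);
        if (!fig.invoke(cx))
            return false;
    }
    return true;
}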

@@ -88,21 +88,17 @@ IonFrameIterator::calleeToken() const
 JSFunction *
 IonFrameIterator::callee() const
 {
-    if (isScripted()) {
-        JS_ASSERT(isFunctionFrame() || isParallelFunctionFrame());
-        if (isFunctionFrame())
-            return CalleeTokenToFunction(calleeToken());
-        return CalleeTokenToParallelFunction(calleeToken());
-    }
-
-    JS_ASSERT(isNative());
-    return exitFrame()->nativeExit()->vp()[0].toObject().toFunction();
+    JS_ASSERT(isScripted());
+    JS_ASSERT(isFunctionFrame() || isParallelFunctionFrame());
+    if (isFunctionFrame())
+        return CalleeTokenToFunction(calleeToken());
+    return CalleeTokenToParallelFunction(calleeToken());
 }
 
 JSFunction *
 IonFrameIterator::maybeCallee() const
 {
-    if ((isScripted() && (isFunctionFrame() || isParallelFunctionFrame())) || isNative())
+    if (isScripted() && (isFunctionFrame() || isParallelFunctionFrame()))
         return callee();
     return NULL;
 }

@@ -135,7 +135,7 @@ fun_getProperty(JSContext *cx, HandleObject obj_, HandleId id, MutableHandleValu
 #ifdef JS_METHODJIT
     StackFrame *fp = NULL;
-    if (iter.isScript() && !iter.isIon())
+    if (!iter.isIon())
         fp = iter.interpFrame();
 
     if (JSID_IS_ATOM(id, cx->names().caller) && fp && fp->prev()) {

@@ -311,12 +311,10 @@ js::RunScript(JSContext *cx, StackFrame *fp)
         StackIter iter(cx);
         if (!iter.done()) {
             ++iter;
-            if (iter.isScript()) {
-                JSScript *script = iter.script();
-                jsbytecode *pc = iter.pc();
-                if (UseNewType(cx, script, pc))
-                    fp->setUseNewType();
-            }
+            JSScript *script = iter.script();
+            jsbytecode *pc = iter.pc();
+            if (UseNewType(cx, script, pc))
+                fp->setUseNewType();
         }
     }
@@ -392,7 +390,7 @@ js::RunScript(JSContext *cx, StackFrame *fp)
  * when done. Then push the return value.
  */
 bool
-js::InvokeKernel(JSContext *cx, CallArgs args, MaybeConstruct construct)
+js::Invoke(JSContext *cx, CallArgs args, MaybeConstruct construct)
 {
     JS_ASSERT(args.length() <= StackSpace::ARGS_LENGTH_MAX);
     JS_ASSERT(!cx->compartment->activeAnalysis);
@@ -480,7 +478,7 @@ js::Invoke(JSContext *cx, const Value &thisv, const Value &fval, unsigned argc,
 }
 
 bool
-js::InvokeConstructorKernel(JSContext *cx, CallArgs args)
+js::InvokeConstructor(JSContext *cx, CallArgs args)
 {
     JS_ASSERT(!FunctionClass.construct);
@@ -501,7 +499,7 @@ js::InvokeConstructorKernel(JSContext *cx, CallArgs args)
     if (!fun->isInterpretedConstructor())
         return ReportIsNotFunction(cx, args.calleev().get(), args.length() + 1, CONSTRUCT);
 
-    if (!InvokeKernel(cx, args, CONSTRUCT))
+    if (!Invoke(cx, args, CONSTRUCT))
         return false;
 
     JS_ASSERT(args.rval().isObject());
@@ -2342,7 +2340,7 @@ BEGIN_CASE(JSOP_EVAL)
         if (!DirectEval(cx, args))
             goto error;
     } else {
-        if (!InvokeKernel(cx, args))
+        if (!Invoke(cx, args))
            goto error;
    }
    regs.sp = args.spAfterCall();
@@ -2389,10 +2387,10 @@ BEGIN_CASE(JSOP_FUNCALL)
     /* Don't bother trying to fast-path calls to scripted non-constructors. */
     if (!isFunction || !fun->isInterpretedConstructor()) {
         if (construct) {
-            if (!InvokeConstructorKernel(cx, args))
+            if (!InvokeConstructor(cx, args))
                 goto error;
         } else {
-            if (!InvokeKernel(cx, args))
+            if (!Invoke(cx, args))
                 goto error;
         }
         Value *newsp = args.spAfterCall();

@@ -116,25 +116,11 @@ ValueToCallable(JSContext *cx, const Value &vp, int numToSkip = -1,
                 MaybeConstruct construct = NO_CONSTRUCT);
 
 /*
- * InvokeKernel assumes that the given args have been pushed on the top of the
- * VM stack. Additionally, if 'args' is contained in a CallArgsList, that they
- * have already been marked 'active'.
+ * Invoke assumes that the given args have been pushed on the top of the
+ * VM stack.
  */
 extern bool
-InvokeKernel(JSContext *cx, CallArgs args, MaybeConstruct construct = NO_CONSTRUCT);
-
-/*
- * Invoke assumes that 'args' has been pushed (via ContextStack::pushInvokeArgs)
- * and is currently at the top of the VM stack.
- */
-inline bool
-Invoke(JSContext *cx, InvokeArgsGuard &args, MaybeConstruct construct = NO_CONSTRUCT)
-{
-    args.setActive();
-    bool ok = InvokeKernel(cx, args, construct);
-    args.setInactive();
-    return ok;
-}
+Invoke(JSContext *cx, CallArgs args, MaybeConstruct construct = NO_CONSTRUCT);
 
 /*
  * This Invoke overload places the least requirements on the caller: it may be
@@ -154,21 +140,11 @@ InvokeGetterOrSetter(JSContext *cx, JSObject *obj, const Value &fval, unsigned a
                      Value *rval);
 
 /*
- * InvokeConstructor* implement a function call from a constructor context
+ * InvokeConstructor implements a function call from a constructor context
  * (e.g. 'new') handling the creation of the new 'this' object.
 */
 extern bool
-InvokeConstructorKernel(JSContext *cx, CallArgs args);
-
-/* See the InvokeArgsGuard overload of Invoke. */
-inline bool
-InvokeConstructor(JSContext *cx, InvokeArgsGuard &args)
-{
-    args.setActive();
-    bool ok = InvokeConstructorKernel(cx, ImplicitCast<CallArgs>(args));
-    args.setInactive();
-    return ok;
-}
+InvokeConstructor(JSContext *cx, CallArgs args);
 
 /* See the fval overload of Invoke. */
 extern bool
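
A sketch of the calling convention the new comment describes, mirroring how the fval overload of Invoke is implemented in jsinterp.cpp (details abbreviated; the exact helper name here is hypothetical):

// The caller reserves stack for the args, fills in callee/this/argv, and then
// calls Invoke directly; the InvokeArgsGuard destructor pops the args. Before
// this patch the guard also had to be flipped active/inactive around a call to
// the separate InvokeKernel entry point.
static bool
CallFunctionValue(JSContext *cx, const Value &thisv, const Value &fval,
                  unsigned argc, Value *argv, Value *rval)
{
    InvokeArgsGuard args;
    if (!cx->stack.pushInvokeArgs(cx, argc, &args))
        return false;
    args.setCallee(fval);
    args.setThis(thisv);
    PodCopy(args.array(), argv, argc);
    if (!Invoke(cx, args))
        return false;
    *rval = args.rval();
    return true;
}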

@@ -5279,16 +5279,12 @@ js_DumpBacktrace(JSContext *cx)
     sprinter.init();
     size_t depth = 0;
     for (StackIter i(cx); !i.done(); ++i, ++depth) {
-        if (i.isScript()) {
-            const char *filename = JS_GetScriptFilename(cx, i.script());
-            unsigned line = JS_PCToLineNumber(cx, i.script(), i.pc());
-            JSScript *script = i.script();
-            sprinter.printf("#%d %14p %s:%d (%p @ %d)\n",
-                            depth, (i.isIon() ? 0 : i.interpFrame()), filename, line,
-                            script, i.pc() - script->code);
-        } else {
-            sprinter.printf("#%d ???\n", depth);
-        }
+        const char *filename = JS_GetScriptFilename(cx, i.script());
+        unsigned line = JS_PCToLineNumber(cx, i.script(), i.pc());
+        JSScript *script = i.script();
+        sprinter.printf("#%d %14p %s:%d (%p @ %d)\n",
+                        depth, (i.isIon() ? 0 : i.interpFrame()), filename, line,
+                        script, i.pc() - script->code);
     }
     fprintf(stdout, "%s", sprinter.string());
 }

@@ -1628,8 +1628,6 @@ DecompileArgumentFromStack(JSContext *cx, int formalIndex, char **res)
      * called the intrinsic.
      */
     StackIter frameIter(cx);
-    while (!frameIter.done() && !frameIter.isScript())
-        ++frameIter;
     JS_ASSERT(!frameIter.done());
 
     /*
@@ -1637,9 +1635,7 @@ DecompileArgumentFromStack(JSContext *cx, int formalIndex, char **res)
      * intrinsic.
      */
     ++frameIter;
 
-    /* If this frame isn't a script, we can't decompile. */
-    if (frameIter.done() || !frameIter.isScript())
+    if (frameIter.done())
         return true;
 
     RootedScript script(cx, frameIter.script());

@@ -161,7 +161,7 @@ stubs::SlowCall(VMFrame &f, uint32_t argc)
     if (!MaybeCloneAndPatchCallee(f.cx, args, fscript, f.pc()))
         THROW();
 
-    if (!InvokeKernel(f.cx, args))
+    if (!Invoke(f.cx, args))
         THROW();
 
     types::TypeScript::Monitor(f.cx, fscript, f.pc(), args.rval());
@@ -175,7 +175,7 @@ stubs::SlowNew(VMFrame &f, uint32_t argc)
     if (!MaybeCloneAndPatchCallee(f.cx, args, fscript, f.pc()))
         THROW();
 
-    if (!InvokeConstructorKernel(f.cx, args))
+    if (!InvokeConstructor(f.cx, args))
         THROW();
 
     types::TypeScript::Monitor(f.cx, fscript, f.pc(), args.rval());
@@ -432,7 +432,7 @@ stubs::UncachedNewHelper(VMFrame &f, uint32_t argc, UncachedCallResult &ucr)
         if (!UncachedInlineCall(f, INITIAL_CONSTRUCT, &ucr.codeAddr, &ucr.unjittable, argc))
             THROW();
     } else {
-        if (!InvokeConstructorKernel(cx, args))
+        if (!InvokeConstructor(cx, args))
            THROW();
        types::TypeScript::Monitor(f.cx, fscript, f.pc(), args.rval());
    }
@@ -460,7 +460,7 @@ stubs::Eval(VMFrame &f, uint32_t argc)
     CallArgs args = CallArgsFromSp(argc, f.regs.sp);
     if (!IsBuiltinEvalForScope(f.fp()->scopeChain(), args.calleev())) {
-        if (!InvokeKernel(f.cx, args))
+        if (!Invoke(f.cx, args))
            THROW();
 
        RootedScript fscript(f.cx, f.script());
@@ -505,7 +505,7 @@ stubs::UncachedCallHelper(VMFrame &f, uint32_t argc, bool lowered, UncachedCallR
         }
     }
 
-    if (!InvokeKernel(f.cx, args))
+    if (!Invoke(f.cx, args))
         THROW();
 
     types::TypeScript::Monitor(f.cx, fscript, f.pc(), args.rval());
@@ -1026,7 +1026,7 @@ js_InternalInterpret(void *returnData, void *returnType, void *returnReg, js::VM
         nextDepth = analysis->getCode(nextpc).stackDepth;
         enter.destroy();
         f.regs.sp = nextsp + 2 + f.u.call.dynamicArgc;
-        if (!InvokeKernel(cx, CallArgsFromSp(f.u.call.dynamicArgc, f.regs.sp)))
+        if (!Invoke(cx, CallArgsFromSp(f.u.call.dynamicArgc, f.regs.sp)))
            return js_InternalThrow(f);
        nextsp[-1] = nextsp[0];
        f.regs.pc = nextpc;

@@ -510,17 +510,6 @@ StackSegment::contains(const FrameRegs *regs) const
     return regs && contains(regs->fp());
 }
 
-bool
-StackSegment::contains(const CallArgsList *call) const
-{
-    if (!call || !calls_)
-        return false;
-
-    /* NB: this depends on the continuity of segments in memory. */
-    Value *vp = call->array();
-    return vp > slotsBegin() && vp <= calls_->array();
-}
-
 StackFrame *
 StackSegment::computeNextFrame(const StackFrame *f, size_t maxDepth) const
 {
@@ -540,14 +529,10 @@ Value *
 StackSegment::end() const
 {
     /* NB: this depends on the continuity of segments in memory. */
-    JS_ASSERT_IF(calls_ || regs_, contains(calls_) || contains(regs_));
-    Value *p = calls_
-               ? regs_
-                 ? Max(regs_->sp, calls_->end())
-                 : calls_->end()
-               : regs_
-                 ? regs_->sp
-                 : slotsBegin();
+    JS_ASSERT_IF(regs_, contains(regs_));
+    Value *p = regs_ ? regs_->sp : slotsBegin();
+    if (invokeArgsEnd_ > p)
+        p = invokeArgsEnd_;
     JS_ASSERT(p >= slotsBegin());
     return p;
 }
@@ -568,25 +553,6 @@ StackSegment::popRegs(FrameRegs *regs)
     regs_ = regs;
 }
 
-void
-StackSegment::pushCall(CallArgsList &callList)
-{
-    callList.prev_ = calls_;
-    calls_ = &callList;
-}
-
-void
-StackSegment::pointAtCall(CallArgsList &callList)
-{
-    calls_ = &callList;
-}
-
-void
-StackSegment::popCall()
-{
-    calls_ = calls_->prev_;
-}
-
 /*****************************************************************************/
 
 StackSpace::StackSpace()
@@ -945,16 +911,9 @@ ContextStack::ensureOnTop(JSContext *cx, MaybeReportError report, unsigned nvars
     if (!space().ensureSpace(cx, report, firstUnused, VALUES_PER_STACK_SEGMENT + nvars))
         return NULL;
 
-    CallArgsList *calls;
-    if (seg_ && extend) {
-        regs = seg_->maybeRegs();
-        calls = seg_->maybeCalls();
-    } else {
-        regs = NULL;
-        calls = NULL;
-    }
+    regs = (seg_ && extend) ? seg_->maybeRegs() : NULL;
 
-    seg_ = new(firstUnused) StackSegment(cx, seg_, space().seg_, regs, calls);
+    seg_ = new(firstUnused) StackSegment(cx, seg_, space().seg_, regs);
     space().seg_ = seg_;
     *pushedSeg = true;
     return seg_->slotsBegin();
@@ -985,7 +944,8 @@ ContextStack::pushInvokeArgs(JSContext *cx, unsigned argc, InvokeArgsGuard *iag,
     ImplicitCast<CallArgs>(*iag) = CallArgsFromVp(argc, firstUnused);
 
-    seg_->pushCall(*iag);
+    seg_->pushInvokeArgsEnd(iag->end(), &iag->prevInvokeArgsEnd_);
+
     JS_ASSERT(space().firstUnused() == iag->end());
     iag->setPushed(*this);
     return true;
@@ -996,11 +956,12 @@ ContextStack::popInvokeArgs(const InvokeArgsGuard &iag)
 {
     JS_ASSERT(iag.pushed());
     JS_ASSERT(onTop());
-    JS_ASSERT(space().firstUnused() == seg_->calls().end());
+    JS_ASSERT(space().firstUnused() == seg_->invokeArgsEnd());
 
     Value *oldend = seg_->end();
 
-    seg_->popCall();
+    seg_->popInvokeArgsEnd(iag.prevInvokeArgsEnd_);
+
     if (iag.pushedSeg_)
         popSegment();
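
The pair above replaces the prev-linked CallArgsList with a single watermark: the segment remembers only the end of the innermost pushed args, and each guard stashes the previous watermark so pops restore it in LIFO order. A self-contained model of the bookkeeping (hypothetical code, not from the tree), including its effect on end():

#include <cassert>
#include <cstddef>

struct Value {};

// Model of the new StackSegment bookkeeping: one watermark instead of a list.
struct SegmentModel {
    Value *slotsBegin_;
    Value *sp_;                       // stand-in for regs_->sp (may be NULL)
    Value *invokeArgsEnd_;            // NULL when no invoke args are pushed

    Value *end() const {
        Value *p = sp_ ? sp_ : slotsBegin_;
        if (invokeArgsEnd_ > p)       // args pushed above the current frame
            p = invokeArgsEnd_;
        return p;
    }
    void pushInvokeArgsEnd(Value *end, Value **prev) {
        *prev = invokeArgsEnd_;       // guard saves the old watermark...
        invokeArgsEnd_ = end;
    }
    void popInvokeArgsEnd(Value *prev) {
        invokeArgsEnd_ = prev;        // ...and restores it on pop
    }
};

int main() {
    Value stack[32];
    SegmentModel seg = { stack, stack + 4, NULL };

    Value *outer, *inner;
    seg.pushInvokeArgsEnd(stack + 10, &outer);   // outer Invoke pushes args
    seg.pushInvokeArgsEnd(stack + 16, &inner);   // nested Invoke pushes more
    assert(seg.end() == stack + 16);             // end() covers the innermost args

    seg.popInvokeArgsEnd(inner);                 // innermost guard pops first (LIFO)
    assert(seg.end() == stack + 10);
    seg.popInvokeArgsEnd(outer);
    assert(seg.end() == stack + 4);              // back to the frame's sp
    return 0;
}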
@@ -1061,7 +1022,6 @@ ContextStack::pushExecuteFrame(JSContext *cx, HandleScript script, const Value &
      * below. If |evalInFrame| is a baseline JIT frame, prev-link to its entry
      * frame.
      */
-    CallArgsList *evalInFrameCalls = NULL;  /* quell overwarning */
     MaybeExtend extend;
     StackFrame *prevLink;
     AbstractFramePtr prev = NullFramePtr();
@@ -1078,12 +1038,11 @@ ContextStack::pushExecuteFrame(JSContext *cx, HandleScript script, const Value &
         /* Debug-mode currently disables Ion compilation. */
         JS_ASSERT_IF(evalInFrame.isStackFrame(), !evalInFrame.asStackFrame()->runningInIon());
         JS_ASSERT_IF(evalInFrame.compartment() == iter.compartment(), !iter.isIonOptimizedJS());
-        while (!iter.isScript() || iter.isIonOptimizedJS() || iter.abstractFramePtr() != evalInFrame) {
+        while (iter.isIonOptimizedJS() || iter.abstractFramePtr() != evalInFrame) {
             ++iter;
             JS_ASSERT_IF(evalInFrame.compartment() == iter.compartment(), !iter.isIonOptimizedJS());
         }
         JS_ASSERT(iter.abstractFramePtr() == evalInFrame);
-        evalInFrameCalls = iter.data_.calls_;
         prevLink = iter.data_.fp_;
         prev = evalInFrame;
         extend = CANT_EXTEND;
@@ -1106,10 +1065,6 @@ ContextStack::pushExecuteFrame(JSContext *cx, HandleScript script, const Value &
     fp->initVarsToUndefined();
     efg->regs_.prepareToRun(*fp, script);
 
-    /* pushRegs() below links the prev-frame; manually link the prev-call. */
-    if (evalInFrame && evalInFrameCalls)
-        seg_->pointAtCall(*evalInFrameCalls);
-
     efg->prevRegs_ = seg_->pushRegs(efg->regs_);
     JS_ASSERT(space().firstUnused() == efg->regs_.sp);
     efg->setPushed(*this);
@@ -1278,16 +1233,6 @@ StackIter::popFrame()
     }
 }
 
-void
-StackIter::popCall()
-{
-    DebugOnly<CallArgsList*> oldCall = data_.calls_;
-    JS_ASSERT(data_.seg_->contains(oldCall));
-    data_.calls_ = data_.calls_->prev();
-    if (!data_.seg_->contains(data_.fp_))
-        poisonRegs();
-}
-
 void
 StackIter::settleOnNewSegment()
 {
@@ -1302,41 +1247,36 @@ StackIter::startOnSegment(StackSegment *seg)
 {
     data_.seg_ = seg;
     data_.fp_ = data_.seg_->maybefp();
-    data_.calls_ = data_.seg_->maybeCalls();
     settleOnNewSegment();
 }
 
 /*
- * Given that the iterator's current value of fp_ and calls_ (initialized on
- * construction or after operator++ popped the previous scripted/native call),
- * "settle" the iterator on a new StackIter::State value. The goal is to
- * present the client a simple linear sequence of native/scripted calls while
- * covering up unpleasant stack implementation details:
+ * Given the iterator's current value of fp_ (initialized on construction or
+ * after operator++ popped the previous call), "settle" the iterator on a new
+ * StackIter::State value. The goal is to present the client a simple linear
+ * sequence of scripted calls while covering up unpleasant stack implementation
+ * details:
  *  - The frame chain can be "saved" and "restored" (see JS_SaveFrameChain).
  *    This artificially cuts the call chain and the StackIter client may want
  *    to continue through this cut to the previous frame by passing
  *    GO_THROUGH_SAVED.
  *  - fp->prev can be in a different contiguous segment from fp. In this case,
- *    the current values of sp/pc after calling popFrame/popCall are incorrect
- *    and should be recovered from fp->prev's segment.
- *  - there is no explicit relationship to determine whether fp_ or calls_ is
- *    the innermost invocation so implicit memory ordering is used since both
- *    push values on the stack.
- *  - a native call's 'callee' argument is clobbered on return while the
- *    CallArgsList element is still visible.
+ *    the current values of sp/pc after calling popFrame are incorrect and
+ *    should be recovered from fp->prev's segment.
  */
 
 /* PGO causes xpcshell startup crashes with VS2010. */
 #if defined(_MSC_VER)
 # pragma optimize("g", off)
 #endif
 void
 StackIter::settleOnNewState()
 {
-    /* Reset whether or we popped a call last time we settled. */
-    data_.poppedCallDuringSettle_ = false;
-
     /*
-     * There are elements of the calls_ and fp_ chains that we want to skip
-     * over so iterate until we settle on one or until there are no more.
+     * There are elements of the fp_ chain that we want to skip over so iterate
+     * until we settle on one or until there are no more.
     */
    while (true) {
-        if (!data_.fp_ && !data_.calls_) {
+        if (!data_.fp_) {
            if (data_.savedOption_ == GO_THROUGH_SAVED && data_.seg_->prevInContext()) {
                startOnSegment(data_.seg_->prevInContext());
                continue;
@@ -1345,116 +1285,80 @@ StackIter::settleOnNewState()
             return;
         }
 
-        /* Check if popFrame/popCall changed segment. */
+        /* Check if popFrame changed segment. */
         bool containsFrame = data_.seg_->contains(data_.fp_);
-        bool containsCall = data_.seg_->contains(data_.calls_);
-        while (!containsFrame && !containsCall) {
+        while (!containsFrame) {
             /* Eval-in-frame can cross contexts, so use prevInMemory. */
             data_.seg_ = data_.seg_->prevInMemory();
             containsFrame = data_.seg_->contains(data_.fp_);
-            containsCall = data_.seg_->contains(data_.calls_);
 
             /* Eval-in-frame allows jumping into the middle of a segment. */
-            if (containsFrame &&
-                (data_.seg_->fp() != data_.fp_ || data_.seg_->maybeCalls() != data_.calls_))
-            {
+            if (containsFrame && data_.seg_->fp() != data_.fp_) {
                 /* Avoid duplicating logic; seg_ contains fp_, so no iloop. */
                 StackIter tmp = *this;
                 tmp.startOnSegment(data_.seg_);
                 tmp.settleOnNewState();
-                while (!tmp.isScript() || tmp.data_.fp_ != data_.fp_)
+                while (tmp.data_.fp_ != data_.fp_)
                     ++tmp;
-                JS_ASSERT(tmp.isScript() &&
+                JS_ASSERT(!tmp.done() &&
                           tmp.data_.seg_ == data_.seg_ &&
                           tmp.data_.fp_ == data_.fp_);
                 *this = tmp;
                 return;
             }
-
-            /* There is no eval-in-frame equivalent for native calls. */
-            JS_ASSERT_IF(containsCall, &data_.seg_->calls() == data_.calls_);
-
             settleOnNewSegment();
         }
 
-        /*
-         * In case of both a scripted frame and call record, use linear memory
-         * ordering to decide which was the most recent.
-         */
-        if (containsFrame && (!containsCall || (Value *)data_.fp_ >= data_.calls_->array())) {
 #ifdef JS_ION
-            if (data_.fp_->beginsIonActivation()) {
-                /*
-                 * Eval-in-frame can link to an arbitrary frame on the stack.
-                 * Skip any IonActivation's until we reach the one for the
-                 * current StackFrame. Treat activations with NULL entryfp
-                 * (pushed by FastInvoke) as belonging to the previous
-                 * activation.
-                 */
-                while (true) {
-                    ion::IonActivation *act = data_.ionActivations_.activation();
-                    while (!act->entryfp())
-                        act = act->prev();
-                    if (act->entryfp() == data_.fp_)
-                        break;
+        if (data_.fp_->beginsIonActivation()) {
+            /*
+             * Eval-in-frame can link to an arbitrary frame on the stack.
+             * Skip any IonActivation's until we reach the one for the
+             * current StackFrame. Treat activations with NULL entryfp
+             * (pushed by FastInvoke) as belonging to the previous
+             * activation.
+             */
+            while (true) {
+                ion::IonActivation *act = data_.ionActivations_.activation();
+                while (!act->entryfp())
+                    act = act->prev();
+                if (act->entryfp() == data_.fp_)
+                    break;
-                    ++data_.ionActivations_;
-                }
+                ++data_.ionActivations_;
+            }
-                data_.ionFrames_ = ion::IonFrameIterator(data_.ionActivations_);
+            data_.ionFrames_ = ion::IonFrameIterator(data_.ionActivations_);
 
-                if (data_.ionFrames_.isNative()) {
-                    data_.state_ = ION;
-                    return;
-                }
-
-                while (!data_.ionFrames_.isScripted() && !data_.ionFrames_.done())
-                    ++data_.ionFrames_;
+            while (!data_.ionFrames_.isScripted() && !data_.ionFrames_.done())
+                ++data_.ionFrames_;
 
-                // When invoked from JM, we don't re-use the entryfp, so we
-                // may have an empty Ion activation.
-                if (data_.ionFrames_.done()) {
-                    data_.state_ = SCRIPTED;
-                    return;
-                }
-
-                data_.state_ = ION;
-                nextIonFrame();
+            // When invoked from JM, we don't re-use the entryfp, so we
+            // may have an empty Ion activation.
+            if (data_.ionFrames_.done()) {
+                data_.state_ = SCRIPTED;
+                return;
+            }
+
+            data_.state_ = ION;
+            nextIonFrame();
             return;
         }
 #endif /* JS_ION */
-            data_.state_ = SCRIPTED;
-            return;
-        }
-
-        /*
-         * A CallArgsList element is pushed for any call to Invoke, regardless
-         * of whether the callee is a scripted function or even a callable
-         * object. Thus, it is necessary to filter calleev for natives.
-         *
-         * Second, stuff can happen after the args are pushed but before/after
-         * the actual call, so only consider "active" calls. (Since Invoke
-         * necessarily clobbers the callee, "active" is also necessary to
-         * ensure that the callee slot is valid.)
-         */
-        if (data_.calls_->active() && IsNativeFunction(data_.calls_->calleev())) {
-            data_.state_ = NATIVE;
-            data_.args_ = *data_.calls_;
-            return;
-        }
-
-        /* Pop the call and keep looking. */
-        popCall();
-        data_.poppedCallDuringSettle_ = true;
+        data_.state_ = SCRIPTED;
+        return;
     }
 }
 
 #if defined(_MSC_VER)
 # pragma optimize("", on)
 #endif
 
 StackIter::Data::Data(JSContext *cx, PerThreadData *perThread, SavedOption savedOption)
   : perThread_(perThread),
     cx_(cx),
-    savedOption_(savedOption),
-    poppedCallDuringSettle_(false)
+    savedOption_(savedOption)
 #ifdef JS_ION
   , ionActivations_(cx),
     ionFrames_((uint8_t *)NULL)
@@ -1465,8 +1369,7 @@ StackIter::Data::Data(JSContext *cx, PerThreadData *perThread, SavedOption saved
 StackIter::Data::Data(JSContext *cx, JSRuntime *rt, StackSegment *seg)
   : perThread_(&rt->mainThread),
     cx_(cx),
-    savedOption_(STOP_AT_SAVED),
-    poppedCallDuringSettle_(false)
+    savedOption_(STOP_AT_SAVED)
 #ifdef JS_ION
   , ionActivations_(rt),
     ionFrames_((uint8_t *)NULL)
@@ -1480,11 +1383,8 @@ StackIter::Data::Data(const StackIter::Data &other)
     savedOption_(other.savedOption_),
     state_(other.state_),
     fp_(other.fp_),
-    calls_(other.calls_),
     seg_(other.seg_),
-    pc_(other.pc_),
-    args_(other.args_),
-    poppedCallDuringSettle_(other.poppedCallDuringSettle_)
+    pc_(other.pc_)
 #ifdef JS_ION
   , ionActivations_(other.ionActivations_),
     ionFrames_(other.ionFrames_)
@@ -1606,12 +1506,6 @@ StackIter::popBaselineDebuggerFrame()
     popFrame();
     settleOnNewState();
 
-    /* Pop native and Ion frames until we reach the target frame. */
-    while (data_.state_ == NATIVE) {
-        popCall();
-        settleOnNewState();
-    }
-
     JS_ASSERT(data_.state_ == ION);
     while (!data_.ionFrames_.isBaselineJS() || data_.ionFrames_.baselineFrame() != prevBaseline)
         popIonFrame();
@@ -1635,10 +1529,6 @@ StackIter::operator++()
         popFrame();
         settleOnNewState();
         break;
-      case NATIVE:
-        popCall();
-        settleOnNewState();
-        break;
       case ION:
 #ifdef JS_ION
         popIonFrame();
@@ -1654,10 +1544,7 @@ bool
 StackIter::operator==(const StackIter &rhs) const
 {
     return done() == rhs.done() &&
-           (done() ||
-            (isScript() == rhs.isScript() &&
-             ((isScript() && data_.fp_ == rhs.data_.fp_) ||
-              (!isScript() && nativeArgs().base() == rhs.nativeArgs().base()))));
+           (done() || data_.fp_ == rhs.data_.fp_);
 }
 
 StackIter::Data *
@@ -1687,8 +1574,6 @@ StackIter::compartment() const
 #else
         break;
 #endif
-      case NATIVE:
-        return data_.calls_->callee().compartment();
     }
     JS_NOT_REACHED("Unexpected state");
     return NULL;
@@ -1711,8 +1596,6 @@ StackIter::isFunctionFrame() const
 #else
         break;
 #endif
-      case NATIVE:
-        return false;
     }
     JS_NOT_REACHED("Unexpected state");
     return false;
@@ -1735,8 +1618,6 @@ StackIter::isGlobalFrame() const
 #else
         break;
 #endif
-      case NATIVE:
-        return false;
     }
     JS_NOT_REACHED("Unexpected state");
     return false;
@@ -1759,8 +1640,6 @@ StackIter::isEvalFrame() const
 #else
         break;
 #endif
-      case NATIVE:
-        return false;
     }
     JS_NOT_REACHED("Unexpected state");
     return false;
@@ -1776,7 +1655,6 @@ StackIter::isNonEvalFunctionFrame() const
       case SCRIPTED:
         return interpFrame()->isNonEvalFunctionFrame();
       case ION:
-      case NATIVE:
         return !isEvalFrame() && isFunctionFrame();
     }
     JS_NOT_REACHED("Unexpected state");
@@ -1792,7 +1670,6 @@ StackIter::isGeneratorFrame() const
       case SCRIPTED:
         return interpFrame()->isGeneratorFrame();
       case ION:
-      case NATIVE:
         return false;
     }
     JS_NOT_REACHED("Unexpected state");
@@ -1815,7 +1692,6 @@ StackIter::isConstructing() const
         break;
 #endif
       case SCRIPTED:
-      case NATIVE:
         return interpFrame()->isConstructing();
     }
     JS_NOT_REACHED("Unexpected state");
@@ -1837,8 +1713,6 @@ StackIter::abstractFramePtr() const
       case SCRIPTED:
         JS_ASSERT(interpFrame());
         return AbstractFramePtr(interpFrame());
-      case NATIVE:
-        break;
     }
     JS_NOT_REACHED("Unexpected state");
     return NullFramePtr();
@@ -1877,8 +1751,6 @@ StackIter::updatePcQuadratic()
         }
 #endif
         break;
-      case NATIVE:
-        break;
     }
     JS_NOT_REACHED("Unexpected state");
 }
@@ -1896,15 +1768,11 @@ StackIter::callee() const
 #ifdef JS_ION
         if (data_.ionFrames_.isBaselineJS())
             return data_.ionFrames_.callee();
-        if (data_.ionFrames_.isOptimizedJS())
-            return ionInlineFrames_.callee();
-        JS_ASSERT(data_.ionFrames_.isNative());
-        return data_.ionFrames_.callee();
+        JS_ASSERT(data_.ionFrames_.isOptimizedJS());
+        return ionInlineFrames_.callee();
 #else
         break;
 #endif
-      case NATIVE:
-        return nativeArgs().callee().toFunction();
     }
     JS_NOT_REACHED("Unexpected state");
     return NULL;
@@ -1925,8 +1793,6 @@ StackIter::calleev() const
 #else
         break;
 #endif
-      case NATIVE:
-        return nativeArgs().calleev();
     }
     JS_NOT_REACHED("Unexpected state");
     return Value();
@@ -1951,8 +1817,6 @@ StackIter::numActualArgs() const
 #else
         break;
 #endif
-      case NATIVE:
-        return nativeArgs().length();
     }
     JS_NOT_REACHED("Unexpected state");
     return 0;
@@ -1973,8 +1837,6 @@ StackIter::unaliasedActual(unsigned i, MaybeCheckAliasing checkAliasing) const
 #else
         break;
 #endif
-      case NATIVE:
-        break;
     }
     JS_NOT_REACHED("Unexpected state");
     return NullValue();
@@ -1996,8 +1858,6 @@ StackIter::scopeChain() const
 #endif
       case SCRIPTED:
         return interpFrame()->scopeChain();
-      case NATIVE:
-        break;
     }
     JS_NOT_REACHED("Unexpected state");
     return NULL;
@@ -2029,8 +1889,6 @@ StackIter::hasArgsObj() const
 #else
         break;
 #endif
-      case NATIVE:
-        break;
     }
     JS_NOT_REACHED("Unexpected state");
     return false;
@@ -2053,8 +1911,6 @@ StackIter::argsObj() const
 #endif
       case SCRIPTED:
         return interpFrame()->argsObj();
-      case NATIVE:
-        break;
     }
     JS_NOT_REACHED("Unexpected state");
     return interpFrame()->argsObj();
@@ -2063,7 +1919,8 @@ StackIter::argsObj() const
 bool
 StackIter::computeThis() const
 {
-    if (isScript() && !isIonOptimizedJS()) {
+    JS_ASSERT(!done());
+    if (!isIonOptimizedJS()) {
         JS_ASSERT(data_.cx_);
         return ComputeThis(data_.cx_, abstractFramePtr());
     }
@@ -2085,7 +1942,6 @@ StackIter::thisv() const
         break;
 #endif
       case SCRIPTED:
-      case NATIVE:
         return interpFrame()->thisValue();
     }
     JS_NOT_REACHED("Unexpected state");
@@ -2106,8 +1962,6 @@ StackIter::returnValue() const
         break;
       case SCRIPTED:
         return interpFrame()->returnValue();
-      case NATIVE:
-        break;
     }
     JS_NOT_REACHED("Unexpected state");
     return NullValue();
@@ -2130,8 +1984,6 @@ StackIter::setReturnValue(const Value &v)
       case SCRIPTED:
         interpFrame()->setReturnValue(v);
         return;
-      case NATIVE:
-        break;
     }
     JS_NOT_REACHED("Unexpected state");
 }
@@ -2141,7 +1993,6 @@
 {
     switch (data_.state_) {
       case DONE:
-      case NATIVE:
         break;
       case ION: {
 #ifdef JS_ION
@@ -2167,7 +2018,6 @@
 {
     switch (data_.state_) {
       case DONE:
-      case NATIVE:
        break;
      case ION:
 #ifdef JS_ION
@@ -2313,7 +2163,7 @@ AbstractFramePtr::evalPrevScopeChain(JSRuntime *rt) const
     /* Eval frames are not compiled by Ion, though their caller might be. */
     StackIter iter(rt, *alliter.seg());
-    while (!iter.isScript() || iter.isIonOptimizedJS() || iter.abstractFramePtr() != *this)
+    while (iter.isIonOptimizedJS() || iter.abstractFramePtr() != *this)
         ++iter;
     ++iter;
     return iter.scopeChain();

@@ -111,38 +111,10 @@ namespace ion {
  * segment's "current regs", which contains the stack pointer 'sp'. In the
  * interpreter, sp is adjusted as individual values are pushed and popped from
  * the stack and the FrameRegs struct (pointed by the StackSegment) is a local
- * var of js::Interpret. JIT code simulates this by lazily updating FrameRegs
+ * var of js::Interpret. JM JIT code simulates this by lazily updating FrameRegs
  * when calling from JIT code into the VM. Ideally, we'd like to remove all
  * dependence on FrameRegs outside the interpreter.
 *
- * A call to a native (C++) function does not push a frame. Instead, an array
- * of values is passed to the native. The layout of this array is abstracted by
- * JS::CallArgs. With respect to the StackSegment layout above, the args to a
- * native call are inserted anywhere there can be values. A sample memory layout
- * looks like:
- *
- *                          regs
- *       .------------------------------------------.
- *       |                                          V
- *       |                                fp .--FrameRegs--. sp
- *       |                                 V               V
- * |StackSegment| native call | values |StackFrame| values | native call |
- *       |       vp <--argc--> end                   vp <--argc--> end
- *       |           CallArgs <------------------------------ CallArgs
- *       |              prev                                      ^
- *       `-------------------------------------------------------'
- *                                    calls
- *
- * Here there are two native calls on the stack. The start of each native arg
- * range is recorded by a CallArgsList element which is prev-linked like stack
- * frames. Note that, in full generality, native and scripted calls can
- * interleave arbitrarily. Thus, the end of a segment is the maximum of its
- * current frame and its current native call. Similarly, the top of the entire
- * thread stack is the end of its current segment.
- *
- * Note that, between any two StackFrames there may be any number
- * of native calls, so the meaning of 'prev' is not 'directly called by'.
- *
  * An additional feature (perhaps not for much longer: bug 650361) is that
  * multiple independent "contexts" can interleave (LIFO) on a single contiguous
  * stack. "Independent" here means that each context has its own callstack.
@@ -163,51 +135,6 @@
 /*****************************************************************************/
 
-/*
- * For calls to natives, the InvokeArgsGuard object provides a record of the
- * call for the debugger's callstack. For this to work, the InvokeArgsGuard
- * record needs to know when the call is actually active (because the
- * InvokeArgsGuard can be pushed long before and popped long after the actual
- * call, during which time many stack-observing things can happen).
- */
-class MOZ_STACK_CLASS CallArgsList : public JS::CallArgs
-{
-    friend class StackSegment;
-
-    CallArgsList *prev_;
-    bool active_;
-
-  protected:
-    CallArgsList() : prev_(NULL), active_(false) {}
-
-  public:
-    friend CallArgsList CallArgsListFromVp(unsigned, Value *, CallArgsList *);
-    friend CallArgsList CallArgsListFromArgv(unsigned, Value *, CallArgsList *);
-
-    CallArgsList *prev() const { return prev_; }
-
-    bool active() const { return active_; }
-    void setActive() { active_ = true; }
-    void setInactive() { active_ = false; }
-};
-
-JS_ALWAYS_INLINE CallArgsList
-CallArgsListFromArgv(unsigned argc, Value *argv, CallArgsList *prev)
-{
-    CallArgsList args;
-#ifdef DEBUG
-    args.usedRval_ = false;
-#endif
-    args.argv_ = argv;
-    args.argc_ = argc;
-    args.prev_ = prev;
-    args.active_ = false;
-    return args;
-}
-
-JS_ALWAYS_INLINE CallArgsList
-CallArgsListFromVp(unsigned argc, Value *vp, CallArgsList *prev)
-{
-    return CallArgsListFromArgv(argc, vp + 2, prev);
-}
-
 /*****************************************************************************/
 
 enum MaybeCheckAliasing { CHECK_ALIASING = true, DONT_CHECK_ALIASING = false };
 
 /*****************************************************************************/
@@ -1388,8 +1315,8 @@ class StackSegment
     /* Execution registers for most recent script in this segment (or null). */
     FrameRegs *regs_;
 
-    /* Call args for most recent native call in this segment (or null). */
-    CallArgsList *calls_;
+    /* End of CallArgs pushed by pushInvokeArgs. */
+    Value *invokeArgsEnd_;
 
 #if JS_BITS_PER_WORD == 32
     /*
@@ -1404,13 +1331,12 @@
     StackSegment(JSContext *cx,
                  StackSegment *prevInContext,
                  StackSegment *prevInMemory,
-                 FrameRegs *regs,
-                 CallArgsList *calls)
+                 FrameRegs *regs)
       : cx_(cx),
         prevInContext_(prevInContext),
         prevInMemory_(prevInMemory),
         regs_(regs),
-        calls_(calls)
+        invokeArgsEnd_(NULL)
     {}
 
     /* A segment is followed in memory by the arguments of the first call. */
@@ -1442,23 +1368,6 @@
         return regs_ ? regs_->pc : NULL;
     }
 
-    CallArgsList &calls() const {
-        JS_ASSERT(calls_);
-        return *calls_;
-    }
-
-    CallArgsList *maybeCalls() const {
-        return calls_;
-    }
-
-    Value *callArgv() const {
-        return calls_->array();
-    }
-
-    Value *maybeCallArgv() const {
-        return calls_ ? calls_->array() : NULL;
-    }
-
     JSContext *cx() const {
         return cx_;
     }
@@ -1476,12 +1385,11 @@
     }
 
     bool isEmpty() const {
-        return !calls_ && !regs_;
+        return !regs_;
     }
 
     bool contains(const StackFrame *fp) const;
     bool contains(const FrameRegs *regs) const;
-    bool contains(const CallArgsList *call) const;
 
     StackFrame *computeNextFrame(const StackFrame *fp, size_t maxDepth) const;
@@ -1489,9 +1397,17 @@
     FrameRegs *pushRegs(FrameRegs &regs);
     void popRegs(FrameRegs *regs);
 
-    void pushCall(CallArgsList &callList);
-    void pointAtCall(CallArgsList &callList);
-    void popCall();
+    Value *invokeArgsEnd() const {
+        return invokeArgsEnd_;
+    }
+    void pushInvokeArgsEnd(Value *end, Value **prev) {
+        *prev = invokeArgsEnd_;
+        invokeArgsEnd_ = end;
+    }
+    void popInvokeArgsEnd(Value *prev) {
+        invokeArgsEnd_ = prev;
+    }
 
     /* For jit access: */
@@ -1799,14 +1715,15 @@ class ContextStack
 /*****************************************************************************/
 
-class InvokeArgsGuard : public CallArgsList
+class InvokeArgsGuard : public JS::CallArgs
 {
     friend class ContextStack;
     ContextStack *stack_;
+    Value *prevInvokeArgsEnd_;
     bool pushedSeg_;
     void setPushed(ContextStack &stack) { JS_ASSERT(!pushed()); stack_ = &stack; }
   public:
-    InvokeArgsGuard() : CallArgsList(), stack_(NULL), pushedSeg_(false) {}
+    InvokeArgsGuard() : CallArgs(), stack_(NULL), prevInvokeArgsEnd_(NULL), pushedSeg_(false) {}
     ~InvokeArgsGuard() { if (pushed()) stack_->popInvokeArgs(*this); }
     bool pushed() const { return !!stack_; }
     void pop() { stack_->popInvokeArgs(*this); stack_ = NULL; }
@@ -1888,7 +1805,7 @@ class StackIter
 {
   public:
     enum SavedOption { STOP_AT_SAVED, GO_THROUGH_SAVED };
-    enum State { DONE, SCRIPTED, NATIVE, ION };
+    enum State { DONE, SCRIPTED, ION };
 
     /*
      * Unlike StackIter itself, StackIter::Data can be allocated on the heap,
@@ -1903,13 +1820,9 @@
         State state_;
 
         StackFrame *fp_;
-        CallArgsList *calls_;
 
         StackSegment *seg_;
         jsbytecode *pc_;
-        CallArgs args_;
-
-        bool poppedCallDuringSettle_;
 
 #ifdef JS_ION
         ion::IonActivationIterator ionActivations_;
@@ -1931,7 +1844,6 @@
     void poisonRegs();
     void popFrame();
-    void popCall();
 #ifdef JS_ION
     void nextIonFrame();
     void popIonFrame();
@@ -1957,18 +1869,8 @@
     JSCompartment *compartment() const;
 
-    bool poppedCallDuringSettle() const { return data_.poppedCallDuringSettle_; }
-
-    bool isScript() const {
-        JS_ASSERT(!done());
-#ifdef JS_ION
-        if (data_.state_ == ION)
-            return data_.ionFrames_.isScripted();
-#endif
-        return data_.state_ == SCRIPTED;
-    }
     JSScript *script() const {
-        JS_ASSERT(isScript());
+        JS_ASSERT(!done());
         if (data_.state_ == SCRIPTED)
             return interpFrame()->script();
 #ifdef JS_ION
@@ -2001,15 +1903,6 @@
 #endif
     }
 
-    bool isNativeCall() const {
-        JS_ASSERT(!done());
-#ifdef JS_ION
-        if (data_.state_ == ION)
-            return data_.ionFrames_.isNative();
-#endif
-        return data_.state_ == NATIVE;
-    }
-
     bool isFunctionFrame() const;
     bool isGlobalFrame() const;
     bool isEvalFrame() const;
@@ -2027,9 +1920,9 @@
      * contents of the frame are ignored by Ion code (and GC) and thus
      * immediately become garbage and must not be touched directly.
      */
-    StackFrame *interpFrame() const { JS_ASSERT(isScript() && !isIon()); return data_.fp_; }
+    StackFrame *interpFrame() const { JS_ASSERT(data_.state_ == SCRIPTED); return data_.fp_; }
 
-    jsbytecode *pc() const { JS_ASSERT(isScript()); return data_.pc_; }
+    jsbytecode *pc() const { JS_ASSERT(!done()); return data_.pc_; }
     void updatePcQuadratic();
     JSFunction *callee() const;
     Value calleev() const;
@@ -2058,8 +1951,6 @@
     size_t numFrameSlots() const;
     Value frameSlotValue(size_t index) const;
 
-    CallArgs nativeArgs() const { JS_ASSERT(isNativeCall()); return data_.args_; }
-
     template <class Op>
     inline void ionForEachCanonicalActualArg(JSContext *cx, Op op);
 };
@@ -2067,27 +1958,20 @@
 /* A filtering of the StackIter to only stop at scripts. */
 class ScriptFrameIter : public StackIter
 {
-    void settle() {
-        while (!done() && !isScript())
-            StackIter::operator++();
-    }
-
   public:
     ScriptFrameIter(JSContext *cx, StackIter::SavedOption opt = StackIter::STOP_AT_SAVED)
-      : StackIter(cx, opt) { settle(); }
+      : StackIter(cx, opt) { }
 
     ScriptFrameIter(const StackIter::Data &data)
       : StackIter(data)
     {}
-
-    ScriptFrameIter &operator++() { StackIter::operator++(); settle(); return *this; }
 };
 
 /* A filtering of the StackIter to only stop at non-self-hosted scripts. */
 class NonBuiltinScriptFrameIter : public StackIter
 {
     void settle() {
-        while (!done() && (!isScript() || script()->selfHosted))
+        while (!done() && script()->selfHosted)
            StackIter::operator++();
    }
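
With the NATIVE state gone, every frame a StackIter yields is a script frame, which is why ScriptFrameIter above no longer needs a settle() loop of its own. A hypothetical client walk now reads:

// Sketch of a client loop after this patch: isScript() checks have disappeared
// and script()/pc() are valid on every non-done frame.
static size_t
CountNonSelfHostedFrames(JSContext *cx)
{
    size_t n = 0;
    for (ScriptFrameIter iter(cx); !iter.done(); ++iter) {
        if (!iter.script()->selfHosted)   // same test NonBuiltinScriptFrameIter uses
            n++;
    }
    return n;
}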