diff --git a/dom/base/nsGlobalWindow.cpp b/dom/base/nsGlobalWindow.cpp index 3b3ecd7fead6..9752b681bd0f 100644 --- a/dom/base/nsGlobalWindow.cpp +++ b/dom/base/nsGlobalWindow.cpp @@ -63,6 +63,7 @@ #include "prmem.h" #include "jsapi.h" // for JSAutoRequest #include "jsdbgapi.h" // for JS_ClearWatchPointsForObject +#include "jsfriendapi.h" // for JS_GetFrameScopeChainRaw #include "nsReadableUtils.h" #include "nsDOMClassInfo.h" #include "nsJSEnvironment.h" @@ -5797,13 +5798,13 @@ nsGlobalWindow::CallerInnerWindow() JSStackFrame *fp = nsnull; JS_FrameIterator(cx, &fp); if (fp) { - while (fp->isDummyFrame()) { + while (!JS_IsScriptFrame(cx, fp)) { if (!JS_FrameIterator(cx, &fp)) break; } if (fp) - scope = &fp->scopeChain(); + scope = JS_GetFrameScopeChainRaw(fp); } if (!scope) diff --git a/dom/base/nsJSEnvironment.cpp b/dom/base/nsJSEnvironment.cpp index 78b056d9927d..994c4ff58022 100644 --- a/dom/base/nsJSEnvironment.cpp +++ b/dom/base/nsJSEnvironment.cpp @@ -116,6 +116,8 @@ #include "mozilla/FunctionTimer.h" +using namespace mozilla; + const size_t gStackSize = 8192; #ifdef PR_LOGGING @@ -1884,8 +1886,8 @@ nsJSContext::CallEventHandler(nsISupports* aTarget, void *aScope, void *aHandler return NS_ERROR_FAILURE; } - js::LazilyConstructed poolRelease; - js::LazilyConstructed tvr; + Maybe poolRelease; + Maybe tvr; // Use |target| as the scope for wrapping the arguments, since aScope is // the safe scope in many cases, which isn't very useful. 
Wrapping aTarget @@ -2373,8 +2375,8 @@ nsJSContext::SetProperty(void *aTarget, const char *aPropName, nsISupports *aArg JSAutoRequest ar(mContext); - js::LazilyConstructed poolRelease; - js::LazilyConstructed tvr; + Maybe poolRelease; + Maybe tvr; nsresult rv; rv = ConvertSupportsTojsvals(aArgs, GetNativeGlobal(), &argc, @@ -2414,8 +2416,8 @@ nsJSContext::ConvertSupportsTojsvals(nsISupports *aArgs, void *aScope, PRUint32 *aArgc, jsval **aArgv, - js::LazilyConstructed &aPoolRelease, - js::LazilyConstructed &aRooter) + Maybe &aPoolRelease, + Maybe &aRooter) { nsresult rv = NS_OK; diff --git a/dom/base/nsJSEnvironment.h b/dom/base/nsJSEnvironment.h index 306f8f174b11..598cb19361c5 100644 --- a/dom/base/nsJSEnvironment.h +++ b/dom/base/nsJSEnvironment.h @@ -51,7 +51,9 @@ class nsIXPConnectJSObjectHolder; class nsAutoPoolRelease; namespace js { class AutoArrayRooter; -template class LazilyConstructed; +} +namespace mozilla { +template class Maybe; } class nsJSContext : public nsIScriptContext, @@ -205,8 +207,8 @@ protected: void *aScope, PRUint32 *aArgc, jsval **aArgv, - js::LazilyConstructed &aPoolRelease, - js::LazilyConstructed &aRooter); + mozilla::Maybe &aPoolRelease, + mozilla::Maybe &aRooter); nsresult AddSupportsPrimitiveTojsvals(nsISupports *aArg, jsval *aArgv); diff --git a/ipc/testshell/XPCShellEnvironment.cpp b/ipc/testshell/XPCShellEnvironment.cpp index 5074dcd958f2..547fbe54b3ab 100644 --- a/ipc/testshell/XPCShellEnvironment.cpp +++ b/ipc/testshell/XPCShellEnvironment.cpp @@ -46,8 +46,6 @@ #include "base/basictypes.h" #include "jsapi.h" -#include "jscntxt.h" -#include "jsdbgapi.h" #include "jsprf.h" #include "xpcpublic.h" @@ -406,13 +404,14 @@ GC(JSContext *cx, jsval *vp) { JSRuntime *rt; - uint32 preBytes; + uint32 preBytes, postBytes; - rt = cx->runtime; - preBytes = rt->gcBytes; + rt = JS_GetRuntime(cx); + preBytes = JS_GetGCParameter(rt, JSGC_BYTES); JS_GC(cx); + postBytes = JS_GetGCParameter(rt, JSGC_BYTES); fprintf(stdout, "before %lu, after %lu, 
break %08lx\n", - (unsigned long)preBytes, (unsigned long)rt->gcBytes, + (unsigned long)preBytes, (unsigned long)postBytes, #ifdef XP_UNIX (unsigned long)sbrk(0) #else diff --git a/js/jetpack/JetpackActorCommon.cpp b/js/jetpack/JetpackActorCommon.cpp index f9ac64a1ec7f..8a9bb09d6636 100644 --- a/js/jetpack/JetpackActorCommon.cpp +++ b/js/jetpack/JetpackActorCommon.cpp @@ -40,7 +40,6 @@ #include "jscntxt.h" #include "jsapi.h" -#include "jstl.h" #include "jshashtable.h" #include "mozilla/jetpack/JetpackActorCommon.h" @@ -51,15 +50,7 @@ #include "nsJSUtils.h" -using mozilla::jetpack::JetpackActorCommon; -using mozilla::jetpack::PHandleParent; -using mozilla::jetpack::HandleParent; -using mozilla::jetpack::PHandleChild; -using mozilla::jetpack::HandleChild; -using mozilla::jetpack::KeyValue; -using mozilla::jetpack::PrimVariant; -using mozilla::jetpack::CompVariant; -using mozilla::jetpack::Variant; +using namespace mozilla::jetpack; class JetpackActorCommon::OpaqueSeenType { @@ -176,7 +167,7 @@ JetpackActorCommon::jsval_to_CompVariant(JSContext* cx, JSType type, jsval from, if (type != JSTYPE_OBJECT) return false; - js::LazilyConstructed lost; + Maybe lost; if (!seen) { lost.construct(); seen = lost.addr(); @@ -337,7 +328,7 @@ JetpackActorCommon::jsval_from_CompVariant(JSContext* cx, jsval* to, OpaqueSeenType* seen) { - js::LazilyConstructed lost; + Maybe lost; if (!seen) { lost.construct(); seen = lost.addr(); diff --git a/js/src/Makefile.in b/js/src/Makefile.in index 7d601b3cfe4b..4faaff3aa0d4 100644 --- a/js/src/Makefile.in +++ b/js/src/Makefile.in @@ -181,6 +181,7 @@ CPPSRCS = \ jsxml.cpp \ prmjtime.cpp \ sharkctl.cpp \ + Stack.cpp \ $(NULL) INSTALLED_HEADERS = \ @@ -267,6 +268,20 @@ INSTALLED_HEADERS = \ prmjtime.h \ $(NULL) +############################################### +# BEGIN include sources for the vm subdirectory +# +VPATH += \ + $(srcdir)/vm \ + $(NULL) + +EXPORTS_NAMESPACES = vm + +EXPORTS_vm = \ + Stack.h \ + StringObject.h \ + $(NULL) + 
############################################### # BEGIN include sources for low-level code shared with Gecko # @@ -274,9 +289,10 @@ VPATH += \ $(srcdir)/../../mfbt \ $(NULL) -EXPORTS_NAMESPACES = mozilla +EXPORTS_NAMESPACES += mozilla EXPORTS_mozilla = \ + Types.h \ Util.h \ $(NULL) @@ -636,7 +652,7 @@ check-malloc-function-usage: $(filter-out %jsalloc.h %jscntxt.h %jsutil.h, $(ALL # We desire these numbers to go down, not up. See "User guide to memory # management within SpiderMonkey" in jsutil.h. - $(srcdir)/config/check_source_count.py OffTheBooks:: 53 \ + $(srcdir)/config/check_source_count.py OffTheBooks:: 52 \ "in Makefile.in" "{cx,rt}->{new_,new_array,malloc_,calloc_,realloc_}" $^ # This should go to zero, if possible. $(srcdir)/config/check_source_count.py UnwantedForeground:: 34 \ @@ -713,6 +729,11 @@ export:: DEFINES += -DEXPORT_JS_API +# mfbt is always packed with us, so if we're building a shared object, +# we need to declare "exported" mfbt symbols on its behalf when we use +# its headers. +DEFINES += -DIMPL_MFBT + # Some platforms that have stdint.h include it in system headers. So # to reliably get limit macros defined, we'd always have to define the # one below before including any header, but that's obscure and diff --git a/js/src/jit-test/tests/basic/bug642772-1.js b/js/src/jit-test/tests/basic/bug642772-1.js new file mode 100644 index 000000000000..b7dc483fcc5e --- /dev/null +++ b/js/src/jit-test/tests/basic/bug642772-1.js @@ -0,0 +1,19 @@ +var n1 = Number.prototype.toFixed; +var s1 = String.prototype.split; +delete Number; +delete String; + +var n2 = (5).toFixed; +var s2 = ("foo").split; + +// Check enumeration doesn't resurrect deleted standard classes +for (x in this) {} + +// Ensure the prototypes are shared. 
+var n3 = (5).toFixed; +var s3 = ("foo").split; + +assertEq(s1, s2); +assertEq(s1, s3); +assertEq(n1, n2); +assertEq(n1, n3); diff --git a/js/src/jit-test/tests/basic/bug642772-2.js b/js/src/jit-test/tests/basic/bug642772-2.js new file mode 100644 index 000000000000..cd7c4004b1fa --- /dev/null +++ b/js/src/jit-test/tests/basic/bug642772-2.js @@ -0,0 +1,104 @@ +function failWrapper(callback) { + try { + callback(); // this should fail + throw "test-error"; // and if it didn't we have a problem` + } catch (e) { + if (e == "test-error") + throw ("Testing error when running " + callback.toString()); + } +} + + +print ("Deleting standard classes"); +delete Function; +delete Object; +delete Array; +delete Boolean; +delete JSON; +delete Date; +delete Math; +delete Number; +delete String; +delete Regexp; +delete XML; +delete Reflect; +delete Proxy; +delete Error; +delete Iterator; +delete Generator; +delete StopIteration; +delete Float32Array; +delete Float64Array; +delete Int16Array; +delete Int32Array; +delete Int32Array; +delete Uint16Array; +delete Uint32Array; +delete Uint8Array; +delete Uint8ClampedArray; +delete Weakmap; + + +print ("Accessing standard classes shouldn't recreate them"); +failWrapper(function () { Function; }); +failWrapper(function () { Object; }); +failWrapper(function () { Array; }); +failWrapper(function () { Boolean; }); +failWrapper(function () { JSON; }); +failWrapper(function () { Date; }); +failWrapper(function () { Math; }); +failWrapper(function () { Number; }); +failWrapper(function () { String; }); +failWrapper(function () { Regexp; }); +failWrapper(function () { XML; }); +failWrapper(function () { Reflect; }); +failWrapper(function () { Proxy; }); +failWrapper(function () { Error; }); +failWrapper(function () { Iterator; }); +failWrapper(function () { Generator; }); +failWrapper(function () { StopIteration; }); +failWrapper(function () { Float32Array; }); +failWrapper(function () { Float64Array; }); +failWrapper(function () { 
Int16Array; }); +failWrapper(function () { Int32Array; }); +failWrapper(function () { Int32Array; }); +failWrapper(function () { Uint16Array; }); +failWrapper(function () { Uint32Array; }); +failWrapper(function () { Uint8Array; }); +failWrapper(function () { Uint8ClampedArray; }); +failWrapper(function () { Weakmap; }); + + +print ("Enumerate over the global object"); +for (c in this) {} + +print ("That shouldn't have recreated the standard classes either"); +failWrapper(function () { Function; }); +failWrapper(function () { Object; }); +failWrapper(function () { Array; }); +failWrapper(function () { Boolean; }); +failWrapper(function () { JSON; }); +failWrapper(function () { Date; }); +failWrapper(function () { Math; }); +failWrapper(function () { Number; }); +failWrapper(function () { String; }); +failWrapper(function () { Regexp; }); +failWrapper(function () { XML; }); +failWrapper(function () { Reflect; }); +failWrapper(function () { Proxy; }); +failWrapper(function () { Error; }); +failWrapper(function () { Iterator; }); +failWrapper(function () { Generator; }); +failWrapper(function () { StopIteration; }); +failWrapper(function () { Float32Array; }); +failWrapper(function () { Float64Array; }); +failWrapper(function () { Int16Array; }); +failWrapper(function () { Int32Array; }); +failWrapper(function () { Int32Array; }); +failWrapper(function () { Uint16Array; }); +failWrapper(function () { Uint32Array; }); +failWrapper(function () { Uint8Array; }); +failWrapper(function () { Uint8ClampedArray; }); +failWrapper(function () { Weakmap; }); + +print ("success"); diff --git a/js/src/jit-test/tests/basic/bug642772-3.js b/js/src/jit-test/tests/basic/bug642772-3.js new file mode 100644 index 000000000000..a722569b4573 --- /dev/null +++ b/js/src/jit-test/tests/basic/bug642772-3.js @@ -0,0 +1,5 @@ +// Catch memory leaks when enumerating over the global object. 
+ +for (let z = 1; z <= 16000; ++z) { + for each (y in this); +} diff --git a/js/src/js.msg b/js/src/js.msg index 36de8ed00625..957e3496ef94 100644 --- a/js/src/js.msg +++ b/js/src/js.msg @@ -112,7 +112,7 @@ MSG_DEF(JSMSG_BAD_SHARP_DEF, 29, 1, JSEXN_ERR, "invalid sharp variable MSG_DEF(JSMSG_BAD_SHARP_USE, 30, 1, JSEXN_ERR, "invalid sharp variable use #{0}#") MSG_DEF(JSMSG_BAD_INSTANCEOF_RHS, 31, 1, JSEXN_TYPEERR, "invalid 'instanceof' operand {0}") MSG_DEF(JSMSG_BAD_BYTECODE, 32, 1, JSEXN_INTERNALERR, "unimplemented JavaScript bytecode {0}") -MSG_DEF(JSMSG_BAD_RADIX, 33, 1, JSEXN_ERR, "illegal radix {0}") +MSG_DEF(JSMSG_BAD_RADIX, 33, 0, JSEXN_RANGEERR, "radix must be an integer at least 2 and no greater than 36") MSG_DEF(JSMSG_PAREN_BEFORE_LET, 34, 0, JSEXN_SYNTAXERR, "missing ( before let head") MSG_DEF(JSMSG_CANT_CONVERT, 35, 1, JSEXN_ERR, "can't convert {0} to an integer") MSG_DEF(JSMSG_CYCLIC_VALUE, 36, 1, JSEXN_TYPEERR, "cyclic {0} value") diff --git a/js/src/jsapi.cpp b/js/src/jsapi.cpp index 9cca598433aa..82c60f173fad 100644 --- a/js/src/jsapi.cpp +++ b/js/src/jsapi.cpp @@ -90,16 +90,15 @@ #include "jstypedarray.h" #include "jsatominlines.h" -#include "jscntxtinlines.h" -#include "jsinterpinlines.h" #include "jsobjinlines.h" #include "jsscopeinlines.h" -#include "jscntxtinlines.h" #include "jsregexpinlines.h" #include "jsscriptinlines.h" #include "jsstrinlines.h" #include "assembler/wtf/Platform.h" +#include "vm/Stack-inl.h" + #if ENABLE_YARR_JIT #include "assembler/jit/ExecutableAllocator.h" #include "methodjit/Logging.h" @@ -845,7 +844,7 @@ JS_SetRuntimePrivate(JSRuntime *rt, void *data) static void StartRequest(JSContext *cx) { - JSThread *t = cx->thread; + JSThread *t = cx->thread(); JS_ASSERT(CURRENT_THREAD_IS_ME(t)); if (t->data.requestDepth) { @@ -855,7 +854,7 @@ StartRequest(JSContext *cx) AutoLockGC lock(rt); /* Wait until the GC is finished. 
*/ - if (rt->gcThread != cx->thread) { + if (rt->gcThread != cx->thread()) { while (rt->gcThread) JS_AWAIT_GC_DONE(rt); } @@ -879,7 +878,7 @@ StartRequest(JSContext *cx) static void StopRequest(JSContext *cx) { - JSThread *t = cx->thread; + JSThread *t = cx->thread(); JS_ASSERT(CURRENT_THREAD_IS_ME(t)); JS_ASSERT(t->data.requestDepth != 0); if (t->data.requestDepth != 1) { @@ -947,7 +946,7 @@ JS_PUBLIC_API(jsrefcount) JS_SuspendRequest(JSContext *cx) { #ifdef JS_THREADSAFE - JSThread *t = cx->thread; + JSThread *t = cx->thread(); JS_ASSERT(CURRENT_THREAD_IS_ME(t)); jsrefcount saveDepth = t->data.requestDepth; @@ -967,7 +966,7 @@ JS_PUBLIC_API(void) JS_ResumeRequest(JSContext *cx, jsrefcount saveDepth) { #ifdef JS_THREADSAFE - JSThread *t = cx->thread; + JSThread *t = cx->thread(); JS_ASSERT(CURRENT_THREAD_IS_ME(t)); if (saveDepth == 0) return; @@ -984,7 +983,7 @@ JS_PUBLIC_API(JSBool) JS_IsInRequest(JSContext *cx) { #ifdef JS_THREADSAFE - JS_ASSERT(CURRENT_THREAD_IS_ME(cx->thread)); + JS_ASSERT(CURRENT_THREAD_IS_ME(cx->thread())); return JS_THREAD_DATA(cx)->requestDepth != 0; #else return false; @@ -1519,7 +1518,7 @@ JS_SetGlobalObject(JSContext *cx, JSObject *obj) CHECK_REQUEST(cx); cx->globalObject = obj; - if (!cx->hasfp()) + if (!cx->running()) cx->resetCompartment(); } @@ -1775,8 +1774,7 @@ JS_ResolveStandardClass(JSContext *cx, JSObject *obj, jsid id, JSBool *resolved) if (stdnm->clasp->flags & JSCLASS_IS_ANONYMOUS) return JS_TRUE; - JSProtoKey key = JSCLASS_CACHED_PROTO_KEY(stdnm->clasp); - if (obj->getReservedSlot(key).isObject()) + if (IsStandardClassResolved(obj, stdnm->clasp)) return JS_TRUE; if (!stdnm->init(cx, obj)) @@ -1797,7 +1795,10 @@ JS_EnumerateStandardClasses(JSContext *cx, JSObject *obj) assertSameCompartment(cx, obj); rt = cx->runtime; - /* Check whether we need to bind 'undefined' and define it if so. */ + /* + * Check whether we need to bind 'undefined' and define it if so. + * Since ES5 15.1.1.3 undefined can't be deleted. 
+ */ atom = rt->atomState.typeAtoms[JSTYPE_VOID]; if (!obj->nativeContains(ATOM_TO_JSID(atom)) && !obj->defineProperty(cx, ATOM_TO_JSID(atom), UndefinedValue(), @@ -1806,12 +1807,12 @@ JS_EnumerateStandardClasses(JSContext *cx, JSObject *obj) return JS_FALSE; } - /* Initialize any classes that have not been resolved yet. */ + /* Initialize any classes that have not been initialized yet. */ for (i = 0; standard_class_atoms[i].init; i++) { - atom = OFFSET_TO_ATOM(rt, standard_class_atoms[i].atomOffset); - if (!obj->nativeContains(ATOM_TO_JSID(atom)) && - !standard_class_atoms[i].init(cx, obj)) { - return JS_FALSE; + if (!js::IsStandardClassResolved(obj, standard_class_atoms[i].clasp) && + !standard_class_atoms[i].init(cx, obj)) + { + return JS_FALSE; } } @@ -2762,7 +2763,7 @@ JS_PUBLIC_API(void) JS_SetNativeStackQuota(JSContext *cx, size_t stackSize) { #ifdef JS_THREADSAFE - JS_ASSERT(cx->thread); + JS_ASSERT(cx->thread()); #endif #if JS_STACK_GROWTH_DIRECTION > 0 @@ -4210,7 +4211,7 @@ JS_CloneFunctionObject(JSContext *cx, JSObject *funobj, JSObject *parent) CHECK_REQUEST(cx); assertSameCompartment(cx, parent); // XXX no funobj for now if (!parent) { - if (cx->hasfp()) + if (cx->running()) parent = GetScopeChain(cx, cx->fp()); if (!parent) parent = cx->globalObject; @@ -5107,7 +5108,7 @@ JS_New(JSContext *cx, JSObject *ctor, uintN argc, jsval *argv) // of object to create, create it, and clamp the return value to an object, // among other details. js_InvokeConstructor does the hard work. 
InvokeArgsGuard args; - if (!cx->stack().pushInvokeArgs(cx, argc, &args)) + if (!cx->stack.pushInvokeArgs(cx, argc, &args)) return NULL; args.calleev().setObject(*ctor); @@ -5141,7 +5142,7 @@ JS_PUBLIC_API(JSOperationCallback) JS_SetOperationCallback(JSContext *cx, JSOperationCallback callback) { #ifdef JS_THREADSAFE - JS_ASSERT(CURRENT_THREAD_IS_ME(cx->thread)); + JS_ASSERT(CURRENT_THREAD_IS_ME(cx->thread())); #endif JSOperationCallback old = cx->operationCallback; cx->operationCallback = callback; @@ -5183,9 +5184,9 @@ JS_IsRunning(JSContext *cx) VOUCH_DOES_NOT_REQUIRE_STACK(); #ifdef JS_TRACER - JS_ASSERT_IF(JS_ON_TRACE(cx) && JS_TRACE_MONITOR_ON_TRACE(cx)->tracecx == cx, cx->hasfp()); + JS_ASSERT_IF(JS_ON_TRACE(cx) && JS_TRACE_MONITOR_ON_TRACE(cx)->tracecx == cx, cx->running()); #endif - JSStackFrame *fp = cx->maybefp(); + StackFrame *fp = cx->maybefp(); while (fp && fp->isDummyFrame()) fp = fp->prev(); return fp != NULL; @@ -5195,11 +5196,11 @@ JS_PUBLIC_API(JSStackFrame *) JS_SaveFrameChain(JSContext *cx) { CHECK_REQUEST(cx); - JSStackFrame *fp = js_GetTopStackFrame(cx); + StackFrame *fp = js_GetTopStackFrame(cx); if (!fp) return NULL; - cx->saveActiveSegment(); - return fp; + cx->stack.saveActiveSegment(); + return Jsvalify(fp); } JS_PUBLIC_API(void) @@ -5207,10 +5208,10 @@ JS_RestoreFrameChain(JSContext *cx, JSStackFrame *fp) { CHECK_REQUEST(cx); JS_ASSERT_NOT_ON_TRACE(cx); - JS_ASSERT(!cx->hasfp()); + JS_ASSERT(!cx->running()); if (!fp) return; - cx->restoreSegment(); + cx->stack.restoreSegment(); } /************************************************************************/ @@ -5580,7 +5581,6 @@ JS_WriteStructuredClone(JSContext *cx, jsval v, uint64 **bufp, size_t *nbytesp, JS_PUBLIC_API(JSBool) JS_StructuredClone(JSContext *cx, jsval v, jsval *vp, - ReadStructuredCloneOp optionalReadOp, const JSStructuredCloneCallbacks *optionalCallbacks, void *closure) { @@ -6031,9 +6031,9 @@ JS_SetContextThread(JSContext *cx) { #ifdef JS_THREADSAFE 
JS_ASSERT(!cx->outstandingRequests); - if (cx->thread) { - JS_ASSERT(CURRENT_THREAD_IS_ME(cx->thread)); - return reinterpret_cast(cx->thread->id); + if (cx->thread()) { + JS_ASSERT(CURRENT_THREAD_IS_ME(cx->thread())); + return reinterpret_cast(cx->thread()->id); } if (!js_InitContextThread(cx)) { @@ -6057,7 +6057,7 @@ JS_ClearContextThread(JSContext *cx) * is a harmless no-op. */ JS_ASSERT(cx->outstandingRequests == 0); - JSThread *t = cx->thread; + JSThread *t = cx->thread(); if (!t) return 0; JS_ASSERT(CURRENT_THREAD_IS_ME(t)); diff --git a/js/src/jsarray.cpp b/js/src/jsarray.cpp index 4e7d133d9403..ebcf193074e8 100644 --- a/js/src/jsarray.cpp +++ b/js/src/jsarray.cpp @@ -111,6 +111,8 @@ #include "jsobjinlines.h" #include "jsstrinlines.h" +#include "vm/Stack-inl.h" + using namespace js; using namespace js::gc; @@ -366,7 +368,7 @@ GetElement(JSContext *cx, JSObject *obj, jsdouble index, JSBool *hole, Value *vp index < obj->getArgsInitialLength() && !(*vp = obj->getArgsElement(uint32(index))).isMagic(JS_ARGS_HOLE)) { *hole = JS_FALSE; - JSStackFrame *fp = (JSStackFrame *)obj->getPrivate(); + StackFrame *fp = (StackFrame *)obj->getPrivate(); if (fp != JS_ARGUMENTS_OBJECT_ON_TRACE) { if (fp) *vp = fp->canonicalActualArg(index); @@ -434,7 +436,7 @@ GetElements(JSContext *cx, JSObject *aobj, jsuint length, Value *vp) * fast path for deleted properties (MagicValue(JS_ARGS_HOLE) since * this requires general-purpose property lookup. */ - if (JSStackFrame *fp = (JSStackFrame *) aobj->getPrivate()) { + if (StackFrame *fp = (StackFrame *) aobj->getPrivate()) { JS_ASSERT(fp->numActualArgs() <= JS_ARGS_LENGTH_MAX); if (!fp->forEachCanonicalActualArg(CopyNonHoleArgsTo(aobj, vp))) goto found_deleted_prop; @@ -1062,7 +1064,7 @@ JSObject::makeDenseArraySlow(JSContext *cx) /* Create a native scope. 
*/ JSObject *arrayProto = getProto(); - js::gc::FinalizeKind kind = js::gc::FinalizeKind(arena()->header()->thingKind); + js::gc::FinalizeKind kind = js::gc::FinalizeKind(arenaHeader()->getThingKind()); if (!InitScopeForObject(cx, this, &js_SlowArrayClass, arrayProto, kind)) return false; @@ -1310,8 +1312,6 @@ static JSBool array_toString_sub(JSContext *cx, JSObject *obj, JSBool locale, JSString *sepstr, Value *rval) { - JS_CHECK_RECURSION(cx, return false); - static const jschar comma = ','; const jschar *sep; size_t seplen; @@ -1393,6 +1393,8 @@ array_toString_sub(JSContext *cx, JSObject *obj, JSBool locale, static JSBool array_toString(JSContext *cx, uintN argc, Value *vp) { + JS_CHECK_RECURSION(cx, return false); + JSObject *obj = ToObject(cx, &vp[1]); if (!obj) return false; @@ -1411,14 +1413,14 @@ array_toString(JSContext *cx, uintN argc, Value *vp) LeaveTrace(cx); InvokeArgsGuard args; - if (!cx->stack().pushInvokeArgs(cx, 0, &args)) + if (!cx->stack.pushInvokeArgs(cx, 0, &args)) return false; args.calleev() = join; args.thisv().setObject(*obj); /* Do the call. */ - if (!Invoke(cx, args, 0)) + if (!Invoke(cx, args)) return false; *vp = args.rval(); return true; @@ -1427,6 +1429,8 @@ array_toString(JSContext *cx, uintN argc, Value *vp) static JSBool array_toLocaleString(JSContext *cx, uintN argc, Value *vp) { + JS_CHECK_RECURSION(cx, return false); + JSObject *obj = ToObject(cx, &vp[1]); if (!obj) return false; @@ -1524,6 +1528,8 @@ InitArrayObject(JSContext *cx, JSObject *obj, jsuint length, const Value *vector static JSBool array_join(JSContext *cx, uintN argc, Value *vp) { + JS_CHECK_RECURSION(cx, return false); + JSString *str; if (argc == 0 || vp[2].isUndefined()) { str = NULL; @@ -2394,9 +2400,8 @@ array_splice(JSContext *cx, uintN argc, Value *vp) /* Convert the first argument into a starting index. 
*/ jsdouble d; - if (!ValueToNumber(cx, *argv, &d)) + if (!ToInteger(cx, *argv, &d)) return JS_FALSE; - d = js_DoubleToInteger(d); if (d < 0) { d += length; if (d < 0) @@ -2414,9 +2419,8 @@ array_splice(JSContext *cx, uintN argc, Value *vp) count = delta; end = length; } else { - if (!ValueToNumber(cx, *argv, &d)) - return JS_FALSE; - d = js_DoubleToInteger(d); + if (!ToInteger(cx, *argv, &d)) + return false; if (d < 0) d = 0; else if (d > delta) @@ -2638,9 +2642,8 @@ array_slice(JSContext *cx, uintN argc, Value *vp) if (argc > 0) { jsdouble d; - if (!ValueToNumber(cx, argv[0], &d)) - return JS_FALSE; - d = js_DoubleToInteger(d); + if (!ToInteger(cx, argv[0], &d)) + return false; if (d < 0) { d += length; if (d < 0) @@ -2651,9 +2654,8 @@ array_slice(JSContext *cx, uintN argc, Value *vp) begin = (jsuint)d; if (argc > 1 && !argv[1].isUndefined()) { - if (!ValueToNumber(cx, argv[1], &d)) - return JS_FALSE; - d = js_DoubleToInteger(d); + if (!ToInteger(cx, argv[1], &d)) + return false; if (d < 0) { d += length; if (d < 0) @@ -2721,9 +2723,8 @@ array_indexOfHelper(JSContext *cx, JSBool isLast, uintN argc, Value *vp) jsdouble start; tosearch = vp[2]; - if (!ValueToNumber(cx, vp[3], &start)) - return JS_FALSE; - start = js_DoubleToInteger(start); + if (!ToInteger(cx, vp[3], &start)) + return false; if (start < 0) { start += length; if (start < 0) { diff --git a/js/src/jsatom.h b/js/src/jsatom.h index 72a595fec95a..108ecbf53c2c 100644 --- a/js/src/jsatom.h +++ b/js/src/jsatom.h @@ -125,6 +125,20 @@ IdToString(JSContext *cx, jsid id) return js_ValueToString(cx, IdToValue(id)); } +template<> +struct DefaultHasher +{ + typedef jsid Lookup; + static HashNumber hash(const Lookup &l) { + JS_ASSERT(l == js_CheckForStringIndex(l)); + return JSID_BITS(l); + } + static bool match(const jsid &id, const Lookup &l) { + JS_ASSERT(l == js_CheckForStringIndex(l)); + return id == l; + } +}; + } #if JS_BYTES_PER_WORD == 4 diff --git a/js/src/jscell.h b/js/src/jscell.h index 
f85457c924e9..5389a38a6c15 100644 --- a/js/src/jscell.h +++ b/js/src/jscell.h @@ -47,6 +47,7 @@ namespace gc { template struct Arena; struct ArenaBitmap; +struct ArenaHeader; struct MarkingDelay; struct Chunk; struct FreeCell; @@ -64,7 +65,8 @@ struct Cell { static const size_t CellSize = size_t(1) << CellShift; static const size_t CellMask = CellSize - 1; - inline Arena *arena() const; + inline uintptr_t address() const; + inline ArenaHeader *arenaHeader() const; inline Chunk *chunk() const; inline ArenaBitmap *bitmap() const; JS_ALWAYS_INLINE size_t cellIndex() const; @@ -82,9 +84,12 @@ struct Cell { JS_ALWAYS_INLINE const js::gc::FreeCell *asFreeCell() const { return reinterpret_cast(this); } + +#ifdef DEBUG + inline bool isAligned() const; +#endif }; -/* FreeCell has always size 8 */ struct FreeCell : Cell { union { FreeCell *link; @@ -92,7 +97,7 @@ struct FreeCell : Cell { }; }; -JS_STATIC_ASSERT(sizeof(FreeCell) == 8); +JS_STATIC_ASSERT(sizeof(FreeCell) == Cell::CellSize); } /* namespace gc */ } /* namespace js */ diff --git a/js/src/jscntxt.cpp b/js/src/jscntxt.cpp index 94c2c9ca5877..b9b8070d40e7 100644 --- a/js/src/jscntxt.cpp +++ b/js/src/jscntxt.cpp @@ -74,6 +74,7 @@ #include "jsobj.h" #include "jsopcode.h" #include "jspubtd.h" +#include "jsscan.h" #include "jsscope.h" #include "jsscript.h" #include "jsstaticcheck.h" @@ -86,449 +87,11 @@ #include "jscntxtinlines.h" #include "jscompartment.h" -#include "jsinterpinlines.h" #include "jsobjinlines.h" -#ifdef XP_WIN -# include "jswin.h" -#elif defined(XP_OS2) -# define INCL_DOSMEMMGR -# include -#else -# include -# include -# if !defined(MAP_ANONYMOUS) -# if defined(MAP_ANON) -# define MAP_ANONYMOUS MAP_ANON -# else -# define MAP_ANONYMOUS 0 -# endif -# endif -#endif - using namespace js; using namespace js::gc; -JS_REQUIRES_STACK bool -StackSegment::contains(const JSStackFrame *fp) const -{ - JS_ASSERT(inContext()); - - if (fp < initialFrame) - return false; - - JSStackFrame *start; - if (isActive()) { - 
JS_ASSERT(cx->hasfp() && this == cx->activeSegment()); - start = cx->fp(); - } else { - JS_ASSERT(suspendedRegs && suspendedRegs->fp); - start = suspendedRegs->fp; - } - - if (fp > start) - return false; - -#ifdef DEBUG - bool found = false; - JSStackFrame *stop = initialFrame->prev(); - for (JSStackFrame *f = start; !found && f != stop; f = f->prev()) { - if (f == fp) { - found = true; - break; - } - } - JS_ASSERT(found); -#endif - - return true; -} - -JSStackFrame * -StackSegment::computeNextFrame(JSStackFrame *fp) const -{ - JS_ASSERT(contains(fp)); - JS_ASSERT(fp != getCurrentFrame()); - - JSStackFrame *next = getCurrentFrame(); - JSStackFrame *prev; - while ((prev = next->prev()) != fp) - next = prev; - return next; -} - -StackSpace::StackSpace() - : base(NULL), -#ifdef XP_WIN - commitEnd(NULL), -#endif - end(NULL), - currentSegment(NULL), -#ifdef DEBUG - invokeSegment(NULL), - invokeFrame(NULL), -#endif - invokeArgEnd(NULL) -{ -} - -bool -StackSpace::init() -{ - void *p; -#ifdef XP_WIN - p = VirtualAlloc(NULL, CAPACITY_BYTES, MEM_RESERVE, PAGE_READWRITE); - if (!p) - return false; - void *check = VirtualAlloc(p, COMMIT_BYTES, MEM_COMMIT, PAGE_READWRITE); - if (p != check) - return false; - base = reinterpret_cast(p); - commitEnd = base + COMMIT_VALS; - end = base + CAPACITY_VALS; -#elif defined(XP_OS2) - if (DosAllocMem(&p, CAPACITY_BYTES, PAG_COMMIT | PAG_READ | PAG_WRITE | OBJ_ANY) && - DosAllocMem(&p, CAPACITY_BYTES, PAG_COMMIT | PAG_READ | PAG_WRITE)) - return false; - base = reinterpret_cast(p); - end = base + CAPACITY_VALS; -#else - JS_ASSERT(CAPACITY_BYTES % getpagesize() == 0); - p = mmap(NULL, CAPACITY_BYTES, PROT_READ | PROT_WRITE, MAP_PRIVATE | MAP_ANONYMOUS, -1, 0); - if (p == MAP_FAILED) - return false; - base = reinterpret_cast(p); - end = base + CAPACITY_VALS; -#endif - return true; -} - -StackSpace::~StackSpace() -{ - if (!base) - return; -#ifdef XP_WIN - VirtualFree(base, (commitEnd - base) * sizeof(Value), MEM_DECOMMIT); - VirtualFree(base, 
0, MEM_RELEASE); -#elif defined(XP_OS2) - DosFreeMem(base); -#else -#ifdef SOLARIS - munmap((caddr_t)base, CAPACITY_BYTES); -#else - munmap(base, CAPACITY_BYTES); -#endif -#endif -} - -#ifdef XP_WIN -JS_FRIEND_API(bool) -StackSpace::bumpCommit(Value *from, ptrdiff_t nvals) const -{ - JS_ASSERT(end - from >= nvals); - Value *newCommit = commitEnd; - Value *request = from + nvals; - - /* Use a dumb loop; will probably execute once. */ - JS_ASSERT((end - newCommit) % COMMIT_VALS == 0); - do { - newCommit += COMMIT_VALS; - JS_ASSERT((end - newCommit) >= 0); - } while (newCommit < request); - - /* The cast is safe because CAPACITY_BYTES is small. */ - int32 size = static_cast(newCommit - commitEnd) * sizeof(Value); - - if (!VirtualAlloc(commitEnd, size, MEM_COMMIT, PAGE_READWRITE)) - return false; - commitEnd = newCommit; - return true; -} -#endif - -void -StackSpace::mark(JSTracer *trc) -{ - /* - * The correctness/completeness of marking depends on the continuity - * invariants described by the StackSegment and StackSpace definitions. - * - * NB: - * Stack slots might be torn or uninitialized in the presence of method - * JIT'd code. Arguments are an exception and are always fully synced - * (so they can be read by functions). - */ - Value *end = firstUnused(); - for (StackSegment *seg = currentSegment; seg; seg = seg->getPreviousInMemory()) { - STATIC_ASSERT(ubound(end) >= 0); - if (seg->inContext()) { - /* This may be the only pointer to the initialVarObj. */ - if (seg->hasInitialVarObj()) - MarkObject(trc, seg->getInitialVarObj(), "varobj"); - - /* Mark slots/args trailing off of the last stack frame. */ - JSStackFrame *fp = seg->getCurrentFrame(); - MarkStackRangeConservatively(trc, fp->slots(), end); - - /* Mark stack frames and slots/args between stack frames. 
*/ - JSStackFrame *initial = seg->getInitialFrame(); - for (JSStackFrame *f = fp; f != initial; f = f->prev()) { - js_TraceStackFrame(trc, f); - MarkStackRangeConservatively(trc, f->prev()->slots(), (Value *)f); - } - - /* Mark initial stack frame and leading args. */ - js_TraceStackFrame(trc, initial); - MarkStackRangeConservatively(trc, seg->valueRangeBegin(), (Value *)initial); - } else { - /* Mark slots/args trailing off segment. */ - MarkValueRange(trc, seg->valueRangeBegin(), end, "stack"); - } - end = (Value *)seg; - } -} - -bool -StackSpace::pushSegmentForInvoke(JSContext *cx, uintN argc, InvokeArgsGuard *ag) -{ - Value *start = firstUnused(); - ptrdiff_t nvals = VALUES_PER_STACK_SEGMENT + 2 + argc; - if (!ensureSpace(cx, start, nvals)) - return false; - - StackSegment *seg = new(start) StackSegment; - seg->setPreviousInMemory(currentSegment); - currentSegment = seg; - - ag->cx = cx; - ag->seg = seg; - ImplicitCast(*ag) = CallArgsFromVp(argc, seg->valueRangeBegin()); - - /* Use invokeArgEnd to root [vp, vpend) until the frame is pushed. 
*/ -#ifdef DEBUG - ag->prevInvokeSegment = invokeSegment; - invokeSegment = seg; - ag->prevInvokeFrame = invokeFrame; - invokeFrame = NULL; -#endif - ag->prevInvokeArgEnd = invokeArgEnd; - invokeArgEnd = ag->argv() + ag->argc(); - return true; -} - -void -StackSpace::popSegmentForInvoke(const InvokeArgsGuard &ag) -{ - JS_ASSERT(!currentSegment->inContext()); - JS_ASSERT(ag.seg == currentSegment); - JS_ASSERT(invokeSegment == currentSegment); - JS_ASSERT(invokeArgEnd == ag.argv() + ag.argc()); - - currentSegment = currentSegment->getPreviousInMemory(); - -#ifdef DEBUG - invokeSegment = ag.prevInvokeSegment; - invokeFrame = ag.prevInvokeFrame; -#endif - invokeArgEnd = ag.prevInvokeArgEnd; -} - -bool -StackSpace::getSegmentAndFrame(JSContext *cx, uintN vplen, uintN nslots, - FrameGuard *fg) const -{ - Value *start = firstUnused(); - uintN nvals = VALUES_PER_STACK_SEGMENT + vplen + VALUES_PER_STACK_FRAME + nslots; - if (!ensureSpace(cx, start, nvals)) - return false; - - fg->seg_ = new(start) StackSegment; - fg->vp_ = start + VALUES_PER_STACK_SEGMENT; - fg->fp_ = reinterpret_cast(fg->vp() + vplen); - return true; -} - -void -StackSpace::pushSegmentAndFrame(JSContext *cx, JSFrameRegs *regs, FrameGuard *fg) -{ - /* Caller should have already initialized regs. */ - JS_ASSERT(regs->fp == fg->fp()); - StackSegment *seg = fg->segment(); - - /* Register new segment/frame with the context. */ - cx->pushSegmentAndFrame(seg, *regs); - - /* Officially push the segment/frame on the stack. */ - seg->setPreviousInMemory(currentSegment); - currentSegment = seg; - - /* Mark as 'pushed' in the guard. */ - fg->cx_ = cx; -} - -void -StackSpace::popSegmentAndFrame(JSContext *cx) -{ - JS_ASSERT(isCurrentAndActive(cx)); - JS_ASSERT(cx->hasActiveSegment()); - - PutActivationObjects(cx, cx->fp()); - - /* Officially pop the segment/frame from the stack. */ - currentSegment = currentSegment->getPreviousInMemory(); - - /* Unregister pushed segment/frame from the context. 
*/ - cx->popSegmentAndFrame(); - - /* - * N.B. This StackSpace should be GC-able without any operations after - * cx->popSegmentAndFrame executes since it can trigger GC. - */ -} - -FrameGuard::~FrameGuard() -{ - if (!pushed()) - return; - JS_ASSERT(cx_->activeSegment() == segment()); - JS_ASSERT(cx_->maybefp() == fp()); - cx_->stack().popSegmentAndFrame(cx_); -} - -bool -StackSpace::getExecuteFrame(JSContext *cx, JSScript *script, ExecuteFrameGuard *fg) const -{ - return getSegmentAndFrame(cx, 2, script->nslots, fg); -} - -void -StackSpace::pushExecuteFrame(JSContext *cx, JSObject *initialVarObj, ExecuteFrameGuard *fg) -{ - JSStackFrame *fp = fg->fp(); - JSScript *script = fp->script(); - fg->regs_.pc = script->code; - fg->regs_.fp = fp; - fg->regs_.sp = fp->base(); - pushSegmentAndFrame(cx, &fg->regs_, fg); - fg->seg_->setInitialVarObj(initialVarObj); -} - -bool -StackSpace::pushDummyFrame(JSContext *cx, JSObject &scopeChain, DummyFrameGuard *fg) -{ - if (!getSegmentAndFrame(cx, 0 /*vplen*/, 0 /*nslots*/, fg)) - return false; - fg->fp()->initDummyFrame(cx, scopeChain); - fg->regs_.fp = fg->fp(); - fg->regs_.pc = NULL; - fg->regs_.sp = fg->fp()->slots(); - pushSegmentAndFrame(cx, &fg->regs_, fg); - return true; -} - -bool -StackSpace::getGeneratorFrame(JSContext *cx, uintN vplen, uintN nslots, GeneratorFrameGuard *fg) -{ - return getSegmentAndFrame(cx, vplen, nslots, fg); -} - -void -StackSpace::pushGeneratorFrame(JSContext *cx, JSFrameRegs *regs, GeneratorFrameGuard *fg) -{ - JS_ASSERT(regs->fp == fg->fp()); - JS_ASSERT(regs->fp->prev() == cx->maybefp()); - pushSegmentAndFrame(cx, regs, fg); -} - -bool -StackSpace::bumpCommitAndLimit(JSStackFrame *base, Value *sp, uintN nvals, Value **limit) const -{ - JS_ASSERT(sp >= firstUnused()); - JS_ASSERT(sp + nvals >= *limit); -#ifdef XP_WIN - if (commitEnd <= *limit) { - Value *quotaEnd = (Value *)base + STACK_QUOTA; - if (sp + nvals < quotaEnd) { - if (!ensureSpace(NULL, sp, nvals)) - return false; - *limit = 
Min(quotaEnd, commitEnd); - return true; - } - } -#endif - return false; -} - -void -FrameRegsIter::initSlow() -{ - if (!curseg) { - curfp = NULL; - cursp = NULL; - curpc = NULL; - return; - } - - JS_ASSERT(curseg->isSuspended()); - curfp = curseg->getSuspendedFrame(); - cursp = curseg->getSuspendedRegs()->sp; - curpc = curseg->getSuspendedRegs()->pc; -} - -/* - * Using the invariant described in the js::StackSegment comment, we know that, - * when a pair of prev-linked stack frames are in the same segment, the - * first frame's address is the top of the prev-frame's stack, modulo missing - * arguments. - */ -void -FrameRegsIter::incSlow(JSStackFrame *fp, JSStackFrame *prev) -{ - JS_ASSERT(prev); - JS_ASSERT(curpc == curfp->pc(cx, fp)); - JS_ASSERT(fp == curseg->getInitialFrame()); - - /* - * If fp is in cs and the prev-frame is in csprev, it is not necessarily - * the case that |cs->getPreviousInContext == csprev| or that - * |csprev->getSuspendedFrame == prev| (because of indirect eval and - * JS_EvaluateInStackFrame). To compute prev's sp, we need to do a linear - * scan, keeping track of what is immediately after prev in memory. - */ - curseg = curseg->getPreviousInContext(); - cursp = curseg->getSuspendedRegs()->sp; - JSStackFrame *f = curseg->getSuspendedFrame(); - while (f != prev) { - if (f == curseg->getInitialFrame()) { - curseg = curseg->getPreviousInContext(); - cursp = curseg->getSuspendedRegs()->sp; - f = curseg->getSuspendedFrame(); - } else { - cursp = f->formalArgsEnd(); - f = f->prev(); - } - } -} - -AllFramesIter::AllFramesIter(JSContext *cx) - : curcs(cx->stack().getCurrentSegment()), - curfp(curcs ? curcs->getCurrentFrame() : NULL) -{ -} - -AllFramesIter& -AllFramesIter::operator++() -{ - JS_ASSERT(!done()); - if (curfp == curcs->getInitialFrame()) { - curcs = curcs->getPreviousInMemory(); - curfp = curcs ? 
curcs->getCurrentFrame() : NULL; - } else { - curfp = curfp->prev(); - } - return *this; -} - namespace js { ThreadData::ThreadData() @@ -649,16 +212,23 @@ js_InitContextThread(JSContext *cx) return false; JS_APPEND_LINK(&cx->threadLinks, &thread->contextList); - cx->thread = thread; + cx->setThread(thread); return true; } +void +JSContext::setThread(JSThread *thread) +{ + thread_ = thread; + stack.threadReset(); +} + void js_ClearContextThread(JSContext *cx) { - JS_ASSERT(CURRENT_THREAD_IS_ME(cx->thread)); + JS_ASSERT(CURRENT_THREAD_IS_ME(cx->thread())); JS_REMOVE_AND_INIT_LINK(&cx->threadLinks); - cx->thread = NULL; + cx->setThread(NULL); } #endif /* JS_THREADSAFE */ @@ -711,7 +281,7 @@ js_PurgeThreads(JSContext *cx) JSThread *thread = e.front().value; if (JS_CLIST_IS_EMPTY(&thread->contextList)) { - JS_ASSERT(cx->thread != thread); + JS_ASSERT(cx->thread() != thread); Foreground::delete_(thread); e.removeFront(); } else { @@ -892,7 +462,7 @@ DumpEvalCacheMeter(JSContext *cx) fprintf(fp, "eval cache meter (%p):\n", #ifdef JS_THREADSAFE - (void *) cx->thread + (void *) cx->thread() #else (void *) cx->runtime #endif @@ -980,8 +550,8 @@ js_DestroyContext(JSContext *cx, JSDestroyContextMode mode) * optimized builds. We assume that the embedding knows that an OOM error * cannot happen in JS_SetContextThread. */ - JS_ASSERT(cx->thread && CURRENT_THREAD_IS_ME(cx->thread)); - if (!cx->thread) + JS_ASSERT(cx->thread() && CURRENT_THREAD_IS_ME(cx->thread())); + if (!cx->thread()) JS_SetContextThread(cx); /* @@ -990,7 +560,7 @@ js_DestroyContext(JSContext *cx, JSDestroyContextMode mode) * on this cx contributes to cx->thread->data.requestDepth and there is no * JS_SuspendRequest calls that set aside the counter. 
*/ - JS_ASSERT(cx->outstandingRequests <= cx->thread->data.requestDepth); + JS_ASSERT(cx->outstandingRequests <= cx->thread()->data.requestDepth); #endif if (mode != JSDCM_NEW_FAILED) { @@ -1012,7 +582,7 @@ js_DestroyContext(JSContext *cx, JSDestroyContextMode mode) * Typically we are called outside a request, so ensure that the GC is not * running before removing the context from rt->contextList, see bug 477021. */ - if (cx->thread->data.requestDepth == 0) + if (cx->thread()->data.requestDepth == 0) js_WaitForGC(rt); #endif JS_REMOVE_LINK(&cx->link); @@ -1043,7 +613,7 @@ js_DestroyContext(JSContext *cx, JSDestroyContextMode mode) * force or maybe run the GC, but by that point, rt->state will * not be JSRTS_UP, and that GC attempt will return early. */ - if (cx->thread->data.requestDepth == 0) + if (cx->thread()->data.requestDepth == 0) JS_BeginRequest(cx); #endif @@ -1089,7 +659,7 @@ js_DestroyContext(JSContext *cx, JSDestroyContextMode mode) } #ifdef JS_THREADSAFE #ifdef DEBUG - JSThread *t = cx->thread; + JSThread *t = cx->thread(); #endif js_ClearContextThread(cx); JS_ASSERT_IF(JS_CLIST_IS_EMPTY(&t->contextList), !t->data.requestDepth); @@ -1109,7 +679,9 @@ js_ContextIterator(JSRuntime *rt, JSBool unlocked, JSContext **iterp) { JSContext *cx = *iterp; - Conditionally lockIf(!!unlocked, rt); + Maybe lockIf; + if (unlocked) + lockIf.construct(rt); cx = js_ContextFromLinkField(cx ? 
cx->link.next : rt->contextList.next); if (&cx->link == &rt->contextList) cx = NULL; @@ -1123,7 +695,7 @@ js_NextActiveContext(JSRuntime *rt, JSContext *cx) JSContext *iter = cx; #ifdef JS_THREADSAFE while ((cx = js_ContextIterator(rt, JS_FALSE, &iter)) != NULL) { - if (cx->outstandingRequests && cx->thread->data.requestDepth) + if (cx->outstandingRequests && cx->thread()->data.requestDepth) break; } return cx; @@ -1191,7 +763,7 @@ PopulateReportBlame(JSContext *cx, JSErrorReport *report) * Walk stack until we find a frame that is associated with some script * rather than a native frame. */ - for (JSStackFrame *fp = js_GetTopStackFrame(cx); fp; fp = fp->prev()) { + for (StackFrame *fp = js_GetTopStackFrame(cx); fp; fp = fp->prev()) { if (fp->pc(cx)) { report->filename = fp->script()->filename; report->lineno = js_FramePCToLineNumber(cx, fp); @@ -1253,22 +825,24 @@ js_ReportOutOfMemory(JSContext *cx) } void -js_ReportOutOfScriptQuota(JSContext *cx) +js_ReportOutOfScriptQuota(JSContext *maybecx) { - JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL, - JSMSG_SCRIPT_STACK_QUOTA); + if (maybecx) + JS_ReportErrorNumber(maybecx, js_GetErrorMessage, NULL, JSMSG_SCRIPT_STACK_QUOTA); } JS_FRIEND_API(void) -js_ReportOverRecursed(JSContext *cx) +js_ReportOverRecursed(JSContext *maybecx) { - JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL, JSMSG_OVER_RECURSED); + if (maybecx) + JS_ReportErrorNumber(maybecx, js_GetErrorMessage, NULL, JSMSG_OVER_RECURSED); } void -js_ReportAllocationOverflow(JSContext *cx) +js_ReportAllocationOverflow(JSContext *maybecx) { - JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL, JSMSG_ALLOC_OVERFLOW); + if (maybecx) + JS_ReportErrorNumber(maybecx, js_GetErrorMessage, NULL, JSMSG_ALLOC_OVERFLOW); } /* @@ -1286,7 +860,7 @@ checkReportFlags(JSContext *cx, uintN *flags) * We assume that if the top frame is a native, then it is strict if * the nearest scripted frame is strict, see bug 536306. 
*/ - JSStackFrame *fp = js_GetScriptedCaller(cx, NULL); + StackFrame *fp = js_GetScriptedCaller(cx, NULL); if (fp && fp->script()->strictModeCode) *flags &= ~JSREPORT_WARNING; else if (cx->hasStrictOption()) @@ -1781,7 +1355,7 @@ TriggerOperationCallback(JSContext *cx) */ ThreadData *td; #ifdef JS_THREADSAFE - JSThread *thread = cx->thread; + JSThread *thread = cx->thread(); if (!thread) return; td = &thread->data; @@ -1800,8 +1374,8 @@ TriggerAllOperationCallbacks(JSRuntime *rt) } /* namespace js */ -JSStackFrame * -js_GetScriptedCaller(JSContext *cx, JSStackFrame *fp) +StackFrame * +js_GetScriptedCaller(JSContext *cx, StackFrame *fp) { if (!fp) fp = js_GetTopStackFrame(cx); @@ -1824,7 +1398,7 @@ js_GetCurrentBytecodePC(JSContext* cx) #endif { JS_ASSERT_NOT_ON_TRACE(cx); /* for static analysis */ - pc = cx->regs ? cx->regs->pc : NULL; + pc = cx->running() ? cx->regs().pc : NULL; if (!pc) return NULL; imacpc = cx->fp()->maybeImacropc(); @@ -1891,14 +1465,14 @@ JSContext::JSContext(JSRuntime *rt) : hasVersionOverride(false), runtime(rt), compartment(NULL), - regs(NULL), + stack(thisDuringConstruction()), busyArrays() {} JSContext::~JSContext() { #ifdef JS_THREADSAFE - JS_ASSERT(!thread); + JS_ASSERT(!thread_); #endif /* Free the stuff hanging off of cx. 
*/ @@ -1924,7 +1498,7 @@ void JSContext::resetCompartment() { JSObject *scopeobj; - if (hasfp()) { + if (stack.running()) { scopeobj = &fp()->scopeChain(); } else { scopeobj = globalObject; @@ -1969,71 +1543,10 @@ JSContext::wrapPendingException() setPendingException(v); } -void -JSContext::pushSegmentAndFrame(js::StackSegment *newseg, JSFrameRegs &newregs) -{ - JS_ASSERT(regs != &newregs); - if (hasActiveSegment()) - currentSegment->suspend(regs); - newseg->setPreviousInContext(currentSegment); - currentSegment = newseg; - setCurrentRegs(&newregs); - newseg->joinContext(this, newregs.fp); -} - -void -JSContext::popSegmentAndFrame() -{ - JS_ASSERT_IF(regs->fp->hasCallObj(), !regs->fp->callObj().getPrivate()); - JS_ASSERT_IF(regs->fp->hasArgsObj(), !regs->fp->argsObj().getPrivate()); - JS_ASSERT(currentSegment->maybeContext() == this); - JS_ASSERT(currentSegment->getInitialFrame() == regs->fp); - - /* - * NB: This function calls resetCompartment, which may GC, so the stack needs - * to be in a GC-able state by that point. 
- */ - - currentSegment->leaveContext(); - currentSegment = currentSegment->getPreviousInContext(); - if (currentSegment) { - if (currentSegment->isSaved()) { - setCurrentRegs(NULL); - resetCompartment(); - } else { - setCurrentRegs(currentSegment->getSuspendedRegs()); - currentSegment->resume(); - } - } else { - JS_ASSERT(regs->fp->prev() == NULL); - setCurrentRegs(NULL); - resetCompartment(); - } - maybeMigrateVersionOverride(); -} - -void -JSContext::saveActiveSegment() -{ - JS_ASSERT(hasActiveSegment()); - currentSegment->save(regs); - setCurrentRegs(NULL); - resetCompartment(); -} - -void -JSContext::restoreSegment() -{ - js::StackSegment *ccs = currentSegment; - setCurrentRegs(ccs->getSuspendedRegs()); - ccs->restore(); - resetCompartment(); -} - JSGenerator * -JSContext::generatorFor(JSStackFrame *fp) const +JSContext::generatorFor(StackFrame *fp) const { - JS_ASSERT(stack().contains(fp) && fp->isGeneratorFrame()); + JS_ASSERT(stack.contains(fp) && fp->isGeneratorFrame()); JS_ASSERT(!fp->isFloatingGenerator()); JS_ASSERT(!genStack.empty()); @@ -2049,17 +1562,6 @@ JSContext::generatorFor(JSStackFrame *fp) const return NULL; } -StackSegment * -StackSpace::containingSegment(const JSStackFrame *target) -{ - for (StackSegment *seg = currentSegment; seg; seg = seg->getPreviousInMemory()) { - if (seg->contains(target)) - return seg; - } - JS_NOT_REACHED("frame not in stack space"); - return NULL; -} - JS_FRIEND_API(void) JSRuntime::onTooMuchMalloc() { diff --git a/js/src/jscntxt.h b/js/src/jscntxt.h index 66245eb61b02..4461ff3dd080 100644 --- a/js/src/jscntxt.h +++ b/js/src/jscntxt.h @@ -67,6 +67,8 @@ #include "jsvector.h" #include "prmjtime.h" +#include "vm/Stack.h" + #ifdef _MSC_VER #pragma warning(push) #pragma warning(disable:4100) /* Silence unreferenced formal parameter warnings */ @@ -91,11 +93,8 @@ namespace js { /* Tracer constants. 
*/ static const size_t MONITOR_N_GLOBAL_STATES = 4; static const size_t FRAGMENT_TABLE_SIZE = 512; -static const size_t MAX_NATIVE_STACK_SLOTS = 4096; -static const size_t MAX_CALL_STACK_ENTRIES = 500; static const size_t MAX_GLOBAL_SLOTS = 4096; static const size_t GLOBAL_SLOTS_BUFFER_SIZE = MAX_GLOBAL_SLOTS + 1; -static const size_t MAX_SLOW_NATIVE_EXTRA_SLOTS = 16; /* Forward declarations of tracer types. */ class VMAllocator; @@ -118,647 +117,6 @@ namespace mjit { class JaegerCompartment; } -/* - * A StackSegment (referred to as just a 'segment') contains a prev-linked set - * of stack frames and the slots associated with each frame. A segment and its - * contained frames/slots also have a precise memory layout that is described - * in the js::StackSpace comment. A key layout invariant for segments is that - * prev-linked frames are adjacent in memory, separated only by the values that - * constitute the locals and expression stack of the prev-frame. - * - * The set of stack frames in a non-empty segment start at the segment's - * "current frame", which is the most recently pushed frame, and ends at the - * segment's "initial frame". Note that, while all stack frames in a segment - * are prev-linked, not all prev-linked frames are in the same segment. Hence, - * for a segment |ss|, |ss->getInitialFrame()->prev| may be non-null and in a - * different segment. This occurs when the VM reenters itself (via Invoke or - * Execute). In full generality, a single context may contain a forest of trees - * of stack frames. With respect to this forest, a segment contains a linear - * path along a single tree, not necessarily to the root. - * - * The frames of a non-empty segment must all be in the same context and thus - * each non-empty segment is referred to as being "in" a context. Segments in a - * context have an additional state of being either "active" or "suspended". 
A - * suspended segment |ss| has a "suspended frame" which is snapshot of |cx->regs| - * when the segment was suspended and serves as the current frame of |ss|. - * There is at most one active segment in a given context. Segments in a - * context execute LIFO and are maintained in a stack. The top of this stack - * is the context's "current segment". If a context |cx| has an active segment - * |ss|, then: - * 1. |ss| is |cx|'s current segment, - * 2. |cx->regs != NULL|, and - * 3. |ss|'s current frame is |cx->regs->fp|. - * Moreover, |cx->regs != NULL| iff |cx| has an active segment. - * - * An empty segment is not associated with any context. Empty segments are - * created when there is not an active segment for a context at the top of the - * stack and claim space for the arguments of an Invoke before the Invoke's - * stack frame is pushed. During the intervals when the arguments have been - * pushed, but not the stack frame, the segment cannot be pushed onto the - * context, since that would require some hack to deal with cx->fp not being - * the current frame of cx->currentSegment. - * - * Finally, (to support JS_SaveFrameChain/JS_RestoreFrameChain) a suspended - * segment may or may not be "saved". Normally, when the active segment is - * popped, the previous segment (which is necessarily suspended) becomes - * active. If the previous segment was saved, however, then it stays suspended - * until it is made active by a call to JS_RestoreFrameChain. This is why a - * context may have a current segment, but not an active segment. - */ -class StackSegment -{ - /* The context to which this segment belongs. */ - JSContext *cx; - - /* Link for JSContext segment stack mentioned in big comment above. */ - StackSegment *previousInContext; - - /* Link for StackSpace segment stack mentioned in StackSpace comment. */ - StackSegment *previousInMemory; - - /* The first frame executed in this segment. 
null iff cx is null */ - JSStackFrame *initialFrame; - - /* If this segment is suspended, |cx->regs| when it was suspended. */ - JSFrameRegs *suspendedRegs; - - /* The varobj on entry to initialFrame. */ - JSObject *initialVarObj; - - /* Whether this segment was suspended by JS_SaveFrameChain. */ - bool saved; - - /* Align at 8 bytes on all platforms. */ -#if JS_BITS_PER_WORD == 32 - void *padding; -#endif - - /* - * To make isActive a single null-ness check, this non-null constant is - * assigned to suspendedRegs when !inContext. - */ -#define NON_NULL_SUSPENDED_REGS ((JSFrameRegs *)0x1) - - public: - StackSegment() - : cx(NULL), previousInContext(NULL), previousInMemory(NULL), - initialFrame(NULL), suspendedRegs(NON_NULL_SUSPENDED_REGS), - initialVarObj(NULL), saved(false) - { - JS_ASSERT(!inContext()); - } - - /* Safe casts guaranteed by the contiguous-stack layout. */ - - Value *valueRangeBegin() const { - return (Value *)(this + 1); - } - - /* - * As described in the comment at the beginning of the class, a segment - * is in one of three states: - * - * !inContext: the segment has been created to root arguments for a - * future call to Invoke. - * isActive: the segment describes a set of stack frames in a context, - * where the top frame currently executing. - * isSuspended: like isActive, but the top frame has been suspended. - */ - - bool inContext() const { - JS_ASSERT(!!cx == !!initialFrame); - JS_ASSERT_IF(!cx, suspendedRegs == NON_NULL_SUSPENDED_REGS && !saved); - return cx; - } - - bool isActive() const { - JS_ASSERT_IF(!suspendedRegs, cx && !saved); - JS_ASSERT_IF(!cx, suspendedRegs == NON_NULL_SUSPENDED_REGS); - return !suspendedRegs; - } - - bool isSuspended() const { - JS_ASSERT_IF(!cx || !suspendedRegs, !saved); - JS_ASSERT_IF(!cx, suspendedRegs == NON_NULL_SUSPENDED_REGS); - return cx && suspendedRegs; - } - - /* Substate of suspended, queryable in any state. 
*/ - - bool isSaved() const { - JS_ASSERT_IF(saved, isSuspended()); - return saved; - } - - /* Transitioning between inContext <--> isActive */ - - void joinContext(JSContext *cx, JSStackFrame *f) { - JS_ASSERT(!inContext()); - this->cx = cx; - initialFrame = f; - suspendedRegs = NULL; - JS_ASSERT(isActive()); - } - - void leaveContext() { - JS_ASSERT(isActive()); - this->cx = NULL; - initialFrame = NULL; - suspendedRegs = NON_NULL_SUSPENDED_REGS; - JS_ASSERT(!inContext()); - } - - JSContext *maybeContext() const { - return cx; - } - -#undef NON_NULL_SUSPENDED_REGS - - /* Transitioning between isActive <--> isSuspended */ - - void suspend(JSFrameRegs *regs) { - JS_ASSERT(isActive()); - JS_ASSERT(regs && regs->fp && contains(regs->fp)); - suspendedRegs = regs; - JS_ASSERT(isSuspended()); - } - - void resume() { - JS_ASSERT(isSuspended()); - suspendedRegs = NULL; - JS_ASSERT(isActive()); - } - - /* When isSuspended, transitioning isSaved <--> !isSaved */ - - void save(JSFrameRegs *regs) { - JS_ASSERT(!isSuspended()); - suspend(regs); - saved = true; - JS_ASSERT(isSaved()); - } - - void restore() { - JS_ASSERT(isSaved()); - saved = false; - resume(); - JS_ASSERT(!isSuspended()); - } - - /* Data available when inContext */ - - JSStackFrame *getInitialFrame() const { - JS_ASSERT(inContext()); - return initialFrame; - } - - inline JSFrameRegs *getCurrentRegs() const; - inline JSStackFrame *getCurrentFrame() const; - - /* Data available when isSuspended. */ - - JSFrameRegs *getSuspendedRegs() const { - JS_ASSERT(isSuspended()); - return suspendedRegs; - } - - JSStackFrame *getSuspendedFrame() const { - return suspendedRegs->fp; - } - - /* JSContext / js::StackSpace bookkeeping. 
*/ - - void setPreviousInContext(StackSegment *seg) { - previousInContext = seg; - } - - StackSegment *getPreviousInContext() const { - return previousInContext; - } - - void setPreviousInMemory(StackSegment *seg) { - previousInMemory = seg; - } - - StackSegment *getPreviousInMemory() const { - return previousInMemory; - } - - void setInitialVarObj(JSObject *obj) { - JS_ASSERT(inContext()); - initialVarObj = obj; - } - - bool hasInitialVarObj() { - JS_ASSERT(inContext()); - return initialVarObj != NULL; - } - - JSObject &getInitialVarObj() const { - JS_ASSERT(inContext() && initialVarObj); - return *initialVarObj; - } - - JS_REQUIRES_STACK bool contains(const JSStackFrame *fp) const; - - JSStackFrame *computeNextFrame(JSStackFrame *fp) const; -}; - -static const size_t VALUES_PER_STACK_SEGMENT = sizeof(StackSegment) / sizeof(Value); -JS_STATIC_ASSERT(sizeof(StackSegment) % sizeof(Value) == 0); - -/* See StackSpace::pushInvokeArgs. */ -class InvokeArgsGuard : public CallArgs -{ - friend class StackSpace; - JSContext *cx; /* null implies nothing pushed */ - StackSegment *seg; - Value *prevInvokeArgEnd; -#ifdef DEBUG - StackSegment *prevInvokeSegment; - JSStackFrame *prevInvokeFrame; -#endif - public: - InvokeArgsGuard() : cx(NULL), seg(NULL) {} - ~InvokeArgsGuard(); - bool pushed() const { return cx != NULL; } -}; - -/* - * This type can be used to call Invoke when the arguments have already been - * pushed onto the stack as part of normal execution. - */ -struct InvokeArgsAlreadyOnTheStack : CallArgs -{ - InvokeArgsAlreadyOnTheStack(Value *vp, uintN argc) : CallArgs(argc, vp + 2) {} -}; - -/* See StackSpace::pushInvokeFrame. 
*/ -class InvokeFrameGuard -{ - friend class StackSpace; - JSContext *cx_; /* null implies nothing pushed */ - JSFrameRegs regs_; - JSFrameRegs *prevRegs_; - public: - InvokeFrameGuard() : cx_(NULL) {} - ~InvokeFrameGuard() { if (pushed()) pop(); } - bool pushed() const { return cx_ != NULL; } - void pop(); - JSStackFrame *fp() const { return regs_.fp; } -}; - -/* Reusable base; not for direct use. */ -class FrameGuard -{ - friend class StackSpace; - JSContext *cx_; /* null implies nothing pushed */ - StackSegment *seg_; - Value *vp_; - JSStackFrame *fp_; - public: - FrameGuard() : cx_(NULL), vp_(NULL), fp_(NULL) {} - JS_REQUIRES_STACK ~FrameGuard(); - bool pushed() const { return cx_ != NULL; } - StackSegment *segment() const { return seg_; } - Value *vp() const { return vp_; } - JSStackFrame *fp() const { return fp_; } -}; - -/* See StackSpace::pushExecuteFrame. */ -class ExecuteFrameGuard : public FrameGuard -{ - friend class StackSpace; - JSFrameRegs regs_; -}; - -/* See StackSpace::pushDummyFrame. */ -class DummyFrameGuard : public FrameGuard -{ - friend class StackSpace; - JSFrameRegs regs_; -}; - -/* See StackSpace::pushGeneratorFrame. */ -class GeneratorFrameGuard : public FrameGuard -{}; - -/* - * Stack layout - * - * Each ThreadData has one associated StackSpace object which allocates all - * segments for the thread. StackSpace performs all such allocations in a - * single, fixed-size buffer using a specific layout scheme that allows some - * associations between segments, frames, and slots to be implicit, rather - * than explicitly stored as pointers. To maintain useful invariants, stack - * space is not given out arbitrarily, but rather allocated/deallocated for - * specific purposes. The use cases currently supported are: calling a function - * with arguments (e.g. Invoke), executing a script (e.g. Execute), inline - * interpreter calls, and pushing "dummy" frames for bookkeeping purposes. See - * associated member functions below. 
- * - * First, we consider the layout of individual segments. (See the - * js::StackSegment comment for terminology.) A non-empty segment (i.e., a - * segment in a context) has the following layout: - * - * initial frame current frame ------. if regs, - * .------------. | | regs->sp - * | V V V - * |segment| slots |frame| slots |frame| slots |frame| slots | - * | ^ | ^ | - * ? <----------' `----------' `----------' - * prev prev prev - * - * Moreover, the bytes in the following ranges form a contiguous array of - * Values that are marked during GC: - * 1. between a segment and its first frame - * 2. between two adjacent frames in a segment - * 3. between a segment's current frame and (if fp->regs) fp->regs->sp - * Thus, the VM must ensure that all such Values are safe to be marked. - * - * An empty segment is followed by arguments that are rooted by the - * StackSpace::invokeArgEnd pointer: - * - * invokeArgEnd - * | - * V - * |segment| slots | - * - * Above the level of segments, a StackSpace is simply a contiguous sequence - * of segments kept in a linked list: - * - * base currentSegment firstUnused end - * | | | | - * V V V V - * |segment| --- |segment| --- |segment| ------- | | - * | ^ | ^ | - * 0 <---' `-----------' `-----------' - * previous previous previous - * - * Both js::StackSpace and JSContext maintain a stack of segments, the top of - * which is the "current segment" for that thread or context, respectively. - * Since different contexts can arbitrarily interleave execution in a single - * thread, these stacks are different enough that a segment needs both - * "previousInMemory" and "previousInContext". - * - * For example, in a single thread, a function in segment S1 in a context CX1 - * may call out into C++ code that reenters the VM in a context CX2, which - * creates a new segment S2 in CX2, and CX1 may or may not equal CX2. - * - * Note that there is some structure to this interleaving of segments: - * 1. 
the inclusion from segments in a context to segments in a thread - * preserves order (in terms of previousInContext and previousInMemory, - * respectively). - * 2. the mapping from stack frames to their containing segment preserves - * order (in terms of prev and previousInContext, respectively). - */ -class StackSpace -{ - Value *base; -#ifdef XP_WIN - mutable Value *commitEnd; -#endif - Value *end; - StackSegment *currentSegment; -#ifdef DEBUG - /* - * Keep track of which segment/frame bumped invokeArgEnd so that - * firstUnused() can assert that, when invokeArgEnd is used as the top of - * the stack, it is being used appropriately. - */ - StackSegment *invokeSegment; - JSStackFrame *invokeFrame; -#endif - Value *invokeArgEnd; - - friend class InvokeArgsGuard; - friend class InvokeFrameGuard; - friend class FrameGuard; - - bool pushSegmentForInvoke(JSContext *cx, uintN argc, InvokeArgsGuard *ag); - void popSegmentForInvoke(const InvokeArgsGuard &ag); - - bool pushInvokeFrameSlow(JSContext *cx, const InvokeArgsGuard &ag, - InvokeFrameGuard *fg); - void popInvokeFrameSlow(const CallArgs &args); - - bool getSegmentAndFrame(JSContext *cx, uintN vplen, uintN nslots, - FrameGuard *fg) const; - void pushSegmentAndFrame(JSContext *cx, JSFrameRegs *regs, FrameGuard *fg); - void popSegmentAndFrame(JSContext *cx); - - struct EnsureSpaceCheck { - inline bool operator()(const StackSpace &, JSContext *, Value *, uintN); - }; - - struct LimitCheck { - JSStackFrame *base; - Value **limit; - LimitCheck(JSStackFrame *base, Value **limit) : base(base), limit(limit) {} - inline bool operator()(const StackSpace &, JSContext *, Value *, uintN); - }; - - template - inline JSStackFrame *getCallFrame(JSContext *cx, Value *sp, uintN nactual, - JSFunction *fun, JSScript *script, - uint32 *pflags, Check check) const; - - inline void popInvokeArgs(const InvokeArgsGuard &args); - inline void popInvokeFrame(const InvokeFrameGuard &ag); - - inline Value *firstUnused() const; - - inline bool 
isCurrentAndActive(JSContext *cx) const; - friend class AllFramesIter; - StackSegment *getCurrentSegment() const { return currentSegment; } - -#ifdef XP_WIN - /* Commit more memory from the reserved stack space. */ - JS_FRIEND_API(bool) bumpCommit(Value *from, ptrdiff_t nvals) const; -#endif - - public: - static const size_t CAPACITY_VALS = 512 * 1024; - static const size_t CAPACITY_BYTES = CAPACITY_VALS * sizeof(Value); - static const size_t COMMIT_VALS = 16 * 1024; - static const size_t COMMIT_BYTES = COMMIT_VALS * sizeof(Value); - - /* - * SunSpider and v8bench have roughly an average of 9 slots per script. - * Our heuristic for a quick over-recursion check uses a generous slot - * count based on this estimate. We take this frame size and multiply it - * by the old recursion limit from the interpreter. - * - * Worst case, if an average size script (<=9 slots) over recurses, it'll - * effectively be the same as having increased the old inline call count - * to <= 5,000. - */ - static const size_t STACK_QUOTA = (VALUES_PER_STACK_FRAME + 18) * - JS_MAX_INLINE_CALL_COUNT; - - StackSpace(); - ~StackSpace(); - - bool init(); - -#ifdef DEBUG - template - bool contains(T *t) const { - char *v = (char *)t; - JS_ASSERT(size_t(-1) - uintptr_t(t) >= sizeof(T)); - return v >= (char *)base && v + sizeof(T) <= (char *)end; - } -#endif - - /* - * When we LeaveTree, we need to rebuild the stack, which requires stack - * allocation. There is no good way to handle an OOM for these allocations, - * so this function checks that they cannot occur using the size of the - * TraceNativeStorage as a conservative upper bound. - */ - inline bool ensureEnoughSpaceToEnterTrace(); - - /* +1 for slow native's stack frame. */ - static const ptrdiff_t MAX_TRACE_SPACE_VALS = - MAX_NATIVE_STACK_SLOTS + MAX_CALL_STACK_ENTRIES * VALUES_PER_STACK_FRAME + - (VALUES_PER_STACK_SEGMENT + VALUES_PER_STACK_FRAME /* synthesized slow native */); - - /* Mark all segments, frames, and slots on the stack. 
*/ - JS_REQUIRES_STACK void mark(JSTracer *trc); - - /* - * For all five use cases below: - * - The boolean-valued functions call js_ReportOutOfScriptQuota on OOM. - * - The "get*Frame" functions do not change any global state, they just - * check OOM and return pointers to an uninitialized frame with the - * requested missing arguments/slots. Only once the "push*Frame" - * function has been called is global state updated. Thus, between - * "get*Frame" and "push*Frame", the frame and slots are unrooted. - * - The "push*Frame" functions will set fp->prev; the caller needn't. - * - Functions taking "*Guard" arguments will use the guard's destructor - * to pop the allocation. The caller must ensure the guard has the - * appropriate lifetime. - * - The get*Frame functions put the 'nmissing' slots contiguously after - * the arguments. - */ - - /* - * pushInvokeArgs allocates |argc + 2| rooted values that will be passed as - * the arguments to Invoke. A single allocation can be used for multiple - * Invoke calls. The InvokeArgumentsGuard passed to Invoke must come from - * an immediately-enclosing (stack-wise) call to pushInvokeArgs. - */ - bool pushInvokeArgs(JSContext *cx, uintN argc, InvokeArgsGuard *ag); - - /* These functions are called inside Invoke, not Invoke clients. */ - bool getInvokeFrame(JSContext *cx, const CallArgs &args, JSFunction *fun, - JSScript *script, uint32 *flags, InvokeFrameGuard *fg) const; - - void pushInvokeFrame(JSContext *cx, const CallArgs &args, InvokeFrameGuard *fg); - - /* These functions are called inside Execute, not Execute clients. */ - bool getExecuteFrame(JSContext *cx, JSScript *script, ExecuteFrameGuard *fg) const; - void pushExecuteFrame(JSContext *cx, JSObject *initialVarObj, ExecuteFrameGuard *fg); - - /* Get the segment which contains the target frame. 
*/ - js::StackSegment *containingSegment(const JSStackFrame *target); - - /* - * Since RAII cannot be used for inline frames, callers must manually - * call pushInlineFrame/popInlineFrame. - */ - inline JSStackFrame *getInlineFrame(JSContext *cx, Value *sp, uintN nactual, - JSFunction *fun, JSScript *script, - uint32 *flags) const; - inline void pushInlineFrame(JSContext *cx, JSScript *script, JSStackFrame *fp, - JSFrameRegs *regs); - inline void popInlineFrame(JSContext *cx, JSStackFrame *prev, js::Value *newsp); - - /* These functions are called inside SendToGenerator. */ - bool getGeneratorFrame(JSContext *cx, uintN vplen, uintN nslots, - GeneratorFrameGuard *fg); - void pushGeneratorFrame(JSContext *cx, JSFrameRegs *regs, GeneratorFrameGuard *fg); - - /* Pushes a JSStackFrame::isDummyFrame. */ - bool pushDummyFrame(JSContext *cx, JSObject &scopeChain, DummyFrameGuard *fg); - - /* Check and bump the given stack limit. */ - inline JSStackFrame *getInlineFrameWithinLimit(JSContext *cx, Value *sp, uintN nactual, - JSFunction *fun, JSScript *script, uint32 *flags, - JSStackFrame *base, Value **limit) const; - - /* - * Compute a stack limit for entering method jit code which allows the - * method jit to check for end-of-stack and over-recursion with a single - * comparison. See STACK_QUOTA above. - */ - inline Value *getStackLimit(JSContext *cx); - - /* - * Try to bump the given 'limit' by bumping the commit limit. Return false - * if fully committed or if 'limit' exceeds 'base' + STACK_QUOTA. - */ - bool bumpCommitAndLimit(JSStackFrame *base, Value *from, uintN nvals, Value **limit) const; - - /* - * Allocate nvals on the top of the stack, report error on failure. - * N.B. the caller must ensure |from >= firstUnused()|. 
- */ - inline bool ensureSpace(JSContext *maybecx, Value *from, ptrdiff_t nvals) const; -}; - -JS_STATIC_ASSERT(StackSpace::CAPACITY_VALS % StackSpace::COMMIT_VALS == 0); - -/* - * While |cx->fp|'s pc/sp are available in |cx->regs|, to compute the saved - * value of pc/sp for any other frame, it is necessary to know about that - * frame's next-frame. This iterator maintains this information when walking - * a chain of stack frames starting at |cx->fp|. - * - * Usage: - * for (FrameRegsIter i(cx); !i.done(); ++i) - * ... i.fp() ... i.sp() ... i.pc() - */ -class FrameRegsIter -{ - JSContext *cx; - StackSegment *curseg; - JSStackFrame *curfp; - Value *cursp; - jsbytecode *curpc; - - void initSlow(); - void incSlow(JSStackFrame *fp, JSStackFrame *prev); - - public: - JS_REQUIRES_STACK inline FrameRegsIter(JSContext *cx); - - bool done() const { return curfp == NULL; } - inline FrameRegsIter &operator++(); - - JSStackFrame *fp() const { return curfp; } - Value *sp() const { return cursp; } - jsbytecode *pc() const { return curpc; } -}; - -/* - * Utility class for iteration over all active stack frames. - */ -class AllFramesIter -{ -public: - AllFramesIter(JSContext *cx); - - bool done() const { return curfp == NULL; } - AllFramesIter& operator++(); - - JSStackFrame *fp() const { return curfp; } - -private: - StackSegment *curcs; - JSStackFrame *curfp; -}; - /* * GetSrcNote cache to avoid O(n^2) growth in finding a source note for a * given pc in a script. We use the script->code pointer to tag the cache, @@ -930,7 +288,7 @@ struct JSThread { } }; -#define JS_THREAD_DATA(cx) (&(cx)->thread->data) +#define JS_THREAD_DATA(cx) (&(cx)->thread()->data) extern JSThread * js_CurrentThread(JSRuntime *rt); @@ -1610,6 +968,7 @@ typedef HashSetfp); - return regs->fp; - } - - JSStackFrame* maybefp() { - JS_ASSERT_IF(regs, regs->fp); - return regs ? 
regs->fp : NULL; - } - - bool hasfp() { - JS_ASSERT_IF(regs, regs->fp); - return !!regs; - } - - public: - friend class js::StackSpace; - friend bool js::Interpret(JSContext *, JSStackFrame *, uintN, JSInterpMode); + /* ContextStack convenience functions */ + bool running() const { return stack.running(); } + js::StackFrame* fp() const { return stack.fp(); } + js::StackFrame* maybefp() const { return stack.maybefp(); } + js::FrameRegs& regs() const { return stack.regs(); } + js::FrameRegs* maybeRegs() const { return stack.maybeRegs(); } + /* Set cx->compartment based on the current scope chain. */ void resetCompartment(); - void wrapPendingException(); - /* For grep-ability, changes to 'regs' should call this function. */ - void setCurrentRegs(JSFrameRegs *regs) { - JS_ASSERT_IF(regs, regs->fp); - this->regs = regs; - } + /* Wrap cx->exception for the current compartment. */ + void wrapPendingException(); /* Temporary arena pool used while compiling and decompiling. */ JSArenaPool tempPool; @@ -1719,76 +1060,15 @@ struct JSContext void *data; void *data2; - private: - /* Linked list of segments. See StackSegment. */ - js::StackSegment *currentSegment; - - public: - void assertSegmentsInSync() const { -#ifdef DEBUG - if (regs) { - JS_ASSERT(currentSegment->isActive()); - if (js::StackSegment *prev = currentSegment->getPreviousInContext()) - JS_ASSERT(!prev->isActive()); - } else { - JS_ASSERT_IF(currentSegment, !currentSegment->isActive()); - } -#endif - } - - /* Return whether this context has an active segment. */ - bool hasActiveSegment() const { - assertSegmentsInSync(); - return !!regs; - } - - /* Assuming there is an active segment, return it. */ - js::StackSegment *activeSegment() const { - JS_ASSERT(hasActiveSegment()); - return currentSegment; - } - - /* Return the current segment, which may or may not be active. 
*/ - js::StackSegment *getCurrentSegment() const { - assertSegmentsInSync(); - return currentSegment; - } - inline js::RegExpStatics *regExpStatics(); - private: - /* Add the given segment to the list as the new active segment. */ - void pushSegmentAndFrame(js::StackSegment *newseg, JSFrameRegs ®s); - - /* Remove the active segment and make the next segment active. */ - void popSegmentAndFrame(); - - public: - /* Mark the top segment as suspended, without pushing a new one. */ - void saveActiveSegment(); - - /* Undoes calls to suspendActiveSegment. */ - void restoreSegment(); - - /* Search the call stack for the nearest frame with static level targetLevel. */ - JSStackFrame *findFrameAtLevel(uintN targetLevel) const { - JSStackFrame *fp = regs->fp; - while (true) { - JS_ASSERT(fp && fp->isScriptFrame()); - if (fp->script()->staticLevel == targetLevel) - break; - fp = fp->prev(); - } - return fp; - } - public: /* * The default script compilation version can be set iff there is no code running. * This typically occurs via the JSAPI right after a context is constructed. */ bool canSetDefaultVersion() const { - return !regs && !hasVersionOverride; + return !stack.running() && !hasVersionOverride; } /* Force a version for future script compilation. */ @@ -1825,23 +1105,17 @@ struct JSContext return true; } - private: /* - * If there is no code currently executing, turn the override version into - * the default version. - * - * NB: the only time the version is potentially capable of migrating is - * on return from the Execute or ExternalInvoke paths as they call through - * JSContext::popSegmentAndFrame. + * If there is no code on the stack, turn the override version into the + * default version. 
*/ void maybeMigrateVersionOverride() { - if (JS_LIKELY(!isVersionOverridden() || currentSegment)) + if (JS_LIKELY(!isVersionOverridden() && stack.empty())) return; defaultVersion = versionOverride; clearVersionOverride(); } - public: /* * Return: * - The override version, if there is an override version. @@ -1854,13 +1128,13 @@ struct JSContext if (hasVersionOverride) return versionOverride; - if (regs) { + if (stack.running()) { /* There may be a scripted function somewhere on the stack! */ - JSStackFrame *fp = regs->fp; - while (fp && !fp->isScriptFrame()) - fp = fp->prev(); - if (fp) - return fp->script()->getVersion(); + js::StackFrame *f = fp(); + while (f && !f->isScriptFrame()) + f = f->prev(); + if (f) + return f->script()->getVersion(); } return defaultVersion; @@ -1895,7 +1169,14 @@ struct JSContext bool hasAtLineOption() const { return hasRunOption(JSOPTION_ATLINE); } #ifdef JS_THREADSAFE - JSThread *thread; + private: + JSThread *thread_; + public: + JSThread *thread() const { return thread_; } + + void setThread(JSThread *thread); + static const size_t threadOffset() { return offsetof(JSContext, thread_); } + unsigned outstandingRequests;/* number of JS_BeginRequest calls without the corresponding JS_EndRequest. */ @@ -1940,6 +1221,8 @@ struct JSContext #ifdef JS_METHODJIT bool methodJitEnabled; bool profilingEnabled; + + inline js::mjit::JaegerCompartment *jaegerCompartment(); #endif /* Caller must be holding runtime->gcLock. */ @@ -1973,12 +1256,8 @@ struct JSContext js::Vector genStack; public: -#ifdef JS_METHODJIT - inline js::mjit::JaegerCompartment *jaegerCompartment(); -#endif - /* Return the generator object for the given generator frame. */ - JSGenerator *generatorFor(JSStackFrame *fp) const; + JSGenerator *generatorFor(js::StackFrame *fp) const; /* Early OOM-check. 
*/ inline bool ensureGeneratorStackSpace(); @@ -2037,14 +1316,10 @@ struct JSContext void purge(); - js::StackSpace &stack() const { - return JS_THREAD_DATA(this)->stackSpace; - } - #ifdef DEBUG void assertValidStackDepth(uintN depth) { - JS_ASSERT(0 <= regs->sp - regs->fp->base()); - JS_ASSERT(depth <= uintptr_t(regs->sp - regs->fp->base())); + JS_ASSERT(0 <= regs().sp - fp()->base()); + JS_ASSERT(depth <= uintptr_t(regs().sp - fp()->base())); } #else void assertValidStackDepth(uintN /*depth*/) {} @@ -2076,30 +1351,28 @@ struct JSContext JS_FRIEND_API(void) checkMallocGCPressure(void *p); }; /* struct JSContext */ +namespace js { + #ifdef JS_THREADSAFE -# define JS_THREAD_ID(cx) ((cx)->thread ? (cx)->thread->id : 0) +# define JS_THREAD_ID(cx) ((cx)->thread() ? (cx)->thread()->id : 0) #endif #if defined JS_THREADSAFE && defined DEBUG -namespace js { - class AutoCheckRequestDepth { JSContext *cx; public: - AutoCheckRequestDepth(JSContext *cx) : cx(cx) { cx->thread->checkRequestDepth++; } + AutoCheckRequestDepth(JSContext *cx) : cx(cx) { cx->thread()->checkRequestDepth++; } ~AutoCheckRequestDepth() { - JS_ASSERT(cx->thread->checkRequestDepth != 0); - cx->thread->checkRequestDepth--; + JS_ASSERT(cx->thread()->checkRequestDepth != 0); + cx->thread()->checkRequestDepth--; } }; -} - # define CHECK_REQUEST(cx) \ - JS_ASSERT((cx)->thread); \ - JS_ASSERT((cx)->thread->data.requestDepth || (cx)->thread == (cx)->runtime->gcThread); \ + JS_ASSERT((cx)->thread()); \ + JS_ASSERT((cx)->thread()->data.requestDepth || (cx)->thread() == (cx)->runtime->gcThread); \ AutoCheckRequestDepth _autoCheckRequestDepth(cx); #else @@ -2108,22 +1381,20 @@ class AutoCheckRequestDepth { #endif static inline uintN -FramePCOffset(JSContext *cx, JSStackFrame* fp) +FramePCOffset(JSContext *cx, js::StackFrame* fp) { jsbytecode *pc = fp->hasImacropc() ? 
fp->imacropc() : fp->pc(cx); return uintN(pc - fp->script()->code); } static inline JSAtom ** -FrameAtomBase(JSContext *cx, JSStackFrame *fp) +FrameAtomBase(JSContext *cx, js::StackFrame *fp) { return fp->hasImacropc() ? COMMON_ATOMS_START(&cx->runtime->atomState) : fp->script()->atomMap.vector; } -namespace js { - struct AutoResolving { public: enum Kind { @@ -2992,10 +2263,11 @@ js_ReportOutOfMemory(JSContext *cx); * Report that cx->scriptStackQuota is exhausted. */ void -js_ReportOutOfScriptQuota(JSContext *cx); +js_ReportOutOfScriptQuota(JSContext *maybecx); -extern JS_FRIEND_API(void) -js_ReportOverRecursed(JSContext *cx); +/* JS_CHECK_RECURSION is used outside JS, so JS_FRIEND_API. */ +JS_FRIEND_API(void) +js_ReportOverRecursed(JSContext *maybecx); extern JS_FRIEND_API(void) js_ReportAllocationOverflow(JSContext *cx); @@ -3055,8 +2327,8 @@ js_ReportValueErrorFlags(JSContext *cx, uintN flags, const uintN errorNumber, extern JSErrorFormatString js_ErrorFormatString[JSErr_Limit]; #ifdef JS_THREADSAFE -# define JS_ASSERT_REQUEST_DEPTH(cx) (JS_ASSERT((cx)->thread), \ - JS_ASSERT((cx)->thread->data.requestDepth >= 1)) +# define JS_ASSERT_REQUEST_DEPTH(cx) (JS_ASSERT((cx)->thread()), \ + JS_ASSERT((cx)->thread()->data.requestDepth >= 1)) #else # define JS_ASSERT_REQUEST_DEPTH(cx) ((void) 0) #endif @@ -3092,8 +2364,8 @@ TriggerAllOperationCallbacks(JSRuntime *rt); } /* namespace js */ -extern JSStackFrame * -js_GetScriptedCaller(JSContext *cx, JSStackFrame *fp); +extern js::StackFrame * +js_GetScriptedCaller(JSContext *cx, js::StackFrame *fp); extern jsbytecode* js_GetCurrentBytecodePC(JSContext* cx); @@ -3116,7 +2388,7 @@ LeaveTrace(JSContext *cx); * * Defined in jstracer.cpp if JS_TRACER is defined. 
*/ -static JS_FORCES_STACK JS_INLINE JSStackFrame * +static JS_FORCES_STACK JS_INLINE js::StackFrame * js_GetTopStackFrame(JSContext *cx) { js::LeaveTrace(cx); diff --git a/js/src/jscntxtinlines.h b/js/src/jscntxtinlines.h index 8c2a57a4fe74..fb00563431ba 100644 --- a/js/src/jscntxtinlines.h +++ b/js/src/jscntxtinlines.h @@ -43,7 +43,6 @@ #include "jscntxt.h" #include "jscompartment.h" -#include "jsparse.h" #include "jsstaticcheck.h" #include "jsxml.h" #include "jsregexp.h" @@ -64,7 +63,7 @@ GetGlobalForScopeChain(JSContext *cx) */ VOUCH_DOES_NOT_REQUIRE_STACK(); - if (cx->hasfp()) + if (cx->running()) return cx->fp()->scopeChain().getGlobal(); JSObject *scope = cx->globalObject; @@ -76,425 +75,6 @@ GetGlobalForScopeChain(JSContext *cx) return scope->asGlobal(); } -} - -#ifdef JS_METHODJIT -inline js::mjit::JaegerCompartment *JSContext::jaegerCompartment() -{ - return compartment->jaegerCompartment; -} -#endif - -inline bool -JSContext::ensureGeneratorStackSpace() -{ - bool ok = genStack.reserve(genStack.length() + 1); - if (!ok) - js_ReportOutOfMemory(this); - return ok; -} - -inline js::RegExpStatics * -JSContext::regExpStatics() -{ - return js::RegExpStatics::extractFrom(js::GetGlobalForScopeChain(this)); -} - -namespace js { - -JS_REQUIRES_STACK JS_ALWAYS_INLINE JSFrameRegs * -StackSegment::getCurrentRegs() const -{ - JS_ASSERT(inContext()); - return isActive() ? 
cx->regs : getSuspendedRegs(); -} - -JS_REQUIRES_STACK JS_ALWAYS_INLINE JSStackFrame * -StackSegment::getCurrentFrame() const -{ - return getCurrentRegs()->fp; -} - -JS_REQUIRES_STACK inline Value * -StackSpace::firstUnused() const -{ - StackSegment *seg = currentSegment; - if (!seg) { - JS_ASSERT(invokeArgEnd == NULL); - return base; - } - if (seg->inContext()) { - Value *sp = seg->getCurrentRegs()->sp; - if (invokeArgEnd > sp) { - JS_ASSERT(invokeSegment == currentSegment); - JS_ASSERT_IF(seg->maybeContext()->hasfp(), - invokeFrame == seg->maybeContext()->fp()); - return invokeArgEnd; - } - return sp; - } - JS_ASSERT(invokeArgEnd); - JS_ASSERT(invokeSegment == currentSegment); - return invokeArgEnd; -} - - -/* Inline so we don't need the friend API. */ -JS_ALWAYS_INLINE bool -StackSpace::isCurrentAndActive(JSContext *cx) const -{ -#ifdef DEBUG - JS_ASSERT_IF(cx->getCurrentSegment(), - cx->getCurrentSegment()->maybeContext() == cx); - cx->assertSegmentsInSync(); -#endif - return currentSegment && - currentSegment->isActive() && - currentSegment == cx->getCurrentSegment(); -} - -STATIC_POSTCONDITION(!return || ubound(from) >= nvals) -JS_ALWAYS_INLINE bool -StackSpace::ensureSpace(JSContext *maybecx, Value *from, ptrdiff_t nvals) const -{ - JS_ASSERT(from >= firstUnused()); -#ifdef XP_WIN - JS_ASSERT(from <= commitEnd); - if (commitEnd - from >= nvals) - goto success; - if (end - from < nvals) { - if (maybecx) - js_ReportOutOfScriptQuota(maybecx); - return false; - } - if (!bumpCommit(from, nvals)) { - if (maybecx) - js_ReportOutOfScriptQuota(maybecx); - return false; - } - goto success; -#else - if (end - from < nvals) { - if (maybecx) - js_ReportOutOfScriptQuota(maybecx); - return false; - } - goto success; -#endif - success: -#ifdef DEBUG - memset(from, 0xde, nvals * sizeof(js::Value)); -#endif - return true; -} - -JS_ALWAYS_INLINE bool -StackSpace::ensureEnoughSpaceToEnterTrace() -{ -#ifdef XP_WIN - return ensureSpace(NULL, firstUnused(), MAX_TRACE_SPACE_VALS); 
-#endif - return end - firstUnused() > MAX_TRACE_SPACE_VALS; -} - -JS_ALWAYS_INLINE bool -StackSpace::EnsureSpaceCheck::operator()(const StackSpace &stack, JSContext *cx, - Value *from, uintN nvals) -{ - return stack.ensureSpace(cx, from, nvals); -} - -JS_ALWAYS_INLINE bool -StackSpace::LimitCheck::operator()(const StackSpace &stack, JSContext *cx, - Value *from, uintN nvals) -{ - JS_ASSERT(from == stack.firstUnused()); - JS_ASSERT(from < *limit); - if (*limit - from >= ptrdiff_t(nvals)) - return true; - if (stack.bumpCommitAndLimit(base, from, nvals, limit)) - return true; - js_ReportOverRecursed(cx); - return false; -} - -JS_REQUIRES_STACK JS_ALWAYS_INLINE bool -StackSpace::pushInvokeArgs(JSContext *cx, uintN argc, InvokeArgsGuard *ag) -{ - if (JS_UNLIKELY(!isCurrentAndActive(cx))) - return pushSegmentForInvoke(cx, argc, ag); - - Value *sp = cx->regs->sp; - Value *start = invokeArgEnd > sp ? invokeArgEnd : sp; - JS_ASSERT(start == firstUnused()); - uintN nvals = 2 + argc; - if (!ensureSpace(cx, start, nvals)) - return false; - - Value *vp = start; - Value *vpend = vp + nvals; - /* Don't need to MakeRangeGCSafe: the VM stack is conservatively marked. */ - - /* Use invokeArgEnd to root [vp, vpend) until the frame is pushed. 
*/ - ag->prevInvokeArgEnd = invokeArgEnd; - invokeArgEnd = vpend; -#ifdef DEBUG - ag->prevInvokeSegment = invokeSegment; - invokeSegment = currentSegment; - ag->prevInvokeFrame = invokeFrame; - invokeFrame = cx->maybefp(); -#endif - - ag->cx = cx; - ImplicitCast(*ag) = CallArgsFromVp(argc, vp); - return true; -} - -JS_REQUIRES_STACK JS_ALWAYS_INLINE void -StackSpace::popInvokeArgs(const InvokeArgsGuard &ag) -{ - if (JS_UNLIKELY(ag.seg != NULL)) { - popSegmentForInvoke(ag); - return; - } - - JS_ASSERT(isCurrentAndActive(ag.cx)); - JS_ASSERT(invokeSegment == currentSegment); - JS_ASSERT(invokeFrame == ag.cx->maybefp()); - JS_ASSERT(invokeArgEnd == ag.argv() + ag.argc()); - -#ifdef DEBUG - invokeSegment = ag.prevInvokeSegment; - invokeFrame = ag.prevInvokeFrame; -#endif - invokeArgEnd = ag.prevInvokeArgEnd; -} - -JS_ALWAYS_INLINE -InvokeArgsGuard::~InvokeArgsGuard() -{ - if (JS_UNLIKELY(!pushed())) - return; - cx->stack().popInvokeArgs(*this); -} - -template -JS_REQUIRES_STACK JS_ALWAYS_INLINE JSStackFrame * -StackSpace::getCallFrame(JSContext *cx, Value *firstUnused, uintN nactual, - JSFunction *fun, JSScript *script, uint32 *flags, - Check check) const -{ - JS_ASSERT(fun->script() == script); - - /* Include an extra sizeof(JSStackFrame) for the method-jit. */ - uintN nvals = VALUES_PER_STACK_FRAME + script->nslots; - uintN nformal = fun->nargs; - - /* Maintain layout invariant: &formalArgs[0] == ((Value *)fp) - nformal. 
*/ - - if (nactual == nformal) { - if (JS_UNLIKELY(!check(*this, cx, firstUnused, nvals))) - return NULL; - return reinterpret_cast(firstUnused); - } - - if (nactual < nformal) { - *flags |= JSFRAME_UNDERFLOW_ARGS; - uintN nmissing = nformal - nactual; - if (JS_UNLIKELY(!check(*this, cx, firstUnused, nmissing + nvals))) - return NULL; - SetValueRangeToUndefined(firstUnused, nmissing); - return reinterpret_cast(firstUnused + nmissing); - } - - *flags |= JSFRAME_OVERFLOW_ARGS; - uintN ncopy = 2 + nformal; - if (JS_UNLIKELY(!check(*this, cx, firstUnused, ncopy + nvals))) - return NULL; - - Value *dst = firstUnused; - Value *src = firstUnused - (2 + nactual); - PodCopy(dst, src, ncopy); - Debug_SetValueRangeToCrashOnTouch(src, ncopy); - return reinterpret_cast(firstUnused + ncopy); -} - -JS_REQUIRES_STACK JS_ALWAYS_INLINE bool -StackSpace::getInvokeFrame(JSContext *cx, const CallArgs &args, - JSFunction *fun, JSScript *script, - uint32 *flags, InvokeFrameGuard *fg) const -{ - JS_ASSERT(firstUnused() == args.argv() + args.argc()); - - Value *firstUnused = args.argv() + args.argc(); - fg->regs_.fp = getCallFrame(cx, firstUnused, args.argc(), fun, script, flags, - EnsureSpaceCheck()); - fg->regs_.sp = fg->regs_.fp->slots() + script->nfixed; - fg->regs_.pc = script->code; - - return fg->regs_.fp != NULL; -} - -JS_REQUIRES_STACK JS_ALWAYS_INLINE void -StackSpace::pushInvokeFrame(JSContext *cx, const CallArgs &args, - InvokeFrameGuard *fg) -{ - JS_ASSERT(firstUnused() == args.argv() + args.argc()); - - if (JS_UNLIKELY(!currentSegment->inContext())) { - cx->pushSegmentAndFrame(currentSegment, fg->regs_); - } else { - fg->prevRegs_ = cx->regs; - cx->setCurrentRegs(&fg->regs_); - } - - fg->cx_ = cx; - JS_ASSERT(isCurrentAndActive(cx)); -} - -JS_REQUIRES_STACK JS_ALWAYS_INLINE void -StackSpace::popInvokeFrame(const InvokeFrameGuard &fg) -{ - JSContext *cx = fg.cx_; - JSStackFrame *fp = fg.regs_.fp; - - PutActivationObjects(cx, fp); - - JS_ASSERT(isCurrentAndActive(cx)); - if 
(JS_UNLIKELY(currentSegment->getInitialFrame() == fp)) { - cx->popSegmentAndFrame(); - } else { - JS_ASSERT(&fg.regs_ == cx->regs); - JS_ASSERT(fp->prev_ == fg.prevRegs_->fp); - JS_ASSERT(fp->prevpc() == fg.prevRegs_->pc); - cx->setCurrentRegs(fg.prevRegs_); - } -} - -JS_ALWAYS_INLINE void -InvokeFrameGuard::pop() -{ - JS_ASSERT(pushed()); - cx_->stack().popInvokeFrame(*this); - cx_ = NULL; -} - -JS_REQUIRES_STACK JS_ALWAYS_INLINE JSStackFrame * -StackSpace::getInlineFrame(JSContext *cx, Value *sp, uintN nactual, - JSFunction *fun, JSScript *script, uint32 *flags) const -{ - JS_ASSERT(isCurrentAndActive(cx)); - JS_ASSERT(cx->hasActiveSegment()); - JS_ASSERT(cx->regs->sp == sp); - - return getCallFrame(cx, sp, nactual, fun, script, flags, EnsureSpaceCheck()); -} - -JS_REQUIRES_STACK JS_ALWAYS_INLINE JSStackFrame * -StackSpace::getInlineFrameWithinLimit(JSContext *cx, Value *sp, uintN nactual, - JSFunction *fun, JSScript *script, uint32 *flags, - JSStackFrame *base, Value **limit) const -{ - JS_ASSERT(isCurrentAndActive(cx)); - JS_ASSERT(cx->hasActiveSegment()); - JS_ASSERT(cx->regs->sp == sp); - - return getCallFrame(cx, sp, nactual, fun, script, flags, LimitCheck(base, limit)); -} - -JS_REQUIRES_STACK JS_ALWAYS_INLINE void -StackSpace::pushInlineFrame(JSContext *cx, JSScript *script, JSStackFrame *fp, - JSFrameRegs *regs) -{ - JS_ASSERT(isCurrentAndActive(cx)); - JS_ASSERT(cx->regs == regs && script == fp->script()); - - regs->fp = fp; - regs->pc = script->code; - regs->sp = fp->slots() + script->nfixed; -} - -JS_REQUIRES_STACK JS_ALWAYS_INLINE void -StackSpace::popInlineFrame(JSContext *cx, JSStackFrame *prev, Value *newsp) -{ - JSFrameRegs *regs = cx->regs; - JSStackFrame *fp = regs->fp; - - JS_ASSERT(isCurrentAndActive(cx)); - JS_ASSERT(cx->hasActiveSegment()); - JS_ASSERT(fp->prev_ == prev); - JS_ASSERT(!fp->hasImacropc()); - JS_ASSERT(prev->base() <= newsp && newsp <= fp->formalArgsEnd()); - - PutActivationObjects(cx, fp); - - regs->pc = prev->pc(cx, fp); - 
regs->fp = prev; - regs->sp = newsp; -} - -JS_ALWAYS_INLINE Value * -StackSpace::getStackLimit(JSContext *cx) -{ - Value *sp = cx->regs->sp; - JS_ASSERT(sp == firstUnused()); - Value *limit = sp + STACK_QUOTA; - - /* - * Try to reserve the whole STACK_QUOTA. If that fails, though, just - * reserve the minimum required space: enough for the nslots + an - * additional stack frame. - */ -#ifdef XP_WIN - if (JS_LIKELY(limit <= commitEnd)) - return limit; - if (ensureSpace(NULL /* don't report error */, sp, STACK_QUOTA)) - return limit; - uintN minimum = cx->fp()->numSlots() + VALUES_PER_STACK_FRAME; - return ensureSpace(cx, sp, minimum) ? sp + minimum : NULL; -#else - if (JS_LIKELY(limit <= end)) - return limit; - uintN minimum = cx->fp()->numSlots() + VALUES_PER_STACK_FRAME; - return ensureSpace(cx, sp, minimum) ? sp + minimum : NULL; -#endif -} - -JS_REQUIRES_STACK inline -FrameRegsIter::FrameRegsIter(JSContext *cx) - : cx(cx) -{ - curseg = cx->getCurrentSegment(); - if (JS_UNLIKELY(!curseg || !curseg->isActive())) { - initSlow(); - return; - } - JS_ASSERT(cx->regs->fp); - curfp = cx->regs->fp; - cursp = cx->regs->sp; - curpc = cx->regs->pc; - return; -} - -inline FrameRegsIter & -FrameRegsIter::operator++() -{ - JSStackFrame *fp = curfp; - JSStackFrame *prev = curfp = curfp->prev(); - if (!prev) - return *this; - - curpc = curfp->pc(cx, fp); - - if (JS_UNLIKELY(fp == curseg->getInitialFrame())) { - incSlow(fp, prev); - return *this; - } - - cursp = fp->formalArgsEnd(); - return *this; -} - inline GSNCache * GetGSNCache(JSContext *cx) { @@ -528,7 +108,7 @@ class CompartmentChecker public: explicit CompartmentChecker(JSContext *cx) : context(cx), compartment(cx->compartment) { - check(cx->hasfp() ? JS_GetGlobalForScopeChain(cx) : cx->globalObject); + check(cx->running() ? 
JS_GetGlobalForScopeChain(cx) : cx->globalObject); VOUCH_DOES_NOT_REQUIRE_STACK(); } @@ -613,7 +193,7 @@ class CompartmentChecker } } - void check(JSStackFrame *fp) { + void check(StackFrame *fp) { check(&fp->scopeChain()); } }; @@ -811,6 +391,28 @@ CanLeaveTrace(JSContext *cx) } /* namespace js */ +#ifdef JS_METHODJIT +inline js::mjit::JaegerCompartment *JSContext::jaegerCompartment() +{ + return compartment->jaegerCompartment; +} +#endif + +inline bool +JSContext::ensureGeneratorStackSpace() +{ + bool ok = genStack.reserve(genStack.length() + 1); + if (!ok) + js_ReportOutOfMemory(this); + return ok; +} + +inline js::RegExpStatics * +JSContext::regExpStatics() +{ + return js::RegExpStatics::extractFrom(js::GetGlobalForScopeChain(this)); +} + inline void JSContext::setPendingException(js::Value v) { this->throwing = true; diff --git a/js/src/jscompartment.cpp b/js/src/jscompartment.cpp index f2a4e2e2bcad..60d44e1611df 100644 --- a/js/src/jscompartment.cpp +++ b/js/src/jscompartment.cpp @@ -222,7 +222,7 @@ JSCompartment::wrap(JSContext *cx, Value *vp) * This loses us some transparency, and is generally very cheesy. */ JSObject *global; - if (cx->hasfp()) { + if (cx->running()) { global = cx->fp()->scopeChain().getGlobal(); } else { global = cx->globalObject; diff --git a/js/src/jscompartment.h b/js/src/jscompartment.h index 86b4bed1092c..92194b396d43 100644 --- a/js/src/jscompartment.h +++ b/js/src/jscompartment.h @@ -136,6 +136,10 @@ struct TracerState */ struct TraceNativeStorage { + /* Max number of stack slots/frame that may need to be restored in LeaveTree. 
*/ + static const size_t MAX_NATIVE_STACK_SLOTS = 4096; + static const size_t MAX_CALL_STACK_ENTRIES = 500; + double stack_global_buf[MAX_NATIVE_STACK_SLOTS + GLOBAL_SLOTS_BUFFER_SIZE]; FrameInfo *callstack_buf[MAX_CALL_STACK_ENTRIES]; @@ -456,7 +460,7 @@ struct JS_FRIEND_API(JSCompartment) { js::NativeIterCache nativeIterCache; - typedef js::LazilyConstructed LazyToSourceCache; + typedef js::Maybe LazyToSourceCache; LazyToSourceCache toSourceCache; JSCompartment(JSRuntime *rt); diff --git a/js/src/jsdate.cpp b/js/src/jsdate.cpp index 5221cf37411a..8ea1188ed5f2 100644 --- a/js/src/jsdate.cpp +++ b/js/src/jsdate.cpp @@ -73,6 +73,8 @@ #include "jsobjinlines.h" +#include "vm/Stack-inl.h" + using namespace js; /* @@ -2106,13 +2108,13 @@ date_toJSON(JSContext *cx, uintN argc, Value *vp) /* Step 6. */ LeaveTrace(cx); InvokeArgsGuard args; - if (!cx->stack().pushInvokeArgs(cx, 0, &args)) + if (!cx->stack.pushInvokeArgs(cx, 0, &args)) return false; args.calleev() = toISO; args.thisv().setObject(*obj); - if (!Invoke(cx, args, 0)) + if (!Invoke(cx, args)) return false; *vp = args.rval(); return true; diff --git a/js/src/jsdbgapi.cpp b/js/src/jsdbgapi.cpp index fc9351cc6c7d..8dc7d577f4ad 100644 --- a/js/src/jsdbgapi.cpp +++ b/js/src/jsdbgapi.cpp @@ -69,11 +69,13 @@ #include "jsatominlines.h" #include "jsdbgapiinlines.h" -#include "jsinterpinlines.h" #include "jsobjinlines.h" +#include "jsinterpinlines.h" #include "jsscopeinlines.h" #include "jsscriptinlines.h" +#include "vm/Stack-inl.h" + #include "jsautooplen.h" #include "methodjit/MethodJIT.h" @@ -116,21 +118,21 @@ JS_SetRuntimeDebugMode(JSRuntime *rt, JSBool debug) namespace js { void -ScriptDebugPrologue(JSContext *cx, JSStackFrame *fp) +ScriptDebugPrologue(JSContext *cx, StackFrame *fp) { if (fp->isFramePushedByExecute()) { if (JSInterpreterHook hook = cx->debugHooks->executeHook) - fp->setHookData(hook(cx, fp, true, 0, cx->debugHooks->executeHookData)); + fp->setHookData(hook(cx, Jsvalify(fp), true, 0, 
cx->debugHooks->executeHookData)); } else { if (JSInterpreterHook hook = cx->debugHooks->callHook) - fp->setHookData(hook(cx, fp, true, 0, cx->debugHooks->callHookData)); + fp->setHookData(hook(cx, Jsvalify(fp), true, 0, cx->debugHooks->callHookData)); } Probes::enterJSFun(cx, fp->maybeFun(), fp->script()); } bool -ScriptDebugEpilogue(JSContext *cx, JSStackFrame *fp, bool okArg) +ScriptDebugEpilogue(JSContext *cx, StackFrame *fp, bool okArg) { JSBool ok = okArg; @@ -139,10 +141,10 @@ ScriptDebugEpilogue(JSContext *cx, JSStackFrame *fp, bool okArg) if (void *hookData = fp->maybeHookData()) { if (fp->isFramePushedByExecute()) { if (JSInterpreterHook hook = cx->debugHooks->executeHook) - hook(cx, fp, false, &ok, hookData); + hook(cx, Jsvalify(fp), false, &ok, hookData); } else { if (JSInterpreterHook hook = cx->debugHooks->callHook) - hook(cx, fp, false, &ok, hookData); + hook(cx, Jsvalify(fp), false, &ok, hookData); } } @@ -836,7 +838,9 @@ js_watch_set(JSContext *cx, JSObject *obj, jsid id, JSBool strict, Value *vp) } { - Conditionally tvr(needMethodSlotWrite, cx, needMethodSlotWrite); + Maybe tvr; + if (needMethodSlotWrite) + tvr.construct(cx, needMethodSlotWrite); /* * Call the handler. This invalidates shape, so re-lookup the shape. @@ -1361,33 +1365,35 @@ JS_GetScriptPrincipals(JSContext *cx, JSScript *script) JS_PUBLIC_API(JSStackFrame *) JS_FrameIterator(JSContext *cx, JSStackFrame **iteratorp) { - *iteratorp = (*iteratorp == NULL) ? js_GetTopStackFrame(cx) : (*iteratorp)->prev(); + StackFrame *fp = Valueify(*iteratorp); + *iteratorp = Jsvalify((fp == NULL) ? 
js_GetTopStackFrame(cx) : fp->prev()); return *iteratorp; } JS_PUBLIC_API(JSScript *) JS_GetFrameScript(JSContext *cx, JSStackFrame *fp) { - return fp->maybeScript(); + return Valueify(fp)->maybeScript(); } JS_PUBLIC_API(jsbytecode *) JS_GetFramePC(JSContext *cx, JSStackFrame *fp) { - return fp->pc(cx); + return Valueify(fp)->pc(cx); } JS_PUBLIC_API(JSStackFrame *) JS_GetScriptedCaller(JSContext *cx, JSStackFrame *fp) { - return js_GetScriptedCaller(cx, fp); + return Jsvalify(js_GetScriptedCaller(cx, Valueify(fp))); } JS_PUBLIC_API(void *) -JS_GetFrameAnnotation(JSContext *cx, JSStackFrame *fp) +JS_GetFrameAnnotation(JSContext *cx, JSStackFrame *fpArg) { + StackFrame *fp = Valueify(fpArg); if (fp->annotation() && fp->isScriptFrame()) { - JSPrincipals *principals = fp->principals(cx); + JSPrincipals *principals = fp->scopeChain().principals(cx); if (principals && principals->globalPrivilegesEnabled(cx, principals)) { /* @@ -1404,7 +1410,7 @@ JS_GetFrameAnnotation(JSContext *cx, JSStackFrame *fp) JS_PUBLIC_API(void) JS_SetFrameAnnotation(JSContext *cx, JSStackFrame *fp, void *annotation) { - fp->setAnnotation(annotation); + Valueify(fp)->setAnnotation(annotation); } JS_PUBLIC_API(void *) @@ -1412,7 +1418,7 @@ JS_GetFramePrincipalArray(JSContext *cx, JSStackFrame *fp) { JSPrincipals *principals; - principals = fp->principals(cx); + principals = Valueify(fp)->scopeChain().principals(cx); if (!principals) return NULL; return principals->getPrincipalArray(cx, principals); @@ -1421,34 +1427,36 @@ JS_GetFramePrincipalArray(JSContext *cx, JSStackFrame *fp) JS_PUBLIC_API(JSBool) JS_IsScriptFrame(JSContext *cx, JSStackFrame *fp) { - return !fp->isDummyFrame(); + return !Valueify(fp)->isDummyFrame(); } /* this is deprecated, use JS_GetFrameScopeChain instead */ JS_PUBLIC_API(JSObject *) JS_GetFrameObject(JSContext *cx, JSStackFrame *fp) { - return &fp->scopeChain(); + return &Valueify(fp)->scopeChain(); } JS_PUBLIC_API(JSObject *) -JS_GetFrameScopeChain(JSContext *cx, 
JSStackFrame *fp) +JS_GetFrameScopeChain(JSContext *cx, JSStackFrame *fpArg) { - JS_ASSERT(cx->stack().contains(fp)); + StackFrame *fp = Valueify(fpArg); + JS_ASSERT(cx->stack.contains(fp)); js::AutoCompartment ac(cx, &fp->scopeChain()); if (!ac.enter()) return NULL; /* Force creation of argument and call objects if not yet created */ - (void) JS_GetFrameCallObject(cx, fp); + (void) JS_GetFrameCallObject(cx, Jsvalify(fp)); return GetScopeChain(cx, fp); } JS_PUBLIC_API(JSObject *) -JS_GetFrameCallObject(JSContext *cx, JSStackFrame *fp) +JS_GetFrameCallObject(JSContext *cx, JSStackFrame *fpArg) { - JS_ASSERT(cx->stack().contains(fp)); + StackFrame *fp = Valueify(fpArg); + JS_ASSERT(cx->stack.contains(fp)); if (!fp->isFunctionFrame()) return NULL; @@ -1467,8 +1475,9 @@ JS_GetFrameCallObject(JSContext *cx, JSStackFrame *fp) } JS_PUBLIC_API(JSBool) -JS_GetFrameThis(JSContext *cx, JSStackFrame *fp, jsval *thisv) +JS_GetFrameThis(JSContext *cx, JSStackFrame *fpArg, jsval *thisv) { + StackFrame *fp = Valueify(fpArg); if (fp->isDummyFrame()) return false; @@ -1485,12 +1494,13 @@ JS_GetFrameThis(JSContext *cx, JSStackFrame *fp, jsval *thisv) JS_PUBLIC_API(JSFunction *) JS_GetFrameFunction(JSContext *cx, JSStackFrame *fp) { - return fp->maybeFun(); + return Valueify(fp)->maybeFun(); } JS_PUBLIC_API(JSObject *) -JS_GetFrameFunctionObject(JSContext *cx, JSStackFrame *fp) +JS_GetFrameFunctionObject(JSContext *cx, JSStackFrame *fpArg) { + StackFrame *fp = Valueify(fpArg); if (!fp->isFunctionFrame()) return NULL; @@ -1502,13 +1512,13 @@ JS_GetFrameFunctionObject(JSContext *cx, JSStackFrame *fp) JS_PUBLIC_API(JSBool) JS_IsConstructorFrame(JSContext *cx, JSStackFrame *fp) { - return fp->isConstructing(); + return Valueify(fp)->isConstructing(); } JS_PUBLIC_API(JSObject *) JS_GetFrameCalleeObject(JSContext *cx, JSStackFrame *fp) { - return fp->maybeCallee(); + return Valueify(fp)->maybeCallee(); } JS_PUBLIC_API(JSBool) @@ -1516,7 +1526,7 @@ JS_GetValidFrameCalleeObject(JSContext *cx, 
JSStackFrame *fp, jsval *vp) { Value v; - if (!fp->getValidCalleeObject(cx, &v)) + if (!Valueify(fp)->getValidCalleeObject(cx, &v)) return false; *vp = Jsvalify(v); return true; @@ -1525,18 +1535,19 @@ JS_GetValidFrameCalleeObject(JSContext *cx, JSStackFrame *fp, jsval *vp) JS_PUBLIC_API(JSBool) JS_IsDebuggerFrame(JSContext *cx, JSStackFrame *fp) { - return fp->isDebuggerFrame(); + return Valueify(fp)->isDebuggerFrame(); } JS_PUBLIC_API(jsval) JS_GetFrameReturnValue(JSContext *cx, JSStackFrame *fp) { - return Jsvalify(fp->returnValue()); + return Jsvalify(Valueify(fp)->returnValue()); } JS_PUBLIC_API(void) -JS_SetFrameReturnValue(JSContext *cx, JSStackFrame *fp, jsval rval) +JS_SetFrameReturnValue(JSContext *cx, JSStackFrame *fpArg, jsval rval) { + StackFrame *fp = Valueify(fpArg); #ifdef JS_METHODJIT JS_ASSERT_IF(fp->isScriptFrame(), fp->script()->debugMode); #endif @@ -1590,7 +1601,7 @@ JS_SetDestroyScriptHook(JSRuntime *rt, JSDestroyScriptHook hook, /***************************************************************************/ JS_PUBLIC_API(JSBool) -JS_EvaluateUCInStackFrame(JSContext *cx, JSStackFrame *fp, +JS_EvaluateUCInStackFrame(JSContext *cx, JSStackFrame *fpArg, const jschar *chars, uintN length, const char *filename, uintN lineno, jsval *rval) @@ -1600,7 +1611,7 @@ JS_EvaluateUCInStackFrame(JSContext *cx, JSStackFrame *fp, if (!CheckDebugMode(cx)) return false; - JSObject *scobj = JS_GetFrameScopeChain(cx, fp); + JSObject *scobj = JS_GetFrameScopeChain(cx, fpArg); if (!scobj) return false; @@ -1614,7 +1625,8 @@ JS_EvaluateUCInStackFrame(JSContext *cx, JSStackFrame *fp, * we use a static level that will cause us not to attempt to optimize * variable references made by this frame. 
*/ - JSScript *script = Compiler::compileScript(cx, scobj, fp, fp->principals(cx), + StackFrame *fp = Valueify(fpArg); + JSScript *script = Compiler::compileScript(cx, scobj, fp, fp->scopeChain().principals(cx), TCF_COMPILE_N_GO, chars, length, filename, lineno, cx->findVersion(), NULL, UpvarCookie::UPVAR_LEVEL_LIMIT); @@ -1622,7 +1634,8 @@ JS_EvaluateUCInStackFrame(JSContext *cx, JSStackFrame *fp, if (!script) return false; - bool ok = Execute(cx, *scobj, script, fp, JSFRAME_DEBUGGER | JSFRAME_EVAL, Valueify(rval)); + uintN evalFlags = StackFrame::DEBUGGER | StackFrame::EVAL; + bool ok = Execute(cx, *scobj, script, fp, evalFlags, Valueify(rval)); js_DestroyScript(cx, script); return ok; @@ -2423,7 +2436,7 @@ jstv_Lineno(JSContext *cx, JSStackFrame *fp) JS_FRIEND_API(void) js::StoreTraceVisState(JSContext *cx, TraceVisState s, TraceVisExitReason r) { - JSStackFrame *fp = cx->fp(); + StackFrame *fp = cx->fp(); char *script_file = jstv_Filename(fp); JSHashNumber hash = JS_HashString(script_file); diff --git a/js/src/jsemit.cpp b/js/src/jsemit.cpp index 10b5570ed647..c26289658f49 100644 --- a/js/src/jsemit.cpp +++ b/js/src/jsemit.cpp @@ -2211,7 +2211,7 @@ BindNameToSlot(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn) } if (cookie.isFree()) { - JSStackFrame *caller = cg->parser->callerFrame; + StackFrame *caller = cg->parser->callerFrame; if (caller) { JS_ASSERT(cg->compileAndGo()); diff --git a/js/src/jsexn.cpp b/js/src/jsexn.cpp index 57f43d9f4adc..efa9da82413c 100644 --- a/js/src/jsexn.cpp +++ b/js/src/jsexn.cpp @@ -64,10 +64,10 @@ #include "jsstaticcheck.h" #include "jswrapper.h" -#include "jscntxtinlines.h" -#include "jsinterpinlines.h" #include "jsobjinlines.h" +#include "vm/Stack-inl.h" + using namespace js; using namespace js::gc; @@ -267,7 +267,7 @@ InitExnPrivate(JSContext *cx, JSObject *exnObject, JSString *message, JSErrorReporter older; JSExceptionState *state; jsid callerid; - JSStackFrame *fp, *fpstop; + StackFrame *fp, *fpstop; size_t stackDepth, 
valueCount, size; JSBool overflow; JSExnPrivate *priv; @@ -693,7 +693,7 @@ static JSBool Exception(JSContext *cx, uintN argc, Value *vp) { JSString *message, *filename; - JSStackFrame *fp; + StackFrame *fp; /* * ECMA ed. 3, 15.11.1 requires Error, etc., to construct even when diff --git a/js/src/jsfriendapi.cpp b/js/src/jsfriendapi.cpp index 16a10befd34c..902d43626722 100644 --- a/js/src/jsfriendapi.cpp +++ b/js/src/jsfriendapi.cpp @@ -74,3 +74,9 @@ JS_UnwrapObject(JSObject *obj) { return obj->unwrap(); } + +JS_FRIEND_API(JSObject *) +JS_GetFrameScopeChainRaw(JSStackFrame *fp) +{ + return &Valueify(fp)->scopeChain(); +} diff --git a/js/src/jsfriendapi.h b/js/src/jsfriendapi.h index b3beecaa8c57..a2179e5983f4 100644 --- a/js/src/jsfriendapi.h +++ b/js/src/jsfriendapi.h @@ -54,6 +54,9 @@ JS_FindCompilationScope(JSContext *cx, JSObject *obj); extern JS_FRIEND_API(JSObject *) JS_UnwrapObject(JSObject *obj); +extern JS_FRIEND_API(JSObject *) +JS_GetFrameScopeChainRaw(JSStackFrame *fp); + JS_END_EXTERN_C #endif /* jsfriendapi_h___ */ diff --git a/js/src/jsfun.cpp b/js/src/jsfun.cpp index f155cc32581f..cf3f44c517df 100644 --- a/js/src/jsfun.cpp +++ b/js/src/jsfun.cpp @@ -86,12 +86,12 @@ #endif #include "jsatominlines.h" -#include "jscntxtinlines.h" #include "jsfuninlines.h" -#include "jsinterpinlines.h" #include "jsobjinlines.h" #include "jsscriptinlines.h" +#include "vm/Stack-inl.h" + using namespace js; using namespace js::gc; @@ -102,7 +102,7 @@ JSObject::getThrowTypeError() const } JSBool -js_GetArgsValue(JSContext *cx, JSStackFrame *fp, Value *vp) +js_GetArgsValue(JSContext *cx, StackFrame *fp, Value *vp) { JSObject *argsobj; @@ -119,7 +119,7 @@ js_GetArgsValue(JSContext *cx, JSStackFrame *fp, Value *vp) } JSBool -js_GetArgsProperty(JSContext *cx, JSStackFrame *fp, jsid id, Value *vp) +js_GetArgsProperty(JSContext *cx, StackFrame *fp, jsid id, Value *vp) { JS_ASSERT(fp->isFunctionFrame()); @@ -233,7 +233,7 @@ struct STATIC_SKIP_INFERENCE PutArg }; JSObject * 
-js_GetArgsObject(JSContext *cx, JSStackFrame *fp) +js_GetArgsObject(JSContext *cx, StackFrame *fp) { /* * We must be in a function activation; the function must be lightweight @@ -273,7 +273,7 @@ js_GetArgsObject(JSContext *cx, JSStackFrame *fp) } void -js_PutArgsObject(JSContext *cx, JSStackFrame *fp) +js_PutArgsObject(StackFrame *fp) { JSObject &argsobj = fp->argsObj(); if (argsobj.isNormalArguments()) { @@ -357,7 +357,7 @@ args_delProperty(JSContext *cx, JSObject *obj, jsid id, Value *vp) } static JS_REQUIRES_STACK JSObject * -WrapEscapingClosure(JSContext *cx, JSStackFrame *fp, JSFunction *fun) +WrapEscapingClosure(JSContext *cx, StackFrame *fp, JSFunction *fun) { JS_ASSERT(fun->optimizedClosure()); JS_ASSERT(!fun->u.i.wrapper); @@ -526,7 +526,7 @@ ArgGetter(JSContext *cx, JSObject *obj, jsid id, Value *vp) uintN arg = uintN(JSID_TO_INT(id)); if (arg < obj->getArgsInitialLength()) { JS_ASSERT(!obj->getArgsElement(arg).isMagic(JS_ARGS_HOLE)); - if (JSStackFrame *fp = (JSStackFrame *) obj->getPrivate()) + if (StackFrame *fp = (StackFrame *) obj->getPrivate()) *vp = fp->canonicalActualArg(arg); else *vp = obj->getArgsElement(arg); @@ -575,7 +575,7 @@ ArgSetter(JSContext *cx, JSObject *obj, jsid id, JSBool strict, Value *vp) if (JSID_IS_INT(id)) { uintN arg = uintN(JSID_TO_INT(id)); if (arg < obj->getArgsInitialLength()) { - JSStackFrame *fp = (JSStackFrame *) obj->getPrivate(); + StackFrame *fp = (StackFrame *) obj->getPrivate(); if (fp) { JSScript *script = fp->functionScript(); if (script->usesArguments) @@ -798,13 +798,13 @@ args_finalize(JSContext *cx, JSObject *obj) * otherwise reachable. An executing generator is rooted by its invocation. To * distinguish the two cases (which imply different access paths to the * generator object), we use the JSFRAME_FLOATING_GENERATOR flag, which is only - * set on the JSStackFrame kept in the generator object's JSGenerator. + * set on the StackFrame kept in the generator object's JSGenerator. 
*/ static inline void MaybeMarkGenerator(JSTracer *trc, JSObject *obj) { #if JS_HAS_GENERATORS - JSStackFrame *fp = (JSStackFrame *) obj->getPrivate(); + StackFrame *fp = (StackFrame *) obj->getPrivate(); if (fp && fp->isFloatingGenerator()) { JSObject *genobj = js_FloatingFrameToGenerator(fp)->obj; MarkObject(trc, *genobj, "generator object"); @@ -838,7 +838,7 @@ args_trace(JSTracer *trc, JSObject *obj) * * The JSClass functions below collaborate to lazily reflect and synchronize * actual argument values, argument count, and callee function object stored - * in a JSStackFrame with their corresponding property values in the frame's + * in a StackFrame with their corresponding property values in the frame's * arguments object. */ Class js_ArgumentsClass = { @@ -895,7 +895,7 @@ Class StrictArgumentsClass = { } /* - * A Declarative Environment object stores its active JSStackFrame pointer in + * A Declarative Environment object stores its active StackFrame pointer in * its private slot, just as Call and Arguments objects do. 
*/ Class js_DeclEnvClass = { @@ -930,7 +930,7 @@ CheckForEscapingClosure(JSContext *cx, JSObject *obj, Value *vp) if (fun->needsWrapper()) { LeaveTrace(cx); - JSStackFrame *fp = (JSStackFrame *) obj->getPrivate(); + StackFrame *fp = (StackFrame *) obj->getPrivate(); if (fp) { JSObject *wrapper = WrapEscapingClosure(cx, fp, fun); if (!wrapper) @@ -993,7 +993,7 @@ NewCallObject(JSContext *cx, JSScript *script, JSObject &scopeChain, JSObject *c } static inline JSObject * -NewDeclEnvObject(JSContext *cx, JSStackFrame *fp) +NewDeclEnvObject(JSContext *cx, StackFrame *fp) { JSObject *envobj = js_NewGCObject(cx, FINALIZE_OBJECT2); if (!envobj) @@ -1011,7 +1011,7 @@ NewDeclEnvObject(JSContext *cx, JSStackFrame *fp) namespace js { JSObject * -CreateFunCallObject(JSContext *cx, JSStackFrame *fp) +CreateFunCallObject(JSContext *cx, StackFrame *fp) { JS_ASSERT(fp->isNonEvalFunctionFrame()); JS_ASSERT(!fp->hasCallObj()); @@ -1048,7 +1048,7 @@ CreateFunCallObject(JSContext *cx, JSStackFrame *fp) } JSObject * -CreateEvalCallObject(JSContext *cx, JSStackFrame *fp) +CreateEvalCallObject(JSContext *cx, StackFrame *fp) { JSObject *callobj = NewCallObject(cx, fp->script(), fp->scopeChain(), NULL); if (!callobj) @@ -1083,7 +1083,7 @@ CopyValuesToCallObject(JSObject &callobj, uintN nargs, Value *argv, uintN nvars, } void -js_PutCallObject(JSContext *cx, JSStackFrame *fp) +js_PutCallObject(StackFrame *fp) { JSObject &callobj = fp->callObj(); JS_ASSERT(callobj.getPrivate() == fp); @@ -1094,7 +1094,7 @@ js_PutCallObject(JSContext *cx, JSStackFrame *fp) if (fp->hasArgsObj()) { if (!fp->hasOverriddenArgs()) callobj.setCallObjArguments(ObjectValue(fp->argsObj())); - js_PutArgsObject(cx, fp); + js_PutArgsObject(fp); } JSScript *script = fp->script(); @@ -1160,7 +1160,7 @@ js_PutCallObject(JSContext *cx, JSStackFrame *fp) } JSBool JS_FASTCALL -js_PutCallObjectOnTrace(JSContext *cx, JSObject *callobj, uint32 nargs, Value *argv, +js_PutCallObjectOnTrace(JSObject *callobj, uint32 nargs, Value 
*argv, uint32 nvars, Value *slots) { JS_ASSERT(callobj->isCall()); @@ -1173,7 +1173,7 @@ js_PutCallObjectOnTrace(JSContext *cx, JSObject *callobj, uint32 nargs, Value *a return true; } -JS_DEFINE_CALLINFO_6(extern, BOOL, js_PutCallObjectOnTrace, CONTEXT, OBJECT, UINT32, VALUEPTR, +JS_DEFINE_CALLINFO_5(extern, BOOL, js_PutCallObjectOnTrace, OBJECT, UINT32, VALUEPTR, UINT32, VALUEPTR, 0, nanojit::ACCSET_STORE_ANY) namespace js { @@ -1181,7 +1181,7 @@ namespace js { static JSBool GetCallArguments(JSContext *cx, JSObject *obj, jsid id, Value *vp) { - JSStackFrame *fp = obj->maybeCallObjStackFrame(); + StackFrame *fp = obj->maybeCallObjStackFrame(); if (fp && !fp->hasOverriddenArgs()) { JSObject *argsobj = js_GetArgsObject(cx, fp); if (!argsobj) @@ -1196,7 +1196,7 @@ GetCallArguments(JSContext *cx, JSObject *obj, jsid id, Value *vp) static JSBool SetCallArguments(JSContext *cx, JSObject *obj, jsid id, JSBool strict, Value *vp) { - if (JSStackFrame *fp = obj->maybeCallObjStackFrame()) + if (StackFrame *fp = obj->maybeCallObjStackFrame()) fp->setOverriddenArgs(); obj->setCallObjArguments(*vp); return true; @@ -1208,7 +1208,7 @@ GetCallArg(JSContext *cx, JSObject *obj, jsid id, Value *vp) JS_ASSERT((int16) JSID_TO_INT(id) == JSID_TO_INT(id)); uintN i = (uint16) JSID_TO_INT(id); - if (JSStackFrame *fp = obj->maybeCallObjStackFrame()) + if (StackFrame *fp = obj->maybeCallObjStackFrame()) *vp = fp->formalArg(i); else *vp = obj->callObjArg(i); @@ -1222,7 +1222,7 @@ SetCallArg(JSContext *cx, JSObject *obj, jsid id, JSBool strict, Value *vp) uintN i = (uint16) JSID_TO_INT(id); Value *argp; - if (JSStackFrame *fp = obj->maybeCallObjStackFrame()) + if (StackFrame *fp = obj->maybeCallObjStackFrame()) argp = &fp->formalArg(i); else argp = &obj->callObjArg(i); @@ -1261,7 +1261,7 @@ GetCallVar(JSContext *cx, JSObject *obj, jsid id, Value *vp) JS_ASSERT((int16) JSID_TO_INT(id) == JSID_TO_INT(id)); uintN i = (uint16) JSID_TO_INT(id); - if (JSStackFrame *fp = 
obj->maybeCallObjStackFrame()) + if (StackFrame *fp = obj->maybeCallObjStackFrame()) *vp = fp->varSlot(i); else *vp = obj->callObjVar(i); @@ -1302,7 +1302,7 @@ SetCallVar(JSContext *cx, JSObject *obj, jsid id, JSBool strict, Value *vp) #endif Value *varp; - if (JSStackFrame *fp = obj->maybeCallObjStackFrame()) + if (StackFrame *fp = obj->maybeCallObjStackFrame()) varp = &fp->varSlot(i); else varp = &obj->callObjVar(i); @@ -1379,7 +1379,7 @@ static void call_trace(JSTracer *trc, JSObject *obj) { JS_ASSERT(obj->isCall()); - if (JSStackFrame *fp = obj->maybeCallObjStackFrame()) { + if (StackFrame *fp = obj->maybeCallObjStackFrame()) { /* * FIXME: Hide copies of stack values rooted by fp from the Cycle * Collector, which currently lacks a non-stub Unlink implementation @@ -1420,7 +1420,7 @@ JS_PUBLIC_DATA(Class) js_CallClass = { }; bool -JSStackFrame::getValidCalleeObject(JSContext *cx, Value *vp) +StackFrame::getValidCalleeObject(JSContext *cx, Value *vp) { if (!isFunctionFrame()) { vp->setUndefined(); @@ -1584,7 +1584,7 @@ fun_getProperty(JSContext *cx, JSObject *obj, jsid id, Value *vp) JSFunction *fun = obj->getFunctionPrivate(); /* Find fun's top-most activation record. */ - JSStackFrame *fp; + StackFrame *fp; for (fp = js_GetTopStackFrame(cx); fp && (fp->maybeFun() != fun || fp->isEvalOrDebuggerFrame()); fp = fp->prev()) { @@ -2058,7 +2058,7 @@ fun_toStringHelper(JSContext *cx, JSObject *obj, uintN indent) return NULL; if (!indent) { - LazilyConstructed &lazy = cx->compartment->toSourceCache; + Maybe &lazy = cx->compartment->toSourceCache; if (lazy.empty()) { lazy.construct(); @@ -2137,7 +2137,7 @@ js_fun_call(JSContext *cx, uintN argc, Value *vp) /* Allocate stack space for fval, obj, and the args. */ InvokeArgsGuard args; - if (!cx->stack().pushInvokeArgs(cx, argc, &args)) + if (!cx->stack.pushInvokeArgs(cx, argc, &args)) return JS_FALSE; /* Push fval, thisv, and the args. 
*/ @@ -2145,7 +2145,7 @@ js_fun_call(JSContext *cx, uintN argc, Value *vp) args.thisv() = thisv; memcpy(args.argv(), argv, argc * sizeof *argv); - bool ok = Invoke(cx, args, 0); + bool ok = Invoke(cx, args); *vp = args.rval(); return ok; } @@ -2188,7 +2188,7 @@ js_fun_apply(JSContext *cx, uintN argc, Value *vp) uintN n = uintN(JS_MIN(length, JS_ARGS_LENGTH_MAX)); InvokeArgsGuard args; - if (!cx->stack().pushInvokeArgs(cx, n, &args)) + if (!cx->stack.pushInvokeArgs(cx, n, &args)) return false; /* Push fval, obj, and aobj's elements as args. */ @@ -2200,7 +2200,7 @@ js_fun_apply(JSContext *cx, uintN argc, Value *vp) return false; /* Step 9. */ - if (!Invoke(cx, args, 0)) + if (!Invoke(cx, args)) return false; *vp = args.rval(); return true; @@ -2301,7 +2301,7 @@ CallOrConstructBoundFunction(JSContext *cx, uintN argc, Value *vp) const Value &boundThis = obj->getBoundFunctionThis(); InvokeArgsGuard args; - if (!cx->stack().pushInvokeArgs(cx, argc + argslen, &args)) + if (!cx->stack.pushInvokeArgs(cx, argc + argslen, &args)) return false; /* 15.3.4.5.1, 15.3.4.5.2 step 4. */ @@ -2314,7 +2314,7 @@ CallOrConstructBoundFunction(JSContext *cx, uintN argc, Value *vp) if (!constructing) args.thisv() = boundThis; - if (constructing ? !InvokeConstructor(cx, args) : !Invoke(cx, args, 0)) + if (constructing ? !InvokeConstructor(cx, args) : !Invoke(cx, args)) return false; *vp = args.rval(); @@ -2961,9 +2961,7 @@ js_DefineFunction(JSContext *cx, JSObject *obj, jsid id, Native native, return fun; } -#if (JSV2F_CONSTRUCT & JSV2F_SEARCH_STACK) -# error "JSINVOKE_CONSTRUCT and JSV2F_SEARCH_STACK are not disjoint!" 
-#endif +JS_STATIC_ASSERT((JSV2F_CONSTRUCT & JSV2F_SEARCH_STACK) == 0); JSFunction * js_ValueToFunction(JSContext *cx, const Value *vp, uintN flags) diff --git a/js/src/jsfun.h b/js/src/jsfun.h index 3a90fda2b2eb..176627b07397 100644 --- a/js/src/jsfun.h +++ b/js/src/jsfun.h @@ -489,7 +489,7 @@ js_DefineFunction(JSContext *cx, JSObject *obj, jsid id, js::Native native, * fact that JSINVOKE_CONSTRUCT (aka JSFRAME_CONSTRUCTING) is 1, and test that * with #if/#error in jsfun.c. */ -#define JSV2F_CONSTRUCT JSINVOKE_CONSTRUCT +#define JSV2F_CONSTRUCT ((uintN)js::INVOKE_CONSTRUCTOR) #define JSV2F_SEARCH_STACK 0x10000 extern JSFunction * @@ -508,19 +508,19 @@ extern JSObject * JS_FASTCALL js_CreateCallObjectOnTrace(JSContext *cx, JSFunction *fun, JSObject *callee, JSObject *scopeChain); extern void -js_PutCallObject(JSContext *cx, JSStackFrame *fp); +js_PutCallObject(js::StackFrame *fp); extern JSBool JS_FASTCALL -js_PutCallObjectOnTrace(JSContext *cx, JSObject *scopeChain, uint32 nargs, - js::Value *argv, uint32 nvars, js::Value *slots); +js_PutCallObjectOnTrace(JSObject *scopeChain, uint32 nargs, js::Value *argv, + uint32 nvars, js::Value *slots); namespace js { JSObject * -CreateFunCallObject(JSContext *cx, JSStackFrame *fp); +CreateFunCallObject(JSContext *cx, StackFrame *fp); JSObject * -CreateEvalCallObject(JSContext *cx, JSStackFrame *fp); +CreateEvalCallObject(JSContext *cx, StackFrame *fp); extern JSBool GetCallArg(JSContext *cx, JSObject *obj, jsid id, js::Value *vp); @@ -550,10 +550,10 @@ SetCallUpvar(JSContext *cx, JSObject *obj, jsid id, JSBool strict, js::Value *vp } // namespace js extern JSBool -js_GetArgsValue(JSContext *cx, JSStackFrame *fp, js::Value *vp); +js_GetArgsValue(JSContext *cx, js::StackFrame *fp, js::Value *vp); extern JSBool -js_GetArgsProperty(JSContext *cx, JSStackFrame *fp, jsid id, js::Value *vp); +js_GetArgsProperty(JSContext *cx, js::StackFrame *fp, jsid id, js::Value *vp); /* * Get the arguments object for the given frame. 
If the frame is strict mode @@ -566,10 +566,10 @@ js_GetArgsProperty(JSContext *cx, JSStackFrame *fp, jsid id, js::Value *vp); * function. */ extern JSObject * -js_GetArgsObject(JSContext *cx, JSStackFrame *fp); +js_GetArgsObject(JSContext *cx, js::StackFrame *fp); extern void -js_PutArgsObject(JSContext *cx, JSStackFrame *fp); +js_PutArgsObject(js::StackFrame *fp); inline bool js_IsNamedLambda(JSFunction *fun) { return (fun->flags & JSFUN_LAMBDA) && fun->atom; } diff --git a/js/src/jsgc.cpp b/js/src/jsgc.cpp index b3c8580f63ea..d290da82fa7d 100644 --- a/js/src/jsgc.cpp +++ b/js/src/jsgc.cpp @@ -85,8 +85,6 @@ #endif #include "jsprobes.h" -#include "jscntxtinlines.h" -#include "jsinterpinlines.h" #include "jsobjinlines.h" #include "jshashtable.h" #include "jsweakmap.h" @@ -118,29 +116,6 @@ JS_STATIC_ASSERT(JSTRACE_XML == 3); */ JS_STATIC_ASSERT(JSTRACE_SHAPE + 1 == JSTRACE_XML); -/* - * Everything we store in the heap must be a multiple of the cell size. - */ -JS_STATIC_ASSERT(sizeof(JSString) % sizeof(FreeCell) == 0); -JS_STATIC_ASSERT(sizeof(JSShortString) % sizeof(FreeCell) == 0); -JS_STATIC_ASSERT(sizeof(JSObject) % sizeof(FreeCell) == 0); -JS_STATIC_ASSERT(sizeof(JSFunction) % sizeof(FreeCell) == 0); -JS_STATIC_ASSERT(sizeof(Shape) % sizeof(FreeCell) == 0); -#ifdef JSXML -JS_STATIC_ASSERT(sizeof(JSXML) % sizeof(FreeCell) == 0); -#endif - -/* - * All arenas must be exactly 4k. 
- */ -JS_STATIC_ASSERT(sizeof(Arena) == 4096); -JS_STATIC_ASSERT(sizeof(Arena) == 4096); -JS_STATIC_ASSERT(sizeof(Arena) == 4096); -JS_STATIC_ASSERT(sizeof(Arena) == 4096); -JS_STATIC_ASSERT(sizeof(Arena) == 4096); -JS_STATIC_ASSERT(sizeof(Arena) == 4096); -JS_STATIC_ASSERT(sizeof(Arena) == 4096); - #ifdef JS_GCMETER # define METER(x) ((void) (x)) # define METER_IF(condition, x) ((void) ((condition) && (x))) @@ -166,83 +141,63 @@ FinalizeKind slotsToThingKind[] = { JS_STATIC_ASSERT(JS_ARRAY_LENGTH(slotsToThingKind) == SLOTS_TO_THING_KIND_LIMIT); -/* Initialize the arena and setup the free list. */ -template -void -Arena::init(JSCompartment *compartment, unsigned thingKind) +#ifdef DEBUG +const uint8 GCThingSizeMap[] = { + sizeof(JSObject), /* FINALIZE_OBJECT0 */ + sizeof(JSObject), /* FINALIZE_OBJECT0_BACKGROUND */ + sizeof(JSObject_Slots2), /* FINALIZE_OBJECT2 */ + sizeof(JSObject_Slots2), /* FINALIZE_OBJECT2_BACKGROUND */ + sizeof(JSObject_Slots4), /* FINALIZE_OBJECT4 */ + sizeof(JSObject_Slots4), /* FINALIZE_OBJECT4_BACKGROUND */ + sizeof(JSObject_Slots8), /* FINALIZE_OBJECT8 */ + sizeof(JSObject_Slots8), /* FINALIZE_OBJECT8_BACKGROUND */ + sizeof(JSObject_Slots12), /* FINALIZE_OBJECT12 */ + sizeof(JSObject_Slots12), /* FINALIZE_OBJECT12_BACKGROUND */ + sizeof(JSObject_Slots16), /* FINALIZE_OBJECT16 */ + sizeof(JSObject_Slots16), /* FINALIZE_OBJECT16_BACKGROUND */ + sizeof(JSFunction), /* FINALIZE_FUNCTION */ + sizeof(Shape), /* FINALIZE_SHAPE */ +#if JS_HAS_XML_SUPPORT + sizeof(JSXML), /* FINALIZE_XML */ +#endif + sizeof(JSShortString), /* FINALIZE_SHORT_STRING */ + sizeof(JSString), /* FINALIZE_STRING */ + sizeof(JSString), /* FINALIZE_EXTERNAL_STRING */ +}; + +JS_STATIC_ASSERT(JS_ARRAY_LENGTH(GCThingSizeMap) == FINALIZE_LIMIT); + +JS_FRIEND_API(size_t) +ArenaHeader::getThingSize() const { - aheader.compartment = compartment; - aheader.thingKind = thingKind; - char *p = (char *)&t.things[0]; - aheader.freeList = reinterpret_cast(p); - T *thing = &t.things[0]; 
+ return GCThingSizeMap[getThingKind()]; +} +#endif + +/* Initialize the arena and setup the free list. */ +template +inline FreeCell * +Arena::buildFreeList() +{ + T *first = &t.things[0]; T *last = &t.things[JS_ARRAY_LENGTH(t.things) - 1]; - while (thing < last) { - thing->asFreeCell()->link = (thing + 1)->asFreeCell(); - ++thing; + for (T *thing = first; thing != last;) { + T *following = thing + 1; + thing->asFreeCell()->link = following->asFreeCell(); + thing = following; } last->asFreeCell()->link = NULL; -#ifdef DEBUG - aheader.thingSize = sizeof(T); - aheader.isUsed = true; - aheader.hasFreeThings = true; -#endif -} - -template -bool -Arena::inFreeList(void *thing) const -{ - FreeCell *cursor = aheader.freeList; - while (cursor) { - JS_ASSERT(aheader.thingSize == sizeof(T)); - JS_ASSERT(!cursor->isMarked()); - - /* If the cursor moves past the thing, it's not in the freelist. */ - if (thing < cursor) - break; - - /* If we find it on the freelist, it's dead. */ - if (thing == cursor) - return true; - JS_ASSERT_IF(cursor->link, cursor < cursor->link); - cursor = cursor->link; - } - return false; -} - -template -inline ConservativeGCTest -Arena::mark(T *thing, JSTracer *trc) -{ - T *alignedThing = getAlignedThing(thing); - - if (alignedThing > &t.things[ThingsPerArena-1] || alignedThing < &t.things[0]) - return CGCT_NOTARENA; - - if (inFreeList(alignedThing)) - return CGCT_NOTLIVE; - - JS_ASSERT(sizeof(T) == aheader.thingSize); - js::gc::MarkRoot(trc, alignedThing, "machine stack"); - -#ifdef JS_DUMP_CONSERVATIVE_GC_ROOTS - if (alignedThing != thing) - return CGCT_VALIDWITHOFFSET; -#endif - return CGCT_VALID; + return first->asFreeCell(); } template inline bool Arena::finalize(JSContext *cx) { - JS_ASSERT_IF(header()->hasFreeThings, header()->freeList); - JS_ASSERT(!getMarkingDelay()->link); - JS_ASSERT(getMarkingDelay()->unmarkedChildren == 0); - JS_ASSERT(header()->isUsed); - - FreeCell *nextFree = header()->freeList; + JS_ASSERT(aheader.compartment); + 
JS_ASSERT(!aheader.getMarkingDelay()->link); + + FreeCell *nextFree = aheader.freeList; FreeCell *freeList = NULL; FreeCell **tailp = &freeList; bool allClear = true; @@ -297,59 +252,20 @@ Arena::finalize(JSContext *cx) t = t->link; } } -#endif - if (allClear) { JS_ASSERT(nfree == ThingsPerArena); - JS_ASSERT((T *)tailp == &t.things[ThingsPerArena-1]); - *tailp = NULL; - header()->freeList = freeList; -#ifdef DEBUG - header()->hasFreeThings = true; -#endif - JS_ASSERT((T *)header()->freeList == &t.things[0]); + JS_ASSERT(freeList == static_cast(&t.things[0])); + JS_ASSERT(tailp == &t.things[ThingsPerArena-1].asFreeCell()->link); } else { JS_ASSERT(nfree < ThingsPerArena); - *tailp = NULL; - header()->freeList = freeList; -#ifdef DEBUG - header()->hasFreeThings = (nfree == 0) ? false : true; -#endif } +#endif + *tailp = NULL; + aheader.freeList = freeList; return allClear; } #ifdef DEBUG -bool -checkArenaListsForThing(JSCompartment *comp, void *thing) -{ - if (comp->arenas[FINALIZE_OBJECT0].arenasContainThing(thing) || - comp->arenas[FINALIZE_OBJECT0_BACKGROUND].arenasContainThing(thing) || - comp->arenas[FINALIZE_OBJECT2].arenasContainThing(thing) || - comp->arenas[FINALIZE_OBJECT2_BACKGROUND].arenasContainThing(thing) || - comp->arenas[FINALIZE_OBJECT4].arenasContainThing(thing) || - comp->arenas[FINALIZE_OBJECT4_BACKGROUND].arenasContainThing(thing) || - comp->arenas[FINALIZE_OBJECT8].arenasContainThing(thing) || - comp->arenas[FINALIZE_OBJECT8_BACKGROUND].arenasContainThing(thing) || - comp->arenas[FINALIZE_OBJECT12].arenasContainThing(thing) || - comp->arenas[FINALIZE_OBJECT12_BACKGROUND].arenasContainThing(thing) || - comp->arenas[FINALIZE_OBJECT16].arenasContainThing(thing) || - comp->arenas[FINALIZE_OBJECT16_BACKGROUND].arenasContainThing(thing) || - comp->arenas[FINALIZE_FUNCTION].arenasContainThing(thing) || - comp->arenas[FINALIZE_SHAPE].arenasContainThing(thing) || -#if JS_HAS_XML_SUPPORT - comp->arenas[FINALIZE_XML].arenasContainThing(thing) || -#endif 
- comp->arenas[FINALIZE_STRING].arenasContainThing(thing) || - comp->arenas[FINALIZE_EXTERNAL_STRING].arenasContainThing(thing) || - comp->arenas[FINALIZE_SHORT_STRING].arenasContainThing(thing)) - { - return true; - } - - return false; -} - bool checkArenaListAllUnmarked(JSCompartment *comp) { @@ -367,8 +283,8 @@ checkArenaListAllUnmarked(JSCompartment *comp) void JSCompartment::finishArenaLists() { - for (int i = 0; i < FINALIZE_LIMIT; i++) - arenas[i].releaseAll(); + for (unsigned i = 0; i < FINALIZE_LIMIT; i++) + arenas[i].releaseAll(i); } void @@ -383,28 +299,25 @@ Chunk::init(JSRuntime *rt) info.runtime = rt; info.age = 0; info.emptyArenaLists.init(); - info.emptyArenaLists.cellFreeList = &arenas[0]; + info.emptyArenaLists.cellFreeList = &arenas[0].aheader; #ifdef JS_THREADSAFE info.chunkLock = JS_NEW_LOCK(); if (!info.chunkLock) return false; #endif - Arena *arena = &arenas[0]; - Arena *last = &arenas[JS_ARRAY_LENGTH(arenas) - 1]; - while (arena < last) { - arena->header()->next = arena + 1; - arena->header()->compartment = NULL; -#ifdef DEBUG - arena->header()->isUsed = false; -#endif - ++arena; + ArenaHeader *aheader = &arenas[0].aheader; + ArenaHeader *last = &arenas[JS_ARRAY_LENGTH(arenas) - 1].aheader; + while (aheader < last) { + ArenaHeader *following = reinterpret_cast(aheader->address() + ArenaSize); + aheader->next = following; + aheader->compartment = NULL; + aheader = following; } - last->header()->next = NULL; - last->header()->compartment = NULL; -#ifdef DEBUG - last->header()->isUsed = false; -#endif + last->next = NULL; + last->compartment = NULL; info.numFree = ArenasPerChunk; + for (size_t i = 0; i != JS_ARRAY_LENGTH(markingDelay); ++i) + markingDelay[i].init(); return true; } @@ -430,57 +343,63 @@ Chunk::withinArenasRange(Cell *cell) } template -Arena * +ArenaHeader * Chunk::allocateArena(JSContext *cx, unsigned thingKind) { #ifdef JS_THREADSAFE - Conditionally lockIf(cx->runtime->gcHelperThread.sweeping, info.chunkLock); + Maybe 
maybeLock; + if (cx->runtime->gcHelperThread.sweeping) + maybeLock.construct(info.chunkLock); #endif JSCompartment *comp = cx->compartment; JS_ASSERT(hasAvailableArenas()); - Arena *arena = info.emptyArenaLists.getNext(comp, thingKind); - JS_ASSERT(arena); - JS_ASSERT(arena->header()->isUsed); + ArenaHeader *aheader = info.emptyArenaLists.getTypedFreeList(thingKind); + if (!aheader) { + aheader = info.emptyArenaLists.getOtherArena(); + aheader->freeList = aheader->getArena()->buildFreeList(); + } + JS_ASSERT(!aheader->compartment); + JS_ASSERT(!aheader->getMarkingDelay()->link); + aheader->compartment = comp; + aheader->setThingKind(thingKind); --info.numFree; JSRuntime *rt = info.runtime; - JS_ATOMIC_ADD(&rt->gcBytes, sizeof(Arena)); - JS_ATOMIC_ADD(&comp->gcBytes, sizeof(Arena)); + JS_ATOMIC_ADD(&rt->gcBytes, ArenaSize); + JS_ATOMIC_ADD(&comp->gcBytes, ArenaSize); METER(JS_ATOMIC_INCREMENT(&rt->gcStats.nallarenas)); if (comp->gcBytes >= comp->gcTriggerBytes) TriggerCompartmentGC(comp); - return arena; + return aheader; } -template void -Chunk::releaseArena(Arena *arena) +Chunk::releaseArena(ArenaHeader *aheader) { JSRuntime *rt = info.runtime; #ifdef JS_THREADSAFE - Conditionally lockIf(rt->gcHelperThread.sweeping, info.chunkLock); + Maybe maybeLock; + if (rt->gcHelperThread.sweeping) + maybeLock.construct(info.chunkLock); #endif - JSCompartment *comp = arena->header()->compartment; + JSCompartment *comp = aheader->compartment; METER(rt->gcStats.afree++); JS_ASSERT(rt->gcStats.nallarenas != 0); METER(JS_ATOMIC_DECREMENT(&rt->gcStats.nallarenas)); - JS_ASSERT(size_t(rt->gcBytes) >= sizeof(Arena)); - JS_ASSERT(size_t(comp->gcBytes) >= sizeof(Arena)); + JS_ASSERT(size_t(rt->gcBytes) >= ArenaSize); + JS_ASSERT(size_t(comp->gcBytes) >= ArenaSize); #ifdef JS_THREADSAFE if (rt->gcHelperThread.sweeping) { - rt->reduceGCTriggerBytes(GC_HEAP_GROWTH_FACTOR * sizeof(Arena)); - comp->reduceGCTriggerBytes(GC_HEAP_GROWTH_FACTOR * sizeof(Arena)); + 
rt->reduceGCTriggerBytes(GC_HEAP_GROWTH_FACTOR * ArenaSize); + comp->reduceGCTriggerBytes(GC_HEAP_GROWTH_FACTOR * ArenaSize); } #endif - JS_ATOMIC_ADD(&rt->gcBytes, -sizeof(Arena)); - JS_ATOMIC_ADD(&comp->gcBytes, -sizeof(Arena)); - info.emptyArenaLists.insert((Arena *)arena); -#ifdef DEBUG - arena->header()->isUsed = false; -#endif - arena->header()->compartment = NULL; + JS_ATOMIC_ADD(&rt->gcBytes, -ArenaSize); + JS_ATOMIC_ADD(&comp->gcBytes, -ArenaSize); + info.emptyArenaLists.insert(aheader); + aheader->compartment = NULL; ++info.numFree; if (unused()) info.age = 0; @@ -604,7 +523,7 @@ ExpireGCChunks(JSRuntime *rt) } template -static Arena * +static ArenaHeader * AllocateArena(JSContext *cx, unsigned thingKind) { JSRuntime *rt = cx->runtime; @@ -707,32 +626,63 @@ js_InitGC(JSRuntime *rt, uint32 maxbytes) namespace js { -template -static inline ConservativeGCTest -MarkCell(Cell *cell, JSTracer *trc) +inline bool +InFreeList(ArenaHeader *aheader, void *thing) { - return GetArena(cell)->mark((T *)cell, trc); + for (FreeCell *cursor = aheader->freeList; cursor; cursor = cursor->link) { + JS_ASSERT(!cursor->isMarked()); + JS_ASSERT_IF(cursor->link, cursor < cursor->link); + + /* If the cursor moves past the thing, it's not in the freelist. */ + if (thing < cursor) + break; + + /* If we find it on the freelist, it's dead. */ + if (thing == cursor) + return true; + } + return false; +} + +template +inline ConservativeGCTest +MarkArenaPtrConservatively(JSTracer *trc, ArenaHeader *aheader, uintptr_t addr) +{ + JS_ASSERT(aheader->compartment); + JS_ASSERT(sizeof(T) == aheader->getThingSize()); + + uintptr_t offset = (addr & ArenaMask) - Arena::FirstThingOffset; + if (offset >= Arena::ThingsSpan) + return CGCT_NOTARENA; + + /* addr can point inside the thing so we must align the address. 
*/ + uintptr_t shift = offset % sizeof(T); + T *thing = reinterpret_cast(addr - shift); + + if (InFreeList(aheader, thing)) + return CGCT_NOTLIVE; + + MarkRoot(trc, thing, "machine stack"); + +#ifdef JS_DUMP_CONSERVATIVE_GC_ROOTS + if (IS_GC_MARKING_TRACER(trc) && static_cast(trc)->conservativeDumpFileName) + static_cast(trc)->conservativeRoots.append(thing); +#endif + +#if defined JS_DUMP_CONSERVATIVE_GC_ROOTS || defined JS_GCMETER + if (IS_GC_MARKING_TRACER(trc) && shift) + static_cast(trc)->conservativeStats.unaligned++; +#endif + return CGCT_VALID; } /* - * Returns CGCT_VALID or CGCT_VALIDWITHOFFSET and mark it if the w can be a - * live GC thing and sets thingKind accordingly. Otherwise returns the - * reason for rejection. + * Returns CGCT_VALID and mark it if the w can be a live GC thing and sets + * thingKind accordingly. Otherwise returns the reason for rejection. */ inline ConservativeGCTest -MarkIfGCThingWord(JSTracer *trc, jsuword w, uint32 &thingKind) +MarkIfGCThingWord(JSTracer *trc, jsuword w) { - JSRuntime *rt = trc->context->runtime; - /* - * The conservative scanner may access words that valgrind considers as - * undefined. To avoid false positives and not to alter valgrind view of - * the memory we make as memcheck-defined the argument, a copy of the - * original word. See bug 572678. - */ -#ifdef JS_VALGRIND - VALGRIND_MAKE_MEM_DEFINED(&w, sizeof(w)); -#endif - /* * We assume that the compiler never uses sub-word alignment to store * pointers and does not tag pointers on its own. 
Additionally, the value @@ -750,71 +700,75 @@ MarkIfGCThingWord(JSTracer *trc, jsuword w, uint32 &thingKind) */ const jsuword JSID_PAYLOAD_MASK = ~jsuword(JSID_TYPE_MASK); #if JS_BITS_PER_WORD == 32 - jsuword payload = w & JSID_PAYLOAD_MASK; + jsuword addr = w & JSID_PAYLOAD_MASK; #elif JS_BITS_PER_WORD == 64 - jsuword payload = w & JSID_PAYLOAD_MASK & JSVAL_PAYLOAD_MASK; + jsuword addr = w & JSID_PAYLOAD_MASK & JSVAL_PAYLOAD_MASK; #endif - Cell *cell = reinterpret_cast(payload); - Chunk *chunk = cell->chunk(); + Chunk *chunk = Chunk::fromAddress(addr); - if (!rt->gcChunkSet.has(chunk)) + if (!trc->context->runtime->gcChunkSet.has(chunk)) return CGCT_NOTCHUNK; - if (!chunk->withinArenasRange(cell)) + /* + * We query for pointers outside the arena array after checking for an + * allocated chunk. Such pointers are rare and we want to reject them + * after doing more likely rejections. + */ + if (!Chunk::withinArenasRange(addr)) return CGCT_NOTARENA; - ArenaHeader *aheader = cell->arena()->header(); + ArenaHeader *aheader = &chunk->arenas[Chunk::arenaIndex(addr)].aheader; if (!aheader->compartment) - return CGCT_NOTLIVE; + return CGCT_FREEARENA; ConservativeGCTest test; - thingKind = aheader->thingKind; + unsigned thingKind = aheader->getThingKind(); switch (thingKind) { case FINALIZE_OBJECT0: case FINALIZE_OBJECT0_BACKGROUND: - test = MarkCell(cell, trc); + test = MarkArenaPtrConservatively(trc, aheader, addr); break; case FINALIZE_OBJECT2: case FINALIZE_OBJECT2_BACKGROUND: - test = MarkCell(cell, trc); + test = MarkArenaPtrConservatively(trc, aheader, addr); break; case FINALIZE_OBJECT4: case FINALIZE_OBJECT4_BACKGROUND: - test = MarkCell(cell, trc); + test = MarkArenaPtrConservatively(trc, aheader, addr); break; case FINALIZE_OBJECT8: case FINALIZE_OBJECT8_BACKGROUND: - test = MarkCell(cell, trc); + test = MarkArenaPtrConservatively(trc, aheader, addr); break; case FINALIZE_OBJECT12: case FINALIZE_OBJECT12_BACKGROUND: - test = MarkCell(cell, trc); + test = 
MarkArenaPtrConservatively(trc, aheader, addr); break; case FINALIZE_OBJECT16: case FINALIZE_OBJECT16_BACKGROUND: - test = MarkCell(cell, trc); + test = MarkArenaPtrConservatively(trc, aheader, addr); break; case FINALIZE_STRING: - test = MarkCell(cell, trc); + test = MarkArenaPtrConservatively(trc, aheader, addr); break; case FINALIZE_EXTERNAL_STRING: - test = MarkCell(cell, trc); + test = MarkArenaPtrConservatively(trc, aheader, addr); break; case FINALIZE_SHORT_STRING: - test = MarkCell(cell, trc); + test = MarkArenaPtrConservatively(trc, aheader, addr); break; case FINALIZE_FUNCTION: - test = MarkCell(cell, trc); + test = MarkArenaPtrConservatively(trc, aheader, addr); break; case FINALIZE_SHAPE: - test = MarkCell(cell, trc); + test = MarkArenaPtrConservatively(trc, aheader, addr); break; #if JS_HAS_XML_SUPPORT case FINALIZE_XML: - test = MarkCell(cell, trc); + test = MarkArenaPtrConservatively(trc, aheader, addr); break; #endif default: @@ -825,13 +779,6 @@ MarkIfGCThingWord(JSTracer *trc, jsuword w, uint32 &thingKind) return test; } -inline ConservativeGCTest -MarkIfGCThingWord(JSTracer *trc, jsuword w) -{ - uint32 thingKind; - return MarkIfGCThingWord(trc, w, thingKind); -} - static void MarkWordConservatively(JSTracer *trc, jsuword w) { @@ -845,34 +792,7 @@ MarkWordConservatively(JSTracer *trc, jsuword w) VALGRIND_MAKE_MEM_DEFINED(&w, sizeof(w)); #endif - uint32 thingKind; -#if defined JS_DUMP_CONSERVATIVE_GC_ROOTS || defined JS_GCMETER - ConservativeGCTest test = -#endif - MarkIfGCThingWord(trc, w, thingKind); - -#ifdef JS_DUMP_CONSERVATIVE_GC_ROOTS - if (test == CGCT_VALID || test == CGCT_VALIDWITHOFFSET) { - if (IS_GC_MARKING_TRACER(trc) && static_cast(trc)->conservativeDumpFileName) { - const jsuword JSID_PAYLOAD_MASK = ~jsuword(JSID_TYPE_MASK); -#if JS_BITS_PER_WORD == 32 - jsuword payload = w & JSID_PAYLOAD_MASK; -#elif JS_BITS_PER_WORD == 64 - jsuword payload = w & JSID_PAYLOAD_MASK & JSVAL_PAYLOAD_MASK; -#endif - void *thing = (test == 
CGCT_VALIDWITHOFFSET) - ? GetAlignedThing((void *)payload, thingKind) - : (void *)payload; - GCMarker::ConservativeRoot root = {thing, thingKind}; - static_cast(trc)->conservativeRoots.append(root); - } - } -#endif - -#if defined JS_DUMP_CONSERVATIVE_GC_ROOTS || defined JS_GCMETER - if (IS_GC_MARKING_TRACER(trc)) - static_cast(trc)->conservativeStats.counter[test]++; -#endif + MarkIfGCThingWord(trc, w); } static void @@ -968,8 +888,8 @@ RecordNativeStackTopForGC(JSContext *cx) #ifdef JS_THREADSAFE /* Record the stack top here only if we are called from a request. */ - JS_ASSERT(cx->thread->data.requestDepth >= ctd->requestThreshold); - if (cx->thread->data.requestDepth == ctd->requestThreshold) + JS_ASSERT(cx->thread()->data.requestDepth >= ctd->requestThreshold); + if (cx->thread()->data.requestDepth == ctd->requestThreshold) return; #endif ctd->recordStackTop(); @@ -1235,7 +1155,7 @@ bool CheckAllocation(JSContext *cx) { #ifdef JS_THREADSAFE - JS_ASSERT(cx->thread); + JS_ASSERT(cx->thread()); #endif JS_ASSERT(!cx->runtime->gcRunning); return true; @@ -1263,9 +1183,9 @@ RunLastDitchGC(JSContext *cx) JSRuntime *rt = cx->runtime; METER(rt->gcStats.lastditch++); #ifdef JS_THREADSAFE - Conditionally - unlockAtomsCompartmenIf(cx->compartment == rt->atomsCompartment && - rt->atomsCompartmentIsLocked, cx); + Maybe maybeUnlockAtomsCompartment; + if (cx->compartment == rt->atomsCompartment && rt->atomsCompartmentIsLocked) + maybeUnlockAtomsCompartment.construct(cx); #endif /* The last ditch GC preserves all atoms. 
*/ AutoKeepAtoms keep(rt); @@ -1315,13 +1235,13 @@ RefillTypedFreeList(JSContext *cx, unsigned thingKind) #ifdef JS_THREADSAFE try_again: #endif - Arena *a = NULL; + ArenaHeader *aheader = NULL; if (!arenaList->hasToBeFinalized) { - a = reinterpret_cast *>(arenaList->getNextWithFreeList(cx)); - if (a) { - JS_ASSERT(a->header()->freeList); - JS_ASSERT(sizeof(T) == a->header()->thingSize); - compartment->freeLists.populate(a, thingKind); + aheader = arenaList->getNextWithFreeList(); + if (aheader) { + JS_ASSERT(aheader->freeList); + JS_ASSERT(sizeof(T) == aheader->getThingSize()); + compartment->freeLists.populate(aheader, thingKind); return true; } } @@ -1330,11 +1250,10 @@ try_again: * If the allocation fails rt->gcIsNeeded will be set and we will run * the GC on the next loop iteration if the last ditch GC is allowed. */ - a = AllocateArena(cx, thingKind); - if (a) { - compartment->freeLists.populate(a, thingKind); - arenaList->insert((Arena *) a); - a->getMarkingDelay()->init(); + aheader = AllocateArena(cx, thingKind); + if (aheader) { + compartment->freeLists.populate(aheader, thingKind); + arenaList->insert(aheader); return true; } #ifdef JS_THREADSAFE @@ -1452,14 +1371,14 @@ namespace js { GCMarker::GCMarker(JSContext *cx) : color(0), - unmarkedArenaStackTop(NULL), + unmarkedArenaStackTop(MarkingDelay::stackBottom()), objStack(cx->runtime->gcMarkStackObjs, sizeof(cx->runtime->gcMarkStackObjs)), xmlStack(cx->runtime->gcMarkStackXMLs, sizeof(cx->runtime->gcMarkStackXMLs)), largeStack(cx->runtime->gcMarkStackLarges, sizeof(cx->runtime->gcMarkStackLarges)) { JS_TRACER_INIT(this, cx, NULL); #ifdef DEBUG - markLaterCount = 0; + markLaterArenas = 0; #endif #ifdef JS_DUMP_CONSERVATIVE_GC_ROOTS conservativeDumpFileName = getenv("JS_DUMP_CONSERVATIVE_GC_ROOTS"); @@ -1482,117 +1401,108 @@ void GCMarker::delayMarkingChildren(const void *thing) { const Cell *cell = reinterpret_cast(thing); - Arena *a = cell->arena(); - JS_ASSERT(cell->isMarked()); - 
METER(cell->compartment()->rt->gcStats.unmarked++); - MarkingDelay *markingDelay = a->getMarkingDelay(); - - if (markingDelay->link) { - if (markingDelay->start > (jsuword)cell) - markingDelay->start = (jsuword)cell; - /* Arena already scheduled to be marked again */ + ArenaHeader *aheader = cell->arenaHeader(); + if (aheader->getMarkingDelay()->link) { + /* Arena already scheduled to be marked later */ return; } - markingDelay->start = (jsuword)cell; - Arena *tos = unmarkedArenaStackTop; - markingDelay->link = tos ? tos : a; - unmarkedArenaStackTop = a; -#ifdef DEBUG - JSCompartment *comp = cell->compartment(); - markLaterCount += Arena::ThingsPerArena; - METER_UPDATE_MAX(comp->rt->gcStats.maxunmarked, markLaterCount); -#endif + aheader->getMarkingDelay()->link = unmarkedArenaStackTop; + unmarkedArenaStackTop = aheader; + METER(markLaterArenas++); + METER_UPDATE_MAX(cell->compartment()->rt->gcStats.maxunmarked, markLaterArenas); } template -void -Arena::markDelayedChildren(JSTracer *trc) +static void +MarkDelayedChilderen(JSTracer *trc, ArenaHeader *aheader) { - T* thing = (T *)getMarkingDelay()->start; - T *thingsEnd = &t.things[ThingsPerArena-1]; - JS_ASSERT(thing == getAlignedThing(thing)); - while (thing <= thingsEnd) { + Arena *a = aheader->getArena(); + T *end = &a->t.things[Arena::ThingsPerArena]; + for (T* thing = &a->t.things[0]; thing != end; ++thing) { if (thing->isMarked()) js::gc::MarkChildren(trc, thing); - - thing++; } } void GCMarker::markDelayedChildren() { - while (Arena *a = unmarkedArenaStackTop) { + while (unmarkedArenaStackTop != MarkingDelay::stackBottom()) { /* - * markingDelay->link == current arena indicates last arena on stack. - * If marking gets delayed at the same arena again, the arena is pushed - * again in delayMarkingChildren. markingDelay->link has to be cleared, - * otherwise the arena is not pushed again. + * If marking gets delayed at the same arena again, we must repeat + * marking of its things. 
For that we pop arena from the stack and + * clear its nextDelayedMarking before we begin the marking. */ - MarkingDelay *markingDelay = a->getMarkingDelay(); - unmarkedArenaStackTop = (markingDelay->link != a) - ? markingDelay->link - : NULL; - markingDelay->link = NULL; + ArenaHeader *aheader = unmarkedArenaStackTop; + unmarkedArenaStackTop = aheader->getMarkingDelay()->link; + JS_ASSERT(unmarkedArenaStackTop); + aheader->getMarkingDelay()->link = NULL; #ifdef DEBUG - markLaterCount -= Arena::ThingsPerArena; + JS_ASSERT(markLaterArenas); + markLaterArenas--; #endif - switch (a->header()->thingKind) { + switch (aheader->getThingKind()) { case FINALIZE_OBJECT0: case FINALIZE_OBJECT0_BACKGROUND: - reinterpret_cast *>(a)->markDelayedChildren(this); + MarkDelayedChilderen(this, aheader); break; case FINALIZE_OBJECT2: case FINALIZE_OBJECT2_BACKGROUND: - reinterpret_cast *>(a)->markDelayedChildren(this); + MarkDelayedChilderen(this, aheader); break; case FINALIZE_OBJECT4: case FINALIZE_OBJECT4_BACKGROUND: - reinterpret_cast *>(a)->markDelayedChildren(this); + MarkDelayedChilderen(this, aheader); break; case FINALIZE_OBJECT8: case FINALIZE_OBJECT8_BACKGROUND: - reinterpret_cast *>(a)->markDelayedChildren(this); + MarkDelayedChilderen(this, aheader); break; case FINALIZE_OBJECT12: case FINALIZE_OBJECT12_BACKGROUND: - reinterpret_cast *>(a)->markDelayedChildren(this); + MarkDelayedChilderen(this, aheader); break; case FINALIZE_OBJECT16: case FINALIZE_OBJECT16_BACKGROUND: - reinterpret_cast *>(a)->markDelayedChildren(this); + MarkDelayedChilderen(this, aheader); break; case FINALIZE_STRING: - reinterpret_cast *>(a)->markDelayedChildren(this); + MarkDelayedChilderen(this, aheader); break; case FINALIZE_EXTERNAL_STRING: - reinterpret_cast *>(a)->markDelayedChildren(this); + MarkDelayedChilderen(this, aheader); break; case FINALIZE_SHORT_STRING: JS_NOT_REACHED("no delayed marking"); break; case FINALIZE_FUNCTION: - reinterpret_cast *>(a)->markDelayedChildren(this); + 
MarkDelayedChilderen(this, aheader); break; case FINALIZE_SHAPE: - reinterpret_cast *>(a)->markDelayedChildren(this); + MarkDelayedChilderen(this, aheader); break; #if JS_HAS_XML_SUPPORT case FINALIZE_XML: - reinterpret_cast *>(a)->markDelayedChildren(this); + MarkDelayedChilderen(this, aheader); break; #endif default: JS_NOT_REACHED("wrong thingkind"); } } - JS_ASSERT(markLaterCount == 0); - JS_ASSERT(!unmarkedArenaStackTop); + JS_ASSERT(!markLaterArenas); } } /* namespace js */ +#ifdef DEBUG +static void +EmptyMarkCallback(JSTracer *trc, void *thing, uint32 kind) +{ +} +#endif + static void gc_root_traversal(JSTracer *trc, const RootEntry &entry) { @@ -1607,23 +1517,18 @@ gc_root_traversal(JSTracer *trc, const RootEntry &entry) if (ptr) { if (!JSAtom::isStatic(ptr)) { - bool root_points_to_gcArenaList = false; - JSCompartment **c = trc->context->runtime->compartments.begin(); - for (; c != trc->context->runtime->compartments.end(); ++c) { - JSCompartment *comp = *c; - if (checkArenaListsForThing(comp, ptr)) { - root_points_to_gcArenaList = true; - break; - } - } - if (!root_points_to_gcArenaList && entry.value.name) { + /* Use conservative machinery to find if ptr is a valid GC thing. */ + JSTracer checker; + JS_TRACER_INIT(&checker, trc->context, EmptyMarkCallback); + ConservativeGCTest test = MarkIfGCThingWord(&checker, reinterpret_cast(ptr)); + if (test != CGCT_VALID && entry.value.name) { fprintf(stderr, "JS API usage error: the address passed to JS_AddNamedRoot currently holds an\n" "invalid gcthing. 
This is usually caused by a missing call to JS_RemoveRoot.\n" "The root's name is \"%s\".\n", entry.value.name); } - JS_ASSERT(root_points_to_gcArenaList); + JS_ASSERT(test == CGCT_VALID); } } #endif @@ -1642,7 +1547,7 @@ gc_lock_traversal(const GCLocks::Entry &entry, JSTracer *trc) } void -js_TraceStackFrame(JSTracer *trc, JSStackFrame *fp) +js_TraceStackFrame(JSTracer *trc, StackFrame *fp) { MarkObject(trc, fp->scopeChain(), "scope chain"); if (fp->isDummyFrame()) @@ -1951,28 +1856,23 @@ FinalizeArenaList(JSCompartment *comp, JSContext *cx, JSGCInvocationKind gckind, { JS_STATIC_ASSERT(!(sizeof(T) & Cell::CellMask)); ArenaList *arenaList = GetFinalizableArenaList(comp, thingKind); - Arena **ap = &arenaList->head; - Arena *a = (Arena *) *ap; - if (!a) - return; - JS_ASSERT(sizeof(T) == arenaList->head->header()->thingSize); + ArenaHeader **ap = &arenaList->head; #ifdef JS_GCMETER uint32 nlivearenas = 0, nkilledarenas = 0, nthings = 0; #endif - for (;;) { - JS_ASSERT(a->header()->thingKind == thingKind); - bool allClear = a->finalize(cx); + while (ArenaHeader *aheader = *ap) { + JS_ASSERT(aheader->getThingKind() == thingKind); + JS_ASSERT(aheader->getThingSize() == sizeof(T)); + bool allClear = aheader->getArena()->finalize(cx); if (allClear) { - *ap = a->header()->next; - a->chunk()->releaseArena(a); + *ap = aheader->next; + aheader->chunk()->releaseArena(aheader); METER(nkilledarenas++); } else { - ap = &a->header()->next; + ap = &aheader->next; METER(nlivearenas++); } - if (!(a = (Arena *) *ap)) - break; } arenaList->cursor = arenaList->head; METER(UpdateCompartmentStats(comp, thingKind, nlivearenas, nkilledarenas, nthings)); @@ -1980,47 +1880,51 @@ FinalizeArenaList(JSCompartment *comp, JSContext *cx, JSGCInvocationKind gckind, template static void -FinalizeArenaListLater(JSContext *cx, ArenaList *arenaList, Arena *head) +FinalizeArenaListLater(JSContext *cx, ArenaList *arenaList, ArenaHeader *listHead) { JS_STATIC_ASSERT(!(sizeof(T) & Cell::CellMask)); 
JS_ASSERT(arenaList->hasToBeFinalized); - Arena **ap = &head; - Arena *a = (Arena *) *ap; - JS_ASSERT(a); + ArenaHeader **ap = &listHead; + ArenaHeader *aheader = *ap; + JS_ASSERT(aheader); #ifdef DEBUG - int thingKind = head->header()->thingKind; - JSCompartment *comp = head->header()->compartment; + int thingKind = listHead->getThingKind(); + JSCompartment *comp = listHead->compartment; #endif - JS_ASSERT(sizeof(T) == head->header()->thingSize); + JS_ASSERT(sizeof(T) == listHead->getThingSize()); #ifdef JS_GCMETER uint32 nlivearenas = 0, nkilledarenas = 0, nthings = 0; #endif for (;;) { - bool allClear = a->finalize(cx); - /* We don't delete the head because the next allcoated arena has to link to it. */ - if (allClear && (a != (Arena *)head)) { - *ap = a->header()->next; - a->chunk()->releaseArena(a); + bool allClear = aheader->getArena()->finalize(cx); + + /* + * We don't delete the head because the next allocated arena has to + * link to it. + */ + if (allClear && (aheader != listHead)) { + *ap = aheader->next; + aheader->chunk()->releaseArena(aheader); METER(nkilledarenas++); } else { - ap = &a->header()->next; + ap = &aheader->next; METER(nlivearenas++); } - if (!(a = (Arena *) *ap)) + if (!(aheader = *ap)) break; } - arenaList->cursor = (Arena *)head; + arenaList->cursor = listHead; arenaList->hasToBeFinalized = false; METER(UpdateCompartmentStats(comp, thingKind, nlivearenas, nkilledarenas, nthings)); } void -FinalizeArenaList(JSContext *cx, ArenaList *list, Arena *head) +FinalizeArenaList(JSContext *cx, ArenaList *list, ArenaHeader *listHead) { JS_ASSERT(list->head); - JS_ASSERT(head); - js::gc::FinalizeKind kind = js::gc::FinalizeKind(head->header()->thingKind); + JS_ASSERT(listHead); + FinalizeKind kind = js::gc::FinalizeKind(listHead->getThingKind()); switch (kind) { case FINALIZE_OBJECT0: @@ -2035,28 +1939,28 @@ FinalizeArenaList(JSContext *cx, ArenaList *list, Arena *head) JS_NOT_REACHED("no background finalization"); break; case 
FINALIZE_OBJECT0_BACKGROUND: - FinalizeArenaListLater(cx, list, (Arena *)head); + FinalizeArenaListLater(cx, list, listHead); break; case FINALIZE_OBJECT2_BACKGROUND: - FinalizeArenaListLater(cx, list, (Arena *)head); + FinalizeArenaListLater(cx, list, listHead); break; case FINALIZE_OBJECT4_BACKGROUND: - FinalizeArenaListLater(cx, list, (Arena *)head); + FinalizeArenaListLater(cx, list, listHead); break; case FINALIZE_OBJECT8_BACKGROUND: - FinalizeArenaListLater(cx, list, (Arena *)head); + FinalizeArenaListLater(cx, list, listHead); break; case FINALIZE_OBJECT12_BACKGROUND: - FinalizeArenaListLater(cx, list, (Arena *)head); + FinalizeArenaListLater(cx, list, listHead); break; case FINALIZE_OBJECT16_BACKGROUND: - FinalizeArenaListLater(cx, list, (Arena *)head); + FinalizeArenaListLater(cx, list, listHead); break; case FINALIZE_STRING: - FinalizeArenaListLater(cx, list, (Arena *)head); + FinalizeArenaListLater(cx, list, listHead); break; case FINALIZE_SHORT_STRING: - FinalizeArenaListLater(cx, list, (Arena *)head); + FinalizeArenaListLater(cx, list, listHead); break; #if JS_HAS_XML_SUPPORT case FINALIZE_XML: @@ -2073,11 +1977,8 @@ template void BackgroundFinalize(JSCompartment *comp, JSContext *cx, JSGCInvocationKind gckind, unsigned thingKind) { ArenaList *list = GetFinalizableArenaList(comp, thingKind); - if (list->head && list->head->header()->next) { - cx->gcBackgroundFree->finalizeLater(list); - } else { + if (!(list->head && list->head->next && cx->gcBackgroundFree->finalizeLater(list))) FinalizeArenaList(comp, cx, gckind, thingKind); - } } #endif @@ -2252,44 +2153,15 @@ GCHelperThread::replenishAndFreeLater(void *ptr) Foreground::free_(ptr); } -void -GCHelperThread::replenishAndFinalizeLater(ArenaList *list) -{ - JS_ASSERT(cx); - JS_ASSERT(finalizeCursor == finalizeCursorEnd); - do { - if (finalizeCursor && !finalizeVector.append(finalizeCursorEnd - FREE_ARRAY_LENGTH)) - break; - finalizeCursor = (void **) OffTheBooks::malloc_(FREE_ARRAY_SIZE); - if 
(!finalizeCursor) { - finalizeCursorEnd = NULL; - break; - } - finalizeCursorEnd = finalizeCursor + FREE_ARRAY_LENGTH; - *finalizeCursor++ = list; - *finalizeCursor++ = list->head; - return; - } while (false); - FinalizeArenaList(cx, list, list->head); -} - void GCHelperThread::doSweep() { JS_ASSERT(cx); - if (finalizeCursor) { - void **array = finalizeCursorEnd - FREE_ARRAY_LENGTH; - finalizeElementsAndArray(array, finalizeCursor); - finalizeCursor = finalizeCursorEnd = NULL; - } else { - JS_ASSERT(!finalizeCursorEnd); - } - for (void ***iter = finalizeVector.begin(); iter != finalizeVector.end(); ++iter) { - void **array = *iter; - finalizeElementsAndArray(array, array + FREE_ARRAY_LENGTH); - } + for (FinalizeListAndHead *i = finalizeVector.begin(); i != finalizeVector.end(); ++i) + FinalizeArenaList(cx, i->list, i->head); finalizeVector.resize(0); cx = NULL; + if (freeCursor) { void **array = freeCursorEnd - FREE_ARRAY_LENGTH; freeElementsAndArray(array, freeCursor); @@ -2622,9 +2494,9 @@ LetOtherGCFinish(JSContext *cx) { JSRuntime *rt = cx->runtime; JS_ASSERT(rt->gcThread); - JS_ASSERT(cx->thread != rt->gcThread); + JS_ASSERT(cx->thread() != rt->gcThread); - size_t requestDebit = cx->thread->data.requestDepth ? 1 : 0; + size_t requestDebit = cx->thread()->data.requestDepth ? 1 : 0; JS_ASSERT(requestDebit <= rt->requestCount); #ifdef JS_TRACER JS_ASSERT_IF(requestDebit == 0, !JS_ON_TRACE(cx)); @@ -2693,7 +2565,7 @@ AutoGCSession::AutoGCSession(JSContext *cx) JSRuntime *rt = cx->runtime; #ifdef JS_THREADSAFE - if (rt->gcThread && rt->gcThread != cx->thread) + if (rt->gcThread && rt->gcThread != cx->thread()) LetOtherGCFinish(cx); #endif @@ -2702,7 +2574,7 @@ AutoGCSession::AutoGCSession(JSContext *cx) #ifdef JS_THREADSAFE /* No other thread is in GC, so indicate that we're now in GC. 
*/ JS_ASSERT(!rt->gcThread); - rt->gcThread = cx->thread; + rt->gcThread = cx->thread(); /* * Notify operation callbacks on other threads, which will give them a @@ -2712,7 +2584,7 @@ AutoGCSession::AutoGCSession(JSContext *cx) */ for (JSThread::Map::Range r = rt->threads.all(); !r.empty(); r.popFront()) { JSThread *thread = r.front().value; - if (thread != cx->thread) + if (thread != cx->thread()) thread->data.triggerOperationCallback(rt); } @@ -2722,7 +2594,7 @@ AutoGCSession::AutoGCSession(JSContext *cx) * JS_NOTIFY_REQUEST_DONE, which will wake us up, is only called on * rt->requestCount transitions to 0. */ - size_t requestDebit = cx->thread->data.requestDepth ? 1 : 0; + size_t requestDebit = cx->thread()->data.requestDepth ? 1 : 0; JS_ASSERT(requestDebit <= rt->requestCount); if (requestDebit != rt->requestCount) { rt->requestCount -= requestDebit; @@ -2750,7 +2622,7 @@ AutoGCSession::~AutoGCSession() JSRuntime *rt = context->runtime; rt->gcRunning = false; #ifdef JS_THREADSAFE - JS_ASSERT(rt->gcThread == context->thread); + JS_ASSERT(rt->gcThread == context->thread()); rt->gcThread = NULL; JS_NOTIFY_GC_DONE(rt); #endif @@ -2773,7 +2645,7 @@ GCUntilDone(JSContext *cx, JSCompartment *comp, JSGCInvocationKind gckind GCTIM rt->gcPoke = true; #ifdef JS_THREADSAFE JS_ASSERT(rt->gcThread); - if (rt->gcThread != cx->thread) { + if (rt->gcThread != cx->thread()) { /* We do not return until another GC finishes. */ LetOtherGCFinish(cx); } @@ -2919,7 +2791,7 @@ SetProtoCheckingForCycles(JSContext *cx, JSObject *obj, JSObject *proto) * request. 
*/ #ifdef JS_THREADSAFE - JS_ASSERT(cx->thread->data.requestDepth); + JS_ASSERT(cx->thread()->data.requestDepth); /* * This is only necessary if AutoGCSession below would wait for GC to @@ -3000,7 +2872,7 @@ TraceRuntime(JSTracer *trc) JSRuntime *rt = cx->runtime; AutoLockGC lock(rt); - if (rt->gcThread != cx->thread) { + if (rt->gcThread != cx->thread()) { AutoGCSession gcsession(cx); AutoUnlockGC unlock(rt); RecordNativeStackTopForGC(trc->context); diff --git a/js/src/jsgc.h b/js/src/jsgc.h index d4b1f9aea224..4f2e024a9613 100644 --- a/js/src/jsgc.h +++ b/js/src/jsgc.h @@ -92,6 +92,7 @@ enum FinalizeKind { FINALIZE_OBJECT16_BACKGROUND, FINALIZE_OBJECT_LAST = FINALIZE_OBJECT16_BACKGROUND, FINALIZE_FUNCTION, + FINALIZE_FUNCTION_AND_OBJECT_LAST = FINALIZE_FUNCTION, FINALIZE_SHAPE, #if JS_HAS_XML_SUPPORT FINALIZE_XML, @@ -102,18 +103,43 @@ enum FinalizeKind { FINALIZE_LIMIT }; -const uintN JS_FINALIZE_OBJECT_LIMIT = 12; +const size_t ArenaShift = 12; +const size_t ArenaSize = size_t(1) << ArenaShift; +const size_t ArenaMask = ArenaSize - 1; + +template struct Arena; /* Every arena has a header. */ struct ArenaHeader { JSCompartment *compartment; - Arena *next; + ArenaHeader *next; FreeCell *freeList; + + private: unsigned thingKind; + + public: + inline uintptr_t address() const; + inline Chunk *chunk() const; + inline ArenaBitmap *bitmap() const; + + template + Arena *getArena() { + return reinterpret_cast *>(address()); + } + + unsigned getThingKind() const { + return thingKind; + } + + void setThingKind(unsigned kind) { + thingKind = kind; + } + + inline MarkingDelay *getMarkingDelay() const; + #ifdef DEBUG - size_t thingSize; - bool isUsed; - bool hasFreeThings; + JS_FRIEND_API(size_t) getThingSize() const; #endif }; @@ -143,10 +169,6 @@ struct Things { template struct Arena { - static const size_t ArenaSize = 4096; - - ArenaHeader aheader; - /* * Layout of an arena: * An arena is 4K. 
We want it to have a header followed by a list of T @@ -171,35 +193,28 @@ struct Arena { static const size_t SpaceAfterHeader = ArenaSize - HeaderSize; static const size_t Filler2Size = SpaceAfterHeader % sizeof(T); static const size_t ThingsPerArena = SpaceAfterHeader / sizeof(T); + static const size_t FirstThingOffset = HeaderSize; + static const size_t ThingsSpan = ThingsPerArena * sizeof(T); + ArenaHeader aheader; Things t; static void staticAsserts() { + /* + * Everything we store in the heap must be a multiple of the cell + * size. + */ + JS_STATIC_ASSERT(sizeof(T) % Cell::CellSize == 0); JS_STATIC_ASSERT(offsetof(Arena, t.things) % sizeof(T) == 0); JS_STATIC_ASSERT(sizeof(Arena) == ArenaSize); } - inline Chunk *chunk() const; - inline size_t arenaIndex() const; + inline FreeCell *buildFreeList(); - inline ArenaHeader *header() { return &aheader; }; - - inline MarkingDelay *getMarkingDelay() const; - inline ArenaBitmap *bitmap() const; - - inline ConservativeGCTest mark(T *thing, JSTracer *trc); - void markDelayedChildren(JSTracer *trc); - inline bool inFreeList(void *thing) const; - inline T *getAlignedThing(const void *thing); -#ifdef DEBUG - inline bool assureThingIsAligned(void *thing); -#endif - - void init(JSCompartment *compartment, unsigned thingKind); bool finalize(JSContext *cx); }; -void FinalizeArena(Arena *a); +void FinalizeArena(ArenaHeader *aheader); /* * Live objects are marked black. How many other additional colors are available @@ -209,7 +224,7 @@ static const uint32 BLACK = 0; /* An arena bitmap contains enough mark bits for all the cells in an arena. */ struct ArenaBitmap { - static const size_t BitCount = Arena::ArenaSize / Cell::CellSize; + static const size_t BitCount = ArenaSize / Cell::CellSize; static const size_t BitWords = BitCount / JS_BITS_PER_WORD; uintptr_t bitmap[BitWords]; @@ -258,100 +273,76 @@ struct ArenaBitmap { }; /* Ensure that bitmap covers the whole arena. 
*/ -JS_STATIC_ASSERT(Arena::ArenaSize % Cell::CellSize == 0); +JS_STATIC_ASSERT(ArenaSize % Cell::CellSize == 0); JS_STATIC_ASSERT(ArenaBitmap::BitCount % JS_BITS_PER_WORD == 0); -/* Marking delay is used to resume marking later when recursive marking uses too much stack. */ +/* + * When recursive marking uses too much stack the marking is delayed and + * the corresponding arenas are put into a stack using a linked via the + * following per arena structure. + */ struct MarkingDelay { - Arena *link; - uintptr_t unmarkedChildren; - jsuword start; + ArenaHeader *link; void init() { link = NULL; - unmarkedChildren = 0; + } + + /* + * To separate arenas without things to mark later from the arena at the + * marked delay stack bottom we use for the latter a special sentinel + * value. We set it to the header for the second arena in the chunk + * starting the 0 address. + */ + static ArenaHeader *stackBottom() { + return reinterpret_cast(ArenaSize); } }; struct EmptyArenaLists { /* Arenas with no internal freelist prepared. */ - Arena *cellFreeList; + ArenaHeader *cellFreeList; /* Arenas with internal freelists prepared for a given finalize kind. 
*/ - Arena *freeLists[FINALIZE_LIMIT]; + ArenaHeader *freeLists[FINALIZE_LIMIT]; void init() { PodZero(this); } - Arena *getOtherArena() { - Arena *arena = cellFreeList; - if (arena) { - cellFreeList = arena->header()->next; - return arena; + ArenaHeader *getOtherArena() { + ArenaHeader *aheader = cellFreeList; + if (aheader) { + cellFreeList = aheader->next; + return aheader; } for (int i = 0; i < FINALIZE_LIMIT; i++) { - if ((arena = (Arena *) freeLists[i])) { - freeLists[i] = freeLists[i]->header()->next; - return arena; + aheader = freeLists[i]; + if (aheader) { + freeLists[i] = aheader->next; + return aheader; } } JS_NOT_REACHED("No arena"); return NULL; } - template - inline Arena *getTypedFreeList(unsigned thingKind); + ArenaHeader *getTypedFreeList(unsigned thingKind) { + JS_ASSERT(thingKind < FINALIZE_LIMIT); + ArenaHeader *aheader = freeLists[thingKind]; + if (aheader) + freeLists[thingKind] = aheader->next; + return aheader; + } - template - inline Arena *getNext(JSCompartment *comp, unsigned thingKind); - - template - inline void insert(Arena *arena); + void insert(ArenaHeader *aheader) { + unsigned thingKind = aheader->getThingKind(); + aheader->next = freeLists[thingKind]; + freeLists[thingKind] = aheader; + } }; -template -inline Arena * -EmptyArenaLists::getTypedFreeList(unsigned thingKind) { - JS_ASSERT(thingKind < FINALIZE_LIMIT); - Arena *arena = (Arena*) freeLists[thingKind]; - if (arena) { - freeLists[thingKind] = freeLists[thingKind]->header()->next; - return arena; - } - return NULL; -} - -template -inline Arena * -EmptyArenaLists::getNext(JSCompartment *comp, unsigned thingKind) { - Arena *arena = getTypedFreeList(thingKind); - if (arena) { - JS_ASSERT(arena->header()->isUsed == false); - JS_ASSERT(arena->header()->thingSize == sizeof(T)); -#ifdef DEBUG - arena->header()->isUsed = true; -#endif - arena->header()->thingKind = thingKind; - arena->header()->compartment = comp; - return arena; - } - arena = (Arena *)getOtherArena(); - 
JS_ASSERT(arena->header()->isUsed == false); - arena->init(comp, thingKind); - return arena; -} - -template -inline void -EmptyArenaLists::insert(Arena *arena) { - unsigned thingKind = arena->header()->thingKind; - JS_ASSERT(thingKind < FINALIZE_LIMIT); - arena->header()->next = freeLists[thingKind]; - freeLists[thingKind] = (Arena *) arena; -} - /* The chunk header (located at the end of the chunk to preserve arena alignment). */ struct ChunkInfo { Chunk *link; @@ -366,7 +357,7 @@ struct ChunkInfo { /* Chunks contain arenas and associated data structures (mark bitmap, delayed marking state). */ struct Chunk { - static const size_t BytesPerArena = sizeof(Arena) + + static const size_t BytesPerArena = ArenaSize + sizeof(ArenaBitmap) + sizeof(MarkingDelay); @@ -378,6 +369,21 @@ struct Chunk { ChunkInfo info; + static Chunk *fromAddress(uintptr_t addr) { + addr &= ~GC_CHUNK_MASK; + return reinterpret_cast(addr); + } + + static bool withinArenasRange(uintptr_t addr) { + uintptr_t offset = addr & GC_CHUNK_MASK; + return offset < ArenasPerChunk * ArenaSize; + } + + static size_t arenaIndex(uintptr_t addr) { + JS_ASSERT(withinArenasRange(addr)); + return (addr & GC_CHUNK_MASK) >> ArenaShift; + } + void clearMarkBitmap(); bool init(JSRuntime *rt); @@ -386,30 +392,37 @@ struct Chunk { bool withinArenasRange(Cell *cell); template - Arena *allocateArena(JSContext *cx, unsigned thingKind); + ArenaHeader *allocateArena(JSContext *cx, unsigned thingKind); - template - void releaseArena(Arena *a); + void releaseArena(ArenaHeader *aheader); JSRuntime *getRuntime(); }; JS_STATIC_ASSERT(sizeof(Chunk) <= GC_CHUNK_SIZE); JS_STATIC_ASSERT(sizeof(Chunk) + Chunk::BytesPerArena > GC_CHUNK_SIZE); -Arena * -Cell::arena() const +inline uintptr_t +Cell::address() const { uintptr_t addr = uintptr_t(this); - JS_ASSERT(addr % sizeof(FreeCell) == 0); - addr &= ~(Arena::ArenaSize - 1); - return reinterpret_cast *>(addr); + JS_ASSERT(addr % Cell::CellSize == 0); + 
JS_ASSERT(Chunk::withinArenasRange(addr)); + return addr; +} + +inline ArenaHeader * +Cell::arenaHeader() const +{ + uintptr_t addr = address(); + addr &= ~ArenaMask; + return reinterpret_cast(addr); } Chunk * Cell::chunk() const { uintptr_t addr = uintptr_t(this); - JS_ASSERT(addr % sizeof(FreeCell) == 0); + JS_ASSERT(addr % Cell::CellSize == 0); addr &= ~(GC_CHUNK_SIZE - 1); return reinterpret_cast(addr); } @@ -417,70 +430,60 @@ Cell::chunk() const ArenaBitmap * Cell::bitmap() const { - return &chunk()->bitmaps[arena()->arenaIndex()]; + return &chunk()->bitmaps[Chunk::arenaIndex(address())]; } STATIC_POSTCONDITION_ASSUME(return < ArenaBitmap::BitCount) size_t Cell::cellIndex() const { - return this->asFreeCell() - arena()->t.things[0].asFreeCell(); -} - -template -Chunk * -Arena::chunk() const -{ - uintptr_t addr = uintptr_t(this); - JS_ASSERT(addr % sizeof(FreeCell) == 0); - addr &= ~(GC_CHUNK_SIZE - 1); - return reinterpret_cast(addr); -} - -template -size_t -Arena::arenaIndex() const -{ - return reinterpret_cast *>(this) - chunk()->arenas; -} - -template -MarkingDelay * -Arena::getMarkingDelay() const -{ - return &chunk()->markingDelay[arenaIndex()]; -} - -template -ArenaBitmap * -Arena::bitmap() const -{ - return &chunk()->bitmaps[arenaIndex()]; -} - -template -inline T * -Arena::getAlignedThing(const void *thing) -{ - jsuword start = reinterpret_cast(&t.things[0]); - jsuword offset = reinterpret_cast(thing) - start; - offset -= offset % sizeof(T); - return reinterpret_cast(start + offset); + uintptr_t addr = address(); + return (addr & ArenaMask) >> Cell::CellShift; } #ifdef DEBUG -template inline bool -Arena::assureThingIsAligned(void *thing) +Cell::isAligned() const { - return (getAlignedThing(thing) == thing); + uintptr_t offset = address() & ArenaMask; + return offset % arenaHeader()->getThingSize() == 0; } #endif +inline uintptr_t +ArenaHeader::address() const +{ + uintptr_t addr = reinterpret_cast(this); + JS_ASSERT(!(addr & ArenaMask)); + 
JS_ASSERT(Chunk::withinArenasRange(addr)); + return addr; +} + +inline Chunk * +ArenaHeader::chunk() const +{ + return Chunk::fromAddress(address()); +} + +inline ArenaBitmap * +ArenaHeader::bitmap() const +{ + return &chunk()->bitmaps[Chunk::arenaIndex(address())]; +} + +inline MarkingDelay * +ArenaHeader::getMarkingDelay() const +{ + return &chunk()->markingDelay[Chunk::arenaIndex(address())]; +} + static void AssertValidColor(const void *thing, uint32 color) { - JS_ASSERT_IF(color, color < reinterpret_cast(thing)->arena()->header()->thingSize / sizeof(FreeCell)); +#ifdef DEBUG + ArenaHeader *aheader = reinterpret_cast(thing)->arenaHeader(); + JS_ASSERT_IF(color, color < aheader->getThingSize() / Cell::CellSize); +#endif } inline bool @@ -508,15 +511,7 @@ Cell::unmark(uint32 color) const JSCompartment * Cell::compartment() const { - return arena()->header()->compartment; -} - -template -static inline -Arena * -GetArena(Cell *cell) -{ - return reinterpret_cast *>(cell->arena()); + return arenaHeader()->compartment; } #define JSTRACE_XML 3 @@ -580,15 +575,10 @@ GetGCThingRuntime(void *thing) return reinterpret_cast(thing)->chunk()->info.runtime; } -#ifdef DEBUG -extern bool -checkArenaListsForThing(JSCompartment *comp, jsuword thing); -#endif - /* The arenas in a list have uniform kind. 
*/ struct ArenaList { - Arena *head; /* list start */ - Arena *cursor; /* arena with free things */ + ArenaHeader *head; /* list start */ + ArenaHeader *cursor; /* arena with free things */ volatile bool hasToBeFinalized; inline void init() { @@ -597,47 +587,35 @@ struct ArenaList { hasToBeFinalized = false; } - inline Arena *getNextWithFreeList(JSContext *cx) { + inline ArenaHeader *getNextWithFreeList() { JS_ASSERT(!hasToBeFinalized); - Arena *a; - while (cursor != NULL) { - ArenaHeader *aheader = cursor->header(); - a = cursor; + while (cursor) { + ArenaHeader *aheader = cursor; cursor = aheader->next; if (aheader->freeList) - return a; + return aheader; } return NULL; } #ifdef DEBUG - template - bool arenasContainThing(void *thing) { - for (Arena *a = (Arena *) head; a; a = (Arena *) a->header()->next) { - JS_ASSERT(a->header()->isUsed); - if (thing >= &a->t.things[0] && thing < &a->t.things[a->ThingsPerArena]) - return true; - } - return false; - } - bool markedThingsInArenaList() { - for (Arena *a = (Arena *) head; a; a = (Arena *) a->header()->next) { - if (!a->bitmap()->noBitsSet()) + for (ArenaHeader *aheader = head; aheader; aheader = aheader->next) { + if (!aheader->bitmap()->noBitsSet()) return true; } return false; } #endif - inline void insert(Arena *a) { - a->header()->next = head; - head = a; + inline void insert(ArenaHeader *aheader) { + aheader->next = head; + head = aheader; } - void releaseAll() { + void releaseAll(unsigned thingKind) { while (head) { - Arena *next = head->header()->next; + ArenaHeader *next = head->next; head->chunk()->releaseArena(head); head = next; } @@ -646,7 +624,7 @@ struct ArenaList { } inline bool isEmpty() const { - return (head == NULL); + return !head; } }; @@ -664,17 +642,12 @@ struct FreeLists { } else { finalizables[kind] = NULL; } -#ifdef DEBUG - if (top && !top->link) - top->arena()->header()->hasFreeThings = false; -#endif } return top; } - template - inline void populate(Arena *a, uint32 thingKind) { - 
finalizables[thingKind] = &a->header()->freeList; + void populate(ArenaHeader *aheader, uint32 thingKind) { + finalizables[thingKind] = &aheader->freeList; } #ifdef DEBUG @@ -744,9 +717,7 @@ CheckGCFreeListLink(js::gc::FreeCell *cell) * The GC things on the free lists come from one arena and the things on * the free list are linked in ascending address order. */ - JS_ASSERT_IF(cell->link, - cell->arena() == - cell->link->arena()); + JS_ASSERT_IF(cell->link, cell->arenaHeader() == cell->link->arenaHeader()); JS_ASSERT_IF(cell->link, cell < cell->link); } @@ -821,7 +792,7 @@ extern JS_FRIEND_API(bool) js_GCThingIsMarked(void *thing, uintN color); extern void -js_TraceStackFrame(JSTracer *trc, JSStackFrame *fp); +js_TraceStackFrame(JSTracer *trc, js::StackFrame *fp); namespace js { @@ -885,7 +856,7 @@ extern void js_DestroyScriptsToGC(JSContext *cx, JSCompartment *comp); extern void -FinalizeArenaList(JSContext *cx, js::gc::ArenaList *arenaList, js::gc::Arena *head); +FinalizeArenaList(JSContext *cx, js::gc::ArenaList *arenaList, js::gc::ArenaHeader *head); namespace js { @@ -914,15 +885,18 @@ class GCHelperThread { Vector freeVector; void **freeCursor; void **freeCursorEnd; - Vector finalizeVector; - void **finalizeCursor; - void **finalizeCursorEnd; + + struct FinalizeListAndHead { + js::gc::ArenaList *list; + js::gc::ArenaHeader *head; + + }; + + Vector finalizeVector; JS_FRIEND_API(void) replenishAndFreeLater(void *ptr); - void replenishAndFinalizeLater(js::gc::ArenaList *list); - static void freeElementsAndArray(void **array, void **end) { JS_ASSERT(array <= end); for (void **p = array; p != end; ++p) @@ -930,17 +904,6 @@ class GCHelperThread { js::Foreground::free_(array); } - void finalizeElementsAndArray(void **array, void **end) { - JS_ASSERT(array <= end); - for (void **p = array; p != end; p += 2) { - js::gc::ArenaList *list = (js::gc::ArenaList *)*p; - js::gc::Arena *head = (js::gc::Arena *)*(p+1); - - FinalizeArenaList(cx, list, head); - } - 
js::Foreground::free_(array); - } - static void threadMain(void* arg); void threadLoop(JSRuntime *rt); @@ -954,8 +917,6 @@ class GCHelperThread { shutdown(false), freeCursor(NULL), freeCursorEnd(NULL), - finalizeCursor(NULL), - finalizeCursorEnd(NULL), sweeping(false) { } volatile bool sweeping; @@ -976,19 +937,16 @@ class GCHelperThread { replenishAndFreeLater(ptr); } - void finalizeLater(js::gc::ArenaList *list) { + bool finalizeLater(js::gc::ArenaList *list) { + JS_ASSERT(!sweeping); JS_ASSERT(!list->hasToBeFinalized); if (!list->head) - return; - + return true; + FinalizeListAndHead f = {list, list->head}; + if (!finalizeVector.append(f)) + return false; list->hasToBeFinalized = true; - JS_ASSERT(!sweeping); - if (finalizeCursor + 1 < finalizeCursorEnd) { - *finalizeCursor++ = list; - *finalizeCursor++ = list->head; - } else { - replenishAndFinalizeLater(list); - } + return true; } void setContext(JSContext *context) { cx = context; } @@ -1119,9 +1077,9 @@ struct GCMarker : public JSTracer { public: /* See comments before delayMarkingChildren is jsgc.cpp. 
*/ - js::gc::Arena *unmarkedArenaStackTop; + js::gc::ArenaHeader *unmarkedArenaStackTop; #ifdef DEBUG - size_t markLaterCount; + size_t markLaterArenas; #endif #if defined(JS_DUMP_CONSERVATIVE_GC_ROOTS) || defined(JS_GCMETER) @@ -1129,8 +1087,7 @@ struct GCMarker : public JSTracer { #endif #ifdef JS_DUMP_CONSERVATIVE_GC_ROOTS - struct ConservativeRoot { void *thing; uint32 thingKind; }; - Vector conservativeRoots; + Vector conservativeRoots; const char *conservativeDumpFileName; void dumpConservativeRoots(); diff --git a/js/src/jsgcinlines.h b/js/src/jsgcinlines.h index 8a953329ec0f..a09faca0eca1 100644 --- a/js/src/jsgcinlines.h +++ b/js/src/jsgcinlines.h @@ -1,4 +1,4 @@ -/* -*- Mode: C; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*- +/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*- * * ***** BEGIN LICENSE BLOCK ***** * Version: MPL 1.1/GPL 2.0/LGPL 2.1 @@ -114,7 +114,7 @@ GetGCThingTraceKind(const void *thing) if (JSAtom::isStatic(thing)) return JSTRACE_STRING; const Cell *cell = reinterpret_cast(thing); - return GetFinalizableTraceKind(cell->arena()->header()->thingKind); + return GetFinalizableTraceKind(cell->arenaHeader()->getThingKind()); } /* Capacity for slotsToThingKind */ diff --git a/js/src/jsgcmark.cpp b/js/src/jsgcmark.cpp index 6aadef2e2faf..984a861d5d16 100644 --- a/js/src/jsgcmark.cpp +++ b/js/src/jsgcmark.cpp @@ -77,14 +77,17 @@ Mark(JSTracer *trc, T *thing) JS_ASSERT(JS_IS_VALID_TRACE_KIND(GetGCThingTraceKind(thing))); JS_ASSERT(trc->debugPrinter || trc->debugPrintArg); - /* Per-Compartment GC only with GCMarker and no custom JSTracer */ - JS_ASSERT_IF(trc->context->runtime->gcCurrentCompartment, IS_GC_MARKING_TRACER(trc)); + JS_ASSERT(!JSAtom::isStatic(thing)); + JS_ASSERT(thing->asFreeCell()->isAligned()); JSRuntime *rt = trc->context->runtime; - JS_ASSERT(thing->arena()->header()->compartment); - JS_ASSERT(thing->arena()->header()->compartment->rt == rt); + JS_ASSERT(thing->arenaHeader()->compartment); + 
JS_ASSERT(thing->arenaHeader()->compartment->rt == rt); - /* Don't mark things outside a compartment if we are in a per-compartment GC */ + /* + * Don't mark things outside a compartment if we are in a per-compartment + * GC. + */ if (!rt->gcCurrentCompartment || thing->compartment() == rt->gcCurrentCompartment) { if (IS_GC_MARKING_TRACER(trc)) PushMarkStack(static_cast(trc), thing); @@ -104,7 +107,6 @@ MarkString(JSTracer *trc, JSString *str) JS_ASSERT(str); if (str->isStaticAtom()) return; - JS_ASSERT(GetArena((Cell *)str)->assureThingIsAligned((JSString *)str)); Mark(trc, str); } @@ -122,13 +124,6 @@ MarkObject(JSTracer *trc, JSObject &obj, const char *name) JS_ASSERT(trc); JS_ASSERT(&obj); JS_SET_TRACING_NAME(trc, name); - JS_ASSERT(GetArena((Cell *)&obj)->assureThingIsAligned(&obj) || - GetArena((Cell *)&obj)->assureThingIsAligned(&obj) || - GetArena((Cell *)&obj)->assureThingIsAligned(&obj) || - GetArena((Cell *)&obj)->assureThingIsAligned(&obj) || - GetArena((Cell *)&obj)->assureThingIsAligned(&obj) || - GetArena((Cell *)&obj)->assureThingIsAligned(&obj) || - GetArena((Cell *)&obj)->assureThingIsAligned(&obj)); Mark(trc, &obj); } @@ -139,13 +134,6 @@ MarkObjectWithPrinter(JSTracer *trc, JSObject &obj, JSTraceNamePrinter printer, JS_ASSERT(trc); JS_ASSERT(&obj); JS_SET_TRACING_DETAILS(trc, printer, arg, index); - JS_ASSERT(GetArena((Cell *)&obj)->assureThingIsAligned(&obj) || - GetArena((Cell *)&obj)->assureThingIsAligned(&obj) || - GetArena((Cell *)&obj)->assureThingIsAligned(&obj) || - GetArena((Cell *)&obj)->assureThingIsAligned(&obj) || - GetArena((Cell *)&obj)->assureThingIsAligned(&obj) || - GetArena((Cell *)&obj)->assureThingIsAligned(&obj) || - GetArena((Cell *)&obj)->assureThingIsAligned(&obj)); Mark(trc, &obj); } @@ -155,19 +143,19 @@ MarkShape(JSTracer *trc, const Shape *shape, const char *name) JS_ASSERT(trc); JS_ASSERT(shape); JS_SET_TRACING_NAME(trc, name); - JS_ASSERT(GetArena((Cell *)shape)->assureThingIsAligned((void *)shape)); Mark(trc, 
shape); } +#if JS_HAS_XML_SUPPORT void MarkXML(JSTracer *trc, JSXML *xml, const char *name) { JS_ASSERT(trc); JS_ASSERT(xml); JS_SET_TRACING_NAME(trc, name); - JS_ASSERT(GetArena(xml)->assureThingIsAligned(xml)); Mark(trc, xml); } +#endif void PushMarkStack(GCMarker *gcmarker, JSXML *thing) diff --git a/js/src/jsgcstats.cpp b/js/src/jsgcstats.cpp index 433eac22f301..e02031e04807 100644 --- a/js/src/jsgcstats.cpp +++ b/js/src/jsgcstats.cpp @@ -44,6 +44,8 @@ #include "jsbuiltins.h" #include "jscompartment.h" +#include "jsgcinlines.h" + using namespace js; using namespace js::gc; @@ -72,7 +74,7 @@ ConservativeGCStats::dump(FILE *fp) fprintf(fp, " excluded, wrong tag: %lu\n", ULSTAT(counter[CGCT_WRONGTAG])); fprintf(fp, " excluded, not live: %lu\n", ULSTAT(counter[CGCT_NOTLIVE])); fprintf(fp, " valid GC things: %lu\n", ULSTAT(counter[CGCT_VALID])); - fprintf(fp, " valid but not aligned: %lu\n", ULSTAT(counter[CGCT_VALIDWITHOFFSET])); + fprintf(fp, " valid but not aligned: %lu\n", ULSTAT(unaligned)); #undef ULSTAT } #endif @@ -143,49 +145,6 @@ GetSizeAndThings(size_t &thingSize, size_t &thingsPerArena) thingsPerArena = Arena::ThingsPerArena; } -#if defined JS_DUMP_CONSERVATIVE_GC_ROOTS -void * -GetAlignedThing(void *thing, int thingKind) -{ - Cell *cell = (Cell *)thing; - switch (thingKind) { - case FINALIZE_OBJECT0: - case FINALIZE_OBJECT0_BACKGROUND: - return (void *)GetArena(cell)->getAlignedThing(thing); - case FINALIZE_OBJECT2: - case FINALIZE_OBJECT2_BACKGROUND: - return (void *)GetArena(cell)->getAlignedThing(thing); - case FINALIZE_OBJECT4: - case FINALIZE_OBJECT4_BACKGROUND: - return (void *)GetArena(cell)->getAlignedThing(thing); - case FINALIZE_OBJECT8: - case FINALIZE_OBJECT8_BACKGROUND: - return (void *)GetArena(cell)->getAlignedThing(thing); - case FINALIZE_OBJECT12: - case FINALIZE_OBJECT12_BACKGROUND: - return (void *)GetArena(cell)->getAlignedThing(thing); - case FINALIZE_OBJECT16: - case FINALIZE_OBJECT16_BACKGROUND: - return (void 
*)GetArena(cell)->getAlignedThing(thing); - case FINALIZE_STRING: - return (void *)GetArena(cell)->getAlignedThing(thing); - case FINALIZE_EXTERNAL_STRING: - return (void *)GetArena(cell)->getAlignedThing(thing); - case FINALIZE_SHORT_STRING: - return (void *)GetArena(cell)->getAlignedThing(thing); - case FINALIZE_FUNCTION: - return (void *)GetArena(cell)->getAlignedThing(thing); -#if JS_HAS_XML_SUPPORT - case FINALIZE_XML: - return (void *)GetArena(cell)->getAlignedThing(thing); -#endif - default: - JS_NOT_REACHED("wrong kind"); - return NULL; - } -} -#endif - void GetSizeAndThingsPerArena(int thingKind, size_t &thingSize, size_t &thingsPerArena) { switch (thingKind) { @@ -366,16 +325,16 @@ GCMarker::dumpConservativeRoots() conservativeStats.dump(fp); - for (ConservativeRoot *i = conservativeRoots.begin(); - i != conservativeRoots.end(); - ++i) { - fprintf(fp, " %p: ", i->thing); - switch (GetFinalizableTraceKind(i->thingKind)) { + for (void **thingp = conservativeRoots.begin(); thingp != conservativeRoots.end(); ++thingp) { + void *thing = thingp; + fprintf(fp, " %p: ", thing); + + switch (GetGCThingTraceKind(thing)) { default: JS_NOT_REACHED("Unknown trace kind"); case JSTRACE_OBJECT: { - JSObject *obj = (JSObject *) i->thing; + JSObject *obj = (JSObject *) thing; fprintf(fp, "object %s", obj->getClass()->name); break; } @@ -384,7 +343,7 @@ GCMarker::dumpConservativeRoots() break; } case JSTRACE_STRING: { - JSString *str = (JSString *) i->thing; + JSString *str = (JSString *) thing; if (str->isLinear()) { char buf[50]; PutEscapedString(buf, sizeof buf, &str->asLinear(), '"'); @@ -396,7 +355,7 @@ GCMarker::dumpConservativeRoots() } # if JS_HAS_XML_SUPPORT case JSTRACE_XML: { - JSXML *xml = (JSXML *) i->thing; + JSXML *xml = (JSXML *) thing; fprintf(fp, "xml %u", (unsigned)xml->xml_class); break; } diff --git a/js/src/jsgcstats.h b/js/src/jsgcstats.h index 9aea7f1a2692..6f661e53ec98 100644 --- a/js/src/jsgcstats.h +++ b/js/src/jsgcstats.h @@ -63,7 +63,6 @@ 
namespace gc { */ enum ConservativeGCTest { CGCT_VALID, - CGCT_VALIDWITHOFFSET, /* points within an object */ CGCT_LOWBITSET, /* excluded because one of the low bits was set */ CGCT_NOTARENA, /* not within arena range in a chunk */ CGCT_NOTCHUNK, /* not within a valid chunk */ @@ -75,7 +74,9 @@ enum ConservativeGCTest { struct ConservativeGCStats { uint32 counter[gc::CGCT_END]; /* ConservativeGCTest classification - counters */ + counters */ + uint32 unaligned; /* number of valid but not aligned on + thing start pointers */ void add(const ConservativeGCStats &another) { for (size_t i = 0; i != JS_ARRAY_LENGTH(counter); ++i) @@ -129,10 +130,6 @@ UpdateCompartmentStats(JSCompartment *comp, unsigned thingKind, uint32 nlivearen uint32 nkilledArenas, uint32 nthings); #endif /* JS_GCMETER */ -#if defined JS_DUMP_CONSERVATIVE_GC_ROOTS -void *GetAlignedThing(void *thing, int thingKind); -#endif - } //gc #ifdef MOZ_GCTIMER diff --git a/js/src/jshashtable.h b/js/src/jshashtable.h index 59211f1b289b..feb603bc7164 100644 --- a/js/src/jshashtable.h +++ b/js/src/jshashtable.h @@ -796,6 +796,8 @@ struct PointerHasher template struct DefaultHasher: PointerHasher::result> { }; +/* Looking for a hasher for jsid? Try the DefaultHasher in jsatom.h. */ + /* * JS-friendly, STL-like container providing a hash-based map from keys to * values. In particular, HashMap calls constructors and destructors of all diff --git a/js/src/jsinterp.cpp b/js/src/jsinterp.cpp index 91df9d9da51d..fb2c1a5b856b 100644 --- a/js/src/jsinterp.cpp +++ b/js/src/jsinterp.cpp @@ -80,7 +80,6 @@ #include "methodjit/Logging.h" #endif #include "jsatominlines.h" -#include "jscntxtinlines.h" #include "jsinterpinlines.h" #include "jsobjinlines.h" #include "jsprobes.h" @@ -90,6 +89,8 @@ #include "jsstrinlines.h" #include "jsopcodeinlines.h" +#include "vm/Stack-inl.h" + #if JS_HAS_XML_SUPPORT #include "jsxml.h" #endif @@ -106,39 +107,10 @@ using namespace js::gc; /* jsinvoke_cpp___ indicates inclusion from jsinvoke.cpp. 
*/ #if !JS_LONE_INTERPRET ^ defined jsinvoke_cpp___ -#ifdef DEBUG -JSObject *const JSStackFrame::sInvalidScopeChain = (JSObject *)0xbeef; -#endif - -jsbytecode * -JSStackFrame::pc(JSContext *cx, JSStackFrame *next) -{ - JS_ASSERT_IF(next, next->prev_ == this); - - StackSegment *seg = cx->stack().containingSegment(this); - JSFrameRegs *regs = seg->getCurrentRegs(); - if (regs->fp == this) - return regs->pc; - - if (!next) - next = seg->computeNextFrame(this); - - if (next->flags_ & JSFRAME_HAS_PREVPC) - return next->prevpc_; - -#if defined(JS_METHODJIT) && defined(JS_MONOIC) - js::mjit::JITScript *jit = script()->getJIT(isConstructing()); - return jit->nativeToPC(next->ncode_); -#else - JS_NOT_REACHED("Unknown PC for frame"); - return NULL; -#endif -} - JSObject * js::GetScopeChain(JSContext *cx) { - JSStackFrame *fp = js_GetTopStackFrame(cx); + StackFrame *fp = js_GetTopStackFrame(cx); if (!fp) { /* * There is no code active on this context. In place of an actual @@ -170,7 +142,7 @@ js::GetScopeChain(JSContext *cx) * require bytecode scanning appears below. */ JSObject * -js::GetBlockChain(JSContext *cx, JSStackFrame *fp) +js::GetBlockChain(JSContext *cx, StackFrame *fp) { if (!fp->isScriptFrame()) return NULL; @@ -219,7 +191,7 @@ js::GetBlockChain(JSContext *cx, JSStackFrame *fp) * |oplen| is the length of opcode at the current PC. */ JSObject * -js::GetBlockChainFast(JSContext *cx, JSStackFrame *fp, JSOp op, size_t oplen) +js::GetBlockChainFast(JSContext *cx, StackFrame *fp, JSOp op, size_t oplen) { /* Assume that we're in a script frame. */ jsbytecode *pc = fp->pc(cx); @@ -269,7 +241,7 @@ js::GetBlockChainFast(JSContext *cx, JSStackFrame *fp, JSOp op, size_t oplen) * some other cases --- entering 'with' blocks, for example. 
*/ static JSObject * -GetScopeChainFull(JSContext *cx, JSStackFrame *fp, JSObject *blockChain) +GetScopeChainFull(JSContext *cx, StackFrame *fp, JSObject *blockChain) { JSObject *sharedBlock = blockChain; @@ -389,13 +361,13 @@ GetScopeChainFull(JSContext *cx, JSStackFrame *fp, JSObject *blockChain) } JSObject * -js::GetScopeChain(JSContext *cx, JSStackFrame *fp) +js::GetScopeChain(JSContext *cx, StackFrame *fp) { return GetScopeChainFull(cx, fp, GetBlockChain(cx, fp)); } JSObject * -js::GetScopeChainFast(JSContext *cx, JSStackFrame *fp, JSOp op, size_t oplen) +js::GetScopeChainFast(JSContext *cx, StackFrame *fp, JSOp op, size_t oplen) { return GetScopeChainFull(cx, fp, GetBlockChainFast(cx, fp, op, oplen)); } @@ -573,10 +545,10 @@ js_OnUnknownMethod(JSContext *cx, Value *vp) } static JS_REQUIRES_STACK JSBool -NoSuchMethod(JSContext *cx, uintN argc, Value *vp, uint32 flags) +NoSuchMethod(JSContext *cx, uintN argc, Value *vp) { InvokeArgsGuard args; - if (!cx->stack().pushInvokeArgs(cx, 2, &args)) + if (!cx->stack.pushInvokeArgs(cx, 2, &args)) return JS_FALSE; JS_ASSERT(vp[0].isObject()); @@ -591,9 +563,7 @@ NoSuchMethod(JSContext *cx, uintN argc, Value *vp, uint32 flags) if (!argsobj) return JS_FALSE; args[1].setObject(*argsobj); - JSBool ok = (flags & JSINVOKE_CONSTRUCT) - ? InvokeConstructor(cx, args) - : Invoke(cx, args, flags); + JSBool ok = Invoke(cx, args); vp[0] = args.rval(); return ok; } @@ -603,7 +573,7 @@ NoSuchMethod(JSContext *cx, uintN argc, Value *vp, uint32 flags) namespace js { JS_REQUIRES_STACK bool -RunScript(JSContext *cx, JSScript *script, JSStackFrame *fp) +RunScript(JSContext *cx, JSScript *script, StackFrame *fp) { JS_ASSERT(script); JS_ASSERT(fp == cx->fp()); @@ -640,7 +610,7 @@ RunScript(JSContext *cx, JSScript *script, JSStackFrame *fp) * when done. Then push the return value. 
*/ JS_REQUIRES_STACK bool -Invoke(JSContext *cx, const CallArgs &argsRef, uint32 flags) +Invoke(JSContext *cx, const CallArgs &argsRef, ConstructOption option) { /* N.B. Must be kept in sync with InvokeSessionGuard::start/invoke */ @@ -648,7 +618,7 @@ Invoke(JSContext *cx, const CallArgs &argsRef, uint32 flags) JS_ASSERT(args.argc() <= JS_ARGS_LENGTH_MAX); if (args.calleev().isPrimitive()) { - js_ReportIsNotFunction(cx, &args.calleev(), flags & JSINVOKE_FUNFLAGS); + js_ReportIsNotFunction(cx, &args.calleev(), ToReportFlags(option)); return false; } @@ -659,11 +629,11 @@ Invoke(JSContext *cx, const CallArgs &argsRef, uint32 flags) if (JS_UNLIKELY(clasp != &js_FunctionClass)) { #if JS_HAS_NO_SUCH_METHOD if (JS_UNLIKELY(clasp == &js_NoSuchMethodClass)) - return NoSuchMethod(cx, args.argc(), args.base(), 0); + return NoSuchMethod(cx, args.argc(), args.base()); #endif - JS_ASSERT_IF(flags & JSINVOKE_CONSTRUCT, !clasp->construct); + JS_ASSERT_IF(option == INVOKE_CONSTRUCTOR, !clasp->construct); if (!clasp->call) { - js_ReportIsNotFunction(cx, &args.calleev(), flags); + js_ReportIsNotFunction(cx, &args.calleev(), ToReportFlags(option)); return false; } return CallJSNative(cx, clasp->call, args.argc(), args.base()); @@ -671,14 +641,14 @@ Invoke(JSContext *cx, const CallArgs &argsRef, uint32 flags) /* Invoke native functions. */ JSFunction *fun = callee.getFunctionPrivate(); - JS_ASSERT_IF(flags & JSINVOKE_CONSTRUCT, !fun->isConstructor()); + JS_ASSERT_IF(option == INVOKE_CONSTRUCTOR, !fun->isConstructor()); if (fun->isNative()) return CallJSNative(cx, fun->u.n.native, args.argc(), args.base()); /* Handle the empty-script special case. 
*/ JSScript *script = fun->script(); if (JS_UNLIKELY(script->isEmpty())) { - if (flags & JSINVOKE_CONSTRUCT) { + if (option == INVOKE_CONSTRUCTOR) { JSObject *obj = js_CreateThisForFunction(cx, &callee); if (!obj) return false; @@ -690,17 +660,18 @@ Invoke(JSContext *cx, const CallArgs &argsRef, uint32 flags) } /* Get pointer to new frame/slots, prepare arguments. */ + uint32 flags = ToFrameFlags(option); InvokeFrameGuard frame; - if (JS_UNLIKELY(!cx->stack().getInvokeFrame(cx, args, fun, script, &flags, &frame))) + StackFrame *fp = cx->stack.getInvokeFrame(cx, args, fun, script, &flags, &frame); + if (!fp) return false; /* Initialize frame, locals. */ - JSStackFrame *fp = frame.fp(); fp->initCallFrame(cx, callee, fun, args.argc(), flags); SetValueRangeToUndefined(fp->slots(), script->nfixed); /* Officially push fp. frame's destructor pops. */ - cx->stack().pushInvokeFrame(cx, args, &frame); + cx->stack.pushInvokeFrame(args, &frame); /* Now that the new frame is rooted, maybe create a call object. */ if (fun->isHeavyweight() && !CreateFunCallObject(cx, fp)) @@ -714,8 +685,7 @@ Invoke(JSContext *cx, const CallArgs &argsRef, uint32 flags) } args.rval() = fp->returnValue(); - JS_ASSERT_IF(ok && (flags & JSINVOKE_CONSTRUCT), !args.rval().isPrimitive()); - + JS_ASSERT_IF(ok && option == INVOKE_CONSTRUCTOR, !args.rval().isPrimitive()); return ok; } @@ -729,7 +699,7 @@ InvokeSessionGuard::start(JSContext *cx, const Value &calleev, const Value &this #endif /* Always push arguments, regardless of optimized/normal invoke. 
*/ - StackSpace &stack = cx->stack(); + ContextStack &stack = cx->stack; if (!stack.pushInvokeArgs(cx, argc, &args_)) return false; @@ -762,9 +732,9 @@ InvokeSessionGuard::start(JSContext *cx, const Value &calleev, const Value &this uint32 flags = 0; if (!stack.getInvokeFrame(cx, args_, fun, script_, &flags, &frame_)) return false; - JSStackFrame *fp = frame_.fp(); + StackFrame *fp = frame_.fp(); fp->initCallFrame(cx, calleev.toObject(), fun, argc, flags); - stack.pushInvokeFrame(cx, args_, &frame_); + stack.pushInvokeFrame(args_, &frame_); #ifdef JS_METHODJIT /* Hoist dynamic checks from RunScript. */ @@ -777,7 +747,7 @@ InvokeSessionGuard::start(JSContext *cx, const Value &calleev, const Value &this /* Hoist dynamic checks from CheckStackAndEnterMethodJIT. */ JS_CHECK_RECURSION(cx, return false); - stackLimit_ = stack.getStackLimit(cx); + stackLimit_ = stack.space().getStackLimit(cx); if (!stackLimit_) return false; @@ -814,7 +784,7 @@ ExternalInvoke(JSContext *cx, const Value &thisv, const Value &fval, LeaveTrace(cx); InvokeArgsGuard args; - if (!cx->stack().pushInvokeArgs(cx, argc, &args)) + if (!cx->stack.pushInvokeArgs(cx, argc, &args)) return false; args.calleev() = fval; @@ -833,7 +803,7 @@ ExternalInvoke(JSContext *cx, const Value &thisv, const Value &fval, args.thisv().setObject(*thisp); } - if (!Invoke(cx, args, 0)) + if (!Invoke(cx, args)) return false; *rval = args.rval(); @@ -847,7 +817,7 @@ ExternalInvokeConstructor(JSContext *cx, const Value &fval, uintN argc, Value *a LeaveTrace(cx); InvokeArgsGuard args; - if (!cx->stack().pushInvokeArgs(cx, argc, &args)) + if (!cx->stack.pushInvokeArgs(cx, argc, &args)) return false; args.calleev() = fval; @@ -880,9 +850,9 @@ ExternalGetOrSet(JSContext *cx, JSObject *obj, jsid id, const Value &fval, JS_STATIC_ASSERT(SHARP_NSLOTS == 2); static JS_NEVER_INLINE bool -InitSharpSlots(JSContext *cx, JSStackFrame *fp) +InitSharpSlots(JSContext *cx, StackFrame *fp) { - JSStackFrame *prev = fp->prev(); + StackFrame *prev 
= fp->prev(); JSScript *script = fp->script(); JS_ASSERT(script->nfixed >= SHARP_NSLOTS); @@ -906,7 +876,7 @@ InitSharpSlots(JSContext *cx, JSStackFrame *fp) bool Execute(JSContext *cx, JSObject &chain, JSScript *script, - JSStackFrame *prev, uintN flags, Value *result) + StackFrame *prev, uintN flags, Value *result) { JS_ASSERT_IF(prev, !prev->isDummyFrame()); JS_ASSERT_IF(prev, prev->compartment() == cx->compartment); @@ -927,7 +897,7 @@ Execute(JSContext *cx, JSObject &chain, JSScript *script, * the frame is rooted. */ ExecuteFrameGuard frame; - if (!cx->stack().getExecuteFrame(cx, script, &frame)) + if (!cx->stack.getExecuteFrame(cx, script, &frame)) return false; /* Initialize fixed slots (GVAR ops expect NULL). */ @@ -941,8 +911,8 @@ Execute(JSContext *cx, JSObject &chain, JSScript *script, /* NB: prev may not be in cx->currentSegment. */ initialVarObj = (prev == cx->maybefp()) - ? &prev->varobj(cx) - : &prev->varobj(cx->stack().containingSegment(prev)); + ? &cx->stack.currentVarObj() + : &cx->stack.space().varObjForFrame(prev); } else { /* The scope chain could be anything, so innerize just in case. */ JSObject *innerizedChain = &chain; @@ -980,7 +950,7 @@ Execute(JSContext *cx, JSObject &chain, JSScript *script, } /* Officially push the frame. ~FrameGuard pops. 
*/ - cx->stack().pushExecuteFrame(cx, initialVarObj, &frame); + cx->stack.pushExecuteFrame(initialVarObj, &frame); #if JS_HAS_SHARP_VARS if (script->hasSharps && !InitSharpSlots(cx, frame.fp())) @@ -1264,7 +1234,7 @@ InvokeConstructor(JSContext *cx, const CallArgs &argsRef) if (!fun->isInterpretedConstructor()) goto error; - if (!Invoke(cx, args, JSINVOKE_CONSTRUCT)) + if (!Invoke(cx, args, INVOKE_CONSTRUCTOR)) return false; JS_ASSERT(args.rval().isObject()); @@ -1289,7 +1259,7 @@ InvokeConstructorWithGivenThis(JSContext *cx, JSObject *thisobj, const Value &fv LeaveTrace(cx); InvokeArgsGuard args; - if (!cx->stack().pushInvokeArgs(cx, argc, &args)) + if (!cx->stack.pushInvokeArgs(cx, argc, &args)) return JS_FALSE; args.calleev() = fval; @@ -1309,7 +1279,7 @@ InvokeConstructorWithGivenThis(JSContext *cx, JSObject *thisobj, const Value &fv ok = CallJSNativeConstructor(cx, clasp->construct, args.argc(), args.base()); } else { args.thisv().setObjectOrNull(thisobj); - ok = Invoke(cx, args, JSINVOKE_CONSTRUCT); + ok = Invoke(cx, args, INVOKE_CONSTRUCTOR); } *rval = args.rval(); @@ -1347,8 +1317,8 @@ ValueToId(JSContext *cx, const Value &v, jsid *idp) JS_STATIC_INTERPRET JS_REQUIRES_STACK JSBool js_EnterWith(JSContext *cx, jsint stackIndex, JSOp op, size_t oplen) { - JSStackFrame *fp = cx->fp(); - Value *sp = cx->regs->sp; + StackFrame *fp = cx->fp(); + Value *sp = cx->regs().sp; JS_ASSERT(stackIndex < 0); JS_ASSERT(fp->base() <= sp + stackIndex); @@ -1416,9 +1386,9 @@ js_UnwindScope(JSContext *cx, jsint stackDepth, JSBool normalUnwind) Class *clasp; JS_ASSERT(stackDepth >= 0); - JS_ASSERT(cx->fp()->base() + stackDepth <= cx->regs->sp); + JS_ASSERT(cx->fp()->base() + stackDepth <= cx->regs().sp); - JSStackFrame *fp = cx->fp(); + StackFrame *fp = cx->fp(); for (;;) { clasp = js_IsActiveWithOrBlock(cx, &fp->scopeChain(), stackDepth); if (!clasp) @@ -1431,7 +1401,7 @@ js_UnwindScope(JSContext *cx, jsint stackDepth, JSBool normalUnwind) } } - cx->regs->sp = fp->base() + 
stackDepth; + cx->regs().sp = fp->base() + stackDepth; return normalUnwind; } @@ -1464,7 +1434,7 @@ js::GetUpvar(JSContext *cx, uintN closureLevel, UpvarCookie cookie) const uintN targetLevel = closureLevel - cookie.level(); JS_ASSERT(targetLevel < UpvarCookie::UPVAR_LEVEL_LIMIT); - JSStackFrame *fp = cx->findFrameAtLevel(targetLevel); + StackFrame *fp = cx->stack.findFrameAtLevel(targetLevel); uintN slot = cookie.slot(); Value *vp; @@ -1490,15 +1460,15 @@ JS_STATIC_INTERPRET JS_REQUIRES_STACK void js_LogOpcode(JSContext *cx) { FILE *logfp; - JSStackFrame *fp; - JSFrameRegs *regs; + StackFrame *fp; + FrameRegs *regs; intN ndefs, n, nuses; JSOp op; logfp = (FILE *) cx->logfp; JS_ASSERT(logfp); fp = cx->fp(); - regs = cx->regs; + regs = &cx->regs(); /* * Operations in prologues don't produce interesting values, and @@ -1900,7 +1870,7 @@ namespace reprmeter { #define PUSH_OBJECT_OR_NULL(obj) do { regs.sp++->setObjectOrNull(obj); assertSameCompartment(cx, regs.sp[-1]); } while (0) #define PUSH_HOLE() regs.sp++->setMagic(JS_ARRAY_HOLE) #define POP_COPY_TO(v) v = *--regs.sp -#define POP_RETURN_VALUE() regs.fp->setReturnValue(*--regs.sp) +#define POP_RETURN_VALUE() regs.fp()->setReturnValue(*--regs.sp) #define POP_BOOLEAN(cx, vp, b) \ JS_BEGIN_MACRO \ @@ -2015,7 +1985,7 @@ CanIncDecWithoutOverflow(int32_t i) JS_END_MACRO static bool -AssertValidPropertyCacheHit(JSContext *cx, JSScript *script, JSFrameRegs& regs, +AssertValidPropertyCacheHit(JSContext *cx, JSScript *script, FrameRegs& regs, ptrdiff_t pcoff, JSObject *start, JSObject *found, PropertyCacheEntry *entry) { @@ -2163,7 +2133,7 @@ IteratorNext(JSContext *cx, JSObject *iterobj, Value *rval) namespace js { JS_REQUIRES_STACK JS_NEVER_INLINE bool -Interpret(JSContext *cx, JSStackFrame *entryFrame, uintN inlineCallCount, JSInterpMode interpMode) +Interpret(JSContext *cx, StackFrame *entryFrame, uintN inlineCallCount, InterpMode interpMode) { #ifdef MOZ_TRACEVIS TraceVisStateObj tvso(cx, S_INTERP); @@ -2288,7 +2258,7 
@@ Interpret(JSContext *cx, JSStackFrame *entryFrame, uintN inlineCallCount, JSInte #define LOAD_ATOM(PCOFF, atom) \ JS_BEGIN_MACRO \ - JS_ASSERT(regs.fp->hasImacropc() \ + JS_ASSERT(regs.fp()->hasImacropc() \ ? atoms == COMMON_ATOMS_START(&rt->atomState) && \ GET_INDEX(regs.pc + PCOFF) < js_common_atom_count \ : (size_t)(atoms - script->atomMap.vector) < \ @@ -2317,22 +2287,22 @@ Interpret(JSContext *cx, JSStackFrame *entryFrame, uintN inlineCallCount, JSInte JS_BEGIN_MACRO \ useMethodJIT = cx->methodJitEnabled && \ interpMode == JSINTERP_NORMAL && \ - script->getJITStatus(regs.fp->isConstructing()) != JITScript_Invalid; \ + script->getJITStatus(regs.fp()->isConstructing()) != JITScript_Invalid; \ JS_END_MACRO #define MONITOR_BRANCH_METHODJIT() \ JS_BEGIN_MACRO \ mjit::CompileStatus status = \ - mjit::CanMethodJITAtBranch(cx, script, regs.fp, regs.pc); \ + mjit::CanMethodJITAtBranch(cx, script, regs.fp(), regs.pc); \ if (status == mjit::Compile_Error) \ goto error; \ if (status == mjit::Compile_Okay) { \ void *ncode = \ - script->nativeCodeForPC(regs.fp->isConstructing(), regs.pc); \ + script->nativeCodeForPC(regs.fp()->isConstructing(), regs.pc);\ interpReturnOK = mjit::JaegerShotAtSafePoint(cx, ncode); \ if (inlineCallCount) \ goto jit_return; \ - regs.fp->setFinishedInInterpreter(); \ + regs.fp()->setFinishedInInterpreter(); \ goto leave_on_safe_point; \ } \ if (status == mjit::Compile_Abort) { \ @@ -2369,10 +2339,10 @@ Interpret(JSContext *cx, JSStackFrame *entryFrame, uintN inlineCallCount, JSInte #define RESTORE_INTERP_VARS() \ JS_BEGIN_MACRO \ - script = regs.fp->script(); \ - argv = regs.fp->maybeFormalArgs(); \ - atoms = FrameAtomBase(cx, regs.fp); \ - JS_ASSERT(cx->regs == ®s); \ + script = regs.fp()->script(); \ + argv = regs.fp()->maybeFormalArgs(); \ + atoms = FrameAtomBase(cx, regs.fp()); \ + JS_ASSERT(&cx->regs() == ®s); \ if (cx->isExceptionPending()) \ goto error; \ JS_END_MACRO @@ -2426,8 +2396,8 @@ Interpret(JSContext *cx, JSStackFrame 
*entryFrame, uintN inlineCallCount, JSInte JS_ASSERT_IF(leaveOnSafePoint, !TRACE_RECORDER(cx)); \ JS_ASSERT_IF(leaveOnSafePoint, !TRACE_PROFILER(cx)); \ JS_ASSERT_IF(leaveOnSafePoint, interpMode != JSINTERP_NORMAL); \ - if (leaveOnSafePoint && !regs.fp->hasImacropc() && \ - script->maybeNativeCodeForPC(regs.fp->isConstructing(), regs.pc)) { \ + if (leaveOnSafePoint && !regs.fp()->hasImacropc() && \ + script->maybeNativeCodeForPC(regs.fp()->isConstructing(), regs.pc)) { \ JS_ASSERT(!TRACE_RECORDER(cx)); \ interpReturnOK = true; \ goto leave_on_safe_point; \ @@ -2463,28 +2433,28 @@ Interpret(JSContext *cx, JSStackFrame *entryFrame, uintN inlineCallCount, JSInte ENABLE_INTERRUPTS(); \ JS_END_MACRO - JSFrameRegs regs = *cx->regs; + FrameRegs regs = cx->regs(); /* Repoint cx->regs to a local variable for faster access. */ struct InterpExitGuard { JSContext *cx; - const JSFrameRegs ®s; - JSFrameRegs *prevContextRegs; - InterpExitGuard(JSContext *cx, JSFrameRegs ®s) - : cx(cx), regs(regs), prevContextRegs(cx->regs) { - cx->setCurrentRegs(®s); + const FrameRegs ®s; + FrameRegs *prevContextRegs; + InterpExitGuard(JSContext *cx, FrameRegs ®s) + : cx(cx), regs(regs), prevContextRegs(&cx->regs()) { + cx->stack.repointRegs(®s); } ~InterpExitGuard() { - JS_ASSERT(cx->regs == ®s); + JS_ASSERT(&cx->regs() == ®s); *prevContextRegs = regs; - cx->setCurrentRegs(prevContextRegs); + cx->stack.repointRegs(prevContextRegs); } } interpGuard(cx, regs); /* Copy in hot values that change infrequently. 
*/ JSRuntime *const rt = cx->runtime; - JSScript *script = regs.fp->script(); - Value *argv = regs.fp->maybeFormalArgs(); + JSScript *script = regs.fp()->script(); + Value *argv = regs.fp()->maybeFormalArgs(); CHECK_INTERRUPT_HANDLER(); #if defined(JS_TRACER) && defined(JS_METHODJIT) @@ -2495,7 +2465,7 @@ Interpret(JSContext *cx, JSStackFrame *entryFrame, uintN inlineCallCount, JSInte #endif if (!entryFrame) - entryFrame = regs.fp; + entryFrame = regs.fp(); /* * Initialize the index segment register used by LOAD_ATOM and @@ -2507,10 +2477,10 @@ Interpret(JSContext *cx, JSStackFrame *entryFrame, uintN inlineCallCount, JSInte JSAtom **atoms = script->atomMap.vector; #if JS_HAS_GENERATORS - if (JS_UNLIKELY(regs.fp->isGeneratorFrame())) { - JS_ASSERT(interpGuard.prevContextRegs == &cx->generatorFor(regs.fp)->regs); + if (JS_UNLIKELY(regs.fp()->isGeneratorFrame())) { + JS_ASSERT(interpGuard.prevContextRegs == &cx->generatorFor(regs.fp())->regs); JS_ASSERT((size_t) (regs.pc - script->code) <= script->length); - JS_ASSERT((size_t) (regs.sp - regs.fp->base()) <= StackDepth(script)); + JS_ASSERT((size_t) (regs.sp - regs.fp()->base()) <= StackDepth(script)); /* * To support generator_throw and to catch ignored exceptions, @@ -2537,14 +2507,14 @@ Interpret(JSContext *cx, JSStackFrame *entryFrame, uintN inlineCallCount, JSInte AbortRecording(cx, "attempt to reenter interpreter while recording"); } - if (regs.fp->hasImacropc()) + if (regs.fp()->hasImacropc()) atoms = COMMON_ATOMS_START(&rt->atomState); #endif /* Don't call the script prologue if executing between Method and Trace JIT. 
*/ if (interpMode == JSINTERP_NORMAL) { - JS_ASSERT_IF(!regs.fp->isGeneratorFrame(), regs.pc == script->code); - if (!ScriptPrologueOrGeneratorResume(cx, regs.fp)) + JS_ASSERT_IF(!regs.fp()->isGeneratorFrame(), regs.pc == script->code); + if (!ScriptPrologueOrGeneratorResume(cx, regs.fp())) goto error; } @@ -2646,7 +2616,7 @@ Interpret(JSContext *cx, JSStackFrame *entryFrame, uintN inlineCallCount, JSInte case JSTRAP_CONTINUE: break; case JSTRAP_RETURN: - regs.fp->setReturnValue(rval); + regs.fp()->setReturnValue(rval); interpReturnOK = JS_TRUE; goto forced_return; case JSTRAP_THROW: @@ -2773,18 +2743,18 @@ BEGIN_CASE(JSOP_POPN) { regs.sp -= GET_UINT16(regs.pc); #ifdef DEBUG - JS_ASSERT(regs.fp->base() <= regs.sp); - JSObject *obj = GetBlockChain(cx, regs.fp); + JS_ASSERT(regs.fp()->base() <= regs.sp); + JSObject *obj = GetBlockChain(cx, regs.fp()); JS_ASSERT_IF(obj, OBJ_BLOCK_DEPTH(cx, obj) + OBJ_BLOCK_COUNT(cx, obj) - <= (size_t) (regs.sp - regs.fp->base())); - for (obj = ®s.fp->scopeChain(); obj; obj = obj->getParent()) { + <= (size_t) (regs.sp - regs.fp()->base())); + for (obj = ®s.fp()->scopeChain(); obj; obj = obj->getParent()) { Class *clasp = obj->getClass(); if (clasp != &js_BlockClass && clasp != &js_WithClass) continue; - if (obj->getPrivate() != js_FloatingFrameIfGenerator(cx, regs.fp)) + if (obj->getPrivate() != js_FloatingFrameIfGenerator(cx, regs.fp())) break; - JS_ASSERT(regs.fp->base() + OBJ_BLOCK_DEPTH(cx, obj) + JS_ASSERT(regs.fp()->base() + OBJ_BLOCK_DEPTH(cx, obj) + ((clasp == &js_BlockClass) ? OBJ_BLOCK_COUNT(cx, obj) : 1) @@ -2812,11 +2782,11 @@ BEGIN_CASE(JSOP_ENTERWITH) * We set sp[-1] to the current "with" object to help asserting the * enter/leave balance in [leavewith]. 
*/ - regs.sp[-1].setObject(regs.fp->scopeChain()); + regs.sp[-1].setObject(regs.fp()->scopeChain()); END_CASE(JSOP_ENTERWITH) BEGIN_CASE(JSOP_LEAVEWITH) - JS_ASSERT(regs.sp[-1].toObject() == regs.fp->scopeChain()); + JS_ASSERT(regs.sp[-1].toObject() == regs.fp()->scopeChain()); regs.sp--; js_LeaveWith(cx); END_CASE(JSOP_LEAVEWITH) @@ -2835,16 +2805,16 @@ BEGIN_CASE(JSOP_STOP) CHECK_BRANCH(); #ifdef JS_TRACER - if (regs.fp->hasImacropc()) { + if (regs.fp()->hasImacropc()) { /* * If we are at the end of an imacro, return to its caller in the * current frame. */ JS_ASSERT(op == JSOP_STOP); - JS_ASSERT((uintN)(regs.sp - regs.fp->slots()) <= script->nslots); - jsbytecode *imacpc = regs.fp->imacropc(); + JS_ASSERT((uintN)(regs.sp - regs.fp()->slots()) <= script->nslots); + jsbytecode *imacpc = regs.fp()->imacropc(); regs.pc = imacpc + js_CodeSpec[*imacpc].length; - regs.fp->clearImacropc(); + regs.fp()->clearImacropc(); LEAVE_ON_SAFE_POINT(); atoms = script->atomMap.vector; op = JSOp(*regs.pc); @@ -2853,25 +2823,24 @@ BEGIN_CASE(JSOP_STOP) #endif interpReturnOK = true; - if (entryFrame != regs.fp) + if (entryFrame != regs.fp()) inline_return: { - JS_ASSERT(!js_IsActiveWithOrBlock(cx, ®s.fp->scopeChain(), 0)); - interpReturnOK = ScriptEpilogue(cx, regs.fp, interpReturnOK); + JS_ASSERT(!regs.fp()->hasImacropc()); + JS_ASSERT(!js_IsActiveWithOrBlock(cx, ®s.fp()->scopeChain(), 0)); + interpReturnOK = ScriptEpilogue(cx, regs.fp(), interpReturnOK); CHECK_INTERRUPT_HANDLER(); /* The JIT inlines ScriptEpilogue. */ #ifdef JS_METHODJIT jit_return: #endif - Value *newsp = regs.fp->actualArgs() - 1; - newsp[-1] = regs.fp->returnValue(); - cx->stack().popInlineFrame(cx, regs.fp->prev(), newsp); + cx->stack.popInlineFrame(); - /* Sync interpreter registers. */ - script = regs.fp->script(); - argv = regs.fp->maybeFormalArgs(); - atoms = FrameAtomBase(cx, regs.fp); + /* Sync interpreter locals. 
*/ + script = regs.fp()->script(); + argv = regs.fp()->maybeFormalArgs(); + atoms = FrameAtomBase(cx, regs.fp()); /* Resume execution in the calling frame. */ RESET_USE_METHODJIT(); @@ -2886,7 +2855,7 @@ BEGIN_CASE(JSOP_STOP) } goto error; } else { - JS_ASSERT(regs.sp == regs.fp->base()); + JS_ASSERT(regs.sp == regs.fp()->base()); } interpReturnOK = true; goto exit; @@ -3069,7 +3038,7 @@ END_CASE(JSOP_IN) BEGIN_CASE(JSOP_ITER) { - JS_ASSERT(regs.sp > regs.fp->base()); + JS_ASSERT(regs.sp > regs.fp()->base()); uintN flags = regs.pc[1]; if (!js_ValueToIterator(cx, flags, ®s.sp[-1])) goto error; @@ -3080,7 +3049,7 @@ END_CASE(JSOP_ITER) BEGIN_CASE(JSOP_MOREITER) { - JS_ASSERT(regs.sp - 1 >= regs.fp->base()); + JS_ASSERT(regs.sp - 1 >= regs.fp()->base()); JS_ASSERT(regs.sp[-1].isObject()); PUSH_NULL(); bool cond; @@ -3093,7 +3062,7 @@ END_CASE(JSOP_MOREITER) BEGIN_CASE(JSOP_ENDITER) { - JS_ASSERT(regs.sp - 1 >= regs.fp->base()); + JS_ASSERT(regs.sp - 1 >= regs.fp()->base()); bool ok = !!js_CloseIterator(cx, ®s.sp[-1].toObject()); regs.sp--; if (!ok) @@ -3103,9 +3072,9 @@ END_CASE(JSOP_ENDITER) BEGIN_CASE(JSOP_FORARG) { - JS_ASSERT(regs.sp - 1 >= regs.fp->base()); + JS_ASSERT(regs.sp - 1 >= regs.fp()->base()); uintN slot = GET_ARGNO(regs.pc); - JS_ASSERT(slot < regs.fp->numFormalArgs()); + JS_ASSERT(slot < regs.fp()->numFormalArgs()); JS_ASSERT(regs.sp[-1].isObject()); if (!IteratorNext(cx, ®s.sp[-1].toObject(), &argv[slot])) goto error; @@ -3114,11 +3083,11 @@ END_CASE(JSOP_FORARG) BEGIN_CASE(JSOP_FORLOCAL) { - JS_ASSERT(regs.sp - 1 >= regs.fp->base()); + JS_ASSERT(regs.sp - 1 >= regs.fp()->base()); uintN slot = GET_SLOTNO(regs.pc); - JS_ASSERT(slot < regs.fp->numSlots()); + JS_ASSERT(slot < regs.fp()->numSlots()); JS_ASSERT(regs.sp[-1].isObject()); - if (!IteratorNext(cx, ®s.sp[-1].toObject(), ®s.fp->slots()[slot])) + if (!IteratorNext(cx, ®s.sp[-1].toObject(), ®s.fp()->slots()[slot])) goto error; } END_CASE(JSOP_FORLOCAL) @@ -3126,7 +3095,7 @@ END_CASE(JSOP_FORLOCAL) 
BEGIN_CASE(JSOP_FORNAME) BEGIN_CASE(JSOP_FORGNAME) { - JS_ASSERT(regs.sp - 1 >= regs.fp->base()); + JS_ASSERT(regs.sp - 1 >= regs.fp()->base()); JSAtom *atom; LOAD_ATOM(0, atom); jsid id = ATOM_TO_JSID(atom); @@ -3148,7 +3117,7 @@ END_CASE(JSOP_FORNAME) BEGIN_CASE(JSOP_FORPROP) { - JS_ASSERT(regs.sp - 2 >= regs.fp->base()); + JS_ASSERT(regs.sp - 2 >= regs.fp()->base()); JSAtom *atom; LOAD_ATOM(0, atom); jsid id = ATOM_TO_JSID(atom); @@ -3173,7 +3142,7 @@ BEGIN_CASE(JSOP_FORELEM) * side expression evaluation and assignment. This opcode exists solely to * help the decompiler. */ - JS_ASSERT(regs.sp - 1 >= regs.fp->base()); + JS_ASSERT(regs.sp - 1 >= regs.fp()->base()); JS_ASSERT(regs.sp[-1].isObject()); PUSH_NULL(); if (!IteratorNext(cx, ®s.sp[-2].toObject(), ®s.sp[-1])) @@ -3182,7 +3151,7 @@ END_CASE(JSOP_FORELEM) BEGIN_CASE(JSOP_DUP) { - JS_ASSERT(regs.sp > regs.fp->base()); + JS_ASSERT(regs.sp > regs.fp()->base()); const Value &rref = regs.sp[-1]; PUSH_COPY(rref); } @@ -3190,7 +3159,7 @@ END_CASE(JSOP_DUP) BEGIN_CASE(JSOP_DUP2) { - JS_ASSERT(regs.sp - 2 >= regs.fp->base()); + JS_ASSERT(regs.sp - 2 >= regs.fp()->base()); const Value &lref = regs.sp[-2]; const Value &rref = regs.sp[-1]; PUSH_COPY(lref); @@ -3200,7 +3169,7 @@ END_CASE(JSOP_DUP2) BEGIN_CASE(JSOP_SWAP) { - JS_ASSERT(regs.sp - 2 >= regs.fp->base()); + JS_ASSERT(regs.sp - 2 >= regs.fp()->base()); Value &lref = regs.sp[-2]; Value &rref = regs.sp[-1]; lref.swap(rref); @@ -3210,7 +3179,7 @@ END_CASE(JSOP_SWAP) BEGIN_CASE(JSOP_PICK) { jsint i = regs.pc[1]; - JS_ASSERT(regs.sp - (i+1) >= regs.fp->base()); + JS_ASSERT(regs.sp - (i+1) >= regs.fp()->base()); Value lval = regs.sp[-(i+1)]; memmove(regs.sp - (i+1), regs.sp - i, sizeof(Value)*i); regs.sp[-1] = lval; @@ -3282,7 +3251,7 @@ BEGIN_CASE(JSOP_SETCONST) { JSAtom *atom; LOAD_ATOM(0, atom); - JSObject &obj = regs.fp->varobj(cx); + JSObject &obj = cx->stack.currentVarObj(); const Value &ref = regs.sp[-1]; if (!obj.defineProperty(cx, ATOM_TO_JSID(atom), ref, 
PropertyStub, StrictPropertyStub, @@ -3311,7 +3280,7 @@ END_CASE(JSOP_ENUMCONSTELEM) #endif BEGIN_CASE(JSOP_BINDGNAME) - PUSH_OBJECT(*regs.fp->scopeChain().getGlobal()); + PUSH_OBJECT(*regs.fp()->scopeChain().getGlobal()); END_CASE(JSOP_BINDGNAME) BEGIN_CASE(JSOP_BINDNAME) @@ -3334,7 +3303,7 @@ BEGIN_CASE(JSOP_BINDNAME) * the rhs. We desire such resolve hook equivalence between the two * forms. */ - obj = ®s.fp->scopeChain(); + obj = ®s.fp()->scopeChain(); if (!obj->getParent()) break; @@ -3348,7 +3317,7 @@ BEGIN_CASE(JSOP_BINDNAME) } jsid id = ATOM_TO_JSID(atom); - obj = js_FindIdentifierBase(cx, ®s.fp->scopeChain(), id); + obj = js_FindIdentifierBase(cx, ®s.fp()->scopeChain(), id); if (!obj) goto error; } while (0); @@ -3357,8 +3326,8 @@ BEGIN_CASE(JSOP_BINDNAME) END_CASE(JSOP_BINDNAME) BEGIN_CASE(JSOP_IMACOP) - JS_ASSERT(JS_UPTRDIFF(regs.fp->imacropc(), script->code) < script->length); - op = JSOp(*regs.fp->imacropc()); + JS_ASSERT(JS_UPTRDIFF(regs.fp()->imacropc(), script->code) < script->length); + op = JSOp(*regs.fp()->imacropc()); DO_OP(); #define BITWISE_OP(OP) \ @@ -3853,7 +3822,7 @@ BEGIN_CASE(JSOP_DECGNAME) BEGIN_CASE(JSOP_GNAMEINC) BEGIN_CASE(JSOP_GNAMEDEC) { - obj = ®s.fp->scopeChain(); + obj = ®s.fp()->scopeChain(); if (js_CodeSpec[op].format & JOF_GNAME) obj = obj->getGlobal(); @@ -3971,7 +3940,7 @@ BEGIN_CASE(JSOP_ARGINC) do_arg_incop: slot = GET_ARGNO(regs.pc); - JS_ASSERT(slot < regs.fp->numFormalArgs()); + JS_ASSERT(slot < regs.fp()->numFormalArgs()); METER_SLOT_OP(op, slot); vp = argv + slot; goto do_int_fast_incop; @@ -3992,9 +3961,9 @@ BEGIN_CASE(JSOP_LOCALINC) */ do_local_incop: slot = GET_SLOTNO(regs.pc); - JS_ASSERT(slot < regs.fp->numSlots()); + JS_ASSERT(slot < regs.fp()->numSlots()); METER_SLOT_OP(op, slot); - vp = regs.fp->slots() + slot; + vp = regs.fp()->slots() + slot; do_int_fast_incop: int32_t tmp; @@ -4014,16 +3983,16 @@ BEGIN_CASE(JSOP_LOCALINC) } BEGIN_CASE(JSOP_THIS) - if (!ComputeThis(cx, regs.fp)) + if (!ComputeThis(cx, 
regs.fp())) goto error; - PUSH_COPY(regs.fp->thisValue()); + PUSH_COPY(regs.fp()->thisValue()); END_CASE(JSOP_THIS) BEGIN_CASE(JSOP_UNBRANDTHIS) { - if (!ComputeThis(cx, regs.fp)) + if (!ComputeThis(cx, regs.fp())) goto error; - Value &thisv = regs.fp->thisValue(); + Value &thisv = regs.fp()->thisValue(); if (thisv.isObject()) { JSObject *obj = &thisv.toObject(); if (obj->isNative()) @@ -4038,17 +4007,17 @@ END_CASE(JSOP_UNBRANDTHIS) jsint i; BEGIN_CASE(JSOP_GETTHISPROP) - if (!ComputeThis(cx, regs.fp)) + if (!ComputeThis(cx, regs.fp())) goto error; i = 0; - PUSH_COPY(regs.fp->thisValue()); + PUSH_COPY(regs.fp()->thisValue()); goto do_getprop_body; BEGIN_CASE(JSOP_GETARGPROP) { i = ARGNO_LEN; uint32 slot = GET_ARGNO(regs.pc); - JS_ASSERT(slot < regs.fp->numFormalArgs()); + JS_ASSERT(slot < regs.fp()->numFormalArgs()); PUSH_COPY(argv[slot]); goto do_getprop_body; } @@ -4058,7 +4027,7 @@ BEGIN_CASE(JSOP_GETLOCALPROP) i = SLOTNO_LEN; uint32 slot = GET_SLOTNO(regs.pc); JS_ASSERT(slot < script->nslots); - PUSH_COPY(regs.fp->slots()[slot]); + PUSH_COPY(regs.fp()->slots()[slot]); goto do_getprop_body; } @@ -4097,7 +4066,7 @@ BEGIN_CASE(JSOP_GETXPROP) JS_ASSERT(entry->vword.isShape()); const Shape *shape = entry->vword.toShape(); NATIVE_GET(cx, obj, obj2, shape, - regs.fp->hasImacropc() ? JSGET_NO_METHOD_BARRIER : JSGET_METHOD_BARRIER, + regs.fp()->hasImacropc() ? JSGET_NO_METHOD_BARRIER : JSGET_METHOD_BARRIER, &rval); } break; @@ -4106,7 +4075,7 @@ BEGIN_CASE(JSOP_GETXPROP) jsid id = ATOM_TO_JSID(atom); if (JS_LIKELY(!aobj->getOps()->getProperty) ? !js_GetPropertyHelper(cx, obj, id, - (regs.fp->hasImacropc() || + (regs.fp()->hasImacropc() || regs.pc[JSOP_GETPROP_LENGTH + i] == JSOP_IFEQ) ? 
JSGET_CACHE_RESULT | JSGET_NO_METHOD_BARRIER : JSGET_CACHE_RESULT | JSGET_METHOD_BARRIER, @@ -4229,7 +4198,7 @@ BEGIN_CASE(JSOP_CALLPROP) } } #if JS_HAS_NO_SUCH_METHOD - if (JS_UNLIKELY(rval.isUndefined()) && regs.sp[-1].isObject()) { + if (JS_UNLIKELY(rval.isPrimitive()) && regs.sp[-1].isObject()) { LOAD_ATOM(0, atom); regs.sp[-2].setString(atom); if (!js_OnUnknownMethod(cx, regs.sp - 2)) @@ -4240,7 +4209,7 @@ BEGIN_CASE(JSOP_CALLPROP) END_CASE(JSOP_CALLPROP) BEGIN_CASE(JSOP_UNBRAND) - JS_ASSERT(regs.sp - regs.fp->slots() >= 1); + JS_ASSERT(regs.sp - regs.fp()->slots() >= 1); regs.sp[-1].toObject().unbrand(cx); END_CASE(JSOP_UNBRAND) @@ -4256,7 +4225,7 @@ BEGIN_CASE(JSOP_SETMETHOD) JSObject *obj; VALUE_TO_OBJECT(cx, &lref, obj); - JS_ASSERT_IF(op == JSOP_SETGNAME, obj == regs.fp->scopeChain().getGlobal()); + JS_ASSERT_IF(op == JSOP_SETGNAME, obj == regs.fp()->scopeChain().getGlobal()); do { PropertyCache *cache = &JS_PROPERTY_CACHE(cx); @@ -4440,7 +4409,7 @@ BEGIN_CASE(JSOP_GETELEM) if (arg < obj->getArgsInitialLength()) { copyFrom = obj->addressOfArgsElement(arg); if (!copyFrom->isMagic(JS_ARGS_HOLE)) { - if (JSStackFrame *afp = (JSStackFrame *) obj->getPrivate()) + if (StackFrame *afp = (StackFrame *) obj->getPrivate()) copyFrom = &afp->canonicalActualArg(arg); goto end_getelem; } @@ -4489,7 +4458,7 @@ BEGIN_CASE(JSOP_CALLELEM) goto error; #if JS_HAS_NO_SUCH_METHOD - if (JS_UNLIKELY(regs.sp[-2].isUndefined()) && thisv.isObject()) { + if (JS_UNLIKELY(regs.sp[-2].isPrimitive()) && thisv.isObject()) { /* For js_OnUnknownMethod, sp[-2] is the index, and sp[-1] is the object missing it. */ regs.sp[-2] = regs.sp[-1]; regs.sp[-1].setObject(*thisObj); @@ -4559,7 +4528,7 @@ BEGIN_CASE(JSOP_NEW) /* Get immediate argc and find the constructor function. 
*/ argc = GET_ARGC(regs.pc); vp = regs.sp - (2 + argc); - JS_ASSERT(vp >= regs.fp->base()); + JS_ASSERT(vp >= regs.fp()->base()); /* * Assign lval, callee, and newfun exactly as the code at inline_call: expects to * find them, to avoid nesting a js_Interpret call via js_InvokeConstructor. @@ -4576,12 +4545,12 @@ BEGIN_CASE(JSOP_NEW) goto end_new; } - flags = JSFRAME_CONSTRUCTING; + flags = StackFrame::CONSTRUCTING; goto inline_call; } } - if (!InvokeConstructor(cx, InvokeArgsAlreadyOnTheStack(vp, argc))) + if (!InvokeConstructor(cx, InvokeArgsAlreadyOnTheStack(argc, vp))) goto error; regs.sp = vp + 1; CHECK_INTERRUPT_HANDLER(); @@ -4596,7 +4565,7 @@ BEGIN_CASE(JSOP_EVAL) argc = GET_ARGC(regs.pc); vp = regs.sp - (argc + 2); - if (!IsBuiltinEvalForScope(®s.fp->scopeChain(), *vp)) + if (!IsBuiltinEvalForScope(®s.fp()->scopeChain(), *vp)) goto call_using_invoke; if (!DirectEval(cx, CallArgsFromVp(argc, vp))) @@ -4629,15 +4598,15 @@ BEGIN_CASE(JSOP_FUNCALL) } /* Restrict recursion of lightweight functions. */ - if (JS_UNLIKELY(inlineCallCount >= JS_MAX_INLINE_CALL_COUNT)) { + if (JS_UNLIKELY(inlineCallCount >= StackSpace::MAX_INLINE_CALLS)) { js_ReportOverRecursed(cx); goto error; } /* Get pointer to new frame/slots, prepare arguments. */ - StackSpace &stack = cx->stack(); - JSStackFrame *newfp = stack.getInlineFrame(cx, regs.sp, argc, newfun, - newscript, &flags); + ContextStack &stack = cx->stack; + StackFrame *newfp = stack.getInlineFrame(cx, regs.sp, argc, newfun, + newscript, &flags); if (JS_UNLIKELY(!newfp)) goto error; @@ -4646,16 +4615,16 @@ BEGIN_CASE(JSOP_FUNCALL) SetValueRangeToUndefined(newfp->slots(), newscript->nfixed); /* Officially push the frame. */ - stack.pushInlineFrame(cx, newscript, newfp, ®s); + stack.pushInlineFrame(newscript, newfp, regs); /* Refresh interpreter locals. 
*/ - JS_ASSERT(newfp == regs.fp); + JS_ASSERT(newfp == regs.fp()); script = newscript; - argv = regs.fp->formalArgsEnd() - newfun->nargs; + argv = regs.fp()->formalArgsEnd() - newfun->nargs; atoms = script->atomMap.vector; /* Now that the new frame is rooted, maybe create a call object. */ - if (newfun->isHeavyweight() && !CreateFunCallObject(cx, regs.fp)) + if (newfun->isHeavyweight() && !CreateFunCallObject(cx, regs.fp())) goto error; RESET_USE_METHODJIT(); @@ -4671,7 +4640,7 @@ BEGIN_CASE(JSOP_FUNCALL) mjit::CompileRequest request = (interpMode == JSINTERP_NORMAL) ? mjit::CompileRequest_Interpreter : mjit::CompileRequest_JIT; - mjit::CompileStatus status = mjit::CanMethodJIT(cx, script, regs.fp, request); + mjit::CompileStatus status = mjit::CanMethodJIT(cx, script, regs.fp(), request); if (status == mjit::Compile_Error) goto error; if (!TRACE_RECORDER(cx) && !TRACE_PROFILER(cx) && status == mjit::Compile_Okay) { @@ -4681,7 +4650,7 @@ BEGIN_CASE(JSOP_FUNCALL) } #endif - if (!ScriptPrologue(cx, regs.fp)) + if (!ScriptPrologue(cx, regs.fp())) goto error; CHECK_INTERRUPT_HANDLER(); @@ -4703,7 +4672,7 @@ BEGIN_CASE(JSOP_FUNCALL) call_using_invoke: bool ok; - ok = Invoke(cx, InvokeArgsAlreadyOnTheStack(vp, argc), 0); + ok = Invoke(cx, InvokeArgsAlreadyOnTheStack(argc, vp)); regs.sp = vp + 1; CHECK_INTERRUPT_HANDLER(); if (!ok) @@ -4737,7 +4706,7 @@ BEGIN_CASE(JSOP_CALLGNAME) BEGIN_CASE(JSOP_NAME) BEGIN_CASE(JSOP_CALLNAME) { - JSObject *obj = ®s.fp->scopeChain(); + JSObject *obj = ®s.fp()->scopeChain(); const Shape *shape; Value rval; @@ -4841,7 +4810,7 @@ END_CASE(JSOP_RESETBASE) BEGIN_CASE(JSOP_DOUBLE) { - JS_ASSERT(!regs.fp->hasImacropc()); + JS_ASSERT(!regs.fp()->hasImacropc()); double dbl; LOAD_DOUBLE(0, dbl); PUSH_DOUBLE(dbl); @@ -4876,7 +4845,7 @@ BEGIN_CASE(JSOP_REGEXP) */ jsatomid index = GET_FULL_INDEX(0); JSObject *proto; - if (!js_GetClassPrototype(cx, ®s.fp->scopeChain(), JSProto_RegExp, &proto)) + if (!js_GetClassPrototype(cx, ®s.fp()->scopeChain(), 
JSProto_RegExp, &proto)) goto error; JS_ASSERT(proto); JSObject *obj = js_CloneRegExpObject(cx, script->getRegExp(index), proto); @@ -4997,7 +4966,7 @@ BEGIN_CASE(JSOP_LOOKUPSWITCH) * JSOP_LOOKUPSWITCH and JSOP_LOOKUPSWITCHX are never used if any atom * index in it would exceed 64K limit. */ - JS_ASSERT(!regs.fp->hasImacropc()); + JS_ASSERT(!regs.fp()->hasImacropc()); JS_ASSERT(atoms == script->atomMap.vector); jsbytecode *pc2 = regs.pc; @@ -5066,7 +5035,7 @@ BEGIN_CASE(JSOP_TRAP) case JSTRAP_ERROR: goto error; case JSTRAP_RETURN: - regs.fp->setReturnValue(rval); + regs.fp()->setReturnValue(rval); interpReturnOK = JS_TRUE; goto forced_return; case JSTRAP_THROW: @@ -5086,7 +5055,7 @@ BEGIN_CASE(JSOP_TRAP) BEGIN_CASE(JSOP_ARGUMENTS) { Value rval; - if (!js_GetArgsValue(cx, regs.fp, &rval)) + if (!js_GetArgsValue(cx, regs.fp(), &rval)) goto error; PUSH_COPY(rval); } @@ -5096,7 +5065,7 @@ BEGIN_CASE(JSOP_ARGSUB) { jsid id = INT_TO_JSID(GET_ARGNO(regs.pc)); Value rval; - if (!js_GetArgsProperty(cx, regs.fp, id, &rval)) + if (!js_GetArgsProperty(cx, regs.fp(), id, &rval)) goto error; PUSH_COPY(rval); } @@ -5106,7 +5075,7 @@ BEGIN_CASE(JSOP_ARGCNT) { jsid id = ATOM_TO_JSID(rt->atomState.lengthAtom); Value rval; - if (!js_GetArgsProperty(cx, regs.fp, id, &rval)) + if (!js_GetArgsProperty(cx, regs.fp(), id, &rval)) goto error; PUSH_COPY(rval); } @@ -5116,7 +5085,7 @@ BEGIN_CASE(JSOP_GETARG) BEGIN_CASE(JSOP_CALLARG) { uint32 slot = GET_ARGNO(regs.pc); - JS_ASSERT(slot < regs.fp->numFormalArgs()); + JS_ASSERT(slot < regs.fp()->numFormalArgs()); METER_SLOT_OP(op, slot); PUSH_COPY(argv[slot]); if (op == JSOP_CALLARG) @@ -5127,7 +5096,7 @@ END_CASE(JSOP_GETARG) BEGIN_CASE(JSOP_SETARG) { uint32 slot = GET_ARGNO(regs.pc); - JS_ASSERT(slot < regs.fp->numFormalArgs()); + JS_ASSERT(slot < regs.fp()->numFormalArgs()); METER_SLOT_OP(op, slot); argv[slot] = regs.sp[-1]; } @@ -5137,7 +5106,7 @@ BEGIN_CASE(JSOP_GETLOCAL) { uint32 slot = GET_SLOTNO(regs.pc); JS_ASSERT(slot < 
script->nslots); - PUSH_COPY(regs.fp->slots()[slot]); + PUSH_COPY(regs.fp()->slots()[slot]); } END_CASE(JSOP_GETLOCAL) @@ -5145,7 +5114,7 @@ BEGIN_CASE(JSOP_CALLLOCAL) { uint32 slot = GET_SLOTNO(regs.pc); JS_ASSERT(slot < script->nslots); - PUSH_COPY(regs.fp->slots()[slot]); + PUSH_COPY(regs.fp()->slots()[slot]); PUSH_UNDEFINED(); } END_CASE(JSOP_CALLLOCAL) @@ -5154,14 +5123,14 @@ BEGIN_CASE(JSOP_SETLOCAL) { uint32 slot = GET_SLOTNO(regs.pc); JS_ASSERT(slot < script->nslots); - regs.fp->slots()[slot] = regs.sp[-1]; + regs.fp()->slots()[slot] = regs.sp[-1]; } END_SET_CASE(JSOP_SETLOCAL) BEGIN_CASE(JSOP_GETUPVAR_DBG) BEGIN_CASE(JSOP_CALLUPVAR_DBG) { - JSFunction *fun = regs.fp->fun(); + JSFunction *fun = regs.fp()->fun(); JS_ASSERT(FUN_KIND(fun) == JSFUN_INTERPRETED); JS_ASSERT(fun->u.i.wrapper); @@ -5202,7 +5171,7 @@ END_CASE(JSOP_GETUPVAR_DBG) BEGIN_CASE(JSOP_GETFCSLOT) BEGIN_CASE(JSOP_CALLFCSLOT) { - JS_ASSERT(regs.fp->isNonEvalFunctionFrame()); + JS_ASSERT(regs.fp()->isNonEvalFunctionFrame()); uintN index = GET_UINT16(regs.pc); JSObject *obj = &argv[-2].toObject(); @@ -5218,7 +5187,7 @@ BEGIN_CASE(JSOP_CALLGLOBAL) { uint32 slot = GET_SLOTNO(regs.pc); slot = script->getGlobalSlot(slot); - JSObject *obj = regs.fp->scopeChain().getGlobal(); + JSObject *obj = regs.fp()->scopeChain().getGlobal(); JS_ASSERT(obj->containsSlot(slot)); PUSH_COPY(obj->getSlot(slot)); if (op == JSOP_CALLGLOBAL) @@ -5232,10 +5201,10 @@ BEGIN_CASE(JSOP_DEFVAR) uint32 index = GET_INDEX(regs.pc); JSAtom *atom = atoms[index]; - JSObject *obj = ®s.fp->varobj(cx); + JSObject *obj = &cx->stack.currentVarObj(); JS_ASSERT(!obj->getOps()->defineProperty); uintN attrs = JSPROP_ENUMERATE; - if (!regs.fp->isEvalFrame()) + if (!regs.fp()->isEvalFrame()) attrs |= JSPROP_PERMANENT; /* Lookup id in order to check for redeclaration problems. 
*/ @@ -5292,11 +5261,11 @@ BEGIN_CASE(JSOP_DEFFUN) * FIXME: bug 476950, although debugger users may also demand some kind * of scope link for debugger-assisted eval-in-frame. */ - obj2 = ®s.fp->scopeChain(); + obj2 = ®s.fp()->scopeChain(); } else { JS_ASSERT(!fun->isFlatClosure()); - obj2 = GetScopeChainFast(cx, regs.fp, JSOP_DEFFUN, JSOP_DEFFUN_LENGTH); + obj2 = GetScopeChainFast(cx, regs.fp(), JSOP_DEFFUN, JSOP_DEFFUN_LENGTH); if (!obj2) goto error; } @@ -5320,7 +5289,7 @@ BEGIN_CASE(JSOP_DEFFUN) * ECMA requires functions defined when entering Eval code to be * impermanent. */ - uintN attrs = regs.fp->isEvalFrame() + uintN attrs = regs.fp()->isEvalFrame() ? JSPROP_ENUMERATE : JSPROP_ENUMERATE | JSPROP_PERMANENT; @@ -5329,7 +5298,7 @@ BEGIN_CASE(JSOP_DEFFUN) * current scope chain even for the case of function expression statements * and functions defined by eval inside let or with blocks. */ - JSObject *parent = ®s.fp->varobj(cx); + JSObject *parent = &cx->stack.currentVarObj(); /* ES5 10.5 (NB: with subsequent errata). */ jsid id = ATOM_TO_JSID(fun->atom); @@ -5396,11 +5365,11 @@ BEGIN_CASE(JSOP_DEFFUN_DBGFC) Value rval = ObjectValue(*obj); - uintN attrs = regs.fp->isEvalFrame() + uintN attrs = regs.fp()->isEvalFrame() ? 
JSPROP_ENUMERATE : JSPROP_ENUMERATE | JSPROP_PERMANENT; - JSObject &parent = regs.fp->varobj(cx); + JSObject &parent = cx->stack.currentVarObj(); jsid id = ATOM_TO_JSID(fun->atom); if (!CheckRedeclaration(cx, &parent, id, attrs)) @@ -5430,11 +5399,11 @@ BEGIN_CASE(JSOP_DEFLOCALFUN) JSObject *obj = FUN_OBJECT(fun); if (FUN_NULL_CLOSURE(fun)) { - obj = CloneFunctionObject(cx, fun, ®s.fp->scopeChain()); + obj = CloneFunctionObject(cx, fun, ®s.fp()->scopeChain()); if (!obj) goto error; } else { - JSObject *parent = GetScopeChainFast(cx, regs.fp, JSOP_DEFLOCALFUN, + JSObject *parent = GetScopeChainFast(cx, regs.fp(), JSOP_DEFLOCALFUN, JSOP_DEFLOCALFUN_LENGTH); if (!parent) goto error; @@ -5453,7 +5422,7 @@ BEGIN_CASE(JSOP_DEFLOCALFUN) uint32 slot = GET_SLOTNO(regs.pc); TRACE_2(DefLocalFunSetSlot, slot, obj); - regs.fp->slots()[slot].setObject(*obj); + regs.fp()->slots()[slot].setObject(*obj); } END_CASE(JSOP_DEFLOCALFUN) @@ -5469,7 +5438,7 @@ BEGIN_CASE(JSOP_DEFLOCALFUN_FC) uint32 slot = GET_SLOTNO(regs.pc); TRACE_2(DefLocalFunSetSlot, slot, obj); - regs.fp->slots()[slot].setObject(*obj); + regs.fp()->slots()[slot].setObject(*obj); } END_CASE(JSOP_DEFLOCALFUN_FC) @@ -5483,7 +5452,7 @@ BEGIN_CASE(JSOP_DEFLOCALFUN_DBGFC) goto error; uint32 slot = GET_SLOTNO(regs.pc); - regs.fp->slots()[slot].setObject(*obj); + regs.fp()->slots()[slot].setObject(*obj); } END_CASE(JSOP_DEFLOCALFUN_DBGFC) @@ -5498,7 +5467,7 @@ BEGIN_CASE(JSOP_LAMBDA) do { JSObject *parent; if (FUN_NULL_CLOSURE(fun)) { - parent = ®s.fp->scopeChain(); + parent = ®s.fp()->scopeChain(); if (obj->getParent() == parent) { jsbytecode *pc2 = AdvanceOverBlockchainOp(regs.pc + JSOP_LAMBDA_LENGTH); @@ -5592,7 +5561,7 @@ BEGIN_CASE(JSOP_LAMBDA) } #endif } else { - parent = GetScopeChainFast(cx, regs.fp, JSOP_LAMBDA, JSOP_LAMBDA_LENGTH); + parent = GetScopeChainFast(cx, regs.fp(), JSOP_LAMBDA, JSOP_LAMBDA_LENGTH); if (!parent) goto error; } @@ -5633,7 +5602,7 @@ BEGIN_CASE(JSOP_LAMBDA_DBGFC) END_CASE(JSOP_LAMBDA_DBGFC) 
BEGIN_CASE(JSOP_CALLEE) - JS_ASSERT(regs.fp->isNonEvalFunctionFrame()); + JS_ASSERT(regs.fp()->isNonEvalFunctionFrame()); PUSH_COPY(argv[-2]); END_CASE(JSOP_CALLEE) @@ -5677,7 +5646,7 @@ BEGIN_CASE(JSOP_SETTER) case JSOP_INITPROP: { - JS_ASSERT(regs.sp - regs.fp->base() >= 2); + JS_ASSERT(regs.sp - regs.fp()->base() >= 2); rval = regs.sp[-1]; i = -1; JSAtom *atom; @@ -5688,7 +5657,7 @@ BEGIN_CASE(JSOP_SETTER) default: JS_ASSERT(op2 == JSOP_INITELEM); - JS_ASSERT(regs.sp - regs.fp->base() >= 3); + JS_ASSERT(regs.sp - regs.fp()->base() >= 3); rval = regs.sp[-1]; id = JSID_VOID; i = -2; @@ -5809,7 +5778,7 @@ END_CASE(JSOP_NEWOBJECT) BEGIN_CASE(JSOP_ENDINIT) { /* FIXME remove JSOP_ENDINIT bug 588522 */ - JS_ASSERT(regs.sp - regs.fp->base() >= 1); + JS_ASSERT(regs.sp - regs.fp()->base() >= 1); JS_ASSERT(regs.sp[-1].isObject()); } END_CASE(JSOP_ENDINIT) @@ -5818,7 +5787,7 @@ BEGIN_CASE(JSOP_INITPROP) BEGIN_CASE(JSOP_INITMETHOD) { /* Load the property's initial value into rval. */ - JS_ASSERT(regs.sp - regs.fp->base() >= 2); + JS_ASSERT(regs.sp - regs.fp()->base() >= 2); Value rval = regs.sp[-1]; /* Load the object being initialized into lval/obj. */ @@ -5891,7 +5860,7 @@ END_CASE(JSOP_INITPROP); BEGIN_CASE(JSOP_INITELEM) { /* Pop the element's value into rval. */ - JS_ASSERT(regs.sp - regs.fp->base() >= 3); + JS_ASSERT(regs.sp - regs.fp()->base() >= 3); const Value &rref = regs.sp[-1]; /* Find the object being initialized at top of stack. 
*/ @@ -5929,8 +5898,8 @@ END_CASE(JSOP_INITELEM) BEGIN_CASE(JSOP_DEFSHARP) { uint32 slot = GET_UINT16(regs.pc); - JS_ASSERT(slot + 1 < regs.fp->numFixed()); - const Value &lref = regs.fp->slots()[slot]; + JS_ASSERT(slot + 1 < regs.fp()->numFixed()); + const Value &lref = regs.fp()->slots()[slot]; JSObject *obj; if (lref.isObject()) { obj = &lref.toObject(); @@ -5939,7 +5908,7 @@ BEGIN_CASE(JSOP_DEFSHARP) obj = NewDenseEmptyArray(cx); if (!obj) goto error; - regs.fp->slots()[slot].setObject(*obj); + regs.fp()->slots()[slot].setObject(*obj); } jsint i = (jsint) GET_UINT16(regs.pc + UINT16_LEN); jsid id = INT_TO_JSID(i); @@ -5959,14 +5928,14 @@ END_CASE(JSOP_DEFSHARP) BEGIN_CASE(JSOP_USESHARP) { uint32 slot = GET_UINT16(regs.pc); - JS_ASSERT(slot + 1 < regs.fp->numFixed()); - const Value &lref = regs.fp->slots()[slot]; + JS_ASSERT(slot + 1 < regs.fp()->numFixed()); + const Value &lref = regs.fp()->slots()[slot]; jsint i = (jsint) GET_UINT16(regs.pc + UINT16_LEN); Value rval; if (lref.isUndefined()) { rval.setUndefined(); } else { - JSObject *obj = ®s.fp->slots()[slot].toObject(); + JSObject *obj = ®s.fp()->slots()[slot].toObject(); jsid id = INT_TO_JSID(i); if (!obj->getProperty(cx, id, &rval)) goto error; @@ -5986,8 +5955,8 @@ END_CASE(JSOP_USESHARP) BEGIN_CASE(JSOP_SHARPINIT) { uint32 slot = GET_UINT16(regs.pc); - JS_ASSERT(slot + 1 < regs.fp->numFixed()); - Value *vp = ®s.fp->slots()[slot]; + JS_ASSERT(slot + 1 < regs.fp()->numFixed()); + Value *vp = ®s.fp()->slots()[slot]; Value rval = vp[1]; /* @@ -6092,10 +6061,10 @@ BEGIN_CASE(JSOP_SETLOCALPOP) * The stack must have a block with at least one local slot below the * exception object. 
*/ - JS_ASSERT((size_t) (regs.sp - regs.fp->base()) >= 2); + JS_ASSERT((size_t) (regs.sp - regs.fp()->base()) >= 2); uint32 slot = GET_UINT16(regs.pc); JS_ASSERT(slot + 1 < script->nslots); - POP_COPY_TO(regs.fp->slots()[slot]); + POP_COPY_TO(regs.fp()->slots()[slot]); } END_CASE(JSOP_SETLOCALPOP) @@ -6104,7 +6073,7 @@ BEGIN_CASE(JSOP_IFPRIMTOP) * If the top of stack is of primitive type, jump to our target. Otherwise * advance to the next opcode. */ - JS_ASSERT(regs.sp > regs.fp->base()); + JS_ASSERT(regs.sp > regs.fp()->base()); if (regs.sp[-1].isPrimitive()) { len = GET_JUMP_OFFSET(regs.pc); BRANCH(len); @@ -6112,7 +6081,7 @@ BEGIN_CASE(JSOP_IFPRIMTOP) END_CASE(JSOP_IFPRIMTOP) BEGIN_CASE(JSOP_PRIMTOP) - JS_ASSERT(regs.sp > regs.fp->base()); + JS_ASSERT(regs.sp > regs.fp()->base()); if (regs.sp[-1].isObject()) { jsint i = GET_INT8(regs.pc); js_ReportValueError2(cx, JSMSG_CANT_CONVERT_TO, -2, regs.sp[-2], NULL, @@ -6156,7 +6125,7 @@ BEGIN_CASE(JSOP_DEBUGGER) case JSTRAP_CONTINUE: break; case JSTRAP_RETURN: - regs.fp->setReturnValue(rval); + regs.fp()->setReturnValue(rval); interpReturnOK = JS_TRUE; goto forced_return; case JSTRAP_THROW: @@ -6461,10 +6430,10 @@ BEGIN_CASE(JSOP_ENTERBLOCK) JSObject *obj; LOAD_OBJECT(0, obj); JS_ASSERT(obj->isStaticBlock()); - JS_ASSERT(regs.fp->base() + OBJ_BLOCK_DEPTH(cx, obj) == regs.sp); + JS_ASSERT(regs.fp()->base() + OBJ_BLOCK_DEPTH(cx, obj) == regs.sp); Value *vp = regs.sp + OBJ_BLOCK_COUNT(cx, obj); JS_ASSERT(regs.sp < vp); - JS_ASSERT(vp <= regs.fp->slots() + script->nslots); + JS_ASSERT(vp <= regs.fp()->slots() + script->nslots); SetValueRangeToUndefined(regs.sp, vp); regs.sp = vp; @@ -6476,12 +6445,12 @@ BEGIN_CASE(JSOP_ENTERBLOCK) * anything else we should have popped off fp->scopeChain when we left its * static scope. 
*/ - JSObject *obj2 = ®s.fp->scopeChain(); + JSObject *obj2 = ®s.fp()->scopeChain(); Class *clasp; while ((clasp = obj2->getClass()) == &js_WithClass) obj2 = obj2->getParent(); if (clasp == &js_BlockClass && - obj2->getPrivate() == js_FloatingFrameIfGenerator(cx, regs.fp)) { + obj2->getPrivate() == js_FloatingFrameIfGenerator(cx, regs.fp())) { JSObject *youngestProto = obj2->getProto(); JS_ASSERT(youngestProto->isStaticBlock()); JSObject *parent = obj; @@ -6507,7 +6476,7 @@ BEGIN_CASE(JSOP_LEAVEBLOCK) * cloned onto fp->scopeChain, clear its private data, move its locals from * the stack into the clone, and pop it off the chain. */ - JSObject &obj = regs.fp->scopeChain(); + JSObject &obj = regs.fp()->scopeChain(); if (obj.getProto() == blockChain) { JS_ASSERT(obj.isClonedBlock()); if (!js_PutBlockObject(cx, JS_TRUE)) @@ -6520,10 +6489,10 @@ BEGIN_CASE(JSOP_LEAVEBLOCK) vp = ®s.sp[-1]; regs.sp -= GET_UINT16(regs.pc); if (op == JSOP_LEAVEBLOCKEXPR) { - JS_ASSERT(regs.fp->base() + blockDepth == regs.sp - 1); + JS_ASSERT(regs.fp()->base() + blockDepth == regs.sp - 1); regs.sp[-1] = *vp; } else { - JS_ASSERT(regs.fp->base() + blockDepth == regs.sp); + JS_ASSERT(regs.fp()->base() + blockDepth == regs.sp); } } END_CASE(JSOP_LEAVEBLOCK) @@ -6536,24 +6505,24 @@ BEGIN_CASE(JSOP_GENERATOR) JSObject *obj = js_NewGenerator(cx); if (!obj) goto error; - JS_ASSERT(!regs.fp->hasCallObj() && !regs.fp->hasArgsObj()); - regs.fp->setReturnValue(ObjectValue(*obj)); + JS_ASSERT(!regs.fp()->hasCallObj() && !regs.fp()->hasArgsObj()); + regs.fp()->setReturnValue(ObjectValue(*obj)); interpReturnOK = true; - if (entryFrame != regs.fp) + if (entryFrame != regs.fp()) goto inline_return; goto exit; } BEGIN_CASE(JSOP_YIELD) JS_ASSERT(!cx->isExceptionPending()); - JS_ASSERT(regs.fp->isNonEvalFunctionFrame()); - if (cx->generatorFor(regs.fp)->state == JSGEN_CLOSING) { + JS_ASSERT(regs.fp()->isNonEvalFunctionFrame()); + if (cx->generatorFor(regs.fp())->state == JSGEN_CLOSING) { js_ReportValueError(cx, 
JSMSG_BAD_GENERATOR_YIELD, JSDVG_SEARCH_STACK, argv[-2], NULL); goto error; } - regs.fp->setReturnValue(regs.sp[-1]); - regs.fp->setYielding(); + regs.fp()->setReturnValue(regs.sp[-1]); + regs.fp()->setYielding(); regs.pc += JSOP_YIELD_LENGTH; interpReturnOK = JS_TRUE; goto exit; @@ -6563,7 +6532,7 @@ BEGIN_CASE(JSOP_ARRAYPUSH) uint32 slot = GET_UINT16(regs.pc); JS_ASSERT(script->nfixed <= slot); JS_ASSERT(slot < script->nslots); - JSObject *obj = ®s.fp->slots()[slot].toObject(); + JSObject *obj = ®s.fp()->slots()[slot].toObject(); if (!js_ArrayCompPush(cx, obj, regs.sp[-1])) goto error; regs.sp--; @@ -6639,16 +6608,16 @@ END_CASE(JSOP_ARRAYPUSH) #endif /* !JS_THREADED_INTERP */ error: - JS_ASSERT(cx->regs == ®s); + JS_ASSERT(&cx->regs() == ®s); #ifdef JS_TRACER - if (regs.fp->hasImacropc() && cx->isExceptionPending()) { + if (regs.fp()->hasImacropc() && cx->isExceptionPending()) { // Handle exceptions as if they came from the imacro-calling pc. - regs.pc = regs.fp->imacropc(); - regs.fp->clearImacropc(); + regs.pc = regs.fp()->imacropc(); + regs.fp()->clearImacropc(); } #endif - JS_ASSERT(size_t((regs.fp->hasImacropc() ? regs.fp->imacropc() : regs.pc) - script->code) < + JS_ASSERT(size_t((regs.fp()->hasImacropc() ? regs.fp()->imacropc() : regs.pc) - script->code) < script->length); #ifdef JS_TRACER @@ -6687,7 +6656,7 @@ END_CASE(JSOP_ARRAYPUSH) goto error; case JSTRAP_RETURN: cx->clearPendingException(); - regs.fp->setReturnValue(rval); + regs.fp()->setReturnValue(rval); interpReturnOK = JS_TRUE; goto forced_return; case JSTRAP_THROW: @@ -6730,7 +6699,7 @@ END_CASE(JSOP_ARRAYPUSH) * with the stack depth exceeding the current one and this * condition is what we use to filter them out. 
*/ - if (tn->stackDepth > regs.sp - regs.fp->base()) + if (tn->stackDepth > regs.sp - regs.fp()->base()) continue; /* @@ -6741,7 +6710,7 @@ END_CASE(JSOP_ARRAYPUSH) regs.pc = (script)->main + tn->start + tn->length; JSBool ok = js_UnwindScope(cx, tn->stackDepth, JS_TRUE); - JS_ASSERT(regs.sp == regs.fp->base() + tn->stackDepth); + JS_ASSERT(regs.sp == regs.fp()->base() + tn->stackDepth); if (!ok) { /* * Restart the handler search with updated pc and stack depth @@ -6779,7 +6748,7 @@ END_CASE(JSOP_ARRAYPUSH) case JSTRY_ITER: { /* This is similar to JSOP_ENDITER in the interpreter loop. */ - JS_ASSERT(js_GetOpcode(cx, regs.fp->script(), regs.pc) == JSOP_ENDITER); + JS_ASSERT(js_GetOpcode(cx, regs.fp()->script(), regs.pc) == JSOP_ENDITER); Value v = cx->getPendingException(); cx->clearPendingException(); ok = js_CloseIterator(cx, ®s.sp[-1].toObject()); @@ -6802,7 +6771,7 @@ END_CASE(JSOP_ARRAYPUSH) cx->getPendingException().isMagic(JS_GENERATOR_CLOSING))) { cx->clearPendingException(); interpReturnOK = JS_TRUE; - regs.fp->clearReturnValue(); + regs.fp()->clearReturnValue(); } #endif } @@ -6816,18 +6785,18 @@ END_CASE(JSOP_ARRAYPUSH) * interpReturnOK set to true bypassing any finally blocks. */ interpReturnOK &= js_UnwindScope(cx, 0, interpReturnOK || cx->isExceptionPending()); - JS_ASSERT(regs.sp == regs.fp->base()); + JS_ASSERT(regs.sp == regs.fp()->base()); #ifdef DEBUG cx->logPrevPc = NULL; #endif - if (entryFrame != regs.fp) + if (entryFrame != regs.fp()) goto inline_return; exit: - interpReturnOK = ScriptEpilogueOrGeneratorYield(cx, regs.fp, interpReturnOK); - regs.fp->setFinishedInInterpreter(); + interpReturnOK = ScriptEpilogueOrGeneratorYield(cx, regs.fp(), interpReturnOK); + regs.fp()->setFinishedInInterpreter(); /* * At this point we are inevitably leaving an interpreted function or a @@ -6840,7 +6809,7 @@ END_CASE(JSOP_ARRAYPUSH) * error case and for a normal return, the code jumps directly to parent's * frame pc. 
*/ - JS_ASSERT(entryFrame == regs.fp); + JS_ASSERT(entryFrame == regs.fp()); #ifdef JS_TRACER JS_ASSERT_IF(interpReturnOK && interpMode == JSINTERP_RECORD, !TRACE_RECORDER(cx)); @@ -6852,7 +6821,7 @@ END_CASE(JSOP_ARRAYPUSH) # endif #endif - JS_ASSERT_IF(!regs.fp->isGeneratorFrame(), !js_IsActiveWithOrBlock(cx, ®s.fp->scopeChain(), 0)); + JS_ASSERT_IF(!regs.fp()->isGeneratorFrame(), !js_IsActiveWithOrBlock(cx, ®s.fp()->scopeChain(), 0)); return interpReturnOK; diff --git a/js/src/jsinterp.h b/js/src/jsinterp.h index 60ddb8be9e61..998f28c12aa9 100644 --- a/js/src/jsinterp.h +++ b/js/src/jsinterp.h @@ -45,807 +45,19 @@ */ #include "jsprvtd.h" #include "jspubtd.h" -#include "jsfun.h" #include "jsopcode.h" #include "jsscript.h" #include "jsvalue.h" -struct JSFrameRegs -{ - STATIC_SKIP_INFERENCE - js::Value *sp; /* stack pointer */ - jsbytecode *pc; /* program counter */ - JSStackFrame *fp; /* active frame */ -}; - -/* Flags to toggle js::Interpret() execution. */ -enum JSInterpMode -{ - JSINTERP_NORMAL = 0, /* interpreter is running normally */ - JSINTERP_RECORD = 1, /* interpreter has been started to record/run traces */ - JSINTERP_SAFEPOINT = 2, /* interpreter should leave on a method JIT safe point */ - JSINTERP_PROFILE = 3 /* interpreter should profile a loop */ -}; - -/* Flags used in JSStackFrame::flags_ */ -enum JSFrameFlags -{ - /* Primary frame type */ - JSFRAME_GLOBAL = 0x1, /* frame pushed for a global script */ - JSFRAME_FUNCTION = 0x2, /* frame pushed for a scripted call */ - JSFRAME_DUMMY = 0x4, /* frame pushed for bookkeeping */ - - /* Frame subtypes */ - JSFRAME_EVAL = 0x8, /* frame pushed for eval() or debugger eval */ - JSFRAME_DEBUGGER = 0x10, /* frame pushed for debugger eval */ - JSFRAME_GENERATOR = 0x20, /* frame is associated with a generator */ - JSFRAME_FLOATING_GENERATOR = 0x40, /* frame is is in generator obj, not on stack */ - JSFRAME_CONSTRUCTING = 0x80, /* frame is for a constructor invocation */ - - /* Temporary frame states */ - 
JSFRAME_YIELDING = 0x200, /* js::Interpret dispatched JSOP_YIELD */ - JSFRAME_FINISHED_IN_INTERP = 0x400, /* set if frame finished in Interpret() */ - - /* Concerning function arguments */ - JSFRAME_OVERRIDE_ARGS = 0x1000, /* overridden arguments local variable */ - JSFRAME_OVERFLOW_ARGS = 0x2000, /* numActualArgs > numFormalArgs */ - JSFRAME_UNDERFLOW_ARGS = 0x4000, /* numActualArgs < numFormalArgs */ - - /* Lazy frame initialization */ - JSFRAME_HAS_IMACRO_PC = 0x8000, /* frame has imacpc value available */ - JSFRAME_HAS_CALL_OBJ = 0x10000, /* frame has a callobj reachable from scopeChain_ */ - JSFRAME_HAS_ARGS_OBJ = 0x20000, /* frame has an argsobj in JSStackFrame::args */ - JSFRAME_HAS_HOOK_DATA = 0x40000, /* frame has hookData_ set */ - JSFRAME_HAS_ANNOTATION = 0x80000, /* frame has annotation_ set */ - JSFRAME_HAS_RVAL = 0x100000, /* frame has rval_ set */ - JSFRAME_HAS_SCOPECHAIN = 0x200000, /* frame has scopeChain_ set */ - JSFRAME_HAS_PREVPC = 0x400000 /* frame has prevpc_ set */ -}; - -namespace js { namespace mjit { struct JITScript; } } - -/* - * A stack frame is a part of a stack segment (see js::StackSegment) which is - * on the per-thread VM stack (see js::StackSpace). 
- */ -struct JSStackFrame -{ - private: - mutable uint32 flags_; /* bits described by JSFrameFlags */ - union { /* describes what code is executing in a */ - JSScript *script; /* global frame */ - JSFunction *fun; /* function frame, pre GetScopeChain */ - } exec; - union { /* describes the arguments of a function */ - uintN nactual; /* pre GetArgumentsObject */ - JSObject *obj; /* post GetArgumentsObject */ - JSScript *script; /* eval has no args, but needs a script */ - } args; - mutable JSObject *scopeChain_; /* current scope chain */ - JSStackFrame *prev_; /* previous cx->regs->fp */ - void *ncode_; /* return address for method JIT */ - - /* Lazily initialized */ - js::Value rval_; /* return value of the frame */ - jsbytecode *prevpc_; /* pc of previous frame*/ - jsbytecode *imacropc_; /* pc of macro caller */ - void *hookData_; /* closure returned by call hook */ - void *annotation_; /* perhaps remove with bug 546848 */ - - friend class js::StackSpace; - friend class js::FrameRegsIter; - friend struct JSContext; - - inline void initPrev(JSContext *cx); - - public: - /* - * Stack frame sort (see JSStackFrame comment above) - * - * A stack frame may have one of three types, which determines which - * members of the frame may be accessed and other invariants: - * - * global frame: execution of global code or an eval in global code - * function frame: execution of function code or an eval in a function - * dummy frame: bookkeeping frame (read: hack) - * - */ - - bool isFunctionFrame() const { - return !!(flags_ & JSFRAME_FUNCTION); - } - - bool isGlobalFrame() const { - return !!(flags_ & JSFRAME_GLOBAL); - } - - bool isDummyFrame() const { - return !!(flags_ & JSFRAME_DUMMY); - } - - bool isScriptFrame() const { - bool retval = !!(flags_ & (JSFRAME_FUNCTION | JSFRAME_GLOBAL)); - JS_ASSERT(retval == !isDummyFrame()); - return retval; - } - - /* - * Eval frames - * - * As noted above, global and function frames may optionally be 'eval - * frames'. 
Eval code shares its parent's arguments which means that the - * arg-access members of JSStackFrame may not be used for eval frames. - * Search for 'hasArgs' below for more details. - * - * A further sub-classification of eval frames is whether the frame was - * pushed for an ES5 strict-mode eval(). - */ - - bool isEvalFrame() const { - JS_ASSERT_IF(flags_ & JSFRAME_EVAL, isScriptFrame()); - return flags_ & JSFRAME_EVAL; - } - - bool isNonEvalFunctionFrame() const { - return (flags_ & (JSFRAME_FUNCTION | JSFRAME_EVAL)) == JSFRAME_FUNCTION; - } - - bool isStrictEvalFrame() const { - return isEvalFrame() && script()->strictModeCode; - } - - bool isNonStrictEvalFrame() const { - return isEvalFrame() && !script()->strictModeCode; - } - - /* - * Frame initialization - * - * After acquiring a pointer to an uninitialized stack frame on the VM - * stack from js::StackSpace, these members are used to initialize the - * stack frame before officially pushing the frame into the context. - * Collecting frame initialization into a set of inline helpers allows - * simpler reasoning and makes call-optimization easier. - */ - - /* Used for Invoke, Interpret, trace-jit LeaveTree, and method-jit stubs. */ - inline void initCallFrame(JSContext *cx, JSObject &callee, JSFunction *fun, - uint32 nactual, uint32 flags); - - /* Used for SessionInvoke. */ - inline void resetInvokeCallFrame(); - - /* Called by method-jit stubs and serve as a specification for jit-code. */ - inline void initCallFrameCallerHalf(JSContext *cx, uint32 flags, void *ncode); - inline void initCallFrameEarlyPrologue(JSFunction *fun, uint32 nactual); - inline void initCallFrameLatePrologue(); - - /* Used for eval. */ - inline void initEvalFrame(JSContext *cx, JSScript *script, JSStackFrame *prev, - uint32 flags); - inline void initGlobalFrame(JSScript *script, JSObject &chain, uint32 flags); - - /* Used when activating generators. 
*/ - inline void stealFrameAndSlots(js::Value *vp, JSStackFrame *otherfp, - js::Value *othervp, js::Value *othersp); - - /* Perhaps one fine day we will remove dummy frames. */ - inline void initDummyFrame(JSContext *cx, JSObject &chain); - - /* - * Previous frame - * - * A frame's 'prev' frame is either null or the previous frame pointed to - * by cx->regs->fp when this frame was pushed. Often, given two prev-linked - * frames, the next-frame is a function or eval that was called by the - * prev-frame, but not always: the prev-frame may have called a native that - * reentered the VM through JS_CallFunctionValue on the same context - * (without calling JS_SaveFrameChain) which pushed the next-frame. Thus, - * 'prev' has little semantic meaning and basically just tells the VM what - * to set cx->regs->fp to when this frame is popped. - */ - - JSStackFrame *prev() const { - return prev_; - } - - inline void resetGeneratorPrev(JSContext *cx); - - /* - * Frame slots - * - * A frame's 'slots' are the fixed slots associated with the frame (like - * local variables) followed by an expression stack holding temporary - * values. A frame's 'base' is the base of the expression stack. - */ - - js::Value *slots() const { - return (js::Value *)(this + 1); - } - - js::Value *base() const { - return slots() + script()->nfixed; - } - - js::Value &varSlot(uintN i) { - JS_ASSERT(i < script()->nfixed); - JS_ASSERT_IF(maybeFun(), i < script()->bindings.countVars()); - return slots()[i]; - } - - /* - * Script - * - * All function and global frames have an associated JSScript which holds - * the bytecode being executed for the frame. - */ - - /* - * Get the frame's current bytecode, assuming |this| is in |cx|. - * next is frame whose prev == this, NULL if not known or if this == cx->fp(). 
- */ - jsbytecode *pc(JSContext *cx, JSStackFrame *next = NULL); - - jsbytecode *prevpc() { - JS_ASSERT((prev_ != NULL) && (flags_ & JSFRAME_HAS_PREVPC)); - return prevpc_; - } - - JSScript *script() const { - JS_ASSERT(isScriptFrame()); - return isFunctionFrame() - ? isEvalFrame() ? args.script : fun()->script() - : exec.script; - } - - JSScript *functionScript() const { - JS_ASSERT(isFunctionFrame()); - return isEvalFrame() ? args.script : fun()->script(); - } - - JSScript *globalScript() const { - JS_ASSERT(isGlobalFrame()); - return exec.script; - } - - JSScript *maybeScript() const { - return isScriptFrame() ? script() : NULL; - } - - size_t numFixed() const { - return script()->nfixed; - } - - size_t numSlots() const { - return script()->nslots; - } - - size_t numGlobalVars() const { - JS_ASSERT(isGlobalFrame()); - return exec.script->nfixed; - } - - /* - * Function - * - * All function frames have an associated interpreted JSFunction. - */ - - JSFunction* fun() const { - JS_ASSERT(isFunctionFrame()); - return exec.fun; - } - - JSFunction* maybeFun() const { - return isFunctionFrame() ? fun() : NULL; - } - - /* - * Arguments - * - * Only non-eval function frames have arguments. A frame follows its - * arguments contiguously in memory. The arguments pushed by the caller are - * the 'actual' arguments. The declared arguments of the callee are the - * 'formal' arguments. When the caller passes less or equal actual - * arguments, the actual and formal arguments are the same array (but with - * different extents). When the caller passes too many arguments, the - * formal subset of the actual arguments is copied onto the top of the - * stack. This allows the engine to maintain a jit-time constant offset of - * arguments from the frame pointer. Since the formal subset of the actual - * arguments is potentially on the stack twice, it is important for all - * reads/writes to refer to the same canonical memory location. 
- * - * An arguments object (the object returned by the 'arguments' keyword) is - * lazily created, so a given function frame may or may not have one. - */ - - /* True if this frame has arguments. Contrast with hasArgsObj. */ - bool hasArgs() const { - return isNonEvalFunctionFrame(); - } - - uintN numFormalArgs() const { - JS_ASSERT(hasArgs()); - return fun()->nargs; - } - - js::Value &formalArg(uintN i) const { - JS_ASSERT(i < numFormalArgs()); - return formalArgs()[i]; - } - - js::Value *formalArgs() const { - JS_ASSERT(hasArgs()); - return (js::Value *)this - numFormalArgs(); - } - - js::Value *formalArgsEnd() const { - JS_ASSERT(hasArgs()); - return (js::Value *)this; - } - - js::Value *maybeFormalArgs() const { - return (flags_ & (JSFRAME_FUNCTION | JSFRAME_EVAL)) == JSFRAME_FUNCTION - ? formalArgs() - : NULL; - } - - inline uintN numActualArgs() const; - inline js::Value *actualArgs() const; - inline js::Value *actualArgsEnd() const; - - inline js::Value &canonicalActualArg(uintN i) const; - - /* - * Apply 'op' to each arg of the specified type. Stop if 'op' returns - * false. Return 'true' iff all 'op' calls returned true. - */ - template inline bool forEachCanonicalActualArg(Op op); - template inline bool forEachFormalArg(Op op); - - inline void clearMissingArgs(); - - bool hasArgsObj() const { - return !!(flags_ & JSFRAME_HAS_ARGS_OBJ); - } - - JSObject &argsObj() const { - JS_ASSERT(hasArgsObj()); - JS_ASSERT(!isEvalFrame()); - return *args.obj; - } - - JSObject *maybeArgsObj() const { - return hasArgsObj() ? &argsObj() : NULL; - } - - inline void setArgsObj(JSObject &obj); - - /* - * This value - * - * Every frame has a this value although, until 'this' is computed, the - * value may not be the semantically-correct 'this' value. - * - * The 'this' value is stored before the formal arguments for function - * frames and directly before the frame for global frames. 
The *Args - * members assert !isEvalFrame(), so we implement specialized inline - * methods for accessing 'this'. When the caller has static knowledge that - * a frame is a function or global frame, 'functionThis' and 'globalThis', - * respectively, allow more efficient access. - */ - - js::Value &functionThis() const { - JS_ASSERT(isFunctionFrame()); - if (isEvalFrame()) - return ((js::Value *)this)[-1]; - return formalArgs()[-1]; - } - - JSObject &constructorThis() const { - JS_ASSERT(hasArgs()); - return formalArgs()[-1].toObject(); - } - - js::Value &globalThis() const { - JS_ASSERT(isGlobalFrame()); - return ((js::Value *)this)[-1]; - } - - js::Value &thisValue() const { - if (flags_ & (JSFRAME_EVAL | JSFRAME_GLOBAL)) - return ((js::Value *)this)[-1]; - return formalArgs()[-1]; - } - - /* - * Callee - * - * Only function frames have a callee. An eval frame in a function has the - * same caller as its containing function frame. - */ - - js::Value &calleev() const { - JS_ASSERT(isFunctionFrame()); - if (isEvalFrame()) - return ((js::Value *)this)[-2]; - return formalArgs()[-2]; - } - - JSObject &callee() const { - JS_ASSERT(isFunctionFrame()); - return calleev().toObject(); - } - - JSObject *maybeCallee() const { - return isFunctionFrame() ? &callee() : NULL; - } - - js::CallReceiver callReceiver() const { - return js::CallReceiverFromArgv(formalArgs()); - } - - /* - * getValidCalleeObject is a fallible getter to compute the correct callee - * function object, which may require deferred cloning due to the JSObject - * methodReadBarrier. For a non-function frame, return true with *vp set - * from calleev, which may not be an object (it could be undefined). - */ - bool getValidCalleeObject(JSContext *cx, js::Value *vp); - - /* - * Scope chain - * - * Every frame has a scopeChain which, when traversed via the 'parent' link - * to the root, indicates the current global object. 
A 'call object' is a - * node on a scope chain representing a function's activation record. A - * call object is used for dynamically-scoped name lookup and lexically- - * scoped upvar access. The call object holds the values of locals and - * arguments when a function returns (and its stack frame is popped). For - * performance reasons, call objects are created lazily for 'lightweight' - * functions, i.e., functions which are not statically known to require a - * call object. Thus, a given function frame may or may not have a call - * object. When a function does have a call object, it is found by walking - * up the scope chain until the first call object. Thus, it is important, - * when setting the scope chain, to indicate whether the new scope chain - * contains a new call object and thus changes the 'hasCallObj' state. - * - * NB: 'fp->hasCallObj()' implies that fp->callObj() needs to be 'put' when - * the frame is popped. Since the scope chain of a non-strict eval frame - * contains the call object of the parent (function) frame, it is possible - * to have: - * !fp->hasCall() && fp->scopeChain().isCall() - */ - - JSObject &scopeChain() const { - JS_ASSERT_IF(!(flags_ & JSFRAME_HAS_SCOPECHAIN), isFunctionFrame()); - if (!(flags_ & JSFRAME_HAS_SCOPECHAIN)) { - scopeChain_ = callee().getParent(); - flags_ |= JSFRAME_HAS_SCOPECHAIN; - } - return *scopeChain_; - } - - bool hasCallObj() const { - bool ret = !!(flags_ & JSFRAME_HAS_CALL_OBJ); - JS_ASSERT_IF(ret, !isNonStrictEvalFrame()); - return ret; - } - - inline JSObject &callObj() const; - inline void setScopeChainNoCallObj(JSObject &obj); - inline void setScopeChainWithOwnCallObj(JSObject &obj); - - inline void markActivationObjectsAsPut(); - - /* - * Frame compartment - * - * A stack frame's compartment is the frame's containing context's - * compartment when the frame was pushed. 
- */ - - JSCompartment *compartment() const { - JS_ASSERT_IF(isScriptFrame(), scopeChain().compartment() == script()->compartment); - return scopeChain().compartment(); - } - - inline JSPrincipals *principals(JSContext *cx) const; - - /* - * Imacropc - * - * A frame's IMacro pc is the bytecode address when an imacro started - * executing (guaranteed non-null). An imacro does not push a frame, so - * when the imacro finishes, the frame's IMacro pc becomes the current pc. - */ - - bool hasImacropc() const { - return flags_ & JSFRAME_HAS_IMACRO_PC; - } - - jsbytecode *imacropc() const { - JS_ASSERT(hasImacropc()); - return imacropc_; - } - - jsbytecode *maybeImacropc() const { - return hasImacropc() ? imacropc() : NULL; - } - - void clearImacropc() { - flags_ &= ~JSFRAME_HAS_IMACRO_PC; - } - - void setImacropc(jsbytecode *pc) { - JS_ASSERT(pc); - JS_ASSERT(!(flags_ & JSFRAME_HAS_IMACRO_PC)); - imacropc_ = pc; - flags_ |= JSFRAME_HAS_IMACRO_PC; - } - - /* Annotation (will be removed after bug 546848) */ - - void* annotation() const { - return (flags_ & JSFRAME_HAS_ANNOTATION) ? annotation_ : NULL; - } - - void setAnnotation(void *annot) { - flags_ |= JSFRAME_HAS_ANNOTATION; - annotation_ = annot; - } - - /* Debugger hook data */ - - bool hasHookData() const { - return !!(flags_ & JSFRAME_HAS_HOOK_DATA); - } - - void* hookData() const { - JS_ASSERT(hasHookData()); - return hookData_; - } - - void* maybeHookData() const { - return hasHookData() ? 
hookData_ : NULL; - } - - void setHookData(void *v) { - hookData_ = v; - flags_ |= JSFRAME_HAS_HOOK_DATA; - } - - /* Return value */ - - const js::Value &returnValue() { - if (!(flags_ & JSFRAME_HAS_RVAL)) - rval_.setUndefined(); - return rval_; - } - - void markReturnValue() { - flags_ |= JSFRAME_HAS_RVAL; - } - - void setReturnValue(const js::Value &v) { - rval_ = v; - markReturnValue(); - } - - void clearReturnValue() { - rval_.setUndefined(); - markReturnValue(); - } - - /* Native-code return address */ - - void *nativeReturnAddress() const { - return ncode_; - } - - void setNativeReturnAddress(void *addr) { - ncode_ = addr; - } - - void **addressOfNativeReturnAddress() { - return &ncode_; - } - - /* - * Generator-specific members - * - * A non-eval function frame may optionally be the activation of a - * generator. For the most part, generator frames act like ordinary frames. - * For exceptions, see js_FloatingFrameIfGenerator. - */ - - bool isGeneratorFrame() const { - return !!(flags_ & JSFRAME_GENERATOR); - } - - bool isFloatingGenerator() const { - JS_ASSERT_IF(flags_ & JSFRAME_FLOATING_GENERATOR, isGeneratorFrame()); - return !!(flags_ & JSFRAME_FLOATING_GENERATOR); - } - - void initFloatingGenerator() { - JS_ASSERT(!(flags_ & JSFRAME_GENERATOR)); - flags_ |= (JSFRAME_GENERATOR | JSFRAME_FLOATING_GENERATOR); - } - - void unsetFloatingGenerator() { - flags_ &= ~JSFRAME_FLOATING_GENERATOR; - } - - void setFloatingGenerator() { - flags_ |= JSFRAME_FLOATING_GENERATOR; - } - - /* - * js::Execute pushes both global and function frames (since eval() in a - * function pushes a frame with isFunctionFrame() && isEvalFrame()). 
Most - * code should not care where a frame was pushed, but if it is necessary to - * pick out frames pushed by js::Execute, this is the right query: - */ - - bool isFramePushedByExecute() const { - return !!(flags_ & (JSFRAME_GLOBAL | JSFRAME_EVAL)); - } - - /* - * Other flags - */ - - bool isConstructing() const { - return !!(flags_ & JSFRAME_CONSTRUCTING); - } - - uint32 isConstructingFlag() const { - JS_ASSERT(isFunctionFrame()); - JS_ASSERT((flags_ & ~(JSFRAME_CONSTRUCTING | JSFRAME_FUNCTION)) == 0); - return flags_; - } - - bool isDebuggerFrame() const { - return !!(flags_ & JSFRAME_DEBUGGER); - } - - bool isEvalOrDebuggerFrame() const { - return !!(flags_ & (JSFRAME_EVAL | JSFRAME_DEBUGGER)); - } - - bool hasOverriddenArgs() const { - return !!(flags_ & JSFRAME_OVERRIDE_ARGS); - } - - bool hasOverflowArgs() const { - return !!(flags_ & JSFRAME_OVERFLOW_ARGS); - } - - void setOverriddenArgs() { - flags_ |= JSFRAME_OVERRIDE_ARGS; - } - - bool isYielding() { - return !!(flags_ & JSFRAME_YIELDING); - } - - void setYielding() { - flags_ |= JSFRAME_YIELDING; - } - - void clearYielding() { - flags_ &= ~JSFRAME_YIELDING; - } - - void setFinishedInInterpreter() { - flags_ |= JSFRAME_FINISHED_IN_INTERP; - } - - bool finishedInInterpreter() const { - return !!(flags_ & JSFRAME_FINISHED_IN_INTERP); - } - - /* - * Variables object accessors - * - * A stack frame's 'varobj' refers to the 'variables object' (ES3 term) - * associated with the Execution Context's VariableEnvironment (ES5 10.3). - * - * To compute the frame's varobj, the caller must supply the segment - * containing the frame (see js::StackSegment comment). As an abbreviation, - * the caller may pass the context if the frame is contained in that - * context's active segment. - */ - - inline JSObject &varobj(js::StackSegment *seg) const; - inline JSObject &varobj(JSContext *cx) const; - - /* Access to privates from the jits. 
*/ - - static size_t offsetOfFlags() { - return offsetof(JSStackFrame, flags_); - } - - static size_t offsetOfExec() { - return offsetof(JSStackFrame, exec); - } - - void *addressOfArgs() { - return &args; - } - - static size_t offsetOfScopeChain() { - return offsetof(JSStackFrame, scopeChain_); - } - - JSObject **addressOfScopeChain() { - JS_ASSERT(flags_ & JSFRAME_HAS_SCOPECHAIN); - return &scopeChain_; - } - - static size_t offsetOfPrev() { - return offsetof(JSStackFrame, prev_); - } - - static size_t offsetOfReturnValue() { - return offsetof(JSStackFrame, rval_); - } - - static ptrdiff_t offsetOfncode() { - return offsetof(JSStackFrame, ncode_); - } - - static ptrdiff_t offsetOfCallee(JSFunction *fun) { - JS_ASSERT(fun != NULL); - return -(fun->nargs + 2) * sizeof(js::Value); - } - - static ptrdiff_t offsetOfThis(JSFunction *fun) { - return fun == NULL - ? -1 * ptrdiff_t(sizeof(js::Value)) - : -(fun->nargs + 1) * ptrdiff_t(sizeof(js::Value)); - } - - static ptrdiff_t offsetOfFormalArg(JSFunction *fun, uintN i) { - JS_ASSERT(i < fun->nargs); - return (-(int)fun->nargs + i) * sizeof(js::Value); - } - - static size_t offsetOfFixed(uintN i) { - return sizeof(JSStackFrame) + i * sizeof(js::Value); - } - - /* Workaround for static asserts on private members. */ - - void staticAsserts() { - JS_STATIC_ASSERT(offsetof(JSStackFrame, rval_) % sizeof(js::Value) == 0); - JS_STATIC_ASSERT(sizeof(JSStackFrame) % sizeof(js::Value) == 0); - } - -#ifdef JS_METHODJIT - js::mjit::JITScript *jit() { - return script()->getJIT(isConstructing()); - } -#endif - - void methodjitStaticAsserts(); - -#ifdef DEBUG - /* Poison scopeChain value set before a frame is flushed. 
*/ - static JSObject *const sInvalidScopeChain; -#endif -}; +#include "vm/Stack.h" namespace js { -static const size_t VALUES_PER_STACK_FRAME = sizeof(JSStackFrame) / sizeof(Value); +extern JSObject * +GetBlockChain(JSContext *cx, StackFrame *fp); extern JSObject * -GetBlockChain(JSContext *cx, JSStackFrame *fp); - -extern JSObject * -GetBlockChainFast(JSContext *cx, JSStackFrame *fp, JSOp op, size_t oplen); +GetBlockChainFast(JSContext *cx, StackFrame *fp, JSOp op, size_t oplen); extern JSObject * GetScopeChain(JSContext *cx); @@ -858,10 +70,10 @@ GetScopeChain(JSContext *cx); * must reflect at runtime. */ extern JSObject * -GetScopeChain(JSContext *cx, JSStackFrame *fp); +GetScopeChain(JSContext *cx, StackFrame *fp); extern JSObject * -GetScopeChainFast(JSContext *cx, JSStackFrame *fp, JSOp op, size_t oplen); +GetScopeChainFast(JSContext *cx, StackFrame *fp, JSOp op, size_t oplen); /* * Report an error that the this value passed as |this| in the given arguments @@ -881,16 +93,6 @@ ReportIncompatibleMethod(JSContext *cx, Value *vp, Class *clasp); template bool GetPrimitiveThis(JSContext *cx, Value *vp, T *v); -inline void -PutActivationObjects(JSContext *cx, JSStackFrame *fp) -{ - /* The order is important since js_PutCallObject does js_PutArgsObject. */ - if (fp->hasCallObj()) - js_PutCallObject(cx, fp); - else if (fp->hasArgsObj()) - js_PutArgsObject(cx, fp); -} - /* * ScriptPrologue/ScriptEpilogue must be called in pairs. ScriptPrologue * must be called before the script executes. 
ScriptEpilogue must be called @@ -898,10 +100,10 @@ PutActivationObjects(JSContext *cx, JSStackFrame *fp) */ inline bool -ScriptPrologue(JSContext *cx, JSStackFrame *fp, JSScript *script); +ScriptPrologue(JSContext *cx, StackFrame *fp, JSScript *script); inline bool -ScriptEpilogue(JSContext *cx, JSStackFrame *fp, bool ok); +ScriptEpilogue(JSContext *cx, StackFrame *fp, bool ok); /* * It is not valid to call ScriptPrologue when a generator is resumed or to @@ -911,18 +113,18 @@ ScriptEpilogue(JSContext *cx, JSStackFrame *fp, bool ok); */ inline bool -ScriptPrologueOrGeneratorResume(JSContext *cx, JSStackFrame *fp); +ScriptPrologueOrGeneratorResume(JSContext *cx, StackFrame *fp); inline bool -ScriptEpilogueOrGeneratorYield(JSContext *cx, JSStackFrame *fp, bool ok); +ScriptEpilogueOrGeneratorYield(JSContext *cx, StackFrame *fp, bool ok); /* Implemented in jsdbgapi: */ extern void -ScriptDebugPrologue(JSContext *cx, JSStackFrame *fp); +ScriptDebugPrologue(JSContext *cx, StackFrame *fp); extern bool -ScriptDebugEpilogue(JSContext *cx, JSStackFrame *fp, bool ok); +ScriptDebugEpilogue(JSContext *cx, StackFrame *fp, bool ok); /* * For a given |call|, convert null/undefined |this| into the global object for @@ -940,18 +142,40 @@ BoxNonStrictThis(JSContext *cx, const CallReceiver &call); * an optimization to avoid global-this computation). */ inline bool -ComputeThis(JSContext *cx, JSStackFrame *fp); +ComputeThis(JSContext *cx, StackFrame *fp); + +/* + * Choose enumerator values so that the enum can be passed used directly as the + * stack frame flags. 
+ */ +enum ConstructOption { + INVOKE_NORMAL = 0, + INVOKE_CONSTRUCTOR = StackFrame::CONSTRUCTING +}; +JS_STATIC_ASSERT(INVOKE_CONSTRUCTOR != INVOKE_NORMAL); + +static inline uintN +ToReportFlags(ConstructOption option) +{ + return (uintN)option; +} + +static inline uint32 +ToFrameFlags(ConstructOption option) +{ + return (uintN)option; +} /* * The js::InvokeArgumentsGuard passed to js_Invoke must come from an * immediately-enclosing successful call to js::StackSpace::pushInvokeArgs, - * i.e., there must have been no un-popped pushes to cx->stack(). Furthermore, + * i.e., there must have been no un-popped pushes to cx->stack. Furthermore, * |args.getvp()[0]| should be the callee, |args.getvp()[1]| should be |this|, * and the range [args.getvp() + 2, args.getvp() + 2 + args.getArgc()) should * be initialized actual arguments. */ extern JS_REQUIRES_STACK bool -Invoke(JSContext *cx, const CallArgs &args, uint32 flags); +Invoke(JSContext *cx, const CallArgs &args, ConstructOption option = INVOKE_NORMAL); /* * Natives like sort/forEach/replace call Invoke repeatedly with the same @@ -979,29 +203,9 @@ Invoke(JSContext *cx, const CallArgs &args, uint32 flags); */ class InvokeSessionGuard; -/* - * Consolidated js_Invoke flags simply rename certain JSFRAME_* flags, so that - * we can share bits stored in JSStackFrame.flags and passed to: - * - * js_Invoke - * js_InternalInvoke - * js_ValueToFunction - * js_ValueToFunctionObject - * js_ValueToCallableObject - * js_ReportIsNotFunction - * - * See jsfun.h for the latter four and flag renaming macros. - */ -#define JSINVOKE_CONSTRUCT JSFRAME_CONSTRUCTING - -/* - * Mask to isolate construct and iterator flags for use with jsfun.h functions. - */ -#define JSINVOKE_FUNFLAGS JSINVOKE_CONSTRUCT - /* * "External" calls may come from C or C++ code using a JSContext on which no - * JS is running (!cx->fp), so they may need to push a dummy JSStackFrame. + * JS is running (!cx->fp), so they may need to push a dummy StackFrame. 
*/ extern bool @@ -1037,17 +241,26 @@ ExternalInvokeConstructor(JSContext *cx, const Value &fval, uintN argc, Value *a */ extern JS_FORCES_STACK bool Execute(JSContext *cx, JSObject &chain, JSScript *script, - JSStackFrame *prev, uintN flags, Value *result); + StackFrame *prev, uintN flags, Value *result); + +/* Flags to toggle js::Interpret() execution. */ +enum InterpMode +{ + JSINTERP_NORMAL = 0, /* interpreter is running normally */ + JSINTERP_RECORD = 1, /* interpreter has been started to record/run traces */ + JSINTERP_SAFEPOINT = 2, /* interpreter should leave on a method JIT safe point */ + JSINTERP_PROFILE = 3 /* interpreter should profile a loop */ +}; /* * Execute the caller-initialized frame for a user-defined script or function * pointed to by cx->fp until completion or error. */ extern JS_REQUIRES_STACK JS_NEVER_INLINE bool -Interpret(JSContext *cx, JSStackFrame *stopFp, uintN inlineCallCount = 0, JSInterpMode mode = JSINTERP_NORMAL); +Interpret(JSContext *cx, StackFrame *stopFp, uintN inlineCallCount = 0, InterpMode mode = JSINTERP_NORMAL); extern JS_REQUIRES_STACK bool -RunScript(JSContext *cx, JSScript *script, JSStackFrame *fp); +RunScript(JSContext *cx, JSScript *script, StackFrame *fp); extern bool CheckRedeclaration(JSContext *cx, JSObject *obj, jsid id, uintN attrs); @@ -1102,8 +315,6 @@ GetUpvar(JSContext *cx, uintN level, js::UpvarCookie cookie); # endif #endif -#define JS_MAX_INLINE_CALL_COUNT 3000 - #if !JS_LONE_INTERPRET # define JS_STATIC_INTERPRET static #else diff --git a/js/src/jsinterpinlines.h b/js/src/jsinterpinlines.h index 2b735c031575..9b1ec580afe2 100644 --- a/js/src/jsinterpinlines.h +++ b/js/src/jsinterpinlines.h @@ -52,425 +52,7 @@ #include "jsfuninlines.h" -inline void -JSStackFrame::initPrev(JSContext *cx) -{ - JS_ASSERT(flags_ & JSFRAME_HAS_PREVPC); - if (JSFrameRegs *regs = cx->regs) { - prev_ = regs->fp; - prevpc_ = regs->pc; - JS_ASSERT_IF(!prev_->isDummyFrame() && !prev_->hasImacropc(), - uint32(prevpc_ - 
prev_->script()->code) < prev_->script()->length); - } else { - prev_ = NULL; -#ifdef DEBUG - prevpc_ = (jsbytecode *)0xbadc; -#endif - } -} - -inline void -JSStackFrame::resetGeneratorPrev(JSContext *cx) -{ - flags_ |= JSFRAME_HAS_PREVPC; - initPrev(cx); -} - -inline void -JSStackFrame::initCallFrame(JSContext *cx, JSObject &callee, JSFunction *fun, - uint32 nactual, uint32 flagsArg) -{ - JS_ASSERT((flagsArg & ~(JSFRAME_CONSTRUCTING | - JSFRAME_OVERFLOW_ARGS | - JSFRAME_UNDERFLOW_ARGS)) == 0); - JS_ASSERT(fun == callee.getFunctionPrivate()); - - /* Initialize stack frame members. */ - flags_ = JSFRAME_FUNCTION | JSFRAME_HAS_PREVPC | JSFRAME_HAS_SCOPECHAIN | flagsArg; - exec.fun = fun; - args.nactual = nactual; /* only need to write if over/under-flow */ - scopeChain_ = callee.getParent(); - initPrev(cx); - JS_ASSERT(!hasImacropc()); - JS_ASSERT(!hasHookData()); - JS_ASSERT(annotation() == NULL); - JS_ASSERT(!hasCallObj()); -} - -inline void -JSStackFrame::resetInvokeCallFrame() -{ - /* Undo changes to frame made during execution; see initCallFrame */ - - JS_ASSERT(!(flags_ & ~(JSFRAME_FUNCTION | - JSFRAME_OVERFLOW_ARGS | - JSFRAME_UNDERFLOW_ARGS | - JSFRAME_OVERRIDE_ARGS | - JSFRAME_HAS_PREVPC | - JSFRAME_HAS_RVAL | - JSFRAME_HAS_SCOPECHAIN | - JSFRAME_HAS_ANNOTATION | - JSFRAME_HAS_HOOK_DATA | - JSFRAME_HAS_CALL_OBJ | - JSFRAME_HAS_ARGS_OBJ | - JSFRAME_FINISHED_IN_INTERP))); - - /* - * Since the stack frame is usually popped after PutActivationObjects, - * these bits aren't cleared. The activation objects must have actually - * been put, though. 
- */ - JS_ASSERT_IF(flags_ & JSFRAME_HAS_CALL_OBJ, callObj().getPrivate() == NULL); - JS_ASSERT_IF(flags_ & JSFRAME_HAS_ARGS_OBJ, argsObj().getPrivate() == NULL); - - flags_ &= JSFRAME_FUNCTION | - JSFRAME_OVERFLOW_ARGS | - JSFRAME_HAS_PREVPC | - JSFRAME_UNDERFLOW_ARGS; - - JS_ASSERT(exec.fun == callee().getFunctionPrivate()); - scopeChain_ = callee().getParent(); -} - -inline void -JSStackFrame::initCallFrameCallerHalf(JSContext *cx, uint32 flagsArg, - void *ncode) -{ - JS_ASSERT((flagsArg & ~(JSFRAME_CONSTRUCTING | - JSFRAME_FUNCTION | - JSFRAME_OVERFLOW_ARGS | - JSFRAME_UNDERFLOW_ARGS)) == 0); - - flags_ = JSFRAME_FUNCTION | flagsArg; - prev_ = cx->regs->fp; - ncode_ = ncode; -} - -/* - * The "early prologue" refers to the members that are stored for the benefit - * of slow paths before initializing the rest of the members. - */ -inline void -JSStackFrame::initCallFrameEarlyPrologue(JSFunction *fun, uint32 nactual) -{ - exec.fun = fun; - if (flags_ & (JSFRAME_OVERFLOW_ARGS | JSFRAME_UNDERFLOW_ARGS)) - args.nactual = nactual; -} - -/* - * The "late prologue" refers to the members that are stored after having - * checked for stack overflow and formal/actual arg mismatch. - */ -inline void -JSStackFrame::initCallFrameLatePrologue() -{ - SetValueRangeToUndefined(slots(), script()->nfixed); -} - -inline void -JSStackFrame::initEvalFrame(JSContext *cx, JSScript *script, JSStackFrame *prev, uint32 flagsArg) -{ - JS_ASSERT(flagsArg & JSFRAME_EVAL); - JS_ASSERT((flagsArg & ~(JSFRAME_EVAL | JSFRAME_DEBUGGER)) == 0); - JS_ASSERT(prev->isScriptFrame()); - - /* Copy (callee, thisv). */ - js::Value *dstvp = (js::Value *)this - 2; - js::Value *srcvp = prev->hasArgs() - ? prev->formalArgs() - 2 - : (js::Value *)prev - 2; - dstvp[0] = srcvp[0]; - dstvp[1] = srcvp[1]; - JS_ASSERT_IF(prev->isFunctionFrame(), - dstvp[0].toObject().isFunction()); - - /* Initialize stack frame members. 
*/ - flags_ = flagsArg | JSFRAME_HAS_PREVPC | JSFRAME_HAS_SCOPECHAIN | - (prev->flags_ & (JSFRAME_FUNCTION | JSFRAME_GLOBAL)); - if (isFunctionFrame()) { - exec = prev->exec; - args.script = script; - } else { - exec.script = script; - } - - scopeChain_ = &prev->scopeChain(); - prev_ = prev; - prevpc_ = prev->pc(cx); - JS_ASSERT(!hasImacropc()); - JS_ASSERT(!hasHookData()); - setAnnotation(prev->annotation()); -} - -inline void -JSStackFrame::initGlobalFrame(JSScript *script, JSObject &chain, uint32 flagsArg) -{ - JS_ASSERT((flagsArg & ~(JSFRAME_EVAL | JSFRAME_DEBUGGER)) == 0); - - /* Initialize (callee, thisv). */ - js::Value *vp = (js::Value *)this - 2; - vp[0].setUndefined(); - vp[1].setUndefined(); /* Set after frame pushed using thisObject */ - - /* Initialize stack frame members. */ - flags_ = flagsArg | JSFRAME_GLOBAL | JSFRAME_HAS_PREVPC | JSFRAME_HAS_SCOPECHAIN; - exec.script = script; - args.script = (JSScript *)0xbad; - scopeChain_ = &chain; - prev_ = NULL; - JS_ASSERT(!hasImacropc()); - JS_ASSERT(!hasHookData()); - JS_ASSERT(annotation() == NULL); -} - -inline void -JSStackFrame::initDummyFrame(JSContext *cx, JSObject &chain) -{ - js::PodZero(this); - flags_ = JSFRAME_DUMMY | JSFRAME_HAS_PREVPC | JSFRAME_HAS_SCOPECHAIN; - initPrev(cx); - chain.isGlobal(); - setScopeChainNoCallObj(chain); -} - -inline void -JSStackFrame::stealFrameAndSlots(js::Value *vp, JSStackFrame *otherfp, - js::Value *othervp, js::Value *othersp) -{ - JS_ASSERT(vp == (js::Value *)this - (otherfp->formalArgsEnd() - othervp)); - JS_ASSERT(othervp == otherfp->actualArgs() - 2); - JS_ASSERT(othersp >= otherfp->slots()); - JS_ASSERT(othersp <= otherfp->base() + otherfp->numSlots()); - - PodCopy(vp, othervp, othersp - othervp); - JS_ASSERT(vp == this->actualArgs() - 2); - - /* Catch bad-touching of non-canonical args (e.g., generator_trace). 
*/ - if (otherfp->hasOverflowArgs()) - Debug_SetValueRangeToCrashOnTouch(othervp, othervp + 2 + otherfp->numFormalArgs()); - - /* - * Repoint Call, Arguments, Block and With objects to the new live frame. - * Call and Arguments are done directly because we have pointers to them. - * Block and With objects are done indirectly through 'liveFrame'. See - * js_LiveFrameToFloating comment in jsiter.h. - */ - if (hasCallObj()) { - JSObject &obj = callObj(); - obj.setPrivate(this); - otherfp->flags_ &= ~JSFRAME_HAS_CALL_OBJ; - if (js_IsNamedLambda(fun())) { - JSObject *env = obj.getParent(); - JS_ASSERT(env->getClass() == &js_DeclEnvClass); - env->setPrivate(this); - } - } - if (hasArgsObj()) { - JSObject &args = argsObj(); - JS_ASSERT(args.isArguments()); - if (args.isNormalArguments()) - args.setPrivate(this); - else - JS_ASSERT(!args.getPrivate()); - otherfp->flags_ &= ~JSFRAME_HAS_ARGS_OBJ; - } -} - -inline js::Value & -JSStackFrame::canonicalActualArg(uintN i) const -{ - if (i < numFormalArgs()) - return formalArg(i); - JS_ASSERT(i < numActualArgs()); - return actualArgs()[i]; -} - -template -inline bool -JSStackFrame::forEachCanonicalActualArg(Op op) -{ - uintN nformal = fun()->nargs; - js::Value *formals = formalArgsEnd() - nformal; - uintN nactual = numActualArgs(); - if (nactual <= nformal) { - uintN i = 0; - js::Value *actualsEnd = formals + nactual; - for (js::Value *p = formals; p != actualsEnd; ++p, ++i) { - if (!op(i, p)) - return false; - } - } else { - uintN i = 0; - js::Value *formalsEnd = formalArgsEnd(); - for (js::Value *p = formals; p != formalsEnd; ++p, ++i) { - if (!op(i, p)) - return false; - } - js::Value *actuals = formalsEnd - (nactual + 2); - js::Value *actualsEnd = formals - 2; - for (js::Value *p = actuals; p != actualsEnd; ++p, ++i) { - if (!op(i, p)) - return false; - } - } - return true; -} - -template -inline bool -JSStackFrame::forEachFormalArg(Op op) -{ - js::Value *formals = formalArgsEnd() - fun()->nargs; - js::Value *formalsEnd = 
formalArgsEnd(); - uintN i = 0; - for (js::Value *p = formals; p != formalsEnd; ++p, ++i) { - if (!op(i, p)) - return false; - } - return true; -} - -namespace js { - -struct CopyTo -{ - Value *dst; - CopyTo(Value *dst) : dst(dst) {} - bool operator()(uintN, Value *src) { - *dst++ = *src; - return true; - } -}; - -} - -JS_ALWAYS_INLINE void -JSStackFrame::clearMissingArgs() -{ - if (flags_ & JSFRAME_UNDERFLOW_ARGS) - SetValueRangeToUndefined(formalArgs() + numActualArgs(), formalArgsEnd()); -} - -inline JSObject & -JSStackFrame::varobj(js::StackSegment *seg) const -{ - JS_ASSERT(seg->contains(this)); - return isFunctionFrame() ? callObj() : seg->getInitialVarObj(); -} - -inline JSObject & -JSStackFrame::varobj(JSContext *cx) const -{ - JS_ASSERT(cx->activeSegment()->contains(this)); - return isFunctionFrame() ? callObj() : cx->activeSegment()->getInitialVarObj(); -} - -inline uintN -JSStackFrame::numActualArgs() const -{ - JS_ASSERT(hasArgs()); - if (JS_UNLIKELY(flags_ & (JSFRAME_OVERFLOW_ARGS | JSFRAME_UNDERFLOW_ARGS))) - return hasArgsObj() ? argsObj().getArgsInitialLength() : args.nactual; - return numFormalArgs(); -} - -inline js::Value * -JSStackFrame::actualArgs() const -{ - JS_ASSERT(hasArgs()); - js::Value *argv = formalArgs(); - if (JS_UNLIKELY(flags_ & JSFRAME_OVERFLOW_ARGS)) { - uintN nactual = hasArgsObj() ? 
argsObj().getArgsInitialLength() : args.nactual; - return argv - (2 + nactual); - } - return argv; -} - -inline js::Value * -JSStackFrame::actualArgsEnd() const -{ - JS_ASSERT(hasArgs()); - if (JS_UNLIKELY(flags_ & JSFRAME_OVERFLOW_ARGS)) - return formalArgs() - 2; - return formalArgs() + numActualArgs(); -} - -inline void -JSStackFrame::setArgsObj(JSObject &obj) -{ - JS_ASSERT_IF(hasArgsObj(), &obj == args.obj); - JS_ASSERT_IF(!hasArgsObj(), numActualArgs() == obj.getArgsInitialLength()); - args.obj = &obj; - flags_ |= JSFRAME_HAS_ARGS_OBJ; -} - -inline void -JSStackFrame::setScopeChainNoCallObj(JSObject &obj) -{ -#ifdef DEBUG - JS_ASSERT(&obj != NULL); - if (&obj != sInvalidScopeChain) { - if (hasCallObj()) { - JSObject *pobj = &obj; - while (pobj && pobj->getPrivate() != this) - pobj = pobj->getParent(); - JS_ASSERT(pobj); - } else { - for (JSObject *pobj = &obj; pobj; pobj = pobj->getParent()) - JS_ASSERT_IF(pobj->isCall(), pobj->getPrivate() != this); - } - } -#endif - scopeChain_ = &obj; - flags_ |= JSFRAME_HAS_SCOPECHAIN; -} - -inline void -JSStackFrame::setScopeChainWithOwnCallObj(JSObject &obj) -{ - JS_ASSERT(&obj != NULL); - JS_ASSERT(!hasCallObj() && obj.isCall() && obj.getPrivate() == this); - scopeChain_ = &obj; - flags_ |= JSFRAME_HAS_SCOPECHAIN | JSFRAME_HAS_CALL_OBJ; -} - -inline JSObject & -JSStackFrame::callObj() const -{ - JS_ASSERT_IF(isNonEvalFunctionFrame() || isStrictEvalFrame(), hasCallObj()); - - JSObject *pobj = &scopeChain(); - while (JS_UNLIKELY(pobj->getClass() != &js_CallClass)) { - JS_ASSERT(js::IsCacheableNonGlobalScope(pobj) || pobj->isWith()); - pobj = pobj->getParent(); - } - return *pobj; -} - -inline void -JSStackFrame::markActivationObjectsAsPut() -{ - if (flags_ & (JSFRAME_HAS_ARGS_OBJ | JSFRAME_HAS_CALL_OBJ)) { - if (hasArgsObj() && !argsObj().getPrivate()) { - args.nactual = args.obj->getArgsInitialLength(); - flags_ &= ~JSFRAME_HAS_ARGS_OBJ; - } - if (hasCallObj() && !callObj().getPrivate()) { - /* - * For function frames, 
the call object may or may not have have an - * enclosing DeclEnv object, so we use the callee's parent, since - * it was the initial scope chain. For global (strict) eval frames, - * there is no calle, but the call object's parent is the initial - * scope chain. - */ - scopeChain_ = isFunctionFrame() - ? callee().getParent() - : scopeChain_->getParent(); - flags_ &= ~JSFRAME_HAS_CALL_OBJ; - } - } -} +#include "vm/Stack-inl.h" namespace js { @@ -548,12 +130,11 @@ InvokeSessionGuard::invoke(JSContext *cx) const #else if (!optimized()) #endif - return Invoke(cx, args_, 0); + return Invoke(cx, args_); /* Clear any garbage left from the last Invoke. */ - JSStackFrame *fp = frame_.fp(); + StackFrame *fp = frame_.fp(); fp->clearMissingArgs(); - PutActivationObjects(cx, frame_.fp()); fp->resetInvokeCallFrame(); SetValueRangeToUndefined(fp->slots(), script_->nfixed); @@ -563,9 +144,9 @@ InvokeSessionGuard::invoke(JSContext *cx) const Probes::enterJSFun(cx, fp->fun(), script_); #ifdef JS_METHODJIT ok = mjit::EnterMethodJIT(cx, fp, code, stackLimit_); - cx->regs->pc = stop_; + cx->regs().pc = stop_; #else - cx->regs->pc = script_->code; + cx->regs().pc = script_->code; ok = Interpret(cx, cx->fp()); #endif Probes::exitJSFun(cx, fp->fun(), script_); @@ -606,7 +187,7 @@ class PrimitiveBehavior { } // namespace detail template -bool +inline bool GetPrimitiveThis(JSContext *cx, Value *vp, T *v) { typedef detail::PrimitiveBehavior Behavior; @@ -704,7 +285,7 @@ ComputeImplicitThis(JSContext *cx, JSObject *obj, const Value &funval, Value *vp } inline bool -ComputeThis(JSContext *cx, JSStackFrame *fp) +ComputeThis(JSContext *cx, StackFrame *fp) { Value &thisv = fp->thisValue(); if (thisv.isObject()) @@ -761,7 +342,7 @@ ValuePropertyBearer(JSContext *cx, const Value &v, int spindex) } inline bool -ScriptPrologue(JSContext *cx, JSStackFrame *fp) +ScriptPrologue(JSContext *cx, StackFrame *fp) { JS_ASSERT_IF(fp->isNonEvalFunctionFrame() && fp->fun()->isHeavyweight(), fp->hasCallObj()); 
@@ -778,7 +359,7 @@ ScriptPrologue(JSContext *cx, JSStackFrame *fp) } inline bool -ScriptEpilogue(JSContext *cx, JSStackFrame *fp, bool ok) +ScriptEpilogue(JSContext *cx, StackFrame *fp, bool ok) { if (cx->compartment->debugMode) ok = ScriptDebugEpilogue(cx, fp, ok); @@ -797,7 +378,7 @@ ScriptEpilogue(JSContext *cx, JSStackFrame *fp, bool ok) } inline bool -ScriptPrologueOrGeneratorResume(JSContext *cx, JSStackFrame *fp) +ScriptPrologueOrGeneratorResume(JSContext *cx, StackFrame *fp) { if (!fp->isGeneratorFrame()) return ScriptPrologue(cx, fp); @@ -807,7 +388,7 @@ ScriptPrologueOrGeneratorResume(JSContext *cx, JSStackFrame *fp) } inline bool -ScriptEpilogueOrGeneratorYield(JSContext *cx, JSStackFrame *fp, bool ok) +ScriptEpilogueOrGeneratorYield(JSContext *cx, StackFrame *fp, bool ok) { if (!fp->isYielding()) return ScriptEpilogue(cx, fp, ok); @@ -816,6 +397,6 @@ ScriptEpilogueOrGeneratorYield(JSContext *cx, JSStackFrame *fp, bool ok) return ok; } -} +} /* namespace js */ #endif /* jsinterpinlines_h__ */ diff --git a/js/src/jsiter.cpp b/js/src/jsiter.cpp index 1f312571ced3..0b4da51ce94e 100644 --- a/js/src/jsiter.cpp +++ b/js/src/jsiter.cpp @@ -65,6 +65,7 @@ #include "jsobj.h" #include "jsopcode.h" #include "jsproxy.h" +#include "jsscan.h" #include "jsscope.h" #include "jsscript.h" #include "jsstaticcheck.h" @@ -74,11 +75,11 @@ #include "jsxml.h" #endif -#include "jscntxtinlines.h" -#include "jsinterpinlines.h" #include "jsobjinlines.h" #include "jsstrinlines.h" +#include "vm/Stack-inl.h" + using namespace js; using namespace js::gc; @@ -1091,7 +1092,7 @@ generator_trace(JSTracer *trc, JSObject *obj) if (gen->state == JSGEN_RUNNING || gen->state == JSGEN_CLOSING) return; - JSStackFrame *fp = gen->floatingFrame(); + StackFrame *fp = gen->floatingFrame(); JS_ASSERT(gen->liveFrame() == fp); /* @@ -1134,17 +1135,10 @@ Class js_GeneratorClass = { } }; -static inline void -RebaseRegsFromTo(JSFrameRegs *regs, JSStackFrame *from, JSStackFrame *to) -{ - regs->fp = to; - 
regs->sp = to->slots() + (regs->sp - from->slots()); -} - /* * Called from the JSOP_GENERATOR case in the interpreter, with fp referring * to the frame by which the generator function was activated. Create a new - * JSGenerator object, which contains its own JSStackFrame that we populate + * JSGenerator object, which contains its own StackFrame that we populate * from *fp. We know that upon return, the JSOP_GENERATOR opcode will return * from the activation in fp, so we can steal away fp->callobj and fp->argsobj * if they are non-null. @@ -1156,8 +1150,8 @@ js_NewGenerator(JSContext *cx) if (!obj) return NULL; - JSStackFrame *stackfp = cx->fp(); - JS_ASSERT(stackfp->base() == cx->regs->sp); + StackFrame *stackfp = cx->fp(); + JS_ASSERT(stackfp->base() == cx->regs().sp); JS_ASSERT(stackfp->actualArgs() <= stackfp->formalArgs()); /* Load and compute stack slot counts. */ @@ -1177,7 +1171,7 @@ js_NewGenerator(JSContext *cx) /* Cut up floatingStack space. */ Value *genvp = gen->floatingStack; - JSStackFrame *genfp = reinterpret_cast(genvp + vplen); + StackFrame *genfp = reinterpret_cast(genvp + vplen); /* Initialize JSGenerator. */ gen->obj = obj; @@ -1186,11 +1180,11 @@ js_NewGenerator(JSContext *cx) gen->floating = genfp; /* Initialize regs stored in generator. */ - gen->regs = *cx->regs; - RebaseRegsFromTo(&gen->regs, stackfp, genfp); + gen->regs = cx->regs(); + gen->regs.rebaseFromTo(stackfp, genfp); /* Copy frame off the stack. 
*/ - genfp->stealFrameAndSlots(genvp, stackfp, stackvp, cx->regs->sp); + genfp->stealFrameAndSlots(genvp, stackfp, stackvp, cx->regs().sp); genfp->initFloatingGenerator(); obj->setPrivate(gen); @@ -1198,7 +1192,7 @@ js_NewGenerator(JSContext *cx) } JSGenerator * -js_FloatingFrameToGenerator(JSStackFrame *fp) +js_FloatingFrameToGenerator(StackFrame *fp) { JS_ASSERT(fp->isGeneratorFrame() && fp->isFloatingGenerator()); char *floatingStackp = (char *)(fp->actualArgs() - 2); @@ -1258,11 +1252,11 @@ SendToGenerator(JSContext *cx, JSGeneratorOp op, JSObject *obj, break; } - JSStackFrame *genfp = gen->floatingFrame(); + StackFrame *genfp = gen->floatingFrame(); Value *genvp = gen->floatingStack; uintN vplen = genfp->formalArgsEnd() - genvp; - JSStackFrame *stackfp; + StackFrame *stackfp; Value *stackvp; JSBool ok; { @@ -1271,7 +1265,7 @@ SendToGenerator(JSContext *cx, JSGeneratorOp op, JSObject *obj, * the code before pushExecuteFrame must not reenter the interpreter. */ GeneratorFrameGuard frame; - if (!cx->stack().getGeneratorFrame(cx, vplen, genfp->numSlots(), &frame)) { + if (!cx->stack.getGeneratorFrame(cx, vplen, genfp->numSlots(), &frame)) { gen->state = JSGEN_CLOSED; return JS_FALSE; } @@ -1282,11 +1276,11 @@ SendToGenerator(JSContext *cx, JSGeneratorOp op, JSObject *obj, stackfp->stealFrameAndSlots(stackvp, genfp, genvp, gen->regs.sp); stackfp->resetGeneratorPrev(cx); stackfp->unsetFloatingGenerator(); - RebaseRegsFromTo(&gen->regs, genfp, stackfp); + gen->regs.rebaseFromTo(genfp, stackfp); MUST_FLOW_THROUGH("restore"); /* Officially push frame. frame's destructor pops. */ - cx->stack().pushGeneratorFrame(cx, &gen->regs, &frame); + cx->stack.pushGeneratorFrame(gen->regs, &frame); cx->enterGenerator(gen); /* OOM check above. 
*/ JSObject *enumerators = cx->enumerators; @@ -1306,7 +1300,7 @@ SendToGenerator(JSContext *cx, JSGeneratorOp op, JSObject *obj, genfp->setFloatingGenerator(); } MUST_FLOW_LABEL(restore) - RebaseRegsFromTo(&gen->regs, stackfp, genfp); + gen->regs.rebaseFromTo(stackfp, genfp); if (gen->floatingFrame()->isYielding()) { /* Yield cannot fail, throw or be called on closing. */ diff --git a/js/src/jsiter.h b/js/src/jsiter.h index 9ae01d356f45..41086e0dbe59 100644 --- a/js/src/jsiter.h +++ b/js/src/jsiter.h @@ -181,19 +181,19 @@ typedef enum JSGeneratorState { struct JSGenerator { JSObject *obj; JSGeneratorState state; - JSFrameRegs regs; + js::FrameRegs regs; JSObject *enumerators; - JSStackFrame *floating; + js::StackFrame *floating; js::Value floatingStack[1]; - JSStackFrame *floatingFrame() { + js::StackFrame *floatingFrame() { return floating; } - JSStackFrame *liveFrame() { + js::StackFrame *liveFrame() { JS_ASSERT((state == JSGEN_RUNNING || state == JSGEN_CLOSING) == - (regs.fp != floatingFrame())); - return regs.fp; + (regs.fp() != floatingFrame())); + return regs.fp(); } }; @@ -211,10 +211,10 @@ js_NewGenerator(JSContext *cx); * Block and With objects must "normalize" to and from the floating/live frames * in the case of generators using the following functions. */ -inline JSStackFrame * -js_FloatingFrameIfGenerator(JSContext *cx, JSStackFrame *fp) +inline js::StackFrame * +js_FloatingFrameIfGenerator(JSContext *cx, js::StackFrame *fp) { - JS_ASSERT(cx->stack().contains(fp)); + JS_ASSERT(cx->stack.contains(fp)); if (JS_UNLIKELY(fp->isGeneratorFrame())) return cx->generatorFor(fp)->floatingFrame(); return fp; @@ -222,10 +222,10 @@ js_FloatingFrameIfGenerator(JSContext *cx, JSStackFrame *fp) /* Given a floating frame, given the JSGenerator containing it. 
*/ extern JSGenerator * -js_FloatingFrameToGenerator(JSStackFrame *fp); +js_FloatingFrameToGenerator(js::StackFrame *fp); -inline JSStackFrame * -js_LiveFrameIfGenerator(JSStackFrame *fp) +inline js::StackFrame * +js_LiveFrameIfGenerator(js::StackFrame *fp) { return fp->isGeneratorFrame() ? js_FloatingFrameToGenerator(fp)->liveFrame() : fp; } diff --git a/js/src/jslock.h b/js/src/jslock.h index e62b393f3949..909871c81e10 100644 --- a/js/src/jslock.h +++ b/js/src/jslock.h @@ -81,7 +81,7 @@ typedef struct JSThinLock { JSFatLock *fat; } JSThinLock; -#define CX_THINLOCK_ID(cx) ((jsword)(cx)->thread) +#define CX_THINLOCK_ID(cx) ((jsword)(cx)->thread()) #define CURRENT_THREAD_IS_ME(me) (((JSThread *)me)->id == js_CurrentThreadId()) typedef PRLock JSLock; diff --git a/js/src/jsmath.cpp b/js/src/jsmath.cpp index a5dfcdd7e5b7..8901c7e3a962 100644 --- a/js/src/jsmath.cpp +++ b/js/src/jsmath.cpp @@ -881,5 +881,8 @@ js_InitMathClass(JSContext *cx, JSObject *obj) return NULL; if (!JS_DefineConstDoubles(cx, Math, math_constants)) return NULL; + + MarkStandardClassInitializedNoProto(obj, &js_MathClass); + return Math; } diff --git a/js/src/jsnum.cpp b/js/src/jsnum.cpp index 6b48af02a851..73fb0c0942ba 100644 --- a/js/src/jsnum.cpp +++ b/js/src/jsnum.cpp @@ -700,19 +700,18 @@ num_toString(JSContext *cx, uintN argc, Value *vp) if (!GetPrimitiveThis(cx, vp, &d)) return false; - int32_t base = 10; + int32 base = 10; if (argc != 0 && !vp[2].isUndefined()) { - if (!ValueToECMAInt32(cx, vp[2], &base)) - return JS_FALSE; + jsdouble d2; + if (!ToInteger(cx, vp[2], &d2)) + return false; - if (base < 2 || base > 36) { - ToCStringBuf cbuf; - char *numStr = IntToCString(&cbuf, base); /* convert the base itself to a string */ - JS_ASSERT(numStr); - JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL, JSMSG_BAD_RADIX, - numStr); - return JS_FALSE; + if (d2 < 2 || d2 > 36) { + JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL, JSMSG_BAD_RADIX); + return false; } + + base = int32(d2); } JSString 
*str = js_NumberToStringWithBase(cx, d, base); if (!str) { @@ -874,9 +873,8 @@ num_to(JSContext *cx, JSDToStrMode zeroArgMode, JSDToStrMode oneArgMode, precision = 0.0; oneArgMode = zeroArgMode; } else { - if (!ValueToNumber(cx, vp[2], &precision)) - return JS_FALSE; - precision = js_DoubleToInteger(precision); + if (!ToInteger(cx, vp[2], &precision)) + return false; if (precision < precisionMin || precision > precisionMax) { ToCStringBuf cbuf; numStr = IntToCString(&cbuf, jsint(precision)); diff --git a/js/src/jsnum.h b/js/src/jsnum.h index d03d0d733363..aeb7362c4d88 100644 --- a/js/src/jsnum.h +++ b/js/src/jsnum.h @@ -696,6 +696,26 @@ StringToNumberType(JSContext *cx, JSString *str, T *result) *result = NumberTraits::toSelfType(d); return true; } + +/* ES5 9.4 ToInteger. */ +static inline bool +ToInteger(JSContext *cx, const js::Value &v, jsdouble *dp) +{ + if (v.isInt32()) { + *dp = v.toInt32(); + return true; + } + if (v.isDouble()) { + *dp = v.toDouble(); + } else { + extern bool ValueToNumberSlow(JSContext *cx, js::Value v, double *dp); + if (!ValueToNumberSlow(cx, v, dp)) + return false; + } + *dp = js_DoubleToInteger(*dp); + return true; } +} /* namespace js */ + #endif /* jsnum_h___ */ diff --git a/js/src/jsobj.cpp b/js/src/jsobj.cpp index 91ad0ee2a74e..a77448d31492 100644 --- a/js/src/jsobj.cpp +++ b/js/src/jsobj.cpp @@ -86,6 +86,8 @@ #include "jsscriptinlines.h" #include "jsobjinlines.h" +#include "vm/StringObject-inl.h" + #if JS_HAS_GENERATORS #include "jsiter.h" #endif @@ -959,7 +961,7 @@ EvalCacheHash(JSContext *cx, JSLinearString *str) } static JS_ALWAYS_INLINE JSScript * -EvalCacheLookup(JSContext *cx, JSLinearString *str, JSStackFrame *caller, uintN staticLevel, +EvalCacheLookup(JSContext *cx, JSLinearString *str, StackFrame *caller, uintN staticLevel, JSPrincipals *principals, JSObject &scopeobj, JSScript **bucket) { /* @@ -1082,7 +1084,7 @@ class EvalScriptGuard } } - void lookupInEvalCache(JSStackFrame *caller, uintN staticLevel, + void 
lookupInEvalCache(StackFrame *caller, uintN staticLevel, JSPrincipals *principals, JSObject &scopeobj) { if (JSScript *found = EvalCacheLookup(cx_, str_, caller, staticLevel, principals, scopeobj, bucket_)) { @@ -1120,7 +1122,7 @@ class EvalScriptGuard enum EvalType { DIRECT_EVAL, INDIRECT_EVAL }; static bool -EvalKernel(JSContext *cx, const CallArgs &call, EvalType evalType, JSStackFrame *caller, +EvalKernel(JSContext *cx, const CallArgs &call, EvalType evalType, StackFrame *caller, JSObject &scopeobj) { JS_ASSERT((evalType == INDIRECT_EVAL) == (caller == NULL)); @@ -1235,7 +1237,7 @@ EvalKernel(JSContext *cx, const CallArgs &call, EvalType evalType, JSStackFrame esg.setNewScript(compiled); } - return Execute(cx, scopeobj, esg.script(), caller, JSFRAME_EVAL, &call.rval()); + return Execute(cx, scopeobj, esg.script(), caller, StackFrame::EVAL, &call.rval()); } /* @@ -1247,7 +1249,7 @@ static inline bool WarnOnTooManyArgs(JSContext *cx, const CallArgs &call) { if (call.argc() > 1) { - if (JSStackFrame *caller = js_GetScriptedCaller(cx, NULL)) { + if (StackFrame *caller = js_GetScriptedCaller(cx, NULL)) { if (!caller->script()->warnedAboutTwoArgumentEval) { static const char TWO_ARGUMENT_WARNING[] = "Support for eval(code, scopeObject) has been removed. " @@ -1286,10 +1288,10 @@ bool DirectEval(JSContext *cx, const CallArgs &call) { /* Direct eval can assume it was called from an interpreted frame. 
*/ - JSStackFrame *caller = cx->fp(); + StackFrame *caller = cx->fp(); JS_ASSERT(caller->isScriptFrame()); JS_ASSERT(IsBuiltinEvalForScope(&caller->scopeChain(), call.calleev())); - JS_ASSERT(*cx->regs->pc == JSOP_EVAL); + JS_ASSERT(*cx->regs().pc == JSOP_EVAL); AutoFunctionCallProbe callProbe(cx, call.callee().getFunctionPrivate(), caller->script()); @@ -1339,8 +1341,8 @@ PrincipalsForCompiledCode(const CallArgs &call, JSContext *cx) #ifdef DEBUG if (calleePrincipals) { - if (JSStackFrame *caller = js_GetScriptedCaller(cx, NULL)) { - if (JSPrincipals *callerPrincipals = caller->principals(cx)) { + if (StackFrame *caller = js_GetScriptedCaller(cx, NULL)) { + if (JSPrincipals *callerPrincipals = caller->scopeChain().principals(cx)) { JS_ASSERT(callerPrincipals->subsume(callerPrincipals, calleePrincipals)); } } @@ -1360,8 +1362,8 @@ obj_watch_handler(JSContext *cx, JSObject *obj, jsid id, jsval old, { JSObject *callable = (JSObject *) closure; if (JSPrincipals *watcher = callable->principals(cx)) { - if (JSStackFrame *caller = js_GetScriptedCaller(cx, NULL)) { - if (JSPrincipals *subject = caller->principals(cx)) { + if (StackFrame *caller = js_GetScriptedCaller(cx, NULL)) { + if (JSPrincipals *subject = caller->scopeChain().principals(cx)) { if (!watcher->subsume(watcher, subject)) { /* Silently don't call the watch handler. 
*/ return JS_TRUE; @@ -2930,25 +2932,6 @@ js_CreateThisForFunction(JSContext *cx, JSObject *callee) #ifdef JS_TRACER -static JS_ALWAYS_INLINE JSObject* -NewObjectWithClassProto(JSContext *cx, Class *clasp, JSObject *proto, - /*gc::FinalizeKind*/ unsigned _kind) -{ - JS_ASSERT(clasp->isNative()); - gc::FinalizeKind kind = gc::FinalizeKind(_kind); - - if (CanBeFinalizedInBackground(kind, clasp)) - kind = (gc::FinalizeKind)(kind + 1); - - JSObject* obj = js_NewGCObject(cx, kind); - if (!obj) - return NULL; - - if (!obj->initSharingEmptyShape(cx, clasp, proto, proto->getParent(), NULL, kind)) - return NULL; - return obj; -} - JSObject* FASTCALL js_Object_tn(JSContext* cx, JSObject* proto) { @@ -2978,11 +2961,8 @@ JSObject* FASTCALL js_String_tn(JSContext* cx, JSObject* proto, JSString* str) { JS_ASSERT(JS_ON_TRACE(cx)); - JS_ASSERT(FINALIZE_OBJECT2 == gc::GetGCObjectKind(JSCLASS_RESERVED_SLOTS(&js_StringClass))); - JSObject *obj = NewObjectWithClassProto(cx, &js_StringClass, proto, FINALIZE_OBJECT2); - if (!obj || !obj->initString(cx, str)) - return NULL; - return obj; + JS_ASSERT(proto); + return StringObject::createWithProto(cx, str, *proto); } JS_DEFINE_CALLINFO_3(extern, OBJECT, js_String_tn, CONTEXT, CALLEE_PROTOTYPE, STRING, 0, nanojit::ACCSET_STORE_ANY) @@ -3110,8 +3090,8 @@ js_InferFlags(JSContext *cx, uintN defaultFlags) uint32 format; uintN flags = 0; - JSStackFrame *const fp = js_GetTopStackFrame(cx); - if (!fp || !(pc = cx->regs->pc)) + StackFrame *const fp = js_GetTopStackFrame(cx); + if (!fp || !(pc = cx->regs().pc)) return defaultFlags; cs = &js_CodeSpec[js_GetOpcode(cx, fp->script(), pc)]; format = cs->format; @@ -3239,7 +3219,7 @@ js_NewWithObject(JSContext *cx, JSObject *proto, JSObject *parent, jsint depth) if (!obj) return NULL; - JSStackFrame *priv = js_FloatingFrameIfGenerator(cx, cx->fp()); + StackFrame *priv = js_FloatingFrameIfGenerator(cx, cx->fp()); EmptyShape *emptyWithShape = EmptyShape::getEmptyWithShape(cx); if (!emptyWithShape) @@ 
-3281,7 +3261,7 @@ js_NewBlockObject(JSContext *cx) } JSObject * -js_CloneBlockObject(JSContext *cx, JSObject *proto, JSStackFrame *fp) +js_CloneBlockObject(JSContext *cx, JSObject *proto, StackFrame *fp) { JS_ASSERT(proto->isStaticBlock()); @@ -3292,7 +3272,7 @@ js_CloneBlockObject(JSContext *cx, JSObject *proto, JSStackFrame *fp) if (!clone) return NULL; - JSStackFrame *priv = js_FloatingFrameIfGenerator(cx, fp); + StackFrame *priv = js_FloatingFrameIfGenerator(cx, fp); /* The caller sets parent on its own. */ clone->initClonedBlock(cx, proto, priv); @@ -3309,7 +3289,7 @@ js_CloneBlockObject(JSContext *cx, JSObject *proto, JSStackFrame *fp) JS_REQUIRES_STACK JSBool js_PutBlockObject(JSContext *cx, JSBool normalUnwind) { - JSStackFrame *const fp = cx->fp(); + StackFrame *const fp = cx->fp(); JSObject *obj = &fp->scopeChain(); JS_ASSERT(obj->isClonedBlock()); JS_ASSERT(obj->getPrivate() == js_FloatingFrameIfGenerator(cx, cx->fp())); @@ -3320,8 +3300,8 @@ js_PutBlockObject(JSContext *cx, JSBool normalUnwind) /* The block and its locals must be on the current stack for GC safety. */ uintN depth = OBJ_BLOCK_DEPTH(cx, obj); - JS_ASSERT(depth <= size_t(cx->regs->sp - fp->base())); - JS_ASSERT(count <= size_t(cx->regs->sp - fp->base() - depth)); + JS_ASSERT(depth <= size_t(cx->regs().sp - fp->base())); + JS_ASSERT(count <= size_t(cx->regs().sp - fp->base() - depth)); /* See comments in CheckDestructuring from jsparse.cpp. 
*/ JS_ASSERT(count >= 1); @@ -3350,7 +3330,7 @@ block_getProperty(JSContext *cx, JSObject *obj, jsid id, Value *vp) uintN index = (uintN) JSID_TO_INT(id); JS_ASSERT(index < OBJ_BLOCK_COUNT(cx, obj)); - JSStackFrame *fp = (JSStackFrame *) obj->getPrivate(); + StackFrame *fp = (StackFrame *) obj->getPrivate(); if (fp) { fp = js_LiveFrameIfGenerator(fp); index += fp->numFixed() + OBJ_BLOCK_DEPTH(cx, obj); @@ -3371,7 +3351,7 @@ block_setProperty(JSContext *cx, JSObject *obj, jsid id, JSBool strict, Value *v uintN index = (uintN) JSID_TO_INT(id); JS_ASSERT(index < OBJ_BLOCK_COUNT(cx, obj)); - JSStackFrame *fp = (JSStackFrame *) obj->getPrivate(); + StackFrame *fp = (StackFrame *) obj->getPrivate(); if (fp) { fp = js_LiveFrameIfGenerator(fp); index += fp->numFixed() + OBJ_BLOCK_DEPTH(cx, obj); @@ -3976,6 +3956,37 @@ bad: return NULL; } +/* + * Lazy standard classes need a way to indicate if they have been initialized. + * Otherwise, when we delete them, we might accidentally recreate them via a + * lazy initialization. We use the presence of a ctor or proto in the + * globalObject's slot to indicate that they've been constructed, but this only + * works for classes which have a proto and ctor. Classes which don't have one + * can call MarkStandardClassInitializedNoProto(), and we can always check + * whether a class is initialized by calling IsStandardClassResolved(). + */ +bool +IsStandardClassResolved(JSObject *obj, js::Class *clasp) +{ + JSProtoKey key = JSCLASS_CACHED_PROTO_KEY(clasp); + + /* If the constructor is undefined, then it hasn't been initialized. */ + return (obj->getReservedSlot(key) != UndefinedValue()); +} + +void +MarkStandardClassInitializedNoProto(JSObject* obj, js::Class *clasp) +{ + JSProtoKey key = JSCLASS_CACHED_PROTO_KEY(clasp); + + /* + * We use True so that it's obvious what we're doing (instead of, say, + * Null, which might be misconstrued as an error in setting Undefined).
+ */ + if (obj->getReservedSlot(key) == UndefinedValue()) + obj->setSlot(key, BooleanValue(true)); +} + } JSObject * @@ -4227,7 +4238,7 @@ JSBool js_FindClassObject(JSContext *cx, JSObject *start, JSProtoKey protoKey, Value *vp, Class *clasp) { - JSStackFrame *fp; + StackFrame *fp; JSObject *obj, *cobj, *pobj; jsid id; JSProperty *prop; @@ -4945,7 +4956,7 @@ js_LookupPropertyWithFlagsInline(JSContext *cx, JSObject *obj, jsid id, uintN fl * Non-native objects must have either non-native lookup results, * or else native results from the non-native's prototype chain. * - * See JSStackFrame::getValidCalleeObject, where we depend on this + * See StackFrame::getValidCalleeObject, where we depend on this * fact to force a prototype-delegated joined method accessed via * arguments.callee through the delegating |this| object's method * read barrier. @@ -5459,7 +5470,7 @@ js_GetMethod(JSContext *cx, JSObject *obj, jsid id, uintN getHow, Value *vp) JS_FRIEND_API(bool) js_CheckUndeclaredVarAssignment(JSContext *cx, JSString *propname) { - JSStackFrame *const fp = js_GetTopStackFrame(cx); + StackFrame *const fp = js_GetTopStackFrame(cx); if (!fp) return true; @@ -5873,7 +5884,7 @@ js_DeleteProperty(JSContext *cx, JSObject *obj, jsid id, Value *rval, JSBool str JSFunction *fun = GET_FUNCTION_PRIVATE(cx, funobj); if (fun != funobj) { - for (JSStackFrame *fp = cx->maybefp(); fp; fp = fp->prev()) { + for (StackFrame *fp = cx->maybefp(); fp; fp = fp->prev()) { if (fp->isFunctionFrame() && fp->callee() == fun->compiledFunObj() && fp->thisValue().isObject()) @@ -6157,7 +6168,7 @@ js_GetClassPrototype(JSContext *cx, JSObject *scopeobj, JSProtoKey protoKey, if (protoKey != JSProto_Null) { if (!scopeobj) { - if (cx->hasfp()) + if (cx->running()) scopeobj = &cx->fp()->scopeChain(); if (!scopeobj) { scopeobj = cx->globalObject; @@ -6205,12 +6216,8 @@ js_SetClassPrototype(JSContext *cx, JSObject *ctor, JSObject *proto, uintN attrs JSObject * PrimitiveToObject(JSContext *cx, const Value &v) 
{ - if (v.isString()) { - JSObject *obj = NewBuiltinClassInstance(cx, &js_StringClass); - if (!obj || !obj->initString(cx, v.toString())) - return NULL; - return obj; - } + if (v.isString()) + return StringObject::create(cx, v.toString()); JS_ASSERT(v.isNumber() || v.isBoolean()); Class *clasp = v.isNumber() ? &js_NumberClass : &js_BooleanClass; @@ -6858,7 +6865,7 @@ MaybeDumpValue(const char *name, const Value &v) } JS_FRIEND_API(void) -js_DumpStackFrame(JSContext *cx, JSStackFrame *start) +js_DumpStackFrame(JSContext *cx, StackFrame *start) { /* This should only called during live debugging. */ VOUCH_DOES_NOT_REQUIRE_STACK(); @@ -6875,9 +6882,9 @@ js_DumpStackFrame(JSContext *cx, JSStackFrame *start) } for (; !i.done(); ++i) { - JSStackFrame *const fp = i.fp(); + StackFrame *const fp = i.fp(); - fprintf(stderr, "JSStackFrame at %p\n", (void *) fp); + fprintf(stderr, "StackFrame at %p\n", (void *) fp); if (fp->isFunctionFrame()) { fprintf(stderr, "callee fun: "); dumpValue(ObjectValue(fp->callee())); diff --git a/js/src/jsobj.h b/js/src/jsobj.h index 6ea319198ed5..0c56922005b9 100644 --- a/js/src/jsobj.h +++ b/js/src/jsobj.h @@ -262,6 +262,7 @@ namespace js { struct NativeIterator; class RegExp; class GlobalObject; +class StringObject; } @@ -330,6 +331,9 @@ struct JSObject : js::gc::Cell { inline void setLastProperty(const js::Shape *shape); inline void removeLastProperty(); + /* For setLastProperty() only. */ + friend class js::StringObject; + #ifdef DEBUG void checkShapeConsistency(); #endif @@ -477,7 +481,7 @@ struct JSObject : js::gc::Cell { /* Functions for setting up scope chain object maps and shapes. 
*/ void initCall(JSContext *cx, const js::Bindings &bindings, JSObject *parent); - void initClonedBlock(JSContext *cx, JSObject *proto, JSStackFrame *priv); + void initClonedBlock(JSContext *cx, JSObject *proto, js::StackFrame *priv); void setBlockOwnShape(JSContext *cx); void deletingShapeChange(JSContext *cx, const js::Shape &shape); @@ -755,23 +759,8 @@ struct JSObject : js::gc::Cell { inline const js::Value &getPrimitiveThis() const; inline void setPrimitiveThis(const js::Value &pthis); - private: - /* 0 is JSSLOT_PRIMITIVE_THIS */ - static const uint32 JSSLOT_STRING_LENGTH = 1; - - /* - * Compute the initial shape to associate with fresh String objects, - * encoding the initial length property. Return the shape after changing - * this String object's last property to it. - */ - const js::Shape *assignInitialStringShape(JSContext *cx); - public: - static const uint32 STRING_RESERVED_SLOTS = 2; - - inline size_t getStringLength() const; - - inline bool initString(JSContext *cx, JSString *str); + inline js::StringObject *asString(); /* * Array-specific getters and setters (for both dense and slow arrays). @@ -916,7 +905,7 @@ struct JSObject : js::gc::Cell { inline bool callIsForEval() const; /* The stack frame for this Call object, if the frame is still active. */ - inline JSStackFrame *maybeCallObjStackFrame() const; + inline js::StackFrame *maybeCallObjStackFrame() const; /* * The callee function if this Call object was created for a function @@ -1416,7 +1405,7 @@ inline bool JSObject::isBlock() const { return getClass() == &js_BlockClass; } /* * Block scope object macros. The slots reserved by js_BlockClass are: * - * private JSStackFrame * active frame pointer or null + * private StackFrame * active frame pointer or null * JSSLOT_BLOCK_DEPTH int depth of block slots in frame * * After JSSLOT_BLOCK_DEPTH come one or more slots for the block locals. 
@@ -1424,7 +1413,7 @@ inline bool JSObject::isBlock() const { return getClass() == &js_BlockClass; } * A With object is like a Block object, in that both have one reserved slot * telling the stack depth of the relevant slots (the slot whose value is the * object named in the with statement, the slots containing the block's local - * variables); and both have a private slot referring to the JSStackFrame in + * variables); and both have a private slot referring to the StackFrame in * whose activation they were created (or null if the with or block object * outlives the frame). */ @@ -1480,7 +1469,7 @@ extern JSObject * js_NewBlockObject(JSContext *cx); extern JSObject * -js_CloneBlockObject(JSContext *cx, JSObject *proto, JSStackFrame *fp); +js_CloneBlockObject(JSContext *cx, JSObject *proto, js::StackFrame *fp); extern JS_REQUIRES_STACK JSBool js_PutBlockObject(JSContext *cx, JSBool normalUnwind); @@ -1548,6 +1537,13 @@ DefineConstructorAndPrototype(JSContext *cx, JSObject *obj, JSProtoKey key, JSAt Native constructor, uintN nargs, JSPropertySpec *ps, JSFunctionSpec *fs, JSPropertySpec *static_ps, JSFunctionSpec *static_fs); + +bool +IsStandardClassResolved(JSObject *obj, js::Class *clasp); + +void +MarkStandardClassInitializedNoProto(JSObject *obj, js::Class *clasp); + } extern JSObject * @@ -1912,7 +1908,7 @@ JS_FRIEND_API(void) js_DumpAtom(JSAtom *atom); JS_FRIEND_API(void) js_DumpObject(JSObject *obj); JS_FRIEND_API(void) js_DumpValue(const js::Value &val); JS_FRIEND_API(void) js_DumpId(jsid id); -JS_FRIEND_API(void) js_DumpStackFrame(JSContext *cx, JSStackFrame *start = NULL); +JS_FRIEND_API(void) js_DumpStackFrame(JSContext *cx, js::StackFrame *start = NULL); #endif extern uintN @@ -1922,7 +1918,6 @@ js_InferFlags(JSContext *cx, uintN defaultFlags); JSBool js_Object(JSContext *cx, uintN argc, js::Value *vp); - namespace js { extern bool diff --git a/js/src/jsobjinlines.h b/js/src/jsobjinlines.h index 7b70d47cf58d..9413da1b1fda 100644 --- 
a/js/src/jsobjinlines.h +++ b/js/src/jsobjinlines.h @@ -169,7 +169,7 @@ JSObject::initCall(JSContext *cx, const js::Bindings &bindings, JSObject *parent * shape. */ inline void -JSObject::initClonedBlock(JSContext *cx, JSObject *proto, JSStackFrame *frame) +JSObject::initClonedBlock(JSContext *cx, JSObject *proto, js::StackFrame *frame) { init(cx, &js_BlockClass, proto, NULL, frame, false); @@ -327,7 +327,7 @@ JSObject::setPrimitiveThis(const js::Value &pthis) inline /* gc::FinalizeKind */ unsigned JSObject::finalizeKind() const { - return js::gc::FinalizeKind(arena()->header()->thingKind); + return js::gc::FinalizeKind(arenaHeader()->getThingKind()); } inline size_t @@ -509,11 +509,11 @@ JSObject::callIsForEval() const return getSlot(JSSLOT_CALL_CALLEE).isNull(); } -inline JSStackFrame * +inline js::StackFrame * JSObject::maybeCallObjStackFrame() const { JS_ASSERT(isCall()); - return reinterpret_cast(getPrivate()); + return reinterpret_cast(getPrivate()); } inline void @@ -881,12 +881,6 @@ JSObject::principals(JSContext *cx) return compPrincipals; } -inline JSPrincipals * -JSStackFrame::principals(JSContext *cx) const -{ - return scopeChain().principals(cx); -} - inline uint32 JSObject::slotSpan() const { @@ -1222,7 +1216,7 @@ NewBuiltinClassInstance(JSContext *cx, Class *clasp, gc::FinalizeKind kind) /* NB: inline-expanded and specialized version of js_GetClassPrototype. 
*/ JSObject *global; - if (!cx->hasfp()) { + if (!cx->running()) { global = cx->globalObject; OBJ_TO_INNER_OBJECT(cx, global); if (!global) @@ -1429,6 +1423,25 @@ NewObjectGCKind(JSContext *cx, js::Class *clasp) return gc::FINALIZE_OBJECT4; } +static JS_ALWAYS_INLINE JSObject* +NewObjectWithClassProto(JSContext *cx, Class *clasp, JSObject *proto, + /*gc::FinalizeKind*/ unsigned _kind) +{ + JS_ASSERT(clasp->isNative()); + gc::FinalizeKind kind = gc::FinalizeKind(_kind); + + if (CanBeFinalizedInBackground(kind, clasp)) + kind = (gc::FinalizeKind)(kind + 1); + + JSObject* obj = js_NewGCObject(cx, kind); + if (!obj) + return NULL; + + if (!obj->initSharingEmptyShape(cx, clasp, proto, proto->getParent(), NULL, kind)) + return NULL; + return obj; +} + /* Make an object with pregenerated shape from a NEWOBJECT bytecode. */ static inline JSObject * CopyInitializerObject(JSContext *cx, JSObject *baseobj) diff --git a/js/src/json.cpp b/js/src/json.cpp index 8271ccfabd86..65e1c4d96751 100644 --- a/js/src/json.cpp +++ b/js/src/json.cpp @@ -67,6 +67,8 @@ #include "jsobjinlines.h" #include "jsstrinlines.h" +#include "vm/Stack-inl.h" + using namespace js; using namespace js::gc; @@ -129,21 +131,19 @@ js_json_parse(JSContext *cx, uintN argc, Value *vp) return ParseJSONWithReviver(cx, linearStr->chars(), linearStr->length(), reviver, vp); } +/* ES5 15.12.3. */ JSBool js_json_stringify(JSContext *cx, uintN argc, Value *vp) { - Value *argv = vp + 2; - AutoValueRooter space(cx); - AutoObjectRooter replacer(cx); - - // Must throw an Error if there isn't a first arg - if (!JS_ConvertArguments(cx, argc, Jsvalify(argv), "v / o v", vp, replacer.addr(), space.addr())) - return JS_FALSE; + *vp = (argc >= 1) ? vp[2] : UndefinedValue(); + JSObject *replacer = (argc >= 2 && vp[3].isObject()) + ? &vp[3].toObject() + : NULL; + Value space = (argc >= 3) ? 
vp[4] : UndefinedValue(); StringBuffer sb(cx); - - if (!js_Stringify(cx, vp, replacer.object(), space.value(), sb)) - return JS_FALSE; + if (!js_Stringify(cx, vp, replacer, space, sb)) + return false; // XXX This can never happen to nsJSON.cpp, but the JSON object // needs to support returning undefined. So this is a little awkward @@ -151,13 +151,13 @@ js_json_stringify(JSContext *cx, uintN argc, Value *vp) if (!sb.empty()) { JSString *str = sb.finishString(); if (!str) - return JS_FALSE; + return false; vp->setString(str); } else { vp->setUndefined(); } - return JS_TRUE; + return true; } JSBool @@ -179,84 +179,91 @@ js_TryJSON(JSContext *cx, Value *vp) } -static const char quote = '\"'; -static const char backslash = '\\'; -static const char unicodeEscape[] = "\\u00"; - -static JSBool -write_string(JSContext *cx, StringBuffer &sb, const jschar *buf, uint32 len) +static inline bool IsQuoteSpecialCharacter(jschar c) { - if (!sb.append(quote)) - return JS_FALSE; + JS_STATIC_ASSERT('\b' < ' '); + JS_STATIC_ASSERT('\f' < ' '); + JS_STATIC_ASSERT('\n' < ' '); + JS_STATIC_ASSERT('\r' < ' '); + JS_STATIC_ASSERT('\t' < ' '); + return c == '"' || c == '\\' || c < ' '; +} - uint32 mark = 0; - uint32 i; - for (i = 0; i < len; ++i) { - if (buf[i] == quote || buf[i] == backslash) { - if (!sb.append(&buf[mark], i - mark) || !sb.append(backslash) || - !sb.append(buf[i])) { - return JS_FALSE; - } - mark = i + 1; - } else if (buf[i] <= 31 || buf[i] == 127) { - if (!sb.append(&buf[mark], i - mark) || - !sb.append(unicodeEscape)) { - return JS_FALSE; - } - char ubuf[3]; - size_t len = JS_snprintf(ubuf, sizeof(ubuf), "%.2x", buf[i]); - JS_ASSERT(len == 2); - jschar wbuf[3]; - size_t wbufSize = JS_ARRAY_LENGTH(wbuf); - if (!js_InflateStringToBuffer(cx, ubuf, len, wbuf, &wbufSize) || - !sb.append(wbuf, wbufSize)) { - return JS_FALSE; - } - mark = i + 1; +/* ES5 15.12.3 Quote. 
*/ +static bool +Quote(JSContext *cx, StringBuffer &sb, JSString *str) +{ + JS::Anchor anchor(str); + size_t len = str->length(); + const jschar *buf = str->getChars(cx); + if (!buf) + return false; + + /* Step 1. */ + if (!sb.append('"')) + return false; + + /* Step 2. */ + for (size_t i = 0; i < len; ++i) { + /* Batch-append maximal character sequences containing no escapes. */ + size_t mark = i; + do { + if (IsQuoteSpecialCharacter(buf[i])) + break; + } while (++i < len); + if (i > mark) { + if (!sb.append(&buf[mark], i - mark)) + return false; + if (i == len) + break; + } + + jschar c = buf[i]; + if (c == '"' || c == '\\') { + if (!sb.append('\\') || !sb.append(c)) + return false; + } else if (c == '\b' || c == '\f' || c == '\n' || c == '\r' || c == '\t') { + jschar abbrev = (c == '\b') + ? 'b' + : (c == '\f') + ? 'f' + : (c == '\n') + ? 'n' + : (c == '\r') + ? 'r' + : 't'; + if (!sb.append('\\') || !sb.append(abbrev)) + return false; + mark = i + 1; + } else { + JS_ASSERT(c < ' '); + if (!sb.append("\\u00")) + return false; + JS_ASSERT((c >> 4) < 10); + uint8 x = c >> 4, y = c % 16; + if (!sb.append('0' + x) || !sb.append(y < 10 ? '0' + y : 'a' + (y - 10))) + return false; } } - if (mark < len && !sb.append(&buf[mark], len - mark)) - return JS_FALSE; - - return sb.append(quote); + /* Steps 3-4. 
*/ + return sb.append('"'); } class StringifyContext { -public: - StringifyContext(JSContext *cx, StringBuffer &sb, JSObject *replacer) - : sb(sb), gap(cx), replacer(replacer), depth(0), objectStack(cx) + public: + StringifyContext(JSContext *cx, StringBuffer &sb, const StringBuffer &gap, + JSObject *replacer, const AutoIdVector &propertyList) + : sb(sb), + gap(gap), + replacer(replacer), + propertyList(propertyList), + depth(0), + objectStack(cx) {} - bool initializeGap(JSContext *cx, const Value &space) { - AutoValueRooter gapValue(cx, space); - - if (space.isObject()) { - JSObject &obj = space.toObject(); - Class *clasp = obj.getClass(); - if (clasp == &js_NumberClass || clasp == &js_StringClass) - *gapValue.addr() = obj.getPrimitiveThis(); - } - - if (gapValue.value().isString()) { - if (!gap.append(gapValue.value().toString())) - return false; - if (gap.length() > 10) - gap.resize(10); - } else if (gapValue.value().isNumber()) { - jsdouble d = gapValue.value().isInt32() - ? gapValue.value().toInt32() - : js_DoubleToInteger(gapValue.value().toDouble()); - d = JS_MIN(10, d); - if (d >= 1 && !gap.appendN(' ', uint32(d))) - return false; - } - - return true; - } - - bool initializeStack() { + bool init() { return objectStack.init(16); } @@ -265,8 +272,9 @@ public: #endif StringBuffer &sb; - StringBuffer gap; - JSObject *replacer; + const StringBuffer ⪆ + JSObject * const replacer; + const AutoIdVector &propertyList; uint32 depth; HashSet objectStack; }; @@ -336,14 +344,14 @@ PreprocessValue(JSContext *cx, JSObject *holder, jsid key, Value *vp, StringifyC LeaveTrace(cx); InvokeArgsGuard args; - if (!cx->stack().pushInvokeArgs(cx, 1, &args)) + if (!cx->stack.pushInvokeArgs(cx, 1, &args)) return false; args.calleev() = toJSON; args.thisv() = *vp; args[0] = StringValue(keyStr); - if (!Invoke(cx, args, 0)) + if (!Invoke(cx, args)) return false; *vp = args.rval(); } @@ -359,7 +367,7 @@ PreprocessValue(JSContext *cx, JSObject *holder, jsid key, Value *vp, StringifyC 
LeaveTrace(cx); InvokeArgsGuard args; - if (!cx->stack().pushInvokeArgs(cx, 2, &args)) + if (!cx->stack.pushInvokeArgs(cx, 2, &args)) return false; args.calleev() = ObjectValue(*scx->replacer); @@ -367,7 +375,7 @@ PreprocessValue(JSContext *cx, JSObject *holder, jsid key, Value *vp, StringifyC args[0] = StringValue(keyStr); args[1] = *vp; - if (!Invoke(cx, args, 0)) + if (!Invoke(cx, args)) return false; *vp = args.rval(); } @@ -430,44 +438,25 @@ JO(JSContext *cx, JSObject *obj, StringifyContext *scx) if (!scx->sb.append('{')) return JS_FALSE; - AutoIdRooter idr(cx); - jsid& id = *idr.addr(); - /* Steps 5-7. */ - /* XXX Bug 648471: Do this in js_Stringify, rename keySource. */ - Value keySource = ObjectValue(*obj); - bool usingWhitelist = false; - - // if the replacer is an array, we use the keys from it - if (scx->replacer && JS_IsArrayObject(cx, scx->replacer)) { - usingWhitelist = true; - keySource.setObject(*scx->replacer); + AutoIdVector ids(cx); + const AutoIdVector *props; + if (scx->replacer && !scx->replacer->isCallable()) { + JS_ASSERT(JS_IsArrayObject(cx, scx->replacer)); + props = &scx->propertyList; + } else { + JS_ASSERT_IF(scx->replacer, scx->propertyList.length() == 0); + if (!GetPropertyNames(cx, obj, JSITER_OWNONLY, &ids)) + return false; + props = &ids; } - bool wroteMember = false; - AutoIdVector props(cx); - if (!GetPropertyNames(cx, &keySource.toObject(), JSITER_OWNONLY, &props)) - return JS_FALSE; + /* My kingdom for not-quite-initialized-from-the-start references. */ + const AutoIdVector &propertyList = *props; /* Steps 8-10, 13. 
*/ - for (size_t i = 0, len = props.length(); i < len; i++) { - if (!usingWhitelist) { - if (!js_ValueToStringId(cx, IdToValue(props[i]), &id)) - return JS_FALSE; - } else { - // skip non-index properties - jsuint index = 0; - if (!js_IdIsIndex(props[i], &index)) - continue; - - Value whitelistElement; - if (!scx->replacer->getProperty(cx, props[i], &whitelistElement)) - return JS_FALSE; - - if (!js_ValueToStringId(cx, whitelistElement, &id)) - return JS_FALSE; - } - + bool wroteMember = false; + for (size_t i = 0, len = propertyList.length(); i < len; i++) { /* * Steps 8a-8b. Note that the call to Str is broken up into 1) getting * the property; 2) processing for toJSON, calling the replacer, and @@ -475,43 +464,38 @@ JO(JSContext *cx, JSObject *obj, StringifyContext *scx) * values which process to |undefined|, and 4) stringifying all values * which pass the filter. */ + const jsid &id = propertyList[i]; Value outputValue; if (!obj->getProperty(cx, id, &outputValue)) - return JS_FALSE; + return false; if (!PreprocessValue(cx, obj, id, &outputValue, scx)) - return JS_FALSE; + return false; if (IsFilteredValue(outputValue)) continue; /* Output a comma unless this is the first member to write. */ if (wroteMember && !scx->sb.append(',')) - return JS_FALSE; + return false; wroteMember = true; if (!WriteIndent(cx, scx, scx->depth)) - return JS_FALSE; + return false; - // Be careful below: this string is weakly rooted! 
- JSString *s = js_ValueToString(cx, IdToValue(id)); + JSString *s = IdToString(cx, id); if (!s) - return JS_FALSE; + return false; - JS::Anchor anchor(s); - size_t length = s->length(); - const jschar *chars = s->getChars(cx); - if (!chars) - return JS_FALSE; - - if (!write_string(cx, scx->sb, chars, length) || + if (!Quote(cx, scx->sb, s) || !scx->sb.append(':') || !(scx->gap.empty() || scx->sb.append(' ')) || - !Str(cx, outputValue, scx)) { - return JS_FALSE; + !Str(cx, outputValue, scx)) + { + return false; } } if (wroteMember && !WriteIndent(cx, scx, scx->depth - 1)) - return JS_FALSE; + return false; return scx->sb.append('}'); } @@ -611,14 +595,8 @@ Str(JSContext *cx, const Value &v, StringifyContext *scx) */ /* Step 8. */ - if (v.isString()) { - JSString *str = v.toString(); - size_t length = str->length(); - const jschar *chars = str->getChars(cx); - if (!chars) - return false; - return write_string(cx, scx->sb, chars, length); - } + if (v.isString()) + return Quote(cx, scx->sb, v.toString()); /* Step 5. */ if (v.isNull()) @@ -653,27 +631,155 @@ Str(JSContext *cx, const Value &v, StringifyContext *scx) return ok; } +/* ES5 15.12.3. */ JSBool -js_Stringify(JSContext *cx, Value *vp, JSObject *replacer, const Value &space, - StringBuffer &sb) +js_Stringify(JSContext *cx, Value *vp, JSObject *replacer, Value space, StringBuffer &sb) { - StringifyContext scx(cx, sb, replacer); - if (!scx.initializeGap(cx, space) || !scx.initializeStack()) - return JS_FALSE; + /* + * Step 4. + * + * The spec algorithm is unhelpfully vague in 15.12.3 step 4b about the + * exact steps taken when the replacer is an array, regarding the exact + * sequence of [[Get]] calls for the array's elements, when its overall + * length is calculated, whether own or own plus inherited properties are + * considered, and so on. A rewrite of the step was proposed in + * , + * whose steps are copied below, and which are implemented here. + * + * i. Let PropertyList be an empty internal List. 
+ * ii. Let len be the result of calling the [[Get]] internal method of + * replacer with the argument "length". + * iii. Let i be 0. + * iv. While i < len: + * 1. Let item be undefined. + * 2. Let v be the result of calling the [[Get]] internal method of + * replacer with the argument ToString(i). + * 3. If Type(v) is String then let item be v. + * 4. Else if Type(v) is Number then let item be ToString(v). + * 5. Else if Type(v) is Object then + * a. If the [[Class]] internal property of v is "String" or + * "Number" then let item be ToString(v). + * 6. If item is not undefined and item is not currently an element of + * PropertyList then, + * a. Append item to the end of PropertyList. + * 7. Let i be i + 1. + */ + AutoIdVector propertyList(cx); + if (replacer && JS_IsArrayObject(cx, replacer)) { + /* Step 4b(ii). */ + jsuint len; + JS_ALWAYS_TRUE(js_GetLengthProperty(cx, replacer, &len)); + if (replacer->isDenseArray()) + len = JS_MIN(len, replacer->getDenseArrayCapacity()); - JSObject *obj = NewBuiltinClassInstance(cx, &js_ObjectClass); - if (!obj) - return JS_FALSE; + HashSet idSet(cx); + if (!idSet.init(len)) + return false; - AutoObjectRooter tvr(cx, obj); + /* Step 4b(iii). */ + jsuint i = 0; + + /* Step 4b(iv). */ + for (; i < len; i++) { + /* Step 4b(iv)(2). */ + Value v; + if (!replacer->getProperty(cx, INT_TO_JSID(i), &v)) + return false; + + jsid id; + if (v.isNumber()) { + /* Step 4b(iv)(4). */ + int32_t n; + if (v.isNumber() && ValueFitsInInt32(v, &n) && INT_FITS_IN_JSID(n)) { + id = INT_TO_JSID(n); + } else { + if (!js_ValueToStringId(cx, v, &id)) + return false; + id = js_CheckForStringIndex(id); + } + } else if (v.isString() || + (v.isObject() && (v.toObject().isString() || v.toObject().isNumber()))) + { + /* Step 4b(iv)(3), 4b(iv)(5). */ + if (!js_ValueToStringId(cx, v, &id)) + return false; + id = js_CheckForStringIndex(id); + } else { + continue; + } + + /* Step 4b(iv)(6). 
*/ + HashSet::AddPtr p = idSet.lookupForAdd(id); + if (!p) { + /* Step 4b(iv)(6)(a). */ + if (!idSet.add(p, id) || !propertyList.append(id)) + return false; + } + } + } + + /* Step 5. */ + if (space.isObject()) { + JSObject &spaceObj = space.toObject(); + if (spaceObj.isNumber()) { + jsdouble d; + if (!ValueToNumber(cx, space, &d)) + return false; + space = NumberValue(d); + } else if (spaceObj.isString()) { + JSString *str = js_ValueToString(cx, space); + if (!str) + return false; + space = StringValue(str); + } + } + + StringBuffer gap(cx); + + if (space.isNumber()) { + /* Step 6. */ + jsdouble d; + JS_ALWAYS_TRUE(ToInteger(cx, space, &d)); + d = JS_MIN(10, d); + if (d >= 1 && !gap.appendN(' ', uint32(d))) + return false; + } else if (space.isString()) { + /* Step 7. */ + JSLinearString *str = space.toString()->ensureLinear(cx); + if (!str) + return false; + JS::Anchor anchor(str); + size_t len = JS_MIN(10, space.toString()->length()); + if (!gap.append(str->chars(), len)) + return false; + } else { + /* Step 8. */ + JS_ASSERT(gap.empty()); + } + + /* Step 9. */ + JSObject *wrapper = NewBuiltinClassInstance(cx, &js_ObjectClass); + if (!wrapper) + return false; + + /* Step 10. */ jsid emptyId = ATOM_TO_JSID(cx->runtime->atomState.emptyAtom); - if (!obj->defineProperty(cx, emptyId, *vp, NULL, NULL, JSPROP_ENUMERATE)) - return JS_FALSE; + if (!js_DefineNativeProperty(cx, wrapper, emptyId, *vp, PropertyStub, StrictPropertyStub, + JSPROP_ENUMERATE, 0, 0, NULL)) + { + return false; + } - if (!PreprocessValue(cx, obj, emptyId, vp, &scx)) - return JS_FALSE; + /* Step 11. 
*/ + StringifyContext scx(cx, sb, gap, replacer, propertyList); + if (!scx.init()) + return false; + + if (!PreprocessValue(cx, wrapper, emptyId, vp, &scx)) + return false; if (IsFilteredValue(*vp)) - return JS_TRUE; + return true; + return Str(cx, *vp, &scx); } @@ -1397,5 +1503,7 @@ js_InitJSONClass(JSContext *cx, JSObject *obj) if (!JS_DefineFunctions(cx, JSON, json_static_methods)) return NULL; + MarkStandardClassInitializedNoProto(obj, &js_JSONClass); + return JSON; } diff --git a/js/src/json.h b/js/src/json.h index 747f8d936f04..005b8005aa7a 100644 --- a/js/src/json.h +++ b/js/src/json.h @@ -52,8 +52,8 @@ extern JSObject * js_InitJSONClass(JSContext *cx, JSObject *obj); extern JSBool -js_Stringify(JSContext *cx, js::Value *vp, JSObject *replacer, - const js::Value &space, js::StringBuffer &sb); +js_Stringify(JSContext *cx, js::Value *vp, JSObject *replacer, js::Value space, + js::StringBuffer &sb); extern JSBool js_TryJSON(JSContext *cx, js::Value *vp); diff --git a/js/src/jsopcode.cpp b/js/src/jsopcode.cpp index 764d43e65d43..74fa354cdc35 100644 --- a/js/src/jsopcode.cpp +++ b/js/src/jsopcode.cpp @@ -72,7 +72,6 @@ #include "jsstaticcheck.h" #include "jsvector.h" -#include "jsinterpinlines.h" #include "jsobjinlines.h" #include "jsscriptinlines.h" #include "jscntxtinlines.h" @@ -311,7 +310,7 @@ js_DumpPC(JSContext *cx) void *mark = JS_ARENA_MARK(&cx->tempPool); Sprinter sprinter; INIT_SPRINTER(cx, &sprinter, &cx->tempPool, 0); - JSBool ok = js_DisassembleAtPC(cx, cx->fp()->script(), true, cx->regs->pc, &sprinter); + JSBool ok = js_DisassembleAtPC(cx, cx->fp()->script(), true, cx->regs().pc, &sprinter); fprintf(stdout, "%s", sprinter.base); JS_ARENA_RELEASE(&cx->tempPool, mark); return ok; @@ -2053,7 +2052,7 @@ Decompile(SprintStack *ss, jsbytecode *pc, intN nb, JSOp nextop) token = CodeToken[op]; if (pc + oplen == jp->dvgfence) { - JSStackFrame *fp; + StackFrame *fp; uint32 format, mode, type; /* @@ -2915,7 +2914,7 @@ Decompile(SprintStack *ss, jsbytecode *pc, 
intN nb, JSOp nextop) * object that's not a constructor, causing us to be * called with an intervening frame on the stack. */ - JSStackFrame *fp = js_GetTopStackFrame(cx); + StackFrame *fp = js_GetTopStackFrame(cx); if (fp) { while (!fp->isEvalFrame()) fp = fp->prev(); @@ -5088,7 +5087,7 @@ char * js_DecompileValueGenerator(JSContext *cx, intN spindex, jsval v_in, JSString *fallback) { - JSStackFrame *fp; + StackFrame *fp; JSScript *script; jsbytecode *pc; @@ -5100,12 +5099,12 @@ js_DecompileValueGenerator(JSContext *cx, intN spindex, jsval v_in, LeaveTrace(cx); - if (!cx->regs || !cx->regs->fp || !cx->regs->fp->isScriptFrame()) + if (!cx->running() || !cx->fp()->isScriptFrame()) goto do_fallback; - fp = cx->regs->fp; + fp = cx->fp(); script = fp->script(); - pc = fp->hasImacropc() ? fp->imacropc() : cx->regs->pc; + pc = fp->hasImacropc() ? fp->imacropc() : cx->regs().pc; JS_ASSERT(script->code <= pc && pc < script->code + script->length); if (pc < script->main) @@ -5139,7 +5138,7 @@ js_DecompileValueGenerator(JSContext *cx, intN spindex, jsval v_in, * it that caused exception, see bug 328664. 
*/ Value *stackBase = fp->base(); - Value *sp = cx->regs->sp; + Value *sp = cx->regs().sp; do { if (sp == stackBase) { pcdepth = -1; @@ -5167,11 +5166,10 @@ js_DecompileValueGenerator(JSContext *cx, intN spindex, jsval v_in, } { - jsbytecode* basepc = cx->regs->pc; + jsbytecode* basepc = cx->regs().pc; jsbytecode* savedImacropc = fp->maybeImacropc(); if (savedImacropc) { - JS_ASSERT(cx->hasfp()); - cx->regs->pc = savedImacropc; + cx->regs().pc = savedImacropc; fp->clearImacropc(); } @@ -5186,8 +5184,7 @@ js_DecompileValueGenerator(JSContext *cx, intN spindex, jsval v_in, name = DecompileExpression(cx, script, fp->maybeFun(), pc); if (savedImacropc) { - JS_ASSERT(cx->hasfp()); - cx->regs->pc = basepc; + cx->regs().pc = basepc; fp->setImacropc(savedImacropc); } @@ -5467,7 +5464,7 @@ ReconstructImacroPCStack(JSContext *cx, JSScript *script, * Begin with a recursive call back to ReconstructPCStack to pick up * the state-of-the-world at the *start* of the imacro. */ - JSStackFrame *fp = js_GetScriptedCaller(cx, NULL); + StackFrame *fp = js_GetScriptedCaller(cx, NULL); JS_ASSERT(fp->hasImacropc()); intN pcdepth = ReconstructPCStack(cx, script, fp->imacropc(), pcstack); if (pcdepth < 0) diff --git a/js/src/jsparse.cpp b/js/src/jsparse.cpp index 5be592aef89d..b2a941c6447e 100644 --- a/js/src/jsparse.cpp +++ b/js/src/jsparse.cpp @@ -91,7 +91,6 @@ #endif #include "jsatominlines.h" -#include "jsinterpinlines.h" #include "jsobjinlines.h" #include "jsregexpinlines.h" #include "jsscriptinlines.h" @@ -185,14 +184,14 @@ JSParseNode::clear() pn_parens = false; } -Parser::Parser(JSContext *cx, JSPrincipals *prin, JSStackFrame *cfp) +Parser::Parser(JSContext *cx, JSPrincipals *prin, StackFrame *cfp) : js::AutoGCRooter(cx, PARSER), context(cx), aleFreeList(NULL), tokenStream(cx), principals(NULL), callerFrame(cfp), - callerVarObj(cfp ? &cfp->varobj(cx->stack().containingSegment(cfp)) : NULL), + callerVarObj(cfp ? 
&cx->stack.space().varObjForFrame(cfp) : NULL), nodeList(NULL), functionCount(0), traceListHead(NULL), @@ -891,13 +890,13 @@ SetStaticLevel(JSTreeContext *tc, uintN staticLevel) /* * Compile a top-level script. */ -Compiler::Compiler(JSContext *cx, JSPrincipals *prin, JSStackFrame *cfp) +Compiler::Compiler(JSContext *cx, JSPrincipals *prin, StackFrame *cfp) : parser(cx, prin, cfp) { } JSScript * -Compiler::compileScript(JSContext *cx, JSObject *scopeChain, JSStackFrame *callerFrame, +Compiler::compileScript(JSContext *cx, JSObject *scopeChain, StackFrame *callerFrame, JSPrincipals *principals, uint32 tcflags, const jschar *chars, size_t length, const char *filename, uintN lineno, JSVersion version, @@ -8762,7 +8761,7 @@ Parser::primaryExpr(TokenKind tt, JSBool afterDot) return NULL; JSObject *obj; - if (context->hasfp()) { + if (context->running()) { obj = RegExp::createObject(context, context->regExpStatics(), tokenStream.getTokenbuf().begin(), tokenStream.getTokenbuf().length(), diff --git a/js/src/jsparse.h b/js/src/jsparse.h index a3ed2e3b81d1..12cd6bd99d16 100644 --- a/js/src/jsparse.h +++ b/js/src/jsparse.h @@ -1048,14 +1048,14 @@ namespace js { struct Parser : private js::AutoGCRooter { - JSContext * const context; /* FIXME Bug 551291: use AutoGCRooter::context? */ + JSContext *const context; /* FIXME Bug 551291: use AutoGCRooter::context? 
*/ JSAtomListElement *aleFreeList; void *tempFreeList[NUM_TEMP_FREELISTS]; TokenStream tokenStream; void *tempPoolMark; /* initial JSContext.tempPool mark */ JSPrincipals *principals; /* principals associated with source */ - JSStackFrame *const callerFrame; /* scripted caller frame for eval and dbgapi */ - JSObject *const callerVarObj; /* callerFrame's varObj */ + StackFrame *const callerFrame; /* scripted caller frame for eval and dbgapi */ + JSObject *const callerVarObj; /* callerFrame's varObj */ JSParseNode *nodeList; /* list of recyclable parse-node structs */ uint32 functionCount; /* number of functions in current unit */ JSObjectBox *traceListHead; /* list of parsed object for GC tracing */ @@ -1065,7 +1065,7 @@ struct Parser : private js::AutoGCRooter /* Root atoms and objects allocated for the parsed tree. */ js::AutoKeepAtoms keepAtoms; - Parser(JSContext *cx, JSPrincipals *prin = NULL, JSStackFrame *cfp = NULL); + Parser(JSContext *cx, JSPrincipals *prin = NULL, StackFrame *cfp = NULL); ~Parser(); friend void js::AutoGCRooter::trace(JSTracer *trc); @@ -1238,7 +1238,7 @@ struct Compiler Parser parser; GlobalScope *globalScope; - Compiler(JSContext *cx, JSPrincipals *prin = NULL, JSStackFrame *cfp = NULL); + Compiler(JSContext *cx, JSPrincipals *prin = NULL, StackFrame *cfp = NULL); /* * Initialize a compiler. Parameters are passed on to init parser. 
@@ -1255,7 +1255,7 @@ struct Compiler const char *filename, uintN lineno, JSVersion version); static JSScript * - compileScript(JSContext *cx, JSObject *scopeChain, JSStackFrame *callerFrame, + compileScript(JSContext *cx, JSObject *scopeChain, StackFrame *callerFrame, JSPrincipals *principals, uint32 tcflags, const jschar *chars, size_t length, const char *filename, uintN lineno, JSVersion version, diff --git a/js/src/jspropertycache.cpp b/js/src/jspropertycache.cpp index f0c95b6064da..a387d32ef83b 100644 --- a/js/src/jspropertycache.cpp +++ b/js/src/jspropertycache.cpp @@ -135,7 +135,7 @@ PropertyCache::fill(JSContext *cx, JSObject *obj, uintN scopeIndex, uintN protoI * Optimize the cached vword based on our parameters and the current pc's * opcode format flags. */ - pc = cx->regs->pc; + pc = cx->regs().pc; op = js_GetOpcode(cx, cx->fp()->script(), pc); cs = &js_CodeSpec[op]; kshape = 0; @@ -324,7 +324,7 @@ PropertyCache::fullTest(JSContext *cx, jsbytecode *pc, JSObject **objp, JSObject JSObject *obj, *pobj, *tmp; uint32 vcap; - JSStackFrame *fp = cx->fp(); + StackFrame *fp = cx->fp(); JS_ASSERT(this == &JS_PROPERTY_CACHE(cx)); JS_ASSERT(uintN((fp->hasImacropc() ? fp->imacropc() : pc) - fp->script()->code) diff --git a/js/src/jsproxy.cpp b/js/src/jsproxy.cpp index be30629ecdf9..cd902a656583 100644 --- a/js/src/jsproxy.cpp +++ b/js/src/jsproxy.cpp @@ -1406,7 +1406,7 @@ FixProxy(JSContext *cx, JSObject *proxy, JSBool *bp) * Make a blank object from the recipe fix provided to us. This must have * number of fixed slots as the proxy so that we can swap their contents. 
*/ - gc::FinalizeKind kind = gc::FinalizeKind(proxy->arena()->header()->thingKind); + gc::FinalizeKind kind = gc::FinalizeKind(proxy->arenaHeader()->getThingKind()); JSObject *newborn = NewNonFunction(cx, clasp, proto, parent, kind); if (!newborn) return false; @@ -1459,5 +1459,8 @@ js_InitProxyClass(JSContext *cx, JSObject *obj) } if (!JS_DefineFunctions(cx, module, static_methods)) return NULL; + + MarkStandardClassInitializedNoProto(obj, &js_ProxyClass); + return module; } diff --git a/js/src/jsprvtd.h b/js/src/jsprvtd.h index a67c0914cd35..edd3b1e0378b 100644 --- a/js/src/jsprvtd.h +++ b/js/src/jsprvtd.h @@ -138,12 +138,18 @@ class ExecuteArgsGuard; class InvokeFrameGuard; class InvokeArgsGuard; class InvokeSessionGuard; +class StringBuffer; class TraceRecorder; struct TraceMonitor; -class StackSpace; + +class FrameRegs; +class StackFrame; class StackSegment; +class StackSpace; +class ContextStack; class FrameRegsIter; -class StringBuffer; +class CallReceiver; +class CallArgs; struct Compiler; struct Parser; diff --git a/js/src/jsreflect.cpp b/js/src/jsreflect.cpp index c793022178ae..a37b3aeb759d 100644 --- a/js/src/jsreflect.cpp +++ b/js/src/jsreflect.cpp @@ -3314,5 +3314,7 @@ js_InitReflectClass(JSContext *cx, JSObject *obj) if (!JS_DefineFunctions(cx, Reflect, static_methods)) return NULL; + MarkStandardClassInitializedNoProto(obj, &js_ReflectClass); + return Reflect; } diff --git a/js/src/jsregexp.cpp b/js/src/jsregexp.cpp index 40ca19a25fd1..0df8ae536a47 100644 --- a/js/src/jsregexp.cpp +++ b/js/src/jsregexp.cpp @@ -678,15 +678,8 @@ ExecuteRegExp(JSContext *cx, ExecType execType, uintN argc, Value *vp) /* Step 5. */ jsdouble i; - if (lastIndex.isInt32()) { - i = lastIndex.toInt32(); - } else { - if (lastIndex.isDouble()) - i = lastIndex.toDouble(); - else if (!ValueToNumber(cx, lastIndex, &i)) - return false; - i = js_DoubleToInteger(i); - } + if (!ToInteger(cx, lastIndex, &i)) + return false; /* Steps 6-7 (with sticky extension). 
*/ if (!re->global() && !re->sticky()) diff --git a/js/src/jsscopeinlines.h b/js/src/jsscopeinlines.h index fdd20c4faf8c..851e152b3f7f 100644 --- a/js/src/jsscopeinlines.h +++ b/js/src/jsscopeinlines.h @@ -49,8 +49,10 @@ #include "jsscope.h" #include "jsgc.h" -#include "jsgcinlines.h" +#include "vm/StringObject.h" + #include "jscntxtinlines.h" +#include "jsgcinlines.h" #include "jsobjinlines.h" inline void @@ -71,7 +73,7 @@ JSObject::getEmptyShape(JSContext *cx, js::Class *aclasp, if (!emptyShapes) { emptyShapes = (js::EmptyShape**) - cx->calloc_(sizeof(js::EmptyShape*) * js::gc::JS_FINALIZE_OBJECT_LIMIT); + cx->calloc_(sizeof(js::EmptyShape*) * js::gc::FINALIZE_FUNCTION_AND_OBJECT_LAST); if (!emptyShapes) return NULL; @@ -134,34 +136,29 @@ JSObject::extend(JSContext *cx, const js::Shape *shape, bool isDefinitelyAtom) updateShape(cx); } +namespace js { + inline bool -JSObject::initString(JSContext *cx, JSString *str) +StringObject::init(JSContext *cx, JSString *str) { - JS_ASSERT(isString()); JS_ASSERT(nativeEmpty()); - const js::Shape **shapep = &cx->compartment->initialStringShape; + const Shape **shapep = &cx->compartment->initialStringShape; if (*shapep) { setLastProperty(*shapep); } else { - *shapep = assignInitialStringShape(cx); + *shapep = assignInitialShape(cx); if (!*shapep) return false; } JS_ASSERT(*shapep == lastProperty()); JS_ASSERT(!nativeEmpty()); + JS_ASSERT(nativeLookup(ATOM_TO_JSID(cx->runtime->atomState.lengthAtom))->slot == LENGTH_SLOT); - JS_ASSERT(nativeLookup(ATOM_TO_JSID(cx->runtime->atomState.lengthAtom))->slot == - JSObject::JSSLOT_STRING_LENGTH); - - setPrimitiveThis(js::StringValue(str)); - JS_ASSERT(str->length() <= JSString::MAX_LENGTH); - setSlot(JSSLOT_STRING_LENGTH, js::Int32Value(int32(str->length()))); + setStringThis(str); return true; } -namespace js { - inline Shape::Shape(jsid id, js::PropertyOp getter, js::StrictPropertyOp setter, uint32 slot, uintN attrs, uintN flags, intN shortid, uint32 shape, uint32 slotSpan) diff --git 
a/js/src/jsscript.cpp b/js/src/jsscript.cpp index ac625b62ac47..0dd911e698a8 100644 --- a/js/src/jsscript.cpp +++ b/js/src/jsscript.cpp @@ -68,7 +68,6 @@ #endif #include "methodjit/MethodJIT.h" -#include "jsinterpinlines.h" #include "jsobjinlines.h" #include "jsscriptinlines.h" @@ -1225,7 +1224,7 @@ JSScript::NewScript(JSContext *cx, uint32 length, uint32 nsrcnotes, uint32 natom script->compartment = cx->compartment; #ifdef CHECK_SCRIPT_OWNER - script->owner = cx->thread; + script->owner = cx->thread(); #endif JS_APPEND_LINK(&script->links, &cx->compartment->scripts); @@ -1470,7 +1469,7 @@ DestroyScript(JSContext *cx, JSScript *script) JS_PROPERTY_CACHE(cx).purgeForScript(cx, script); #ifdef CHECK_SCRIPT_OWNER - JS_ASSERT(script->owner == cx->thread); + JS_ASSERT(script->owner == cx->thread()); #endif } @@ -1646,7 +1645,7 @@ js_GetSrcNoteCached(JSContext *cx, JSScript *script, jsbytecode *pc) } uintN -js_FramePCToLineNumber(JSContext *cx, JSStackFrame *fp) +js_FramePCToLineNumber(JSContext *cx, StackFrame *fp) { return js_PCToLineNumber(cx, fp->script(), fp->hasImacropc() ? fp->imacropc() : fp->pc(cx)); @@ -1662,7 +1661,7 @@ js_PCToLineNumber(JSContext *cx, JSScript *script, jsbytecode *pc) jssrcnote *sn; JSSrcNoteType type; - /* Cope with JSStackFrame.pc value prior to entering js_Interpret. */ + /* Cope with StackFrame.pc value prior to entering js_Interpret. 
*/ if (!pc) return 0; @@ -1767,12 +1766,12 @@ js_GetScriptLineExtent(JSScript *script) const char * js::CurrentScriptFileAndLineSlow(JSContext *cx, uintN *linenop) { - if (!cx->hasfp()) { + if (!cx->running()) { *linenop = 0; return NULL; } - JSStackFrame *fp = cx->fp(); + StackFrame *fp = cx->fp(); while (fp->isDummyFrame()) fp = fp->prev(); diff --git a/js/src/jsscript.h b/js/src/jsscript.h index 12bcdca212d2..ed7dd8dad765 100644 --- a/js/src/jsscript.h +++ b/js/src/jsscript.h @@ -738,7 +738,7 @@ js_GetSrcNoteCached(JSContext *cx, JSScript *script, jsbytecode *pc); * fp->imacpc may be non-null, indicating an active imacro. */ extern uintN -js_FramePCToLineNumber(JSContext *cx, JSStackFrame *fp); +js_FramePCToLineNumber(JSContext *cx, js::StackFrame *fp); extern uintN js_PCToLineNumber(JSContext *cx, JSScript *script, jsbytecode *pc); diff --git a/js/src/jsscriptinlines.h b/js/src/jsscriptinlines.h index 63fe4a5d9afe..8af35211b36a 100644 --- a/js/src/jsscriptinlines.h +++ b/js/src/jsscriptinlines.h @@ -102,9 +102,9 @@ inline const char * CurrentScriptFileAndLine(JSContext *cx, uintN *linenop, LineOption opt) { if (opt == CALLED_FROM_JSOP_EVAL) { - JS_ASSERT(*cx->regs->pc == JSOP_EVAL); - JS_ASSERT(*(cx->regs->pc + JSOP_EVAL_LENGTH) == JSOP_LINENO); - *linenop = GET_UINT16(cx->regs->pc + JSOP_EVAL_LENGTH); + JS_ASSERT(*cx->regs().pc == JSOP_EVAL); + JS_ASSERT(*(cx->regs().pc + JSOP_EVAL_LENGTH) == JSOP_LINENO); + *linenop = GET_UINT16(cx->regs().pc + JSOP_EVAL_LENGTH); return cx->fp()->script()->filename; } diff --git a/js/src/jsstr.cpp b/js/src/jsstr.cpp index 947a96029bbc..c73282ebccba 100644 --- a/js/src/jsstr.cpp +++ b/js/src/jsstr.cpp @@ -76,13 +76,14 @@ #include "jsvector.h" #include "jsversion.h" -#include "jscntxtinlines.h" #include "jsinterpinlines.h" #include "jsobjinlines.h" #include "jsregexpinlines.h" #include "jsstrinlines.h" #include "jsautooplen.h" // generated headers last +#include "vm/StringObject-inl.h" + using namespace js; using namespace 
js::gc; @@ -90,7 +91,7 @@ using namespace js::gc; bool JSString::isShort() const { - bool is_short = arena()->header()->thingKind == FINALIZE_SHORT_STRING; + bool is_short = arenaHeader()->getThingKind() == FINALIZE_SHORT_STRING; JS_ASSERT_IF(is_short, isFlat()); return is_short; } @@ -105,7 +106,7 @@ JSString::isFixed() const bool JSString::isExternal() const { - bool is_external = arena()->header()->thingKind == FINALIZE_EXTERNAL_STRING; + bool is_external = arenaHeader()->getThingKind() == FINALIZE_EXTERNAL_STRING; JS_ASSERT_IF(is_external, isFixed()); return is_external; } @@ -750,7 +751,7 @@ str_resolve(JSContext *cx, JSObject *obj, jsid id, uintN flags, Class js_StringClass = { js_String_str, - JSCLASS_HAS_RESERVED_SLOTS(JSObject::STRING_RESERVED_SLOTS) | + JSCLASS_HAS_RESERVED_SLOTS(StringObject::RESERVED_SLOTS) | JSCLASS_NEW_RESOLVE | JSCLASS_HAS_CACHED_PROTO(JSProto_String), PropertyStub, /* addProperty */ PropertyStub, /* delProperty */ @@ -882,11 +883,8 @@ ValueToIntegerRange(JSContext *cx, const Value &v, int32 *out) *out = v.toInt32(); } else { double d; - - if (!ValueToNumber(cx, v, &d)) + if (!ToInteger(cx, v, &d)) return false; - - d = js_DoubleToInteger(d); if (d > INT32_MAX) *out = INT32_MAX; else if (d < INT32_MIN) @@ -1086,14 +1084,9 @@ js_str_charAt(JSContext *cx, uintN argc, Value *vp) if (!str) return false; - double d; - if (argc == 0) { - d = 0.0; - } else { - if (!ValueToNumber(cx, vp[2], &d)) - return false; - d = js_DoubleToInteger(d); - } + double d = 0.0; + if (argc > 0 && !ToInteger(cx, vp[2], &d)) + return false; if (d < 0 || str->length() <= d) goto out_of_range; @@ -1126,14 +1119,9 @@ js_str_charCodeAt(JSContext *cx, uintN argc, Value *vp) if (!str) return false; - double d; - if (argc == 0) { - d = 0.0; - } else { - if (!ValueToNumber(cx, vp[2], &d)) - return false; - d = js_DoubleToInteger(d); - } + double d = 0.0; + if (argc > 0 && !ToInteger(cx, vp[2], &d)) + return false; if (d < 0 || str->length() <= d) goto out_of_range; @@ 
-1468,9 +1456,8 @@ str_indexOf(JSContext *cx, uintN argc, Value *vp) } } else { jsdouble d; - if (!ValueToNumber(cx, vp[3], &d)) - return JS_FALSE; - d = js_DoubleToInteger(d); + if (!ToInteger(cx, vp[3], &d)) + return false; if (d <= 0) { start = 0; } else if (d > textlen) { @@ -2452,7 +2439,7 @@ str_replace_flat_lambda(JSContext *cx, uintN argc, Value *vp, ReplaceData &rdata /* lambda(matchStr, matchStart, textstr) */ static const uint32 lambdaArgc = 3; - if (!cx->stack().pushInvokeArgs(cx, lambdaArgc, &rdata.singleShot)) + if (!cx->stack.pushInvokeArgs(cx, lambdaArgc, &rdata.singleShot)) return false; CallArgs &args = rdata.singleShot; @@ -2464,7 +2451,7 @@ str_replace_flat_lambda(JSContext *cx, uintN argc, Value *vp, ReplaceData &rdata sp[1].setInt32(fm.match()); sp[2].setString(rdata.str); - if (!Invoke(cx, rdata.singleShot, 0)) + if (!Invoke(cx, rdata.singleShot)) return false; JSString *repstr = js_ValueToString(cx, args.rval()); @@ -2995,9 +2982,8 @@ str_slice(JSContext *cx, uintN argc, Value *vp) if (argc != 0) { double begin, end, length; - if (!ValueToNumber(cx, vp[2], &begin)) - return JS_FALSE; - begin = js_DoubleToInteger(begin); + if (!ToInteger(cx, vp[2], &begin)) + return false; length = str->length(); if (begin < 0) { begin += length; @@ -3010,9 +2996,8 @@ str_slice(JSContext *cx, uintN argc, Value *vp) if (argc == 1 || vp[3].isUndefined()) { end = length; } else { - if (!ValueToNumber(cx, vp[3], &end)) - return JS_FALSE; - end = js_DoubleToInteger(end); + if (!ToInteger(cx, vp[3], &end)) + return false; if (end < 0) { end += length; if (end < 0) @@ -3471,10 +3456,10 @@ js_String(JSContext *cx, uintN argc, Value *vp) } if (IsConstructing(vp)) { - JSObject *obj = NewBuiltinClassInstance(cx, &js_StringClass); - if (!obj || !obj->initString(cx, str)) + StringObject *strobj = StringObject::create(cx, str); + if (!strobj) return false; - vp->setObject(*obj); + vp->setObject(*strobj); } else { vp->setString(str); } @@ -3538,14 +3523,13 @@ static 
JSFunctionSpec string_static_methods[] = { }; const Shape * -JSObject::assignInitialStringShape(JSContext *cx) +StringObject::assignInitialShape(JSContext *cx) { JS_ASSERT(!cx->compartment->initialStringShape); - JS_ASSERT(isString()); JS_ASSERT(nativeEmpty()); return addDataProperty(cx, ATOM_TO_JSID(cx->runtime->atomState.lengthAtom), - JSSLOT_STRING_LENGTH, JSPROP_PERMANENT | JSPROP_READONLY); + LENGTH_SLOT, JSPROP_PERMANENT | JSPROP_READONLY); } JSObject * @@ -3567,7 +3551,7 @@ js_InitStringClass(JSContext *cx, JSObject *global) return NULL; JSObject *proto = NewObject(cx, &js_StringClass, objectProto, global); - if (!proto || !proto->initString(cx, cx->runtime->emptyString)) + if (!proto || !proto->asString()->init(cx, cx->runtime->emptyString)) return NULL; /* Now create the String function. */ @@ -3906,6 +3890,8 @@ js::ValueToStringBufferSlow(JSContext *cx, const Value &arg, StringBuffer &sb) JS_FRIEND_API(JSString *) js_ValueToSource(JSContext *cx, const Value &v) { + JS_CHECK_RECURSION(cx, return NULL); + if (v.isUndefined()) return cx->runtime->atomState.void0Atom; if (v.isString()) diff --git a/js/src/jsstrinlines.h b/js/src/jsstrinlines.h index 674b0552da5b..6bd691ac7dbe 100644 --- a/js/src/jsstrinlines.h +++ b/js/src/jsstrinlines.h @@ -551,10 +551,10 @@ inline void JSAtom::finalize(JSRuntime *rt) { JS_ASSERT(isAtom()); - if (arena()->header()->thingKind == js::gc::FINALIZE_STRING) + if (arenaHeader()->getThingKind() == js::gc::FINALIZE_STRING) asFlat().finalize(rt); else - JS_ASSERT(arena()->header()->thingKind == js::gc::FINALIZE_SHORT_STRING); + JS_ASSERT(arenaHeader()->getThingKind() == js::gc::FINALIZE_SHORT_STRING); } inline void diff --git a/js/src/jstl.h b/js/src/jstl.h index c35bda02e7d3..52f1efde67ca 100644 --- a/js/src/jstl.h +++ b/js/src/jstl.h @@ -234,142 +234,6 @@ PointerRangeSize(T *begin, T *end) return (size_t(end) - size_t(begin)) / sizeof(T); } -/* - * This utility pales in comparison to Boost's aligned_storage. 
The utility - * simply assumes that JSUint64 is enough alignment for anyone. This may need - * to be extended one day... - * - * As an important side effect, pulling the storage into this template is - * enough obfuscation to confuse gcc's strict-aliasing analysis into not giving - * false negatives when we cast from the char buffer to whatever type we've - * constructed using the bytes. - */ -template -struct AlignedStorage -{ - union U { - char bytes[nbytes]; - uint64 _; - } u; - - const void *addr() const { return u.bytes; } - void *addr() { return u.bytes; } -}; - -template -struct AlignedStorage2 -{ - union U { - char bytes[sizeof(T)]; - uint64 _; - } u; - - const T *addr() const { return (const T *)u.bytes; } - T *addr() { return (T *)u.bytes; } -}; - -/* - * Small utility for lazily constructing objects without using dynamic storage. - * When a LazilyConstructed is constructed, it is |empty()|, i.e., no value - * of T has been constructed and no T destructor will be called when the - * LazilyConstructed is destroyed. Upon calling |construct|, a T object will - * be constructed with the given arguments and that object will be destroyed - * when the owning LazilyConstructed is destroyed. - * - * N.B. GCC seems to miss some optimizations with LazilyConstructed and may - * generate extra branches/loads/stores. Use with caution on hot paths. 
- */ -template -class LazilyConstructed -{ - AlignedStorage2 storage; - bool constructed; - - T &asT() { return *storage.addr(); } - - explicit LazilyConstructed(const LazilyConstructed &other); - const LazilyConstructed &operator=(const LazilyConstructed &other); - - public: - LazilyConstructed() { constructed = false; } - ~LazilyConstructed() { if (constructed) asT().~T(); } - - bool empty() const { return !constructed; } - - void construct() { - JS_ASSERT(!constructed); - new(storage.addr()) T(); - constructed = true; - } - - template - void construct(const T1 &t1) { - JS_ASSERT(!constructed); - new(storage.addr()) T(t1); - constructed = true; - } - - template - void construct(const T1 &t1, const T2 &t2) { - JS_ASSERT(!constructed); - new(storage.addr()) T(t1, t2); - constructed = true; - } - - template - void construct(const T1 &t1, const T2 &t2, const T3 &t3) { - JS_ASSERT(!constructed); - new(storage.addr()) T(t1, t2, t3); - constructed = true; - } - - template - void construct(const T1 &t1, const T2 &t2, const T3 &t3, const T4 &t4) { - JS_ASSERT(!constructed); - new(storage.addr()) T(t1, t2, t3, t4); - constructed = true; - } - - T *addr() { - JS_ASSERT(constructed); - return &asT(); - } - - T &ref() { - JS_ASSERT(constructed); - return asT(); - } - - void destroy() { - ref().~T(); - constructed = false; - } - - void destroyIfConstructed() { - if (!empty()) - destroy(); - } -}; - - -/* - * N.B. GCC seems to miss some optimizations with Conditionally and may - * generate extra branches/loads/stores. Use with caution on hot paths. 
- */ -template -class Conditionally { - LazilyConstructed t; - - public: - Conditionally(bool b) { if (b) t.construct(); } - - template - Conditionally(bool b, const T1 &t1) { if (b) t.construct(t1); } - - template - Conditionally(bool b, const T1 &t1, const T2 &t2) { if (b) t.construct(t1, t2); } -}; - template class AlignedPtrAndFlag { diff --git a/js/src/jstracer.cpp b/js/src/jstracer.cpp index 6e1315ee8b38..ec67ba19a588 100644 --- a/js/src/jstracer.cpp +++ b/js/src/jstracer.cpp @@ -92,6 +92,8 @@ #include "jscntxtinlines.h" #include "jsopcodeinlines.h" +#include "vm/Stack-inl.h" + #ifdef JS_METHODJIT #include "methodjit/MethodJIT.h" #endif @@ -425,12 +427,6 @@ ValueToTypeChar(const Value &v) /* Max number of slots in a table-switch. */ #define MAX_TABLE_SWITCH 256 -/* Max memory needed to rebuild the interpreter stack when falling off trace. */ -#define MAX_INTERP_STACK_BYTES \ - (MAX_NATIVE_STACK_SLOTS * sizeof(Value) + \ - MAX_CALL_STACK_ENTRIES * sizeof(JSInlineFrame) + \ - sizeof(JSInlineFrame)) /* possibly slow native frame at top of stack */ - /* Max number of branches per tree. */ #define MAX_BRANCHES 32 @@ -1226,7 +1222,7 @@ getCoercedType(const Value &v) } static inline JSValueType -getFrameObjPtrTraceType(void *p, JSStackFrame *fp) +getFrameObjPtrTraceType(void *p, StackFrame *fp) { if (p == fp->addressOfScopeChain()) { JS_ASSERT(*(JSObject **)p != NULL); @@ -1272,7 +1268,7 @@ static JS_REQUIRES_STACK inline int GlobalSlotHash(JSContext* cx, unsigned slot) { uintptr_t h = HASH_SEED; - JSStackFrame* fp = cx->fp(); + StackFrame* fp = cx->fp(); while (fp->prev()) fp = fp->prev(); @@ -1321,7 +1317,7 @@ Oracle::markStackSlotUndemotable(JSContext* cx, unsigned slot, const void* pc) JS_REQUIRES_STACK void Oracle::markStackSlotUndemotable(JSContext* cx, unsigned slot) { - markStackSlotUndemotable(cx, slot, cx->regs->pc); + markStackSlotUndemotable(cx, slot, cx->regs().pc); } /* Consult with the oracle whether we shouldn't demote a certain slot. 
*/ @@ -1334,7 +1330,7 @@ Oracle::isStackSlotUndemotable(JSContext* cx, unsigned slot, const void* pc) con JS_REQUIRES_STACK bool Oracle::isStackSlotUndemotable(JSContext* cx, unsigned slot) const { - return isStackSlotUndemotable(cx, slot, cx->regs->pc); + return isStackSlotUndemotable(cx, slot, cx->regs().pc); } /* Tell the oracle that a certain slot at a certain bytecode location should not be demoted. */ @@ -1632,7 +1628,7 @@ TreeFragment::initialize(JSContext* cx, SlotList *globalSlots, bool speculate) /* Capture the coerced type of each active slot in the type map. */ this->typeMap.captureTypes(cx, globalObj, *globalSlots, 0 /* callDepth */, speculate); this->nStackTypes = this->typeMap.length() - globalSlots->length(); - this->spOffsetAtEntry = cx->regs->sp - cx->fp()->base(); + this->spOffsetAtEntry = cx->regs().sp - cx->fp()->base(); #ifdef DEBUG this->treeFileName = cx->fp()->script()->filename; @@ -1646,7 +1642,7 @@ TreeFragment::initialize(JSContext* cx, SlotList *globalSlots, bool speculate) this->sideExits.clear(); /* Determine the native frame layout at the entry point. */ - this->nativeStackBase = (nStackTypes - (cx->regs->sp - cx->fp()->base())) * + this->nativeStackBase = (nStackTypes - (cx->regs().sp - cx->fp()->base())) * sizeof(double); this->maxNativeStackSlots = nStackTypes; this->maxCallDepth = 0; @@ -1727,7 +1723,7 @@ fcallinfo(LIns *ins) } /* - * JSStackFrame::numActualArgs is only defined for function frames. Since the + * StackFrame::numActualArgs is only defined for function frames. Since the * actual arguments of the entry frame are kept on trace, argc is included in * the tuple identifying a fragment so that two fragments for the same loop but * recorded with different number of actual arguments are treated as two @@ -1737,13 +1733,13 @@ fcallinfo(LIns *ins) static inline uintN entryFrameArgc(JSContext *cx) { - JSStackFrame *fp = cx->fp(); + StackFrame *fp = cx->fp(); return fp->hasArgs() ? 
fp->numActualArgs() : 0; } template static JS_REQUIRES_STACK JS_ALWAYS_INLINE bool -VisitStackAndArgs(Visitor &visitor, JSStackFrame *fp, JSStackFrame *next, Value *stack) +VisitStackAndArgs(Visitor &visitor, StackFrame *fp, StackFrame *next, Value *stack) { if (JS_LIKELY(!next->hasOverflowArgs())) return visitor.visitStackSlots(stack, next->formalArgsEnd() - stack, fp); @@ -1752,7 +1748,7 @@ VisitStackAndArgs(Visitor &visitor, JSStackFrame *fp, JSStackFrame *next, Value * In the case of nactual > nformal, the formals are copied by the VM onto * the top of the stack. We only want to mark the formals once, so we * carefully mark only the canonical actual arguments (as defined by - * JSStackFrame::canonicalActualArg). + * StackFrame::canonicalActualArg). */ uintN nactual = next->numActualArgs(); Value *actuals = next->actualArgs(); @@ -1767,7 +1763,7 @@ VisitStackAndArgs(Visitor &visitor, JSStackFrame *fp, JSStackFrame *next, Value } /* - * Visit the values in the given JSStackFrame that the tracer cares about. This + * Visit the values in the given StackFrame that the tracer cares about. This * visitor function is (implicitly) the primary definition of the native stack * area layout. There are a few other independent pieces of code that must be * maintained to assume the same layout. 
They are marked like this: @@ -1776,8 +1772,8 @@ VisitStackAndArgs(Visitor &visitor, JSStackFrame *fp, JSStackFrame *next, Value */ template static JS_REQUIRES_STACK bool -VisitFrameSlots(Visitor &visitor, JSContext *cx, unsigned depth, JSStackFrame *fp, - JSStackFrame *next) +VisitFrameSlots(Visitor &visitor, JSContext *cx, unsigned depth, StackFrame *fp, + StackFrame *next) { JS_ASSERT_IF(!next, cx->fp() == fp); @@ -1790,7 +1786,7 @@ VisitFrameSlots(Visitor &visitor, JSContext *cx, unsigned depth, JSStackFrame *f Value *base = fp->slots() + fp->globalScript()->nfixed; if (next) return VisitStackAndArgs(visitor, fp, next, base); - return visitor.visitStackSlots(base, cx->regs->sp - base, fp); + return visitor.visitStackSlots(base, cx->regs().sp - base, fp); } if (JS_UNLIKELY(fp->isEvalFrame())) { @@ -1817,10 +1813,10 @@ VisitFrameSlots(Visitor &visitor, JSContext *cx, unsigned depth, JSStackFrame *f JS_ASSERT(fp->isFunctionFrame()); /* - * We keep two members of JSStackFrame on trace: the args obj pointer and + * We keep two members of StackFrame on trace: the args obj pointer and * the scope chain pointer. The visitor must take care not to treat these * as js::Value-typed variables, since they are unboxed pointers. - * Moreover, JSStackFrame compresses the args obj pointer with nactual, so + * Moreover, StackFrame compresses the args obj pointer with nactual, so * fp->addressOfArgs() is not really a JSObject**: the visitor must treat * !fp->hasArgsObj() as a null args obj pointer. Hence, visitFrameObjPtr * is only passed a void *. 
@@ -1835,7 +1831,7 @@ VisitFrameSlots(Visitor &visitor, JSContext *cx, unsigned depth, JSStackFrame *f visitor.setStackSlotKind("slots"); if (next) return VisitStackAndArgs(visitor, fp, next, fp->slots()); - return visitor.visitStackSlots(fp->slots(), cx->regs->sp - fp->slots(), fp); + return visitor.visitStackSlots(fp->slots(), cx->regs().sp - fp->slots(), fp); } // Number of native frame slots used for 'special' values between args and vars. @@ -1938,7 +1934,7 @@ public: {} JS_REQUIRES_STACK JS_ALWAYS_INLINE bool - visitStackSlots(Value *vp, size_t count, JSStackFrame* fp) { + visitStackSlots(Value *vp, size_t count, StackFrame* fp) { if (mDone) return false; if (mStop && size_t(((const Value *)mStop) - vp) < count) { @@ -1951,7 +1947,7 @@ public: } JS_REQUIRES_STACK JS_ALWAYS_INLINE bool - visitFrameObjPtr(void* p, JSStackFrame* fp) { + visitFrameObjPtr(void* p, StackFrame* fp) { if (mDone) return false; if (mStop && mStop == p) { @@ -1972,7 +1968,7 @@ public: }; static JS_REQUIRES_STACK JS_ALWAYS_INLINE unsigned -CountStackAndArgs(JSStackFrame *next, Value *stack) +CountStackAndArgs(StackFrame *next, Value *stack) { if (JS_LIKELY(!next->hasOverflowArgs())) return (Value *)next - stack; @@ -1982,7 +1978,7 @@ CountStackAndArgs(JSStackFrame *next, Value *stack) } static JS_ALWAYS_INLINE uintN -NumSlotsBeforeFixed(JSStackFrame *fp) +NumSlotsBeforeFixed(StackFrame *fp) { uintN numArgs = fp->isEvalFrame() ? 
0 : Max(fp->numActualArgs(), fp->numFormalArgs()); return 2 + numArgs + SPECIAL_FRAME_SLOTS; @@ -1997,8 +1993,8 @@ NumSlotsBeforeFixed(JSStackFrame *fp) JS_REQUIRES_STACK unsigned NativeStackSlots(JSContext *cx, unsigned callDepth) { - JSStackFrame *fp = cx->fp(); - JSStackFrame *next = NULL; + StackFrame *fp = cx->fp(); + StackFrame *next = NULL; unsigned slots = 0; unsigned depth = callDepth; @@ -2008,7 +2004,7 @@ NativeStackSlots(JSContext *cx, unsigned callDepth) if (next) slots += CountStackAndArgs(next, fp->slots()); else - slots += cx->regs->sp - fp->slots(); + slots += cx->regs().sp - fp->slots(); } Value *start; @@ -2021,7 +2017,7 @@ NativeStackSlots(JSContext *cx, unsigned callDepth) if (next) slots += CountStackAndArgs(next, start); else - slots += cx->regs->sp - start; + slots += cx->regs().sp - start; #ifdef DEBUG CountSlotsVisitor visitor; @@ -2060,7 +2056,7 @@ public: } JS_REQUIRES_STACK JS_ALWAYS_INLINE bool - visitStackSlots(Value *vp, int count, JSStackFrame* fp) { + visitStackSlots(Value *vp, int count, StackFrame* fp) { for (int i = 0; i < count; ++i) { JSValueType type = getCoercedType(vp[i]); if (type == JSVAL_TYPE_INT32 && (!mOracle || mOracle->isStackSlotUndemotable(mCx, length()))) @@ -2075,7 +2071,7 @@ public: } JS_REQUIRES_STACK JS_ALWAYS_INLINE bool - visitFrameObjPtr(void* p, JSStackFrame* fp) { + visitFrameObjPtr(void* p, StackFrame* fp) { JSValueType type = getFrameObjPtrTraceType(p, fp); debug_only_printf(LC_TMTracer, "capture type %s%d: %c\n", @@ -2279,7 +2275,7 @@ TraceRecorder::TraceRecorder(JSContext* cx, TraceMonitor *tm, { JS_ASSERT(globalObj == cx->fp()->scopeChain().getGlobal()); JS_ASSERT(globalObj->hasOwnShape()); - JS_ASSERT(cx->regs->pc == (jsbytecode*)fragment->ip); + JS_ASSERT(cx->regs().pc == (jsbytecode*)fragment->ip); #ifdef JS_METHODJIT if (TRACE_PROFILER(cx)) @@ -2634,7 +2630,7 @@ TraceRecorder::nativeStackOffsetImpl(const void* p) const if (!visitor.stopped()) { const Value *vp = (const Value *)p; 
JS_ASSERT(size_t(vp - cx->fp()->slots()) < cx->fp()->numSlots()); - offset += size_t(vp - cx->regs->sp) * sizeof(double); + offset += size_t(vp - cx->regs().sp) * sizeof(double); } return offset; } @@ -3026,7 +3022,7 @@ public: } JS_REQUIRES_STACK JS_ALWAYS_INLINE bool - visitStackSlots(Value *vp, int count, JSStackFrame* fp) { + visitStackSlots(Value *vp, int count, StackFrame* fp) { for (int i = 0; i < count; ++i) { debug_only_printf(LC_TMTracer, "%s%d: ", stackSlotKind(), i); ValueToNative(*vp++, *mTypeMap++, mStack++); @@ -3035,7 +3031,7 @@ public: } JS_REQUIRES_STACK JS_ALWAYS_INLINE bool - visitFrameObjPtr(void* p, JSStackFrame* fp) { + visitFrameObjPtr(void* p, StackFrame* fp) { debug_only_printf(LC_TMTracer, "%s%d: ", stackSlotKind(), 0); if (p == fp->addressOfScopeChain()) *(JSObject **)mStack = &fp->scopeChain(); @@ -3113,7 +3109,7 @@ public: } JS_REQUIRES_STACK JS_ALWAYS_INLINE bool - visitStackSlots(Value *vp, size_t count, JSStackFrame* fp) { + visitStackSlots(Value *vp, size_t count, StackFrame* fp) { JS_ASSERT(JS_THREAD_DATA(mCx)->waiveGCQuota); for (size_t i = 0; i < count; ++i) { debug_only_printf(LC_TMTracer, "%s%u=", stackSlotKind(), unsigned(i)); @@ -3126,7 +3122,7 @@ public: } JS_REQUIRES_STACK JS_ALWAYS_INLINE bool - visitFrameObjPtr(void* p, JSStackFrame* fp) { + visitFrameObjPtr(void* p, StackFrame* fp) { JS_ASSERT(JS_THREAD_DATA(mCx)->waiveGCQuota); debug_only_printf(LC_TMTracer, "%s%u=", stackSlotKind(), 0); JSObject *frameobj = *(JSObject **)mStack; @@ -3155,7 +3151,7 @@ public: !frameobj->getPrivate() && fp->maybeCallee() == frameobj->getCallObjCallee()) { - JS_ASSERT(&fp->scopeChain() == JSStackFrame::sInvalidScopeChain); + JS_ASSERT(&fp->scopeChain() == StackFrame::sInvalidScopeChain); frameobj->setPrivate(fp); fp->setScopeChainWithOwnCallObj(*frameobj); } else { @@ -3274,7 +3270,7 @@ GetUpvarOnTrace(JSContext* cx, uint32 upvarLevel, int32 slot, uint32 callDepth, * then we simply get the value from the interpreter state. 
*/ JS_ASSERT(upvarLevel < UpvarCookie::UPVAR_LEVEL_LIMIT); - JSStackFrame* fp = cx->findFrameAtLevel(upvarLevel); + StackFrame* fp = cx->stack.findFrameAtLevel(upvarLevel); Value v = T::interp_get(fp, slot); JSValueType type = getCoercedType(v); ValueToNative(v, type, result); @@ -3283,7 +3279,7 @@ GetUpvarOnTrace(JSContext* cx, uint32 upvarLevel, int32 slot, uint32 callDepth, // For this traits type, 'slot' is the argument index, which may be -2 for callee. struct UpvarArgTraits { - static Value interp_get(JSStackFrame* fp, int32 slot) { + static Value interp_get(StackFrame* fp, int32 slot) { return fp->formalArg(slot); } @@ -3300,7 +3296,7 @@ GetUpvarArgOnTrace(JSContext* cx, uint32 upvarLevel, int32 slot, uint32 callDept // For this traits type, 'slot' is an index into the local slots array. struct UpvarVarTraits { - static Value interp_get(JSStackFrame* fp, int32 slot) { + static Value interp_get(StackFrame* fp, int32 slot) { return fp->slots()[slot]; } @@ -3321,7 +3317,7 @@ GetUpvarVarOnTrace(JSContext* cx, uint32 upvarLevel, int32 slot, uint32 callDept * the only one that can have no function.) */ struct UpvarStackTraits { - static Value interp_get(JSStackFrame* fp, int32 slot) { + static Value interp_get(StackFrame* fp, int32 slot) { return fp->slots()[slot + fp->numFixed()]; } @@ -3390,7 +3386,7 @@ GetFromClosure(JSContext* cx, JSObject* call, const ClosureVarInfo* cv, double* // We already guarded on trace that we aren't touching an outer tree's entry frame VOUCH_DOES_NOT_REQUIRE_STACK(); - JSStackFrame* fp = (JSStackFrame*) call->getPrivate(); + StackFrame* fp = (StackFrame*) call->getPrivate(); JS_ASSERT(fp != cx->fp()); Value v; @@ -3416,7 +3412,7 @@ struct ArgClosureTraits { // Get the right frame slots to use our slot index with. // See also UpvarArgTraits. 
- static inline Value get_slot(JSStackFrame* fp, unsigned slot) { + static inline Value get_slot(StackFrame* fp, unsigned slot) { JS_ASSERT(slot < fp->numFormalArgs()); return fp->formalArg(slot); } @@ -3449,7 +3445,7 @@ GetClosureArg(JSContext* cx, JSObject* callee, const ClosureVarInfo* cv, double* struct VarClosureTraits { // See also UpvarVarTraits. - static inline Value get_slot(JSStackFrame* fp, unsigned slot) { + static inline Value get_slot(StackFrame* fp, unsigned slot) { JS_ASSERT(slot < fp->fun()->script()->bindings.countVars()); return fp->slots()[slot]; } @@ -3504,7 +3500,7 @@ FlushNativeStackFrame(JSContext* cx, unsigned callDepth, const JSValueType* mp, /* Emit load instructions onto the trace that read the initial stack state. */ JS_REQUIRES_STACK void TraceRecorder::importImpl(Address addr, const void* p, JSValueType t, - const char *prefix, uintN index, JSStackFrame *fp) + const char *prefix, uintN index, StackFrame *fp) { LIns* ins; if (t == JSVAL_TYPE_INT32) { /* demoted */ @@ -3582,7 +3578,7 @@ TraceRecorder::importImpl(Address addr, const void* p, JSValueType t, JS_REQUIRES_STACK void TraceRecorder::import(Address addr, const Value* p, JSValueType t, - const char *prefix, uintN index, JSStackFrame *fp) + const char *prefix, uintN index, StackFrame *fp) { return importImpl(addr, p, t, prefix, index, fp); } @@ -3593,7 +3589,7 @@ class ImportBoxedStackSlotVisitor : public SlotVisitorBase LIns *mBase; ptrdiff_t mStackOffset; JSValueType *mTypemap; - JSStackFrame *mFp; + StackFrame *mFp; public: ImportBoxedStackSlotVisitor(TraceRecorder &recorder, LIns *base, @@ -3606,7 +3602,7 @@ public: {} JS_REQUIRES_STACK JS_ALWAYS_INLINE bool - visitStackSlots(Value *vp, size_t count, JSStackFrame* fp) { + visitStackSlots(Value *vp, size_t count, StackFrame* fp) { for (size_t i = 0; i < count; ++i) { if (*mTypemap == JSVAL_TYPE_BOXED) { mRecorder.import(StackAddress(mBase, mStackOffset), vp, JSVAL_TYPE_BOXED, @@ -3624,7 +3620,7 @@ public: } JS_REQUIRES_STACK 
JS_ALWAYS_INLINE bool - visitFrameObjPtr(void* p, JSStackFrame *fp) { + visitFrameObjPtr(void* p, StackFrame *fp) { JS_ASSERT(*mTypemap != JSVAL_TYPE_BOXED); mTypemap++; mStackOffset += sizeof(double); @@ -3684,7 +3680,7 @@ TraceRecorder::import(TreeFragment* tree, LIns* sp, unsigned stackSlots, unsigne JS_REQUIRES_STACK bool TraceRecorder::isValidSlot(JSObject *obj, const Shape* shape) { - uint32 setflags = (js_CodeSpec[*cx->regs->pc].format & (JOF_SET | JOF_INCDEC | JOF_FOR)); + uint32 setflags = (js_CodeSpec[*cx->regs().pc].format & (JOF_SET | JOF_INCDEC | JOF_FOR)); if (setflags) { if (!shape->hasDefaultSetter()) @@ -3894,7 +3890,7 @@ TraceRecorder::get(const Value *p) bool TraceRecorder::isValidFrameObjPtr(void *p) { - JSStackFrame *fp = cx->fp(); + StackFrame *fp = cx->fp(); for (; fp; fp = fp->prev()) { if (fp->addressOfScopeChain() == p || fp->addressOfArgs() == p) return true; @@ -4038,7 +4034,7 @@ public: } JS_REQUIRES_STACK JS_ALWAYS_INLINE bool - visitStackSlots(Value *vp, size_t count, JSStackFrame* fp) { + visitStackSlots(Value *vp, size_t count, StackFrame* fp) { /* N.B. vp may actually point to a JSObject*. 
*/ for (size_t i = 0; i < count; ++i) { LIns *ins = mRecorder.get(vp); @@ -4060,7 +4056,7 @@ public: } JS_REQUIRES_STACK JS_ALWAYS_INLINE bool - visitFrameObjPtr(void* p, JSStackFrame* fp) { + visitFrameObjPtr(void* p, StackFrame* fp) { JS_ASSERT(*mTypeMap != JSVAL_TYPE_BOXED); ++mTypeMap; ++mSlotnum; @@ -4126,14 +4122,14 @@ public: } JS_REQUIRES_STACK JS_ALWAYS_INLINE bool - visitStackSlots(Value *vp, size_t count, JSStackFrame* fp) { + visitStackSlots(Value *vp, size_t count, StackFrame* fp) { for (size_t i = 0; i < count; ++i) *mTypeMap++ = mRecorder.determineSlotType(vp++); return true; } JS_REQUIRES_STACK JS_ALWAYS_INLINE bool - visitFrameObjPtr(void* p, JSStackFrame* fp) { + visitFrameObjPtr(void* p, StackFrame* fp) { *mTypeMap++ = getFrameObjPtrTraceType(p, fp); return true; } @@ -4150,7 +4146,7 @@ TreevisLogExit(JSContext* cx, VMSideExit* exit) { debug_only_printf(LC_TMTreeVis, "TREEVIS ADDEXIT EXIT=%p TYPE=%s FRAG=%p PC=%p FILE=\"%s\"" " LINE=%d OFFS=%d", (void*)exit, getExitName(exit->exitType), - (void*)exit->from, (void*)cx->regs->pc, cx->fp()->script()->filename, + (void*)exit->from, (void*)cx->regs().pc, cx->fp()->script()->filename, js_FramePCToLineNumber(cx, cx->fp()), FramePCOffset(cx, cx->fp())); debug_only_print0(LC_TMTreeVis, " STACK=\""); for (unsigned i = 0; i < exit->numStackSlots; i++) @@ -4165,9 +4161,9 @@ TreevisLogExit(JSContext* cx, VMSideExit* exit) JS_REQUIRES_STACK VMSideExit* TraceRecorder::snapshot(ExitType exitType) { - JSStackFrame* const fp = cx->fp(); - JSFrameRegs* const regs = cx->regs; - jsbytecode* pc = regs->pc; + StackFrame* const fp = cx->fp(); + FrameRegs& regs = cx->regs(); + jsbytecode* pc = regs.pc; /* * Check for a return-value opcode that needs to restart at the next @@ -4186,7 +4182,7 @@ TraceRecorder::snapshot(ExitType exitType) JS_ASSERT(*pc == JSOP_CALL || *pc == JSOP_FUNAPPLY || *pc == JSOP_FUNCALL || *pc == JSOP_NEW || *pc == JSOP_SETPROP || *pc == JSOP_SETNAME); pc += cs.length; - regs->pc = pc; + regs.pc = 
pc; MUST_FLOW_THROUGH("restore_pc"); } @@ -4233,7 +4229,7 @@ TraceRecorder::snapshot(ExitType exitType) if (pendingUnboxSlot || (pendingSpecializedNative && (pendingSpecializedNative->flags & JSTN_UNBOX_AFTER))) { unsigned pos = stackSlots - 1; - if (pendingUnboxSlot == cx->regs->sp - 2) + if (pendingUnboxSlot == regs.sp - 2) pos = stackSlots - 2; typemap[pos] = JSVAL_TYPE_BOXED; } else if (pendingSpecializedNative && @@ -4247,7 +4243,7 @@ TraceRecorder::snapshot(ExitType exitType) /* Now restore the the original pc (after which early returns are ok). */ if (resumeAfter) { MUST_FLOW_LABEL(restore_pc); - regs->pc = pc - cs.length; + regs.pc = pc - cs.length; } else { /* * If we take a snapshot on a goto, advance to the target address. This @@ -4489,7 +4485,7 @@ TraceRecorder::compile() ResetJIT(cx, traceMonitor, FR_DEEP_BAIL); return ARECORD_ABORTED; } - if (tree->maxNativeStackSlots >= MAX_NATIVE_STACK_SLOTS) { + if (tree->maxNativeStackSlots >= TraceNativeStorage::MAX_NATIVE_STACK_SLOTS) { debug_only_print0(LC_TMTracer, "Blacklist: excessive stack use.\n"); Blacklist((jsbytecode*)tree->ip); return ARECORD_STOP; @@ -4785,7 +4781,7 @@ class DefaultSlotMap : public SlotMap } JS_REQUIRES_STACK JS_ALWAYS_INLINE bool - visitStackSlots(Value *vp, size_t count, JSStackFrame* fp) + visitStackSlots(Value *vp, size_t count, StackFrame* fp) { for (size_t i = 0; i < count; i++) addSlot(&vp[i]); @@ -4793,7 +4789,7 @@ class DefaultSlotMap : public SlotMap } JS_REQUIRES_STACK JS_ALWAYS_INLINE bool - visitFrameObjPtr(void* p, JSStackFrame* fp) + visitFrameObjPtr(void* p, StackFrame* fp) { addSlot(getFrameObjPtrTraceType(p, fp)); return true; @@ -4865,7 +4861,7 @@ TraceRecorder::closeLoop() * to be in an imacro here and the opcode should be either JSOP_TRACE or, in * case this loop was blacklisted in the meantime, JSOP_NOTRACE. 
*/ - JS_ASSERT(*cx->regs->pc == JSOP_TRACE || *cx->regs->pc == JSOP_NOTRACE); + JS_ASSERT(*cx->regs().pc == JSOP_TRACE || *cx->regs().pc == JSOP_NOTRACE); JS_ASSERT(!cx->fp()->hasImacropc()); if (callDepth != 0) { @@ -5233,14 +5229,14 @@ class ClearSlotsVisitor : public SlotVisitorBase {} JS_ALWAYS_INLINE bool - visitStackSlots(Value *vp, size_t count, JSStackFrame *) { + visitStackSlots(Value *vp, size_t count, StackFrame *) { for (Value *vpend = vp + count; vp != vpend; ++vp) tracker.set(vp, NULL); return true; } JS_ALWAYS_INLINE bool - visitFrameObjPtr(void *p, JSStackFrame *) { + visitFrameObjPtr(void *p, StackFrame *) { tracker.set(p, NULL); return true; } @@ -5455,15 +5451,15 @@ TraceRecorder::checkTraceEnd(jsbytecode *pc) * pointer and pretend we have reached the loop header. */ if (pendingLoop) { - JS_ASSERT(!cx->fp()->hasImacropc() && (pc == cx->regs->pc || pc == cx->regs->pc + 1)); - JSFrameRegs orig = *cx->regs; + JS_ASSERT(!cx->fp()->hasImacropc() && (pc == cx->regs().pc || pc == cx->regs().pc + 1)); + FrameRegs orig = cx->regs(); - cx->regs->pc = (jsbytecode*)tree->ip; - cx->regs->sp = cx->fp()->base() + tree->spOffsetAtEntry; + cx->regs().pc = (jsbytecode*)tree->ip; + cx->regs().sp = cx->fp()->base() + tree->spOffsetAtEntry; JSContext* localcx = cx; AbortableRecordingStatus ars = closeLoop(); - *localcx->regs = orig; + localcx->regs() = orig; return ars; } @@ -5610,7 +5606,7 @@ SynthesizeFrame(JSContext* cx, const FrameInfo& fi, JSObject* callee) VOUCH_DOES_NOT_REQUIRE_STACK(); /* Assert that we have a correct sp distance from cx->fp()->slots in fi. */ - JSStackFrame* const fp = cx->fp(); + StackFrame* const fp = cx->fp(); JS_ASSERT_IF(!fi.imacpc, js_ReconstructStackDepth(cx, fp->script(), fi.pc) == uintN(fi.spdist - fp->numFixed())); @@ -5620,22 +5616,19 @@ SynthesizeFrame(JSContext* cx, const FrameInfo& fi, JSObject* callee) JSScript* newscript = newfun->script(); /* Fill in the prev-frame's sp. 
*/ - JSFrameRegs *regs = cx->regs; - regs->sp = fp->slots() + fi.spdist; - regs->pc = fi.pc; + FrameRegs ®s = cx->regs(); + regs.sp = fp->slots() + fi.spdist; + regs.pc = fi.pc; if (fi.imacpc) fp->setImacropc(fi.imacpc); /* Set argc/flags then mimic JSOP_CALL. */ uintN argc = fi.get_argc(); - uint32 flags = fi.is_constructing () - ? JSFRAME_CONSTRUCTING | JSFRAME_CONSTRUCTING - : 0; + uint32 flags = fi.is_constructing() ? StackFrame::CONSTRUCTING : 0; /* Get pointer to new/frame/slots, prepare arguments. */ - StackSpace &stack = cx->stack(); - JSStackFrame *newfp = stack.getInlineFrame(cx, regs->sp, argc, newfun, - newscript, &flags); + StackFrame *newfp = cx->stack.getInlineFrame(cx, regs.sp, argc, newfun, + newscript, &flags); /* Initialize frame; do not need to initialize locals. */ newfp->initCallFrame(cx, *callee, newfun, argc, flags); @@ -5651,18 +5644,18 @@ SynthesizeFrame(JSContext* cx, const FrameInfo& fi, JSObject* callee) /* These should be initialized by FlushNativeStackFrame. */ newfp->thisValue().setMagic(JS_THIS_POISON); - newfp->setScopeChainNoCallObj(*JSStackFrame::sInvalidScopeChain); + newfp->setScopeChainNoCallObj(*StackFrame::sInvalidScopeChain); #endif /* Officially push the frame. */ - stack.pushInlineFrame(cx, newscript, newfp, cx->regs); + cx->stack.pushInlineFrame(newscript, newfp, cx->regs()); /* Call object will be set by FlushNativeStackFrame. */ /* Call the debugger hook if present. */ JSInterpreterHook hook = cx->debugHooks->callHook; if (hook) { - newfp->setHookData(hook(cx, newfp, JS_TRUE, 0, + newfp->setHookData(hook(cx, Jsvalify(newfp), JS_TRUE, 0, cx->debugHooks->callHookData)); } } @@ -5838,11 +5831,11 @@ CreateBranchFragment(JSContext* cx, TraceMonitor* tm, TreeFragment* root, VMSide ? 
(++(tm->lastFragID)) : 0; ) - VMFragment* f = new (*tm->dataAlloc) VMFragment(cx->regs->pc verbose_only(, profFragID)); + VMFragment* f = new (*tm->dataAlloc) VMFragment(cx->regs().pc verbose_only(, profFragID)); debug_only_printf(LC_TMTreeVis, "TREEVIS CREATEBRANCH ROOT=%p FRAG=%p PC=%p FILE=\"%s\"" " LINE=%d ANCHOR=%p OFFS=%d\n", - (void*)root, (void*)f, (void*)cx->regs->pc, cx->fp()->script()->filename, + (void*)root, (void*)f, (void*)cx->regs().pc, cx->fp()->script()->filename, js_FramePCToLineNumber(cx, cx->fp()), (void*)anchor, FramePCOffset(cx, cx->fp())); verbose_only( tm->branches = new (*tm->dataAlloc) Seq(f, tm->branches); ) @@ -5899,7 +5892,7 @@ AttemptToExtendTree(JSContext* cx, TraceMonitor* tm, VMSideExit* anchor, VMSideE * might extend along separate paths (i.e. after the loop edge, and after a * return statement). */ - c->ip = cx->regs->pc; + c->ip = cx->regs().pc; JS_ASSERT(c->root == f); } @@ -5983,7 +5976,7 @@ TraceRecorder::recordLoopEdge(JSContext* cx, TraceRecorder* r, uintN& inlineCall JS_ASSERT(r->fragment && !r->fragment->lastIns); TreeFragment* root = r->fragment->root; - TreeFragment* first = LookupOrAddLoop(tm, cx->regs->pc, root->globalObj, + TreeFragment* first = LookupOrAddLoop(tm, cx->regs().pc, root->globalObj, root->globalShape, entryFrameArgc(cx)); /* @@ -6119,9 +6112,9 @@ TraceRecorder::attemptTreeCall(TreeFragment* f, uintN& inlineCallCount) case MUL_ZERO_EXIT: case OVERFLOW_EXIT: if (lr->exitType == MUL_ZERO_EXIT) - traceMonitor->oracle->markInstructionSlowZeroTest(cx->regs->pc); + traceMonitor->oracle->markInstructionSlowZeroTest(cx->regs().pc); else - traceMonitor->oracle->markInstructionUndemotable(cx->regs->pc); + traceMonitor->oracle->markInstructionUndemotable(cx->regs().pc); /* FALL THROUGH */ case BRANCH_EXIT: /* Abort recording the outer tree, extend the inner tree. 
*/ @@ -6177,7 +6170,7 @@ IsEntryTypeCompatible(const Value &v, JSValueType type) } static inline bool -IsFrameObjPtrTypeCompatible(void *p, JSStackFrame *fp, JSValueType type) +IsFrameObjPtrTypeCompatible(void *p, StackFrame *fp, JSValueType type) { debug_only_printf(LC_TMTracer, "%c/%c ", TypeToChar(type), (p == fp->addressOfScopeChain() || fp->hasArgsObj()) @@ -6231,7 +6224,7 @@ public: */ JS_REQUIRES_STACK JS_ALWAYS_INLINE bool - visitStackSlots(Value *vp, size_t count, JSStackFrame* fp) { + visitStackSlots(Value *vp, size_t count, StackFrame* fp) { for (size_t i = 0; i < count; ++i) { debug_only_printf(LC_TMTracer, "%s%u=", stackSlotKind(), unsigned(i)); if (!IsEntryTypeCompatible(*vp, *mTypeMap)) { @@ -6250,7 +6243,7 @@ public: } JS_REQUIRES_STACK JS_ALWAYS_INLINE bool - visitFrameObjPtr(void* p, JSStackFrame* fp) { + visitFrameObjPtr(void* p, StackFrame* fp) { debug_only_printf(LC_TMTracer, "%s%u=", stackSlotKind(), 0); if (!IsFrameObjPtrTypeCompatible(p, fp, *mTypeMap)) mOk = false; @@ -6321,7 +6314,7 @@ public: } JS_REQUIRES_STACK JS_ALWAYS_INLINE bool - visitStackSlots(Value *vp, size_t count, JSStackFrame* fp) { + visitStackSlots(Value *vp, size_t count, StackFrame* fp) { for (size_t i = 0; i < count; ++i) { if (!mOk) break; @@ -6331,7 +6324,7 @@ public: } JS_REQUIRES_STACK JS_ALWAYS_INLINE bool - visitFrameObjPtr(void* p, JSStackFrame *fp) { + visitFrameObjPtr(void* p, StackFrame *fp) { debug_only_printf(LC_TMTracer, "%s%d=", stackSlotKind(), 0); JS_ASSERT(*(uint8_t*)mTypeMap != 0xCD); return mOk = IsFrameObjPtrTypeCompatible(p, fp, *mTypeMap++); @@ -6412,8 +6405,8 @@ TracerState::TracerState(JSContext* cx, TraceMonitor* tm, TreeFragment* f, callstackBase(tm->storage->callstack()), sor(callstackBase), rp(callstackBase), - eor(callstackBase + JS_MIN(MAX_CALL_STACK_ENTRIES, - JS_MAX_INLINE_CALL_COUNT - inlineCallCount)), + eor(callstackBase + JS_MIN(TraceNativeStorage::MAX_CALL_STACK_ENTRIES, + StackSpace::MAX_INLINE_CALLS - inlineCallCount)), 
lastTreeExitGuard(NULL), lastTreeCallGuard(NULL), rpAtLastTreeCall(NULL), @@ -6442,7 +6435,7 @@ TracerState::TracerState(JSContext* cx, TraceMonitor* tm, TreeFragment* f, JS_ASSERT(JS_THREAD_DATA(cx)->profilingCompartment == NULL); JS_THREAD_DATA(cx)->onTraceCompartment = cx->compartment; - JS_ASSERT(eos == stackBase + MAX_NATIVE_STACK_SLOTS); + JS_ASSERT(eos == stackBase + TraceNativeStorage::MAX_NATIVE_STACK_SLOTS); JS_ASSERT(sp < eos); /* @@ -6450,15 +6443,15 @@ TracerState::TracerState(JSContext* cx, TraceMonitor* tm, TreeFragment* f, * EnterFrame. It is okay to have a 0-frame restriction since the JIT * might not need any frames. */ - JS_ASSERT(inlineCallCount <= JS_MAX_INLINE_CALL_COUNT); + JS_ASSERT(inlineCallCount <= StackSpace::MAX_INLINE_CALLS); #ifdef DEBUG /* * Cannot 0xCD-fill global frame since it may overwrite a bailed outer * ExecuteTree's 0xdeadbeefdeadbeef marker. */ - memset(tm->storage->stack(), 0xCD, MAX_NATIVE_STACK_SLOTS * sizeof(double)); - memset(tm->storage->callstack(), 0xCD, MAX_CALL_STACK_ENTRIES * sizeof(FrameInfo*)); + memset(tm->storage->stack(), 0xCD, TraceNativeStorage::MAX_NATIVE_STACK_SLOTS * sizeof(double)); + memset(tm->storage->callstack(), 0xCD, TraceNativeStorage::MAX_CALL_STACK_ENTRIES * sizeof(FrameInfo*)); #endif } @@ -6560,8 +6553,8 @@ ExecuteTree(JSContext* cx, TraceMonitor* tm, TreeFragment* f, uintN& inlineCallC #endif JS_ASSERT(f->root == f && f->code()); - if (!ScopeChainCheck(cx, f) || !cx->stack().ensureEnoughSpaceToEnterTrace() || - inlineCallCount + f->maxCallDepth > JS_MAX_INLINE_CALL_COUNT) { + if (!ScopeChainCheck(cx, f) || !cx->stack.space().ensureEnoughSpaceToEnterTrace() || + inlineCallCount + f->maxCallDepth > StackSpace::MAX_INLINE_CALLS) { *lrp = NULL; return true; } @@ -6621,7 +6614,7 @@ ExecuteTree(JSContext* cx, TraceMonitor* tm, TreeFragment* f, uintN& inlineCallC f->iters += iters; #ifdef DEBUG - JSStackFrame *fp = cx->fp(); + StackFrame *fp = cx->fp(); const char *prefix = ""; if (iters == 
LOOP_COUNT_MAX) prefix = ">"; @@ -6632,7 +6625,7 @@ ExecuteTree(JSContext* cx, TraceMonitor* tm, TreeFragment* f, uintN& inlineCallC getExitName(lr->exitType), fp->script()->filename, js_FramePCToLineNumber(cx, fp), - js_CodeName[fp->hasImacropc() ? *fp->imacropc() : *cx->regs->pc]); + js_CodeName[fp->hasImacropc() ? *fp->imacropc() : *cx->regs().pc]); #endif #ifdef JS_METHODJIT @@ -6752,7 +6745,7 @@ LeaveTree(TraceMonitor *tm, TracerState& state, VMSideExit* lr) * ExecuteTree. We are about to return to the interpreter. Adjust * the top stack frame to resume on the next op. */ - JSFrameRegs* regs = cx->regs; + FrameRegs* regs = &cx->regs(); JSOp op = (JSOp) *regs->pc; /* @@ -6811,17 +6804,17 @@ LeaveTree(TraceMonitor *tm, TracerState& state, VMSideExit* lr) * Flush the slots for cx->fp() (which will become cx->fp()->prev after * SynthesizeFrame). Since a frame's arguments (including callee * and thisv) are part of the frame, we only want to flush up to the - * next frame's arguments, so set cx->regs->sp to to not include said + * next frame's arguments, so set cx->regs().sp to to not include said * arguments. The upcoming call to SynthesizeFrame will reset regs->sp * to its correct value. */ - cx->regs->sp = cx->fp()->slots() + (fi->spdist - (2 + fi->get_argc())); + cx->regs().sp = cx->fp()->slots() + (fi->spdist - (2 + fi->get_argc())); int slots = FlushNativeStackFrame(cx, 0 /* callDepth */, fi->get_typemap(), stack); /* Finish initializing cx->fp() and push a new cx->fp(). 
*/ SynthesizeFrame(cx, *fi, callee); #ifdef DEBUG - JSStackFrame* fp = cx->fp(); + StackFrame* fp = cx->fp(); debug_only_printf(LC_TMTracer, "synthesized deep frame for %s:%u@%u, slots=%d, fi=%p\n", fp->script()->filename, @@ -6856,7 +6849,7 @@ LeaveTree(TraceMonitor *tm, TracerState& state, VMSideExit* lr) SynthesizeFrame(cx, *callstack[n], callee); ++*state.inlineCallCountp; #ifdef DEBUG - JSStackFrame* fp = cx->fp(); + StackFrame* fp = cx->fp(); debug_only_printf(LC_TMTracer, "synthesized shallow frame for %s:%u@%u\n", fp->script()->filename, js_FramePCToLineNumber(cx, fp), @@ -6869,20 +6862,20 @@ LeaveTree(TraceMonitor *tm, TracerState& state, VMSideExit* lr) * entered into). These are our final values for sp and pc since * SynthesizeFrame has already taken care of all frames in between. */ - JSStackFrame* const fp = cx->fp(); + StackFrame* const fp = cx->fp(); /* * If we are not exiting from an inlined frame, the state->sp is spbase. * Otherwise spbase is whatever slots frames around us consume. */ - cx->regs->pc = innermost->pc; + cx->regs().pc = innermost->pc; if (innermost->imacpc) fp->setImacropc(innermost->imacpc); else fp->clearImacropc(); /* - * Set cx->regs->regs for the top frame. Since the top frame does not have a + * Set cx->regs().regs for the top frame. Since the top frame does not have a * FrameInfo (a FrameInfo is only pushed for calls), we basically need to * compute the offset from fp->slots() to the top of the stack based on the * number of native slots allocated for this function. @@ -6897,12 +6890,12 @@ LeaveTree(TraceMonitor *tm, TracerState& state, VMSideExit* lr) /* A frame's native slots includes args and frame ptrs, so strip them off. */ slotOffset -= NumSlotsBeforeFixed(fp); } - cx->regs->sp = fp->slots() + slotOffset; + cx->regs().sp = fp->slots() + slotOffset; /* Assert that we computed sp correctly. 
*/ JS_ASSERT_IF(!fp->hasImacropc(), fp->slots() + fp->numFixed() + - js_ReconstructStackDepth(cx, fp->script(), cx->regs->pc) == cx->regs->sp); + js_ReconstructStackDepth(cx, fp->script(), cx->regs().pc) == cx->regs().sp); #ifdef EXECUTE_TREE_TIMER uint64 cycles = rdtsc() - state.startTime; @@ -6915,10 +6908,10 @@ LeaveTree(TraceMonitor *tm, TracerState& state, VMSideExit* lr) fp->script()->filename, js_FramePCToLineNumber(cx, fp), FramePCOffset(cx, fp), - js_CodeName[fp->hasImacropc() ? *fp->imacropc() : *cx->regs->pc], + js_CodeName[fp->hasImacropc() ? *fp->imacropc() : *cx->regs().pc], (void*)lr, getExitName(lr->exitType), - (long long int)(cx->regs->sp - fp->base()), + (long long int)(cx->regs().sp - fp->base()), calldepth, (unsigned long long int)cycles); @@ -6961,7 +6954,7 @@ LeaveTree(TraceMonitor *tm, TracerState& state, VMSideExit* lr) } /* Write back interned globals. */ - JS_ASSERT(state.eos == state.stackBase + MAX_NATIVE_STACK_SLOTS); + JS_ASSERT(state.eos == state.stackBase + TraceNativeStorage::MAX_NATIVE_STACK_SLOTS); JSObject* globalObj = outermostTree->globalObj; FlushNativeGlobalFrame(cx, globalObj, state.eos, ngslots, gslots, globalTypeMap); @@ -6997,7 +6990,7 @@ TraceRecorder::assertInsideLoop() if (callDepth > 0) return; - jsbytecode *pc = cx->regs->fp->hasImacropc() ? cx->regs->fp->imacropc() : cx->regs->pc; + jsbytecode *pc = cx->fp()->hasImacropc() ? cx->fp()->imacropc() : cx->regs().pc; jsbytecode *beg = (jsbytecode *)tree->ip; jsbytecode *end = GetLoopBottom(cx, beg); @@ -7020,7 +7013,7 @@ RecordLoopEdge(JSContext* cx, TraceMonitor* tm, uintN& inlineCallCount) /* Is the recorder currently active? 
*/ if (tm->recorder) { tm->recorder->assertInsideLoop(); - jsbytecode* pc = cx->regs->pc; + jsbytecode* pc = cx->regs().pc; if (pc == tm->recorder->tree->ip) { AbortableRecordingStatus status = tm->recorder->closeLoop(); if (status != ARECORD_COMPLETED) { @@ -7045,7 +7038,7 @@ RecordLoopEdge(JSContext* cx, TraceMonitor* tm, uintN& inlineCallCount) * interpreter and do not attempt to trigger or record a new tree at * this location. */ - if (pc != cx->regs->pc) { + if (pc != cx->regs().pc) { #ifdef MOZ_TRACEVIS tvso.r = R_INNER_SIDE_EXIT; #endif @@ -7064,7 +7057,7 @@ RecordLoopEdge(JSContext* cx, TraceMonitor* tm, uintN& inlineCallCount) SlotList* globalSlots = NULL; if (!CheckGlobalObjectShape(cx, tm, globalObj, &globalShape, &globalSlots)) { - Backoff(tm, cx->regs->pc); + Backoff(tm, cx->regs().pc); return MONITOR_NOT_RECORDING; } @@ -7076,7 +7069,7 @@ RecordLoopEdge(JSContext* cx, TraceMonitor* tm, uintN& inlineCallCount) return MONITOR_NOT_RECORDING; } - jsbytecode* pc = cx->regs->pc; + jsbytecode* pc = cx->regs().pc; uint32 argc = entryFrameArgc(cx); TreeFragment* f = LookupOrAddLoop(tm, pc, globalObj, globalShape, argc); @@ -7168,9 +7161,9 @@ RecordLoopEdge(JSContext* cx, TraceMonitor* tm, uintN& inlineCallCount) case MUL_ZERO_EXIT: case OVERFLOW_EXIT: if (lr->exitType == MUL_ZERO_EXIT) - tm->oracle->markInstructionSlowZeroTest(cx->regs->pc); + tm->oracle->markInstructionSlowZeroTest(cx->regs().pc); else - tm->oracle->markInstructionUndemotable(cx->regs->pc); + tm->oracle->markInstructionUndemotable(cx->regs().pc); /* FALL THROUGH */ case BRANCH_EXIT: rv = AttemptToExtendTree(cx, tm, lr, NULL, NULL, NULL @@ -7284,9 +7277,9 @@ TraceRecorder::monitorRecording(JSOp op) INIT_SPRINTER(cx, &sprinter, &cx->tempPool, 0); debug_only_print0(LC_TMRecorder, "\n"); - js_Disassemble1(cx, cx->fp()->script(), cx->regs->pc, + js_Disassemble1(cx, cx->fp()->script(), cx->regs().pc, cx->fp()->hasImacropc() - ? 0 : cx->regs->pc - cx->fp()->script()->code, + ? 
0 : cx->regs().pc - cx->fp()->script()->code, !cx->fp()->hasImacropc(), &sprinter); fprintf(stdout, "%s", sprinter.base); @@ -7923,7 +7916,7 @@ TraceRecorder::varval(unsigned n) const JS_REQUIRES_STACK Value& TraceRecorder::stackval(int n) const { - return cx->regs->sp[n]; + return cx->regs().sp[n]; } JS_REQUIRES_STACK void @@ -7973,7 +7966,7 @@ TraceRecorder::entryScopeChain() const JS_REQUIRES_STACK LIns* TraceRecorder::entryFrameIns() const { - return w.ldpFrameFp(w.ldpContextField(regs)); + return w.ldpFrameFp(w.ldpContextRegs(cx_ins)); } /* @@ -7981,11 +7974,11 @@ TraceRecorder::entryFrameIns() const * trace. |depthp| is an optional outparam: if it is non-null, it will be * filled in with the depth of the call object's frame relevant to cx->fp(). */ -JS_REQUIRES_STACK JSStackFrame* +JS_REQUIRES_STACK StackFrame* TraceRecorder::frameIfInRange(JSObject* obj, unsigned* depthp) const { - JSStackFrame* ofp = (JSStackFrame*) obj->getPrivate(); - JSStackFrame* fp = cx->fp(); + StackFrame* ofp = (StackFrame*) obj->getPrivate(); + StackFrame* fp = cx->fp(); for (unsigned depth = 0; depth <= callDepth; ++depth) { if (fp == ofp) { if (depthp) @@ -8021,7 +8014,7 @@ TraceRecorder::scopeChainProp(JSObject* chainHead, Value*& vp, LIns*& ins, NameR TraceMonitor &localtm = *traceMonitor; - JSAtom* atom = atoms[GET_INDEX(cx->regs->pc)]; + JSAtom* atom = atoms[GET_INDEX(cx->regs().pc)]; JSObject* obj2; JSProperty* prop; JSObject *obj = chainHead; @@ -8087,7 +8080,7 @@ TraceRecorder::callProp(JSObject* obj, JSProperty* prop, jsid id, Value*& vp, { Shape *shape = (Shape*) prop; - JSOp op = JSOp(*cx->regs->pc); + JSOp op = JSOp(*cx->regs().pc); uint32 setflags = (js_CodeSpec[op].format & (JOF_SET | JOF_INCDEC | JOF_FOR)); if (setflags && !shape->writable()) RETURN_STOP("writing to a read-only property"); @@ -8095,7 +8088,7 @@ TraceRecorder::callProp(JSObject* obj, JSProperty* prop, jsid id, Value*& vp, uintN slot = uint16(shape->shortid); vp = NULL; - JSStackFrame* cfp = 
(JSStackFrame*) obj->getPrivate(); + StackFrame* cfp = (StackFrame*) obj->getPrivate(); if (cfp) { if (shape->getterOp() == GetCallArg) { JS_ASSERT(slot < cfp->numFormalArgs()); @@ -8115,7 +8108,7 @@ TraceRecorder::callProp(JSObject* obj, JSProperty* prop, jsid id, Value*& vp, if (frameIfInRange(obj)) { // At this point we are guaranteed to be looking at an active call oject - // whose properties are stored in the corresponding JSStackFrame. + // whose properties are stored in the corresponding StackFrame. ins = get(vp); nr.tracked = true; return RECORD_CONTINUE; @@ -8276,7 +8269,7 @@ TraceRecorder::tryToDemote(LOpcode op, jsdouble v0, jsdouble v1, LIns* s0, LIns* * z = d0 * d1 with 0 <= (d0|d1) <= 0xffff guarantees z <= fffe0001). */ - if (!oracle || oracle->isInstructionUndemotable(cx->regs->pc) || + if (!oracle || oracle->isInstructionUndemotable(cx->regs().pc) || !IsPromotedInt32(s0) || !IsPromotedInt32(s1)) { undemotable: @@ -8362,7 +8355,7 @@ TraceRecorder::tryToDemote(LOpcode op, jsdouble v0, jsdouble v1, LIns* s0, LIns* * if either argument is negative. If this ever fails, we recompile with * a stronger, but slower, guard. 
*/ - if (v0 < 0.0 || v1 < 0.0 || oracle->isInstructionSlowZeroTest(cx->regs->pc)) { + if (v0 < 0.0 || v1 < 0.0 || oracle->isInstructionSlowZeroTest(cx->regs().pc)) { if (!exit) exit = snapshot(OVERFLOW_EXIT); @@ -8620,11 +8613,11 @@ TraceRecorder::callImacro(jsbytecode* imacro) JS_REQUIRES_STACK RecordingStatus TraceRecorder::callImacroInfallibly(jsbytecode* imacro) { - JSStackFrame* fp = cx->fp(); + StackFrame* fp = cx->fp(); JS_ASSERT(!fp->hasImacropc()); - JSFrameRegs* regs = cx->regs; - fp->setImacropc(regs->pc); - regs->pc = imacro; + FrameRegs& regs = cx->regs(); + fp->setImacropc(regs.pc); + regs.pc = imacro; updateAtoms(); return RECORD_IMACRO; } @@ -8659,7 +8652,7 @@ TraceRecorder::ifop() return ARECORD_STOP; } - jsbytecode* pc = cx->regs->pc; + jsbytecode* pc = cx->regs().pc; emitIf(pc, cond, x); return checkTraceEnd(pc); } @@ -8721,7 +8714,7 @@ TraceRecorder::inc(const Value &v, LIns*& v_ins, Value &v_out, jsint incr, bool LIns* v_after; CHECK_STATUS(incHelper(v, v_ins, v_out, v_after, incr)); - const JSCodeSpec& cs = js_CodeSpec[*cx->regs->pc]; + const JSCodeSpec& cs = js_CodeSpec[*cx->regs().pc]; JS_ASSERT(cs.ndefs == 1); stack(-cs.nuses, pre ? v_after : v_ins); v_ins = v_after; @@ -9060,7 +9053,7 @@ TraceRecorder::equalityHelper(Value& l, Value& r, LIns* l_ins, LIns* r_ins, cond = !cond; } - jsbytecode* pc = cx->regs->pc; + jsbytecode* pc = cx->regs().pc; /* * Don't guard if the same path is always taken. If it isn't, we have to @@ -9195,7 +9188,7 @@ TraceRecorder::relational(LOpcode op, bool tryBranchAfterCond) } x = w.ins2(op, l_ins, r_ins); - jsbytecode* pc = cx->regs->pc; + jsbytecode* pc = cx->regs().pc; /* * Don't guard if the same path is always taken. 
If it isn't, we have to @@ -9421,7 +9414,7 @@ TraceRecorder::forgetGuardedShapes() JS_REQUIRES_STACK AbortableRecordingStatus TraceRecorder::test_property_cache(JSObject* obj, LIns* obj_ins, JSObject*& obj2, PCVal& pcval) { - jsbytecode* pc = cx->regs->pc; + jsbytecode* pc = cx->regs().pc; JS_ASSERT(*pc != JSOP_INITPROP && *pc != JSOP_INITMETHOD && *pc != JSOP_SETNAME && *pc != JSOP_SETPROP && *pc != JSOP_SETMETHOD); @@ -9509,7 +9502,7 @@ TraceRecorder::test_property_cache(JSObject* obj, LIns* obj_ins, JSObject*& obj2 // enough to share mutable objects on the scope or proto chain, but we // don't care about such insane embeddings. Anyway, the (scope, proto) // entry->vcap coordinates must reach obj2 from aobj at this point. - JS_ASSERT(cx->thread->data.requestDepth); + JS_ASSERT(cx->thread()->data.requestDepth); #endif return InjectStatus(guardPropertyCacheHit(obj_ins, aobj, obj2, entry, pcval)); @@ -9534,7 +9527,7 @@ TraceRecorder::guardPropertyCacheHit(LIns* obj_ins, if (entry->adding()) RETURN_STOP("adding a property to the global object"); - JSOp op = js_GetOpcode(cx, cx->fp()->script(), cx->regs->pc); + JSOp op = js_GetOpcode(cx, cx->fp()->script(), cx->regs().pc); if (JOF_OPMODE(op) != JOF_NAME) { guard(true, w.name(w.eqp(obj_ins, w.immpObjGC(globalObj)), "guard_global"), @@ -9945,7 +9938,7 @@ TraceRecorder::unbox_int_id(LIns *id_ins) JS_REQUIRES_STACK RecordingStatus TraceRecorder::getThis(LIns*& this_ins) { - JSStackFrame *fp = cx->fp(); + StackFrame *fp = cx->fp(); if (fp->isGlobalFrame()) { // Top-level code. 
It is an invariant of the interpreter that fp->thisv @@ -10131,7 +10124,7 @@ TraceRecorder::clearReturningFrameFromNativeTracker() */ ClearSlotsVisitor visitor(nativeFrameTracker); VisitStackSlots(visitor, cx, 0); - Value *vp = cx->regs->sp; + Value *vp = cx->regs().sp; Value *vpend = cx->fp()->slots() + cx->fp()->script()->nslots; for (; vp < vpend; ++vp) nativeFrameTracker.set(vp, NULL); @@ -10158,7 +10151,7 @@ class BoxArg JS_REQUIRES_STACK AbortableRecordingStatus TraceRecorder::putActivationObjects() { - JSStackFrame *const fp = cx->fp(); + StackFrame *const fp = cx->fp(); bool have_args = fp->hasArgsObj() && !fp->argsObj().isStrictArguments(); bool have_call = fp->isFunctionFrame() && fp->fun()->isHeavyweight(); @@ -10173,7 +10166,7 @@ TraceRecorder::putActivationObjects() * abort. (In the case where the record-time property name is not * "arguments" but a later run-time property name is, we wouldn't have * emitted the call to js_PutArgumentsOnTrace(), and js_GetArgsValue() - * will deep bail asking for the top JSStackFrame.) + * will deep bail asking for the top StackFrame.) 
*/ RETURN_STOP_A("dodgy arguments access"); } @@ -10217,7 +10210,7 @@ TraceRecorder::putActivationObjects() LIns* scopeChain_ins = getFrameObjPtr(fp->addressOfScopeChain()); LIns* args[] = { slots_ins, w.nameImmi(nslots), args_ins, - w.nameImmi(fp->numFormalArgs()), scopeChain_ins, cx_ins }; + w.nameImmi(fp->numFormalArgs()), scopeChain_ins }; w.call(&js_PutCallObjectOnTrace_ci, args); } @@ -10227,7 +10220,7 @@ TraceRecorder::putActivationObjects() JS_REQUIRES_STACK AbortableRecordingStatus TraceRecorder::record_EnterFrame() { - JSStackFrame* const fp = cx->fp(); + StackFrame* const fp = cx->fp(); if (++callDepth >= MAX_CALLDEPTH) RETURN_STOP_A("exceeded maximum call depth"); @@ -10349,10 +10342,10 @@ TraceRecorder::record_EnterFrame() JS_REQUIRES_STACK AbortableRecordingStatus TraceRecorder::record_LeaveFrame() { - debug_only_stmt(JSStackFrame *fp = cx->fp();) + debug_only_stmt(StackFrame *fp = cx->fp();) JS_ASSERT(js_CodeSpec[js_GetOpcode(cx, fp->script(), - cx->regs->pc)].length == JSOP_CALL_LENGTH); + cx->regs().pc)].length == JSOP_CALL_LENGTH); if (callDepth-- <= 0) RETURN_STOP_A("returned out of a loop we started tracing"); @@ -10381,7 +10374,7 @@ TraceRecorder::record_JSOP_POPV() // trace JSOP_EVAL or leaving the frame where tracing started. LIns *fp_ins = entryFrameIns(); box_value_into(rval, get(&rval), StackFrameAddress(fp_ins, - JSStackFrame::offsetOfReturnValue())); + StackFrame::offsetOfReturnValue())); return ARECORD_CONTINUE; } @@ -10431,7 +10424,7 @@ TraceRecorder::record_JSOP_RETURN() /* If we inlined this function call, make the return value available to the caller code. */ Value& rval = stackval(-1); - JSStackFrame *fp = cx->fp(); + StackFrame *fp = cx->fp(); if (fp->isConstructing() && rval.isPrimitive()) { rval_ins = get(&fp->thisValue()); } else { @@ -10456,7 +10449,7 @@ TraceRecorder::record_JSOP_GOTO() * generate an always-taken loop exit guard. For other downward gotos * (like if/else) continue recording. 
*/ - jssrcnote* sn = js_GetSrcNote(cx->fp()->script(), cx->regs->pc); + jssrcnote* sn = js_GetSrcNote(cx->fp()->script(), cx->regs().pc); if (sn) { if (SN_TYPE(sn) == SRC_BREAK) { @@ -10477,7 +10470,7 @@ TraceRecorder::record_JSOP_GOTO() JS_REQUIRES_STACK AbortableRecordingStatus TraceRecorder::record_JSOP_IFEQ() { - trackCfgMerges(cx->regs->pc); + trackCfgMerges(cx->regs().pc); return ifop(); } @@ -10503,7 +10496,7 @@ TraceRecorder::newArguments(LIns* callee_ins) JS_REQUIRES_STACK AbortableRecordingStatus TraceRecorder::record_JSOP_ARGUMENTS() { - JSStackFrame* const fp = cx->fp(); + StackFrame* const fp = cx->fp(); /* In an eval, 'arguments' will be a BINDNAME, which we don't trace. */ JS_ASSERT(!fp->isEvalFrame()); @@ -10578,8 +10571,8 @@ TraceRecorder::record_JSOP_SWAP() JS_REQUIRES_STACK AbortableRecordingStatus TraceRecorder::record_JSOP_PICK() { - Value* sp = cx->regs->sp; - jsint n = cx->regs->pc[1]; + Value* sp = cx->regs().sp; + jsint n = cx->regs().pc[1]; JS_ASSERT(sp - (n+1) >= cx->fp()->base()); LIns* top = get(sp - (n+1)); for (jsint i = 0; i < n; ++i) @@ -10767,7 +10760,7 @@ TraceRecorder::record_JSOP_NEG() * we're not a double that's zero. */ if (oracle && - !oracle->isInstructionUndemotable(cx->regs->pc) && + !oracle->isInstructionUndemotable(cx->regs().pc) && IsPromotedInt32(a) && (!v.isInt32() || v.toInt32() != 0) && (!v.isDouble() || v.toDouble() != 0) && @@ -11118,8 +11111,8 @@ JS_REQUIRES_STACK RecordingStatus TraceRecorder::callSpecializedNative(JSNativeTraceInfo *trcinfo, uintN argc, bool constructing) { - JSStackFrame* const fp = cx->fp(); - jsbytecode *pc = cx->regs->pc; + StackFrame* const fp = cx->fp(); + jsbytecode *pc = cx->regs().pc; Value& fval = stackval(0 - (2 + argc)); Value& tval = stackval(0 - (1 + argc)); @@ -11368,7 +11361,7 @@ TraceRecorder::callNative(uintN argc, JSOp mode) * the result array, which can be expensive. This requires * that RegExp.prototype.test() hasn't been changed; we check this. 
*/ - if (!CallResultEscapes(cx->regs->pc)) { + if (!CallResultEscapes(cx->regs().pc)) { JSObject* proto; jsid id = ATOM_TO_JSID(cx->runtime->atomState.testAtom); /* Get RegExp.prototype.test() and check it hasn't been changed. */ @@ -11608,7 +11601,7 @@ TraceRecorder::functionCall(uintN argc, JSOp mode) JS_REQUIRES_STACK AbortableRecordingStatus TraceRecorder::record_JSOP_NEW() { - uintN argc = GET_ARGC(cx->regs->pc); + uintN argc = GET_ARGC(cx->regs().pc); cx->assertValidStackDepth(argc + 2); return InjectStatus(functionCall(argc, JSOP_NEW)); } @@ -11675,7 +11668,7 @@ TraceRecorder::record_JSOP_DELPROP() if (&lval.toObject() == globalObj) RETURN_STOP_A("JSOP_DELPROP on global property"); - JSAtom* atom = atoms[GET_INDEX(cx->regs->pc)]; + JSAtom* atom = atoms[GET_INDEX(cx->regs().pc)]; enterDeepBailCall(); LIns* args[] = { strictModeCode_ins, w.immpAtomGC(atom), get(&lval), cx_ins }; @@ -12085,7 +12078,7 @@ TraceRecorder::record_AddProperty(JSObject *obj) // On trace, call js_Add{,Atom}Property to do the dirty work. LIns* args[] = { w.immpShapeGC(shape), obj_ins, cx_ins }; - jsbytecode op = *cx->regs->pc; + jsbytecode op = *cx->regs().pc; bool isDefinitelyAtom = (op == JSOP_SETPROP); const CallInfo *ci = isDefinitelyAtom ? &js_AddAtomProperty_ci : &js_AddProperty_ci; LIns* ok_ins = w.call(ci, args); @@ -12125,7 +12118,7 @@ TraceRecorder::setCallProp(JSObject *callobj, LIns *callobj_ins, const Shape *sh LIns *v_ins, const Value &v) { // Set variables in on-trace-stack call objects by updating the tracker. - JSStackFrame *fp = frameIfInRange(callobj); + StackFrame *fp = frameIfInRange(callobj); if (fp) { if (shape->setterOp() == SetCallArg) { JS_ASSERT(shape->hasShortID()); @@ -12171,8 +12164,8 @@ TraceRecorder::setCallProp(JSObject *callobj, LIns *callobj_ins, const Shape *sh return RECORD_CONTINUE; } - // This is the hard case: we have a JSStackFrame private, but it's not in - // range. 
During trace execution we may or may not have a JSStackFrame + // This is the hard case: we have a StackFrame private, but it's not in + // range. During trace execution we may or may not have a StackFrame // anymore. Call the standard builtins, which handle that situation. // Set variables in off-trace-stack call objects by calling standard builtins. @@ -12214,7 +12207,7 @@ TraceRecorder::setProperty(JSObject* obj, LIns* obj_ins, const Value &v, LIns* v { *deferredp = false; - JSAtom *atom = atoms[GET_INDEX(cx->regs->pc)]; + JSAtom *atom = atoms[GET_INDEX(cx->regs().pc)]; jsid id = ATOM_TO_JSID(atom); if (obj->getOps()->setProperty) @@ -12250,7 +12243,7 @@ TraceRecorder::setProperty(JSObject* obj, LIns* obj_ins, const Value &v, LIns* v // Handle setting an existing own property. if (pobj == obj) { - if (*cx->regs->pc == JSOP_SETMETHOD) { + if (*cx->regs().pc == JSOP_SETMETHOD) { if (shape->isMethod() && shape->methodObject() == v.toObject()) return RECORD_CONTINUE; RETURN_STOP("setmethod: property exists"); @@ -12329,7 +12322,7 @@ TraceRecorder::recordInitPropertyOp(jsbytecode op) Value& v = stackval(-1); LIns* v_ins = get(&v); - JSAtom* atom = atoms[GET_INDEX(cx->regs->pc)]; + JSAtom* atom = atoms[GET_INDEX(cx->regs().pc)]; jsid id = js_CheckForStringIndex(ATOM_TO_JSID(atom)); // If obj already has this property (because JSOP_NEWOBJECT already set its @@ -12403,7 +12396,7 @@ TraceRecorder::finishGetProp(LIns* obj_ins, LIns* vp_ins, LIns* ok_ins, Value* o JS_ASSERT(vp_ins->isop(LIR_allocp)); LIns* result_ins = w.lddAlloc(vp_ins); set(outp, result_ins); - if (js_CodeSpec[*cx->regs->pc].format & JOF_CALLOP) + if (js_CodeSpec[*cx->regs().pc].format & JOF_CALLOP) set(outp + 1, obj_ins); // We need to guard on ok_ins, but this requires a snapshot of the state @@ -12630,7 +12623,7 @@ TraceRecorder::getPropertyById(LIns* obj_ins, Value* outp) { // Find the atom. 
JSAtom* atom; - jsbytecode* pc = cx->regs->pc; + jsbytecode* pc = cx->regs().pc; const JSCodeSpec& cs = js_CodeSpec[*pc]; if (*pc == JSOP_LENGTH) { atom = cx->runtime->atomState.lengthAtom; @@ -12713,8 +12706,8 @@ TraceRecorder::getPropertyWithScriptGetter(JSObject *obj, LIns* obj_ins, const S // the interpreter state and the tracker in the same way. This adjustment // is noted in imacros.jsasm with .fixup tags. Value getter = shape->getterValue(); - Value*& sp = cx->regs->sp; - switch (*cx->regs->pc) { + Value*& sp = cx->regs().sp; + switch (*cx->regs().pc) { case JSOP_GETPROP: sp++; sp[-1] = sp[-2]; @@ -12840,7 +12833,7 @@ TraceRecorder::guardNotHole(LIns *argsobj_ins, LIns *idx_ins) JS_REQUIRES_STACK AbortableRecordingStatus TraceRecorder::record_JSOP_GETELEM() { - bool call = *cx->regs->pc == JSOP_CALLELEM; + bool call = *cx->regs().pc == JSOP_CALLELEM; Value& idx = stackval(-1); Value& lval = stackval(-2); @@ -12888,7 +12881,7 @@ TraceRecorder::record_JSOP_GETELEM() // Only trace reading arguments out of active, tracked frame unsigned depth; - JSStackFrame *afp = guardArguments(obj, obj_ins, &depth); + StackFrame *afp = guardArguments(obj, obj_ins, &depth); if (afp) { Value* vp = &afp->canonicalActualArg(int_idx); if (idx_ins->isImmD()) { @@ -13134,7 +13127,7 @@ TraceRecorder::setElem(int lval_spindex, int idx_spindex, int v_spindex) if (!idx.isPrimitive()) RETURN_STOP_A("non-primitive index"); CHECK_STATUS_A(initOrSetPropertyByName(obj_ins, &idx, &v, - *cx->regs->pc == JSOP_INITELEM)); + *cx->regs().pc == JSOP_INITELEM)); } else if (OkToTraceTypedArrays && js_IsTypedArray(obj)) { // Fast path: assigning to element of typed array. 
VMSideExit* branchExit = snapshot(BRANCH_EXIT); @@ -13240,7 +13233,7 @@ TraceRecorder::setElem(int lval_spindex, int idx_spindex, int v_spindex) } } else if (idx.toInt32() < 0 || !obj->isDenseArray()) { CHECK_STATUS_A(initOrSetPropertyByIndex(obj_ins, idx_ins, &v, - *cx->regs->pc == JSOP_INITELEM)); + *cx->regs().pc == JSOP_INITELEM)); } else { // Fast path: assigning to element of dense array. VMSideExit* branchExit = snapshot(BRANCH_EXIT); @@ -13308,7 +13301,7 @@ TraceRecorder::setElem(int lval_spindex, int idx_spindex, int v_spindex) box_value_into(v, v_ins, dslotAddr); } - jsbytecode* pc = cx->regs->pc; + jsbytecode* pc = cx->regs().pc; if (*pc == JSOP_SETELEM && pc[JSOP_SETELEM_LENGTH] != JSOP_POP) set(&lval, v_ins); @@ -13420,7 +13413,7 @@ TraceRecorder::upvar(JSScript* script, JSUpvarArray* uva, uintN index, Value& v) */ uint32 level = script->staticLevel - cookie.level(); uint32 cookieSlot = cookie.slot(); - JSStackFrame* fp = cx->findFrameAtLevel(level); + StackFrame* fp = cx->stack.findFrameAtLevel(level); const CallInfo* ci; int32 slot; if (!fp->isFunctionFrame() || fp->isEvalFrame()) { @@ -13489,7 +13482,7 @@ TraceRecorder::record_JSOP_GETFCSLOT() LIns* upvars_ins = w.getObjPrivatizedSlot(callee_ins, JSObject::JSSLOT_FLAT_CLOSURE_UPVARS); - unsigned index = GET_UINT16(cx->regs->pc); + unsigned index = GET_UINT16(cx->regs().pc); LIns *v_ins = unbox_value(callee.getFlatClosureUpvar(index), FCSlotsAddress(upvars_ins, index), snapshot(BRANCH_EXIT)); @@ -13531,7 +13524,7 @@ TraceRecorder::guardCallee(Value& callee) * * As long as we guard on parent scope, we are guaranteed when recording * variable accesses for a Call object having no private data that we can - * emit code that avoids checking for an active JSStackFrame for the Call + * emit code that avoids checking for an active StackFrame for the Call * object (which would hold fresh variable values -- the Call object's * slots would be stale until the stack frame is popped). 
This is because * Call objects can't pick up a new stack frame in their private slot once @@ -13570,12 +13563,12 @@ TraceRecorder::guardCallee(Value& callee) * the current trace and is guaranteed to refer to the same frame on trace for * all later executions. */ -JS_REQUIRES_STACK JSStackFrame * +JS_REQUIRES_STACK StackFrame * TraceRecorder::guardArguments(JSObject *obj, LIns* obj_ins, unsigned *depthp) { JS_ASSERT(obj->isArguments()); - JSStackFrame *afp = frameIfInRange(obj, depthp); + StackFrame *afp = frameIfInRange(obj, depthp); if (!afp) return NULL; @@ -13645,7 +13638,7 @@ TraceRecorder::interpretedFunctionCall(Value& fval, JSFunction* fun, uintN argc, if (fval.toObject().getGlobal() != globalObj) RETURN_STOP("JSOP_CALL or JSOP_NEW crosses global scopes"); - JSStackFrame* const fp = cx->fp(); + StackFrame* const fp = cx->fp(); if (constructing) { LIns* thisobj_ins; @@ -13665,9 +13658,9 @@ TraceRecorder::interpretedFunctionCall(Value& fval, JSFunction* fun, uintN argc, JS_ASSERT(argc < FrameInfo::CONSTRUCTING_FLAG); tree->gcthings.addUnique(fval); - fi->pc = cx->regs->pc; + fi->pc = cx->regs().pc; fi->imacpc = fp->maybeImacropc(); - fi->spdist = cx->regs->sp - fp->slots(); + fi->spdist = cx->regs().sp - fp->slots(); fi->set_argc(uint16(argc), constructing); fi->callerHeight = stackSlots - (2 + argc); fi->callerArgc = fp->hasArgs() ? 
fp->numActualArgs() : 0; @@ -13696,7 +13689,7 @@ TraceRecorder::interpretedFunctionCall(Value& fval, JSFunction* fun, uintN argc, * We implement JSOP_FUNAPPLY/JSOP_FUNCALL using imacros */ static inline JSOp -GetCallMode(JSStackFrame *fp) +GetCallMode(StackFrame *fp) { if (fp->hasImacropc()) { JSOp op = (JSOp) *fp->imacropc(); @@ -13709,7 +13702,7 @@ GetCallMode(JSStackFrame *fp) JS_REQUIRES_STACK AbortableRecordingStatus TraceRecorder::record_JSOP_CALL() { - uintN argc = GET_ARGC(cx->regs->pc); + uintN argc = GET_ARGC(cx->regs().pc); cx->assertValidStackDepth(argc + 2); return InjectStatus(functionCall(argc, GetCallMode(cx->fp()))); } @@ -13747,11 +13740,11 @@ TraceRecorder::record_JSOP_FUNCALL() JS_REQUIRES_STACK AbortableRecordingStatus TraceRecorder::record_JSOP_FUNAPPLY() { - jsbytecode *pc = cx->regs->pc; + jsbytecode *pc = cx->regs().pc; uintN argc = GET_ARGC(pc); cx->assertValidStackDepth(argc + 2); - Value* vp = cx->regs->sp - (argc + 2); + Value* vp = cx->regs().sp - (argc + 2); jsuint length = 0; JSObject* aobj = NULL; LIns* aobj_ins = NULL; @@ -13805,7 +13798,7 @@ TraceRecorder::record_JSOP_FUNAPPLY() BRANCH_EXIT); } else if (aobj->isArguments()) { unsigned depth; - JSStackFrame *afp = guardArguments(aobj, aobj_ins, &depth); + StackFrame *afp = guardArguments(aobj, aobj_ins, &depth); if (!afp) RETURN_STOP_A("can't reach arguments object's frame"); if (aobj->isArgsLengthOverridden()) @@ -13836,7 +13829,7 @@ TraceRecorder::record_NativeCallComplete() #ifdef DEBUG JS_ASSERT(pendingSpecializedNative); - jsbytecode* pc = cx->regs->pc; + jsbytecode* pc = cx->regs().pc; JS_ASSERT(*pc == JSOP_CALL || *pc == JSOP_FUNCALL || *pc == JSOP_FUNAPPLY || *pc == JSOP_NEW || *pc == JSOP_SETPROP); #endif @@ -13900,7 +13893,7 @@ TraceRecorder::record_NativeCallComplete() * value is actually currently associated with that location, and that we are talking * about the top of the stack here, which is where we expected boxed values. 
*/ - JS_ASSERT(&v == &cx->regs->sp[-1] && get(&v) == v_ins); + JS_ASSERT(&v == &cx->regs().sp[-1] && get(&v) == v_ins); set(&v, unbox_value(v, AllocSlotsAddress(native_rval_ins), snapshot(BRANCH_EXIT))); } else if (pendingSpecializedNative->flags & (JSTN_RETURN_NULLABLE_STR | JSTN_RETURN_NULLABLE_OBJ)) { @@ -13927,7 +13920,7 @@ JS_REQUIRES_STACK AbortableRecordingStatus TraceRecorder::name(Value*& vp, LIns*& ins, NameResult& nr) { JSObject* obj = &cx->fp()->scopeChain(); - JSOp op = JSOp(*cx->regs->pc); + JSOp op = JSOp(*cx->regs().pc); if (js_CodeSpec[op].format & JOF_GNAME) obj = obj->getGlobal(); if (obj != globalObj) @@ -14063,7 +14056,7 @@ JS_REQUIRES_STACK RecordingStatus TraceRecorder::propTail(JSObject* obj, LIns* obj_ins, JSObject* obj2, PCVal pcval, uint32 *slotp, LIns** v_insp, Value *outp) { - const JSCodeSpec& cs = js_CodeSpec[*cx->regs->pc]; + const JSCodeSpec& cs = js_CodeSpec[*cx->regs().pc]; uint32 setflags = (cs.format & (JOF_INCDEC | JOF_FOR)); JS_ASSERT(!(cs.format & JOF_SET)); @@ -14304,7 +14297,7 @@ TraceRecorder::typedArrayElement(Value& oval, Value& ival, Value*& vp, LIns*& v_ JS_REQUIRES_STACK AbortableRecordingStatus TraceRecorder::getProp(JSObject* obj, LIns* obj_ins) { - JSOp op = JSOp(*cx->regs->pc); + JSOp op = JSOp(*cx->regs().pc); const JSCodeSpec& cs = js_CodeSpec[op]; JS_ASSERT(cs.ndefs == 1); @@ -14334,7 +14327,7 @@ TraceRecorder::record_JSOP_NAME() JS_REQUIRES_STACK AbortableRecordingStatus TraceRecorder::record_JSOP_DOUBLE() { - double d = consts[GET_INDEX(cx->regs->pc)].toDouble(); + double d = consts[GET_INDEX(cx->regs().pc)].toDouble(); stack(0, w.immd(d)); return ARECORD_CONTINUE; } @@ -14342,7 +14335,7 @@ TraceRecorder::record_JSOP_DOUBLE() JS_REQUIRES_STACK AbortableRecordingStatus TraceRecorder::record_JSOP_STRING() { - JSAtom* atom = atoms[GET_INDEX(cx->regs->pc)]; + JSAtom* atom = atoms[GET_INDEX(cx->regs().pc)]; stack(0, w.immpAtomGC(atom)); return ARECORD_CONTINUE; } @@ -14432,9 +14425,9 @@ 
TraceRecorder::record_JSOP_STRICTNE() JS_REQUIRES_STACK AbortableRecordingStatus TraceRecorder::record_JSOP_OBJECT() { - JSStackFrame* const fp = cx->fp(); + StackFrame* const fp = cx->fp(); JSScript* script = fp->script(); - unsigned index = atoms - script->atomMap.vector + GET_INDEX(cx->regs->pc); + unsigned index = atoms - script->atomMap.vector + GET_INDEX(cx->regs().pc); JSObject* obj; obj = script->getObject(index); @@ -14457,35 +14450,35 @@ TraceRecorder::record_JSOP_TRAP() JS_REQUIRES_STACK AbortableRecordingStatus TraceRecorder::record_JSOP_GETARG() { - stack(0, arg(GET_ARGNO(cx->regs->pc))); + stack(0, arg(GET_ARGNO(cx->regs().pc))); return ARECORD_CONTINUE; } JS_REQUIRES_STACK AbortableRecordingStatus TraceRecorder::record_JSOP_SETARG() { - arg(GET_ARGNO(cx->regs->pc), stack(-1)); + arg(GET_ARGNO(cx->regs().pc), stack(-1)); return ARECORD_CONTINUE; } JS_REQUIRES_STACK AbortableRecordingStatus TraceRecorder::record_JSOP_GETLOCAL() { - stack(0, var(GET_SLOTNO(cx->regs->pc))); + stack(0, var(GET_SLOTNO(cx->regs().pc))); return ARECORD_CONTINUE; } JS_REQUIRES_STACK AbortableRecordingStatus TraceRecorder::record_JSOP_SETLOCAL() { - var(GET_SLOTNO(cx->regs->pc), stack(-1)); + var(GET_SLOTNO(cx->regs().pc), stack(-1)); return ARECORD_CONTINUE; } JS_REQUIRES_STACK AbortableRecordingStatus TraceRecorder::record_JSOP_UINT16() { - stack(0, w.immd(GET_UINT16(cx->regs->pc))); + stack(0, w.immd(GET_UINT16(cx->regs().pc))); return ARECORD_CONTINUE; } @@ -14495,7 +14488,7 @@ TraceRecorder::record_JSOP_NEWINIT() initDepth++; hadNewInit = true; - JSProtoKey key = JSProtoKey(cx->regs->pc[1]); + JSProtoKey key = JSProtoKey(cx->regs().pc[1]); LIns* proto_ins; CHECK_STATUS_A(getClassPrototype(key, proto_ins)); @@ -14521,7 +14514,7 @@ TraceRecorder::record_JSOP_NEWARRAY() LIns* proto_ins; CHECK_STATUS_A(getClassPrototype(JSProto_Array, proto_ins)); - unsigned count = GET_UINT24(cx->regs->pc); + unsigned count = GET_UINT24(cx->regs().pc); LIns *args[] = { proto_ins, 
w.immi(count), cx_ins }; LIns *v_ins = w.call(&NewDenseAllocatedArray_ci, args); @@ -14606,49 +14599,49 @@ TraceRecorder::record_JSOP_USESHARP() JS_REQUIRES_STACK AbortableRecordingStatus TraceRecorder::record_JSOP_INCARG() { - return InjectStatus(inc(argval(GET_ARGNO(cx->regs->pc)), 1)); + return InjectStatus(inc(argval(GET_ARGNO(cx->regs().pc)), 1)); } JS_REQUIRES_STACK AbortableRecordingStatus TraceRecorder::record_JSOP_INCLOCAL() { - return InjectStatus(inc(varval(GET_SLOTNO(cx->regs->pc)), 1)); + return InjectStatus(inc(varval(GET_SLOTNO(cx->regs().pc)), 1)); } JS_REQUIRES_STACK AbortableRecordingStatus TraceRecorder::record_JSOP_DECARG() { - return InjectStatus(inc(argval(GET_ARGNO(cx->regs->pc)), -1)); + return InjectStatus(inc(argval(GET_ARGNO(cx->regs().pc)), -1)); } JS_REQUIRES_STACK AbortableRecordingStatus TraceRecorder::record_JSOP_DECLOCAL() { - return InjectStatus(inc(varval(GET_SLOTNO(cx->regs->pc)), -1)); + return InjectStatus(inc(varval(GET_SLOTNO(cx->regs().pc)), -1)); } JS_REQUIRES_STACK AbortableRecordingStatus TraceRecorder::record_JSOP_ARGINC() { - return InjectStatus(inc(argval(GET_ARGNO(cx->regs->pc)), 1, false)); + return InjectStatus(inc(argval(GET_ARGNO(cx->regs().pc)), 1, false)); } JS_REQUIRES_STACK AbortableRecordingStatus TraceRecorder::record_JSOP_LOCALINC() { - return InjectStatus(inc(varval(GET_SLOTNO(cx->regs->pc)), 1, false)); + return InjectStatus(inc(varval(GET_SLOTNO(cx->regs().pc)), 1, false)); } JS_REQUIRES_STACK AbortableRecordingStatus TraceRecorder::record_JSOP_ARGDEC() { - return InjectStatus(inc(argval(GET_ARGNO(cx->regs->pc)), -1, false)); + return InjectStatus(inc(argval(GET_ARGNO(cx->regs().pc)), -1, false)); } JS_REQUIRES_STACK AbortableRecordingStatus TraceRecorder::record_JSOP_LOCALDEC() { - return InjectStatus(inc(varval(GET_SLOTNO(cx->regs->pc)), -1, false)); + return InjectStatus(inc(varval(GET_SLOTNO(cx->regs().pc)), -1, false)); } JS_REQUIRES_STACK AbortableRecordingStatus @@ -14684,7 +14677,7 @@ 
TraceRecorder::record_JSOP_ITER() RETURN_IF_XML_A(v); LIns *obj_ins = get(&v); - jsuint flags = cx->regs->pc[1]; + jsuint flags = cx->regs().pc[1]; enterDeepBailCall(); @@ -14700,7 +14693,7 @@ TraceRecorder::record_JSOP_ITER() // unknown type in *vp (it can be either a function or a non-function // object). Use the same mechanism as finishGetProp to arrange for // LeaveTree to deal with this value. - pendingUnboxSlot = cx->regs->sp - 1; + pendingUnboxSlot = cx->regs().sp - 1; set(pendingUnboxSlot, w.name(w.lddAlloc(vp_ins), "iterval")); leaveDeepBailCall(); @@ -14927,7 +14920,7 @@ TraceRecorder::record_JSOP_FORARG() { LIns* v_ins; CHECK_STATUS_A(unboxNextValue(v_ins)); - arg(GET_ARGNO(cx->regs->pc), v_ins); + arg(GET_ARGNO(cx->regs().pc), v_ins); return ARECORD_CONTINUE; } @@ -14936,7 +14929,7 @@ TraceRecorder::record_JSOP_FORLOCAL() { LIns* v_ins; CHECK_STATUS_A(unboxNextValue(v_ins)); - var(GET_SLOTNO(cx->regs->pc), v_ins); + var(GET_SLOTNO(cx->regs().pc), v_ins); return ARECORD_CONTINUE; } @@ -15052,14 +15045,14 @@ JS_REQUIRES_STACK AbortableRecordingStatus TraceRecorder::record_JSOP_BINDNAME() { TraceMonitor *localtm = traceMonitor; - JSStackFrame* const fp = cx->fp(); + StackFrame* const fp = cx->fp(); JSObject *obj; if (!fp->isFunctionFrame()) { obj = &fp->scopeChain(); #ifdef DEBUG - JSStackFrame *fp2 = fp; + StackFrame *fp2 = fp; #endif /* @@ -15122,7 +15115,7 @@ TraceRecorder::record_JSOP_BINDNAME() LIns *obj_ins = w.ldpObjParent(get(callee)); // Find the target object. - JSAtom *atom = atoms[GET_INDEX(cx->regs->pc)]; + JSAtom *atom = atoms[GET_INDEX(cx->regs().pc)]; jsid id = ATOM_TO_JSID(atom); JSObject *obj2 = js_FindIdentifierBase(cx, &fp->scopeChain(), id); if (!obj2) @@ -15233,7 +15226,7 @@ TraceRecorder::record_JSOP_IN() * The interpreter fuses comparisons and the following branch, so we have * to do that here as well. 
*/ - jsbytecode *pc = cx->regs->pc; + jsbytecode *pc = cx->regs().pc; fuseIf(pc + 1, cond, x); /* If the branch was to a loop header, we may need to close it. */ @@ -15401,7 +15394,7 @@ TraceRecorder::record_JSOP_DEFVAR() jsatomid TraceRecorder::getFullIndex(ptrdiff_t pcoff) { - jsatomid index = GET_INDEX(cx->regs->pc + pcoff); + jsatomid index = GET_INDEX(cx->regs().pc + pcoff); index += atoms - cx->fp()->script()->atomMap.vector; return index; } @@ -15427,7 +15420,7 @@ TraceRecorder::record_JSOP_LAMBDA() * must agree with the corresponding break-from-do-while(0) logic there. */ if (FUN_NULL_CLOSURE(fun) && FUN_OBJECT(fun)->getParent() == &cx->fp()->scopeChain()) { - jsbytecode *pc2 = AdvanceOverBlockchainOp(cx->regs->pc + JSOP_LAMBDA_LENGTH); + jsbytecode *pc2 = AdvanceOverBlockchainOp(cx->regs().pc + JSOP_LAMBDA_LENGTH); JSOp op2 = JSOp(*pc2); if (op2 == JSOP_INITMETHOD) { @@ -15458,7 +15451,7 @@ TraceRecorder::record_JSOP_LAMBDA() * so regs.sp[1 - (iargc + 2)], and not regs.sp[-(iargc + 2)], * is the callee for this JSOP_CALL. */ - const Value &cref = cx->regs->sp[1 - (iargc + 2)]; + const Value &cref = cx->regs().sp[1 - (iargc + 2)]; JSObject *callee; if (IsFunctionObject(cref, &callee)) { @@ -15556,7 +15549,7 @@ TraceRecorder::record_JSOP_CALLEE() JS_REQUIRES_STACK AbortableRecordingStatus TraceRecorder::record_JSOP_SETLOCALPOP() { - var(GET_SLOTNO(cx->regs->pc), stack(-1)); + var(GET_SLOTNO(cx->regs().pc), stack(-1)); return ARECORD_CONTINUE; } @@ -15597,7 +15590,7 @@ TraceRecorder::record_JSOP_NOP() JS_REQUIRES_STACK AbortableRecordingStatus TraceRecorder::record_JSOP_ARGSUB() { - JSStackFrame* const fp = cx->fp(); + StackFrame* const fp = cx->fp(); /* * The arguments object or its absence in the frame is part of the typemap, @@ -15606,7 +15599,7 @@ TraceRecorder::record_JSOP_ARGSUB() * extent ARGCNT are emitted to avoid arguments object creation. 
*/ if (!fp->hasArgsObj() && !fp->fun()->isHeavyweight()) { - uintN slot = GET_ARGNO(cx->regs->pc); + uintN slot = GET_ARGNO(cx->regs().pc); if (slot >= fp->numActualArgs()) RETURN_STOP_A("can't trace out-of-range arguments"); @@ -15630,7 +15623,7 @@ TraceRecorder::guardArgsLengthNotAssigned(LIns* argsobj_ins) JS_REQUIRES_STACK AbortableRecordingStatus TraceRecorder::record_JSOP_ARGCNT() { - JSStackFrame * const fp = cx->fp(); + StackFrame * const fp = cx->fp(); if (fp->fun()->flags & JSFUN_HEAVYWEIGHT) RETURN_STOP_A("can't trace heavyweight JSOP_ARGCNT"); @@ -15779,9 +15772,9 @@ TraceRecorder::record_JSOP_RETRVAL() JS_REQUIRES_STACK AbortableRecordingStatus TraceRecorder::record_JSOP_REGEXP() { - JSStackFrame* const fp = cx->fp(); + StackFrame* const fp = cx->fp(); JSScript* script = fp->script(); - unsigned index = atoms - script->atomMap.vector + GET_INDEX(cx->regs->pc); + unsigned index = atoms - script->atomMap.vector + GET_INDEX(cx->regs().pc); LIns* proto_ins; CHECK_STATUS_A(getClassPrototype(JSProto_RegExp, proto_ins)); @@ -16021,14 +16014,14 @@ TraceRecorder::record_JSOP_DELDESC() JS_REQUIRES_STACK AbortableRecordingStatus TraceRecorder::record_JSOP_UINT24() { - stack(0, w.immd(GET_UINT24(cx->regs->pc))); + stack(0, w.immd(GET_UINT24(cx->regs().pc))); return ARECORD_CONTINUE; } JS_REQUIRES_STACK AbortableRecordingStatus TraceRecorder::record_JSOP_INDEXBASE() { - atoms += GET_INDEXBASE(cx->regs->pc); + atoms += GET_INDEXBASE(cx->regs().pc); return ARECORD_CONTINUE; } @@ -16055,7 +16048,7 @@ TraceRecorder::record_JSOP_CALLELEM() JS_REQUIRES_STACK AbortableRecordingStatus TraceRecorder::record_JSOP_STOP() { - JSStackFrame *fp = cx->fp(); + StackFrame *fp = cx->fp(); /* A return from callDepth 0 terminates the current loop, except for recursion. */ if (callDepth == 0 && !fp->hasImacropc()) { @@ -16067,7 +16060,7 @@ TraceRecorder::record_JSOP_STOP() /* * End of imacro, so return true to the interpreter immediately. 
The * interpreter's JSOP_STOP case will return from the imacro, back to - * the pc after the calling op, still in the same JSStackFrame. + * the pc after the calling op, still in the same StackFrame. */ updateAtoms(fp->script()); return ARECORD_CONTINUE; @@ -16158,9 +16151,9 @@ TraceRecorder::record_JSOP_YIELD() JS_REQUIRES_STACK AbortableRecordingStatus TraceRecorder::record_JSOP_ARRAYPUSH() { - uint32_t slot = GET_UINT16(cx->regs->pc); + uint32_t slot = GET_UINT16(cx->regs().pc); JS_ASSERT(cx->fp()->numFixed() <= slot); - JS_ASSERT(cx->fp()->slots() + slot < cx->regs->sp - 1); + JS_ASSERT(cx->fp()->slots() + slot < cx->regs().sp - 1); Value &arrayval = cx->fp()->slots()[slot]; JS_ASSERT(arrayval.isObject()); LIns *array_ins = get(&arrayval); @@ -16186,7 +16179,7 @@ JS_REQUIRES_STACK AbortableRecordingStatus TraceRecorder::record_JSOP_LEAVEBLOCKEXPR() { LIns* v_ins = stack(-1); - int n = -1 - GET_UINT16(cx->regs->pc); + int n = -1 - GET_UINT16(cx->regs().pc); stack(n, v_ins); return ARECORD_CONTINUE; } @@ -16213,13 +16206,13 @@ TraceRecorder::record_JSOP_GETTHISPROP() JS_REQUIRES_STACK AbortableRecordingStatus TraceRecorder::record_JSOP_GETARGPROP() { - return getProp(argval(GET_ARGNO(cx->regs->pc))); + return getProp(argval(GET_ARGNO(cx->regs().pc))); } JS_REQUIRES_STACK AbortableRecordingStatus TraceRecorder::record_JSOP_GETLOCALPROP() { - return getProp(varval(GET_SLOTNO(cx->regs->pc))); + return getProp(varval(GET_SLOTNO(cx->regs().pc))); } JS_REQUIRES_STACK AbortableRecordingStatus @@ -16246,7 +16239,7 @@ TraceRecorder::record_JSOP_INDEXBASE3() JS_REQUIRES_STACK AbortableRecordingStatus TraceRecorder::record_JSOP_CALLLOCAL() { - uintN slot = GET_SLOTNO(cx->regs->pc); + uintN slot = GET_SLOTNO(cx->regs().pc); stack(0, var(slot)); stack(1, w.immiUndefined()); return ARECORD_CONTINUE; @@ -16255,7 +16248,7 @@ TraceRecorder::record_JSOP_CALLLOCAL() JS_REQUIRES_STACK AbortableRecordingStatus TraceRecorder::record_JSOP_CALLARG() { - uintN slot = 
GET_ARGNO(cx->regs->pc); + uintN slot = GET_ARGNO(cx->regs().pc); stack(0, arg(slot)); stack(1, w.immiUndefined()); return ARECORD_CONTINUE; @@ -16271,14 +16264,14 @@ TraceRecorder::record_JSOP_BINDGNAME() JS_REQUIRES_STACK AbortableRecordingStatus TraceRecorder::record_JSOP_INT8() { - stack(0, w.immd(GET_INT8(cx->regs->pc))); + stack(0, w.immd(GET_INT8(cx->regs().pc))); return ARECORD_CONTINUE; } JS_REQUIRES_STACK AbortableRecordingStatus TraceRecorder::record_JSOP_INT32() { - stack(0, w.immd(GET_INT32(cx->regs->pc))); + stack(0, w.immd(GET_INT32(cx->regs().pc))); return ARECORD_CONTINUE; } @@ -16298,7 +16291,7 @@ TraceRecorder::record_JSOP_LENGTH() if (obj->isArguments()) { unsigned depth; - JSStackFrame *afp = guardArguments(obj, obj_ins, &depth); + StackFrame *afp = guardArguments(obj, obj_ins, &depth); if (!afp) RETURN_STOP_A("can't reach arguments object's frame"); @@ -16383,7 +16376,7 @@ JS_REQUIRES_STACK AbortableRecordingStatus TraceRecorder::record_JSOP_UNBRANDTHIS() { /* In case of primitive this, do nothing. 
*/ - JSStackFrame *fp = cx->fp(); + StackFrame *fp = cx->fp(); if (fp->fun()->inStrictMode() && !fp->thisValue().isObject()) return ARECORD_CONTINUE; @@ -16407,7 +16400,7 @@ TraceRecorder::record_JSOP_SHARPINIT() JS_REQUIRES_STACK AbortableRecordingStatus TraceRecorder::record_JSOP_GETGLOBAL() { - uint32 slot = cx->fp()->script()->getGlobalSlot(GET_SLOTNO(cx->regs->pc)); + uint32 slot = cx->fp()->script()->getGlobalSlot(GET_SLOTNO(cx->regs().pc)); if (!lazilyImportGlobalSlot(slot)) RETURN_STOP_A("lazy import of global slot failed"); @@ -16418,7 +16411,7 @@ TraceRecorder::record_JSOP_GETGLOBAL() JS_REQUIRES_STACK AbortableRecordingStatus TraceRecorder::record_JSOP_CALLGLOBAL() { - uint32 slot = cx->fp()->script()->getGlobalSlot(GET_SLOTNO(cx->regs->pc)); + uint32 slot = cx->fp()->script()->getGlobalSlot(GET_SLOTNO(cx->regs().pc)); if (!lazilyImportGlobalSlot(slot)) RETURN_STOP_A("lazy import of global slot failed"); @@ -16641,8 +16634,8 @@ JS_REQUIRES_STACK TracePointAction RecordTracePoint(JSContext* cx, TraceMonitor* tm, uintN& inlineCallCount, bool* blacklist, bool execAllowed) { - JSStackFrame* fp = cx->fp(); - jsbytecode* pc = cx->regs->pc; + StackFrame* fp = cx->fp(); + jsbytecode* pc = cx->regs().pc; JS_ASSERT(!tm->recorder); JS_ASSERT(!tm->profile); @@ -16695,9 +16688,9 @@ RecordTracePoint(JSContext* cx, TraceMonitor* tm, case MUL_ZERO_EXIT: case OVERFLOW_EXIT: if (lr->exitType == MUL_ZERO_EXIT) - tm->oracle->markInstructionSlowZeroTest(cx->regs->pc); + tm->oracle->markInstructionSlowZeroTest(cx->regs().pc); else - tm->oracle->markInstructionUndemotable(cx->regs->pc); + tm->oracle->markInstructionUndemotable(cx->regs().pc); /* FALL THROUGH */ case BRANCH_EXIT: if (!AttemptToExtendTree(cx, tm, lr, NULL, NULL, NULL)) @@ -16746,7 +16739,7 @@ RecordTracePoint(JSContext* cx, TraceMonitor* tm, return TPA_RanStuff; } -LoopProfile::LoopProfile(TraceMonitor *tm, JSStackFrame *entryfp, +LoopProfile::LoopProfile(TraceMonitor *tm, StackFrame *entryfp, jsbytecode *top, 
jsbytecode *bottom) : traceMonitor(tm), entryScript(entryfp->script()), @@ -16782,13 +16775,13 @@ LoopProfile::reset() MonitorResult LoopProfile::profileLoopEdge(JSContext* cx, uintN& inlineCallCount) { - if (cx->regs->pc == top) { + if (cx->regs().pc == top) { debug_only_print0(LC_TMProfiler, "Profiling complete (edge)\n"); decide(cx); } else { /* Record an inner loop invocation. */ - JSStackFrame *fp = cx->fp(); - jsbytecode *pc = cx->regs->pc; + StackFrame *fp = cx->fp(); + jsbytecode *pc = cx->regs().pc; bool found = false; /* We started with the most deeply nested one first, since it gets hit most often.*/ @@ -16814,7 +16807,7 @@ static const uintN MAX_PROFILE_OPS = 4096; static jsbytecode * GetLoopBottom(JSContext *cx) { - return GetLoopBottom(cx, cx->regs->pc); + return GetLoopBottom(cx, cx->regs().pc); } static LoopProfile * @@ -16842,7 +16835,7 @@ LookupOrAddProfile(JSContext *cx, TraceMonitor *tm, void** traceData, uintN *tra if (*traceData && *traceEpoch == tm->flushEpoch) { prof = (LoopProfile *)*traceData; } else { - jsbytecode* pc = cx->regs->pc; + jsbytecode* pc = cx->regs().pc; jsbytecode* bottom = GetLoopBottom(cx); if (!bottom) return NULL; @@ -16853,7 +16846,7 @@ LookupOrAddProfile(JSContext *cx, TraceMonitor *tm, void** traceData, uintN *tra } #else LoopProfileMap &table = *tm->loopProfiles; - jsbytecode* pc = cx->regs->pc; + jsbytecode* pc = cx->regs().pc; if (LoopProfileMap::AddPtr p = table.lookupForAdd(pc)) { prof = p->value; } else { @@ -16910,7 +16903,7 @@ MonitorTracePoint(JSContext *cx, uintN& inlineCallCount, bool* blacklist, if (TRACE_PROFILER(cx)) return TPA_Nothing; - jsbytecode* pc = cx->regs->pc; + jsbytecode* pc = cx->regs().pc; LoopProfile *prof = LookupOrAddProfile(cx, tm, traceData, traceEpoch); if (!prof) { *blacklist = true; @@ -16921,7 +16914,7 @@ MonitorTracePoint(JSContext *cx, uintN& inlineCallCount, bool* blacklist, if (prof->hits < PROFILE_HOTLOOP) return TPA_Nothing; - AutoRetBlacklist autoRetBlacklist(cx->regs->pc, 
blacklist); + AutoRetBlacklist autoRetBlacklist(cx->regs().pc, blacklist); if (prof->profiled) { if (prof->traceOK) { @@ -16963,7 +16956,7 @@ MonitorTracePoint(JSContext *cx, uintN& inlineCallCount, bool* blacklist, */ template static inline bool -PCWithinLoop(JSStackFrame *fp, jsbytecode *pc, T& loop) +PCWithinLoop(StackFrame *fp, jsbytecode *pc, T& loop) { return fp > loop.entryfp || (fp == loop.entryfp && pc >= loop.top && pc <= loop.bottom); } @@ -16981,8 +16974,8 @@ LoopProfile::profileOperation(JSContext* cx, JSOp op) return ProfComplete; } - jsbytecode *pc = cx->regs->pc; - JSStackFrame *fp = cx->fp(); + jsbytecode *pc = cx->regs().pc; + StackFrame *fp = cx->fp(); JSScript *script = fp->script(); if (!PCWithinLoop(fp, pc, *this)) { @@ -17020,8 +17013,8 @@ LoopProfile::profileOperation(JSContext* cx, JSOp op) } if (op == JSOP_ADD || op == JSOP_SUB || op == JSOP_MUL || op == JSOP_DIV) { - Value& v1 = cx->regs->sp[-1]; - Value& v2 = cx->regs->sp[-2]; + Value& v1 = cx->regs().sp[-1]; + Value& v2 = cx->regs().sp[-2]; /* If either operand is a double, treat it as a floating-point op. */ if (v1.isDouble() || v2.isDouble()) @@ -17046,7 +17039,7 @@ LoopProfile::profileOperation(JSContext* cx, JSOp op) increment(OP_NEW); if (op == JSOP_GETELEM || op == JSOP_SETELEM) { - Value& lval = cx->regs->sp[op == JSOP_GETELEM ? -2 : -3]; + Value& lval = cx->regs().sp[op == JSOP_GETELEM ? -2 : -3]; if (lval.isObject() && js_IsTypedArray(&lval.toObject())) increment(OP_TYPED_ARRAY); else if (lval.isObject() && lval.toObject().isDenseArray() && op == JSOP_GETELEM) @@ -17059,7 +17052,7 @@ LoopProfile::profileOperation(JSContext* cx, JSOp op) /* Try to see if it's a scripted getter, which is faster in the tracer. 
*/ Value v = UndefinedValue(); if (op == JSOP_GETPROP || op == JSOP_CALLPROP) { - v = cx->regs->sp[-1]; + v = cx->regs().sp[-1]; } else if (op == JSOP_GETARGPROP) { uint32 slot = GET_ARGNO(pc); JS_ASSERT(slot < fp->numFormalArgs()); @@ -17089,8 +17082,8 @@ LoopProfile::profileOperation(JSContext* cx, JSOp op) if (op == JSOP_CALL) { increment(OP_CALL); - uintN argc = GET_ARGC(cx->regs->pc); - Value &v = cx->regs->sp[-((int)argc + 2)]; + uintN argc = GET_ARGC(cx->regs().pc); + Value &v = cx->regs().sp[-((int)argc + 2)]; JSObject *callee; if (IsFunctionObject(v, &callee)) { JSFunction *fun = callee->getFunctionPrivate(); @@ -17125,7 +17118,7 @@ LoopProfile::profileOperation(JSContext* cx, JSOp op) } /* These are the places where the interpreter skips over branches. */ - jsbytecode *testPC = cx->regs->pc; + jsbytecode *testPC = cx->regs().pc; if (op == JSOP_EQ || op == JSOP_NE || op == JSOP_LT || op == JSOP_GT || op == JSOP_LE || op == JSOP_GE || op == JSOP_IN || op == JSOP_MOREITER) { @@ -17133,9 +17126,9 @@ LoopProfile::profileOperation(JSContext* cx, JSOp op) ptrdiff_t oplen = cs->length; JS_ASSERT(oplen != -1); - if (cx->regs->pc - script->code + oplen < ptrdiff_t(script->length)) - if (cx->regs->pc[oplen] == JSOP_IFEQ || cx->regs->pc[oplen] == JSOP_IFNE) - testPC = cx->regs->pc + oplen; + if (cx->regs().pc - script->code + oplen < ptrdiff_t(script->length)) + if (cx->regs().pc[oplen] == JSOP_IFEQ || cx->regs().pc[oplen] == JSOP_IFNE) + testPC = cx->regs().pc + oplen; } /* Check if we're exiting the loop being profiled. 
*/ @@ -17151,7 +17144,7 @@ LoopProfile::profileOperation(JSContext* cx, JSOp op) } if (testPC + len == top && (op == JSOP_LT || op == JSOP_LE) - && cx->regs->sp[-2].isInt32() && cx->regs->sp[-2].toInt32() < 16) + && cx->regs().sp[-2].isInt32() && cx->regs().sp[-2].toInt32() < 16) { maybeShortLoop = true; } @@ -17170,7 +17163,7 @@ LoopProfile::profileOperation(JSContext* cx, JSOp op) } if (op == JSOP_INT8) { - stackPush(StackValue(true, GET_INT8(cx->regs->pc))); + stackPush(StackValue(true, GET_INT8(cx->regs().pc))); } else if (op == JSOP_STRING) { stackPush(StackValue(true)); } else if (op == JSOP_TYPEOF || op == JSOP_TYPEOFEXPR) { @@ -17180,7 +17173,7 @@ LoopProfile::profileOperation(JSContext* cx, JSOp op) StackValue v2 = stackAt(-2); stackPush(StackValue(v1.isConst && v2.isConst)); } else if (op == JSOP_AND) { - bool b = !!js_ValueToBoolean(cx->regs->sp[-1]); + bool b = !!js_ValueToBoolean(cx->regs().sp[-1]); StackValue v = stackAt(-1); if (b) stackPop(); @@ -17369,7 +17362,7 @@ LoopProfile::decide(JSContext *cx) } JS_REQUIRES_STACK MonitorResult -MonitorLoopEdge(JSContext* cx, uintN& inlineCallCount, JSInterpMode interpMode) +MonitorLoopEdge(JSContext* cx, uintN& inlineCallCount, InterpMode interpMode) { TraceMonitor *tm = JS_TRACE_MONITOR_FROM_CONTEXT(cx); if (interpMode == JSINTERP_PROFILE && tm->profile) @@ -17394,7 +17387,7 @@ AbortProfiling(JSContext *cx) #else /* JS_METHODJIT */ JS_REQUIRES_STACK MonitorResult -MonitorLoopEdge(JSContext* cx, uintN& inlineCallCount, JSInterpMode interpMode) +MonitorLoopEdge(JSContext* cx, uintN& inlineCallCount, InterpMode interpMode) { TraceMonitor *tm = JS_TRACE_MONITOR_FROM_CONTEXT(cx); return RecordLoopEdge(cx, tm, inlineCallCount); diff --git a/js/src/jstracer.h b/js/src/jstracer.h index 69868ac089b1..004015e20d5d 100644 --- a/js/src/jstracer.h +++ b/js/src/jstracer.h @@ -653,7 +653,7 @@ public: JSScript *entryScript; /* The stack frame where we started profiling. Only valid while profiling! 
*/ - JSStackFrame *entryfp; + StackFrame *entryfp; /* The bytecode locations of the loop header and the back edge. */ jsbytecode *top, *bottom; @@ -716,12 +716,12 @@ public: * and how many iterations we execute it. */ struct InnerLoop { - JSStackFrame *entryfp; + StackFrame *entryfp; jsbytecode *top, *bottom; uintN iters; InnerLoop() {} - InnerLoop(JSStackFrame *entryfp, jsbytecode *top, jsbytecode *bottom) + InnerLoop(StackFrame *entryfp, jsbytecode *top, jsbytecode *bottom) : entryfp(entryfp), top(top), bottom(bottom), iters(0) {} }; @@ -772,7 +772,7 @@ public: return StackValue(false); } - LoopProfile(TraceMonitor *tm, JSStackFrame *entryfp, jsbytecode *top, jsbytecode *bottom); + LoopProfile(TraceMonitor *tm, StackFrame *entryfp, jsbytecode *top, jsbytecode *bottom); void reset(); @@ -1188,9 +1188,9 @@ class TraceRecorder JS_REQUIRES_STACK ptrdiff_t nativespOffsetImpl(const void* p) const; JS_REQUIRES_STACK ptrdiff_t nativespOffset(const Value* p) const; JS_REQUIRES_STACK void importImpl(tjit::Address addr, const void* p, JSValueType t, - const char *prefix, uintN index, JSStackFrame *fp); + const char *prefix, uintN index, StackFrame *fp); JS_REQUIRES_STACK void import(tjit::Address addr, const Value* p, JSValueType t, - const char *prefix, uintN index, JSStackFrame *fp); + const char *prefix, uintN index, StackFrame *fp); JS_REQUIRES_STACK void import(TreeFragment* tree, nanojit::LIns* sp, unsigned stackSlots, unsigned callDepth, unsigned ngslots, JSValueType* typeMap); void trackNativeStackUse(unsigned slots); @@ -1265,7 +1265,7 @@ class TraceRecorder JS_REQUIRES_STACK nanojit::LIns* scopeChain(); JS_REQUIRES_STACK nanojit::LIns* entryScopeChain() const; JS_REQUIRES_STACK nanojit::LIns* entryFrameIns() const; - JS_REQUIRES_STACK JSStackFrame* frameIfInRange(JSObject* obj, unsigned* depthp = NULL) const; + JS_REQUIRES_STACK StackFrame* frameIfInRange(JSObject* obj, unsigned* depthp = NULL) const; JS_REQUIRES_STACK RecordingStatus traverseScopeChain(JSObject 
*obj, nanojit::LIns *obj_ins, JSObject *obj2, nanojit::LIns *&obj2_ins); JS_REQUIRES_STACK AbortableRecordingStatus scopeChainProp(JSObject* obj, Value*& vp, nanojit::LIns*& ins, NameResult& nr, JSObject **scopeObjp = NULL); JS_REQUIRES_STACK RecordingStatus callProp(JSObject* obj, JSProperty* shape, jsid id, Value*& vp, nanojit::LIns*& ins, NameResult& nr); @@ -1482,8 +1482,8 @@ class TraceRecorder JS_REQUIRES_STACK RecordingStatus createThis(JSObject& ctor, nanojit::LIns* ctor_ins, nanojit::LIns** thisobj_insp); JS_REQUIRES_STACK RecordingStatus guardCallee(Value& callee); - JS_REQUIRES_STACK JSStackFrame *guardArguments(JSObject *obj, nanojit::LIns* obj_ins, - unsigned *depthp); + JS_REQUIRES_STACK StackFrame *guardArguments(JSObject *obj, nanojit::LIns* obj_ins, + unsigned *depthp); JS_REQUIRES_STACK nanojit::LIns* guardArgsLengthNotAssigned(nanojit::LIns* argsobj_ins); JS_REQUIRES_STACK void guardNotHole(nanojit::LIns* argsobj_ins, nanojit::LIns* ids_ins); JS_REQUIRES_STACK RecordingStatus getClassPrototype(JSObject* ctor, @@ -1683,7 +1683,7 @@ class TraceRecorder #define TRACE_2(x,a,b) TRACE_ARGS(x, (a, b)) extern JS_REQUIRES_STACK MonitorResult -MonitorLoopEdge(JSContext* cx, uintN& inlineCallCount, JSInterpMode interpMode); +MonitorLoopEdge(JSContext* cx, uintN& inlineCallCount, InterpMode interpMode); extern JS_REQUIRES_STACK TracePointAction RecordTracePoint(JSContext*, uintN& inlineCallCount, bool* blacklist); diff --git a/js/src/jsvalue.h b/js/src/jsvalue.h index 35fefd7f9273..ebe6cc987779 100644 --- a/js/src/jsvalue.h +++ b/js/src/jsvalue.h @@ -1236,87 +1236,5 @@ Debug_SetValueRangeToCrashOnTouch(Value *vec, size_t len) #endif } -/* - * Abstracts the layout of the (callee,this) receiver pair that is passed to - * natives and scripted functions. 
- */ -class CallReceiver -{ -#ifdef DEBUG - mutable bool usedRval_; -#endif - protected: - Value *argv_; - CallReceiver() {} - CallReceiver(Value *argv) : argv_(argv) { -#ifdef DEBUG - usedRval_ = false; -#endif - } - - public: - friend CallReceiver CallReceiverFromVp(Value *); - friend CallReceiver CallReceiverFromArgv(Value *); - Value *base() const { return argv_ - 2; } - JSObject &callee() const { JS_ASSERT(!usedRval_); return argv_[-2].toObject(); } - Value &calleev() const { JS_ASSERT(!usedRval_); return argv_[-2]; } - Value &thisv() const { return argv_[-1]; } - - Value &rval() const { -#ifdef DEBUG - usedRval_ = true; -#endif - return argv_[-2]; - } - - void calleeHasBeenReset() const { -#ifdef DEBUG - usedRval_ = false; -#endif - } -}; - -JS_ALWAYS_INLINE CallReceiver -CallReceiverFromVp(Value *vp) -{ - return CallReceiver(vp + 2); -} - -JS_ALWAYS_INLINE CallReceiver -CallReceiverFromArgv(Value *argv) -{ - return CallReceiver(argv); -} - -/* - * Abstracts the layout of the stack passed to natives from the engine and from - * natives to js::Invoke. 
- */ -class CallArgs : public CallReceiver -{ - uintN argc_; - protected: - CallArgs() {} - CallArgs(uintN argc, Value *argv) : CallReceiver(argv), argc_(argc) {} - public: - friend CallArgs CallArgsFromVp(uintN, Value *); - friend CallArgs CallArgsFromArgv(uintN, Value *); - Value &operator[](unsigned i) const { JS_ASSERT(i < argc_); return argv_[i]; } - Value *argv() const { return argv_; } - uintN argc() const { return argc_; } -}; - -JS_ALWAYS_INLINE CallArgs -CallArgsFromVp(uintN argc, Value *vp) -{ - return CallArgs(argc, vp + 2); -} - -JS_ALWAYS_INLINE CallArgs -CallArgsFromArgv(uintN argc, Value *argv) -{ - return CallArgs(argc, argv); -} - } /* namespace js */ #endif /* jsvalue_h__ */ diff --git a/js/src/jswrapper.cpp b/js/src/jswrapper.cpp index 14d83a39181b..0e0577658d09 100644 --- a/js/src/jswrapper.cpp +++ b/js/src/jswrapper.cpp @@ -385,7 +385,7 @@ AutoCompartment::enter() JS_ASSERT(scopeChain->isNative()); frame.construct(); - if (!context->stack().pushDummyFrame(context, *scopeChain, &frame.ref())) { + if (!context->stack.pushDummyFrame(context, *scopeChain, &frame.ref())) { context->compartment = origin; return false; } diff --git a/js/src/jswrapper.h b/js/src/jswrapper.h index 23514ff0a50e..8c5f73eccba4 100644 --- a/js/src/jswrapper.h +++ b/js/src/jswrapper.h @@ -164,8 +164,8 @@ class AutoCompartment JSObject * const target; JSCompartment * const destination; private: - LazilyConstructed frame; - JSFrameRegs regs; + Maybe frame; + FrameRegs regs; AutoStringRooter input; bool entered; diff --git a/js/src/jsxml.cpp b/js/src/jsxml.cpp index f806a6460812..d2a2eb67499f 100644 --- a/js/src/jsxml.cpp +++ b/js/src/jsxml.cpp @@ -57,7 +57,6 @@ #include "jsfun.h" #include "jsgc.h" #include "jsgcmark.h" -#include "jsinterp.h" #include "jslock.h" #include "jsnum.h" #include "jsobj.h" @@ -72,11 +71,11 @@ #include "jsvector.h" #include "jsatominlines.h" -#include "jscntxtinlines.h" -#include "jsinterpinlines.h" #include "jsobjinlines.h" #include "jsstrinlines.h" 
+#include "vm/Stack-inl.h" + #ifdef DEBUG #include /* for #ifdef DEBUG memset calls */ #endif @@ -1745,7 +1744,7 @@ ParseXMLSource(JSContext *cx, JSString *src) filename = NULL; lineno = 1; if (!i.done()) { - JSStackFrame *fp = i.fp(); + StackFrame *fp = i.fp(); op = (JSOp) *i.pc(); if (op == JSOP_TOXML || op == JSOP_TOXMLLIST) { filename = fp->script()->filename; @@ -7307,8 +7306,7 @@ js_SetDefaultXMLNamespace(JSContext *cx, const Value &v) if (!ns) return JS_FALSE; - JSStackFrame *fp = js_GetTopStackFrame(cx); - JSObject &varobj = fp->varobj(cx); + JSObject &varobj = cx->stack.currentVarObj(); if (!varobj.defineProperty(cx, JS_DEFAULT_XML_NAMESPACE_ID, ObjectValue(*ns), PropertyStub, StrictPropertyStub, JSPROP_PERMANENT)) { return JS_FALSE; @@ -7386,7 +7384,7 @@ js_ValueToXMLString(JSContext *cx, const Value &v) JSBool js_GetAnyName(JSContext *cx, jsid *idp) { - JSObject *global = cx->hasfp() ? cx->fp()->scopeChain().getGlobal() : cx->globalObject; + JSObject *global = cx->running() ? 
cx->fp()->scopeChain().getGlobal() : cx->globalObject; Value v = global->getReservedSlot(JSProto_AnyName); if (v.isUndefined()) { JSObject *obj = NewNonFunction(cx, &js_AnyNameClass, NULL, global); @@ -7627,7 +7625,7 @@ js_StepXMLListFilter(JSContext *cx, JSBool initialized) JSXMLFilter *filter; LeaveTrace(cx); - sp = Jsvalify(cx->regs->sp); + sp = Jsvalify(cx->regs().sp); if (!initialized) { /* * We haven't iterated yet, so initialize the filter based on the diff --git a/js/src/methodjit/BaseAssembler.h b/js/src/methodjit/BaseAssembler.h index 400e373a932b..0d404c67860d 100644 --- a/js/src/methodjit/BaseAssembler.h +++ b/js/src/methodjit/BaseAssembler.h @@ -589,7 +589,7 @@ static const JSC::MacroAssembler::RegisterID JSParamReg_Argc = JSC::SparcRegist if (frameDepth >= 0) { // sp = fp->slots() + frameDepth // regs->sp = sp - addPtr(Imm32(sizeof(JSStackFrame) + frameDepth * sizeof(jsval)), + addPtr(Imm32(sizeof(StackFrame) + frameDepth * sizeof(jsval)), JSFrameReg, ClobberInCall); storePtr(ClobberInCall, FrameAddress(offsetof(VMFrame, regs.sp))); @@ -605,11 +605,11 @@ static const JSC::MacroAssembler::RegisterID JSParamReg_Argc = JSC::SparcRegist setupInfallibleVMFrame(frameDepth); /* regs->fp = fp */ - storePtr(JSFrameReg, FrameAddress(offsetof(VMFrame, regs.fp))); + storePtr(JSFrameReg, FrameAddress(VMFrame::offsetOfFp)); /* PC -> regs->pc :( */ storePtr(ImmPtr(pc), - FrameAddress(offsetof(VMFrame, regs) + offsetof(JSFrameRegs, pc))); + FrameAddress(offsetof(VMFrame, regs) + offsetof(FrameRegs, pc))); } // An infallible VM call is a stub call (taking a VMFrame & and one @@ -753,7 +753,7 @@ static const JSC::MacroAssembler::RegisterID JSParamReg_Argc = Assembler::JSPar struct FrameFlagsAddress : JSC::MacroAssembler::Address { FrameFlagsAddress() - : Address(JSFrameReg, JSStackFrame::offsetOfFlags()) + : Address(JSFrameReg, StackFrame::offsetOfFlags()) {} }; diff --git a/js/src/methodjit/Compiler.cpp b/js/src/methodjit/Compiler.cpp index b1d3615fd6fd..f480869fb0f8 
100644 --- a/js/src/methodjit/Compiler.cpp +++ b/js/src/methodjit/Compiler.cpp @@ -83,7 +83,7 @@ static const char *OpcodeNames[] = { }; #endif -mjit::Compiler::Compiler(JSContext *cx, JSStackFrame *fp) +mjit::Compiler::Compiler(JSContext *cx, StackFrame *fp) : BaseCompiler(cx), fp(fp), script(fp->script()), @@ -239,7 +239,7 @@ mjit::Compiler::~Compiler() } CompileStatus JS_NEVER_INLINE -mjit::TryCompile(JSContext *cx, JSStackFrame *fp) +mjit::TryCompile(JSContext *cx, StackFrame *fp) { JS_ASSERT(cx->fp() == fp); @@ -294,7 +294,7 @@ mjit::Compiler::generatePrologue() Label fastPath = masm.label(); /* Store this early on so slow paths can access it. */ - masm.storePtr(ImmPtr(fun), Address(JSFrameReg, JSStackFrame::offsetOfExec())); + masm.storePtr(ImmPtr(fun), Address(JSFrameReg, StackFrame::offsetOfExec())); { /* @@ -312,8 +312,8 @@ mjit::Compiler::generatePrologue() stubcc.masm.move(JSParamReg_Argc, Registers::ArgReg1); /* Slow path - call the arity check function. Returns new fp. */ - stubcc.masm.storePtr(ImmPtr(fun), Address(JSFrameReg, JSStackFrame::offsetOfExec())); - stubcc.masm.storePtr(JSFrameReg, FrameAddress(offsetof(VMFrame, regs.fp))); + stubcc.masm.storePtr(ImmPtr(fun), Address(JSFrameReg, StackFrame::offsetOfExec())); + stubcc.masm.storePtr(JSFrameReg, FrameAddress(VMFrame::offsetOfFp)); OOL_STUBCALL(stubs::FixupArity); stubcc.masm.move(Registers::ReturnReg, JSFrameReg); stubcc.crossJump(stubcc.masm.jump(), fastPath); @@ -343,7 +343,7 @@ mjit::Compiler::generatePrologue() */ for (uint32 i = 0; i < script->nfixed; i++) { if (analysis->localHasUseBeforeDef(i) || addTraceHints) { - Address local(JSFrameReg, sizeof(JSStackFrame) + i * sizeof(Value)); + Address local(JSFrameReg, sizeof(StackFrame) + i * sizeof(Value)); masm.storeValue(UndefinedValue(), local); } } @@ -364,10 +364,11 @@ mjit::Compiler::generatePrologue() */ RegisterID t0 = Registers::ReturnReg; Jump hasScope = masm.branchTest32(Assembler::NonZero, - FrameFlagsAddress(), 
Imm32(JSFRAME_HAS_SCOPECHAIN)); - masm.loadPayload(Address(JSFrameReg, JSStackFrame::offsetOfCallee(fun)), t0); + FrameFlagsAddress(), + Imm32(StackFrame::HAS_SCOPECHAIN)); + masm.loadPayload(Address(JSFrameReg, StackFrame::offsetOfCallee(fun)), t0); masm.loadPtr(Address(t0, offsetof(JSObject, parent)), t0); - masm.storePtr(t0, Address(JSFrameReg, JSStackFrame::offsetOfScopeChain())); + masm.storePtr(t0, Address(JSFrameReg, StackFrame::offsetOfScopeChain())); hasScope.linkTo(masm.label(), &masm); } } @@ -968,12 +969,12 @@ mjit::Compiler::generateMethod() { RegisterID reg = frame.allocReg(); masm.load32(FrameFlagsAddress(), reg); - masm.or32(Imm32(JSFRAME_HAS_RVAL), reg); + masm.or32(Imm32(StackFrame::HAS_RVAL), reg); masm.store32(reg, FrameFlagsAddress()); frame.freeReg(reg); FrameEntry *fe = frame.peek(-1); - frame.storeTo(fe, Address(JSFrameReg, JSStackFrame::offsetOfReturnValue()), true); + frame.storeTo(fe, Address(JSFrameReg, StackFrame::offsetOfReturnValue()), true); frame.pop(); } END_CASE(JSOP_POPV) @@ -2119,7 +2120,7 @@ mjit::Compiler::jsop_getglobal(uint32 index) void mjit::Compiler::emitFinalReturn(Assembler &masm) { - masm.loadPtr(Address(JSFrameReg, JSStackFrame::offsetOfncode()), Registers::ReturnReg); + masm.loadPtr(Address(JSFrameReg, StackFrame::offsetOfNcode()), Registers::ReturnReg); masm.jump(Registers::ReturnReg); } @@ -2166,8 +2167,8 @@ mjit::Compiler::loadReturnValue(Assembler *masm, FrameEntry *fe) if (analysis->usesReturnValue()) { Jump rvalClear = masm->branchTest32(Assembler::Zero, FrameFlagsAddress(), - Imm32(JSFRAME_HAS_RVAL)); - Address rvalAddress(JSFrameReg, JSStackFrame::offsetOfReturnValue()); + Imm32(StackFrame::HAS_RVAL)); + Address rvalAddress(JSFrameReg, StackFrame::offsetOfReturnValue()); masm->loadValueAsComponents(rvalAddress, typeReg, dataReg); rvalClear.linkTo(masm->label(), masm); } @@ -2184,7 +2185,7 @@ mjit::Compiler::fixPrimitiveReturn(Assembler *masm, FrameEntry *fe) JS_ASSERT(isConstructing); bool ool = (masm != 
&this->masm); - Address thisv(JSFrameReg, JSStackFrame::offsetOfThis(fun)); + Address thisv(JSFrameReg, StackFrame::offsetOfThis(fun)); // We can just load |thisv| if either of the following is true: // (1) There is no explicit return value, AND fp->rval is not used. @@ -2256,8 +2257,8 @@ mjit::Compiler::emitReturn(FrameEntry *fe) } else { /* if (hasCallObj() || hasArgsObj()) */ Jump putObjs = masm.branchTest32(Assembler::NonZero, - Address(JSFrameReg, JSStackFrame::offsetOfFlags()), - Imm32(JSFRAME_HAS_CALL_OBJ | JSFRAME_HAS_ARGS_OBJ)); + Address(JSFrameReg, StackFrame::offsetOfFlags()), + Imm32(StackFrame::HAS_CALL_OBJ | StackFrame::HAS_ARGS_OBJ)); stubcc.linkExit(putObjs, Uses(frame.frameSlots())); stubcc.leave(); @@ -2338,7 +2339,7 @@ mjit::Compiler::interruptCheckHelper() * interrupt is on another thread. */ stubcc.masm.loadPtr(FrameAddress(offsetof(VMFrame, cx)), reg); - stubcc.masm.loadPtr(Address(reg, offsetof(JSContext, thread)), reg); + stubcc.masm.loadPtr(Address(reg, JSContext::threadOffset()), reg); Address flag(reg, offsetof(JSThread, data.interruptFlags)); Jump noInterrupt = stubcc.masm.branchTest32(Assembler::Zero, flag); #endif @@ -2377,16 +2378,16 @@ mjit::Compiler::emitUncachedCall(uint32 argc, bool callingNew) Jump notCompiled = masm.branchTestPtr(Assembler::Zero, r0, r0); - masm.loadPtr(FrameAddress(offsetof(VMFrame, regs.fp)), JSFrameReg); + masm.loadPtr(FrameAddress(VMFrame::offsetOfFp), JSFrameReg); callPatch.hasFastNcode = true; callPatch.fastNcodePatch = masm.storePtrWithPatch(ImmPtr(NULL), - Address(JSFrameReg, JSStackFrame::offsetOfncode())); + Address(JSFrameReg, StackFrame::offsetOfNcode())); masm.jump(r0); callPatch.joinPoint = masm.label(); addReturnSite(callPatch.joinPoint, __LINE__); - masm.loadPtr(Address(JSFrameReg, JSStackFrame::offsetOfPrev()), JSFrameReg); + masm.loadPtr(Address(JSFrameReg, StackFrame::offsetOfPrev()), JSFrameReg); frame.popn(argc + 2); frame.takeReg(JSReturnReg_Type); @@ -2460,8 +2461,8 @@ 
mjit::Compiler::checkCallApplySpeculation(uint32 callImmArgc, uint32 speculatedA RegisterID r0 = Registers::ReturnReg; Jump notCompiled = stubcc.masm.branchTestPtr(Assembler::Zero, r0, r0); - stubcc.masm.loadPtr(FrameAddress(offsetof(VMFrame, regs.fp)), JSFrameReg); - Address ncodeAddr(JSFrameReg, JSStackFrame::offsetOfncode()); + stubcc.masm.loadPtr(FrameAddress(VMFrame::offsetOfFp), JSFrameReg); + Address ncodeAddr(JSFrameReg, StackFrame::offsetOfNcode()); uncachedCallPatch->hasSlowNcode = true; uncachedCallPatch->slowNcodePatch = stubcc.masm.storePtrWithPatch(ImmPtr(NULL), ncodeAddr); @@ -2731,11 +2732,11 @@ mjit::Compiler::inlineCallHelper(uint32 callImmArgc, bool callingNew) stubcc.masm.move(Imm32(callIC.frameSize.staticArgc()), JSParamReg_Argc); else stubcc.masm.load32(FrameAddress(offsetof(VMFrame, u.call.dynamicArgc)), JSParamReg_Argc); - stubcc.masm.loadPtr(FrameAddress(offsetof(VMFrame, regs.fp)), JSFrameReg); + stubcc.masm.loadPtr(FrameAddress(VMFrame::offsetOfFp), JSFrameReg); callPatch.hasSlowNcode = true; callPatch.slowNcodePatch = stubcc.masm.storePtrWithPatch(ImmPtr(NULL), - Address(JSFrameReg, JSStackFrame::offsetOfncode())); + Address(JSFrameReg, StackFrame::offsetOfNcode())); stubcc.masm.jump(Registers::ReturnReg); /* @@ -2764,7 +2765,7 @@ mjit::Compiler::inlineCallHelper(uint32 callImmArgc, bool callingNew) uint32 flags = 0; if (callingNew) - flags |= JSFRAME_CONSTRUCTING; + flags |= StackFrame::CONSTRUCTING; InlineFrameAssembler inlFrame(masm, callIC, flags); callPatch.hasFastNcode = true; @@ -2775,7 +2776,7 @@ mjit::Compiler::inlineCallHelper(uint32 callImmArgc, bool callingNew) addReturnSite(callPatch.joinPoint, __LINE__); if (lowerFunCallOrApply) uncachedCallPatch.joinPoint = callIC.joinPoint; - masm.loadPtr(Address(JSFrameReg, JSStackFrame::offsetOfPrev()), JSFrameReg); + masm.loadPtr(Address(JSFrameReg, StackFrame::offsetOfPrev()), JSFrameReg); /* * We've placed hotJump, joinPoint and hotPathLabel, and no other labels are located by offset 
@@ -2818,7 +2819,7 @@ mjit::Compiler::inlineCallHelper(uint32 callImmArgc, bool callingNew) /* * This function must be called immediately after any instruction which could - * cause a new JSStackFrame to be pushed and could lead to a new debug trap + * cause a new StackFrame to be pushed and could lead to a new debug trap * being set. This includes any API callbacks and any scripted or native call. */ void @@ -2830,7 +2831,7 @@ mjit::Compiler::addCallSite(const InternalCallSite &site) void mjit::Compiler::restoreFrameRegs(Assembler &masm) { - masm.loadPtr(FrameAddress(offsetof(VMFrame, regs.fp)), JSFrameReg); + masm.loadPtr(FrameAddress(VMFrame::offsetOfFp), JSFrameReg); } bool @@ -3163,7 +3164,7 @@ mjit::Compiler::jsop_callprop_generic(JSAtom *atom) * since a sync will be needed for the upcoming call. */ uint32 thisvSlot = frame.localSlots(); - Address thisv = Address(JSFrameReg, sizeof(JSStackFrame) + thisvSlot * sizeof(Value)); + Address thisv = Address(JSFrameReg, sizeof(StackFrame) + thisvSlot * sizeof(Value)); #if defined JS_NUNBOX32 masm.storeValueFromComponents(pic.typeReg, pic.objReg, thisv); @@ -3622,7 +3623,7 @@ mjit::Compiler::jsop_bindname(JSAtom *atom, bool usePropCache) pic.fastPathStart = masm.label(); Address parent(pic.objReg, offsetof(JSObject, parent)); - masm.loadPtr(Address(JSFrameReg, JSStackFrame::offsetOfScopeChain()), pic.objReg); + masm.loadPtr(Address(JSFrameReg, StackFrame::offsetOfScopeChain()), pic.objReg); pic.shapeGuard = masm.label(); Jump inlineJump = masm.branchPtr(Assembler::NotEqual, parent, ImmPtr(0)); @@ -3689,7 +3690,7 @@ void mjit::Compiler::jsop_bindname(JSAtom *atom, bool usePropCache) { RegisterID reg = frame.allocReg(); - Address scopeChain(JSFrameReg, JSStackFrame::offsetOfScopeChain()); + Address scopeChain(JSFrameReg, StackFrame::offsetOfScopeChain()); masm.loadPtr(scopeChain, reg); Address address(reg, offsetof(JSObject, parent)); diff --git a/js/src/methodjit/Compiler.h b/js/src/methodjit/Compiler.h index 
95d807497f5b..f9a7d95b639a 100644 --- a/js/src/methodjit/Compiler.h +++ b/js/src/methodjit/Compiler.h @@ -326,7 +326,7 @@ class Compiler : public BaseCompiler size_t offsetIndex; }; - JSStackFrame *fp; + StackFrame *fp; JSScript *script; JSObject *scopeChain; JSObject *globalObj; @@ -372,7 +372,7 @@ class Compiler : public BaseCompiler // follows interpreter usage in JSOP_LENGTH. enum { LengthAtomIndex = uint32(-2) }; - Compiler(JSContext *cx, JSStackFrame *fp); + Compiler(JSContext *cx, StackFrame *fp); ~Compiler(); CompileStatus compile(); diff --git a/js/src/methodjit/FrameState-inl.h b/js/src/methodjit/FrameState-inl.h index 7764a3fb9392..3010ecb17dcd 100644 --- a/js/src/methodjit/FrameState-inl.h +++ b/js/src/methodjit/FrameState-inl.h @@ -733,13 +733,13 @@ FrameState::addressOf(const FrameEntry *fe) const { int32 frameOffset = 0; if (fe >= locals) - frameOffset = JSStackFrame::offsetOfFixed(uint32(fe - locals)); + frameOffset = StackFrame::offsetOfFixed(uint32(fe - locals)); else if (fe >= args) - frameOffset = JSStackFrame::offsetOfFormalArg(fun, uint32(fe - args)); + frameOffset = StackFrame::offsetOfFormalArg(fun, uint32(fe - args)); else if (fe == this_) - frameOffset = JSStackFrame::offsetOfThis(fun); + frameOffset = StackFrame::offsetOfThis(fun); else if (fe == callee_) - frameOffset = JSStackFrame::offsetOfCallee(fun); + frameOffset = StackFrame::offsetOfCallee(fun); JS_ASSERT(frameOffset); return Address(JSFrameReg, frameOffset); } diff --git a/js/src/methodjit/FrameState.h b/js/src/methodjit/FrameState.h index c7f214bf780a..b95016bd02f2 100644 --- a/js/src/methodjit/FrameState.h +++ b/js/src/methodjit/FrameState.h @@ -741,13 +741,13 @@ class FrameState void assertValidRegisterState() const; #endif - // Return an address, relative to the JSStackFrame, that represents where + // Return an address, relative to the StackFrame, that represents where // this FrameEntry is stored in memory. Note that this is its canonical // address, not its backing store. 
There is no guarantee that the memory // is coherent. Address addressOf(const FrameEntry *fe) const; - // Returns an address, relative to the JSStackFrame, that represents where + // Returns an address, relative to the StackFrame, that represents where // this FrameEntry is backed in memory. This is not necessarily its // canonical address, but the address for which the payload has been synced // to memory. The caller guarantees that the payload has been synced. diff --git a/js/src/methodjit/InlineFrameAssembler.h b/js/src/methodjit/InlineFrameAssembler.h index 0e60478a7ac9..4a00c9ab67ea 100644 --- a/js/src/methodjit/InlineFrameAssembler.h +++ b/js/src/methodjit/InlineFrameAssembler.h @@ -62,7 +62,7 @@ struct AdjustedFrame { /* * This is used for emitting code to inline callee-side frame creation and - * should jit code equivalent to JSStackFrame::initCallFrameCallerHalf. + * should jit code equivalent to StackFrame::initCallFrameCallerHalf. * * Once finished, JSFrameReg is advanced to be the new fp. */ @@ -105,23 +105,23 @@ class InlineFrameAssembler { DataLabelPtr assemble(void *ncode) { - JS_ASSERT((flags & ~JSFRAME_CONSTRUCTING) == 0); + JS_ASSERT((flags & ~StackFrame::CONSTRUCTING) == 0); - /* Generate JSStackFrame::initCallFrameCallerHalf. */ + /* Generate StackFrame::initCallFrameCallerHalf. 
*/ DataLabelPtr ncodePatch; if (frameSize.isStatic()) { uint32 frameDepth = frameSize.staticLocalSlots(); - AdjustedFrame newfp(sizeof(JSStackFrame) + frameDepth * sizeof(Value)); + AdjustedFrame newfp(sizeof(StackFrame) + frameDepth * sizeof(Value)); - Address flagsAddr = newfp.addrOf(JSStackFrame::offsetOfFlags()); - masm.store32(Imm32(JSFRAME_FUNCTION | flags), flagsAddr); - Address prevAddr = newfp.addrOf(JSStackFrame::offsetOfPrev()); + Address flagsAddr = newfp.addrOf(StackFrame::offsetOfFlags()); + masm.store32(Imm32(StackFrame::FUNCTION | flags), flagsAddr); + Address prevAddr = newfp.addrOf(StackFrame::offsetOfPrev()); masm.storePtr(JSFrameReg, prevAddr); - Address ncodeAddr = newfp.addrOf(JSStackFrame::offsetOfncode()); + Address ncodeAddr = newfp.addrOf(StackFrame::offsetOfNcode()); ncodePatch = masm.storePtrWithPatch(ImmPtr(ncode), ncodeAddr); - masm.addPtr(Imm32(sizeof(JSStackFrame) + frameDepth * sizeof(Value)), JSFrameReg); + masm.addPtr(Imm32(sizeof(StackFrame) + frameDepth * sizeof(Value)), JSFrameReg); } else { /* * If the frame size is dynamic, then the fast path generated by @@ -134,11 +134,11 @@ class InlineFrameAssembler { RegisterID newfp = tempRegs.takeAnyReg(); masm.loadPtr(FrameAddress(offsetof(VMFrame, regs.sp)), newfp); - Address flagsAddr(newfp, JSStackFrame::offsetOfFlags()); - masm.store32(Imm32(JSFRAME_FUNCTION | flags), flagsAddr); - Address prevAddr(newfp, JSStackFrame::offsetOfPrev()); + Address flagsAddr(newfp, StackFrame::offsetOfFlags()); + masm.store32(Imm32(StackFrame::FUNCTION | flags), flagsAddr); + Address prevAddr(newfp, StackFrame::offsetOfPrev()); masm.storePtr(JSFrameReg, prevAddr); - Address ncodeAddr(newfp, JSStackFrame::offsetOfncode()); + Address ncodeAddr(newfp, StackFrame::offsetOfNcode()); ncodePatch = masm.storePtrWithPatch(ImmPtr(ncode), ncodeAddr); masm.move(newfp, JSFrameReg); diff --git a/js/src/methodjit/InvokeHelpers.cpp b/js/src/methodjit/InvokeHelpers.cpp index 34816b113dbd..d65a4bb1b6c4 100644 --- 
a/js/src/methodjit/InvokeHelpers.cpp +++ b/js/src/methodjit/InvokeHelpers.cpp @@ -81,13 +81,13 @@ using ic::Repatcher; static jsbytecode * FindExceptionHandler(JSContext *cx) { - JSStackFrame *fp = cx->fp(); + StackFrame *fp = cx->fp(); JSScript *script = fp->script(); top: if (cx->isExceptionPending() && JSScript::isValidOffset(script->trynotesOffset)) { // The PC is updated before every stub call, so we can use it here. - unsigned offset = cx->regs->pc - script->main; + unsigned offset = cx->regs().pc - script->main; JSTryNoteArray *tnarray = script->trynotes(); for (unsigned i = 0; i < tnarray->length; ++i) { @@ -110,12 +110,12 @@ top: // bytecode compiler cannot throw, so this is not possible. if (offset - tn->start > tn->length) continue; - if (tn->stackDepth > cx->regs->sp - fp->base()) + if (tn->stackDepth > cx->regs().sp - fp->base()) continue; jsbytecode *pc = script->main + tn->start + tn->length; JSBool ok = js_UnwindScope(cx, tn->stackDepth, JS_TRUE); - JS_ASSERT(cx->regs->sp == fp->base() + tn->stackDepth); + JS_ASSERT(cx->regs().sp == fp->base() + tn->stackDepth); switch (tn->kind) { case JSTRY_CATCH: @@ -139,9 +139,9 @@ top: * Push (true, exception) pair for finally to indicate that * [retsub] should rethrow the exception. 
*/ - cx->regs->sp[0].setBoolean(true); - cx->regs->sp[1] = cx->getPendingException(); - cx->regs->sp += 2; + cx->regs().sp[0].setBoolean(true); + cx->regs().sp[1] = cx->getPendingException(); + cx->regs().sp += 2; cx->clearPendingException(); return pc; @@ -157,8 +157,8 @@ top: Value v = cx->getPendingException(); JS_ASSERT(js_GetOpcode(cx, fp->script(), pc) == JSOP_ENDITER); cx->clearPendingException(); - ok = !!js_CloseIterator(cx, &cx->regs->sp[-1].toObject()); - cx->regs->sp -= 1; + ok = !!js_CloseIterator(cx, &cx->regs().sp[-1].toObject()); + cx->regs().sp -= 1; if (!ok) goto top; cx->setPendingException(v); @@ -176,16 +176,9 @@ top: static void InlineReturn(VMFrame &f) { - JSContext *cx = f.cx; - JSStackFrame *fp = f.regs.fp; - JS_ASSERT(f.fp() != f.entryfp); - - JS_ASSERT(!js_IsActiveWithOrBlock(cx, &fp->scopeChain(), 0)); - - Value *newsp = fp->actualArgs() - 1; - newsp[-1] = fp->returnValue(); - cx->stack().popInlineFrame(cx, fp->prev(), newsp); + JS_ASSERT(!js_IsActiveWithOrBlock(f.cx, &f.fp()->scopeChain(), 0)); + f.cx->stack.popInlineFrame(); } void JS_FASTCALL @@ -193,7 +186,7 @@ stubs::SlowCall(VMFrame &f, uint32 argc) { Value *vp = f.regs.sp - (argc + 2); - if (!Invoke(f.cx, InvokeArgsAlreadyOnTheStack(vp, argc), 0)) + if (!Invoke(f.cx, InvokeArgsAlreadyOnTheStack(argc, vp))) THROW(); } @@ -203,7 +196,7 @@ stubs::SlowNew(VMFrame &f, uint32 argc) JSContext *cx = f.cx; Value *vp = f.regs.sp - (argc + 2); - if (!InvokeConstructor(cx, InvokeArgsAlreadyOnTheStack(vp, argc))) + if (!InvokeConstructor(cx, InvokeArgsAlreadyOnTheStack(argc, vp))) THROW(); } @@ -212,11 +205,9 @@ stubs::SlowNew(VMFrame &f, uint32 argc) * on fp->exec.fun. 
*/ static inline void -RemovePartialFrame(JSContext *cx, JSStackFrame *fp) +RemovePartialFrame(JSContext *cx, StackFrame *fp) { - JSStackFrame *prev = fp->prev(); - Value *newsp = (Value *)fp; - cx->stack().popInlineFrame(cx, prev, newsp); + cx->stack.popInlineFrame(); } /* @@ -229,7 +220,8 @@ stubs::HitStackQuota(VMFrame &f) /* Include space to push another frame. */ uintN nvals = f.fp()->script()->nslots + VALUES_PER_STACK_FRAME; JS_ASSERT(f.regs.sp == f.fp()->base()); - if (f.cx->stack().bumpCommitAndLimit(f.entryfp, f.regs.sp, nvals, &f.stackLimit)) + StackSpace &space = f.cx->stack.space(); + if (space.bumpLimitWithinQuota(NULL, f.entryfp, f.regs.sp, nvals, &f.stackLimit)) return; /* Remove the current partially-constructed frame before throwing. */ @@ -246,7 +238,7 @@ void * JS_FASTCALL stubs::FixupArity(VMFrame &f, uint32 nactual) { JSContext *cx = f.cx; - JSStackFrame *oldfp = f.fp(); + StackFrame *oldfp = f.fp(); JS_ASSERT(nactual != oldfp->numFormalArgs()); @@ -261,13 +253,12 @@ stubs::FixupArity(VMFrame &f, uint32 nactual) void *ncode = oldfp->nativeReturnAddress(); /* Pop the inline frame. */ - f.fp() = oldfp->prev(); - f.regs.sp = (Value*) oldfp; + f.regs.popPartialFrame((Value *)oldfp); /* Reserve enough space for a callee frame. */ - JSStackFrame *newfp = cx->stack().getInlineFrameWithinLimit(cx, (Value*) oldfp, nactual, - fun, fun->script(), &flags, - f.entryfp, &f.stackLimit); + StackFrame *newfp = cx->stack.getInlineFrameWithinLimit(cx, (Value*) oldfp, nactual, + fun, fun->script(), &flags, + f.entryfp, &f.stackLimit); if (!newfp) { /* * The PC is not coherent with the current frame, so fix it up for @@ -295,7 +286,7 @@ stubs::CompileFunction(VMFrame &f, uint32 nactual) * compile though because we could throw, so get a full, adjusted frame. 
*/ JSContext *cx = f.cx; - JSStackFrame *fp = f.fp(); + StackFrame *fp = f.fp(); /* * Since we can only use members set by initCallFrameCallerHalf, @@ -312,7 +303,7 @@ stubs::CompileFunction(VMFrame &f, uint32 nactual) fp->initCallFrameEarlyPrologue(fun, nactual); if (nactual != fp->numFormalArgs()) { - fp = (JSStackFrame *)FixupArity(f, nactual); + fp = (StackFrame *)FixupArity(f, nactual); if (!fp) return NULL; } @@ -321,9 +312,7 @@ stubs::CompileFunction(VMFrame &f, uint32 nactual) fp->initCallFrameLatePrologue(); /* These would have been initialized by the prologue. */ - f.regs.fp = fp; - f.regs.sp = fp->base(); - f.regs.pc = script->code; + f.regs.prepareToRun(fp, script); if (fun->isHeavyweight() && !js::CreateFunCallObject(cx, fp)) THROWV(NULL); @@ -352,10 +341,9 @@ UncachedInlineCall(VMFrame &f, uint32 flags, void **pret, bool *unjittable, uint JSScript *newscript = newfun->script(); /* Get pointer to new frame/slots, prepare arguments. */ - StackSpace &stack = cx->stack(); - JSStackFrame *newfp = stack.getInlineFrameWithinLimit(cx, f.regs.sp, argc, - newfun, newscript, &flags, - f.entryfp, &f.stackLimit); + StackFrame *newfp = cx->stack.getInlineFrameWithinLimit(cx, f.regs.sp, argc, + newfun, newscript, &flags, + f.entryfp, &f.stackLimit); if (JS_UNLIKELY(!newfp)) return false; @@ -364,8 +352,8 @@ UncachedInlineCall(VMFrame &f, uint32 flags, void **pret, bool *unjittable, uint SetValueRangeToUndefined(newfp->slots(), newscript->nfixed); /* Officially push the frame. */ - stack.pushInlineFrame(cx, newscript, newfp, &f.regs); - JS_ASSERT(newfp == f.regs.fp); + cx->stack.pushInlineFrame(newscript, newfp, f.regs); + JS_ASSERT(newfp == f.fp()); /* Scope with a call object parented by callee's parent. */ if (newfun->isHeavyweight() && !js::CreateFunCallObject(cx, newfp)) @@ -414,10 +402,10 @@ stubs::UncachedNewHelper(VMFrame &f, uint32 argc, UncachedCallResult *ucr) /* Try to do a fast inline call before the general Invoke path. 
*/ if (IsFunctionObject(*vp, &ucr->fun) && ucr->fun->isInterpretedConstructor()) { ucr->callee = &vp->toObject(); - if (!UncachedInlineCall(f, JSFRAME_CONSTRUCTING, &ucr->codeAddr, &ucr->unjittable, argc)) + if (!UncachedInlineCall(f, StackFrame::CONSTRUCTING, &ucr->codeAddr, &ucr->unjittable, argc)) THROW(); } else { - if (!InvokeConstructor(cx, InvokeArgsAlreadyOnTheStack(vp, argc))) + if (!InvokeConstructor(cx, InvokeArgsAlreadyOnTheStack(argc, vp))) THROW(); } } @@ -435,13 +423,13 @@ stubs::Eval(VMFrame &f, uint32 argc) { Value *vp = f.regs.sp - (argc + 2); - if (!IsBuiltinEvalForScope(&f.regs.fp->scopeChain(), *vp)) { - if (!Invoke(f.cx, InvokeArgsAlreadyOnTheStack(vp, argc), 0)) + if (!IsBuiltinEvalForScope(&f.fp()->scopeChain(), *vp)) { + if (!Invoke(f.cx, InvokeArgsAlreadyOnTheStack(argc, vp))) THROW(); return; } - JS_ASSERT(f.regs.fp == f.cx->fp()); + JS_ASSERT(f.fp() == f.cx->fp()); if (!DirectEval(f.cx, CallArgsFromVp(argc, vp))) THROW(); @@ -473,7 +461,7 @@ stubs::UncachedCallHelper(VMFrame &f, uint32 argc, UncachedCallResult *ucr) } } - if (!Invoke(f.cx, InvokeArgsAlreadyOnTheStack(vp, argc), 0)) + if (!Invoke(f.cx, InvokeArgsAlreadyOnTheStack(argc, vp))) THROW(); return; @@ -483,7 +471,7 @@ void JS_FASTCALL stubs::PutActivationObjects(VMFrame &f) { JS_ASSERT(f.fp()->hasCallObj() || f.fp()->hasArgsObj()); - js::PutActivationObjects(f.cx, f.fp()); + f.fp()->putActivationObjects(); } extern "C" void * @@ -508,13 +496,13 @@ js_InternalThrow(VMFrame &f) } // Make sure sp is up to date. 
- JS_ASSERT(cx->regs == &f.regs); + JS_ASSERT(&cx->regs() == &f.regs); // Call the throw hook if necessary JSThrowHook handler = f.cx->debugHooks->throwHook; if (handler) { Value rval; - switch (handler(cx, cx->fp()->script(), cx->regs->pc, Jsvalify(&rval), + switch (handler(cx, cx->fp()->script(), cx->regs().pc, Jsvalify(&rval), cx->debugHooks->throwHookData)) { case JSTRAP_ERROR: cx->clearPendingException(); @@ -555,16 +543,16 @@ js_InternalThrow(VMFrame &f) if (f.entryfp == f.fp()) break; - JS_ASSERT(f.regs.sp == cx->regs->sp); + JS_ASSERT(f.regs.sp == cx->regs().sp); InlineReturn(f); } - JS_ASSERT(f.regs.sp == cx->regs->sp); + JS_ASSERT(f.regs.sp == cx->regs().sp); if (!pc) return NULL; - JSStackFrame *fp = cx->fp(); + StackFrame *fp = cx->fp(); JSScript *script = fp->script(); return script->nativeCodeForPC(fp->isConstructing(), pc); } @@ -581,7 +569,7 @@ void JS_FASTCALL stubs::CreateThis(VMFrame &f, JSObject *proto) { JSContext *cx = f.cx; - JSStackFrame *fp = f.fp(); + StackFrame *fp = f.fp(); JSObject *callee = &fp->callee(); JSObject *obj = js_CreateThisForFunctionWithProto(cx, callee, proto); if (!obj) @@ -610,7 +598,7 @@ stubs::ScriptDebugEpilogue(VMFrame &f) * handler in the process. */ static inline bool -HandleErrorInExcessFrame(VMFrame &f, JSStackFrame *stopFp, bool searchedTopmostFrame = true) +HandleErrorInExcessFrame(VMFrame &f, StackFrame *stopFp, bool searchedTopmostFrame = true) { JSContext *cx = f.cx; @@ -621,7 +609,7 @@ HandleErrorInExcessFrame(VMFrame &f, JSStackFrame *stopFp, bool searchedTopmostF * * Note that this also guarantees ScriptEpilogue() has been called. */ - JSStackFrame *fp = cx->fp(); + StackFrame *fp = cx->fp(); if (searchedTopmostFrame) { /* * This is a special case meaning that fp->finishedInInterpreter() is @@ -647,7 +635,7 @@ HandleErrorInExcessFrame(VMFrame &f, JSStackFrame *stopFp, bool searchedTopmostF /* Clear imacros. 
*/ if (fp->hasImacropc()) { - cx->regs->pc = fp->imacropc(); + cx->regs().pc = fp->imacropc(); fp->clearImacropc(); } JS_ASSERT(!fp->hasImacropc()); @@ -656,7 +644,7 @@ HandleErrorInExcessFrame(VMFrame &f, JSStackFrame *stopFp, bool searchedTopmostF if (cx->isExceptionPending()) { jsbytecode *pc = FindExceptionHandler(cx); if (pc) { - cx->regs->pc = pc; + cx->regs().pc = pc; returnOK = true; break; } @@ -672,7 +660,7 @@ HandleErrorInExcessFrame(VMFrame &f, JSStackFrame *stopFp, bool searchedTopmostF InlineReturn(f); } - JS_ASSERT(&f.regs == cx->regs); + JS_ASSERT(&f.regs == &cx->regs()); JS_ASSERT_IF(!returnOK, cx->fp() == stopFp); return returnOK; @@ -682,12 +670,12 @@ HandleErrorInExcessFrame(VMFrame &f, JSStackFrame *stopFp, bool searchedTopmostF static inline void * AtSafePoint(JSContext *cx) { - JSStackFrame *fp = cx->fp(); + StackFrame *fp = cx->fp(); if (fp->hasImacropc()) return NULL; JSScript *script = fp->script(); - return script->maybeNativeCodeForPC(fp->isConstructing(), cx->regs->pc); + return script->maybeNativeCodeForPC(fp->isConstructing(), cx->regs().pc); } /* @@ -698,13 +686,13 @@ static inline JSBool PartialInterpret(VMFrame &f) { JSContext *cx = f.cx; - JSStackFrame *fp = cx->fp(); + StackFrame *fp = cx->fp(); #ifdef DEBUG JSScript *script = fp->script(); JS_ASSERT(!fp->finishedInInterpreter()); JS_ASSERT(fp->hasImacropc() || - !script->maybeNativeCodeForPC(fp->isConstructing(), cx->regs->pc)); + !script->maybeNativeCodeForPC(fp->isConstructing(), cx->regs().pc)); #endif JSBool ok = JS_TRUE; @@ -726,7 +714,7 @@ JS_STATIC_ASSERT(JSOP_NOP == 0); static inline bool FrameIsFinished(JSContext *cx) { - JSOp op = JSOp(*cx->regs->pc); + JSOp op = JSOp(*cx->regs().pc); return (op == JSOP_RETURN || op == JSOP_RETRVAL || op == JSOP_STOP) @@ -739,12 +727,12 @@ FrameIsFinished(JSContext *cx) static inline void AdvanceReturnPC(JSContext *cx) { - JS_ASSERT(*cx->regs->pc == JSOP_CALL || - *cx->regs->pc == JSOP_NEW || - *cx->regs->pc == JSOP_EVAL || - 
*cx->regs->pc == JSOP_FUNCALL || - *cx->regs->pc == JSOP_FUNAPPLY); - cx->regs->pc += JSOP_CALL_LENGTH; + JS_ASSERT(*cx->regs().pc == JSOP_CALL || + *cx->regs().pc == JSOP_NEW || + *cx->regs().pc == JSOP_EVAL || + *cx->regs().pc == JSOP_FUNCALL || + *cx->regs().pc == JSOP_FUNAPPLY); + cx->regs().pc += JSOP_CALL_LENGTH; } @@ -756,7 +744,7 @@ AdvanceReturnPC(JSContext *cx) * (and faster) to finish frames in C++ even if at a safe point here. */ static bool -HandleFinishedFrame(VMFrame &f, JSStackFrame *entryFrame) +HandleFinishedFrame(VMFrame &f, StackFrame *entryFrame) { JSContext *cx = f.cx; @@ -791,7 +779,7 @@ HandleFinishedFrame(VMFrame &f, JSStackFrame *entryFrame) */ bool returnOK = true; if (!cx->fp()->finishedInInterpreter()) { - if (JSOp(*cx->regs->pc) == JSOP_RETURN) + if (JSOp(*cx->regs().pc) == JSOP_RETURN) cx->fp()->setReturnValue(f.regs.sp[-1]); returnOK = ScriptEpilogue(cx, cx->fp(), true); @@ -821,10 +809,10 @@ HandleFinishedFrame(VMFrame &f, JSStackFrame *entryFrame) * pushed by a call, that has method JIT'd code. */ static bool -EvaluateExcessFrame(VMFrame &f, JSStackFrame *entryFrame) +EvaluateExcessFrame(VMFrame &f, StackFrame *entryFrame) { JSContext *cx = f.cx; - JSStackFrame *fp = cx->fp(); + StackFrame *fp = cx->fp(); /* * A "finished" frame is when the interpreter rested on a STOP, @@ -851,7 +839,7 @@ EvaluateExcessFrame(VMFrame &f, JSStackFrame *entryFrame) * always leave f.regs.fp == entryFrame. */ static bool -FinishExcessFrames(VMFrame &f, JSStackFrame *entryFrame) +FinishExcessFrames(VMFrame &f, StackFrame *entryFrame) { JSContext *cx = f.cx; @@ -940,7 +928,7 @@ RunTracer(VMFrame &f) #endif { JSContext *cx = f.cx; - JSStackFrame *entryFrame = f.fp(); + StackFrame *entryFrame = f.fp(); TracePointAction tpa; /* :TODO: nuke PIC? */ @@ -989,7 +977,6 @@ RunTracer(VMFrame &f) // error failures correctly. 
JS_ASSERT_IF(cx->isExceptionPending(), tpa == TPA_Error); - f.fp() = cx->fp(); JS_ASSERT(f.fp() == cx->fp()); switch (tpa) { case TPA_Nothing: diff --git a/js/src/methodjit/MachineRegs.h b/js/src/methodjit/MachineRegs.h index 7a18bce7a777..64443bb84743 100644 --- a/js/src/methodjit/MachineRegs.h +++ b/js/src/methodjit/MachineRegs.h @@ -63,7 +63,7 @@ struct Registers { static const RegisterID ScratchReg = JSC::X86Registers::r11; #endif - // Register that homes the current JSStackFrame. + // Register that homes the current StackFrame. #if defined(JS_CPU_X86) || defined(JS_CPU_X64) static const RegisterID JSFrameReg = JSC::X86Registers::ebx; #elif defined(JS_CPU_ARM) diff --git a/js/src/methodjit/MethodJIT-inl.h b/js/src/methodjit/MethodJIT-inl.h index 8f392ff4b047..d5ecbb1ea799 100644 --- a/js/src/methodjit/MethodJIT-inl.h +++ b/js/src/methodjit/MethodJIT-inl.h @@ -57,7 +57,7 @@ static const size_t CALLS_BEFORE_COMPILE = 16; static const size_t BACKEDGES_BEFORE_COMPILE = 16; static inline CompileStatus -CanMethodJIT(JSContext *cx, JSScript *script, JSStackFrame *fp, CompileRequest request) +CanMethodJIT(JSContext *cx, JSScript *script, StackFrame *fp, CompileRequest request) { if (!cx->methodJitEnabled) return Compile_Abort; @@ -81,7 +81,7 @@ CanMethodJIT(JSContext *cx, JSScript *script, JSStackFrame *fp, CompileRequest r * methodjit. If so, we compile the given function. 
*/ static inline CompileStatus -CanMethodJITAtBranch(JSContext *cx, JSScript *script, JSStackFrame *fp, jsbytecode *pc) +CanMethodJITAtBranch(JSContext *cx, JSScript *script, StackFrame *fp, jsbytecode *pc) { if (!cx->methodJitEnabled) return Compile_Abort; diff --git a/js/src/methodjit/MethodJIT.cpp b/js/src/methodjit/MethodJIT.cpp index c2af0ec5f0f7..8a96b86dfab7 100644 --- a/js/src/methodjit/MethodJIT.cpp +++ b/js/src/methodjit/MethodJIT.cpp @@ -62,20 +62,20 @@ js::mjit::CompilerAllocPolicy::CompilerAllocPolicy(JSContext *cx, Compiler &comp { } void -JSStackFrame::methodjitStaticAsserts() +StackFrame::methodjitStaticAsserts() { /* Static assert for x86 trampolines in MethodJIT.cpp. */ #if defined(JS_CPU_X86) - JS_STATIC_ASSERT(offsetof(JSStackFrame, rval_) == 0x18); - JS_STATIC_ASSERT(offsetof(JSStackFrame, rval_) + 4 == 0x1C); - JS_STATIC_ASSERT(offsetof(JSStackFrame, ncode_) == 0x14); + JS_STATIC_ASSERT(offsetof(StackFrame, rval_) == 0x18); + JS_STATIC_ASSERT(offsetof(StackFrame, rval_) + 4 == 0x1C); + JS_STATIC_ASSERT(offsetof(StackFrame, ncode_) == 0x14); /* ARM uses decimal literals. 
*/ - JS_STATIC_ASSERT(offsetof(JSStackFrame, rval_) == 24); - JS_STATIC_ASSERT(offsetof(JSStackFrame, rval_) + 4 == 28); - JS_STATIC_ASSERT(offsetof(JSStackFrame, ncode_) == 20); + JS_STATIC_ASSERT(offsetof(StackFrame, rval_) == 24); + JS_STATIC_ASSERT(offsetof(StackFrame, rval_) + 4 == 28); + JS_STATIC_ASSERT(offsetof(StackFrame, ncode_) == 20); #elif defined(JS_CPU_X64) - JS_STATIC_ASSERT(offsetof(JSStackFrame, rval_) == 0x30); - JS_STATIC_ASSERT(offsetof(JSStackFrame, ncode_) == 0x28); + JS_STATIC_ASSERT(offsetof(StackFrame, rval_) == 0x30); + JS_STATIC_ASSERT(offsetof(StackFrame, ncode_) == 0x28); #endif } @@ -119,7 +119,7 @@ extern "C" void JS_FASTCALL PushActiveVMFrame(VMFrame &f) { f.entryfp->script()->compartment->jaegerCompartment->pushActiveFrame(&f); - f.regs.fp->setNativeReturnAddress(JS_FUNC_TO_DATA_PTR(void*, JaegerTrampolineReturn)); + f.regs.fp()->setNativeReturnAddress(JS_FUNC_TO_DATA_PTR(void*, JaegerTrampolineReturn)); } extern "C" void JS_FASTCALL @@ -131,7 +131,8 @@ PopActiveVMFrame(VMFrame &f) extern "C" void JS_FASTCALL SetVMFrameRegs(VMFrame &f) { - f.cx->setCurrentRegs(&f.regs); + /* Restored on exit from EnterMethodJIT. 
*/ + f.cx->stack.repointRegs(&f.regs); } #if defined(__APPLE__) || (defined(XP_WIN) && !defined(JS_CPU_X64)) || defined(XP_OS2) @@ -140,7 +141,7 @@ SetVMFrameRegs(VMFrame &f) # define SYMBOL_STRING(name) #name #endif -JS_STATIC_ASSERT(offsetof(JSFrameRegs, sp) == 0); +JS_STATIC_ASSERT(offsetof(FrameRegs, sp) == 0); #if defined(__linux__) && defined(JS_CPU_X64) # define SYMBOL_STRING_RELOC(name) #name "@plt" @@ -179,7 +180,7 @@ JS_STATIC_ASSERT(sizeof(VMFrame) % 16 == 0); * *** DANGER *** */ JS_STATIC_ASSERT(offsetof(VMFrame, savedRBX) == 0x58); -JS_STATIC_ASSERT(offsetof(VMFrame, regs.fp) == 0x38); +JS_STATIC_ASSERT(VMFrame::offsetOfFp == 0x38); JS_STATIC_ASSERT(JSVAL_TAG_MASK == 0xFFFF800000000000LL); JS_STATIC_ASSERT(JSVAL_PAYLOAD_MASK == 0x00007FFFFFFFFFFFLL); @@ -284,7 +285,7 @@ SYMBOL_STRING(JaegerThrowpoline) ":" "\n" * *** DANGER *** */ JS_STATIC_ASSERT(offsetof(VMFrame, savedEBX) == 0x2c); -JS_STATIC_ASSERT(offsetof(VMFrame, regs.fp) == 0x1C); +JS_STATIC_ASSERT((VMFrame::offsetOfFp) == 0x1C); asm ( ".text\n" @@ -370,7 +371,7 @@ JS_STATIC_ASSERT(offsetof(VMFrame, savedLR) == (4*19)); JS_STATIC_ASSERT(offsetof(VMFrame, entryfp) == (4*10)); JS_STATIC_ASSERT(offsetof(VMFrame, stackLimit) == (4*9)); JS_STATIC_ASSERT(offsetof(VMFrame, cx) == (4*8)); -JS_STATIC_ASSERT(offsetof(VMFrame, regs.fp) == (4*7)); +JS_STATIC_ASSERT(VMFrame::offsetOfFp == (4*7)); JS_STATIC_ASSERT(offsetof(VMFrame, unused) == (4*4)); JS_STATIC_ASSERT(offsetof(VMFrame, previous) == (4*3)); @@ -520,11 +521,11 @@ SYMBOL_STRING(JaegerStubVeneer) ":" "\n" * *** DANGER *** */ JS_STATIC_ASSERT(offsetof(VMFrame, savedEBX) == 0x2c); -JS_STATIC_ASSERT(offsetof(VMFrame, regs.fp) == 0x1C); +JS_STATIC_ASSERT(VMFrame::offsetOfFp == 0x1C); extern "C" { - __declspec(naked) JSBool JaegerTrampoline(JSContext *cx, JSStackFrame *fp, void *code, + __declspec(naked) JSBool JaegerTrampoline(JSContext *cx, StackFrame *fp, void *code, Value *stackLimit) { __asm { @@ -615,7 +616,7 @@ extern "C" { * *** DANGER *** */ 
JS_STATIC_ASSERT(offsetof(VMFrame, savedRBX) == 0x58); -JS_STATIC_ASSERT(offsetof(VMFrame, regs.fp) == 0x38); +JS_STATIC_ASSERT(VMFrame::offsetOfFp == 0x38); JS_STATIC_ASSERT(JSVAL_TAG_MASK == 0xFFFF800000000000LL); JS_STATIC_ASSERT(JSVAL_PAYLOAD_MASK == 0x00007FFFFFFFFFFFLL); @@ -660,10 +661,10 @@ JaegerCompartment::Finish() } extern "C" JSBool -JaegerTrampoline(JSContext *cx, JSStackFrame *fp, void *code, Value *stackLimit); +JaegerTrampoline(JSContext *cx, StackFrame *fp, void *code, Value *stackLimit); JSBool -mjit::EnterMethodJIT(JSContext *cx, JSStackFrame *fp, void *code, Value *stackLimit) +mjit::EnterMethodJIT(JSContext *cx, StackFrame *fp, void *code, Value *stackLimit) { #ifdef JS_METHODJIT_SPEW Profiler prof; @@ -674,8 +675,8 @@ mjit::EnterMethodJIT(JSContext *cx, JSStackFrame *fp, void *code, Value *stackLi prof.start(); #endif - JS_ASSERT(cx->regs->fp == fp); - JSFrameRegs *oldRegs = cx->regs; + JS_ASSERT(cx->fp() == fp); + FrameRegs &oldRegs = cx->regs(); JSBool ok; { @@ -684,7 +685,8 @@ mjit::EnterMethodJIT(JSContext *cx, JSStackFrame *fp, void *code, Value *stackLi ok = JaegerTrampoline(cx, fp, code, stackLimit); } - cx->setCurrentRegs(oldRegs); + /* Undo repointRegs in SetVMFrameRegs. */ + cx->stack.repointRegs(&oldRegs); JS_ASSERT(fp == cx->fp()); /* The trampoline wrote the return value but did not set the HAS_RVAL flag. 
*/ @@ -702,11 +704,11 @@ mjit::EnterMethodJIT(JSContext *cx, JSStackFrame *fp, void *code, Value *stackLi } static inline JSBool -CheckStackAndEnterMethodJIT(JSContext *cx, JSStackFrame *fp, void *code) +CheckStackAndEnterMethodJIT(JSContext *cx, StackFrame *fp, void *code) { JS_CHECK_RECURSION(cx, return false); - Value *stackLimit = cx->stack().getStackLimit(cx); + Value *stackLimit = cx->stack.space().getStackLimit(cx); if (!stackLimit) return false; @@ -716,7 +718,7 @@ CheckStackAndEnterMethodJIT(JSContext *cx, JSStackFrame *fp, void *code) JSBool mjit::JaegerShot(JSContext *cx) { - JSStackFrame *fp = cx->fp(); + StackFrame *fp = cx->fp(); JSScript *script = fp->script(); JITScript *jit = script->getJIT(fp->isConstructing()); @@ -725,7 +727,7 @@ mjit::JaegerShot(JSContext *cx) AbortRecording(cx, "attempt to enter method JIT while recording"); #endif - JS_ASSERT(cx->regs->pc == script->code); + JS_ASSERT(cx->regs().pc == script->code); return CheckStackAndEnterMethodJIT(cx, cx->fp(), jit->invokeEntry); } diff --git a/js/src/methodjit/MethodJIT.h b/js/src/methodjit/MethodJIT.h index e1741e95731f..5b4dd457de16 100644 --- a/js/src/methodjit/MethodJIT.h +++ b/js/src/methodjit/MethodJIT.h @@ -107,10 +107,10 @@ struct VMFrame VMFrame *previous; void *unused; - JSFrameRegs regs; + FrameRegs regs; JSContext *cx; Value *stackLimit; - JSStackFrame *entryfp; + StackFrame *entryfp; #if defined(JS_CPU_X86) void *savedEBX; @@ -178,8 +178,13 @@ struct VMFrame JSRuntime *runtime() { return cx->runtime; } - JSStackFrame *&fp() { return regs.fp; } + StackFrame *fp() { return regs.fp(); } mjit::JITScript *jit() { return fp()->jit(); } + + static const size_t offsetOfFp = 5 * sizeof(void *) + FrameRegs::offsetOfFp; + static void staticAssert() { + JS_STATIC_ASSERT(offsetOfFp == offsetof(VMFrame, regs) + FrameRegs::offsetOfFp); + } }; #ifdef JS_CPU_ARM @@ -425,7 +430,7 @@ struct JITScript { * Execute the given mjit code. 
This is a low-level call and callers must * provide the same guarantees as JaegerShot/CheckStackAndEnterMethodJIT. */ -JSBool EnterMethodJIT(JSContext *cx, JSStackFrame *fp, void *code, Value *stackLimit); +JSBool EnterMethodJIT(JSContext *cx, StackFrame *fp, void *code, Value *stackLimit); /* Execute a method that has been JIT compiled. */ JSBool JaegerShot(JSContext *cx); @@ -445,7 +450,7 @@ void JS_FASTCALL ProfileStubCall(VMFrame &f); CompileStatus JS_NEVER_INLINE -TryCompile(JSContext *cx, JSStackFrame *fp); +TryCompile(JSContext *cx, StackFrame *fp); void ReleaseScriptCode(JSContext *cx, JSScript *script); diff --git a/js/src/methodjit/MonoIC.cpp b/js/src/methodjit/MonoIC.cpp index d3db5ecfecb5..8d072baabbb1 100644 --- a/js/src/methodjit/MonoIC.cpp +++ b/js/src/methodjit/MonoIC.cpp @@ -652,7 +652,7 @@ class CallCompiler : public BaseCompiler Jump hasCode = masm.branchPtr(Assembler::Above, t0, ImmPtr(JS_UNJITTABLE_SCRIPT)); /* Try and compile. On success we get back the nmap pointer. */ - masm.storePtr(JSFrameReg, FrameAddress(offsetof(VMFrame, regs.fp))); + masm.storePtr(JSFrameReg, FrameAddress(VMFrame::offsetOfFp)); void *compilePtr = JS_FUNC_TO_DATA_PTR(void *, stubs::CompileFunction); if (ic.frameSize.isStatic()) { masm.move(Imm32(ic.frameSize.staticArgc()), Registers::ArgReg1); @@ -661,7 +661,7 @@ class CallCompiler : public BaseCompiler masm.load32(FrameAddress(offsetof(VMFrame, u.call.dynamicArgc)), Registers::ArgReg1); masm.fallibleVMCall(compilePtr, script->code, -1); } - masm.loadPtr(FrameAddress(offsetof(VMFrame, regs.fp)), JSFrameReg); + masm.loadPtr(FrameAddress(VMFrame::offsetOfFp), JSFrameReg); Jump notCompiled = masm.branchTestPtr(Assembler::Zero, Registers::ReturnReg, Registers::ReturnReg); @@ -777,7 +777,7 @@ class CallCompiler : public BaseCompiler JITScript *jit = f.jit(); /* Snapshot the frameDepth before SplatApplyArgs modifies it. 
*/ - uintN initialFrameDepth = f.regs.sp - f.regs.fp->slots(); + uintN initialFrameDepth = f.regs.sp - f.regs.fp()->slots(); /* * SplatApplyArgs has not been called, so we call it here before @@ -785,7 +785,7 @@ class CallCompiler : public BaseCompiler */ Value *vp; if (ic.frameSize.isStatic()) { - JS_ASSERT(f.regs.sp - f.regs.fp->slots() == (int)ic.frameSize.staticLocalSlots()); + JS_ASSERT(f.regs.sp - f.regs.fp()->slots() == (int)ic.frameSize.staticLocalSlots()); vp = f.regs.sp - (2 + ic.frameSize.staticArgc()); } else { JS_ASSERT(*f.regs.pc == JSOP_FUNAPPLY && GET_ARGC(f.regs.pc) == 2); @@ -839,18 +839,18 @@ class CallCompiler : public BaseCompiler RegisterID t0 = tempRegs.takeAnyReg(); /* Store pc. */ - masm.storePtr(ImmPtr(cx->regs->pc), + masm.storePtr(ImmPtr(cx->regs().pc), FrameAddress(offsetof(VMFrame, regs.pc))); /* Store sp (if not already set by ic::SplatApplyArgs). */ if (ic.frameSize.isStatic()) { - uint32 spOffset = sizeof(JSStackFrame) + initialFrameDepth * sizeof(Value); + uint32 spOffset = sizeof(StackFrame) + initialFrameDepth * sizeof(Value); masm.addPtr(Imm32(spOffset), JSFrameReg, t0); masm.storePtr(t0, FrameAddress(offsetof(VMFrame, regs.sp))); } /* Store fp. */ - masm.storePtr(JSFrameReg, FrameAddress(offsetof(VMFrame, regs.fp))); + masm.storePtr(JSFrameReg, FrameAddress(VMFrame::offsetOfFp)); /* Grab cx. */ #ifdef JS_CPU_X86 @@ -868,7 +868,7 @@ class CallCompiler : public BaseCompiler #endif MaybeRegisterID argcReg; if (ic.frameSize.isStatic()) { - uint32 vpOffset = sizeof(JSStackFrame) + (vp - f.regs.fp->slots()) * sizeof(Value); + uint32 vpOffset = sizeof(StackFrame) + (vp - f.regs.fp()->slots()) * sizeof(Value); masm.addPtr(Imm32(vpOffset), JSFrameReg, vpReg); } else { argcReg = tempRegs.takeAnyReg(); @@ -972,7 +972,7 @@ class CallCompiler : public BaseCompiler JSObject *callee = ucr.callee; JS_ASSERT(callee); - uint32 flags = callingNew ? JSFRAME_CONSTRUCTING : 0; + uint32 flags = callingNew ? 
StackFrame::CONSTRUCTING : 0; if (!ic.hit) { ic.hit = true; @@ -1036,7 +1036,6 @@ ic::NativeNew(VMFrame &f, CallICInfo *ic) } static const unsigned MANY_ARGS = 1024; -static const unsigned MIN_SPACE = 500; static bool BumpStackFull(VMFrame &f, uintN inc) @@ -1045,12 +1044,8 @@ BumpStackFull(VMFrame &f, uintN inc) if (inc < MANY_ARGS) { if (f.regs.sp + inc < f.stackLimit) return true; - StackSpace &stack = f.cx->stack(); - if (!stack.bumpCommitAndLimit(f.entryfp, f.regs.sp, inc, &f.stackLimit)) { - js_ReportOverRecursed(f.cx); - return false; - } - return true; + StackSpace &space = f.cx->stack.space(); + return space.bumpLimitWithinQuota(f.cx, f.entryfp, f.regs.sp, inc, &f.stackLimit); } /* @@ -1065,20 +1060,8 @@ BumpStackFull(VMFrame &f, uintN inc) * However, since each apply call must consume at least MANY_ARGS slots, * this sequence will quickly reach the end of the stack and OOM. */ - - uintN incWithSpace = inc + MIN_SPACE; - Value *bumpedWithSpace = f.regs.sp + incWithSpace; - if (bumpedWithSpace < f.stackLimit) - return true; - - StackSpace &stack = f.cx->stack(); - if (stack.bumpCommitAndLimit(f.entryfp, f.regs.sp, incWithSpace, &f.stackLimit)) - return true; - - if (!stack.ensureSpace(f.cx, f.regs.sp, incWithSpace)) - return false; - f.stackLimit = bumpedWithSpace; - return true; + StackSpace &space = f.cx->stack.space(); + return space.bumpLimit(f.cx, f.entryfp, f.regs.sp, inc, &f.stackLimit); } static JS_ALWAYS_INLINE bool @@ -1117,7 +1100,7 @@ ic::SplatApplyArgs(VMFrame &f) Value *vp = f.regs.sp - 3; JS_ASSERT(JS_CALLEE(cx, vp).toObject().getFunctionPrivate()->u.n.native == js_fun_apply); - JSStackFrame *fp = f.regs.fp; + StackFrame *fp = f.regs.fp(); if (!fp->hasOverriddenArgs()) { uintN n; if (!fp->hasArgsObj()) { diff --git a/js/src/methodjit/PolyIC.cpp b/js/src/methodjit/PolyIC.cpp index 1c7c39544cab..4dc89e596315 100644 --- a/js/src/methodjit/PolyIC.cpp +++ b/js/src/methodjit/PolyIC.cpp @@ -404,8 +404,8 @@ class SetPropCompiler : public 
PICStubCompiler { Address addr(pic.shapeReg, shape->setterOp() == SetCallArg - ? JSStackFrame::offsetOfFormalArg(fun, slot) - : JSStackFrame::offsetOfFixed(slot)); + ? StackFrame::offsetOfFormalArg(fun, slot) + : StackFrame::offsetOfFixed(slot)); masm.storeValue(pic.u.vr, addr); skipOver = masm.jump(); } @@ -907,7 +907,7 @@ class GetPropCompiler : public PICStubCompiler * up in the fast path, or put this offset in PICInfo? */ uint32 thisvOffset = uint32(f.regs.sp - f.fp()->slots()) - 1; - Address thisv(JSFrameReg, sizeof(JSStackFrame) + thisvOffset * sizeof(Value)); + Address thisv(JSFrameReg, sizeof(StackFrame) + thisvOffset * sizeof(Value)); masm.storeValueFromComponents(ImmType(JSVAL_TYPE_STRING), pic.objReg, thisv); @@ -1247,7 +1247,7 @@ class ScopeNameCompiler : public PICStubCompiler /* For GETXPROP, the object is already in objReg. */ if (pic.kind == ic::PICInfo::NAME) - masm.loadPtr(Address(JSFrameReg, JSStackFrame::offsetOfScopeChain()), pic.objReg); + masm.loadPtr(Address(JSFrameReg, StackFrame::offsetOfScopeChain()), pic.objReg); JS_ASSERT(obj == getprop.holder); JS_ASSERT(getprop.holder == scopeChain->getGlobal()); @@ -1315,7 +1315,7 @@ class ScopeNameCompiler : public PICStubCompiler /* For GETXPROP, the object is already in objReg. */ if (pic.kind == ic::PICInfo::NAME) - masm.loadPtr(Address(JSFrameReg, JSStackFrame::offsetOfScopeChain()), pic.objReg); + masm.loadPtr(Address(JSFrameReg, StackFrame::offsetOfScopeChain()), pic.objReg); JS_ASSERT(obj == getprop.holder); JS_ASSERT(getprop.holder != scopeChain->getGlobal()); @@ -1352,8 +1352,8 @@ class ScopeNameCompiler : public PICStubCompiler /* Not-escaped case. */ { - Address addr(pic.shapeReg, kind == ARG ? JSStackFrame::offsetOfFormalArg(fun, slot) - : JSStackFrame::offsetOfFixed(slot)); + Address addr(pic.shapeReg, kind == ARG ? 
StackFrame::offsetOfFormalArg(fun, slot) + : StackFrame::offsetOfFixed(slot)); masm.loadPayload(addr, pic.objReg); masm.loadTypeTag(addr, pic.shapeReg); skipOver = masm.jump(); @@ -1459,7 +1459,7 @@ class ScopeNameCompiler : public PICStubCompiler /* Kludge to allow (typeof foo == "undefined") tests. */ disable("property not found"); if (pic.kind == ic::PICInfo::NAME) { - JSOp op2 = js_GetOpcode(cx, script, cx->regs->pc + JSOP_NAME_LENGTH); + JSOp op2 = js_GetOpcode(cx, script, cx->regs().pc + JSOP_NAME_LENGTH); if (op2 == JSOP_TYPEOF) { vp->setUndefined(); return true; @@ -1531,7 +1531,7 @@ class BindNameCompiler : public PICStubCompiler BindNameLabels &labels = pic.bindNameLabels(); /* Guard on the shape of the scope chain. */ - masm.loadPtr(Address(JSFrameReg, JSStackFrame::offsetOfScopeChain()), pic.objReg); + masm.loadPtr(Address(JSFrameReg, StackFrame::offsetOfScopeChain()), pic.objReg); masm.loadShape(pic.objReg, pic.shapeReg); Jump firstShape = masm.branch32(Assembler::NotEqual, pic.shapeReg, Imm32(scopeChain->shape())); @@ -1751,7 +1751,7 @@ void JS_FASTCALL ic::CallProp(VMFrame &f, ic::PICInfo *pic) { JSContext *cx = f.cx; - JSFrameRegs ®s = f.regs; + FrameRegs ®s = f.regs; JSScript *script = f.fp()->script(); @@ -1850,7 +1850,7 @@ ic::CallProp(VMFrame &f, ic::PICInfo *pic) } #if JS_HAS_NO_SUCH_METHOD - if (JS_UNLIKELY(rval.isUndefined()) && regs.sp[-1].isObject()) { + if (JS_UNLIKELY(rval.isPrimitive()) && regs.sp[-1].isObject()) { regs.sp[-2].setString(pic->atom); if (!js_OnUnknownMethod(cx, regs.sp - 2)) THROW(); @@ -2102,8 +2102,8 @@ GetElementIC::attachGetProp(JSContext *cx, JSObject *obj, const Value &v, jsid i if (op == JSOP_CALLELEM) { // Emit a write of |obj| to the top of the stack, before we lose it. 
- Value *thisVp = &cx->regs->sp[-1]; - Address thisSlot(JSFrameReg, JSStackFrame::offsetOfFixed(thisVp - cx->fp()->slots())); + Value *thisVp = &cx->regs().sp[-1]; + Address thisSlot(JSFrameReg, StackFrame::offsetOfFixed(thisVp - cx->fp()->slots())); masm.storeValueFromComponents(ImmType(JSVAL_TYPE_OBJECT), objReg, thisSlot); } @@ -2369,7 +2369,7 @@ ic::CallElement(VMFrame &f, ic::GetElementIC *ic) THROW(); #if JS_HAS_NO_SUCH_METHOD - if (JS_UNLIKELY(f.regs.sp[-2].isUndefined()) && thisv.isObject()) { + if (JS_UNLIKELY(f.regs.sp[-2].isPrimitive()) && thisv.isObject()) { f.regs.sp[-2] = f.regs.sp[-1]; f.regs.sp[-1].setObject(*thisObj); if (!js_OnUnknownMethod(cx, f.regs.sp - 2)) diff --git a/js/src/methodjit/RematInfo.h b/js/src/methodjit/RematInfo.h index 3279e763426c..caba92daed01 100644 --- a/js/src/methodjit/RematInfo.h +++ b/js/src/methodjit/RematInfo.h @@ -88,7 +88,7 @@ struct StateRemat { // representation in a struct or union. This prevents bloating the IC // structs by an extra 8 bytes in some cases. 16 bits are needed to encode // the largest local: - // ((UINT16_LIMIT - 1) * sizeof(Value) + sizeof(JSStackFrame), + // ((UINT16_LIMIT - 1) * sizeof(Value) + sizeof(StackFrame), // And an extra bit for the sign on arguments. 
#define MIN_STATE_REMAT_BITS 21 @@ -96,7 +96,7 @@ struct StateRemat { bool inRegister() const { return offset_ >= 0 && offset_ <= int32(JSC::MacroAssembler::TotalRegisters); } bool inMemory() const { - return offset_ >= int32(sizeof(JSStackFrame)) || + return offset_ >= int32(sizeof(StackFrame)) || offset_ < 0; } diff --git a/js/src/methodjit/Retcon.cpp b/js/src/methodjit/Retcon.cpp index e081b92ec499..05dd2e1e07e9 100644 --- a/js/src/methodjit/Retcon.cpp +++ b/js/src/methodjit/Retcon.cpp @@ -123,8 +123,8 @@ Recompiler::recompile() Vector normalPatches(cx); Vector ctorPatches(cx); - JSStackFrame *firstCtorFrame = NULL; - JSStackFrame *firstNormalFrame = NULL; + StackFrame *firstCtorFrame = NULL; + StackFrame *firstNormalFrame = NULL; // Find all JIT'd stack frames to account for return addresses that will // need to be patched after recompilation. @@ -133,8 +133,8 @@ Recompiler::recompile() f = f->previous) { // Scan all frames owned by this VMFrame. - JSStackFrame *end = f->entryfp->prev(); - for (JSStackFrame *fp = f->fp(); fp != end; fp = fp->prev()) { + StackFrame *end = f->entryfp->prev(); + for (StackFrame *fp = f->fp(); fp != end; fp = fp->prev()) { // Remember the latest frame for each type of JIT'd code, so the // compiler will have a frame to re-JIT from. if (!firstCtorFrame && fp->script() == script && fp->isConstructing()) @@ -198,7 +198,7 @@ Recompiler::saveTraps(JITScript *jit, Vector *sites) } bool -Recompiler::recompile(JSStackFrame *fp, Vector &patches, +Recompiler::recompile(StackFrame *fp, Vector &patches, Vector &sites) { /* If we get this far, the script is live, and we better be safe to re-jit. 
*/ diff --git a/js/src/methodjit/Retcon.h b/js/src/methodjit/Retcon.h index afdf8f197bd1..582a8167581a 100644 --- a/js/src/methodjit/Retcon.h +++ b/js/src/methodjit/Retcon.h @@ -98,7 +98,7 @@ private: PatchableAddress findPatch(JITScript *jit, void **location); void applyPatch(Compiler& c, PatchableAddress& toPatch); - bool recompile(JSStackFrame *fp, Vector &patches, + bool recompile(StackFrame *fp, Vector &patches, Vector &sites); bool saveTraps(JITScript *jit, Vector *sites); }; diff --git a/js/src/methodjit/StubCalls.cpp b/js/src/methodjit/StubCalls.cpp index f75be169f6ae..c6ce87246497 100644 --- a/js/src/methodjit/StubCalls.cpp +++ b/js/src/methodjit/StubCalls.cpp @@ -411,7 +411,7 @@ void JS_FASTCALL stubs::GetElem(VMFrame &f) { JSContext *cx = f.cx; - JSFrameRegs ®s = f.regs; + FrameRegs ®s = f.regs; Value &lref = regs.sp[-2]; Value &rref = regs.sp[-1]; @@ -451,7 +451,7 @@ stubs::GetElem(VMFrame &f) if (arg < obj->getArgsInitialLength()) { copyFrom = obj->addressOfArgsElement(arg); if (!copyFrom->isMagic()) { - if (JSStackFrame *afp = (JSStackFrame *) obj->getPrivate()) + if (StackFrame *afp = (StackFrame *) obj->getPrivate()) copyFrom = &afp->canonicalActualArg(arg); goto end_getelem; } @@ -496,7 +496,7 @@ void JS_FASTCALL stubs::CallElem(VMFrame &f) { JSContext *cx = f.cx; - JSFrameRegs ®s = f.regs; + FrameRegs ®s = f.regs; /* Find the object on which to look for |this|'s properties. 
*/ Value thisv = regs.sp[-2]; @@ -514,7 +514,7 @@ stubs::CallElem(VMFrame &f) THROW(); #if JS_HAS_NO_SUCH_METHOD - if (JS_UNLIKELY(regs.sp[-2].isUndefined()) && thisv.isObject()) { + if (JS_UNLIKELY(regs.sp[-2].isPrimitive()) && thisv.isObject()) { regs.sp[-2] = regs.sp[-1]; regs.sp[-1].setObject(*thisObj); if (!js_OnUnknownMethod(cx, regs.sp - 2)) @@ -531,7 +531,7 @@ void JS_FASTCALL stubs::SetElem(VMFrame &f) { JSContext *cx = f.cx; - JSFrameRegs ®s = f.regs; + FrameRegs ®s = f.regs; Value &objval = regs.sp[-3]; Value &idval = regs.sp[-2]; @@ -691,7 +691,7 @@ stubs::DefFun(VMFrame &f, JSFunction *fun) JSObject *obj2; JSContext *cx = f.cx; - JSStackFrame *fp = f.fp(); + StackFrame *fp = f.fp(); /* * A top-level function defined in Global or Eval code (see ECMA-262 @@ -744,7 +744,7 @@ stubs::DefFun(VMFrame &f, JSFunction *fun) * current scope chain even for the case of function expression statements * and functions defined by eval inside let or with blocks. */ - JSObject *parent = &fp->varobj(cx); + JSObject *parent = &cx->stack.currentVarObj(); /* ES5 10.5 (NB: with subsequent errata). 
*/ jsid id = ATOM_TO_JSID(fun->atom); @@ -811,7 +811,7 @@ template void JS_FASTCALL stubs::DefFun(VMFrame &f, JSFunction *fun); #define RELATIONAL(OP) \ JS_BEGIN_MACRO \ JSContext *cx = f.cx; \ - JSFrameRegs ®s = f.regs; \ + FrameRegs ®s = f.regs; \ Value rval = regs.sp[-1]; \ Value lval = regs.sp[-2]; \ bool cond; \ @@ -879,7 +879,7 @@ static inline bool StubEqualityOp(VMFrame &f) { JSContext *cx = f.cx; - JSFrameRegs ®s = f.regs; + FrameRegs ®s = f.regs; Value rval = regs.sp[-1]; Value lval = regs.sp[-2]; @@ -1006,7 +1006,7 @@ void JS_FASTCALL stubs::Add(VMFrame &f) { JSContext *cx = f.cx; - JSFrameRegs ®s = f.regs; + FrameRegs ®s = f.regs; Value rval = regs.sp[-1]; Value lval = regs.sp[-2]; @@ -1078,7 +1078,7 @@ void JS_FASTCALL stubs::Sub(VMFrame &f) { JSContext *cx = f.cx; - JSFrameRegs ®s = f.regs; + FrameRegs ®s = f.regs; double d1, d2; if (!ValueToNumber(cx, regs.sp[-2], &d1) || !ValueToNumber(cx, regs.sp[-1], &d2)) { @@ -1092,7 +1092,7 @@ void JS_FASTCALL stubs::Mul(VMFrame &f) { JSContext *cx = f.cx; - JSFrameRegs ®s = f.regs; + FrameRegs ®s = f.regs; double d1, d2; if (!ValueToNumber(cx, regs.sp[-2], &d1) || !ValueToNumber(cx, regs.sp[-1], &d2)) { @@ -1107,7 +1107,7 @@ stubs::Div(VMFrame &f) { JSContext *cx = f.cx; JSRuntime *rt = cx->runtime; - JSFrameRegs ®s = f.regs; + FrameRegs ®s = f.regs; double d1, d2; if (!ValueToNumber(cx, regs.sp[-2], &d1) || @@ -1139,7 +1139,7 @@ void JS_FASTCALL stubs::Mod(VMFrame &f) { JSContext *cx = f.cx; - JSFrameRegs ®s = f.regs; + FrameRegs ®s = f.regs; Value &lref = regs.sp[-2]; Value &rref = regs.sp[-1]; @@ -1202,7 +1202,7 @@ void JS_FASTCALL stubs::Trap(VMFrame &f, uint32 trapTypes) { Value rval; - jsbytecode *pc = f.cx->regs->pc; + jsbytecode *pc = f.cx->regs().pc; /* * Trap may be called for a single-step interrupt trap and/or a @@ -1296,7 +1296,7 @@ void JS_FASTCALL stubs::InitElem(VMFrame &f, uint32 last) { JSContext *cx = f.cx; - JSFrameRegs ®s = f.regs; + FrameRegs ®s = f.regs; /* Pop the element's value into 
rval. */ JS_ASSERT(regs.sp - f.fp()->base() >= 3); @@ -1836,7 +1836,7 @@ static bool JS_FASTCALL InlineGetProp(VMFrame &f) { JSContext *cx = f.cx; - JSFrameRegs ®s = f.regs; + FrameRegs ®s = f.regs; Value *vp = &f.regs.sp[-1]; JSObject *obj = ValueToObject(f.cx, vp); @@ -1913,7 +1913,7 @@ void JS_FASTCALL stubs::CallProp(VMFrame &f, JSAtom *origAtom) { JSContext *cx = f.cx; - JSFrameRegs ®s = f.regs; + FrameRegs ®s = f.regs; Value lval; lval = regs.sp[-1]; @@ -1994,7 +1994,7 @@ stubs::CallProp(VMFrame &f, JSAtom *origAtom) } } #if JS_HAS_NO_SUCH_METHOD - if (JS_UNLIKELY(rval.isUndefined()) && regs.sp[-1].isObject()) { + if (JS_UNLIKELY(rval.isPrimitive()) && regs.sp[-1].isObject()) { regs.sp[-2].setString(origAtom); if (!js_OnUnknownMethod(cx, regs.sp - 2)) THROW(); @@ -2005,7 +2005,7 @@ stubs::CallProp(VMFrame &f, JSAtom *origAtom) void JS_FASTCALL stubs::Length(VMFrame &f) { - JSFrameRegs ®s = f.regs; + FrameRegs ®s = f.regs; Value *vp = ®s.sp[-1]; if (vp->isString()) { @@ -2042,7 +2042,7 @@ InitPropOrMethod(VMFrame &f, JSAtom *atom, JSOp op) { JSContext *cx = f.cx; JSRuntime *rt = cx->runtime; - JSFrameRegs ®s = f.regs; + FrameRegs ®s = f.regs; /* Load the property's initial value into rval. 
*/ JS_ASSERT(regs.sp - f.fp()->base() >= 2); @@ -2224,7 +2224,7 @@ JSBool JS_FASTCALL stubs::InstanceOf(VMFrame &f) { JSContext *cx = f.cx; - JSFrameRegs ®s = f.regs; + FrameRegs ®s = f.regs; const Value &rref = regs.sp[-1]; if (rref.isPrimitive()) { @@ -2263,7 +2263,7 @@ stubs::ArgCnt(VMFrame &f) { JSContext *cx = f.cx; JSRuntime *rt = cx->runtime; - JSStackFrame *fp = f.fp(); + StackFrame *fp = f.fp(); jsid id = ATOM_TO_JSID(rt->atomState.lengthAtom); f.regs.sp++; @@ -2274,9 +2274,9 @@ stubs::ArgCnt(VMFrame &f) void JS_FASTCALL stubs::EnterBlock(VMFrame &f, JSObject *obj) { - JSFrameRegs ®s = f.regs; + FrameRegs ®s = f.regs; #ifdef DEBUG - JSStackFrame *fp = f.fp(); + StackFrame *fp = f.fp(); #endif JS_ASSERT(obj->isStaticBlock()); @@ -2316,7 +2316,7 @@ void JS_FASTCALL stubs::LeaveBlock(VMFrame &f, JSObject *blockChain) { JSContext *cx = f.cx; - JSStackFrame *fp = f.fp(); + StackFrame *fp = f.fp(); #ifdef DEBUG JS_ASSERT(blockChain->isStaticBlock()); @@ -2552,9 +2552,9 @@ void JS_FASTCALL stubs::DefVarOrConst(VMFrame &f, JSAtom *atom) { JSContext *cx = f.cx; - JSStackFrame *fp = f.fp(); + StackFrame *fp = f.fp(); - JSObject *obj = &fp->varobj(cx); + JSObject *obj = &cx->stack.currentVarObj(); JS_ASSERT(!obj->getOps()->defineProperty); uintN attrs = JSPROP_ENUMERATE; if (!fp->isEvalFrame()) @@ -2598,9 +2598,8 @@ void JS_FASTCALL stubs::SetConst(VMFrame &f, JSAtom *atom) { JSContext *cx = f.cx; - JSStackFrame *fp = f.fp(); - JSObject *obj = &fp->varobj(cx); + JSObject *obj = &cx->stack.currentVarObj(); const Value &ref = f.regs.sp[-1]; if (!obj->defineProperty(cx, ATOM_TO_JSID(atom), ref, PropertyStub, StrictPropertyStub, diff --git a/js/src/methodjit/TrampolineCompiler.cpp b/js/src/methodjit/TrampolineCompiler.cpp index a822c0fc4739..a6ac9d709f0e 100644 --- a/js/src/methodjit/TrampolineCompiler.cpp +++ b/js/src/methodjit/TrampolineCompiler.cpp @@ -118,20 +118,20 @@ TrampolineCompiler::generateForceReturn(Assembler &masm) { /* if (hasArgsObj() || hasCallObj()) 
stubs::PutActivationObjects() */ Jump noActObjs = masm.branchTest32(Assembler::Zero, FrameFlagsAddress(), - Imm32(JSFRAME_HAS_CALL_OBJ | JSFRAME_HAS_ARGS_OBJ)); + Imm32(StackFrame::HAS_CALL_OBJ | StackFrame::HAS_ARGS_OBJ)); masm.fallibleVMCall(JS_FUNC_TO_DATA_PTR(void *, stubs::PutActivationObjects), NULL, 0); noActObjs.linkTo(masm.label(), &masm); /* Store any known return value */ masm.loadValueAsComponents(UndefinedValue(), JSReturnReg_Type, JSReturnReg_Data); Jump rvalClear = masm.branchTest32(Assembler::Zero, - FrameFlagsAddress(), Imm32(JSFRAME_HAS_RVAL)); - Address rvalAddress(JSFrameReg, JSStackFrame::offsetOfReturnValue()); + FrameFlagsAddress(), Imm32(StackFrame::HAS_RVAL)); + Address rvalAddress(JSFrameReg, StackFrame::offsetOfReturnValue()); masm.loadValueAsComponents(rvalAddress, JSReturnReg_Type, JSReturnReg_Data); rvalClear.linkTo(masm.label(), &masm); /* Return to the caller */ - masm.loadPtr(Address(JSFrameReg, JSStackFrame::offsetOfncode()), Registers::ReturnReg); + masm.loadPtr(Address(JSFrameReg, StackFrame::offsetOfNcode()), Registers::ReturnReg); masm.jump(Registers::ReturnReg); return true; } diff --git a/js/src/methodjit/TrampolineMasmX64.asm b/js/src/methodjit/TrampolineMasmX64.asm index 037ace7022c1..054f7a80a8b9 100644 --- a/js/src/methodjit/TrampolineMasmX64.asm +++ b/js/src/methodjit/TrampolineMasmX64.asm @@ -43,7 +43,7 @@ extern PopActiveVMFrame:PROC .CODE -; JSBool JaegerTrampoline(JSContext *cx, JSStackFrame *fp, void *code, +; JSBool JaegerTrampoline(JSContext *cx, StackFrame *fp, void *code, ; Value *stackLimit, void *safePoint); JaegerTrampoline PROC FRAME push rbp diff --git a/js/src/methodjit/TrampolineMingwX64.s b/js/src/methodjit/TrampolineMingwX64.s index 056aab9c1e3a..95a47ac45db3 100644 --- a/js/src/methodjit/TrampolineMingwX64.s +++ b/js/src/methodjit/TrampolineMingwX64.s @@ -44,7 +44,7 @@ .text .intel_syntax noprefix -# JSBool JaegerTrampoline(JSContext *cx, JSStackFrame *fp, void *code, +# JSBool 
JaegerTrampoline(JSContext *cx, StackFrame *fp, void *code, # Value *stackLimit, void *safePoint)# .globl JaegerTrampoline .def JaegerTrampoline diff --git a/js/src/methodjit/TrampolineSUNWX64.s b/js/src/methodjit/TrampolineSUNWX64.s index 114ea466dcac..ae415d30be92 100644 --- a/js/src/methodjit/TrampolineSUNWX64.s +++ b/js/src/methodjit/TrampolineSUNWX64.s @@ -37,8 +37,8 @@ .text -/ JSBool JaegerTrampoline(JSContext *cx, JSStackFrame *fp, void *code, -/ JSFrameRegs *regs, uintptr_t inlineCallCount) +/ JSBool JaegerTrampoline(JSContext *cx, StackFrame *fp, void *code, +/ FrameRegs *regs, uintptr_t inlineCallCount) .global JaegerTrampoline .type JaegerTrampoline, @function JaegerTrampoline: diff --git a/js/src/methodjit/TrampolineSUNWX86.s b/js/src/methodjit/TrampolineSUNWX86.s index e3920f54e5e8..292046f18395 100644 --- a/js/src/methodjit/TrampolineSUNWX86.s +++ b/js/src/methodjit/TrampolineSUNWX86.s @@ -37,8 +37,8 @@ .text -/ JSBool JaegerTrampoline(JSContext *cx, JSStackFrame *fp, void *code, -/ JSFrameRegs *regs, uintptr_t inlineCallCount) +/ JSBool JaegerTrampoline(JSContext *cx, StackFrame *fp, void *code, +/ FrameRegs *regs, uintptr_t inlineCallCount) .global JaegerTrampoline .type JaegerTrampoline, @function JaegerTrampoline: diff --git a/js/src/shell/js.cpp b/js/src/shell/js.cpp index 6bfffbb8b8e7..1cb4d14379bd 100644 --- a/js/src/shell/js.cpp +++ b/js/src/shell/js.cpp @@ -1800,7 +1800,7 @@ GetTrapArgs(JSContext *cx, uintN argc, jsval *argv, JSScript **scriptp, uintN intarg; JSScript *script; - *scriptp = JS_GetScriptedCaller(cx, NULL)->script(); + *scriptp = JS_GetFrameScript(cx, JS_GetScriptedCaller(cx, NULL)); *ip = 0; if (argc != 0) { v = argv[0]; @@ -1823,11 +1823,12 @@ GetTrapArgs(JSContext *cx, uintN argc, jsval *argv, JSScript **scriptp, } static JSTrapStatus -TrapHandler(JSContext *cx, JSScript *script, jsbytecode *pc, jsval *rval, +TrapHandler(JSContext *cx, JSScript *, jsbytecode *pc, jsval *rval, jsval closure) { JSString *str = 
JSVAL_TO_STRING(closure); JSStackFrame *caller = JS_GetScriptedCaller(cx, NULL); + JSScript *script = JS_GetFrameScript(cx, caller); size_t length; const jschar *chars = JS_GetStringCharsAndLength(cx, str, &length); @@ -1835,8 +1836,8 @@ TrapHandler(JSContext *cx, JSScript *script, jsbytecode *pc, jsval *rval, return JSTRAP_ERROR; if (!JS_EvaluateUCInStackFrame(cx, caller, chars, length, - caller->script()->filename, - caller->script()->lineno, + script->filename, + script->lineno, rval)) { return JSTRAP_ERROR; } @@ -1938,7 +1939,7 @@ LineToPC(JSContext *cx, uintN argc, jsval *vp) JS_ReportErrorNumber(cx, my_GetErrorMessage, NULL, JSSMSG_LINE2PC_USAGE); return JS_FALSE; } - script = JS_GetScriptedCaller(cx, NULL)->script(); + script = JS_GetFrameScript(cx, JS_GetScriptedCaller(cx, NULL)); if (!GetTrapArgs(cx, argc, JS_ARGV(cx, vp), &script, &i)) return JS_FALSE; lineno = (i == 0) ? script->lineno : (uintN)i; @@ -3218,7 +3219,8 @@ ResolveClass(JSContext *cx, JSObject *obj, jsid id, JSBool *resolved) if (!*resolved) { if (JSID_IS_ATOM(id, CLASS_ATOM(cx, Reflect))) { - if (!js_InitReflectClass(cx, obj)) + if (!IsStandardClassResolved(obj, &js_ReflectClass) && + !js_InitReflectClass(cx, obj)) return JS_FALSE; *resolved = JS_TRUE; } @@ -3573,6 +3575,8 @@ EvalInContext(JSContext *cx, uintN argc, jsval *vp) return true; JSStackFrame *fp = JS_GetScriptedCaller(cx, NULL); + JSScript *script = JS_GetFrameScript(cx, fp); + jsbytecode *pc = JS_GetFramePC(cx, fp); { JSAutoEnterCompartment ac; uintN flags; @@ -3591,8 +3595,8 @@ EvalInContext(JSContext *cx, uintN argc, jsval *vp) return false; } if (!JS_EvaluateUCScript(cx, sobj, src, srclen, - fp->script()->filename, - JS_PCToLineNumber(cx, fp->script(), fp->pc(cx)), + script->filename, + JS_PCToLineNumber(cx, script, pc), vp)) { return false; } @@ -3618,7 +3622,7 @@ EvalInFrame(JSContext *cx, uintN argc, jsval *vp) ? 
!!(JSVAL_TO_BOOLEAN(argv[2])) : false; - JS_ASSERT(cx->hasfp()); + JS_ASSERT(cx->running()); FrameRegsIter fi(cx); for (uint32 i = 0; i < upCount; ++i, ++fi) { @@ -3626,8 +3630,8 @@ EvalInFrame(JSContext *cx, uintN argc, jsval *vp) break; } - JSStackFrame *const fp = fi.fp(); - if (!JS_IsScriptFrame(cx, fp)) { + StackFrame *const fp = fi.fp(); + if (!fp->isScriptFrame()) { JS_ReportError(cx, "cannot eval in non-script frame"); return JS_FALSE; } @@ -3641,7 +3645,7 @@ EvalInFrame(JSContext *cx, uintN argc, jsval *vp) if (!chars) return JS_FALSE; - JSBool ok = JS_EvaluateUCInStackFrame(cx, fp, chars, length, + JSBool ok = JS_EvaluateUCInStackFrame(cx, Jsvalify(fp), chars, length, fp->script()->filename, JS_PCToLineNumber(cx, fp->script(), fi.pc()), @@ -4463,7 +4467,6 @@ Snarf(JSContext *cx, uintN argc, jsval *vp) { JSString *str; const char *pathname; - JSStackFrame *fp; if (!argc) return JS_FALSE; @@ -4476,10 +4479,11 @@ Snarf(JSContext *cx, uintN argc, jsval *vp) return JS_FALSE; /* Get the currently executing script's name. 
*/ - fp = JS_GetScriptedCaller(cx, NULL); - JS_ASSERT(fp && fp->script()->filename); + JSStackFrame *fp = JS_GetScriptedCaller(cx, NULL); + JSScript *script = JS_GetFrameScript(cx, fp); + JS_ASSERT(fp && script->filename); #ifdef XP_UNIX - pathname = MakeAbsolutePathname(cx, fp->script()->filename, filename.ptr()); + pathname = MakeAbsolutePathname(cx, script->filename, filename.ptr()); if (!pathname) return JS_FALSE; #else diff --git a/js/src/tests/ecma_5/JSON/jstests.list b/js/src/tests/ecma_5/JSON/jstests.list index 9ac0dd285892..2470c43f6fb4 100644 --- a/js/src/tests/ecma_5/JSON/jstests.list +++ b/js/src/tests/ecma_5/JSON/jstests.list @@ -14,9 +14,15 @@ script stringify-call-toJSON-once.js script stringify-dropping-elements.js script stringify-gap.js script stringify-ignore-noncallable-toJSON.js +script stringify-missing-arguments.js script stringify-primitives.js script stringify-replacer.js +script stringify-replacer-array-duplicated-element.js +script stringify-replacer-array-edgecase-jsid-elements.js +script stringify-replacer-array-hijinks.js +script stringify-replacer-array-skipped-element.js script stringify-replacer-with-array-indexes.js +script stringify-special-escapes.js script stringify-toJSON-arguments.js script trailing-comma.js diff --git a/js/src/tests/ecma_5/JSON/stringify-gap.js b/js/src/tests/ecma_5/JSON/stringify-gap.js index b8eb50f194fd..8480b4b3eeaa 100644 --- a/js/src/tests/ecma_5/JSON/stringify-gap.js +++ b/js/src/tests/ecma_5/JSON/stringify-gap.js @@ -43,6 +43,15 @@ var expected = assertEq(JSON.stringify(obj, null, 3), expected); assertEq(JSON.stringify(obj, null, " "), expected); +obj = [1, 2, 3]; + +String.prototype.toString = function() { return "--"; }; + +assertEq(JSON.stringify(obj, null, new String(" ")), "[\n--1,\n--2,\n--3\n]"); + +Number.prototype.valueOf = function() { return 0; }; + +assertEq(JSON.stringify(obj, null, new Number(3)), "[1,2,3]"); 
/******************************************************************************/ diff --git a/js/src/tests/ecma_5/JSON/stringify-missing-arguments.js b/js/src/tests/ecma_5/JSON/stringify-missing-arguments.js new file mode 100644 index 000000000000..18ca60835763 --- /dev/null +++ b/js/src/tests/ecma_5/JSON/stringify-missing-arguments.js @@ -0,0 +1,22 @@ +// Any copyright is dedicated to the Public Domain. +// http://creativecommons.org/licenses/publicdomain/ + +var gTestfile = 'stringify-missing-arguments.js'; +//----------------------------------------------------------------------------- +var BUGNUMBER = 648471; +var summary = "JSON.stringify with no arguments"; + +print(BUGNUMBER + ": " + summary); + +/************** + * BEGIN TEST * + **************/ + +assertEq(JSON.stringify(), undefined); + +/******************************************************************************/ + +if (typeof reportCompare === "function") + reportCompare(true, true); + +print("All tests passed!"); diff --git a/js/src/tests/ecma_5/JSON/stringify-replacer-array-duplicated-element.js b/js/src/tests/ecma_5/JSON/stringify-replacer-array-duplicated-element.js new file mode 100644 index 000000000000..69192e6caecb --- /dev/null +++ b/js/src/tests/ecma_5/JSON/stringify-replacer-array-duplicated-element.js @@ -0,0 +1,69 @@ +// Any copyright is dedicated to the Public Domain. +// http://creativecommons.org/licenses/publicdomain/ + +var gTestfile = 'stringify-replacer-array-hijinks.js'; +//----------------------------------------------------------------------------- +var BUGNUMBER = 648471; +var summary = + "Better/more correct handling for replacer arrays with getter array index " + + "properties"; + +print(BUGNUMBER + ": " + summary); + +/************** + * BEGIN TEST * + **************/ + +var bigOdd = Math.pow(2, 50) + 1; + +function two() +{ + return Math.random() < 0.5 ? 
2 : "2"; +} + +assertEq(JSON.stringify({ 1: 1 }, [1, 1]), '{"1":1}'); + +assertEq(JSON.stringify({ 1: 1 }, [1, "1"]), '{"1":1}'); + +assertEq(JSON.stringify({ 1: 1 }, [1, bigOdd % two()]), '{"1":1}'); + +assertEq(JSON.stringify({ 1: 1 }, ["1", 1]), '{"1":1}'); + +assertEq(JSON.stringify({ 1: 1 }, ["1", "1"]), '{"1":1}'); + +assertEq(JSON.stringify({ 1: 1 }, ["1", bigOdd % two()]), '{"1":1}'); + +assertEq(JSON.stringify({ 1: 1 }, [bigOdd % two(), 1]), '{"1":1}'); + +assertEq(JSON.stringify({ 1: 1 }, [bigOdd % two(), "1"]), '{"1":1}'); + +assertEq(JSON.stringify({ 1: 1 }, [bigOdd % two(), bigOdd % two()]), '{"1":1}'); + + +assertEq(JSON.stringify({ 1: 1 }, [1, new String(1)]), '{"1":1}'); + +assertEq(JSON.stringify({ 1: 1 }, [1, new Number(1)]), '{"1":1}'); + +assertEq(JSON.stringify({ 1: 1 }, ["1", new Number(1)]), '{"1":1}'); + +assertEq(JSON.stringify({ 1: 1 }, ["1", new String(1)]), '{"1":1}'); + +assertEq(JSON.stringify({ 1: 1 }, [bigOdd % two(), new Number(1)]), '{"1":1}'); + +assertEq(JSON.stringify({ 1: 1 }, [bigOdd % two(), new String(1)]), '{"1":1}'); + + +assertEq(JSON.stringify({ 1: 1 }, [new String(1), new String(1)]), '{"1":1}'); + +assertEq(JSON.stringify({ 1: 1 }, [new String(1), new Number(1)]), '{"1":1}'); + +assertEq(JSON.stringify({ 1: 1 }, [new Number(1), new String(1)]), '{"1":1}'); + +assertEq(JSON.stringify({ 1: 1 }, [new Number(1), new Number(1)]), '{"1":1}'); + +/******************************************************************************/ + +if (typeof reportCompare === "function") + reportCompare(true, true); + +print("Tests complete"); diff --git a/js/src/tests/ecma_5/JSON/stringify-replacer-array-edgecase-jsid-elements.js b/js/src/tests/ecma_5/JSON/stringify-replacer-array-edgecase-jsid-elements.js new file mode 100644 index 000000000000..e7be4c6e1120 --- /dev/null +++ b/js/src/tests/ecma_5/JSON/stringify-replacer-array-edgecase-jsid-elements.js @@ -0,0 +1,77 @@ +// Any copyright is dedicated to the Public Domain. 
+// http://creativecommons.org/licenses/publicdomain/ + +var gTestfile = 'stringify-replacer-array-edgecase-jsid-elements.js'; +//----------------------------------------------------------------------------- +var BUGNUMBER = 648471; +var summary = + "Better/more correct handling for replacer arrays with getter array index " + + "properties"; + +print(BUGNUMBER + ": " + summary); + +/************** + * BEGIN TEST * + **************/ + +/* JSID_INT_MIN/MAX copied from jsapi.h. */ + +var obj = + { + /* [JSID_INT_MIN - 1, JSID_INT_MIN + 1] */ + "-1073741825": -1073741825, + "-1073741824": -1073741824, + "-1073741823": -1073741823, + + "-2.5": -2.5, + "-1": -1, + + 0: 0, + + 1: 1, + 2.5: 2.5, + + /* [JSID_INT_MAX - 1, JSID_INT_MAX + 1] */ + 1073741822: 1073741822, + 1073741823: 1073741823, + 1073741824: 1073741824, + }; + +for (var s in obj) +{ + var n = obj[s]; + assertEq(+s, n); + assertEq(JSON.stringify(obj, [n]), + '{"' + s + '":' + n + '}', + "Failed to stringify numeric property " + n + "correctly"); + assertEq(JSON.stringify(obj, [s]), + '{"' + s + '":' + n + '}', + "Failed to stringify string property " + n + "correctly"); + assertEq(JSON.stringify(obj, [s, ]), + '{"' + s + '":' + n + '}', + "Failed to stringify string then number properties ('" + s + "', " + n + ") correctly"); + assertEq(JSON.stringify(obj, [n, s]), + '{"' + s + '":' + n + '}', + "Failed to stringify number then string properties (" + n + ", '" + s + "') correctly"); +} + +// -0 is tricky, because ToString(-0) === "0", so test it specially. 
+assertEq(JSON.stringify({ "-0": 17, 0: 42 }, [-0]), + '{"0":42}', + "Failed to stringify numeric property -0 correctly"); +assertEq(JSON.stringify({ "-0": 17, 0: 42 }, ["-0"]), + '{"-0":17}', + "Failed to stringify string property -0 correctly"); +assertEq(JSON.stringify({ "-0": 17, 0: 42 }, ["-0", -0]), + '{"-0":17,"0":42}', + "Failed to stringify string then number properties ('-0', -0) correctly"); +assertEq(JSON.stringify({ "-0": 17, 0: 42 }, [-0, "-0"]), + '{"0":42,"-0":17}', + "Failed to stringify number then string properties (-0, '-0) correctly"); + +/******************************************************************************/ + +if (typeof reportCompare === "function") + reportCompare(true, true); + +print("Tests complete"); diff --git a/js/src/tests/ecma_5/JSON/stringify-replacer-array-hijinks.js b/js/src/tests/ecma_5/JSON/stringify-replacer-array-hijinks.js new file mode 100644 index 000000000000..60c949a5f1b9 --- /dev/null +++ b/js/src/tests/ecma_5/JSON/stringify-replacer-array-hijinks.js @@ -0,0 +1,59 @@ +// Any copyright is dedicated to the Public Domain. 
+// http://creativecommons.org/licenses/publicdomain/ + +var gTestfile = 'stringify-replacer-array-hijinks.js'; +//----------------------------------------------------------------------------- +var BUGNUMBER = 648471; +var summary = + "Better/more correct handling for replacer arrays with getter array index " + + "properties"; + +print(BUGNUMBER + ": " + summary); + +/************** + * BEGIN TEST * + **************/ + +var replacer = [0, 1, 2, 3]; +Object.prototype[3] = 3; +Object.defineProperty(replacer, 1, { + get: function() + { + Object.defineProperty(replacer, 4, { value: 4 }); + delete replacer[2]; + delete replacer[3]; + replacer[5] = 5; + return 1; + } +}); + +var s = + JSON.stringify({0: { 1: { 3: { 4: { 5: { 2: "omitted" } } } } } }, replacer); + +// The replacer array's length is as seen on first query, so property names are +// accumulated for indexes i ∈ {0, 1, 2, 3}, but index 1 deletes 2 and 3, so 2 +// isn't seen but 3 is seen as Object.prototype[3]. +assertEq('{"0":{"1":{"3":{"3":3}},"3":3},"3":3}', s); + + +var replacer = [0, 1, 2, 3]; +Object.defineProperty(replacer, 0, { + get: function() + { + replacer.length = 0; + return {}; + } +}); + +// The replacer.length truncation means only properties on the prototype chain +// shine through, but it doesn't affect the original bounds of the iteration +// used to determine property names which will be included in the final string. 
+assertEq(JSON.stringify({ 0: 0, 1: 1, 2: 2, 3: 3 }, replacer), + '{"3":3}'); + +/******************************************************************************/ + +if (typeof reportCompare === "function") + reportCompare(true, true); + +print("Tests complete"); diff --git a/js/src/tests/ecma_5/JSON/stringify-replacer-array-skipped-element.js b/js/src/tests/ecma_5/JSON/stringify-replacer-array-skipped-element.js new file mode 100644 index 000000000000..6297c3a5bf46 --- /dev/null +++ b/js/src/tests/ecma_5/JSON/stringify-replacer-array-skipped-element.js @@ -0,0 +1,62 @@ +// Any copyright is dedicated to the Public Domain. +// http://creativecommons.org/licenses/publicdomain/ + +var gTestfile = 'stringify-replacer-array-skipped-element.js'; +//----------------------------------------------------------------------------- +var BUGNUMBER = 648471; +var summary = + "Better/more correct handling for replacer arrays with getter array index " + + "properties"; + +print(BUGNUMBER + ": " + summary); + +/************** + * BEGIN TEST * + **************/ + +assertEq(JSON.stringify({ 3: 3, 4: 4 }, + ["3", { toString: function() { return "4" } }]), + '{"3":3}'); + +assertEq(JSON.stringify({ 3: 3, true: 4 }, ["3", true]), + '{"3":3}'); + +assertEq(JSON.stringify({ 3: 3, true: 4 }, ["3", "true", true]), + '{"3":3,"true":4}'); + +assertEq(JSON.stringify({ 3: 3, true: 4 }, ["3", true, "true"]), + '{"3":3,"true":4}'); + +assertEq(JSON.stringify({ 3: 3, false: 4 }, ["3", false]), + '{"3":3}'); + +assertEq(JSON.stringify({ 3: 3, false: 4 }, ["3", "false", false]), + '{"3":3,"false":4}'); + +assertEq(JSON.stringify({ 3: 3, false: 4 }, ["3", false, "false"]), + '{"3":3,"false":4}'); + +assertEq(JSON.stringify({ 3: 3, undefined: 4 }, ["3", undefined]), + '{"3":3}'); + +assertEq(JSON.stringify({ 3: 3, undefined: 4 }, ["3", "undefined", undefined]), + '{"3":3,"undefined":4}'); + +assertEq(JSON.stringify({ 3: 3, undefined: 4 }, ["3", undefined, "undefined"]), + '{"3":3,"undefined":4}'); + 
+assertEq(JSON.stringify({ 3: 3, null: 4 }, ["3", null]), + '{"3":3}'); + +assertEq(JSON.stringify({ 3: 3, null: 4 }, ["3", "null", null]), + '{"3":3,"null":4}'); + +assertEq(JSON.stringify({ 3: 3, null: 4 }, ["3", null, "null"]), + '{"3":3,"null":4}'); + +/******************************************************************************/ + +if (typeof reportCompare === "function") + reportCompare(true, true); + +print("Tests complete"); diff --git a/js/src/tests/ecma_5/JSON/stringify-special-escapes.js b/js/src/tests/ecma_5/JSON/stringify-special-escapes.js new file mode 100644 index 000000000000..cf3d8c28e0bc --- /dev/null +++ b/js/src/tests/ecma_5/JSON/stringify-special-escapes.js @@ -0,0 +1,227 @@ +// Any copyright is dedicated to the Public Domain. +// http://creativecommons.org/licenses/publicdomain/ + +var gTestfile = 'stringify-special-escapes.js'; +//----------------------------------------------------------------------------- +var BUGNUMBER = 512266; +var summary = + "JSON.stringify of \\b\\f\\n\\r\\t should use one-character escapes, not hex"; + +print(BUGNUMBER + ": " + summary); + +/************** + * BEGIN TEST * + **************/ + +assertEq(JSON.stringify("\u0000"), '"\\u0000"'); +assertEq(JSON.stringify("\u0001"), '"\\u0001"'); +assertEq(JSON.stringify("\u0002"), '"\\u0002"'); +assertEq(JSON.stringify("\u0003"), '"\\u0003"'); +assertEq(JSON.stringify("\u0004"), '"\\u0004"'); +assertEq(JSON.stringify("\u0005"), '"\\u0005"'); +assertEq(JSON.stringify("\u0006"), '"\\u0006"'); +assertEq(JSON.stringify("\u0007"), '"\\u0007"'); +assertEq(JSON.stringify("\u0008"), '"\\b"'); +assertEq(JSON.stringify("\u0009"), '"\\t"'); +assertEq(JSON.stringify("\u000A"), '"\\n"'); +assertEq(JSON.stringify("\u000B"), '"\\u000b"'); +assertEq(JSON.stringify("\u000C"), '"\\f"'); +assertEq(JSON.stringify("\u000D"), '"\\r"'); +assertEq(JSON.stringify("\u000E"), '"\\u000e"'); +assertEq(JSON.stringify("\u000F"), '"\\u000f"'); +assertEq(JSON.stringify("\u0010"), '"\\u0010"'); 
+assertEq(JSON.stringify("\u0011"), '"\\u0011"'); +assertEq(JSON.stringify("\u0012"), '"\\u0012"'); +assertEq(JSON.stringify("\u0013"), '"\\u0013"'); +assertEq(JSON.stringify("\u0014"), '"\\u0014"'); +assertEq(JSON.stringify("\u0015"), '"\\u0015"'); +assertEq(JSON.stringify("\u0016"), '"\\u0016"'); +assertEq(JSON.stringify("\u0017"), '"\\u0017"'); +assertEq(JSON.stringify("\u0018"), '"\\u0018"'); +assertEq(JSON.stringify("\u0019"), '"\\u0019"'); +assertEq(JSON.stringify("\u001A"), '"\\u001a"'); +assertEq(JSON.stringify("\u001B"), '"\\u001b"'); +assertEq(JSON.stringify("\u001C"), '"\\u001c"'); +assertEq(JSON.stringify("\u001D"), '"\\u001d"'); +assertEq(JSON.stringify("\u001E"), '"\\u001e"'); +assertEq(JSON.stringify("\u001F"), '"\\u001f"'); +assertEq(JSON.stringify("\u0020"), '" "'); + +assertEq(JSON.stringify("\\u0000"), '"\\\\u0000"'); +assertEq(JSON.stringify("\\u0001"), '"\\\\u0001"'); +assertEq(JSON.stringify("\\u0002"), '"\\\\u0002"'); +assertEq(JSON.stringify("\\u0003"), '"\\\\u0003"'); +assertEq(JSON.stringify("\\u0004"), '"\\\\u0004"'); +assertEq(JSON.stringify("\\u0005"), '"\\\\u0005"'); +assertEq(JSON.stringify("\\u0006"), '"\\\\u0006"'); +assertEq(JSON.stringify("\\u0007"), '"\\\\u0007"'); +assertEq(JSON.stringify("\\u0008"), '"\\\\u0008"'); +assertEq(JSON.stringify("\\u0009"), '"\\\\u0009"'); +assertEq(JSON.stringify("\\u000A"), '"\\\\u000A"'); +assertEq(JSON.stringify("\\u000B"), '"\\\\u000B"'); +assertEq(JSON.stringify("\\u000C"), '"\\\\u000C"'); +assertEq(JSON.stringify("\\u000D"), '"\\\\u000D"'); +assertEq(JSON.stringify("\\u000E"), '"\\\\u000E"'); +assertEq(JSON.stringify("\\u000F"), '"\\\\u000F"'); +assertEq(JSON.stringify("\\u0010"), '"\\\\u0010"'); +assertEq(JSON.stringify("\\u0011"), '"\\\\u0011"'); +assertEq(JSON.stringify("\\u0012"), '"\\\\u0012"'); +assertEq(JSON.stringify("\\u0013"), '"\\\\u0013"'); +assertEq(JSON.stringify("\\u0014"), '"\\\\u0014"'); +assertEq(JSON.stringify("\\u0015"), '"\\\\u0015"'); +assertEq(JSON.stringify("\\u0016"), 
'"\\\\u0016"'); +assertEq(JSON.stringify("\\u0017"), '"\\\\u0017"'); +assertEq(JSON.stringify("\\u0018"), '"\\\\u0018"'); +assertEq(JSON.stringify("\\u0019"), '"\\\\u0019"'); +assertEq(JSON.stringify("\\u001A"), '"\\\\u001A"'); +assertEq(JSON.stringify("\\u001B"), '"\\\\u001B"'); +assertEq(JSON.stringify("\\u001C"), '"\\\\u001C"'); +assertEq(JSON.stringify("\\u001D"), '"\\\\u001D"'); +assertEq(JSON.stringify("\\u001E"), '"\\\\u001E"'); +assertEq(JSON.stringify("\\u001F"), '"\\\\u001F"'); +assertEq(JSON.stringify("\\u0020"), '"\\\\u0020"'); + + +assertEq(JSON.stringify("a\u0000"), '"a\\u0000"'); +assertEq(JSON.stringify("a\u0001"), '"a\\u0001"'); +assertEq(JSON.stringify("a\u0002"), '"a\\u0002"'); +assertEq(JSON.stringify("a\u0003"), '"a\\u0003"'); +assertEq(JSON.stringify("a\u0004"), '"a\\u0004"'); +assertEq(JSON.stringify("a\u0005"), '"a\\u0005"'); +assertEq(JSON.stringify("a\u0006"), '"a\\u0006"'); +assertEq(JSON.stringify("a\u0007"), '"a\\u0007"'); +assertEq(JSON.stringify("a\u0008"), '"a\\b"'); +assertEq(JSON.stringify("a\u0009"), '"a\\t"'); +assertEq(JSON.stringify("a\u000A"), '"a\\n"'); +assertEq(JSON.stringify("a\u000B"), '"a\\u000b"'); +assertEq(JSON.stringify("a\u000C"), '"a\\f"'); +assertEq(JSON.stringify("a\u000D"), '"a\\r"'); +assertEq(JSON.stringify("a\u000E"), '"a\\u000e"'); +assertEq(JSON.stringify("a\u000F"), '"a\\u000f"'); +assertEq(JSON.stringify("a\u0010"), '"a\\u0010"'); +assertEq(JSON.stringify("a\u0011"), '"a\\u0011"'); +assertEq(JSON.stringify("a\u0012"), '"a\\u0012"'); +assertEq(JSON.stringify("a\u0013"), '"a\\u0013"'); +assertEq(JSON.stringify("a\u0014"), '"a\\u0014"'); +assertEq(JSON.stringify("a\u0015"), '"a\\u0015"'); +assertEq(JSON.stringify("a\u0016"), '"a\\u0016"'); +assertEq(JSON.stringify("a\u0017"), '"a\\u0017"'); +assertEq(JSON.stringify("a\u0018"), '"a\\u0018"'); +assertEq(JSON.stringify("a\u0019"), '"a\\u0019"'); +assertEq(JSON.stringify("a\u001A"), '"a\\u001a"'); +assertEq(JSON.stringify("a\u001B"), '"a\\u001b"'); 
+assertEq(JSON.stringify("a\u001C"), '"a\\u001c"'); +assertEq(JSON.stringify("a\u001D"), '"a\\u001d"'); +assertEq(JSON.stringify("a\u001E"), '"a\\u001e"'); +assertEq(JSON.stringify("a\u001F"), '"a\\u001f"'); +assertEq(JSON.stringify("a\u0020"), '"a "'); + +assertEq(JSON.stringify("a\\u0000"), '"a\\\\u0000"'); +assertEq(JSON.stringify("a\\u0001"), '"a\\\\u0001"'); +assertEq(JSON.stringify("a\\u0002"), '"a\\\\u0002"'); +assertEq(JSON.stringify("a\\u0003"), '"a\\\\u0003"'); +assertEq(JSON.stringify("a\\u0004"), '"a\\\\u0004"'); +assertEq(JSON.stringify("a\\u0005"), '"a\\\\u0005"'); +assertEq(JSON.stringify("a\\u0006"), '"a\\\\u0006"'); +assertEq(JSON.stringify("a\\u0007"), '"a\\\\u0007"'); +assertEq(JSON.stringify("a\\u0008"), '"a\\\\u0008"'); +assertEq(JSON.stringify("a\\u0009"), '"a\\\\u0009"'); +assertEq(JSON.stringify("a\\u000A"), '"a\\\\u000A"'); +assertEq(JSON.stringify("a\\u000B"), '"a\\\\u000B"'); +assertEq(JSON.stringify("a\\u000C"), '"a\\\\u000C"'); +assertEq(JSON.stringify("a\\u000D"), '"a\\\\u000D"'); +assertEq(JSON.stringify("a\\u000E"), '"a\\\\u000E"'); +assertEq(JSON.stringify("a\\u000F"), '"a\\\\u000F"'); +assertEq(JSON.stringify("a\\u0010"), '"a\\\\u0010"'); +assertEq(JSON.stringify("a\\u0011"), '"a\\\\u0011"'); +assertEq(JSON.stringify("a\\u0012"), '"a\\\\u0012"'); +assertEq(JSON.stringify("a\\u0013"), '"a\\\\u0013"'); +assertEq(JSON.stringify("a\\u0014"), '"a\\\\u0014"'); +assertEq(JSON.stringify("a\\u0015"), '"a\\\\u0015"'); +assertEq(JSON.stringify("a\\u0016"), '"a\\\\u0016"'); +assertEq(JSON.stringify("a\\u0017"), '"a\\\\u0017"'); +assertEq(JSON.stringify("a\\u0018"), '"a\\\\u0018"'); +assertEq(JSON.stringify("a\\u0019"), '"a\\\\u0019"'); +assertEq(JSON.stringify("a\\u001A"), '"a\\\\u001A"'); +assertEq(JSON.stringify("a\\u001B"), '"a\\\\u001B"'); +assertEq(JSON.stringify("a\\u001C"), '"a\\\\u001C"'); +assertEq(JSON.stringify("a\\u001D"), '"a\\\\u001D"'); +assertEq(JSON.stringify("a\\u001E"), '"a\\\\u001E"'); +assertEq(JSON.stringify("a\\u001F"), 
'"a\\\\u001F"'); +assertEq(JSON.stringify("a\\u0020"), '"a\\\\u0020"'); + + +assertEq(JSON.stringify("\u0000Q"), '"\\u0000Q"'); +assertEq(JSON.stringify("\u0001Q"), '"\\u0001Q"'); +assertEq(JSON.stringify("\u0002Q"), '"\\u0002Q"'); +assertEq(JSON.stringify("\u0003Q"), '"\\u0003Q"'); +assertEq(JSON.stringify("\u0004Q"), '"\\u0004Q"'); +assertEq(JSON.stringify("\u0005Q"), '"\\u0005Q"'); +assertEq(JSON.stringify("\u0006Q"), '"\\u0006Q"'); +assertEq(JSON.stringify("\u0007Q"), '"\\u0007Q"'); +assertEq(JSON.stringify("\u0008Q"), '"\\bQ"'); +assertEq(JSON.stringify("\u0009Q"), '"\\tQ"'); +assertEq(JSON.stringify("\u000AQ"), '"\\nQ"'); +assertEq(JSON.stringify("\u000BQ"), '"\\u000bQ"'); +assertEq(JSON.stringify("\u000CQ"), '"\\fQ"'); +assertEq(JSON.stringify("\u000DQ"), '"\\rQ"'); +assertEq(JSON.stringify("\u000EQ"), '"\\u000eQ"'); +assertEq(JSON.stringify("\u000FQ"), '"\\u000fQ"'); +assertEq(JSON.stringify("\u0010Q"), '"\\u0010Q"'); +assertEq(JSON.stringify("\u0011Q"), '"\\u0011Q"'); +assertEq(JSON.stringify("\u0012Q"), '"\\u0012Q"'); +assertEq(JSON.stringify("\u0013Q"), '"\\u0013Q"'); +assertEq(JSON.stringify("\u0014Q"), '"\\u0014Q"'); +assertEq(JSON.stringify("\u0015Q"), '"\\u0015Q"'); +assertEq(JSON.stringify("\u0016Q"), '"\\u0016Q"'); +assertEq(JSON.stringify("\u0017Q"), '"\\u0017Q"'); +assertEq(JSON.stringify("\u0018Q"), '"\\u0018Q"'); +assertEq(JSON.stringify("\u0019Q"), '"\\u0019Q"'); +assertEq(JSON.stringify("\u001AQ"), '"\\u001aQ"'); +assertEq(JSON.stringify("\u001BQ"), '"\\u001bQ"'); +assertEq(JSON.stringify("\u001CQ"), '"\\u001cQ"'); +assertEq(JSON.stringify("\u001DQ"), '"\\u001dQ"'); +assertEq(JSON.stringify("\u001EQ"), '"\\u001eQ"'); +assertEq(JSON.stringify("\u001FQ"), '"\\u001fQ"'); +assertEq(JSON.stringify("\u0020Q"), '" Q"'); + +assertEq(JSON.stringify("\\u0000Q"), '"\\\\u0000Q"'); +assertEq(JSON.stringify("\\u0001Q"), '"\\\\u0001Q"'); +assertEq(JSON.stringify("\\u0002Q"), '"\\\\u0002Q"'); +assertEq(JSON.stringify("\\u0003Q"), '"\\\\u0003Q"'); 
+assertEq(JSON.stringify("\\u0004Q"), '"\\\\u0004Q"'); +assertEq(JSON.stringify("\\u0005Q"), '"\\\\u0005Q"'); +assertEq(JSON.stringify("\\u0006Q"), '"\\\\u0006Q"'); +assertEq(JSON.stringify("\\u0007Q"), '"\\\\u0007Q"'); +assertEq(JSON.stringify("\\u0008Q"), '"\\\\u0008Q"'); +assertEq(JSON.stringify("\\u0009Q"), '"\\\\u0009Q"'); +assertEq(JSON.stringify("\\u000AQ"), '"\\\\u000AQ"'); +assertEq(JSON.stringify("\\u000BQ"), '"\\\\u000BQ"'); +assertEq(JSON.stringify("\\u000CQ"), '"\\\\u000CQ"'); +assertEq(JSON.stringify("\\u000DQ"), '"\\\\u000DQ"'); +assertEq(JSON.stringify("\\u000EQ"), '"\\\\u000EQ"'); +assertEq(JSON.stringify("\\u000FQ"), '"\\\\u000FQ"'); +assertEq(JSON.stringify("\\u0010Q"), '"\\\\u0010Q"'); +assertEq(JSON.stringify("\\u0011Q"), '"\\\\u0011Q"'); +assertEq(JSON.stringify("\\u0012Q"), '"\\\\u0012Q"'); +assertEq(JSON.stringify("\\u0013Q"), '"\\\\u0013Q"'); +assertEq(JSON.stringify("\\u0014Q"), '"\\\\u0014Q"'); +assertEq(JSON.stringify("\\u0015Q"), '"\\\\u0015Q"'); +assertEq(JSON.stringify("\\u0016Q"), '"\\\\u0016Q"'); +assertEq(JSON.stringify("\\u0017Q"), '"\\\\u0017Q"'); +assertEq(JSON.stringify("\\u0018Q"), '"\\\\u0018Q"'); +assertEq(JSON.stringify("\\u0019Q"), '"\\\\u0019Q"'); +assertEq(JSON.stringify("\\u001AQ"), '"\\\\u001AQ"'); +assertEq(JSON.stringify("\\u001BQ"), '"\\\\u001BQ"'); +assertEq(JSON.stringify("\\u001CQ"), '"\\\\u001CQ"'); +assertEq(JSON.stringify("\\u001DQ"), '"\\\\u001DQ"'); +assertEq(JSON.stringify("\\u001EQ"), '"\\\\u001EQ"'); +assertEq(JSON.stringify("\\u001FQ"), '"\\\\u001FQ"'); +assertEq(JSON.stringify("\\u0020Q"), '"\\\\u0020Q"'); + +/******************************************************************************/ + +if (typeof reportCompare === "function") + reportCompare(true, true); + +print("Tests complete"); diff --git a/js/src/tests/ecma_5/Number/jstests.list b/js/src/tests/ecma_5/Number/jstests.list index b08bee3790ee..1542e21f00d6 100644 --- a/js/src/tests/ecma_5/Number/jstests.list +++ 
b/js/src/tests/ecma_5/Number/jstests.list @@ -1,2 +1,3 @@ url-prefix ../../jsreftest.html?test=ecma_5/Number/ script 15.7.4.2.js +script toString-radix-handling.js diff --git a/js/src/tests/ecma_5/Number/toString-radix-handling.js b/js/src/tests/ecma_5/Number/toString-radix-handling.js new file mode 100644 index 000000000000..dd91675a27ca --- /dev/null +++ b/js/src/tests/ecma_5/Number/toString-radix-handling.js @@ -0,0 +1,37 @@ +/* + * Any copyright is dedicated to the Public Domain. + * http://creativecommonn.org/licenses/publicdomain/ + */ + +var BUGNUMBER = 647385; +var summary = + "Number.prototype.toString should use ToInteger on the radix and should " + + "throw a RangeError if the radix is bad"; + +print(BUGNUMBER + ": " + summary); + +/************** + * BEGIN TEST * + **************/ + +function test(r) +{ + try + { + 5..toString(r); + throw "should have thrown"; + } + catch (e) + { + assertEq(e instanceof RangeError, true, "expected a RangeError, got " + e); + } +} +test(Math.pow(2, 32) + 10); +test(55); + +/******************************************************************************/ + +if (typeof reportCompare === "function") + reportCompare(true, true); + +print("All tests passed!"); diff --git a/js/src/tests/ecma_5/extensions/array-toString-recursion.js b/js/src/tests/ecma_5/extensions/array-toString-recursion.js new file mode 100644 index 000000000000..aa5d856c3bf4 --- /dev/null +++ b/js/src/tests/ecma_5/extensions/array-toString-recursion.js @@ -0,0 +1,46 @@ +/* + * Any copyright is dedicated to the Public Domain. 
+ * http://creativecommons.org/licenses/publicdomain/ + */ + +//----------------------------------------------------------------------------- +var BUGNUMBER = 635389; +var summary = 'Infinite recursion via [].{toString,toLocaleString,join}'; + +print(BUGNUMBER + ": " + summary); + +/************** + * BEGIN TEST * + **************/ + +try +{ + var x = []; + x.join = Array.prototype.toString; + "" + x; + throw new Error("should have thrown"); +} +catch (e) +{ + assertEq(e instanceof InternalError, true, + "should have thrown for over-recursion"); +} + +try +{ + var x = { toString: Array.prototype.toString, join: Array.prototype.toString }; + "" + x; + throw new Error("should have thrown"); +} +catch (e) +{ + assertEq(e instanceof InternalError, true, + "should have thrown for over-recursion"); +} + +/******************************************************************************/ + +if (typeof reportCompare === "function") + reportCompare(true, true); + +print("All tests passed!"); diff --git a/js/src/tests/ecma_5/extensions/jstests.list b/js/src/tests/ecma_5/extensions/jstests.list index 3132c8e908e4..bcd7fa0185b4 100644 --- a/js/src/tests/ecma_5/extensions/jstests.list +++ b/js/src/tests/ecma_5/extensions/jstests.list @@ -9,6 +9,7 @@ script bug352085.js script bug472534.js script bug496985.js script bug566661.js +script array-toString-recursion.js skip-if(!xulRuntime.shell) script cross-global-eval-is-indirect.js # needs newGlobal() script eval-native-callback-is-indirect.js script extension-methods-reject-null-undefined-this.js @@ -23,6 +24,7 @@ script regress-bug629723.js script strict-function-statements.js script strict-option-redeclared-parameter.js script string-literal-getter-setter-decompilation.js +script toSource-infinite-recursion.js script uneval-strict-functions.js script watch-array-length.js script watch-inherited-property.js diff --git a/js/src/tests/ecma_5/extensions/toSource-infinite-recursion.js 
b/js/src/tests/ecma_5/extensions/toSource-infinite-recursion.js new file mode 100644 index 000000000000..2ddcaa7529e4 --- /dev/null +++ b/js/src/tests/ecma_5/extensions/toSource-infinite-recursion.js @@ -0,0 +1,34 @@ +/* + * Any copyright is dedicated to the Public Domain. + * http://creativecommons.org/licenses/publicdomain/ + */ + +//----------------------------------------------------------------------------- +var BUGNUMBER = 650574; +var summary = 'Check for too-deep stack when converting a value to source'; + +print(BUGNUMBER + ": " + summary); + +/************** + * BEGIN TEST * + **************/ + +try +{ + var e = Error(''); + e.fileName = e; + e.toSource(); + throw new Error("should have thrown"); +} +catch (e) +{ + assertEq(e instanceof InternalError, true, + "should have thrown for over-recursion"); +} + +/******************************************************************************/ + +if (typeof reportCompare === "function") + reportCompare(true, true); + +print("All tests passed!"); diff --git a/js/src/tests/js1_5/extensions/jstests.list b/js/src/tests/js1_5/extensions/jstests.list index 3816c033c536..1953e18cf6cd 100644 --- a/js/src/tests/js1_5/extensions/jstests.list +++ b/js/src/tests/js1_5/extensions/jstests.list @@ -223,3 +223,4 @@ script scope-001.js fails-if(Android) script toLocaleFormat-01.js fails-if(xulRuntime.OS=="WINNT") script toLocaleFormat-02.js script regress-543839.js +script regress-564577.js diff --git a/js/src/tests/js1_5/extensions/regress-564577.js b/js/src/tests/js1_5/extensions/regress-564577.js new file mode 100644 index 000000000000..64edacdfec60 --- /dev/null +++ b/js/src/tests/js1_5/extensions/regress-564577.js @@ -0,0 +1,79 @@ +/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */ +/* + * Any copyright is dedicated to the Public Domain. 
+ * http://creativecommons.org/licenses/publicdomain/ + * Contributor: Matthew Draper + */ + +var gTestfile = 'regress-564577.js'; +//----------------------------------------------------------------------------- +var BUGNUMBER = 564577; +var summary = '__noSuchMethod__ when property exists'; +var actual = ''; +var expect = ''; + + +//----------------------------------------------------------------------------- +test(); +//----------------------------------------------------------------------------- + +function test() +{ + enterFunc ('test'); + printBugNumber(BUGNUMBER); + printStatus (summary); + + + var o = { + aaa: undefined, + bbb: null, + ccc: 77, + ddd: 'foo', + eee: {}, + fff: /./, + __noSuchMethod__: function (id, args) + { + return(id + '('+args.join(',')+') ' + this[id]); + } + }; + + status = summary + ' ' + inSection(1) + ' '; + actual = o.aaa(); + expect = 'aaa() undefined'; + reportCompare(expect, actual, status); + + status = summary + ' ' + inSection(2) + ' '; + actual = o.bbb(); + expect = 'bbb() null'; + reportCompare(expect, actual, status); + + status = summary + ' ' + inSection(3) + ' '; + actual = o.ccc(); + expect = 'ccc() 77'; + reportCompare(expect, actual, status); + + status = summary + ' ' + inSection(4) + ' '; + actual = o.ddd(); + expect = 'ddd() foo'; + reportCompare(expect, actual, status); + + status = summary + ' ' + inSection(5) + ' '; + try { + actual = o.eee(); + } catch(e) { + actual = e + ''; + } + expect = 'TypeError: o.eee is not a function'; + reportCompare(expect, actual, status); + + status = summary + ' ' + inSection(6) + ' '; + try { + actual = o.fff('xyz') + ''; + } catch(e) { + actual = e + ''; + } + expect = 'TypeError: o.fff is not a function'; + reportCompare(expect, actual, status); + + exitFunc('test'); +} diff --git a/js/src/tests/js1_8_5/regress/jstests.list b/js/src/tests/js1_8_5/regress/jstests.list index a4903f92b383..13cf253a7899 100644 --- a/js/src/tests/js1_8_5/regress/jstests.list +++ 
b/js/src/tests/js1_8_5/regress/jstests.list @@ -90,7 +90,7 @@ script regress-624199.js script regress-624547.js script regress-624968.js script regress-626436.js -fails-if(xulRuntime.shell) script regress-633741.js +script regress-633741.js script regress-634210-1.js script regress-634210-2.js script regress-634210-3.js diff --git a/js/src/tracejit/Writer.cpp b/js/src/tracejit/Writer.cpp index cbd9d27c35e3..c91797d95a20 100644 --- a/js/src/tracejit/Writer.cpp +++ b/js/src/tracejit/Writer.cpp @@ -370,17 +370,17 @@ void ValidateWriter::checkAccSet(LOpcode op, LIns *base, int32_t disp, AccSet ac case ACCSET_FRAMEREGS: // base = ldp.cx ...[offsetof(JSContext, regs)] - // ins = ldp.regs base[] + // ins = ldp.regs base[] ok = op == LIR_ldp && - dispWithin(JSFrameRegs) && - match(base, LIR_ldp, ACCSET_CX, offsetof(JSContext, regs)); + dispWithin(FrameRegs) && + match(base, LIR_ldp, ACCSET_CX, offsetof(JSContext, stack) + ContextStack::offsetOfRegs()); break; case ACCSET_STACKFRAME: - // base = ldp.regs ...[offsetof(JSFrameRegs, fp)] - // ins = {ld,st}X.sf base[] - ok = dispWithin(JSStackFrame) && - match(base, LIR_ldp, ACCSET_FRAMEREGS, offsetof(JSFrameRegs, fp)); + // base = ldp.regs ...[offsetof(FrameRegs, fp)] + // ins = {ld,st}X.sf base[] + ok = dispWithin(StackFrame) && + match(base, LIR_ldp, ACCSET_FRAMEREGS, FrameRegs::offsetOfFp); break; case ACCSET_RUNTIME: diff --git a/js/src/tracejit/Writer.h b/js/src/tracejit/Writer.h index ed35ef6debb1..d9531d025a9f 100644 --- a/js/src/tracejit/Writer.h +++ b/js/src/tracejit/Writer.h @@ -106,8 +106,8 @@ enum LC_TMBits { * - ACCSET_EOS: The globals area. * - ACCSET_ALLOC: All memory blocks allocated with LIR_allocp (in * other words, this region is the AR space). - * - ACCSET_FRAMEREGS: All JSFrameRegs structs. - * - ACCSET_STACKFRAME: All JSStackFrame objects. + * - ACCSET_FRAMEREGS: All FrameRegs structs. + * - ACCSET_STACKFRAME: All StackFrame objects. * - ACCSET_RUNTIME: The JSRuntime object. 
* - ACCSET_OBJ_CLASP: The 'clasp' field of all JSObjects. * - ACCSET_OBJ_FLAGS: The 'flags' field of all JSObjects. @@ -427,6 +427,10 @@ class Writer #define ldpConstContextField(fieldname) \ name(w.ldpContextFieldHelper(cx_ins, offsetof(JSContext, fieldname), LOAD_CONST), \ #fieldname) + nj::LIns *ldpContextRegs(nj::LIns *cx) const { + int32 offset = offsetof(JSContext, stack) + ContextStack::offsetOfRegs(); + return name(ldpContextFieldHelper(cx, offset, nj::LOAD_NORMAL),"regs"); + } nj::LIns *stContextField(nj::LIns *value, nj::LIns *cx, int32 offset) const { return lir->insStore(value, cx, offset, ACCSET_CX); @@ -457,11 +461,11 @@ class Writer } nj::LIns *ldpFrameFp(nj::LIns *regs) const { - return lir->insLoad(nj::LIR_ldp, regs, offsetof(JSFrameRegs, fp), ACCSET_FRAMEREGS); + return lir->insLoad(nj::LIR_ldp, regs, FrameRegs::offsetOfFp, ACCSET_FRAMEREGS); } nj::LIns *ldpStackFrameScopeChain(nj::LIns *frame) const { - return lir->insLoad(nj::LIR_ldp, frame, JSStackFrame::offsetOfScopeChain(), + return lir->insLoad(nj::LIR_ldp, frame, StackFrame::offsetOfScopeChain(), ACCSET_STACKFRAME); } diff --git a/js/src/vm/Stack-inl.h b/js/src/vm/Stack-inl.h new file mode 100644 index 000000000000..5ad22be1055b --- /dev/null +++ b/js/src/vm/Stack-inl.h @@ -0,0 +1,1084 @@ +/* -*- Mode: C; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- + * vim: set ts=4 sw=4 et tw=79 ft=cpp: + * + * ***** BEGIN LICENSE BLOCK ***** + * Version: MPL 1.1/GPL 2.0/LGPL 2.1 + * + * The contents of this file are subject to the Mozilla Public License Version + * 1.1 (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * http://www.mozilla.org/MPL/ + * + * Software distributed under the License is distributed on an "AS IS" basis, + * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License + * for the specific language governing rights and limitations under the + * License. 
+ * + * The Original Code is SpiderMonkey JavaScript engine. + * + * The Initial Developer of the Original Code is + * Mozilla Corporation. + * Portions created by the Initial Developer are Copyright (C) 2009 + * the Initial Developer. All Rights Reserved. + * + * Contributor(s): + * Luke Wagner + * + * Alternatively, the contents of this file may be used under the terms of + * either the GNU General Public License Version 2 or later (the "GPL"), or + * the GNU Lesser General Public License Version 2.1 or later (the "LGPL"), + * in which case the provisions of the GPL or the LGPL are applicable instead + * of those above. If you wish to allow use of your version of this file only + * under the terms of either the GPL or the LGPL, and not to allow others to + * use your version of this file under the terms of the MPL, indicate your + * decision by deleting the provisions above and replace them with the notice + * and other provisions required by the GPL or the LGPL. If you do not delete + * the provisions above, a recipient may use your version of this file under + * the terms of any one of the MPL, the GPL or the LGPL. + * + * ***** END LICENSE BLOCK ***** */ + +#ifndef Stack_inl_h__ +#define Stack_inl_h__ + +#include "Stack.h" + +namespace js { + +/*****************************************************************************/ + +/* See VM stack layout comment in Stack.h. */ +class StackSegment +{ + /* The context to which this segment belongs. */ + ContextStack *stack_; + + /* Link for JSContext segment stack mentioned in big comment above. */ + StackSegment *previousInContext_; + + /* Link for StackSpace segment stack mentioned in StackSpace comment. */ + StackSegment *previousInMemory_; + + /* The first frame executed in this segment. null iff cx is null */ + StackFrame *initialFrame_; + + /* If this segment is suspended, |cx->regs| when it was suspended. */ + FrameRegs *suspendedRegs_; + + /* The varobj on entry to initialFrame. 
*/ + JSObject *initialVarObj_; + + /* Whether this segment was suspended by JS_SaveFrameChain. */ + bool saved_; + + /* Align at 8 bytes on all platforms. */ +#if JS_BITS_PER_WORD == 32 + void *padding; +#endif + + /* + * To make isActive a single null-ness check, this non-null constant is + * assigned to suspendedRegs when empty. + */ +#define NON_NULL_SUSPENDED_REGS ((FrameRegs *)0x1) + + public: + StackSegment() + : stack_(NULL), previousInContext_(NULL), previousInMemory_(NULL), + initialFrame_(NULL), suspendedRegs_(NON_NULL_SUSPENDED_REGS), + initialVarObj_(NULL), saved_(false) + { + JS_ASSERT(empty()); + } + + /* Safe casts guaranteed by the contiguous-stack layout. */ + + Value *valueRangeBegin() const { + return (Value *)(this + 1); + } + + /* + * The set of fields provided by a segment depend on its state. In addition + * to the "active" and "suspended" states described in Stack.h, segments + * have a third state: empty. An empty segment contains no frames and is + * pushed for the purpose of preparing the args to Invoke. Invoke args + * requires special handling because anything can happen between pushing + * Invoke args and calling Invoke. Since an empty segment contains no + * frames, it cannot become the "current segment" of a ContextStack (for + * various arcane and hopefully temporary reasons). Thus, an empty segment + * is pushed onto the StackSpace but only pushed onto a ContextStack when it + * gets its first frame pushed from js::Invoke. + * + * Finally, (to support JS_SaveFrameChain/JS_RestoreFrameChain) a suspended + * segment may or may not be "saved". Normally, when the active segment is + * popped, the previous segment (which is necessarily suspended) becomes + * active. If the previous segment was saved, however, then it stays + * suspended until it is made active by a call to JS_RestoreFrameChain. This + * is why a context may have a current segment, but not an active segment. + * Hopefully, this feature will be removed. 
+ */ + + bool empty() const { + JS_ASSERT(!!stack_ == !!initialFrame_); + JS_ASSERT_IF(!stack_, suspendedRegs_ == NON_NULL_SUSPENDED_REGS && !saved_); + return !stack_; + } + + bool isActive() const { + JS_ASSERT_IF(!suspendedRegs_, stack_ && !saved_); + JS_ASSERT_IF(!stack_, suspendedRegs_ == NON_NULL_SUSPENDED_REGS); + return !suspendedRegs_; + } + + bool isSuspended() const { + JS_ASSERT_IF(!stack_ || !suspendedRegs_, !saved_); + JS_ASSERT_IF(!stack_, suspendedRegs_ == NON_NULL_SUSPENDED_REGS); + return stack_ && suspendedRegs_; + } + + /* Substate of suspended, queryable in any state. */ + + bool isSaved() const { + JS_ASSERT_IF(saved_, isSuspended()); + return saved_; + } + + /* Transitioning between empty <--> isActive */ + + void joinContext(ContextStack &stack, StackFrame &frame) { + JS_ASSERT(empty()); + stack_ = &stack; + initialFrame_ = &frame; + suspendedRegs_ = NULL; + JS_ASSERT(isActive()); + } + + void leaveContext() { + JS_ASSERT(isActive()); + stack_ = NULL; + initialFrame_ = NULL; + suspendedRegs_ = NON_NULL_SUSPENDED_REGS; + JS_ASSERT(empty()); + } + + ContextStack &stack() const { + JS_ASSERT(!empty()); + return *stack_; + } + + ContextStack *maybeStack() const { + return stack_; + } + +#undef NON_NULL_SUSPENDED_REGS + + /* Transitioning between isActive <--> isSuspended */ + + void suspend(FrameRegs ®s) { + JS_ASSERT(isActive()); + JS_ASSERT(contains(regs.fp())); + suspendedRegs_ = ®s; + JS_ASSERT(isSuspended()); + } + + void resume() { + JS_ASSERT(isSuspended()); + suspendedRegs_ = NULL; + JS_ASSERT(isActive()); + } + + /* When isSuspended, transitioning isSaved <--> !isSaved */ + + void save(FrameRegs ®s) { + JS_ASSERT(!isSuspended()); + suspend(regs); + saved_ = true; + JS_ASSERT(isSaved()); + } + + void restore() { + JS_ASSERT(isSaved()); + saved_ = false; + resume(); + JS_ASSERT(!isSuspended()); + } + + /* Data available when !empty */ + + StackFrame *initialFrame() const { + JS_ASSERT(!empty()); + return initialFrame_; + } + + FrameRegs 
¤tRegs() const { + JS_ASSERT(!empty()); + return isActive() ? stack_->regs() : suspendedRegs(); + } + + StackFrame *currentFrame() const { + return currentRegs().fp(); + } + + StackFrame *currentFrameOrNull() const { + return empty() ? NULL : currentFrame(); + } + + /* Data available when isSuspended. */ + + FrameRegs &suspendedRegs() const { + JS_ASSERT(isSuspended()); + return *suspendedRegs_; + } + + StackFrame *suspendedFrame() const { + return suspendedRegs_->fp(); + } + + /* JSContext / js::StackSpace bookkeeping. */ + + void setPreviousInContext(StackSegment *seg) { + previousInContext_ = seg; + } + + StackSegment *previousInContext() const { + return previousInContext_; + } + + void setPreviousInMemory(StackSegment *seg) { + previousInMemory_ = seg; + } + + StackSegment *previousInMemory() const { + return previousInMemory_; + } + + void setInitialVarObj(JSObject *obj) { + JS_ASSERT(!empty()); + initialVarObj_ = obj; + } + + bool hasInitialVarObj() { + JS_ASSERT(!empty()); + return initialVarObj_ != NULL; + } + + JSObject &initialVarObj() const { + JS_ASSERT(!empty() && initialVarObj_); + return *initialVarObj_; + } + + bool contains(const StackFrame *fp) const; + + StackFrame *computeNextFrame(StackFrame *fp) const; +}; + +static const size_t VALUES_PER_STACK_SEGMENT = sizeof(StackSegment) / sizeof(Value); +JS_STATIC_ASSERT(sizeof(StackSegment) % sizeof(Value) == 0); + +/*****************************************************************************/ + +inline void +StackFrame::initPrev(JSContext *cx) +{ + JS_ASSERT(flags_ & HAS_PREVPC); + if (FrameRegs *regs = cx->maybeRegs()) { + prev_ = regs->fp(); + prevpc_ = regs->pc; + JS_ASSERT_IF(!prev_->isDummyFrame() && !prev_->hasImacropc(), + uint32(prevpc_ - prev_->script()->code) < prev_->script()->length); + } else { + prev_ = NULL; +#ifdef DEBUG + prevpc_ = (jsbytecode *)0xbadc; +#endif + } +} + +inline void +StackFrame::resetGeneratorPrev(JSContext *cx) +{ + flags_ |= HAS_PREVPC; + initPrev(cx); +} + +inline 
void +StackFrame::initCallFrame(JSContext *cx, JSObject &callee, JSFunction *fun, + uint32 nactual, uint32 flagsArg) +{ + JS_ASSERT((flagsArg & ~(CONSTRUCTING | + OVERFLOW_ARGS | + UNDERFLOW_ARGS)) == 0); + JS_ASSERT(fun == callee.getFunctionPrivate()); + + /* Initialize stack frame members. */ + flags_ = FUNCTION | HAS_PREVPC | HAS_SCOPECHAIN | flagsArg; + exec.fun = fun; + args.nactual = nactual; /* only need to write if over/under-flow */ + scopeChain_ = callee.getParent(); + initPrev(cx); + JS_ASSERT(!hasImacropc()); + JS_ASSERT(!hasHookData()); + JS_ASSERT(annotation() == NULL); + JS_ASSERT(!hasCallObj()); +} + +inline void +StackFrame::resetInvokeCallFrame() +{ + /* Undo changes to frame made during execution; see initCallFrame */ + + putActivationObjects(); + + JS_ASSERT(!(flags_ & ~(FUNCTION | + OVERFLOW_ARGS | + UNDERFLOW_ARGS | + OVERRIDE_ARGS | + HAS_PREVPC | + HAS_RVAL | + HAS_SCOPECHAIN | + HAS_ANNOTATION | + HAS_HOOK_DATA | + HAS_CALL_OBJ | + HAS_ARGS_OBJ | + FINISHED_IN_INTERP))); + + /* + * Since the stack frame is usually popped after PutActivationObjects, + * these bits aren't cleared. The activation objects must have actually + * been put, though. + */ + JS_ASSERT_IF(flags_ & HAS_CALL_OBJ, callObj().getPrivate() == NULL); + JS_ASSERT_IF(flags_ & HAS_ARGS_OBJ, argsObj().getPrivate() == NULL); + + flags_ &= FUNCTION | + OVERFLOW_ARGS | + HAS_PREVPC | + UNDERFLOW_ARGS; + + JS_ASSERT(exec.fun == callee().getFunctionPrivate()); + scopeChain_ = callee().getParent(); +} + +inline void +StackFrame::initCallFrameCallerHalf(JSContext *cx, uint32 flagsArg, + void *ncode) +{ + JS_ASSERT((flagsArg & ~(CONSTRUCTING | + FUNCTION | + OVERFLOW_ARGS | + UNDERFLOW_ARGS)) == 0); + + flags_ = FUNCTION | flagsArg; + prev_ = cx->fp(); + ncode_ = ncode; +} + +/* + * The "early prologue" refers to the members that are stored for the benefit + * of slow paths before initializing the rest of the members. 
+ */ +inline void +StackFrame::initCallFrameEarlyPrologue(JSFunction *fun, uint32 nactual) +{ + exec.fun = fun; + if (flags_ & (OVERFLOW_ARGS | UNDERFLOW_ARGS)) + args.nactual = nactual; +} + +/* + * The "late prologue" refers to the members that are stored after having + * checked for stack overflow and formal/actual arg mismatch. + */ +inline void +StackFrame::initCallFrameLatePrologue() +{ + SetValueRangeToUndefined(slots(), script()->nfixed); +} + +inline void +StackFrame::initEvalFrame(JSContext *cx, JSScript *script, StackFrame *prev, uint32 flagsArg) +{ + JS_ASSERT(flagsArg & EVAL); + JS_ASSERT((flagsArg & ~(EVAL | DEBUGGER)) == 0); + JS_ASSERT(prev->isScriptFrame()); + + /* Copy (callee, thisv). */ + Value *dstvp = (Value *)this - 2; + Value *srcvp = prev->hasArgs() + ? prev->formalArgs() - 2 + : (Value *)prev - 2; + dstvp[0] = srcvp[0]; + dstvp[1] = srcvp[1]; + JS_ASSERT_IF(prev->isFunctionFrame(), + dstvp[0].toObject().isFunction()); + + /* Initialize stack frame members. */ + flags_ = flagsArg | HAS_PREVPC | HAS_SCOPECHAIN | + (prev->flags_ & (FUNCTION | GLOBAL)); + if (isFunctionFrame()) { + exec = prev->exec; + args.script = script; + } else { + exec.script = script; + } + + scopeChain_ = &prev->scopeChain(); + prev_ = prev; + prevpc_ = prev->pc(cx); + JS_ASSERT(!hasImacropc()); + JS_ASSERT(!hasHookData()); + setAnnotation(prev->annotation()); +} + +inline void +StackFrame::initGlobalFrame(JSScript *script, JSObject &chain, uint32 flagsArg) +{ + JS_ASSERT((flagsArg & ~(EVAL | DEBUGGER)) == 0); + + /* Initialize (callee, thisv). */ + Value *vp = (Value *)this - 2; + vp[0].setUndefined(); + vp[1].setUndefined(); /* Set after frame pushed using thisObject */ + + /* Initialize stack frame members. 
*/ + flags_ = flagsArg | GLOBAL | HAS_PREVPC | HAS_SCOPECHAIN; + exec.script = script; + args.script = (JSScript *)0xbad; + scopeChain_ = &chain; + prev_ = NULL; + JS_ASSERT(!hasImacropc()); + JS_ASSERT(!hasHookData()); + JS_ASSERT(annotation() == NULL); +} + +inline void +StackFrame::initDummyFrame(JSContext *cx, JSObject &chain) +{ + PodZero(this); + flags_ = DUMMY | HAS_PREVPC | HAS_SCOPECHAIN; + initPrev(cx); + chain.isGlobal(); + setScopeChainNoCallObj(chain); +} + +inline void +StackFrame::stealFrameAndSlots(Value *vp, StackFrame *otherfp, + Value *othervp, Value *othersp) +{ + JS_ASSERT(vp == (Value *)this - (otherfp->formalArgsEnd() - othervp)); + JS_ASSERT(othervp == otherfp->actualArgs() - 2); + JS_ASSERT(othersp >= otherfp->slots()); + JS_ASSERT(othersp <= otherfp->base() + otherfp->numSlots()); + + PodCopy(vp, othervp, othersp - othervp); + JS_ASSERT(vp == this->actualArgs() - 2); + + /* Catch bad-touching of non-canonical args (e.g., generator_trace). */ + if (otherfp->hasOverflowArgs()) + Debug_SetValueRangeToCrashOnTouch(othervp, othervp + 2 + otherfp->numFormalArgs()); + + /* + * Repoint Call, Arguments, Block and With objects to the new live frame. + * Call and Arguments are done directly because we have pointers to them. + * Block and With objects are done indirectly through 'liveFrame'. See + * js_LiveFrameToFloating comment in jsiter.h. 
+ */ + if (hasCallObj()) { + JSObject &obj = callObj(); + obj.setPrivate(this); + otherfp->flags_ &= ~HAS_CALL_OBJ; + if (js_IsNamedLambda(fun())) { + JSObject *env = obj.getParent(); + JS_ASSERT(env->getClass() == &js_DeclEnvClass); + env->setPrivate(this); + } + } + if (hasArgsObj()) { + JSObject &args = argsObj(); + JS_ASSERT(args.isArguments()); + if (args.isNormalArguments()) + args.setPrivate(this); + else + JS_ASSERT(!args.getPrivate()); + otherfp->flags_ &= ~HAS_ARGS_OBJ; + } +} + +inline Value & +StackFrame::canonicalActualArg(uintN i) const +{ + if (i < numFormalArgs()) + return formalArg(i); + JS_ASSERT(i < numActualArgs()); + return actualArgs()[i]; +} + +template +inline bool +StackFrame::forEachCanonicalActualArg(Op op) +{ + uintN nformal = fun()->nargs; + Value *formals = formalArgsEnd() - nformal; + uintN nactual = numActualArgs(); + if (nactual <= nformal) { + uintN i = 0; + Value *actualsEnd = formals + nactual; + for (Value *p = formals; p != actualsEnd; ++p, ++i) { + if (!op(i, p)) + return false; + } + } else { + uintN i = 0; + Value *formalsEnd = formalArgsEnd(); + for (Value *p = formals; p != formalsEnd; ++p, ++i) { + if (!op(i, p)) + return false; + } + Value *actuals = formalsEnd - (nactual + 2); + Value *actualsEnd = formals - 2; + for (Value *p = actuals; p != actualsEnd; ++p, ++i) { + if (!op(i, p)) + return false; + } + } + return true; +} + +template +inline bool +StackFrame::forEachFormalArg(Op op) +{ + Value *formals = formalArgsEnd() - fun()->nargs; + Value *formalsEnd = formalArgsEnd(); + uintN i = 0; + for (Value *p = formals; p != formalsEnd; ++p, ++i) { + if (!op(i, p)) + return false; + } + return true; +} + +struct CopyTo +{ + Value *dst; + CopyTo(Value *dst) : dst(dst) {} + bool operator()(uintN, Value *src) { + *dst++ = *src; + return true; + } +}; + +JS_ALWAYS_INLINE void +StackFrame::clearMissingArgs() +{ + if (flags_ & UNDERFLOW_ARGS) + SetValueRangeToUndefined(formalArgs() + numActualArgs(), formalArgsEnd()); +} + 
+inline uintN +StackFrame::numActualArgs() const +{ + JS_ASSERT(hasArgs()); + if (JS_UNLIKELY(flags_ & (OVERFLOW_ARGS | UNDERFLOW_ARGS))) + return hasArgsObj() ? argsObj().getArgsInitialLength() : args.nactual; + return numFormalArgs(); +} + +inline Value * +StackFrame::actualArgs() const +{ + JS_ASSERT(hasArgs()); + Value *argv = formalArgs(); + if (JS_UNLIKELY(flags_ & OVERFLOW_ARGS)) { + uintN nactual = hasArgsObj() ? argsObj().getArgsInitialLength() : args.nactual; + return argv - (2 + nactual); + } + return argv; +} + +inline Value * +StackFrame::actualArgsEnd() const +{ + JS_ASSERT(hasArgs()); + if (JS_UNLIKELY(flags_ & OVERFLOW_ARGS)) + return formalArgs() - 2; + return formalArgs() + numActualArgs(); +} + +inline void +StackFrame::setArgsObj(JSObject &obj) +{ + JS_ASSERT_IF(hasArgsObj(), &obj == args.obj); + JS_ASSERT_IF(!hasArgsObj(), numActualArgs() == obj.getArgsInitialLength()); + args.obj = &obj; + flags_ |= HAS_ARGS_OBJ; +} + +inline void +StackFrame::setScopeChainNoCallObj(JSObject &obj) +{ +#ifdef DEBUG + JS_ASSERT(&obj != NULL); + if (&obj != sInvalidScopeChain) { + if (hasCallObj()) { + JSObject *pobj = &obj; + while (pobj && pobj->getPrivate() != this) + pobj = pobj->getParent(); + JS_ASSERT(pobj); + } else { + for (JSObject *pobj = &obj; pobj; pobj = pobj->getParent()) + JS_ASSERT_IF(pobj->isCall(), pobj->getPrivate() != this); + } + } +#endif + scopeChain_ = &obj; + flags_ |= HAS_SCOPECHAIN; +} + +inline void +StackFrame::setScopeChainWithOwnCallObj(JSObject &obj) +{ + JS_ASSERT(&obj != NULL); + JS_ASSERT(!hasCallObj() && obj.isCall() && obj.getPrivate() == this); + scopeChain_ = &obj; + flags_ |= HAS_SCOPECHAIN | HAS_CALL_OBJ; +} + +inline JSObject & +StackFrame::callObj() const +{ + JS_ASSERT_IF(isNonEvalFunctionFrame() || isStrictEvalFrame(), hasCallObj()); + + JSObject *pobj = &scopeChain(); + while (JS_UNLIKELY(pobj->getClass() != &js_CallClass)) { + JS_ASSERT(IsCacheableNonGlobalScope(pobj) || pobj->isWith()); + pobj = pobj->getParent(); 
+ } + return *pobj; +} + +inline void +StackFrame::putActivationObjects() +{ + if (flags_ & (HAS_ARGS_OBJ | HAS_CALL_OBJ)) { + /* NB: there is an ordering dependency here. */ + if (hasCallObj()) + js_PutCallObject(this); + else if (hasArgsObj()) + js_PutArgsObject(this); + } +} + +inline void +StackFrame::markActivationObjectsAsPut() +{ + if (flags_ & (HAS_ARGS_OBJ | HAS_CALL_OBJ)) { + if (hasArgsObj() && !argsObj().getPrivate()) { + args.nactual = args.obj->getArgsInitialLength(); + flags_ &= ~HAS_ARGS_OBJ; + } + if (hasCallObj() && !callObj().getPrivate()) { + /* + * For function frames, the call object may or may not have have an + * enclosing DeclEnv object, so we use the callee's parent, since + * it was the initial scope chain. For global (strict) eval frames, + * there is no calle, but the call object's parent is the initial + * scope chain. + */ + scopeChain_ = isFunctionFrame() + ? callee().getParent() + : scopeChain_->getParent(); + flags_ &= ~HAS_CALL_OBJ; + } + } +} + +/*****************************************************************************/ + +JS_ALWAYS_INLINE void +StackSpace::pushOverride(Value *top, StackOverride *prev) +{ + *prev = override_; + + override_.top = top; +#ifdef DEBUG + override_.seg = seg_; + override_.frame = seg_->currentFrameOrNull(); +#endif + + JS_ASSERT(prev->top < override_.top); +} + +JS_ALWAYS_INLINE void +StackSpace::popOverride(const StackOverride &prev) +{ + JS_ASSERT(prev.top < override_.top); + + JS_ASSERT_IF(seg_->empty(), override_.frame == NULL); + JS_ASSERT_IF(!seg_->empty(), override_.frame == seg_->currentFrame()); + JS_ASSERT(override_.seg == seg_); + + override_ = prev; +} + +JS_ALWAYS_INLINE Value * +StackSpace::activeFirstUnused() const +{ + JS_ASSERT(seg_->isActive()); + + Value *max = Max(seg_->stack().regs().sp, override_.top); + JS_ASSERT(max == firstUnused()); + return max; +} + +JS_ALWAYS_INLINE bool +StackSpace::ensureEnoughSpaceToEnterTrace() +{ + ptrdiff_t needed = 
TraceNativeStorage::MAX_NATIVE_STACK_SLOTS + + TraceNativeStorage::MAX_CALL_STACK_ENTRIES * VALUES_PER_STACK_FRAME; +#ifdef XP_WIN + return ensureSpace(NULL, firstUnused(), needed); +#else + return end_ - firstUnused() > needed; +#endif +} + +STATIC_POSTCONDITION(!return || ubound(from) >= nvals) +JS_ALWAYS_INLINE bool +StackSpace::ensureSpace(JSContext *maybecx, Value *from, ptrdiff_t nvals) const +{ + JS_ASSERT(from >= firstUnused()); +#ifdef XP_WIN + JS_ASSERT(from <= commitEnd_); + if (commitEnd_ - from < nvals) + return bumpCommit(maybecx, from, nvals); + return true; +#else + if (end_ - from < nvals) { + js_ReportOutOfScriptQuota(maybecx); + return false; + } + return true; +#endif +} + +inline Value * +StackSpace::getStackLimit(JSContext *cx) +{ + FrameRegs ®s = cx->regs(); + uintN minSpace = regs.fp()->numSlots() + VALUES_PER_STACK_FRAME; + Value *sp = regs.sp; + Value *required = sp + minSpace; + Value *desired = sp + STACK_QUOTA; +#ifdef XP_WIN + if (required <= commitEnd_) + return Min(commitEnd_, desired); + if (!bumpCommit(cx, sp, minSpace)) + return NULL; + JS_ASSERT(commitEnd_ >= required); + return commitEnd_; +#else + if (required <= end_) + return Min(end_, desired); + js_ReportOutOfScriptQuota(cx); + return NULL; +#endif +} + +/*****************************************************************************/ + +JS_ALWAYS_INLINE bool +ContextStack::isCurrentAndActive() const +{ + assertSegmentsInSync(); + return seg_ && seg_->isActive() && seg_ == space().currentSegment(); +} + +namespace detail { + +struct OOMCheck +{ + JS_ALWAYS_INLINE bool + operator()(JSContext *cx, StackSpace &space, Value *from, uintN nvals) + { + return space.ensureSpace(cx, from, nvals); + } +}; + +struct LimitCheck +{ + StackFrame *base; + Value **limit; + + LimitCheck(StackFrame *base, Value **limit) : base(base), limit(limit) {} + + JS_ALWAYS_INLINE bool + operator()(JSContext *cx, StackSpace &space, Value *from, uintN nvals) + { + /* + * Include an extra 
sizeof(StackFrame) to satisfy the method-jit + * stackLimit invariant. + */ + nvals += VALUES_PER_STACK_FRAME; + + JS_ASSERT(from < *limit); + if (*limit - from >= ptrdiff_t(nvals)) + return true; + return space.bumpLimitWithinQuota(cx, base, from, nvals, limit); + } +}; + +} /* namespace detail */ + +template +JS_ALWAYS_INLINE StackFrame * +ContextStack::getCallFrame(JSContext *cx, Value *firstUnused, uintN nactual, + JSFunction *fun, JSScript *script, uint32 *flags, + Check check) const +{ + JS_ASSERT(fun->script() == script); + JS_ASSERT(space().firstUnused() == firstUnused); + + uintN nvals = VALUES_PER_STACK_FRAME + script->nslots; + uintN nformal = fun->nargs; + + /* Maintain layout invariant: &formalArgs[0] == ((Value *)fp) - nformal. */ + + if (nactual == nformal) { + if (JS_UNLIKELY(!check(cx, space(), firstUnused, nvals))) + return NULL; + return reinterpret_cast(firstUnused); + } + + if (nactual < nformal) { + *flags |= StackFrame::UNDERFLOW_ARGS; + uintN nmissing = nformal - nactual; + if (JS_UNLIKELY(!check(cx, space(), firstUnused, nmissing + nvals))) + return NULL; + SetValueRangeToUndefined(firstUnused, nmissing); + return reinterpret_cast(firstUnused + nmissing); + } + + *flags |= StackFrame::OVERFLOW_ARGS; + uintN ncopy = 2 + nformal; + if (JS_UNLIKELY(!check(cx, space(), firstUnused, ncopy + nvals))) + return NULL; + + Value *dst = firstUnused; + Value *src = firstUnused - (2 + nactual); + PodCopy(dst, src, ncopy); + Debug_SetValueRangeToCrashOnTouch(src, ncopy); + return reinterpret_cast(firstUnused + ncopy); +} + +JS_ALWAYS_INLINE StackFrame * +ContextStack::getInlineFrame(JSContext *cx, Value *sp, uintN nactual, + JSFunction *fun, JSScript *script, uint32 *flags) const +{ + JS_ASSERT(isCurrentAndActive()); + JS_ASSERT(cx->regs().sp == sp); + + return getCallFrame(cx, sp, nactual, fun, script, flags, detail::OOMCheck()); +} + +JS_ALWAYS_INLINE StackFrame * +ContextStack::getInlineFrameWithinLimit(JSContext *cx, Value *sp, uintN nactual, + 
JSFunction *fun, JSScript *script, uint32 *flags, + StackFrame *fp, Value **limit) const +{ + JS_ASSERT(isCurrentAndActive()); + JS_ASSERT(cx->regs().sp == sp); + + return getCallFrame(cx, sp, nactual, fun, script, flags, detail::LimitCheck(fp, limit)); +} + +JS_ALWAYS_INLINE void +ContextStack::pushInlineFrame(JSScript *script, StackFrame *fp, FrameRegs ®s) +{ + JS_ASSERT(isCurrentAndActive()); + JS_ASSERT(regs_ == ®s && script == fp->script()); + + regs.prepareToRun(fp, script); +} + +JS_ALWAYS_INLINE void +ContextStack::popInlineFrame() +{ + JS_ASSERT(isCurrentAndActive()); + + StackFrame *fp = regs_->fp(); + fp->putActivationObjects(); + + Value *newsp = fp->actualArgs() - 1; + JS_ASSERT(newsp >= fp->prev()->base()); + + newsp[-1] = fp->returnValue(); + regs_->popFrame(newsp); +} + +JS_ALWAYS_INLINE bool +ContextStack::pushInvokeArgs(JSContext *cx, uintN argc, InvokeArgsGuard *argsGuard) +{ + if (!isCurrentAndActive()) + return pushInvokeArgsSlow(cx, argc, argsGuard); + + Value *start = space().activeFirstUnused(); + uintN vplen = 2 + argc; + if (!space().ensureSpace(cx, start, vplen)) + return false; + + Value *vp = start; + ImplicitCast(*argsGuard) = CallArgsFromVp(argc, vp); + + /* + * Use stack override to root vp until the frame is pushed. Don't need to + * MakeRangeGCSafe: the VM stack is conservatively marked. 
+ */ + space().pushOverride(vp + vplen, &argsGuard->prevOverride_); + + argsGuard->stack_ = this; + return true; +} + +JS_ALWAYS_INLINE void +ContextStack::popInvokeArgs(const InvokeArgsGuard &argsGuard) +{ + if (argsGuard.seg_) { + popInvokeArgsSlow(argsGuard); + return; + } + + JS_ASSERT(isCurrentAndActive()); + space().popOverride(argsGuard.prevOverride_); +} + +JS_ALWAYS_INLINE +InvokeArgsGuard::~InvokeArgsGuard() +{ + if (JS_UNLIKELY(!pushed())) + return; + stack_->popInvokeArgs(*this); +} + +JS_ALWAYS_INLINE StackFrame * +ContextStack::getInvokeFrame(JSContext *cx, const CallArgs &args, + JSFunction *fun, JSScript *script, + uint32 *flags, InvokeFrameGuard *frameGuard) const +{ + uintN argc = args.argc(); + Value *start = args.argv() + argc; + JS_ASSERT(start == space().firstUnused()); + StackFrame *fp = getCallFrame(cx, start, argc, fun, script, flags, detail::OOMCheck()); + if (!fp) + return NULL; + + frameGuard->regs_.prepareToRun(fp, script); + return fp; +} + +JS_ALWAYS_INLINE void +ContextStack::pushInvokeFrame(const CallArgs &args, InvokeFrameGuard *frameGuard) +{ + JS_ASSERT(space().firstUnused() == args.argv() + args.argc()); + + if (JS_UNLIKELY(space().seg_->empty())) { + pushInvokeFrameSlow(frameGuard); + return; + } + + frameGuard->prevRegs_ = regs_; + regs_ = &frameGuard->regs_; + JS_ASSERT(isCurrentAndActive()); + + frameGuard->stack_ = this; +} + +JS_ALWAYS_INLINE void +ContextStack::popInvokeFrame(const InvokeFrameGuard &frameGuard) +{ + JS_ASSERT(isCurrentAndActive()); + JS_ASSERT(&frameGuard.regs_ == regs_); + + if (JS_UNLIKELY(seg_->initialFrame() == regs_->fp())) { + popInvokeFrameSlow(frameGuard); + return; + } + + regs_->fp()->putActivationObjects(); + regs_ = frameGuard.prevRegs_; +} + +JS_ALWAYS_INLINE void +InvokeFrameGuard::pop() +{ + JS_ASSERT(pushed()); + stack_->popInvokeFrame(*this); + stack_ = NULL; +} + +JS_ALWAYS_INLINE +InvokeFrameGuard::~InvokeFrameGuard() +{ + if (pushed()) + pop(); +} + +JS_ALWAYS_INLINE JSObject & 
+ContextStack::currentVarObj() const +{ + if (regs_->fp()->hasCallObj()) + return regs_->fp()->callObj(); + return seg_->initialVarObj(); +} + +inline StackFrame * +ContextStack::findFrameAtLevel(uintN targetLevel) const +{ + StackFrame *fp = regs_->fp(); + while (true) { + JS_ASSERT(fp && fp->isScriptFrame()); + if (fp->script()->staticLevel == targetLevel) + break; + fp = fp->prev(); + } + return fp; +} + +/*****************************************************************************/ + +inline +FrameRegsIter::FrameRegsIter(JSContext *cx) + : cx_(cx) +{ + seg_ = cx->stack.currentSegment(); + if (JS_UNLIKELY(!seg_ || !seg_->isActive())) { + initSlow(); + return; + } + fp_ = cx->fp(); + sp_ = cx->regs().sp; + pc_ = cx->regs().pc; + return; +} + +inline FrameRegsIter & +FrameRegsIter::operator++() +{ + StackFrame *oldfp = fp_; + fp_ = fp_->prev(); + if (!fp_) + return *this; + + if (JS_UNLIKELY(oldfp == seg_->initialFrame())) { + incSlow(oldfp); + return *this; + } + + pc_ = oldfp->prevpc(); + sp_ = oldfp->formalArgsEnd(); + return *this; +} + +} /* namespace js */ + +#endif /* Stack_inl_h__ */ diff --git a/js/src/vm/Stack.cpp b/js/src/vm/Stack.cpp new file mode 100644 index 000000000000..7738a1f98793 --- /dev/null +++ b/js/src/vm/Stack.cpp @@ -0,0 +1,739 @@ +/* -*- Mode: C; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- + * vim: set ts=4 sw=4 et tw=79 ft=cpp: + * + * ***** BEGIN LICENSE BLOCK ***** + * Version: MPL 1.1/GPL 2.0/LGPL 2.1 + * + * The contents of this file are subject to the Mozilla Public License Version + * 1.1 (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * http://www.mozilla.org/MPL/ + * + * Software distributed under the License is distributed on an "AS IS" basis, + * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License + * for the specific language governing rights and limitations under the + * License. 
+ * + * The Original Code is SpiderMonkey JavaScript engine. + * + * The Initial Developer of the Original Code is + * Mozilla Corporation. + * Portions created by the Initial Developer are Copyright (C) 2009 + * the Initial Developer. All Rights Reserved. + * + * Contributor(s): + * Luke Wagner + * + * Alternatively, the contents of this file may be used under the terms of + * either the GNU General Public License Version 2 or later (the "GPL"), or + * the GNU Lesser General Public License Version 2.1 or later (the "LGPL"), + * in which case the provisions of the GPL or the LGPL are applicable instead + * of those above. If you wish to allow use of your version of this file only + * under the terms of either the GPL or the LGPL, and not to allow others to + * use your version of this file under the terms of the MPL, indicate your + * decision by deleting the provisions above and replace them with the notice + * and other provisions required by the GPL or the LGPL. If you do not delete + * the provisions above, a recipient may use your version of this file under + * the terms of any one of the MPL, the GPL or the LGPL. + * + * ***** END LICENSE BLOCK ***** */ + +#include "jsgcmark.h" +#include "methodjit/MethodJIT.h" +#include "Stack.h" + +#include "jsgcinlines.h" +#include "jsobjinlines.h" + +#include "Stack-inl.h" + +/* Includes to get to low-level memory-mapping functionality. 
*/ +#ifdef XP_WIN +# include "jswin.h" +#elif defined(XP_OS2) +# define INCL_DOSMEMMGR +# include +#else +# include +# include +# if !defined(MAP_ANONYMOUS) +# if defined(MAP_ANON) +# define MAP_ANONYMOUS MAP_ANON +# else +# define MAP_ANONYMOUS 0 +# endif +# endif +#endif + +using namespace js; + +/*****************************************************************************/ + +#ifdef DEBUG +JSObject *const StackFrame::sInvalidScopeChain = (JSObject *)0xbeef; +#endif + +jsbytecode * +StackFrame::prevpcSlow() +{ + JS_ASSERT(!(flags_ & HAS_PREVPC)); +#if defined(JS_METHODJIT) && defined(JS_MONOIC) + StackFrame *p = prev(); + js::mjit::JITScript *jit = p->script()->getJIT(p->isConstructing()); + prevpc_ = jit->nativeToPC(ncode_); + flags_ |= HAS_PREVPC; + return prevpc_; +#else + JS_NOT_REACHED("Unknown PC for frame"); + return NULL; +#endif +} + +jsbytecode * +StackFrame::pc(JSContext *cx, StackFrame *next) +{ + JS_ASSERT_IF(next, next->prev() == this); + + StackSegment &seg = cx->stack.space().containingSegment(this); + FrameRegs ®s = seg.currentRegs(); + if (regs.fp() == this) + return regs.pc; + if (!next) + next = seg.computeNextFrame(this); + return next->prevpc(); +} + +/*****************************************************************************/ + +JS_REQUIRES_STACK bool +StackSegment::contains(const StackFrame *fp) const +{ + JS_ASSERT(!empty()); + + if (fp < initialFrame_) + return false; + + StackFrame *start; + if (isActive()) + start = stack_->fp(); + else + start = suspendedRegs_->fp(); + + if (fp > start) + return false; + +#ifdef DEBUG + bool found = false; + StackFrame *stop = initialFrame_->prev(); + for (StackFrame *f = start; !found && f != stop; f = f->prev()) { + if (f == fp) { + found = true; + break; + } + } + JS_ASSERT(found); +#endif + + return true; +} + +StackFrame * +StackSegment::computeNextFrame(StackFrame *fp) const +{ + JS_ASSERT(contains(fp)); + JS_ASSERT(fp != currentFrame()); + + StackFrame *next = currentFrame(); + StackFrame 
*prev; + while ((prev = next->prev()) != fp) + next = prev; + return next; +} + +/*****************************************************************************/ + +StackSpace::StackSpace() + : base_(NULL), +#ifdef XP_WIN + commitEnd_(NULL), +#endif + end_(NULL), + seg_(NULL) +{ + override_.top = NULL; +#ifdef DEBUG + override_.seg = NULL; + override_.frame = NULL; +#endif +} + +bool +StackSpace::init() +{ + void *p; +#ifdef XP_WIN + p = VirtualAlloc(NULL, CAPACITY_BYTES, MEM_RESERVE, PAGE_READWRITE); + if (!p) + return false; + void *check = VirtualAlloc(p, COMMIT_BYTES, MEM_COMMIT, PAGE_READWRITE); + if (p != check) + return false; + base_ = reinterpret_cast(p); + commitEnd_ = base_ + COMMIT_VALS; + end_ = base_ + CAPACITY_VALS; +#elif defined(XP_OS2) + if (DosAllocMem(&p, CAPACITY_BYTES, PAG_COMMIT | PAG_READ | PAG_WRITE | OBJ_ANY) && + DosAllocMem(&p, CAPACITY_BYTES, PAG_COMMIT | PAG_READ | PAG_WRITE)) + return false; + base_ = reinterpret_cast(p); + end_ = base_ + CAPACITY_VALS; +#else + JS_ASSERT(CAPACITY_BYTES % getpagesize() == 0); + p = mmap(NULL, CAPACITY_BYTES, PROT_READ | PROT_WRITE, MAP_PRIVATE | MAP_ANONYMOUS, -1, 0); + if (p == MAP_FAILED) + return false; + base_ = reinterpret_cast(p); + end_ = base_ + CAPACITY_VALS; +#endif + return true; +} + +StackSpace::~StackSpace() +{ + JS_ASSERT(!seg_); + if (!base_) + return; +#ifdef XP_WIN + VirtualFree(base_, (commitEnd_ - base_) * sizeof(Value), MEM_DECOMMIT); + VirtualFree(base_, 0, MEM_RELEASE); +#elif defined(XP_OS2) + DosFreeMem(base_); +#else +#ifdef SOLARIS + munmap((caddr_t)base_, CAPACITY_BYTES); +#else + munmap(base_, CAPACITY_BYTES); +#endif +#endif +} + +Value * +StackSpace::firstUnused() const +{ + if (!seg_) { + JS_ASSERT(override_.top == NULL); + return base_; + } + if (!seg_->empty()) { + Value *sp = seg_->currentRegs().sp; + if (override_.top > sp) { + JS_ASSERT(override_.seg == seg_); + JS_ASSERT_IF(seg_->isActive(), override_.frame == seg_->stack().fp()); + JS_ASSERT_IF(!seg_->isActive(), 
override_.frame == seg_->suspendedFrame()); + return override_.top; + } + return sp; + } + JS_ASSERT(override_.seg == seg_); + return override_.top; +} + +StackSegment & +StackSpace::containingSegment(const StackFrame *target) const +{ + for (StackSegment *s = seg_; s; s = s->previousInMemory()) { + if (s->contains(target)) + return *s; + } + JS_NOT_REACHED("frame not in stack space"); + return *(StackSegment *)NULL; +} + +JSObject & +StackSpace::varObjForFrame(const StackFrame *fp) +{ + if (fp->hasCallObj()) + return fp->callObj(); + return containingSegment(fp).initialVarObj(); +} + +void +StackSpace::mark(JSTracer *trc) +{ + /* + * JIT code can leave values in an incoherent (i.e., unsafe for precise + * marking) state, hence MarkStackRangeConservatively. + */ + Value *end = firstUnused(); + for (StackSegment *seg = seg_; seg; seg = seg->previousInMemory()) { + STATIC_ASSERT(ubound(end) >= 0); + if (seg->empty()) { + /* Mark slots/args trailing off segment. */ + MarkStackRangeConservatively(trc, seg->valueRangeBegin(), end); + } else { + /* This may be the only pointer to the initialVarObj. */ + if (seg->hasInitialVarObj()) + gc::MarkObject(trc, seg->initialVarObj(), "varobj"); + + /* Mark slots/args trailing off of the last stack frame. */ + StackFrame *fp = seg->currentFrame(); + MarkStackRangeConservatively(trc, fp->slots(), end); + + /* Mark stack frames and slots/args between stack frames. */ + StackFrame *initial = seg->initialFrame(); + for (StackFrame *f = fp; f != initial; f = f->prev()) { + js_TraceStackFrame(trc, f); + MarkStackRangeConservatively(trc, f->prev()->slots(), (Value *)f); + } + + /* Mark initial stack frame and leading args. 
*/ + js_TraceStackFrame(trc, initial); + MarkStackRangeConservatively(trc, seg->valueRangeBegin(), (Value *)initial); + } + end = (Value *)seg; + } +} + +#ifdef XP_WIN +JS_FRIEND_API(bool) +StackSpace::bumpCommit(JSContext *maybecx, Value *from, ptrdiff_t nvals) const +{ + if (end_ - from < nvals) { + js_ReportOutOfScriptQuota(maybecx); + return false; + } + + Value *newCommit = commitEnd_; + Value *request = from + nvals; + + /* Use a dumb loop; will probably execute once. */ + JS_ASSERT((end_ - newCommit) % COMMIT_VALS == 0); + do { + newCommit += COMMIT_VALS; + JS_ASSERT((end_ - newCommit) >= 0); + } while (newCommit < request); + + /* The cast is safe because CAPACITY_BYTES is small. */ + int32 size = static_cast(newCommit - commitEnd_) * sizeof(Value); + + if (!VirtualAlloc(commitEnd_, size, MEM_COMMIT, PAGE_READWRITE)) { + js_ReportOutOfScriptQuota(maybecx); + return false; + } + + commitEnd_ = newCommit; + return true; +} +#endif + +bool +StackSpace::bumpLimitWithinQuota(JSContext *maybecx, StackFrame *fp, Value *sp, + uintN nvals, Value **limit) const +{ + JS_ASSERT(sp >= firstUnused()); + JS_ASSERT(sp + nvals >= *limit); +#ifdef XP_WIN + Value *quotaEnd = (Value *)fp + STACK_QUOTA; + if (sp + nvals < quotaEnd) { + if (!ensureSpace(NULL, sp, nvals)) + goto fail; + *limit = Min(quotaEnd, commitEnd_); + return true; + } + fail: +#endif + js_ReportOverRecursed(maybecx); + return false; +} + +bool +StackSpace::bumpLimit(JSContext *cx, StackFrame *fp, Value *sp, + uintN nvals, Value **limit) const +{ + JS_ASSERT(*limit > base_); + JS_ASSERT(sp < *limit); + + /* + * Ideally, we would only ensure space for 'nvals', not 'nvals + remain', + * since this is ~500K. However, this whole call should be a rare case: some + * script is passing a obscene number of args to 'apply' and we are just + * trying to keep the stack limit heuristic from breaking the script. 
+ */ + Value *quota = (Value *)fp + STACK_QUOTA; + uintN remain = quota - sp; + uintN inc = nvals + remain; + if (!ensureSpace(NULL, sp, inc)) + return false; + *limit = sp + inc; + return true; +} + +void +StackSpace::popSegment() +{ + JS_ASSERT(seg_->empty()); + seg_ = seg_->previousInMemory(); +} + +void +StackSpace::pushSegment(StackSegment &seg) +{ + JS_ASSERT(seg.empty()); + seg.setPreviousInMemory(seg_); + seg_ = &seg; +} + +/*****************************************************************************/ + +ContextStack::ContextStack(JSContext *cx) + : regs_(NULL), + seg_(NULL), + space_(&JS_THREAD_DATA(cx)->stackSpace), + cx_(cx) +{ + threadReset(); +} + +ContextStack::~ContextStack() +{ + JS_ASSERT(!regs_); + JS_ASSERT(!seg_); +} + +void +ContextStack::threadReset() +{ +#ifdef JS_THREADSAFE + if (cx_->thread()) + space_ = &JS_THREAD_DATA(cx_)->stackSpace; + else + space_ = NULL; +#else + space_ = &JS_THREAD_DATA(cx_)->stackSpace; +#endif +} + +#ifdef DEBUG +void +ContextStack::assertSegmentsInSync() const +{ + if (regs_) { + JS_ASSERT(seg_->isActive()); + if (StackSegment *prev = seg_->previousInContext()) + JS_ASSERT(!prev->isActive()); + } else { + JS_ASSERT_IF(seg_, !seg_->isActive()); + } +} + +void +ContextStack::assertSpaceInSync() const +{ + JS_ASSERT(space_); + JS_ASSERT(space_ == &JS_THREAD_DATA(cx_)->stackSpace); +} + +bool +ContextStack::contains(const StackFrame *fp) const +{ + return &space().containingSegment(fp).stack() == this; +} +#endif + +void +ContextStack::saveActiveSegment() +{ + JS_ASSERT(regs_); + seg_->save(*regs_); + regs_ = NULL; + cx_->resetCompartment(); +} + +void +ContextStack::restoreSegment() +{ + regs_ = &seg_->suspendedRegs(); + seg_->restore(); + cx_->resetCompartment(); +} + +bool +ContextStack::getSegmentAndFrame(JSContext *cx, uintN vplen, uintN nslots, + FrameGuard *frameGuard) const +{ + Value *start = space().firstUnused(); + uintN nvals = VALUES_PER_STACK_SEGMENT + vplen + VALUES_PER_STACK_FRAME + nslots; + if 
(!space().ensureSpace(cx, start, nvals)) + return false; + + StackSegment *seg = new(start) StackSegment; + Value *vp = seg->valueRangeBegin(); + + frameGuard->seg_ = seg; + frameGuard->vp_ = vp; + frameGuard->fp_ = reinterpret_cast(vp + vplen); + return true; +} + +void +ContextStack::pushSegmentAndFrameImpl(FrameRegs ®s, StackSegment &seg) +{ + JS_ASSERT(&seg == space().currentSegment()); + + if (regs_) + seg_->suspend(*regs_); + regs_ = ®s; + + seg.setPreviousInContext(seg_); + seg_ = &seg; + seg.joinContext(*this, *regs.fp()); +} + +void +ContextStack::pushSegmentAndFrame(FrameRegs ®s, FrameGuard *frameGuard) +{ + space().pushSegment(*frameGuard->seg_); + pushSegmentAndFrameImpl(regs, *frameGuard->seg_); + frameGuard->stack_ = this; +} + +void +ContextStack::popSegmentAndFrameImpl() +{ + JS_ASSERT(isCurrentAndActive()); + JS_ASSERT(&seg_->stack() == this); + JS_ASSERT(seg_->initialFrame() == regs_->fp()); + + regs_->fp()->putActivationObjects(); + + seg_->leaveContext(); + seg_ = seg_->previousInContext(); + if (seg_) { + if (seg_->isSaved()) { + regs_ = NULL; + } else { + regs_ = &seg_->suspendedRegs(); + seg_->resume(); + } + } else { + JS_ASSERT(regs_->fp()->prev() == NULL); + regs_ = NULL; + } +} + +void +ContextStack::popSegmentAndFrame() +{ + popSegmentAndFrameImpl(); + space().popSegment(); + notifyIfNoCodeRunning(); +} + +FrameGuard::~FrameGuard() +{ + if (!pushed()) + return; + JS_ASSERT(stack_->currentSegment() == seg_); + JS_ASSERT(stack_->currentSegment()->currentFrame() == fp_); + stack_->popSegmentAndFrame(); +} + +bool +ContextStack::getExecuteFrame(JSContext *cx, JSScript *script, + ExecuteFrameGuard *frameGuard) const +{ + if (!getSegmentAndFrame(cx, 2, script->nslots, frameGuard)) + return false; + frameGuard->regs_.prepareToRun(frameGuard->fp(), script); + return true; +} + +void +ContextStack::pushExecuteFrame(JSObject *initialVarObj, + ExecuteFrameGuard *frameGuard) +{ + pushSegmentAndFrame(frameGuard->regs_, frameGuard); + 
frameGuard->seg_->setInitialVarObj(initialVarObj); +} + +bool +ContextStack::pushDummyFrame(JSContext *cx, JSObject &scopeChain, + DummyFrameGuard *frameGuard) +{ + if (!getSegmentAndFrame(cx, 0 /*vplen*/, 0 /*nslots*/, frameGuard)) + return false; + + StackFrame *fp = frameGuard->fp(); + fp->initDummyFrame(cx, scopeChain); + frameGuard->regs_.initDummyFrame(fp); + + pushSegmentAndFrame(frameGuard->regs_, frameGuard); + return true; +} + +bool +ContextStack::getGeneratorFrame(JSContext *cx, uintN vplen, uintN nslots, + GeneratorFrameGuard *frameGuard) +{ + /* The regs will be set by SendToGenerator. */ + return getSegmentAndFrame(cx, vplen, nslots, frameGuard); +} + +void +ContextStack::pushGeneratorFrame(FrameRegs ®s, + GeneratorFrameGuard *frameGuard) +{ + JS_ASSERT(regs.fp() == frameGuard->fp()); + JS_ASSERT(regs.fp()->prev() == regs_->fp()); + + pushSegmentAndFrame(regs, frameGuard); +} + +bool +ContextStack::pushInvokeArgsSlow(JSContext *cx, uintN argc, + InvokeArgsGuard *argsGuard) +{ + /* + * Either there is no code running on this context or its not at the top of + * the contiguous stack. Either way, push a new empty segment which will + * root the args for invoke and later contain the frame pushed by Invoke. + */ + JS_ASSERT(!isCurrentAndActive()); + + Value *start = space().firstUnused(); + size_t vplen = 2 + argc; + ptrdiff_t nvals = VALUES_PER_STACK_SEGMENT + vplen; + if (!space().ensureSpace(cx, start, nvals)) + return false; + + StackSegment *seg = new(start) StackSegment; + argsGuard->seg_ = seg; + + Value *vp = seg->valueRangeBegin(); + ImplicitCast(*argsGuard) = CallArgsFromVp(argc, vp); + + /* + * Use stack override to root vp until the frame is pushed. Don't need to + * MakeRangeGCSafe: the VM stack is conservatively marked. 
+ */ + space().pushSegment(*seg); + space().pushOverride(vp + vplen, &argsGuard->prevOverride_); + + argsGuard->stack_ = this; + return true; +} + +void +ContextStack::popInvokeArgsSlow(const InvokeArgsGuard &argsGuard) +{ + JS_ASSERT(space().currentSegment()->empty()); + + space().popOverride(argsGuard.prevOverride_); + space().popSegment(); + notifyIfNoCodeRunning(); +} + +void +ContextStack::pushInvokeFrameSlow(InvokeFrameGuard *frameGuard) +{ + JS_ASSERT(space().seg_->empty()); + pushSegmentAndFrameImpl(frameGuard->regs_, *space().seg_); + frameGuard->stack_ = this; +} + +void +ContextStack::popInvokeFrameSlow(const InvokeFrameGuard &frameGuard) +{ + JS_ASSERT(frameGuard.regs_.fp() == seg_->initialFrame()); + popSegmentAndFrameImpl(); +} + +/* + * NB: this function can call out and observe the stack (e.g., through GC), so + * it should only be called from a consistent stack state. + */ +void +ContextStack::notifyIfNoCodeRunning() +{ + if (regs_) + return; + + cx_->resetCompartment(); + cx_->maybeMigrateVersionOverride(); +} + +/*****************************************************************************/ + +void +FrameRegsIter::initSlow() +{ + if (!seg_) { + fp_ = NULL; + sp_ = NULL; + pc_ = NULL; + return; + } + + JS_ASSERT(seg_->isSuspended()); + fp_ = seg_->suspendedFrame(); + sp_ = seg_->suspendedRegs().sp; + pc_ = seg_->suspendedRegs().pc; +} + +/* + * Using the invariant described in the js::StackSegment comment, we know that, + * when a pair of prev-linked stack frames are in the same segment, the + * first frame's address is the top of the prev-frame's stack, modulo missing + * arguments. + */ +void +FrameRegsIter::incSlow(StackFrame *oldfp) +{ + JS_ASSERT(oldfp == seg_->initialFrame()); + JS_ASSERT(fp_ == oldfp->prev()); + + /* + * Segments from arbitrary context stacks can interleave so we must do a + * linear scan over segments in this context stack. 
Furthermore, 'prev' can + * be any frame in the segment (not only the suspendedFrame), so we must + * scan each stack frame in each segment. Fortunately, this is not hot code. + */ + seg_ = seg_->previousInContext(); + sp_ = seg_->suspendedRegs().sp; + pc_ = seg_->suspendedRegs().pc; + StackFrame *f = seg_->suspendedFrame(); + while (f != fp_) { + if (f == seg_->initialFrame()) { + seg_ = seg_->previousInContext(); + sp_ = seg_->suspendedRegs().sp; + pc_ = seg_->suspendedRegs().pc; + f = seg_->suspendedFrame(); + } else { + sp_ = f->formalArgsEnd(); + pc_ = f->prevpc(); + f = f->prev(); + } + } +} + +/*****************************************************************************/ + +AllFramesIter::AllFramesIter(JSContext *cx) + : seg_(cx->stack.currentSegment()), + fp_(seg_ ? seg_->currentFrame() : NULL) +{ +} + +AllFramesIter& +AllFramesIter::operator++() +{ + JS_ASSERT(!done()); + if (fp_ == seg_->initialFrame()) { + seg_ = seg_->previousInMemory(); + fp_ = seg_ ? seg_->currentFrame() : NULL; + } else { + fp_ = fp_->prev(); + } + return *this; +} + diff --git a/js/src/vm/Stack.h b/js/src/vm/Stack.h new file mode 100644 index 000000000000..b45d12793efe --- /dev/null +++ b/js/src/vm/Stack.h @@ -0,0 +1,1449 @@ +/* -*- Mode: C; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- + * vim: set ts=4 sw=4 et tw=79 ft=cpp: + * + * ***** BEGIN LICENSE BLOCK ***** + * Version: MPL 1.1/GPL 2.0/LGPL 2.1 + * + * The contents of this file are subject to the Mozilla Public License Version + * 1.1 (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * http://www.mozilla.org/MPL/ + * + * Software distributed under the License is distributed on an "AS IS" basis, + * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License + * for the specific language governing rights and limitations under the + * License. + * + * The Original Code is SpiderMonkey JavaScript engine. 
+ * + * The Initial Developer of the Original Code is + * Mozilla Corporation. + * Portions created by the Initial Developer are Copyright (C) 2009 + * the Initial Developer. All Rights Reserved. + * + * Contributor(s): + * Luke Wagner + * + * Alternatively, the contents of this file may be used under the terms of + * either the GNU General Public License Version 2 or later (the "GPL"), or + * the GNU Lesser General Public License Version 2.1 or later (the "LGPL"), + * in which case the provisions of the GPL or the LGPL are applicable instead + * of those above. If you wish to allow use of your version of this file only + * under the terms of either the GPL or the LGPL, and not to allow others to + * use your version of this file under the terms of the MPL, indicate your + * decision by deleting the provisions above and replace them with the notice + * and other provisions required by the GPL or the LGPL. If you do not delete + * the provisions above, a recipient may use your version of this file under + * the terms of any one of the MPL, the GPL or the LGPL. + * + * ***** END LICENSE BLOCK ***** */ + +#ifndef Stack_h__ +#define Stack_h__ + +#include "jsfun.h" + +namespace js { + +class StackFrame; +class FrameRegs; +class StackSegment; +class StackSpace; +class ContextStack; + +class InvokeArgsGuard; +class InvokeFrameGuard; +class FrameGuard; +class ExecuteFrameGuard; +class DummyFrameGuard; +class GeneratorFrameGuard; + +namespace mjit { struct JITScript; } +namespace detail { struct OOMCheck; } + +/* + * VM stack layout + * + * SpiderMonkey uses a per-thread stack to store the activation records, + * parameters, locals, and expression temporaries for the stack of actively + * executing scripts, functions and generators. The per-thread stack is owned + * by the StackSpace object stored in the thread's ThreadData. 
+ * + * The per-thread stack is subdivided into contiguous segments of memory which + * have a memory layout invariant that allows fixed offsets to be used for stack + * access (by the JIT) as well as fast call/return. This memory layout is + * encapsulated by a set of types that describe different regions of memory: + * StackSegment, StackFrame, FrameRegs and CallArgs. To avoid calling into C++, + * the JIT compiler generates code that simulates C++ stack operations. + * + * The memory layout of a segment looks like: + * + * current regs + * .------------------------------------------------------. + * | current frame | + * | .-------------------------------------. V + * | | initial frame | FrameRegs + * | | .------------. | | + * | | | V V V + * |StackSegment| slots |StackFrame| slots |StackFrame| slots | + * | ^ | + * ? <----------' `-----------' + * prev prev + * + * A segment starts with a fixed-size header (js::StackSegment) which logically + * describes the segment, links it to the rest of the stack, and points to the + * first and last frames in the segment. + * + * Each script activation (global or function code) is given a fixed-size header + * (js::StackFrame) which is associated with the values (called "slots") before + * and after it. The frame contains bookkeeping information about the activation + * and links to the previous frame. + * + * The slots preceeding a (function) StackFrame in memory are the arguments of + * the call. The slots after a StackFrame in memory are its locals followed + * by its expression stack. There is no clean line between the arguments of a + * frame and the expression stack of the previous frame since the top slots of + * the expression become the arguments of a call. There are also layout + * invariants concerning the arguments and StackFrame; see "Arguments" comment + * in StackFrame for more details. 
+ * + * The top of a segment's current frame's expression stack is pointed to by the + * segment's "current regs", which contains the stack pointer 'sp'. In the + * interpreter, sp is adjusted as individual values are pushed and popped from + * the stack and the FrameRegs struct (pointed by the StackSegment) is a local + * var of js::Interpret. JIT code simulates this by lazily updating FrameRegs + * when calling from JIT code into the VM. Ideally, we'd like to remove all + * dependence on FrameRegs outside the interpreter. + * + * A call to a native (C++) function does not push a frame. Instead, an array + * of values (possibly from the top of a calling frame's expression stack) is + * passed to the native. The layout of this array is abstracted by js::CallArgs. + * Note that, between any two StackFrames there may be any number of native + * calls, so the meaning of 'prev' is not 'directly called by'. + * + * An additional feature (perhaps not for much longer: bug 650361) is that + * multiple independent "contexts" can interleave (LIFO) on a single contiguous + * stack. "Independent" here means that neither context sees the other's frames. + * Concretely, an embedding may enter the JS engine on cx1 and then, from a + * native called by the JS engine, reenter the VM on cx2. Changing from cx1 to + * cx2 causes cx1's segment to be "suspended" and a new segment started to be + * started for cx2. These two segments are linked from the perspective of + * StackSpace, since they are adjacent on the thread's stack, but not from the + * perspective of cx1 and cx2. Thus, each segment has two prev-links: + * previousInMemory and previousInContext. A context's apparent stack is + * encapsulated and managed by the js::ContextStack object stored in JSContext. + * ContextStack is the primary interface to the rest of the engine for pushing + * and popping args (for js::Invoke calls) and frames. 
+ */ + +/*****************************************************************************/ + +class CallReceiver +{ +#ifdef DEBUG + mutable bool usedRval_; +#endif + protected: + Value *argv_; + CallReceiver() {} + CallReceiver(Value *argv) : argv_(argv) { +#ifdef DEBUG + usedRval_ = false; +#endif + } + + public: + friend CallReceiver CallReceiverFromVp(Value *); + friend CallReceiver CallReceiverFromArgv(Value *); + Value *base() const { return argv_ - 2; } + JSObject &callee() const { JS_ASSERT(!usedRval_); return argv_[-2].toObject(); } + Value &calleev() const { JS_ASSERT(!usedRval_); return argv_[-2]; } + Value &thisv() const { return argv_[-1]; } + + Value &rval() const { +#ifdef DEBUG + usedRval_ = true; +#endif + return argv_[-2]; + } + + void calleeHasBeenReset() const { +#ifdef DEBUG + usedRval_ = false; +#endif + } +}; + +JS_ALWAYS_INLINE CallReceiver +CallReceiverFromVp(Value *vp) +{ + return CallReceiver(vp + 2); +} + +JS_ALWAYS_INLINE CallReceiver +CallReceiverFromArgv(Value *argv) +{ + return CallReceiver(argv); +} + +/*****************************************************************************/ + +class CallArgs : public CallReceiver +{ + uintN argc_; + protected: + CallArgs() {} + CallArgs(uintN argc, Value *argv) : CallReceiver(argv), argc_(argc) {} + public: + friend CallArgs CallArgsFromVp(uintN, Value *); + friend CallArgs CallArgsFromArgv(uintN, Value *); + Value &operator[](unsigned i) const { JS_ASSERT(i < argc_); return argv_[i]; } + Value *argv() const { return argv_; } + uintN argc() const { return argc_; } +}; + +JS_ALWAYS_INLINE CallArgs +CallArgsFromVp(uintN argc, Value *vp) +{ + return CallArgs(argc, vp + 2); +} + +JS_ALWAYS_INLINE CallArgs +CallArgsFromArgv(uintN argc, Value *argv) +{ + return CallArgs(argc, argv); +} + +/*****************************************************************************/ + +class StackFrame +{ + public: + enum Flags { + /* Primary frame type */ + GLOBAL = 0x1, /* frame pushed for a global script */ + 
FUNCTION = 0x2, /* frame pushed for a scripted call */
+ DUMMY = 0x4, /* frame pushed for bookkeeping */
+
+ /* Frame subtypes */
+ EVAL = 0x8, /* frame pushed for eval() or debugger eval */
+ DEBUGGER = 0x10, /* frame pushed for debugger eval */
+ GENERATOR = 0x20, /* frame is associated with a generator */
+ FLOATING_GENERATOR = 0x40, /* frame is in generator obj, not on stack */
+ CONSTRUCTING = 0x80, /* frame is for a constructor invocation */
+
+ /* Temporary frame states */
+ YIELDING = 0x100, /* js::Interpret dispatched JSOP_YIELD */
+ FINISHED_IN_INTERP = 0x200, /* set if frame finished in Interpret() */
+
+ /* Concerning function arguments */
+ OVERRIDE_ARGS = 0x400, /* overridden arguments local variable */
+ OVERFLOW_ARGS = 0x800, /* numActualArgs > numFormalArgs */
+ UNDERFLOW_ARGS = 0x1000, /* numActualArgs < numFormalArgs */
+
+ /* Lazy frame initialization */
+ HAS_IMACRO_PC = 0x2000, /* frame has imacpc value available */
+ HAS_CALL_OBJ = 0x4000, /* frame has a callobj reachable from scopeChain_ */
+ HAS_ARGS_OBJ = 0x8000, /* frame has an argsobj in StackFrame::args */
+ HAS_HOOK_DATA = 0x10000, /* frame has hookData_ set */
+ HAS_ANNOTATION = 0x20000, /* frame has annotation_ set */
+ HAS_RVAL = 0x40000, /* frame has rval_ set */
+ HAS_SCOPECHAIN = 0x80000, /* frame has scopeChain_ set */
+ HAS_PREVPC = 0x100000 /* frame has prevpc_ set */
+ };
+
+ private:
+ mutable uint32 flags_; /* bits described by Flags */
+ union { /* describes what code is executing in a */
+ JSScript *script; /* global frame */
+ JSFunction *fun; /* function frame, pre GetScopeChain */
+ } exec;
+ union { /* describes the arguments of a function */
+ uintN nactual; /* pre GetArgumentsObject */
+ JSObject *obj; /* post GetArgumentsObject */
+ JSScript *script; /* eval has no args, but needs a script */
+ } args;
+ mutable JSObject *scopeChain_; /* current scope chain */
+ StackFrame *prev_; /* previous cx->regs->fp */
+ void *ncode_; /* return address for method JIT */
+ 
+ /* Lazily initialized */ + js::Value rval_; /* return value of the frame */ + jsbytecode *prevpc_; /* pc of previous frame*/ + jsbytecode *imacropc_; /* pc of macro caller */ + void *hookData_; /* closure returned by call hook */ + void *annotation_; /* perhaps remove with bug 546848 */ + + static void staticAsserts() { + JS_STATIC_ASSERT(offsetof(StackFrame, rval_) % sizeof(js::Value) == 0); + JS_STATIC_ASSERT(sizeof(StackFrame) % sizeof(js::Value) == 0); + } + + inline void initPrev(JSContext *cx); + jsbytecode *prevpcSlow(); + + public: + /* + * Frame initialization + * + * After acquiring a pointer to an uninitialized stack frame on the VM + * stack from StackSpace, these members are used to initialize the stack + * frame before officially pushing the frame into the context. + */ + + /* Used for Invoke, Interpret, trace-jit LeaveTree, and method-jit stubs. */ + inline void initCallFrame(JSContext *cx, JSObject &callee, JSFunction *fun, + uint32 nactual, uint32 flags); + + /* Used for SessionInvoke. */ + inline void resetInvokeCallFrame(); + + /* Called by method-jit stubs and serve as a specification for jit-code. */ + inline void initCallFrameCallerHalf(JSContext *cx, uint32 flags, void *ncode); + inline void initCallFrameEarlyPrologue(JSFunction *fun, uint32 nactual); + inline void initCallFrameLatePrologue(); + + /* Used for eval. */ + inline void initEvalFrame(JSContext *cx, JSScript *script, StackFrame *prev, + uint32 flags); + inline void initGlobalFrame(JSScript *script, JSObject &chain, uint32 flags); + + /* Used when activating generators. */ + inline void stealFrameAndSlots(js::Value *vp, StackFrame *otherfp, + js::Value *othervp, js::Value *othersp); + + /* Perhaps one fine day we will remove dummy frames. 
*/ + inline void initDummyFrame(JSContext *cx, JSObject &chain); + + /* + * Stack frame type + * + * A stack frame may have one of three types, which determines which + * members of the frame may be accessed and other invariants: + * + * global frame: execution of global code or an eval in global code + * function frame: execution of function code or an eval in a function + * dummy frame: bookkeeping frame (to be removed in bug 625199) + */ + + bool isFunctionFrame() const { + return !!(flags_ & FUNCTION); + } + + bool isGlobalFrame() const { + return !!(flags_ & GLOBAL); + } + + bool isDummyFrame() const { + return !!(flags_ & DUMMY); + } + + bool isScriptFrame() const { + bool retval = !!(flags_ & (FUNCTION | GLOBAL)); + JS_ASSERT(retval == !isDummyFrame()); + return retval; + } + + /* + * Eval frames + * + * As noted above, global and function frames may optionally be 'eval + * frames'. Eval code shares its parent's arguments which means that the + * arg-access members of StackFrame may not be used for eval frames. + * Search for 'hasArgs' below for more details. + * + * A further sub-classification of eval frames is whether the frame was + * pushed for an ES5 strict-mode eval(). + */ + + bool isEvalFrame() const { + JS_ASSERT_IF(flags_ & EVAL, isScriptFrame()); + return flags_ & EVAL; + } + + bool isNonEvalFunctionFrame() const { + return (flags_ & (FUNCTION | EVAL)) == FUNCTION; + } + + inline bool isStrictEvalFrame() const { + return isEvalFrame() && script()->strictModeCode; + } + + bool isNonStrictEvalFrame() const { + return isEvalFrame() && !script()->strictModeCode; + } + + /* + * Previous frame + * + * A frame's 'prev' frame is either null or the previous frame pointed to + * by cx->regs->fp when this frame was pushed. 
Often, given two prev-linked + * frames, the next-frame is a function or eval that was called by the + * prev-frame, but not always: the prev-frame may have called a native that + * reentered the VM through JS_CallFunctionValue on the same context + * (without calling JS_SaveFrameChain) which pushed the next-frame. Thus, + * 'prev' has little semantic meaning and basically just tells the VM what + * to set cx->regs->fp to when this frame is popped. + */ + + StackFrame *prev() const { + return prev_; + } + + inline void resetGeneratorPrev(JSContext *cx); + + /* + * Frame slots + * + * A frame's 'slots' are the fixed slots associated with the frame (like + * local variables) followed by an expression stack holding temporary + * values. A frame's 'base' is the base of the expression stack. + */ + + js::Value *slots() const { + return (js::Value *)(this + 1); + } + + js::Value *base() const { + return slots() + script()->nfixed; + } + + js::Value &varSlot(uintN i) { + JS_ASSERT(i < script()->nfixed); + JS_ASSERT_IF(maybeFun(), i < script()->bindings.countVars()); + return slots()[i]; + } + + /* + * Script + * + * All function and global frames have an associated JSScript which holds + * the bytecode being executed for the frame. + */ + + /* + * Get the frame's current bytecode, assuming |this| is in |cx|. + * next is frame whose prev == this, NULL if not known or if this == cx->fp(). + */ + jsbytecode *pc(JSContext *cx, StackFrame *next = NULL); + + jsbytecode *prevpc() { + if (flags_ & HAS_PREVPC) + return prevpc_; + return prevpcSlow(); + } + + JSScript *script() const { + JS_ASSERT(isScriptFrame()); + return isFunctionFrame() + ? isEvalFrame() ? args.script : fun()->script() + : exec.script; + } + + JSScript *functionScript() const { + JS_ASSERT(isFunctionFrame()); + return isEvalFrame() ? 
args.script : fun()->script(); + } + + JSScript *globalScript() const { + JS_ASSERT(isGlobalFrame()); + return exec.script; + } + + JSScript *maybeScript() const { + return isScriptFrame() ? script() : NULL; + } + + size_t numFixed() const { + return script()->nfixed; + } + + size_t numSlots() const { + return script()->nslots; + } + + size_t numGlobalVars() const { + JS_ASSERT(isGlobalFrame()); + return exec.script->nfixed; + } + + /* + * Function + * + * All function frames have an associated interpreted JSFunction. + */ + + JSFunction* fun() const { + JS_ASSERT(isFunctionFrame()); + return exec.fun; + } + + JSFunction* maybeFun() const { + return isFunctionFrame() ? fun() : NULL; + } + + /* + * Arguments + * + * Only non-eval function frames have arguments. A frame follows its + * arguments contiguously in memory. The arguments pushed by the caller are + * the 'actual' arguments. The declared arguments of the callee are the + * 'formal' arguments. When the caller passes less or equal actual + * arguments, the actual and formal arguments are the same array (but with + * different extents). When the caller passes too many arguments, the + * formal subset of the actual arguments is copied onto the top of the + * stack. This allows the engine to maintain a jit-time constant offset of + * arguments from the frame pointer. Since the formal subset of the actual + * arguments is potentially on the stack twice, it is important for all + * reads/writes to refer to the same canonical memory location. + * + * An arguments object (the object returned by the 'arguments' keyword) is + * lazily created, so a given function frame may or may not have one. + */ + + /* True if this frame has arguments. Contrast with hasArgsObj. 
*/ + bool hasArgs() const { + return isNonEvalFunctionFrame(); + } + + uintN numFormalArgs() const { + JS_ASSERT(hasArgs()); + return fun()->nargs; + } + + js::Value &formalArg(uintN i) const { + JS_ASSERT(i < numFormalArgs()); + return formalArgs()[i]; + } + + js::Value *formalArgs() const { + JS_ASSERT(hasArgs()); + return (js::Value *)this - numFormalArgs(); + } + + js::Value *formalArgsEnd() const { + JS_ASSERT(hasArgs()); + return (js::Value *)this; + } + + js::Value *maybeFormalArgs() const { + return (flags_ & (FUNCTION | EVAL)) == FUNCTION + ? formalArgs() + : NULL; + } + + inline uintN numActualArgs() const; + inline js::Value *actualArgs() const; + inline js::Value *actualArgsEnd() const; + + inline js::Value &canonicalActualArg(uintN i) const; + template inline bool forEachCanonicalActualArg(Op op); + template inline bool forEachFormalArg(Op op); + + inline void clearMissingArgs(); + + bool hasArgsObj() const { + return !!(flags_ & HAS_ARGS_OBJ); + } + + JSObject &argsObj() const { + JS_ASSERT(hasArgsObj()); + JS_ASSERT(!isEvalFrame()); + return *args.obj; + } + + JSObject *maybeArgsObj() const { + return hasArgsObj() ? &argsObj() : NULL; + } + + inline void setArgsObj(JSObject &obj); + + /* + * This value + * + * Every frame has a this value although, until 'this' is computed, the + * value may not be the semantically-correct 'this' value. + * + * The 'this' value is stored before the formal arguments for function + * frames and directly before the frame for global frames. The *Args + * members assert !isEvalFrame(), so we implement specialized inline + * methods for accessing 'this'. When the caller has static knowledge that + * a frame is a function or global frame, 'functionThis' and 'globalThis', + * respectively, allow more efficient access. 
+ */ + + js::Value &functionThis() const { + JS_ASSERT(isFunctionFrame()); + if (isEvalFrame()) + return ((js::Value *)this)[-1]; + return formalArgs()[-1]; + } + + JSObject &constructorThis() const { + JS_ASSERT(hasArgs()); + return formalArgs()[-1].toObject(); + } + + js::Value &globalThis() const { + JS_ASSERT(isGlobalFrame()); + return ((js::Value *)this)[-1]; + } + + js::Value &thisValue() const { + if (flags_ & (EVAL | GLOBAL)) + return ((js::Value *)this)[-1]; + return formalArgs()[-1]; + } + + /* + * Callee + * + * Only function frames have a callee. An eval frame in a function has the + * same caller as its containing function frame. + */ + + js::Value &calleev() const { + JS_ASSERT(isFunctionFrame()); + if (isEvalFrame()) + return ((js::Value *)this)[-2]; + return formalArgs()[-2]; + } + + JSObject &callee() const { + JS_ASSERT(isFunctionFrame()); + return calleev().toObject(); + } + + JSObject *maybeCallee() const { + return isFunctionFrame() ? &callee() : NULL; + } + + js::CallReceiver callReceiver() const { + return js::CallReceiverFromArgv(formalArgs()); + } + + /* + * getValidCalleeObject is a fallible getter to compute the correct callee + * function object, which may require deferred cloning due to the JSObject + * methodReadBarrier. For a non-function frame, return true with *vp set + * from calleev, which may not be an object (it could be undefined). + */ + bool getValidCalleeObject(JSContext *cx, js::Value *vp); + + /* + * Scope chain + * + * Every frame has a scopeChain which, when traversed via the 'parent' link + * to the root, indicates the current global object. A 'call object' is a + * node on a scope chain representing a function's activation record. A + * call object is used for dynamically-scoped name lookup and lexically- + * scoped upvar access. The call object holds the values of locals and + * arguments when a function returns (and its stack frame is popped). 
For + * performance reasons, call objects are created lazily for 'lightweight' + * functions, i.e., functions which are not statically known to require a + * call object. Thus, a given function frame may or may not have a call + * object. When a function does have a call object, it is found by walking + * up the scope chain until the first call object. Thus, it is important, + * when setting the scope chain, to indicate whether the new scope chain + * contains a new call object and thus changes the 'hasCallObj' state. + * + * NB: 'fp->hasCallObj()' implies that fp->callObj() needs to be 'put' when + * the frame is popped. Since the scope chain of a non-strict eval frame + * contains the call object of the parent (function) frame, it is possible + * to have: + * !fp->hasCall() && fp->scopeChain().isCall() + */ + + JSObject &scopeChain() const { + JS_ASSERT_IF(!(flags_ & HAS_SCOPECHAIN), isFunctionFrame()); + if (!(flags_ & HAS_SCOPECHAIN)) { + scopeChain_ = callee().getParent(); + flags_ |= HAS_SCOPECHAIN; + } + return *scopeChain_; + } + + bool hasCallObj() const { + bool ret = !!(flags_ & HAS_CALL_OBJ); + JS_ASSERT_IF(ret, !isNonStrictEvalFrame()); + return ret; + } + + inline JSObject &callObj() const; + inline void setScopeChainNoCallObj(JSObject &obj); + inline void setScopeChainWithOwnCallObj(JSObject &obj); + + /* + * NB: putActivationObjects does not mark activation objects as having been + * put (since the frame is about to be popped). + */ + inline void putActivationObjects(); + inline void markActivationObjectsAsPut(); + + /* + * Frame compartment + * + * A stack frame's compartment is the frame's containing context's + * compartment when the frame was pushed. 
+ */ + + JSCompartment *compartment() const { + JS_ASSERT_IF(isScriptFrame(), scopeChain().compartment() == script()->compartment); + return scopeChain().compartment(); + } + + /* + * Imacropc + * + * A frame's IMacro pc is the bytecode address when an imacro started + * executing (guaranteed non-null). An imacro does not push a frame, so + * when the imacro finishes, the frame's IMacro pc becomes the current pc. + */ + + bool hasImacropc() const { + return flags_ & HAS_IMACRO_PC; + } + + jsbytecode *imacropc() const { + JS_ASSERT(hasImacropc()); + return imacropc_; + } + + jsbytecode *maybeImacropc() const { + return hasImacropc() ? imacropc() : NULL; + } + + void clearImacropc() { + flags_ &= ~HAS_IMACRO_PC; + } + + void setImacropc(jsbytecode *pc) { + JS_ASSERT(pc); + JS_ASSERT(!(flags_ & HAS_IMACRO_PC)); + imacropc_ = pc; + flags_ |= HAS_IMACRO_PC; + } + + /* Annotation (will be removed after bug 546848) */ + + void* annotation() const { + return (flags_ & HAS_ANNOTATION) ? annotation_ : NULL; + } + + void setAnnotation(void *annot) { + flags_ |= HAS_ANNOTATION; + annotation_ = annot; + } + + /* Debugger hook data */ + + bool hasHookData() const { + return !!(flags_ & HAS_HOOK_DATA); + } + + void* hookData() const { + JS_ASSERT(hasHookData()); + return hookData_; + } + + void* maybeHookData() const { + return hasHookData() ? 
hookData_ : NULL; + } + + void setHookData(void *v) { + hookData_ = v; + flags_ |= HAS_HOOK_DATA; + } + + /* Return value */ + + const js::Value &returnValue() { + if (!(flags_ & HAS_RVAL)) + rval_.setUndefined(); + return rval_; + } + + void markReturnValue() { + flags_ |= HAS_RVAL; + } + + void setReturnValue(const js::Value &v) { + rval_ = v; + markReturnValue(); + } + + void clearReturnValue() { + rval_.setUndefined(); + markReturnValue(); + } + + /* Native-code return address */ + + void *nativeReturnAddress() const { + return ncode_; + } + + void setNativeReturnAddress(void *addr) { + ncode_ = addr; + } + + void **addressOfNativeReturnAddress() { + return &ncode_; + } + + /* + * Generator-specific members + * + * A non-eval function frame may optionally be the activation of a + * generator. For the most part, generator frames act like ordinary frames. + * For exceptions, see js_FloatingFrameIfGenerator. + */ + + bool isGeneratorFrame() const { + return !!(flags_ & GENERATOR); + } + + bool isFloatingGenerator() const { + JS_ASSERT_IF(flags_ & FLOATING_GENERATOR, isGeneratorFrame()); + return !!(flags_ & FLOATING_GENERATOR); + } + + void initFloatingGenerator() { + JS_ASSERT(!(flags_ & GENERATOR)); + flags_ |= (GENERATOR | FLOATING_GENERATOR); + } + + void unsetFloatingGenerator() { + flags_ &= ~FLOATING_GENERATOR; + } + + void setFloatingGenerator() { + flags_ |= FLOATING_GENERATOR; + } + + /* + * js::Execute pushes both global and function frames (since eval() in a + * function pushes a frame with isFunctionFrame() && isEvalFrame()). 
Most + * code should not care where a frame was pushed, but if it is necessary to + * pick out frames pushed by js::Execute, this is the right query: + */ + + bool isFramePushedByExecute() const { + return !!(flags_ & (GLOBAL | EVAL)); + } + + /* + * Other flags + */ + + bool isConstructing() const { + return !!(flags_ & CONSTRUCTING); + } + + uint32 isConstructingFlag() const { + JS_ASSERT(isFunctionFrame()); + JS_ASSERT((flags_ & ~(CONSTRUCTING | FUNCTION)) == 0); + return flags_; + } + + bool isDebuggerFrame() const { + return !!(flags_ & DEBUGGER); + } + + bool isEvalOrDebuggerFrame() const { + return !!(flags_ & (EVAL | DEBUGGER)); + } + + bool hasOverriddenArgs() const { + return !!(flags_ & OVERRIDE_ARGS); + } + + bool hasOverflowArgs() const { + return !!(flags_ & OVERFLOW_ARGS); + } + + void setOverriddenArgs() { + flags_ |= OVERRIDE_ARGS; + } + + bool isYielding() { + return !!(flags_ & YIELDING); + } + + void setYielding() { + flags_ |= YIELDING; + } + + void clearYielding() { + flags_ &= ~YIELDING; + } + + void setFinishedInInterpreter() { + flags_ |= FINISHED_IN_INTERP; + } + + bool finishedInInterpreter() const { + return !!(flags_ & FINISHED_IN_INTERP); + } + +#ifdef DEBUG + /* Poison scopeChain value set before a frame is flushed. 
*/ + static JSObject *const sInvalidScopeChain; +#endif + + public: + /* Public, but only for JIT use: */ + + static size_t offsetOfFlags() { + return offsetof(StackFrame, flags_); + } + + static size_t offsetOfExec() { + return offsetof(StackFrame, exec); + } + + void *addressOfArgs() { + return &args; + } + + static size_t offsetOfScopeChain() { + return offsetof(StackFrame, scopeChain_); + } + + JSObject **addressOfScopeChain() { + JS_ASSERT(flags_ & HAS_SCOPECHAIN); + return &scopeChain_; + } + + static size_t offsetOfPrev() { + return offsetof(StackFrame, prev_); + } + + static size_t offsetOfReturnValue() { + return offsetof(StackFrame, rval_); + } + + static ptrdiff_t offsetOfNcode() { + return offsetof(StackFrame, ncode_); + } + + static ptrdiff_t offsetOfCallee(JSFunction *fun) { + JS_ASSERT(fun != NULL); + return -(fun->nargs + 2) * sizeof(js::Value); + } + + static ptrdiff_t offsetOfThis(JSFunction *fun) { + return fun == NULL + ? -1 * ptrdiff_t(sizeof(js::Value)) + : -(fun->nargs + 1) * ptrdiff_t(sizeof(js::Value)); + } + + static ptrdiff_t offsetOfFormalArg(JSFunction *fun, uintN i) { + JS_ASSERT(i < fun->nargs); + return (-(int)fun->nargs + i) * sizeof(js::Value); + } + + static size_t offsetOfFixed(uintN i) { + return sizeof(StackFrame) + i * sizeof(js::Value); + } + +#ifdef JS_METHODJIT + js::mjit::JITScript *jit() { + return script()->getJIT(isConstructing()); + } +#endif + + void methodjitStaticAsserts(); +}; + +static const size_t VALUES_PER_STACK_FRAME = sizeof(StackFrame) / sizeof(Value); + +inline StackFrame * Valueify(JSStackFrame *fp) { return (StackFrame *)fp; } +static inline JSStackFrame * Jsvalify(StackFrame *fp) { return (JSStackFrame *)fp; } + +/*****************************************************************************/ + +class FrameRegs +{ + public: + Value *sp; + jsbytecode *pc; + private: + StackFrame *fp_; + public: + StackFrame *fp() const { return fp_; } + + /* For jit use (need constant): */ + static const size_t offsetOfFp 
= 2 * sizeof(void *); + static void staticAssert() { + JS_STATIC_ASSERT(offsetOfFp == offsetof(FrameRegs, fp_)); + } + + /* For generator: */ + void rebaseFromTo(StackFrame *from, StackFrame *to) { + fp_ = to; + sp = to->slots() + (sp - from->slots()); + } + + /* For ContextStack: */ + void popFrame(Value *newsp) { + pc = fp_->prevpc(); + sp = newsp; + fp_ = fp_->prev(); + } + + /* For FixupArity: */ + void popPartialFrame(Value *newsp) { + sp = newsp; + fp_ = fp_->prev(); + } + + /* For stubs::CompileFunction, ContextStack: */ + void prepareToRun(StackFrame *fp, JSScript *script) { + pc = script->code; + sp = fp->slots() + script->nfixed; + fp_ = fp; + } + + /* For pushDummyFrame: */ + void initDummyFrame(StackFrame *fp) { + pc = NULL; + sp = fp->slots(); + fp_ = fp; + } +}; + +/*****************************************************************************/ + +struct StackOverride +{ + Value *top; +#ifdef DEBUG + StackSegment *seg; + StackFrame *frame; +#endif +}; + +/*****************************************************************************/ + +class StackSpace +{ + Value *base_; +#ifdef XP_WIN + mutable Value *commitEnd_; +#endif + Value *end_; + StackSegment *seg_; + StackOverride override_; + + static const size_t CAPACITY_VALS = 512 * 1024; + static const size_t CAPACITY_BYTES = CAPACITY_VALS * sizeof(Value); + static const size_t COMMIT_VALS = 16 * 1024; + static const size_t COMMIT_BYTES = COMMIT_VALS * sizeof(Value); + + static void staticAsserts() { + JS_STATIC_ASSERT(CAPACITY_VALS % COMMIT_VALS == 0); + } + +#ifdef XP_WIN + JS_FRIEND_API(bool) bumpCommit(JSContext *maybecx, Value *from, ptrdiff_t nvals) const; +#endif + + friend class ContextStack; + friend struct detail::OOMCheck; + inline bool ensureSpace(JSContext *maybecx, Value *from, ptrdiff_t nvals) const; + void pushSegment(StackSegment &seg); + void popSegment(); + inline void pushOverride(Value *top, StackOverride *prev); + inline void popOverride(const StackOverride &prev); + + public: + 
StackSpace(); + bool init(); + ~StackSpace(); + + /* See stack layout comment above. */ + StackSegment *currentSegment() const { return seg_; } + Value *firstUnused() const; + + /* Optimization of firstUnused when currentSegment() is known active. */ + inline Value *activeFirstUnused() const; + + /* Get the segment containing the target frame. */ + StackSegment &containingSegment(const StackFrame *target) const; + + /* + * Retrieve the 'variables object' (ES3 term) associated with the given + * frame's Execution Context's VariableEnvironment (ES5 10.3). + */ + JSObject &varObjForFrame(const StackFrame *fp); + + /* + * LeaveTree requires stack allocation to rebuild the stack. There is no + * good way to handle an OOM for these allocations, so this function checks + * that OOM cannot occur using the size of the TraceNativeStorage as a + * conservative upper bound. + */ + inline bool ensureEnoughSpaceToEnterTrace(); + + /* + * If we let infinite recursion go until it hit the end of the contiguous + * stack, it would take a long time. As a heuristic, we kill scripts which + * go deeper than MAX_INLINE_CALLS. Note: this heuristic only applies to a + * single activation of the VM. If a script reenters, the call count gets + * reset. This is ok because we will quickly hit the C recursion limit. + */ + static const size_t MAX_INLINE_CALLS = 3000; + + /* + * SunSpider and v8bench have roughly an average of 9 slots per script. Our + * heuristic for a quick over-recursion check uses a generous slot count + * based on this estimate. We take this frame size and multiply it by the + * old recursion limit from the interpreter. Worst case, if an average size + * script (<=9 slots) over recurses, it'll effectively be the same as having + * increased the old inline call count to <= 5,000. 
+ */ + static const size_t STACK_QUOTA = MAX_INLINE_CALLS * (VALUES_PER_STACK_FRAME + 18); + + /* + * In the mjit, we'd like to collapse two "overflow" checks into one: + * - the MAX_INLINE_CALLS check (see above comment) + * - the stack OOM check (or, on Windows, the commit/OOM check) This + * function produces a 'limit' pointer that satisfies both these checks. + * (The STACK_QUOTA comment explains how this limit simulates checking + * MAX_INLINE_CALLS.) This limit is guaranteed to have at least enough space + * for cx->fp()->nslots() plus an extra stack frame (which is the min + * requirement for entering mjit code) or else an error is reported and NULL + * is returned. When the stack grows past the returned limit, the script may + * still be within quota, but more memory needs to be committed. This is + * handled by bumpLimitWithinQuota. + */ + inline Value *getStackLimit(JSContext *cx); + + /* + * Try to bump the limit, staying within |base + STACK_QUOTA|, by + * committing more pages of the contiguous stack. + * base: the frame on which execution started + * from: the current top of the stack + * nvals: requested space above 'from' + * *limit: receives bumped new limit + */ + bool bumpLimitWithinQuota(JSContext *maybecx, StackFrame *base, Value *from, uintN nvals, Value **limit) const; + + /* + * Raise the given limit without considering quota. + * See comment in BumpStackFull. + */ + bool bumpLimit(JSContext *cx, StackFrame *base, Value *from, uintN nvals, Value **limit) const; + + /* Called during GC: mark segments, frames, and slots under firstUnused. */ + void mark(JSTracer *trc); +}; + +/*****************************************************************************/ + +class ContextStack +{ + FrameRegs *regs_; + StackSegment *seg_; + StackSpace *space_; + JSContext *cx_; + + /* + * This is the collecting-point for code that wants to know when there is + * no JS active. 
Note that "no JS active" does not mean the stack is empty
+ * because of JS_(Save|Restore)FrameChain. If code really wants to know
+ * when the stack is empty, test |cx->stack.empty()|.
+ */
+ void notifyIfNoCodeRunning();
+
+ /*
+ * Return whether this ContextStack is running code at the top of the
+ * contiguous stack. This is a precondition for extending the current
+ * segment by pushing stack frames or overrides etc.
+ */
+ inline bool isCurrentAndActive() const;
+
+#ifdef DEBUG
+ void assertSegmentsInSync() const;
+ void assertSpaceInSync() const;
+#else
+ void assertSegmentsInSync() const {}
+ void assertSpaceInSync() const {}
+#endif
+
+ friend class FrameGuard;
+ bool getSegmentAndFrame(JSContext *cx, uintN vplen, uintN nslots,
+ FrameGuard *frameGuard) const;
+ void pushSegmentAndFrame(FrameRegs &regs, FrameGuard *frameGuard);
+ void pushSegmentAndFrameImpl(FrameRegs &regs, StackSegment &seg);
+ void popSegmentAndFrame();
+ void popSegmentAndFrameImpl();
+
+ template <class Check>
+ inline StackFrame *getCallFrame(JSContext *cx, Value *sp, uintN nactual,
+ JSFunction *fun, JSScript *script, uint32 *pflags,
+ Check check) const;
+
+ friend class InvokeArgsGuard;
+ bool pushInvokeArgsSlow(JSContext *cx, uintN argc, InvokeArgsGuard *argsGuard);
+ void popInvokeArgsSlow(const InvokeArgsGuard &argsGuard);
+ inline void popInvokeArgs(const InvokeArgsGuard &argsGuard);
+
+ friend class InvokeFrameGuard;
+ void pushInvokeFrameSlow(InvokeFrameGuard *frameGuard);
+ void popInvokeFrameSlow(const InvokeFrameGuard &frameGuard);
+ inline void popInvokeFrame(const InvokeFrameGuard &frameGuard);
+
+ public:
+ ContextStack(JSContext *cx);
+ ~ContextStack();
+
+ /*
+ * A context is "empty" if it has no code, running or suspended, on its
+ * stack. Running code can be stopped (via JS_SaveFrameChain) which leads
+ * to the state |!cx->empty() && cx->running()|. 
+ */
+ bool empty() const { JS_ASSERT_IF(regs_, seg_); return !seg_; }
+ bool running() const { JS_ASSERT_IF(regs_, regs_->fp()); return !!regs_; }
+
+ /* Current regs of the current segment (see VM stack layout comment). */
+ FrameRegs &regs() const { JS_ASSERT(regs_); return *regs_; }
+
+ /* Convenience helpers. */
+ FrameRegs *maybeRegs() const { return regs_; }
+ StackFrame *fp() const { return regs_->fp(); }
+ StackFrame *maybefp() const { return regs_ ? regs_->fp() : NULL; }
+
+ /* The StackSpace currently hosting this ContextStack. */
+ StackSpace &space() const { assertSpaceInSync(); return *space_; }
+
+ /*
+ * To avoid indirection, ContextStack caches a pointer to the StackSpace.
+ * This must be kept coherent with cx->thread->data.space by calling
+ * 'threadReset' whenever cx->thread changes.
+ */
+ void threadReset();
+
+ /*
+ * As an optimization, the interpreter/mjit can operate on a local
+ * FrameRegs instance and repoint the ContextStack to this local instance.
+ */
+ void repointRegs(FrameRegs *regs) {
+ JS_ASSERT_IF(regs, regs->fp());
+ regs_ = regs;
+ }
+
+ /* Return the current segment, which may or may not be active. */
+ js::StackSegment *currentSegment() const {
+ assertSegmentsInSync();
+ return seg_;
+ }
+
+ /* This is an optimization of StackSpace::varObjForFrame. */
+ inline JSObject &currentVarObj() const;
+
+ /* Search the call stack for the nearest frame with static level targetLevel. */
+ inline StackFrame *findFrameAtLevel(uintN targetLevel) const;
+
+#ifdef DEBUG
+ /* Return whether the given frame is in this context's stack. */
+ bool contains(const StackFrame *fp) const;
+#endif
+
+ /* Mark the top segment as suspended, without pushing a new one. */
+ void saveActiveSegment();
+
+ /* Undoes calls to saveActiveSegment. */
+ void restoreSegment();
+
+ /*
+ * For the five sets of stack operations below:
+ * - The boolean-valued functions call js_ReportOutOfScriptQuota on OOM. 
+ * - The "get*Frame" functions do not change any global state, they just
+ * check OOM and return pointers to an uninitialized frame with the
+ * requested missing arguments/slots. Only once the "push*Frame"
+ * function has been called is global state updated. Thus, between
+ * "get*Frame" and "push*Frame", the frame and slots are unrooted.
+ * - Functions taking "*Guard" arguments will use the guard's destructor
+ * to pop the stack. The caller must ensure the guard has the
+ * appropriate lifetime.
+ */
+
+ /*
+ * pushInvokeArgs allocates |argc + 2| rooted values that will be passed as
+ * the arguments to Invoke. A single allocation can be used for multiple
+ * Invoke calls. The InvokeArgumentsGuard passed to Invoke must come from
+ * an immediately-enclosing (stack-wise) call to pushInvokeArgs.
+ */
+ bool pushInvokeArgs(JSContext *cx, uintN argc, InvokeArgsGuard *ag);
+
+ /* These functions are called inside Invoke, not Invoke clients. */
+ inline StackFrame *
+ getInvokeFrame(JSContext *cx, const CallArgs &args,
+ JSFunction *fun, JSScript *script, uint32 *flags,
+ InvokeFrameGuard *frameGuard) const;
+ void pushInvokeFrame(const CallArgs &args,
+ InvokeFrameGuard *frameGuard);
+
+ /* These functions are called inside Execute, not Execute clients. */
+ bool getExecuteFrame(JSContext *cx, JSScript *script,
+ ExecuteFrameGuard *frameGuard) const;
+ void pushExecuteFrame(JSObject *initialVarObj,
+ ExecuteFrameGuard *frameGuard);
+
+ /* These functions are called inside SendToGenerator. */
+ bool getGeneratorFrame(JSContext *cx, uintN vplen, uintN nslots,
+ GeneratorFrameGuard *frameGuard);
+ void pushGeneratorFrame(FrameRegs &regs,
+ GeneratorFrameGuard *frameGuard);
+
+ /* Pushes a StackFrame::isDummyFrame. */
+ bool pushDummyFrame(JSContext *cx, JSObject &scopeChain,
+ DummyFrameGuard *frameGuard);
+
+ /*
+ * An "inline frame" may only be pushed from within the top, active
+ * segment. This is the case for calls made inside mjit code and Interpret. 
+ * The *WithinLimit variant stays within the stack quota using the given + * limit (see StackSpace::getStackLimit). + */ + inline StackFrame * + getInlineFrame(JSContext *cx, Value *sp, uintN nactual, + JSFunction *fun, JSScript *script, uint32 *flags) const; + inline StackFrame * + getInlineFrameWithinLimit(JSContext *cx, Value *sp, uintN nactual, + JSFunction *fun, JSScript *script, uint32 *flags, + StackFrame *base, Value **limit) const; + inline void pushInlineFrame(JSScript *script, StackFrame *fp, FrameRegs ®s); + inline void popInlineFrame(); + + /* For jit use: */ + static size_t offsetOfRegs() { return offsetof(ContextStack, regs_); } +}; + +/*****************************************************************************/ + +class InvokeArgsGuard : public CallArgs +{ + friend class ContextStack; + ContextStack *stack_; /* null implies nothing pushed */ + StackSegment *seg_; /* null implies no segment pushed */ + StackOverride prevOverride_; + public: + InvokeArgsGuard() : stack_(NULL), seg_(NULL) {} + ~InvokeArgsGuard(); + bool pushed() const { return stack_ != NULL; } +}; + +/* + * This type can be used to call Invoke when the arguments have already been + * pushed onto the stack as part of normal execution. + */ +struct InvokeArgsAlreadyOnTheStack : CallArgs +{ + InvokeArgsAlreadyOnTheStack(uintN argc, Value *vp) : CallArgs(argc, vp + 2) {} +}; + +class InvokeFrameGuard + +{ + friend class ContextStack; + ContextStack *stack_; /* null implies nothing pushed */ + FrameRegs regs_; + FrameRegs *prevRegs_; + public: + InvokeFrameGuard() : stack_(NULL) {} + ~InvokeFrameGuard(); + bool pushed() const { return stack_ != NULL; } + void pop(); + StackFrame *fp() const { return regs_.fp(); } +}; + +/* Reusable base; not for direct use. 
*/ +class FrameGuard +{ + friend class ContextStack; + ContextStack *stack_; /* null implies nothing pushed */ + StackSegment *seg_; + Value *vp_; + StackFrame *fp_; + public: + FrameGuard() : stack_(NULL), vp_(NULL), fp_(NULL) {} + ~FrameGuard(); + bool pushed() const { return stack_ != NULL; } + StackSegment *segment() const { return seg_; } + Value *vp() const { return vp_; } + StackFrame *fp() const { return fp_; } +}; + +class ExecuteFrameGuard : public FrameGuard +{ + friend class ContextStack; + FrameRegs regs_; +}; + +class DummyFrameGuard : public FrameGuard +{ + friend class ContextStack; + FrameRegs regs_; +}; + +class GeneratorFrameGuard : public FrameGuard +{}; + +/*****************************************************************************/ + +/* + * While |cx->fp|'s pc/sp are available in |cx->regs|, to compute the saved + * value of pc/sp for any other frame, it is necessary to know about that + * frame's next-frame. This iterator maintains this information when walking + * a chain of stack frames starting at |cx->fp|. + * + * Usage: + * for (FrameRegsIter i(cx); !i.done(); ++i) + * ... i.fp() ... i.sp() ... i.pc() + */ +class FrameRegsIter +{ + JSContext *cx_; + StackSegment *seg_; + StackFrame *fp_; + Value *sp_; + jsbytecode *pc_; + + void initSlow(); + void incSlow(StackFrame *oldfp); + + public: + inline FrameRegsIter(JSContext *cx); + + bool done() const { return fp_ == NULL; } + inline FrameRegsIter &operator++(); + + StackFrame *fp() const { return fp_; } + Value *sp() const { return sp_; } + jsbytecode *pc() const { return pc_; } +}; + +/* + * Utility class for iteration over all active stack frames. 
+ */ +class AllFramesIter +{ +public: + AllFramesIter(JSContext *cx); + + bool done() const { return fp_ == NULL; } + AllFramesIter& operator++(); + + StackFrame *fp() const { return fp_; } + +private: + StackSegment *seg_; + StackFrame *fp_; +}; + +} /* namespace js */ + +#endif /* Stack_h__ */ diff --git a/js/src/vm/StringObject-inl.h b/js/src/vm/StringObject-inl.h new file mode 100644 index 000000000000..db0f2b1b1434 --- /dev/null +++ b/js/src/vm/StringObject-inl.h @@ -0,0 +1,82 @@ +/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*- + * vim: set ts=8 sw=4 et tw=78: + * + * ***** BEGIN LICENSE BLOCK ***** + * Version: MPL 1.1/GPL 2.0/LGPL 2.1 + * + * The contents of this file are subject to the Mozilla Public License Version + * 1.1 (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * http://www.mozilla.org/MPL/ + * + * Software distributed under the License is distributed on an "AS IS" basis, + * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License + * for the specific language governing rights and limitations under the + * License. + * + * The Original Code is SpiderMonkey string object code. + * + * The Initial Developer of the Original Code is + * the Mozilla Foundation. + * Portions created by the Initial Developer are Copyright (C) 2011 + * the Initial Developer. All Rights Reserved. + * + * Contributor(s): + * Jeff Walden (original author) + * + * Alternatively, the contents of this file may be used under the terms of + * either of the GNU General Public License Version 2 or later (the "GPL"), + * or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"), + * in which case the provisions of the GPL or the LGPL are applicable instead + * of those above. 
If you wish to allow use of your version of this file only + * under the terms of either the GPL or the LGPL, and not to allow others to + * use your version of this file under the terms of the MPL, indicate your + * decision by deleting the provisions above and replace them with the notice + * and other provisions required by the GPL or the LGPL. If you do not delete + * the provisions above, a recipient may use your version of this file under + * the terms of any one of the MPL, the GPL or the LGPL. + * + * ***** END LICENSE BLOCK ***** */ + +#ifndef StringObject_inl_h___ +#define StringObject_inl_h___ + +#include "StringObject.h" + +inline js::StringObject * +JSObject::asString() +{ + JS_ASSERT(isString()); + return static_cast(const_cast(this)); +} + +namespace js { + +inline StringObject * +StringObject::create(JSContext *cx, JSString *str) +{ + JSObject *obj = NewBuiltinClassInstance(cx, &js_StringClass); + if (!obj) + return NULL; + StringObject *strobj = obj->asString(); + if (!strobj->init(cx, str)) + return NULL; + return strobj; +} + +inline StringObject * +StringObject::createWithProto(JSContext *cx, JSString *str, JSObject &proto) +{ + JS_ASSERT(gc::FINALIZE_OBJECT2 == gc::GetGCObjectKind(JSCLASS_RESERVED_SLOTS(&js_StringClass))); + JSObject *obj = NewObjectWithClassProto(cx, &js_StringClass, &proto, gc::FINALIZE_OBJECT2); + if (!obj) + return NULL; + StringObject *strobj = obj->asString(); + if (!strobj->init(cx, str)) + return NULL; + return strobj; +} + +} // namespace js + +#endif /* StringObject_inl_h__ */ diff --git a/js/src/vm/StringObject.h b/js/src/vm/StringObject.h new file mode 100644 index 000000000000..d7a7ae6821b6 --- /dev/null +++ b/js/src/vm/StringObject.h @@ -0,0 +1,104 @@ +/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*- + * vim: set ts=8 sw=4 et tw=78: + * + * ***** BEGIN LICENSE BLOCK ***** + * Version: MPL 1.1/GPL 2.0/LGPL 2.1 + * + * The contents of this file are subject to the Mozilla Public License 
Version + * 1.1 (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * http://www.mozilla.org/MPL/ + * + * Software distributed under the License is distributed on an "AS IS" basis, + * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License + * for the specific language governing rights and limitations under the + * License. + * + * The Original Code is SpiderMonkey string object code. + * + * The Initial Developer of the Original Code is + * the Mozilla Foundation. + * Portions created by the Initial Developer are Copyright (C) 2011 + * the Initial Developer. All Rights Reserved. + * + * Contributor(s): + * Jeff Walden (original author) + * + * Alternatively, the contents of this file may be used under the terms of + * either of the GNU General Public License Version 2 or later (the "GPL"), + * or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"), + * in which case the provisions of the GPL or the LGPL are applicable instead + * of those above. If you wish to allow use of your version of this file only + * under the terms of either the GPL or the LGPL, and not to allow others to + * use your version of this file under the terms of the MPL, indicate your + * decision by deleting the provisions above and replace them with the notice + * and other provisions required by the GPL or the LGPL. If you do not delete + * the provisions above, a recipient may use your version of this file under + * the terms of any one of the MPL, the GPL or the LGPL. + * + * ***** END LICENSE BLOCK ***** */ + +#ifndef StringObject_h___ +#define StringObject_h___ + +#include "jsobj.h" +#include "jsstr.h" + +namespace js { + +class StringObject : public ::JSObject +{ + static const uintN PRIMITIVE_THIS_SLOT = 0; + static const uintN LENGTH_SLOT = 1; + + public: + static const uintN RESERVED_SLOTS = 2; + + /* + * Creates a new String object boxing the given string. 
The object's + * [[Prototype]] is determined from context. + */ + static inline StringObject *create(JSContext *cx, JSString *str); + + /* + * Identical to create(), but uses |proto| as [[Prototype]]. This method + * must not be used to create |String.prototype|. + */ + static inline StringObject *createWithProto(JSContext *cx, JSString *str, JSObject &proto); + + JSString *unbox() const { + return getSlot(PRIMITIVE_THIS_SLOT).toString(); + } + + inline size_t length() const { + return size_t(getSlot(LENGTH_SLOT).toInt32()); + } + + private: + inline bool init(JSContext *cx, JSString *str); + + void setStringThis(JSString *str) { + JS_ASSERT(getSlot(PRIMITIVE_THIS_SLOT).isUndefined()); + setSlot(PRIMITIVE_THIS_SLOT, StringValue(str)); + setSlot(LENGTH_SLOT, Int32Value(int32(str->length()))); + } + + /* For access to init, as String.prototype is special. */ + friend JSObject * + ::js_InitStringClass(JSContext *cx, JSObject *global); + + /* + * Compute the initial shape to associate with fresh String objects, which + * encodes the initial length property. Return the shape after changing + * this String object's last property to it. + */ + const js::Shape *assignInitialShape(JSContext *cx); + + private: + StringObject(); + StringObject &operator=(const StringObject &so); +}; + +} // namespace js + +#endif /* StringObject_h__ */ diff --git a/js/src/xpconnect/src/nsXPConnect.cpp b/js/src/xpconnect/src/nsXPConnect.cpp index 6d9e5d587746..0ecf71ec2111 100644 --- a/js/src/xpconnect/src/nsXPConnect.cpp +++ b/js/src/xpconnect/src/nsXPConnect.cpp @@ -398,13 +398,14 @@ nsXPConnect::Collect() // cycle collection. So to compensate for JS_BeginRequest in // XPCCallContext::Init we disable the conservative scanner if that call // has started the request on this thread. 
- JS_ASSERT(cx->thread->data.requestDepth >= 1); - JS_ASSERT(!cx->thread->data.conservativeGC.requestThreshold); - if(cx->thread->data.requestDepth == 1) - cx->thread->data.conservativeGC.requestThreshold = 1; + js::ThreadData &threadData = cx->thread()->data; + JS_ASSERT(threadData.requestDepth >= 1); + JS_ASSERT(!threadData.conservativeGC.requestThreshold); + if(threadData.requestDepth == 1) + threadData.conservativeGC.requestThreshold = 1; JS_GC(cx); - if(cx->thread->data.requestDepth == 1) - cx->thread->data.conservativeGC.requestThreshold = 0; + if(threadData.requestDepth == 1) + threadData.conservativeGC.requestThreshold = 0; } NS_IMETHODIMP @@ -2612,7 +2613,7 @@ nsXPConnect::Push(JSContext * cx) bool runningJS = false; for (PRUint32 i = 0; i < stack->Length(); ++i) { JSContext *cx = (*stack)[i].cx; - if (cx && cx->getCurrentSegment()) { + if (cx && !cx->stack.empty()) { runningJS = true; break; } diff --git a/js/src/xpconnect/src/xpcprivate.h b/js/src/xpconnect/src/xpcprivate.h index eba2a4d3de9a..3afd702ebce5 100644 --- a/js/src/xpconnect/src/xpcprivate.h +++ b/js/src/xpconnect/src/xpcprivate.h @@ -3626,9 +3626,9 @@ public: { if(cx) { - NS_ASSERTION(cx->thread, "Uh, JS context w/o a thread?"); + NS_ASSERTION(cx->thread(), "Uh, JS context w/o a thread?"); - if(cx->thread == sMainJSThread) + if(cx->thread() == sMainJSThread) return sMainThreadData; } else if(sMainThreadData && sMainThreadData->mThread == PR_GetCurrentThread()) @@ -3733,7 +3733,7 @@ public: {sMainJSThread = nsnull; sMainThreadData = nsnull;} static PRBool IsMainThread(JSContext *cx) - { return cx->thread == sMainJSThread; } + { return cx->thread() == sMainJSThread; } private: XPCPerThreadData(); diff --git a/js/src/xpconnect/src/xpcstack.cpp b/js/src/xpconnect/src/xpcstack.cpp index c0376c20634f..c737da277529 100644 --- a/js/src/xpconnect/src/xpcstack.cpp +++ b/js/src/xpconnect/src/xpcstack.cpp @@ -141,9 +141,10 @@ XPCJSStackFrame::CreateStack(JSContext* cx, JSStackFrame* fp, { 
NS_ADDREF(self); - if(fp->prev()) + JSStackFrame *tmp = fp; + if(JSStackFrame *prev = JS_FrameIterator(cx, &tmp)) { - if(NS_FAILED(CreateStack(cx, fp->prev(), + if(NS_FAILED(CreateStack(cx, prev, (XPCJSStackFrame**) &self->mCaller))) failed = JS_TRUE; } diff --git a/js/src/xpconnect/src/xpcthreadcontext.cpp b/js/src/xpconnect/src/xpcthreadcontext.cpp index 398ac4855cdd..9bc580edda84 100644 --- a/js/src/xpconnect/src/xpcthreadcontext.cpp +++ b/js/src/xpconnect/src/xpcthreadcontext.cpp @@ -498,7 +498,7 @@ XPCPerThreadData::GetDataImpl(JSContext *cx) if(cx && !sMainJSThread && NS_IsMainThread()) { - sMainJSThread = cx->thread; + sMainJSThread = cx->thread(); sMainThreadData = data; diff --git a/js/src/xpconnect/src/xpcwrappedjsclass.cpp b/js/src/xpconnect/src/xpcwrappedjsclass.cpp index 25d4fa366a94..24218d14a7f9 100644 --- a/js/src/xpconnect/src/xpcwrappedjsclass.cpp +++ b/js/src/xpconnect/src/xpcwrappedjsclass.cpp @@ -575,7 +575,7 @@ GetContextFromObject(JSObject *obj) if(xpcc) { JSContext *cx = xpcc->GetJSContext(); - if(cx->thread->id == js_CurrentThreadId()) + if(cx->thread()->id == js_CurrentThreadId()) return cx; } diff --git a/js/src/xpconnect/src/xpcwrappednative.cpp b/js/src/xpconnect/src/xpcwrappednative.cpp index 7f03eeb7cfb1..40a1c18f62fe 100644 --- a/js/src/xpconnect/src/xpcwrappednative.cpp +++ b/js/src/xpconnect/src/xpcwrappednative.cpp @@ -2222,7 +2222,7 @@ class CallMethodHelper // to be declared as that would make the ctor and dtors run for each // CallMethodHelper instantiation, and they're only needed in a // fraction of all the calls that come through here. 
- js::LazilyConstructed mAutoString; + js::Maybe mAutoString; jsval* const mArgv; const PRUint32 mArgc; diff --git a/js/src/xpconnect/wrappers/AccessCheck.cpp b/js/src/xpconnect/wrappers/AccessCheck.cpp index f058c587e6af..22e896d7dbff 100644 --- a/js/src/xpconnect/wrappers/AccessCheck.cpp +++ b/js/src/xpconnect/wrappers/AccessCheck.cpp @@ -49,6 +49,7 @@ #include "FilteringWrapper.h" #include "WrapperFactory.h" +#include "jsfriendapi.h" #include "jsstr.h" namespace xpc { @@ -240,13 +241,13 @@ AccessCheck::documentDomainMakesSameOrigin(JSContext *cx, JSObject *obj) JSStackFrame *fp = nsnull; JS_FrameIterator(cx, &fp); if (fp) { - while (fp->isDummyFrame()) { + while (!JS_IsScriptFrame(cx, fp)) { if (!JS_FrameIterator(cx, &fp)) break; } if (fp) - scope = &fp->scopeChain(); + scope = JS_GetFrameScopeChainRaw(fp); } if (!scope) diff --git a/layout/generic/nsLineLayout.cpp b/layout/generic/nsLineLayout.cpp index bfbcc2b90205..7fbb88587c7e 100644 --- a/layout/generic/nsLineLayout.cpp +++ b/layout/generic/nsLineLayout.cpp @@ -46,6 +46,7 @@ #define PL_ARENA_CONST_ALIGN_MASK (sizeof(void*)-1) #include "plarena.h" +#include "mozilla/Util.h" #include "nsCOMPtr.h" #include "nsLineLayout.h" #include "nsBlockFrame.h" @@ -66,7 +67,6 @@ #include "nsLayoutUtils.h" #include "nsTextFrame.h" #include "nsCSSRendering.h" -#include "jstl.h" #ifdef DEBUG #undef NOISY_HORIZONTAL_ALIGN @@ -84,6 +84,8 @@ #undef REALLY_NOISY_TRIM #endif +using namespace mozilla; + //---------------------------------------------------------------------- #define FIX_BUG_50257 @@ -786,7 +788,7 @@ nsLineLayout::ReflowFrame(nsIFrame* aFrame, nscoord availableSpaceOnLine = psd->mRightEdge - psd->mX; // Setup reflow state for reflowing the frame - js::LazilyConstructed reflowStateHolder; + Maybe reflowStateHolder; if (!isText) { reflowStateHolder.construct(mPresContext, *psd->mReflowState, aFrame, availSize); diff --git a/mfbt/Types.h b/mfbt/Types.h new file mode 100644 index 000000000000..36156a5451df --- /dev/null 
+++ b/mfbt/Types.h @@ -0,0 +1,91 @@ +/* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- + * vim: set ts=8 sw=4 et tw=99 ft=cpp: + * + * ***** BEGIN LICENSE BLOCK ***** + * Version: MPL 1.1/GPL 2.0/LGPL 2.1 + * + * The contents of this file are subject to the Mozilla Public License Version + * 1.1 (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at: + * http://www.mozilla.org/MPL/ + * + * Software distributed under the License is distributed on an "AS IS" basis, + * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License + * for the specific language governing rights and limitations under the + * License. + * + * The Original Code is Mozilla Code. + * + * The Initial Developer of the Original Code is + * The Mozilla Foundation + * Portions created by the Initial Developer are Copyrigght (C) 2011 + * the Initial Developer. All Rights Reserved. + * + * Contributor(s): + * + * Alternatively, the contents of this file may be used under the terms of + * either the GNU General Public License Version 2 or later (the "GPL"), or + * the GNU Lesser General Public License Version 2.1 or later (the "LGPL"), + * in which case the provisions of the GPL or the LGPL are applicable instead + * of those above. If you wish to allow use of your version of this file only + * under the terms of either the GPL or the LGPL, and not to allow others to + * use your version of this file under the terms of the MPL, indicate your + * decision by deleting the provisions above and replace them with the notice + * and other provisions required by the GPL or the LGPL. If you do not delete + * the provisions above, a recipient may use your version of this file under + * the terms of any one of the MPL, the GPL or the LGPL. + * + * ***** END LICENSE BLOCK ***** */ + +/* + * NB: This header must be both valid C and C++. 
It must be + * include-able by code embedding SpiderMonkey *and* Gecko. + */ + +#ifndef mozilla_Types_h_ +#define mozilla_Types_h_ + +/* + * mfbt is logically "lower level" than js/src, but needs basic + * definitions of numerical types and macros for compiler/linker + * directives. js/src already goes through some pain to provide them + * on numerous platforms, so instead of moving all that goop here, + * this header makes use of the fact that for the foreseeable future + * mfbt code will be part and parcel with libmozjs, static or not. + * + * For now, the policy is to use jstypes definitions but add a layer + * of indirection on top of them in case a Great Refactoring ever + * happens. + */ +#include "jstypes.h" + +/* + * The numerical types provided by jstypes.h that are allowed within + * mfbt code are + * + * stddef types: size_t, ptrdiff_t, etc. + * stdin [sic] types: int8, uint32, etc. + * + * stdint types (int8_t etc.), are available for use here, but doing + * so would change SpiderMonkey's and Gecko's contracts with + * embedders: stdint types have not yet appeared in public APIs. + */ + +#define MOZ_EXPORT_API(type_) JS_EXPORT_API(type_) +#define MOZ_IMPORT_API(type_) JS_IMPORT_API(type_) + +/* + * mfbt definitions need to see export declarations when built, but + * other code needs to see import declarations when using mfbt. + */ +#if defined(IMPL_MFBT) +# define MFBT_API(type_) MOZ_EXPORT_API(type_) +#else +# define MFBT_API(type_) MOZ_IMPORT_API(type_) +#endif + + +#define MOZ_BEGIN_EXTERN_C JS_BEGIN_EXTERN_C +#define MOZ_END_EXTERN_C JS_END_EXTERN_C + +#endif // mozilla_Types_h_ diff --git a/mfbt/Util.h b/mfbt/Util.h index d82a203fd08d..7317bb774db6 100644 --- a/mfbt/Util.h +++ b/mfbt/Util.h @@ -40,6 +40,42 @@ #ifndef mozilla_Util_h_ #define mozilla_Util_h_ +#include "mozilla/Types.h" + +/* + * XXX: we're cheating here in order to avoid creating object files + * for mfbt /just/ to provide a function like FatalError() to be used + * by MOZ_ASSERT(). 
(It'll happen eventually, but for just ASSERT() + * it isn't worth the pain.) JS_Assert(), although unfortunately + * named, is part of SpiderMonkey's stable, external API, so this + * isn't quite as bad as it seems. + * + * Once mfbt needs object files, this unholy union with JS_Assert() + * will be broken. + */ +MOZ_BEGIN_EXTERN_C + +extern MFBT_API(void) +JS_Assert(const char *s, const char *file, JSIntn ln); + +MOZ_END_EXTERN_C + +/* + * MOZ_ASSERT() is a "strong" assertion of state, like libc's + * assert(). If a MOZ_ASSERT() fails in a debug build, the process in + * which it fails will stop running in a loud and dramatic way. + */ +#ifdef DEBUG + +# define MOZ_ASSERT(expr_) \ + ((expr_) ? (void)0 : JS_Assert(#expr_, __FILE__, __LINE__)) + +#else + +# define MOZ_ASSERT(expr_) ((void)0) + +#endif // DEBUG + #ifdef __cplusplus namespace mozilla { @@ -91,6 +127,124 @@ struct DebugOnly ~DebugOnly() {} }; + +/* + * This utility pales in comparison to Boost's aligned_storage. The utility + * simply assumes that JSUint64 is enough alignment for anyone. This may need + * to be extended one day... + * + * As an important side effect, pulling the storage into this template is + * enough obfuscation to confuse gcc's strict-aliasing analysis into not giving + * false negatives when we cast from the char buffer to whatever type we've + * constructed using the bytes. + */ +template +struct AlignedStorage +{ + union U { + char bytes[nbytes]; + uint64 _; + } u; + + const void *addr() const { return u.bytes; } + void *addr() { return u.bytes; } +}; + +template +struct AlignedStorage2 +{ + union U { + char bytes[sizeof(T)]; + uint64 _; + } u; + + const T *addr() const { return (const T *)u.bytes; } + T *addr() { return (T *)u.bytes; } +}; + +/* + * Small utility for lazily constructing objects without using dynamic storage. 
+ * When a Maybe is constructed, it is |empty()|, i.e., no value of T has + * been constructed and no T destructor will be called when the Maybe is + * destroyed. Upon calling |construct|, a T object will be constructed with the + * given arguments and that object will be destroyed when the owning Maybe + * is destroyed. + * + * N.B. GCC seems to miss some optimizations with Maybe and may generate extra + * branches/loads/stores. Use with caution on hot paths. + */ +template +class Maybe +{ + AlignedStorage2 storage; + bool constructed; + + T &asT() { return *storage.addr(); } + + explicit Maybe(const Maybe &other); + const Maybe &operator=(const Maybe &other); + + public: + Maybe() { constructed = false; } + ~Maybe() { if (constructed) asT().~T(); } + + bool empty() const { return !constructed; } + + void construct() { + MOZ_ASSERT(!constructed); + new(storage.addr()) T(); + constructed = true; + } + + template + void construct(const T1 &t1) { + MOZ_ASSERT(!constructed); + new(storage.addr()) T(t1); + constructed = true; + } + + template + void construct(const T1 &t1, const T2 &t2) { + MOZ_ASSERT(!constructed); + new(storage.addr()) T(t1, t2); + constructed = true; + } + + template + void construct(const T1 &t1, const T2 &t2, const T3 &t3) { + MOZ_ASSERT(!constructed); + new(storage.addr()) T(t1, t2, t3); + constructed = true; + } + + template + void construct(const T1 &t1, const T2 &t2, const T3 &t3, const T4 &t4) { + MOZ_ASSERT(!constructed); + new(storage.addr()) T(t1, t2, t3, t4); + constructed = true; + } + + T *addr() { + MOZ_ASSERT(constructed); + return &asT(); + } + + T &ref() { + MOZ_ASSERT(constructed); + return asT(); + } + + void destroy() { + ref().~T(); + constructed = false; + } + + void destroyIfConstructed() { + if (!empty()) + destroy(); + } +}; + } /* namespace mozilla */ #endif /* __cplusplus */