[mq]: bug-462027-v9

Jason Orendorff 2009-02-02 20:42:46 -06:00
parent 3a187c3da2
commit c9e5602d7f
11 changed files with 474 additions and 210 deletions

View File

@ -1544,8 +1544,10 @@ static JSString* FASTCALL
Array_p_join(JSContext* cx, JSObject* obj, JSString *str)
{
jsval v;
if (!array_join_sub(cx, obj, TO_STRING, str, &v))
if (!array_join_sub(cx, obj, TO_STRING, str, &v)) {
cx->builtinStatus |= JSBUILTIN_ERROR;
return NULL;
}
JS_ASSERT(JSVAL_IS_STRING(v));
return JSVAL_TO_STRING(v);
}
@ -2146,7 +2148,8 @@ Array_p_push1(JSContext* cx, JSObject* obj, jsval v)
: array_push_slowly(cx, obj, 1, &v, &v)) {
return v;
}
return JSVAL_ERROR_COOKIE;
cx->builtinStatus |= JSBUILTIN_ERROR;
return JSVAL_VOID;
}
#endif
@ -2213,12 +2216,13 @@ static jsval FASTCALL
Array_p_pop(JSContext* cx, JSObject* obj)
{
jsval v;
if (OBJ_IS_DENSE_ARRAY(cx, obj)
? array_pop_dense(cx, obj, &v)
: array_pop_slowly(cx, obj, &v)) {
return v;
}
return JSVAL_ERROR_COOKIE;
cx->builtinStatus |= JSBUILTIN_ERROR;
return JSVAL_VOID;
}
#endif

View File

@ -49,8 +49,8 @@
#undef THIS
#endif
enum JSTNErrType { INFALLIBLE, FAIL_NULL, FAIL_NEG, FAIL_VOID, FAIL_JSVAL };
enum { JSTN_ERRTYPE_MASK = 7, JSTN_MORE = 8 };
enum JSTNErrType { INFALLIBLE, FAIL_STATUS, FAIL_NULL, FAIL_NEG, FAIL_VOID, FAIL_COOKIE };
enum { JSTN_ERRTYPE_MASK = 0x07, JSTN_UNBOX_AFTER = 0x08, JSTN_MORE = 0x10 };
#define JSTN_ERRTYPE(jstn) ((jstn)->flags & JSTN_ERRTYPE_MASK)
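
(Aside, not part of the patch: under the new layout the low three bits of flags hold the JSTNErrType, with JSTN_UNBOX_AFTER and JSTN_MORE as independent higher bits. A minimal sketch, assuming a hypothetical JSTraceableNative tn:)

/* Illustrative only: hypothetical entry that is fallible, unboxes its
   jsval result after the call, and is followed by more entries. */
JSTraceableNative tn;
tn.flags = FAIL_STATUS | JSTN_UNBOX_AFTER | JSTN_MORE;  /* 0x01 | 0x08 | 0x10 */
JS_ASSERT(JSTN_ERRTYPE(&tn) == FAIL_STATUS);            /* masks with 0x07 */
JS_ASSERT(tn.flags & JSTN_UNBOX_AFTER);
JS_ASSERT(tn.flags & JSTN_MORE);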
@ -85,7 +85,7 @@ struct JSTraceableNative {
const nanojit::CallInfo *builtin;
const char *prefix;
const char *argtypes;
uintN flags; /* JSTN_MORE | JSTNErrType */
uintN flags; /* JSTNErrType | JSTN_UNBOX_AFTER | JSTN_MORE */
};
/*
@ -120,13 +120,41 @@ struct JSTraceableNative {
* Types with -- for the two string fields are not permitted as argument types
* in JS_DEFINE_TRCINFO.
*
* If a traceable native can fail, the values that indicate failure are part of
* the return type:
* JSVAL_FAIL: JSVAL_ERROR_COOKIE
* BOOL_FAIL: JSVAL_TO_BOOLEAN(JSVAL_VOID)
* INT32_FAIL: any negative value
* STRING_FAIL: NULL
* OBJECT_FAIL_NULL: NULL
* There are three kinds of traceable-native error handling.
*
* - If a traceable native's return type ends with _FAIL, it always runs to
* completion. It can either succeed or fail with an error or exception;
* on success, it may or may not stay on trace. There may be side effects
* in any case. If the call succeeds but bails off trace, we resume in the
* interpreter at the next opcode.
*
* _FAIL builtins indicate failure or bailing off trace by setting bits in
* cx->builtinStatus.
*
* - If a traceable native's return type contains _RETRY, it can either
* succeed, fail with a JS exception, or tell the caller to bail off trace
* and retry the call from the interpreter. The last case happens if the
* builtin discovers that it can't do its job without examining the JS
* stack, reentering the interpreter, accessing properties of the global
* object, etc.
*
* The builtin must detect the need to retry before committing any side
* effects. If a builtin can't do this, it must use a _FAIL return type
* instead of _RETRY.
*
* _RETRY builtins indicate failure with a special return value that
* depends on the return type:
*
* BOOL_RETRY: JSVAL_TO_BOOLEAN(JSVAL_VOID)
* INT32_RETRY: any negative value
* STRING_RETRY: NULL
* OBJECT_RETRY_NULL: NULL
* JSVAL_RETRY: JSVAL_ERROR_COOKIE
*
* _RETRY function calls are faster than _FAIL calls. Each _RETRY call
* saves a write to cx->bailExit and a read from cx->builtinStatus.
*
* - All other traceable natives are infallible (e.g. Date.now, Math.log).
*
* Special builtins known to the tracer can have their own idiosyncratic
* error codes.
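
A minimal sketch of the two fallible conventions described above (hypothetical natives and a hypothetical fallible helper, shown only for illustration; the _FAIL pattern mirrors the Array_p_join change earlier in this patch):

/* _RETRY style: must detect the need to bail BEFORE any side effects.
   The failure value (NULL here, for STRING_RETRY) means "retry the call
   from the interpreter". */
static JSString* FASTCALL
Example_retry(JSContext* cx, JSString* str)
{
    if (NeedsInterpreter(cx, str))   /* hypothetical check */
        return NULL;                 /* STRING_RETRY failure value */
    return str;
}

/* _FAIL style: always runs to completion; errors and off-trace bailing
   are reported through cx->builtinStatus. */
static JSString* FASTCALL
Example_fail(JSContext* cx, JSString* str)
{
    jsval v;
    if (!FallibleOperation(cx, str, &v)) {   /* hypothetical; may reenter the interpreter */
        cx->builtinStatus |= JSBUILTIN_ERROR;
        return NULL;
    }
    JS_ASSERT(JSVAL_IS_STRING(v));
    return JSVAL_TO_STRING(v);
}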
@ -142,29 +170,35 @@ struct JSTraceableNative {
* effects.
*/
#define _JS_CTYPE(ctype, size, pch, ach, flags) (ctype, size, pch, ach, flags)
#define _JS_CTYPE_CONTEXT _JS_CTYPE(JSContext *, _JS_PTR,"C", "", INFALLIBLE)
#define _JS_CTYPE_RUNTIME _JS_CTYPE(JSRuntime *, _JS_PTR,"R", "", INFALLIBLE)
#define _JS_CTYPE_THIS _JS_CTYPE(JSObject *, _JS_PTR,"T", "", INFALLIBLE)
#define _JS_CTYPE_THIS_DOUBLE _JS_CTYPE(jsdouble, _JS_F64,"D", "", INFALLIBLE)
#define _JS_CTYPE_THIS_STRING _JS_CTYPE(JSString *, _JS_PTR,"S", "", INFALLIBLE)
#define _JS_CTYPE_PC _JS_CTYPE(jsbytecode *, _JS_PTR,"P", "", INFALLIBLE)
#define _JS_CTYPE_JSVAL _JS_CTYPE(jsval, _JS_PTR, "","v", INFALLIBLE)
#define _JS_CTYPE_JSVAL_FAIL _JS_CTYPE(jsval, _JS_PTR, --, --, FAIL_JSVAL)
#define _JS_CTYPE_BOOL _JS_CTYPE(JSBool, _JS_I32, "","i", INFALLIBLE)
#define _JS_CTYPE_BOOL_FAIL _JS_CTYPE(int32, _JS_I32, --, --, FAIL_VOID)
#define _JS_CTYPE_INT32 _JS_CTYPE(int32, _JS_I32, "","i", INFALLIBLE)
#define _JS_CTYPE_INT32_FAIL _JS_CTYPE(int32, _JS_I32, --, --, FAIL_NEG)
#define _JS_CTYPE_UINT32 _JS_CTYPE(uint32, _JS_I32, --, --, INFALLIBLE)
#define _JS_CTYPE_DOUBLE _JS_CTYPE(jsdouble, _JS_F64, "","d", INFALLIBLE)
#define _JS_CTYPE_STRING _JS_CTYPE(JSString *, _JS_PTR, "","s", INFALLIBLE)
#define _JS_CTYPE_STRING_FAIL _JS_CTYPE(JSString *, _JS_PTR, --, --, FAIL_NULL)
#define _JS_CTYPE_OBJECT _JS_CTYPE(JSObject *, _JS_PTR, "","o", INFALLIBLE)
#define _JS_CTYPE_OBJECT_FAIL_NULL _JS_CTYPE(JSObject *, _JS_PTR, --, --, FAIL_NULL)
#define _JS_CTYPE_REGEXP _JS_CTYPE(JSObject *, _JS_PTR, "","r", INFALLIBLE)
#define _JS_CTYPE_SCOPEPROP _JS_CTYPE(JSScopeProperty *, _JS_PTR, --, --, INFALLIBLE)
#define _JS_CTYPE_SIDEEXIT _JS_CTYPE(SideExit *, _JS_PTR, --, --, INFALLIBLE)
#define _JS_CTYPE_INTERPSTATE _JS_CTYPE(InterpState *, _JS_PTR, --, --, INFALLIBLE)
#define _JS_CTYPE_FRAGMENT _JS_CTYPE(nanojit::Fragment *, _JS_PTR, --, --, INFALLIBLE)
#define _JS_JSVAL_CTYPE(size, pch, ach, flags) (jsval, size, pch, ach, (flags | JSTN_UNBOX_AFTER))
#define _JS_CTYPE_CONTEXT _JS_CTYPE(JSContext *, _JS_PTR,"C", "", INFALLIBLE)
#define _JS_CTYPE_RUNTIME _JS_CTYPE(JSRuntime *, _JS_PTR,"R", "", INFALLIBLE)
#define _JS_CTYPE_THIS _JS_CTYPE(JSObject *, _JS_PTR,"T", "", INFALLIBLE)
#define _JS_CTYPE_THIS_DOUBLE _JS_CTYPE(jsdouble, _JS_F64,"D", "", INFALLIBLE)
#define _JS_CTYPE_THIS_STRING _JS_CTYPE(JSString *, _JS_PTR,"S", "", INFALLIBLE)
#define _JS_CTYPE_PC _JS_CTYPE(jsbytecode *, _JS_PTR,"P", "", INFALLIBLE)
#define _JS_CTYPE_JSVAL _JS_JSVAL_CTYPE( _JS_PTR, "","v", INFALLIBLE)
#define _JS_CTYPE_JSVAL_RETRY _JS_JSVAL_CTYPE( _JS_PTR, --, --, FAIL_COOKIE)
#define _JS_CTYPE_JSVAL_FAIL _JS_JSVAL_CTYPE( _JS_PTR, --, --, FAIL_STATUS)
#define _JS_CTYPE_BOOL _JS_CTYPE(JSBool, _JS_I32, "","i", INFALLIBLE)
#define _JS_CTYPE_BOOL_RETRY _JS_CTYPE(int32, _JS_I32, --, --, FAIL_VOID)
#define _JS_CTYPE_BOOL_FAIL _JS_CTYPE(int32, _JS_I32, --, --, FAIL_STATUS)
#define _JS_CTYPE_INT32 _JS_CTYPE(int32, _JS_I32, "","i", INFALLIBLE)
#define _JS_CTYPE_INT32_RETRY _JS_CTYPE(int32, _JS_I32, --, --, FAIL_NEG)
#define _JS_CTYPE_UINT32 _JS_CTYPE(uint32, _JS_I32, --, --, INFALLIBLE)
#define _JS_CTYPE_DOUBLE _JS_CTYPE(jsdouble, _JS_F64, "","d", INFALLIBLE)
#define _JS_CTYPE_STRING _JS_CTYPE(JSString *, _JS_PTR, "","s", INFALLIBLE)
#define _JS_CTYPE_STRING_RETRY _JS_CTYPE(JSString *, _JS_PTR, --, --, FAIL_NULL)
#define _JS_CTYPE_STRING_FAIL _JS_CTYPE(JSString *, _JS_PTR, --, --, FAIL_STATUS)
#define _JS_CTYPE_OBJECT _JS_CTYPE(JSObject *, _JS_PTR, "","o", INFALLIBLE)
#define _JS_CTYPE_OBJECT_RETRY_NULL _JS_CTYPE(JSObject *, _JS_PTR, --, --, FAIL_NULL)
#define _JS_CTYPE_OBJECT_FAIL _JS_CTYPE(JSObject *, _JS_PTR, --, --, FAIL_STATUS)
#define _JS_CTYPE_REGEXP _JS_CTYPE(JSObject *, _JS_PTR, "","r", INFALLIBLE)
#define _JS_CTYPE_SCOPEPROP _JS_CTYPE(JSScopeProperty *, _JS_PTR, --, --, INFALLIBLE)
#define _JS_CTYPE_SIDEEXIT _JS_CTYPE(SideExit *, _JS_PTR, --, --, INFALLIBLE)
#define _JS_CTYPE_INTERPSTATE _JS_CTYPE(InterpState *, _JS_PTR, --, --, INFALLIBLE)
#define _JS_CTYPE_FRAGMENT _JS_CTYPE(nanojit::Fragment *, _JS_PTR, --, --, INFALLIBLE)
#define _JS_EXPAND(tokens) tokens

View File

@ -93,6 +93,9 @@ typedef struct JSGSNCache {
#define JS_CLEAR_GSN_CACHE(cx) GSN_CACHE_CLEAR(&JS_GSN_CACHE(cx))
#define JS_METER_GSN_CACHE(cx,cnt) GSN_CACHE_METER(&JS_GSN_CACHE(cx), cnt)
typedef struct InterpState InterpState;
typedef struct VMSideExit VMSideExit;
#ifdef __cplusplus
namespace nanojit {
class Fragment;
@ -158,6 +161,8 @@ typedef struct JSTraceMonitor {
CLS(TraceRecorder) abortStack;
} JSTraceMonitor;
typedef struct InterpStruct InterpStruct;
#ifdef JS_TRACER
# define JS_ON_TRACE(cx) (JS_TRACE_MONITOR(cx).onTrace)
#else
@ -261,6 +266,14 @@ typedef enum JSRuntimeState {
JSRTS_LANDING
} JSRuntimeState;
#ifdef JS_TRACER
typedef enum JSBuiltinStatus {
JSBUILTIN_OK = 0,
JSBUILTIN_BAILED = 1,
JSBUILTIN_ERROR = 2
} JSBuiltinStatus;
#endif
typedef enum JSBuiltinFunctionId {
JSBUILTIN_ObjectToIterator,
JSBUILTIN_CallIteratorNext,
@ -984,6 +997,23 @@ struct JSContext {
/* Current bytecode location (or NULL if no hint was supplied). */
jsbytecode *pcHint;
#ifdef JS_TRACER
/*
* State for the current tree execution. bailExit is valid if the tree has
* called back into native code via a _FAIL builtin and has not yet bailed,
* else garbage (NULL in debug builds).
*/
InterpState *interpState;
VMSideExit *bailExit;
/*
* Used by _FAIL builtins; see jsbuiltins.h. The builtin sets the
* JSBUILTIN_BAILED bit if it bails off trace and the JSBUILTIN_ERROR bit
* if an error or exception occurred. Cleared on side exit.
*/
uint32 builtinStatus;
#endif
};
#define BEGIN_PC_HINT(pc) (cx->pcHint = (pc))
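
For context on how the new builtinStatus bits above are consumed: a condensed sketch of the check that LeaveTree (added in jstracer.cpp later in this patch) performs after a trace exits; not a literal excerpt.

int32_t bs = cx->builtinStatus;
cx->builtinStatus = 0;                                  /* cleared on side exit */
if (innermost->exitType == STATUS_EXIT && (bs & JSBUILTIN_BAILED)) {
    if (bs & JSBUILTIN_ERROR) {
        /* The builtin bailed off trace and then hit an error or exception;
           the error propagates through the interpreter as usual. */
    } else {
        /* The builtin bailed off trace but succeeded: advance pc past the
           CALL/APPLY, box the return value, and resume at the next op. */
    }
}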

View File

@ -2031,7 +2031,7 @@ static JSFunctionSpec date_static_methods[] = {
};
JS_DEFINE_TRCINFO_1(date_valueOf,
(3, (static, JSVAL_FAIL, date_valueOf_tn, CONTEXT, THIS, STRING, 0, 0)))
(3, (static, JSVAL_RETRY, date_valueOf_tn, CONTEXT, THIS, STRING, 0, 0)))
static JSFunctionSpec date_methods[] = {
JS_FN("getTime", date_getTime, 0,0),

View File

@ -1932,11 +1932,11 @@ const char js_lookupSetter_str[] = "__lookupSetter__";
#endif
JS_DEFINE_TRCINFO_1(obj_valueOf,
(3, (static, JSVAL, Object_p_valueOf, CONTEXT, THIS, STRING, 0, 0)))
JS_DEFINE_TRCINFO_1(obj_hasOwnProperty,
(3, (static, BOOL_FAIL, Object_p_hasOwnProperty, CONTEXT, THIS, STRING, 0, 0)))
(3, (static, BOOL_RETRY, Object_p_hasOwnProperty, CONTEXT, THIS, STRING, 0, 0)))
JS_DEFINE_TRCINFO_1(obj_propertyIsEnumerable,
(3, (static, BOOL_FAIL, Object_p_propertyIsEnumerable, CONTEXT, THIS, STRING, 0, 0)))
(3, (static, BOOL_RETRY, Object_p_propertyIsEnumerable, CONTEXT, THIS, STRING, 0, 0)))
static JSFunctionSpec object_methods[] = {
#if JS_HAS_TOSOURCE

View File

@ -64,6 +64,7 @@
#include "jsregexp.h"
#include "jsscan.h"
#include "jsscope.h"
#include "jsstaticcheck.h"
#include "jsstr.h"
#ifdef JS_TRACER
@ -3832,14 +3833,17 @@ MatchRegExp(REGlobalData *gData, REMatchState *x)
gData->skipped = (ptrdiff_t) x->cp;
#ifdef JS_JIT_SPEW
{
JSStackFrame *caller = js_GetScriptedCaller(gData->cx, NULL);
debug_only_v(printf("entering REGEXP trace at %s:%u@%u, code: %p\n",
caller ? caller->script->filename : "<unknown>",
caller ? js_FramePCToLineNumber(gData->cx, caller) : 0,
caller ? FramePCOffset(caller) : 0,
(void *) native););
}
debug_only_v({
VOUCH_DOES_NOT_REQUIRE_STACK();
JSStackFrame *caller = (JS_ON_TRACE(gData->cx))
? NULL
: js_GetScriptedCaller(gData->cx, NULL);
printf("entering REGEXP trace at %s:%u@%u, code: %p\n",
caller ? caller->script->filename : "<unknown>",
caller ? js_FramePCToLineNumber(gData->cx, caller) : 0,
caller ? FramePCOffset(caller) : 0,
(void *) native);
})
#endif
#if defined(JS_NO_FASTCALL) && defined(NANOJIT_IA32)
@ -4819,7 +4823,7 @@ Regexp_p_test(JSContext* cx, JSObject* regexp, JSString* str)
}
JS_DEFINE_TRCINFO_1(regexp_test,
(3, (static, BOOL_FAIL, Regexp_p_test, CONTEXT, THIS, STRING, 1, 1)))
(3, (static, BOOL_RETRY, Regexp_p_test, CONTEXT, THIS, STRING, 1, 1)))
#endif

View File

@ -1344,8 +1344,6 @@ match_or_replace(JSContext *cx,
destroy(cx, data);
}
} else {
jsval savedObject = JSVAL_NULL;
if (GET_MODE(data->flags) == MODE_REPLACE) {
test = JS_TRUE;
} else {
@ -1353,10 +1351,7 @@ match_or_replace(JSContext *cx,
* MODE_MATCH implies str_match is being called from a script or a
* scripted function. If the caller cares only about testing null
* vs. non-null return value, optimize away the array object that
* would normally be returned in *vp. Instead return an arbitrary
* object (not JSVAL_TRUE, for type map integrity; see bug 453564).
* The caller provides the object in *vp and is responsible for
* rooting it elsewhere.
* would normally be returned in *vp.
*
* Assume a full array result is required, then prove otherwise.
*/
@ -1370,16 +1365,12 @@ match_or_replace(JSContext *cx,
case JSOP_IFEQX:
case JSOP_IFNEX:
test = JS_TRUE;
savedObject = *vp;
JS_ASSERT(!JSVAL_IS_PRIMITIVE(savedObject));
break;
default:;
}
}
}
ok = js_ExecuteRegExp(cx, re, str, &index, test, vp);
if (ok && !JSVAL_IS_NULL(savedObject) && *vp == JSVAL_TRUE)
*vp = savedObject;
}
DROP_REGEXP(cx, re);
@ -1454,9 +1445,6 @@ str_match(JSContext *cx, uintN argc, jsval *vp)
for (fp = js_GetTopStackFrame(cx); fp && !fp->regs; fp = fp->down)
JS_ASSERT(!fp->script);
/* Root the object in vp[0]. See comment in match_or_replace. */
JSAutoTempValueRooter tvr(cx, vp[0]);
return StringMatchHelper(cx, argc, vp, fp ? fp->regs->pc : NULL);
}
@ -1464,22 +1452,22 @@ str_match(JSContext *cx, uintN argc, jsval *vp)
static jsval FASTCALL
String_p_match(JSContext* cx, JSString* str, jsbytecode *pc, JSObject* regexp)
{
/* arbitrary object in vp[0] */
jsval vp[3] = { OBJECT_TO_JSVAL(regexp), STRING_TO_JSVAL(str), OBJECT_TO_JSVAL(regexp) };
if (!StringMatchHelper(cx, 1, vp, pc))
return JSVAL_ERROR_COOKIE;
JS_ASSERT(JSVAL_IS_OBJECT(vp[0]));
jsval vp[3] = { JSVAL_NULL, STRING_TO_JSVAL(str), OBJECT_TO_JSVAL(regexp) };
if (!StringMatchHelper(cx, 1, vp, pc)) {
cx->builtinStatus |= JSBUILTIN_ERROR;
return JSVAL_VOID;
}
return vp[0];
}
static jsval FASTCALL
String_p_match_obj(JSContext* cx, JSObject* str, jsbytecode *pc, JSObject* regexp)
{
/* arbitrary object in vp[0] */
jsval vp[3] = { OBJECT_TO_JSVAL(regexp), OBJECT_TO_JSVAL(str), OBJECT_TO_JSVAL(regexp) };
if (!StringMatchHelper(cx, 1, vp, pc))
return JSVAL_ERROR_COOKIE;
JS_ASSERT(JSVAL_IS_OBJECT(vp[0]));
jsval vp[3] = { JSVAL_NULL, OBJECT_TO_JSVAL(str), OBJECT_TO_JSVAL(regexp) };
if (!StringMatchHelper(cx, 1, vp, pc)) {
cx->builtinStatus |= JSBUILTIN_ERROR;
return JSVAL_VOID;
}
return vp[0];
}
#endif
@ -2500,32 +2488,32 @@ JS_DEFINE_CALLINFO_2(extern, BOOL, js_EqualStrings, STRING, STRING,
JS_DEFINE_CALLINFO_2(extern, INT32, js_CompareStrings, STRING, STRING, 1, 1)
JS_DEFINE_TRCINFO_1(str_toString,
(2, (extern, STRING_FAIL, String_p_toString, CONTEXT, THIS, 1, 1)))
JS_DEFINE_TRCINFO_2(str_substring,
(4, (static, STRING_FAIL, String_p_substring, CONTEXT, THIS_STRING, INT32, INT32, 1, 1)),
(3, (static, STRING_FAIL, String_p_substring_1, CONTEXT, THIS_STRING, INT32, 1, 1)))
(4, (static, STRING_RETRY, String_p_substring, CONTEXT, THIS_STRING, INT32, INT32, 1, 1)),
(3, (static, STRING_RETRY, String_p_substring_1, CONTEXT, THIS_STRING, INT32, 1, 1)))
JS_DEFINE_TRCINFO_1(str_charAt,
(3, (extern, STRING_FAIL, js_String_getelem, CONTEXT, THIS_STRING, INT32, 1, 1)))
(3, (extern, STRING_RETRY, js_String_getelem, CONTEXT, THIS_STRING, INT32, 1, 1)))
JS_DEFINE_TRCINFO_1(str_charCodeAt,
(2, (extern, INT32_FAIL, js_String_p_charCodeAt, THIS_STRING, INT32, 1, 1)))
(2, (extern, INT32_RETRY, js_String_p_charCodeAt, THIS_STRING, INT32, 1, 1)))
JS_DEFINE_TRCINFO_4(str_concat,
(3, (static, STRING_FAIL, String_p_concat_1int, CONTEXT, THIS_STRING, INT32, 1, 1)),
(3, (extern, STRING_FAIL, js_ConcatStrings, CONTEXT, THIS_STRING, STRING, 1, 1)),
(4, (static, STRING_FAIL, String_p_concat_2str, CONTEXT, THIS_STRING, STRING, STRING, 1, 1)),
(5, (static, STRING_FAIL, String_p_concat_3str, CONTEXT, THIS_STRING, STRING, STRING, STRING, 1, 1)))
(3, (static, STRING_RETRY, String_p_concat_1int, CONTEXT, THIS_STRING, INT32, 1, 1)),
(3, (extern, STRING_RETRY, js_ConcatStrings, CONTEXT, THIS_STRING, STRING, 1, 1)),
(4, (static, STRING_RETRY, String_p_concat_2str, CONTEXT, THIS_STRING, STRING, STRING, 1, 1)),
(5, (static, STRING_RETRY, String_p_concat_3str, CONTEXT, THIS_STRING, STRING, STRING, STRING, 1, 1)))
JS_DEFINE_TRCINFO_2(str_match,
(4, (static, JSVAL_FAIL, String_p_match, CONTEXT, THIS_STRING, PC, REGEXP, 1, 1)),
(4, (static, JSVAL_FAIL, String_p_match_obj, CONTEXT, THIS, PC, REGEXP, 1, 1)))
JS_DEFINE_TRCINFO_3(str_replace,
(4, (static, STRING_FAIL, String_p_replace_str, CONTEXT, THIS_STRING, REGEXP, STRING, 1, 1)),
(4, (static, STRING_FAIL, String_p_replace_str2, CONTEXT, THIS_STRING, STRING, STRING, 1, 1)),
(5, (static, STRING_FAIL, String_p_replace_str3, CONTEXT, THIS_STRING, STRING, STRING, STRING, 1, 1)))
(4, (static, STRING_RETRY, String_p_replace_str, CONTEXT, THIS_STRING, REGEXP, STRING, 1, 1)),
(4, (static, STRING_RETRY, String_p_replace_str2, CONTEXT, THIS_STRING, STRING, STRING, 1, 1)),
(5, (static, STRING_RETRY, String_p_replace_str3, CONTEXT, THIS_STRING, STRING, STRING, STRING, 1, 1)))
JS_DEFINE_TRCINFO_1(str_split,
(3, (static, OBJECT_FAIL_NULL, String_p_split, CONTEXT, THIS_STRING, STRING, 0, 0)))
(3, (static, OBJECT_RETRY_NULL, String_p_split, CONTEXT, THIS_STRING, STRING, 0, 0)))
JS_DEFINE_TRCINFO_1(str_toLowerCase,
(2, (extern, STRING_FAIL, js_toLowerCase, CONTEXT, THIS_STRING, 1, 1)))
(2, (extern, STRING_RETRY, js_toLowerCase, CONTEXT, THIS_STRING, 1, 1)))
JS_DEFINE_TRCINFO_1(str_toUpperCase,
(2, (extern, STRING_FAIL, js_toUpperCase, CONTEXT, THIS_STRING, 1, 1)))
(2, (extern, STRING_RETRY, js_toUpperCase, CONTEXT, THIS_STRING, 1, 1)))
#define GENERIC JSFUN_GENERIC_NATIVE
#define PRIMITIVE JSFUN_THISP_PRIMITIVE
@ -2662,7 +2650,7 @@ String_fromCharCode(JSContext* cx, int32 i)
#endif
JS_DEFINE_TRCINFO_1(str_fromCharCode,
(2, (static, STRING_FAIL, String_fromCharCode, CONTEXT, INT32, 1, 1)))
(2, (static, STRING_RETRY, String_fromCharCode, CONTEXT, INT32, 1, 1)))
static JSFunctionSpec string_static_methods[] = {
JS_TN("fromCharCode", str_fromCharCode, 1, 0, str_fromCharCode_trcinfo),

View File

@ -1218,7 +1218,7 @@ TraceRecorder::TraceRecorder(JSContext* cx, VMSideExit* _anchor, Fragment* _frag
lirbuf->sp = addName(lir->insLoad(LIR_ldp, lirbuf->state, (int)offsetof(InterpState, sp)), "sp");
lirbuf->rp = addName(lir->insLoad(LIR_ldp, lirbuf->state, offsetof(InterpState, rp)), "rp");
cx_ins = addName(lir->insLoad(LIR_ldp, lirbuf->state, offsetof(InterpState, cx)), "cx");
gp_ins = addName(lir->insLoad(LIR_ldp, lirbuf->state, offsetof(InterpState, gp)), "gp");
gp_ins = addName(lir->insLoad(LIR_ldp, lirbuf->state, offsetof(InterpState, global)), "gp");
eos_ins = addName(lir->insLoad(LIR_ldp, lirbuf->state, offsetof(InterpState, eos)), "eos");
eor_ins = addName(lir->insLoad(LIR_ldp, lirbuf->state, offsetof(InterpState, eor)), "eor");
globalObj_ins = addName(lir->insLoad(LIR_ldp, lirbuf->state, offsetof(InterpState, globalObj)), "globalObj");
@ -2098,7 +2098,7 @@ TraceRecorder::snapshot(ExitType exitType)
/* WARNING: don't return before restoring the original pc if (resumeAfter). */
bool resumeAfter = (pendingTraceableNative &&
JSTN_ERRTYPE(pendingTraceableNative) == FAIL_JSVAL);
JSTN_ERRTYPE(pendingTraceableNative) == FAIL_STATUS);
if (resumeAfter) {
JS_ASSERT(*pc == JSOP_CALL || *pc == JSOP_APPLY);
pc += cs.length;
@ -2130,7 +2130,8 @@ TraceRecorder::snapshot(ExitType exitType)
/* If we are capturing the stack state on a specific instruction, the value on
the top of the stack is a boxed value. */
if (resumeAfter) {
typemap[stackSlots - 1] = JSVAL_BOXED;
if (pendingTraceableNative->flags & JSTN_UNBOX_AFTER)
typemap[stackSlots - 1] = JSVAL_BOXED;
/* Now restore the original pc (after which early returns are ok).
MUST_FLOW_LABEL(restore_pc);
@ -3855,11 +3856,14 @@ js_FindVMCompatiblePeer(JSContext* cx, Fragment* f)
return NULL;
}
static void
LeaveTree(InterpState&, VMSideExit* lr);
/**
* Executes a tree.
*/
static VMSideExit*
js_ExecuteTree(JSContext* cx, Fragment* f, uintN& inlineCallCount,
static JS_REQUIRES_STACK VMSideExit*
js_ExecuteTree(JSContext* cx, Fragment* f, uintN& inlineCallCount,
VMSideExit** innermostNestedGuardp)
{
JS_ASSERT(f->code() && f->vmprivate);
@ -3869,67 +3873,75 @@ js_ExecuteTree(JSContext* cx, Fragment* f, uintN& inlineCallCount,
TreeInfo* ti = (TreeInfo*)f->vmprivate;
unsigned ngslots = ti->globalSlots->length();
uint16* gslots = ti->globalSlots->data();
unsigned globalFrameSize = STOBJ_NSLOTS(globalObj);
double* global = (double*)alloca((globalFrameSize+1) * sizeof(double));
double stack_buffer[MAX_NATIVE_STACK_SLOTS];
double* stack = stack_buffer;
InterpState state;
state.cx = cx;
state.globalObj = globalObj;
state.inlineCallCountp = &inlineCallCount;
state.innermostNestedGuardp = innermostNestedGuardp;
state.outermostTree = ti;
state.lastTreeExitGuard = NULL;
state.lastTreeCallGuard = NULL;
state.rpAtLastTreeCall = NULL;
/* Make sure the global object is sane. */
JS_ASSERT(!ngslots || (OBJ_SHAPE(JS_GetGlobalForObject(cx, cx->fp->scopeChain)) == ti->globalShape));
/* Make sure our caller replenished the double pool. */
JS_ASSERT(tm->reservedDoublePoolPtr >= tm->reservedDoublePool + MAX_NATIVE_STACK_SLOTS);
/* Reserve objects and stack space now, to make leaving the tree infallible. */
void *reserve;
void *stackMark = JS_ARENA_MARK(&cx->stackPool);
if (!js_ReserveObjects(cx, MAX_CALL_STACK_ENTRIES))
return NULL;
/* Setup the native global frame. */
unsigned globalFrameSize = STOBJ_NSLOTS(globalObj);
state.global = (double*)alloca((globalFrameSize+1) * sizeof(double));
/* Setup the native stack frame. */
double stack_buffer[MAX_NATIVE_STACK_SLOTS];
state.stackBase = stack_buffer;
double* entry_sp = &stack_buffer[ti->nativeStackBase/sizeof(double)];
state.sp = entry_sp;
state.eos = state.sp + MAX_NATIVE_STACK_SLOTS;
/* Setup the native call stack frame. */
FrameInfo* callstack_buffer[MAX_CALL_STACK_ENTRIES];
state.callstackBase = callstack_buffer;
state.rp = callstack_buffer;
state.eor = callstack_buffer + MAX_CALL_STACK_ENTRIES;
void *reserve;
state.stackMark = JS_ARENA_MARK(&cx->stackPool);
JS_ARENA_ALLOCATE(reserve, &cx->stackPool, MAX_INTERP_STACK_BYTES);
if (!reserve)
return NULL;
#ifdef DEBUG
bool jsframe_pop_blocks_set_on_entry = bool(cx->fp->flags & JSFRAME_POP_BLOCKS);
state.jsframe_pop_blocks_set_on_entry = bool(cx->fp->flags & JSFRAME_POP_BLOCKS);
memset(stack_buffer, 0xCD, sizeof(stack_buffer));
memset(global, 0xCD, (globalFrameSize+1)*sizeof(double));
#endif
memset(state.global, 0xCD, (globalFrameSize+1)*sizeof(double));
#endif
debug_only(*(uint64*)&global[globalFrameSize] = 0xdeadbeefdeadbeefLL;)
debug_only(*(uint64*)&state.global[globalFrameSize] = 0xdeadbeefdeadbeefLL;)
debug_only_v(printf("entering trace at %s:%u@%u, native stack slots: %u code: %p\n",
cx->fp->script->filename,
js_FramePCToLineNumber(cx, cx->fp),
FramePCOffset(cx->fp),
ti->maxNativeStackSlots,
f->code());)
JS_ASSERT(ti->nGlobalTypes() == ngslots);
if (ngslots)
BuildNativeGlobalFrame(cx, ngslots, gslots, ti->globalTypeMap(), global);
BuildNativeStackFrame(cx, 0/*callDepth*/, ti->typeMap.data(), stack);
double* entry_sp = &stack[ti->nativeStackBase/sizeof(double)];
FrameInfo* callstack_buffer[MAX_CALL_STACK_ENTRIES];
FrameInfo** callstack = callstack_buffer;
if (ngslots)
BuildNativeGlobalFrame(cx, ngslots, gslots, ti->globalTypeMap(), state.global);
BuildNativeStackFrame(cx, 0/*callDepth*/, ti->typeMap.data(), stack_buffer);
InterpState state;
state.sp = (void*)entry_sp;
state.eos = ((double*)state.sp) + MAX_NATIVE_STACK_SLOTS;
state.rp = callstack;
state.eor = callstack + MAX_CALL_STACK_ENTRIES;
state.gp = global;
state.cx = cx;
state.globalObj = globalObj;
state.lastTreeExitGuard = NULL;
state.lastTreeCallGuard = NULL;
state.rpAtLastTreeCall = NULL;
union { NIns *code; GuardRecord* (FASTCALL *func)(InterpState*, Fragment*); } u;
u.code = f->code();
#ifdef JS_JIT_SPEW
#if defined(NANOJIT_IA32) || (defined(NANOJIT_AMD64) && defined(__GNUC__))
uint64 start = rdtsc();
#endif
#ifdef EXECUTE_TREE_TIMER
state.startTime = rdtsc();
#endif
/* Set a flag that indicates to the runtime system that we are running in native code
@ -3938,7 +3950,8 @@ js_ExecuteTree(JSContext* cx, Fragment* f, uintN& inlineCallCount,
and eventually triggers the GC. */
JS_ASSERT(!tm->onTrace);
tm->onTrace = true;
cx->interpState = &state;
debug_only(fflush(NULL);)
GuardRecord* rec;
#if defined(JS_NO_FASTCALL) && defined(NANOJIT_IA32)
@ -3951,8 +3964,19 @@ js_ExecuteTree(JSContext* cx, Fragment* f, uintN& inlineCallCount,
AUDIT(traceTriggered);
JS_ASSERT(lr->exitType != LOOP_EXIT || !lr->calldepth);
tm->onTrace = false;
LeaveTree(state, lr);
return state.innermost;
}
static JS_FORCES_STACK void
LeaveTree(InterpState& state, VMSideExit* lr)
{
VOUCH_DOES_NOT_REQUIRE_STACK();
JSContext* cx = state.cx;
FrameInfo** callstack = state.callstackBase;
double* stack = state.stackBase;
/* Except if we find that this is a nested bailout, the guard the call returned is the
one we have to use to adjust pc and sp. */
@ -3985,15 +4009,54 @@ js_ExecuteTree(JSContext* cx, Fragment* f, uintN& inlineCallCount,
rp = (FrameInfo**)state.rpAtLastTreeCall;
}
innermost = state.lastTreeExitGuard;
if (innermostNestedGuardp)
*innermostNestedGuardp = nested;
if (state.innermostNestedGuardp)
*state.innermostNestedGuardp = nested;
JS_ASSERT(nested);
JS_ASSERT(nested->exitType == NESTED_EXIT);
JS_ASSERT(state.lastTreeExitGuard);
JS_ASSERT(state.lastTreeExitGuard->exitType != NESTED_EXIT);
}
JS_ARENA_RELEASE(&cx->stackPool, stackMark);
int32_t bs = cx->builtinStatus;
cx->builtinStatus = 0;
bool bailed = innermost->exitType == STATUS_EXIT && (bs & JSBUILTIN_BAILED);
if (bailed && !(bs & JSBUILTIN_ERROR)) {
/*
* Deep-bail case.
*
* A _FAIL native already called LeaveTree. We already reconstructed
* the interpreter stack, in pre-call state, with pc pointing to the
* CALL/APPLY op, for correctness. Then we continued in native code.
* The native succeeded (no exception or error). After it returned, the
* trace stored the return value (at the top of the native stack) and
* then immediately flunked the guard on cx->builtinStatus.
*
* Now LeaveTree has been called again from the tail of
* js_ExecuteTree. We are about to return to the interpreter. Adjust
* the top stack frame to resume on the next op.
*/
JS_ASSERT(*cx->fp->regs->pc == JSOP_CALL || *cx->fp->regs->pc == JSOP_APPLY);
uintN argc = GET_ARGC(cx->fp->regs->pc);
cx->fp->regs->pc += JSOP_CALL_LENGTH;
cx->fp->regs->sp -= argc + 1;
JS_ASSERT_IF(!cx->fp->imacpc,
cx->fp->slots + cx->fp->script->nfixed +
js_ReconstructStackDepth(cx, cx->fp->script, cx->fp->regs->pc) ==
cx->fp->regs->sp);
/*
* The return value was not available when we reconstructed the stack,
* but we have it now. Box it.
*/
uint8* typeMap = getStackTypeMap(innermost);
NativeToValue(cx,
cx->fp->regs->sp[-1],
typeMap[innermost->numStackSlots - 1],
(jsdouble *) state.sp + innermost->sp_adj / sizeof(jsdouble) - 1);
return;
}
JS_ARENA_RELEASE(&cx->stackPool, state.stackMark);
while (callstack < rp) {
/* Synthesize a stack frame and write out the values in it using the type map pointer
on the native call stack. */
@ -4009,7 +4072,7 @@ js_ExecuteTree(JSContext* cx, Fragment* f, uintN& inlineCallCount,
#endif
/* Keep track of the additional frames we put on the interpreter stack and the native
stack slots we consumed. */
++inlineCallCount;
++*state.inlineCallCountp;
++callstack;
stack += slots;
}
@ -4021,8 +4084,8 @@ js_ExecuteTree(JSContext* cx, Fragment* f, uintN& inlineCallCount,
unsigned calldepth_slots = 0;
for (unsigned n = 0; n < calldepth; ++n) {
calldepth_slots += js_SynthesizeFrame(cx, *callstack[n]);
++inlineCallCount;
#ifdef DEBUG
++*state.inlineCallCountp;
#ifdef DEBUG
JSStackFrame* fp = cx->fp;
debug_only_v(printf("synthesized shallow frame for %s:%u@%u\n",
fp->script->filename, js_FramePCToLineNumber(cx, fp),
@ -4037,7 +4100,7 @@ js_ExecuteTree(JSContext* cx, Fragment* f, uintN& inlineCallCount,
JSStackFrame* fp = cx->fp;
JS_ASSERT_IF(fp->flags & JSFRAME_POP_BLOCKS,
calldepth == 0 && jsframe_pop_blocks_set_on_entry);
calldepth == 0 && state.jsframe_pop_blocks_set_on_entry);
fp->blockChain = innermost->block;
/* If we are not exiting from an inlined frame the state->sp is spbase, otherwise spbase
@ -4048,8 +4111,8 @@ js_ExecuteTree(JSContext* cx, Fragment* f, uintN& inlineCallCount,
fp->slots + fp->script->nfixed +
js_ReconstructStackDepth(cx, fp->script, fp->regs->pc) == fp->regs->sp);
#if defined(JS_JIT_SPEW) && (defined(NANOJIT_IA32) || (defined(NANOJIT_AMD64) && defined(__GNUC__)))
uint64 cycles = rdtsc() - start;
#ifdef EXECUTE_TREE_TIMER
uint64 cycles = rdtsc() - state.startTime;
#elif defined(JS_JIT_SPEW)
uint64 cycles = 0;
#endif
@ -4070,19 +4133,21 @@ js_ExecuteTree(JSContext* cx, Fragment* f, uintN& inlineCallCount,
which we don't have any type information available in the side exit. We merge in this
information from the entry type-map. See also comment in the constructor of TraceRecorder
why this is always safe to do. */
TreeInfo* outermostTree = state.outermostTree;
uint16* gslots = outermostTree->globalSlots->data();
unsigned ngslots = outermostTree->globalSlots->length();
JS_ASSERT(ngslots == outermostTree->nGlobalTypes());
unsigned exit_gslots = innermost->numGlobalSlots;
JS_ASSERT(ngslots == ti->nGlobalTypes());
JS_ASSERT(ngslots >= exit_gslots);
JS_ASSERT(exit_gslots <= ngslots);
uint8* globalTypeMap = getGlobalTypeMap(innermost);
if (exit_gslots < ngslots)
mergeTypeMaps(&globalTypeMap, &exit_gslots, ti->globalTypeMap(), ngslots,
mergeTypeMaps(&globalTypeMap, &exit_gslots, outermostTree->globalTypeMap(), ngslots,
(uint8*)alloca(sizeof(uint8) * ngslots));
JS_ASSERT(exit_gslots == ti->nGlobalTypes());
JS_ASSERT(exit_gslots == outermostTree->globalSlots->length());
/* write back interned globals */
FlushNativeGlobalFrame(cx, exit_gslots, gslots, globalTypeMap, global);
JS_ASSERT_IF(ngslots != 0, globalFrameSize == STOBJ_NSLOTS(globalObj));
JS_ASSERT(*(uint64*)&global[globalFrameSize] == 0xdeadbeefdeadbeefLL);
FlushNativeGlobalFrame(cx, exit_gslots, gslots, globalTypeMap, state.global);
JS_ASSERT(*(uint64*)&state.global[STOBJ_NSLOTS(state.globalObj)] == 0xdeadbeefdeadbeefLL);
/* write back native stack frame */
#ifdef DEBUG
@ -4107,7 +4172,7 @@ js_ExecuteTree(JSContext* cx, Fragment* f, uintN& inlineCallCount,
AUDIT(timeoutIntoInterpreter);
#endif
return innermost;
state.innermost = innermost;
}
JS_REQUIRES_STACK bool
@ -4551,14 +4616,15 @@ JS_FORCES_STACK JSStackFrame *
js_GetTopStackFrame(JSContext *cx)
{
if (JS_ON_TRACE(cx)) {
/*
* TODO: If executing a tree, synthesize stack frames and bail off
* trace. See bug 462027.
*/
debug_only_v(printf("Internal error: getting top stack frame on trace.\n"));
#ifdef DEBUG_jason
JS_ASSERT(0);
/* It's a bug if a non-FAIL_STATUS builtin gets here. */
JS_ASSERT(cx->bailExit);
JS_TRACE_MONITOR(cx).onTrace = false;
LeaveTree(*cx->interpState, cx->bailExit);
#ifdef DEBUG
cx->bailExit = NULL;
#endif
cx->builtinStatus |= JSBUILTIN_BAILED;
}
return cx->fp;
}
@ -6599,6 +6665,21 @@ success:
JS_ASSERT(args[0] != (LIns *)0xcdcdcdcd);
#endif
if (JSTN_ERRTYPE(known) == FAIL_STATUS) {
// This needs to capture the pre-call state of the stack. So do not set
// pendingTraceableNative before taking this snapshot.
JS_ASSERT(!pendingTraceableNative);
// Take snapshot for deep LeaveTree and store it in cx->bailExit.
LIns* rec_ins = snapshot(DEEP_BAIL_EXIT);
GuardRecord* rec = (GuardRecord *) rec_ins->payload();
JS_ASSERT(rec->exit);
lir->insStorei(INS_CONSTPTR(rec->exit), cx_ins, offsetof(JSContext, bailExit));
// Tell nanojit not to discard or defer stack writes before this call.
lir->insGuard(LIR_xbarrier, rec_ins, rec_ins);
}
LIns* res_ins = lir->insCall(known->builtin, args);
if (!constructing)
rval_ins = res_ins;
@ -6617,6 +6698,9 @@ success:
case FAIL_VOID:
guard(false, lir->ins2i(LIR_eq, res_ins, JSVAL_TO_BOOLEAN(JSVAL_VOID)), OOM_EXIT);
break;
case FAIL_COOKIE:
guard(false, lir->ins2(LIR_eq, res_ins, INS_CONST(JSVAL_ERROR_COOKIE)), OOM_EXIT);
break;
default:;
}
set(&fval, res_ins);
@ -6900,6 +6984,7 @@ GetProperty_tn(JSContext *cx, jsbytecode *pc, JSObject *obj, JSString *name)
BEGIN_PC_HINT(pc);
if (!js_ValueToStringId(cx, STRING_TO_JSVAL(name), &id) ||
!OBJ_GET_PROPERTY(cx, obj, id, &v)) {
cx->builtinStatus |= JSBUILTIN_ERROR;
v = JSVAL_ERROR_COOKIE;
}
END_PC_HINT();
@ -6927,11 +7012,15 @@ GetElement_tn(JSContext* cx, jsbytecode *pc, JSObject* obj, int32 index)
jsval v;
jsid id;
if (!js_Int32ToId(cx, index, &id))
if (!js_Int32ToId(cx, index, &id)) {
cx->builtinStatus |= JSBUILTIN_ERROR;
return JSVAL_ERROR_COOKIE;
}
BEGIN_PC_HINT(pc);
if (!OBJ_GET_PROPERTY(cx, obj, id, &v))
if (!OBJ_GET_PROPERTY(cx, obj, id, &v)) {
cx->builtinStatus |= JSBUILTIN_ERROR;
v = JSVAL_ERROR_COOKIE;
}
END_PC_HINT();
return v;
}
@ -7039,9 +7128,9 @@ SetProperty_tn(JSContext* cx, JSObject* obj, JSString* idstr, jsval v)
if (!js_ValueToStringId(cx, STRING_TO_JSVAL(idstr), &id) ||
!OBJ_SET_PROPERTY(cx, obj, id, &v)) {
return JSVAL_TO_BOOLEAN(JSVAL_VOID);
cx->builtinStatus |= JSBUILTIN_ERROR;
}
return JSVAL_TRUE;
return JSVAL_TO_BOOLEAN(JSVAL_VOID);
}
static JSBool
@ -7068,8 +7157,8 @@ SetElement_tn(JSContext* cx, JSObject* obj, int32 index, jsval v)
jsid id;
if (!js_Int32ToId(cx, index, &id) || !OBJ_SET_PROPERTY(cx, obj, id, &v))
return JSVAL_TO_BOOLEAN(JSVAL_VOID);
return JSVAL_TRUE;
cx->builtinStatus |= JSBUILTIN_ERROR;
return JSVAL_TO_BOOLEAN(JSVAL_VOID);
}
JS_DEFINE_TRCINFO_1(SetProperty,
@ -7369,18 +7458,31 @@ JS_REQUIRES_STACK bool
TraceRecorder::record_FastNativeCallComplete()
{
JS_ASSERT(pendingTraceableNative);
/* At this point the generated code has already called the native function
and we can no longer fail back to the original pc location (JSOP_CALL)
because that would cause the interpreter to re-execute the native
function, which might have side effects.
Instead, snapshot(), which is invoked from unbox_jsval(), will see that
we are currently parked on a traceable native's JSOP_CALL instruction,
and it will advance the pc to restore by the length of the current
opcode, and indicate in the type map that the element on top of the
stack is a boxed value which doesn't need to be boxed if the type guard
generated by unbox_jsval() fails. */
Instead, snapshot(), which is invoked from unbox_jsval() below, will see
that we are currently parked on a traceable native's JSOP_CALL
instruction, and it will advance the pc to restore by the length of the
current opcode. If the native's return type is jsval, snapshot() will
also indicate in the type map that the element on top of the stack is a
boxed value which doesn't need to be boxed if the type guard generated
by unbox_jsval() fails. */
if (JSTN_ERRTYPE(pendingTraceableNative) == FAIL_STATUS) {
#ifdef DEBUG
// Keep cx->bailExit null when it's invalid.
lir->insStorei(INS_CONSTPTR(NULL), cx_ins, (int) offsetof(JSContext, bailExit));
#endif
guard(true,
lir->ins_eq0(
lir->insLoad(LIR_ld, cx_ins, (int) offsetof(JSContext, builtinStatus))),
STATUS_EXIT);
}
JS_ASSERT(*cx->fp->regs->pc == JSOP_CALL ||
*cx->fp->regs->pc == JSOP_APPLY);
@ -7388,16 +7490,13 @@ TraceRecorder::record_FastNativeCallComplete()
LIns* v_ins = get(&v);
bool ok = true;
switch (JSTN_ERRTYPE(pendingTraceableNative)) {
case FAIL_JSVAL:
if (pendingTraceableNative->flags & JSTN_UNBOX_AFTER) {
unbox_jsval(v, v_ins);
set(&v, v_ins);
break;
case FAIL_NEG:
} else if (JSTN_ERRTYPE(pendingTraceableNative) == FAIL_NEG) {
/* Already added i2f in functionCall. */
JS_ASSERT(JSVAL_IS_NUMBER(v));
break;
default:
} else {
/* Convert the result to double if the builtin returns int32. */
if (JSVAL_IS_NUMBER(v) &&
(pendingTraceableNative->builtin->_argtypes & 3) == nanojit::ARGSIZE_LO) {
@ -8967,8 +9066,14 @@ static JSObject* FASTCALL
ObjectToIterator_tn(JSContext* cx, JSObject *obj, int32 flags)
{
jsval v = OBJECT_TO_JSVAL(obj);
if (!js_ValueToIterator(cx, flags, &v))
if (!js_ValueToIterator(cx, flags, &v)) {
cx->builtinStatus |= JSBUILTIN_ERROR;
return NULL;
}
if (OBJ_GET_CLASS(cx, JSVAL_TO_OBJECT(v)) == &js_GeneratorClass) {
js_LeaveTrace(cx);
return NULL;
}
return JSVAL_TO_OBJECT(v);
}
@ -8982,15 +9087,17 @@ static jsval FASTCALL
CallIteratorNext_tn(JSContext* cx, JSObject* iterobj)
{
jsval v;
if (!js_CallIteratorNext(cx, iterobj, &v))
if (!js_CallIteratorNext(cx, iterobj, &v)) {
cx->builtinStatus |= JSBUILTIN_ERROR;
return JSVAL_ERROR_COOKIE;
}
return v;
}
JS_DEFINE_TRCINFO_1(ObjectToIterator,
(3, (static, OBJECT_FAIL_NULL, ObjectToIterator_tn, CONTEXT, THIS, INT32, 0, 0)))
(3, (static, OBJECT_FAIL, ObjectToIterator_tn, CONTEXT, THIS, INT32, 0, 0)))
JS_DEFINE_TRCINFO_1(CallIteratorNext,
(2, (static, JSVAL_FAIL, CallIteratorNext_tn, CONTEXT, THIS, 0, 0)))
static const struct BuiltinFunctionInfo {
JSTraceableNative *tn;

View File

@ -213,7 +213,9 @@ enum ExitType {
OOM_EXIT,
OVERFLOW_EXIT,
UNSTABLE_LOOP_EXIT,
TIMEOUT_EXIT
TIMEOUT_EXIT,
DEEP_BAIL_EXIT,
STATUS_EXIT
};
struct VMSideExit : public nanojit::SideExit
@ -244,20 +246,18 @@ static inline uint8* getFullTypeMap(nanojit::SideExit* exit)
return getStackTypeMap(exit);
}
struct InterpState
{
void* sp; /* native stack pointer, stack[0] is spbase[0] */
void* rp; /* call stack pointer */
void* gp; /* global frame pointer */
JSContext *cx; /* current VM context handle */
void* eos; /* first unusable word after the native stack */
void* eor; /* first unusable word after the call stack */
VMSideExit* lastTreeExitGuard; /* guard we exited on during a tree call */
VMSideExit* lastTreeCallGuard; /* guard we want to grow from if the tree
call exit guard mismatched */
void* rpAtLastTreeCall; /* value of rp at innermost tree call guard */
JSObject* globalObj; /* pointer to the global object */
};
struct FrameInfo {
JSObject* callee; // callee function object
JSObject* block; // caller block chain head
intptr_t ip_adj; // caller script-based pc index and imacro pc
union {
struct {
uint16 spdist; // distance from fp->slots to fp->regs->sp at JSOP_CALL
uint16 argc; // actual argument count, may be < fun->nargs
} s;
uint32 word; // for spdist/argc LIR store in record_JSOP_CALL
};
};
struct UnstableExit
{
@ -309,18 +309,37 @@ public:
}
};
struct FrameInfo {
JSObject* callee; // callee function object
JSObject* block; // caller block chain head
intptr_t ip_adj; // caller script-based pc index and imacro pc
union {
struct {
uint16 spdist; // distance from fp->slots to fp->regs->sp at JSOP_CALL
uint16 argc; // actual argument count, may be < fun->nargs
} s;
uint32 word; // for spdist/argc LIR store in record_JSOP_CALL
};
};
#if defined(JS_JIT_SPEW) && (defined(NANOJIT_IA32) || (defined(NANOJIT_AMD64) && defined(__GNUC__)))
# define EXECUTE_TREE_TIMER
#endif
struct InterpState
{
double *sp; // native stack pointer, stack[0] is spbase[0]
double *global; // global frame pointer
void *rp; // call stack pointer
JSContext *cx; // current VM context handle
double *eos; // first unusable word after the native stack
void *eor; // first unusable word after the call stack
VMSideExit* lastTreeExitGuard; // guard we exited on during a tree call
VMSideExit* lastTreeCallGuard; // guard we want to grow from if the tree
// call exit guard mismatched
void* rpAtLastTreeCall; // value of rp at innermost tree call guard
TreeInfo* outermostTree; // the outermost tree we initially invoked
JSObject* globalObj; // pointer to the global object
double* stackBase; // native stack base
FrameInfo** callstackBase; // call stack base
uintN* inlineCallCountp; // inline call count counter
VMSideExit** innermostNestedGuardp;
void* stackMark;
VMSideExit* innermost;
#ifdef EXECUTE_TREE_TIMER
uint64 startTime;
#endif
#ifdef DEBUG
bool jsframe_pop_blocks_set_on_entry;
#endif
};
enum JSMonitorRecordingStatus {
JSMRS_CONTINUE,

View File

@ -976,8 +976,10 @@ static jsval JS_FASTCALL
Print_tn(JSContext *cx, JSString *str)
{
char *bytes = JS_EncodeString(cx, str);
if (!bytes)
return JSVAL_ERROR_COOKIE;
if (!bytes) {
cx->builtinStatus |= JSBUILTIN_ERROR;
return JSVAL_VOID;
}
fprintf(gOutFile, "%s\n", bytes);
JS_free(cx, bytes);
fflush(gOutFile);

View File

@ -2094,6 +2094,84 @@ function testArrayPushPop() {
testArrayPushPop.expected = "55,45";
test(testArrayPushPop);
function testSlowArrayPop() {
var a = [];
for (var i = 0; i < RUNLOOP; i++)
a[i] = [0];
a[RUNLOOP-1].__defineGetter__("0", function () { return 'xyzzy'; });
var last;
for (var i = 0; i < RUNLOOP; i++)
last = a[i].pop(); // reenters interpreter in getter
return last;
}
testSlowArrayPop.expected = 'xyzzy';
test(testSlowArrayPop);
// Same thing but it needs to reconstruct multiple stack frames (so,
// multiple functions called inside the loop)
function testSlowArrayPopMultiFrame() {
var a = [];
for (var i = 0; i < RUNLOOP; i++)
a[i] = [0];
a[RUNLOOP-1].__defineGetter__("0", function () { return 23; });
function child(a, i) {
return a[i].pop(); // reenters interpreter in getter
}
function parent(a, i) {
return child(a, i);
}
function gramps(a, i) {
return parent(a, i);
}
var last;
for (var i = 0; i < RUNLOOP; i++)
last = gramps(a, i);
return last;
}
testSlowArrayPopMultiFrame.expected = 23;
test(testSlowArrayPopMultiFrame);
// Same thing but nested trees, each reconstructing one or more stack frames
// (so, several functions with loops, such that the loops end up being
// nested though they are not lexically nested)
function testSlowArrayPopNestedTrees() {
var a = [];
for (var i = 0; i < RUNLOOP; i++)
a[i] = [0];
a[RUNLOOP-1].__defineGetter__("0", function () { return 3.14159 });
function child(a, i, j, k) {
var last = 2.71828;
for (var l = 0; l < RUNLOOP; l++)
if (i == RUNLOOP-1 && j == RUNLOOP-1 && k == RUNLOOP-1)
last = a[l].pop(); // reenters interpreter in getter
return last;
}
function parent(a, i, j) {
var last;
for (var k = 0; k < RUNLOOP; k++)
last = child(a, i, j, k);
return last;
}
function gramps(a, i) {
var last;
for (var j = 0; j < RUNLOOP; j++)
last = parent(a, i, j);
return last;
}
var last;
for (var i = 0; i < RUNLOOP; i++)
last = gramps(a, i);
return last;
}
testSlowArrayPopNestedTrees.expected = 3.14159;
test(testSlowArrayPopNestedTrees);
function testResumeOp() {
var a = [1,"2",3,"4",5,"6",7,"8",9,"10",11,"12",13,"14",15,"16"];
var x = "";
@ -4124,7 +4202,6 @@ function testInterpreterReentry5() {
}
test(testInterpreterReentry5);
/* // These tests should pass but currently crash, pending bug 462027.
function testInterpreterReentry6() {
var obj = {a:1, b:1, c:1, d:1, set e(x) { this._e = x; }};
for (var p in obj)
@ -4146,7 +4223,6 @@ function testInterpreterReentry7() {
}
testInterpreterReentry7.expected = "grue bleen";
test(testInterpreterReentry7);
*/
/*****************************************************************************
* *