[INFER] Remove on-stack recompilation; allow removed on-stack JIT frames to rejoin the interpreter. Bug 650163.

Brian Hackett 2011-05-09 07:12:47 -07:00
parent a2e669df86
commit c0de8c6bb8
62 changed files with 2213 additions and 2516 deletions
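
Most of the 62 files apply one mechanical refactor: type-inference hooks that used to return a fallibility bool become void, and out-of-memory inside them is recorded on the compartment with setPendingNukeTypes rather than propagated through every caller. A minimal caller-side sketch of the pattern (names from the diff, surrounding code simplified):

    /* Before: every call site had to check and unwind. */
    if (!cx->markTypeObjectUnknownProperties(type))
        return NULL;

    /* After: the call is infallible; on OOM the compartment is flagged
       for a later nukeTypes() instead of failing this operation. */
    cx->markTypeObjectUnknownProperties(type);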

View File

@ -117,8 +117,8 @@ GlobalObject::create(JSContext *cx, Class *clasp)
types::TypeObject *type = cx->newTypeObject("Global", NULL);
if (!type || !obj->setTypeAndUniqueShape(cx, type))
return NULL;
if (clasp->ext.equality && !cx->markTypeObjectHasSpecialEquality(type))
return NULL;
if (clasp->ext.equality)
cx->markTypeObjectHasSpecialEquality(type);
type->singleton = obj;
GlobalObject *globalObj = obj->asGlobal();
@ -132,9 +132,7 @@ GlobalObject::create(JSContext *cx, Class *clasp)
globalObj->setSlot(REGEXP_STATICS, ObjectValue(*res));
globalObj->setFlags(0);
if (!cx->addTypeProperty(type, js_undefined_str, UndefinedValue()))
return NULL;
cx->addTypeProperty(type, js_undefined_str, UndefinedValue());
return globalObj;
}

View File

@ -1564,8 +1564,8 @@ public:
void movq_mr(int offset, RegisterID base, RegisterID index, int scale, RegisterID dst)
{
js::JaegerSpew(js::JSpew_Insns,
IPFX "movq %s0x%x(%s), %s\n", MAYBE_PAD,
PRETTY_PRINT_OFFSET(offset), nameIReg(base), nameIReg(8,dst));
IPFX "movq %d(%s,%s,%d), %s\n", MAYBE_PAD,
offset, nameIReg(base), nameIReg(index), scale, nameIReg(8,dst));
m_formatter.oneByteOp64(OP_MOV_GvEv, dst, base, index, scale, offset);
}
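
This hunk only corrects debug spew: the old format string printed a base+index load as if it were a plain base+displacement operand, dropping the index and scale entirely. With the new string, a load with (for example) offset 16, base %rsi, index %rax and scale 8 spews as movq 16(%rsi,%rax,8), %rdx — the usual AT&T displacement(base,index,scale) form matching the instruction actually emitted.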

View File

@ -0,0 +1,74 @@
// Test exotic ways of triggering recompilation.
// Lowered native call.
var x = 0;
var y = true;
for (var i = 0; i < 20; i++) {
x += Array.map.apply(undefined, [[0], function(x) { if (i == 10) eval("y = 20"); return 1; }])[0];
}
assertEq(x, 20);
assertEq(y, 20);
// Recompilation triggered by local function.
var o = {};
function what(q) {
function inner() { return q; }
o.f = inner;
var a = o.f();
return a;
}
for (var i = 0; i < 10; i++) {
var a = what(i);
assertEq(a, i);
}
// Lowered scripted call to apply returning code pointer.
var global = 3;
function foo(x, y) {
var q = x.apply(null, y);
if (q != 10)
assertEq(global, true);
}
foo(function(a) { global = a; return 10; }, [1]);
foo(function(a) { global = a; return 10; }, [1]);
foo(function(a) { global = a; return 10; }, [1]);
assertEq(global, 1);
foo(function(a) { global = a; return 3; }, [true]);
assertEq(global, true);
// Lowered scripted call returning NULL.
var oglobal = 3;
function xfoo(x, y) {
var q = x.apply(null, y);
if (q != 10)
assertEq(oglobal, true);
}
xfoo(function(a) { oglobal = a; return 10; }, [1]);
xfoo(function(a) { oglobal = a; return 10; }, [1]);
xfoo(function(a) { oglobal = a; return 10; }, [1]);
assertEq(oglobal, 1);
xfoo(function(a) { <x></x>; oglobal = a; return 3; }, [true]);
assertEq(oglobal, true);
// Recompilation out of SplatApplyArgs.
weirdarray = [,,1,2,3];
Object.defineProperty(weirdarray, 0, {get: function() { vglobal = 'true'; }});
var vglobal = 3;
function yfoo(x, y) {
var q = x.apply(null, y);
if (q != 10)
assertEq(vglobal, 'true');
else
assertEq(vglobal, 3);
}
yfoo(function(a) { return 10; }, [1]);
yfoo(function(a) { return 10; }, [1]);
yfoo(function(a) { return 10; }, [1]);
yfoo(function() { return 0; }, weirdarray);
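
A usage note, assuming the 2011-era SpiderMonkey shell flags (they are not part of this diff): the recompilation paths this test exercises are only reachable with the method JIT and type inference enabled, i.e. running the shell as js -m -n on the test file, which is how the jit-test harness drives it.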

View File

@ -13,7 +13,7 @@ function assertObjectsEqual(obj1, obj2) {
}
function forName(obj) {
assertJit();
// assertJit(); // :XXX: assertJit is a CALLNAME here which can cause a recompilation.
eval('');
var r = { };
for (x in obj)

View File

@ -210,6 +210,31 @@ GetBytecodeLength(jsbytecode *pc)
return js_GetVariableBytecodeLength(pc);
}
static inline unsigned
GetDefCount(JSScript *script, unsigned offset)
{
JS_ASSERT(offset < script->length);
jsbytecode *pc = script->code + offset;
if (js_CodeSpec[*pc].ndefs == -1)
return js_GetEnterBlockStackDefs(NULL, script, pc);
/*
* Add an extra pushed value for OR/AND opcodes, so that they are included
* in the pushed array of stack values for type inference.
*/
switch (JSOp(*pc)) {
case JSOP_OR:
case JSOP_ORX:
case JSOP_AND:
case JSOP_ANDX:
return 1;
case JSOP_FILTER:
return 2;
default:
return js_CodeSpec[*pc].ndefs;
}
}
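
GetDefCount pairs with GetUseCount and GetBytecodeLength; a hypothetical walk over a script's bytecode (error and trap handling omitted) looks like:

    unsigned offset = 0;
    while (offset < script->length) {
        jsbytecode *pc = script->code + offset;
        unsigned nuses = GetUseCount(script, offset);  /* values popped */
        unsigned ndefs = GetDefCount(script, offset);  /* values pushed */
        /* ... model the stack effect of this op ... */
        offset += GetBytecodeLength(pc);
    }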
static inline unsigned
GetUseCount(JSScript *script, unsigned offset)
{
@ -269,31 +294,6 @@ ExtendedUse(jsbytecode *pc)
}
}
static inline unsigned
GetDefCount(JSScript *script, unsigned offset)
{
JS_ASSERT(offset < script->length);
jsbytecode *pc = script->code + offset;
if (js_CodeSpec[*pc].ndefs == -1)
return js_GetEnterBlockStackDefs(NULL, script, pc);
/*
* Add an extra pushed value for OR/AND opcodes, so that they are included
* in the pushed array of stack values for type inference.
*/
switch (JSOp(*pc)) {
case JSOP_OR:
case JSOP_ORX:
case JSOP_AND:
case JSOP_ANDX:
return 1;
case JSOP_FILTER:
return 2;
default:
return js_CodeSpec[*pc].ndefs;
}
}
static inline ptrdiff_t
GetJumpOffset(jsbytecode *pc, jsbytecode *pc2)
{
@ -354,6 +354,14 @@ struct UntrapOpcode
*pc = JS_GetTrapOpcode(cx, script, pc);
}
void retrap()
{
if (trap) {
*pc = JSOP_TRAP;
trap = false;
}
}
~UntrapOpcode()
{
if (trap)
@ -901,6 +909,16 @@ class ScriptAnalysis
}
bool jumpTarget(const jsbytecode *pc) { return jumpTarget(pc - script->code); }
bool popGuaranteed(jsbytecode *pc) {
jsbytecode *next = pc + GetBytecodeLength(pc);
return JSOp(*next) == JSOP_POP && !jumpTarget(next);
}
bool incrementInitialValueObserved(jsbytecode *pc) {
const JSCodeSpec *cs = &js_CodeSpec[*pc];
return (cs->format & JOF_POST) && !popGuaranteed(pc);
}
const SSAValue &poppedValue(uint32 offset, uint32 which) {
JS_ASSERT(offset < script->length);
JS_ASSERT_IF(script->code[offset] != JSOP_TRAP,

View File

@ -3070,11 +3070,10 @@ JS_NewObject(JSContext *cx, JSClass *jsclasp, JSObject *proto, JSObject *parent)
JSObject *obj = NewNonFunction<WithProto::Class>(cx, clasp, proto, parent);
if (obj) {
if (clasp->ext.equality && !cx->markTypeObjectHasSpecialEquality(obj->getType()))
return NULL;
if (clasp->ext.equality)
cx->markTypeObjectHasSpecialEquality(obj->getType());
obj->syncSpecialEquality();
if (!cx->markTypeObjectUnknownProperties(obj->getType()))
return NULL;
cx->markTypeObjectUnknownProperties(obj->getType());
}
JS_ASSERT_IF(obj, obj->getParent());
@ -3089,9 +3088,11 @@ JS_NewObjectWithUniqueType(JSContext *cx, JSClass *clasp, JSObject *proto, JSObj
return NULL;
types::TypeObject *type = cx->newTypeObject("Unique", proto);
if (obj->hasSpecialEquality() && !cx->markTypeObjectHasSpecialEquality(type))
if (!type)
return NULL;
if (!type || !obj->setTypeAndUniqueShape(cx, type))
if (obj->hasSpecialEquality())
cx->markTypeObjectHasSpecialEquality(type);
if (!obj->setTypeAndUniqueShape(cx, type))
return NULL;
type->singleton = obj;
@ -3115,8 +3116,7 @@ JS_NewObjectWithGivenProto(JSContext *cx, JSClass *jsclasp, JSObject *proto, JSO
JSObject *obj = NewNonFunction<WithProto::Given>(cx, clasp, proto, parent);
if (obj) {
obj->syncSpecialEquality();
if (!cx->markTypeObjectUnknownProperties(obj->getType()))
return NULL;
cx->markTypeObjectUnknownProperties(obj->getType());
}
return obj;
}
@ -3199,16 +3199,16 @@ JS_ConstructObjectWithArguments(JSContext *cx, JSClass *jsclasp, JSObject *proto
return js_ConstructObject(cx, clasp, proto, parent, argc, Valueify(argv));
}
JS_PUBLIC_API(JSBool)
JS_PUBLIC_API(void)
JS_AddTypeProperty(JSContext *cx, JSObject *obj, const char *name, jsval value)
{
return cx->addTypeProperty(obj->getType(), name, Valueify(value));
cx->addTypeProperty(obj->getType(), name, Valueify(value));
}
JS_PUBLIC_API(JSBool)
JS_PUBLIC_API(void)
JS_AddTypePropertyById(JSContext *cx, JSObject *obj, jsid id, jsval value)
{
return cx->addTypePropertyId(obj->getType(), id, Valueify(value));
cx->addTypePropertyId(obj->getType(), id, Valueify(value));
}
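
For embedders, JS_AddTypeProperty and JS_AddTypePropertyById change from JSBool to void, so the hint can no longer fail. A before/after sketch (hypothetical embedder code):

    /* Before: */
    if (!JS_AddTypeProperty(cx, obj, "length", INT_TO_JSVAL(0)))
        return JS_FALSE;

    /* After: advisory and infallible. */
    JS_AddTypeProperty(cx, obj, "length", INT_TO_JSVAL(0));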
static JSBool
@ -3479,8 +3479,9 @@ DefineUCProperty(JSContext *cx, JSObject *obj, const jschar *name, size_t namele
uintN flags, intN tinyid)
{
JSAtom *atom = js_AtomizeChars(cx, name, AUTO_NAMELEN(name, namelen), 0);
if (!atom || !cx->addTypePropertyId(obj->getType(), ATOM_TO_JSID(atom), value))
if (!atom)
return false;
cx->addTypePropertyId(obj->getType(), ATOM_TO_JSID(atom), value);
return DefinePropertyById(cx, obj, ATOM_TO_JSID(atom), value, getter, setter, attrs,
flags, tinyid);
}
@ -3525,8 +3526,7 @@ JS_DefineObject(JSContext *cx, JSObject *obj, const char *name, JSClass *jsclasp
if (!nobj)
return NULL;
if (!cx->addTypeProperty(obj->getType(), name, ObjectValue(*nobj)))
return NULL;
cx->addTypeProperty(obj->getType(), name, ObjectValue(*nobj));
nobj->syncSpecialEquality();
if (!DefineProperty(cx, obj, name, ObjectValue(*nobj), NULL, NULL, attrs, 0, 0))
@ -3543,8 +3543,7 @@ JS_DefineConstDoubles(JSContext *cx, JSObject *obj, JSConstDoubleSpec *cds)
CHECK_REQUEST(cx);
for (ok = JS_TRUE; cds->name; cds++) {
if (!cx->addTypeProperty(obj->getType(), cds->name, TYPE_DOUBLE))
return false;
cx->addTypeProperty(obj->getType(), cds->name, TYPE_DOUBLE);
Value value = DoubleValue(cds->dval);
attrs = cds->flags;
if (!attrs)
@ -3574,9 +3573,7 @@ JS_DefineProperties(JSContext *cx, JSObject *obj, JSPropertySpec *ps)
type = TYPE_STRING;
JS_ASSERT(type);
ok = cx->addTypeProperty(obj->getType(), ps->name, type);
if (!ok)
break;
cx->addTypeProperty(obj->getType(), ps->name, type);
}
ok = DefineProperty(cx, obj, ps->name, UndefinedValue(),
@ -3619,8 +3616,7 @@ JS_AliasProperty(JSContext *cx, JSObject *obj, const char *name, const char *ali
ok = JS_FALSE;
} else {
/* Alias the properties within the type information for the object. */
if (!cx->aliasTypeProperties(obj->getType(), ATOM_TO_JSID(nameAtom), ATOM_TO_JSID(aliasAtom)))
return JS_FALSE;
cx->aliasTypeProperties(obj->getType(), ATOM_TO_JSID(nameAtom), ATOM_TO_JSID(aliasAtom));
shape = (Shape *)prop;
ok = (js_AddNativeProperty(cx, obj, ATOM_TO_JSID(aliasAtom),
@ -3894,8 +3890,7 @@ JS_SetPropertyById(JSContext *cx, JSObject *obj, jsid id, jsval *vp)
assertSameCompartment(cx, obj, id);
JSAutoResolveFlags rf(cx, JSRESOLVE_QUALIFIED | JSRESOLVE_ASSIGNING);
if (!cx->addTypePropertyId(obj->getType(), id, Valueify(*vp)))
return false;
cx->addTypePropertyId(obj->getType(), id, Valueify(*vp));
return obj->setProperty(cx, id, Valueify(vp), false);
}
@ -4379,8 +4374,7 @@ JS_CloneFunctionObject(JSContext *cx, JSObject *funobj, JSObject *parent)
Value v;
if (!obj->getProperty(cx, r.front().id, &v))
return NULL;
if (!fun->script()->typeSetUpvar(cx, i, v))
return NULL;
fun->script()->typeSetUpvar(cx, i, v);
clone->getFlatClosureUpvars()[i] = v;
}

View File

@ -2287,10 +2287,10 @@ JS_DefineOwnProperty(JSContext *cx, JSObject *obj, jsid id, jsval descriptor, JS
/* Add properties to the type information for obj. */
extern JS_PUBLIC_API(JSBool)
extern JS_PUBLIC_API(void)
JS_AddTypeProperty(JSContext *cx, JSObject *obj, const char *name, jsval value);
extern JS_PUBLIC_API(JSBool)
extern JS_PUBLIC_API(void)
JS_AddTypePropertyById(JSContext *cx, JSObject *obj, jsid id, jsval value);
/*

View File

@ -555,8 +555,7 @@ DeleteArrayElement(JSContext *cx, JSObject *obj, jsdouble index, bool strict)
if (index <= jsuint(-1)) {
jsuint idx = jsuint(index);
if (idx < obj->getDenseArrayInitializedLength()) {
if (!obj->setDenseArrayNotPacked(cx))
return -1;
obj->setDenseArrayNotPacked(cx);
obj->setDenseArrayElement(idx, MagicValue(JS_ARRAY_HOLE));
if (!js_SuppressDeletedIndexProperties(cx, obj, idx, idx+1))
return -1;
@ -606,8 +605,8 @@ js_SetLengthProperty(JSContext *cx, JSObject *obj, jsdouble length)
* Arrays are already known to have lengths (if the length overflows, it will
* be caught by setArrayLength).
*/
if (!obj->isArray() && !cx->addTypePropertyId(obj->getType(), id, v))
return false;
if (!obj->isArray())
cx->addTypePropertyId(obj->getType(), id, v);
/* We don't support read-only array length yet. */
return obj->setProperty(cx, id, &v, false);
@ -974,8 +973,7 @@ array_deleteProperty(JSContext *cx, JSObject *obj, jsid id, Value *rval, JSBool
}
if (js_IdIsIndex(id, &i) && i < obj->getDenseArrayInitializedLength()) {
if (!obj->setDenseArrayNotPacked(cx))
return false;
obj->setDenseArrayNotPacked(cx);
obj->setDenseArrayElement(i, MagicValue(JS_ARRAY_HOLE));
}
@ -1080,9 +1078,8 @@ JSObject::makeDenseArraySlow(JSContext *cx)
{
JS_ASSERT(isDenseArray());
if (!cx->markTypeArrayNotPacked(getType(), true))
return false;
JS_ALWAYS_TRUE(setDenseArrayNotPacked(cx));
cx->markTypeArrayNotPacked(getType(), true);
setDenseArrayNotPacked(cx);
/*
* Save old map now, before calling InitScopeForObject. We'll have to undo
@ -1611,8 +1608,8 @@ InitArrayObject(JSContext *cx, JSObject *obj, jsuint length, const Value *vector
obj->setDenseArrayElement(i, vector[i]);
hole |= vector[i].isMagic(JS_ARRAY_HOLE);
}
if (hole && !obj->setDenseArrayNotPacked(cx))
return false;
if (hole)
obj->setDenseArrayNotPacked(cx);
return true;
}
@ -1683,9 +1680,8 @@ array_reverse(JSContext *cx, uintN argc, Value *vp)
jsuint initlen = obj->getDenseArrayInitializedLength();
if (len > initlen) {
JS_ASSERT(cx->typeInferenceEnabled());
if (!obj->setDenseArrayNotPacked(cx))
return false;
ClearValueRange(obj->getDenseArrayElements() + initlen, len - initlen, true);
obj->setDenseArrayNotPacked(cx);
obj->setDenseArrayInitializedLength(len);
}
@ -2208,8 +2204,8 @@ array_push_slowly(JSContext *cx, JSObject *obj, uintN argc, Value *argv, Value *
rval->setNumber(newlength);
/* watch for length overflowing to a double. */
if (!rval->isInt32() && !cx->markTypeCallerOverflow())
return false;
if (!rval->isInt32())
cx->markTypeCallerOverflow();
return js_SetLengthProperty(cx, obj, newlength);
}
@ -2227,8 +2223,7 @@ array_push1_dense(JSContext* cx, JSObject* obj, const Value &v, Value *rval)
break;
}
if (cx->typeInferenceEnabled() && !cx->addTypePropertyId(obj->getType(), JSID_VOID, v))
return false;
cx->addTypePropertyId(obj->getType(), JSID_VOID, v);
obj->setDenseArrayLength(length + 1);
obj->setDenseArrayElement(length, v);
@ -2327,16 +2322,15 @@ array_pop_slowly(JSContext *cx, JSObject* obj, Value *vp)
return JS_FALSE;
if (index == 0) {
vp->setUndefined();
if (!cx->markTypeCallerUnexpected(TYPE_UNDEFINED))
return JS_FALSE;
cx->markTypeCallerUnexpected(TYPE_UNDEFINED);
} else {
index--;
/* Get the to-be-deleted property's value into vp. */
if (!GetElement(cx, obj, index, &hole, vp))
return JS_FALSE;
if (hole && !cx->markTypeCallerUnexpected(TYPE_UNDEFINED))
return JS_FALSE;
if (hole)
cx->markTypeCallerUnexpected(TYPE_UNDEFINED);
if (!hole && DeleteArrayElement(cx, obj, index, true) < 0)
return JS_FALSE;
}
@ -2352,15 +2346,14 @@ array_pop_dense(JSContext *cx, JSObject* obj, Value *vp)
index = obj->getArrayLength();
if (index == 0) {
vp->setUndefined();
if (!cx->markTypeCallerUnexpected(TYPE_UNDEFINED))
return JS_FALSE;
cx->markTypeCallerUnexpected(TYPE_UNDEFINED);
return JS_TRUE;
}
index--;
if (!GetElement(cx, obj, index, &hole, vp))
return JS_FALSE;
if (hole && !cx->markTypeCallerUnexpected(TYPE_UNDEFINED))
return JS_FALSE;
if (hole)
cx->markTypeCallerUnexpected(TYPE_UNDEFINED);
if (!hole && DeleteArrayElement(cx, obj, index, true) < 0)
return JS_FALSE;
@ -2394,8 +2387,7 @@ array_shift(JSContext *cx, uintN argc, Value *vp)
if (length == 0) {
vp->setUndefined();
if (!cx->markTypeCallerUnexpected(TYPE_UNDEFINED))
return JS_FALSE;
cx->markTypeCallerUnexpected(TYPE_UNDEFINED);
} else {
length--;
@ -2405,8 +2397,7 @@ array_shift(JSContext *cx, uintN argc, Value *vp)
*vp = obj->getDenseArrayElement(0);
if (vp->isMagic(JS_ARRAY_HOLE)) {
vp->setUndefined();
if (!cx->markTypeCallerUnexpected(TYPE_UNDEFINED))
return JS_FALSE;
cx->markTypeCallerUnexpected(TYPE_UNDEFINED);
}
Value *elems = obj->getDenseArrayElements();
memmove(elems, elems + 1, length * sizeof(jsval));
@ -2425,8 +2416,8 @@ array_shift(JSContext *cx, uintN argc, Value *vp)
if (!GetElement(cx, obj, 0, &hole, vp))
return JS_FALSE;
if (hole && !cx->markTypeCallerUnexpected(TYPE_UNDEFINED))
return JS_FALSE;
if (hole)
cx->markTypeCallerUnexpected(TYPE_UNDEFINED);
/* Slide down the array above the first element. */
AutoValueRooter tvr(cx);
@ -2512,8 +2503,8 @@ array_unshift(JSContext *cx, uintN argc, Value *vp)
vp->setNumber(newlen);
/* watch for length overflowing to a double. */
if (!vp->isInt32() && !cx->markTypeCallerOverflow())
return false;
if (!vp->isInt32())
cx->markTypeCallerOverflow();
return JS_TRUE;
}
@ -2543,8 +2534,9 @@ array_splice(JSContext *cx, uintN argc, Value *vp)
* result of the call so mark it at the callsite.
*/
type = cx->getTypeNewObject(JSProto_Array);
if (!type || !cx->markTypeCallerUnexpected((jstype) type))
if (!type)
return false;
cx->markTypeCallerUnexpected((jstype) type);
}
/* Create a new array value to return. */
@ -2610,10 +2602,8 @@ array_splice(JSContext *cx, uintN argc, Value *vp)
return JS_FALSE;
}
if (!cx->addTypePropertyId(obj2->getType(), JSID_VOID, tvr.value()))
return JS_FALSE;
/* Copy tvr.value() to the new array unless it's a hole. */
cx->addTypePropertyId(obj2->getType(), JSID_VOID, tvr.value());
if (!hole && !SetArrayElement(cx, obj2, last - begin, tvr.value()))
return JS_FALSE;
}
@ -2726,13 +2716,13 @@ array_concat(JSContext *cx, uintN argc, Value *vp)
return JS_FALSE;
if (nobj->getProto() == aobj->getProto())
nobj->setType(aobj->getType());
else if (!cx->markTypeCallerUnexpected(TYPE_UNKNOWN))
return JS_FALSE;
else
cx->markTypeCallerUnexpected(TYPE_UNKNOWN);
nobj->setType(aobj->getType());
if (!nobj->setArrayLength(cx, length))
return JS_FALSE;
if (!aobj->isPackedDenseArray() && !nobj->setDenseArrayNotPacked(cx))
return JS_FALSE;
if (!aobj->isPackedDenseArray())
nobj->setDenseArrayNotPacked(cx);
vp->setObject(*nobj);
if (argc == 0)
return JS_TRUE;
@ -2742,8 +2732,7 @@ array_concat(JSContext *cx, uintN argc, Value *vp)
nobj = NewDenseEmptyArray(cx);
if (!nobj)
return JS_FALSE;
if (!cx->markTypeCallerUnexpected(TYPE_UNKNOWN))
return JS_FALSE;
cx->markTypeCallerUnexpected(TYPE_UNKNOWN);
vp->setObject(*nobj);
length = 0;
}
@ -2772,8 +2761,8 @@ array_concat(JSContext *cx, uintN argc, Value *vp)
return false;
}
if (!hole && !cx->addTypePropertyId(nobj->getType(), JSID_VOID, tvr.value()))
return false;
if (!hole)
cx->addTypePropertyId(nobj->getType(), JSID_VOID, tvr.value());
/*
* Per ECMA 262, 15.4.4.4, step 9, ignore nonexistent
@ -2789,9 +2778,7 @@ array_concat(JSContext *cx, uintN argc, Value *vp)
}
}
if (!cx->addTypePropertyId(nobj->getType(), JSID_VOID, v))
return false;
cx->addTypePropertyId(nobj->getType(), JSID_VOID, v);
if (!SetArrayElement(cx, nobj, length, v))
return false;
length++;
@ -2860,8 +2847,9 @@ array_slice(JSContext *cx, uintN argc, Value *vp)
* result of the call so mark it at the callsite.
*/
type = cx->getTypeNewObject(JSProto_Array);
if (!type || !cx->markTypeCallerUnexpected((jstype) type))
if (!type)
return false;
cx->markTypeCallerUnexpected((jstype) type);
}
if (obj->isDenseArray() && end <= obj->getDenseArrayInitializedLength() &&
@ -2870,8 +2858,8 @@ array_slice(JSContext *cx, uintN argc, Value *vp)
if (!nobj)
return JS_FALSE;
nobj->setType(type);
if (!obj->isPackedDenseArray() && !nobj->setDenseArrayNotPacked(cx))
return JS_FALSE;
if (!obj->isPackedDenseArray())
nobj->setDenseArrayNotPacked(cx);
vp->setObject(*nobj);
return JS_TRUE;
}
@ -2960,8 +2948,8 @@ array_indexOfHelper(JSContext *cx, JSBool isLast, uintN argc, Value *vp)
return JS_FALSE;
if (equal) {
vp->setNumber(i);
if (!vp->isInt32() && !cx->markTypeCallerOverflow())
return false;
if (!vp->isInt32())
cx->markTypeCallerOverflow();
return JS_TRUE;
}
}
@ -3150,7 +3138,7 @@ array_extra(JSContext *cx, ArrayExtraMode mode, uintN argc, Value *vp)
*vp = rval;
break;
case MAP:
ok = cx->addTypePropertyId(newarr->getType(), JSID_VOID, rval);
cx->addTypePropertyId(newarr->getType(), JSID_VOID, rval);
if (!ok)
goto out;
ok = SetArrayElement(cx, newarr, i, rval);
@ -3161,7 +3149,7 @@ array_extra(JSContext *cx, ArrayExtraMode mode, uintN argc, Value *vp)
if (!cond)
break;
/* The element passed the filter, so push it onto our result. */
ok = cx->addTypePropertyId(newarr->getType(), JSID_VOID, tvr.value());
cx->addTypePropertyId(newarr->getType(), JSID_VOID, tvr.value());
if (!ok)
goto out;
ok = SetArrayElement(cx, newarr, newlen++, tvr.value());
@ -3493,8 +3481,7 @@ js_Array(JSContext *cx, uintN argc, Value *vp)
return false;
obj = NewDenseCopiedArray(cx, argc, vp + 2);
} else if (!vp[2].isNumber()) {
if (!cx->addTypeProperty(type, NULL, vp[2]))
return false;
cx->addTypeProperty(type, NULL, vp[2]);
obj = NewDenseCopiedArray(cx, 1, vp + 2);
} else {
jsuint length;
@ -3563,14 +3550,14 @@ js_InitArrayClass(JSContext *cx, JSObject *obj)
if (!proto)
return NULL;
if (!JS_AddTypeProperty(cx, proto, "length", INT_TO_JSVAL(0)))
return NULL;
JS_AddTypeProperty(cx, proto, "length", INT_TO_JSVAL(0));
JS_ALWAYS_TRUE(proto->setArrayLength(cx, 0));
/* The default 'new' object for Array.prototype has unknown properties. */
TypeObject *newType = proto->getNewType(cx);
if (!newType || !cx->markTypeObjectUnknownProperties(newType))
if (!newType)
return NULL;
cx->markTypeObjectUnknownProperties(newType);
return proto;
}
@ -3844,8 +3831,8 @@ js_CloneDensePrimitiveArray(JSContext *cx, JSObject *obj, JSObject **clone)
if (!*clone)
return JS_FALSE;
if (!obj->isPackedDenseArray() && !(*clone)->setDenseArrayNotPacked(cx))
return JS_FALSE;
if (!obj->isPackedDenseArray())
(*clone)->setDenseArrayNotPacked(cx);
/* The length will be set to the initlen, above, but length might be larger. */
return (*clone)->setArrayLength(cx, length);

View File

@ -74,15 +74,14 @@ JSObject::isPackedDenseArray()
return flags & PACKED_ARRAY;
}
inline bool
inline void
JSObject::setDenseArrayNotPacked(JSContext *cx)
{
JS_ASSERT(isDenseArray());
if (flags & PACKED_ARRAY) {
flags ^= PACKED_ARRAY;
return cx->markTypeArrayNotPacked(getType(), false);
cx->markTypeArrayNotPacked(getType(), false);
}
return true;
}
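
Note the one-way latch: once PACKED_ARRAY is cleared here the object no longer claims packed storage, and markTypeArrayNotPacked tags the type object (OBJECT_FLAG_NON_PACKED_ARRAY, further down in this commit) so inference consumers stop assuming hole-free dense elements. Making the operation infallible is what lets the array paths above shed their error checks.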
inline JSObject::EnsureDenseResult
@ -107,8 +106,7 @@ JSObject::ensureDenseArrayElements(JSContext *cx, uintN index, uintN extra)
if (index < currentCapacity) {
JS_ASSERT(cx->typeInferenceEnabled());
if (index > initLength) {
if (!setDenseArrayNotPacked(cx))
return ED_FAILED;
setDenseArrayNotPacked(cx);
ClearValueRange(getDenseArrayElements() + initLength,
index - initLength, true);
}
@ -133,8 +131,7 @@ JSObject::ensureDenseArrayElements(JSContext *cx, uintN index, uintN extra)
if (index > initLength) {
ClearValueRange(getDenseArrayElements() + initLength,
index - initLength, true);
if (!setDenseArrayNotPacked(cx))
return ED_FAILED;
setDenseArrayNotPacked(cx);
}
setDenseArrayInitializedLength(requiredCapacity);
return ED_OK;
@ -154,8 +151,7 @@ JSObject::ensureDenseArrayElements(JSContext *cx, uintN index, uintN extra)
if (cx->typeInferenceEnabled()) {
if (index > initLength) {
if (!setDenseArrayNotPacked(cx))
return ED_FAILED;
setDenseArrayNotPacked(cx);
ClearValueRange(getDenseArrayElements() + initLength,
index - initLength, true);
}

View File

@ -1377,60 +1377,60 @@ struct JSContext
getTypeCallerInitObject(bool isArray);
/* Mark the immediate allocation site as having produced an unexpected value. */
inline bool markTypeCallerUnexpected(js::types::jstype type);
inline bool markTypeCallerUnexpected(const js::Value &value);
inline bool markTypeCallerOverflow();
inline void markTypeCallerUnexpected(js::types::jstype type);
inline void markTypeCallerUnexpected(const js::Value &value);
inline void markTypeCallerOverflow();
/*
* Monitor a javascript call, either on entry to the interpreter or made
* from within the interpreter.
*/
inline bool typeMonitorCall(const js::CallArgs &args, bool constructing);
inline void typeMonitorCall(const js::CallArgs &args, bool constructing);
/* Monitor an assignment made to a property by a script. */
inline bool typeMonitorAssign(JSObject *obj, jsid id, const js::Value &value);
inline void typeMonitorAssign(JSObject *obj, jsid id, const js::Value &value);
/* Add a possible value for the named property of obj. */
inline bool addTypeProperty(js::types::TypeObject *obj, const char *name, js::types::jstype type);
inline bool addTypeProperty(js::types::TypeObject *obj, const char *name, const js::Value &value);
inline bool addTypePropertyId(js::types::TypeObject *obj, jsid id, js::types::jstype type);
inline bool addTypePropertyId(js::types::TypeObject *obj, jsid id, const js::Value &value);
inline bool addTypePropertyId(js::types::TypeObject *obj, jsid id, js::types::ClonedTypeSet *types);
inline void addTypeProperty(js::types::TypeObject *obj, const char *name, js::types::jstype type);
inline void addTypeProperty(js::types::TypeObject *obj, const char *name, const js::Value &value);
inline void addTypePropertyId(js::types::TypeObject *obj, jsid id, js::types::jstype type);
inline void addTypePropertyId(js::types::TypeObject *obj, jsid id, const js::Value &value);
inline void addTypePropertyId(js::types::TypeObject *obj, jsid id, js::types::ClonedTypeSet *types);
/* Get the type to use for objects with no prototype. */
inline js::types::TypeObject *getTypeEmpty();
/* Alias two properties in the type information for obj. */
inline bool aliasTypeProperties(js::types::TypeObject *obj, jsid first, jsid second);
inline void aliasTypeProperties(js::types::TypeObject *obj, jsid first, jsid second);
/* Mark an array type as being not packed and, possibly, not dense. */
inline bool markTypeArrayNotPacked(js::types::TypeObject *obj, bool notDense);
inline void markTypeArrayNotPacked(js::types::TypeObject *obj, bool notDense);
/* Mark a function as being uninlineable (its .arguments property has been accessed). */
inline bool markTypeFunctionUninlineable(js::types::TypeObject *obj);
inline void markTypeFunctionUninlineable(js::types::TypeObject *obj);
/* Monitor all properties of a type object as unknown. */
inline bool markTypeObjectUnknownProperties(js::types::TypeObject *obj);
inline void markTypeObjectUnknownProperties(js::types::TypeObject *obj);
/* Mark a type as possibly having special equality hooks. */
inline bool markTypeObjectHasSpecialEquality(js::types::TypeObject *obj);
inline void markTypeObjectHasSpecialEquality(js::types::TypeObject *obj);
/* Mark any property which has been deleted or reconfigured. */
inline bool markTypePropertyConfigured(js::types::TypeObject *obj, jsid id);
inline void markTypePropertyConfigured(js::types::TypeObject *obj, jsid id);
/* Mark a global object as having had its slots reallocated. */
inline bool markGlobalReallocation(JSObject *obj);
inline void markGlobalReallocation(JSObject *obj);
/*
* For an array or object which has not yet escaped and been referenced elsewhere,
* pick a new type based on the object's current contents.
*/
inline bool fixArrayType(JSObject *obj);
inline bool fixObjectType(JSObject *obj);
inline void fixArrayType(JSObject *obj);
inline void fixObjectType(JSObject *obj);
private:
inline bool addTypeFlags(js::types::TypeObject *obj, js::types::TypeObjectFlags flags);
inline void addTypeFlags(js::types::TypeObject *obj, js::types::TypeObjectFlags flags);
}; /* struct JSContext */

View File

@ -1454,8 +1454,8 @@ date_getYear(JSContext *cx, uintN argc, Value *vp)
vp->setInt32(year);
} else {
*vp = yearVal;
if (!vp->isInt32() && !cx->markTypeCallerOverflow())
return false;
if (!vp->isInt32())
cx->markTypeCallerOverflow();
}
return true;
@ -1472,8 +1472,8 @@ date_getFullYear(JSContext *cx, uintN argc, Value *vp)
return JS_FALSE;
*vp = obj->getSlot(JSObject::JSSLOT_DATE_LOCAL_YEAR);
if (!vp->isInt32() && !cx->markTypeCallerOverflow())
return false;
if (!vp->isInt32())
cx->markTypeCallerOverflow();
return JS_TRUE;
}
@ -1486,8 +1486,8 @@ date_getUTCFullYear(JSContext *cx, uintN argc, Value *vp)
if (JSDOUBLE_IS_FINITE(result))
result = YearFromTime(result);
else if (!cx->markTypeCallerOverflow())
return false;
else
cx->markTypeCallerOverflow();
vp->setNumber(result);
return true;
@ -1504,8 +1504,8 @@ date_getMonth(JSContext *cx, uintN argc, Value *vp)
return false;
*vp = obj->getSlot(JSObject::JSSLOT_DATE_LOCAL_MONTH);
if (!vp->isInt32() && !cx->markTypeCallerOverflow())
return false;
if (!vp->isInt32())
cx->markTypeCallerOverflow();
return true;
}
@ -1518,8 +1518,8 @@ date_getUTCMonth(JSContext *cx, uintN argc, Value *vp)
if (JSDOUBLE_IS_FINITE(result))
result = MonthFromTime(result);
else if (!cx->markTypeCallerOverflow())
return false;
else
cx->markTypeCallerOverflow();
vp->setNumber(result);
return true;
@ -1536,8 +1536,8 @@ date_getDate(JSContext *cx, uintN argc, Value *vp)
return false;
*vp = obj->getSlot(JSObject::JSSLOT_DATE_LOCAL_DATE);
if (!vp->isInt32() && !cx->markTypeCallerOverflow())
return false;
if (!vp->isInt32())
cx->markTypeCallerOverflow();
return true;
}
@ -1550,8 +1550,8 @@ date_getUTCDate(JSContext *cx, uintN argc, Value *vp)
if (JSDOUBLE_IS_FINITE(result))
result = DateFromTime(result);
else if (!cx->markTypeCallerOverflow())
return false;
else
cx->markTypeCallerOverflow();
vp->setNumber(result);
return true;
@ -1568,8 +1568,8 @@ date_getDay(JSContext *cx, uintN argc, Value *vp)
return false;
*vp = obj->getSlot(JSObject::JSSLOT_DATE_LOCAL_DAY);
if (!vp->isInt32() && !cx->markTypeCallerOverflow())
return false;
if (!vp->isInt32())
cx->markTypeCallerOverflow();
return true;
}
@ -1582,8 +1582,8 @@ date_getUTCDay(JSContext *cx, uintN argc, Value *vp)
if (JSDOUBLE_IS_FINITE(result))
result = WeekDay(result);
else if (!cx->markTypeCallerOverflow())
return false;
else
cx->markTypeCallerOverflow();
vp->setNumber(result);
return true;
@ -1600,8 +1600,8 @@ date_getHours(JSContext *cx, uintN argc, Value *vp)
return false;
*vp = obj->getSlot(JSObject::JSSLOT_DATE_LOCAL_HOURS);
if (!vp->isInt32() && !cx->markTypeCallerOverflow())
return false;
if (!vp->isInt32())
cx->markTypeCallerOverflow();
return true;
}
@ -1614,8 +1614,8 @@ date_getUTCHours(JSContext *cx, uintN argc, Value *vp)
if (JSDOUBLE_IS_FINITE(result))
result = HourFromTime(result);
else if (!cx->markTypeCallerOverflow())
return false;
else
cx->markTypeCallerOverflow();
vp->setNumber(result);
return JS_TRUE;
@ -1632,8 +1632,8 @@ date_getMinutes(JSContext *cx, uintN argc, Value *vp)
return false;
*vp = obj->getSlot(JSObject::JSSLOT_DATE_LOCAL_MINUTES);
if (!vp->isInt32() && !cx->markTypeCallerOverflow())
return false;
if (!vp->isInt32())
cx->markTypeCallerOverflow();
return true;
}
@ -1646,8 +1646,8 @@ date_getUTCMinutes(JSContext *cx, uintN argc, Value *vp)
if (JSDOUBLE_IS_FINITE(result))
result = MinFromTime(result);
else if (!cx->markTypeCallerOverflow())
return false;
else
cx->markTypeCallerOverflow();
vp->setNumber(result);
return true;
@ -1666,8 +1666,8 @@ date_getUTCSeconds(JSContext *cx, uintN argc, Value *vp)
return false;
*vp = obj->getSlot(JSObject::JSSLOT_DATE_LOCAL_SECONDS);
if (!vp->isInt32() && !cx->markTypeCallerOverflow())
return false;
if (!vp->isInt32())
cx->markTypeCallerOverflow();
return true;
}
@ -1682,8 +1682,8 @@ date_getUTCMilliseconds(JSContext *cx, uintN argc, Value *vp)
if (JSDOUBLE_IS_FINITE(result))
result = msFromTime(result);
else if (!cx->markTypeCallerOverflow())
return false;
else
cx->markTypeCallerOverflow();
vp->setNumber(result);
return true;
@ -1710,8 +1710,8 @@ date_getTimezoneOffset(JSContext *cx, uintN argc, Value *vp)
* daylight savings time.
*/
jsdouble result = (utctime - localtime) / msPerMinute;
if (!JSDOUBLE_IS_FINITE(result) && !cx->markTypeCallerOverflow())
return false;
if (!JSDOUBLE_IS_FINITE(result))
cx->markTypeCallerOverflow();
vp->setNumber(result);
return true;
@ -2658,9 +2658,10 @@ js_InitDateClass(JSContext *cx, JSObject *obj)
AutoValueRooter toUTCStringFun(cx);
jsid toUTCStringId = ATOM_TO_JSID(cx->runtime->atomState.toUTCStringAtom);
jsid toGMTStringId = ATOM_TO_JSID(cx->runtime->atomState.toGMTStringAtom);
if (!js_GetProperty(cx, proto, toUTCStringId, toUTCStringFun.addr()) ||
!cx->addTypePropertyId(proto->getType(), toGMTStringId, toUTCStringFun.value()) ||
!js_DefineProperty(cx, proto, toGMTStringId, toUTCStringFun.addr(),
if (!js_GetProperty(cx, proto, toUTCStringId, toUTCStringFun.addr()))
return NULL;
cx->addTypePropertyId(proto->getType(), toGMTStringId, toUTCStringFun.value());
if (!js_DefineProperty(cx, proto, toGMTStringId, toUTCStringFun.addr(),
PropertyStub, StrictPropertyStub, 0)) {
return NULL;
}

View File

@ -248,10 +248,7 @@ js_SetSingleStepMode(JSContext *cx, JSScript *script, JSBool singleStep)
js::mjit::JITScript *jit = script->jitNormal ? script->jitNormal : script->jitCtor;
if (jit && script->singleStepMode != jit->singleStepMode) {
js::mjit::Recompiler recompiler(cx, script);
if (!recompiler.recompile()) {
script->singleStepMode = !singleStep;
return JS_FALSE;
}
recompiler.recompile();
}
#endif
return JS_TRUE;
@ -390,8 +387,7 @@ JS_SetTrap(JSContext *cx, JSScript *script, jsbytecode *pc,
#ifdef JS_METHODJIT
if (script->hasJITCode()) {
js::mjit::Recompiler recompiler(cx, script);
if (!recompiler.recompile())
return JS_FALSE;
recompiler.recompile();
}
#endif
@ -989,8 +985,7 @@ UpdateWatchpointShape(JSContext *cx, JSWatchPoint *wp, const Shape *newShape)
*/
StrictPropertyOp originalSetter = newShape->setter();
if (!cx->addTypePropertyId(wp->object->getType(), newShape->id, types::TYPE_UNKNOWN))
return NULL;
cx->addTypePropertyId(wp->object->getType(), newShape->id, types::TYPE_UNKNOWN);
/*
* Drop the watching setter into the object, in place of newShape. Note that a single
@ -1097,8 +1092,7 @@ JS_SetWatchPoint(JSContext *cx, JSObject *obj, jsid id,
return false;
}
if (!cx->markTypePropertyConfigured(obj->getType(), propid))
return false;
cx->markTypePropertyConfigured(obj->getType(), propid);
JSObject *pobj;
JSProperty *prop;
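
The debugger entry points change in the same way: setting a trap, toggling single-step mode or adding a watchpoint still forces recompilation of affected JIT code, but recompilation is now treated as infallible from the API's point of view, so these calls no longer report failure for it.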

View File

@ -4427,8 +4427,7 @@ JSParseNode::getConstantValue(JSContext *cx, bool strictChecks, Value *vp)
}
JS_ASSERT(idx == pn_count);
if (!cx->fixArrayType(obj))
return false;
cx->fixArrayType(obj);
vp->setObject(*obj);
return true;
}
@ -4468,8 +4467,7 @@ JSParseNode::getConstantValue(JSContext *cx, bool strictChecks, Value *vp)
}
}
if (!cx->fixObjectType(obj))
return false;
cx->fixObjectType(obj);
vp->setObject(*obj);
return true;
}

View File

@ -587,8 +587,8 @@ ArgSetter(JSContext *cx, JSObject *obj, jsid id, JSBool strict, Value *vp)
if (fp) {
JSScript *script = fp->functionScript();
if (script->usesArguments) {
if (arg < fp->numFormalArgs() && !script->typeSetArgument(cx, arg, *vp))
return false;
if (arg < fp->numFormalArgs())
script->typeSetArgument(cx, arg, *vp);
fp->canonicalActualArg(arg) = *vp;
}
return true;
@ -1238,8 +1238,7 @@ SetCallArg(JSContext *cx, JSObject *obj, jsid id, JSBool strict, Value *vp)
argp = &obj->callObjArg(i);
JSScript *script = obj->getCallObjCalleeFunction()->script();
if (!script->typeSetArgument(cx, i, *vp))
return false;
script->typeSetArgument(cx, i, *vp);
GC_POKE(cx, *argp);
*argp = *vp;
@ -1322,8 +1321,7 @@ SetCallVar(JSContext *cx, JSObject *obj, jsid id, JSBool strict, Value *vp)
varp = &obj->callObjVar(i);
JSScript *script = obj->getCallObjCalleeFunction()->script();
if (!script->typeSetLocal(cx, i, *vp))
return false;
script->typeSetLocal(cx, i, *vp);
GC_POKE(cx, *varp);
*varp = *vp;
@ -1606,8 +1604,8 @@ fun_getProperty(JSContext *cx, JSObject *obj, jsid id, Value *vp)
* Mark the function's script as uninlineable, to expand any of its
* frames on the stack before we go looking for them.
*/
if (fun->isInterpreted() && !cx->markTypeFunctionUninlineable(fun->getType()))
return false;
if (fun->isInterpreted())
cx->markTypeFunctionUninlineable(fun->getType());
}
/* Find fun's top-most activation record. */
@ -1625,8 +1623,7 @@ fun_getProperty(JSContext *cx, JSObject *obj, jsid id, Value *vp)
fp->prev()->pc(cx, fp, &inlined);
if (inlined) {
JSFunction *fun = fp->prev()->jit()->inlineFrames()[inlined->inlineIndex].fun;
if (!cx->markTypeFunctionUninlineable(fun->getType()))
return false;
cx->markTypeFunctionUninlineable(fun->getType());
}
}
#endif
@ -1695,8 +1692,7 @@ fun_getProperty(JSContext *cx, JSObject *obj, jsid id, Value *vp)
JS_NOT_REACHED("fun_getProperty");
}
jsid nameid = atom ? ATOM_TO_JSID(atom) : JSID_VOID;
return cx->addTypePropertyId(obj->getType(), nameid, *vp);
return true;
}
struct LazyFunctionDataProp {
@ -1840,8 +1836,8 @@ fun_resolve(JSContext *cx, JSObject *obj, jsid id, uintN flags,
if (JSID_IS_ATOM(id, cx->runtime->atomState.lengthAtom)) {
JS_ASSERT(!IsInternalFunctionObject(obj));
if (!cx->addTypePropertyId(obj->getType(), id, types::TYPE_INT32) ||
!js_DefineNativeProperty(cx, obj, id, Int32Value(fun->nargs),
cx->addTypePropertyId(obj->getType(), id, types::TYPE_INT32);
if (!js_DefineNativeProperty(cx, obj, id, Int32Value(fun->nargs),
PropertyStub, StrictPropertyStub,
JSPROP_PERMANENT | JSPROP_READONLY, 0, 0, NULL)) {
return false;
@ -2756,8 +2752,9 @@ js_InitFunctionClass(JSContext *cx, JSObject *obj)
* This will be used for generic scripted functions, e.g. from non-compileAndGo code.
*/
TypeObject *newType = proto->getNewType(cx);
if (!newType || !cx->markTypeObjectUnknownProperties(newType))
if (!newType)
return NULL;
cx->markTypeObjectUnknownProperties(newType);
JSFunction *fun = js_NewFunction(cx, proto, NULL, 0, JSFUN_INTERPRETED, obj, NULL, NULL, NULL);
if (!fun)
@ -2923,12 +2920,10 @@ js_CloneFunctionObject(JSContext *cx, JSFunction *fun, JSObject *parent,
TypeFunction *type = cx->newTypeFunction("ClonedFunction", clone->getProto());
if (!type || !clone->setTypeAndUniqueShape(cx, type))
return NULL;
if (fun->getType()->unknownProperties()) {
if (!cx->markTypeObjectUnknownProperties(type))
return NULL;
} else {
if (fun->getType()->unknownProperties())
cx->markTypeObjectUnknownProperties(type);
else
type->handler = fun->getType()->asFunction()->handler;
}
}
}
return clone;
@ -2995,13 +2990,10 @@ js_NewFlatClosure(JSContext *cx, JSFunction *fun, JSOp op, size_t oplen)
uintN level = fun->u.i.script->staticLevel;
JSUpvarArray *uva = fun->script()->upvars();
bool ok = true;
for (uint32 i = 0, n = uva->length; i < n; i++) {
upvars[i] = GetUpvar(cx, level, uva->vector[i]);
ok &= fun->script()->typeSetUpvar(cx, i, upvars[i]);
fun->script()->typeSetUpvar(cx, i, upvars[i]);
}
if (!ok)
return NULL;
return closure;
}
@ -3100,9 +3092,7 @@ js_DefineFunction(JSContext *cx, JSObject *obj, jsid id, Native native,
if (!wasDelegate && obj->isDelegate())
obj->clearDelegate();
if (!cx->addTypePropertyId(obj->getType(), id, ObjectValue(*fun)))
return NULL;
cx->addTypePropertyId(obj->getType(), id, ObjectValue(*fun));
if (!obj->defineProperty(cx, id, ObjectValue(*fun), gop, sop, attrs & ~JSFUN_FLAGS_MASK))
return NULL;

View File

@ -1814,7 +1814,7 @@ TypeCompartment::growPendingArray(JSContext *cx)
pendingCapacity = newCapacity;
}
bool
void
TypeCompartment::dynamicCall(JSContext *cx, JSObject *callee,
const js::CallArgs &args, bool constructing)
{
@ -1825,8 +1825,7 @@ TypeCompartment::dynamicCall(JSContext *cx, JSObject *callee,
script->typeSetNewCalled(cx);
} else {
jstype type = GetValueType(cx, args.thisv());
if (!script->typeSetThis(cx, type))
return false;
script->typeSetThis(cx, type);
}
/*
@ -1835,21 +1834,15 @@ TypeCompartment::dynamicCall(JSContext *cx, JSObject *callee,
* accessed through the arguments object, which is monitored.
*/
unsigned arg = 0;
for (; arg < args.argc() && arg < nargs; arg++) {
if (!script->typeSetArgument(cx, arg, args[arg]))
return false;
}
for (; arg < args.argc() && arg < nargs; arg++)
script->typeSetArgument(cx, arg, args[arg]);
/* Watch for fewer actuals than formals to the call. */
for (; arg < nargs; arg++) {
if (!script->typeSetArgument(cx, arg, UndefinedValue()))
return false;
}
return true;
for (; arg < nargs; arg++)
script->typeSetArgument(cx, arg, UndefinedValue());
}
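
dynamicCall types the 'this' value and each actual argument, then pads any missing formals with undefined; for example (hypothetical call), invoking function f(a, b) {} as f(1) records an int32 type for a and undefined for b, so inference still sees every formal.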
bool
void
TypeCompartment::dynamicPush(JSContext *cx, JSScript *script, uint32 offset, jstype type)
{
JS_ASSERT(cx->typeInferenceEnabled());
@ -1917,7 +1910,7 @@ TypeCompartment::dynamicPush(JSContext *cx, JSScript *script, uint32 offset, jst
*/
TypeSet *pushed = script->analysis(cx)->pushedTypes(offset, 0);
if (pushed->hasType(type))
return true;
return;
} else {
/* Scan all TypeResults on the script to check for a duplicate. */
TypeResult *result, **presult = &script->typeResults;
@ -1930,7 +1923,7 @@ TypeCompartment::dynamicPush(JSContext *cx, JSScript *script, uint32 offset, jst
result->next = script->typeResults;
script->typeResults = result;
}
return true;
return;
}
presult = &result->next;
}
@ -1942,7 +1935,7 @@ TypeCompartment::dynamicPush(JSContext *cx, JSScript *script, uint32 offset, jst
TypeResult *result = (TypeResult *) cx->calloc_(sizeof(TypeResult));
if (!result) {
setPendingNukeTypes(cx);
return false;
return;
}
result->offset = offset;
@ -1958,7 +1951,7 @@ TypeCompartment::dynamicPush(JSContext *cx, JSScript *script, uint32 offset, jst
analyze::ScriptAnalysis *analysis = script->analysis(cx);
if (!analysis) {
setPendingNukeTypes(cx);
return false;
return;
}
analysis->analyzeTypes(cx);
}
@ -1990,7 +1983,7 @@ TypeCompartment::dynamicPush(JSContext *cx, JSScript *script, uint32 offset, jst
TypeResult *result = (TypeResult *) cx->calloc_(sizeof(TypeResult));
if (!result) {
setPendingNukeTypes(cx);
return false;
return;
}
result->offset = offset;
result->type = TYPE_UNDEFINED;
@ -2001,11 +1994,9 @@ TypeCompartment::dynamicPush(JSContext *cx, JSScript *script, uint32 offset, jst
offset += analyze::GetBytecodeLength(pc);
}
}
return true;
}
bool
void
TypeCompartment::processPendingRecompiles(JSContext *cx)
{
/* Steal the list of scripts to recompile, else we will try to recursively recompile them. */
@ -2021,17 +2012,13 @@ TypeCompartment::processPendingRecompiles(JSContext *cx)
for (unsigned i = 0; i < pending->length(); i++) {
JSScript *script = (*pending)[i];
mjit::Recompiler recompiler(cx, script);
if (script->hasJITCode() && !recompiler.recompile()) {
pendingNukeTypes = true;
cx->delete_(pending);
return nukeTypes(cx);
}
if (script->hasJITCode())
recompiler.recompile();
}
#endif /* JS_METHODJIT */
cx->delete_(pending);
return true;
}
void
@ -2043,7 +2030,7 @@ TypeCompartment::setPendingNukeTypes(JSContext *cx)
}
}
bool
void
TypeCompartment::nukeTypes(JSContext *cx)
{
/*
@ -2065,8 +2052,6 @@ TypeCompartment::nukeTypes(JSContext *cx)
/* :FIXME: Implement this function. */
*((int*)0) = 0;
return true;
}
void
@ -2096,7 +2081,7 @@ TypeCompartment::addPendingRecompile(JSContext *cx, JSScript *script)
}
}
bool
void
TypeCompartment::dynamicAssign(JSContext *cx, JSObject *obj, jsid id, const Value &rval)
{
if (obj->isWith())
@ -2106,7 +2091,7 @@ TypeCompartment::dynamicAssign(JSContext *cx, JSObject *obj, jsid id, const Valu
TypeObject *object = obj->getType();
if (object->unknownProperties())
return true;
return;
id = MakeTypeId(cx, id);
@ -2117,20 +2102,20 @@ TypeCompartment::dynamicAssign(JSContext *cx, JSObject *obj, jsid id, const Valu
* :FIXME: this is too aggressive for things like prototype library initialization.
*/
JSOp op = JSOp(*cx->regs().pc);
if (id == id___proto__(cx) || (op == JSOP_SETELEM && !JSID_IS_VOID(id)))
return cx->markTypeObjectUnknownProperties(object);
if (id == id___proto__(cx) || (op == JSOP_SETELEM && !JSID_IS_VOID(id))) {
cx->markTypeObjectUnknownProperties(object);
return;
}
AutoEnterTypeInference enter(cx);
TypeSet *assignTypes = object->getProperty(cx, id, true);
if (!assignTypes || assignTypes->hasType(rvtype))
return true;
return;
InferSpew(ISpewOps, "externalType: monitorAssign %s %s: %s",
object->name(), TypeIdString(id), TypeString(rvtype));
assignTypes->addType(cx, rvtype);
return true;
}
void
@ -2278,15 +2263,17 @@ struct types::ArrayTableKey
}
};
bool
void
TypeCompartment::fixArrayType(JSContext *cx, JSObject *obj)
{
AutoEnterTypeInference enter(cx);
if (!arrayTypeTable) {
arrayTypeTable = cx->new_<ArrayTypeTable>();
if (!arrayTypeTable || !arrayTypeTable->init()) {
arrayTypeTable = NULL;
js_ReportOutOfMemory(cx);
return false;
cx->compartment->types.setPendingNukeTypes(cx);
return;
}
}
@ -2300,7 +2287,7 @@ TypeCompartment::fixArrayType(JSContext *cx, JSObject *obj)
unsigned len = obj->getDenseArrayInitializedLength();
if (len == 0)
return true;
return;
jstype type = GetValueType(cx, obj->getDenseArrayElement(0));
@ -2310,7 +2297,7 @@ TypeCompartment::fixArrayType(JSContext *cx, JSObject *obj)
if (NumberTypes(type, ntype))
type = TYPE_DOUBLE;
else
return true;
return;
}
}
@ -2328,21 +2315,18 @@ TypeCompartment::fixArrayType(JSContext *cx, JSObject *obj)
TypeObject *objType = newTypeObject(cx, NULL, name, false, true, obj->getProto());
if (!objType) {
js_ReportOutOfMemory(cx);
return false;
cx->compartment->types.setPendingNukeTypes(cx);
return;
}
obj->setType(objType);
if (!cx->addTypePropertyId(objType, JSID_VOID, type))
return false;
cx->addTypePropertyId(objType, JSID_VOID, type);
if (!arrayTypeTable->relookupOrAdd(p, key, objType)) {
js_ReportOutOfMemory(cx);
return false;
cx->compartment->types.setPendingNukeTypes(cx);
return;
}
}
return true;
}
/*
@ -2389,15 +2373,17 @@ struct types::ObjectTableEntry
jstype *types;
};
bool
void
TypeCompartment::fixObjectType(JSContext *cx, JSObject *obj)
{
AutoEnterTypeInference enter(cx);
if (!objectTypeTable) {
objectTypeTable = cx->new_<ObjectTypeTable>();
if (!objectTypeTable || !objectTypeTable->init()) {
objectTypeTable = NULL;
js_ReportOutOfMemory(cx);
return false;
cx->compartment->types.setPendingNukeTypes(cx);
return;
}
}
@ -2410,7 +2396,7 @@ TypeCompartment::fixObjectType(JSContext *cx, JSObject *obj)
JS_ASSERT(obj->isObject());
if (obj->slotSpan() == 0 || obj->inDictionaryMode())
return true;
return;
ObjectTypeTable::AddPtr p = objectTypeTable->lookupForAdd(obj);
const Shape *baseShape = obj->lastProperty();
@ -2427,15 +2413,14 @@ TypeCompartment::fixObjectType(JSContext *cx, JSObject *obj)
const Shape *shape = baseShape;
while (!JSID_IS_EMPTY(shape->id)) {
if (shape->slot == i) {
if (!cx->addTypePropertyId(p->value.object, shape->id, TYPE_DOUBLE))
return false;
cx->addTypePropertyId(p->value.object, shape->id, TYPE_DOUBLE);
break;
}
shape = shape->previous();
}
}
} else {
return true;
return;
}
}
}
@ -2450,8 +2435,8 @@ TypeCompartment::fixObjectType(JSContext *cx, JSObject *obj)
JSObject *xobj = NewBuiltinClassInstance(cx, &js_ObjectClass,
(gc::FinalizeKind) obj->finalizeKind());
if (!xobj) {
js_ReportOutOfMemory(cx);
return false;
cx->compartment->types.setPendingNukeTypes(cx);
return;
}
AutoObjectRooter xvr(cx, xobj);
@ -2461,25 +2446,28 @@ TypeCompartment::fixObjectType(JSContext *cx, JSObject *obj)
TypeObject *objType = newTypeObject(cx, NULL, name, false, false, obj->getProto());
if (!objType) {
js_ReportOutOfMemory(cx);
return false;
cx->compartment->types.setPendingNukeTypes(cx);
return;
}
xobj->setType(objType);
jsid *ids = (jsid *) cx->calloc_(obj->slotSpan() * sizeof(jsid));
if (!ids)
return false;
if (!ids) {
cx->compartment->types.setPendingNukeTypes(cx);
return;
}
jstype *types = (jstype *) cx->calloc_(obj->slotSpan() * sizeof(jstype));
if (!types)
return false;
if (!types) {
cx->compartment->types.setPendingNukeTypes(cx);
return;
}
const Shape *shape = baseShape;
while (!JSID_IS_EMPTY(shape->id)) {
ids[shape->slot] = shape->id;
types[shape->slot] = GetValueType(cx, obj->getSlot(shape->slot));
if (!cx->addTypePropertyId(objType, shape->id, types[shape->slot]))
return false;
cx->addTypePropertyId(objType, shape->id, types[shape->slot]);
shape = shape->previous();
}
@ -2487,14 +2475,15 @@ TypeCompartment::fixObjectType(JSContext *cx, JSObject *obj)
for (unsigned i = 0; i < obj->slotSpan(); i++) {
if (!js_DefineNativeProperty(cx, xobj, ids[i], UndefinedValue(), NULL, NULL,
JSPROP_ENUMERATE, 0, 0, NULL, 0)) {
return false;
cx->compartment->types.setPendingNukeTypes(cx);
return;
}
}
JS_ASSERT(!xobj->inDictionaryMode());
const Shape *newShape = xobj->lastProperty();
if (!objType->addDefiniteProperties(cx, xobj, false))
return false;
return;
ObjectTableKey key;
key.ids = ids;
@ -2510,14 +2499,12 @@ TypeCompartment::fixObjectType(JSContext *cx, JSObject *obj)
p = objectTypeTable->lookupForAdd(obj);
if (!objectTypeTable->add(p, key, entry)) {
js_ReportOutOfMemory(cx);
return false;
cx->compartment->types.setPendingNukeTypes(cx);
return;
}
obj->setTypeAndShape(objType, newShape);
}
return true;
}
/////////////////////////////////////////////////////////////////////
@ -4098,9 +4085,9 @@ JSContext::newTypeObject(const char *base, const char *postfix, JSObject *proto,
* pushed by the specified bytecode.
*/
static inline bool
IgnorePushed(JSOp op, unsigned index)
IgnorePushed(const jsbytecode *pc, unsigned index)
{
switch (op) {
switch (JSOp(*pc)) {
/* We keep track of the scopes pushed by BINDNAME separately. */
case JSOP_BINDNAME:
case JSOP_BINDGNAME:
@ -4156,6 +4143,15 @@ IgnorePushed(JSOp op, unsigned index)
case JSOP_FINALLY:
return true;
/*
We don't treat a GETLOCAL immediately followed by a pop as a use-before-def:
the type will have been inferred correctly, but the method JIT may not have
written the local's initial undefined value to the stack, leaving a stale
value.
case JSOP_GETLOCAL:
return JSOp(pc[JSOP_GETLOCAL_LENGTH]) == JSOP_POP;
default:
return false;
}
@ -4166,11 +4162,15 @@ JSScript::makeVarTypes(JSContext *cx)
{
JS_ASSERT(!varTypes);
AutoEnterTypeInference enter(cx);
unsigned nargs = fun ? fun->nargs : 0;
unsigned count = 2 + nargs + nfixed + bindings.countUpvars();
varTypes = (TypeSet *) cx->calloc_(sizeof(TypeSet) * count);
if (!varTypes)
if (!varTypes) {
compartment->types.setPendingNukeTypes(cx);
return false;
}
#ifdef DEBUG
InferSpew(ISpewOps, "typeSet: T%p return #%u", returnTypes(), id());
@ -4226,7 +4226,7 @@ JSScript::typeCheckBytecode(JSContext *cx, const jsbytecode *pc, const js::Value
for (int i = 0; i < defCount; i++) {
const js::Value &val = sp[-defCount + i];
TypeSet *types = analysis_->pushedTypes(pc, i);
if (IgnorePushed(JSOp(*pc), i))
if (IgnorePushed(pc, i))
continue;
jstype type = GetValueType(cx, val);

View File

@ -744,8 +744,8 @@ struct TypeCompartment
ArrayTypeTable *arrayTypeTable;
ObjectTypeTable *objectTypeTable;
bool fixArrayType(JSContext *cx, JSObject *obj);
bool fixObjectType(JSContext *cx, JSObject *obj);
void fixArrayType(JSContext *cx, JSObject *obj);
void fixObjectType(JSContext *cx, JSObject *obj);
/* Constraint solving worklist structures. */
@ -798,12 +798,12 @@ struct TypeCompartment
* Add the specified type to the specified set, do any necessary reanalysis
* stemming from the change and recompile any affected scripts.
*/
bool dynamicPush(JSContext *cx, JSScript *script, uint32 offset, jstype type);
bool dynamicAssign(JSContext *cx, JSObject *obj, jsid id, const Value &rval);
bool dynamicCall(JSContext *cx, JSObject *callee, const CallArgs &args, bool constructing);
void dynamicPush(JSContext *cx, JSScript *script, uint32 offset, jstype type);
void dynamicAssign(JSContext *cx, JSObject *obj, jsid id, const Value &rval);
void dynamicCall(JSContext *cx, JSObject *callee, const CallArgs &args, bool constructing);
bool nukeTypes(JSContext *cx);
bool processPendingRecompiles(JSContext *cx);
void nukeTypes(JSContext *cx);
void processPendingRecompiles(JSContext *cx);
/* Mark all types as needing destruction once inference has 'finished'. */
void setPendingNukeTypes(JSContext *cx);

View File

@ -237,11 +237,11 @@ JSContext::getTypeCallerInitObject(bool isArray)
return getTypeNewObject(isArray ? JSProto_Array : JSProto_Object);
}
inline bool
inline void
JSContext::markTypeCallerUnexpected(js::types::jstype type)
{
if (!typeInferenceEnabled())
return true;
return;
/*
* Check that we are actually at a scripted callsite. This function is
@ -254,14 +254,14 @@ JSContext::markTypeCallerUnexpected(js::types::jstype type)
js::StackFrame *caller = js_GetScriptedCaller(this, NULL);
if (!caller)
return true;
return;
/*
* Watch out if the caller is in a different compartment from this one.
* This must have gone through a cross-compartment wrapper.
*/
if (caller->script()->compartment != compartment)
return true;
return;
JSScript *script;
jsbytecode *pc = caller->inlinepc(this, &script);
@ -277,55 +277,55 @@ JSContext::markTypeCallerUnexpected(js::types::jstype type)
/* This is also used for handling custom iterators. */
break;
default:
return true;
return;
}
return script->typeMonitorResult(this, pc, type);
script->typeMonitorResult(this, pc, type);
}
inline bool
inline void
JSContext::markTypeCallerUnexpected(const js::Value &value)
{
if (!typeInferenceEnabled())
return true;
return markTypeCallerUnexpected(js::types::GetValueType(this, value));
if (typeInferenceEnabled())
markTypeCallerUnexpected(js::types::GetValueType(this, value));
}
inline bool
inline void
JSContext::markTypeCallerOverflow()
{
return markTypeCallerUnexpected(js::types::TYPE_DOUBLE);
markTypeCallerUnexpected(js::types::TYPE_DOUBLE);
}
inline bool
inline void
JSContext::addTypeProperty(js::types::TypeObject *obj, const char *name, js::types::jstype type)
{
if (typeInferenceEnabled() && !obj->unknownProperties()) {
jsid id = JSID_VOID;
if (name) {
JSAtom *atom = js_Atomize(this, name, strlen(name), 0);
if (!atom)
return false;
if (!atom) {
js::types::AutoEnterTypeInference enter(this);
compartment->types.setPendingNukeTypes(this);
return;
}
id = ATOM_TO_JSID(atom);
}
return addTypePropertyId(obj, id, type);
addTypePropertyId(obj, id, type);
}
return true;
}
inline bool
inline void
JSContext::addTypeProperty(js::types::TypeObject *obj, const char *name, const js::Value &value)
{
if (typeInferenceEnabled() && !obj->unknownProperties())
return addTypeProperty(obj, name, js::types::GetValueType(this, value));
return true;
addTypeProperty(obj, name, js::types::GetValueType(this, value));
}
inline bool
inline void
JSContext::addTypePropertyId(js::types::TypeObject *obj, jsid id, js::types::jstype type)
{
if (!typeInferenceEnabled() || obj->unknownProperties())
return true;
return;
/* Convert string index properties into the common index property. */
id = js::types::MakeTypeId(this, id);
@ -334,42 +334,37 @@ JSContext::addTypePropertyId(js::types::TypeObject *obj, jsid id, js::types::jst
js::types::TypeSet *types = obj->getProperty(this, id, true);
if (!types || types->hasType(type))
return true;
return;
js::types::InferSpew(js::types::ISpewOps, "externalType: property %s %s: %s",
obj->name(), js::types::TypeIdString(id),
js::types::TypeString(type));
types->addType(this, type);
return true;
}
inline bool
inline void
JSContext::addTypePropertyId(js::types::TypeObject *obj, jsid id, const js::Value &value)
{
if (typeInferenceEnabled() && !obj->unknownProperties())
return addTypePropertyId(obj, id, js::types::GetValueType(this, value));
return true;
addTypePropertyId(obj, id, js::types::GetValueType(this, value));
}
inline bool
inline void
JSContext::addTypePropertyId(js::types::TypeObject *obj, jsid id, js::types::ClonedTypeSet *set)
{
if (obj->unknownProperties())
return true;
return;
id = js::types::MakeTypeId(this, id);
js::types::AutoEnterTypeInference enter(this);
js::types::TypeSet *types = obj->getProperty(this, id, true);
if (!types)
return true;
return;
js::types::InferSpew(js::types::ISpewOps, "externalType: property %s %s",
obj->name(), js::types::TypeIdString(id));
types->addTypeSet(this, set);
return true;
}
inline js::types::TypeObject *
@ -378,11 +373,11 @@ JSContext::getTypeEmpty()
return &compartment->types.typeEmpty;
}
inline bool
inline void
JSContext::aliasTypeProperties(js::types::TypeObject *obj, jsid first, jsid second)
{
if (!typeInferenceEnabled() || obj->unknownProperties())
return true;
return;
js::types::AutoEnterTypeInference enter(this);
@ -392,72 +387,61 @@ JSContext::aliasTypeProperties(js::types::TypeObject *obj, jsid first, jsid seco
js::types::TypeSet *firstTypes = obj->getProperty(this, first, true);
js::types::TypeSet *secondTypes = obj->getProperty(this, second, true);
if (!firstTypes || !secondTypes)
return false;
return;
firstTypes->addBaseSubset(this, obj, secondTypes);
secondTypes->addBaseSubset(this, obj, firstTypes);
return true;
}
inline bool
inline void
JSContext::addTypeFlags(js::types::TypeObject *obj, js::types::TypeObjectFlags flags)
{
if (!typeInferenceEnabled() || obj->hasFlags(flags))
return true;
return;
js::types::AutoEnterTypeInference enter(this);
obj->setFlags(this, flags);
return true;
}
inline bool
inline void
JSContext::markTypeArrayNotPacked(js::types::TypeObject *obj, bool notDense)
{
return addTypeFlags(obj, js::types::OBJECT_FLAG_NON_PACKED_ARRAY |
(notDense ? js::types::OBJECT_FLAG_NON_DENSE_ARRAY : 0));
addTypeFlags(obj, js::types::OBJECT_FLAG_NON_PACKED_ARRAY |
(notDense ? js::types::OBJECT_FLAG_NON_DENSE_ARRAY : 0));
}
inline bool
inline void
JSContext::markTypeFunctionUninlineable(js::types::TypeObject *obj)
{
return addTypeFlags(obj, js::types::OBJECT_FLAG_UNINLINEABLE);
addTypeFlags(obj, js::types::OBJECT_FLAG_UNINLINEABLE);
}
inline bool
inline void
JSContext::markTypeObjectHasSpecialEquality(js::types::TypeObject *obj)
{
return addTypeFlags(obj, js::types::OBJECT_FLAG_SPECIAL_EQUALITY);
addTypeFlags(obj, js::types::OBJECT_FLAG_SPECIAL_EQUALITY);
}
inline bool
inline void
JSContext::markTypePropertyConfigured(js::types::TypeObject *obj, jsid id)
{
if (!typeInferenceEnabled())
return true;
if (obj->unknownProperties())
return true;
if (!typeInferenceEnabled() || obj->unknownProperties())
return;
id = js::types::MakeTypeId(this, id);
js::types::AutoEnterTypeInference enter(this);
js::types::TypeSet *types = obj->getProperty(this, id, true);
if (types)
types->setOwnProperty(this, true);
return true;
}
inline bool
inline void
JSContext::markGlobalReallocation(JSObject *obj)
{
JS_ASSERT(obj->isGlobal());
if (!typeInferenceEnabled())
return true;
if (obj->getType()->unknownProperties())
return true;
if (!typeInferenceEnabled() || obj->getType()->unknownProperties())
return;
js::types::AutoEnterTypeInference enter(this);
js::types::TypeSet *types = obj->getType()->getProperty(this, JSID_VOID, false);
@ -468,52 +452,50 @@ JSContext::markGlobalReallocation(JSObject *obj)
constraint = constraint->next;
}
}
return true;
}
inline bool
inline void
JSContext::markTypeObjectUnknownProperties(js::types::TypeObject *obj)
{
if (!typeInferenceEnabled() || obj->unknownProperties())
return true;
return;
js::types::AutoEnterTypeInference enter(this);
obj->markUnknown(this);
return true;
}
inline bool
inline void
JSContext::typeMonitorAssign(JSObject *obj, jsid id, const js::Value &rval)
{
if (typeInferenceEnabled())
return compartment->types.dynamicAssign(this, obj, id, rval);
return true;
compartment->types.dynamicAssign(this, obj, id, rval);
}
inline bool
inline void
JSContext::typeMonitorCall(const js::CallArgs &args, bool constructing)
{
if (!typeInferenceEnabled())
return true;
return;
JSObject *callee = &args.callee();
if (!callee->isFunction() || !callee->getFunctionPrivate()->isInterpreted())
return true;
return;
return compartment->types.dynamicCall(this, callee, args, constructing);
compartment->types.dynamicCall(this, callee, args, constructing);
}
inline bool
inline void
JSContext::fixArrayType(JSObject *obj)
{
return !typeInferenceEnabled() || compartment->types.fixArrayType(this, obj);
if (typeInferenceEnabled())
compartment->types.fixArrayType(this, obj);
}
inline bool
inline void
JSContext::fixObjectType(JSObject *obj)
{
return !typeInferenceEnabled() || compartment->types.fixObjectType(this, obj);
if (typeInferenceEnabled())
compartment->types.fixObjectType(this, obj);
}
/////////////////////////////////////////////////////////////////////
@ -625,53 +607,51 @@ JSScript::getTypeInitObject(JSContext *cx, const jsbytecode *pc, bool isArray)
return cx->compartment->types.newInitializerTypeObject(cx, this, offset, isArray);
}
inline bool
inline void
JSScript::typeMonitorResult(JSContext *cx, const jsbytecode *pc,
js::types::jstype type)
{
if (cx->typeInferenceEnabled())
return cx->compartment->types.dynamicPush(cx, this, pc - code, type);
return true;
cx->compartment->types.dynamicPush(cx, this, pc - code, type);
}
inline bool
inline void
JSScript::typeMonitorResult(JSContext *cx, const jsbytecode *pc, const js::Value &rval)
{
if (cx->typeInferenceEnabled())
return typeMonitorResult(cx, pc, js::types::GetValueType(cx, rval));
return true;
typeMonitorResult(cx, pc, js::types::GetValueType(cx, rval));
}
inline bool
inline void
JSScript::typeMonitorOverflow(JSContext *cx, const jsbytecode *pc)
{
return typeMonitorResult(cx, pc, js::types::TYPE_DOUBLE);
typeMonitorResult(cx, pc, js::types::TYPE_DOUBLE);
}
inline bool
inline void
JSScript::typeMonitorUndefined(JSContext *cx, const jsbytecode *pc)
{
return typeMonitorResult(cx, pc, js::types::TYPE_UNDEFINED);
typeMonitorResult(cx, pc, js::types::TYPE_UNDEFINED);
}
inline bool
inline void
JSScript::typeMonitorString(JSContext *cx, const jsbytecode *pc)
{
return typeMonitorResult(cx, pc, js::types::TYPE_STRING);
typeMonitorResult(cx, pc, js::types::TYPE_STRING);
}
inline bool
inline void
JSScript::typeMonitorUnknown(JSContext *cx, const jsbytecode *pc)
{
return typeMonitorResult(cx, pc, js::types::TYPE_UNKNOWN);
typeMonitorResult(cx, pc, js::types::TYPE_UNKNOWN);
}
inline bool
inline void
JSScript::typeSetThis(JSContext *cx, js::types::jstype type)
{
JS_ASSERT(cx->typeInferenceEnabled());
if (!ensureVarTypes(cx))
return false;
return;
/* Analyze the script regardless if -a was used. */
bool analyze = !(analysis_ && analysis_->ranInference()) &&
@ -687,42 +667,36 @@ JSScript::typeSetThis(JSContext *cx, js::types::jstype type)
if (analyze && !(analysis_ && analysis_->ranInference())) {
js::analyze::ScriptAnalysis *analysis = this->analysis(cx);
if (!analysis)
return false;
return;
analysis->analyzeTypes(cx);
}
return true;
}
return true;
}
inline bool
inline void
JSScript::typeSetThis(JSContext *cx, const js::Value &value)
{
if (cx->typeInferenceEnabled())
return typeSetThis(cx, js::types::GetValueType(cx, value));
return true;
typeSetThis(cx, js::types::GetValueType(cx, value));
}
inline bool
inline void
JSScript::typeSetThis(JSContext *cx, js::types::ClonedTypeSet *set)
{
JS_ASSERT(cx->typeInferenceEnabled());
if (!ensureVarTypes(cx))
return false;
return;
js::types::AutoEnterTypeInference enter(cx);
js::types::InferSpew(js::types::ISpewOps, "externalType: setThis #%u", id());
thisTypes()->addTypeSet(cx, set);
return true;
}
inline bool
inline void
JSScript::typeSetNewCalled(JSContext *cx)
{
if (!cx->typeInferenceEnabled() || calledWithNew)
return true;
return;
calledWithNew = true;
/*
@ -737,103 +711,86 @@ JSScript::typeSetNewCalled(JSContext *cx)
js::types::AutoEnterTypeInference enter(cx);
js::analyze::ScriptAnalysis *analysis = this->analysis(cx);
if (!analysis)
return false;
return;
analysis->analyzeTypesNew(cx);
}
return true;
}
inline bool
inline void
JSScript::typeSetLocal(JSContext *cx, unsigned local, js::types::jstype type)
{
if (!cx->typeInferenceEnabled())
return true;
if (!ensureVarTypes(cx))
return false;
if (!cx->typeInferenceEnabled() || !ensureVarTypes(cx))
return;
if (!localTypes(local)->hasType(type)) {
js::types::AutoEnterTypeInference enter(cx);
js::types::InferSpew(js::types::ISpewOps, "externalType: setLocal #%u %u: %s",
id(), local, js::types::TypeString(type));
localTypes(local)->addType(cx, type);
return true;
}
return true;
}
inline bool
inline void
JSScript::typeSetLocal(JSContext *cx, unsigned local, const js::Value &value)
{
if (cx->typeInferenceEnabled()) {
js::types::jstype type = js::types::GetValueType(cx, value);
return typeSetLocal(cx, local, type);
typeSetLocal(cx, local, type);
}
return true;
}
inline bool
inline void
JSScript::typeSetLocal(JSContext *cx, unsigned local, js::types::ClonedTypeSet *set)
{
JS_ASSERT(cx->typeInferenceEnabled());
if (!ensureVarTypes(cx))
return false;
return;
js::types::AutoEnterTypeInference enter(cx);
js::types::InferSpew(js::types::ISpewOps, "externalType: setLocal #%u %u", id(), local);
localTypes(local)->addTypeSet(cx, set);
return true;
}
inline bool
inline void
JSScript::typeSetArgument(JSContext *cx, unsigned arg, js::types::jstype type)
{
if (!cx->typeInferenceEnabled())
return true;
if (!ensureVarTypes(cx))
return false;
if (!cx->typeInferenceEnabled() || !ensureVarTypes(cx))
return;
if (!argTypes(arg)->hasType(type)) {
js::types::AutoEnterTypeInference enter(cx);
js::types::InferSpew(js::types::ISpewOps, "externalType: setArg #%u %u: %s",
id(), arg, js::types::TypeString(type));
argTypes(arg)->addType(cx, type);
return true;
}
return true;
}
inline bool
inline void
JSScript::typeSetArgument(JSContext *cx, unsigned arg, const js::Value &value)
{
if (cx->typeInferenceEnabled()) {
js::types::jstype type = js::types::GetValueType(cx, value);
return typeSetArgument(cx, arg, type);
typeSetArgument(cx, arg, type);
}
return true;
}
inline bool
inline void
JSScript::typeSetArgument(JSContext *cx, unsigned arg, js::types::ClonedTypeSet *set)
{
JS_ASSERT(cx->typeInferenceEnabled());
if (!ensureVarTypes(cx))
return false;
return;
js::types::AutoEnterTypeInference enter(cx);
js::types::InferSpew(js::types::ISpewOps, "externalType: setArg #%u %u", id(), arg);
argTypes(arg)->addTypeSet(cx, set);
return true;
}
inline bool
inline void
JSScript::typeSetUpvar(JSContext *cx, unsigned upvar, const js::Value &value)
{
if (!cx->typeInferenceEnabled())
return true;
if (!ensureVarTypes(cx))
return false;
if (!cx->typeInferenceEnabled() || !ensureVarTypes(cx))
return;
js::types::jstype type = js::types::GetValueType(cx, value);
if (!upvarTypes(upvar)->hasType(type)) {
js::types::AutoEnterTypeInference enter(cx);
@ -841,10 +798,7 @@ JSScript::typeSetUpvar(JSContext *cx, unsigned upvar, const js::Value &value)
js::types::InferSpew(js::types::ISpewOps, "externalType: setUpvar #%u %u: %s",
id(), upvar, js::types::TypeString(type));
upvarTypes(upvar)->addType(cx, type);
return true;
}
return true;
}
namespace js {
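All of the conversions above follow one pattern: type-update hooks that used to return a success bool now return void, with any internal failure (for instance OOM while growing a type set) presumably absorbed inside the inference code instead of being threaded through every caller. A caller-side sketch of the simplification, using a call that appears later in this diff:

    // Before: each call site carried an error path for a result
    // that only ever reported success.
    if (!cx->markTypeObjectUnknownProperties(obj->getType()))
        return NULL;

    // After: the hook is infallible and the check disappears.
    cx->markTypeObjectUnknownProperties(obj->getType());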

View File

@ -515,8 +515,7 @@ js_OnUnknownMethod(JSContext *cx, Value *vp)
AutoValueRooter tvr(cx);
if (!js_GetMethod(cx, obj, id, JSGET_NO_METHOD_BARRIER, tvr.addr()))
return false;
if (!cx->fp()->script()->typeMonitorUnknown(cx, cx->regs().pc))
return false;
cx->fp()->script()->typeMonitorUnknown(cx, cx->regs().pc);
if (tvr.value().isPrimitive()) {
vp[0] = tvr.value();
@ -641,8 +640,7 @@ Invoke(JSContext *cx, const CallArgs &argsRef, ConstructOption option)
js_ReportIsNotFunction(cx, &args.calleev(), ToReportFlags(option));
return false;
}
if (!cx->markTypeCallerUnexpected(types::TYPE_UNKNOWN))
return false;
cx->markTypeCallerUnexpected(types::TYPE_UNKNOWN);
return CallJSNative(cx, clasp->call, args.argc(), args.base());
}
@ -668,8 +666,7 @@ Invoke(JSContext *cx, const CallArgs &argsRef, ConstructOption option)
return true;
}
if (!cx->typeMonitorCall(args, option == INVOKE_CONSTRUCTOR))
return false;
cx->typeMonitorCall(args, option == INVOKE_CONSTRUCTOR);
/* Get pointer to new frame/slots, prepare arguments. */
uint32 flags = ToFrameFlags(option);
@ -755,12 +752,9 @@ InvokeSessionGuard::start(JSContext *cx, const Value &calleev, const Value &this
* possible values the InvokeSession's client could pass in.
*/
jstype type = GetValueType(cx, thisv);
if (!script_->typeSetThis(cx, type))
return false;
for (unsigned i = 0; i < fun->nargs; i++) {
if (!script_->typeSetArgument(cx, i, TYPE_UNKNOWN))
return false;
}
script_->typeSetThis(cx, type);
for (unsigned i = 0; i < fun->nargs; i++)
script_->typeSetArgument(cx, i, TYPE_UNKNOWN);
}
#ifdef JS_METHODJIT
@ -988,8 +982,7 @@ Execute(JSContext *cx, JSObject &chain, JSScript *script,
if (cx->typeInferenceEnabled()) {
jstype type = GetValueType(cx, frame.fp()->thisValue());
if (!script->typeSetThis(cx, type))
return false;
script->typeSetThis(cx, type);
}
/* Run script until JSOP_STOP or error. */
@ -1275,8 +1268,7 @@ InvokeConstructor(JSContext *cx, const CallArgs &argsRef)
return true;
}
if (clasp->construct) {
if (!cx->markTypeCallerUnexpected(types::TYPE_UNKNOWN))
return false;
cx->markTypeCallerUnexpected(types::TYPE_UNKNOWN);
args.thisv().setMagicWithObjectOrNullPayload(NULL);
return CallJSNativeConstructor(cx, clasp->construct, args.argc(), args.base());
}
@ -2562,6 +2554,8 @@ Interpret(JSContext *cx, StackFrame *entryFrame, uintN inlineCallCount, InterpMo
goto error;
}
JS_ASSERT_IF(interpMode == JSINTERP_SKIP_TRAP, JSOp(*regs.pc) == JSOP_TRAP);
CHECK_INTERRUPT_HANDLER();
RESET_USE_METHODJIT();
@ -3152,8 +3146,7 @@ BEGIN_CASE(JSOP_FORGNAME)
JS_ASSERT(regs.sp[-1].isObject());
if (!IteratorNext(cx, &regs.sp[-1].toObject(), tvr.addr()))
goto error;
if (!cx->typeMonitorAssign(obj, id, tvr.value()))
goto error;
cx->typeMonitorAssign(obj, id, tvr.value());
if (!obj->setProperty(cx, id, tvr.addr(), script->strictModeCode))
goto error;
}
@ -3173,8 +3166,7 @@ BEGIN_CASE(JSOP_FORPROP)
JS_ASSERT(regs.sp[-2].isObject());
if (!IteratorNext(cx, &regs.sp[-2].toObject(), tvr.addr()))
goto error;
if (!cx->typeMonitorAssign(obj, id, tvr.value()))
goto error;
cx->typeMonitorAssign(obj, id, tvr.value());
if (!obj->setProperty(cx, id, tvr.addr(), script->strictModeCode))
goto error;
}
@ -3301,9 +3293,7 @@ BEGIN_CASE(JSOP_SETCONST)
JSObject &obj = cx->stack.currentVarObj();
const Value &ref = regs.sp[-1];
if (!cx->typeMonitorAssign(&obj, ATOM_TO_JSID(atom), ref))
goto error;
cx->typeMonitorAssign(&obj, ATOM_TO_JSID(atom), ref);
if (!obj.defineProperty(cx, ATOM_TO_JSID(atom), ref,
PropertyStub, StrictPropertyStub,
JSPROP_ENUMERATE | JSPROP_PERMANENT | JSPROP_READONLY)) {
@ -3320,8 +3310,7 @@ BEGIN_CASE(JSOP_ENUMCONSTELEM)
FETCH_OBJECT(cx, -2, obj);
jsid id;
FETCH_ELEMENT_ID(obj, -1, id);
if (!cx->typeMonitorAssign(obj, id, ref))
goto error;
cx->typeMonitorAssign(obj, id, ref);
if (!obj->defineProperty(cx, id, ref,
PropertyStub, StrictPropertyStub,
JSPROP_ENUMERATE | JSPROP_PERMANENT | JSPROP_READONLY)) {
@ -3570,8 +3559,8 @@ BEGIN_CASE(JSOP_URSH)
u >>= (j & 31);
regs.sp--;
if (!regs.sp[-1].setNumber(uint32(u)) && !script->typeMonitorOverflow(cx, regs.pc))
goto error;
if (!regs.sp[-1].setNumber(uint32(u)))
script->typeMonitorOverflow(cx, regs.pc);
}
END_CASE(JSOP_URSH)
@ -3586,8 +3575,7 @@ BEGIN_CASE(JSOP_ADD)
regs.sp--;
if (JS_UNLIKELY(bool((l ^ sum) & (r ^ sum) & 0x80000000))) {
regs.sp[-1].setDouble(double(l) + double(r));
if (!script->typeMonitorOverflow(cx, regs.pc))
goto error;
script->typeMonitorOverflow(cx, regs.pc);
} else {
regs.sp[-1].setInt32(sum);
}
@ -3598,8 +3586,7 @@ BEGIN_CASE(JSOP_ADD)
goto error;
regs.sp--;
regs.sp[-1] = rval;
if (!script->typeMonitorUnknown(cx, regs.pc))
goto error;
script->typeMonitorUnknown(cx, regs.pc);
} else
#endif
{
@ -3630,8 +3617,8 @@ BEGIN_CASE(JSOP_ADD)
JSString *str = js_ConcatStrings(cx, lstr, rstr);
if (!str)
goto error;
if ((lIsObject || rIsObject) && !script->typeMonitorString(cx, regs.pc))
goto error;
if (lIsObject || rIsObject)
script->typeMonitorString(cx, regs.pc);
regs.sp--;
regs.sp[-1].setString(str);
} else {
@ -3641,9 +3628,8 @@ BEGIN_CASE(JSOP_ADD)
l += r;
regs.sp--;
if (!regs.sp[-1].setNumber(l) &&
(lIsObject || rIsObject || (!lval.isDouble() && !rval.isDouble())) &&
!script->typeMonitorOverflow(cx, regs.pc)) {
goto error;
(lIsObject || rIsObject || (!lval.isDouble() && !rval.isDouble()))) {
script->typeMonitorOverflow(cx, regs.pc);
}
}
}
@ -3662,9 +3648,8 @@ END_CASE(JSOP_ADD)
double d = d1 OP d2; \
regs.sp--; \
if (!regs.sp[-1].setNumber(d) && \
!(lval.isDouble() || rval.isDouble()) && \
!script->typeMonitorOverflow(cx, regs.pc)) { \
goto error; \
!(lval.isDouble() || rval.isDouble())) { \
script->typeMonitorOverflow(cx, regs.pc); \
} \
JS_END_MACRO
@ -3703,14 +3688,12 @@ BEGIN_CASE(JSOP_DIV)
else
vp = &rt->positiveInfinityValue;
regs.sp[-1] = *vp;
if (!script->typeMonitorOverflow(cx, regs.pc))
goto error;
script->typeMonitorOverflow(cx, regs.pc);
} else {
d1 /= d2;
if (!regs.sp[-1].setNumber(d1) &&
!(lval.isDouble() || rval.isDouble()) &&
!script->typeMonitorOverflow(cx, regs.pc)) {
goto error;
!(lval.isDouble() || rval.isDouble())) {
script->typeMonitorOverflow(cx, regs.pc);
}
}
}
@ -3739,8 +3722,7 @@ BEGIN_CASE(JSOP_MOD)
d1 = js_fmod(d1, d2);
regs.sp[-1].setDouble(d1);
}
if (!script->typeMonitorOverflow(cx, regs.pc))
goto error;
script->typeMonitorOverflow(cx, regs.pc);
}
}
END_CASE(JSOP_MOD)
@ -3781,10 +3763,8 @@ BEGIN_CASE(JSOP_NEG)
if (!ValueToNumber(cx, regs.sp[-1], &d))
goto error;
d = -d;
if (!regs.sp[-1].setNumber(d) && !ref.isDouble() &&
!script->typeMonitorOverflow(cx, regs.pc)) {
goto error;
}
if (!regs.sp[-1].setNumber(d) && !ref.isDouble())
script->typeMonitorOverflow(cx, regs.pc);
}
}
END_CASE(JSOP_NEG)
@ -3792,8 +3772,8 @@ END_CASE(JSOP_NEG)
BEGIN_CASE(JSOP_POS)
if (!ValueToNumber(cx, &regs.sp[-1]))
goto error;
if (!regs.sp[-1].isInt32() && !script->typeMonitorOverflow(cx, regs.pc))
goto error;
if (!regs.sp[-1].isInt32())
script->typeMonitorOverflow(cx, regs.pc);
END_CASE(JSOP_POS)
BEGIN_CASE(JSOP_DELNAME)
@ -3898,8 +3878,8 @@ BEGIN_CASE(JSOP_PROPDEC)
FETCH_OBJECT(cx, i, obj);
if (JSID_IS_VOID(id)) {
FETCH_ELEMENT_ID(obj, -1, id);
if (!JSID_IS_INT(id) && !script->typeMonitorUnknown(cx, regs.pc))
goto error;
if (!JSID_IS_INT(id))
script->typeMonitorUnknown(cx, regs.pc);
}
goto do_incop;
@ -3963,10 +3943,8 @@ do_incop:
* typeMonitorUndefined does not capture the value being pushed here
* during compound operations in the method JIT.
*/
if (regs.sp[-1].isUndefined() &&
!cx->addTypePropertyId(obj->getType(), id, types::TYPE_UNDEFINED)) {
goto error;
}
if (regs.sp[-1].isUndefined())
cx->addTypePropertyId(obj->getType(), id, types::TYPE_UNDEFINED);
const JSCodeSpec *cs = &js_CodeSpec[op];
JS_ASSERT(cs->ndefs == 1);
@ -4003,10 +3981,8 @@ do_incop:
if (!js_DoIncDec(cx, cs, &regs.sp[-2], &regs.sp[-1]))
goto error;
if (!cx->typeMonitorAssign(obj, id, regs.sp[-1]))
goto error;
if (!script->typeMonitorOverflow(cx, regs.pc))
goto error;
cx->typeMonitorAssign(obj, id, regs.sp[-1]);
script->typeMonitorOverflow(cx, regs.pc);
{
JSAutoResolveFlags rf(cx, setPropFlags);
@ -4081,8 +4057,7 @@ BEGIN_CASE(JSOP_LOCALINC)
PUSH_COPY(*vp);
if (!js_DoIncDec(cx, &js_CodeSpec[op], &regs.sp[-1], vp))
goto error;
if (!script->typeMonitorOverflow(cx, regs.pc))
goto error;
script->typeMonitorOverflow(cx, regs.pc);
}
len = JSOP_INCARG_LENGTH;
JS_ASSERT(len == js_CodeSpec[op].length);
@ -4181,8 +4156,8 @@ BEGIN_CASE(JSOP_LENGTH)
}
} while (0);
if (rval.isUndefined() && !script->typeMonitorUndefined(cx, regs.pc))
goto error;
if (rval.isUndefined())
script->typeMonitorUndefined(cx, regs.pc);
regs.sp[-1] = rval;
assertSameCompartment(cx, regs.sp[-1]);
@ -4277,8 +4252,8 @@ BEGIN_CASE(JSOP_CALLPROP)
goto error;
}
#endif
if (rval.isUndefined() && !script->typeMonitorUndefined(cx, regs.pc))
goto error;
if (rval.isUndefined())
script->typeMonitorUndefined(cx, regs.pc);
}
END_CASE(JSOP_CALLPROP)
@ -4302,8 +4277,7 @@ BEGIN_CASE(JSOP_SETMETHOD)
JS_ASSERT_IF(op == JSOP_SETGNAME, obj == regs.fp()->scopeChain().getGlobal());
jsid id = ATOM_TO_JSID(atoms[GET_INDEX(regs.pc)]);
if (!cx->typeMonitorAssign(obj, id, rval))
goto error;
cx->typeMonitorAssign(obj, id, rval);
do {
PropertyCache *cache = &JS_PROPERTY_CACHE(cx);
@ -4511,17 +4485,15 @@ BEGIN_CASE(JSOP_GETELEM)
goto error;
copyFrom = &rval;
if (!JSID_IS_INT(id) && !script->typeMonitorUnknown(cx, regs.pc))
goto error;
if (!JSID_IS_INT(id))
script->typeMonitorUnknown(cx, regs.pc);
end_getelem:
regs.sp--;
regs.sp[-1] = *copyFrom;
assertSameCompartment(cx, regs.sp[-1]);
if (copyFrom->isUndefined()) {
if (!script->typeMonitorUndefined(cx, regs.pc))
goto error;
}
if (copyFrom->isUndefined())
script->typeMonitorUndefined(cx, regs.pc);
}
END_CASE(JSOP_GETELEM)
@ -4554,10 +4526,8 @@ BEGIN_CASE(JSOP_CALLELEM)
regs.sp[-1] = thisv;
}
if ((regs.sp[-2].isUndefined() || !JSID_IS_INT(id)) &&
!script->typeMonitorUnknown(cx, regs.pc)) {
goto error;
}
if (regs.sp[-2].isUndefined() || !JSID_IS_INT(id))
script->typeMonitorUnknown(cx, regs.pc);
}
END_CASE(JSOP_CALLELEM)
@ -4569,8 +4539,7 @@ BEGIN_CASE(JSOP_SETHOLE)
jsid id;
FETCH_ELEMENT_ID(obj, -2, id);
Value rval;
if (!cx->typeMonitorAssign(obj, id, regs.sp[-1]))
goto error;
cx->typeMonitorAssign(obj, id, regs.sp[-1]);
do {
if (obj->isDenseArray() && JSID_IS_INT(id)) {
jsuint length = obj->getDenseArrayInitializedLength();
@ -4605,8 +4574,7 @@ BEGIN_CASE(JSOP_ENUMELEM)
jsid id;
FETCH_ELEMENT_ID(obj, -1, id);
Value rval = regs.sp[-3];
if (!cx->typeMonitorAssign(obj, id, rval))
goto error;
cx->typeMonitorAssign(obj, id, rval);
if (!obj->setProperty(cx, id, &rval, script->strictModeCode))
goto error;
regs.sp -= 3;
@ -4701,8 +4669,7 @@ BEGIN_CASE(JSOP_FUNCALL)
goto error;
}
if (!cx->typeMonitorCall(CallArgsFromVp(argc, vp), flags & StackFrame::CONSTRUCTING))
goto error;
cx->typeMonitorCall(CallArgsFromVp(argc, vp), flags & StackFrame::CONSTRUCTING);
bool newType = (flags & StackFrame::CONSTRUCTING) &&
cx->typeInferenceEnabled() && UseNewType(cx, script, regs.pc);
@ -4833,10 +4800,8 @@ BEGIN_CASE(JSOP_CALLNAME)
PUSH_COPY(rval);
}
if (op == JSOP_NAME || op == JSOP_CALLNAME) {
if (!script->typeMonitorResult(cx, regs.pc, regs.sp[-1]))
goto error;
}
if (op == JSOP_NAME || op == JSOP_CALLNAME)
script->typeMonitorResult(cx, regs.pc, regs.sp[-1]);
JS_ASSERT(obj->isGlobal() || IsCacheableNonGlobalScope(obj));
if (op == JSOP_CALLNAME || op == JSOP_CALLGNAME)
@ -4855,8 +4820,7 @@ BEGIN_CASE(JSOP_CALLNAME)
JSOp op2 = js_GetOpcode(cx, script, regs.pc + JSOP_NAME_LENGTH);
if (op2 == JSOP_TYPEOF) {
PUSH_UNDEFINED();
if (!script->typeMonitorUndefined(cx, regs.pc))
goto error;
script->typeMonitorUndefined(cx, regs.pc);
len = JSOP_NAME_LENGTH;
DO_NEXT_OP(len);
}
@ -4877,13 +4841,10 @@ BEGIN_CASE(JSOP_CALLNAME)
}
PUSH_COPY(rval);
if (op == JSOP_NAME || op == JSOP_CALLNAME) {
if (!script->typeMonitorResult(cx, regs.pc, rval))
goto error;
} else if (rval.isUndefined()) {
if (!script->typeMonitorUndefined(cx, regs.pc))
goto error;
}
if (op == JSOP_NAME || op == JSOP_CALLNAME)
script->typeMonitorResult(cx, regs.pc, rval);
else if (rval.isUndefined())
script->typeMonitorUndefined(cx, regs.pc);
/* obj must be on the scope chain, thus not a function. */
if (op == JSOP_CALLNAME || op == JSOP_CALLGNAME)
@ -5147,6 +5108,12 @@ END_VARLEN_CASE
BEGIN_CASE(JSOP_TRAP)
{
if (interpMode == JSINTERP_SKIP_TRAP) {
interpMode = JSINTERP_SAFEPOINT;
op = JS_GetTrapOpcode(cx, script, regs.pc);
DO_OP();
}
Value rval;
JSTrapStatus status = JS_HandleTrap(cx, script, regs.pc, Jsvalify(&rval));
switch (status) {
@ -5308,8 +5275,8 @@ BEGIN_CASE(JSOP_CALLGLOBAL)
JSObject *obj = regs.fp()->scopeChain().getGlobal();
JS_ASSERT(obj->containsSlot(slot));
PUSH_COPY(obj->getSlot(slot));
if (regs.sp[-1].isUndefined() && !script->typeMonitorUndefined(cx, regs.pc))
goto error;
if (regs.sp[-1].isUndefined())
script->typeMonitorUndefined(cx, regs.pc);
if (op == JSOP_CALLGLOBAL)
PUSH_UNDEFINED();
}
@ -5428,9 +5395,7 @@ BEGIN_CASE(JSOP_DEFFUN)
goto error;
Value rval = ObjectValue(*obj);
if (!cx->typeMonitorAssign(parent, id, rval))
goto error;
cx->typeMonitorAssign(parent, id, rval);
do {
/* Steps 5d, 5f. */
@ -5497,9 +5462,7 @@ BEGIN_CASE(JSOP_DEFFUN_DBGFC)
jsid id = ATOM_TO_JSID(fun->atom);
if (!CheckRedeclaration(cx, &parent, id, attrs))
goto error;
if (!cx->typeMonitorAssign(&parent, id, rval))
goto error;
cx->typeMonitorAssign(&parent, id, rval);
if ((attrs == JSPROP_ENUMERATE)
? !parent.setProperty(cx, id, &rval, script->strictModeCode)
@ -5836,9 +5799,7 @@ BEGIN_CASE(JSOP_SETTER)
if (!CheckRedeclaration(cx, obj, id, attrs))
goto error;
if (!cx->addTypePropertyId(obj->getType(), id, TYPE_UNKNOWN))
goto error;
cx->addTypePropertyId(obj->getType(), id, TYPE_UNKNOWN);
if (!obj->defineProperty(cx, id, UndefinedValue(), getter, setter, attrs))
goto error;
@ -5968,8 +5929,7 @@ BEGIN_CASE(JSOP_INITMETHOD)
JS_ASSERT(slot == shape->slot);
}
if (!cx->typeMonitorAssign(obj, shape->id, rval))
goto error;
cx->typeMonitorAssign(obj, shape->id, rval);
/* A new object, or one we just extended in a recent initprop op. */
JS_ASSERT(!obj->lastProperty() ||
@ -5991,8 +5951,7 @@ BEGIN_CASE(JSOP_INITMETHOD)
LOAD_ATOM(0, atom);
jsid id = ATOM_TO_JSID(atom);
if (!cx->typeMonitorAssign(obj, id, rval))
goto error;
cx->typeMonitorAssign(obj, id, rval);
uintN defineHow = (op == JSOP_INITMETHOD)
? JSDNP_CACHE_RESULT | JSDNP_SET_METHOD
@ -6040,8 +5999,7 @@ BEGIN_CASE(JSOP_INITELEM)
goto error;
}
} else {
if (!cx->typeMonitorAssign(obj, id, rref))
goto error;
cx->typeMonitorAssign(obj, id, rref);
if (!obj->defineProperty(cx, id, rref, NULL, NULL, JSPROP_ENUMERATE))
goto error;
}
@ -6401,8 +6359,7 @@ BEGIN_CASE(JSOP_SETXMLNAME)
Value rval = regs.sp[-1];
jsid id;
FETCH_ELEMENT_ID(obj, -2, id);
if (!cx->typeMonitorAssign(obj, id, rval))
goto error;
cx->typeMonitorAssign(obj, id, rval);
if (!obj->setProperty(cx, id, &rval, script->strictModeCode))
goto error;
rval = regs.sp[-1];
@ -6691,8 +6648,7 @@ BEGIN_CASE(JSOP_ARRAYPUSH)
JS_ASSERT(script->nfixed <= slot);
JS_ASSERT(slot < script->nslots);
JSObject *obj = &regs.fp()->slots()[slot].toObject();
if (!cx->typeMonitorAssign(obj, JSID_VOID, regs.sp[-1]))
goto error;
cx->typeMonitorAssign(obj, JSID_VOID, regs.sp[-1]);
if (!js_ArrayCompPush(cx, obj, regs.sp[-1]))
goto error;
regs.sp--;
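The interpreter hunks above apply the same rewrite mechanically: where a monitoring call was fused into an error check, the value test survives and only the monitor's status is dropped, as in the JSOP_URSH case:

    if (!regs.sp[-1].setNumber(uint32(u)))        // result did not fit in an int32
        script->typeMonitorOverflow(cx, regs.pc); // advisory; cannot fail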

View File

@ -249,7 +249,8 @@ enum InterpMode
JSINTERP_NORMAL = 0, /* interpreter is running normally */
JSINTERP_RECORD = 1, /* interpreter has been started to record/run traces */
JSINTERP_SAFEPOINT = 2, /* interpreter should leave on a method JIT safe point */
JSINTERP_PROFILE = 3 /* interpreter should profile a loop */
JSINTERP_PROFILE = 3, /* interpreter should profile a loop */
JSINTERP_SKIP_TRAP = 4 /* as SAFEPOINT, but skip trap at first opcode */
};
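A sketch of the intended use of the new mode; the call below is illustrative rather than a line from this patch. When a discarded JIT frame rejoins the interpreter at a pc carrying a trap, entering in JSINTERP_SKIP_TRAP keeps that trap from firing a second time; the JSOP_TRAP hunk above downgrades the mode to JSINTERP_SAFEPOINT and executes the original opcode instead:

    // Resume the existing frame without re-firing the trap at regs.pc.
    if (!Interpret(cx, fp, inlineCallCount, JSINTERP_SKIP_TRAP))
        return false;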
/*

View File

@ -382,8 +382,8 @@ GetCustomIterator(JSContext *cx, JSObject *obj, uintN flags, Value *vp)
* If an Iterator object is used in a for loop then the values fetched in
* that loop are unknown, whether there is a custom __iterator__ or not.
*/
if (!(flags & JSITER_OWNONLY) && !cx->markTypeCallerUnexpected(types::TYPE_UNKNOWN))
return false;
if (!(flags & JSITER_OWNONLY))
cx->markTypeCallerUnexpected(types::TYPE_UNKNOWN);
/* Otherwise call it and return that object. */
LeaveTrace(cx);
@ -1248,8 +1248,7 @@ SendToGenerator(JSContext *cx, JSGeneratorOp op, JSObject *obj,
jsbytecode *yieldpc = gen->regs.pc - JSOP_YIELD_LENGTH;
JS_ASSERT(JSOp(*yieldpc) == JSOP_YIELD);
if (!script->typeMonitorUnknown(cx, yieldpc))
return JS_FALSE;
script->typeMonitorUnknown(cx, yieldpc);
/*
* Store the argument to send as the result of the yield
@ -1495,8 +1494,7 @@ js_InitIteratorClasses(JSContext *cx, JSObject *obj)
if (!proto)
return NULL;
if (!cx->addTypeProperty(obj->getType(), js_StopIteration_str, ObjectValue(*proto)))
return NULL;
cx->addTypeProperty(obj->getType(), js_StopIteration_str, ObjectValue(*proto));
return proto;
}

View File

@ -126,14 +126,15 @@ js_math_abs(JSContext *cx, uintN argc, Value *vp)
if (argc == 0) {
vp->setDouble(js_NaN);
return cx->markTypeCallerOverflow();
cx->markTypeCallerOverflow();
return JS_TRUE;
}
if (!ValueToNumber(cx, vp[2], &x))
return JS_FALSE;
z = fabs(x);
vp->setNumber(z);
if (!vp[2].isDouble() && vp->isDouble() && !cx->markTypeCallerOverflow())
return false;
if (!vp[2].isDouble() && vp->isDouble())
cx->markTypeCallerOverflow();
return JS_TRUE;
}
@ -271,14 +272,15 @@ js_math_ceil(JSContext *cx, uintN argc, Value *vp)
if (argc == 0) {
vp->setDouble(js_NaN);
return cx->markTypeCallerOverflow();
cx->markTypeCallerOverflow();
return JS_TRUE;
}
if (!ValueToNumber(cx, vp[2], &x))
return JS_FALSE;
z = js_math_ceil_impl(x);
vp->setNumber(z);
if (!vp->isInt32() && !cx->markTypeCallerOverflow())
return false;
if (!vp->isInt32())
cx->markTypeCallerOverflow();
return JS_TRUE;
}
@ -347,14 +349,15 @@ js_math_floor(JSContext *cx, uintN argc, Value *vp)
if (argc == 0) {
vp->setDouble(js_NaN);
return cx->markTypeCallerOverflow();
cx->markTypeCallerOverflow();
return JS_TRUE;
}
if (!ValueToNumber(cx, vp[2], &x))
return JS_FALSE;
z = js_math_floor_impl(x);
vp->setNumber(z);
if (!vp->isInt32() && !cx->markTypeCallerOverflow())
return false;
if (!vp->isInt32())
cx->markTypeCallerOverflow();
return JS_TRUE;
}
@ -392,7 +395,8 @@ js_math_max(JSContext *cx, uintN argc, Value *vp)
if (argc == 0) {
vp->setDouble(js_NegativeInfinity);
return cx->markTypeCallerOverflow();
cx->markTypeCallerOverflow();
return JS_TRUE;
}
argv = vp + 2;
bool expectDouble = false;
@ -402,7 +406,8 @@ js_math_max(JSContext *cx, uintN argc, Value *vp)
return JS_FALSE;
if (JSDOUBLE_IS_NaN(x)) {
vp->setDouble(js_NaN);
return cx->markTypeCallerOverflow();
cx->markTypeCallerOverflow();
return JS_TRUE;
}
if (x == 0 && x == z) {
if (js_copysign(1.0, z) == -1)
@ -412,7 +417,7 @@ js_math_max(JSContext *cx, uintN argc, Value *vp)
}
}
if (!vp->setNumber(z) && !expectDouble)
return cx->markTypeCallerOverflow();
cx->markTypeCallerOverflow();
return JS_TRUE;
}
@ -425,7 +430,8 @@ js_math_min(JSContext *cx, uintN argc, Value *vp)
if (argc == 0) {
vp->setDouble(js_PositiveInfinity);
return cx->markTypeCallerOverflow();
cx->markTypeCallerOverflow();
return JS_TRUE;
}
argv = vp + 2;
bool expectDouble = false;
@ -435,7 +441,8 @@ js_math_min(JSContext *cx, uintN argc, Value *vp)
return JS_FALSE;
if (JSDOUBLE_IS_NaN(x)) {
vp->setDouble(js_NaN);
return cx->markTypeCallerOverflow();
cx->markTypeCallerOverflow();
return JS_TRUE;
}
if (x == 0 && x == z) {
if (js_copysign(1.0, x) == -1)
@ -445,7 +452,7 @@ js_math_min(JSContext *cx, uintN argc, Value *vp)
}
}
if (!vp->setNumber(z) && !expectDouble)
return cx->markTypeCallerOverflow();
cx->markTypeCallerOverflow();
return JS_TRUE;
}
@ -484,7 +491,8 @@ js_math_pow(JSContext *cx, uintN argc, Value *vp)
if (argc <= 1) {
vp->setDouble(js_NaN);
return cx->markTypeCallerOverflow();
cx->markTypeCallerOverflow();
return JS_TRUE;
}
bool expectDouble = vp[2].isDouble() || vp[3].isDouble();
if (!ValueToNumber(cx, vp[2], &x))
@ -498,11 +506,15 @@ js_math_pow(JSContext *cx, uintN argc, Value *vp)
if (JSDOUBLE_IS_FINITE(x) && x != 0.0) {
if (y == 0.5) {
vp->setNumber(sqrt(x));
return expectDouble || cx->markTypeCallerOverflow();
if (!expectDouble)
cx->markTypeCallerOverflow();
return JS_TRUE;
}
if (y == -0.5) {
vp->setNumber(1.0/sqrt(x));
return expectDouble || cx->markTypeCallerOverflow();
if (!expectDouble)
cx->markTypeCallerOverflow();
return JS_TRUE;
}
}
/*
@ -511,7 +523,9 @@ js_math_pow(JSContext *cx, uintN argc, Value *vp)
*/
if (!JSDOUBLE_IS_FINITE(y) && (x == 1.0 || x == -1.0)) {
vp->setDouble(js_NaN);
return expectDouble || cx->markTypeCallerOverflow();
if (!expectDouble)
cx->markTypeCallerOverflow();
return JS_TRUE;
}
/* pow(x, +-0) is always 1, even for x = NaN. */
if (y == 0) {
@ -525,7 +539,7 @@ js_math_pow(JSContext *cx, uintN argc, Value *vp)
z = pow(x, y);
if (!vp->setNumber(z) && !expectDouble)
return cx->markTypeCallerOverflow();
cx->markTypeCallerOverflow();
return JS_TRUE;
}
@ -610,14 +624,15 @@ js_math_round(JSContext *cx, uintN argc, Value *vp)
if (argc == 0) {
vp->setDouble(js_NaN);
return cx->markTypeCallerOverflow();
cx->markTypeCallerOverflow();
return JS_TRUE;
}
if (!ValueToNumber(cx, vp[2], &x))
return JS_FALSE;
z = js_copysign(floor(x + 0.5), x);
vp->setNumber(z);
if (!vp->isInt32() && !cx->markTypeCallerOverflow())
return false;
if (!vp->isInt32())
cx->markTypeCallerOverflow();
return JS_TRUE;
}
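In these math natives the old statement "return cx->markTypeCallerOverflow();" conflated two unrelated things: recording that the caller may now see a double, and reporting the native's own success. The new shape keeps them separate:

    if (argc == 0) {
        vp->setDouble(js_NaN);
        cx->markTypeCallerOverflow();   // advisory type-inference update
        return JS_TRUE;                 // the native itself succeeded
    }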

View File

@ -424,7 +424,8 @@ num_parseInt(JSContext *cx, uintN argc, Value *vp)
/* Fast paths and exceptional cases. */
if (argc == 0) {
vp->setDouble(js_NaN);
return cx->markTypeCallerOverflow();
cx->markTypeCallerOverflow();
return true;
}
if (argc == 1 || (vp[3].isInt32() && (vp[3].toInt32() == 0 || vp[3].toInt32() == 10))) {
@ -434,8 +435,8 @@ num_parseInt(JSContext *cx, uintN argc, Value *vp)
}
if (vp[2].isDouble()) {
vp->setNumber(ParseIntDoubleHelper(vp[2].toDouble()));
if (!vp->isInt32() && !cx->markTypeCallerOverflow())
return false;
if (!vp->isInt32())
cx->markTypeCallerOverflow();
return true;
}
}
@ -455,7 +456,8 @@ num_parseInt(JSContext *cx, uintN argc, Value *vp)
if (radix != 0) {
if (radix < 2 || radix > 36) {
vp->setDouble(js_NaN);
return cx->markTypeCallerOverflow();
cx->markTypeCallerOverflow();
return true;
}
if (radix != 16)
stripPrefix = false;
@ -474,8 +476,8 @@ num_parseInt(JSContext *cx, uintN argc, Value *vp)
/* Step 15. */
vp->setNumber(number);
if (!vp->isInt32() && !cx->markTypeCallerOverflow())
return false;
if (!vp->isInt32())
cx->markTypeCallerOverflow();
return true;
}

View File

@ -1648,10 +1648,8 @@ js_obj_defineGetter(JSContext *cx, uintN argc, Value *vp)
if (!CheckAccess(cx, obj, id, JSACC_WATCH, &junk, &attrs))
return JS_FALSE;
if (!cx->addTypePropertyId(obj->getType(), id, TYPE_UNKNOWN))
return JS_FALSE;
if (!cx->markTypePropertyConfigured(obj->getType(), id))
return false;
cx->addTypePropertyId(obj->getType(), id, TYPE_UNKNOWN);
cx->markTypePropertyConfigured(obj->getType(), id);
call.rval().setUndefined();
return obj->defineProperty(cx, id, UndefinedValue(), getter, StrictPropertyStub,
@ -1688,10 +1686,8 @@ js_obj_defineSetter(JSContext *cx, uintN argc, Value *vp)
if (!CheckAccess(cx, obj, id, JSACC_WATCH, &junk, &attrs))
return JS_FALSE;
if (!cx->addTypePropertyId(obj->getType(), id, TYPE_UNKNOWN))
return JS_FALSE;
if (!cx->markTypePropertyConfigured(obj->getType(), id))
return false;
cx->addTypePropertyId(obj->getType(), id, TYPE_UNKNOWN);
cx->markTypePropertyConfigured(obj->getType(), id);
call.rval().setUndefined();
return obj->defineProperty(cx, id, UndefinedValue(), PropertyStub, setter,
@ -2481,16 +2477,12 @@ static JSBool
DefineProperty(JSContext *cx, JSObject *obj, const jsid &id, const PropDesc &desc, bool throwError,
bool *rval)
{
if (!cx->addTypePropertyId(obj->getType(), id, desc.value))
return false;
cx->addTypePropertyId(obj->getType(), id, desc.value);
if (!desc.get.isUndefined() || !desc.set.isUndefined()) {
if (!cx->addTypePropertyId(obj->getType(), id, TYPE_UNKNOWN))
return false;
if (!cx->markTypePropertyConfigured(obj->getType(), id))
return false;
cx->addTypePropertyId(obj->getType(), id, TYPE_UNKNOWN);
cx->markTypePropertyConfigured(obj->getType(), id);
} else if (!desc.configurable() || !desc.enumerable() || !desc.writable()) {
if (!cx->markTypePropertyConfigured(obj->getType(), id))
return false;
cx->markTypePropertyConfigured(obj->getType(), id);
}
if (obj->isArray())
@ -2634,8 +2626,7 @@ obj_create(JSContext *cx, uintN argc, Value *vp)
vp->setObject(*obj); /* Root and prepare for eventual return. */
/* Don't track types or array-ness for objects created here. */
if (!cx->markTypeObjectUnknownProperties(obj->getType()))
return false;
cx->markTypeObjectUnknownProperties(obj->getType());
/* 15.2.3.5 step 4. */
if (argc > 1 && !vp[3].isUndefined()) {
@ -3040,8 +3031,7 @@ js_CreateThisForFunction(JSContext *cx, JSObject *callee, bool newType)
obj->lastProperty());
if (!obj)
return NULL;
if (!callee->getFunctionPrivate()->script()->typeSetThis(cx, (types::jstype) type))
return NULL;
callee->getFunctionPrivate()->script()->typeSetThis(cx, (types::jstype) type);
}
return obj;
@ -3972,8 +3962,9 @@ js_InitObjectClass(JSContext *cx, JSObject *obj)
/* The default 'new' object for Object.prototype has unknown properties. */
TypeObject *newType = proto->getNewType(cx);
if (!newType || !cx->markTypeObjectUnknownProperties(newType))
if (!newType)
return NULL;
cx->markTypeObjectUnknownProperties(newType);
/* ECMA (15.1.2.1) says 'eval' is a property of the global object. */
jsid id = ATOM_TO_JSID(cx->runtime->atomState.evalAtom);
@ -3994,8 +3985,7 @@ DefineStandardSlot(JSContext *cx, JSObject *obj, JSProtoKey key, JSAtom *atom,
{
jsid id = ATOM_TO_JSID(atom);
if (!cx->addTypePropertyId(obj->getType(), id, v))
return false;
cx->addTypePropertyId(obj->getType(), id, v);
if (key != JSProto_Null) {
/*
@ -4097,10 +4087,9 @@ DefineConstructorAndPrototype(JSContext *cx, JSObject *obj, JSProtoKey key, JSAt
return NULL;
/* Mark types with a special equality hook as having unknown properties. */
if (clasp->ext.equality &&
(!cx->markTypeObjectUnknownProperties(type) ||
!cx->markTypeObjectUnknownProperties(proto->getType()))) {
return NULL;
if (clasp->ext.equality) {
cx->markTypeObjectUnknownProperties(type);
cx->markTypeObjectUnknownProperties(proto->getType());
}
proto->syncSpecialEquality();
@ -4585,10 +4574,8 @@ SetProto(JSContext *cx, JSObject *obj, JSObject *proto, bool checkForCycles)
* new type of the object, which is OK since we treat objects in type sets with
* unknown properties as interchangeable.
*/
if (!cx->markTypeObjectUnknownProperties(obj->getType()) ||
!cx->markTypeObjectUnknownProperties(type)) {
return false;
}
cx->markTypeObjectUnknownProperties(obj->getType());
cx->markTypeObjectUnknownProperties(type);
if (!proto || !checkForCycles) {
obj->setType(type);
@ -4750,8 +4737,7 @@ js_ConstructObject(JSContext *cx, Class *clasp, JSObject *proto, JSObject *paren
return NULL;
obj->syncSpecialEquality();
if (!cx->markTypeObjectUnknownProperties(obj->getType()))
return NULL;
cx->markTypeObjectUnknownProperties(obj->getType());
Value rval;
if (!InvokeConstructorWithGivenThis(cx, obj, cval, argc, argv, &rval))
@ -5628,8 +5614,7 @@ js_NativeGetInline(JSContext *cx, JSObject *receiver, JSObject *obj, JSObject *p
pobj->nativeSetSlot(slot, *vp);
}
if (!cx->addTypePropertyId(obj->getType(), shape->id, *vp))
return false;
cx->addTypePropertyId(obj->getType(), shape->id, *vp);
return true;
}
@ -6606,10 +6591,8 @@ js_GetClassPrototype(JSContext *cx, JSObject *scopeobj, JSProtoKey protoKey,
JSBool
js_SetClassPrototype(JSContext *cx, JSObject *ctor, JSObject *proto, uintN attrs)
{
if (!cx->addTypePropertyId(ctor->getType(), ATOM_TO_JSID(cx->runtime->atomState.classPrototypeAtom),
ObjectOrNullValue(proto))) {
return JS_FALSE;
}
cx->addTypePropertyId(ctor->getType(), ATOM_TO_JSID(cx->runtime->atomState.classPrototypeAtom),
ObjectOrNullValue(proto));
/*
* Use the given attributes for the prototype property of the constructor,

View File

@ -866,7 +866,7 @@ struct JSObject : js::gc::Cell {
/* Packed information for this array. May be incorrect if !cx->typeInferenceEnabled(). */
inline bool isPackedDenseArray();
inline bool setDenseArrayNotPacked(JSContext *cx);
inline void setDenseArrayNotPacked(JSContext *cx);
/*
* ensureDenseArrayElements ensures that the dense array can hold at least
@ -1777,8 +1777,8 @@ js_DefineNativePropertyWithType(JSContext *cx, JSObject *obj, jsid id, const js:
uintN flags, intN shortid, JSProperty **propp,
uintN defineHow = 0)
{
return JS_AddTypePropertyById(cx, obj, id, Jsvalify(value)) &&
js_DefineNativeProperty(cx, obj, id, value, getter, setter,
JS_AddTypePropertyById(cx, obj, id, Jsvalify(value));
return js_DefineNativeProperty(cx, obj, id, value, getter, setter,
attrs, flags, shortid, propp, defineHow);
}

View File

@ -127,8 +127,7 @@ JSObject::unbrand(JSContext *cx)
inline JSBool
JSObject::setAttributes(JSContext *cx, jsid id, uintN *attrsp)
{
if (!cx->markTypePropertyConfigured(getType(), id))
return false;
cx->markTypePropertyConfigured(getType(), id);
js::AttributesOp op = getOps()->setAttributes;
return (op ? op : js_SetAttributes)(cx, this, id, attrsp);
}
@ -136,10 +135,8 @@ JSObject::setAttributes(JSContext *cx, jsid id, uintN *attrsp)
inline JSBool
JSObject::deleteProperty(JSContext *cx, jsid id, js::Value *rval, JSBool strict)
{
if (!cx->addTypePropertyId(getType(), id, js::types::TYPE_UNDEFINED))
return false;
if (!cx->markTypePropertyConfigured(getType(), id))
return false;
cx->addTypePropertyId(getType(), id, js::types::TYPE_UNDEFINED);
cx->markTypePropertyConfigured(getType(), id);
js::DeleteIdOp op = getOps()->deleteProperty;
return (op ? op : js_DeleteProperty)(cx, this, id, rval, strict);
}
@ -435,11 +432,9 @@ JSObject::setArrayLength(JSContext *cx, uint32 length)
* Mark the type of this object as possibly not a dense array, per the
* requirements of OBJECT_FLAG_NON_DENSE_ARRAY.
*/
if (!cx->markTypeArrayNotPacked(getType(), true))
return false;
cx->markTypeArrayNotPacked(getType(), true);
jsid lengthId = ATOM_TO_JSID(cx->runtime->atomState.lengthAtom);
if (!cx->addTypePropertyId(getType(), lengthId, js::types::TYPE_DOUBLE))
return false;
cx->addTypePropertyId(getType(), lengthId, js::types::TYPE_DOUBLE);
}
setPrivate((void*) length);
@ -1701,8 +1696,8 @@ DefineConstructorAndPrototype(JSContext *cx, JSObject *global,
global->setSlot(key, ObjectValue(*ctor));
global->setSlot(key + JSProto_LIMIT, ObjectValue(*proto));
if (!cx->addTypePropertyId(global->getType(), id, ObjectValue(*ctor)) ||
!global->addDataProperty(cx, id, key + JSProto_LIMIT * 2, 0)) {
cx->addTypePropertyId(global->getType(), id, ObjectValue(*ctor));
if (!global->addDataProperty(cx, id, key + JSProto_LIMIT * 2, 0)) {
global->setSlot(key, UndefinedValue());
global->setSlot(key + JSProto_LIMIT, UndefinedValue());
return false;

View File

@ -158,8 +158,7 @@ js_json_stringify(JSContext *cx, uintN argc, Value *vp)
return JS_FALSE;
vp->setString(str);
} else {
if (!cx->markTypeCallerUnexpected(types::TYPE_UNDEFINED))
return JS_FALSE;
cx->markTypeCallerUnexpected(types::TYPE_UNDEFINED);
vp->setUndefined();
}
@ -1030,8 +1029,10 @@ CloseObject(JSContext *cx, JSONParser *jp)
return JS_FALSE;
JSObject *obj = &p.toObject();
if (obj->isArray() ? !cx->fixArrayType(obj) : !cx->fixObjectType(obj))
return JS_FALSE;
if (obj->isArray())
cx->fixArrayType(obj);
else
cx->fixObjectType(obj);
if (!js_SetLengthProperty(cx, jp->objectStack, len - 1))
return JS_FALSE;

View File

@ -1181,8 +1181,7 @@ Compiler::defineGlobals(JSContext *cx, GlobalScope &globalScope, JSScript *scrip
* optimizations only take place if the property is not defined.
*/
rval.setObject(*fun);
if (!cx->addTypePropertyId(globalObj->getType(), id, rval))
return false;
cx->addTypePropertyId(globalObj->getType(), id, rval);
} else {
rval.setUndefined();
}

View File

@ -1177,8 +1177,7 @@ NewProxyObject(JSContext *cx, JSProxyHandler *handler, const Value &priv, JSObje
}
/* Don't track types of properties of proxies. */
if (!cx->markTypeObjectUnknownProperties(obj->getType()))
return NULL;
cx->markTypeObjectUnknownProperties(obj->getType());
return obj;
}

View File

@ -830,8 +830,7 @@ regexp_construct(JSContext *cx, uintN argc, Value *vp)
* regexps for any associated compileAndGo RegExp global, not new
* regexps with different prototypes or RegExp.prototype itself.
*/
if (!cx->markTypeCallerUnexpected(*vp))
return false;
cx->markTypeCallerUnexpected(*vp);
return true;
}
}
@ -940,14 +939,12 @@ js_InitRegExpClass(JSContext *cx, JSObject *global)
if (!type->getEmptyShape(cx, &js_RegExpClass, FINALIZE_OBJECT0))
return NULL;
if (!cx->addTypeProperty(protoType, "source", TYPE_STRING) ||
!cx->addTypeProperty(protoType, "global", TYPE_BOOLEAN) ||
!cx->addTypeProperty(protoType, "ignoreCase", TYPE_BOOLEAN) ||
!cx->addTypeProperty(protoType, "multiline", TYPE_BOOLEAN) ||
!cx->addTypeProperty(protoType, "sticky", TYPE_BOOLEAN) ||
!cx->addTypeProperty(protoType, "lastIndex", TYPE_INT32)) {
return NULL;
}
cx->addTypeProperty(protoType, "source", TYPE_STRING);
cx->addTypeProperty(protoType, "global", TYPE_BOOLEAN);
cx->addTypeProperty(protoType, "ignoreCase", TYPE_BOOLEAN);
cx->addTypeProperty(protoType, "multiline", TYPE_BOOLEAN);
cx->addTypeProperty(protoType, "sticky", TYPE_BOOLEAN);
cx->addTypeProperty(protoType, "lastIndex", TYPE_INT32);
/* Install the fully-constructed RegExp and RegExp.prototype in global. */
if (!DefineConstructorAndPrototype(cx, global, JSProto_RegExp, ctor, proto))

View File

@ -464,7 +464,6 @@ struct JSScript {
#ifdef JS_METHODJIT
bool debugMode:1; /* script was compiled in debug mode */
bool singleStepMode:1; /* compile script in single-step mode */
bool inlineParents:1; /* script may be inlined in other frames */
bool failedBoundsCheck:1; /* script has had hoisted bounds checks fail */
#endif
@ -584,25 +583,25 @@ struct JSScript {
getTypeInitObject(JSContext *cx, const jsbytecode *pc, bool isArray);
/* Monitor a bytecode pushing an unexpected value. */
inline bool typeMonitorResult(JSContext *cx, const jsbytecode *pc, js::types::jstype type);
inline bool typeMonitorResult(JSContext *cx, const jsbytecode *pc, const js::Value &val);
inline bool typeMonitorUndefined(JSContext *cx, const jsbytecode *pc);
inline bool typeMonitorOverflow(JSContext *cx, const jsbytecode *pc);
inline bool typeMonitorString(JSContext *cx, const jsbytecode *pc);
inline bool typeMonitorUnknown(JSContext *cx, const jsbytecode *pc);
inline void typeMonitorResult(JSContext *cx, const jsbytecode *pc, js::types::jstype type);
inline void typeMonitorResult(JSContext *cx, const jsbytecode *pc, const js::Value &val);
inline void typeMonitorUndefined(JSContext *cx, const jsbytecode *pc);
inline void typeMonitorOverflow(JSContext *cx, const jsbytecode *pc);
inline void typeMonitorString(JSContext *cx, const jsbytecode *pc);
inline void typeMonitorUnknown(JSContext *cx, const jsbytecode *pc);
/* Add a type for a variable in this script. */
inline bool typeSetThis(JSContext *cx, js::types::jstype type);
inline bool typeSetThis(JSContext *cx, const js::Value &value);
inline bool typeSetThis(JSContext *cx, js::types::ClonedTypeSet *types);
inline bool typeSetNewCalled(JSContext *cx);
inline bool typeSetLocal(JSContext *cx, unsigned local, js::types::jstype type);
inline bool typeSetLocal(JSContext *cx, unsigned local, const js::Value &value);
inline bool typeSetLocal(JSContext *cx, unsigned local, js::types::ClonedTypeSet *types);
inline bool typeSetArgument(JSContext *cx, unsigned arg, js::types::jstype type);
inline bool typeSetArgument(JSContext *cx, unsigned arg, const js::Value &value);
inline bool typeSetArgument(JSContext *cx, unsigned arg, js::types::ClonedTypeSet *types);
inline bool typeSetUpvar(JSContext *cx, unsigned upvar, const js::Value &value);
inline void typeSetThis(JSContext *cx, js::types::jstype type);
inline void typeSetThis(JSContext *cx, const js::Value &value);
inline void typeSetThis(JSContext *cx, js::types::ClonedTypeSet *types);
inline void typeSetNewCalled(JSContext *cx);
inline void typeSetLocal(JSContext *cx, unsigned local, js::types::jstype type);
inline void typeSetLocal(JSContext *cx, unsigned local, const js::Value &value);
inline void typeSetLocal(JSContext *cx, unsigned local, js::types::ClonedTypeSet *types);
inline void typeSetArgument(JSContext *cx, unsigned arg, js::types::jstype type);
inline void typeSetArgument(JSContext *cx, unsigned arg, const js::Value &value);
inline void typeSetArgument(JSContext *cx, unsigned arg, js::types::ClonedTypeSet *types);
inline void typeSetUpvar(JSContext *cx, unsigned upvar, const js::Value &value);
/*
* Associates this script with a specific function, constructing a new type

View File

@ -1138,7 +1138,8 @@ js_str_charCodeAt(JSContext *cx, uintN argc, Value *vp)
out_of_range:
vp->setDouble(js_NaN);
return cx->markTypeCallerOverflow();
cx->markTypeCallerOverflow();
return true;
}
/*
@ -2711,8 +2712,7 @@ SplitHelper(JSContext *cx, JSLinearString *str, uint32 limit, Matcher splitMatch
return NULL;
} else {
/* Only string entries have been accounted for so far. */
if (!cx->addTypePropertyId(type, JSID_VOID, UndefinedValue()))
return NULL;
cx->addTypePropertyId(type, JSID_VOID, UndefinedValue());
if (!splits.append(UndefinedValue()))
return NULL;
}
@ -2807,8 +2807,9 @@ str_split(JSContext *cx, uintN argc, Value *vp)
return false;
TypeObject *type = cx->getTypeCallerInitObject(true);
if (!type || !cx->addTypeProperty(type, NULL, types::TYPE_STRING))
if (!type)
return false;
cx->addTypeProperty(type, NULL, types::TYPE_STRING);
/* Step 5: Use the second argument as the split limit, if given. */
uint32 limit;
@ -3624,12 +3625,12 @@ js_InitStringClass(JSContext *cx, JSObject *global)
}
jsid lengthId = ATOM_TO_JSID(cx->runtime->atomState.lengthAtom);
if (!cx->addTypePropertyId(proto->getType(), lengthId, TYPE_INT32))
return NULL;
cx->addTypePropertyId(proto->getType(), lengthId, TYPE_INT32);
TypeObject *type = proto->getNewType(cx);
if (!type || !cx->addTypePropertyId(type, JSID_VOID, TYPE_STRING))
if (!type)
return NULL;
cx->addTypePropertyId(type, JSID_VOID, TYPE_STRING);
/*
* Make sure proto's emptyShape is available to be shared by String

View File

@ -499,10 +499,6 @@ jitstats_getProperty(JSContext *cx, JSObject *obj, jsid id, jsval *vp)
JSAtom* str = JSID_TO_ATOM(id);
if (StringEqualsAscii(str, "HOTLOOP")) {
*vp = INT_TO_JSVAL(HOTLOOP);
if (!cx->addTypePropertyId(obj->getType(), id, Valueify(*vp)))
return JS_FALSE;
return JS_TRUE;
}
@ -514,10 +510,6 @@ jitstats_getProperty(JSContext *cx, JSObject *obj, jsid id, jsval *vp)
#else
*vp = BOOLEAN_TO_JSVAL(false);
#endif
if (!cx->addTypePropertyId(obj->getType(), id, Valueify(*vp)))
return JS_FALSE;
return JS_TRUE;
}
}

View File

@ -1542,16 +1542,11 @@ do { \
NULL, NULL); \
if (!proto) \
return NULL; \
if (!cx->addTypeProperty(proto->getType(), NULL, types::TYPE_INT32)) \
return NULL; \
if (_typedArray::ArrayElementTypeMayBeDouble() && \
!cx->addTypeProperty(proto->getType(), NULL, types::TYPE_DOUBLE)) { \
return NULL; \
} \
if (!cx->addTypeProperty(proto->getType(), "buffer", \
(types::jstype) bufferType)) { \
return NULL; \
} \
cx->addTypeProperty(proto->getType(), NULL, types::TYPE_INT32); \
if (_typedArray::ArrayElementTypeMayBeDouble()) \
cx->addTypeProperty(proto->getType(), NULL, types::TYPE_DOUBLE); \
cx->addTypeProperty(proto->getType(), "buffer", \
(types::jstype) bufferType); \
JSObject *ctor = JS_GetConstructor(cx, proto); \
if (!ctor || \
!JS_DefineProperty(cx, ctor, "BYTES_PER_ELEMENT", \

View File

@ -654,6 +654,11 @@ class Value
return data.asBits;
}
JS_ALWAYS_INLINE
void setRawBits(uint64 bits) {
data.asBits = bits;
}
/*
* In the extract/box/unbox functions below, "NonDouble" means this
* functions must not be called on a value that is a double. This allows
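setRawBits is the write-side counterpart of the raw-bits accessor above: rebuilding an interpreter frame from a discarded JIT frame can copy a boxed value wholesale. A minimal sketch, assuming bits holds a 64-bit payload captured from a JIT frame slot:

    js::Value v;
    v.setRawBits(bits);   // reconstitute the boxed value without re-boxing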

View File

@ -142,13 +142,10 @@ bool
JSWrapper::defineProperty(JSContext *cx, JSObject *wrapper, jsid id,
PropertyDescriptor *desc)
{
if (desc->attrs & (JSPROP_GETTER | JSPROP_SETTER)) {
if (!cx->addTypePropertyId(wrappedObject(wrapper)->getType(), id, types::TYPE_UNKNOWN))
return false;
} else {
if (!cx->addTypePropertyId(wrappedObject(wrapper)->getType(), id, desc->value))
return false;
}
if (desc->attrs & (JSPROP_GETTER | JSPROP_SETTER))
cx->addTypePropertyId(wrappedObject(wrapper)->getType(), id, types::TYPE_UNKNOWN);
else
cx->addTypePropertyId(wrappedObject(wrapper)->getType(), id, desc->value);
SET(JS_DefinePropertyById(cx, wrappedObject(wrapper), id, Jsvalify(desc->value),
Jsvalify(desc->getter), Jsvalify(desc->setter), desc->attrs));
@ -230,8 +227,7 @@ bool
JSWrapper::set(JSContext *cx, JSObject *wrapper, JSObject *receiver, jsid id, bool strict,
Value *vp)
{
if (!cx->addTypePropertyId(wrappedObject(wrapper)->getType(), id, *vp))
return false;
cx->addTypePropertyId(wrappedObject(wrapper)->getType(), id, *vp);
// FIXME (bug 596351): Need deal with strict mode.
SET(wrappedObject(wrapper)->setProperty(cx, id, vp, false));
@ -629,8 +625,8 @@ Reify(JSContext *cx, JSCompartment *origin, Value *vp)
bool
JSCrossCompartmentWrapper::iterate(JSContext *cx, JSObject *wrapper, uintN flags, Value *vp)
{
if (!(flags & JSITER_OWNONLY) && !cx->markTypeCallerUnexpected(types::TYPE_UNKNOWN))
return false;
if (!(flags & JSITER_OWNONLY))
cx->markTypeCallerUnexpected(types::TYPE_UNKNOWN);
PIERCE(cx, wrapper, GET,
NOTHING,

View File

@ -6862,7 +6862,8 @@ CopyXMLSettings(JSContext *cx, JSObject *from, JSObject *to)
if (!JSVAL_IS_BOOLEAN(v))
continue;
}
if (!JS_AddTypeProperty(cx, to, name, v) || !JS_SetProperty(cx, to, name, &v))
JS_AddTypeProperty(cx, to, name, v);
if (!JS_SetProperty(cx, to, name, &v))
return false;
}
@ -7155,11 +7156,10 @@ js_InitQNameClass(JSContext *cx, JSObject *obj)
/* Properties of QName objects are not modeled by type inference. */
TypeObject *type = proto->getNewType(cx);
if (!type ||
!cx->markTypeObjectUnknownProperties(type) ||
!cx->markTypeObjectUnknownProperties(proto->getType())) {
if (!type)
return NULL;
}
cx->markTypeObjectUnknownProperties(type);
cx->markTypeObjectUnknownProperties(proto->getType());
return proto;
}
@ -7188,11 +7188,10 @@ js_InitXMLClass(JSContext *cx, JSObject *obj)
/* Properties of XML objects are not modeled by type inference. */
TypeObject *type = proto->getNewType(cx);
if (!type ||
!cx->markTypeObjectUnknownProperties(type) ||
!cx->markTypeObjectUnknownProperties(proto->getType())) {
if (!type)
return NULL;
}
cx->markTypeObjectUnknownProperties(type);
cx->markTypeObjectUnknownProperties(proto->getType());
xml = js_NewXML(cx, JSXML_CLASS_TEXT);
if (!xml)
@ -7317,8 +7316,7 @@ js_GetDefaultXMLNamespace(JSContext *cx, jsval *vp)
obj = tmp;
}
if (!cx->addTypePropertyId(obj->getType(), JS_DEFAULT_XML_NAMESPACE_ID, types::TYPE_UNKNOWN))
return JS_FALSE;
cx->addTypePropertyId(obj->getType(), JS_DEFAULT_XML_NAMESPACE_ID, types::TYPE_UNKNOWN);
ns = js_ConstructObject(cx, &js_NamespaceClass, NULL, obj, 0, NULL);
if (!ns)
@ -7344,8 +7342,7 @@ js_SetDefaultXMLNamespace(JSContext *cx, const Value &v)
JSObject &varobj = cx->stack.currentVarObj();
if (!cx->addTypePropertyId(varobj.getType(), JS_DEFAULT_XML_NAMESPACE_ID, types::TYPE_UNKNOWN))
return JS_FALSE;
cx->addTypePropertyId(varobj.getType(), JS_DEFAULT_XML_NAMESPACE_ID, types::TYPE_UNKNOWN);
if (!varobj.defineProperty(cx, JS_DEFAULT_XML_NAMESPACE_ID, ObjectValue(*ns),
PropertyStub, StrictPropertyStub, JSPROP_PERMANENT)) {
return JS_FALSE;

File diff suppressed because it is too large.

View File

@ -53,12 +53,6 @@
namespace js {
namespace mjit {
struct PatchableFrame {
StackFrame *fp;
jsbytecode *pc;
bool scriptedCall;
};
/*
* Patch for storing call site and rejoin site return addresses at, for
* redirecting the return address in InvariantFailure.
@ -333,94 +327,20 @@ class Compiler : public BaseCompiler
DataLabelPtr inlinePatch;
uint32 inlineIndex;
jsbytecode *inlinepc;
size_t id;
RejoinState rejoin;
bool ool;
Label loopJumpLabel;
InvariantCodePatch loopPatch;
// An AutoRejoinSite needs to capture this call site.
bool needsRejoin;
InternalCallSite(uint32 returnOffset,
uint32 inlineIndex, jsbytecode *inlinepc, size_t id,
bool ool, bool needsRejoin)
uint32 inlineIndex, jsbytecode *inlinepc,
RejoinState rejoin, bool ool)
: returnOffset(returnOffset),
inlineIndex(inlineIndex), inlinepc(inlinepc), id(id),
ool(ool), needsRejoin(needsRejoin)
inlineIndex(inlineIndex), inlinepc(inlinepc),
rejoin(rejoin), ool(ool)
{ }
};
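Each call site now records a RejoinState describing how to resume execution if the compiled code is discarded while the call is live, replacing the per-site id, the needsRejoin flag, and the separate rejoin-site table below. A hypothetical construction against the new constructor (the offset and index expressions are illustrative, not taken from this hunk):

    InternalCallSite site(masm.callReturnOffset(cl), a->inlineIndex, PC,
                          REJOIN_FALLTHROUGH, /* ool = */ false);
    addCallSite(site);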
struct InternalRejoinSite {
Label label;
jsbytecode *pc;
size_t id;
InvariantCodePatch loopPatch;
InternalRejoinSite(Label label, jsbytecode *pc, size_t id)
: label(label), pc(pc), id(id)
{ }
};
struct AutoRejoinSite {
Compiler *cc;
jsbytecode *pc;
bool force;
bool ool;
Label oolLabel;
// number of call/rejoin sites when this AutoRejoinSite was created.
uint32 startSites;
uint32 rejoinSites;
void *stub1;
void *stub2;
void *stub3;
AutoRejoinSite(Compiler *cc, void *stub1, void *stub2 = NULL, void *stub3 = NULL)
: cc(cc), pc(cc->PC), force(false), ool(false),
startSites(cc->callSites.length()),
rejoinSites(cc->rejoinSites.length()),
stub1(stub1), stub2(stub2), stub3(stub3)
{}
void forceGeneration()
{
force = true;
}
/*
* Rejoin a particular slow path label in a synced state, rather than
* the current point of the fast path when the AutoRejoinSite finishes.
*/
void oolRejoin(Label label)
{
ool = true;
oolLabel = label;
}
~AutoRejoinSite()
{
if (cc->a != cc->outer)
return;
#ifdef DEBUG
JS_ASSERT(pc == cc->PC);
cc->checkRejoinSite(startSites, rejoinSites, stub1);
if (stub2)
cc->checkRejoinSite(startSites, rejoinSites, stub2);
if (stub3)
cc->checkRejoinSite(startSites, rejoinSites, stub3);
#endif
if (force || cc->needRejoins(pc)) {
cc->addRejoinSite(stub1, ool, oolLabel);
if (stub2)
cc->addRejoinSite(stub2, ool, oolLabel);
if (stub3)
cc->addRejoinSite(stub3, ool, oolLabel);
}
}
};
struct DoublePatch {
double d;
DataLabelPtr label;
@ -458,10 +378,6 @@ class Compiler : public BaseCompiler
JSObject *globalObj;
Value *globalSlots;
/* Existing frames on the stack whose slots may need to be updated. */
const Vector<PatchableFrame> *patchFrames;
bool *savedTraps;
Assembler masm;
FrameState frame;
@ -531,7 +447,6 @@ class Compiler : public BaseCompiler
#endif
js::Vector<CallPatchInfo, 64, CompilerAllocPolicy> callPatches;
js::Vector<InternalCallSite, 64, CompilerAllocPolicy> callSites;
js::Vector<InternalRejoinSite, 64, CompilerAllocPolicy> rejoinSites;
js::Vector<DoublePatch, 16, CompilerAllocPolicy> doubleList;
js::Vector<uint32, 4, CompilerAllocPolicy> fixedDoubleEntries;
js::Vector<JumpTable, 16> jumpTables;
@ -556,8 +471,7 @@ class Compiler : public BaseCompiler
friend class CompilerAllocPolicy;
public:
Compiler(JSContext *cx, JSScript *outerScript, bool isConstructing,
const Vector<PatchableFrame> *patchFrames);
Compiler(JSContext *cx, JSScript *outerScript, bool isConstructing);
~Compiler();
CompileStatus compile();
@ -567,35 +481,11 @@ class Compiler : public BaseCompiler
Label labelOf(jsbytecode *target, uint32 inlineIndex);
void addCallSite(const InternalCallSite &callSite);
void addReturnSite(bool ool);
void inlineStubCall(void *stub, bool needsRejoin);
bool loadOldTraps(const Vector<CallSite> &site);
void inlineStubCall(void *stub, RejoinState rejoin);
bool debugMode() { return debugMode_; }
bool inlining() { return inlining_; }
#ifdef DEBUG
void checkRejoinSite(uint32 nCallSites, uint32 nRejoinSites, void *stub);
#endif
void addRejoinSite(void *stub, bool ool, Label oolLabel);
bool needRejoins(jsbytecode *pc)
{
// We'll never rejoin into an inlined frame.
if (a != outer)
return false;
// We need all rejoin points if we might expand an inline frame.
if (outerScript->inlineParents)
return true;
// Otherwise, only add rejoin points where there are active frames on stack.
for (unsigned i = 0; patchFrames && i < patchFrames->length(); i++) {
if ((*patchFrames)[i].pc == pc)
return true;
}
return false;
}
jsbytecode *outerPC() {
if (a == outer)
return PC;
@ -610,11 +500,11 @@ class Compiler : public BaseCompiler
Assembler &getAssembler(bool ool) { return ool ? stubcc.masm : masm; }
InvariantCodePatch *getInvariantPatch(unsigned index, bool call) {
return call ? &callSites[index].loopPatch : &rejoinSites[index].loopPatch;
InvariantCodePatch *getInvariantPatch(unsigned index) {
return &callSites[index].loopPatch;
}
jsbytecode *getInvariantPC(unsigned index, bool call) {
return call ? callSites[index].inlinepc : rejoinSites[index].pc;
jsbytecode *getInvariantPC(unsigned index) {
return callSites[index].inlinepc;
}
bool arrayPrototypeHasIndexedProperty();
@ -663,7 +553,7 @@ class Compiler : public BaseCompiler
bool canUseApplyTricks();
/* Emitting helpers. */
bool emitStubCmpOp(BoolStub stub, AutoRejoinSite &rejoin, jsbytecode *target, JSOp fused);
bool emitStubCmpOp(BoolStub stub, jsbytecode *target, JSOp fused);
bool iter(uintN flags);
void iterNext();
bool iterMore();
@ -763,10 +653,10 @@ class Compiler : public BaseCompiler
MaybeRegisterID &mreg);
void maybeJumpIfNotDouble(Assembler &masm, MaybeJump &mj, FrameEntry *fe,
MaybeRegisterID &mreg);
bool jsop_relational(JSOp op, BoolStub stub, AutoRejoinSite &rejoin, jsbytecode *target, JSOp fused);
bool jsop_relational_full(JSOp op, BoolStub stub, AutoRejoinSite &rejoin, jsbytecode *target, JSOp fused);
bool jsop_relational_double(JSOp op, BoolStub stub, AutoRejoinSite &rejoin, jsbytecode *target, JSOp fused);
bool jsop_relational_int(JSOp op, AutoRejoinSite &rejoin, jsbytecode *target, JSOp fused);
bool jsop_relational(JSOp op, BoolStub stub, jsbytecode *target, JSOp fused);
bool jsop_relational_full(JSOp op, BoolStub stub, jsbytecode *target, JSOp fused);
bool jsop_relational_double(JSOp op, BoolStub stub, jsbytecode *target, JSOp fused);
bool jsop_relational_int(JSOp op, jsbytecode *target, JSOp fused);
void emitLeftDoublePath(FrameEntry *lhs, FrameEntry *rhs, FrameState::BinaryAlloc &regs,
MaybeJump &lhsNotDouble, MaybeJump &rhsNotNumber,
@ -786,8 +676,8 @@ class Compiler : public BaseCompiler
bool booleanJumpScript(JSOp op, jsbytecode *target);
bool jsop_ifneq(JSOp op, jsbytecode *target);
bool jsop_andor(JSOp op, jsbytecode *target);
bool jsop_arginc(JSOp op, uint32 slot, bool popped);
bool jsop_localinc(JSOp op, uint32 slot, bool popped);
bool jsop_arginc(JSOp op, uint32 slot);
bool jsop_localinc(JSOp op, uint32 slot);
bool jsop_newinit();
void jsop_initmethod();
void jsop_initprop();
@ -798,8 +688,8 @@ class Compiler : public BaseCompiler
void jsop_getelem_dense(bool isPacked);
bool isCacheableBaseAndIndex(FrameEntry *obj, FrameEntry *id);
void jsop_stricteq(JSOp op);
bool jsop_equality(JSOp op, BoolStub stub, AutoRejoinSite &autoRejoin, jsbytecode *target, JSOp fused);
bool jsop_equality_int_string(JSOp op, BoolStub stub, AutoRejoinSite &autoRejoin, jsbytecode *target, JSOp fused);
bool jsop_equality(JSOp op, BoolStub stub, jsbytecode *target, JSOp fused);
bool jsop_equality_int_string(JSOp op, BoolStub stub, jsbytecode *target, JSOp fused);
void jsop_pos();
static inline Assembler::Condition
@ -851,49 +741,19 @@ class Compiler : public BaseCompiler
Call emitStubCall(void *ptr, DataLabelPtr *pinline);
};
// Given a stub call, emits the call into the inline assembly path. If
// debug mode is on, adds the appropriate instrumentation for recompilation.
#define INLINE_STUBCALL(stub) \
inlineStubCall(JS_FUNC_TO_DATA_PTR(void *, (stub)), true)
// Given a stub call, emits the call into the inline assembly path. rejoin
// indicates how to rejoin should this call trigger expansion/discarding.
#define INLINE_STUBCALL(stub, rejoin) \
inlineStubCall(JS_FUNC_TO_DATA_PTR(void *, (stub)), rejoin)
// Same as INLINE_STUBCALL, but cannot trigger recompilation.
#define INLINE_STUBCALL_NO_REJOIN(stub) \
inlineStubCall(JS_FUNC_TO_DATA_PTR(void *, (stub)), false)
// Given a stub call, emits the call into the out-of-line assembly path. If
// debug mode is on, adds the appropriate instrumentation for recompilation.
// Given a stub call, emits the call into the out-of-line assembly path.
// Unlike the INLINE_STUBCALL variant, this returns the Call offset.
#define OOL_STUBCALL(stub) \
stubcc.emitStubCall(JS_FUNC_TO_DATA_PTR(void *, (stub)), true)
#define OOL_STUBCALL(stub, rejoin) \
stubcc.emitStubCall(JS_FUNC_TO_DATA_PTR(void *, (stub)), rejoin)
// Same as OOL_STUBCALL, but specifies a slot depth.
#define OOL_STUBCALL_LOCAL_SLOTS(stub, slots) \
stubcc.emitStubCall(JS_FUNC_TO_DATA_PTR(void *, (stub)), true, (slots))
// Same as OOL_STUBCALL, but cannot trigger recompilation.
#define OOL_STUBCALL_NO_REJOIN(stub) \
stubcc.emitStubCall(JS_FUNC_TO_DATA_PTR(void *, (stub)), false)
// Define rejoin sites at a PC. For every stub or scripted call emitted, there
// must be a rejoin site which captures it. These are scope-based, so the
// rejoin site must be declared before the stub call and must close its scope
// after the call has been emitted. If the rejoin site is emitted, it will
// rejoin the inline code once the scope is finished.
#define REJOIN_SITE(stub) \
AutoRejoinSite autoRejoin(this, JS_FUNC_TO_DATA_PTR(void *, (stub)))
#define REJOIN_SITE_2(stub1, stub2) \
AutoRejoinSite autoRejoin(this, JS_FUNC_TO_DATA_PTR(void *, (stub1)), \
JS_FUNC_TO_DATA_PTR(void *, (stub2)))
#define REJOIN_SITE_3(stub1, stub2, stub3) \
AutoRejoinSite autoRejoin(this, JS_FUNC_TO_DATA_PTR(void *, (stub1)), \
JS_FUNC_TO_DATA_PTR(void *, (stub2)), \
JS_FUNC_TO_DATA_PTR(void *, (stub3)))
#define REJOIN_SITE_ANY() \
AutoRejoinSite autoRejoin(this, (void *) RejoinSite::VARIADIC_ID)
#define OOL_STUBCALL_LOCAL_SLOTS(stub, rejoin, slots) \
stubcc.emitStubCall(JS_FUNC_TO_DATA_PTR(void *, (stub)), rejoin, (slots))
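The macro change above is the mechanical heart of this patch. A condensed before/after sketch, based on the jsop_pos hunks later in this commit (the _old/_new function names are illustrative only):
// Before: a scope-based AutoRejoinSite captured every stub call emitted
// while it was live.
void Compiler::jsop_pos_old()
{
    REJOIN_SITE(stubs::Pos);              // declares 'autoRejoin' in this scope
    prepareStubCall(Uses(1));
    INLINE_STUBCALL(stubs::Pos);          // captured by the enclosing rejoin site
}
// After: each stub call names its RejoinState directly, telling the
// recompiler how to resume in the interpreter if this call triggers
// expansion or discarding of the frame's JIT code.
void Compiler::jsop_pos_new()
{
    prepareStubCall(Uses(1));
    INLINE_STUBCALL(stubs::Pos, REJOIN_POS);
}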
} /* namespace js */
} /* namespace mjit */


@ -192,8 +192,6 @@ mjit::Compiler::maybeJumpIfNotDouble(Assembler &masm, MaybeJump &mj, FrameEntry
bool
mjit::Compiler::jsop_binary(JSOp op, VoidStub stub, JSValueType type, types::TypeSet *typeSet)
{
REJOIN_SITE(stub);
FrameEntry *rhs = frame.peek(-1);
FrameEntry *lhs = frame.peek(-2);
@ -231,7 +229,7 @@ mjit::Compiler::jsop_binary(JSOp op, VoidStub stub, JSValueType type, types::Typ
JS_ASSERT_IF(isStringResult && type != JSVAL_TYPE_UNKNOWN, type == JSVAL_TYPE_STRING);
prepareStubCall(Uses(2));
INLINE_STUBCALL(stub);
INLINE_STUBCALL(stub, REJOIN_BINARY);
frame.popn(2);
frame.pushSynced(isStringResult ? JSVAL_TYPE_STRING : type);
return true;
@ -411,7 +409,7 @@ mjit::Compiler::jsop_binary_double(FrameEntry *lhs, FrameEntry *rhs, JSOp op,
done.getJump().linkTo(masm.label(), &masm);
stubcc.leave();
OOL_STUBCALL(stub);
OOL_STUBCALL(stub, REJOIN_BINARY);
if (allocateRight)
frame.freeReg(fpRight);
@ -511,7 +509,7 @@ mjit::Compiler::jsop_binary_full_simple(FrameEntry *fe, JSOp op, VoidStub stub,
/* Slow call - use frame.sync to avoid erroneous jump repatching in stubcc. */
frame.sync(stubcc.masm, Uses(2));
stubcc.leave();
OOL_STUBCALL(stub);
OOL_STUBCALL(stub, REJOIN_BINARY);
/* Finish up stack operations. */
frame.popn(2);
@ -773,7 +771,7 @@ mjit::Compiler::jsop_binary_full(FrameEntry *lhs, FrameEntry *rhs, JSOp op,
/* Slow call - use frame.sync to avoid erroneous jump repatching in stubcc. */
frame.sync(stubcc.masm, Uses(2));
stubcc.leave();
OOL_STUBCALL(stub);
OOL_STUBCALL(stub, REJOIN_BINARY);
/* Finish up stack operations. */
frame.popn(2);
@ -809,7 +807,7 @@ mjit::Compiler::jsop_neg()
if (fe->isTypeKnown() && fe->getKnownType() > JSVAL_UPPER_INCL_TYPE_OF_NUMBER_SET) {
prepareStubCall(Uses(1));
INLINE_STUBCALL(stubs::Neg);
INLINE_STUBCALL(stubs::Neg, REJOIN_FALLTHROUGH);
frame.pop();
frame.pushSynced(type);
return;
@ -852,7 +850,7 @@ mjit::Compiler::jsop_neg()
masm.neg32(reg);
stubcc.leave();
OOL_STUBCALL(stubs::Neg);
OOL_STUBCALL(stubs::Neg, REJOIN_FALLTHROUGH);
frame.pop();
frame.pushTypedPayload(JSVAL_TYPE_INT32, reg);
@ -918,7 +916,7 @@ mjit::Compiler::jsop_neg()
frame.unpinReg(feTypeReg.reg());
stubcc.leave();
OOL_STUBCALL(stubs::Neg);
OOL_STUBCALL(stubs::Neg, REJOIN_FALLTHROUGH);
frame.pop();
frame.pushSynced(type);
@ -940,8 +938,6 @@ mjit::Compiler::jsop_neg()
bool
mjit::Compiler::jsop_mod()
{
REJOIN_SITE_ANY();
#if defined(JS_CPU_X86)
JSValueType type = knownPushedType(0);
@ -967,7 +963,7 @@ mjit::Compiler::jsop_mod()
#endif
{
prepareStubCall(Uses(2));
INLINE_STUBCALL(stubs::Mod);
INLINE_STUBCALL(stubs::Mod, REJOIN_FALLTHROUGH);
frame.popn(2);
frame.pushSynced(knownPushedType(0));
return true;
@ -1074,7 +1070,7 @@ mjit::Compiler::jsop_mod()
if (slowPath) {
stubcc.leave();
OOL_STUBCALL(stubs::Mod);
OOL_STUBCALL(stubs::Mod, REJOIN_FALLTHROUGH);
}
frame.popn(2);
@ -1090,7 +1086,7 @@ mjit::Compiler::jsop_mod()
if (gotNegZero.isSet()) {
stubcc.linkExit(gotNegZero.getJump(), Uses(2));
stubcc.leave();
OOL_STUBCALL(stubs::NegZeroHelper);
OOL_STUBCALL(stubs::NegZeroHelper, REJOIN_FALLTHROUGH);
stubcc.rejoin(Changes(1));
}
#endif
@ -1099,7 +1095,7 @@ mjit::Compiler::jsop_mod()
}
bool
mjit::Compiler::jsop_equality_int_string(JSOp op, BoolStub stub, AutoRejoinSite &autoRejoin,
mjit::Compiler::jsop_equality_int_string(JSOp op, BoolStub stub,
jsbytecode *target, JSOp fused)
{
FrameEntry *rhs = frame.peek(-1);
@ -1189,20 +1185,19 @@ mjit::Compiler::jsop_equality_int_string(JSOp op, BoolStub stub, AutoRejoinSite
if (useIC) {
/* Adjust for the two values just pushed. */
ic.addrLabel = stubcc.masm.moveWithPatch(ImmPtr(NULL), Registers::ArgReg1);
ic.stubCall = OOL_STUBCALL_LOCAL_SLOTS(ic::Equality,
ic.stubCall = OOL_STUBCALL_LOCAL_SLOTS(ic::Equality, REJOIN_BRANCH,
frame.totalDepth() + 2);
needStub = false;
}
#endif
if (needStub)
OOL_STUBCALL_LOCAL_SLOTS(stub, frame.totalDepth() + 2);
OOL_STUBCALL_LOCAL_SLOTS(stub, REJOIN_BRANCH, frame.totalDepth() + 2);
/*
* The stub call has no need to rejoin, since state is synced.
* Instead, we can just test the return value.
*/
autoRejoin.oolRejoin(stubcc.masm.label());
Jump stubBranch = stubcc.masm.branchTest32(GetStubCompareCondition(fused),
Registers::ReturnReg, Registers::ReturnReg);
Jump stubFallthrough = stubcc.masm.jump();
@ -1288,7 +1283,7 @@ mjit::Compiler::jsop_equality_int_string(JSOp op, BoolStub stub, AutoRejoinSite
}
stubcc.leave();
OOL_STUBCALL(stub);
OOL_STUBCALL(stub, REJOIN_FALLTHROUGH);
RegisterID reg = frame.ownRegForData(lhs);
@ -1418,7 +1413,7 @@ DoubleCondForOp(JSOp op, JSOp fused)
}
bool
mjit::Compiler::jsop_relational_double(JSOp op, BoolStub stub, AutoRejoinSite &autoRejoin, jsbytecode *target, JSOp fused)
mjit::Compiler::jsop_relational_double(JSOp op, BoolStub stub, jsbytecode *target, JSOp fused)
{
FrameEntry *rhs = frame.peek(-1);
FrameEntry *lhs = frame.peek(-2);
@ -1447,7 +1442,7 @@ mjit::Compiler::jsop_relational_double(JSOp op, BoolStub stub, AutoRejoinSite &a
if (rhsNotNumber.isSet())
stubcc.linkExitForBranch(rhsNotNumber.get());
stubcc.leave();
OOL_STUBCALL(stub);
OOL_STUBCALL(stub, REJOIN_BRANCH);
frame.syncAndKillEverything();
Jump j = masm.branchDouble(dblCond, fpLeft, fpRight);
@ -1459,7 +1454,6 @@ mjit::Compiler::jsop_relational_double(JSOp op, BoolStub stub, AutoRejoinSite &a
frame.popn(2);
autoRejoin.oolRejoin(stubcc.masm.label());
Jump sj = stubcc.masm.branchTest32(GetStubCompareCondition(fused),
Registers::ReturnReg, Registers::ReturnReg);
@ -1478,7 +1472,7 @@ mjit::Compiler::jsop_relational_double(JSOp op, BoolStub stub, AutoRejoinSite &a
if (rhsNotNumber.isSet())
stubcc.linkExit(rhsNotNumber.get(), Uses(2));
stubcc.leave();
OOL_STUBCALL(stub);
OOL_STUBCALL(stub, REJOIN_FALLTHROUGH);
frame.popn(2);
@ -1504,7 +1498,7 @@ mjit::Compiler::jsop_relational_double(JSOp op, BoolStub stub, AutoRejoinSite &a
}
bool
mjit::Compiler::jsop_relational_int(JSOp op, AutoRejoinSite &autoRejoin, jsbytecode *target, JSOp fused)
mjit::Compiler::jsop_relational_int(JSOp op, jsbytecode *target, JSOp fused)
{
FrameEntry *rhs = frame.peek(-1);
FrameEntry *lhs = frame.peek(-2);
@ -1538,7 +1532,6 @@ mjit::Compiler::jsop_relational_int(JSOp op, AutoRejoinSite &autoRejoin, jsbytec
}
frame.popn(2);
autoRejoin.oolRejoin(stubcc.masm.label());
Jump sj = stubcc.masm.branchTest32(GetStubCompareCondition(fused),
Registers::ReturnReg, Registers::ReturnReg);
@ -1565,10 +1558,8 @@ mjit::Compiler::jsop_relational_int(JSOp op, AutoRejoinSite &autoRejoin, jsbytec
/* See jsop_binary_full() for more information on how this works. */
bool
mjit::Compiler::jsop_relational_full(JSOp op, BoolStub stub, AutoRejoinSite &rejoin, jsbytecode *target, JSOp fused)
mjit::Compiler::jsop_relational_full(JSOp op, BoolStub stub, jsbytecode *target, JSOp fused)
{
AutoRejoinSite autoRejoin(this, JS_FUNC_TO_DATA_PTR(void *, stub));
FrameEntry *rhs = frame.peek(-1);
FrameEntry *lhs = frame.peek(-2);
@ -1647,8 +1638,7 @@ mjit::Compiler::jsop_relational_full(JSOp op, BoolStub stub, AutoRejoinSite &rej
*/
frame.sync(stubcc.masm, Uses(frame.frameSlots()));
stubcc.leave();
OOL_STUBCALL(stub);
autoRejoin.oolRejoin(stubcc.masm.label());
OOL_STUBCALL(stub, REJOIN_BRANCH);
}
/* Forget the world, preserving data. */
@ -1679,7 +1669,6 @@ mjit::Compiler::jsop_relational_full(JSOp op, BoolStub stub, AutoRejoinSite &rej
* The stub call has no need to rejoin since state is synced. Instead,
* we can just test the return value.
*/
autoRejoin.oolRejoin(stubcc.masm.label());
Jump j = stubcc.masm.branchTest32(GetStubCompareCondition(fused),
Registers::ReturnReg, Registers::ReturnReg);
@ -1735,7 +1724,7 @@ mjit::Compiler::jsop_relational_full(JSOp op, BoolStub stub, AutoRejoinSite &rej
/* Emit the slow path - note full frame syncage. */
frame.sync(stubcc.masm, Uses(2));
stubcc.leave();
OOL_STUBCALL(stub);
OOL_STUBCALL(stub, REJOIN_FALLTHROUGH);
}
/* Get an integer comparison condition. */


@ -74,7 +74,7 @@ mjit::Compiler::compileMathAbsInt(FrameEntry *arg)
stubcc.leave();
stubcc.masm.move(Imm32(1), Registers::ArgReg1);
OOL_STUBCALL(stubs::SlowCall);
OOL_STUBCALL(stubs::SlowCall, REJOIN_FALLTHROUGH);
frame.popn(3);
frame.pushTypedPayload(JSVAL_TYPE_INT32, reg);
@ -146,7 +146,7 @@ mjit::Compiler::compileRound(FrameEntry *arg, RoundingMode mode)
stubcc.leave();
stubcc.masm.move(Imm32(1), Registers::ArgReg1);
OOL_STUBCALL(stubs::SlowCall);
OOL_STUBCALL(stubs::SlowCall, REJOIN_FALLTHROUGH);
frame.popn(3);
frame.pushTypedPayload(JSVAL_TYPE_INT32, reg);
@ -218,7 +218,7 @@ mjit::Compiler::compileMathPowSimple(FrameEntry *arg1, FrameEntry *arg2)
stubcc.leave();
stubcc.masm.move(Imm32(2), Registers::ArgReg1);
OOL_STUBCALL(stubs::SlowCall);
OOL_STUBCALL(stubs::SlowCall, REJOIN_FALLTHROUGH);
frame.popn(4);
frame.pushDouble(fpResultReg);
@ -297,7 +297,7 @@ mjit::Compiler::compileGetChar(FrameEntry *thisValue, FrameEntry *arg, GetCharMo
stubcc.leave();
stubcc.masm.move(Imm32(1), Registers::ArgReg1);
OOL_STUBCALL(stubs::SlowCall);
OOL_STUBCALL(stubs::SlowCall, REJOIN_FALLTHROUGH);
frame.popn(3);
switch(mode) {


@ -125,14 +125,12 @@ mjit::Compiler::ensureInteger(FrameEntry *fe, Uses uses)
void
mjit::Compiler::jsop_bitnot()
{
REJOIN_SITE_ANY();
FrameEntry *top = frame.peek(-1);
/* We only want to handle integers here. */
if (top->isNotType(JSVAL_TYPE_INT32) && top->isNotType(JSVAL_TYPE_DOUBLE)) {
prepareStubCall(Uses(1));
INLINE_STUBCALL(stubs::BitNot);
INLINE_STUBCALL(stubs::BitNot, REJOIN_FALLTHROUGH);
frame.pop();
frame.pushSynced(JSVAL_TYPE_INT32);
return;
@ -141,7 +139,7 @@ mjit::Compiler::jsop_bitnot()
ensureInteger(top, Uses(1));
stubcc.leave();
OOL_STUBCALL(stubs::BitNot);
OOL_STUBCALL(stubs::BitNot, REJOIN_FALLTHROUGH);
RegisterID reg = frame.ownRegForData(top);
masm.not32(reg);
@ -154,8 +152,6 @@ mjit::Compiler::jsop_bitnot()
void
mjit::Compiler::jsop_bitop(JSOp op)
{
REJOIN_SITE_ANY();
FrameEntry *rhs = frame.peek(-1);
FrameEntry *lhs = frame.peek(-2);
@ -198,7 +194,7 @@ mjit::Compiler::jsop_bitop(JSOp op)
RegisterID reg = frame.ownRegForData(lhs);
stubcc.leave();
OOL_STUBCALL(stub);
OOL_STUBCALL(stub, REJOIN_FALLTHROUGH);
frame.popn(2);
frame.pushTypedPayload(JSVAL_TYPE_INT32, reg);
@ -216,7 +212,7 @@ mjit::Compiler::jsop_bitop(JSOp op)
(rhs->isNotType(JSVAL_TYPE_INT32) && rhs->isNotType(JSVAL_TYPE_DOUBLE)) ||
(op == JSOP_URSH && rhs->isConstant() && rhs->getValue().toInt32() % 32 == 0)) {
prepareStubCall(Uses(2));
INLINE_STUBCALL(stub);
INLINE_STUBCALL(stub, REJOIN_FALLTHROUGH);
frame.popn(2);
frame.pushSynced(op != JSOP_URSH ? JSVAL_TYPE_INT32 : knownPushedType(0));
return;
@ -313,7 +309,7 @@ mjit::Compiler::jsop_bitop(JSOp op)
int shift = rhs->getValue().toInt32() & 0x1F;
stubcc.leave();
OOL_STUBCALL(stub);
OOL_STUBCALL(stub, REJOIN_FALLTHROUGH);
if (shift) {
if (op == JSOP_LSH)
@ -376,7 +372,7 @@ mjit::Compiler::jsop_bitop(JSOp op)
}
stubcc.leave();
OOL_STUBCALL(stub);
OOL_STUBCALL(stub, REJOIN_FALLTHROUGH);
frame.pop();
frame.pop();
@ -403,7 +399,7 @@ CheckNullOrUndefined(FrameEntry *fe)
}
bool
mjit::Compiler::jsop_equality(JSOp op, BoolStub stub, AutoRejoinSite &autoRejoin, jsbytecode *target, JSOp fused)
mjit::Compiler::jsop_equality(JSOp op, BoolStub stub, jsbytecode *target, JSOp fused)
{
FrameEntry *rhs = frame.peek(-1);
FrameEntry *lhs = frame.peek(-2);
@ -417,7 +413,7 @@ mjit::Compiler::jsop_equality(JSOp op, BoolStub stub, AutoRejoinSite &autoRejoin
FrameEntry *test = lhsTest ? rhs : lhs;
if (test->isTypeKnown())
return emitStubCmpOp(stub, autoRejoin, target, fused);
return emitStubCmpOp(stub, target, fused);
/* The other side must be null or undefined. */
RegisterID reg = frame.ownRegForType(test);
@ -434,7 +430,6 @@ mjit::Compiler::jsop_equality(JSOp op, BoolStub stub, AutoRejoinSite &autoRejoin
frame.syncAndKillEverything();
frame.freeReg(reg);
autoRejoin.oolRejoin(stubcc.masm.label());
Jump sj = stubcc.masm.branchTest32(GetStubCompareCondition(fused),
Registers::ReturnReg, Registers::ReturnReg);
@ -499,7 +494,6 @@ mjit::Compiler::jsop_equality(JSOp op, BoolStub stub, AutoRejoinSite &autoRejoin
Assembler::Condition cond = GetCompareCondition(op, fused);
if (target) {
fixDoubleTypes(target);
autoRejoin.oolRejoin(stubcc.masm.label());
Jump sj = stubcc.masm.branchTest32(GetStubCompareCondition(fused),
Registers::ReturnReg, Registers::ReturnReg);
if (!frame.syncForBranch(target, Uses(2)))
@ -526,11 +520,11 @@ mjit::Compiler::jsop_equality(JSOp op, BoolStub stub, AutoRejoinSite &autoRejoin
}
}
return emitStubCmpOp(stub, autoRejoin, target, fused);
return emitStubCmpOp(stub, target, fused);
}
bool
mjit::Compiler::jsop_relational(JSOp op, BoolStub stub, AutoRejoinSite &autoRejoin,
mjit::Compiler::jsop_relational(JSOp op, BoolStub stub,
jsbytecode *target, JSOp fused)
{
FrameEntry *rhs = frame.peek(-1);
@ -545,42 +539,40 @@ mjit::Compiler::jsop_relational(JSOp op, BoolStub stub, AutoRejoinSite &autoRejo
(rhs->isNotType(JSVAL_TYPE_INT32) && rhs->isNotType(JSVAL_TYPE_DOUBLE) &&
rhs->isNotType(JSVAL_TYPE_STRING))) {
if (op == JSOP_EQ || op == JSOP_NE)
return jsop_equality(op, stub, autoRejoin, target, fused);
return emitStubCmpOp(stub, autoRejoin, target, fused);
return jsop_equality(op, stub, target, fused);
return emitStubCmpOp(stub, target, fused);
}
if (op == JSOP_EQ || op == JSOP_NE) {
if ((lhs->isNotType(JSVAL_TYPE_INT32) && lhs->isNotType(JSVAL_TYPE_STRING)) ||
(rhs->isNotType(JSVAL_TYPE_INT32) && rhs->isNotType(JSVAL_TYPE_STRING))) {
return emitStubCmpOp(stub, autoRejoin, target, fused);
return emitStubCmpOp(stub, target, fused);
} else if (!target && (lhs->isType(JSVAL_TYPE_STRING) || rhs->isType(JSVAL_TYPE_STRING))) {
return emitStubCmpOp(stub, autoRejoin, target, fused);
return emitStubCmpOp(stub, target, fused);
} else if (frame.haveSameBacking(lhs, rhs)) {
return emitStubCmpOp(stub, autoRejoin, target, fused);
return emitStubCmpOp(stub, target, fused);
} else {
return jsop_equality_int_string(op, stub, autoRejoin, target, fused);
return jsop_equality_int_string(op, stub, target, fused);
}
}
if (frame.haveSameBacking(lhs, rhs)) {
return emitStubCmpOp(stub, autoRejoin, target, fused);
return emitStubCmpOp(stub, target, fused);
} else if (lhs->isType(JSVAL_TYPE_STRING) || rhs->isType(JSVAL_TYPE_STRING)) {
return emitStubCmpOp(stub, autoRejoin, target, fused);
return emitStubCmpOp(stub, target, fused);
} else if (lhs->isType(JSVAL_TYPE_DOUBLE) || rhs->isType(JSVAL_TYPE_DOUBLE)) {
return jsop_relational_double(op, stub, autoRejoin, target, fused);
return jsop_relational_double(op, stub, target, fused);
} else if (cx->typeInferenceEnabled() &&
lhs->isType(JSVAL_TYPE_INT32) && rhs->isType(JSVAL_TYPE_INT32)) {
return jsop_relational_int(op, autoRejoin, target, fused);
return jsop_relational_int(op, target, fused);
} else {
return jsop_relational_full(op, stub, autoRejoin, target, fused);
return jsop_relational_full(op, stub, target, fused);
}
}
void
mjit::Compiler::jsop_not()
{
REJOIN_SITE_ANY();
FrameEntry *top = frame.peek(-1);
if (top->isConstant()) {
@ -632,7 +624,7 @@ mjit::Compiler::jsop_not()
default:
{
prepareStubCall(Uses(1));
INLINE_STUBCALL(stubs::ValueToBoolean);
INLINE_STUBCALL(stubs::ValueToBoolean, REJOIN_NONE);
RegisterID reg = Registers::ReturnReg;
frame.takeReg(reg);
@ -689,7 +681,7 @@ mjit::Compiler::jsop_not()
/* Leave. */
stubcc.leave();
OOL_STUBCALL(stubs::Not);
OOL_STUBCALL(stubs::Not, REJOIN_FALLTHROUGH);
frame.pop();
frame.pushTypedPayload(JSVAL_TYPE_BOOLEAN, data);
@ -782,7 +774,7 @@ mjit::Compiler::jsop_typeof()
}
prepareStubCall(Uses(1));
INLINE_STUBCALL_NO_REJOIN(stubs::TypeOf);
INLINE_STUBCALL(stubs::TypeOf, REJOIN_NONE);
frame.pop();
frame.takeReg(Registers::ReturnReg);
frame.pushTypedPayload(JSVAL_TYPE_STRING, Registers::ReturnReg);
@ -924,23 +916,23 @@ mjit::Compiler::jsop_andor(JSOp op, jsbytecode *target)
}
bool
mjit::Compiler::jsop_localinc(JSOp op, uint32 slot, bool popped)
mjit::Compiler::jsop_localinc(JSOp op, uint32 slot)
{
updateVarType();
types::TypeSet *types = pushedTypeSet(0);
JSValueType type = types ? types->getKnownTypeTag(cx) : JSVAL_TYPE_UNKNOWN;
if (popped || (op == JSOP_INCLOCAL || op == JSOP_DECLOCAL)) {
int amt = (op == JSOP_LOCALINC || op == JSOP_INCLOCAL) ? -1 : 1;
int amt = (op == JSOP_LOCALINC || op == JSOP_INCLOCAL) ? 1 : -1;
if (!analysis->incrementInitialValueObserved(PC)) {
// Before:
// After: V
frame.pushLocal(slot);
// Before: V
// After: V 1
frame.push(Int32Value(amt));
frame.push(Int32Value(-amt));
// Note, SUB will perform integer conversion for us.
// Before: V 1
@ -950,13 +942,8 @@ mjit::Compiler::jsop_localinc(JSOp op, uint32 slot, bool popped)
// Before: N+1
// After: N+1
frame.storeLocal(slot, popped, true);
if (popped)
frame.pop();
frame.storeLocal(slot, analysis->popGuaranteed(PC), true);
} else {
int amt = (op == JSOP_LOCALINC || op == JSOP_INCLOCAL) ? 1 : -1;
// Before:
// After: V
frame.pushLocal(slot);
@ -991,23 +978,23 @@ mjit::Compiler::jsop_localinc(JSOp op, uint32 slot, bool popped)
}
bool
mjit::Compiler::jsop_arginc(JSOp op, uint32 slot, bool popped)
mjit::Compiler::jsop_arginc(JSOp op, uint32 slot)
{
updateVarType();
types::TypeSet *types = pushedTypeSet(0);
JSValueType type = types ? types->getKnownTypeTag(cx) : JSVAL_TYPE_UNKNOWN;
if (popped || (op == JSOP_INCARG || op == JSOP_DECARG)) {
int amt = (op == JSOP_ARGINC || op == JSOP_INCARG) ? -1 : 1;
int amt = (op == JSOP_ARGINC || op == JSOP_INCARG) ? 1 : -1;
if (!analysis->incrementInitialValueObserved(PC)) {
// Before:
// After: V
frame.pushArg(slot);
// Before: V
// After: V 1
frame.push(Int32Value(amt));
frame.push(Int32Value(-amt));
// Note, SUB will perform integer conversion for us.
// Before: V 1
@ -1017,13 +1004,8 @@ mjit::Compiler::jsop_arginc(JSOp op, uint32 slot, bool popped)
// Before: N+1
// After: N+1
frame.storeArg(slot, popped);
if (popped)
frame.pop();
frame.storeArg(slot, analysis->popGuaranteed(PC));
} else {
int amt = (op == JSOP_ARGINC || op == JSOP_INCARG) ? 1 : -1;
// Before:
// After: V
frame.pushArg(slot);
@ -1194,7 +1176,7 @@ mjit::Compiler::jsop_setelem_dense()
masm.storeValue(vr, BaseIndex(slotsReg, key.reg(), masm.JSVAL_SCALE));
stubcc.leave();
OOL_STUBCALL(STRICT_VARIANT(stubs::SetElem));
OOL_STUBCALL(STRICT_VARIANT(stubs::SetElem), REJOIN_FALLTHROUGH);
if (!hoisted)
frame.freeReg(slotsReg);
@ -1205,8 +1187,6 @@ mjit::Compiler::jsop_setelem_dense()
bool
mjit::Compiler::jsop_setelem(bool popGuaranteed)
{
REJOIN_SITE_2(STRICT_VARIANT(ic::SetElement), STRICT_VARIANT(stubs::SetElem));
FrameEntry *obj = frame.peek(-3);
FrameEntry *id = frame.peek(-2);
FrameEntry *value = frame.peek(-1);
@ -1342,9 +1322,9 @@ mjit::Compiler::jsop_setelem(bool popGuaranteed)
stubcc.leave();
#if defined JS_POLYIC
passICAddress(&ic);
ic.slowPathCall = OOL_STUBCALL(STRICT_VARIANT(ic::SetElement));
ic.slowPathCall = OOL_STUBCALL(STRICT_VARIANT(ic::SetElement), REJOIN_FALLTHROUGH);
#else
OOL_STUBCALL(STRICT_VARIANT(stubs::SetElem));
OOL_STUBCALL(STRICT_VARIANT(stubs::SetElem), REJOIN_FALLTHROUGH);
#endif
ic.fastPathRejoin = masm.label();
@ -1506,7 +1486,7 @@ mjit::Compiler::jsop_getelem_dense(bool isPacked)
stubcc.linkExit(holeCheck, Uses(2));
stubcc.leave();
OOL_STUBCALL(stubs::GetElem);
OOL_STUBCALL(stubs::GetElem, REJOIN_FALLTHROUGH);
frame.popn(2);
@ -1534,9 +1514,6 @@ mjit::Compiler::jsop_getelem_dense(bool isPacked)
bool
mjit::Compiler::jsop_getelem(bool isCall)
{
REJOIN_SITE_2(isCall ? ic::CallElement : ic::GetElement,
isCall ? stubs::CallElem : stubs::GetElem);
FrameEntry *obj = frame.peek(-2);
FrameEntry *id = frame.peek(-1);
@ -1667,14 +1644,14 @@ mjit::Compiler::jsop_getelem(bool isCall)
#ifdef JS_POLYIC
passICAddress(&ic);
if (isCall)
ic.slowPathCall = OOL_STUBCALL(ic::CallElement);
ic.slowPathCall = OOL_STUBCALL(ic::CallElement, REJOIN_FALLTHROUGH);
else
ic.slowPathCall = OOL_STUBCALL(ic::GetElement);
ic.slowPathCall = OOL_STUBCALL(ic::GetElement, REJOIN_FALLTHROUGH);
#else
if (isCall)
ic.slowPathCall = OOL_STUBCALL(stubs::CallElem);
ic.slowPathCall = OOL_STUBCALL(stubs::CallElem, REJOIN_FALLTHROUGH);
else
ic.slowPathCall = OOL_STUBCALL(stubs::GetElem);
ic.slowPathCall = OOL_STUBCALL(stubs::GetElem, REJOIN_FALLTHROUGH);
#endif
ic.fastPathRejoin = masm.label();
@ -1857,9 +1834,9 @@ mjit::Compiler::jsop_stricteq(JSOp op)
prepareStubCall(Uses(2));
if (op == JSOP_STRICTEQ)
INLINE_STUBCALL_NO_REJOIN(stubs::StrictEq);
INLINE_STUBCALL(stubs::StrictEq, REJOIN_NONE);
else
INLINE_STUBCALL_NO_REJOIN(stubs::StrictNe);
INLINE_STUBCALL(stubs::StrictNe, REJOIN_NONE);
frame.popn(2);
frame.pushSynced(JSVAL_TYPE_BOOLEAN);
@ -1911,9 +1888,9 @@ mjit::Compiler::jsop_stricteq(JSOp op)
if (needStub) {
stubcc.leave();
if (op == JSOP_STRICTEQ)
OOL_STUBCALL_NO_REJOIN(stubs::StrictEq);
OOL_STUBCALL(stubs::StrictEq, REJOIN_NONE);
else
OOL_STUBCALL_NO_REJOIN(stubs::StrictNe);
OOL_STUBCALL(stubs::StrictNe, REJOIN_NONE);
}
frame.popn(2);
@ -1926,9 +1903,9 @@ mjit::Compiler::jsop_stricteq(JSOp op)
prepareStubCall(Uses(2));
if (op == JSOP_STRICTEQ)
INLINE_STUBCALL_NO_REJOIN(stubs::StrictEq);
INLINE_STUBCALL(stubs::StrictEq, REJOIN_NONE);
else
INLINE_STUBCALL_NO_REJOIN(stubs::StrictNe);
INLINE_STUBCALL(stubs::StrictNe, REJOIN_NONE);
frame.popn(2);
frame.pushSyncedType(JSVAL_TYPE_BOOLEAN);
@ -1939,15 +1916,13 @@ mjit::Compiler::jsop_stricteq(JSOp op)
void
mjit::Compiler::jsop_pos()
{
REJOIN_SITE(stubs::Pos);
FrameEntry *top = frame.peek(-1);
if (top->isTypeKnown()) {
if (top->getKnownType() <= JSVAL_TYPE_INT32)
return;
prepareStubCall(Uses(1));
INLINE_STUBCALL(stubs::Pos);
INLINE_STUBCALL(stubs::Pos, REJOIN_POS);
frame.pop();
frame.pushSynced(knownPushedType(0));
return;
@ -1963,7 +1938,7 @@ mjit::Compiler::jsop_pos()
stubcc.linkExit(j, Uses(1));
stubcc.leave();
OOL_STUBCALL(stubs::Pos);
OOL_STUBCALL(stubs::Pos, REJOIN_POS);
stubcc.rejoin(Changes(1));
}
@ -1971,8 +1946,6 @@ mjit::Compiler::jsop_pos()
void
mjit::Compiler::jsop_initmethod()
{
REJOIN_SITE_ANY();
#ifdef DEBUG
FrameEntry *obj = frame.peek(-2);
#endif
@ -1983,14 +1956,12 @@ mjit::Compiler::jsop_initmethod()
prepareStubCall(Uses(2));
masm.move(ImmPtr(atom), Registers::ArgReg1);
INLINE_STUBCALL(stubs::InitMethod);
INLINE_STUBCALL(stubs::InitMethod, REJOIN_FALLTHROUGH);
}
void
mjit::Compiler::jsop_initprop()
{
REJOIN_SITE_ANY();
FrameEntry *obj = frame.peek(-2);
FrameEntry *fe = frame.peek(-1);
JSAtom *atom = script->getAtom(fullAtomIndex(PC));
@ -2000,7 +1971,7 @@ mjit::Compiler::jsop_initprop()
if (!baseobj || monitored(PC)) {
prepareStubCall(Uses(2));
masm.move(ImmPtr(atom), Registers::ArgReg1);
INLINE_STUBCALL(stubs::InitProp);
INLINE_STUBCALL(stubs::InitProp, REJOIN_FALLTHROUGH);
return;
}
@ -2025,8 +1996,6 @@ mjit::Compiler::jsop_initprop()
void
mjit::Compiler::jsop_initelem()
{
REJOIN_SITE_ANY();
FrameEntry *obj = frame.peek(-3);
FrameEntry *id = frame.peek(-2);
FrameEntry *fe = frame.peek(-1);
@ -2043,7 +2012,7 @@ mjit::Compiler::jsop_initelem()
prepareStubCall(Uses(3));
masm.move(Imm32(next == JSOP_ENDINIT ? 1 : 0), Registers::ArgReg1);
INLINE_STUBCALL(stubs::InitElem);
INLINE_STUBCALL(stubs::InitElem, REJOIN_FALLTHROUGH);
return;
}


@ -146,7 +146,7 @@ class FrameEntry
/* Accessors for entries which are copies of other mutable entries. */
bool isCopy() const { return !!copy; }
bool isCopied() const { return copied; }
bool isCopied() const { return copied != 0; }
const FrameEntry *backing() const {
return isCopy() ? copyOf() : this;
@ -170,14 +170,19 @@ class FrameEntry
}
void track(uint32 index) {
clear();
copied = 0;
copy = NULL;
index_ = index;
tracked = true;
}
void clear() {
copied = false;
copy = NULL;
JS_ASSERT(copied == 0);
if (copy) {
JS_ASSERT(copy->copied != 0);
copy->copied--;
copy = NULL;
}
}
uint32 trackerIndex() {
@ -221,30 +226,22 @@ class FrameEntry
knownType = cv.extractNonDoubleType();
}
void setCopied() {
JS_ASSERT(!isCopy());
copied = true;
}
FrameEntry *copyOf() const {
JS_ASSERT(isCopy());
JS_ASSERT_IF(!copy->temporary, copy < this);
return copy;
}
void setNotCopied() {
copied = false;
}
/*
* Set copy index.
*/
void setCopyOf(FrameEntry *fe) {
JS_ASSERT(!isCopied());
clear();
copy = fe;
if (fe) {
type.invalidate();
data.invalidate();
fe->copied++;
}
}
@ -270,19 +267,17 @@ class FrameEntry
RematInfo data;
uint32 index_;
FrameEntry *copy;
bool copied;
bool tracked;
bool temporary;
/* Number of copies of this entry. */
uint32 copied;
/*
* Offset of the last loop in which this entry was written or had a loop
* register assigned.
*/
uint32 lastLoop;
#if JS_BITS_PER_WORD == 32
void *padding;
#endif
};
} /* namespace mjit */
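With this change, 'copied' goes from a might-have-copies boolean to an exact reference count, kept balanced by setCopyOf() and clear(). A standalone model of the invariant (a sketch, not the real FrameEntry):
#include <cassert>
#include <cstdint>

struct Entry {
    Entry *copy = nullptr;      // backing entry, if this one is a copy
    uint32_t copied = 0;        // exact number of live copies of this entry

    void setCopyOf(Entry *fe) {
        clear();
        copy = fe;
        if (fe)
            fe->copied++;       // taking a copy bumps the backing count
    }
    void clear() {
        assert(copied == 0);    // entries with live copies must be uncopied first
        if (copy) {
            assert(copy->copied != 0);
            copy->copied--;     // dropping a copy decrements the backing count
            copy = nullptr;
        }
    }
};

int main() {
    Entry backing, a, b;
    a.setCopyOf(&backing);
    b.setCopyOf(&backing);
    assert(backing.copied == 2);
    a.clear();
    b.clear();
    assert(backing.copied == 0); // the balance assertValidRegisterState now checks
}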


@ -408,7 +408,6 @@ FrameState::pushNumber(RegisterID payload, bool asInt32)
JS_ASSERT(!freeRegs.hasReg(payload));
FrameEntry *fe = rawPush();
fe->clear();
if (asInt32) {
if (!fe->type.synced())
@ -427,7 +426,6 @@ inline void
FrameState::pushInt32(RegisterID payload)
{
FrameEntry *fe = rawPush();
fe->clear();
masm.storeTypeTag(ImmType(JSVAL_TYPE_INT32), addressOf(fe));
fe->type.setMemory();
@ -444,8 +442,6 @@ FrameState::pushUntypedPayload(JSValueType type, RegisterID payload)
FrameEntry *fe = rawPush();
fe->clear();
masm.storeTypeTag(ImmType(type), addressOf(fe));
/* The forceful type sync will assert otherwise. */
@ -454,8 +450,6 @@ FrameState::pushUntypedPayload(JSValueType type, RegisterID payload)
#endif
fe->type.setMemory();
fe->data.unsync();
fe->setNotCopied();
fe->setCopyOf(NULL);
fe->data.setRegister(payload);
regstate(payload).associate(fe, RematInfo::DATA);
}
@ -465,8 +459,6 @@ FrameState::pushUntypedValue(const Value &v)
{
FrameEntry *fe = rawPush();
fe->clear();
masm.storeValue(v, addressOf(fe));
/* The forceful type sync will assert otherwise. */
@ -476,8 +468,6 @@ FrameState::pushUntypedValue(const Value &v)
fe->type.setMemory();
fe->data.unsync();
fe->data.setMemory();
fe->setNotCopied();
fe->setCopyOf(NULL);
}
inline JSC::MacroAssembler::RegisterID
@ -919,8 +909,7 @@ FrameState::learnType(FrameEntry *fe, JSValueType type, bool unsync)
inline void
FrameState::learnType(FrameEntry *fe, JSValueType type, RegisterID data)
{
/* The copied bit may be set on an entry, but there should not be any actual copies. */
JS_ASSERT_IF(fe->isCopied(), !isEntryCopied(fe));
JS_ASSERT(!fe->isCopied());
forgetAllRegs(fe);
fe->clear();
@ -1292,10 +1281,7 @@ inline void
FrameState::eviscerate(FrameEntry *fe)
{
forgetAllRegs(fe);
fe->type.invalidate();
fe->data.invalidate();
fe->setNotCopied();
fe->setCopyOf(NULL);
fe->resetUnsynced();
}
inline StateRemat


@ -167,17 +167,12 @@ FrameState::popActiveFrame()
a->analysis->clearAllocations();
if (a->parent) {
/* Free registers associated with local variables. */
Registers regs(Registers::AvailAnyRegs);
while (!regs.empty()) {
AnyRegisterID reg = regs.takeAnyReg();
if (!freeRegs.hasReg(reg)) {
FrameEntry *fe = regstate(reg).usedBy();
if (fe >= a->locals && !isTemporary(fe)) {
syncAndForgetFe(fe);
fe->clear();
}
}
/* Clear registers and copies used by local variables and stack slots. */
for (FrameEntry *fe = a->sp - 1; fe >= a->locals; fe--) {
if (!fe->isTracked())
continue;
forgetAllRegs(fe);
fe->clear();
}
}
@ -263,24 +258,6 @@ FrameState::variableLive(FrameEntry *fe, jsbytecode *pc) const
return a->analysis->liveness(entrySlot(fe)).live(offset);
}
bool
FrameState::isEntryCopied(FrameEntry *fe) const
{
/*
* :TODO: It would be better for fe->isCopied() to mean 'is actually copied'
* rather than 'might have copies', removing the need for this walk.
*/
JS_ASSERT(fe->isCopied());
for (uint32 i = fe->trackerIndex() + 1; i < tracker.nentries; i++) {
FrameEntry *nfe = tracker[i];
if (!deadEntry(nfe) && nfe->isCopy() && nfe->copyOf() == fe)
return true;
}
return false;
}
AnyRegisterID
FrameState::bestEvictReg(uint32 mask, bool includePinned) const
{
@ -332,7 +309,7 @@ FrameState::bestEvictReg(uint32 mask, bool includePinned) const
* for correctness with eviction of dead variables below, as testing
* variableLive does not consider any copies of the variable.
*/
if (fe->isCopied() && isEntryCopied(fe)) {
if (fe->isCopied()) {
if (!fallback.isSet()) {
fallback = reg;
fallbackOffset = 0;
@ -341,7 +318,7 @@ FrameState::bestEvictReg(uint32 mask, bool includePinned) const
continue;
}
if (isTemporary(fe) || fe < a->callee_) {
if (isTemporary(fe) || (a->parent && fe < a->locals)) {
/*
* All temporaries we currently generate are for loop invariants,
* which we treat as being live everywhere within the loop.
@ -353,7 +330,7 @@ FrameState::bestEvictReg(uint32 mask, bool includePinned) const
fallback = reg;
fallbackOffset = offset;
}
JaegerSpew(JSpew_Regalloc, " %s is a loop temporary\n", reg.name());
JaegerSpew(JSpew_Regalloc, " %s is a LICM or inline parent entry\n", reg.name());
continue;
}
@ -1139,6 +1116,9 @@ FrameState::assertValidRegisterState() const
{
Registers checkedFreeRegs(Registers::AvailAnyRegs);
/* Check that copied and copy info balance out. */
int32 copyCount = 0;
for (uint32 i = 0; i < tracker.nentries; i++) {
FrameEntry *fe = tracker[i];
if (deadEntry(fe))
@ -1151,9 +1131,13 @@ FrameState::assertValidRegisterState() const
JS_ASSERT(fe->trackerIndex() > fe->copyOf()->trackerIndex());
JS_ASSERT(!deadEntry(fe->copyOf()));
JS_ASSERT(fe->copyOf()->isCopied());
JS_ASSERT(!fe->isCopied());
copyCount--;
continue;
}
copyCount += fe->copied;
if (fe->type.inRegister()) {
checkedFreeRegs.takeReg(fe->type.reg());
JS_ASSERT(regstate(fe->type.reg()).fe() == fe);
@ -1171,6 +1155,7 @@ FrameState::assertValidRegisterState() const
}
}
JS_ASSERT(copyCount == 0);
JS_ASSERT(checkedFreeRegs == freeRegs);
for (uint32 i = 0; i < Registers::TotalRegisters; i++) {
@ -1694,13 +1679,8 @@ FrameState::ownRegForData(FrameEntry *fe)
return reg;
}
if (fe->isCopied()) {
FrameEntry *copy = uncopy(fe);
if (fe->isCopied()) {
fe->resetSynced();
return copyDataIntoReg(copy);
}
}
if (fe->isCopied())
uncopy(fe);
if (fe->data.inRegister()) {
reg = fe->data.reg();
@ -1851,15 +1831,9 @@ FrameState::pushCopyOf(FrameEntry *backing)
if (backing->isConstant()) {
fe->setConstant(Jsvalify(backing->getValue()));
} else {
fe->type.invalidate();
fe->data.invalidate();
if (backing->isCopy()) {
if (backing->isCopy())
backing = backing->copyOf();
fe->setCopyOf(backing);
} else {
fe->setCopyOf(backing);
backing->setCopied();
}
fe->setCopyOf(backing);
/* Maintain tracker ordering guarantees for copies. */
JS_ASSERT(backing->isCopied());
@ -1905,7 +1879,6 @@ FrameState::walkTrackerForUncopy(FrameEntry *original)
/* Mark all extra copies as copies of the new backing index. */
bestFe->setCopyOf(NULL);
if (ncopies > 1) {
bestFe->setCopied();
for (uint32 i = firstCopy; i < tracker.nentries; i++) {
FrameEntry *other = tracker[i];
if (deadEntry(other) || other == bestFe)
@ -1929,8 +1902,6 @@ FrameState::walkTrackerForUncopy(FrameEntry *original)
if (other->trackerIndex() < bestFe->trackerIndex())
swapInTracker(bestFe, other);
}
} else {
bestFe->setNotCopied();
}
return bestFe;
@ -1964,9 +1935,6 @@ FrameState::walkFrameForUncopy(FrameEntry *original)
}
}
if (ncopies)
bestFe->setCopied();
return bestFe;
}
@ -2003,10 +1971,7 @@ FrameState::uncopy(FrameEntry *original)
fe = walkFrameForUncopy(original);
else
fe = walkTrackerForUncopy(original);
if (!fe) {
original->setNotCopied();
return NULL;
}
JS_ASSERT(fe);
/*
* Switch the new backing store to the old backing store. During
@ -2084,7 +2049,7 @@ FrameState::storeLocal(uint32 n, bool popGuaranteed, bool fixedType)
return;
}
storeTop(local, popGuaranteed);
storeTop(local);
if (loop)
local->lastLoop = loop->headOffset();
@ -2106,7 +2071,7 @@ FrameState::storeArg(uint32 n, bool popGuaranteed)
return;
}
storeTop(arg, popGuaranteed);
storeTop(arg);
if (loop)
arg->lastLoop = loop->headOffset();
@ -2119,8 +2084,7 @@ FrameState::forgetEntry(FrameEntry *fe)
{
if (fe->isCopied()) {
uncopy(fe);
if (!fe->isCopied())
forgetAllRegs(fe);
fe->resetUnsynced();
} else {
forgetAllRegs(fe);
}
@ -2129,7 +2093,7 @@ FrameState::forgetEntry(FrameEntry *fe)
}
void
FrameState::storeTop(FrameEntry *target, bool popGuaranteed)
FrameState::storeTop(FrameEntry *target)
{
JS_ASSERT(!isTemporary(target));
@ -2155,8 +2119,7 @@ FrameState::storeTop(FrameEntry *target, bool popGuaranteed)
/* Constants are easy to propagate. */
if (top->isConstant()) {
target->setCopyOf(NULL);
target->setNotCopied();
target->clear();
target->setConstant(Jsvalify(top->getValue()));
if (trySyncType && target->isType(oldType))
target->type.sync();
@ -2186,7 +2149,6 @@ FrameState::storeTop(FrameEntry *target, bool popGuaranteed)
/* local.idx < backing.idx means local cannot be a copy yet */
if (target->trackerIndex() < backing->trackerIndex())
swapInTracker(backing, target);
target->setNotCopied();
target->setCopyOf(backing);
if (trySyncType && target->isType(oldType))
target->type.sync();
@ -2221,7 +2183,6 @@ FrameState::storeTop(FrameEntry *target, bool popGuaranteed)
}
}
}
backing->setNotCopied();
/*
* This is valid from the top->isCopy() path because we're guaranteed a
@ -2267,18 +2228,6 @@ FrameState::storeTop(FrameEntry *target, bool popGuaranteed)
if (trySyncType && target->isType(oldType))
target->type.sync();
/*
* Right now, |backing| is a copy of |target| (note the reversal), but
* |target| is not marked as copied. This is an optimization so uncopy()
* may avoid frame traversal.
*
* There are two cases where we must set the copy bit, however:
* - The fixup phase redirected more copies to |target|.
* - An immediate pop is not guaranteed.
*/
if (copied || !popGuaranteed)
target->setCopied();
}
void
@ -2286,7 +2235,7 @@ FrameState::shimmy(uint32 n)
{
JS_ASSERT(a->sp - n >= a->spBase);
int32 depth = 0 - int32(n);
storeTop(peek(depth - 1), true);
storeTop(peek(depth - 1));
popn(n);
}
@ -2295,10 +2244,28 @@ FrameState::shift(int32 n)
{
JS_ASSERT(n < 0);
JS_ASSERT(a->sp + n - 1 >= a->spBase);
storeTop(peek(n - 1), true);
storeTop(peek(n - 1));
pop();
}
void
FrameState::swap()
{
// A B
dupAt(-2);
// A B A
dupAt(-2);
// A B A B
shift(-3);
// B B A
shimmy(1);
// B A
}
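swap() is built entirely from existing primitives; the stack comments above trace each step. The same sequence modeled on a plain value stack (an illustration with simplified dupAt/shift/shimmy semantics, not the register-allocating originals):
#include <cassert>
#include <vector>

typedef std::vector<int> Stack;

// dupAt(n): push a copy of the entry at depth n (n < 0, -1 is the top).
void dupAt(Stack &s, int n) { s.push_back(s[s.size() + n]); }
// shift(n): store the top into the slot at depth n - 1, then pop once.
void shift(Stack &s, int n) { s[s.size() + n - 1] = s.back(); s.pop_back(); }
// shimmy(n): store the top into the slot n + 1 from the top, then pop n.
void shimmy(Stack &s, unsigned n) { s[s.size() - n - 1] = s.back(); s.resize(s.size() - n); }

void swap(Stack &s) {
    dupAt(s, -2);   // A B     -> A B A
    dupAt(s, -2);   // A B A   -> A B A B
    shift(s, -3);   // A B A B -> B B A
    shimmy(s, 1);   // B B A   -> B A
}

int main() {
    Stack s;
    s.push_back(1); // A
    s.push_back(2); // B
    swap(s);
    assert(s[0] == 2 && s[1] == 1);
}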
void
FrameState::forgetKnownDouble(FrameEntry *fe)
{
@ -2505,8 +2472,12 @@ FrameState::binaryEntryLive(FrameEntry *fe) const
JS_ASSERT(fe != a->callee_);
/* Arguments are always treated as live within inline frames, see bestEvictReg. */
if (a->parent && fe < a->locals)
return true;
/* Caller must check that no copies are invalidated by rewriting the entry. */
return fe >= a->spBase || fe < a->callee_ || variableLive(fe, a->PC);
return fe >= a->spBase || variableLive(fe, a->PC);
}
void

View File

@ -616,7 +616,7 @@ class FrameState
/* Stores the top stack slot back to a local or slot. */
void storeLocal(uint32 n, bool popGuaranteed = false, bool fixedType = false);
void storeArg(uint32 n, bool popGuaranteed = false);
void storeTop(FrameEntry *target, bool popGuaranteed);
void storeTop(FrameEntry *target);
/*
* Restores state from a slow path.
@ -875,6 +875,9 @@ class FrameState
*/
void shift(int32 n);
/* Swaps the top two items on the stack. Requires two temp slots. */
void swap();
inline void setInTryBlock(bool inTryBlock) {
this->inTryBlock = inTryBlock;
}
@ -973,9 +976,6 @@ class FrameState
/* Whether fe is the only copy of backing. */
bool hasOnlyCopy(FrameEntry *backing, FrameEntry *fe);
/* Tests whether fe actually has any copies on the stack or in variables. */
bool isEntryCopied(FrameEntry *fe) const;
/*
* All registers in the FE are forgotten. If it is copied, it is uncopied
* beforehand.


@ -176,8 +176,8 @@ top:
static void
InlineReturn(VMFrame &f)
{
JS_ASSERT(f.fp() != f.entryfp);
JS_ASSERT(!js_IsActiveWithOrBlock(f.cx, &f.fp()->scopeChain(), 0));
JS_ASSERT(f.cx->fp() != f.entryfp);
JS_ASSERT(!js_IsActiveWithOrBlock(f.cx, &f.cx->fp()->scopeChain(), 0));
f.cx->stack.popInlineFrame();
}
@ -210,6 +210,23 @@ RemovePartialFrame(JSContext *cx, StackFrame *fp)
cx->stack.popInlineFrame();
}
static inline bool
CheckStackQuota(VMFrame &f)
{
/* Include extra space for any inline frames. */
uint32 nvals = f.fp()->script()->nslots + VALUES_PER_STACK_FRAME + StackSpace::STACK_EXTRA;
if ((Value *)f.fp() + nvals >= f.stackLimit) {
StackSpace &space = f.cx->stack.space();
if (!space.bumpLimitWithinQuota(NULL, f.entryfp, f.regs.sp, nvals, &f.stackLimit)) {
/* Remove the current partially-constructed frame before throwing. */
RemovePartialFrame(f.cx, f.fp());
js_ReportOverRecursed(f.cx);
return false;
}
}
return true;
}
/*
 * HitStackQuota is called when pushing the new frame in the early prologue
 * would overflow f.stackLimit.
@ -217,17 +234,8 @@ RemovePartialFrame(JSContext *cx, StackFrame *fp)
void JS_FASTCALL
stubs::HitStackQuota(VMFrame &f)
{
/* Include space for any inline frames. */
uintN nvals = f.fp()->script()->nslots + StackSpace::STACK_EXTRA;
JS_ASSERT(f.regs.sp == f.fp()->base());
StackSpace &space = f.cx->stack.space();
if (space.bumpLimitWithinQuota(NULL, f.entryfp, f.regs.sp, nvals, &f.stackLimit))
return;
/* Remove the current partially-constructed frame before throwing. */
RemovePartialFrame(f.cx, f.fp());
js_ReportOverRecursed(f.cx);
THROW();
if (!CheckStackQuota(f))
THROW();
}
/*
@ -258,17 +266,9 @@ stubs::FixupArity(VMFrame &f, uint32 nactual)
/* Reserve enough space for a callee frame. */
StackFrame *newfp = cx->stack.getInlineFrameWithinLimit(cx, (Value*) oldfp, nactual,
fun, fun->script(), &flags,
f.entryfp, &f.stackLimit);
if (!newfp) {
/*
* The PC is not coherent with the current frame, so fix it up for
* exception handling.
*/
JSInlinedSite *inline_;
f.regs.pc = f.jit()->nativeToPC(ncode, &inline_);
JS_ASSERT(!inline_);
f.entryfp, &f.stackLimit, ncode);
if (!newfp)
THROWV(NULL);
}
/* Reset the part of the stack frame set by the caller. */
newfp->initCallFrameCallerHalf(cx, flags, ncode);
@ -284,11 +284,11 @@ void * JS_FASTCALL
stubs::CompileFunction(VMFrame &f, uint32 nactual)
{
/*
* Write the scratch field of the VMFrame to indicate this is a call to
* CompileFunction from an IC (the recompiler cannot detect calls made from
* ICs automatically). This needs to be cleared out on all return paths.
* Note: the stubRejoin kind for the frame was written before the call, and
* needs to be cleared out on all return paths (doing this directly in the
* IC stub will not handle cases where we recompiled or threw).
*/
f.scratch = COMPILE_FUNCTION_SCRATCH_VALUE;
JS_ASSERT_IF(f.cx->typeInferenceEnabled(), f.stubRejoin);
/*
* We have a partially constructed frame. That's not really good enough to
@ -314,16 +314,13 @@ stubs::CompileFunction(VMFrame &f, uint32 nactual)
if (nactual != fp->numFormalArgs()) {
fp = (StackFrame *)FixupArity(f, nactual);
if (!fp) {
f.scratch = NULL;
f.stubRejoin = 0;
return NULL;
}
}
CallArgs args = CallArgsFromArgv(fp->numFormalArgs(), fp->formalArgs());
if (!cx->typeMonitorCall(args, fp->isConstructing())) {
f.scratch = NULL;
return NULL;
}
cx->typeMonitorCall(args, fp->isConstructing());
/* Finish frame initialization. */
fp->initCallFrameLatePrologue();
@ -332,7 +329,7 @@ stubs::CompileFunction(VMFrame &f, uint32 nactual)
f.regs.prepareToRun(fp, script);
if (fun->isHeavyweight() && !js::CreateFunCallObject(cx, fp)) {
f.scratch = NULL;
f.stubRejoin = 0;
THROWV(NULL);
}
@ -342,7 +339,7 @@ stubs::CompileFunction(VMFrame &f, uint32 nactual)
/* Same constraint on fp as UncachedInlineCall. */
f.regs.popFrame((Value *) f.regs.fp());
f.scratch = NULL;
f.stubRejoin = 0;
return entry;
}
@ -353,7 +350,7 @@ stubs::CompileFunction(VMFrame &f, uint32 nactual)
JSBool ok = Interpret(cx, fp);
InlineReturn(f);
f.scratch = NULL;
f.stubRejoin = 0;
if (!ok)
THROWV(NULL);
@ -382,27 +379,25 @@ UncachedInlineCall(VMFrame &f, uint32 flags, void **pret, bool *unjittable, uint
* the callee's args will end up getting marked as unknown.
*/
types::AutoEnterTypeInference enter(cx);
if (flags & StackFrame::CONSTRUCTING) {
if (!newscript->typeSetNewCalled(cx))
return false;
} else {
if (!newscript->typeSetThis(cx, &argTypes[0]))
return false;
}
for (unsigned i = 0; i < argc; i++) {
if (!newscript->typeSetArgument(cx, i, &argTypes[1 + i]))
return false;
}
if (flags & StackFrame::CONSTRUCTING)
newscript->typeSetNewCalled(cx);
else
newscript->typeSetThis(cx, &argTypes[0]);
for (unsigned i = 0; i < argc; i++)
newscript->typeSetArgument(cx, i, &argTypes[1 + i]);
} else {
CallArgs args = CallArgsFromVp(argc, vp);
if (!cx->typeMonitorCall(args, flags & StackFrame::CONSTRUCTING))
return false;
cx->typeMonitorCall(args, flags & StackFrame::CONSTRUCTING);
}
/* Preserve f.regs.fp while pushing the new frame. */
FrameRegs regs = f.regs;
PreserveRegsGuard regsGuard(cx, regs);
/* Get pointer to new frame/slots, prepare arguments. */
StackFrame *newfp = cx->stack.getInlineFrameWithinLimit(cx, f.regs.sp, argc,
newfun, newscript, &flags,
f.entryfp, &f.stackLimit);
f.entryfp, &f.stackLimit, NULL);
if (JS_UNLIKELY(!newfp))
return false;
@ -411,8 +406,7 @@ UncachedInlineCall(VMFrame &f, uint32 flags, void **pret, bool *unjittable, uint
SetValueRangeToUndefined(newfp->slots(), newscript->nfixed);
/* Officially push the frame. */
cx->stack.pushInlineFrame(newscript, newfp, f.regs);
JS_ASSERT(newfp == f.fp());
cx->stack.pushInlineFrame(newscript, newfp, regs);
/* Scope with a call object parented by callee's parent. */
if (newfun->isHeavyweight() && !js::CreateFunCallObject(cx, newfp))
@ -443,7 +437,7 @@ UncachedInlineCall(VMFrame &f, uint32 flags, void **pret, bool *unjittable, uint
* rejoining into a recompiled frame then the code patching up
* doubles needs to see the calling script's frame.
*/
f.regs.popFrame((Value *) f.regs.fp());
regs.popFrame((Value *) regs.fp());
return true;
}
}
@ -626,6 +620,19 @@ js_InternalThrow(VMFrame &f)
StackFrame *fp = cx->fp();
JSScript *script = fp->script();
if (!fp->jit()) {
/*
* This frame had JIT code at one point, but it has since been
* discarded due to a recompilation. Recompile it now. This can only
* fail due to OOM, in which case that OOM will propagate above the
* JaegerShot activation.
*/
CompileStatus status = TryCompile(cx, fp);
if (status != Compile_Okay)
return NULL;
}
return script->nativeCodeForPC(fp->isConstructing(), pc);
}
@ -1153,3 +1160,466 @@ stubs::InvokeTracer(VMFrame &f)
# endif /* JS_MONOIC */
#endif /* JS_TRACER */
/* :XXX: common out with identical copy in Compiler.cpp */
#if defined(JS_METHODJIT_SPEW)
static const char *OpcodeNames[] = {
# define OPDEF(op,val,name,token,length,nuses,ndefs,prec,format) #name,
# include "jsopcode.tbl"
# undef OPDEF
};
#endif
static void
FinishVarIncOp(VMFrame &f, RejoinState rejoin, Value ov, Value nv, Value *vp)
{
/* Finish an increment operation on a LOCAL or ARG. These do not involve property accesses. */
JS_ASSERT(rejoin == REJOIN_POS || rejoin == REJOIN_BINARY);
JSContext *cx = f.cx;
JSOp op = JSOp(*f.pc());
const JSCodeSpec *cs = &js_CodeSpec[op];
unsigned i = GET_SLOTNO(f.pc());
Value *var = (JOF_TYPE(cs->format) == JOF_LOCAL) ? f.fp()->slots() + i : &f.fp()->formalArg(i);
if (rejoin == REJOIN_POS) {
double d = ov.toNumber();
double N = (cs->format & JOF_INC) ? 1 : -1;
if (!nv.setNumber(d + N))
f.script()->typeMonitorOverflow(cx, f.pc());
}
*var = nv;
*vp = (cs->format & JOF_POST) ? ov : nv;
}
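A worked illustration of the post/pre distinction being replayed here, for "x++" on a local (JSOP_LOCALINC, whose format has JOF_POST and JOF_INC set):
//   ov   = old numeric value of x (loaded before the stub call)
//   nv   = ov + 1                 (computed here when rejoin == REJOIN_POS)
//   *var = nv                     the local slot now holds ov + 1
//   *vp  = ov                     JOF_POST: the expression result is the old value
//
// For "++x" (JSOP_INCLOCAL, no JOF_POST) the same code stores nv into the
// slot and yields nv as the expression result instead.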
static bool
FinishObjIncOp(VMFrame &f, RejoinState rejoin, Value objv, Value ov, Value nv, Value *vp)
{
/*
* Finish a property access increment operation on a GNAME, NAME or PROP. We don't need
* to handle ELEM as these are always stubbed.
*/
JS_ASSERT(rejoin == REJOIN_BINDNAME || rejoin == REJOIN_GETTER ||
rejoin == REJOIN_POS || rejoin == REJOIN_BINARY);
JSContext *cx = f.cx;
JSObject *obj = ValueToObject(cx, &objv);
if (!obj)
return false;
JSOp op = JSOp(*f.pc());
const JSCodeSpec *cs = &js_CodeSpec[op];
JS_ASSERT(JOF_TYPE(cs->format) == JOF_ATOM);
jsid id = ATOM_TO_JSID(f.script()->getAtom(GET_SLOTNO(f.pc())));
if (rejoin == REJOIN_BINDNAME && !obj->getProperty(cx, id, &ov))
return false;
if (rejoin == REJOIN_BINDNAME || rejoin == REJOIN_GETTER) {
double d;
if (!ValueToNumber(cx, ov, &d))
return false;
ov.setNumber(d);
}
if (rejoin == REJOIN_BINDNAME || rejoin == REJOIN_GETTER || rejoin == REJOIN_POS) {
double d = ov.toNumber();
double N = (cs->format & JOF_INC) ? 1 : -1;
if (!nv.setNumber(d + N))
f.script()->typeMonitorOverflow(cx, f.pc());
}
cx->typeMonitorAssign(obj, id, nv);
uint32 setPropFlags = (cs->format & JOF_NAME)
? JSRESOLVE_ASSIGNING
: JSRESOLVE_ASSIGNING | JSRESOLVE_QUALIFIED;
{
JSAutoResolveFlags rf(cx, setPropFlags);
if (!obj->setProperty(cx, id, &nv, f.script()->strictModeCode))
return false;
}
*vp = (cs->format & JOF_POST) ? ov : nv;
return true;
}
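The rejoin state records how far the fused op progressed before calling into the VM, so this function resumes at the matching rung and falls through the remaining steps (a summary of the branches above):
//   REJOIN_BINDNAME:  have obj          -> getProperty, ToNumber, add, set
//   REJOIN_GETTER:    have obj and ov   -> ToNumber, add, set
//   REJOIN_POS:       have numeric ov   -> add, set
//   REJOIN_BINARY:    have ov and nv    -> assign and monitor only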
extern "C" void *
js_InternalInterpret(void *returnData, void *returnType, void *returnReg, js::VMFrame &f)
{
JSRejoinState jsrejoin = f.fp()->rejoin();
RejoinState rejoin;
if (jsrejoin & 0x1) {
/* Rejoin after a scripted call finished. Restore f.regs.pc and f.regs.inlined (NULL) */
uint32 pcOffset = jsrejoin >> 1;
f.regs.pc = f.fp()->script()->code + pcOffset;
f.regs.clearInlined();
rejoin = REJOIN_SCRIPTED;
} else {
rejoin = (RejoinState) (jsrejoin >> 1);
}
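JSRejoinState packs either a RejoinState or a scripted-call bytecode offset into one word, with bit 0 as the tag; the encoding side appears below in the REJOIN_CALL_PROLOGUE case (fp->setRejoin(REJOIN_SCRIPTED | ((pc - script->code) << 1))). A standalone sketch of the scheme (the enum values are hypothetical; only the packing is taken from this code):
#include <cassert>
#include <cstdint>

typedef uint32_t JSRejoinState;
enum RejoinState { REJOIN_SCRIPTED = 1, REJOIN_FALLTHROUGH = 2 };

// Bit 0 set: payload is a pc offset to rejoin after a scripted call.
JSRejoinState packScripted(uint32_t pcOffset) { return REJOIN_SCRIPTED | (pcOffset << 1); }
// Bit 0 clear: payload is the RejoinState itself.
JSRejoinState packState(RejoinState rejoin) { return (JSRejoinState)rejoin << 1; }

int main() {
    JSRejoinState r = packScripted(24);
    assert(r & 0x1);
    assert((r >> 1) == 24);               // f.regs.pc = script->code + 24
    r = packState(REJOIN_FALLTHROUGH);
    assert(!(r & 0x1));
    assert((r >> 1) == REJOIN_FALLTHROUGH);
}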
JSContext *cx = f.cx;
StackFrame *fp = f.regs.fp();
JSScript *script = fp->script();
jsbytecode *pc = f.regs.pc;
analyze::UntrapOpcode untrap(cx, script, pc);
JSOp op = JSOp(*pc);
const JSCodeSpec *cs = &js_CodeSpec[op];
analyze::AutoEnterAnalysis enter(cx);
analyze::ScriptAnalysis *analysis = script->analysis(cx);
if (analysis && !analysis->ranBytecode())
analysis->analyzeBytecode(cx);
if (!analysis || analysis->OOM()) {
js_ReportOutOfMemory(cx);
return js_InternalThrow(f);
}
/*
* f.regs.sp is not normally maintained by stubs (except for call prologues
* where it indicates the new frame), so is not expected to be coherent
* here. Update it to its value at the start of the opcode.
*/
Value *oldsp = f.regs.sp;
f.regs.sp = fp->base() + analysis->getCode(pc).stackDepth;
jsbytecode *nextpc = pc + analyze::GetBytecodeLength(pc);
Value *nextsp = NULL;
if (nextpc != script->code + script->length)
nextsp = fp->base() + analysis->getCode(nextpc).stackDepth;
JS_ASSERT(&cx->regs() == &f.regs);
#ifdef JS_METHODJIT_SPEW
JaegerSpew(JSpew_Recompile, "interpreter rejoin (file \"%s\") (line \"%d\") (op %s)\n",
script->filename, script->lineno, OpcodeNames[op]);
#endif
InterpMode interpMode = JSINTERP_SAFEPOINT;
if ((cs->format & (JOF_INC | JOF_DEC)) && rejoin != REJOIN_FALLTHROUGH && rejoin != REJOIN_RESUME) {
switch (op) {
case JSOP_INCLOCAL:
case JSOP_DECLOCAL:
case JSOP_LOCALINC:
case JSOP_LOCALDEC:
case JSOP_INCARG:
case JSOP_DECARG:
case JSOP_ARGINC:
case JSOP_ARGDEC:
if (rejoin != REJOIN_BINARY || !analysis->incrementInitialValueObserved(pc)) {
/* Stack layout is 'V', 'N' or 'N+1' (only if the N is not needed) */
FinishVarIncOp(f, rejoin, nextsp[-1], nextsp[-1], &nextsp[-1]);
} else {
/* Stack layout is 'N N+1' */
FinishVarIncOp(f, rejoin, nextsp[-1], nextsp[0], &nextsp[-1]);
}
break;
case JSOP_INCGNAME:
case JSOP_DECGNAME:
case JSOP_GNAMEINC:
case JSOP_GNAMEDEC:
case JSOP_INCNAME:
case JSOP_DECNAME:
case JSOP_NAMEINC:
case JSOP_NAMEDEC:
case JSOP_INCPROP:
case JSOP_DECPROP:
case JSOP_PROPINC:
case JSOP_PROPDEC:
if (rejoin != REJOIN_BINARY || !analysis->incrementInitialValueObserved(pc)) {
/* Stack layout is 'OBJ V', 'OBJ N' or 'OBJ N+1' (only if the N is not needed) */
if (!FinishObjIncOp(f, rejoin, nextsp[-1], nextsp[0], nextsp[0], &nextsp[-1]))
return js_InternalThrow(f);
} else {
/* Stack layout is 'N OBJ N+1' */
if (!FinishObjIncOp(f, rejoin, nextsp[0], nextsp[-1], nextsp[1], &nextsp[-1]))
return js_InternalThrow(f);
}
break;
default:
JS_NOT_REACHED("Bad op");
}
rejoin = REJOIN_FALLTHROUGH;
}
switch (rejoin) {
case REJOIN_SCRIPTED: {
#ifdef JS_NUNBOX32
uint64 rvalBits = (uint64)returnData | ((uint64)returnType << 32);
#elif JS_PUNBOX64
uint64 rvalBits = (uint64)returnData | (uint64)returnType;
#else
#error "Unknown boxing format"
#endif
nextsp[-1].setRawBits(rvalBits);
f.regs.pc = nextpc;
break;
}
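How the two return registers recombine depends on the value layout; this mirrors the #ifdef branches above (a sketch, with the jsval tag details simplified):
#include <cstdint>

// NUNBOX32: type and data arrive in separate 32-bit registers; the tag
// occupies the high word of the 64-bit jsval.
uint64_t rvalBitsNunbox32(uint32_t data, uint32_t type) {
    return (uint64_t)data | ((uint64_t)type << 32);
}

// PUNBOX64: both registers are 64-bit with the tag bits already in
// position, so a plain OR reassembles the boxed value.
uint64_t rvalBitsPunbox64(uint64_t data, uint64_t type) {
    return data | type;
}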
case REJOIN_NONE:
JS_NOT_REACHED("Unpossible rejoin!");
break;
case REJOIN_RESUME:
break;
case REJOIN_TRAP:
/* Watch out for the case where the TRAP removed itself. */
interpMode = untrap.trap ? JSINTERP_SKIP_TRAP : JSINTERP_SAFEPOINT;
break;
case REJOIN_FALLTHROUGH:
f.regs.pc = nextpc;
break;
case REJOIN_NATIVE:
case REJOIN_NATIVE_LOWERED: {
/*
* We don't rejoin until after the native stub finishes execution, in
* which case it will have already loaded the return value into the
* return register pair, as for a scripted call. We need to release the
* reference on the compartment's orphaned native pools first, though.
*/
JaegerCompartment *jc = cx->compartment->jaegerCompartment;
JS_ASSERT(jc->orphanedNativeCount);
if (--jc->orphanedNativeCount == 0) {
for (unsigned i = 0; i < jc->orphanedNativePools.length(); i++)
jc->orphanedNativePools[i]->release();
jc->orphanedNativePools.clear();
}
if (rejoin == REJOIN_NATIVE_LOWERED) {
/*
* Lowered natives return like other native calls, but store their
* return values in the 'this' value's slot.
*/
nextsp[-1] = nextsp[0];
}
f.regs.pc = nextpc;
break;
}
case REJOIN_PUSH_BOOLEAN:
nextsp[-1].setBoolean(returnReg != NULL);
f.regs.pc = nextpc;
break;
case REJOIN_PUSH_OBJECT:
nextsp[-1].setObject(* (JSObject *) returnReg);
f.regs.pc = nextpc;
break;
case REJOIN_DEFLOCALFUN:
fp->slots()[GET_SLOTNO(pc)].setObject(* (JSObject *) returnReg);
f.regs.pc = nextpc;
break;
case REJOIN_THIS_PROTOTYPE: {
JSObject *callee = &fp->callee();
JSObject *proto = f.regs.sp[0].isObject() ? &f.regs.sp[0].toObject() : NULL;
JSObject *obj = js_CreateThisForFunctionWithProto(cx, callee, proto);
if (!obj)
return js_InternalThrow(f);
fp->formalArgs()[-1].setObject(*obj);
if (script->debugMode || Probes::callTrackingActive(cx))
js::ScriptDebugPrologue(cx, fp);
break;
}
case REJOIN_CHECK_ARGUMENTS: {
/*
* Do all the work needed in arity check JIT prologues after the
* arguments check occurs (FixupArity has been called if needed, but
 * the stack check and late prologue have not been performed).
*/
if (!CheckStackQuota(f))
return js_InternalThrow(f);
if (fp->fun()->isHeavyweight()) {
if (!js::CreateFunCallObject(cx, fp))
return js_InternalThrow(f);
}
fp->initCallFrameLatePrologue();
/*
* Use the normal interpreter mode, which will construct the 'this'
* object if this is a constructor frame.
*/
interpMode = JSINTERP_NORMAL;
break;
}
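The replayed arity-check prologue thus runs in the same order the JIT prologue would have (a summary of the code above):
//   1. FixupArity                 - already done before this rejoin was reached
//   2. CheckStackQuota            - can fail; unwinds through js_InternalThrow
//   3. CreateFunCallObject        - only for heavyweight functions
//   4. initCallFrameLatePrologue
//   5. Interpret(JSINTERP_NORMAL) - constructs 'this' for constructor frames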
case REJOIN_CALL_PROLOGUE:
case REJOIN_CALL_PROLOGUE_LOWERED_CALL:
case REJOIN_CALL_PROLOGUE_LOWERED_APPLY:
if (returnReg) {
uint32 argc = 0;
if (rejoin == REJOIN_CALL_PROLOGUE)
argc = GET_ARGC(pc);
else if (rejoin == REJOIN_CALL_PROLOGUE_LOWERED_CALL)
argc = GET_ARGC(pc) - 1;
else
argc = f.u.call.dynamicArgc;
/*
* The caller frame's code was discarded, but we still need to
* execute the callee and have a JIT code pointer to do so.
* Set the argc and frame registers as the call path does, but set
* the callee frame's return address to jump back into the
* Interpoline, and change the caller frame's rejoin to reflect the
* state after the call.
*/
f.regs.restorePartialFrame(oldsp); /* f.regs.sp stored the new frame */
f.scratch = (void *) argc; /* The interpoline will load f.scratch into argc */
f.fp()->setNativeReturnAddress(JS_FUNC_TO_DATA_PTR(void *, JaegerInterpolineScripted));
fp->setRejoin(REJOIN_SCRIPTED | ((pc - script->code) << 1));
return returnReg;
} else {
/*
* The call has already finished, and the return value is on the
* stack. For lowered call/apply, the return value has been stored
* in the wrong slot, so adjust it here.
*/
f.regs.pc = nextpc;
if (rejoin != REJOIN_CALL_PROLOGUE) {
/* Same offset return value as for lowered native calls. */
nextsp[-1] = nextsp[0];
}
}
break;
case REJOIN_CALL_SPLAT: {
f.regs.sp = nextsp + 2 + f.u.call.dynamicArgc;
if (!Invoke(cx, InvokeArgsAlreadyOnTheStack(f.u.call.dynamicArgc, nextsp)))
return js_InternalThrow(f);
nextsp[-1] = nextsp[0];
f.regs.pc = nextpc;
break;
}
case REJOIN_GETTER:
/*
* Match the PC to figure out whether this property fetch is part of a
* fused opcode which needs to be finished.
*/
switch (op) {
case JSOP_NAME:
case JSOP_GETGNAME:
case JSOP_GETGLOBAL:
case JSOP_GETPROP:
case JSOP_LENGTH:
/* Non-fused opcode, state is already correct for the next op. */
f.regs.pc = nextpc;
break;
case JSOP_CALLGNAME:
if (!ComputeImplicitThis(cx, &fp->scopeChain(), nextsp[-2], &nextsp[-1]))
return js_InternalThrow(f);
f.regs.pc = nextpc;
break;
case JSOP_CALLPROP: {
/*
* CALLPROP is compiled in terms of GETPROP for known strings.
* In such cases the top two entries are in place, but are swapped.
*/
JS_ASSERT(nextsp[-2].isString());
Value tmp = nextsp[-2];
nextsp[-2] = nextsp[-1];
nextsp[-1] = tmp;
f.regs.pc = nextpc;
break;
}
case JSOP_INSTANCEOF: {
/*
* If we recompiled from a getprop used within JSOP_INSTANCEOF,
* the stack looks like 'LHS RHS protov'. Inline the remaining
* portion of fun_hasInstance.
*/
if (f.regs.sp[0].isPrimitive()) {
js_ReportValueError(cx, JSMSG_BAD_PROTOTYPE, -1, f.regs.sp[-1], NULL);
return js_InternalThrow(f);
}
nextsp[-1].setBoolean(js_IsDelegate(cx, &f.regs.sp[0].toObject(), f.regs.sp[-2]));
f.regs.pc = nextpc;
break;
}
default:
JS_NOT_REACHED("Bad rejoin getter op");
}
break;
case REJOIN_POS:
/* Convert-to-number which might be part of an INC* op. */
JS_ASSERT(op == JSOP_POS);
f.regs.pc = nextpc;
break;
case REJOIN_BINARY:
/* Binary arithmetic op which might be part of an INC* op. */
JS_ASSERT(op == JSOP_ADD || op == JSOP_SUB || op == JSOP_MUL || op == JSOP_DIV);
f.regs.pc = nextpc;
break;
case REJOIN_BRANCH: {
/*
* This must be an opcode fused with IFNE/IFEQ. Unfused IFNE/IFEQ are
* implemented in terms of ValueToBoolean, which is infallible and
* cannot trigger recompilation.
*/
bool takeBranch = false;
switch (JSOp(*nextpc)) {
case JSOP_IFNE:
case JSOP_IFNEX:
takeBranch = returnReg != NULL;
break;
case JSOP_IFEQ:
case JSOP_IFEQX:
takeBranch = returnReg == NULL;
break;
default:
JS_NOT_REACHED("Bad branch op");
}
if (takeBranch)
f.regs.pc = nextpc + GET_JUMP_OFFSET(nextpc);
else
f.regs.pc = nextpc + analyze::GetBytecodeLength(nextpc);
break;
}
default:
JS_NOT_REACHED("Missing rejoin");
}
f.regs.sp = fp->base() + analysis->getCode(f.regs.pc).stackDepth;
/* Reinsert any trap before resuming in the interpreter. */
untrap.retrap();
if (!Interpret(cx, NULL, 0, interpMode))
return js_InternalThrow(f);
/* Force construction of the frame's return value, if it was not set. */
fp->returnValue();
/*
* The frame is done, but if it finished in the interpreter the call/args
* objects need to be detached from the frame.
*/
fp->putActivationObjects();
return fp->nativeReturnAddress();
}

View File

@ -194,7 +194,7 @@ LoopState::addJoin(unsigned index, bool script)
}
void
LoopState::addInvariantCall(Jump jump, Label label, bool ool, bool entry, unsigned patchIndex, bool patchCall)
LoopState::addInvariantCall(Jump jump, Label label, bool ool, bool entry, unsigned patchIndex)
{
RestoreInvariantCall call;
call.jump = jump;
@ -202,7 +202,6 @@ LoopState::addInvariantCall(Jump jump, Label label, bool ool, bool entry, unsign
call.ool = ool;
call.entry = entry;
call.patchIndex = patchIndex;
call.patchCall = patchCall;
call.temporaryCopies = frame.getTemporaryCopies();
restoreInvariantCalls.append(call);
@ -230,7 +229,7 @@ LoopState::flushLoop(StubCompiler &stubcc)
Assembler &masm = cc.getAssembler(true);
Vector<Jump> failureJumps(cx);
jsbytecode *pc = cc.getInvariantPC(call.patchIndex, call.patchCall);
jsbytecode *pc = cc.getInvariantPC(call.patchIndex);
if (call.ool) {
call.jump.linkTo(masm.label(), &masm);
@ -250,7 +249,7 @@ LoopState::flushLoop(StubCompiler &stubcc)
* Call InvariantFailure, setting up the return address to
* patch and any value for the call to return.
*/
InvariantCodePatch *patch = cc.getInvariantPatch(call.patchIndex, call.patchCall);
InvariantCodePatch *patch = cc.getInvariantPatch(call.patchIndex);
patch->hasPatch = true;
patch->codePatch = masm.storePtrWithPatch(ImmPtr(NULL),
FrameAddress(offsetof(VMFrame, scratch)));
@ -1131,8 +1130,10 @@ LoopState::restoreInvariants(jsbytecode *pc, Assembler &masm,
Registers regs(Registers::TempRegs);
regs.takeReg(Registers::ReturnReg);
JS_ASSERT(!regs.hasReg(JSReturnReg_Data));
JS_ASSERT(!regs.hasReg(JSReturnReg_Type));
if (regs.hasReg(JSReturnReg_Data))
regs.takeReg(JSReturnReg_Data);
if (regs.hasReg(JSReturnReg_Type))
regs.takeReg(JSReturnReg_Type);
RegisterID T0 = regs.takeAnyReg().reg();
RegisterID T1 = regs.takeAnyReg().reg();

View File

@ -146,10 +146,7 @@ class LoopState : public MacroAssemblerTypedefs
Label label;
bool ool;
bool entry;
/* Index into Compiler's callSites or rejoinSites */
unsigned patchIndex;
bool patchCall;
unsigned patchIndex; /* Index into Compiler's callSites. */
/* Any copies of temporaries on the stack */
Vector<TemporaryCopy> *temporaryCopies;
@ -240,7 +237,7 @@ class LoopState : public MacroAssemblerTypedefs
bool generatingInvariants() { return !skipAnalysis; }
/* Add a call with trailing jump/label, after which invariants need to be restored. */
void addInvariantCall(Jump jump, Label label, bool ool, bool entry, unsigned patchIndex, bool patchCall);
void addInvariantCall(Jump jump, Label label, bool ool, bool entry, unsigned patchIndex);
uint32 headOffset() { return lifetime->head; }
uint32 getLoopRegs() { return loopRegs.freeMask; }

View File

@ -86,7 +86,8 @@ StackFrame::methodjitStaticAsserts()
*
* JaegerTrampoline - Executes a method JIT-compiled JSFunction. This function
* creates a VMFrame on the machine stack and jumps into JIT'd code. The JIT'd
* code will eventually jump back to the VMFrame.
* code will eventually jump back to JaegerTrampolineReturn, clean up the
* VMFrame and return into C++.
*
* - Called from C++ function EnterMethodJIT.
* - Parameters: cx, fp, code, stackLimit
@ -107,7 +108,19 @@ StackFrame::methodjitStaticAsserts()
* at. Because the jit-code ABI conditions are satisfied, we can just jump to
* that point.
*
* - Used by RunTracer()
* JaegerInterpoline - After returning from a stub or scripted call made by JIT'd
* code, calls into Interpret and has it finish execution of the JIT'd script.
* If we have to throw away the JIT code for a script for some reason (either
* a new trap is added for debug code, or assumptions made by the JIT code
* have broken and forced its invalidation), the call returns into the
* Interpoline which calls Interpret to finish the JIT frame. The Interpret
* call may eventually recompile the script, in which case it will join into
* that code with a new VMFrame activation and JaegerTrampoline.
*
* - Returned into from stub calls originally made from JIT code.
* - An alternate version, JaegerInterpolineScripted, returns from scripted
* calls originally made from JIT code, and fixes up state to match the
* stub call ABI.
*/
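A C-level sketch of the contract the Interpolines implement, assuming the js_InternalInterpret signature used by the assembly call sites below; the body is illustrative only, not the actual implementation.

extern "C" void *js_InternalInterpret(void *returnData, void *returnType,
                                      void *returnReg, js::VMFrame &f);

/* What the Interpoline assembly effectively does after a patched return: */
void *ncode = js_InternalInterpret(returnData, returnType, returnReg, f);
if (!ncode) {
    /* The frame finished in the interpreter: pop the VMFrame, return to C++. */
} else {
    /* Reload fp and the return value registers, then jump to ncode. */
}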
#ifdef JS_METHODJIT_PROFILE_STUBS
@ -119,7 +132,8 @@ extern "C" void JS_FASTCALL
PushActiveVMFrame(VMFrame &f)
{
f.entryfp->script()->compartment->jaegerCompartment->pushActiveFrame(&f);
f.regs.fp()->setNativeReturnAddress(JS_FUNC_TO_DATA_PTR(void*, JaegerTrampolineReturn));
f.entryncode = f.entryfp->nativeReturnAddress();
f.entryfp->setNativeReturnAddress(JS_FUNC_TO_DATA_PTR(void*, JaegerTrampolineReturn));
f.regs.clearInlined();
}
@ -127,6 +141,7 @@ extern "C" void JS_FASTCALL
PopActiveVMFrame(VMFrame &f)
{
f.entryfp->script()->compartment->jaegerCompartment->popActiveFrame();
f.entryfp->setNativeReturnAddress(f.entryncode);
}
extern "C" void JS_FASTCALL
@ -180,7 +195,8 @@ JS_STATIC_ASSERT(sizeof(VMFrame) % 16 == 0);
* If these assertions break, update the constants below.
* *** DANGER ***
*/
JS_STATIC_ASSERT(offsetof(VMFrame, savedRBX) == 0x58);
JS_STATIC_ASSERT(offsetof(VMFrame, savedRBX) == 0x68);
JS_STATIC_ASSERT(offsetof(VMFrame, scratch) == 0x18);
JS_STATIC_ASSERT(VMFrame::offsetOfFp == 0x38);
JS_STATIC_ASSERT(JSVAL_TAG_MASK == 0xFFFF800000000000LL);
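The changed constants follow directly from the new pointer-sized VMFrame fields; a quick arithmetic check (assuming no other layout changes):

/*
 * x64: stubRejoin + entryncode = 2 * 8 = 0x10 bytes, so savedRBX moves
 *      from 0x58 to 0x68.
 * x86: stubRejoin, entryncode and the two alignment words (unused0/1)
 *      = 4 * 4 = 0x10 bytes, so savedEBX moves from 0x2C to 0x3C below.
 */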
@ -210,6 +226,8 @@ SYMBOL_STRING(JaegerTrampoline) ":" "\n"
* rcx = inlineCallCount
* fp must go into rbx
*/
"pushq $0x0" "\n" /* stubRejoin */
"pushq %rsi" "\n" /* entryncode */
"pushq %rsi" "\n" /* entryfp */
"pushq %rcx" "\n" /* inlineCallCount */
"pushq %rdi" "\n" /* cx */
@ -238,11 +256,11 @@ asm (
".globl " SYMBOL_STRING(JaegerTrampolineReturn) "\n"
SYMBOL_STRING(JaegerTrampolineReturn) ":" "\n"
"or %rdi, %rsi" "\n"
"movq %rcx, 0x30(%rbx)" "\n"
"movq %rsi, 0x30(%rbx)" "\n"
"movq %rsp, %rdi" "\n"
"call " SYMBOL_STRING_VMFRAME(PopActiveVMFrame) "\n"
"addq $0x58, %rsp" "\n"
"addq $0x68, %rsp" "\n"
"popq %rbx" "\n"
"popq %r15" "\n"
"popq %r14" "\n"
@ -265,7 +283,7 @@ SYMBOL_STRING(JaegerThrowpoline) ":" "\n"
"throwpoline_exit:" "\n"
"movq %rsp, %rdi" "\n"
"call " SYMBOL_STRING_VMFRAME(PopActiveVMFrame) "\n"
"addq $0x58, %rsp" "\n"
"addq $0x68, %rsp" "\n"
"popq %rbx" "\n"
"popq %r15" "\n"
"popq %r14" "\n"
@ -276,6 +294,45 @@ SYMBOL_STRING(JaegerThrowpoline) ":" "\n"
"ret" "\n"
);
asm (
".text\n"
".globl " SYMBOL_STRING(JaegerInterpoline) "\n"
SYMBOL_STRING(JaegerInterpoline) ":" "\n"
"movq %rsp, %rcx" "\n"
"movq %rax, %rdx" "\n"
"call " SYMBOL_STRING_RELOC(js_InternalInterpret) "\n"
"movq 0x38(%rsp), %rbx" "\n" /* Load frame */
"movq 0x30(%rbx), %rsi" "\n" /* Load rval payload */
"and %r14, %rsi" "\n" /* Mask rval payload */
"movq 0x30(%rbx), %rdi" "\n" /* Load rval type */
"and %r13, %rdi" "\n" /* Mask rval type */
"movq 0x18(%rsp), %rcx" "\n" /* Load scratch -> argc */
"testq %rax, %rax" "\n"
"je interpoline_exit" "\n"
"jmp *%rax" "\n"
"interpoline_exit:" "\n"
"movq %rsp, %rdi" "\n"
"call " SYMBOL_STRING_VMFRAME(PopActiveVMFrame) "\n"
"addq $0x68, %rsp" "\n"
"popq %rbx" "\n"
"popq %r15" "\n"
"popq %r14" "\n"
"popq %r13" "\n"
"popq %r12" "\n"
"popq %rbp" "\n"
"xorq %rax,%rax" "\n"
"ret" "\n"
);
asm (
".text\n"
".globl " SYMBOL_STRING(JaegerInterpolineScripted) "\n"
SYMBOL_STRING(JaegerInterpolineScripted) ":" "\n"
"movq 0x20(%rbx), %rbx" "\n" /* load prev */
"movq %rbx, 0x38(%rsp)" "\n"
"jmp " SYMBOL_STRING_RELOC(JaegerInterpoline) "\n"
);
# elif defined(JS_CPU_X86)
/*
@ -285,8 +342,9 @@ SYMBOL_STRING(JaegerThrowpoline) ":" "\n"
* up the argument.
* *** DANGER ***
*/
JS_STATIC_ASSERT(offsetof(VMFrame, savedEBX) == 0x2c);
JS_STATIC_ASSERT((VMFrame::offsetOfFp) == 0x1C);
JS_STATIC_ASSERT(offsetof(VMFrame, savedEBX) == 0x3C);
JS_STATIC_ASSERT(offsetof(VMFrame, scratch) == 0xC);
JS_STATIC_ASSERT(VMFrame::offsetOfFp == 0x1C);
asm (
".text\n"
@ -303,6 +361,10 @@ SYMBOL_STRING(JaegerTrampoline) ":" "\n"
/* Build the JIT frame. Push fields in order,
* then align the stack to form esp == VMFrame. */
"movl 12(%ebp), %ebx" "\n" /* load fp */
"pushl %ebx" "\n" /* unused1 */
"pushl %ebx" "\n" /* unused0 */
"pushl $0x0" "\n" /* stubRejoin */
"pushl %ebx" "\n" /* entryncode */
"pushl %ebx" "\n" /* entryfp */
"pushl 20(%ebp)" "\n" /* stackLimit */
"pushl 8(%ebp)" "\n" /* cx */
@ -316,7 +378,7 @@ SYMBOL_STRING(JaegerTrampoline) ":" "\n"
"call " SYMBOL_STRING_VMFRAME(PushActiveVMFrame) "\n"
"movl 28(%esp), %ebp" "\n" /* load fp for JIT code */
"jmp *72(%esp)" "\n"
"jmp *88(%esp)" "\n"
);
asm (
@ -326,11 +388,11 @@ SYMBOL_STRING(JaegerTrampolineReturn) ":" "\n"
"movl %esi, 0x18(%ebp)" "\n"
"movl %edi, 0x1C(%ebp)" "\n"
"movl %esp, %ebp" "\n"
"addl $0x38, %ebp" "\n" /* Restore stack at STACK_BASE_DIFFERENCE */
"addl $0x48, %ebp" "\n" /* Restore stack at STACK_BASE_DIFFERENCE */
"movl %esp, %ecx" "\n"
"call " SYMBOL_STRING_VMFRAME(PopActiveVMFrame) "\n"
"addl $0x2C, %esp" "\n"
"addl $0x3C, %esp" "\n"
"popl %ebx" "\n"
"popl %edi" "\n"
"popl %esi" "\n"
@ -359,7 +421,7 @@ SYMBOL_STRING(JaegerThrowpoline) ":" "\n"
"throwpoline_exit:" "\n"
"movl %esp, %ecx" "\n"
"call " SYMBOL_STRING_VMFRAME(PopActiveVMFrame) "\n"
"addl $0x2c, %esp" "\n"
"addl $0x3c, %esp" "\n"
"popl %ebx" "\n"
"popl %edi" "\n"
"popl %esi" "\n"
@ -368,6 +430,45 @@ SYMBOL_STRING(JaegerThrowpoline) ":" "\n"
"ret" "\n"
);
asm (
".text\n"
".globl " SYMBOL_STRING(JaegerInterpoline) "\n"
SYMBOL_STRING(JaegerInterpoline) ":" "\n"
/* Align the stack to 16 bytes. */
"pushl %esp" "\n"
"pushl %eax" "\n"
"pushl %edi" "\n"
"pushl %esi" "\n"
"call " SYMBOL_STRING_RELOC(js_InternalInterpret) "\n"
"addl $0x10, %esp" "\n"
"movl 0x1C(%esp), %ebp" "\n" /* Load frame */
"movl 0x18(%ebp), %esi" "\n" /* Load rval payload */
"movl 0x1C(%ebp), %edi" "\n" /* Load rval type */
"movl 0xC(%esp), %ecx" "\n" /* Load scratch -> argc, for any scripted call */
"testl %eax, %eax" "\n"
"je interpoline_exit" "\n"
"jmp *%eax" "\n"
"interpoline_exit:" "\n"
"movl %esp, %ecx" "\n"
"call " SYMBOL_STRING_VMFRAME(PopActiveVMFrame) "\n"
"addl $0x3c, %esp" "\n"
"popl %ebx" "\n"
"popl %edi" "\n"
"popl %esi" "\n"
"popl %ebp" "\n"
"xorl %eax, %eax" "\n"
"ret" "\n"
);
asm (
".text\n"
".globl " SYMBOL_STRING(JaegerInterpolineScripted) "\n"
SYMBOL_STRING(JaegerInterpolineScripted) ":" "\n"
"movl 0x10(%ebp), %ebp" "\n" /* load prev. :XXX: STATIC_ASSERT this */
"movl %ebp, 0x1C(%esp)" "\n"
"jmp " SYMBOL_STRING_RELOC(JaegerInterpoline) "\n"
);
# elif defined(JS_CPU_ARM)
JS_STATIC_ASSERT(sizeof(VMFrame) == 80);
@ -524,7 +625,8 @@ SYMBOL_STRING(JaegerStubVeneer) ":" "\n"
* up the argument.
* *** DANGER ***
*/
JS_STATIC_ASSERT(offsetof(VMFrame, savedEBX) == 0x2c);
JS_STATIC_ASSERT(offsetof(VMFrame, savedEBX) == 0x3C);
JS_STATIC_ASSERT(offsetof(VMFrame, scratch) == 0xC);
JS_STATIC_ASSERT(VMFrame::offsetOfFp == 0x1C);
extern "C" {
@ -545,6 +647,10 @@ extern "C" {
* then align the stack to form esp == VMFrame. */
mov ebx, [ebp + 12];
push ebx;
push ebx;
push 0x0;
push ebx;
push ebx;
push [ebp + 20];
push [ebp + 8];
push ebx;
@ -557,7 +663,7 @@ extern "C" {
call PushActiveVMFrame;
mov ebp, [esp + 28]; /* load fp for JIT code */
jmp dword ptr [esp + 72];
jmp dword ptr [esp + 88];
}
}
@ -567,11 +673,11 @@ extern "C" {
mov [ebp + 0x18], esi;
mov [ebp + 0x1C], edi;
mov ebp, esp;
add ebp, 0x38; /* Restore stack at STACK_BASE_DIFFERENCE */
add ebp, 0x48; /* Restore stack at STACK_BASE_DIFFERENCE */
mov ecx, esp;
call PopActiveVMFrame;
add esp, 0x2C;
add esp, 0x3C;
pop ebx;
pop edi;
@ -602,7 +708,7 @@ extern "C" {
throwpoline_exit:
mov ecx, esp;
call PopActiveVMFrame;
add esp, 0x2c;
add esp, 0x3c;
pop ebx;
pop edi;
pop esi;
@ -611,6 +717,41 @@ extern "C" {
ret;
}
}
__declspec(naked) void *JaegerInterpoline() {
__asm {
/* Align the stack to 16 bytes. */
push esp;
push eax;
push edi;
push esi;
call js_InternalInterpret;
add esp, 0x10;
mov ebp, [esp + 0x1C]; /* Load frame */
mov esi, [ebp + 0x18]; /* Load rval payload */
mov edi, [ebp + 0x1C]; /* Load rval type */
mov ecx, [esp + 0xC]; /* Load scratch -> argc */
test eax, eax;
je interpoline_exit;
jmp eax;
interpoline_exit:
mov ecx, esp;
call PopActiveVMFrame;
add esp, 0x3c;
pop ebx;
pop edi;
pop esi;
pop ebp;
xor eax, eax;
ret;
}
}
__declspec(naked) void *JaegerInterpolineScripted() {
__asm {
mov ebp, [ebp + 0x10]; /* Load prev */
mov [esp + 0x1C], ebp; /* fp -> regs.fp */
jmp JaegerInterpoline;
}
}
}
// Windows x64 uses assembler version since compiler doesn't support
@ -629,6 +770,10 @@ JS_STATIC_ASSERT(JSVAL_PAYLOAD_MASK == 0x00007FFFFFFFFFFFLL);
#endif /* _WIN64 */
JaegerCompartment::JaegerCompartment()
: orphanedNativeCount(0), orphanedNativePools(SystemAllocPolicy())
{}
bool
JaegerCompartment::Initialize()
{
@ -767,16 +912,10 @@ JITScript::callSites() const
return (js::mjit::CallSite *)((char *)inlineFrames() + sizeof(js::mjit::InlineFrame) * nInlineFrames);
}
js::mjit::RejoinSite *
JITScript::rejoinSites() const
{
return (js::mjit::RejoinSite *)((char *)callSites() + sizeof(js::mjit::CallSite) * nCallSites);
}
char *
JITScript::commonSectionLimit() const
{
return (char *)rejoinSites() + sizeof(js::mjit::RejoinSite) * nRejoinSites;
return ((char *)callSites() + sizeof(js::mjit::CallSite) * nCallSites);
}
#ifdef JS_MONOIC
@ -1054,6 +1193,15 @@ JITScript::nativeToPC(void *returnAddress, CallSite **pinline) const
return script->code + ic.call->pcOffset;
}
jsbytecode *
mjit::NativeToPC(JITScript *jit, void *ncode)
{
CallSite *inline_;
jsbytecode *pc = jit->nativeToPC(ncode, &inline_);
JS_ASSERT(!inline_);
return pc;
}
void
JITScript::trace(JSTracer *trc)
{

View File

@ -112,17 +112,12 @@ struct VMFrame
JSContext *cx;
Value *stackLimit;
StackFrame *entryfp;
void *entryncode;
JSRejoinState stubRejoin; /* How to rejoin if inside a call from an IC stub. */
/*
* Value stored in the 'scratch' field when making a native call. This is used
* by the recompiler and this value must not be written in other cases
* (i.e. scratch must be used to store a pointer, not an integer).
* :XXX: remove horrible hack.
*/
#define NATIVE_CALL_SCRATCH_VALUE (void *) 0x1
/* Scratch value to aid in rejoining from CompileFunction calls made from ICs. */
#define COMPILE_FUNCTION_SCRATCH_VALUE (void *) 0x2
#if JS_BITS_PER_WORD == 32
void *unused0, *unused1; /* For 16 byte alignment */
#endif
#if defined(JS_CPU_X86)
void *savedEBX;
@ -222,14 +217,98 @@ extern "C" void JaegerStubVeneer(void);
namespace mjit {
/*
* For a C++ or scripted call made from JIT code, indicates properties of the
* register and stack state after the call finishes, which RejoinInterpreter
* must use to construct a coherent state for rejoining into the interpreter.
*/
enum RejoinState {
/*
* Return value of call at this bytecode is held in ReturnReg_{Data,Type}
* and needs to be restored before starting the next bytecode. f.regs.pc
* is *not* intact when rejoining from a scripted call (unlike all other
* rejoin states). The pc's offset into the script is stored in the upper
* 31 bits of the rejoin state, and the remaining values for RejoinState
* are shifted left by one in stack frames to leave the lower bit set only
* for scripted calls.
*/
REJOIN_SCRIPTED = 1,
/* Recompilations and frame expansion are impossible for this call. */
REJOIN_NONE,
/* State is coherent for the start of the current bytecode. */
REJOIN_RESUME,
/*
* State is coherent for the start of the current bytecode, which is a TRAP
* that has already been invoked and should not be invoked again.
*/
REJOIN_TRAP,
/* State is coherent for the start of the next (fallthrough) bytecode. */
REJOIN_FALLTHROUGH,
/*
* As for REJOIN_FALLTHROUGH, but holds a reference on the compartment's
* orphaned native pools which needs to be reclaimed by InternalInterpret.
* The return value needs to be adjusted if REJOIN_NATIVE_LOWERED.
*/
REJOIN_NATIVE,
REJOIN_NATIVE_LOWERED,
/* Call returns a payload, which should be pushed before starting next bytecode. */
REJOIN_PUSH_BOOLEAN,
REJOIN_PUSH_OBJECT,
/* Call returns an object, which should be assigned to a local per the current bytecode. */
REJOIN_DEFLOCALFUN,
/*
* During the prologue of constructing scripts, after the function's
* .prototype property has been fetched.
*/
REJOIN_THIS_PROTOTYPE,
/*
* Type check on arguments failed during prologue, need stack check and
* call object creation before script can execute.
*/
REJOIN_CHECK_ARGUMENTS,
/*
* State after calling a stub which returns a JIT code pointer for a call
* or NULL for an already-completed call.
*/
REJOIN_CALL_PROLOGUE,
REJOIN_CALL_PROLOGUE_LOWERED_CALL,
REJOIN_CALL_PROLOGUE_LOWERED_APPLY,
/* Triggered a recompilation while placing the arguments to an apply on the stack. */
REJOIN_CALL_SPLAT,
/* FALLTHROUGH ops which can be implemented as part of an IncOp. */
REJOIN_BINDNAME,
REJOIN_GETTER,
REJOIN_POS,
REJOIN_BINARY,
/*
* For an opcode fused with IFEQ/IFNE, call returns a boolean indicating
* the result of the comparison and whether to take or not take the branch.
*/
REJOIN_BRANCH
};
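A minimal sketch of the JSRejoinState packing described in the REJOIN_SCRIPTED comment above; the helper names here are illustrative stand-ins for the patch's own ScriptedRejoin/StubRejoin helpers in the Recompiler changes below.

static inline JSRejoinState
PackScriptedRejoin(uint32 pcOffset)
{
    /* Low bit set marks a scripted call; the pc offset rides in the upper 31 bits. */
    return REJOIN_SCRIPTED | (pcOffset << 1);
}

static inline JSRejoinState
PackStubRejoin(RejoinState rejoin)
{
    /* Every other rejoin state is shifted left, leaving the low bit clear. */
    return rejoin << 1;
}

static inline bool
RejoinIsScripted(JSRejoinState state)
{
    return (state & 0x1) == REJOIN_SCRIPTED;
}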
/* Helper to watch for recompilation and frame expansion activity on a compartment. */
struct RecompilationMonitor
{
JSContext *cx;
/*
* If either a recompilation or expansion occurs, then ICs and stubs should
* not depend on the frame or JITs being intact. The two are separated for logging.
* If either inline frame expansion or recompilation occurs, then ICs and
* stubs should not depend on the frame or JITs being intact. The two are
* separated for logging.
*/
unsigned recompilations;
unsigned frameExpansions;
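Typical use, mirroring the PIC changes later in this commit (the recompiled() accessor belongs to the full struct, which this hunk truncates):

RecompilationMonitor monitor(cx);
cx->addTypePropertyId(obj->getType(), shape->id, pic.rhsTypes);
if (monitor.recompiled())
    return Lookup_Uncacheable;  /* The JIT code was discarded; don't touch it. */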
@ -278,6 +357,7 @@ class JaegerCompartment {
public:
bool Initialize();
JaegerCompartment();
~JaegerCompartment() { Finish(); }
JSC::ExecutableAllocator *execAlloc() {
@ -310,6 +390,14 @@ class JaegerCompartment {
return JS_FUNC_TO_DATA_PTR(void *, trampolines.forceReturn);
#endif
}
/*
* References held on pools created for native ICs, where the IC was
* destroyed and we are waiting for the pool to finish use and jump
* into the interpoline.
*/
size_t orphanedNativeCount;
Vector<JSC::ExecutablePool *, 8, SystemAllocPolicy> orphanedNativePools;
};
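A heavily hedged sketch of how these fields are plausibly drained once a REJOIN_NATIVE frame reaches js_InternalInterpret; the exact bookkeeping lives in the interpreter rejoin path and may differ in detail.

/* Assumed reclaim logic: each patched native stub holds one reference. */
JaegerCompartment *jc = cx->compartment->jaegerCompartment;
if (--jc->orphanedNativeCount == 0) {
    for (size_t i = 0; i < jc->orphanedNativePools.length(); i++)
        jc->orphanedNativePools[i]->release();
    jc->orphanedNativePools.clear();
}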
/*
@ -393,7 +481,6 @@ namespace mjit {
struct InlineFrame;
struct CallSite;
struct RejoinSite;
struct NativeMapEntry {
size_t bcOff; /* bytecode offset in script */
@ -418,14 +505,11 @@ struct JITScript {
* Therefore, do not change the section ordering in finishThisUp() without
* changing nMICs() et al as well.
*/
uint32 nNmapPairs:30; /* The NativeMapEntrys are sorted by .bcOff.
uint32 nNmapPairs:31; /* The NativeMapEntrys are sorted by .bcOff.
.ncode values may not be NULL. */
bool singleStepMode:1; /* compiled in "single step mode" */
bool rejoinPoints:1; /* compiled with all rejoin points for
inline frame expansions */
uint32 nInlineFrames;
uint32 nCallSites;
uint32 nRejoinSites;
#ifdef JS_MONOIC
uint32 nGetGlobalNames;
uint32 nSetGlobalNames;
@ -460,7 +544,6 @@ struct JITScript {
NativeMapEntry *nmap() const;
js::mjit::InlineFrame *inlineFrames() const;
js::mjit::CallSite *callSites() const;
js::mjit::RejoinSite *rejoinSites() const;
#ifdef JS_MONOIC
ic::GetGlobalNameIC *getGlobalNames() const;
ic::SetGlobalNameIC *setGlobalNames() const;
@ -554,55 +637,17 @@ struct CallSite
uint32 codeOffset;
uint32 inlineIndex;
uint32 pcOffset;
size_t id;
RejoinState rejoin;
// The identifier is either the address of the stub function being called,
// or one of the below magic identifiers. Each of these can appear at most
// once per opcode.
// Identifier for traps. Since traps can be removed, we make sure they carry over
// from each compilation, and identify them with a single, canonical
// ID. Hopefully a SpiderMonkey file won't have two billion source lines.
static const size_t MAGIC_TRAP_ID = 0;
// Identifier for the return site from a scripted call.
static const size_t NCODE_RETURN_ID = 1;
void initialize(uint32 codeOffset, uint32 inlineIndex, uint32 pcOffset, size_t id) {
void initialize(uint32 codeOffset, uint32 inlineIndex, uint32 pcOffset, RejoinState rejoin) {
this->codeOffset = codeOffset;
this->inlineIndex = inlineIndex;
this->pcOffset = pcOffset;
this->id = id;
this->rejoin = rejoin;
}
bool isTrap() const {
return id == MAGIC_TRAP_ID;
}
};
struct RejoinSite
{
// When doing on stack recompilation, we take a frame that made a call at
// some CallSite in the original JIT and redirect it to a corresponding
// RejoinSite in the new JIT. The rejoin sites are similar to call sites,
// with the exception that they do additional checking and coercions from
// int to double to ensure the stack types are consistent with what the new
// JIT expects.
// Note: we don't rejoin at sites within inline calls, such inline frames
// are expanded first.
uint32 codeOffset;
uint32 pcOffset;
size_t id;
// Identifier which can match any callsite ID in the original script for
// this PC. This should appear after all other rejoin sites at the PC.
static const size_t VARIADIC_ID = 2;
void initialize(uint32 codeOffset, uint32 pcOffset, size_t id) {
this->codeOffset = codeOffset;
this->pcOffset = pcOffset;
this->id = id;
return rejoin == REJOIN_TRAP;
}
};
@ -678,6 +723,8 @@ JSScript::nativeCodeForPC(bool constructing, jsbytecode *pc)
}
extern "C" void JaegerTrampolineReturn();
extern "C" void JaegerInterpoline();
extern "C" void JaegerInterpolineScripted();
#if defined(_MSC_VER) || defined(_WIN64)
extern "C" void *JaegerThrowpoline(js::VMFrame *vmFrame);

View File

@ -626,6 +626,12 @@ class CallCompiler : public BaseCompiler
bool generateFullCallStub(JITScript *from, JSScript *script, uint32 flags)
{
/* We don't support calling CompileFunction from IC stubs in inlined frames. */
if (f.regs.inlined()) {
disable(from);
return true;
}
/*
* Create a stub that works with arity mismatches. Like the fast-path,
* this allocates a frame on the caller side, but also performs extra
@ -655,6 +661,16 @@ class CallCompiler : public BaseCompiler
masm.loadPtr(Address(t0, offset), t0);
Jump hasCode = masm.branchPtr(Assembler::Above, t0, ImmPtr(JS_UNJITTABLE_SCRIPT));
if (cx->typeInferenceEnabled()) {
/*
* Write the rejoin state to indicate this is a compilation call
* made from an IC (the recompiler cannot detect calls made from
* ICs automatically).
*/
masm.storePtr(ImmPtr((void *) ic.frameSize.rejoinState(f.regs.pc, false)),
FrameAddress(offsetof(VMFrame, stubRejoin)));
}
/* Try and compile. On success we get back the nmap pointer. */
void *compilePtr = JS_FUNC_TO_DATA_PTR(void *, stubs::CompileFunction);
if (ic.frameSize.isStatic()) {
@ -841,6 +857,17 @@ class CallCompiler : public BaseCompiler
/* Guard on the function object identity, for now. */
Jump funGuard = masm.branchPtr(Assembler::NotEqual, ic.funObjReg, ImmPtr(obj));
if (cx->typeInferenceEnabled()) {
/*
* Write the rejoin state for the recompiler to use if this call
* triggers recompilation. Natives use a different stack address to
* store the return value than FASTCALLs, and without additional
* information we cannot tell which one is active on a VMFrame.
*/
masm.storePtr(ImmPtr((void *) ic.frameSize.rejoinState(f.regs.pc, true)),
FrameAddress(offsetof(VMFrame, stubRejoin)));
}
/* N.B. After this call, the frame will have a dynamic frame size. */
if (ic.frameSize.isDynamic()) {
masm.fallibleVMCall(cx->typeInferenceEnabled(),
@ -915,18 +942,6 @@ class CallCompiler : public BaseCompiler
masm.storeValue(v, Address(vpReg, sizeof(Value)));
}
if (cx->typeInferenceEnabled()) {
/*
* Write the NATIVE_SCRATCH_VALUE to the 'scratch' field of the
* VMFrame, so the recompiler can tell this was a native call.
* Natives use a different stack address to store the return
* value than FASTCALLs, and without additional information we
* cannot tell which one is active on a VMFrame.
*/
masm.storePtr(ImmPtr(NATIVE_CALL_SCRATCH_VALUE),
FrameAddress(offsetof(VMFrame, scratch)));
}
masm.restoreStackBase();
masm.setupABICall(Registers::NormalCall, 3);
masm.storeArg(2, vpReg);
@ -950,7 +965,7 @@ class CallCompiler : public BaseCompiler
masm.callWithABI(JS_FUNC_TO_DATA_PTR(void *, native), false);
if (cx->typeInferenceEnabled())
masm.storePtr(ImmPtr(NULL), FrameAddress(offsetof(VMFrame, scratch)));
masm.storePtr(ImmPtr(NULL), FrameAddress(offsetof(VMFrame, stubRejoin)));
/* Reload fp, which may have been clobbered by restoreStackBase(). */
masm.loadPtr(FrameAddress(VMFrame::offsetOfFp), JSFrameReg);

View File

@ -88,6 +88,20 @@ class FrameSize
uint32 getArgc(VMFrame &f) const {
return isStatic() ? staticArgc() : f.u.call.dynamicArgc;
}
bool lowered(jsbytecode *pc) const {
return isDynamic() || staticArgc() != GET_ARGC(pc);
}
RejoinState rejoinState(jsbytecode *pc, bool native) {
if (isStatic()) {
if (staticArgc() == GET_ARGC(pc))
return native ? REJOIN_NATIVE : REJOIN_CALL_PROLOGUE;
JS_ASSERT(staticArgc() == GET_ARGC(pc) - 1);
return native ? REJOIN_NATIVE_LOWERED : REJOIN_CALL_PROLOGUE_LOWERED_CALL;
}
return native ? REJOIN_NATIVE_LOWERED : REJOIN_CALL_PROLOGUE_LOWERED_APPLY;
}
};
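For concreteness, the mapping this helper encodes (call shapes hypothetical):

/*
 * JSOP_CALL with staticArgc == GET_ARGC(pc):
 *     REJOIN_CALL_PROLOGUE, or REJOIN_NATIVE for natives.
 * Lowered Function.prototype.call with staticArgc == GET_ARGC(pc) - 1:
 *     REJOIN_CALL_PROLOGUE_LOWERED_CALL, or REJOIN_NATIVE_LOWERED.
 * Dynamic-argc apply:
 *     REJOIN_CALL_PROLOGUE_LOWERED_APPLY, or REJOIN_NATIVE_LOWERED.
 */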
namespace ic {

View File

@ -608,8 +608,7 @@ class SetPropCompiler : public PICStubCompiler
if (pic.typeMonitored) {
RecompilationMonitor monitor(cx);
if (!cx->addTypePropertyId(obj->getType(), shape->id, pic.rhsTypes))
return error();
cx->addTypePropertyId(obj->getType(), shape->id, pic.rhsTypes);
if (monitor.recompiled())
return Lookup_Uncacheable;
}
@ -628,8 +627,7 @@ class SetPropCompiler : public PICStubCompiler
return disable("invalid slot");
if (pic.typeMonitored) {
RecompilationMonitor monitor(cx);
if (!cx->addTypePropertyId(obj->getType(), shape->id, pic.rhsTypes))
return error();
cx->addTypePropertyId(obj->getType(), shape->id, pic.rhsTypes);
if (monitor.recompiled())
return Lookup_Uncacheable;
}
@ -646,13 +644,10 @@ class SetPropCompiler : public PICStubCompiler
uint16 slot = uint16(shape->shortid);
if (!script->ensureVarTypes(cx))
return error();
if (shape->setterOp() == SetCallArg) {
if (!script->typeSetArgument(cx, slot, pic.rhsTypes))
return error();
} else {
if (!script->typeSetLocal(cx, slot, pic.rhsTypes))
return error();
}
if (shape->setterOp() == SetCallArg)
script->typeSetArgument(cx, slot, pic.rhsTypes);
else
script->typeSetLocal(cx, slot, pic.rhsTypes);
if (monitor.recompiled())
return Lookup_Uncacheable;
}
@ -1786,8 +1781,8 @@ ic::GetProp(VMFrame &f, ic::PICInfo *pic)
* :FIXME: looking under the usePropCache abstraction, which is only unset for
* reads of the prototype.
*/
if (v.isUndefined() && usePropCache && !f.script()->typeMonitorUndefined(f.cx, f.pc()))
THROW();
if (v.isUndefined() && usePropCache)
f.script()->typeMonitorUndefined(f.cx, f.pc());
f.regs.sp[-1] = v;
}
@ -1954,8 +1949,8 @@ ic::CallProp(VMFrame &f, ic::PICInfo *pic)
}
#endif
if (regs.sp[-2].isUndefined() && !f.script()->typeMonitorUndefined(cx, f.pc()))
THROW();
if (regs.sp[-2].isUndefined())
f.script()->typeMonitorUndefined(cx, f.pc());
if (monitor.recompiled())
return;
@ -2007,8 +2002,8 @@ ic::XName(VMFrame &f, ic::PICInfo *pic)
THROW();
f.regs.sp[-1] = rval;
if (rval.isUndefined() && !f.script()->typeMonitorUndefined(f.cx, f.pc()))
THROW();
if (rval.isUndefined())
f.script()->typeMonitorUndefined(f.cx, f.pc());
}
void JS_FASTCALL
@ -2029,8 +2024,7 @@ ic::Name(VMFrame &f, ic::PICInfo *pic)
if (status == Lookup_Cacheable && !cc.updateTypes())
THROW();
if (!f.script()->typeMonitorResult(f.cx, f.pc(), rval))
THROW();
f.script()->typeMonitorResult(f.cx, f.pc(), rval);
}
static void JS_FASTCALL
@ -2487,8 +2481,8 @@ ic::CallElement(VMFrame &f, ic::GetElementIC *ic)
// If the result can be cached, the value was already retrieved.
JS_ASSERT(!f.regs.sp[-2].isMagic());
f.regs.sp[-1].setObject(*thisObj);
if (!JSID_IS_INT(id) && !f.script()->typeMonitorUnknown(cx, f.pc()))
THROW();
if (!JSID_IS_INT(id))
f.script()->typeMonitorUnknown(cx, f.pc());
return;
}
}
@ -2508,10 +2502,10 @@ ic::CallElement(VMFrame &f, ic::GetElementIC *ic)
{
f.regs.sp[-1] = thisv;
}
if (!JSID_IS_INT(id) && !f.script()->typeMonitorUnknown(cx, f.pc()))
THROW();
if (f.regs.sp[-2].isUndefined() && !f.script()->typeMonitorUndefined(cx, f.pc()))
THROW();
if (!JSID_IS_INT(id))
f.script()->typeMonitorUnknown(cx, f.pc());
if (f.regs.sp[-2].isUndefined())
f.script()->typeMonitorUndefined(cx, f.pc());
}
void JS_FASTCALL
@ -2553,20 +2547,18 @@ ic::GetElement(VMFrame &f, ic::GetElementIC *ic)
// If the result can be cached, the value was already retrieved.
JS_ASSERT(!f.regs.sp[-2].isMagic());
if (!JSID_IS_INT(id) && !f.script()->typeMonitorUnknown(cx, f.pc()))
THROW();
if (!JSID_IS_INT(id))
f.script()->typeMonitorUnknown(cx, f.pc());
return;
}
}
if (!obj->getProperty(cx, id, &f.regs.sp[-2]))
THROW();
if (!JSID_IS_INT(id) && !f.script()->typeMonitorUnknown(cx, f.pc()))
THROW();
if (f.regs.sp[-2].isUndefined()) {
if (!f.script()->typeMonitorUndefined(cx, f.pc()))
THROW();
}
if (!JSID_IS_INT(id))
f.script()->typeMonitorUnknown(cx, f.pc());
if (f.regs.sp[-2].isUndefined())
f.script()->typeMonitorUndefined(cx, f.pc());
}
#define APPLY_STRICTNESS(f, s) \

View File

@ -405,11 +405,11 @@ class PunboxAssembler : public JSC::MacroAssembler
if (holeCheck)
notHole = branchPtr(Equal, typeReg.reg(), ImmType(JSVAL_TYPE_MAGIC));
} else {
loadPayload(address, dataReg);
if (holeCheck) {
loadTypeTag(address, Registers::ValueReg);
notHole = branchPtr(Equal, Registers::ValueReg, ImmType(JSVAL_TYPE_MAGIC));
}
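/* Load the payload only after the type tag is read: dataReg may alias ValueReg (assumed rationale for this reordering). */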
loadPayload(address, dataReg);
}
return notHole;
}

View File

@ -76,8 +76,30 @@ AutoScriptRetrapper::untrap(jsbytecode *pc)
return true;
}
Recompiler::PatchableAddress
Recompiler::findPatch(JITScript *jit, void **location)
static inline JSRejoinState ScriptedRejoin(uint32 pcOffset)
{
return REJOIN_SCRIPTED | (pcOffset << 1);
}
static inline JSRejoinState StubRejoin(RejoinState rejoin)
{
return rejoin << 1;
}
static inline void
SetRejoinState(StackFrame *fp, const CallSite &site, void **location)
{
if (site.rejoin == REJOIN_SCRIPTED) {
fp->setRejoin(ScriptedRejoin(site.pcOffset));
*location = JS_FUNC_TO_DATA_PTR(void *, JaegerInterpolineScripted);
} else {
fp->setRejoin(StubRejoin(site.rejoin));
*location = JS_FUNC_TO_DATA_PTR(void *, JaegerInterpoline);
}
}
void
Recompiler::patchCall(JITScript *jit, StackFrame *fp, void **location)
{
uint8* codeStart = (uint8 *)jit->code.m_code.executableAddress();
@ -85,73 +107,28 @@ Recompiler::findPatch(JITScript *jit, void **location)
for (uint32 i = 0; i < jit->nCallSites; i++) {
if (callSites_[i].codeOffset + codeStart == *location) {
JS_ASSERT(callSites_[i].inlineIndex == analyze::CrossScriptSSA::OUTER_FRAME);
PatchableAddress result;
result.location = location;
result.callSite = callSites_[i];
return result;
}
}
RejoinSite *rejoinSites_ = jit->rejoinSites();
for (uint32 i = 0; i < jit->nRejoinSites; i++) {
const RejoinSite &rs = rejoinSites_[i];
if (rs.codeOffset + codeStart == *location) {
PatchableAddress result;
result.location = location;
result.callSite.initialize(rs.codeOffset, uint32(-1), rs.pcOffset, rs.id);
return result;
SetRejoinState(fp, callSites_[i], location);
return;
}
}
JS_NOT_REACHED("failed to find call site");
return PatchableAddress();
}
void *
Recompiler::findRejoin(JITScript *jit, const CallSite &callSite)
{
JS_ASSERT(callSite.inlineIndex == uint32(-1));
RejoinSite *rejoinSites_ = jit->rejoinSites();
for (uint32 i = 0; i < jit->nRejoinSites; i++) {
RejoinSite &rs = rejoinSites_[i];
if (rs.pcOffset == callSite.pcOffset &&
(rs.id == callSite.id || rs.id == RejoinSite::VARIADIC_ID)) {
/*
* We should not catch rejoin sites for scripted calls with a
* variadic id, the rejoin code for these is different.
*/
JS_ASSERT_IF(rs.id == RejoinSite::VARIADIC_ID,
callSite.id != CallSite::NCODE_RETURN_ID);
uint8* codeStart = (uint8 *)jit->code.m_code.executableAddress();
return codeStart + rs.codeOffset;
}
}
/* We have no idea where to patch up to. */
JS_NOT_REACHED("Call site vanished.");
return NULL;
}
void
Recompiler::applyPatch(JITScript *jit, PatchableAddress& toPatch)
{
void *result = findRejoin(jit, toPatch.callSite);
JS_ASSERT(result);
*toPatch.location = result;
}
Recompiler::PatchableNative
Recompiler::stealNative(JITScript *jit, jsbytecode *pc)
Recompiler::patchNative(JSContext *cx, JITScript *jit, StackFrame *fp, jsbytecode *pc,
RejoinState rejoin)
{
/*
* There is a native IC at pc which triggered a recompilation. The recompilation
* could have been triggered either by the native call itself, or by a SplatApplyArgs
* preparing for the native call. Either way, we don't want to patch up the call,
* but will instead steal the pool for the native IC so it doesn't get freed
* with the old script, and patch up the jump at the end to point to the slow join
* point in the new script.
* with the old script, and patch up the jump at the end to go to the interpoline.
*/
fp->setRejoin(StubRejoin(rejoin));
cx->compartment->jaegerCompartment->orphanedNativeCount++;
unsigned i;
ic::CallICInfo *callICs = jit->callICs();
for (i = 0; i < jit->nCallICs; i++) {
@ -167,73 +144,21 @@ Recompiler::stealNative(JITScript *jit, jsbytecode *pc)
if (!pool) {
/* Already stole this stub. */
PatchableNative native;
native.pc = NULL;
native.guardedNative = NULL;
native.pool = NULL;
return native;
}
PatchableNative native;
native.pc = pc;
native.guardedNative = ic.fastGuardedNative;
native.pool = pool;
native.nativeStart = ic.nativeStart;
native.nativeFunGuard = ic.nativeFunGuard;
native.nativeJump = ic.nativeJump;
/*
* Mark as stolen in case there are multiple calls on the stack. Note that if
* recompilation fails due to low memory then this pool will leak.
*/
pool = NULL;
return native;
}
void
Recompiler::patchNative(JITScript *jit, PatchableNative &native)
{
if (!native.pc)
return;
unsigned i;
ic::CallICInfo *callICs = jit->callICs();
for (i = 0; i < jit->nCallICs; i++) {
CallSite *call = callICs[i].call;
if (call->inlineIndex == uint32(-1) && call->pcOffset == uint32(native.pc - jit->script->code))
break;
}
JS_ASSERT(i < jit->nCallICs);
ic::CallICInfo &ic = callICs[i];
ic.fastGuardedNative = native.guardedNative;
ic.pools[ic::CallICInfo::Pool_NativeStub] = native.pool;
ic.nativeStart = native.nativeStart;
ic.nativeFunGuard = native.nativeFunGuard;
ic.nativeJump = native.nativeJump;
/* Patch the jump on object identity to go to the native stub. */
/* Patch the native fallthrough to go to the interpoline. */
{
uint8 *start = (uint8 *)ic.funJump.executableAddress();
uint8 *start = (uint8 *)ic.nativeJump.executableAddress();
JSC::RepatchBuffer repatch(JSC::JITCode(start - 32, 64));
repatch.relink(ic.funJump, ic.nativeStart);
repatch.relink(ic.nativeJump, JSC::CodeLocationLabel(JS_FUNC_TO_DATA_PTR(void *, JaegerInterpoline)));
}
/* Patch the native function guard to go to the slow path. */
{
uint8 *start = (uint8 *)native.nativeFunGuard.executableAddress();
JSC::RepatchBuffer repatch(JSC::JITCode(start - 32, 64));
repatch.relink(native.nativeFunGuard, ic.slowPathStart);
}
/* :XXX: We leak the pool if this fails. Oh well. */
cx->compartment->jaegerCompartment->orphanedNativePools.append(pool);
/* Patch the native fallthrough to go to the slow join point. */
{
JSC::CodeLocationLabel joinPoint = ic.slowPathStart.labelAtOffset(ic.slowJoinOffset);
uint8 *start = (uint8 *)native.nativeJump.executableAddress();
JSC::RepatchBuffer repatch(JSC::JITCode(start - 32, 64));
repatch.relink(native.nativeJump, joinPoint);
}
/* Mark as stolen in case there are multiple calls on the stack. */
pool = NULL;
}
StackFrame *
@ -251,24 +176,26 @@ Recompiler::expandInlineFrameChain(JSContext *cx, StackFrame *outer, InlineFrame
fp->initInlineFrame(inner->fun, parent, inner->parentpc);
uint32 pcOffset = inner->parentpc - parent->script()->code;
/*
* We should have ensured during compilation that the erased frame has JIT
* code with rejoin points added. We don't try to compile such code on
* demand as this can trigger recompilations and a reentrant invocation of
* expandInlineFrames. Note that the outer frame does not need to have
* rejoin points, as it is definitely at an inline call and rejoin points
* are always added for such calls.
*/
JS_ASSERT(fp->jit() && fp->jit()->rejoinPoints);
PatchableAddress patch;
patch.location = fp->addressOfNativeReturnAddress();
patch.callSite.initialize(0, uint32(-1), pcOffset, CallSite::NCODE_RETURN_ID);
applyPatch(parent->jit(), patch);
void **location = fp->addressOfNativeReturnAddress();
*location = JS_FUNC_TO_DATA_PTR(void *, JaegerInterpolineScripted);
parent->setRejoin(ScriptedRejoin(pcOffset));
return fp;
}
/*
* Whether a given return address for a frame indicates it returns directly
* into JIT code.
*/
static inline bool
JITCodeReturnAddress(void *data)
{
return data != NULL /* frame is interpreted */
&& data != JS_FUNC_TO_DATA_PTR(void *, JaegerTrampolineReturn)
&& data != JS_FUNC_TO_DATA_PTR(void *, JaegerInterpoline)
&& data != JS_FUNC_TO_DATA_PTR(void *, JaegerInterpolineScripted);
}
/*
* Expand all inlined frames within fp per 'inlined' and update next and regs
* to refer to the new innermost frame.
@ -285,39 +212,43 @@ Recompiler::expandInlineFrames(JSContext *cx, StackFrame *fp, mjit::CallSite *in
*/
cx->compartment->types.frameExpansions++;
/* Patch the VMFrame's return address if it is returning at the given inline site. */
/*
* Patch the VMFrame's return address if it is returning at the given inline site.
* Note there is no worry about handling a native or CompileFunction call here,
* as such IC stubs are not generated within inline frames.
*/
void **frameAddr = f->returnAddressLocation();
uint8* codeStart = (uint8 *)fp->jit()->code.m_code.executableAddress();
bool patchFrameReturn =
(f->scratch != NATIVE_CALL_SCRATCH_VALUE) &&
(*frameAddr == codeStart + inlined->codeOffset);
(f->stubRejoin == 0) && (*frameAddr == codeStart + inlined->codeOffset);
InlineFrame *inner = &fp->jit()->inlineFrames()[inlined->inlineIndex];
jsbytecode *innerpc = inner->fun->script()->code + inlined->pcOffset;
StackFrame *innerfp = expandInlineFrameChain(cx, fp, inner);
JITScript *jit = innerfp->jit();
if (f->regs.fp() == fp) {
JS_ASSERT(f->regs.inlined() == inlined);
f->regs.expandInline(innerfp, innerpc);
}
if (patchFrameReturn) {
PatchableAddress patch;
patch.location = frameAddr;
patch.callSite.initialize(0, uint32(-1), inlined->pcOffset, inlined->id);
applyPatch(jit, patch);
}
if (patchFrameReturn)
SetRejoinState(f->regs.fp(), *inlined, frameAddr);
/*
* Note: unlike the case for recompilation, during frame expansion we don't
* need to worry about the next VMFrame holding a reference to the inlined
* frame in its entryncode. entryncode is non-NULL only if the next frame's
* code was discarded and has executed via the Interpoline, which can only
* happen after all inline frames have been expanded.
*/
if (next) {
next->resetInlinePrev(innerfp, innerpc);
void **addr = next->addressOfNativeReturnAddress();
if (*addr != NULL && *addr != JS_FUNC_TO_DATA_PTR(void *, JaegerTrampolineReturn)) {
PatchableAddress patch;
patch.location = addr;
patch.callSite.initialize(0, uint32(-1), inlined->pcOffset, CallSite::NCODE_RETURN_ID);
applyPatch(jit, patch);
if (JITCodeReturnAddress(*addr)) {
innerfp->setRejoin(ScriptedRejoin(inlined->pcOffset));
*addr = JS_FUNC_TO_DATA_PTR(void *, JaegerInterpolineScripted);
}
}
}
@ -372,33 +303,23 @@ Recompiler::Recompiler(JSContext *cx, JSScript *script)
/*
* Recompilation can be triggered either by the debugger (turning debug mode on for
* a script or setting/clearing a trap), or by dynamic changes in type information
* from type inference. When recompiling we also need to change any references to
* the old version of the script to refer to the new version of the script, including
* references on the JS stack. Things to do:
* from type inference. When recompiling we don't immediately recompile the JIT
* code, but destroy the old code and remove all references to the code, including
* those from active stack frames. Things to do:
*
* - Purge scripted call inline caches calling into the old script.
* - Purge scripted call inline caches calling into the script.
*
* - For arg/local/stack slots in frames on the stack that are now inferred
* as (int | double), make sure they are actually doubles. Before recompilation
* they may have been inferred as integers and stored to the stack as integers,
* but slots inferred as (int | double) are required to be definitely double.
*
* - For frames with an ncode return address in the original script, update
* to point to the corresponding return address in the new script.
* - For frames with an ncode return address in the original script, redirect
* to the interpoline.
*
* - For VMFrames with a stub call return address in the original script,
* update to point to the corresponding return address in the new script.
* This requires that the recompiled script has a superset of the stub calls
* in the original script. Stub calls are keyed to the function being called,
* so with less precise type information the call to a stub can move around
* (e.g. from inline to OOL path or vice versa) but can't disappear, and
* further operation after the stub should be consistent across compilations.
* redirect to the interpoline.
*
* - For VMFrames with a native call return address in a call IC in the original
* script (the only place where IC code makes calls), make a new stub to throw
* an exception or jump to the call's slow path join point.
* - For VMFrames whose entryncode address (the value of entryfp->ncode before
* being clobbered with JaegerTrampolineReturn) is in the original script,
* redirect that entryncode to the interpoline.
*/
bool
void
Recompiler::recompile()
{
JS_ASSERT(script->hasJITCode());
@ -414,23 +335,13 @@ Recompiler::recompile()
* 1) Scan the stack, looking at all return addresses that could go into JIT
* code.
* 2) If an address corresponds to a call site registered by |callSite| during
* the last compilation, remember it.
* 3) Purge the old compiled state and return if there were no active frames of
* this script on the stack.
* 4) Fix up the stack by replacing all saved addresses with the addresses the
* new compiler gives us for the call sites.
* the last compilation, patch it to go to the interpoline.
* 3) Purge the old compiled state.
*/
Vector<PatchableAddress> normalPatches(cx);
Vector<PatchableAddress> ctorPatches(cx);
Vector<PatchableNative> normalNatives(cx);
Vector<PatchableNative> ctorNatives(cx);
/* Frames containing data that may need to be patched from int to double. */
Vector<PatchableFrame> normalFrames(cx);
Vector<PatchableFrame> ctorFrames(cx);
// Find all JIT'd stack frames to account for return addresses that will
// need to be patched after recompilation.
VMFrame *nextf = NULL;
for (VMFrame *f = script->compartment->jaegerCompartment->activeFrame();
f != NULL;
f = f->previous) {
@ -444,119 +355,69 @@ Recompiler::recompile()
continue;
}
// Remember every frame for each type of JIT'd code.
PatchableFrame frame;
frame.fp = fp;
frame.pc = fp->pc(cx, next);
frame.scriptedCall = false;
if (next) {
// check for a scripted call returning into the recompiled script.
// this misses scanning the entry fp, which cannot return directly
// into JIT code.
void **addr = next->addressOfNativeReturnAddress();
if (!*addr) {
// next is an interpreted frame.
} else if (*addr == JS_FUNC_TO_DATA_PTR(void *, JaegerTrampolineReturn)) {
// next entered from the interpreter.
} else if (fp->isConstructing()) {
JS_ASSERT(script->jitCtor && script->jitCtor->isValidCode(*addr));
frame.scriptedCall = true;
if (!ctorPatches.append(findPatch(script->jitCtor, addr)))
return false;
} else {
JS_ASSERT(script->jitNormal && script->jitNormal->isValidCode(*addr));
frame.scriptedCall = true;
if (!normalPatches.append(findPatch(script->jitNormal, addr)))
return false;
if (JITCodeReturnAddress(*addr)) {
JS_ASSERT(fp->jit()->isValidCode(*addr));
patchCall(fp->jit(), fp, addr);
} else if (nextf && nextf->entryfp == next &&
JITCodeReturnAddress(nextf->entryncode)) {
JS_ASSERT(fp->jit()->isValidCode(nextf->entryncode));
patchCall(fp->jit(), fp, &nextf->entryncode);
}
}
if (fp->isConstructing() && !ctorFrames.append(frame))
return false;
if (!fp->isConstructing() && !normalFrames.append(frame))
return false;
next = fp;
}
/* Check if the VMFrame returns directly into the recompiled script. */
/*
* Check if the VMFrame returns directly into the recompiled script.
* This depends on an important invariant that f->fp() reflects the
* frame at the point where the call occurred, regardless of any
* frames which were pushed inside the call.
*/
StackFrame *fp = f->fp();
void **addr = f->returnAddressLocation();
if (f->scratch == NATIVE_CALL_SCRATCH_VALUE) {
RejoinState rejoin = (RejoinState) f->stubRejoin;
if (rejoin == REJOIN_NATIVE || rejoin == REJOIN_NATIVE_LOWERED) {
// Native call.
if (fp->script() == script && fp->isConstructing()) {
if (!ctorNatives.append(stealNative(script->jitCtor, fp->pc(cx, NULL))))
return false;
} else if (fp->script() == script) {
if (!normalNatives.append(stealNative(script->jitNormal, fp->pc(cx, NULL))))
return false;
}
} else if (f->scratch == COMPILE_FUNCTION_SCRATCH_VALUE) {
if (fp->script() == script && fp->isConstructing())
patchNative(cx, script->jitCtor, fp, fp->pc(cx, NULL), rejoin);
else if (fp->script() == script)
patchNative(cx, script->jitNormal, fp, fp->pc(cx, NULL), rejoin);
} else if (rejoin) {
/* Recompilation triggered by CompileFunction. */
if (fp->prev()->script() == script) {
PatchableAddress patch;
patch.location = addr;
patch.callSite.initialize(0, uint32(-1), fp->prev()->pc(cx, NULL) - script->code,
(size_t) JS_FUNC_TO_DATA_PTR(void *, stubs::UncachedCall));
if (fp->prev()->isConstructing()) {
if (!ctorPatches.append(patch))
return false;
} else {
if (!normalPatches.append(patch))
return false;
}
fp->prev()->setRejoin(StubRejoin(rejoin));
*addr = JS_FUNC_TO_DATA_PTR(void *, JaegerInterpoline);
}
} else if (script->jitCtor && script->jitCtor->isValidCode(*addr)) {
if (!ctorPatches.append(findPatch(script->jitCtor, addr)))
return false;
patchCall(script->jitCtor, fp, addr);
} else if (script->jitNormal && script->jitNormal->isValidCode(*addr)) {
if (!normalPatches.append(findPatch(script->jitNormal, addr)))
return false;
patchCall(script->jitNormal, fp, addr);
}
nextf = f;
}
Vector<CallSite> normalSites(cx);
Vector<CallSite> ctorSites(cx);
if (script->jitNormal && !cleanup(script->jitNormal, &normalSites))
return false;
if (script->jitCtor && !cleanup(script->jitCtor, &ctorSites))
return false;
ReleaseScriptCode(cx, script, true);
ReleaseScriptCode(cx, script, false);
/*
* Regenerate the code if there are JIT frames on the stack, if this script
* has inline parents and thus always needs JIT code, or if it is a newly
* pushed frame by e.g. the interpreter. :XXX: it would be nice if we could
* ensure that compiling a script does not then trigger its recompilation.
*/
StackFrame *top = (cx->running() && cx->fp()->isScriptFrame()) ? cx->fp() : NULL;
bool keepNormal = !normalFrames.empty() || script->inlineParents ||
(top && top->script() == script && !top->isConstructing());
bool keepCtor = !ctorFrames.empty() ||
(top && top->script() == script && top->isConstructing());
if (keepNormal && !recompile(script, false,
normalFrames, normalPatches, normalSites, normalNatives)) {
return false;
if (script->jitNormal) {
cleanup(script->jitNormal);
ReleaseScriptCode(cx, script, true);
}
if (keepCtor && !recompile(script, true,
ctorFrames, ctorPatches, ctorSites, ctorNatives)) {
return false;
if (script->jitCtor) {
cleanup(script->jitCtor);
ReleaseScriptCode(cx, script, false);
}
JS_ASSERT_IF(keepNormal, script->jitNormal);
JS_ASSERT_IF(keepCtor, script->jitCtor);
cx->compartment->types.recompilations++;
return true;
}
bool
Recompiler::cleanup(JITScript *jit, Vector<CallSite> *sites)
void
Recompiler::cleanup(JITScript *jit)
{
while (!JS_CLIST_IS_EMPTY(&jit->callers)) {
JaegerSpew(JSpew_Recompile, "Purging IC caller\n");
@ -571,45 +432,6 @@ Recompiler::cleanup(JITScript *jit, Vector<CallSite> *sites)
repatch.relink(ic->funJump, ic->slowPathStart);
ic->purgeGuardedObject();
}
CallSite *callSites_ = jit->callSites();
for (uint32 i = 0; i < jit->nCallSites; i++) {
CallSite &site = callSites_[i];
if (site.isTrap() && !sites->append(site))
return false;
}
return true;
}
bool
Recompiler::recompile(JSScript *script, bool isConstructing,
Vector<PatchableFrame> &frames,
Vector<PatchableAddress> &patches, Vector<CallSite> &sites,
Vector<PatchableNative> &natives)
{
JaegerSpew(JSpew_Recompile, "On stack recompilation, %u frames, %u patches, %u natives\n",
frames.length(), patches.length(), natives.length());
CompileStatus status = Compile_Retry;
while (status == Compile_Retry) {
Compiler cc(cx, script, isConstructing, &frames);
if (!cc.loadOldTraps(sites))
return false;
status = cc.compile();
}
if (status != Compile_Okay)
return false;
JITScript *jit = script->getJIT(isConstructing);
/* Perform the earlier scanned patches */
for (uint32 i = 0; i < patches.length(); i++)
applyPatch(jit, patches[i]);
for (uint32 i = 0; i < natives.length(); i++)
patchNative(jit, natives[i]);
return true;
}
} /* namespace mjit */

View File

@ -75,31 +75,18 @@ class AutoScriptRetrapper
};
/*
* This class is responsible for sanely re-JITing a script and fixing up
* the world. If you ever change the code associated with a JSScript, or
* otherwise would cause existing JITed code to be incorrect, you /must/ use
* this to invalidate and potentially re-compile the existing JITed code,
* fixing up the stack in the process.
* This class is responsible for sanely destroying a JITed script while frames
* for it are still on the stack, removing all references in the world to it
* and patching up those existing frames to go into the interpreter. If you
* ever change the code associated with a JSScript, or otherwise would cause
* existing JITed code to be incorrect, you /must/ use this to invalidate the
* JITed code, fixing up the stack in the process.
*/
class Recompiler {
struct PatchableAddress {
void **location;
CallSite callSite;
};
struct PatchableNative {
jsbytecode *pc;
JSObject *guardedNative;
JSC::ExecutablePool *pool;
JSC::CodeLocationLabel nativeStart;
JSC::CodeLocationJump nativeFunGuard;
JSC::CodeLocationJump nativeJump;
};
public:
Recompiler(JSContext *cx, JSScript *script);
bool recompile();
void recompile();
static void
expandInlineFrames(JSContext *cx, StackFrame *fp, mjit::CallSite *inlined,
@ -109,22 +96,15 @@ private:
JSContext *cx;
JSScript *script;
static PatchableAddress findPatch(JITScript *jit, void **location);
static void * findRejoin(JITScript *jit, const CallSite &callSite);
static void applyPatch(JITScript *jit, PatchableAddress& toPatch);
PatchableNative stealNative(JITScript *jit, jsbytecode *pc);
void patchNative(JITScript *jit, PatchableNative &native);
bool recompile(JSScript *script, bool isConstructing,
Vector<PatchableFrame> &frames,
Vector<PatchableAddress> &patches, Vector<CallSite> &sites,
Vector<PatchableNative> &natives);
static void patchCall(JITScript *jit, StackFrame *fp, void **location);
static void patchNative(JSContext *cx, JITScript *jit, StackFrame *fp, jsbytecode *pc,
RejoinState rejoin);
static StackFrame *
expandInlineFrameChain(JSContext *cx, StackFrame *outer, InlineFrame *inner);
/* Detach jit from any IC callers and save any traps to sites. */
bool cleanup(JITScript *jit, Vector<CallSite> *sites);
/* Detach jit from any IC callers. */
static void cleanup(JITScript *jit);
};
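Call sites elsewhere in this commit reduce to a two-line pattern, as in stubs::RecompileForInline below:

ExpandInlineFrames(f.cx, true);           /* flatten inline frames first */
Recompiler recompiler(f.cx, f.script());
recompiler.recompile();                   /* now infallible; frames rejoin via the interpoline */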
} /* namespace mjit */

View File

@ -128,8 +128,7 @@ stubs::SetName(VMFrame &f, JSAtom *origAtom)
if (!obj)
THROW();
if (!cx->typeMonitorAssign(obj, ATOM_TO_JSID(origAtom), rval))
THROW();
cx->typeMonitorAssign(obj, ATOM_TO_JSID(origAtom), rval);
do {
PropertyCache *cache = &JS_PROPERTY_CACHE(cx);
@ -282,11 +281,10 @@ stubs::SetPropNoCache(VMFrame &f, JSAtom *atom)
{
JSObject *obj = ValueToObject(f.cx, &f.regs.sp[-2]);
if (!obj)
THROW();
THROW();
Value rval = f.regs.sp[-1];
if (!f.cx->typeMonitorAssign(obj, ATOM_TO_JSID(atom), rval))
THROW();
f.cx->typeMonitorAssign(obj, ATOM_TO_JSID(atom), rval);
if (!obj->setProperty(f.cx, ATOM_TO_JSID(atom), &f.regs.sp[-1], strict))
THROW();
@ -309,8 +307,7 @@ stubs::SetGlobalNameNoCache(VMFrame &f, JSAtom *atom)
THROW();
jsid id = ATOM_TO_JSID(atom);
if (!cx->typeMonitorAssign(obj, id, rval))
THROW();
cx->typeMonitorAssign(obj, id, rval);
if (!obj->setProperty(cx, id, &rval, strict))
THROW();
@ -379,8 +376,7 @@ NameOp(VMFrame &f, JSObject *obj, bool markresult, bool callname)
if (op2 == JSOP_TYPEOF) {
f.regs.sp++;
f.regs.sp[-1].setUndefined();
if (!f.script()->typeMonitorUndefined(cx, f.pc()))
return NULL;
f.script()->typeMonitorUndefined(cx, f.pc());
return obj;
}
ReportAtomNotDefined(cx, atom);
@ -405,20 +401,15 @@ NameOp(VMFrame &f, JSObject *obj, bool markresult, bool callname)
* In the latter case update the property's types themselves,
* to capture the type effect on the intermediate value.
*/
if (JSOp(*f.pc()) == JSOP_GETGNAME || JSOp(*f.pc()) == JSOP_CALLGNAME) {
if (!f.script()->typeMonitorUndefined(cx, f.pc()))
return NULL;
} else {
if (!cx->addTypePropertyId(obj->getType(), id, types::TYPE_UNDEFINED))
return NULL;
}
if (JSOp(*f.pc()) == JSOP_GETGNAME || JSOp(*f.pc()) == JSOP_CALLGNAME)
f.script()->typeMonitorUndefined(cx, f.pc());
else
cx->addTypePropertyId(obj->getType(), id, types::TYPE_UNDEFINED);
}
}
if (markresult) {
if (!f.script()->typeMonitorResult(cx, f.pc(), rval))
return NULL;
}
if (markresult)
f.script()->typeMonitorResult(cx, f.pc(), rval);
*f.regs.sp++ = rval;
@ -512,16 +503,14 @@ stubs::GetElem(VMFrame &f)
THROW();
copyFrom = &rval;
if (!JSID_IS_INT(id) && !f.script()->typeMonitorUnknown(cx, f.pc()))
THROW();
if (!JSID_IS_INT(id))
f.script()->typeMonitorUnknown(cx, f.pc());
end_getelem:
f.regs.sp[-2] = *copyFrom;
if (copyFrom->isUndefined()) {
if (!f.script()->typeMonitorUndefined(cx, f.pc()))
THROW();
}
if (copyFrom->isUndefined())
f.script()->typeMonitorUndefined(cx, f.pc());
}
static inline bool
@ -567,10 +556,8 @@ stubs::CallElem(VMFrame &f)
{
regs.sp[-1] = thisv;
}
if ((regs.sp[-2].isUndefined() || !JSID_IS_INT(id)) &&
!f.script()->typeMonitorUnknown(cx, f.pc())) {
THROW();
}
if (regs.sp[-2].isUndefined() || !JSID_IS_INT(id))
f.script()->typeMonitorUnknown(cx, f.pc());
}
template<JSBool strict>
@ -594,8 +581,7 @@ stubs::SetElem(VMFrame &f)
if (!FetchElementId(f, obj, idval, id, &regs.sp[-2]))
THROW();
if (!cx->typeMonitorAssign(obj, id, rval))
THROW();
cx->typeMonitorAssign(obj, id, rval);
do {
if (obj->isDenseArray() && JSID_IS_INT(id)) {
@ -740,8 +726,8 @@ stubs::Ursh(VMFrame &f)
u >>= (j & 31);
if (!f.regs.sp[-2].setNumber(uint32(u)) && !f.script()->typeMonitorOverflow(f.cx, f.pc()))
THROW();
if (!f.regs.sp[-2].setNumber(uint32(u)))
f.script()->typeMonitorOverflow(f.cx, f.pc());
}
template<JSBool strict>
@ -815,8 +801,7 @@ stubs::DefFun(VMFrame &f, JSFunction *fun)
Value rval = ObjectValue(*obj);
if (!cx->typeMonitorAssign(parent, id, rval))
THROW();
cx->typeMonitorAssign(parent, id, rval);
do {
/* Steps 5d, 5f. */
@ -1065,14 +1050,13 @@ DefaultValue(VMFrame &f, JSType hint, Value &v, int n)
return true;
}
static inline bool
static inline void
MonitorArithmeticOverflow(VMFrame &f, const Value &v)
{
JSContext *cx = f.cx;
JS_ASSERT(v.isDouble());
if (!f.script()->typeMonitorOverflow(cx, f.pc()))
return false;
f.script()->typeMonitorOverflow(cx, f.pc());
/*
* Monitoring the overflow is not enough for fused INC operations on NAME/PROP,
@ -1094,16 +1078,16 @@ MonitorArithmeticOverflow(VMFrame &f, const Value &v)
break;
default:
return true;
return;
}
JSObject *obj = ValueToObject(cx, &ov);
if (!obj)
return true;
return;
JSAtom *atom;
GET_ATOM_FROM_BYTECODE(f.script(), f.pc(), 0, atom);
return cx->addTypePropertyId(obj->getType(), ATOM_TO_JSID(atom), TYPE_DOUBLE);
cx->addTypePropertyId(obj->getType(), ATOM_TO_JSID(atom), TYPE_DOUBLE);
}
void JS_FASTCALL
@ -1131,8 +1115,7 @@ stubs::Add(VMFrame &f)
THROW();
regs.sp--;
regs.sp[-1] = rval;
if (!f.script()->typeMonitorUnknown(cx, f.pc()))
THROW();
f.script()->typeMonitorUnknown(cx, f.pc());
} else
#endif
{
@ -1159,8 +1142,8 @@ stubs::Add(VMFrame &f)
THROW();
regs.sp[-1].setString(rstr);
}
if ((lIsObject || rIsObject) && !f.script()->typeMonitorString(cx, f.pc()))
THROW();
if (lIsObject || rIsObject)
f.script()->typeMonitorString(cx, f.pc());
goto string_concat;
} else {
@ -1169,9 +1152,9 @@ stubs::Add(VMFrame &f)
THROW();
l += r;
if (!regs.sp[-2].setNumber(l) &&
(lIsObject || rIsObject || (!lval.isDouble() && !rval.isDouble())) &&
!MonitorArithmeticOverflow(f, regs.sp[-2]))
THROW();
(lIsObject || rIsObject || (!lval.isDouble() && !rval.isDouble()))) {
MonitorArithmeticOverflow(f, regs.sp[-2]);
}
}
}
return;
@ -1196,8 +1179,8 @@ stubs::Sub(VMFrame &f)
THROW();
}
double d = d1 - d2;
if (!regs.sp[-2].setNumber(d) && !MonitorArithmeticOverflow(f, regs.sp[-2]))
THROW();
if (!regs.sp[-2].setNumber(d))
MonitorArithmeticOverflow(f, regs.sp[-2]);
}
void JS_FASTCALL
@ -1211,8 +1194,8 @@ stubs::Mul(VMFrame &f)
THROW();
}
double d = d1 * d2;
if (!regs.sp[-2].setNumber(d) && !f.script()->typeMonitorOverflow(cx, f.pc()))
THROW();
if (!regs.sp[-2].setNumber(d))
f.script()->typeMonitorOverflow(cx, f.pc());
}
void JS_FASTCALL
@ -1242,12 +1225,11 @@ stubs::Div(VMFrame &f)
else
vp = &rt->positiveInfinityValue;
regs.sp[-2] = *vp;
if (!f.script()->typeMonitorOverflow(cx, f.pc()))
THROW();
f.script()->typeMonitorOverflow(cx, f.pc());
} else {
d1 /= d2;
if (!regs.sp[-2].setNumber(d1) && !f.script()->typeMonitorOverflow(cx, f.pc()))
THROW();
if (!regs.sp[-2].setNumber(d1))
f.script()->typeMonitorOverflow(cx, f.pc());
}
}
@ -1276,8 +1258,7 @@ stubs::Mod(VMFrame &f)
d1 = js_fmod(d1, d2);
regs.sp[-2].setDouble(d1);
}
if (!f.script()->typeMonitorOverflow(cx, f.pc()))
THROW();
f.script()->typeMonitorOverflow(cx, f.pc());
}
}
@ -1321,8 +1302,7 @@ stubs::RecompileForInline(VMFrame &f)
{
ExpandInlineFrames(f.cx, true);
Recompiler recompiler(f.cx, f.script());
if (!recompiler.recompile())
THROW();
recompiler.recompile();
}
void JS_FASTCALL
@ -1379,8 +1359,7 @@ stubs::This(VMFrame &f)
*/
if (f.regs.inlined()) {
JSFunction *fun = f.jit()->inlineFrames()[f.regs.inlined()->inlineIndex].fun;
if (!f.cx->markTypeFunctionUninlineable(fun->getType()))
THROW();
f.cx->markTypeFunctionUninlineable(fun->getType());
}
if (!ComputeThis(f.cx, f.fp()))
@ -1395,8 +1374,8 @@ stubs::Neg(VMFrame &f)
if (!ValueToNumber(f.cx, f.regs.sp[-1], &d))
THROW();
d = -d;
if (!f.regs.sp[-1].setNumber(d) && !f.script()->typeMonitorOverflow(f.cx, f.pc()))
THROW();
if (!f.regs.sp[-1].setNumber(d))
f.script()->typeMonitorOverflow(f.cx, f.pc());
}
JSObject * JS_FASTCALL
@ -1471,8 +1450,7 @@ stubs::InitElem(VMFrame &f, uint32 last)
if (last && !js_SetLengthProperty(cx, obj, (jsuint) (JSID_TO_INT(id) + 1)))
THROW();
} else {
if (!cx->typeMonitorAssign(obj, id, rref))
THROW();
cx->typeMonitorAssign(obj, id, rref);
if (!obj->defineProperty(cx, id, rref, NULL, NULL, JSPROP_ENUMERATE))
THROW();
}
@ -1487,8 +1465,8 @@ stubs::GetUpvar(VMFrame &f, uint32 ck)
cookie.fromInteger(ck);
f.regs.sp[0] = GetUpvar(f.cx, staticLevel, cookie);
if (f.regs.sp[0].isUndefined() && !f.script()->typeMonitorUndefined(f.cx, f.pc()))
THROW();
if (f.regs.sp[0].isUndefined())
f.script()->typeMonitorUndefined(f.cx, f.pc());
}
JSObject * JS_FASTCALL
@ -1707,10 +1685,8 @@ ObjIncOp(VMFrame &f, JSObject *obj, jsid id)
}
v.setNumber(d);
if (!f.script()->typeMonitorOverflow(cx, f.pc()))
return false;
if (!cx->typeMonitorAssign(obj, id, v))
return false;
f.script()->typeMonitorOverflow(cx, f.pc());
cx->typeMonitorAssign(obj, id, v);
{
JSAutoResolveFlags rf(cx, setPropFlags);
@ -1834,8 +1810,8 @@ stubs::ElemInc(VMFrame &f)
THROW();
f.regs.sp[-3] = f.regs.sp[-1];
if (!JSID_IS_INT(id) && !f.script()->typeMonitorUnknown(f.cx, f.pc()))
THROW();
if (!JSID_IS_INT(id))
f.script()->typeMonitorUnknown(f.cx, f.pc());
}
template void JS_FASTCALL stubs::ElemInc<true>(VMFrame &f);
@ -1855,8 +1831,8 @@ stubs::ElemDec(VMFrame &f)
THROW();
f.regs.sp[-3] = f.regs.sp[-1];
if (!JSID_IS_INT(id) && !f.script()->typeMonitorUnknown(f.cx, f.pc()))
THROW();
if (!JSID_IS_INT(id))
f.script()->typeMonitorUnknown(f.cx, f.pc());
}
template void JS_FASTCALL stubs::ElemDec<true>(VMFrame &f);
@ -1876,8 +1852,8 @@ stubs::IncElem(VMFrame &f)
THROW();
f.regs.sp[-3] = f.regs.sp[-1];
if (!JSID_IS_INT(id) && !f.script()->typeMonitorUnknown(f.cx, f.pc()))
THROW();
if (!JSID_IS_INT(id))
f.script()->typeMonitorUnknown(f.cx, f.pc());
}
template void JS_FASTCALL stubs::IncElem<true>(VMFrame &f);
@ -1897,8 +1873,8 @@ stubs::DecElem(VMFrame &f)
THROW();
f.regs.sp[-3] = f.regs.sp[-1];
if (!JSID_IS_INT(id) && !f.script()->typeMonitorUnknown(f.cx, f.pc()))
THROW();
if (!JSID_IS_INT(id))
f.script()->typeMonitorUnknown(f.cx, f.pc());
}
template void JS_FASTCALL stubs::DecElem<true>(VMFrame &f);
@ -2075,8 +2051,8 @@ stubs::GetProp(VMFrame &f)
}
} while(0);
if (rval.isUndefined() && !f.script()->typeMonitorUndefined(cx, f.pc()))
THROW();
if (rval.isUndefined())
f.script()->typeMonitorUndefined(cx, f.pc());
regs.sp[-1] = rval;
}
@ -2188,8 +2164,8 @@ stubs::CallProp(VMFrame &f, JSAtom *origAtom)
THROW();
}
#endif
if (rval.isUndefined() && !f.script()->typeMonitorUndefined(cx, f.pc()))
THROW();
if (rval.isUndefined())
f.script()->typeMonitorUndefined(cx, f.pc());
}
void JS_FASTCALL
@ -2216,8 +2192,7 @@ InitPropOrMethod(VMFrame &f, JSAtom *atom, JSOp op)
JSObject *obj = &regs.sp[-2].toObject();
JS_ASSERT(obj->isNative());
if (!cx->typeMonitorAssign(obj, ATOM_TO_JSID(atom), rval))
THROW();
cx->typeMonitorAssign(obj, ATOM_TO_JSID(atom), rval);
/*
* Probe the property cache.
@ -2644,8 +2619,8 @@ stubs::Pos(VMFrame &f)
{
if (!ValueToNumber(f.cx, &f.regs.sp[-1]))
THROW();
if (!f.regs.sp[-1].isInt32() && !f.script()->typeMonitorOverflow(f.cx, f.pc()))
THROW();
if (!f.regs.sp[-1].isInt32())
f.script()->typeMonitorOverflow(f.cx, f.pc());
}
void JS_FASTCALL
@ -2771,9 +2746,7 @@ stubs::SetConst(VMFrame &f, JSAtom *atom)
JSObject *obj = &cx->stack.currentVarObj();
const Value &ref = f.regs.sp[-1];
if (!cx->typeMonitorAssign(obj, ATOM_TO_JSID(atom), ref))
THROW();
cx->typeMonitorAssign(obj, ATOM_TO_JSID(atom), ref);
if (!obj->defineProperty(cx, ATOM_TO_JSID(atom), ref,
PropertyStub, StrictPropertyStub,
JSPROP_ENUMERATE | JSPROP_PERMANENT | JSPROP_READONLY)) {
@ -2811,16 +2784,14 @@ template void JS_FASTCALL stubs::DelElem<false>(VMFrame &f);
void JS_FASTCALL
stubs::UndefinedHelper(VMFrame &f)
{
if (!f.script()->typeMonitorUndefined(f.cx, f.pc()))
THROW();
f.script()->typeMonitorUndefined(f.cx, f.pc());
f.regs.sp[-1].setUndefined();
}
void JS_FASTCALL
stubs::NegZeroHelper(VMFrame &f)
{
if (!f.script()->typeMonitorOverflow(f.cx, f.pc()))
THROW();
f.script()->typeMonitorOverflow(f.cx, f.pc());
f.regs.sp[-1].setDouble(-0.0);
}
@ -2855,15 +2826,10 @@ stubs::CheckArgumentTypes(VMFrame &f)
/* Postpone recompilations until all args have been updated. */
types::AutoEnterTypeInference enter(f.cx);
if (!f.fp()->isConstructing()) {
if (!script->typeSetThis(f.cx, fp->thisValue()))
THROW();
}
for (unsigned i = 0; i < fun->nargs; i++) {
if (!script->typeSetArgument(f.cx, i, fp->formalArg(i)))
THROW();
}
if (!f.fp()->isConstructing())
script->typeSetThis(f.cx, fp->thisValue());
for (unsigned i = 0; i < fun->nargs; i++)
script->typeSetArgument(f.cx, i, fp->formalArg(i));
}
if (monitor.recompiled())
@ -2921,8 +2887,7 @@ stubs::InvariantFailure(VMFrame &f, void *rval)
ExpandInlineFrames(f.cx, true);
Recompiler recompiler(f.cx, script);
if (!recompiler.recompile())
THROWV(NULL);
recompiler.recompile();
/* Return the same value (if any) as the call triggering the invariant failure. */
return rval;

View File

@ -256,5 +256,8 @@ inline FuncPtr FunctionTemplateConditional(bool cond, FuncPtr a, FuncPtr b) {
extern "C" void *
js_InternalThrow(js::VMFrame &f);
extern "C" void *
js_InternalInterpret(void *returnData, void *returnType, void *returnReg, js::VMFrame &f);
#endif /* jslogic_h__ */
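js_InternalInterpret is the new entry point that lets a JIT frame whose code was discarded by recompilation rejoin the interpreter mid-operation. A hedged sketch of how a trampoline might invoke it; the parameter names come from the declaration above, but the surrounding control flow and the jumpToNative helper are illustrative only:

    // returnData/returnType/returnReg carry the machine state of the
    // interrupted JIT call; a non-NULL result is a native code address
    // to resume at once the interpreter has finished the current op.
    void *ncode = js_InternalInterpret(returnData, returnType, returnReg, f);
    if (ncode)
        jumpToNative(ncode);   // hypothetical helper, not part of this patch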

View File

@ -170,13 +170,13 @@ typedef JSC::MacroAssembler::Imm32 Imm32;
typedef JSC::MacroAssembler::DataLabelPtr DataLabelPtr;
JSC::MacroAssembler::Call
StubCompiler::emitStubCall(void *ptr, bool needsRejoin)
StubCompiler::emitStubCall(void *ptr, RejoinState rejoin)
{
return emitStubCall(ptr, needsRejoin, frame.totalDepth());
return emitStubCall(ptr, rejoin, frame.totalDepth());
}
JSC::MacroAssembler::Call
StubCompiler::emitStubCall(void *ptr, bool needsRejoin, int32 slots)
StubCompiler::emitStubCall(void *ptr, RejoinState rejoin, int32 slots)
{
JaegerSpew(JSpew_Insns, " ---- BEGIN SLOW CALL CODE ---- \n");
DataLabelPtr inlinePatch;
@ -187,7 +187,7 @@ StubCompiler::emitStubCall(void *ptr, bool needsRejoin, int32 slots)
/* Add the call site for debugging and recompilation. */
Compiler::InternalCallSite site(masm.callReturnOffset(cl),
cc.inlineIndex(), cc.inlinePC(),
(size_t)ptr, true, needsRejoin);
rejoin, true);
site.inlinePatch = inlinePatch;
/* Add a hook for restoring loop invariants if necessary. */
@ -198,7 +198,7 @@ StubCompiler::emitStubCall(void *ptr, bool needsRejoin, int32 slots)
/* MissedBoundsCheck* are not actually called, so f.regs needs to be written before InvariantFailure. */
bool entry = (ptr == JS_FUNC_TO_DATA_PTR(void *, stubs::MissedBoundsCheckEntry))
|| (ptr == JS_FUNC_TO_DATA_PTR(void *, stubs::MissedBoundsCheckHead));
cc.loop->addInvariantCall(j, l, true, entry, cc.callSites.length(), true);
cc.loop->addInvariantCall(j, l, true, entry, cc.callSites.length());
}
cc.addCallSite(site);
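emitStubCall now records a RejoinState for each call site instead of a single needsRejoin bit, so recompilation can tell how, not just whether, a discarded frame re-enters the interpreter. A sketch of the shape of the change; the enumerator name below is illustrative, not taken from this patch:

    // Before: one bit per call site.
    stubcc.emitStubCall(ptr, /* needsRejoin = */ true);

    // After: the call site names its rejoin point, e.g. fall through to
    // the next op vs. resume the current one.
    stubcc.emitStubCall(ptr, REJOIN_FALLTHROUGH);   // hypothetical enumerator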

View File

@ -137,8 +137,8 @@ class StubCompiler
bool jumpInScript(Jump j, jsbytecode *target);
unsigned crossJump(Jump j, Label l);
Call emitStubCall(void *ptr, bool needsRejoin);
Call emitStubCall(void *ptr, bool needsRejoin, int32 slots);
Call emitStubCall(void *ptr, RejoinState rejoin);
Call emitStubCall(void *ptr, RejoinState rejoin, int32 slots);
void patchJoin(unsigned i, bool script, Assembler::Address address, AnyRegisterID reg);
};

View File

@ -831,21 +831,33 @@ struct LimitCheck
{
StackFrame *base;
Value **limit;
void *topncode;
LimitCheck(StackFrame *base, Value **limit) : base(base), limit(limit) {}
LimitCheck(StackFrame *base, Value **limit, void *topncode)
: base(base), limit(limit), topncode(topncode)
{}
JS_ALWAYS_INLINE bool
operator()(JSContext *cx, StackSpace &space, Value *from, uintN nvals)
{
/*
* Include an extra sizeof(StackFrame) to satisfy the method-jit
* stackLimit invariant.
* Include extra space for a new stack frame, inlined frames and loop
* temporaries to satisfy the method-jit stackLimit invariant.
*/
nvals += VALUES_PER_STACK_FRAME;
nvals += StackSpace::STACK_EXTRA + VALUES_PER_STACK_FRAME;
JS_ASSERT(from < *limit);
if (*limit - from >= ptrdiff_t(nvals))
return true;
if (topncode) {
/*
* The current regs.pc may not be intact; set it in case bumping
* the limit fails.
*/
cx->regs().pc = mjit::NativeToPC(cx->fp()->jit(), topncode);
}
return space.bumpLimitWithinQuota(cx, base, from, nvals, limit);
}
};
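LimitCheck is the functor getCallFrame runs before pushing a new frame; the new topncode field lets it write a valid regs.pc before attempting a fallible limit bump. A condensed sketch of the calling side, assuming the caller passes the stub's native return address as topncode:

    Value *from = cx->regs().sp;
    detail::LimitCheck check(fp, limit, topncode);
    if (!check(cx, space, from, nvals))
        return NULL;   // bump failed; regs.pc was already made sane for the error path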
@ -907,12 +919,12 @@ ContextStack::getInlineFrame(JSContext *cx, Value *sp, uintN nactual,
JS_ALWAYS_INLINE StackFrame *
ContextStack::getInlineFrameWithinLimit(JSContext *cx, Value *sp, uintN nactual,
JSFunction *fun, JSScript *script, uint32 *flags,
StackFrame *fp, Value **limit) const
StackFrame *fp, Value **limit, void *topncode) const
{
JS_ASSERT(isCurrentAndActive());
JS_ASSERT(cx->regs().sp == sp);
return getCallFrame(cx, sp, nactual, fun, script, flags, detail::LimitCheck(fp, limit));
return getCallFrame(cx, sp, nactual, fun, script, flags, detail::LimitCheck(fp, limit, topncode));
}
JS_ALWAYS_INLINE void

View File

@ -58,7 +58,11 @@ class ExecuteFrameGuard;
class DummyFrameGuard;
class GeneratorFrameGuard;
namespace mjit { struct JITScript; struct CallSite; }
namespace mjit {
struct JITScript;
struct CallSite;
jsbytecode *NativeToPC(JITScript *jit, void *ncode);
}
namespace detail { struct OOMCheck; }
#ifdef JS_METHODJIT
@ -67,6 +71,8 @@ typedef js::mjit::CallSite JSInlinedSite;
struct JSInlinedSite {};
#endif
typedef /* js::mjit::RejoinState */ size_t JSRejoinState;
/*
* VM stack layout
*
@ -280,10 +286,8 @@ class StackFrame
jsbytecode *imacropc_; /* pc of macro caller */
void *hookData_; /* closure returned by call hook */
void *annotation_; /* perhaps remove with bug 546848 */
#if JS_BITS_PER_WORD == 32
void *padding;
#endif
JSRejoinState rejoin_; /* If rejoining into the interpreter
* from JIT code, state at rejoin. */
static void staticAsserts() {
JS_STATIC_ASSERT(offsetof(StackFrame, rval_) % sizeof(js::Value) == 0);
@ -770,6 +774,16 @@ class StackFrame
annotation_ = annot;
}
/* JIT rejoin state */
JSRejoinState rejoin() const {
return rejoin_;
}
void setRejoin(JSRejoinState state) {
rejoin_ = state;
}
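rejoin() and setRejoin() expose the per-frame rejoin state that replaces the old 32-bit padding word. A hedged sketch of the intended protocol; the zero sentinel is an assumption, not something this hunk specifies:

    fp->setRejoin(state);             // recorded when the frame's JIT code is
                                      // discarded while the frame is on the stack
    JSRejoinState r = fp->rejoin();   // consumed by js_InternalInterpret when
                                      // the frame resumes in the interpreter
    fp->setRejoin(0);                 // assumed 'no rejoin' sentinel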
/* Debugger hook data */
bool hasHookData() const {
@ -1039,6 +1053,11 @@ class FrameRegs
inlined_ = NULL;
}
/* For InternalInterpret: */
void restorePartialFrame(Value *newfp) {
fp_ = (StackFrame *) newfp;
}
/* For stubs::CompileFunction, ContextStack: */
void prepareToRun(StackFrame *fp, JSScript *script) {
pc = script->code;
@ -1371,7 +1390,7 @@ class ContextStack
inline StackFrame *
getInlineFrameWithinLimit(JSContext *cx, Value *sp, uintN nactual,
JSFunction *fun, JSScript *script, uint32 *flags,
StackFrame *base, Value **limit) const;
StackFrame *base, Value **limit, void *topncode) const;
inline void pushInlineFrame(JSScript *script, StackFrame *fp, FrameRegs &regs);
inline void popInlineFrame();