Merge tracemonkey to mozilla-central.

This commit is contained in:
Robert Sayre 2009-04-01 08:12:53 -07:00
commit 14165ff453
21 changed files with 423 additions and 329 deletions

View File

@ -3237,6 +3237,8 @@ LookupResult(JSContext *cx, JSObject *obj, JSObject *obj2, JSProperty *prop)
rval = SPROP_HAS_VALID_SLOT(sprop, OBJ_SCOPE(obj2))
? LOCKED_OBJ_GET_SLOT(obj2, sprop->slot)
: JSVAL_TRUE;
} else if (OBJ_IS_DENSE_ARRAY(cx, obj2)) {
rval = js_GetDenseArrayElementValue(obj2, prop);
} else {
/* XXX bad API: no way to return "defined but value unknown" */
rval = JSVAL_TRUE;

View File

@ -1399,51 +1399,6 @@ struct JSExtendedClass {
#define JSCLASS_NO_OPTIONAL_MEMBERS 0,0,0,0,0,0,0,0
#define JSCLASS_NO_RESERVED_MEMBERS 0,0,0
/* For detailed comments on these function pointer types, see jspubtd.h. */
struct JSObjectOps {
    /* Mandatory non-null function pointer members. */
    JSNewObjectMapOp    newObjectMap;       /* create the JSObjectMap (refcounted) for a new object */
    JSObjectMapOp       destroyObjectMap;   /* dispose of a map once its refcount reaches 0 */
    JSLookupPropOp      lookupProperty;     /* find id along obj's prototype chain */
    JSDefinePropOp      defineProperty;     /* define a direct property of obj */
    JSPropertyIdOp      getProperty;        /* get obj[id] into *vp */
    JSPropertyIdOp      setProperty;        /* set obj[id] from *vp */
    JSAttributesOp      getAttributes;      /* get attributes of obj[id] */
    JSAttributesOp      setAttributes;      /* set attributes of obj[id] */
    JSPropertyIdOp      deleteProperty;     /* delete obj[id]; *vp reports permanence */
    JSConvertOp         defaultValue;       /* type-conversion hook (see jspubtd.h) */
    JSNewEnumerateOp    enumerate;          /* property enumeration hook */
    JSCheckAccessIdOp   checkAccess;        /* check whether obj[id] may be accessed per mode */
    /* Optionally non-null members start here. */
    JSObjectOp          thisObject;         /* 'this' object computation hook */
    JSPropertyRefOp     dropProperty;       /* release a property from lookup/define */
    JSNative            call;               /* invoke obj as a function */
    JSNative            construct;          /* invoke obj as a constructor */
    JSXDRObjectOp       xdrObject;          /* XDR (de)serialization hook */
    JSHasInstanceOp     hasInstance;        /* 'v instanceof obj' hook */
    JSSetObjectSlotOp   setProto;           /* set proto, checking for cycles */
    JSSetObjectSlotOp   setParent;          /* set parent, checking for cycles */
    JSTraceOp           trace;              /* trace reachable things for the GC */
    JSFinalizeOp        clear;              /* clear obj's map contents */
    JSGetRequiredSlotOp getRequiredSlot;    /* get a required (pre-allocated) slot */
    JSSetRequiredSlotOp setRequiredSlot;    /* set a required (pre-allocated) slot */
};
/*
* Classes that expose JSObjectOps via a non-null getObjectOps class hook may
* derive a property structure from this struct, return a pointer to it from
* lookupProperty and defineProperty, and use the pointer to avoid rehashing
* in getAttributes and setAttributes.
*
* The jsid type contains either an int jsval (see JSVAL_IS_INT above), or an
* internal pointer that is opaque to users of this API, but which users may
* convert from and to a jsval using JS_ValueToId and JS_IdToValue.
*/
struct JSProperty {
    jsid id;    /* int jsval or opaque internal pointer; convert via JS_ValueToId/JS_IdToValue */
};
struct JSIdArray {
jsint length;
jsid vector[1]; /* actually, length jsid words */

View File

@ -739,6 +739,23 @@ array_dropProperty(JSContext *cx, JSObject *obj, JSProperty *prop)
#endif
}
/*
 * Return the element value of a dense array designated by the opaque property
 * pointer that array_lookupProperty returned.  The element must exist (the
 * assertions below check that prop is the reserved lookup-holder fslot and
 * that the slot does not hold JSVAL_HOLE).
 */
jsval
js_GetDenseArrayElementValue(JSObject *obj, JSProperty *prop)
{
    /*
     * OBJ_IS_DENSE_ARRAY ignores its cx argument and no JSContext is in
     * scope here, so pass an explicit null pointer.  (The previous code
     * referenced an undeclared 'cx', which broke DEBUG builds where
     * JS_ASSERT expands to real code.)
     */
    JS_ASSERT(OBJ_IS_DENSE_ARRAY((JSContext *) NULL, obj));

    /* array_lookupProperty parks the looked-up id in a reserved fslot. */
    JS_ASSERT((void *) prop ==
              (void *) &(obj->fslots[JSSLOT_ARRAY_LOOKUP_HOLDER]));
    JS_ASSERT((jsval) prop->id == obj->fslots[JSSLOT_ARRAY_LOOKUP_HOLDER]);
    JS_ASSERT(JSVAL_IS_INT(prop->id));
    jsint i = JSID_TO_INT(prop->id);
    JS_ASSERT(i >= 0);
    jsval v = obj->dslots[i];
    JS_ASSERT(v != JSVAL_HOLE);
    return v;
}
static JSBool
array_getProperty(JSContext *cx, JSObject *obj, jsid id, jsval *vp)
{
@ -1205,10 +1222,9 @@ JSObjectOps js_ArrayObjectOps = {
array_enumerate, js_CheckAccess,
NULL, array_dropProperty,
NULL, NULL,
NULL, js_HasInstance,
js_SetProtoOrParent, js_SetProtoOrParent,
array_trace, NULL,
NULL, NULL
js_HasInstance, array_trace,
NULL, NULL,
NULL
};
static JSObjectOps *

View File

@ -223,6 +223,12 @@ js_ArrayToJSDoubleBuffer(JSContext *cx, JSObject *obj, jsuint offset, jsuint cou
JSBool
js_PrototypeHasIndexedProperties(JSContext *cx, JSObject *obj);
/*
* Utility to access the value from the id returned by array_lookupProperty.
*/
jsval
js_GetDenseArrayElementValue(JSObject *obj, JSProperty *prop);
JS_END_EXTERN_C
#endif /* jsarray_h___ */

View File

@ -111,6 +111,7 @@ struct VMFragment;
#define MONITOR_N_GLOBAL_STATES 4
/*
 * Per-global-object tracking state for the trace monitor; there are
 * MONITOR_N_GLOBAL_STATES of these.  A state is bound to a (globalObj,
 * globalShape) pair; globalShape is initialized from OBJ_SHAPE(globalObj)
 * when the slot is claimed (globalShape == (uint32)-1 marks a free slot).
 */
struct GlobalState {
    JSObject*     globalObj;    /* global object this state tracks */
    uint32        globalShape;  /* shape of globalObj when this state was bound */
    CLS(SlotList) globalSlots;  /* global slots referenced by traces for this global */
};

View File

@ -191,10 +191,9 @@ struct JSTreeContext { /* tree context for semantic checks */
#define TCF_FUN_USES_NONLOCALS 0x20 /* function refers to non-local names */
#define TCF_FUN_HEAVYWEIGHT 0x40 /* function needs Call object per call */
#define TCF_FUN_IS_GENERATOR 0x80 /* parsed yield statement in function */
#define TCF_HAS_DEFXMLNS 0x100 /* default xml namespace = ...; parsed */
#define TCF_HAS_FUNCTION_STMT 0x200 /* block contains a function statement */
#define TCF_GENEXP_LAMBDA 0x400 /* flag lambda from generator expression */
#define TCF_COMPILE_N_GO 0x800 /* compiler-and-go mode of script, can
#define TCF_HAS_FUNCTION_STMT 0x100 /* block contains a function statement */
#define TCF_GENEXP_LAMBDA 0x200 /* flag lambda from generator expression */
#define TCF_COMPILE_N_GO 0x400 /* compiler-and-go mode of script, can
optimize name references based on scope
chain */
/*

View File

@ -110,10 +110,9 @@ JS_FRIEND_DATA(JSObjectOps) js_ObjectOps = {
js_Enumerate, js_CheckAccess,
NULL, NATIVE_DROP_PROPERTY,
js_Call, js_Construct,
NULL, js_HasInstance,
js_SetProtoOrParent, js_SetProtoOrParent,
js_TraceObject, js_Clear,
js_GetRequiredSlot, js_SetRequiredSlot
js_HasInstance, js_TraceObject,
js_Clear, js_GetRequiredSlot,
js_SetRequiredSlot
};
JSClass js_ObjectClass = {
@ -2012,6 +2011,51 @@ js_Object(JSContext *cx, JSObject *obj, uintN argc, jsval *argv, jsval *rval)
return JS_TRUE;
}
/*
 * Give obj a JSObjectMap: either share proto's map (bumping its refcount) or
 * create a fresh one via ops->newObjectMap.  Returns false on OOM, in which
 * case obj->map has not been left owning a reference.
 */
static inline bool
CreateMapForObject(JSContext* cx, JSObject* obj, JSObject* proto, JSObjectOps* ops)
{
    JSObjectMap* map;
    JSClass* protoclasp;
    JSClass* clasp = OBJ_GET_CLASS(cx, obj);

    /*
     * Share proto's map only if it has the same JSObjectOps, and only if
     * proto's class has the same private and reserved slots as obj's map
     * and class have. We assume that if prototype and object are of the
     * same class, they always have the same number of computed reserved
     * slots (returned via clasp->reserveSlots); otherwise, prototype and
     * object classes must have the same (null or not) reserveSlots hook.
     */
    if (proto &&
        ((map = proto->map)->ops == ops &&
         ((protoclasp = OBJ_GET_CLASS(cx, proto)) == clasp ||
          (!((protoclasp->flags ^ clasp->flags) &
             (JSCLASS_HAS_PRIVATE |
              (JSCLASS_RESERVED_SLOTS_MASK << JSCLASS_RESERVED_SLOTS_SHIFT))) &&
           protoclasp->reserveSlots == clasp->reserveSlots))))
    {
        /* Share the given prototype's map. */
        obj->map = js_HoldObjectMap(cx, map);
        return true;
    }

    map = ops->newObjectMap(cx, 1, ops, clasp, obj);
    if (!map)
        return false;
    obj->map = map;

    /* Let ops->newObjectMap set freeslot so as to reserve slots. */
    uint32 nslots = map->freeslot;
    JS_ASSERT(nslots >= JSSLOT_PRIVATE);
    if (nslots > JS_INITIAL_NSLOTS &&
        !js_ReallocSlots(cx, obj, nslots, JS_TRUE)) {
        /* Release the reference newObjectMap returned before failing. */
        js_DropObjectMap(cx, map, obj);
        return false;
    }
    return true;
}
#ifdef JS_TRACER
static inline JSObject*
@ -2028,10 +2072,10 @@ NewNativeObject(JSContext* cx, JSObject* proto, JSObject *parent)
for (unsigned i = JSSLOT_PRIVATE; i < JS_INITIAL_NSLOTS; ++i)
obj->fslots[i] = JSVAL_VOID;
JS_ASSERT(!OBJ_GET_CLASS(cx, proto)->getObjectOps);
JS_ASSERT(proto->map->ops == &js_ObjectOps);
obj->map = js_HoldObjectMap(cx, proto->map);
if (!CreateMapForObject(cx, obj, proto, &js_ObjectOps))
return NULL;
obj->dslots = NULL;
return obj;
}
@ -2308,10 +2352,9 @@ JS_FRIEND_DATA(JSObjectOps) js_WithObjectOps = {
with_Enumerate, with_CheckAccess,
with_ThisObject, NATIVE_DROP_PROPERTY,
NULL, NULL,
NULL, NULL,
js_SetProtoOrParent, js_SetProtoOrParent,
js_TraceObject, js_Clear,
NULL, NULL
NULL, js_TraceObject,
js_Clear, NULL,
NULL
};
static JSObjectOps *
@ -3051,9 +3094,7 @@ js_NewObjectWithGivenProto(JSContext *cx, JSClass *clasp, JSObject *proto,
{
JSObject *obj;
JSObjectOps *ops;
JSObjectMap *map;
JSClass *protoclasp;
uint32 nslots, i;
uint32 i;
JSTempValueRooter tvr;
#ifdef INCLUDE_MOZILLA_DTRACE
@ -3128,40 +3169,8 @@ js_NewObjectWithGivenProto(JSContext *cx, JSClass *clasp, JSObject *proto,
if (proto && !parent)
STOBJ_SET_PARENT(obj, OBJ_GET_PARENT(cx, proto));
/*
* Share proto's map only if it has the same JSObjectOps, and only if
* proto's class has the same private and reserved slots as obj's map
* and class have. We assume that if prototype and object are of the
* same class, they always have the same number of computed reserved
* slots (returned via clasp->reserveSlots); otherwise, prototype and
* object classes must have the same (null or not) reserveSlots hook.
*/
if (proto &&
(map = proto->map)->ops == ops &&
((protoclasp = OBJ_GET_CLASS(cx, proto)) == clasp ||
(!((protoclasp->flags ^ clasp->flags) &
(JSCLASS_HAS_PRIVATE |
(JSCLASS_RESERVED_SLOTS_MASK << JSCLASS_RESERVED_SLOTS_SHIFT))) &&
protoclasp->reserveSlots == clasp->reserveSlots)))
{
/* Share the given prototype's map. */
obj->map = js_HoldObjectMap(cx, map);
} else {
map = ops->newObjectMap(cx, 1, ops, clasp, obj);
if (!map)
goto bad;
obj->map = map;
/* Let ops->newObjectMap set freeslot so as to reserve slots. */
nslots = map->freeslot;
JS_ASSERT(nslots >= JSSLOT_PRIVATE);
if (nslots > JS_INITIAL_NSLOTS &&
!js_ReallocSlots(cx, obj, nslots, JS_TRUE)) {
js_DropObjectMap(cx, map, obj);
obj->map = NULL;
goto bad;
}
}
if (!CreateMapForObject(cx, obj, proto, ops))
goto bad;
/*
* Do not call debug hooks on trace, because we might be in a non-_FAIL

View File

@ -54,6 +54,48 @@
JS_BEGIN_EXTERN_C
/* For detailed comments on these function pointer types, see jsprvtd.h. */
struct JSObjectOps {
    /* Mandatory non-null function pointer members. */
    JSNewObjectMapOp    newObjectMap;       /* create the JSObjectMap (refcounted) for a new object */
    JSObjectMapOp       destroyObjectMap;   /* dispose of a map once its refcount reaches 0 */
    JSLookupPropOp      lookupProperty;     /* find id along obj's prototype chain */
    JSDefinePropOp      defineProperty;     /* define a direct property of obj */
    JSPropertyIdOp      getProperty;        /* get obj[id] into *vp */
    JSPropertyIdOp      setProperty;        /* set obj[id] from *vp */
    JSAttributesOp      getAttributes;      /* get attributes of obj[id] */
    JSAttributesOp      setAttributes;      /* set attributes of obj[id] */
    JSPropertyIdOp      deleteProperty;     /* delete obj[id]; *vp reports permanence */
    JSConvertOp         defaultValue;       /* type-conversion hook (see jsprvtd.h) */
    JSNewEnumerateOp    enumerate;          /* property enumeration hook */
    JSCheckAccessIdOp   checkAccess;        /* check whether obj[id] may be accessed per mode */
    /* Optionally non-null members start here. */
    JSObjectOp          thisObject;         /* 'this' object computation hook */
    JSPropertyRefOp     dropProperty;       /* release a property from lookup/define */
    JSNative            call;               /* invoke obj as a function */
    JSNative            construct;          /* invoke obj as a constructor */
    JSHasInstanceOp     hasInstance;        /* 'v instanceof obj' hook */
    JSTraceOp           trace;              /* trace reachable things for the GC */
    JSFinalizeOp        clear;              /* clear obj's map contents */
    JSGetRequiredSlotOp getRequiredSlot;    /* get a required (pre-allocated) slot */
    JSSetRequiredSlotOp setRequiredSlot;    /* set a required (pre-allocated) slot */
};
/*
* Classes that expose JSObjectOps via a non-null getObjectOps class hook may
* derive a property structure from this struct, return a pointer to it from
* lookupProperty and defineProperty, and use the pointer to avoid rehashing
* in getAttributes and setAttributes.
*
* The jsid type contains either an int jsval (see JSVAL_IS_INT above), or an
* internal pointer that is opaque to users of this API, but which users may
* convert from and to a jsval using JS_ValueToId and JS_IdToValue.
*/
struct JSProperty {
jsid id;
};
struct JSObjectMap {
jsrefcount nrefs; /* count of all referencing objects */
JSObjectOps *ops; /* high level object operation vtable */

View File

@ -906,7 +906,7 @@ FunctionBody(JSContext *cx, JSTokenStream *ts, JSTreeContext *tc)
}
}
tc->flags = oldflags | (tc->flags & (TCF_FUN_FLAGS | TCF_HAS_DEFXMLNS));
tc->flags = oldflags | (tc->flags & TCF_FUN_FLAGS);
return pn;
}
@ -1453,7 +1453,7 @@ FunctionDef(JSContext *cx, JSTokenStream *ts, JSTreeContext *tc,
pn->pn_funpob = funpob;
pn->pn_op = op;
pn->pn_body = body;
pn->pn_flags = funtc.flags & (TCF_FUN_FLAGS | TCF_HAS_DEFXMLNS | TCF_COMPILE_N_GO);
pn->pn_flags = funtc.flags & (TCF_FUN_FLAGS | TCF_COMPILE_N_GO);
TREE_CONTEXT_FINISH(cx, &funtc);
return result;
}
@ -3505,7 +3505,6 @@ Statement(JSContext *cx, JSTokenStream *ts, JSTreeContext *tc)
pn->pn_op = JSOP_DEFXMLNS;
pn->pn_pos.end = pn2->pn_pos.end;
pn->pn_kid = pn2;
tc->flags |= TCF_HAS_DEFXMLNS;
break;
#endif

View File

@ -253,6 +253,128 @@ struct JSTempValueRooter {
JSTempValueUnion u;
};
/* JSObjectOps function pointer typedefs. */
/*
* Create a new subclass of JSObjectMap (see jsobj.h), with the nrefs and ops
* members initialized from the same-named parameters, and with the nslots and
* freeslot members initialized according to ops and clasp. Return null on
* error, non-null on success.
*
* JSObjectMaps are reference-counted by generic code in the engine. Usually,
* the nrefs parameter to JSObjectOps.newObjectMap will be 1, to count the ref
* returned to the caller on success. After a successful construction, some
* number of js_HoldObjectMap and js_DropObjectMap calls ensue. When nrefs
* reaches 0 due to a js_DropObjectMap call, JSObjectOps.destroyObjectMap will
* be called to dispose of the map.
*/
typedef JSObjectMap *
(* JSNewObjectMapOp)(JSContext *cx, jsrefcount nrefs, JSObjectOps *ops,
JSClass *clasp, JSObject *obj);
/*
* Generic type for an infallible JSObjectMap operation, used currently by
* JSObjectOps.destroyObjectMap.
*/
typedef void
(* JSObjectMapOp)(JSContext *cx, JSObjectMap *map);
/*
* Look for id in obj and its prototype chain, returning false on error or
* exception, true on success. On success, return null in *propp if id was
* not found. If id was found, return the first object searching from obj
* along its prototype chain in which id names a direct property in *objp, and
* return a non-null, opaque property pointer in *propp.
*
* If JSLookupPropOp succeeds and returns with *propp non-null, that pointer
* may be passed as the prop parameter to a JSAttributesOp, as a short-cut
* that bypasses id re-lookup. In any case, a non-null *propp result after a
* successful lookup must be dropped via JSObjectOps.dropProperty.
*
* NB: successful return with non-null *propp means the implementation may
* have locked *objp and added a reference count associated with *propp, so
* callers should not risk deadlock by nesting or interleaving other lookups
* or any obj-bearing ops before dropping *propp.
*/
typedef JSBool
(* JSLookupPropOp)(JSContext *cx, JSObject *obj, jsid id, JSObject **objp,
JSProperty **propp);
/*
* Define obj[id], a direct property of obj named id, having the given initial
* value, with the specified getter, setter, and attributes. If the propp out
* param is non-null, *propp on successful return contains an opaque property
* pointer usable as a speedup hint with JSAttributesOp. But note that propp
* may be null, indicating that the caller is not interested in recovering an
* opaque pointer to the newly-defined property.
*
* If propp is non-null and JSDefinePropOp succeeds, its caller must be sure
* to drop *propp using JSObjectOps.dropProperty in short order, just as with
* JSLookupPropOp.
*/
typedef JSBool
(* JSDefinePropOp)(JSContext *cx, JSObject *obj, jsid id, jsval value,
JSPropertyOp getter, JSPropertyOp setter, uintN attrs,
JSProperty **propp);
/*
* Get, set, or delete obj[id], returning false on error or exception, true
* on success. If getting or setting, the new value is returned in *vp on
* success. If deleting without error, *vp will be JSVAL_FALSE if obj[id] is
* permanent, and JSVAL_TRUE if id named a direct property of obj that was in
* fact deleted, or if id names no direct property of obj (id could name a
* prototype property, or no property in obj or its prototype chain).
*/
typedef JSBool
(* JSPropertyIdOp)(JSContext *cx, JSObject *obj, jsid id, jsval *vp);
/*
* Get or set attributes of the property obj[id]. Return false on error or
* exception, true with current attributes in *attrsp. If prop is non-null,
* it must come from the *propp out parameter of a prior JSDefinePropOp or
* JSLookupPropOp call.
*/
typedef JSBool
(* JSAttributesOp)(JSContext *cx, JSObject *obj, jsid id, JSProperty *prop,
uintN *attrsp);
/*
* JSObjectOps.checkAccess type: check whether obj[id] may be accessed per
* mode, returning false on error/exception, true on success with obj[id]'s
* last-got value in *vp, and its attributes in *attrsp.
*/
typedef JSBool
(* JSCheckAccessIdOp)(JSContext *cx, JSObject *obj, jsid id, JSAccessMode mode,
jsval *vp, uintN *attrsp);
/*
* A generic type for functions taking a context, object, and property, with
* no return value. Used by JSObjectOps.dropProperty currently (see above,
* JSDefinePropOp and JSLookupPropOp, for the object-locking protocol in which
* dropProperty participates).
*/
typedef void
(* JSPropertyRefOp)(JSContext *cx, JSObject *obj, JSProperty *prop);
/*
* Get and set a required slot, one that should already have been allocated.
* These operations are infallible, so required slots must be pre-allocated,
* or implementations must suppress out-of-memory errors. The native ops
* (js_ObjectOps, see jsobj.c) access slots reserved by including a call to
* the JSCLASS_HAS_RESERVED_SLOTS(n) macro in the JSClass.flags initializer.
*
* NB: the slot parameter is a zero-based index into obj slots, unlike the
* index parameter to the JS_GetReservedSlot and JS_SetReservedSlot API entry
* points, which is a zero-based index into the JSCLASS_RESERVED_SLOTS(clasp)
* reserved slots that come after the initial well-known slots: proto, parent,
* class, and optionally, the private data slot.
*/
typedef jsval
(* JSGetRequiredSlotOp)(JSContext *cx, JSObject *obj, uint32 slot);
typedef JSBool
(* JSSetRequiredSlotOp)(JSContext *cx, JSObject *obj, uint32 slot, jsval v);
/*
* The following determines whether JS_EncodeCharacters and JS_DecodeBytes
* treat char[] as utf-8 or simply as bytes that need to be inflated/deflated.

View File

@ -414,99 +414,10 @@ typedef void
typedef uint32
(* JSReserveSlotsOp)(JSContext *cx, JSObject *obj);
/* JSObjectOps function pointer typedefs. */
/* JSExtendedClass function pointer typedefs. */
/*
* Create a new subclass of JSObjectMap (see jsobj.h), with the nrefs and ops
* members initialized from the same-named parameters, and with the nslots and
* freeslot members initialized according to ops and clasp. Return null on
* error, non-null on success.
*
* JSObjectMaps are reference-counted by generic code in the engine. Usually,
* the nrefs parameter to JSObjectOps.newObjectMap will be 1, to count the ref
* returned to the caller on success. After a successful construction, some
* number of js_HoldObjectMap and js_DropObjectMap calls ensue. When nrefs
* reaches 0 due to a js_DropObjectMap call, JSObjectOps.destroyObjectMap will
* be called to dispose of the map.
*/
typedef JSObjectMap *
(* JSNewObjectMapOp)(JSContext *cx, jsrefcount nrefs, JSObjectOps *ops,
JSClass *clasp, JSObject *obj);
/*
* Generic type for an infallible JSObjectMap operation, used currently by
* JSObjectOps.destroyObjectMap.
*/
typedef void
(* JSObjectMapOp)(JSContext *cx, JSObjectMap *map);
/*
* Look for id in obj and its prototype chain, returning false on error or
* exception, true on success. On success, return null in *propp if id was
* not found. If id was found, return the first object searching from obj
* along its prototype chain in which id names a direct property in *objp, and
* return a non-null, opaque property pointer in *propp.
*
* If JSLookupPropOp succeeds and returns with *propp non-null, that pointer
* may be passed as the prop parameter to a JSAttributesOp, as a short-cut
* that bypasses id re-lookup. In any case, a non-null *propp result after a
* successful lookup must be dropped via JSObjectOps.dropProperty.
*
* NB: successful return with non-null *propp means the implementation may
* have locked *objp and added a reference count associated with *propp, so
* callers should not risk deadlock by nesting or interleaving other lookups
* or any obj-bearing ops before dropping *propp.
*/
typedef JSBool
(* JSLookupPropOp)(JSContext *cx, JSObject *obj, jsid id, JSObject **objp,
JSProperty **propp);
/*
* Define obj[id], a direct property of obj named id, having the given initial
* value, with the specified getter, setter, and attributes. If the propp out
* param is non-null, *propp on successful return contains an opaque property
* pointer usable as a speedup hint with JSAttributesOp. But note that propp
* may be null, indicating that the caller is not interested in recovering an
* opaque pointer to the newly-defined property.
*
* If propp is non-null and JSDefinePropOp succeeds, its caller must be sure
* to drop *propp using JSObjectOps.dropProperty in short order, just as with
* JSLookupPropOp.
*/
typedef JSBool
(* JSDefinePropOp)(JSContext *cx, JSObject *obj, jsid id, jsval value,
JSPropertyOp getter, JSPropertyOp setter, uintN attrs,
JSProperty **propp);
/*
* Get, set, or delete obj[id], returning false on error or exception, true
* on success. If getting or setting, the new value is returned in *vp on
* success. If deleting without error, *vp will be JSVAL_FALSE if obj[id] is
* permanent, and JSVAL_TRUE if id named a direct property of obj that was in
* fact deleted, or if id names no direct property of obj (id could name a
* prototype property, or no property in obj or its prototype chain).
*/
typedef JSBool
(* JSPropertyIdOp)(JSContext *cx, JSObject *obj, jsid id, jsval *vp);
/*
* Get or set attributes of the property obj[id]. Return false on error or
* exception, true with current attributes in *attrsp. If prop is non-null,
* it must come from the *propp out parameter of a prior JSDefinePropOp or
* JSLookupPropOp call.
*/
typedef JSBool
(* JSAttributesOp)(JSContext *cx, JSObject *obj, jsid id, JSProperty *prop,
uintN *attrsp);
/*
* JSObjectOps.checkAccess type: check whether obj[id] may be accessed per
* mode, returning false on error/exception, true on success with obj[id]'s
* last-got value in *vp, and its attributes in *attrsp.
*/
typedef JSBool
(* JSCheckAccessIdOp)(JSContext *cx, JSObject *obj, jsid id, JSAccessMode mode,
jsval *vp, uintN *attrsp);
(* JSEqualityOp)(JSContext *cx, JSObject *obj, jsval v, JSBool *bp);
/*
* A generic type for functions mapping an object to another object, or null
@ -523,59 +434,6 @@ typedef JSObject *
typedef JSObject *
(* JSIteratorOp)(JSContext *cx, JSObject *obj, JSBool keysonly);
/*
* A generic type for functions taking a context, object, and property, with
* no return value. Used by JSObjectOps.dropProperty currently (see above,
* JSDefinePropOp and JSLookupPropOp, for the object-locking protocol in which
* dropProperty participates).
*/
typedef void
(* JSPropertyRefOp)(JSContext *cx, JSObject *obj, JSProperty *prop);
/*
* Function pointer type for JSObjectOps.setProto and JSObjectOps.setParent.
* These hooks must check for cycles without deadlocking, and otherwise take
* special steps. See jsobj.c and jsgc.c for details.
*/
typedef JSBool
(* JSSetObjectSlotOp)(JSContext *cx, JSObject *obj, uint32 slot,
JSObject *pobj);
/*
* Get and set a required slot, one that should already have been allocated.
* These operations are infallible, so required slots must be pre-allocated,
* or implementations must suppress out-of-memory errors. The native ops
* (js_ObjectOps, see jsobj.c) access slots reserved by including a call to
* the JSCLASS_HAS_RESERVED_SLOTS(n) macro in the JSClass.flags initializer.
*
* NB: the slot parameter is a zero-based index into obj slots, unlike the
* index parameter to the JS_GetReservedSlot and JS_SetReservedSlot API entry
* points, which is a zero-based index into the JSCLASS_RESERVED_SLOTS(clasp)
* reserved slots that come after the initial well-known slots: proto, parent,
* class, and optionally, the private data slot.
*/
typedef jsval
(* JSGetRequiredSlotOp)(JSContext *cx, JSObject *obj, uint32 slot);
typedef JSBool
(* JSSetRequiredSlotOp)(JSContext *cx, JSObject *obj, uint32 slot, jsval v);
typedef JSObject *
(* JSGetMethodOp)(JSContext *cx, JSObject *obj, jsid id, jsval *vp);
typedef JSBool
(* JSSetMethodOp)(JSContext *cx, JSObject *obj, jsid id, jsval *vp);
typedef JSBool
(* JSEnumerateValuesOp)(JSContext *cx, JSObject *obj, JSIterateOp enum_op,
jsval *statep, jsid *idp, jsval *vp);
typedef JSBool
(* JSEqualityOp)(JSContext *cx, JSObject *obj, jsval v, JSBool *bp);
typedef JSBool
(* JSConcatenateOp)(JSContext *cx, JSObject *obj, jsval v, jsval *vp);
/* Typedef for native functions called by the JS VM. */
typedef JSBool

View File

@ -71,6 +71,9 @@
#include "jstracer.h"
using namespace avmplus;
using namespace nanojit;
/* Amount of memory in the RE fragmento before flushing. */
#define MAX_MEM_IN_RE_FRAGMENTO (1 << 20)
#endif
typedef enum REOp {
@ -2438,7 +2441,8 @@ class RegExpNativeCompiler {
#endif
return JS_TRUE;
fail:
if (lirbuf->outOMem() || oom) {
if (lirbuf->outOMem() || oom ||
js_OverfullFragmento(fragmento, MAX_MEM_IN_RE_FRAGMENTO)) {
fragmento->clearFrags();
lirbuf->rewind();
} else {

View File

@ -1129,15 +1129,30 @@ js_AddScopeProperty(JSContext *cx, JSScope *scope, jsid id,
JS_ASSERT(scope->table);
CHECK_ANCESTOR_LINE(scope, JS_TRUE);
JSBool conflicts = JS_FALSE;
/*
* Our forking heuristic tries to balance the desire to avoid
* over-compacting (over-forking) against the desire to
* *periodically* fork anyways, in order to prevent paying scan
* penalties on each insert indefinitely, on a lineage with only
* a few old middle-deletions. So we fork if either:
*
* - A quick scan finds a true conflict.
* - We are passing through a doubling-threshold in size and
* have accumulated a nonzero count of uncompacted deletions.
*/
bool conflicts = false;
uint32 count = 0;
uint32 threshold = JS_BIT(JS_CeilingLog2(scope->entryCount));
for (sprop = SCOPE_LAST_PROP(scope); sprop; sprop = sprop->parent) {
++count;
if (sprop->id == id) {
conflicts = JS_TRUE;
conflicts = true;
break;
}
}
if (conflicts) {
if (conflicts || count > threshold) {
/*
* Enumerate live entries in scope->table using a temporary
* vector, by walking the (possibly sparse, due to deletions)

View File

@ -132,6 +132,9 @@ static const char tagChar[] = "OIDISIBI";
(MAX_NATIVE_STACK_SLOTS * sizeof(jsval) + \
MAX_CALL_STACK_ENTRIES * sizeof(JSInlineFrame))
/* Amount of memory in the main fragmento before flushing. */
#define MAX_MEM_IN_MAIN_FRAGMENTO (1 << 24)
/* Max number of branches per tree. */
#define MAX_BRANCHES 32
@ -236,7 +239,7 @@ static avmplus::AvmCore* core = &s_core;
#ifdef JS_JIT_SPEW
void
js_DumpPeerStability(JSTraceMonitor* tm, const void* ip, uint32 globalShape);
js_DumpPeerStability(JSTraceMonitor* tm, const void* ip, JSObject* globalObj, uint32 globalShape);
#endif
/* We really need a better way to configure the JIT. Shaver, where is my fancy JIT object? */
@ -480,51 +483,55 @@ js_Backoff(Fragment* tree, const jsbytecode* where)
}
static inline size_t
fragmentHash(const void *ip, uint32 globalShape)
fragmentHash(const void *ip, JSObject* globalObj, uint32 globalShape)
{
uintptr_t h = HASH_SEED;
hash_accum(h, uintptr_t(ip), FRAGMENT_TABLE_MASK);
hash_accum(h, uintptr_t(globalObj), FRAGMENT_TABLE_MASK);
hash_accum(h, uintptr_t(globalShape), FRAGMENT_TABLE_MASK);
return size_t(h);
}
struct VMFragment : public Fragment
{
VMFragment(const void* _ip, uint32 _globalShape) :
VMFragment(const void* _ip, JSObject* _globalObj, uint32 _globalShape) :
Fragment(_ip),
next(NULL),
globalObj(_globalObj),
globalShape(_globalShape)
{}
VMFragment* next;
JSObject* globalObj;
uint32 globalShape;
};
static VMFragment*
getVMFragment(JSTraceMonitor* tm, const void *ip, uint32 globalShape)
getVMFragment(JSTraceMonitor* tm, const void *ip, JSObject* globalObj, uint32 globalShape)
{
size_t h = fragmentHash(ip, globalShape);
size_t h = fragmentHash(ip, globalObj, globalShape);
VMFragment* vf = tm->vmfragments[h];
while (vf &&
! (vf->globalShape == globalShape &&
! (vf->globalObj == globalObj &&
vf->globalShape == globalShape &&
vf->ip == ip)) {
vf = vf->next;
}
return vf;
}
static Fragment*
getLoop(JSTraceMonitor* tm, const void *ip, uint32 globalShape)
static VMFragment*
getLoop(JSTraceMonitor* tm, const void *ip, JSObject* globalObj, uint32 globalShape)
{
return getVMFragment(tm, ip, globalShape);
return getVMFragment(tm, ip, globalObj, globalShape);
}
static Fragment*
getAnchor(JSTraceMonitor* tm, const void *ip, uint32 globalShape)
getAnchor(JSTraceMonitor* tm, const void *ip, JSObject* globalObj, uint32 globalShape)
{
VMFragment *f = new (&gc) VMFragment(ip, globalShape);
VMFragment *f = new (&gc) VMFragment(ip, globalObj, globalShape);
JS_ASSERT(f);
Fragment *p = getVMFragment(tm, ip, globalShape);
Fragment *p = getVMFragment(tm, ip, globalObj, globalShape);
if (p) {
f->first = p;
@ -536,7 +543,7 @@ getAnchor(JSTraceMonitor* tm, const void *ip, uint32 globalShape)
} else {
/* this is the first fragment */
f->first = f;
size_t h = fragmentHash(ip, globalShape);
size_t h = fragmentHash(ip, globalObj, globalShape);
f->next = tm->vmfragments[h];
tm->vmfragments[h] = f;
}
@ -558,7 +565,7 @@ js_AttemptCompilation(JSTraceMonitor* tm, JSObject* globalObj, jsbytecode* pc)
/*
* Breathe new life into all peer fragments at the designated loop header.
*/
Fragment* f = (VMFragment*)getLoop(tm, pc, OBJ_SHAPE(globalObj));
Fragment* f = (VMFragment*)getLoop(tm, pc, globalObj, OBJ_SHAPE(globalObj));
if (!f) {
/*
* If the global object's shape changed, we can't easily find the
@ -1260,7 +1267,6 @@ TraceRecorder::TraceRecorder(JSContext* cx, VMSideExit* _anchor, Fragment* _frag
cx_ins = addName(lir->insLoad(LIR_ldp, lirbuf->state, offsetof(InterpState, cx)), "cx");
eos_ins = addName(lir->insLoad(LIR_ldp, lirbuf->state, offsetof(InterpState, eos)), "eos");
eor_ins = addName(lir->insLoad(LIR_ldp, lirbuf->state, offsetof(InterpState, eor)), "eor");
globalObj_ins = addName(lir->insLoad(LIR_ldp, lirbuf->state, offsetof(InterpState, globalObj)), "globalObj");
/* If we came from exit, we might not have enough global types. */
if (ti->globalSlots->length() > ti->nGlobalTypes())
@ -2536,7 +2542,7 @@ TraceRecorder::closeLoop(JSTraceMonitor* tm, bool& demote)
LIns* exitIns;
Fragment* peer;
VMSideExit* exit;
Fragment* peer_root;
VMFragment* peer_root;
Fragmento* fragmento = tm->fragmento;
exitIns = snapshot(UNSTABLE_LOOP_EXIT);
@ -2551,7 +2557,8 @@ TraceRecorder::closeLoop(JSTraceMonitor* tm, bool& demote)
JS_ASSERT(exit->numStackSlots == treeInfo->nStackTypes);
peer_root = getLoop(traceMonitor, fragment->root->ip, treeInfo->globalShape);
VMFragment* root = (VMFragment*)fragment->root;
peer_root = getLoop(traceMonitor, root->ip, root->globalObj, root->globalShape);
JS_ASSERT(peer_root != NULL);
stable = deduceTypeStability(peer_root, &peer, demote);
@ -2628,7 +2635,7 @@ TraceRecorder::closeLoop(JSTraceMonitor* tm, bool& demote)
}
JS_REQUIRES_STACK void
TraceRecorder::joinEdgesToEntry(Fragmento* fragmento, Fragment* peer_root)
TraceRecorder::joinEdgesToEntry(Fragmento* fragmento, VMFragment* peer_root)
{
if (fragment->kind == LoopTrace) {
TreeInfo* ti;
@ -2703,7 +2710,8 @@ TraceRecorder::joinEdgesToEntry(Fragmento* fragmento, Fragment* peer_root)
}
}
debug_only_v(js_DumpPeerStability(traceMonitor, peer_root->ip, treeInfo->globalShape);)
debug_only_v(js_DumpPeerStability(traceMonitor, peer_root->ip,
peer_root->globalObj, peer_root->globalShape);)
}
/* Emit an always-exit guard and compile the tree (used for break statements). */
@ -2725,7 +2733,8 @@ TraceRecorder::endLoop(JSTraceMonitor* tm)
if (tm->fragmento->assm()->error() != nanojit::None)
return;
joinEdgesToEntry(tm->fragmento, getLoop(tm, fragment->root->ip, treeInfo->globalShape));
VMFragment* root = (VMFragment*)fragment->root;
joinEdgesToEntry(tm->fragmento, getLoop(tm, root->ip, root->globalObj, root->globalShape));
/* Note: this must always be done, in case we added new globals on trace and haven't yet
propagated those to linked and dependent trees. */
@ -3003,7 +3012,8 @@ js_DeleteRecorder(JSContext* cx)
/*
* If we ran out of memory, flush the code cache.
*/
if (JS_TRACE_MONITOR(cx).fragmento->assm()->error() == OutOMem) {
if (JS_TRACE_MONITOR(cx).fragmento->assm()->error() == OutOMem
|| js_OverfullFragmento(tm->fragmento, MAX_MEM_IN_MAIN_FRAGMENTO)) {
js_FlushJITCache(cx);
return false;
}
@ -3026,12 +3036,14 @@ js_CheckGlobalObjectShape(JSContext* cx, JSTraceMonitor* tm, JSObject* globalObj
uint32 globalShape = OBJ_SHAPE(globalObj);
if (tm->recorder) {
VMFragment* root = (VMFragment*)tm->recorder->getFragment()->root;
TreeInfo* ti = tm->recorder->getTreeInfo();
/* Check the global shape matches the recorder's treeinfo's shape. */
if (globalShape != ti->globalShape) {
if (globalObj != root->globalObj || globalShape != root->globalShape) {
AUDIT(globalShapeMismatchAtEntry);
debug_only_v(printf("Global shape mismatch (%u vs. %u), flushing cache.\n",
globalShape, ti->globalShape);)
debug_only_v(printf("Global object/shape mismatch (%p/%u vs. %p/%u), flushing cache.\n",
(void*)globalObj, globalShape, (void*)root->globalObj,
root->globalShape);)
return false;
}
if (shape)
@ -3046,12 +3058,13 @@ js_CheckGlobalObjectShape(JSContext* cx, JSTraceMonitor* tm, JSObject* globalObj
GlobalState &state = tm->globalStates[i];
if (state.globalShape == (uint32) -1) {
state.globalObj = globalObj;
state.globalShape = globalShape;
JS_ASSERT(state.globalSlots);
JS_ASSERT(state.globalSlots->length() == 0);
}
if (tm->globalStates[i].globalShape == globalShape) {
if (state.globalObj == globalObj && state.globalShape == globalShape) {
if (shape)
*shape = globalShape;
if (slots)
@ -3296,12 +3309,11 @@ js_SynthesizeFrame(JSContext* cx, const FrameInfo& fi)
JS_REQUIRES_STACK bool
js_RecordTree(JSContext* cx, JSTraceMonitor* tm, Fragment* f, jsbytecode* outer,
uint32 globalShape, SlotList* globalSlots)
JSObject* globalObj, uint32 globalShape, SlotList* globalSlots)
{
JS_ASSERT(f->root == f);
/* Make sure the global type map didn't change on us. */
JSObject* globalObj = JS_GetGlobalForObject(cx, cx->fp->scopeChain);
if (!js_CheckGlobalObjectShape(cx, tm, globalObj)) {
js_FlushJITCache(cx);
return false;
@ -3313,7 +3325,7 @@ js_RecordTree(JSContext* cx, JSTraceMonitor* tm, Fragment* f, jsbytecode* outer,
while (f->code() && f->peer)
f = f->peer;
if (f->code())
f = getAnchor(&JS_TRACE_MONITOR(cx), f->root->ip, globalShape);
f = getAnchor(&JS_TRACE_MONITOR(cx), f->root->ip, globalObj, globalShape);
if (!f) {
js_FlushJITCache(cx);
@ -3323,7 +3335,8 @@ js_RecordTree(JSContext* cx, JSTraceMonitor* tm, Fragment* f, jsbytecode* outer,
f->root = f;
f->lirbuf = tm->lirbuf;
if (f->lirbuf->outOMem()) {
if (f->lirbuf->outOMem() ||
js_OverfullFragmento(tm->fragmento, MAX_MEM_IN_MAIN_FRAGMENTO)) {
js_FlushJITCache(cx);
debug_only_v(printf("Out of memory recording new tree, flushing cache.\n");)
return false;
@ -3332,7 +3345,7 @@ js_RecordTree(JSContext* cx, JSTraceMonitor* tm, Fragment* f, jsbytecode* outer,
JS_ASSERT(!f->code() && !f->vmprivate);
/* setup the VM-private treeInfo structure for this fragment */
TreeInfo* ti = new (&gc) TreeInfo(f, globalShape, globalSlots);
TreeInfo* ti = new (&gc) TreeInfo(f, globalSlots);
/* capture the coerced type of each active slot in the type map */
ti->typeMap.captureTypes(cx, *globalSlots, 0/*callDepth*/);
@ -3342,7 +3355,8 @@ js_RecordTree(JSContext* cx, JSTraceMonitor* tm, Fragment* f, jsbytecode* outer,
since we are trying to stabilize something without properly connecting peer edges. */
#ifdef DEBUG
TreeInfo* ti_other;
for (Fragment* peer = getLoop(tm, f->root->ip, globalShape); peer != NULL; peer = peer->peer) {
for (Fragment* peer = getLoop(tm, f->root->ip, globalObj, globalShape); peer != NULL;
peer = peer->peer) {
if (!peer->code() || peer == f)
continue;
ti_other = (TreeInfo*)peer->vmprivate;
@ -3388,7 +3402,7 @@ JS_REQUIRES_STACK static bool
js_AttemptToStabilizeTree(JSContext* cx, VMSideExit* exit, jsbytecode* outer)
{
JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
Fragment* from = exit->from->root;
VMFragment* from = (VMFragment*)exit->from->root;
TreeInfo* from_ti = (TreeInfo*)from->vmprivate;
JS_ASSERT(exit->from->root->code());
@ -3466,7 +3480,7 @@ js_AttemptToStabilizeTree(JSContext* cx, VMSideExit* exit, jsbytecode* outer)
tail = &uexit->next;
}
JS_ASSERT(bound);
debug_only_v(js_DumpPeerStability(tm, f->ip, from_ti->globalShape);)
debug_only_v(js_DumpPeerStability(tm, f->ip, from->globalObj, from->globalShape);)
break;
} else if (undemote) {
/* The original tree is unconnectable, so trash it. */
@ -3478,7 +3492,9 @@ js_AttemptToStabilizeTree(JSContext* cx, VMSideExit* exit, jsbytecode* outer)
if (bound)
return false;
return js_RecordTree(cx, tm, from->first, outer, from_ti->globalShape, from_ti->globalSlots);
VMFragment* root = (VMFragment*)from->root;
return js_RecordTree(cx, tm, from->first, outer, root->globalObj, root->globalShape,
from_ti->globalSlots);
}
static JS_REQUIRES_STACK bool
@ -3552,7 +3568,6 @@ js_RecordLoopEdge(JSContext* cx, TraceRecorder* r, uintN& inlineCallCount)
#endif
JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
TreeInfo* ti = r->getTreeInfo();
/* Process deep abort requests. */
if (r->wasDeepAborted()) {
@ -3561,9 +3576,11 @@ js_RecordLoopEdge(JSContext* cx, TraceRecorder* r, uintN& inlineCallCount)
}
JS_ASSERT(r->getFragment() && !r->getFragment()->lastIns);
VMFragment* root = (VMFragment*)r->getFragment()->root;
/* Does this branch go to an inner loop? */
Fragment* f = getLoop(&JS_TRACE_MONITOR(cx), cx->fp->regs->pc, ti->globalShape);
Fragment* f = getLoop(&JS_TRACE_MONITOR(cx), cx->fp->regs->pc,
root->globalObj, root->globalShape);
if (!f) {
/* Not an inner loop we can call, abort trace. */
AUDIT(returnToDifferentLoopHeader);
@ -3619,13 +3636,13 @@ js_RecordLoopEdge(JSContext* cx, TraceRecorder* r, uintN& inlineCallCount)
f = empty;
if (!f) {
f = getAnchor(tm, cx->fp->regs->pc, globalShape);
f = getAnchor(tm, cx->fp->regs->pc, globalObj, globalShape);
if (!f) {
js_FlushJITCache(cx);
return false;
}
}
return js_RecordTree(cx, tm, f, outer, globalShape, globalSlots);
return js_RecordTree(cx, tm, f, outer, globalObj, globalShape, globalSlots);
}
r->prepareTreeCall(f);
@ -3852,7 +3869,7 @@ static JS_REQUIRES_STACK VMSideExit*
js_ExecuteTree(JSContext* cx, Fragment* f, uintN& inlineCallCount,
VMSideExit** innermostNestedGuardp)
{
JS_ASSERT(f->code() && f->vmprivate);
JS_ASSERT(f->root == f && f->code() && f->vmprivate);
JS_ASSERT(cx->builtinStatus == 0);
JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
@ -3863,7 +3880,8 @@ js_ExecuteTree(JSContext* cx, Fragment* f, uintN& inlineCallCount,
unsigned globalFrameSize = STOBJ_NSLOTS(globalObj);
/* Make sure the global object is sane. */
JS_ASSERT(!ngslots || (OBJ_SHAPE(JS_GetGlobalForObject(cx, cx->fp->scopeChain)) == ti->globalShape));
JS_ASSERT(!ngslots || (OBJ_SHAPE(JS_GetGlobalForObject(cx, cx->fp->scopeChain)) ==
((VMFragment*)f)->globalShape));
/* Make sure our caller replenished the double pool. */
JS_ASSERT(tm->reservedDoublePoolPtr >= tm->reservedDoublePool + MAX_NATIVE_STACK_SLOTS);
@ -3874,7 +3892,6 @@ js_ExecuteTree(JSContext* cx, Fragment* f, uintN& inlineCallCount,
/* Setup the interpreter state block, which is followed by the native global frame. */
InterpState* state = (InterpState*)alloca(sizeof(InterpState) + (globalFrameSize+1)*sizeof(double));
state->cx = cx;
state->globalObj = globalObj;
state->inlineCallCountp = &inlineCallCount;
state->innermostNestedGuardp = innermostNestedGuardp;
state->outermostTree = ti;
@ -4149,7 +4166,8 @@ LeaveTree(InterpState& state, VMSideExit* lr)
/* write back interned globals */
double* global = (double*)(&state + 1);
FlushNativeGlobalFrame(cx, ngslots, gslots, globalTypeMap, global);
JS_ASSERT(*(uint64*)&global[STOBJ_NSLOTS(state.globalObj)] == 0xdeadbeefdeadbeefLL);
JS_ASSERT(*(uint64*)&global[STOBJ_NSLOTS(JS_GetGlobalForObject(cx, cx->fp->scopeChain))] ==
0xdeadbeefdeadbeefLL);
/* write back native stack frame */
#ifdef DEBUG
@ -4225,9 +4243,9 @@ js_MonitorLoopEdge(JSContext* cx, uintN& inlineCallCount)
jsbytecode* pc = cx->fp->regs->pc;
Fragment* f = getLoop(tm, pc, globalShape);
Fragment* f = getLoop(tm, pc, globalObj, globalShape);
if (!f)
f = getAnchor(tm, pc, globalShape);
f = getAnchor(tm, pc, globalObj, globalShape);
if (!f) {
js_FlushJITCache(cx);
@ -4242,7 +4260,7 @@ js_MonitorLoopEdge(JSContext* cx, uintN& inlineCallCount)
return false;
/* We can give RecordTree the root peer. If that peer is already taken, it will
walk the peer list and find us a free slot or allocate a new tree if needed. */
return js_RecordTree(cx, tm, f->first, NULL, globalShape, globalSlots);
return js_RecordTree(cx, tm, f->first, NULL, globalObj, globalShape, globalSlots);
}
debug_only_v(printf("Looking for compat peer %d@%d, from %p (ip: %p)\n",
@ -4343,7 +4361,9 @@ TraceRecorder::monitorRecording(JSContext* cx, TraceRecorder* tr, JSOp op)
return JSMRS_STOP;
}
if (tr->lirbuf->outOMem()) {
if (tr->lirbuf->outOMem() ||
js_OverfullFragmento(JS_TRACE_MONITOR(cx).fragmento,
MAX_MEM_IN_MAIN_FRAGMENTO)) {
js_AbortRecording(cx, "no more LIR memory");
js_FlushJITCache(cx);
return JSMRS_STOP;
@ -4595,7 +4615,7 @@ js_InitJIT(JSTraceMonitor *tm)
if (!tm->fragmento) {
JS_ASSERT(!tm->reservedDoublePool);
Fragmento* fragmento = new (&gc) Fragmento(core, 24);
Fragmento* fragmento = new (&gc) Fragmento(core, 32);
verbose_only(fragmento->labels = new (&gc) LabelMap(core, NULL);)
tm->fragmento = fragmento;
tm->lirbuf = new (&gc) LirBuffer(fragmento, NULL);
@ -4611,7 +4631,7 @@ js_InitJIT(JSTraceMonitor *tm)
memset(tm->vmfragments, 0, sizeof(tm->vmfragments));
}
if (!tm->reFragmento) {
Fragmento* fragmento = new (&gc) Fragmento(core, 20);
Fragmento* fragmento = new (&gc) Fragmento(core, 32);
verbose_only(fragmento->labels = new (&gc) LabelMap(core, NULL);)
tm->reFragmento = fragmento;
tm->reLirBuf = new (&gc) LirBuffer(fragmento, NULL);
@ -4726,6 +4746,46 @@ js_PurgeScriptFragments(JSContext* cx, JSScript* script)
}
}
bool
js_OverfullFragmento(Fragmento *frago, size_t maxsz)
{
/*
* You might imagine the outOMem flag on the lirbuf is sufficient
* to model the notion of "running out of memory", but there are actually
* two separate issues involved:
*
* 1. The process truly running out of memory: malloc() or mmap()
* failed.
*
* 2. The limit we put on the "intended size" of the tracemonkey code
* cache, in pages, has been exceeded.
*
* Condition 1 doesn't happen very often, but we're obliged to try to
* safely shut down and signal the rest of spidermonkey when it
* does. Condition 2 happens quite regularly.
*
* Presently, the code in this file doesn't check the outOMem condition
* often enough, and frequently misuses the unchecked results of
* lirbuffer insertions on the asssumption that it will notice the
* outOMem flag "soon enough" when it returns to the monitorRecording
* function. This turns out to be a false assumption if we use outOMem
* to signal condition 2: we regularly provoke "passing our intended
* size" and regularly fail to notice it in time to prevent writing
* over the end of an artificially self-limited LIR buffer.
*
* To mitigate, though not completely solve, this problem, we're
* modeling the two forms of memory exhaustion *separately* for the
* time being: condition 1 is handled by the outOMem flag inside
* nanojit, and condition 2 is being handled independently *here*. So
* we construct our fragmentos to use all available memory they like,
* and only report outOMem to us when there is literally no OS memory
* left. Merely purging our cache when we hit our highwater mark is
* handled by the (few) callers of this function.
*
*/
return (frago->_stats.pages > (maxsz >> NJ_LOG2_PAGE_SIZE));
}
JS_REQUIRES_STACK void
js_FlushJITCache(JSContext* cx)
{
@ -6121,7 +6181,7 @@ TraceRecorder::guardNotGlobalObject(JSObject* obj, LIns* obj_ins)
{
if (obj == globalObj)
ABORT_TRACE("reference aliases global object");
guard(false, lir->ins2(LIR_eq, obj_ins, globalObj_ins), MISMATCH_EXIT);
guard(false, lir->ins2(LIR_eq, obj_ins, INS_CONSTPTR(globalObj)), MISMATCH_EXIT);
return true;
}
@ -6682,7 +6742,7 @@ TraceRecorder::newArray(JSObject *ctor, uint32 argc, jsval *argv, jsval *rval)
// arr->dslots[i] = box_jsval(vp[i]); for i in 0..argc
LIns *dslots_ins = NULL;
for (uint32 i = 0; i < argc; i++) {
for (uint32 i = 0; i < argc && !lirbuf->outOMem(); i++) {
LIns *elt_ins = get(argv + i);
box_jsval(argv[i], elt_ins);
stobj_set_dslot(arr_ins, i, dslots_ins, elt_ins, "set_array_elt");
@ -7535,7 +7595,7 @@ TraceRecorder::record_JSOP_CALLNAME()
if (!activeCallOrGlobalSlot(obj, vp))
return false;
stack(0, get(vp));
stack(1, globalObj_ins);
stack(1, INS_CONSTPTR(globalObj));
return true;
}
@ -9681,14 +9741,14 @@ TraceRecorder::record_JSOP_LOOP()
#ifdef JS_JIT_SPEW
/* Prints information about entry typemaps and unstable exits for all peers at a PC */
void
js_DumpPeerStability(JSTraceMonitor* tm, const void* ip, uint32 globalShape)
js_DumpPeerStability(JSTraceMonitor* tm, const void* ip, JSObject* globalObj, uint32 globalShape)
{
Fragment* f;
TreeInfo* ti;
bool looped = false;
unsigned length = 0;
for (f = getLoop(tm, ip, globalShape); f != NULL; f = f->peer) {
for (f = getLoop(tm, ip, globalObj, globalShape); f != NULL; f = f->peer) {
if (!f->vmprivate)
continue;
printf("fragment %p:\nENTRY: ", (void*)f);

View File

@ -298,7 +298,6 @@ public:
unsigned maxCallDepth;
TypeMap typeMap;
unsigned nStackTypes;
uint32 globalShape;
SlotList* globalSlots;
/* Dependent trees must be trashed if this tree dies, and updated on missing global types */
Queue<nanojit::Fragment*> dependentTrees;
@ -314,7 +313,6 @@ public:
#endif
TreeInfo(nanojit::Fragment* _fragment,
uint32 _globalShape,
SlotList* _globalSlots)
: fragment(_fragment),
script(NULL),
@ -322,7 +320,6 @@ public:
nativeStackBase(0),
maxCallDepth(0),
nStackTypes(0),
globalShape(_globalShape),
globalSlots(_globalSlots),
branchCount(0),
unstableExits(NULL)
@ -356,7 +353,6 @@ struct InterpState
// call exit guard mismatched
void* rpAtLastTreeCall; // value of rp at innermost tree call guard
TreeInfo* outermostTree; // the outermost tree we initially invoked
JSObject* globalObj; // pointer to the global object
double* stackBase; // native stack base
FrameInfo** callstackBase; // call stack base
uintN* inlineCallCountp; // inline call count counter
@ -401,7 +397,6 @@ class TraceRecorder : public avmplus::GCObject {
nanojit::LIns* cx_ins;
nanojit::LIns* eos_ins;
nanojit::LIns* eor_ins;
nanojit::LIns* globalObj_ins;
nanojit::LIns* rval_ins;
nanojit::LIns* inner_sp_ins;
nanojit::LIns* invokevp_ins;
@ -572,7 +567,7 @@ public:
JS_REQUIRES_STACK void closeLoop(JSTraceMonitor* tm, bool& demote);
JS_REQUIRES_STACK void endLoop(JSTraceMonitor* tm);
JS_REQUIRES_STACK void joinEdgesToEntry(nanojit::Fragmento* fragmento,
nanojit::Fragment* peer_root);
VMFragment* peer_root);
void blacklist() { fragment->blacklist(); }
JS_REQUIRES_STACK bool adjustCallerTypes(nanojit::Fragment* f);
JS_REQUIRES_STACK nanojit::Fragment* findNestedCompatiblePeer(nanojit::Fragment* f,
@ -641,6 +636,9 @@ js_FinishJIT(JSTraceMonitor *tm);
extern void
js_PurgeScriptFragments(JSContext* cx, JSScript* script);
extern bool
js_OverfullFragmento(nanojit::Fragmento *frago, size_t maxsz);
extern void
js_FlushJITCache(JSContext* cx);

View File

@ -82,7 +82,6 @@
* - XXXbe patrol
* - Fuse objects and their JSXML* private data into single GC-things
* - fix function::foo vs. x.(foo == 42) collision using proper namespacing
* - fix the !TCF_HAS_DEFXMLNS optimization in js_FoldConstants
* - JSCLASS_DOCUMENT_OBSERVER support -- live two-way binding to Gecko's DOM!
* - JS_TypeOfValue sure could use a cleaner interface to "types"
*/
@ -5439,10 +5438,9 @@ JS_FRIEND_DATA(JSObjectOps) js_XMLObjectOps = {
xml_enumerate, js_CheckAccess,
NULL, NULL,
NULL, NULL,
NULL, xml_hasInstance,
js_SetProtoOrParent, js_SetProtoOrParent,
js_TraceObject, xml_clear,
NULL, NULL
xml_hasInstance, js_TraceObject,
xml_clear, NULL,
NULL
};
static JSObjectOps *

View File

@ -48,6 +48,7 @@
#include <string.h>
#include "jsj_private.h" /* LiveConnect internals */
#include "jsobj.h"
/* Shorthands for ASCII (7-bit) decimal and hex conversion. */
#define JS7_ISDEC(c) (((c) >= '0') && ((c) <= '9'))
@ -423,11 +424,8 @@ JSObjectOps JavaArray_ops = {
NULL, /* dropProperty */
NULL, /* call */
NULL, /* construct */
NULL, /* xdrObject */
NULL, /* hasInstance */
NULL, /* setProto */
NULL, /* setParent */
NULL, /* mark */
NULL, /* trace */
NULL, /* clear */
jsj_wrapper_getRequiredSlot, /* getRequiredSlot */
jsj_wrapper_setRequiredSlot /* setRequiredSlot */

View File

@ -57,6 +57,7 @@
#include <string.h>
#include "jsj_private.h" /* LiveConnect internals */
#include "jsobj.h"
static JSBool
JavaClass_convert(JSContext *cx, JSObject *obj, JSType type, jsval *vp)
@ -548,11 +549,8 @@ JSObjectOps JavaClass_ops = {
NULL, /* dropProperty */
jsj_JavaConstructorWrapper, /* call */
jsj_JavaConstructorWrapper, /* construct */
NULL, /* xdrObject */
JavaClass_hasInstance, /* hasInstance */
NULL, /* setProto */
NULL, /* setParent */
NULL, /* mark */
NULL, /* trace */
NULL, /* clear */
jsj_wrapper_getRequiredSlot, /* getRequiredSlot */
jsj_wrapper_setRequiredSlot /* setRequiredSlot */

View File

@ -1057,11 +1057,8 @@ JSObjectOps JavaObject_ops = {
NULL, /* dropProperty */
NULL, /* call */
NULL, /* construct */
NULL, /* xdrObject */
NULL, /* hasInstance */
NULL, /* setProto */
NULL, /* setParent */
NULL, /* mark */
NULL, /* trace */
NULL, /* clear */
jsj_wrapper_getRequiredSlot, /* getRequiredSlot */
jsj_wrapper_setRequiredSlot /* setRequiredSlot */

View File

@ -50,7 +50,7 @@ namespace nanojit
static uint32_t calcSaneCacheSize(uint32_t in)
{
if (in < uint32_t(NJ_LOG2_PAGE_SIZE)) return NJ_LOG2_PAGE_SIZE; // at least 1 page
if (in > 30) return 30; // 1GB should be enough for anyone
if (in > 32) return 32; // 4GB should be enough for anyone
return in;
}

View File

@ -4774,6 +4774,23 @@ function testDenseArrayProp()
testDenseArrayProp.expected = "ok";
test(testDenseArrayProp);
function testNewWithNonNativeProto()
{
function f() { }
var a = f.prototype = [];
for (var i = 0; i < 5; i++)
var o = new f();
return Object.getPrototypeOf(o) === a && o.splice === Array.prototype.splice;
}
testNewWithNonNativeProto.expected = true;
testNewWithNonNativeProto.jitstats = {
recorderStarted: 1,
recorderAborted: 0,
sideExitIntoInterpreter: 1
};
test(testNewWithNonNativeProto);
/*****************************************************************************
* *
* _____ _ _ _____ ______ _____ _______ *