Bug 1055472 - Part 17: Make the Array constructor properly subclassable. (r=jorendorff, r=bhackett, r=terrence)

Eric Faust 2015-11-13 18:22:22 -08:00
parent 7f20e0e2e8
commit 83011e03de
12 changed files with 190 additions and 65 deletions
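
In behavioral terms, this part makes the prototype of a newly constructed array come from new.target instead of always being Array.prototype. A hedged sketch of the observable change, written against the shell's assertEq helper used by the tests in this commit (the names SubArray and C are illustrative):

class SubArray extends Array {}

let a = new SubArray(1, 2, 3);
assertEq(Object.getPrototypeOf(a), SubArray.prototype);
assertEq(Array.isArray(a), true);
assertEq(a.length, 3);

// An explicit new.target is honored as well (ES6 22.1.1 Array).
function C() {}
let b = Reflect.construct(Array, [], C);
assertEq(Object.getPrototypeOf(b), C.prototype);
assertEq(Array.isArray(b), true);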

View File

@@ -432,6 +432,13 @@ js::TraceRoot(JSTracer* trc, T* thingp, const char* name)
DispatchToTracer(trc, ConvertToBase(thingp), name);
}
template <typename T>
void
js::TraceRoot(JSTracer* trc, ReadBarriered<T>* thingp, const char* name)
{
TraceRoot(trc, thingp->unsafeGet(), name);
}
template <typename T>
void
js::TraceNullableRoot(JSTracer* trc, T* thingp, const char* name)
@@ -441,6 +448,13 @@ js::TraceNullableRoot(JSTracer* trc, T* thingp, const char* name)
DispatchToTracer(trc, ConvertToBase(thingp), name);
}
template <typename T>
void
js::TraceNullableRoot(JSTracer* trc, ReadBarriered<T>* thingp, const char* name)
{
TraceNullableRoot(trc, thingp->unsafeGet(), name);
}
template <typename T>
void
js::TraceRange(JSTracer* trc, size_t len, WriteBarrieredBase<T>* vec, const char* name)
@@ -473,7 +487,9 @@ js::TraceRootRange(JSTracer* trc, size_t len, T* vec, const char* name)
template void js::TraceManuallyBarrieredEdge<type>(JSTracer*, type*, const char*); \
template void js::TraceWeakEdge<type>(JSTracer*, WeakRef<type>*, const char*); \
template void js::TraceRoot<type>(JSTracer*, type*, const char*); \
template void js::TraceRoot<type>(JSTracer*, ReadBarriered<type>*, const char*); \
template void js::TraceNullableRoot<type>(JSTracer*, type*, const char*); \
template void js::TraceNullableRoot<type>(JSTracer*, ReadBarriered<type>*, const char*); \
template void js::TraceRange<type>(JSTracer*, size_t, WriteBarrieredBase<type>*, const char*); \
template void js::TraceRootRange<type>(JSTracer*, size_t, type*, const char*);
FOR_EACH_GC_POINTER_TYPE(INSTANTIATE_ALL_VALID_TRACE_FUNCTIONS)

View File

@@ -63,12 +63,20 @@ template <typename T>
void
TraceRoot(JSTracer* trc, T* thingp, const char* name);
template <typename T>
void
TraceRoot(JSTracer* trc, ReadBarriered<T>* thingp, const char* name);
// Identical to TraceRoot, except that this variant will not crash if |*thingp|
// is null.
template <typename T>
void
TraceNullableRoot(JSTracer* trc, T* thingp, const char* name);
template <typename T>
void
TraceNullableRoot(JSTracer* trc, ReadBarriered<T>* thingp, const char* name);
// Like TraceEdge, but for edges that do not use one of the automatic barrier
// classes and, thus, must be treated specially for moving GC. This method is
// separate from TraceEdge to make accidental use of such edges more obvious.

View File

@@ -0,0 +1,17 @@
function f(v, expected) {
assertEq(v.prop, expected);
};
class SubArrayA extends Array {
}
class SubArrayB extends Array {
}
SubArrayA.prototype.prop = "A";
SubArrayB.prototype.prop = "B";
var a = new SubArrayA();
var b = new SubArrayB();
for (let i = 0; i < 10; i++) {
f(a, "A");
f(b, "B");
}

View File

@@ -3060,9 +3060,9 @@ IsArrayConstructor(const Value& v)
}
static bool
ArrayFromCallArgs(JSContext* cx, CallArgs& args)
ArrayFromCallArgs(JSContext* cx, CallArgs& args, HandleObject proto = nullptr)
{
JSObject* obj = NewCopiedArrayForCallingAllocationSite(cx, args.array(), args.length());
JSObject* obj = NewCopiedArrayForCallingAllocationSite(cx, args.array(), args.length(), proto);
if (!obj)
return false;
@@ -3183,8 +3183,12 @@ js::ArrayConstructor(JSContext* cx, unsigned argc, Value* vp)
{
CallArgs args = CallArgsFromVp(argc, vp);
RootedObject proto(cx);
if (!GetPrototypeFromCallableConstructor(cx, args, &proto))
return false;
if (args.length() != 1 || !args[0].isNumber())
return ArrayFromCallArgs(cx, args);
return ArrayFromCallArgs(cx, args, proto);
uint32_t length;
if (args[0].isInt32()) {
@@ -3203,7 +3207,7 @@ js::ArrayConstructor(JSContext* cx, unsigned argc, Value* vp)
}
}
JSObject* obj = NewPartlyAllocatedArrayForCallingAllocationSite(cx, length);
JSObject* obj = NewPartlyAllocatedArrayForCallingAllocationSite(cx, length, proto);
if (!obj)
return false;
@@ -3310,12 +3314,6 @@ EnsureNewArrayElements(ExclusiveContext* cx, ArrayObject* obj, uint32_t length)
return true;
}
static bool
NewArrayIsCachable(ExclusiveContext* cxArg, NewObjectKind newKind)
{
return cxArg->isJSContext() && newKind == GenericObject;
}
template <uint32_t maxLength>
static MOZ_ALWAYS_INLINE ArrayObject*
NewArray(ExclusiveContext* cxArg, uint32_t length,
@@ -3325,13 +3323,18 @@ NewArray(ExclusiveContext* cxArg, uint32_t length,
MOZ_ASSERT(CanBeFinalizedInBackground(allocKind, &ArrayObject::class_));
allocKind = GetBackgroundAllocKind(allocKind);
bool isCachable = NewArrayIsCachable(cxArg, newKind);
RootedObject proto(cxArg, protoArg);
if (!proto && !GetBuiltinPrototype(cxArg, JSProto_Array, &proto))
return nullptr;
Rooted<TaggedProto> taggedProto(cxArg, TaggedProto(proto));
bool isCachable = NewObjectWithTaggedProtoIsCachable(cxArg, taggedProto, newKind, &ArrayObject::class_);
if (isCachable) {
JSContext* cx = cxArg->asJSContext();
JSRuntime* rt = cx->runtime();
NewObjectCache& cache = rt->newObjectCache;
NewObjectCache::EntryIndex entry = -1;
if (cache.lookupGlobal(&ArrayObject::class_, cx->global(), allocKind, &entry)) {
if (cache.lookupProto(&ArrayObject::class_, proto, allocKind, &entry)) {
gc::InitialHeap heap = GetInitialHeap(newKind, &ArrayObject::class_);
AutoSetNewObjectMetadata metadata(cx);
JSObject* obj = cache.newObjectFromHit(cx, entry, heap);
@@ -3350,10 +3353,6 @@ NewArray(ExclusiveContext* cxArg, uint32_t length,
}
}
RootedObject proto(cxArg, protoArg);
if (!proto && !GetBuiltinPrototype(cxArg, JSProto_Array, &proto))
return nullptr;
RootedObjectGroup group(cxArg, ObjectGroup::defaultNewGroup(cxArg, &ArrayObject::class_,
TaggedProto(proto)));
if (!group)
@@ -3389,8 +3388,8 @@ NewArray(ExclusiveContext* cxArg, uint32_t length,
if (isCachable) {
NewObjectCache& cache = cxArg->asJSContext()->runtime()->newObjectCache;
NewObjectCache::EntryIndex entry = -1;
cache.lookupGlobal(&ArrayObject::class_, cxArg->global(), allocKind, &entry);
cache.fillGlobal(entry, &ArrayObject::class_, cxArg->global(), allocKind, arr);
cache.lookupProto(&ArrayObject::class_, proto, allocKind, &entry);
cache.fillProto(entry, &ArrayObject::class_, taggedProto, allocKind, arr);
}
if (maxLength > 0 && !EnsureNewArrayElements(cxArg, arr, std::min(maxLength, length)))
@@ -3490,7 +3489,9 @@ js::NewDenseCopyOnWriteArray(JSContext* cx, HandleArrayObject templateObject, gc
}
// Return a new boxed or unboxed array with the specified length and allocated
// capacity (up to maxLength), using the specified group if possible.
// capacity (up to maxLength), using the specified group if possible. If the
// specified group cannot be used, ensure that the created array at least has
// the given [[Prototype]].
template <uint32_t maxLength>
static inline JSObject*
NewArrayTryUseGroup(ExclusiveContext* cx, HandleObjectGroup group, size_t length,
@@ -3504,14 +3505,14 @@ NewArrayTryUseGroup(ExclusiveContext* cx, HandleObjectGroup group, size_t length
if (group->shouldPreTenure() || group->maybePreliminaryObjects())
newKind = TenuredObject;
RootedObject proto(cx, group->proto().toObject());
if (group->maybeUnboxedLayout()) {
if (length > UnboxedArrayObject::MaximumCapacity)
return NewArray<maxLength>(cx, length, nullptr, newKind);
return NewArray<maxLength>(cx, length, proto, newKind);
return UnboxedArrayObject::create(cx, group, length, newKind, maxLength);
}
ArrayObject* res = NewArray<maxLength>(cx, length, nullptr, newKind);
ArrayObject* res = NewArray<maxLength>(cx, length, proto, newKind);
if (!res)
return nullptr;
@@ -3590,9 +3591,9 @@ js::NewFullyAllocatedArrayForCallingAllocationSite(JSContext* cx, size_t length,
}
JSObject*
js::NewPartlyAllocatedArrayForCallingAllocationSite(JSContext* cx, size_t length)
js::NewPartlyAllocatedArrayForCallingAllocationSite(JSContext* cx, size_t length, HandleObject proto)
{
RootedObjectGroup group(cx, ObjectGroup::callingAllocationSiteGroup(cx, JSProto_Array));
RootedObjectGroup group(cx, ObjectGroup::callingAllocationSiteGroup(cx, JSProto_Array, proto));
if (!group)
return nullptr;
return NewArrayTryUseGroup<ArrayObject::EagerAllocationMaxLength>(cx, group, length);
@@ -3652,9 +3653,10 @@ js::NewCopiedArrayTryUseGroup(ExclusiveContext* cx, HandleObjectGroup group,
}
JSObject*
js::NewCopiedArrayForCallingAllocationSite(JSContext* cx, const Value* vp, size_t length)
js::NewCopiedArrayForCallingAllocationSite(JSContext* cx, const Value* vp, size_t length,
HandleObject proto /* = nullptr */)
{
RootedObjectGroup group(cx, ObjectGroup::callingAllocationSiteGroup(cx, JSProto_Array));
RootedObjectGroup group(cx, ObjectGroup::callingAllocationSiteGroup(cx, JSProto_Array, proto));
if (!group)
return nullptr;
return NewCopiedArrayTryUseGroup(cx, group, vp, length);

View File

@@ -101,7 +101,7 @@ NewFullyAllocatedArrayForCallingAllocationSite(JSContext* cx, size_t length,
bool forceAnalyze = false);
extern JSObject*
NewPartlyAllocatedArrayForCallingAllocationSite(JSContext* cx, size_t length);
NewPartlyAllocatedArrayForCallingAllocationSite(JSContext* cx, size_t length, HandleObject proto);
enum class ShouldUpdateTypes
{
@@ -116,7 +116,8 @@ NewCopiedArrayTryUseGroup(ExclusiveContext* cx, HandleObjectGroup group,
ShouldUpdateTypes updateTypes = ShouldUpdateTypes::Update);
extern JSObject*
NewCopiedArrayForCallingAllocationSite(JSContext* cx, const Value* vp, size_t length);
NewCopiedArrayForCallingAllocationSite(JSContext* cx, const Value* vp, size_t length,
HandleObject proto = nullptr);
/*
* Determines whether a write to the given element on |obj| should fail because

View File

@@ -689,9 +689,9 @@ NewObjectCache::fillProto(EntryIndex entry, const Class* clasp, js::TaggedProto
return fill(entry, clasp, proto.raw(), kind, obj);
}
static bool
NewObjectWithTaggedProtoIsCachable(ExclusiveContext* cxArg, Handle<TaggedProto> proto,
NewObjectKind newKind, const Class* clasp)
bool
js::NewObjectWithTaggedProtoIsCachable(ExclusiveContext* cxArg, Handle<TaggedProto> proto,
NewObjectKind newKind, const Class* clasp)
{
return cxArg->isJSContext() &&
proto.isObject() &&

View File

@@ -1089,6 +1089,10 @@ GetInitialHeap(NewObjectKind newKind, const Class* clasp)
return gc::DefaultHeap;
}
bool
NewObjectWithTaggedProtoIsCachable(ExclusiveContext* cxArg, Handle<TaggedProto> proto,
NewObjectKind newKind, const Class* clasp);
// ES6 9.1.15 GetPrototypeFromConstructor.
extern bool
GetPrototypeFromConstructor(JSContext* cx, js::HandleObject newTarget, js::MutableHandleObject proto);
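
For reference, a hedged JavaScript sketch of the spec operation named in the comment above (ES6 9.1.15 GetPrototypeFromConstructor), the same operation the updated ArrayConstructor relies on via GetPrototypeFromCallableConstructor in the jsarray.cpp hunk earlier in this diff. The helper name and the intrinsicDefaultProto parameter are illustrative, and the cross-realm GetFunctionRealm step is elided:

// Step 3: read the constructor's (i.e. new.target's) "prototype" property.
// Step 5: if that value is not an object, fall back to the realm's intrinsic
// default, e.g. %ArrayPrototype% for the Array constructor.
function getPrototypeFromConstructor(constructor, intrinsicDefaultProto) {
    let proto = constructor.prototype;
    if ((typeof proto !== "object" && typeof proto !== "function") || proto === null)
        proto = intrinsicDefaultProto;
    return proto;
}

// The prototype a new array receives therefore follows new.target:
function C() {}
assertEq(getPrototypeFromConstructor(C, Array.prototype), C.prototype);
C.prototype = 17; // not an object, so the intrinsic default wins
assertEq(getPrototypeFromConstructor(C, Array.prototype), Array.prototype);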

View File

@@ -12,6 +12,7 @@ function testBuiltin(builtin, ...args) {
assertEq(instance instanceof inst, true);
assertEq(instance instanceof builtin, true);
assertEq(instance.called, true);
return instance;
}
function testBuiltinTypedArrays() {
@@ -33,6 +34,21 @@ function testBuiltinTypedArrays() {
}
}
function testBuiltinArray() {
let argsLists = [
[],
[15],
[3.0],
["non-length one-arg"],
[5, 10, 15, "these are elements"]
];
for (let args of argsLists) {
let instance = testBuiltin(Array, ...args);
assertEq(Array.isArray(instance), true);
}
}
testBuiltin(Function);
testBuiltin(Object);
testBuiltin(Boolean);
@@ -59,6 +75,7 @@ testBuiltinTypedArrays();
testBuiltin(DataView, new ArrayBuffer());
testBuiltin(DataView, new (newGlobal().ArrayBuffer)());
testBuiltin(String);
testBuiltinArray();
`;

View File

@@ -0,0 +1,29 @@
var test = `
class foo extends Array { }
function testArrs(arrs) {
for (let arr of arrs) {
assertEq(Object.getPrototypeOf(arr), foo.prototype);
}
}
var arrs = [];
for (var i = 0; i < 25; i++)
arrs.push(new foo(1));
testArrs(arrs);
arrs[0].nonIndexedProp = "uhoh";
arrs.push(new foo(1));
testArrs(arrs);
`;
if (classesEnabled())
eval(test);
if (typeof reportCompare === 'function')
reportCompare(0,0,"OK");

View File

@@ -101,9 +101,7 @@ for (var v of SOME_PRIMITIVE_VALUES.concat(nonConstructors)) {
// creates a real array object.
function someConstructor() {}
var result = Reflect.construct(Array, [], someConstructor);
assertEq(Reflect.getPrototypeOf(result),
Array.prototype, // should be someConstructor.prototype, per ES6 22.1.1.1 Array()
"Congratulations on implementing Array subclassing! Fix this test for +1 karma point.");
assertEq(Reflect.getPrototypeOf(result), someConstructor.prototype);
assertEq(result.length, 0);
assertEq(Array.isArray(result), true);

View File

@@ -1345,40 +1345,69 @@ ObjectGroup::newPlainObject(ExclusiveContext* cx, IdValuePair* properties, size_
// ObjectGroupCompartment AllocationSiteTable
/////////////////////////////////////////////////////////////////////
struct ObjectGroupCompartment::AllocationSiteKey : public DefaultHasher<AllocationSiteKey> {
JSScript* script;
struct ObjectGroupCompartment::AllocationSiteKey : public DefaultHasher<AllocationSiteKey>,
public JS::Traceable {
ReadBarrieredScript script;
uint32_t offset : 24;
JSProtoKey kind : 8;
ReadBarrieredObject proto;
static const uint32_t OFFSET_LIMIT = (1 << 23);
AllocationSiteKey() { mozilla::PodZero(this); }
AllocationSiteKey(JSScript* script_, uint32_t offset_, JSProtoKey kind_, JSObject* proto_)
: script(script_), offset(offset_), kind(kind_), proto(proto_)
{
MOZ_ASSERT(offset_ < OFFSET_LIMIT);
}
AllocationSiteKey(AllocationSiteKey&& key)
: script(mozilla::Move(key.script)),
offset(key.offset),
kind(key.kind),
proto(mozilla::Move(key.proto))
{ }
AllocationSiteKey(const AllocationSiteKey& key)
: script(key.script),
offset(key.offset),
kind(key.kind),
proto(key.proto)
{ }
static inline uint32_t hash(AllocationSiteKey key) {
return uint32_t(size_t(key.script->offsetToPC(key.offset)) ^ key.kind);
return uint32_t(size_t(key.script->offsetToPC(key.offset)) ^ key.kind ^
MovableCellHasher<JSObject*>::hash(key.proto));
}
static inline bool match(const AllocationSiteKey& a, const AllocationSiteKey& b) {
return a.script == b.script && a.offset == b.offset && a.kind == b.kind;
return DefaultHasher<JSScript*>::match(a.script, b.script) &&
a.offset == b.offset &&
a.kind == b.kind &&
MovableCellHasher<JSObject*>::match(a.proto, b.proto);
}
static void trace(AllocationSiteKey* key, JSTracer* trc) {
TraceRoot(trc, &key->script, "AllocationSiteKey script");
TraceNullableRoot(trc, &key->proto, "AllocationSiteKey proto");
}
};
/* static */ ObjectGroup*
ObjectGroup::allocationSiteGroup(JSContext* cx, JSScript* script, jsbytecode* pc,
JSProtoKey kind)
ObjectGroup::allocationSiteGroup(JSContext* cx, JSScript* scriptArg, jsbytecode* pc,
JSProtoKey kind, HandleObject protoArg /* = nullptr */)
{
MOZ_ASSERT(!useSingletonForAllocationSite(script, pc, kind));
MOZ_ASSERT(!useSingletonForAllocationSite(scriptArg, pc, kind));
MOZ_ASSERT_IF(protoArg, kind == JSProto_Array);
uint32_t offset = script->pcToOffset(pc);
uint32_t offset = scriptArg->pcToOffset(pc);
if (offset >= ObjectGroupCompartment::AllocationSiteKey::OFFSET_LIMIT)
if (offset >= ObjectGroupCompartment::AllocationSiteKey::OFFSET_LIMIT) {
if (protoArg)
return defaultNewGroup(cx, GetClassForProtoKey(kind), TaggedProto(protoArg));
return defaultNewGroup(cx, kind);
ObjectGroupCompartment::AllocationSiteKey key;
key.script = script;
key.offset = offset;
key.kind = kind;
}
ObjectGroupCompartment::AllocationSiteTable*& table =
cx->compartment()->objectGroups.allocationSiteTable;
@@ -1393,16 +1422,20 @@ ObjectGroup::allocationSiteGroup(JSContext* cx, JSScript* script, jsbytecode* pc
}
}
RootedScript script(cx, scriptArg);
RootedObject proto(cx, protoArg);
if (!proto && kind != JSProto_Null && !GetBuiltinPrototype(cx, kind, &proto))
return nullptr;
Rooted<ObjectGroupCompartment::AllocationSiteKey> key(cx,
ObjectGroupCompartment::AllocationSiteKey(script, offset, kind, proto));
ObjectGroupCompartment::AllocationSiteTable::AddPtr p = table->lookupForAdd(key);
if (p)
return p->value();
AutoEnterAnalysis enter(cx);
RootedObject proto(cx);
if (kind != JSProto_Null && !GetBuiltinPrototype(cx, kind, &proto))
return nullptr;
Rooted<TaggedProto> tagged(cx, TaggedProto(proto));
ObjectGroup* res = ObjectGroupCompartment::makeGroup(cx, GetClassForProtoKey(kind), tagged,
OBJECT_FLAG_FROM_ALLOCATION_SITE);
@@ -1447,10 +1480,7 @@ void
ObjectGroupCompartment::replaceAllocationSiteGroup(JSScript* script, jsbytecode* pc,
JSProtoKey kind, ObjectGroup* group)
{
AllocationSiteKey key;
key.script = script;
key.offset = script->pcToOffset(pc);
key.kind = kind;
AllocationSiteKey key(script, script->pcToOffset(pc), kind, group->proto().toObjectOrNull());
AllocationSiteTable::Ptr p = allocationSiteTable->lookup(key);
MOZ_RELEASE_ASSERT(p);
@@ -1463,12 +1493,16 @@ ObjectGroupCompartment::replaceAllocationSiteGroup(JSScript* script, jsbytecode*
}
/* static */ ObjectGroup*
ObjectGroup::callingAllocationSiteGroup(JSContext* cx, JSProtoKey key)
ObjectGroup::callingAllocationSiteGroup(JSContext* cx, JSProtoKey key, HandleObject proto)
{
MOZ_ASSERT_IF(proto, key == JSProto_Array);
jsbytecode* pc;
RootedScript script(cx, cx->currentScript(&pc));
if (script)
return allocationSiteGroup(cx, script, pc, key);
return allocationSiteGroup(cx, script, pc, key, proto);
if (proto)
return defaultNewGroup(cx, GetClassForProtoKey(key), TaggedProto(proto));
return defaultNewGroup(cx, key);
}
@@ -1753,13 +1787,11 @@ ObjectGroupCompartment::sweep(FreeOp* fop)
if (allocationSiteTable) {
for (AllocationSiteTable::Enum e(*allocationSiteTable); !e.empty(); e.popFront()) {
AllocationSiteKey key = e.front().key();
bool keyDying = IsAboutToBeFinalizedUnbarriered(&key.script);
bool keyDying = IsAboutToBeFinalized(&e.front().mutableKey().script) ||
(e.front().key().proto && IsAboutToBeFinalized(&e.front().mutableKey().proto));
bool valDying = IsAboutToBeFinalized(&e.front().value());
if (keyDying || valDying)
e.removeFront();
else if (key.script != e.front().key().script)
e.rekeyFront(key);
}
}

View File

@@ -510,10 +510,11 @@ class ObjectGroup : public gc::TenuredCell
// Get a non-singleton group to use for objects created at the specified
// allocation site.
static ObjectGroup* allocationSiteGroup(JSContext* cx, JSScript* script, jsbytecode* pc,
JSProtoKey key);
JSProtoKey key, HandleObject proto = nullptr);
// Get a non-singleton group to use for objects created in a JSNative call.
static ObjectGroup* callingAllocationSiteGroup(JSContext* cx, JSProtoKey key);
static ObjectGroup* callingAllocationSiteGroup(JSContext* cx, JSProtoKey key,
HandleObject proto = nullptr);
// Set the group or singleton-ness of an object created for an allocation site.
static bool