Bug 1273276 - Rename HeapPtr to GCPtr; r=jonco

--HG--
extra : rebase_source : 3e7bb83f9c648611312f04bcd50476dfc8a5b74b
Terrence Cole 2016-05-18 12:03:23 -07:00
parent daf024be7d
commit 3bffd39627
46 changed files with 390 additions and 394 deletions

View File

@ -467,8 +467,8 @@ class Module : public mozilla::LinkedListElement<Module>
struct ImportExit {
void* code;
jit::BaselineScript* baselineScript;
HeapPtrFunction fun;
static_assert(sizeof(HeapPtrFunction) == sizeof(void*), "for JIT access");
GCPtrFunction fun;
static_assert(sizeof(GCPtrFunction) == sizeof(void*), "for JIT access");
};
struct EntryArg {
uint64_t lo;
@ -486,7 +486,7 @@ class Module : public mozilla::LinkedListElement<Module>
typedef Vector<FuncPtrTable, 0, SystemAllocPolicy> FuncPtrTableVector;
typedef Vector<CacheableChars, 0, SystemAllocPolicy> FuncLabelVector;
typedef RelocatablePtrArrayBufferObjectMaybeShared BufferPtr;
typedef HeapPtr<WasmModuleObject*> ModuleObjectPtr;
typedef GCPtr<WasmModuleObject*> ModuleObjectPtr;
// Initialized when constructed:
const UniqueConstModuleData module_;
@ -538,7 +538,7 @@ class Module : public mozilla::LinkedListElement<Module>
virtual void addSizeOfMisc(MallocSizeOf mallocSizeOf, size_t* code, size_t* data);
void setOwner(WasmModuleObject* owner) { MOZ_ASSERT(!ownerObject_); ownerObject_ = owner; }
inline const HeapPtr<WasmModuleObject*>& owner() const;
inline const GCPtr<WasmModuleObject*>& owner() const;
void setSource(Bytes&& source) { source_ = Move(source); }
@ -688,7 +688,7 @@ class WasmModuleObject : public NativeObject
static const Class class_;
};
inline const HeapPtr<WasmModuleObject*>&
inline const GCPtr<WasmModuleObject*>&
wasm::Module::owner() const {
MOZ_ASSERT(&ownerObject_->module() == this);
return ownerObject_;
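
The static_assert above protects a layout invariant: the JIT reads ImportExit::fun as a raw machine word at a fixed offset, so the barriered wrapper must stay exactly pointer-sized. A minimal standalone sketch of that invariant (BarrieredPtr and ImportExitSketch are hypothetical stand-ins; the real js::GCPtr also runs pre/post write barriers):

// Sketch only: shows why a barriered pointer must be a single word.
#include <cassert>
#include <cstddef>

template <typename T>
class BarrieredPtr {
    T* value_;                       // the only data member: one word, no vtable
  public:
    explicit BarrieredPtr(T* v) : value_(v) {}  // real GCPtr fires a post barrier here
    T* get() const { return value_; }
};

struct Function {};
struct ImportExitSketch {
    void* code;
    BarrieredPtr<Function> fun;
};

static_assert(sizeof(BarrieredPtr<Function>) == sizeof(void*),
              "JIT code loads this field as a plain pointer");

int main() {
    // JIT-style access: read the field through its byte offset.
    Function f;
    ImportExitSketch exit{nullptr, BarrieredPtr<Function>(&f)};
    auto* raw = *reinterpret_cast<Function**>(
        reinterpret_cast<char*>(&exit) + offsetof(ImportExitSketch, fun));
    assert(raw == &f);
}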

View File

@ -2614,7 +2614,7 @@ js::StoreScalar##T::Func(JSContext*, unsigned argc, Value* vp) \
#define JS_STORE_REFERENCE_CLASS_IMPL(_constant, T, _name) \
bool \
js::StoreReference##T::Func(JSContext* cx, unsigned argc, Value* vp) \
js::StoreReference##_name::Func(JSContext* cx, unsigned argc, Value* vp) \
{ \
CallArgs args = CallArgsFromVp(argc, vp); \
MOZ_ASSERT(args.length() == 4); \
@ -2641,7 +2641,7 @@ js::StoreReference##T::Func(JSContext* cx, unsigned argc, Value* vp) \
#define JS_LOAD_SCALAR_CLASS_IMPL(_constant, T, _name) \
bool \
js::LoadScalar##T::Func(JSContext*, unsigned argc, Value* vp) \
{ \
CallArgs args = CallArgsFromVp(argc, vp); \
MOZ_ASSERT(args.length() == 2); \
@ -2661,7 +2661,7 @@ js::LoadScalar##T::Func(JSContext*, unsigned argc, Value* vp) \
#define JS_LOAD_REFERENCE_CLASS_IMPL(_constant, T, _name) \
bool \
js::LoadReference##T::Func(JSContext*, unsigned argc, Value* vp) \
js::LoadReference##_name::Func(JSContext*, unsigned argc, Value* vp) \
{ \
CallArgs args = CallArgsFromVp(argc, vp); \
MOZ_ASSERT(args.length() == 2); \
@ -2684,8 +2684,8 @@ js::LoadReference##T::Func(JSContext*, unsigned argc, Value* vp) \
// private methods `store()` and `load()`.
bool
StoreReferenceHeapValue::store(JSContext* cx, HeapValue* heap, const Value& v,
TypedObject* obj, jsid id)
StoreReferenceAny::store(JSContext* cx, GCPtrValue* heap, const Value& v,
TypedObject* obj, jsid id)
{
// Undefined values are not included in type inference information for
// value properties of typed objects, as these properties are always
@ -2702,8 +2702,8 @@ StoreReferenceHeapValue::store(JSContext* cx, HeapValue* heap, const Value& v,
}
bool
StoreReferenceHeapPtrObject::store(JSContext* cx, HeapPtrObject* heap, const Value& v,
TypedObject* obj, jsid id)
StoreReferenceObject::store(JSContext* cx, GCPtrObject* heap, const Value& v,
TypedObject* obj, jsid id)
{
MOZ_ASSERT(v.isObjectOrNull()); // or else Store_object is being misused
@ -2722,8 +2722,8 @@ StoreReferenceHeapPtrObject::store(JSContext* cx, HeapPtrObject* heap, const Val
}
bool
StoreReferenceHeapPtrString::store(JSContext* cx, HeapPtrString* heap, const Value& v,
TypedObject* obj, jsid id)
StoreReferencestring::store(JSContext* cx, GCPtrString* heap, const Value& v,
TypedObject* obj, jsid id)
{
MOZ_ASSERT(v.isString()); // or else Store_string is being misused
@ -2734,15 +2734,13 @@ StoreReferenceHeapPtrString::store(JSContext* cx, HeapPtrString* heap, const Val
}
void
LoadReferenceHeapValue::load(HeapValue* heap,
MutableHandleValue v)
LoadReferenceAny::load(GCPtrValue* heap, MutableHandleValue v)
{
v.set(*heap);
}
void
LoadReferenceHeapPtrObject::load(HeapPtrObject* heap,
MutableHandleValue v)
LoadReferenceObject::load(GCPtrObject* heap, MutableHandleValue v)
{
if (*heap)
v.setObject(**heap);
@ -2751,8 +2749,7 @@ LoadReferenceHeapPtrObject::load(HeapPtrObject* heap,
}
void
LoadReferenceHeapPtrString::load(HeapPtrString* heap,
MutableHandleValue v)
LoadReferencestring::load(GCPtrString* heap, MutableHandleValue v)
{
v.setString(*heap);
}
@ -2835,23 +2832,23 @@ MemoryInitVisitor::visitReference(ReferenceTypeDescr& descr, uint8_t* mem)
switch (descr.type()) {
case ReferenceTypeDescr::TYPE_ANY:
{
js::HeapValue* heapValue = reinterpret_cast<js::HeapValue*>(mem);
js::GCPtrValue* heapValue = reinterpret_cast<js::GCPtrValue*>(mem);
heapValue->init(UndefinedValue());
return;
}
case ReferenceTypeDescr::TYPE_OBJECT:
{
js::HeapPtrObject* objectPtr =
reinterpret_cast<js::HeapPtrObject*>(mem);
js::GCPtrObject* objectPtr =
reinterpret_cast<js::GCPtrObject*>(mem);
objectPtr->init(nullptr);
return;
}
case ReferenceTypeDescr::TYPE_STRING:
{
js::HeapPtrString* stringPtr =
reinterpret_cast<js::HeapPtrString*>(mem);
js::GCPtrString* stringPtr =
reinterpret_cast<js::GCPtrString*>(mem);
stringPtr->init(rt_->emptyString);
return;
}
@ -2905,21 +2902,21 @@ MemoryTracingVisitor::visitReference(ReferenceTypeDescr& descr, uint8_t* mem)
switch (descr.type()) {
case ReferenceTypeDescr::TYPE_ANY:
{
HeapValue* heapValue = reinterpret_cast<js::HeapValue*>(mem);
GCPtrValue* heapValue = reinterpret_cast<js::GCPtrValue*>(mem);
TraceEdge(trace_, heapValue, "reference-val");
return;
}
case ReferenceTypeDescr::TYPE_OBJECT:
{
HeapPtrObject* objectPtr = reinterpret_cast<js::HeapPtrObject*>(mem);
GCPtrObject* objectPtr = reinterpret_cast<js::GCPtrObject*>(mem);
TraceNullableEdge(trace_, objectPtr, "reference-obj");
return;
}
case ReferenceTypeDescr::TYPE_STRING:
{
HeapPtrString* stringPtr = reinterpret_cast<js::HeapPtrString*>(mem);
GCPtrString* stringPtr = reinterpret_cast<js::GCPtrString*>(mem);
TraceNullableEdge(trace_, stringPtr, "reference-str");
return;
}

View File

@ -310,10 +310,10 @@ class ReferenceTypeDescr : public SimpleTypeDescr
static MOZ_MUST_USE bool call(JSContext* cx, unsigned argc, Value* vp);
};
#define JS_FOR_EACH_REFERENCE_TYPE_REPR(macro_) \
macro_(ReferenceTypeDescr::TYPE_ANY, HeapValue, Any) \
macro_(ReferenceTypeDescr::TYPE_OBJECT, HeapPtrObject, Object) \
macro_(ReferenceTypeDescr::TYPE_STRING, HeapPtrString, string)
#define JS_FOR_EACH_REFERENCE_TYPE_REPR(macro_) \
macro_(ReferenceTypeDescr::TYPE_ANY, GCPtrValue, Any) \
macro_(ReferenceTypeDescr::TYPE_OBJECT, GCPtrObject, Object) \
macro_(ReferenceTypeDescr::TYPE_STRING, GCPtrString, string)
// Type descriptors whose instances are objects and hence which have
// an associated `prototype` property.
@ -498,7 +498,7 @@ class TypedObject : public JSObject
protected:
static const ObjectOps objectOps_;
HeapPtrShape shape_;
GCPtrShape shape_;
static MOZ_MUST_USE bool obj_lookupProperty(JSContext* cx, HandleObject obj,
HandleId id, MutableHandleObject objp,
@ -585,7 +585,7 @@ typedef Handle<TypedObject*> HandleTypedObject;
class OutlineTypedObject : public TypedObject
{
// The object which owns the data this object points to. Because this
// pointer is managed in tandem with |data|, this is not a HeapPtr and
// pointer is managed in tandem with |data|, this is not a GCPtr and
// barriers are managed directly.
JSObject* owner_;
@ -865,7 +865,7 @@ class StoreScalar##T { \
* - `value` is an object or null (`Store_Object`) or string (`Store_string`).
*/
#define JS_STORE_REFERENCE_CLASS_DEFN(_constant, T, _name) \
class StoreReference##T { \
class StoreReference##_name { \
private: \
static MOZ_MUST_USE bool store(JSContext* cx, T* heap, const Value& v, \
TypedObject* obj, jsid id); \
@ -899,7 +899,7 @@ class LoadScalar##T { \
* `targetDatum` must be attached.
*/
#define JS_LOAD_REFERENCE_CLASS_DEFN(_constant, T, _name) \
class LoadReference##T { \
class LoadReference##_name { \
private: \
static void load(T* heap, MutableHandleValue v); \
\
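
The FOR_EACH list above is an X-macro: passing a class-definition macro through it pastes the third argument onto the class name, which is why the lowercase `string` in the TYPE_STRING entry yields classes named StoreReferencestring and LoadReferencestring in the .cpp file. A reduced sketch of the pattern, with placeholder types standing in for the real reference types:

// Sketch only: token pasting via an X-macro, simplified types.
#define FOR_EACH_REF_TYPE(macro_) \
    macro_(0, int, Any)           \
    macro_(1, long, Object)       \
    macro_(2, char, string)

#define DEFINE_STORE_CLASS(_constant, T, _name)                      \
    class StoreReference##_name {                                    \
      public:                                                        \
        static bool store(T* heap, T v) { *heap = v; return true; }  \
    };

FOR_EACH_REF_TYPE(DEFINE_STORE_CLASS)  // stamps out StoreReferenceAny, ...Object, ...string

int main() {
    int slot = 0;
    return StoreReferenceAny::store(&slot, 7) ? 0 : 1;  // class name pasted from "Any"
}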

View File

@ -9380,7 +9380,7 @@ CGObjectList::finish(ObjectArray* array)
MOZ_ASSERT(length <= INDEX_LIMIT);
MOZ_ASSERT(length == array->length);
js::HeapPtrObject* cursor = array->vector + array->length;
js::GCPtrObject* cursor = array->vector + array->length;
ObjectBox* objbox = lastbox;
do {
--cursor;

View File

@ -122,7 +122,7 @@ AtomDecls<ParseHandler>::addShadowedForAnnexB(JSAtom* atom,
}
void
frontend::InitAtomMap(frontend::AtomIndexMap* indices, HeapPtrAtom* atoms)
frontend::InitAtomMap(frontend::AtomIndexMap* indices, GCPtrAtom* atoms)
{
if (indices->isMap()) {
typedef AtomIndexMap::WordMap WordMap;

View File

@ -37,7 +37,7 @@ typedef InlineMap<JSAtom*, DefinitionList, 24> AtomDefnListMap;
* the list and map->vector must point to pre-allocated memory.
*/
void
InitAtomMap(AtomIndexMap* indices, HeapPtrAtom* atoms);
InitAtomMap(AtomIndexMap* indices, GCPtrAtom* atoms);
/*
* A pool that permits the reuse of the backing storage for the defn, index, or

View File

@ -2908,7 +2908,7 @@ Parser<SyntaxParseHandler>::finishFunctionDefinition(Node pn, FunctionBox* funbo
freeVariables[i++] = LazyScript::FreeVariable(r.front().key());
MOZ_ASSERT(i == numFreeVariables);
HeapPtrFunction* innerFunctions = lazy->innerFunctions();
GCPtrFunction* innerFunctions = lazy->innerFunctions();
for (size_t i = 0; i < numInnerFunctions; i++)
innerFunctions[i].init(pc->innerFunctions[i]);

View File

@ -123,9 +123,7 @@
* all that's necessary to make some field be barriered is to replace
* Type* field;
* with
* HeapPtr<Type> field;
* There are also special classes HeapValue and HeapId, which barrier js::Value
* and jsid, respectively.
* GCPtr<Type> field;
*
* One additional note: not all object writes need to be pre-barriered. Writes
* to newly allocated objects do not need a pre-barrier. In these cases, we use
@ -141,11 +139,11 @@
* | | | | |
* | | | | PreBarriered provides pre-barriers only
* | | | |
* | | | HeapPtr provides pre- and post-barriers
* | | | GCPtr provides pre- and post-barriers
* | | |
* | | RelocatablePtr provides pre- and post-barriers and is relocatable
* | |
* | HeapSlot similar to HeapPtr, but tailored to slots storage
* | HeapSlot similar to GCPtr, but tailored to slots storage
* |
* ReadBarrieredBase base class which provides common read operations
* |
@ -163,7 +161,7 @@
* -> InternalBarrierMethods<T*>::preBarrier
* -> T::writeBarrierPre
*
* HeapPtr<T>::post and RelocatablePtr<T>::post
* GCPtr<T>::post and RelocatablePtr<T>::post
* -> InternalBarrierMethods<T*>::postBarrier
* -> T::writeBarrierPost
* -> InternalBarrierMethods<Value>::postBarrier
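
The call graph above boils down to a fixed write sequence. A schematic sketch, with hypothetical preBarrier/postBarrier free functions standing in for InternalBarrierMethods: an assignment first pre-barriers the value being overwritten (to keep the incremental marker's snapshot valid), then stores, then post-barriers the old/new pair (to keep the generational store buffer current):

// Sketch only: the pre/store/post sequence behind a barriered write.
#include <cstdio>

struct Thing {};
void preBarrier(Thing* old) { if (old) std::puts("pre: mark overwritten value"); }
void postBarrier(Thing*, Thing*) { std::puts("post: update store buffer"); }

class PrePostBarrieredPtr {
    Thing* value_ = nullptr;
  public:
    PrePostBarrieredPtr& operator=(Thing* v) {
        preBarrier(value_);        // snapshot-at-the-beginning invariant
        Thing* old = value_;
        value_ = v;
        postBarrier(old, value_);  // record a possible tenured->nursery edge
        return *this;
    }
};

int main() {
    Thing a, b;
    PrePostBarrieredPtr p;
    p = &a;   // pre sees null, post records the new edge
    p = &b;   // pre marks a, post records the new edge
}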
@ -202,6 +200,7 @@ class BaseShape;
class DebugScopeObject;
class GlobalObject;
class LazyScript;
class ModuleObject;
class ModuleEnvironmentObject;
class ModuleNamespaceObject;
class NativeObject;
@ -371,7 +370,7 @@ class WriteBarrieredBase : public BarrieredBase<T>
/*
* PreBarriered only automatically handles pre-barriers. Post-barriers must
* be manually implemented when using this class. HeapPtr and RelocatablePtr
* be manually implemented when using this class. GCPtr and RelocatablePtr
* should be used in all cases that do not require explicit low-level control
* of moving behavior, e.g. for HashMap keys.
*/
@ -409,30 +408,27 @@ class PreBarriered : public WriteBarrieredBase<T>
/*
* A pre- and post-barriered heap pointer, for use inside the JS engine.
*
* It must only be stored in memory that has GC lifetime. HeapPtr must not be
* It must only be stored in memory that has GC lifetime. GCPtr must not be
* used in contexts where it may be implicitly moved or deleted, e.g. most
* containers.
*
* Not to be confused with JS::Heap<T>. This is a different class from the
* external interface and implements substantially different semantics.
*
* The post-barriers implemented by this class are faster than those
* implemented by RelocatablePtr<T> or JS::Heap<T> at the cost of not
* automatically handling deletion or movement.
*/
template <class T>
class HeapPtr : public WriteBarrieredBase<T>
class GCPtr : public WriteBarrieredBase<T>
{
public:
HeapPtr() : WriteBarrieredBase<T>(JS::GCPolicy<T>::initial()) {}
explicit HeapPtr(T v) : WriteBarrieredBase<T>(v) {
GCPtr() : WriteBarrieredBase<T>(JS::GCPolicy<T>::initial()) {}
explicit GCPtr(T v) : WriteBarrieredBase<T>(v) {
this->post(JS::GCPolicy<T>::initial(), v);
}
explicit HeapPtr(const HeapPtr<T>& v) : WriteBarrieredBase<T>(v) {
explicit GCPtr(const GCPtr<T>& v) : WriteBarrieredBase<T>(v) {
this->post(JS::GCPolicy<T>::initial(), v);
}
#ifdef DEBUG
~HeapPtr() {
~GCPtr() {
// No prebarrier necessary as this only happens when we are sweeping or
// before the containing object becomes part of the GC graph.
MOZ_ASSERT(CurrentThreadIsGCSweeping() || CurrentThreadIsHandlingInitFailure());
@ -444,7 +440,7 @@ class HeapPtr : public WriteBarrieredBase<T>
this->post(JS::GCPolicy<T>::initial(), v);
}
DECLARE_POINTER_ASSIGN_OPS(HeapPtr, T);
DECLARE_POINTER_ASSIGN_OPS(GCPtr, T);
T unbarrieredGet() const {
return this->value;
@ -459,14 +455,14 @@ class HeapPtr : public WriteBarrieredBase<T>
}
/*
* Unlike RelocatablePtr<T>, HeapPtr<T> must be managed with GC lifetimes.
* Unlike RelocatablePtr<T>, GCPtr<T> must be managed with GC lifetimes.
* Specifically, the memory used by the pointer itself must be live until
* at least the next minor GC. For that reason, move semantics are invalid
* and are deleted here. Please note that not all containers support move
* semantics, so this does not completely prevent invalid uses.
*/
HeapPtr(HeapPtr<T>&&) = delete;
HeapPtr<T>& operator=(HeapPtr<T>&&) = delete;
GCPtr(GCPtr<T>&&) = delete;
GCPtr<T>& operator=(GCPtr<T>&&) = delete;
};
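
Taken together, the class above implies a usage pattern: init() for freshly allocated cells (no pre-barrier is needed on memory the GC has never seen), operator= for later writes, and deleted moves so that relocating containers fail to compile instead of silently skipping barriers. A hedged sketch with stand-in types (GCPtrSketch and CellSketch are hypothetical; a real cell lives on the GC heap):

// Sketch only: barrier behavior elided, shape of the API preserved.
struct Shape {};

template <typename T>
class GCPtrSketch {
    T value_ = nullptr;
  public:
    GCPtrSketch() = default;
    GCPtrSketch(GCPtrSketch&&) = delete;              // relocation is illegal
    GCPtrSketch& operator=(GCPtrSketch&&) = delete;
    void init(T v) { value_ = v; }                    // new cell: post-barrier only
    GCPtrSketch& operator=(T v) { value_ = v; return *this; }  // pre + store + post
};

struct CellSketch {                                   // imagine a GC-heap allocation
    GCPtrSketch<Shape*> shape;
};

int main() {
    static Shape s;
    static CellSketch cell;                           // stand-in for GC-lifetime memory
    cell.shape.init(&s);                              // freshly allocated: init()
    cell.shape = &s;                                  // later writes run both barriers
    // std::vector<GCPtrSketch<Shape*>> v; v.push_back({});  // rejected: moves deleted
}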
/*
@ -726,7 +722,7 @@ class HeapSlotArray
{}
operator const Value*() const {
JS_STATIC_ASSERT(sizeof(HeapPtr<Value>) == sizeof(Value));
JS_STATIC_ASSERT(sizeof(GCPtr<Value>) == sizeof(Value));
JS_STATIC_ASSERT(sizeof(HeapSlot) == sizeof(Value));
return reinterpret_cast<const Value*>(array);
}
@ -832,11 +828,11 @@ struct MovableCellHasher<ReadBarriered<T>>
static void rekey(Key& k, const Key& newKey) { k.unsafeSet(newKey); }
};
/* Useful for hashtables with a HeapPtr as key. */
/* Useful for hashtables with a GCPtr as key. */
template <class T>
struct HeapPtrHasher
struct GCPtrHasher
{
typedef HeapPtr<T> Key;
typedef GCPtr<T> Key;
typedef T Lookup;
static HashNumber hash(Lookup obj) { return DefaultHasher<T>::hash(obj); }
@ -844,9 +840,9 @@ struct HeapPtrHasher
static void rekey(Key& k, const Key& newKey) { k.unsafeSet(newKey); }
};
/* Specialized hashing policy for HeapPtrs. */
/* Specialized hashing policy for GCPtrs. */
template <class T>
struct DefaultHasher<HeapPtr<T>> : HeapPtrHasher<T> { };
struct DefaultHasher<GCPtr<T>> : GCPtrHasher<T> {};
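
The hasher above follows the SpiderMonkey hash-policy contract: keys are stored as GCPtr<T> but hashed and matched through the bare T, so lookups can pass a raw pointer without constructing a barriered key. A simplified standalone sketch (GCPtrSketch and GCPtrHasherSketch are illustrative stand-ins):

// Sketch only: the Key/Lookup split of a hash policy.
#include <cstdint>
#include <functional>

template <typename T>
class GCPtrSketch {
    T value_ = nullptr;
  public:
    explicit GCPtrSketch(T v) : value_(v) {}
    T get() const { return value_; }
};

template <typename T>
struct GCPtrHasherSketch {
    using Key = GCPtrSketch<T>;   // what the table stores
    using Lookup = T;             // what callers pass to lookup()
    static uint32_t hash(Lookup l) { return uint32_t(std::hash<T>{}(l)); }
    static bool match(const Key& k, Lookup l) { return k.get() == l; }
};

int main() {
    int obj = 0;
    GCPtrSketch<int*> key(&obj);
    return GCPtrHasherSketch<int*>::match(key, &obj) ? 0 : 1;
}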
template <class T>
struct PreBarrieredHasher
@ -911,34 +907,35 @@ typedef RelocatablePtr<JSString*> RelocatablePtrString;
typedef RelocatablePtr<JSAtom*> RelocatablePtrAtom;
typedef RelocatablePtr<ArrayBufferObjectMaybeShared*> RelocatablePtrArrayBufferObjectMaybeShared;
typedef HeapPtr<NativeObject*> HeapPtrNativeObject;
typedef HeapPtr<ArrayObject*> HeapPtrArrayObject;
typedef HeapPtr<ArrayBufferObjectMaybeShared*> HeapPtrArrayBufferObjectMaybeShared;
typedef HeapPtr<ArrayBufferObject*> HeapPtrArrayBufferObject;
typedef HeapPtr<BaseShape*> HeapPtrBaseShape;
typedef HeapPtr<JSAtom*> HeapPtrAtom;
typedef HeapPtr<JSFlatString*> HeapPtrFlatString;
typedef HeapPtr<JSFunction*> HeapPtrFunction;
typedef HeapPtr<JSLinearString*> HeapPtrLinearString;
typedef HeapPtr<JSObject*> HeapPtrObject;
typedef HeapPtr<JSScript*> HeapPtrScript;
typedef HeapPtr<JSString*> HeapPtrString;
typedef HeapPtr<ModuleEnvironmentObject*> HeapPtrModuleEnvironmentObject;
typedef HeapPtr<ModuleNamespaceObject*> HeapPtrModuleNamespaceObject;
typedef HeapPtr<PlainObject*> HeapPtrPlainObject;
typedef HeapPtr<PropertyName*> HeapPtrPropertyName;
typedef HeapPtr<Shape*> HeapPtrShape;
typedef HeapPtr<UnownedBaseShape*> HeapPtrUnownedBaseShape;
typedef HeapPtr<jit::JitCode*> HeapPtrJitCode;
typedef HeapPtr<ObjectGroup*> HeapPtrObjectGroup;
typedef GCPtr<NativeObject*> GCPtrNativeObject;
typedef GCPtr<ArrayObject*> GCPtrArrayObject;
typedef GCPtr<ArrayBufferObjectMaybeShared*> GCPtrArrayBufferObjectMaybeShared;
typedef GCPtr<ArrayBufferObject*> GCPtrArrayBufferObject;
typedef GCPtr<BaseShape*> GCPtrBaseShape;
typedef GCPtr<JSAtom*> GCPtrAtom;
typedef GCPtr<JSFlatString*> GCPtrFlatString;
typedef GCPtr<JSFunction*> GCPtrFunction;
typedef GCPtr<JSLinearString*> GCPtrLinearString;
typedef GCPtr<JSObject*> GCPtrObject;
typedef GCPtr<JSScript*> GCPtrScript;
typedef GCPtr<JSString*> GCPtrString;
typedef GCPtr<ModuleObject*> GCPtrModuleObject;
typedef GCPtr<ModuleEnvironmentObject*> GCPtrModuleEnvironmentObject;
typedef GCPtr<ModuleNamespaceObject*> GCPtrModuleNamespaceObject;
typedef GCPtr<PlainObject*> GCPtrPlainObject;
typedef GCPtr<PropertyName*> GCPtrPropertyName;
typedef GCPtr<Shape*> GCPtrShape;
typedef GCPtr<UnownedBaseShape*> GCPtrUnownedBaseShape;
typedef GCPtr<jit::JitCode*> GCPtrJitCode;
typedef GCPtr<ObjectGroup*> GCPtrObjectGroup;
typedef PreBarriered<Value> PreBarrieredValue;
typedef RelocatablePtr<Value> RelocatableValue;
typedef HeapPtr<Value> HeapValue;
typedef GCPtr<Value> GCPtrValue;
typedef PreBarriered<jsid> PreBarrieredId;
typedef RelocatablePtr<jsid> RelocatableId;
typedef HeapPtr<jsid> HeapId;
typedef GCPtr<jsid> GCPtrId;
typedef ImmutableTenuredPtr<PropertyName*> ImmutablePropertyNamePtr;
typedef ImmutableTenuredPtr<JS::Symbol*> ImmutableSymbolPtr;

View File

@ -982,7 +982,7 @@ LazyScript::traceChildren(JSTracer* trc)
TraceManuallyBarrieredEdge(trc, &atom, "lazyScriptFreeVariable");
}
HeapPtrFunction* innerFunctions = this->innerFunctions();
GCPtrFunction* innerFunctions = this->innerFunctions();
for (auto i : MakeRange(numInnerFunctions()))
TraceEdge(trc, &innerFunctions[i], "lazyScriptInnerFunction");
}
@ -1006,7 +1006,7 @@ js::GCMarker::eagerlyMarkChildren(LazyScript *thing)
for (auto i : MakeRange(thing->numFreeVariables()))
traverseEdge(thing, static_cast<JSString*>(freeVariables[i].atom()));
HeapPtrFunction* innerFunctions = thing->innerFunctions();
GCPtrFunction* innerFunctions = thing->innerFunctions();
for (auto i : MakeRange(thing->numInnerFunctions()))
traverseEdge(thing, static_cast<JSObject*>(innerFunctions[i]));
}

View File

@ -168,7 +168,7 @@ Zone::sweepBreakpoints(FreeOp* fop)
Breakpoint* nextbp;
for (Breakpoint* bp = site->firstBreakpoint(); bp; bp = nextbp) {
nextbp = bp->nextInSite();
HeapPtrNativeObject& dbgobj = bp->debugger->toJSObjectRef();
GCPtrNativeObject& dbgobj = bp->debugger->toJSObjectRef();
// If we are sweeping, then we expect the script and the
// debugger object to be swept in the same zone group, except if

View File

@ -40,12 +40,12 @@ FRAGMENT(Root, HeapSlot) {
FRAGMENT(Root, barriers) {
JSObject* obj = JS_NewPlainObject(cx);
js::PreBarriered<JSObject*> prebarriered(obj);
js::HeapPtr<JSObject*> heapptr(obj);
js::GCPtr<JSObject*> heapptr(obj);
js::RelocatablePtr<JSObject*> relocatable(obj);
JS::Value val = JS::ObjectValue(*obj);
js::PreBarrieredValue prebarrieredValue(JS::ObjectValue(*obj));
js::HeapValue heapValue(JS::ObjectValue(*obj));
js::GCPtrValue heapValue(JS::ObjectValue(*obj));
js::RelocatableValue relocatableValue(JS::ObjectValue(*obj));
breakpoint();

View File

@ -1059,31 +1059,31 @@ BaselineCacheIRCompiler::init(CacheKind kind)
}
template <typename T>
static HeapPtr<T>*
AsHeapPtr(uintptr_t* ptr)
static GCPtr<T>*
AsGCPtr(uintptr_t* ptr)
{
return reinterpret_cast<HeapPtr<T>*>(ptr);
return reinterpret_cast<GCPtr<T>*>(ptr);
}
template<class T>
HeapPtr<T>&
GCPtr<T>&
CacheIRStubInfo::getStubField(ICStub* stub, uint32_t field) const
{
uint8_t* stubData = (uint8_t*)stub + stubDataOffset_;
MOZ_ASSERT(uintptr_t(stubData) % sizeof(uintptr_t) == 0);
return *AsHeapPtr<T>((uintptr_t*)stubData + field);
return *AsGCPtr<T>((uintptr_t*)stubData + field);
}
template HeapPtr<Shape*>& CacheIRStubInfo::getStubField(ICStub* stub, uint32_t offset) const;
template HeapPtr<ObjectGroup*>& CacheIRStubInfo::getStubField(ICStub* stub, uint32_t offset) const;
template HeapPtr<JSObject*>& CacheIRStubInfo::getStubField(ICStub* stub, uint32_t offset) const;
template GCPtr<Shape*>& CacheIRStubInfo::getStubField(ICStub* stub, uint32_t offset) const;
template GCPtr<ObjectGroup*>& CacheIRStubInfo::getStubField(ICStub* stub, uint32_t offset) const;
template GCPtr<JSObject*>& CacheIRStubInfo::getStubField(ICStub* stub, uint32_t offset) const;
template <typename T>
static void
InitHeapPtr(uintptr_t* ptr, uintptr_t val)
InitGCPtr(uintptr_t* ptr, uintptr_t val)
{
AsHeapPtr<T*>(ptr)->init((T*)val);
AsGCPtr<T*>(ptr)->init((T*)val);
}
void
@ -1097,13 +1097,13 @@ CacheIRWriter::copyStubData(uint8_t* dest) const
destWords[i] = stubFields_[i].word;
continue;
case StubField::GCType::Shape:
InitHeapPtr<Shape>(destWords + i, stubFields_[i].word);
InitGCPtr<Shape>(destWords + i, stubFields_[i].word);
continue;
case StubField::GCType::JSObject:
InitHeapPtr<JSObject>(destWords + i, stubFields_[i].word);
InitGCPtr<JSObject>(destWords + i, stubFields_[i].word);
continue;
case StubField::GCType::ObjectGroup:
InitHeapPtr<ObjectGroup>(destWords + i, stubFields_[i].word);
InitGCPtr<ObjectGroup>(destWords + i, stubFields_[i].word);
continue;
case StubField::GCType::Limit:
break;
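
The AsGCPtr/InitGCPtr helpers above work because stub data is a flat, word-aligned array of uintptr_t and GCPtr is a single word: a GC pointer field is initialized or read in place by casting the word's address. A standalone sketch of the same trick (GCPtrSketch and asGCPtr are hypothetical stand-ins; the real init() also post-barriers):

// Sketch only: treating raw stub-data words as barriered pointer slots.
#include <cassert>
#include <cstdint>

template <typename T>
class GCPtrSketch {
    T value_ = nullptr;
  public:
    void init(T v) { value_ = v; }
    T get() const { return value_; }
};

struct Shape { int id; };

template <typename T>
static GCPtrSketch<T>* asGCPtr(uintptr_t* word) {
    return reinterpret_cast<GCPtrSketch<T>*>(word);  // legal only because it is one word
}

int main() {
    Shape shape{42};
    uintptr_t stubData[4] = {};                      // word-aligned stub fields
    asGCPtr<Shape*>(&stubData[1])->init(&shape);     // InitGCPtr-style write
    assert(asGCPtr<Shape*>(&stubData[1])->get()->id == 42);
}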

View File

@ -53,7 +53,7 @@ class CacheIRStubInfo
const CacheIRWriter& writer);
template <class T>
js::HeapPtr<T>& getStubField(ICStub* stub, uint32_t field) const;
js::GCPtr<T>& getStubField(ICStub* stub, uint32_t field) const;
};
void TraceBaselineCacheIRStub(JSTracer* trc, ICStub* stub, const CacheIRStubInfo* stubInfo);

View File

@ -15,6 +15,7 @@
#include "jsopcode.h"
#include "builtin/TypedObject.h"
#include "gc/Barrier.h"
#include "jit/BaselineICList.h"
#include "jit/BaselineJIT.h"
#include "jit/SharedIC.h"
@ -125,12 +126,12 @@ class ICTypeUpdate_SingleObject : public ICStub
{
friend class ICStubSpace;
HeapPtrObject obj_;
GCPtrObject obj_;
ICTypeUpdate_SingleObject(JitCode* stubCode, JSObject* obj);
public:
HeapPtrObject& object() {
GCPtrObject& object() {
return obj_;
}
@ -160,12 +161,12 @@ class ICTypeUpdate_ObjectGroup : public ICStub
{
friend class ICStubSpace;
HeapPtrObjectGroup group_;
GCPtrObjectGroup group_;
ICTypeUpdate_ObjectGroup(JitCode* stubCode, ObjectGroup* group);
public:
HeapPtrObjectGroup& group() {
GCPtrObjectGroup& group() {
return group_;
}
@ -467,7 +468,7 @@ template <class T>
class ICGetElemNativeStubImpl : public ICGetElemNativeStub
{
protected:
HeapPtr<T> key_;
GCPtr<T> key_;
ICGetElemNativeStubImpl(ICStub::Kind kind, JitCode* stubCode, ICStub* firstMonitorStub,
ReceiverGuard guard, const T* key, AccessType acctype, bool needsAtomize)
@ -477,7 +478,7 @@ class ICGetElemNativeStubImpl : public ICGetElemNativeStub
{}
public:
HeapPtr<T>& key() {
GCPtr<T>& key() {
return key_;
}
static size_t offsetOfKey() {
@ -523,7 +524,7 @@ template <class T>
class ICGetElemNativeGetterStub : public ICGetElemNativeStubImpl<T>
{
protected:
HeapPtrFunction getter_;
GCPtrFunction getter_;
uint32_t pcOffset_;
ICGetElemNativeGetterStub(ICStub::Kind kind, JitCode* stubCode, ICStub* firstMonitorStub,
@ -531,7 +532,7 @@ class ICGetElemNativeGetterStub : public ICGetElemNativeStubImpl<T>
JSFunction* getter, uint32_t pcOffset);
public:
HeapPtrFunction& getter() {
GCPtrFunction& getter() {
return getter_;
}
static size_t offsetOfGetter() {
@ -593,22 +594,22 @@ template <class T>
class ICGetElem_NativePrototypeSlot : public ICGetElemNativeSlotStub<T>
{
friend class ICStubSpace;
HeapPtrObject holder_;
HeapPtrShape holderShape_;
GCPtrObject holder_;
GCPtrShape holderShape_;
ICGetElem_NativePrototypeSlot(JitCode* stubCode, ICStub* firstMonitorStub, ReceiverGuard guard,
const T* key, AccType acctype, bool needsAtomize, uint32_t offset,
JSObject* holder, Shape* holderShape);
public:
HeapPtrObject& holder() {
GCPtrObject& holder() {
return holder_;
}
static size_t offsetOfHolder() {
return offsetof(ICGetElem_NativePrototypeSlot, holder_);
}
HeapPtrShape& holderShape() {
GCPtrShape& holderShape() {
return holderShape_;
}
static size_t offsetOfHolderShape() {
@ -627,8 +628,8 @@ template <class T>
class ICGetElemNativePrototypeCallStub : public ICGetElemNativeGetterStub<T>
{
friend class ICStubSpace;
HeapPtrObject holder_;
HeapPtrShape holderShape_;
GCPtrObject holder_;
GCPtrShape holderShape_;
protected:
ICGetElemNativePrototypeCallStub(ICStub::Kind kind, JitCode* stubCode, ICStub* firstMonitorStub,
@ -637,14 +638,14 @@ class ICGetElemNativePrototypeCallStub : public ICGetElemNativeGetterStub<T>
JSObject* holder, Shape* holderShape);
public:
HeapPtrObject& holder() {
GCPtrObject& holder() {
return holder_;
}
static size_t offsetOfHolder() {
return offsetof(ICGetElemNativePrototypeCallStub, holder_);
}
HeapPtrShape& holderShape() {
GCPtrShape& holderShape() {
return holderShape_;
}
static size_t offsetOfHolderShape() {
@ -850,7 +851,7 @@ class ICGetElem_Dense : public ICMonitoredStub
{
friend class ICStubSpace;
HeapPtrShape shape_;
GCPtrShape shape_;
ICGetElem_Dense(JitCode* stubCode, ICStub* firstMonitorStub, Shape* shape);
@ -862,7 +863,7 @@ class ICGetElem_Dense : public ICMonitoredStub
return offsetof(ICGetElem_Dense, shape_);
}
HeapPtrShape& shape() {
GCPtrShape& shape() {
return shape_;
}
@ -895,7 +896,7 @@ class ICGetElem_UnboxedArray : public ICMonitoredStub
{
friend class ICStubSpace;
HeapPtrObjectGroup group_;
GCPtrObjectGroup group_;
ICGetElem_UnboxedArray(JitCode* stubCode, ICStub* firstMonitorStub, ObjectGroup* group);
@ -907,7 +908,7 @@ class ICGetElem_UnboxedArray : public ICMonitoredStub
return offsetof(ICGetElem_UnboxedArray, group_);
}
HeapPtrObjectGroup& group() {
GCPtrObjectGroup& group() {
return group_;
}
@ -945,7 +946,7 @@ class ICGetElem_TypedArray : public ICStub
friend class ICStubSpace;
protected: // Protected to silence Clang warning.
HeapPtrShape shape_;
GCPtrShape shape_;
ICGetElem_TypedArray(JitCode* stubCode, Shape* shape, Scalar::Type type);
@ -954,7 +955,7 @@ class ICGetElem_TypedArray : public ICStub
return offsetof(ICGetElem_TypedArray, shape_);
}
HeapPtrShape& shape() {
GCPtrShape& shape() {
return shape_;
}
@ -1076,8 +1077,8 @@ class ICSetElem_DenseOrUnboxedArray : public ICUpdatedStub
{
friend class ICStubSpace;
HeapPtrShape shape_; // null for unboxed arrays
HeapPtrObjectGroup group_;
GCPtrShape shape_; // null for unboxed arrays
GCPtrObjectGroup group_;
ICSetElem_DenseOrUnboxedArray(JitCode* stubCode, Shape* shape, ObjectGroup* group);
@ -1089,10 +1090,10 @@ class ICSetElem_DenseOrUnboxedArray : public ICUpdatedStub
return offsetof(ICSetElem_DenseOrUnboxedArray, group_);
}
HeapPtrShape& shape() {
GCPtrShape& shape() {
return shape_;
}
HeapPtrObjectGroup& group() {
GCPtrObjectGroup& group() {
return group_;
}
@ -1143,7 +1144,7 @@ class ICSetElem_DenseOrUnboxedArrayAdd : public ICUpdatedStub
static const size_t MAX_PROTO_CHAIN_DEPTH = 4;
protected:
HeapPtrObjectGroup group_;
GCPtrObjectGroup group_;
ICSetElem_DenseOrUnboxedArrayAdd(JitCode* stubCode, ObjectGroup* group, size_t protoChainDepth);
@ -1152,7 +1153,7 @@ class ICSetElem_DenseOrUnboxedArrayAdd : public ICUpdatedStub
return offsetof(ICSetElem_DenseOrUnboxedArrayAdd, group_);
}
HeapPtrObjectGroup& group() {
GCPtrObjectGroup& group() {
return group_;
}
size_t protoChainDepth() const {
@ -1179,7 +1180,7 @@ class ICSetElem_DenseOrUnboxedArrayAddImpl : public ICSetElem_DenseOrUnboxedArra
// Note: for unboxed arrays, the first shape is null.
static const size_t NumShapes = ProtoChainDepth + 1;
mozilla::Array<HeapPtrShape, NumShapes> shapes_;
mozilla::Array<GCPtrShape, NumShapes> shapes_;
ICSetElem_DenseOrUnboxedArrayAddImpl(JitCode* stubCode, ObjectGroup* group,
Handle<ShapeVector> shapes)
@ -1200,7 +1201,7 @@ class ICSetElem_DenseOrUnboxedArrayAddImpl : public ICSetElem_DenseOrUnboxedArra
return shapes_[i];
}
static size_t offsetOfShape(size_t idx) {
return offsetof(ICSetElem_DenseOrUnboxedArrayAddImpl, shapes_) + idx * sizeof(HeapPtrShape);
return offsetof(ICSetElem_DenseOrUnboxedArrayAddImpl, shapes_) + idx * sizeof(GCPtrShape);
}
};
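
offsetOfShape above shows how JIT-generated code addresses one element of an inline array member: the member's offsetof plus idx * sizeof(element). A simplified sketch (StubSketch with plain Shape* elements stands in for the real stub and its mozilla::Array of GCPtrShape):

// Sketch only: base pointer plus computed byte offset, masm-style.
#include <cassert>
#include <cstddef>

struct Shape { int id; };

struct StubSketch {
    int header;
    Shape* shapes_[3];   // stand-in for mozilla::Array<GCPtrShape, NumShapes>
    static size_t offsetOfShape(size_t idx) {
        return offsetof(StubSketch, shapes_) + idx * sizeof(Shape*);
    }
};

int main() {
    Shape s{7};
    StubSketch stub{0, {nullptr, &s, nullptr}};
    auto* loaded = *reinterpret_cast<Shape**>(
        reinterpret_cast<char*>(&stub) + StubSketch::offsetOfShape(1));
    assert(loaded->id == 7);
}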
@ -1245,7 +1246,7 @@ class ICSetElem_TypedArray : public ICStub
friend class ICStubSpace;
protected: // Protected to silence Clang warning.
HeapPtrShape shape_;
GCPtrShape shape_;
ICSetElem_TypedArray(JitCode* stubCode, Shape* shape, Scalar::Type type,
bool expectOutOfBounds);
@ -1263,7 +1264,7 @@ class ICSetElem_TypedArray : public ICStub
return offsetof(ICSetElem_TypedArray, shape_);
}
HeapPtrShape& shape() {
GCPtrShape& shape() {
return shape_;
}
@ -1331,22 +1332,22 @@ class ICIn_Fallback : public ICFallbackStub
// Base class for In_Native and In_NativePrototype stubs.
class ICInNativeStub : public ICStub
{
HeapPtrShape shape_;
HeapPtrPropertyName name_;
GCPtrShape shape_;
GCPtrPropertyName name_;
protected:
ICInNativeStub(ICStub::Kind kind, JitCode* stubCode, HandleShape shape,
HandlePropertyName name);
public:
HeapPtrShape& shape() {
GCPtrShape& shape() {
return shape_;
}
static size_t offsetOfShape() {
return offsetof(ICInNativeStub, shape_);
}
HeapPtrPropertyName& name() {
GCPtrPropertyName& name() {
return name_;
}
static size_t offsetOfName() {
@ -1371,17 +1372,17 @@ class ICIn_NativePrototype : public ICInNativeStub
{
friend class ICStubSpace;
HeapPtrObject holder_;
HeapPtrShape holderShape_;
GCPtrObject holder_;
GCPtrShape holderShape_;
ICIn_NativePrototype(JitCode* stubCode, HandleShape shape, HandlePropertyName name,
HandleObject holder, HandleShape holderShape);
public:
HeapPtrObject& holder() {
GCPtrObject& holder() {
return holder_;
}
HeapPtrShape& holderShape() {
GCPtrShape& holderShape() {
return holderShape_;
}
static size_t offsetOfHolder() {
@ -1431,7 +1432,7 @@ class ICIn_NativeDoesNotExist : public ICStub
{
friend class ICStubSpace;
HeapPtrPropertyName name_;
GCPtrPropertyName name_;
public:
static const size_t MAX_PROTO_CHAIN_DEPTH = 8;
@ -1445,7 +1446,7 @@ class ICIn_NativeDoesNotExist : public ICStub
MOZ_ASSERT(extra_ <= MAX_PROTO_CHAIN_DEPTH);
return extra_;
}
HeapPtrPropertyName& name() {
GCPtrPropertyName& name() {
return name_;
}
@ -1471,7 +1472,7 @@ class ICIn_NativeDoesNotExistImpl : public ICIn_NativeDoesNotExist
static const size_t NumShapes = ProtoChainDepth + 1;
private:
mozilla::Array<HeapPtrShape, NumShapes> shapes_;
mozilla::Array<GCPtrShape, NumShapes> shapes_;
ICIn_NativeDoesNotExistImpl(JitCode* stubCode, Handle<ShapeVector> shapes,
HandlePropertyName name);
@ -1483,7 +1484,7 @@ class ICIn_NativeDoesNotExistImpl : public ICIn_NativeDoesNotExist
}
static size_t offsetOfShape(size_t idx) {
return offsetof(ICIn_NativeDoesNotExistImpl, shapes_) + (idx * sizeof(HeapPtrShape));
return offsetof(ICIn_NativeDoesNotExistImpl, shapes_) + (idx * sizeof(GCPtrShape));
}
};
@ -1518,12 +1519,12 @@ class ICIn_Dense : public ICStub
{
friend class ICStubSpace;
HeapPtrShape shape_;
GCPtrShape shape_;
ICIn_Dense(JitCode* stubCode, HandleShape shape);
public:
HeapPtrShape& shape() {
GCPtrShape& shape() {
return shape_;
}
static size_t offsetOfShape() {
@ -1634,7 +1635,7 @@ class ICGetName_Scope : public ICMonitoredStub
static const size_t MAX_HOPS = 6;
mozilla::Array<HeapPtrShape, NumHops + 1> shapes_;
mozilla::Array<GCPtrShape, NumHops + 1> shapes_;
uint32_t offset_;
ICGetName_Scope(JitCode* stubCode, ICStub* firstMonitorStub,
@ -1652,7 +1653,7 @@ class ICGetName_Scope : public ICMonitoredStub
static size_t offsetOfShape(size_t index) {
MOZ_ASSERT(index <= NumHops);
return offsetof(ICGetName_Scope, shapes_) + (index * sizeof(HeapPtrShape));
return offsetof(ICGetName_Scope, shapes_) + (index * sizeof(GCPtrShape));
}
static size_t offsetOfOffset() {
return offsetof(ICGetName_Scope, offset_);
@ -1753,13 +1754,13 @@ class ICGetIntrinsic_Constant : public ICStub
{
friend class ICStubSpace;
HeapValue value_;
GCPtrValue value_;
ICGetIntrinsic_Constant(JitCode* stubCode, const Value& value);
~ICGetIntrinsic_Constant();
public:
HeapValue& value() {
GCPtrValue& value() {
return value_;
}
static size_t offsetOfValue() {
@ -1836,17 +1837,17 @@ class ICSetProp_Native : public ICUpdatedStub
friend class ICStubSpace;
protected: // Protected to silence Clang warning.
HeapPtrObjectGroup group_;
HeapPtrShape shape_;
GCPtrObjectGroup group_;
GCPtrShape shape_;
uint32_t offset_;
ICSetProp_Native(JitCode* stubCode, ObjectGroup* group, Shape* shape, uint32_t offset);
public:
HeapPtrObjectGroup& group() {
GCPtrObjectGroup& group() {
return group_;
}
HeapPtrShape& shape() {
GCPtrShape& shape() {
return shape_;
}
void notePreliminaryObject() {
@ -1901,9 +1902,9 @@ class ICSetProp_NativeAdd : public ICUpdatedStub
static const size_t MAX_PROTO_CHAIN_DEPTH = 4;
protected: // Protected to silence Clang warning.
HeapPtrObjectGroup group_;
HeapPtrShape newShape_;
HeapPtrObjectGroup newGroup_;
GCPtrObjectGroup group_;
GCPtrShape newShape_;
GCPtrObjectGroup newGroup_;
uint32_t offset_;
ICSetProp_NativeAdd(JitCode* stubCode, ObjectGroup* group, size_t protoChainDepth,
@ -1913,13 +1914,13 @@ class ICSetProp_NativeAdd : public ICUpdatedStub
size_t protoChainDepth() const {
return extra_;
}
HeapPtrObjectGroup& group() {
GCPtrObjectGroup& group() {
return group_;
}
HeapPtrShape& newShape() {
GCPtrShape& newShape() {
return newShape_;
}
HeapPtrObjectGroup& newGroup() {
GCPtrObjectGroup& newGroup() {
return newGroup_;
}
@ -1949,7 +1950,7 @@ class ICSetProp_NativeAddImpl : public ICSetProp_NativeAdd
friend class ICStubSpace;
static const size_t NumShapes = ProtoChainDepth + 1;
mozilla::Array<HeapPtrShape, NumShapes> shapes_;
mozilla::Array<GCPtrShape, NumShapes> shapes_;
ICSetProp_NativeAddImpl(JitCode* stubCode, ObjectGroup* group,
Handle<ShapeVector> shapes,
@ -1962,7 +1963,7 @@ class ICSetProp_NativeAddImpl : public ICSetProp_NativeAdd
}
static size_t offsetOfShape(size_t idx) {
return offsetof(ICSetProp_NativeAddImpl, shapes_) + (idx * sizeof(HeapPtrShape));
return offsetof(ICSetProp_NativeAddImpl, shapes_) + (idx * sizeof(GCPtrShape));
}
};
@ -2021,7 +2022,7 @@ class ICSetProp_Unboxed : public ICUpdatedStub
{
friend class ICStubSpace;
HeapPtrObjectGroup group_;
GCPtrObjectGroup group_;
uint32_t fieldOffset_;
ICSetProp_Unboxed(JitCode* stubCode, ObjectGroup* group, uint32_t fieldOffset)
@ -2033,7 +2034,7 @@ class ICSetProp_Unboxed : public ICUpdatedStub
}
public:
HeapPtrObjectGroup& group() {
GCPtrObjectGroup& group() {
return group_;
}
@ -2085,8 +2086,8 @@ class ICSetProp_TypedObject : public ICUpdatedStub
{
friend class ICStubSpace;
HeapPtrShape shape_;
HeapPtrObjectGroup group_;
GCPtrShape shape_;
GCPtrObjectGroup group_;
uint32_t fieldOffset_;
bool isObjectReference_;
@ -2102,10 +2103,10 @@ class ICSetProp_TypedObject : public ICUpdatedStub
}
public:
HeapPtrShape& shape() {
GCPtrShape& shape() {
return shape_;
}
HeapPtrObjectGroup& group() {
GCPtrObjectGroup& group() {
return group_;
}
bool isObjectReference() {
@ -2182,11 +2183,11 @@ class ICSetPropCallSetter : public ICStub
// sufficient, although Ion may use holder_ and holderShape_ even for own
// setters. In this case holderShape_ == receiverGuard_.shape_ (isOwnSetter
// below relies on this).
HeapPtrObject holder_;
HeapPtrShape holderShape_;
GCPtrObject holder_;
GCPtrShape holderShape_;
// Function to call.
HeapPtrFunction setter_;
GCPtrFunction setter_;
// PC of call, for profiler
uint32_t pcOffset_;
@ -2199,13 +2200,13 @@ class ICSetPropCallSetter : public ICStub
HeapReceiverGuard& receiverGuard() {
return receiverGuard_;
}
HeapPtrObject& holder() {
GCPtrObject& holder() {
return holder_;
}
HeapPtrShape& holderShape() {
GCPtrShape& holderShape() {
return holderShape_;
}
HeapPtrFunction& setter() {
GCPtrFunction& setter() {
return setter_;
}
@ -2459,8 +2460,8 @@ class ICCall_Scripted : public ICMonitoredStub
static const uint32_t MAX_ARGS_SPREAD_LENGTH = 16;
protected:
HeapPtrFunction callee_;
HeapPtrObject templateObject_;
GCPtrFunction callee_;
GCPtrObject templateObject_;
uint32_t pcOffset_;
ICCall_Scripted(JitCode* stubCode, ICStub* firstMonitorStub,
@ -2471,10 +2472,10 @@ class ICCall_Scripted : public ICMonitoredStub
static ICCall_Scripted* Clone(JSContext* cx, ICStubSpace* space, ICStub* firstMonitorStub,
ICCall_Scripted& other);
HeapPtrFunction& callee() {
GCPtrFunction& callee() {
return callee_;
}
HeapPtrObject& templateObject() {
GCPtrObject& templateObject() {
return templateObject_;
}
@ -2563,8 +2564,8 @@ class ICCall_Native : public ICMonitoredStub
friend class ICStubSpace;
protected:
HeapPtrFunction callee_;
HeapPtrObject templateObject_;
GCPtrFunction callee_;
GCPtrObject templateObject_;
uint32_t pcOffset_;
#ifdef JS_SIMULATOR
@ -2579,10 +2580,10 @@ class ICCall_Native : public ICMonitoredStub
static ICCall_Native* Clone(JSContext* cx, ICStubSpace* space, ICStub* firstMonitorStub,
ICCall_Native& other);
HeapPtrFunction& callee() {
GCPtrFunction& callee() {
return callee_;
}
HeapPtrObject& templateObject() {
GCPtrObject& templateObject() {
return templateObject_;
}
@ -2644,7 +2645,7 @@ class ICCall_ClassHook : public ICMonitoredStub
protected:
const Class* clasp_;
void* native_;
HeapPtrObject templateObject_;
GCPtrObject templateObject_;
uint32_t pcOffset_;
ICCall_ClassHook(JitCode* stubCode, ICStub* firstMonitorStub,
@ -2661,7 +2662,7 @@ class ICCall_ClassHook : public ICMonitoredStub
void* native() {
return native_;
}
HeapPtrObject& templateObject() {
GCPtrObject& templateObject() {
return templateObject_;
}
@ -2867,9 +2868,9 @@ class ICCall_StringSplit : public ICMonitoredStub
protected:
uint32_t pcOffset_;
HeapPtrString expectedStr_;
HeapPtrString expectedSep_;
HeapPtrObject templateObject_;
GCPtrString expectedStr_;
GCPtrString expectedSep_;
GCPtrObject templateObject_;
ICCall_StringSplit(JitCode* stubCode, ICStub* firstMonitorStub, uint32_t pcOffset, JSString* str,
JSString* sep, JSObject* templateObject)
@ -2891,15 +2892,15 @@ class ICCall_StringSplit : public ICMonitoredStub
return offsetof(ICCall_StringSplit, templateObject_);
}
HeapPtrString& expectedStr() {
GCPtrString& expectedStr() {
return expectedStr_;
}
HeapPtrString& expectedSep() {
GCPtrString& expectedSep() {
return expectedSep_;
}
HeapPtrObject& templateObject() {
GCPtrObject& templateObject() {
return templateObject_;
}
@ -3144,17 +3145,17 @@ class ICInstanceOf_Function : public ICStub
{
friend class ICStubSpace;
HeapPtrShape shape_;
HeapPtrObject prototypeObj_;
GCPtrShape shape_;
GCPtrObject prototypeObj_;
uint32_t slot_;
ICInstanceOf_Function(JitCode* stubCode, Shape* shape, JSObject* prototypeObj, uint32_t slot);
public:
HeapPtrShape& shape() {
GCPtrShape& shape() {
return shape_;
}
HeapPtrObject& prototypeObject() {
GCPtrObject& prototypeObject() {
return prototypeObj_;
}
uint32_t slot() const {
@ -3265,7 +3266,7 @@ class ICRest_Fallback : public ICFallbackStub
{
friend class ICStubSpace;
HeapPtrArrayObject templateObject_;
GCPtrArrayObject templateObject_;
ICRest_Fallback(JitCode* stubCode, ArrayObject* templateObject)
: ICFallbackStub(ICStub::Rest_Fallback, stubCode), templateObject_(templateObject)
@ -3274,7 +3275,7 @@ class ICRest_Fallback : public ICFallbackStub
public:
static const uint32_t MAX_OPTIMIZED_STUBS = 8;
HeapPtrArrayObject& templateObject() {
GCPtrArrayObject& templateObject() {
return templateObject_;
}

View File

@ -369,7 +369,7 @@ class IonCache
// the CacheLocations only have the lifespan of the jitcode, there is no need
// to trace or mark any of the scripts. Since JSScripts are always allocated
// tenured, and never moved, we can keep raw pointers, and there is no need
// for HeapPtrScripts here.
// for GCPtrScripts here.
struct CacheLocation {
jsbytecode* pc;
JSScript* script;

View File

@ -865,7 +865,7 @@ MacroAssembler::allocateObject(Register result, Register temp, gc::AllocKind all
if (!nDynamicSlots)
return freeListAllocate(result, temp, allocKind, fail);
callMallocStub(nDynamicSlots * sizeof(HeapValue), temp, fail);
callMallocStub(nDynamicSlots * sizeof(GCPtrValue), temp, fail);
Label failAlloc;
Label success;
@ -957,16 +957,16 @@ MacroAssembler::fillSlotsWithConstantValue(Address base, Register temp,
Address addr = base;
move32(Imm32(jv.s.payload.i32), temp);
for (unsigned i = start; i < end; ++i, addr.offset += sizeof(HeapValue))
for (unsigned i = start; i < end; ++i, addr.offset += sizeof(GCPtrValue))
store32(temp, ToPayload(addr));
addr = base;
move32(Imm32(jv.s.tag), temp);
for (unsigned i = start; i < end; ++i, addr.offset += sizeof(HeapValue))
for (unsigned i = start; i < end; ++i, addr.offset += sizeof(GCPtrValue))
store32(temp, ToType(addr));
#else
moveValue(v, temp);
for (uint32_t i = start; i < end; ++i, base.offset += sizeof(HeapValue))
for (uint32_t i = start; i < end; ++i, base.offset += sizeof(GCPtrValue))
storePtr(temp, base);
#endif
}
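
The loops above stride by sizeof(GCPtrValue) while issuing raw stores, which is sound only if the barriered wrapper is layout-identical to the bare Value it wraps. A sketch of that assumption (Value and GCPtrSketch here are simplified stand-ins):

// Sketch only: raw slot fill at wrapper-sized strides.
#include <cassert>
#include <cstdint>

struct Value { uint64_t bits; };
template <typename T> class GCPtrSketch { T value_; };  // one wrapped word, nothing else

static_assert(sizeof(GCPtrSketch<Value>) == sizeof(Value),
              "slot fill loops stride by the wrapper size");

int main() {
    GCPtrSketch<Value> slots[4];
    char* addr = reinterpret_cast<char*>(slots);
    for (unsigned i = 0; i < 4; ++i, addr += sizeof(GCPtrSketch<Value>))
        reinterpret_cast<Value*>(addr)->bits = 0;       // masm-style raw store
    assert(reinterpret_cast<Value*>(slots)[3].bits == 0);
}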

View File

@ -1498,12 +1498,12 @@ class ICTypeMonitor_SingleObject : public ICStub
{
friend class ICStubSpace;
HeapPtrObject obj_;
GCPtrObject obj_;
ICTypeMonitor_SingleObject(JitCode* stubCode, JSObject* obj);
public:
HeapPtrObject& object() {
GCPtrObject& object() {
return obj_;
}
@ -1532,12 +1532,12 @@ class ICTypeMonitor_ObjectGroup : public ICStub
{
friend class ICStubSpace;
HeapPtrObjectGroup group_;
GCPtrObjectGroup group_;
ICTypeMonitor_ObjectGroup(JitCode* stubCode, ObjectGroup* group);
public:
HeapPtrObjectGroup& group() {
GCPtrObjectGroup& group() {
return group_;
}
@ -2380,7 +2380,7 @@ class ICGetProp_Primitive : public ICMonitoredStub
protected: // Protected to silence Clang warning.
// Shape of String.prototype/Number.prototype to check for.
HeapPtrShape protoShape_;
GCPtrShape protoShape_;
// Fixed or dynamic slot offset.
uint32_t offset_;
@ -2389,7 +2389,7 @@ class ICGetProp_Primitive : public ICMonitoredStub
Shape* protoShape, uint32_t offset);
public:
HeapPtrShape& protoShape() {
GCPtrShape& protoShape() {
return protoShape_;
}
JSValueType primitiveType() const {
@ -2504,8 +2504,8 @@ class ICGetPropNativeStub : public ICMonitoredStub
class ICGetPropNativePrototypeStub : public ICGetPropNativeStub
{
// Holder and its shape.
HeapPtrObject holder_;
HeapPtrShape holderShape_;
GCPtrObject holder_;
GCPtrShape holderShape_;
protected:
ICGetPropNativePrototypeStub(ICStub::Kind kind, JitCode* stubCode, ICStub* firstMonitorStub,
@ -2513,10 +2513,10 @@ class ICGetPropNativePrototypeStub : public ICGetPropNativeStub
Shape* holderShape);
public:
HeapPtrObject& holder() {
GCPtrObject& holder() {
return holder_;
}
HeapPtrShape& holderShape() {
GCPtrShape& holderShape() {
return holderShape_;
}
static size_t offsetOfHolder() {
@ -2538,7 +2538,7 @@ class ICGetName_Global : public ICGetPropNativePrototypeStub
friend class ICStubSpace;
protected:
HeapPtrShape globalShape_;
GCPtrShape globalShape_;
ICGetName_Global(JitCode* stubCode, ICStub* firstMonitorStub, ReceiverGuard guard,
uint32_t slot, JSObject* holder, Shape* holderShape, Shape* globalShape);
@ -2547,7 +2547,7 @@ class ICGetName_Global : public ICGetPropNativePrototypeStub
static ICGetName_Global* Clone(JSContext* cx, ICStubSpace* space, ICStub* firstMonitorStub,
ICGetName_Global& other);
HeapPtrShape& globalShape() {
GCPtrShape& globalShape() {
return globalShape_;
}
static size_t offsetOfGlobalShape() {
@ -2637,12 +2637,12 @@ class ICGetPropCallGetter : public ICMonitoredStub
// sufficient, although Ion may use holder_ and holderShape_ even for own
// getters. In this case holderShape_ == receiverGuard_.shape_ (isOwnGetter
// below relies on this).
HeapPtrObject holder_;
GCPtrObject holder_;
HeapPtrShape holderShape_;
GCPtrShape holderShape_;
// Function to call.
HeapPtrFunction getter_;
GCPtrFunction getter_;
// PC offset of call
uint32_t pcOffset_;
@ -2652,13 +2652,13 @@ class ICGetPropCallGetter : public ICMonitoredStub
Shape* holderShape, JSFunction* getter, uint32_t pcOffset);
public:
HeapPtrObject& holder() {
GCPtrObject& holder() {
return holder_;
}
HeapPtrShape& holderShape() {
GCPtrShape& holderShape() {
return holderShape_;
}
HeapPtrFunction& getter() {
GCPtrFunction& getter() {
return getter_;
}
HeapReceiverGuard& receiverGuard() {
@ -2793,7 +2793,7 @@ class ICGetProp_CallNativeGlobal : public ICGetPropCallGetter
friend class ICStubSpace;
protected:
HeapPtrShape globalShape_;
GCPtrShape globalShape_;
ICGetProp_CallNativeGlobal(JitCode* stubCode, ICStub* firstMonitorStub,
ReceiverGuard receiverGuard,
@ -2809,7 +2809,7 @@ class ICGetProp_CallNativeGlobal : public ICGetPropCallGetter
ICStub* firstMonitorStub,
ICGetProp_CallNativeGlobal& other);
HeapPtrShape& globalShape() {
GCPtrShape& globalShape() {
return globalShape_;
}
static size_t offsetOfGlobalShape() {
@ -2847,7 +2847,7 @@ class ICGetPropCallDOMProxyNativeStub : public ICGetPropCallGetter
friend class ICStubSpace;
protected:
// Object shape of expected expando object. (nullptr if no expando object should be there)
HeapPtrShape expandoShape_;
GCPtrShape expandoShape_;
ICGetPropCallDOMProxyNativeStub(ICStub::Kind kind, JitCode* stubCode,
ICStub* firstMonitorStub, Shape* shape,
@ -2856,7 +2856,7 @@ class ICGetPropCallDOMProxyNativeStub : public ICGetPropCallGetter
JSFunction* getter, uint32_t pcOffset);
public:
HeapPtrShape& expandoShape() {
GCPtrShape& expandoShape() {
return expandoShape_;
}
static size_t offsetOfExpandoShape() {
@ -2952,9 +2952,9 @@ class ICGetProp_DOMProxyShadowed : public ICMonitoredStub
{
friend class ICStubSpace;
protected:
HeapPtrShape shape_;
GCPtrShape shape_;
const BaseProxyHandler* proxyHandler_;
HeapPtrPropertyName name_;
GCPtrPropertyName name_;
uint32_t pcOffset_;
ICGetProp_DOMProxyShadowed(JitCode* stubCode, ICStub* firstMonitorStub, Shape* shape,
@ -2966,10 +2966,10 @@ class ICGetProp_DOMProxyShadowed : public ICMonitoredStub
ICStub* firstMonitorStub,
ICGetProp_DOMProxyShadowed& other);
HeapPtrShape& shape() {
GCPtrShape& shape() {
return shape_;
}
HeapPtrPropertyName& name() {
GCPtrPropertyName& name() {
return name_;
}
@ -3076,11 +3076,11 @@ class ICNewArray_Fallback : public ICFallbackStub
{
friend class ICStubSpace;
HeapPtrObject templateObject_;
GCPtrObject templateObject_;
// The group used for objects created here is always available, even if the
// template object itself is not.
HeapPtrObjectGroup templateGroup_;
GCPtrObjectGroup templateGroup_;
ICNewArray_Fallback(JitCode* stubCode, ObjectGroup* templateGroup)
: ICFallbackStub(ICStub::NewArray_Fallback, stubCode),
@ -3103,7 +3103,7 @@ class ICNewArray_Fallback : public ICFallbackStub
}
};
HeapPtrObject& templateObject() {
GCPtrObject& templateObject() {
return templateObject_;
}
@ -3112,7 +3112,7 @@ class ICNewArray_Fallback : public ICFallbackStub
templateObject_ = obj;
}
HeapPtrObjectGroup& templateGroup() {
GCPtrObjectGroup& templateGroup() {
return templateGroup_;
}
@ -3128,7 +3128,7 @@ class ICNewObject_Fallback : public ICFallbackStub
{
friend class ICStubSpace;
HeapPtrObject templateObject_;
GCPtrObject templateObject_;
explicit ICNewObject_Fallback(JitCode* stubCode)
: ICFallbackStub(ICStub::NewObject_Fallback, stubCode), templateObject_(nullptr)
@ -3148,7 +3148,7 @@ class ICNewObject_Fallback : public ICFallbackStub
}
};
HeapPtrObject& templateObject() {
GCPtrObject& templateObject() {
return templateObject_;
}

View File

@ -711,7 +711,7 @@ JSFunction::trace(JSTracer* trc)
{
if (isExtended()) {
TraceRange(trc, ArrayLength(toExtended()->extendedSlots),
(HeapValue*)toExtended()->extendedSlots, "nativeReserved");
(GCPtrValue*)toExtended()->extendedSlots, "nativeReserved");
}
TraceNullableEdge(trc, &atom_, "atom");

View File

@ -144,7 +144,7 @@ class JSFunction : public js::NativeObject
} i;
void* nativeOrScript;
} u;
js::HeapPtrAtom atom_; /* name for diagnostics and decompiling */
js::GCPtrAtom atom_; /* name for diagnostics and decompiling */
public:
@ -352,12 +352,12 @@ class JSFunction : public js::NativeObject
void setEnvironment(JSObject* obj) {
MOZ_ASSERT(isInterpreted() && !isBeingParsed());
*reinterpret_cast<js::HeapPtrObject*>(&u.i.env_) = obj;
*reinterpret_cast<js::GCPtrObject*>(&u.i.env_) = obj;
}
void initEnvironment(JSObject* obj) {
MOZ_ASSERT(isInterpreted() && !isBeingParsed());
reinterpret_cast<js::HeapPtrObject*>(&u.i.env_)->init(obj);
reinterpret_cast<js::GCPtrObject*>(&u.i.env_)->init(obj);
}
void unsetEnvironment() {
@ -564,9 +564,9 @@ class JSFunction : public js::NativeObject
size_t getBoundFunctionArgumentCount() const;
private:
js::HeapPtrScript& mutableScript() {
js::GCPtrScript& mutableScript() {
MOZ_ASSERT(hasScript());
return *(js::HeapPtrScript*)&u.i.s.script_;
return *(js::GCPtrScript*)&u.i.s.script_;
}
inline js::FunctionExtended* toExtended();
@ -706,7 +706,7 @@ class FunctionExtended : public JSFunction
static inline size_t offsetOfExtendedSlot(unsigned which) {
MOZ_ASSERT(which < NUM_EXTENDED_SLOTS);
return offsetof(FunctionExtended, extendedSlots) + which * sizeof(HeapValue);
return offsetof(FunctionExtended, extendedSlots) + which * sizeof(GCPtrValue);
}
static inline size_t offsetOfArrowNewTargetSlot() {
return offsetOfExtendedSlot(ARROW_NEWTARGET_SLOT);
@ -716,7 +716,7 @@ class FunctionExtended : public JSFunction
friend class JSFunction;
/* Reserved slots available for storage by particular native functions. */
HeapValue extendedSlots[NUM_EXTENDED_SLOTS];
GCPtrValue extendedSlots[NUM_EXTENDED_SLOTS];
};
extern bool

View File

@ -1718,7 +1718,7 @@ GCRuntime::addRoot(Value* vp, const char* name)
* cases.
*/
if (isIncrementalGCInProgress())
HeapValue::writeBarrierPre(*vp);
GCPtrValue::writeBarrierPre(*vp);
return rootsHash.put(vp, name);
}
@ -2130,7 +2130,7 @@ RelocateCell(Zone* zone, TenuredCell* src, AllocKind thingKind, size_t thingSize
// For copy-on-write objects that own their elements, fix up the
// owner pointer to point to the relocated object.
if (srcNative->denseElementsAreCopyOnWrite()) {
HeapPtrNativeObject& owner = dstNative->getElementsHeader()->ownerObject();
GCPtrNativeObject& owner = dstNative->getElementsHeader()->ownerObject();
if (owner == srcNative)
owner = dstNative;
}
@ -7589,7 +7589,7 @@ JS::IncrementalReferenceBarrier(GCCellPtr thing)
JS_PUBLIC_API(void)
JS::IncrementalValueBarrier(const Value& v)
{
js::HeapValue::writeBarrierPre(v);
js::GCPtrValue::writeBarrierPre(v);
}
JS_PUBLIC_API(void)

View File

@ -55,7 +55,7 @@ static const gc::AllocKind ITERATOR_FINALIZE_KIND = gc::AllocKind::OBJECT2_BACKG
void
NativeIterator::trace(JSTracer* trc)
{
for (HeapPtrFlatString* str = begin(); str < end(); str++)
for (GCPtrFlatString* str = begin(); str < end(); str++)
TraceNullableEdge(trc, str, "prop");
TraceNullableEdge(trc, &obj, "obj");
@ -608,7 +608,7 @@ NativeIterator::allocateIterator(JSContext* cx, uint32_t numGuards, uint32_t ple
void** extra = reinterpret_cast<void**>(ni + 1);
PodZero(ni);
PodZero(extra, extraLength);
ni->props_array = ni->props_cursor = reinterpret_cast<HeapPtrFlatString*>(extra);
ni->props_array = ni->props_cursor = reinterpret_cast<GCPtrFlatString*>(extra);
ni->props_end = ni->props_array + plength;
return ni;
}
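
allocateIterator above uses a single-allocation layout: the NativeIterator header and its GCPtrFlatString property array live in one block, with the array pointers aimed just past the header (the reinterpret_cast of ni + 1). A minimal sketch of the same layout with plain pointers (IterSketch is a hypothetical stand-in):

// Sketch only: header plus inline trailing array in one allocation.
#include <cassert>
#include <cstdlib>
#include <new>

struct Str {};
struct IterSketch {
    Str** props_array;
    Str** props_cursor;
    Str** props_end;

    static IterSketch* allocate(unsigned plength) {
        void* mem = std::calloc(1, sizeof(IterSketch) + plength * sizeof(Str*));
        if (!mem)
            return nullptr;
        auto* ni = new (mem) IterSketch;
        ni->props_array = ni->props_cursor =
            reinterpret_cast<Str**>(ni + 1);   // array starts right after the header
        ni->props_end = ni->props_array + plength;
        return ni;
    }
};

int main() {
    IterSketch* ni = IterSketch::allocate(8);
    assert(ni && ni->props_end - ni->props_array == 8);
    std::free(ni);
}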
@ -1293,9 +1293,9 @@ SuppressDeletedPropertyHelper(JSContext* cx, HandleObject obj, StringPredicate p
/* This only works for identified suppressed keys, not values. */
if (ni->isKeyIter() && ni->obj == obj && ni->props_cursor < ni->props_end) {
/* Check whether id is still to come. */
HeapPtrFlatString* props_cursor = ni->current();
HeapPtrFlatString* props_end = ni->end();
for (HeapPtrFlatString* idp = props_cursor; idp < props_end; ++idp) {
GCPtrFlatString* props_cursor = ni->current();
GCPtrFlatString* props_end = ni->end();
for (GCPtrFlatString* idp = props_cursor; idp < props_end; ++idp) {
if (predicate(*idp)) {
/*
* Check whether another property along the prototype chain
@ -1335,7 +1335,7 @@ SuppressDeletedPropertyHelper(JSContext* cx, HandleObject obj, StringPredicate p
if (idp == props_cursor) {
ni->incCursor();
} else {
for (HeapPtrFlatString* p = idp; p + 1 != props_end; p++)
for (GCPtrFlatString* p = idp; p + 1 != props_end; p++)
*p = *(p + 1);
ni->props_end = ni->end() - 1;

View File

@ -32,11 +32,11 @@ class PropertyIteratorObject;
struct NativeIterator
{
HeapPtrObject obj; // Object being iterated.
JSObject* iterObj_; // Internal iterator object.
HeapPtrFlatString* props_array;
HeapPtrFlatString* props_cursor;
HeapPtrFlatString* props_end;
GCPtrObject obj; // Object being iterated.
JSObject* iterObj_; // Internal iterator object.
GCPtrFlatString* props_array;
GCPtrFlatString* props_cursor;
GCPtrFlatString* props_end;
HeapReceiverGuard* guard_array;
uint32_t guard_length;
uint32_t guard_key;
@ -52,11 +52,11 @@ struct NativeIterator
return (flags & JSITER_FOREACH) == 0;
}
inline HeapPtrFlatString* begin() const {
inline GCPtrFlatString* begin() const {
return props_array;
}
inline HeapPtrFlatString* end() const {
inline GCPtrFlatString* end() const {
return props_end;
}
@ -67,7 +67,7 @@ struct NativeIterator
JSObject* iterObj() const {
return iterObj_;
}
HeapPtrFlatString* current() const {
GCPtrFlatString* current() const {
MOZ_ASSERT(props_cursor < props_end);
return props_cursor;
}

View File

@ -3862,7 +3862,7 @@ JSObject::traceChildren(JSTracer* trc)
do {
if (nobj->denseElementsAreCopyOnWrite()) {
HeapPtrNativeObject& owner = nobj->getElementsHeader()->ownerObject();
GCPtrNativeObject& owner = nobj->getElementsHeader()->ownerObject();
if (owner != nobj) {
TraceEdge(trc, &owner, "objectElementsOwner");
break;

View File

@ -98,7 +98,7 @@ bool SetImmutablePrototype(js::ExclusiveContext* cx, JS::HandleObject obj, bool*
class JSObject : public js::gc::Cell
{
protected:
js::HeapPtrObjectGroup group_;
js::GCPtrObjectGroup group_;
private:
friend class js::Shape;
@ -352,7 +352,7 @@ class JSObject : public js::gc::Cell
inline js::ObjectGroup* getGroup(JSContext* cx);
const js::HeapPtrObjectGroup& groupFromGC() const {
const js::GCPtrObjectGroup& groupFromGC() const {
/* Direct field access for use by GC. */
return group_;
}

View File

@ -576,7 +576,7 @@ static inline uint32_t
FindScopeObjectIndex(JSScript* script, NestedStaticScope& scope)
{
ObjectArray* objects = script->objects();
HeapPtrObject* vector = objects->vector;
GCPtrObject* vector = objects->vector;
unsigned length = objects->length;
for (unsigned i = 0; i < length; ++i) {
if (vector[i] == &scope)
@ -992,7 +992,7 @@ js::XDRScript(XDRState<mode>* xdr, HandleObject enclosingScopeArg, HandleScript
}
if (nconsts) {
HeapValue* vector = script->consts()->vector;
GCPtrValue* vector = script->consts()->vector;
RootedValue val(cx);
for (i = 0; i != nconsts; ++i) {
if (mode == XDR_ENCODE)
@ -1010,7 +1010,7 @@ js::XDRScript(XDRState<mode>* xdr, HandleObject enclosingScopeArg, HandleScript
* after the enclosing block has been XDR'd.
*/
for (i = 0; i != nobjects; ++i) {
HeapPtrObject* objp = &script->objects()->vector[i];
GCPtrObject* objp = &script->objects()->vector[i];
XDRClassKind classk;
if (mode == XDR_ENCODE) {
@ -1289,7 +1289,7 @@ js::XDRLazyScript(XDRState<mode>* xdr, HandleObject enclosingScope, HandleScript
// Code inner functions.
{
RootedFunction func(cx);
HeapPtrFunction* innerFunctions = lazy->innerFunctions();
GCPtrFunction* innerFunctions = lazy->innerFunctions();
size_t numInnerFunctions = lazy->numInnerFunctions();
for (size_t i = 0; i < numInnerFunctions; i++) {
if (mode == XDR_ENCODE)
@ -2447,12 +2447,12 @@ js::SharedScriptData::new_(ExclusiveContext* cx, uint32_t codeLength,
/*
* Call constructors to initialize the storage that will be accessed as a
* HeapPtrAtom array via atoms().
* GCPtrAtom array via atoms().
*/
HeapPtrAtom* atoms = entry->atoms();
GCPtrAtom* atoms = entry->atoms();
MOZ_ASSERT(reinterpret_cast<uintptr_t>(atoms) % sizeof(JSAtom*) == 0);
for (unsigned i = 0; i < natoms; ++i)
new (&atoms[i]) HeapPtrAtom();
new (&atoms[i]) GCPtrAtom();
return entry;
}
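
The placement-new loop above exists because the atom slots are raw malloc'd storage: each barriered pointer's constructor must be run explicitly before the slot is used. A standalone sketch of the pattern (GCPtrAtomSketch is an illustrative stand-in):

// Sketch only: constructing barriered pointers in raw, aligned storage.
#include <cassert>
#include <cstdint>
#include <new>

struct Atom {};
class GCPtrAtomSketch {
    Atom* value_ = nullptr;   // default ctor leaves a safe null
  public:
    void init(Atom* a) { value_ = a; }
    Atom* get() const { return value_; }
};

int main() {
    alignas(Atom*) unsigned char storage[4 * sizeof(GCPtrAtomSketch)];
    auto* atoms = reinterpret_cast<GCPtrAtomSketch*>(storage);
    assert(reinterpret_cast<uintptr_t>(atoms) % sizeof(Atom*) == 0);
    for (unsigned i = 0; i < 4; ++i)
        new (&atoms[i]) GCPtrAtomSketch();   // run the ctor on raw storage
    assert(atoms[0].get() == nullptr);
}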
@ -2641,17 +2641,17 @@ JS_STATIC_ASSERT(KEEPS_JSVAL_ALIGNMENT(TryNoteArray));
JS_STATIC_ASSERT(KEEPS_JSVAL_ALIGNMENT(BlockScopeArray));
/* These assertions ensure there is no padding required between array elements. */
JS_STATIC_ASSERT(HAS_JSVAL_ALIGNMENT(HeapValue));
JS_STATIC_ASSERT(NO_PADDING_BETWEEN_ENTRIES(HeapValue, HeapPtrObject));
JS_STATIC_ASSERT(NO_PADDING_BETWEEN_ENTRIES(HeapPtrObject, HeapPtrObject));
JS_STATIC_ASSERT(NO_PADDING_BETWEEN_ENTRIES(HeapPtrObject, JSTryNote));
JS_STATIC_ASSERT(HAS_JSVAL_ALIGNMENT(GCPtrValue));
JS_STATIC_ASSERT(NO_PADDING_BETWEEN_ENTRIES(GCPtrValue, GCPtrObject));
JS_STATIC_ASSERT(NO_PADDING_BETWEEN_ENTRIES(GCPtrObject, GCPtrObject));
JS_STATIC_ASSERT(NO_PADDING_BETWEEN_ENTRIES(GCPtrObject, JSTryNote));
JS_STATIC_ASSERT(NO_PADDING_BETWEEN_ENTRIES(JSTryNote, uint32_t));
JS_STATIC_ASSERT(NO_PADDING_BETWEEN_ENTRIES(uint32_t, uint32_t));
JS_STATIC_ASSERT(NO_PADDING_BETWEEN_ENTRIES(HeapValue, BlockScopeNote));
JS_STATIC_ASSERT(NO_PADDING_BETWEEN_ENTRIES(GCPtrValue, BlockScopeNote));
JS_STATIC_ASSERT(NO_PADDING_BETWEEN_ENTRIES(BlockScopeNote, BlockScopeNote));
JS_STATIC_ASSERT(NO_PADDING_BETWEEN_ENTRIES(JSTryNote, BlockScopeNote));
JS_STATIC_ASSERT(NO_PADDING_BETWEEN_ENTRIES(HeapPtrObject, BlockScopeNote));
JS_STATIC_ASSERT(NO_PADDING_BETWEEN_ENTRIES(GCPtrObject, BlockScopeNote));
JS_STATIC_ASSERT(NO_PADDING_BETWEEN_ENTRIES(BlockScopeNote, uint32_t));
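These asserts guarantee that the arrays can be packed back-to-back in one flat allocation. The macro bodies are not shown in this hunk; a hedged guess at the kind of check they perform, using ordinary sizeof/alignof arithmetic (NO_PADDING_BETWEEN and TryNoteLike are illustrative, not the real macros):

```cpp
#include <cstdint>

// Hypothetical entry type with the rough shape of a try note.
struct TryNoteLike { uint8_t kind; uint8_t pad[3]; uint32_t start; };

// If B's alignment divides sizeof(A), an array of A can be followed
// immediately by an array of B with no padding bytes in between
// (assuming the A section itself starts suitably aligned).
#define NO_PADDING_BETWEEN(A, B) (sizeof(A) % alignof(B) == 0)

static_assert(NO_PADDING_BETWEEN(uint64_t, uint32_t), "u32 may follow u64");
static_assert(NO_PADDING_BETWEEN(TryNoteLike, uint32_t), "u32 may follow note");

int main() { return 0; }
```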
static inline size_t
@@ -2780,13 +2780,13 @@ JSScript::partiallyInit(ExclusiveContext* cx, HandleScript script, uint32_t ncon
if (nconsts != 0) {
MOZ_ASSERT(reinterpret_cast<uintptr_t>(cursor) % sizeof(JS::Value) == 0);
script->consts()->length = nconsts;
script->consts()->vector = (HeapValue*)cursor;
script->consts()->vector = (GCPtrValue*)cursor;
cursor += nconsts * sizeof(script->consts()->vector[0]);
}
if (nobjects != 0) {
script->objects()->length = nobjects;
script->objects()->vector = (HeapPtrObject*)cursor;
script->objects()->vector = (GCPtrObject*)cursor;
cursor += nobjects * sizeof(script->objects()->vector[0]);
}
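partiallyInit hands out typed sub-arrays by walking a byte cursor through one flat allocation, with the most strictly aligned arrays placed first. A self-contained sketch of the technique (Val and Obj are stand-ins for the barriered element types):

```cpp
#include <cstdint>
#include <cstdio>
#include <cstdlib>

struct Val { uint64_t bits; };  // stand-in for a barriered value
struct Obj { void* p; };        // stand-in for a barriered object pointer

int main() {
    const uint32_t nconsts = 3, nobjects = 2;
    const size_t bytes = nconsts * sizeof(Val) + nobjects * sizeof(Obj);
    uint8_t* data = static_cast<uint8_t*>(std::calloc(1, bytes));
    if (!data)
        return 1;

    // Walk a byte cursor through the block, handing out typed sub-arrays.
    // The more strictly aligned Val array is laid out first so each
    // reinterpret_cast lands on a properly aligned address.
    uint8_t* cursor = data;
    Val* consts = reinterpret_cast<Val*>(cursor);
    cursor += nconsts * sizeof(Val);
    Obj* objects = reinterpret_cast<Obj*>(cursor);
    cursor += nobjects * sizeof(Obj);

    consts[0].bits = 42;        // both arrays are usable independently...
    objects[0].p = &consts[0];  // ...but live in the single allocation

    // The cursor must land exactly at the end of the allocation.
    std::printf("used %td of %zu bytes\n", cursor - data, bytes);
    std::free(data);
}
```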
@@ -3484,7 +3484,7 @@ js::detail::CopyScript(JSContext* cx, HandleObject scriptStaticScope, HandleScri
AutoObjectVector objects(cx);
if (nobjects != 0) {
HeapPtrObject* vector = src->objects()->vector;
GCPtrObject* vector = src->objects()->vector;
for (unsigned i = 0; i < nobjects; i++) {
RootedObject obj(cx, vector[i]);
RootedObject clone(cx);
@@ -3600,13 +3600,13 @@ js::detail::CopyScript(JSContext* cx, HandleObject scriptStaticScope, HandleScri
dst->isDefaultClassConstructor_ = src->isDefaultClassConstructor();
if (nconsts != 0) {
HeapValue* vector = Rebase<HeapValue>(dst, src, src->consts()->vector);
GCPtrValue* vector = Rebase<GCPtrValue>(dst, src, src->consts()->vector);
dst->consts()->vector = vector;
for (unsigned i = 0; i < nconsts; ++i)
MOZ_ASSERT_IF(vector[i].isMarkable(), vector[i].toString()->isAtom());
}
if (nobjects != 0) {
HeapPtrObject* vector = Rebase<HeapPtrObject>(dst, src, src->objects()->vector);
GCPtrObject* vector = Rebase<GCPtrObject>(dst, src, src->objects()->vector);
dst->objects()->vector = vector;
for (unsigned i = 0; i < nobjects; ++i)
vector[i].init(&objects[i]->as<NativeObject>());
@@ -4283,7 +4283,7 @@ LazyScript::CreateRaw(ExclusiveContext* cx, HandleFunction fun,
p.treatAsRunOnce = false;
size_t bytes = (p.numFreeVariables * sizeof(FreeVariable))
+ (p.numInnerFunctions * sizeof(HeapPtrFunction));
+ (p.numInnerFunctions * sizeof(GCPtrFunction));
ScopedJSFreePtr<uint8_t> table(bytes ? fun->zone()->pod_malloc<uint8_t>(bytes) : nullptr);
if (bytes && !table) {
@@ -4352,7 +4352,7 @@ LazyScript::Create(ExclusiveContext* cx, HandleFunction fun,
for (i = 0, num = res->numFreeVariables(); i < num; i++)
variables[i] = FreeVariable(dummyAtom);
HeapPtrFunction* functions = res->innerFunctions();
GCPtrFunction* functions = res->innerFunctions();
for (i = 0, num = res->numInnerFunctions(); i < num; i++)
functions[i].init(dummyFun);

View File

@@ -127,17 +127,17 @@ struct BlockScopeNote {
};
struct ConstArray {
js::HeapValue* vector; /* array of indexed constant values */
uint32_t length;
js::GCPtrValue* vector; // array of indexed constant values
uint32_t length;
};
struct ObjectArray {
js::HeapPtrObject* vector; // Array of indexed objects.
uint32_t length; // Count of indexed objects.
js::GCPtrObject* vector; // Array of indexed objects.
uint32_t length; // Count of indexed objects.
};
struct TryNoteArray {
JSTryNote* vector; // Array of indexed try notes.
JSTryNote* vector; // Array of indexed try notes.
uint32_t length; // Count of indexed try notes.
};
@@ -151,7 +151,7 @@ class YieldOffsetArray {
detail::CopyScript(JSContext* cx, HandleObject scriptStaticScope, HandleScript src,
HandleScript dst);
uint32_t* vector_; // Array of bytecode offsets.
uint32_t* vector_; // Array of bytecode offsets.
uint32_t length_; // Count of bytecode offsets.
public:
@@ -986,7 +986,7 @@ class JSScript : public js::gc::TenuredCell
uint8_t* data; /* pointer to variable-length data array (see
comment above Create() for details) */
js::HeapPtrAtom* atoms; /* maps immediate index to literal struct */
js::GCPtrAtom* atoms; /* maps immediate index to literal struct */
JSCompartment* compartment_;
@@ -998,11 +998,11 @@ class JSScript : public js::gc::TenuredCell
//
// (When we clone a JSScript into a new compartment, we don't clone its
// source object. Instead, the clone refers to a wrapper.)
js::HeapPtrObject sourceObject_;
js::GCPtrObject sourceObject_;
js::HeapPtrFunction function_;
js::HeapPtr<js::ModuleObject*> module_;
js::HeapPtrObject enclosingStaticScope_;
js::GCPtrFunction function_;
js::GCPtrModuleObject module_;
js::GCPtrObject enclosingStaticScope_;
/*
* Information attached by Ion. Next to a valid IonScript this could be
@@ -1810,12 +1810,12 @@ class JSScript : public js::gc::TenuredCell
size_t natoms() const { return natoms_; }
js::HeapPtrAtom& getAtom(size_t index) const {
js::GCPtrAtom& getAtom(size_t index) const {
MOZ_ASSERT(index < natoms());
return atoms[index];
}
js::HeapPtrAtom& getAtom(jsbytecode* pc) const {
js::GCPtrAtom& getAtom(jsbytecode* pc) const {
MOZ_ASSERT(containsPC(pc) && containsPC(pc + sizeof(uint32_t)));
return getAtom(GET_UINT32_INDEX(pc));
}
@@ -2149,15 +2149,15 @@ class LazyScript : public gc::TenuredCell
WeakRef<JSScript*> script_;
// Original function with which the lazy script is associated.
HeapPtrFunction function_;
GCPtrFunction function_;
// Function or block chain in which the script is nested, or nullptr.
HeapPtrObject enclosingScope_;
GCPtrObject enclosingScope_;
// ScriptSourceObject. We leave this set to nullptr until we generate
// bytecode for our immediate parent. This is never a CCW; we don't clone
// LazyScripts into other compartments.
HeapPtrObject sourceObject_;
GCPtrObject sourceObject_;
// Heap allocated table with any free variables or inner functions.
void* table_;
@@ -2291,8 +2291,8 @@ class LazyScript : public gc::TenuredCell
uint32_t numInnerFunctions() const {
return p_.numInnerFunctions;
}
HeapPtrFunction* innerFunctions() {
return (HeapPtrFunction*)&freeVariables()[numFreeVariables()];
GCPtrFunction* innerFunctions() {
return (GCPtrFunction*)&freeVariables()[numFreeVariables()];
}
GeneratorKind generatorKind() const { return GeneratorKindFromBits(p_.generatorKindBits); }
@@ -2422,10 +2422,10 @@ struct SharedScriptData
static SharedScriptData* new_(ExclusiveContext* cx, uint32_t codeLength,
uint32_t srcnotesLength, uint32_t natoms);
HeapPtrAtom* atoms() {
GCPtrAtom* atoms() {
if (!natoms)
return nullptr;
return reinterpret_cast<HeapPtrAtom*>(data + length - sizeof(JSAtom*) * natoms);
return reinterpret_cast<GCPtrAtom*>(data + length - sizeof(JSAtom*) * natoms);
}
static SharedScriptData* fromBytecode(const jsbytecode* bytecode) {

View File

@@ -95,7 +95,7 @@ class WeakMapBase : public mozilla::LinkedListElement<WeakMapBase>
protected:
// Object that this weak map is part of, if any.
HeapPtrObject memberOf;
GCPtrObject memberOf;
// Zone containing this weak map.
JS::Zone* zone;

View File

@@ -33,7 +33,7 @@ inline void
ArgumentsObject::setElement(JSContext* cx, uint32_t i, const Value& v)
{
MOZ_ASSERT(!isElementDeleted(i));
HeapValue& lhs = data()->args[i];
GCPtrValue& lhs = data()->args[i];
if (IsMagicScopeSlotValue(lhs)) {
uint32_t slot = SlotFromMagicScopeSlotValue(lhs);
CallObject& callobj = getFixedSlot(MAYBE_CALL_SLOT).toObject().as<CallObject>();

View File

@@ -21,7 +21,7 @@ using namespace js;
using namespace js::gc;
static void
CopyStackFrameArguments(const AbstractFramePtr frame, HeapValue* dst, unsigned totalArgs)
CopyStackFrameArguments(const AbstractFramePtr frame, GCPtrValue* dst, unsigned totalArgs)
{
MOZ_ASSERT_IF(frame.isInterpreterFrame(), !frame.asInterpreterFrame()->runningInJit());
@@ -68,7 +68,7 @@ struct CopyFrameArgs
: frame_(frame)
{ }
void copyArgs(JSContext*, HeapValue* dst, unsigned totalArgs) const {
void copyArgs(JSContext*, GCPtrValue* dst, unsigned totalArgs) const {
CopyStackFrameArguments(frame_, dst, totalArgs);
}
@@ -90,7 +90,7 @@ struct CopyJitFrameArgs
: frame_(frame), callObj_(callObj)
{ }
void copyArgs(JSContext*, HeapValue* dstBase, unsigned totalArgs) const {
void copyArgs(JSContext*, GCPtrValue* dstBase, unsigned totalArgs) const {
unsigned numActuals = frame_->numActualArgs();
unsigned numFormals = jit::CalleeTokenToFunction(frame_->calleeToken())->nargs();
MOZ_ASSERT(numActuals <= totalArgs);
@@ -100,12 +100,12 @@ struct CopyJitFrameArgs
/* Copy all arguments. */
Value* src = frame_->argv() + 1; /* +1 to skip this. */
Value* end = src + numActuals;
HeapValue* dst = dstBase;
GCPtrValue* dst = dstBase;
while (src != end)
(dst++)->init(*src++);
if (numActuals < numFormals) {
HeapValue* dstEnd = dstBase + totalArgs;
GCPtrValue* dstEnd = dstBase + totalArgs;
while (dst != dstEnd)
(dst++)->init(UndefinedValue());
}
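The loop pair above copies every actual argument and then pads any unfilled formals with undefined. A standalone sketch of that fill-then-pad shape (Value here is a stand-in, not JS::Value):

```cpp
#include <algorithm>
#include <cstdio>

struct Value { int v; bool undef; };           // stand-in, not JS::Value
static Value Undefined() { return {0, true}; }

int main() {
    const Value actuals[] = {{10, false}, {20, false}};
    const unsigned numActuals = 2, numFormals = 4;
    const unsigned totalArgs = std::max(numActuals, numFormals);

    Value dst[8];
    Value* out = dst;
    for (unsigned i = 0; i < numActuals; i++)  // copy every actual first
        *out++ = actuals[i];
    Value* end = dst + totalArgs;              // then pad unfilled formals
    while (out != end)
        *out++ = Undefined();

    for (unsigned i = 0; i < totalArgs; i++)
        std::printf("arg %u: %s\n", i, dst[i].undef ? "undefined" : "actual");
}
```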
@@ -128,7 +128,7 @@ struct CopyScriptFrameIterArgs
: iter_(iter)
{ }
void copyArgs(JSContext* cx, HeapValue* dstBase, unsigned totalArgs) const {
void copyArgs(JSContext* cx, GCPtrValue* dstBase, unsigned totalArgs) const {
/* Copy actual arguments. */
iter_.unaliasedForEachActual(cx, CopyToHeap(dstBase));
@@ -140,8 +140,8 @@ struct CopyScriptFrameIterArgs
MOZ_ASSERT(Max(numActuals, numFormals) == totalArgs);
if (numActuals < numFormals) {
HeapValue* dst = dstBase + numActuals;
HeapValue* dstEnd = dstBase + totalArgs;
GCPtrValue* dst = dstBase + numActuals;
GCPtrValue* dstEnd = dstBase + totalArgs;
while (dst != dstEnd)
(dst++)->init(UndefinedValue());
}

View File

@@ -42,7 +42,7 @@ struct ArgumentsData
* arguments.callee, or MagicValue(JS_OVERWRITTEN_CALLEE) if
* arguments.callee has been modified.
*/
HeapValue callee;
GCPtrValue callee;
/* The script for the function containing this arguments object. */
JSScript* script;
@@ -61,16 +61,16 @@ struct ArgumentsData
* canonical value so any element access to the arguments object should load
* the value out of the CallObject (which is pointed to by MAYBE_CALL_SLOT).
*/
HeapValue args[1];
GCPtrValue args[1];
/* For jit use: */
static ptrdiff_t offsetOfArgs() { return offsetof(ArgumentsData, args); }
/* Iterate args. */
HeapValue* begin() { return args; }
const HeapValue* begin() const { return args; }
HeapValue* end() { return args + numArgs; }
const HeapValue* end() const { return args + numArgs; }
GCPtrValue* begin() { return args; }
const GCPtrValue* begin() const { return args; }
GCPtrValue* end() { return args + numArgs; }
const GCPtrValue* end() const { return args + numArgs; }
};
// Maximum supported value of arguments.length. This bounds the maximum
@@ -272,7 +272,7 @@ class ArgumentsObject : public NativeObject
void setArg(unsigned i, const Value& v) {
MOZ_ASSERT(i < data()->numArgs);
HeapValue& lhs = data()->args[i];
GCPtrValue& lhs = data()->args[i];
MOZ_ASSERT(!lhs.isMagic());
lhs = v;
}

View File

@@ -2747,7 +2747,7 @@ Debugger::markAllIteratively(GCMarker* trc)
* - it isn't already marked
* - it actually has hooks that might be called
*/
HeapPtrNativeObject& dbgobj = dbg->toJSObjectRef();
GCPtrNativeObject& dbgobj = dbg->toJSObjectRef();
if (!dbgobj->zone()->isGCMarking())
continue;
@@ -2796,7 +2796,7 @@ Debugger::markAll(JSTracer* trc)
for (WeakGlobalObjectSet::Enum e(dbg->debuggees); !e.empty(); e.popFront())
TraceManuallyBarrieredEdge(trc, e.mutableFront().unsafeGet(), "Global Object");
HeapPtrNativeObject& dbgobj = dbg->toJSObjectRef();
GCPtrNativeObject& dbgobj = dbg->toJSObjectRef();
TraceEdge(trc, &dbgobj, "Debugger Object");
dbg->scripts.trace(trc);

View File

@@ -350,10 +350,10 @@ class Debugger : private mozilla::LinkedListElement<Debugger>
static void writeBarrierPost(Debugger** vp, Debugger* prev, Debugger* next) {}
private:
HeapPtrNativeObject object; /* The Debugger object. Strong reference. */
WeakGlobalObjectSet debuggees; /* Debuggee globals. Cross-compartment weak references. */
GCPtrNativeObject object; /* The Debugger object. Strong reference. */
WeakGlobalObjectSet debuggees; /* Debuggee globals. Cross-compartment weak references. */
JS::ZoneSet debuggeeZones; /* Set of zones that we have debuggees in. */
js::HeapPtrObject uncaughtExceptionHook; /* Strong reference. */
js::GCPtrObject uncaughtExceptionHook; /* Strong reference. */
bool enabled;
bool allowUnobservedAsmJS;
@@ -755,8 +755,8 @@ class Debugger : private mozilla::LinkedListElement<Debugger>
~Debugger();
bool init(JSContext* cx);
inline const js::HeapPtrNativeObject& toJSObject() const;
inline js::HeapPtrNativeObject& toJSObjectRef();
inline const js::GCPtrNativeObject& toJSObject() const;
inline js::GCPtrNativeObject& toJSObjectRef();
static inline Debugger* fromJSObject(const JSObject* obj);
static Debugger* fromChildJSObject(JSObject* obj);
@@ -1163,14 +1163,14 @@ Debugger::fromOnNewGlobalObjectWatchersLink(JSCList* link) {
return reinterpret_cast<Debugger*>(p - offsetof(Debugger, onNewGlobalObjectWatchersLink));
}
const js::HeapPtrNativeObject&
const js::GCPtrNativeObject&
Debugger::toJSObject() const
{
MOZ_ASSERT(object);
return object;
}
js::HeapPtrNativeObject&
js::GCPtrNativeObject&
Debugger::toJSObjectRef()
{
MOZ_ASSERT(object);

View File

@@ -84,7 +84,7 @@ ObjectElements::ConvertElementsToDoubles(JSContext* cx, uintptr_t elementsPtr)
/* static */ bool
ObjectElements::MakeElementsCopyOnWrite(ExclusiveContext* cx, NativeObject* obj)
{
static_assert(sizeof(HeapSlot) >= sizeof(HeapPtrObject),
static_assert(sizeof(HeapSlot) >= sizeof(GCPtrObject),
"there must be enough room for the owner object pointer at "
"the end of the elements");
if (!obj->ensureElements(cx, obj->getDenseInitializedLength() + 1))

View File

@@ -53,7 +53,7 @@ Debug_SetValueRangeToCrashOnTouch(Value* vec, size_t len)
}
static MOZ_ALWAYS_INLINE void
Debug_SetValueRangeToCrashOnTouch(HeapValue* vec, size_t len)
Debug_SetValueRangeToCrashOnTouch(GCPtrValue* vec, size_t len)
{
#ifdef DEBUG
Debug_SetValueRangeToCrashOnTouch((Value*) vec, len);
@@ -286,9 +286,9 @@ class ObjectElements
return flags & SHARED_MEMORY;
}
HeapPtrNativeObject& ownerObject() const {
GCPtrNativeObject& ownerObject() const {
MOZ_ASSERT(isCopyOnWrite());
return *(HeapPtrNativeObject*)(&elements()[initializedLength]);
return *(GCPtrNativeObject*)(&elements()[initializedLength]);
}
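ownerObject() works because copy-on-write elements reserve the slot just past initializedLength for the owning object's pointer, which is what the sizeof static_assert earlier in this patch guarantees. A standalone model of the trick (Slot and Owner are hypothetical):

```cpp
#include <cstdint>
#include <cstdio>

struct Slot { uint64_t bits; };      // stand-in for HeapSlot
struct Owner { const char* name; };  // stand-in for the owning object

// The owner pointer lives in the first slot past the initialized
// elements, so a slot must be at least pointer-sized.
static_assert(sizeof(Slot) >= sizeof(Owner*), "owner must fit in a slot");

int main() {
    Slot elements[8] = {};
    const uint32_t initializedLength = 5;
    Owner owner{"sharedArray"};

    // Stash the owner just past the last initialized element...
    *reinterpret_cast<Owner**>(&elements[initializedLength]) = &owner;

    // ...and read it back the same way an ownerObject() accessor would.
    Owner* read = *reinterpret_cast<Owner**>(&elements[initializedLength]);
    std::printf("owner: %s\n", read->name);
}
```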
static int offsetOfFlags() {
@@ -379,7 +379,7 @@ class NativeObject : public JSObject
{
protected:
// Property layout description and other state.
HeapPtrShape shape_;
GCPtrShape shape_;
/* Slots for object properties. */
js::HeapSlot* slots_;

View File

@@ -84,7 +84,7 @@ class ObjectGroup : public gc::TenuredCell
const Class* clasp_;
/* Prototype shared by objects in this group. */
HeapPtr<TaggedProto> proto_;
GCPtr<TaggedProto> proto_;
/* Compartment shared by objects in this group. */
JSCompartment* compartment_;
@@ -103,11 +103,11 @@ class ObjectGroup : public gc::TenuredCell
return proto_.isDynamic();
}
const HeapPtr<TaggedProto>& proto() const {
const GCPtr<TaggedProto>& proto() const {
return proto_;
}
HeapPtr<TaggedProto>& proto() {
GCPtr<TaggedProto>& proto() {
return proto_;
}
@@ -280,7 +280,7 @@ class ObjectGroup : public gc::TenuredCell
// Identifier for this property, JSID_VOID for the aggregate integer
// index property, or JSID_EMPTY for properties holding constraints
// listening to changes in the group's state.
HeapId id;
GCPtrId id;
// Possible own types for this property.
HeapTypeSet types;

View File

@@ -178,20 +178,20 @@ struct ForOfPIC
{
private:
// Pointer to canonical Array.prototype and ArrayIterator.prototype
HeapPtrNativeObject arrayProto_;
HeapPtrNativeObject arrayIteratorProto_;
GCPtrNativeObject arrayProto_;
GCPtrNativeObject arrayIteratorProto_;
// Shape of matching Array.prototype object, and slot containing
// the @@iterator for it, and the canonical value.
HeapPtrShape arrayProtoShape_;
GCPtrShape arrayProtoShape_;
uint32_t arrayProtoIteratorSlot_;
HeapValue canonicalIteratorFunc_;
GCPtrValue canonicalIteratorFunc_;
// Shape of matching ArrayIteratorProto, and slot containing
// the 'next' property, and the canonical value.
HeapPtrShape arrayIteratorProtoShape_;
GCPtrShape arrayIteratorProtoShape_;
uint32_t arrayIteratorProtoNextSlot_;
HeapValue canonicalNextFunc_;
GCPtrValue canonicalNextFunc_;
// Initialization flag marking lazy initialization of above fields.
bool initialized_;
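These fields cache the shapes and slot offsets the PIC has validated so later for-of setups can skip the lookups while the guards still hold. A toy model of a shape-guarded cache, not the real ForOfPIC logic:

```cpp
#include <cstdio>

struct Shape { int id; };  // identity of an object's layout
struct Object { const Shape* shape; int slots[4]; };

// Toy shape-guarded cache: remember the shape we validated and where we
// found the property; take the fast path only while the shape matches.
struct IteratorCache {
    const Shape* cachedShape = nullptr;
    int cachedSlot = -1;

    int lookup(const Object& proto, int slotIfMiss) {
        if (proto.shape == cachedShape)
            return proto.slots[cachedSlot];  // guard passed: fast path
        cachedShape = proto.shape;           // miss: revalidate and refill
        cachedSlot = slotIfMiss;
        return proto.slots[slotIfMiss];
    }
};

int main() {
    Shape s{1};
    Object proto{&s, {0, 7, 0, 0}};
    IteratorCache cache;
    cache.lookup(proto, 1);                  // first call fills the cache
    std::printf("cached hit: %d\n", cache.lookup(proto, 1));
}
```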

View File

@@ -89,7 +89,7 @@ ProxyObject::nuke(const BaseProxyHandler* handler)
JS_FRIEND_API(void)
js::SetValueInProxy(Value* slot, const Value& value)
{
// Slots in proxies are not HeapValues, so do a cast whenever assigning
// Slots in proxies are not GCPtrValues, so do a cast whenever assigning
// values to them which might trigger a barrier.
*reinterpret_cast<HeapValue*>(slot) = value;
*reinterpret_cast<GCPtrValue*>(slot) = value;
}
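Casting the raw slot to the barriered type is what makes the assignment run the write barrier. A hedged standalone version of that cast-and-assign pattern (Value and BarrieredValue are stand-ins):

```cpp
#include <cstdio>

struct Value { int payload = 0; };  // stand-in for a raw slot value

// Stand-in for the barriered wrapper: operator= runs the barrier.
struct BarrieredValue {
    Value v;
    BarrieredValue& operator=(const Value& nv) {
        std::printf("barrier fired (old payload %d)\n", v.payload);
        v = nv;
        return *this;
    }
};
static_assert(sizeof(BarrieredValue) == sizeof(Value),
              "cast requires identical layout");

int main() {
    Value slots[4] = {};  // raw slots, declared without the wrapper
    Value incoming{42};
    // Route the store through the barriered operator= by casting,
    // just as the helper above does for proxy slots.
    *reinterpret_cast<BarrieredValue*>(&slots[1]) = incoming;
    std::printf("slot[1] = %d\n", slots[1].payload);
}
```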

View File

@@ -16,7 +16,7 @@ namespace js {
// instantiated.
class ProxyObject : public JSObject
{
HeapPtrShape shape;
GCPtrShape shape;
// GetProxyDataLayout computes the address of this field.
detail::ProxyDataLayout data;
@@ -39,8 +39,8 @@ class ProxyObject : public JSObject
void setCrossCompartmentPrivate(const Value& priv);
void setSameCompartmentPrivate(const Value& priv);
HeapValue* slotOfPrivate() {
return reinterpret_cast<HeapValue*>(&detail::GetProxyDataLayout(this)->values->privateSlot);
GCPtrValue* slotOfPrivate() {
return reinterpret_cast<GCPtrValue*>(&detail::GetProxyDataLayout(this)->values->privateSlot);
}
JSObject* target() const {
@@ -75,9 +75,9 @@
}
private:
HeapValue* slotOfExtra(size_t n) {
GCPtrValue* slotOfExtra(size_t n) {
MOZ_ASSERT(n < detail::PROXY_EXTRA_SLOTS);
return reinterpret_cast<HeapValue*>(&detail::GetProxyDataLayout(this)->values->extraSlots[n]);
return reinterpret_cast<GCPtrValue*>(&detail::GetProxyDataLayout(this)->values->extraSlots[n]);
}
static bool isValidProxyClass(const Class* clasp) {

View File

@@ -68,8 +68,8 @@ class ReceiverGuard
class HeapReceiverGuard
{
HeapPtrObjectGroup group_;
HeapPtrShape shape_;
GCPtrObjectGroup group_;
GCPtrShape shape_;
public:
explicit HeapReceiverGuard(const ReceiverGuard& guard)

View File

@@ -2487,8 +2487,8 @@ static const JSFunctionSpec intrinsic_functions[] = {
#undef LOAD_AND_STORE_SCALAR_FN_DECLS
#define LOAD_AND_STORE_REFERENCE_FN_DECLS(_constant, _type, _name) \
JS_FN("Store_" #_name, js::StoreReference##_type::Func, 3, 0), \
JS_FN("Load_" #_name, js::LoadReference##_type::Func, 3, 0),
JS_FN("Store_" #_name, js::StoreReference##_name::Func, 3, 0), \
JS_FN("Load_" #_name, js::LoadReference##_name::Func, 3, 0),
JS_FOR_EACH_REFERENCE_TYPE_REPR(LOAD_AND_STORE_REFERENCE_FN_DECLS)
#undef LOAD_AND_STORE_REFERENCE_FN_DECLS

View File

@@ -90,7 +90,7 @@ Shape::removeFromDictionary(NativeObject* obj)
}
void
Shape::insertIntoDictionary(HeapPtrShape* dictp)
Shape::insertIntoDictionary(GCPtrShape* dictp)
{
// Don't assert inDictionaryMode() here because we may be called from
// JSObject::toDictionaryMode via JSObject::newDictionaryShape.
@@ -104,7 +104,7 @@ Shape::insertIntoDictionary(HeapPtrShape* dictp)
setParent(dictp->get());
if (parent)
parent->listp = &parent;
listp = (HeapPtrShape*) dictp;
listp = (GCPtrShape*) dictp;
*dictp = this;
}
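The dictionary list is doubly indirect: each shape's listp holds the address of whichever pointer currently refers to it, so unlinking never needs a search. A compact standalone model of the insertion above (Node and insertFront are hypothetical):

```cpp
#include <cstdio>

// Hypothetical model of the dictionary list: each node's listp holds
// the address of whichever pointer currently refers to that node.
struct Node {
    const char* name;
    Node* parent = nullptr;  // next (older) entry in the list
    Node** listp = nullptr;  // address of the pointer pointing at us
};

static void insertFront(Node** dictp, Node* n) {
    n->parent = *dictp;                 // old front becomes our parent
    if (n->parent)
        n->parent->listp = &n->parent;  // it is now referred to via us
    n->listp = dictp;                   // the head pointer refers to us
    *dictp = n;
}

int main() {
    Node* head = nullptr;
    Node a{"a"}, b{"b"};
    insertFront(&head, &a);
    insertFront(&head, &b);             // list is now b -> a
    for (Node* n = head; n; n = n->parent)
        std::printf("%s (listp consistent: %d)\n", n->name, *n->listp == n);
}
```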
@@ -483,7 +483,7 @@ js::NativeObject::toDictionaryMode(ExclusiveContext* cx)
return false;
}
HeapPtrShape* listp = dictionaryShape ? &dictionaryShape->parent : nullptr;
GCPtrShape* listp = dictionaryShape ? &dictionaryShape->parent : nullptr;
StackShape child(shape);
dprop->initDictionaryShape(child, self->numFixedSlots(), listp);

View File

@@ -379,7 +379,7 @@ class BaseShape : public gc::TenuredCell
* dictionary last properties. */
/* For owned BaseShapes, the canonical unowned BaseShape. */
HeapPtrUnownedBaseShape unowned_;
GCPtrUnownedBaseShape unowned_;
/* For owned BaseShapes, the shape's shape table. */
ShapeTable* table_;
@@ -542,8 +542,8 @@ class Shape : public gc::TenuredCell
friend class js::gc::RelocationOverlay;
protected:
HeapPtrBaseShape base_;
PreBarrieredId propid_;
GCPtrBaseShape base_;
PreBarrieredId propid_;
enum SlotInfo : uint32_t
{
@@ -576,15 +576,15 @@ class Shape : public gc::TenuredCell
uint8_t attrs; /* attributes, see jsapi.h JSPROP_* */
uint8_t flags; /* flags, see below for defines */
HeapPtrShape parent; /* parent node, reverse for..in order */
GCPtrShape parent; /* parent node, reverse for..in order */
/* kids is valid when !inDictionary(), listp is valid when inDictionary(). */
union {
KidsPointer kids; /* null, single child, or a tagged ptr
to many-kids data structure */
HeapPtrShape* listp; /* dictionary list starting at shape_
has a double-indirect back pointer,
either to the next shape's parent if not
last, else to obj->shape_ */
KidsPointer kids; /* null, single child, or a tagged ptr
to many-kids data structure */
GCPtrShape* listp; /* dictionary list starting at shape_
has a double-indirect back pointer,
either to the next shape's parent if not
last, else to obj->shape_ */
};
template<MaybeAdding Adding = MaybeAdding::NotAdding>
@@ -593,9 +593,10 @@ class Shape : public gc::TenuredCell
static inline Shape* searchNoHashify(Shape* start, jsid id);
void removeFromDictionary(NativeObject* obj);
void insertIntoDictionary(HeapPtrShape* dictp);
void insertIntoDictionary(GCPtrShape* dictp);
inline void initDictionaryShape(const StackShape& child, uint32_t nfixed, HeapPtrShape* dictp);
inline void initDictionaryShape(const StackShape& child, uint32_t nfixed,
GCPtrShape* dictp);
/* Replace the base shape of the last shape in a non-dictionary lineage with base. */
static Shape* replaceLastProperty(ExclusiveContext* cx, StackBaseShape& base,
@@ -652,7 +653,7 @@ class Shape : public gc::TenuredCell
return *(AccessorShape*)this;
}
const HeapPtrShape& previous() const { return parent; }
const GCPtrShape& previous() const { return parent; }
JSCompartment* compartment() const { return base()->compartment(); }
JSCompartment* maybeCompartment() const { return compartment(); }
@@ -1350,7 +1351,7 @@ Shape::setterObject() const
}
inline void
Shape::initDictionaryShape(const StackShape& child, uint32_t nfixed, HeapPtrShape* dictp)
Shape::initDictionaryShape(const StackShape& child, uint32_t nfixed, GCPtrShape* dictp)
{
if (child.isAccessorShape())
new (this) AccessorShape(child, nfixed);

View File

@@ -158,8 +158,8 @@ struct CopyTo
struct CopyToHeap
{
HeapValue* dst;
explicit CopyToHeap(HeapValue* dst) : dst(dst) {}
GCPtrValue* dst;
explicit CopyToHeap(GCPtrValue* dst) : dst(dst) {}
void operator()(const Value& src) { dst->init(src); ++dst; }
};
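CopyToHeap is an output functor: each call initializes one destination slot and advances the cursor, so the frame iterator can drive the loop. The same pattern in standalone form (GCVal is a stand-in for the barriered value type):

```cpp
#include <cstdio>

struct Value { int v; };  // stand-in
struct GCVal {            // stand-in for the barriered value type
    Value v{};
    void init(const Value& src) { v = src; }  // init of fresh storage
};

// Same shape as the functor above: the cursor advances on each call,
// letting the caller drive the iteration over actual arguments.
struct CopyToDst {
    GCVal* dst;
    explicit CopyToDst(GCVal* dst) : dst(dst) {}
    void operator()(const Value& src) { dst->init(src); ++dst; }
};

int main() {
    const Value actuals[3] = {{1}, {2}, {3}};
    GCVal out[3];
    CopyToDst copy(out);
    for (const Value& a : actuals)
        copy(a);
    std::printf("out[2] = %d\n", out[2].v.v);
}
```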

View File

@@ -101,7 +101,7 @@ class RootedBase<TaggedProto> : public TaggedProtoOperations<Rooted<TaggedProto>
{};
template <>
class BarrieredBaseMixins<TaggedProto> : public TaggedProtoOperations<HeapPtr<TaggedProto>>
class BarrieredBaseMixins<TaggedProto> : public TaggedProtoOperations<GCPtr<TaggedProto>>
{};
// If the TaggedProto is a JSObject pointer, convert to that type and call |f|

View File

@@ -311,13 +311,13 @@ UnboxedPlainObject::trace(JSTracer* trc, JSObject* obj)
uint8_t* data = obj->as<UnboxedPlainObject>().data();
while (*list != -1) {
HeapPtrString* heap = reinterpret_cast<HeapPtrString*>(data + *list);
GCPtrString* heap = reinterpret_cast<GCPtrString*>(data + *list);
TraceEdge(trc, heap, "unboxed_string");
list++;
}
list++;
while (*list != -1) {
HeapPtrObject* heap = reinterpret_cast<HeapPtrObject*>(data + *list);
GCPtrObject* heap = reinterpret_cast<GCPtrObject*>(data + *list);
TraceNullableEdge(trc, heap, "unboxed_object");
list++;
}
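The trace list encodes two sections of byte offsets, strings then objects, each terminated by -1. A self-contained sketch of walking that encoding (the offsets are invented):

```cpp
#include <cstdint>
#include <cstdio>

int main() {
    // Invented trace list: byte offsets of string fields, a -1 sentinel,
    // byte offsets of object fields, then a final -1.
    const int32_t list[] = { 0, 8, -1, 16, -1 };

    const int32_t* p = list;
    while (*p != -1)
        std::printf("trace string edge at offset %d\n", *p++);
    p++;  // skip the sentinel separating the two sections
    while (*p != -1)
        std::printf("trace object edge at offset %d\n", *p++);
}
```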
@@ -349,9 +349,9 @@ UnboxedPlainObject::ensureExpando(JSContext* cx, Handle<UnboxedPlainObject*> obj
MOZ_ASSERT_IF(!IsInsideNursery(expando), !IsInsideNursery(obj));
// As with setValue(), we need to manually trigger post barriers on the
// whole object. If we treat the field as a HeapPtrObject and later convert
// the object to its native representation, we will end up with a corrupted
// store buffer entry.
// whole object. If we treat the field as a GCPtrObject and later
// convert the object to its native representation, we will end up with a
// corrupted store buffer entry.
if (IsInsideNursery(expando) && !IsInsideNursery(obj))
cx->runtime()->gc.storeBuffer.putWholeCell(obj);
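The comment above describes a manual post barrier: a minor GC scans only roots plus the store buffer, so a tenured-to-nursery edge created after allocation must be recorded by hand or the nursery object could be moved or collected unseen. A toy model of the idea (Cell, storeBuffer, and putWholeCell here are stand-ins):

```cpp
#include <cstdio>
#include <vector>

struct Cell { bool inNursery; };  // stand-in GC cell

// Stand-in store buffer: cells listed here are re-scanned at minor GC.
static std::vector<Cell*> storeBuffer;

static void putWholeCell(Cell* tenured) {
    storeBuffer.push_back(tenured);
}

int main() {
    Cell obj{false};     // tenured holder
    Cell expando{true};  // freshly allocated in the nursery

    // The edge obj -> expando is created after obj was allocated, so a
    // minor GC would not see it without this manual record.
    if (expando.inNursery && !obj.inNursery)
        putWholeCell(&obj);

    std::printf("store buffer entries: %zu\n", storeBuffer.size());
}
```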
@@ -649,13 +649,13 @@ UnboxedPlainObject::create(ExclusiveContext* cx, HandleObjectGroup group, NewObj
if (list) {
uint8_t* data = res->data();
while (*list != -1) {
HeapPtrString* heap = reinterpret_cast<HeapPtrString*>(data + *list);
GCPtrString* heap = reinterpret_cast<GCPtrString*>(data + *list);
heap->init(cx->names().empty);
list++;
}
list++;
while (*list != -1) {
HeapPtrObject* heap = reinterpret_cast<HeapPtrObject*>(data + *list);
GCPtrObject* heap = reinterpret_cast<GCPtrObject*>(data + *list);
heap->init(nullptr);
list++;
}
@@ -1150,14 +1150,14 @@ UnboxedArrayObject::trace(JSTracer* trc, JSObject* obj)
switch (type) {
case JSVAL_TYPE_OBJECT:
for (size_t i = 0; i < initlen; i++) {
HeapPtrObject* heap = reinterpret_cast<HeapPtrObject*>(elements + i);
GCPtrObject* heap = reinterpret_cast<GCPtrObject*>(elements + i);
TraceNullableEdge(trc, heap, "unboxed_object");
}
break;
case JSVAL_TYPE_STRING:
for (size_t i = 0; i < initlen; i++) {
HeapPtrString* heap = reinterpret_cast<HeapPtrString*>(elements + i);
GCPtrString* heap = reinterpret_cast<GCPtrString*>(elements + i);
TraceEdge(trc, heap, "unboxed_string");
}
break;

View File

@@ -61,11 +61,11 @@ class UnboxedLayout : public mozilla::LinkedListElement<UnboxedLayout>
// If objects in this group have ever been converted to native objects,
// these store the corresponding native group and initial shape for such
// objects. Type information for this object is reflected in nativeGroup.
HeapPtrObjectGroup nativeGroup_;
HeapPtrShape nativeShape_;
GCPtrObjectGroup nativeGroup_;
GCPtrShape nativeShape_;
// Any script/pc which the associated group is created for.
HeapPtrScript allocationScript_;
GCPtrScript allocationScript_;
jsbytecode* allocationPc_;
// If nativeGroup is set and this object originally had a TypeNewScript or
@@ -73,7 +73,7 @@ class UnboxedLayout : public mozilla::LinkedListElement<UnboxedLayout>
// this one. This link is only needed to keep the replacement group from
// being GC'ed. If it were GC'ed and a new one regenerated later, that new
// group might have a different allocation kind from this group.
HeapPtrObjectGroup replacementGroup_;
GCPtrObjectGroup replacementGroup_;
// The following members are only used for unboxed plain objects.
@@ -93,7 +93,7 @@ class UnboxedLayout : public mozilla::LinkedListElement<UnboxedLayout>
// If this layout has been used to construct script or JSON constant
// objects, this code might be filled in to more quickly fill in objects
// from an array of values.
HeapPtrJitCode constructorCode_;
GCPtrJitCode constructorCode_;
// The following members are only used for unboxed arrays.