Bug 1481998 - Make mozilla::Hash{Map,Set}'s entry storage allocation lazy. r=luke,sfink

Entry storage allocation now occurs on the first lookupForAdd()/put()/putNew().
This removes the need for init() and initialized(), and matches how
PLDHashTable/nsTHashtable work. It also removes the need for init() functions
in a lot of types that are built on top of mozilla::Hash{Map,Set}.

Pros:

- No need for init() calls and subsequent checks.

- No memory allocated for empty tables, which are not that uncommon.

Cons:

- An extra branch in lookup() and lookupForAdd(), but not in put()/putNew(),
  because the existing checkOverloaded() can handle it.

Specifics:

- Construction now can take a length parameter.

- init() is removed. Explicit length-setting, when necessary, now occurs in the
  constructors.

- initialized() is removed.

- capacity() now returns zero when the entry storage is absent.

- lookupForAdd() is no longer `const`, because it can instantiate the storage,
  which requires modifications.

- lookupForAdd() can now return an invalid AddPtr in two cases:

  - old: hashing failure (due to OOM in the hasher)

  - new: OOM while instantiating entry storage

  The existing failure handling paths for the old case work for the new case.

- clear(), finish(), and clearAndShrink() are replaced by clear(), compact(),
  and reserve(). The old compactIfUnderloaded() is also removed.

- Capacity computation code is now in its own functions, bestCapacity() and
  hashShift(). setTableSizeLog2() is removed.

- uint32_t is used throughout for capacities, instead of size_t, for
  consistency with other similar values.

- changeTableSize() now takes a capacity instead of a deltaLog2, and it can now
  handle !mTable.

Measurements:

- Total source code size is reduced by over 900 lines. Also, lots of existing
  lines got shorter (e.g. two checks were reduced to one).

- Executable size barely changed, down by 2 KiB on Linux64. The extra branches
  are compensated for by the lack of init() calls.

- Speed changed negligibly. The instruction count for Bench_Cpp_MozHash
  increased from 2.84 billion to 2.89 billion but any execution time change was
  well below noise.
This commit is contained in:
Nicholas Nethercote 2018-08-10 18:00:29 +10:00
parent a04c29f828
commit b9e071e2e8
139 changed files with 494 additions and 1311 deletions

View File

@ -406,9 +406,6 @@ readSizeOfNextMessage(ZeroCopyInputStream& stream, uint32_t* sizep)
bool
HeapSnapshot::init(JSContext* cx, const uint8_t* buffer, uint32_t size)
{
if (!nodes.init() || !frames.init())
return false;
ArrayInputStream stream(buffer, size);
GzipInputStream gzipStream(&stream);
uint32_t sizeOfMessage = 0;
@ -439,8 +436,6 @@ HeapSnapshot::init(JSContext* cx, const uint8_t* buffer, uint32_t size)
// The set of all node ids we've found edges pointing to.
NodeIdSet edgeReferents(cx);
if (NS_WARN_IF(!edgeReferents.init()))
return false;
if (NS_WARN_IF(!saveNode(root, edgeReferents)))
return false;
@ -478,10 +473,6 @@ HeapSnapshot::TakeCensus(JSContext* cx, JS::HandleObject options,
JS::MutableHandleValue rval, ErrorResult& rv)
{
JS::ubi::Census census(cx);
if (NS_WARN_IF(!census.init())) {
rv.Throw(NS_ERROR_OUT_OF_MEMORY);
return;
}
JS::ubi::CountTypePtr rootType;
if (NS_WARN_IF(!JS::ubi::ParseCensusOptions(cx, census, options, rootType))) {
@ -501,10 +492,6 @@ HeapSnapshot::TakeCensus(JSContext* cx, JS::HandleObject options,
JS::AutoCheckCannotGC nogc;
JS::ubi::CensusTraversal traversal(cx, handler, nogc);
if (NS_WARN_IF(!traversal.init())) {
rv.Throw(NS_ERROR_OUT_OF_MEMORY);
return;
}
if (NS_WARN_IF(!traversal.addStart(getRoot()))) {
rv.Throw(NS_ERROR_OUT_OF_MEMORY);
@ -610,10 +597,6 @@ HeapSnapshot::ComputeShortestPaths(JSContext*cx, uint64_t start,
// snapshot.
JS::ubi::NodeSet targetsSet;
if (NS_WARN_IF(!targetsSet.init())) {
rv.Throw(NS_ERROR_OUT_OF_MEMORY);
return;
}
for (const auto& target : targets) {
Maybe<JS::ubi::Node> targetNode = getNodeById(target);
@ -722,9 +705,6 @@ HeapSnapshot::ComputeShortestPaths(JSContext*cx, uint64_t start,
static bool
PopulateCompartmentsWithGlobals(CompartmentSet& compartments, AutoObjectVector& globals)
{
if (!compartments.init())
return false;
unsigned length = globals.length();
for (unsigned i = 0; i < length; i++) {
if (!compartments.put(GetObjectCompartment(globals[i])))
@ -768,7 +748,7 @@ EstablishBoundaries(JSContext* cx,
CompartmentSet& compartments)
{
MOZ_ASSERT(!roots.initialized());
MOZ_ASSERT(!compartments.initialized());
MOZ_ASSERT(compartments.empty());
bool foundBoundaryProperty = false;
@ -851,8 +831,6 @@ EstablishBoundaries(JSContext* cx,
}
MOZ_ASSERT(roots.initialized());
MOZ_ASSERT_IF(boundaries.mDebugger.WasPassed(), compartments.initialized());
MOZ_ASSERT_IF(boundaries.mGlobals.WasPassed(), compartments.initialized());
return true;
}
@ -1257,12 +1235,6 @@ public:
, compartments(compartments)
{ }
bool init() {
return framesAlreadySerialized.init() &&
twoByteStringsAlreadySerialized.init() &&
oneByteStringsAlreadySerialized.init();
}
~StreamWriter() override { }
bool writeMetadata(uint64_t timestamp) final {
@ -1440,8 +1412,6 @@ WriteHeapGraph(JSContext* cx,
HeapSnapshotHandler handler(writer, compartments);
HeapSnapshotHandler::Traversal traversal(cx, handler, noGC);
if (!traversal.init())
return false;
traversal.wantNames = wantNames;
bool ok = traversal.addStartVisited(node) &&
@ -1621,11 +1591,7 @@ ChromeUtils::SaveHeapSnapshotShared(GlobalObject& global,
return;
StreamWriter writer(cx, gzipStream, wantNames,
compartments.initialized() ? &compartments : nullptr);
if (NS_WARN_IF(!writer.init())) {
rv.Throw(NS_ERROR_OUT_OF_MEMORY);
return;
}
!compartments.empty() ? &compartments : nullptr);
MOZ_ASSERT(maybeNoGC.isSome());
ubi::Node roots(&rootList);
@ -1638,7 +1604,7 @@ ChromeUtils::SaveHeapSnapshotShared(GlobalObject& global,
roots,
writer,
wantNames,
compartments.initialized() ? &compartments : nullptr,
!compartments.empty() ? &compartments : nullptr,
maybeNoGC.ref(),
nodeCount,
edgeCount))

View File

@ -137,7 +137,6 @@ public:
// Get the root node of this heap snapshot's graph.
JS::ubi::Node getRoot() {
MOZ_ASSERT(nodes.initialized());
auto p = nodes.lookup(rootId);
MOZ_ASSERT(p);
const DeserializedNode& node = *p;

View File

@ -27,7 +27,6 @@ DEF_TEST(DoesCrossCompartmentBoundaries, {
// Our set of target compartments is both the old and new compartments.
JS::CompartmentSet targetCompartments;
ASSERT_TRUE(targetCompartments.init());
ASSERT_TRUE(targetCompartments.put(compartment));
ASSERT_TRUE(targetCompartments.put(newCompartment));

View File

@ -28,7 +28,6 @@ DEF_TEST(DoesntCrossCompartmentBoundaries, {
// Our set of target compartments is only the pre-existing compartment and
// does not include the new compartment.
JS::CompartmentSet targetCompartments;
ASSERT_TRUE(targetCompartments.init());
ASSERT_TRUE(targetCompartments.put(compartment));
FakeNode nodeA;

View File

@ -285,7 +285,6 @@ CustomElementRegistry::CustomElementRegistry(nsPIDOMWindowInner* aWindow)
, mIsCustomDefinitionRunning(false)
{
MOZ_ASSERT(aWindow);
MOZ_ALWAYS_TRUE(mConstructors.init());
mozilla::HoldJSObjects(this);
}

View File

@ -369,12 +369,6 @@ CreateJSObjWrapperTable()
}
sJSObjWrappers = MakeUnique<JSObjWrapperTable>();
if (!sJSObjWrappers->init(16)) {
sJSObjWrappers = nullptr;
NS_ERROR("Error initializing sJSObjWrappers!");
return false;
}
sJSObjWrappersAccessible = true;
return true;
}

View File

@ -42,11 +42,6 @@ JavaScriptChild::~JavaScriptChild()
bool
JavaScriptChild::init()
{
if (!WrapperOwner::init())
return false;
if (!WrapperAnswer::init())
return false;
JSContext* cx = dom::danger::GetJSContext();
JS_AddWeakPointerZonesCallback(cx, UpdateChildWeakPointersBeforeSweepingZoneGroup, this);
JS_AddExtraGCRootsTracer(cx, TraceChild, this);

View File

@ -31,21 +31,17 @@ TraceParent(JSTracer* trc, void* data)
static_cast<JavaScriptParent*>(data)->trace(trc);
}
JavaScriptParent::JavaScriptParent()
: savedNextCPOWNumber_(1)
{
JS_AddExtraGCRootsTracer(danger::GetJSContext(), TraceParent, this);
}
JavaScriptParent::~JavaScriptParent()
{
JS_RemoveExtraGCRootsTracer(danger::GetJSContext(), TraceParent, this);
}
bool
JavaScriptParent::init()
{
if (!WrapperOwner::init())
return false;
JS_AddExtraGCRootsTracer(danger::GetJSContext(), TraceParent, this);
return true;
}
static bool
ForbidUnsafeBrowserCPOWs()
{
@ -151,12 +147,7 @@ JavaScriptParent::afterProcessTask()
PJavaScriptParent*
mozilla::jsipc::NewJavaScriptParent()
{
JavaScriptParent* parent = new JavaScriptParent();
if (!parent->init()) {
delete parent;
return nullptr;
}
return parent;
return new JavaScriptParent();
}
void

View File

@ -17,10 +17,9 @@ namespace jsipc {
class JavaScriptParent : public JavaScriptBase<PJavaScriptParent>
{
public:
JavaScriptParent() : savedNextCPOWNumber_(1) {}
JavaScriptParent();
virtual ~JavaScriptParent();
bool init();
void trace(JSTracer* trc);
void drop(JSObject* obj);

View File

@ -20,18 +20,10 @@ using namespace mozilla;
using namespace mozilla::jsipc;
IdToObjectMap::IdToObjectMap()
: table_(SystemAllocPolicy())
: table_(SystemAllocPolicy(), 32)
{
}
bool
IdToObjectMap::init()
{
if (table_.initialized())
return true;
return table_.init(32);
}
void
IdToObjectMap::trace(JSTracer* trc, uint64_t minimumId)
{
@ -105,10 +97,9 @@ IdToObjectMap::has(const ObjectId& id, const JSObject* obj) const
}
#endif
bool
ObjectToIdMap::init()
ObjectToIdMap::ObjectToIdMap()
: table_(SystemAllocPolicy(), 32)
{
return table_.initialized() || table_.init(32);
}
void
@ -179,21 +170,6 @@ JavaScriptShared::~JavaScriptShared()
MOZ_RELEASE_ASSERT(cpows_.empty());
}
bool
JavaScriptShared::init()
{
if (!objects_.init())
return false;
if (!cpows_.init())
return false;
if (!unwaivedObjectIds_.init())
return false;
if (!waivedObjectIds_.init())
return false;
return true;
}
void
JavaScriptShared::decref()
{

View File

@ -98,7 +98,6 @@ class IdToObjectMap
public:
IdToObjectMap();
bool init();
void trace(JSTracer* trc, uint64_t minimumId = 0);
void sweep();
@ -125,7 +124,8 @@ class ObjectToIdMap
using Table = JS::GCHashMap<JS::Heap<JSObject*>, ObjectId, Hasher, js::SystemAllocPolicy>;
public:
bool init();
ObjectToIdMap();
void trace(JSTracer* trc);
void sweep();
@ -146,8 +146,6 @@ class JavaScriptShared : public CPOWManager
JavaScriptShared();
virtual ~JavaScriptShared();
bool init();
void decref();
void incref();

View File

@ -906,15 +906,6 @@ WrapperOwner::updatePointer(JSObject* obj, const JSObject* old)
cpows_.add(objId, obj);
}
bool
WrapperOwner::init()
{
if (!JavaScriptShared::init())
return false;
return true;
}
bool
WrapperOwner::getPropertyKeys(JSContext* cx, HandleObject proxy, uint32_t flags, AutoIdVector& props)
{

View File

@ -24,7 +24,6 @@ class WrapperOwner : public virtual JavaScriptShared
ActorDestroyReason;
WrapperOwner();
bool init();
// Standard internal methods.
// (The traps should be in the same order like js/Proxy.h)

View File

@ -60,11 +60,11 @@ class GCHashMap : public js::HashMap<Key, Value, HashPolicy, AllocPolicy>
public:
explicit GCHashMap(AllocPolicy a = AllocPolicy()) : Base(a) {}
explicit GCHashMap(size_t length) : Base(length) {}
GCHashMap(AllocPolicy a, size_t length) : Base(a, length) {}
static void trace(GCHashMap* map, JSTracer* trc) { map->trace(trc); }
void trace(JSTracer* trc) {
if (!this->initialized())
return;
for (typename Base::Enum e(*this); !e.empty(); e.popFront()) {
GCPolicy<Value>::trace(trc, &e.front().value(), "hashmap value");
GCPolicy<Key>::trace(trc, &e.front().mutableKey(), "hashmap key");
@ -72,13 +72,10 @@ class GCHashMap : public js::HashMap<Key, Value, HashPolicy, AllocPolicy>
}
bool needsSweep() const {
return this->initialized() && !this->empty();
return !this->empty();
}
void sweep() {
if (!this->initialized())
return;
for (typename Base::Enum e(*this); !e.empty(); e.popFront()) {
if (MapSweepPolicy::needsSweep(&e.front().mutableKey(), &e.front().value()))
e.removeFront();
@ -117,12 +114,11 @@ class GCRekeyableHashMap : public JS::GCHashMap<Key, Value, HashPolicy, AllocPol
using Base = JS::GCHashMap<Key, Value, HashPolicy, AllocPolicy>;
public:
explicit GCRekeyableHashMap(AllocPolicy a = AllocPolicy()) : Base(a) {}
explicit GCRekeyableHashMap(AllocPolicy a = AllocPolicy()) : Base(a) {}
explicit GCRekeyableHashMap(size_t length) : Base(length) {}
GCRekeyableHashMap(AllocPolicy a, size_t length) : Base(a, length) {}
void sweep() {
if (!this->initialized())
return;
for (typename Base::Enum e(*this); !e.empty(); e.popFront()) {
Key key(e.front().key());
if (MapSweepPolicy::needsSweep(&key, &e.front().value()))
@ -153,9 +149,7 @@ class WrappedPtrOperations<JS::GCHashMap<Args...>, Wrapper>
using Ptr = typename Map::Ptr;
using Range = typename Map::Range;
bool initialized() const { return map().initialized(); }
Ptr lookup(const Lookup& l) const { return map().lookup(l); }
AddPtr lookupForAdd(const Lookup& l) const { return map().lookupForAdd(l); }
Range all() const { return map().all(); }
bool empty() const { return map().empty(); }
uint32_t count() const { return map().count(); }
@ -184,10 +178,10 @@ class MutableWrappedPtrOperations<JS::GCHashMap<Args...>, Wrapper>
using Ptr = typename Map::Ptr;
using Range = typename Map::Range;
bool init(uint32_t len = 16) { return map().init(len); }
void clear() { map().clear(); }
void finish() { map().finish(); }
void clearAndCompact() { map().clearAndCompact(); }
void remove(Ptr p) { map().remove(p); }
AddPtr lookupForAdd(const Lookup& l) { return map().lookupForAdd(l); }
template<typename KeyInput, typename ValueInput>
bool add(AddPtr& p, KeyInput&& k, ValueInput&& v) {
@ -243,22 +237,20 @@ class GCHashSet : public js::HashSet<T, HashPolicy, AllocPolicy>
public:
explicit GCHashSet(AllocPolicy a = AllocPolicy()) : Base(a) {}
explicit GCHashSet(size_t length) : Base(length) {}
GCHashSet(AllocPolicy a, size_t length) : Base(a, length) {}
static void trace(GCHashSet* set, JSTracer* trc) { set->trace(trc); }
void trace(JSTracer* trc) {
if (!this->initialized())
return;
for (typename Base::Enum e(*this); !e.empty(); e.popFront())
GCPolicy<T>::trace(trc, &e.mutableFront(), "hashset element");
}
bool needsSweep() const {
return this->initialized() && !this->empty();
return !this->empty();
}
void sweep() {
if (!this->initialized())
return;
for (typename Base::Enum e(*this); !e.empty(); e.popFront()) {
if (GCPolicy<T>::needsSweep(&e.mutableFront()))
e.removeFront();
@ -296,9 +288,7 @@ class WrappedPtrOperations<JS::GCHashSet<Args...>, Wrapper>
using Ptr = typename Set::Ptr;
using Range = typename Set::Range;
bool initialized() const { return set().initialized(); }
Ptr lookup(const Lookup& l) const { return set().lookup(l); }
AddPtr lookupForAdd(const Lookup& l) const { return set().lookupForAdd(l); }
Range all() const { return set().all(); }
bool empty() const { return set().empty(); }
uint32_t count() const { return set().count(); }
@ -328,11 +318,12 @@ class MutableWrappedPtrOperations<JS::GCHashSet<Args...>, Wrapper>
using Ptr = typename Set::Ptr;
using Range = typename Set::Range;
bool init(uint32_t len = 16) { return set().init(len); }
void clear() { set().clear(); }
void finish() { set().finish(); }
void clearAndCompact() { set().clearAndCompact(); }
MOZ_MUST_USE bool reserve(uint32_t len) { return set().reserve(len); }
void remove(Ptr p) { set().remove(p); }
void remove(const Lookup& l) { set().remove(l); }
AddPtr lookupForAdd(const Lookup& l) { return set().lookupForAdd(l); }
template<typename TInput>
bool add(AddPtr& p, TInput&& t) {
@ -395,9 +386,6 @@ class WeakCache<GCHashMap<Key, Value, HashPolicy, AllocPolicy, MapSweepPolicy>>
}
size_t sweep() override {
if (!this->initialized())
return 0;
size_t steps = map.count();
map.sweep();
return steps;
@ -467,10 +455,6 @@ class WeakCache<GCHashMap<Key, Value, HashPolicy, AllocPolicy, MapSweepPolicy>>
}
};
bool initialized() const {
return map.initialized();
}
Ptr lookup(const Lookup& l) const {
Ptr ptr = map.lookup(l);
if (needsBarrier && ptr && entryNeedsSweep(*ptr)) {
@ -480,7 +464,7 @@ class WeakCache<GCHashMap<Key, Value, HashPolicy, AllocPolicy, MapSweepPolicy>>
return ptr;
}
AddPtr lookupForAdd(const Lookup& l) const {
AddPtr lookupForAdd(const Lookup& l) {
AddPtr ptr = map.lookupForAdd(l);
if (needsBarrier && ptr && entryNeedsSweep(*ptr)) {
const_cast<Map&>(map).remove(ptr);
@ -524,11 +508,6 @@ class WeakCache<GCHashMap<Key, Value, HashPolicy, AllocPolicy, MapSweepPolicy>>
return mallocSizeOf(this) + map.shallowSizeOfExcludingThis(mallocSizeOf);
}
bool init(uint32_t len = 16) {
MOZ_ASSERT(!needsBarrier);
return map.init(len);
}
void clear() {
// This operation is not currently allowed while barriers are in place
// since it doesn't make sense to clear a cache while it is being swept.
@ -536,11 +515,11 @@ class WeakCache<GCHashMap<Key, Value, HashPolicy, AllocPolicy, MapSweepPolicy>>
map.clear();
}
void finish() {
void clearAndCompact() {
// This operation is not currently allowed while barriers are in place
// since it doesn't make sense to destroy a cache while it is being swept.
// since it doesn't make sense to clear a cache while it is being swept.
MOZ_ASSERT(!needsBarrier);
map.finish();
map.clearAndCompact();
}
void remove(Ptr p) {
@ -602,9 +581,6 @@ class WeakCache<GCHashSet<T, HashPolicy, AllocPolicy>>
{}
size_t sweep() override {
if (!this->initialized())
return 0;
size_t steps = set.count();
set.sweep();
return steps;
@ -674,10 +650,6 @@ class WeakCache<GCHashSet<T, HashPolicy, AllocPolicy>>
}
};
bool initialized() const {
return set.initialized();
}
Ptr lookup(const Lookup& l) const {
Ptr ptr = set.lookup(l);
if (needsBarrier && ptr && entryNeedsSweep(*ptr)) {
@ -687,7 +659,7 @@ class WeakCache<GCHashSet<T, HashPolicy, AllocPolicy>>
return ptr;
}
AddPtr lookupForAdd(const Lookup& l) const {
AddPtr lookupForAdd(const Lookup& l) {
AddPtr ptr = set.lookupForAdd(l);
if (needsBarrier && ptr && entryNeedsSweep(*ptr)) {
const_cast<Set&>(set).remove(ptr);
@ -731,11 +703,6 @@ class WeakCache<GCHashSet<T, HashPolicy, AllocPolicy>>
return mallocSizeOf(this) + set.shallowSizeOfExcludingThis(mallocSizeOf);
}
bool init(uint32_t len = 16) {
MOZ_ASSERT(!needsBarrier);
return set.init(len);
}
void clear() {
// This operation is not currently allowed while barriers are in place
// since it doesn't make sense to clear a cache while it is being swept.
@ -743,11 +710,11 @@ class WeakCache<GCHashSet<T, HashPolicy, AllocPolicy>>
set.clear();
}
void finish() {
void clearAndCompact() {
// This operation is not currently allowed while barriers are in place
// since it doesn't make sense to destroy a cache while it is being swept.
// since it doesn't make sense to clear a cache while it is being swept.
MOZ_ASSERT(!needsBarrier);
set.finish();
set.clearAndCompact();
}
void remove(Ptr p) {

View File

@ -569,7 +569,7 @@ struct RuntimeSizes
notableScriptSources()
{
allScriptSources = js_new<ScriptSourcesHashMap>();
if (!allScriptSources || !allScriptSources->init())
if (!allScriptSources)
MOZ_CRASH("oom");
}

View File

@ -88,9 +88,6 @@ struct BreadthFirst {
traversalBegun(false), stopRequested(false), abandonRequested(false)
{ }
// Initialize this traversal object. Return false on OOM.
bool init() { return visited.init(); }
// Add |node| as a starting point for the traversal. You may add
// as many starting points as you like. Return false on OOM.
bool addStart(Node node) { return pending.append(node); }

View File

@ -206,8 +206,6 @@ struct Census {
JS::ZoneSet targetZones;
explicit Census(JSContext* cx) : cx(cx) { }
MOZ_MUST_USE JS_PUBLIC_API(bool) init();
};
// A BreadthFirst handler type that conducts a census, using a CountBase to

View File

@ -338,7 +338,6 @@ class JS_PUBLIC_API(DominatorTree)
if (!p) {
mozilla::UniquePtr<NodeSet, DeletePolicy<NodeSet>> set(js_new<NodeSet>());
if (!set ||
!set->init() ||
!predecessorSets.add(p, edge.referent, std::move(set)))
{
return false;
@ -349,8 +348,7 @@ class JS_PUBLIC_API(DominatorTree)
};
PostOrder traversal(cx, noGC);
return traversal.init() &&
traversal.addStart(root) &&
return traversal.addStart(root) &&
traversal.traverse(onNode, onEdge);
}
@ -358,10 +356,10 @@ class JS_PUBLIC_API(DominatorTree)
// `postOrder`.
static MOZ_MUST_USE bool mapNodesToTheirIndices(JS::ubi::Vector<Node>& postOrder,
NodeToIndexMap& map) {
MOZ_ASSERT(!map.initialized());
MOZ_ASSERT(map.empty());
MOZ_ASSERT(postOrder.length() < UINT32_MAX);
uint32_t length = postOrder.length();
if (!map.init(length))
if (!map.reserve(length))
return false;
for (uint32_t i = 0; i < length; i++)
map.putNewInfallible(postOrder[i], i);
@ -403,7 +401,7 @@ class JS_PUBLIC_API(DominatorTree)
predecessorVectors[i].infallibleAppend(ptr->value());
}
}
predecessorSets.finish();
predecessorSets.clearAndCompact();
return true;
}
@ -515,7 +513,7 @@ class JS_PUBLIC_API(DominatorTree)
Create(JSContext* cx, AutoCheckCannotGC& noGC, const Node& root) {
JS::ubi::Vector<Node> postOrder;
PredecessorSets predecessorSets;
if (!predecessorSets.init() || !doTraversal(cx, noGC, root, postOrder, predecessorSets))
if (!doTraversal(cx, noGC, root, postOrder, predecessorSets))
return mozilla::Nothing();
MOZ_ASSERT(postOrder.length() < UINT32_MAX);
@ -528,7 +526,7 @@ class JS_PUBLIC_API(DominatorTree)
// implementation, but we have to pay a little bit of upfront cost to
// convert our data structures to play along first.
NodeToIndexMap nodeToPostOrderIndex;
NodeToIndexMap nodeToPostOrderIndex(postOrder.length());
if (!mapNodesToTheirIndices(postOrder, nodeToPostOrderIndex))
return mozilla::Nothing();

View File

@ -123,9 +123,6 @@ struct PostOrder {
#endif
{ }
// Initialize this traversal object. Return false on OOM.
MOZ_MUST_USE bool init() { return seen.init(); }
// Add `node` as a starting point for the traversal. You may add
// as many starting points as you like. Returns false on OOM.
MOZ_MUST_USE bool addStart(const Node& node) {

View File

@ -188,18 +188,11 @@ struct JS_PUBLIC_API(ShortestPaths)
: maxNumPaths_(maxNumPaths)
, root_(root)
, targets_(std::move(targets))
, paths_()
, paths_(targets_.count())
, backEdges_()
{
MOZ_ASSERT(maxNumPaths_ > 0);
MOZ_ASSERT(root_);
MOZ_ASSERT(targets_.initialized());
}
bool initialized() const {
return targets_.initialized() &&
paths_.initialized() &&
backEdges_.initialized();
}
public:
@ -249,15 +242,12 @@ struct JS_PUBLIC_API(ShortestPaths)
MOZ_ASSERT(targets.count() > 0);
MOZ_ASSERT(maxNumPaths > 0);
size_t count = targets.count();
ShortestPaths paths(maxNumPaths, root, std::move(targets));
if (!paths.paths_.init(count))
return mozilla::Nothing();
Handler handler(paths);
Traversal traversal(cx, handler, noGC);
traversal.wantNames = true;
if (!traversal.init() || !traversal.addStart(root) || !traversal.traverse())
if (!traversal.addStart(root) || !traversal.traverse())
return mozilla::Nothing();
// Take ownership of the back edges we created while traversing the
@ -265,7 +255,6 @@ struct JS_PUBLIC_API(ShortestPaths)
// use-after-free.
paths.backEdges_ = std::move(traversal.visited);
MOZ_ASSERT(paths.initialized());
return mozilla::Some(std::move(paths));
}
@ -275,7 +264,6 @@ struct JS_PUBLIC_API(ShortestPaths)
* instance.
*/
NodeSet::Iterator targetIter() const {
MOZ_ASSERT(initialized());
return targets_.iter();
}
@ -291,7 +279,6 @@ struct JS_PUBLIC_API(ShortestPaths)
*/
template <class Func>
MOZ_MUST_USE bool forEachPath(const Node& target, Func func) {
MOZ_ASSERT(initialized());
MOZ_ASSERT(targets_.has(target));
auto ptr = paths_.lookup(target);

View File

@ -661,9 +661,7 @@ js::Stringify(JSContext* cx, MutableHandleValue vp, JSObject* replacer_, const V
// is passed in. If we end up having to add elements past this
// size, the set will naturally resize to accommodate them.
const uint32_t MaxInitialSize = 32;
Rooted<GCHashSet<jsid>> idSet(cx, GCHashSet<jsid>(cx));
if (!idSet.init(Min(len, MaxInitialSize)))
return false;
Rooted<GCHashSet<jsid>> idSet(cx, GCHashSet<jsid>(cx, Min(len, MaxInitialSize)));
/* Step 4b(iii)(4). */
uint32_t k = 0;

View File

@ -349,11 +349,6 @@ IndirectBindingMap::put(JSContext* cx, HandleId name,
if (!map_) {
MOZ_ASSERT(!cx->zone()->createdForHelperThread());
map_.emplace(cx->zone());
if (!map_->init()) {
map_.reset();
ReportOutOfMemory(cx);
return false;
}
}
RootedShape shape(cx, environment->lookup(cx, localName));
@ -1231,14 +1226,6 @@ ModuleBuilder::ModuleBuilder(JSContext* cx, HandleModuleObject module,
starExportEntries_(cx, ExportEntryVector(cx))
{}
bool
ModuleBuilder::init()
{
return requestedModuleSpecifiers_.init() &&
importEntries_.init() &&
exportNames_.init();
}
bool
ModuleBuilder::buildTables()
{

View File

@ -352,7 +352,6 @@ class MOZ_STACK_CLASS ModuleBuilder
public:
explicit ModuleBuilder(JSContext* cx, HandleModuleObject module,
const frontend::TokenStreamAnyChars& tokenStream);
bool init();
bool processImport(frontend::ParseNode* pn);
bool processExport(frontend::ParseNode* pn);

View File

@ -4579,9 +4579,6 @@ OffThreadPromiseRuntimeState::OffThreadPromiseRuntimeState()
numCanceled_(0),
internalDispatchQueueClosed_(false)
{
AutoEnterOOMUnsafeRegion noOOM;
if (!live_.init())
noOOM.crash("OffThreadPromiseRuntimeState");
}
OffThreadPromiseRuntimeState::~OffThreadPromiseRuntimeState()

View File

@ -3469,8 +3469,6 @@ reflect_parse(JSContext* cx, uint32_t argc, Value* vp)
options.allowHTMLComments = target == ParseGoal::Script;
mozilla::Range<const char16_t> chars = linearChars.twoByteRange();
UsedNameTracker usedNames(cx);
if (!usedNames.init())
return false;
RootedScriptSourceObject sourceObject(cx, frontend::CreateScriptSourceObject(cx, options,
mozilla::Nothing()));
@ -3500,8 +3498,6 @@ reflect_parse(JSContext* cx, uint32_t argc, Value* vp)
return false;
ModuleBuilder builder(cx, module, parser.anyChars);
if (!builder.init())
return false;
ModuleSharedContext modulesc(cx, module, &cx->global()->emptyGlobalScope(), builder);
pn = parser.moduleBody(&modulesc);

View File

@ -1254,8 +1254,7 @@ ClearSavedFrames(JSContext* cx, unsigned argc, Value* vp)
CallArgs args = CallArgsFromVp(argc, vp);
js::SavedStacks& savedStacks = cx->realm()->savedStacks();
if (savedStacks.initialized())
savedStacks.clear();
savedStacks.clear();
for (ActivationIterator iter(cx); !iter.done(); ++iter)
iter->clearLiveSavedFrameCache();
@ -3593,7 +3592,7 @@ FindPath(JSContext* cx, unsigned argc, Value* vp)
heaptools::FindPathHandler handler(cx, start, target, &nodes, edges);
heaptools::FindPathHandler::Traversal traversal(cx, handler, autoCannotGC);
if (!traversal.init() || !traversal.addStart(start)) {
if (!traversal.addStart(start)) {
ReportOutOfMemory(cx);
return false;
}
@ -3719,10 +3718,6 @@ ShortestPaths(JSContext* cx, unsigned argc, Value* vp)
JS::AutoCheckCannotGC noGC(cx);
JS::ubi::NodeSet targets;
if (!targets.init()) {
ReportOutOfMemory(cx);
return false;
}
for (size_t i = 0; i < length; i++) {
RootedValue val(cx, objs->getDenseElement(i));

View File

@ -2163,11 +2163,6 @@ InlineTransparentTypedObject::getOrCreateBuffer(JSContext* cx)
if (!table)
return nullptr;
if (!table->init()) {
ReportOutOfMemory(cx);
return nullptr;
}
realm.lazyArrayBuffers = std::move(table);
}

View File

@ -41,10 +41,6 @@ WeakCollectionPutEntryInternal(JSContext* cx, Handle<WeakCollectionObject*> obj,
auto newMap = cx->make_unique<ObjectValueMap>(cx, obj.get());
if (!newMap)
return false;
if (!newMap->init()) {
JS_ReportOutOfMemory(cx);
return false;
}
map = newMap.release();
obj->setPrivate(map);
}

View File

@ -111,12 +111,7 @@ js::intl::SharedIntlData::ensureTimeZones(JSContext* cx)
// If ensureTimeZones() was called previously, but didn't complete due to
// OOM, clear all sets/maps and start from scratch.
if (availableTimeZones.initialized())
availableTimeZones.finish();
if (!availableTimeZones.init()) {
ReportOutOfMemory(cx);
return false;
}
availableTimeZones.clearAndCompact();
UErrorCode status = U_ZERO_ERROR;
UEnumeration* values = ucal_openTimeZones(&status);
@ -158,12 +153,7 @@ js::intl::SharedIntlData::ensureTimeZones(JSContext* cx)
}
}
if (ianaZonesTreatedAsLinksByICU.initialized())
ianaZonesTreatedAsLinksByICU.finish();
if (!ianaZonesTreatedAsLinksByICU.init()) {
ReportOutOfMemory(cx);
return false;
}
ianaZonesTreatedAsLinksByICU.clearAndCompact();
for (const char* rawTimeZone : timezone::ianaZonesTreatedAsLinksByICU) {
MOZ_ASSERT(rawTimeZone != nullptr);
@ -181,12 +171,7 @@ js::intl::SharedIntlData::ensureTimeZones(JSContext* cx)
}
}
if (ianaLinksCanonicalizedDifferentlyByICU.initialized())
ianaLinksCanonicalizedDifferentlyByICU.finish();
if (!ianaLinksCanonicalizedDifferentlyByICU.init()) {
ReportOutOfMemory(cx);
return false;
}
ianaLinksCanonicalizedDifferentlyByICU.clearAndCompact();
RootedAtom linkName(cx);
RootedAtom& target = timeZone;
@ -308,12 +293,7 @@ js::intl::SharedIntlData::ensureUpperCaseFirstLocales(JSContext* cx)
// If ensureUpperCaseFirstLocales() was called previously, but didn't
// complete due to OOM, clear all data and start from scratch.
if (upperCaseFirstLocales.initialized())
upperCaseFirstLocales.finish();
if (!upperCaseFirstLocales.init()) {
ReportOutOfMemory(cx);
return false;
}
upperCaseFirstLocales.clearAndCompact();
UErrorCode status = U_ZERO_ERROR;
UEnumeration* available = ucol_openAvailableLocales(&status);
@ -393,10 +373,10 @@ js::intl::SharedIntlData::isUpperCaseFirst(JSContext* cx, HandleString locale, b
void
js::intl::SharedIntlData::destroyInstance()
{
availableTimeZones.finish();
ianaZonesTreatedAsLinksByICU.finish();
ianaLinksCanonicalizedDifferentlyByICU.finish();
upperCaseFirstLocales.finish();
availableTimeZones.clearAndCompact();
ianaZonesTreatedAsLinksByICU.clearAndCompact();
ianaLinksCanonicalizedDifferentlyByICU.clearAndCompact();
upperCaseFirstLocales.clearAndCompact();
}
void

View File

@ -6104,11 +6104,7 @@ StructType::DefineInternal(JSContext* cx, JSObject* typeObj_, JSObject* fieldsOb
return false;
// Create a FieldInfoHash to stash on the type object.
Rooted<FieldInfoHash> fields(cx);
if (!fields.init(len)) {
JS_ReportOutOfMemory(cx);
return false;
}
Rooted<FieldInfoHash> fields(cx, len);
// Process the field types.
size_t structSize, structAlign;
@ -6215,7 +6211,6 @@ StructType::DefineInternal(JSContext* cx, JSObject* typeObj_, JSObject* fieldsOb
JS_ReportOutOfMemory(cx);
return false;
}
MOZ_ASSERT(heapHash->initialized());
JS_SetReservedSlot(typeObj, SLOT_FIELDINFO, PrivateValue(heapHash));
JS_SetReservedSlot(typeObj, SLOT_SIZE, sizeVal);

View File

@ -12,10 +12,8 @@ using namespace js;
SparseBitmap::~SparseBitmap()
{
if (data.initialized()) {
for (Data::Range r(data.all()); !r.empty(); r.popFront())
js_delete(r.front().value());
}
for (Data::Range r(data.all()); !r.empty(); r.popFront())
js_delete(r.front().value());
}
size_t
@ -28,10 +26,9 @@ SparseBitmap::sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf)
}
SparseBitmap::BitBlock&
SparseBitmap::createBlock(Data::AddPtr p, size_t blockId)
SparseBitmap::createBlock(Data::AddPtr p, size_t blockId, AutoEnterOOMUnsafeRegion& oomUnsafe)
{
MOZ_ASSERT(!p);
AutoEnterOOMUnsafeRegion oomUnsafe;
MOZ_ASSERT(!p && p.isValid());
BitBlock* block = js_new<BitBlock>();
if (!block || !data.add(p, blockId, block))
oomUnsafe.crash("Bitmap OOM");

View File

@ -88,7 +88,7 @@ class SparseBitmap
return std::min<size_t>((size_t)WordsInBlock, std::max<long>(count, 0));
}
BitBlock& createBlock(Data::AddPtr p, size_t blockId);
BitBlock& createBlock(Data::AddPtr p, size_t blockId, AutoEnterOOMUnsafeRegion& oomUnsafe);
MOZ_ALWAYS_INLINE BitBlock* getBlock(size_t blockId) const {
Data::Ptr p = data.lookup(blockId);
@ -96,14 +96,16 @@ class SparseBitmap
}
MOZ_ALWAYS_INLINE BitBlock& getOrCreateBlock(size_t blockId) {
// The lookupForAdd() needs protection against injected OOMs, as does
// the add() within createBlock().
AutoEnterOOMUnsafeRegion oomUnsafe;
Data::AddPtr p = data.lookupForAdd(blockId);
if (p)
return *p->value();
return createBlock(p, blockId);
return createBlock(p, blockId, oomUnsafe);
}
public:
bool init() { return data.init(); }
~SparseBitmap();
size_t sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf);

View File

@ -71,13 +71,7 @@ class InlineTable : private AllocPolicy
MOZ_MUST_USE bool switchToTable() {
MOZ_ASSERT(inlNext_ == InlineEntries);
if (table_.initialized()) {
table_.clear();
} else {
if (!table_.init(count()))
return false;
MOZ_ASSERT(table_.initialized());
}
table_.clear();
InlineEntry* end = inlineEnd();
for (InlineEntry* it = inlineStart(); it != end; ++it) {
@ -330,7 +324,7 @@ class InlineTable : private AllocPolicy
--inlCount_;
return;
}
MOZ_ASSERT(table_.initialized() && usingTable());
MOZ_ASSERT(usingTable());
table_.remove(p.tablePtr_);
}

View File

@ -64,6 +64,8 @@ struct BinaryASTSupport {
}
};
BinaryASTSupport();
JS::Result<const BinVariant*> binVariant(JSContext*, const CharSlice);
JS::Result<const BinField*> binField(JSContext*, const CharSlice);
JS::Result<const BinKind*> binKind(JSContext*, const CharSlice);

View File

@ -73,15 +73,17 @@ const char* describeBinVariant(const BinVariant& variant)
} // namespace frontend
// Pre-size each lookup map to its known final entry count so the lazy
// population in binKind()/binVariant() never needs to grow the table.
// (Entry storage itself is now allocated lazily on first add.)
BinaryASTSupport::BinaryASTSupport()
  : binKindMap_(frontend::BINKIND_LIMIT)
  , binFieldMap_(frontend::BINFIELD_LIMIT)
  , binVariantMap_(frontend::BINVARIANT_LIMIT)
{
}
JS::Result<const js::frontend::BinKind*>
BinaryASTSupport::binKind(JSContext* cx, const CharSlice key)
{
if (!binKindMap_.initialized()) {
// Initialize lazily.
if (!binKindMap_.init(frontend::BINKIND_LIMIT))
return ReportOutOfMemoryResult(cx);
if (binKindMap_.empty()) {
for (size_t i = 0; i < frontend::BINKIND_LIMIT; ++i) {
const BinKind variant = static_cast<BinKind>(i);
const CharSlice& key = getBinKind(variant);
@ -100,12 +102,9 @@ BinaryASTSupport::binKind(JSContext* cx, const CharSlice key)
}
JS::Result<const js::frontend::BinVariant*>
BinaryASTSupport::binVariant(JSContext* cx, const CharSlice key) {
if (!binVariantMap_.initialized()) {
// Initialize lazily.
if (!binVariantMap_.init(frontend::BINVARIANT_LIMIT))
return ReportOutOfMemoryResult(cx);
BinaryASTSupport::binVariant(JSContext* cx, const CharSlice key)
{
if (binVariantMap_.empty()) {
for (size_t i = 0; i < frontend::BINVARIANT_LIMIT; ++i) {
const BinVariant variant = static_cast<BinVariant>(i);
const CharSlice& key = getBinVariant(variant);

View File

@ -125,8 +125,6 @@ BinTokenReaderMultipart::readHeader()
return raiseOOM();
if (!slicesTable_.reserve(stringsNumberOfEntries))
return raiseOOM();
if (!variantsTable_.init())
return raiseOOM();
RootedAtom atom(cx_);
for (uint32_t i = 0; i < stringsNumberOfEntries; ++i) {

View File

@ -223,8 +223,6 @@ bool
BytecodeCompiler::createParser(ParseGoal goal)
{
usedNames.emplace(cx);
if (!usedNames->init())
return false;
if (canLazilyParse()) {
syntaxParser.emplace(cx, alloc, options, sourceBuffer.get(), sourceBuffer.length(),
@ -410,8 +408,6 @@ BytecodeCompiler::compileModule()
module->init(script);
ModuleBuilder builder(cx, module, parser->anyChars);
if (!builder.init())
return nullptr;
ModuleSharedContext modulesc(cx, module, enclosingScope, builder);
ParseNode* pn = parser->moduleBody(&modulesc);
@ -629,8 +625,6 @@ frontend::CompileGlobalBinASTScript(JSContext* cx, LifoAlloc& alloc, const ReadO
AutoAssertReportedException assertException(cx);
frontend::UsedNameTracker usedNames(cx);
if (!usedNames.init())
return nullptr;
RootedScriptSourceObject sourceObj(cx, CreateScriptSourceObject(cx, options));
@ -819,8 +813,6 @@ frontend::CompileLazyFunction(JSContext* cx, Handle<LazyScript*> lazy, const cha
}
UsedNameTracker usedNames(cx);
if (!usedNames.init())
return false;
RootedScriptSourceObject sourceObject(cx, &lazy->sourceObject());
Parser<FullParseHandler, char16_t> parser(cx, cx->tempLifoAlloc(), options, chars, length,

View File

@ -121,10 +121,6 @@ class UsedNameTracker
scopeCounter_(0)
{ }
MOZ_MUST_USE bool init() {
return map_.init();
}
uint32_t nextScriptId() {
MOZ_ASSERT(scriptCounter_ != UINT32_MAX,
"ParseContext::Scope::init should have prevented wraparound");

View File

@ -51,10 +51,6 @@ testBinASTReaderFuzz(const uint8_t* buf, size_t size) {
}
js::frontend::UsedNameTracker binUsedNames(gCx);
if (!binUsedNames.init()) {
ReportOutOfMemory(gCx);
return 0;
}
js::frontend::BinASTParser<js::frontend::BinTokenReaderTester> reader(gCx, gCx->tempLifoAlloc(), binUsedNames, options);

View File

@ -963,6 +963,7 @@ GCRuntime::GCRuntime(JSRuntime* rt) :
stats_(rt),
marker(rt),
usage(nullptr),
rootsHash(256),
nextCellUniqueId_(LargestTaggedNullCellPointer + 1), // Ensure disjoint from null tagged pointers.
numArenasFreeCommitted(0),
verifyPreData(nullptr),
@ -1287,9 +1288,6 @@ GCRuntime::init(uint32_t maxbytes, uint32_t maxNurseryBytes)
{
MOZ_ASSERT(SystemPageSize());
if (!rootsHash.ref().init(256))
return false;
{
AutoLockGCBgAlloc lock(rt);
@ -4699,9 +4697,6 @@ js::gc::MarkingValidator::nonIncrementalMark(AutoGCSession& session)
* Currently this does not validate gray marking.
*/
if (!map.init())
return;
JSRuntime* runtime = gc->rt;
GCMarker* gcmarker = &gc->marker;
@ -4732,8 +4727,6 @@ js::gc::MarkingValidator::nonIncrementalMark(AutoGCSession& session)
*/
WeakMapSet markedWeakMaps;
if (!markedWeakMaps.init())
return;
/*
* For saving, smush all of the keys into one big table and split them back
@ -8077,7 +8070,7 @@ js::NewRealm(JSContext* cx, JSPrincipals* principals, const JS::RealmOptions& op
if (!comp) {
compHolder = cx->make_unique<JS::Compartment>(zone);
if (!compHolder || !compHolder->init(cx))
if (!compHolder)
return nullptr;
comp = compHolder.get();
@ -8254,9 +8247,6 @@ GCRuntime::mergeRealms(Realm* source, Realm* target)
if (!target->scriptNameMap)
oomUnsafe.crash("Failed to create a script name map.");
if (!target->scriptNameMap->init())
oomUnsafe.crash("Failed to initialize a script name map.");
}
for (ScriptNameMap::Range r = source->scriptNameMap->all(); !r.empty(); r.popFront()) {

View File

@ -24,7 +24,7 @@ struct DependentAddPtr
typedef typename T::Entry Entry;
template <class Lookup>
DependentAddPtr(const JSContext* cx, const T& table, const Lookup& lookup)
DependentAddPtr(const JSContext* cx, T& table, const Lookup& lookup)
: addPtr(table.lookupForAdd(lookup))
, originalGcNumber(cx->zone()->gcNumber())
{}
@ -56,7 +56,7 @@ struct DependentAddPtr
const Entry* operator->() const { return &*addPtr; }
private:
AddPtr addPtr ;
AddPtr addPtr;
const uint64_t originalGcNumber;
template <class KeyInput>

View File

@ -2711,7 +2711,6 @@ js::gc::StoreBuffer::MonoTypeBuffer<T>::trace(StoreBuffer* owner, TenuringTracer
{
mozilla::ReentrancyGuard g(*owner);
MOZ_ASSERT(owner->isEnabled());
MOZ_ASSERT(stores_.initialized());
if (last_)
last_.trace(mover);
for (typename StoreSet::Range r = stores_.all(); !r.empty(); r.popFront())

View File

@ -49,7 +49,6 @@ struct js::Nursery::FreeMallocedBuffersTask : public GCParallelTaskHelper<FreeMa
explicit FreeMallocedBuffersTask(FreeOp* fop)
: GCParallelTaskHelper(fop->runtime()),
fop_(fop) {}
bool init() { return buffers_.init(); }
void transferBuffersToFree(MallocedBuffersSet& buffersToFree,
const AutoLockHelperThreadState& lock);
~FreeMallocedBuffersTask() { join(); }
@ -149,11 +148,8 @@ js::Nursery::Nursery(JSRuntime* rt)
bool
js::Nursery::init(uint32_t maxNurseryBytes, AutoLockGCBgAlloc& lock)
{
if (!mallocedBuffers.init())
return false;
freeMallocedBuffersTask = js_new<FreeMallocedBuffersTask>(runtime()->defaultFreeOp());
if (!freeMallocedBuffersTask || !freeMallocedBuffersTask->init())
if (!freeMallocedBuffersTask)
return false;
// The nursery is permanently disabled when recording or replaying. Nursery
@ -500,8 +496,6 @@ Nursery::setIndirectForwardingPointer(void* oldData, void* newData)
MOZ_ASSERT(!isInside(newData) || (uintptr_t(newData) & ChunkMask) == 0);
AutoEnterOOMUnsafeRegion oomUnsafe;
if (!forwardedBuffers.initialized() && !forwardedBuffers.init())
oomUnsafe.crash("Nursery::setForwardingPointer");
#ifdef DEBUG
if (ForwardedBufferMap::Ptr p = forwardedBuffers.lookup(oldData))
MOZ_ASSERT(p->value() == newData);
@ -530,11 +524,9 @@ js::Nursery::forwardBufferPointer(HeapSlot** pSlotsElems)
// The new location for this buffer is either stored inline with it or in
// the forwardedBuffers table.
do {
if (forwardedBuffers.initialized()) {
if (ForwardedBufferMap::Ptr p = forwardedBuffers.lookup(old)) {
*pSlotsElems = reinterpret_cast<HeapSlot*>(p->value());
break;
}
if (ForwardedBufferMap::Ptr p = forwardedBuffers.lookup(old)) {
*pSlotsElems = reinterpret_cast<HeapSlot*>(p->value());
break;
}
*pSlotsElems = *reinterpret_cast<HeapSlot**>(old);
@ -929,7 +921,7 @@ js::Nursery::doCollection(JS::gcreason::Reason reason, TenureCountCache& tenureC
// Update any slot or element pointers whose destination has been tenured.
startProfile(ProfileKey::UpdateJitActivations);
js::jit::UpdateJitActivationsForMinorGC(rt);
forwardedBuffers.finish();
forwardedBuffers.clearAndCompact();
endProfile(ProfileKey::UpdateJitActivations);
startProfile(ProfileKey::ObjectsTenuredCallback);

View File

@ -288,8 +288,6 @@ class Nursery
return allocatedChunkCount() * gc::ChunkSize;
}
size_t sizeOfMallocedBuffers(mozilla::MallocSizeOf mallocSizeOf) const {
if (!mallocedBuffers.initialized())
return 0;
size_t total = 0;
for (MallocedBuffersSet::Range r = mallocedBuffers.all(); !r.empty(); r.popFront())
total += mallocSizeOf(r.front());

View File

@ -90,8 +90,8 @@ class NurseryAwareHashMap
using Entry = typename MapType::Entry;
explicit NurseryAwareHashMap(AllocPolicy a = AllocPolicy()) : map(a) {}
MOZ_MUST_USE bool init(uint32_t len = 16) { return map.init(len); }
explicit NurseryAwareHashMap(size_t length) : map(length) {}
NurseryAwareHashMap(AllocPolicy a, size_t length) : map(a, length) {}
bool empty() const { return map.empty(); }
Ptr lookup(const Lookup& l) const { return map.lookup(l); }

View File

@ -432,8 +432,7 @@ js::gc::GCRuntime::finishRoots()
rt->finishAtoms();
if (rootsHash.ref().initialized())
rootsHash.ref().clear();
rootsHash.ref().clear();
rt->finishPersistentRoots();

View File

@ -50,10 +50,7 @@ StoreBuffer::enable()
checkEmpty();
if (!bufferVal.init() ||
!bufferCell.init() ||
!bufferSlot.init() ||
!bufferWholeCell.init() ||
if (!bufferWholeCell.init() ||
!bufferGeneric.init())
{
return false;

View File

@ -83,24 +83,14 @@ class StoreBuffer
const static size_t MaxEntries = 48 * 1024 / sizeof(T);
explicit MonoTypeBuffer() : last_(T()) {}
~MonoTypeBuffer() { stores_.finish(); }
MOZ_MUST_USE bool init() {
if (!stores_.initialized() && !stores_.init())
return false;
clear();
return true;
}
void clear() {
last_ = T();
if (stores_.initialized())
stores_.clear();
stores_.clear();
}
/* Add one item to the buffer. */
void put(StoreBuffer* owner, const T& t) {
MOZ_ASSERT(stores_.initialized());
sinkStore(owner);
last_ = t;
}
@ -117,7 +107,6 @@ class StoreBuffer
/* Move any buffered stores to the canonical store set. */
void sinkStore(StoreBuffer* owner) {
MOZ_ASSERT(stores_.initialized());
if (last_) {
AutoEnterOOMUnsafeRegion oomUnsafe;
if (!stores_.put(last_))
@ -142,7 +131,7 @@ class StoreBuffer
}
bool isEmpty() const {
return last_ == T() && (!stores_.initialized() || stores_.empty());
return last_ == T() && stores_.empty();
}
private:

View File

@ -212,9 +212,6 @@ gc::GCRuntime::startVerifyPreBarriers()
trc->edgeptr = (char*)trc->root;
trc->term = trc->edgeptr + size;
if (!trc->nodemap.init())
goto oom;
/* Create the root node. */
trc->curnode = MakeNode(trc, nullptr, JS::TraceKind(0));
@ -458,7 +455,6 @@ class HeapCheckTracerBase : public JS::CallbackTracer
{
public:
explicit HeapCheckTracerBase(JSRuntime* rt, WeakMapTraceKind weakTraceKind);
bool init();
bool traceHeap(AutoTraceSession& session);
virtual void checkCell(Cell* cell) = 0;
@ -505,12 +501,6 @@ HeapCheckTracerBase::HeapCheckTracerBase(JSRuntime* rt, WeakMapTraceKind weakTra
#endif
}
// Allocate entry storage for the 'visited' cell set used while tracing the
// heap. Returns false on OOM.
bool
HeapCheckTracerBase::init()
{
    return visited.init();
}
void
HeapCheckTracerBase::onChild(const JS::GCCellPtr& thing)
{
@ -674,8 +664,7 @@ js::gc::CheckHeapAfterGC(JSRuntime* rt)
gcType = CheckHeapTracer::GCType::NonMoving;
CheckHeapTracer tracer(rt, gcType);
if (tracer.init())
tracer.check(session);
tracer.check(session);
}
#endif /* JSGC_HASH_TABLE_CHECKS */
@ -743,8 +732,6 @@ js::CheckGrayMarkingState(JSRuntime* rt)
gcstats::AutoPhase ap(rt->gc.stats(), gcstats::PhaseKind::TRACE_HEAP);
AutoTraceSession session(rt);
CheckGrayMarkingTracer tracer(rt);
if (!tracer.init())
return true; // Ignore failure
return tracer.check(session);
}

View File

@ -28,17 +28,9 @@ static T* extractUnbarriered(T* v)
template <class K, class V, class HP>
WeakMap<K, V, HP>::WeakMap(JSContext* cx, JSObject* memOf)
: Base(cx->zone()), WeakMapBase(memOf, cx->zone())
{}
// Initialize the underlying hash map with |len| as the initial length hint,
// then register this map in the zone's list of live weak maps so the GC can
// find it. Returns false on OOM in Base::init().
template <class K, class V, class HP>
bool
WeakMap<K, V, HP>::init(uint32_t len)
{
    if (!Base::init(len))
        return false;
    // Make the map discoverable by the GC.
    zone()->gcWeakMapList().insertFront(this);
    // NOTE(review): if an incremental GC is already in progress, the (empty)
    // map starts out flagged as marked — presumably so the current cycle
    // treats it consistently with already-marked maps; confirm against the
    // marking code.
    marked = JS::IsIncrementalGCInProgress(TlsContext.get());
    return true;
}
// Trace a WeakMap entry based on 'markedCell' getting marked, where 'origKey'
@ -80,9 +72,6 @@ WeakMap<K, V, HP>::trace(JSTracer* trc)
TraceNullableEdge(trc, &memberOf, "WeakMap owner");
if (!Base::initialized())
return;
if (trc->isMarkingTracer()) {
MOZ_ASSERT(trc->weakMapAction() == ExpandWeakMaps);
marked = true;

View File

@ -81,8 +81,7 @@ WeakMapBase::sweepZone(JS::Zone* zone)
if (m->marked) {
m->sweep();
} else {
/* Destroy the hash map now to catch any use after this point. */
m->finish();
m->clearAndCompact();
m->removeFrom(zone->gcWeakMapList());
}
m = next;
@ -157,16 +156,9 @@ ObjectWeakMap::ObjectWeakMap(JSContext* cx)
: map(cx, nullptr)
{}
// Allocate entry storage for the underlying object-to-value weak map.
// Returns false on OOM.
bool
ObjectWeakMap::init()
{
    return map.init();
}
JSObject*
ObjectWeakMap::lookup(const JSObject* obj)
{
MOZ_ASSERT(map.initialized());
if (ObjectValueMap::Ptr p = map.lookup(const_cast<JSObject*>(obj)))
return &p->value().toObject();
return nullptr;
@ -176,7 +168,6 @@ bool
ObjectWeakMap::add(JSContext* cx, JSObject* obj, JSObject* target)
{
MOZ_ASSERT(obj && target);
MOZ_ASSERT(map.initialized());
MOZ_ASSERT(!map.has(obj));
if (!map.put(obj, ObjectValue(*target))) {
@ -190,7 +181,6 @@ ObjectWeakMap::add(JSContext* cx, JSObject* obj, JSObject* target)
void
ObjectWeakMap::clear()
{
MOZ_ASSERT(map.initialized());
map.clear();
}
@ -203,7 +193,6 @@ ObjectWeakMap::trace(JSTracer* trc)
size_t
ObjectWeakMap::sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf)
{
MOZ_ASSERT(map.initialized());
return map.shallowSizeOfExcludingThis(mallocSizeOf);
}
@ -211,7 +200,6 @@ ObjectWeakMap::sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf)
void
ObjectWeakMap::checkAfterMovingGC()
{
MOZ_ASSERT(map.initialized());
for (ObjectValueMap::Range r = map.all(); !r.empty(); r.popFront()) {
CheckGCThingAfterMovingGC(r.front().key().get());
CheckGCThingAfterMovingGC(&r.front().value().toObject());

View File

@ -92,7 +92,7 @@ class WeakMapBase : public mozilla::LinkedListElement<WeakMapBase>
virtual bool findZoneEdges() = 0;
virtual void sweep() = 0;
virtual void traceMappings(WeakMapTracer* tracer) = 0;
virtual void finish() = 0;
virtual void clearAndCompact() = 0;
// Any weakmap key types that want to participate in the non-iterative
// ephemeron marking must override this method.
@ -127,8 +127,6 @@ class WeakMap : public HashMap<Key, Value, HashPolicy, ZoneAllocPolicy>,
explicit WeakMap(JSContext* cx, JSObject* memOf = nullptr);
bool init(uint32_t len = 16);
// Overwritten to add a read barrier to prevent an incorrectly gray value
// from escaping the weak map. See the UnmarkGrayTracer::onChild comment in
// gc/Marking.cpp.
@ -139,7 +137,7 @@ class WeakMap : public HashMap<Key, Value, HashPolicy, ZoneAllocPolicy>,
return p;
}
AddPtr lookupForAdd(const Lookup& l) const {
AddPtr lookupForAdd(const Lookup& l) {
AddPtr p = Base::lookupForAdd(l);
if (p)
exposeGCThingToActiveJS(p->value());
@ -178,8 +176,9 @@ class WeakMap : public HashMap<Key, Value, HashPolicy, ZoneAllocPolicy>,
void sweep() override;
void finish() override {
Base::finish();
void clearAndCompact() override {
Base::clear();
Base::compact();
}
/* memberOf can be nullptr, which means that the map is not part of a JSObject. */
@ -212,7 +211,6 @@ class ObjectWeakMap
public:
explicit ObjectWeakMap(JSContext* cx);
bool init();
JS::Zone* zone() const { return map.zone(); }

View File

@ -66,7 +66,7 @@ JS::WeakMapPtr<K, V>::init(JSContext* cx)
MOZ_ASSERT(!initialized());
typename WeakMapDetails::Utils<K, V>::PtrType map =
cx->new_<typename WeakMapDetails::Utils<K,V>::Type>(cx);
if (!map || !map->init())
if (!map)
return false;
ptr = map;
return true;

View File

@ -105,13 +105,7 @@ bool
Zone::init(bool isSystemArg)
{
isSystem = isSystemArg;
return uniqueIds().init() &&
gcSweepGroupEdges().init() &&
gcWeakKeys().init() &&
typeDescrObjects().init() &&
markedAtoms().init() &&
atomCache().init() &&
regExps.init();
return gcWeakKeys().init();
}
void
@ -288,7 +282,7 @@ Zone::createJitZone(JSContext* cx)
return nullptr;
UniquePtr<jit::JitZone> jitZone(cx->new_<js::jit::JitZone>());
if (!jitZone || !jitZone->init(cx))
if (!jitZone)
return nullptr;
jitZone_ = jitZone.release();
@ -364,10 +358,8 @@ Zone::clearTables()
{
MOZ_ASSERT(regExps.empty());
if (baseShapes().initialized())
baseShapes().clear();
if (initialShapes().initialized())
initialShapes().clear();
baseShapes().clear();
initialShapes().clear();
}
void
@ -453,7 +445,7 @@ Zone::purgeAtomCache()
MOZ_ASSERT(!hasKeptAtoms());
MOZ_ASSERT(!purgeAtomsDeferred);
atomCache().clearAndShrink();
atomCache().clearAndCompact();
// Also purge the dtoa caches so that subsequent lookups populate atom
// cache too.

View File

@ -10217,9 +10217,6 @@ CodeGenerator::generate()
if (!addNativeToBytecodeEntry(startSite))
return false;
if (!snapshots_.init())
return false;
if (!safepoints_.init(gen->alloc()))
return false;

View File

@ -99,8 +99,7 @@ ExecutableAllocator::~ExecutableAllocator()
m_smallPools[i]->release(/* willDestroy = */true);
// If this asserts we have a pool leak.
MOZ_ASSERT_IF((m_pools.initialized() &&
TlsContext.get()->runtime()->gc.shutdownCollectedEverything()),
MOZ_ASSERT_IF(TlsContext.get()->runtime()->gc.shutdownCollectedEverything(),
m_pools.empty());
}
@ -180,9 +179,6 @@ ExecutableAllocator::createPool(size_t n)
if (allocSize == OVERSIZE_ALLOCATION)
return nullptr;
if (!m_pools.initialized() && !m_pools.init())
return nullptr;
ExecutablePool::Allocation a = systemAlloc(allocSize);
if (!a.pages)
return nullptr;
@ -235,8 +231,6 @@ ExecutableAllocator::releasePoolPages(ExecutablePool* pool)
MOZ_ASSERT(pool->m_allocation.pages);
systemRelease(pool->m_allocation);
MOZ_ASSERT(m_pools.initialized());
// Pool may not be present in m_pools if we hit OOM during creation.
if (auto ptr = m_pools.lookup(pool))
m_pools.remove(ptr);
@ -263,15 +257,13 @@ ExecutableAllocator::purge()
void
ExecutableAllocator::addSizeOfCode(JS::CodeSizes* sizes) const
{
if (m_pools.initialized()) {
for (ExecPoolHashSet::Range r = m_pools.all(); !r.empty(); r.popFront()) {
ExecutablePool* pool = r.front();
sizes->ion += pool->m_codeBytes[CodeKind::Ion];
sizes->baseline += pool->m_codeBytes[CodeKind::Baseline];
sizes->regexp += pool->m_codeBytes[CodeKind::RegExp];
sizes->other += pool->m_codeBytes[CodeKind::Other];
sizes->unused += pool->m_allocation.size - pool->usedCodeBytes();
}
for (ExecPoolHashSet::Range r = m_pools.all(); !r.empty(); r.popFront()) {
ExecutablePool* pool = r.front();
sizes->ion += pool->m_codeBytes[CodeKind::Ion];
sizes->baseline += pool->m_codeBytes[CodeKind::Baseline];
sizes->regexp += pool->m_codeBytes[CodeKind::RegExp];
sizes->other += pool->m_codeBytes[CodeKind::Other];
sizes->unused += pool->m_allocation.size - pool->usedCodeBytes();
}
}

View File

@ -221,7 +221,7 @@ JitRuntime::initialize(JSContext* cx)
JitContext jctx(cx, nullptr);
functionWrappers_ = cx->new_<VMWrapperMap>(cx);
if (!functionWrappers_ || !functionWrappers_->init())
if (!functionWrappers_)
return false;
StackMacroAssembler masm;
@ -410,11 +410,6 @@ JitRealm::initialize(JSContext* cx)
if (!stubCodes_)
return false;
if (!stubCodes_->init()) {
ReportOutOfMemory(cx);
return false;
}
stringsCanBeInNursery = cx->nursery().canAllocateStrings();
return true;
@ -443,17 +438,6 @@ JitRealm::performStubReadBarriers(uint32_t stubsToBarrier) const
}
}
// Initialize per-zone JIT state: allocate entry storage for the Baseline
// CacheIR stub code map. Reports OOM on |cx| and returns false on failure.
bool
JitZone::init(JSContext* cx)
{
    if (!baselineCacheIRStubCodes_.init()) {
        ReportOutOfMemory(cx);
        return false;
    }
    return true;
}
void
jit::FreeIonBuilder(IonBuilder* builder)
{
@ -686,7 +670,6 @@ TrampolinePtr
JitRuntime::getVMWrapper(const VMFunction& f) const
{
MOZ_ASSERT(functionWrappers_);
MOZ_ASSERT(functionWrappers_->initialized());
MOZ_ASSERT(trampolineCode_);
JitRuntime::VMWrapperMap::Ptr p = functionWrappers_->readonlyThreadsafeLookup(&f);
@ -1483,8 +1466,6 @@ OptimizeMIR(MIRGenerator* mir)
}
ValueNumberer gvn(mir, graph);
if (!gvn.init())
return false;
// Alias analysis is required for LICM and GVN so that we don't move
// loads across stores.

View File

@ -3581,9 +3581,6 @@ jit::EliminateRedundantChecks(MIRGraph& graph)
{
BoundsCheckMap checks(graph.alloc());
if (!checks.init())
return false;
// Stack for pre-order CFG traversal.
Vector<MBasicBlock*, 1, JitAllocPolicy> worklist(graph.alloc());

View File

@ -398,7 +398,6 @@ class JitZone
BaselineCacheIRStubCodeMap baselineCacheIRStubCodes_;
public:
MOZ_MUST_USE bool init(JSContext* cx);
void sweep();
void addSizeOfIncludingThis(mozilla::MallocSizeOf mallocSizeOf,
@ -433,22 +432,18 @@ class JitZone
}
CacheIRStubInfo* getIonCacheIRStubInfo(const CacheIRStubKey::Lookup& key) {
if (!ionCacheIRStubInfoSet_.initialized())
return nullptr;
IonCacheIRStubInfoSet::Ptr p = ionCacheIRStubInfoSet_.lookup(key);
return p ? p->stubInfo.get() : nullptr;
}
MOZ_MUST_USE bool putIonCacheIRStubInfo(const CacheIRStubKey::Lookup& lookup,
CacheIRStubKey& key)
{
if (!ionCacheIRStubInfoSet_.initialized() && !ionCacheIRStubInfoSet_.init())
return false;
IonCacheIRStubInfoSet::AddPtr p = ionCacheIRStubInfoSet_.lookupForAdd(lookup);
MOZ_ASSERT(!p);
return ionCacheIRStubInfoSet_.add(p, std::move(key));
}
void purgeIonCacheIRStubInfo() {
ionCacheIRStubInfoSet_.finish();
ionCacheIRStubInfoSet_.clearAndCompact();
}
};

View File

@ -37,8 +37,6 @@ LIRGraph::LIRGraph(MIRGraph* mir)
bool
LIRGraph::addConstantToPool(const Value& v, uint32_t* index)
{
MOZ_ASSERT(constantPoolMap_.initialized());
ConstantPoolMap::AddPtr p = constantPoolMap_.lookupForAdd(v);
if (p) {
*index = p->value();

View File

@ -1898,7 +1898,7 @@ class LIRGraph
explicit LIRGraph(MIRGraph* mir);
MOZ_MUST_USE bool init() {
return constantPoolMap_.init() && blocks_.init(mir_.alloc(), mir_.numBlocks());
return blocks_.init(mir_.alloc(), mir_.numBlocks());
}
MIRGraph& mir() const {
return mir_;

View File

@ -250,9 +250,6 @@ LoopUnroller::go(LoopIterationBound* bound)
graph.insertBlockAfter(unrolledBackedge, newPreheader);
graph.renumberBlocksAfter(oldPreheader);
if (!unrolledDefinitions.init())
return false;
// Add phis to the unrolled loop header which correspond to the phis in the
// original loop header.
MOZ_ASSERT(header->getPredecessor(0) == oldPreheader);

View File

@ -373,7 +373,6 @@ class jit::UniqueTrackedTypes
list_(cx)
{ }
bool init() { return map_.init(); }
bool getIndexOf(TypeSet::Type ty, uint8_t* indexp);
uint32_t count() const { MOZ_ASSERT(map_.count() == list_.length()); return list_.length(); }
@ -965,8 +964,6 @@ jit::WriteIonTrackedOptimizationsTable(JSContext* cx, CompactBufferWriter& write
// Write out type info payloads.
UniqueTrackedTypes uniqueTypes(cx);
if (!uniqueTypes.init())
return false;
for (const UniqueTrackedOptimizations::SortEntry* p = vec.begin(); p != vec.end(); p++) {
const TempOptimizationTypeInfoVector* v = p->types;

View File

@ -182,7 +182,6 @@ class UniqueTrackedOptimizations
sorted_(cx)
{ }
MOZ_MUST_USE bool init() { return map_.init(); }
MOZ_MUST_USE bool add(const TrackedOptimizations* optimizations);
MOZ_MUST_USE bool sortByFrequency(JSContext* cx);

View File

@ -75,7 +75,7 @@ AllocationIntegrityState::record()
}
}
return seen.init();
return true;
}
bool

View File

@ -547,14 +547,12 @@ SnapshotReader::readAllocation()
return RValueAllocation::read(allocReader_);
}
// Pre-size the map used by SnapshotWriter::add() to de-duplicate
// RValueAllocations. Returns false on OOM.
bool
SnapshotWriter::init()
{
    // Based on the measurements made in Bug 962555 comment 20, this should be
    // enough to prevent the reallocation of the hash table for at least half of
    // the compilations.
    return allocMap_.init(32);
}
SnapshotWriter::SnapshotWriter()
// Based on the measurements made in Bug 962555 comment 20, this length
// should be enough to prevent the reallocation of the hash table for at
// least half of the compilations.
: allocMap_(32)
{}
RecoverReader::RecoverReader(SnapshotReader& snapshot, const uint8_t* recovers, uint32_t size)
: reader_(nullptr, nullptr),
@ -647,8 +645,6 @@ SnapshotWriter::trackSnapshot(uint32_t pcOpcode, uint32_t mirOpcode, uint32_t mi
bool
SnapshotWriter::add(const RValueAllocation& alloc)
{
MOZ_ASSERT(allocMap_.initialized());
uint32_t offset;
RValueAllocMap::AddPtr p = allocMap_.lookupForAdd(alloc);
if (!p) {

View File

@ -400,7 +400,7 @@ class SnapshotWriter
SnapshotOffset lastStart_;
public:
MOZ_MUST_USE bool init();
SnapshotWriter();
SnapshotOffset startSnapshot(RecoverOffset recoverOffset, BailoutKind kind);
#ifdef TRACK_SNAPSHOTS

View File

@ -78,13 +78,6 @@ ValueNumberer::VisibleValues::VisibleValues(TempAllocator& alloc)
: set_(alloc)
{}
// Initialize the set.
bool
ValueNumberer::VisibleValues::init()
{
return set_.init();
}
// Look up the first entry for |def|.
ValueNumberer::VisibleValues::Ptr
ValueNumberer::VisibleValues::findLeader(const MDefinition* def) const
@ -1208,6 +1201,12 @@ bool ValueNumberer::cleanupOSRFixups()
ValueNumberer::ValueNumberer(MIRGenerator* mir, MIRGraph& graph)
: mir_(mir), graph_(graph),
// Initialize the value set. It's tempting to pass in a length that is a
// function of graph_.getNumInstructionIds(). But if we start out with a
// large capacity, it will be far larger than the actual element count for
// most of the pass, so when we remove elements, it would often think it
// needs to compact itself. Empirically, just letting the HashTable grow as
// needed on its own seems to work pretty well.
values_(graph.alloc()),
deadDefs_(graph.alloc()),
remainingBlocks_(graph.alloc()),
@ -1220,18 +1219,6 @@ ValueNumberer::ValueNumberer(MIRGenerator* mir, MIRGraph& graph)
hasOSRFixups_(false)
{}
bool
ValueNumberer::init()
{
// Initialize the value set. It's tempting to pass in a size here of some
// function of graph_.getNumInstructionIds(), however if we start out with a
// large capacity, it will be far larger than the actual element count for
// most of the pass, so when we remove elements, it would often think it
// needs to compact itself. Empirically, just letting the HashTable grow as
// needed on its own seems to work pretty well.
return values_.init();
}
bool
ValueNumberer::run(UpdateAliasAnalysisFlag updateAliasAnalysis)
{

View File

@ -41,7 +41,6 @@ class ValueNumberer
public:
explicit VisibleValues(TempAllocator& alloc);
MOZ_MUST_USE bool init();
typedef ValueSet::Ptr Ptr;
typedef ValueSet::AddPtr AddPtr;
@ -108,7 +107,6 @@ class ValueNumberer
public:
ValueNumberer(MIRGenerator* mir, MIRGraph& graph);
MOZ_MUST_USE bool init();
enum UpdateAliasAnalysisFlag {
DontUpdateAliasAnalysis,

View File

@ -31,8 +31,6 @@ jit::EliminateBoundsChecks(MIRGenerator* mir, MIRGraph& graph)
{
// Map for dominating block where a given definition was checked
LastSeenMap lastSeen;
if (!lastSeen.init())
return false;
for (ReversePostorderIterator bIter(graph.rpoBegin()); bIter != graph.rpoEnd(); bIter++) {
MBasicBlock* block = *bIter;

View File

@ -1282,7 +1282,10 @@ Simulator::~Simulator()
SimulatorProcess::SimulatorProcess()
: cacheLock_(mutexid::SimulatorCacheLock)
, redirection_(nullptr)
{}
{
if (getenv("ARM_SIM_ICACHE_CHECKS"))
ICacheCheckingDisableCount = 0;
}
SimulatorProcess::~SimulatorProcess()
{
@ -1294,15 +1297,6 @@ SimulatorProcess::~SimulatorProcess()
}
}
bool
SimulatorProcess::init()
{
    // Setting ARM_SIM_ICACHE_CHECKS in the environment enables icache
    // checking by dropping the disable count to zero.
    if (getenv("ARM_SIM_ICACHE_CHECKS"))
        ICacheCheckingDisableCount = 0;
    // Allocate the icache hash table's entry storage; false on OOM.
    return icache_.init();
}
/* static */ void*
Simulator::RedirectNativeFunction(void* nativeFunction, ABIFunctionType type)
{

View File

@ -487,7 +487,7 @@ class SimulatorProcess
static bool initialize() {
singleton_ = js_new<SimulatorProcess>();
return singleton_ && singleton_->init();
return singleton_;
}
static void destroy() {
js_delete(singleton_);
@ -498,8 +498,6 @@ class SimulatorProcess
~SimulatorProcess();
private:
bool init();
static SimulatorProcess* singleton_;
// This lock creates a critical section around 'redirection_' and

View File

@ -716,7 +716,6 @@ bool
JitRuntime::generateVMWrapper(JSContext* cx, MacroAssembler& masm, const VMFunction& f)
{
MOZ_ASSERT(functionWrappers_);
MOZ_ASSERT(functionWrappers_->initialized());
uint32_t wrapperOffset = startTrampolineCode(masm);

View File

@ -524,7 +524,6 @@ bool
JitRuntime::generateVMWrapper(JSContext* cx, MacroAssembler& masm, const VMFunction& f)
{
MOZ_ASSERT(functionWrappers_);
MOZ_ASSERT(functionWrappers_->initialized());
uint32_t wrapperOffset = startTrampolineCode(masm);

View File

@ -880,8 +880,6 @@ CodeGeneratorShared::generateCompactTrackedOptimizationsMap(JSContext* cx, JitCo
return true;
UniqueTrackedOptimizations unique(cx);
if (!unique.init())
return false;
// Iterate through all entries to deduplicate their optimization attempts.
for (size_t i = 0; i < trackedOptimizations_.length(); i++) {

View File

@ -604,7 +604,6 @@ bool
JitRuntime::generateVMWrapper(JSContext* cx, MacroAssembler& masm, const VMFunction& f)
{
MOZ_ASSERT(functionWrappers_);
MOZ_ASSERT(functionWrappers_->initialized());
uint32_t wrapperOffset = startTrampolineCode(masm);

View File

@ -139,11 +139,6 @@ MacroAssemblerX86Shared::getConstant(const typename T::Pod& value, Map& map,
Vector<T, 0, SystemAllocPolicy>& vec)
{
typedef typename Map::AddPtr AddPtr;
if (!map.initialized()) {
enoughMemory_ &= map.init();
if (!enoughMemory_)
return nullptr;
}
size_t index;
if (AddPtr p = map.lookupForAdd(value)) {
index = p->value();

View File

@ -624,7 +624,6 @@ bool
JitRuntime::generateVMWrapper(JSContext* cx, MacroAssembler& masm, const VMFunction& f)
{
MOZ_ASSERT(functionWrappers_);
MOZ_ASSERT(functionWrappers_->initialized());
uint32_t wrapperOffset = startTrampolineCode(masm);

View File

@ -166,8 +166,6 @@ runTestFromPath(JSContext* cx, const char* path)
txtOptions.setFileAndLine(txtPath.begin(), 0);
UsedNameTracker txtUsedNames(cx);
if (!txtUsedNames.init())
MOZ_CRASH("Couldn't initialize used names");
RootedScriptSourceObject sourceObject(cx, frontend::CreateScriptSourceObject(
cx, txtOptions, mozilla::Nothing()));
@ -206,8 +204,6 @@ runTestFromPath(JSContext* cx, const char* path)
binOptions.setFileAndLine(binPath.begin(), 0);
js::frontend::UsedNameTracker binUsedNames(cx);
if (!binUsedNames.init())
MOZ_CRASH("Couldn't initialized binUsedNames");
js::frontend::BinASTParser<Tok> binParser(cx, allocScope.alloc(), binUsedNames, binOptions);

View File

@ -145,9 +145,7 @@ using MyHashMap = js::GCHashMap<js::Shape*, JSObject*>;
BEGIN_TEST(testGCRootedHashMap)
{
JS::Rooted<MyHashMap> map(cx, MyHashMap(cx));
CHECK(map.init(15));
CHECK(map.initialized());
JS::Rooted<MyHashMap> map(cx, MyHashMap(cx, 15));
for (size_t i = 0; i < 10; ++i) {
RootedObject obj(cx, JS_NewObject(cx, nullptr));
@ -205,9 +203,7 @@ CheckMyHashMap(JSContext* cx, Handle<MyHashMap> map)
BEGIN_TEST(testGCHandleHashMap)
{
JS::Rooted<MyHashMap> map(cx, MyHashMap(cx));
CHECK(map.init(15));
CHECK(map.initialized());
JS::Rooted<MyHashMap> map(cx, MyHashMap(cx, 15));
CHECK(FillMyHashMap(cx, &map));

View File

@ -344,7 +344,6 @@ TestUnassociatedWeakMaps()
// Make a weakmap that's not associated with a JSObject.
auto weakMap = cx->make_unique<GCManagedObjectWeakMap>(cx);
CHECK(weakMap);
CHECK(weakMap->init());
// Make sure this gets traced during GC.
Rooted<GCManagedObjectWeakMap*> rootMap(cx, weakMap.get());

View File

@ -122,7 +122,6 @@ BEGIN_TEST(testTracingIncomingCCWs)
// Ensure that |TraceIncomingCCWs| finds the object wrapped by the CCW.
JS::CompartmentSet compartments;
CHECK(compartments.init());
CHECK(compartments.put(global2->compartment()));
void* thing = wrappee.get();

View File

@ -32,7 +32,6 @@ BEGIN_TEST(testWeakCacheSet)
SystemAllocPolicy>;
using Cache = JS::WeakCache<ObjectSet>;
Cache cache(JS::GetObjectZone(tenured1));
CHECK(cache.init());
cache.put(tenured1);
cache.put(tenured2);
@ -73,7 +72,6 @@ BEGIN_TEST(testWeakCacheMap)
js::MovableCellHasher<JS::Heap<JSObject*>>>;
using Cache = JS::WeakCache<ObjectMap>;
Cache cache(JS::GetObjectZone(tenured1), cx);
CHECK(cache.init());
cache.put(tenured1, 1);
cache.put(tenured2, 2);
@ -284,8 +282,6 @@ TestSet()
TempAllocPolicy>;
using Cache = JS::WeakCache<ObjectSet>;
Cache cache(JS::GetObjectZone(global), cx);
CHECK(cache.init());
CHECK(cache.initialized());
// Sweep empty cache.
@ -405,7 +401,6 @@ TestSet()
CHECK(cache.has(obj4));
cache.clear();
cache.finish();
return true;
}
@ -418,8 +413,6 @@ TestMap()
TempAllocPolicy>;
using Cache = JS::WeakCache<ObjectMap>;
Cache cache(JS::GetObjectZone(global), cx);
CHECK(cache.init());
CHECK(cache.initialized());
// Sweep empty cache.
@ -541,7 +534,6 @@ TestMap()
CHECK(cache.has(obj4));
cache.clear();
cache.finish();
return true;
}
@ -556,7 +548,6 @@ TestReplaceDyingInSet()
MovableCellHasher<NumberAndObjectEntry>,
TempAllocPolicy>>;
Cache cache(JS::GetObjectZone(global), cx);
CHECK(cache.init());
RootedObject value1(cx, JS_NewPlainObject(cx));
RootedObject value2(cx, JS_NewPlainObject(cx));
@ -620,7 +611,6 @@ TestReplaceDyingInMap()
DefaultHasher<uint32_t>,
TempAllocPolicy>>;
Cache cache(JS::GetObjectZone(global), cx);
CHECK(cache.init());
RootedObject value1(cx, JS_NewPlainObject(cx));
RootedObject value2(cx, JS_NewPlainObject(cx));
@ -687,7 +677,6 @@ TestUniqueIDLookups()
MovableCellHasher<ObjectEntry>,
TempAllocPolicy>>;
Cache cache(JS::GetObjectZone(global), cx);
CHECK(cache.init());
Rooted<GCVector<JSObject*, 0, SystemAllocPolicy>> liveObjects(cx);

View File

@ -143,8 +143,6 @@ template <class NewKeyFunction>
static bool
SlowRekey(IntMap* m) {
IntMap tmp;
if (!tmp.init())
return false;
for (auto iter = m->iter(); !iter.done(); iter.next()) {
if (NewKeyFunction::shouldBeRemoved(iter.get().key()))
@ -169,8 +167,6 @@ template <class NewKeyFunction>
static bool
SlowRekey(IntSet* s) {
IntSet tmp;
if (!tmp.init())
return false;
for (auto iter = s->iter(); !iter.done(); iter.next()) {
if (NewKeyFunction::shouldBeRemoved(iter.get()))
@ -194,8 +190,6 @@ SlowRekey(IntSet* s) {
BEGIN_TEST(testHashRekeyManual)
{
IntMap am, bm;
CHECK(am.init());
CHECK(bm.init());
for (size_t i = 0; i < TestIterations; ++i) {
#ifdef FUZZ
fprintf(stderr, "map1: %lu\n", i);
@ -216,8 +210,6 @@ BEGIN_TEST(testHashRekeyManual)
}
IntSet as, bs;
CHECK(as.init());
CHECK(bs.init());
for (size_t i = 0; i < TestIterations; ++i) {
#ifdef FUZZ
fprintf(stderr, "set1: %lu\n", i);
@ -244,8 +236,6 @@ END_TEST(testHashRekeyManual)
BEGIN_TEST(testHashRekeyManualRemoval)
{
IntMap am, bm;
CHECK(am.init());
CHECK(bm.init());
for (size_t i = 0; i < TestIterations; ++i) {
#ifdef FUZZ
fprintf(stderr, "map2: %lu\n", i);
@ -270,8 +260,6 @@ BEGIN_TEST(testHashRekeyManualRemoval)
}
IntSet as, bs;
CHECK(as.init());
CHECK(bs.init());
for (size_t i = 0; i < TestIterations; ++i) {
#ifdef FUZZ
fprintf(stderr, "set1: %lu\n", i);
@ -338,7 +326,6 @@ BEGIN_TEST(testHashSetOfMoveOnlyType)
typedef js::HashSet<MoveOnlyType, MoveOnlyType::HashPolicy, js::SystemAllocPolicy> Set;
Set set;
CHECK(set.init());
MoveOnlyType a(1);
@ -357,9 +344,6 @@ GrowUntilResize()
{
IntMap m;
if (!m.init())
return false;
// Add entries until we've resized the table four times.
size_t lastCapacity = m.capacity();
size_t resizes = 0;
@ -367,7 +351,7 @@ GrowUntilResize()
while (resizes < 4) {
auto p = m.lookupForAdd(key);
if (!p && !m.add(p, key, 0))
return false; // OOM'd while adding
return false; // OOM'd in lookupForAdd() or add()
size_t capacity = m.capacity();
if (capacity != lastCapacity) {
@ -398,7 +382,6 @@ END_TEST(testHashMapGrowOOM)
BEGIN_TEST(testHashTableMovableModIterator)
{
IntSet set;
CHECK(set.init());
// Exercise returning a hash table ModIterator object from a function.
@ -435,3 +418,84 @@ IntSet::ModIterator setModIter(IntSet& set)
}
END_TEST(testHashTableMovableModIterator)
BEGIN_TEST(testHashLazyStorage)
{
// The following code depends on the current capacity computation, which
// could change in the future.
uint32_t defaultCap = 32;
uint32_t minCap = 4;
// A freshly constructed table performs no allocation: entry storage is
// created lazily, so capacity() reports 0 until something forces it.
IntSet set;
CHECK(set.capacity() == 0);
// put() instantiates the entry storage at the default capacity.
CHECK(set.put(1));
CHECK(set.capacity() == defaultCap);
// compact() shrinks the storage to the smallest capacity that can hold
// the single live entry (defaultCap -> minCap).
set.compact();
CHECK(set.capacity() == minCap);
// clear() removes the entries but keeps the storage allocated...
set.clear();
CHECK(set.capacity() == minCap);
// ...and compact() on a now-empty table releases the storage entirely.
set.compact();
CHECK(set.capacity() == 0);
// putNew() also instantiates storage, but only at the minimum capacity
// (it grows via checkOverloaded() rather than using the default length).
CHECK(set.putNew(1));
CHECK(set.capacity() == minCap);
set.clear();
set.compact();
CHECK(set.capacity() == 0);
// lookupForAdd() instantiates, even if not followed by add().
set.lookupForAdd(1);
CHECK(set.capacity() == minCap);
set.clear();
set.compact();
CHECK(set.capacity() == 0);
// reserve(0) must not allocate anything.
CHECK(set.reserve(0)); // a no-op
CHECK(set.capacity() == 0);
// reserve() of a non-zero length instantiates at least the minimum capacity.
CHECK(set.reserve(1));
CHECK(set.capacity() == minCap);
// Reserving no more than what is already reserved never shrinks or grows.
CHECK(set.reserve(0)); // a no-op
CHECK(set.capacity() == minCap);
CHECK(set.reserve(2)); // effectively a no-op
CHECK(set.capacity() == minCap);
// No need to clear here because we didn't add anything.
set.compact();
CHECK(set.capacity() == 0);
// A large reserve() rounds the requested length up to a power-of-two
// capacity with headroom (128 entries -> 256 slots).
CHECK(set.reserve(128));
CHECK(set.capacity() == 256);
CHECK(set.reserve(3)); // effectively a no-op
CHECK(set.capacity() == 256);
// Adding entries within the reserved capacity never reallocates.
for (int i = 0; i < 8; i++) {
CHECK(set.putNew(i));
}
CHECK(set.count() == 8);
CHECK(set.capacity() == 256);
// compact() with 8 live entries shrinks 256 -> 16; repeating it can
// shrink no further.
set.compact();
CHECK(set.capacity() == 16);
set.compact(); // effectively a no-op
CHECK(set.capacity() == 16);
// Growing past the load-factor threshold doubles the capacity (16 -> 32).
for (int i = 8; i < 16; i++) {
CHECK(set.putNew(i));
}
CHECK(set.count() == 16);
CHECK(set.capacity() == 32);
// Once more: clear() keeps storage, compact() on an empty table frees it.
set.clear();
CHECK(set.capacity() == 32);
set.compact();
CHECK(set.capacity() == 0);
return true;
}
END_TEST(testHashLazyStorage)

View File

@ -86,8 +86,6 @@ struct MinimalFunc : MinimalAlloc
if (!BuildPhiReverseMapping(graph))
return false;
ValueNumberer gvn(&mir, graph);
if (!gvn.init())
return false;
if (!gvn.run(ValueNumberer::DontUpdateAliasAnalysis))
return false;
return true;

View File

@ -368,7 +368,6 @@ BEGIN_TEST(test_ubiPostOrder)
FakeNode g('g');
js::HashSet<ExpectedEdge> expectedEdges(cx);
CHECK(expectedEdges.init());
auto declareEdge = [&](FakeNode& from, FakeNode& to) {
return from.addEdgeTo(to) && expectedEdges.putNew(ExpectedEdge(from, to));
@ -395,7 +394,6 @@ BEGIN_TEST(test_ubiPostOrder)
JS::AutoCheckCannotGC nogc(cx);
JS::ubi::PostOrder traversal(cx, nogc);
CHECK(traversal.init());
CHECK(traversal.addStart(&r));
auto onNode = [&](const JS::ubi::Node& node) {
@ -590,7 +588,6 @@ BEGIN_TEST(test_JS_ubi_DominatorTree)
fprintf(stderr, "Checking %c's dominated set:\n", node.name);
js::HashSet<char> expectedDominatedSet(cx);
CHECK(expectedDominatedSet.init());
for (auto& rel : domination) {
if (&rel.dominator == &node) {
fprintf(stderr, " Expecting %c\n", rel.dominated.name);
@ -714,7 +711,6 @@ BEGIN_TEST(test_JS_ubi_ShortestPaths_no_path)
JS::AutoCheckCannotGC noGC(cx);
JS::ubi::NodeSet targets;
CHECK(targets.init());
CHECK(targets.put(&b));
maybeShortestPaths = JS::ubi::ShortestPaths::Create(cx, noGC, 10, &a,
@ -756,7 +752,6 @@ BEGIN_TEST(test_JS_ubi_ShortestPaths_one_path)
JS::AutoCheckCannotGC noGC(cx);
JS::ubi::NodeSet targets;
CHECK(targets.init());
CHECK(targets.put(&b));
maybeShortestPaths = JS::ubi::ShortestPaths::Create(cx, noGC, 10, &a,
@ -823,7 +818,6 @@ BEGIN_TEST(test_JS_ubi_ShortestPaths_multiple_paths)
JS::AutoCheckCannotGC noGC(cx);
JS::ubi::NodeSet targets;
CHECK(targets.init());
CHECK(targets.put(&f));
maybeShortestPaths = JS::ubi::ShortestPaths::Create(cx, noGC, 10, &a,
@ -915,7 +909,6 @@ BEGIN_TEST(test_JS_ubi_ShortestPaths_more_paths_than_max)
JS::AutoCheckCannotGC noGC(cx);
JS::ubi::NodeSet targets;
CHECK(targets.init());
CHECK(targets.put(&f));
maybeShortestPaths = JS::ubi::ShortestPaths::Create(cx, noGC, 1, &a,
@ -965,7 +958,6 @@ BEGIN_TEST(test_JS_ubi_ShortestPaths_multiple_edges_to_target)
JS::AutoCheckCannotGC noGC(cx);
JS::ubi::NodeSet targets;
CHECK(targets.init());
CHECK(targets.put(&b));
maybeShortestPaths = JS::ubi::ShortestPaths::Create(cx, noGC, 10, &a,

View File

@ -4385,8 +4385,6 @@ JS_BufferIsCompilableUnit(JSContext* cx, HandleObject obj, const char* utf8, siz
CompileOptions options(cx);
frontend::UsedNameTracker usedNames(cx);
if (!usedNames.init())
return false;
RootedScriptSourceObject sourceObject(cx, frontend::CreateScriptSourceObject(cx, options,
mozilla::Nothing()));

View File

@ -749,9 +749,7 @@ ScriptedProxyHandler::ownPropertyKeys(JSContext* cx, HandleObject proxy, AutoIdV
return false;
// Steps 9, 18.
Rooted<GCHashSet<jsid>> uncheckedResultKeys(cx, GCHashSet<jsid>(cx));
if (!uncheckedResultKeys.init(trapResult.length()))
return false;
Rooted<GCHashSet<jsid>> uncheckedResultKeys(cx, GCHashSet<jsid>(cx, trapResult.length()));
for (size_t i = 0, len = trapResult.length(); i < len; i++) {
MOZ_ASSERT(!JSID_IS_VOID(trapResult[i]));

View File

@ -4484,8 +4484,6 @@ BinParse(JSContext* cx, unsigned argc, Value* vp)
.setFileAndLine("<ArrayBuffer>", 1);
UsedNameTracker usedNames(cx);
if (!usedNames.init())
return false;
JS::Result<ParseNode*> parsed(nullptr);
if (useMultipart) {
@ -4581,8 +4579,6 @@ Parse(JSContext* cx, unsigned argc, Value* vp)
.setAllowSyntaxParser(allowSyntaxParser);
UsedNameTracker usedNames(cx);
if (!usedNames.init())
return false;
RootedScriptSourceObject sourceObject(cx, frontend::CreateScriptSourceObject(cx, options,
Nothing()));
@ -4638,8 +4634,6 @@ SyntaxParse(JSContext* cx, unsigned argc, Value* vp)
const char16_t* chars = stableChars.twoByteRange().begin().get();
size_t length = scriptContents->length();
UsedNameTracker usedNames(cx);
if (!usedNames.init())
return false;
RootedScriptSourceObject sourceObject(cx, frontend::CreateScriptSourceObject(cx, options,
Nothing()));

View File

@ -1500,11 +1500,6 @@ InnerViewTable::addView(JSContext* cx, ArrayBufferObject* buffer, ArrayBufferVie
// ArrayBufferObject entries are only added when there are multiple views.
MOZ_ASSERT(buffer->firstView());
if (!map.initialized() && !map.init()) {
ReportOutOfMemory(cx);
return false;
}
Map::AddPtr p = map.lookupForAdd(buffer);
MOZ_ASSERT(!gc::IsInsideNursery(buffer));
@ -1553,9 +1548,6 @@ InnerViewTable::addView(JSContext* cx, ArrayBufferObject* buffer, ArrayBufferVie
InnerViewTable::ViewVector*
InnerViewTable::maybeViewsUnbarriered(ArrayBufferObject* buffer)
{
if (!map.initialized())
return nullptr;
Map::Ptr p = map.lookup(buffer);
if (p)
return &p->value();
@ -1628,9 +1620,6 @@ InnerViewTable::sweepAfterMinorGC()
size_t
InnerViewTable::sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf)
{
if (!map.initialized())
return 0;
size_t vectorSize = 0;
for (Map::Enum e(map); !e.empty(); e.popFront())
vectorSize += e.front().value().sizeOfExcludingThis(mallocSizeOf);

View File

@ -12,15 +12,6 @@ using namespace js;
using mozilla::PodZero;
bool
RuntimeCaches::init()
{
if (!evalCache.init())
return false;
return true;
}
void
NewObjectCache::clearNurseryObjects(JSRuntime* rt)
{

View File

@ -247,8 +247,6 @@ class RuntimeCaches
js::UncompressedSourceCache uncompressedSourceCache;
js::EvalCache evalCache;
bool init();
void purgeForMinorGC(JSRuntime* rt) {
newObjectCache.clearNurseryObjects(rt);
evalCache.sweep();
@ -256,8 +254,7 @@ class RuntimeCaches
void purgeForCompaction() {
newObjectCache.purge();
if (evalCache.initialized())
evalCache.clear();
evalCache.clear();
}
void purge() {

View File

@ -114,7 +114,7 @@ LCovSource::exportInto(GenericPrinter& out) const
out.printf("BRF:%zu\n", numBranchesFound_);
out.printf("BRH:%zu\n", numBranchesHit_);
if (linesHit_.initialized()) {
if (!linesHit_.empty()) {
for (size_t lineno = 1; lineno <= maxLineHit_; ++lineno) {
if (auto p = linesHit_.lookup(lineno))
out.printf("DA:%zu,%" PRIu64 "\n", lineno, p->value());
@ -140,9 +140,6 @@ LCovSource::writeScriptName(LSprinter& out, JSScript* script)
bool
LCovSource::writeScript(JSScript* script)
{
if (!linesHit_.initialized() && !linesHit_.init())
return false;
numFunctionsFound_++;
outFN_.printf("FN:%u,", script->lineno());
if (!writeScriptName(outFN_, script))

View File

@ -38,20 +38,10 @@ using namespace js::gc;
Compartment::Compartment(Zone* zone)
: zone_(zone),
runtime_(zone->runtimeFromAnyThread())
runtime_(zone->runtimeFromAnyThread()),
crossCompartmentWrappers(0)
{}
bool
Compartment::init(JSContext* cx)
{
if (!crossCompartmentWrappers.init(0)) {
ReportOutOfMemory(cx);
return false;
}
return true;
}
#ifdef JSGC_HASH_TABLE_CHECKS
namespace {

View File

@ -273,7 +273,8 @@ class WrapperMap
Ptr(const InnerMap::Ptr& p, InnerMap& m) : InnerMap::Ptr(p), map(&m) {}
};
MOZ_MUST_USE bool init(uint32_t len) { return map.init(len); }
WrapperMap() {}
explicit WrapperMap(size_t aLen) : map(aLen) {}
bool empty() {
if (map.empty())
@ -305,8 +306,8 @@ class WrapperMap
MOZ_ASSERT(k.is<JSString*>() == !c);
auto p = map.lookupForAdd(c);
if (!p) {
InnerMap m;
if (!m.init(InitialInnerMapSize) || !map.add(p, c, std::move(m)))
InnerMap m(InitialInnerMapSize);
if (!map.add(p, c, std::move(m)))
return false;
}
return p->value().put(k, v);
@ -436,7 +437,6 @@ class JS::Compartment
public:
explicit Compartment(JS::Zone* zone);
MOZ_MUST_USE bool init(JSContext* cx);
void destroy(js::FreeOp* fop);
MOZ_MUST_USE inline bool wrap(JSContext* cx, JS::MutableHandleValue vp);

View File

@ -697,11 +697,13 @@ Debugger::Debugger(JSContext* cx, NativeObject* dbg)
&traceLoggerScriptedCallsLastDrainedSize);
}
#endif
cx->runtime()->debuggerList().insertBack(this);
}
Debugger::~Debugger()
{
MOZ_ASSERT_IF(debuggees.initialized(), debuggees.empty());
MOZ_ASSERT(debuggees.empty());
allocationsLog.clear();
// We don't have to worry about locking here since Debugger is not
@ -715,29 +717,6 @@ Debugger::~Debugger()
}
}
bool
Debugger::init(JSContext* cx)
{
if (!debuggees.init() ||
!debuggeeZones.init() ||
!frames.init() ||
!scripts.init() ||
!lazyScripts.init() ||
!sources.init() ||
!objects.init() ||
!observedGCs.init() ||
!environments.init() ||
!wasmInstanceScripts.init() ||
!wasmInstanceSources.init())
{
ReportOutOfMemory(cx);
return false;
}
cx->runtime()->debuggerList().insertBack(this);
return true;
}
JS_STATIC_ASSERT(unsigned(JSSLOT_DEBUGFRAME_OWNER) == unsigned(JSSLOT_DEBUGSCRIPT_OWNER));
JS_STATIC_ASSERT(unsigned(JSSLOT_DEBUGFRAME_OWNER) == unsigned(JSSLOT_DEBUGSOURCE_OWNER));
JS_STATIC_ASSERT(unsigned(JSSLOT_DEBUGFRAME_OWNER) == unsigned(JSSLOT_DEBUGOBJECT_OWNER));
@ -2395,7 +2374,6 @@ class MOZ_RAII ExecutionObservableRealms : public Debugger::ExecutionObservableS
MOZ_GUARD_OBJECT_NOTIFIER_INIT;
}
bool init() { return realms_.init() && zones_.init(); }
bool add(Realm* realm) { return realms_.put(realm) && zones_.put(realm->zone()); }
using RealmRange = HashSet<Realm*>::Range;
@ -2760,7 +2738,7 @@ Debugger::ensureExecutionObservabilityOfRealm(JSContext* cx, Realm* realm)
if (realm->debuggerObservesAllExecution())
return true;
ExecutionObservableRealms obs(cx);
if (!obs.init() || !obs.add(realm))
if (!obs.add(realm))
return false;
realm->updateDebuggerObservesAllExecution();
return updateExecutionObservability(cx, obs, Observing);
@ -2811,8 +2789,6 @@ bool
Debugger::updateObservesAllExecutionOnDebuggees(JSContext* cx, IsObserving observing)
{
ExecutionObservableRealms obs(cx);
if (!obs.init())
return false;
for (WeakGlobalObjectSet::Range r = debuggees.all(); !r.empty(); r.popFront()) {
GlobalObject* global = r.front();
@ -2841,8 +2817,6 @@ bool
Debugger::updateObservesCoverageOnDebuggees(JSContext* cx, IsObserving observing)
{
ExecutionObservableRealms obs(cx);
if (!obs.init())
return false;
for (WeakGlobalObjectSet::Range r = debuggees.all(); !r.empty(); r.popFront()) {
GlobalObject* global = r.front();
@ -3195,12 +3169,10 @@ Debugger::trace(JSTracer* trc)
// (Once we support generator frames properly, we will need
// weakly-referenced Debugger.Frame objects as well, for suspended generator
// frames.)
if (frames.initialized()) {
for (FrameMap::Range r = frames.all(); !r.empty(); r.popFront()) {
HeapPtr<DebuggerFrame*>& frameobj = r.front().value();
TraceEdge(trc, &frameobj, "live Debugger.Frame");
MOZ_ASSERT(frameobj->getPrivate(frameobj->numFixedSlotsMaybeForwarded()));
}
for (FrameMap::Range r = frames.all(); !r.empty(); r.popFront()) {
HeapPtr<DebuggerFrame*>& frameobj = r.front().value();
TraceEdge(trc, &frameobj, "live Debugger.Frame");
MOZ_ASSERT(frameobj->getPrivate(frameobj->numFixedSlotsMaybeForwarded()));
}
allocationsLog.trace(trc);
@ -3778,8 +3750,6 @@ Debugger::removeDebuggee(JSContext* cx, unsigned argc, Value* vp)
return false;
ExecutionObservableRealms obs(cx);
if (!obs.init())
return false;
if (dbg->debuggees.has(global)) {
dbg->removeDebuggeeGlobal(cx->runtime()->defaultFreeOp(), global, nullptr);
@ -3803,8 +3773,6 @@ Debugger::removeAllDebuggees(JSContext* cx, unsigned argc, Value* vp)
THIS_DEBUGGER(cx, argc, vp, "removeAllDebuggees", args, dbg);
ExecutionObservableRealms obs(cx);
if (!obs.init())
return false;
for (WeakGlobalObjectSet::Enum e(dbg->debuggees); !e.empty(); e.popFront()) {
Rooted<GlobalObject*> global(cx, e.front());
@ -3940,7 +3908,7 @@ Debugger::construct(JSContext* cx, unsigned argc, Value* vp)
{
// Construct the underlying C++ object.
auto dbg = cx->make_unique<Debugger>(cx, obj.get());
if (!dbg || !dbg->init(cx))
if (!dbg)
return false;
debugger = dbg.release();
@ -4227,21 +4195,6 @@ class MOZ_STACK_CLASS Debugger::ScriptQuery
oom(false)
{}
/*
* Initialize this ScriptQuery. Raise an error and return false if we
* haven't enough memory.
*/
bool init() {
if (!realms.init() ||
!innermostForRealm.init())
{
ReportOutOfMemory(cx);
return false;
}
return true;
}
/*
* Parse the query object |query|, and prepare to match only the scripts
* it specifies.
@ -4741,8 +4694,6 @@ Debugger::findScripts(JSContext* cx, unsigned argc, Value* vp)
}
ScriptQuery query(cx, dbg);
if (!query.init())
return false;
if (args.length() >= 1) {
RootedObject queryObject(cx, NonNullObject(cx, args[0]));
@ -4846,11 +4797,6 @@ class MOZ_STACK_CLASS Debugger::ObjectQuery
if (!prepareQuery())
return false;
if (!debuggeeCompartments.init()) {
ReportOutOfMemory(cx);
return false;
}
for (WeakGlobalObjectSet::Range r = dbg->allDebuggees(); !r.empty(); r.popFront()) {
if (!debuggeeCompartments.put(r.front()->compartment())) {
ReportOutOfMemory(cx);
@ -4870,10 +4816,6 @@ class MOZ_STACK_CLASS Debugger::ObjectQuery
}
Traversal traversal(cx, *this, maybeNoGC.ref());
if (!traversal.init()) {
ReportOutOfMemory(cx);
return false;
}
traversal.wantNames = false;
return traversal.addStart(JS::ubi::Node(&rootList)) &&
@ -5104,8 +5046,6 @@ Debugger::isCompilableUnit(JSContext* cx, unsigned argc, Value* vp)
CompileOptions options(cx);
frontend::UsedNameTracker usedNames(cx);
if (!usedNames.init())
return false;
RootedScriptSourceObject sourceObject(cx, frontend::CreateScriptSourceObject(cx, options,
Nothing()));

View File

@ -165,10 +165,6 @@ class DebuggerWeakMap : private WeakMap<HeapPtr<UnbarrieredKey>, HeapPtr<JSObjec
using Base::all;
using Base::trace;
MOZ_MUST_USE bool init(uint32_t len = 16) {
return Base::init(len) && zoneCounts.init();
}
template<typename KeyInput, typename ValueInput>
bool relookupOrAdd(AddPtr& p, const KeyInput& k, const ValueInput& v) {
MOZ_ASSERT(v->compartment() == this->compartment);
@ -881,7 +877,6 @@ class Debugger : private mozilla::LinkedListElement<Debugger>
Debugger(JSContext* cx, NativeObject* dbg);
~Debugger();
MOZ_MUST_USE bool init(JSContext* cx);
inline const js::GCPtrNativeObject& toJSObject() const;
inline js::GCPtrNativeObject& toJSObjectRef();
static inline Debugger* fromJSObject(const JSObject* obj);

View File

@ -383,8 +383,6 @@ DebuggerMemory::takeCensus(JSContext* cx, unsigned argc, Value* vp)
#endif
Census census(cx);
if (!census.init())
return false;
CountTypePtr rootType;
RootedObject options(cx);
@ -417,10 +415,6 @@ DebuggerMemory::takeCensus(JSContext* cx, unsigned argc, Value* vp)
}
JS::ubi::CensusTraversal traversal(cx, handler, maybeNoGC.ref());
if (!traversal.init()) {
ReportOutOfMemory(cx);
return false;
}
traversal.wantNames = false;
if (!traversal.addStart(JS::ubi::Node(&rootList)) ||

View File

@ -58,8 +58,7 @@ void
EnvironmentCoordinateNameCache::purge()
{
shape = nullptr;
if (map.initialized())
map.finish();
map.clearAndCompact();
}
PropertyName*
@ -69,14 +68,11 @@ js::EnvironmentCoordinateName(EnvironmentCoordinateNameCache& cache, JSScript* s
Shape* shape = EnvironmentCoordinateToEnvironmentShape(script, pc);
if (shape != cache.shape && shape->slot() >= ENV_COORDINATE_NAME_THRESHOLD) {
cache.purge();
if (cache.map.init(shape->slot())) {
if (cache.map.reserve(shape->slot())) {
cache.shape = shape;
Shape::Range<NoGC> r(shape);
while (!r.empty()) {
if (!cache.map.putNew(r.front().slot(), r.front().propid())) {
cache.purge();
break;
}
cache.map.putNewInfallible(r.front().slot(), r.front().propid());
r.popFront();
}
}
@ -2414,13 +2410,7 @@ DebugEnvironments::DebugEnvironments(JSContext* cx, Zone* zone)
DebugEnvironments::~DebugEnvironments()
{
MOZ_ASSERT_IF(missingEnvs.initialized(), missingEnvs.empty());
}
bool
DebugEnvironments::init()
{
return proxiedEnvs.init() && missingEnvs.init() && liveEnvs.init();
MOZ_ASSERT(missingEnvs.empty());
}
void
@ -2525,11 +2515,6 @@ DebugEnvironments::ensureRealmData(JSContext* cx)
if (!debugEnvs)
return nullptr;
if (!debugEnvs->init()) {
ReportOutOfMemory(cx);
return nullptr;
}
realm->debugEnvsRef() = std::move(debugEnvs);
return realm->debugEnvs();
}
@ -3687,8 +3672,6 @@ static bool
AnalyzeEntrainedVariablesInScript(JSContext* cx, HandleScript script, HandleScript innerScript)
{
PropertyNameSet remainingNames(cx);
if (!remainingNames.init())
return false;
for (BindingIter bi(script); bi; bi++) {
if (bi.closedOver()) {

View File

@ -994,8 +994,6 @@ class DebugEnvironments
Zone* zone() const { return zone_; }
private:
bool init();
static DebugEnvironments* ensureRealmData(JSContext* cx);
template <typename Environment, typename Scope>

Some files were not shown because too many files have changed in this diff Show More