Bug 1367795 - Add barriers to JS::WeakCache for GCHashSet r=sfink

Author: Jon Coppeard
Date:   2017-07-10 18:27:43 +01:00
parent af750debd2
commit 7f6cdb57e8
5 changed files with 212 additions and 69 deletions
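For orientation, here is a minimal, self-contained sketch of the mechanism this patch adds. The names (ToyWeakCache, isLive, plain int entries) are hypothetical stand-ins rather than SpiderMonkey types: while an incremental sweep is pending the GC enables a barrier flag on the cache, and a lookup that finds an entry whose referent is dying removes it and reports a miss, so callers never see about-to-be-swept entries.

```cpp
// Toy analogue of the barrier-filtering lookup (not the real JS::WeakCache API).
#include <cassert>
#include <cstdio>
#include <functional>
#include <unordered_set>

struct ToyWeakCache {
    std::unordered_set<int> set;
    bool needsBarrier = false;            // set by the "GC" while an incremental sweep is pending
    std::function<bool(int)> isLive;      // stand-in for GCPolicy<T>::needsSweep()

    // With the barrier active, finding a dying entry removes it and reports a miss.
    bool lookup(int v) {
        auto it = set.find(v);
        if (it == set.end())
            return false;
        if (needsBarrier && !isLive(v)) {
            set.erase(it);
            return false;
        }
        return true;
    }
};

int main() {
    ToyWeakCache cache;
    cache.isLive = [](int v) { return v != 42; };  // pretend 42's referent is dying
    cache.set = {7, 42};

    cache.needsBarrier = true;                     // incremental sweep in progress
    assert(cache.lookup(7));                       // live entry: still found
    assert(!cache.lookup(42));                     // dying entry: filtered out and removed
    std::puts("dying entry never escaped the cache");
    return 0;
}
```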


@@ -7,6 +7,8 @@
#ifndef GCHashTable_h
#define GCHashTable_h
#include "mozilla/Maybe.h"
#include "js/GCPolicyAPI.h"
#include "js/HashTable.h"
#include "js/RootingAPI.h"
@@ -388,8 +390,13 @@ class WeakCache<GCHashMap<Key, Value, HashPolicy, AllocPolicy, MapSweepPolicy>>
return map.needsSweep();
}
void sweep() override {
return map.sweep();
size_t sweep() override {
if (!this->initialized())
return 0;
size_t steps = map.count();
map.sweep();
return steps;
}
using Lookup = typename MainMap::Lookup;
@@ -456,41 +463,143 @@ class WeakCache<GCHashSet<T, HashPolicy, AllocPolicy>>
using Self = WeakCache<Set>;
Set set;
bool needsBarrier;
public:
using Entry = typename Set::Entry;
template <typename... Args>
explicit WeakCache(Zone* zone, Args&&... args)
: WeakCacheBase(zone), set(mozilla::Forward<Args>(args)...)
: WeakCacheBase(zone), set(mozilla::Forward<Args>(args)...), needsBarrier(false)
{}
template <typename... Args>
explicit WeakCache(JSRuntime* rt, Args&&... args)
: WeakCacheBase(rt), set(mozilla::Forward<Args>(args)...)
: WeakCacheBase(rt), set(mozilla::Forward<Args>(args)...), needsBarrier(false)
{}
void sweep() override {
size_t sweep() override {
if (!this->initialized())
return 0;
size_t steps = set.count();
set.sweep();
return steps;
}
bool needsSweep() override {
return set.needsSweep();
}
// Const interface.
bool setNeedsIncrementalBarrier(bool needs) override {
MOZ_ASSERT(needsBarrier != needs);
needsBarrier = needs;
return true;
}
bool needsIncrementalBarrier() const override {
return needsBarrier;
}
private:
static bool entryNeedsSweep(const Entry& prior) {
Entry entry(prior);
bool result = GCPolicy<T>::needsSweep(&entry);
MOZ_ASSERT(prior == entry); // We shouldn't update here.
return result;
}
public:
using Lookup = typename Set::Lookup;
using AddPtr = typename Set::AddPtr;
using Entry = typename Set::Entry;
using Ptr = typename Set::Ptr;
using Range = typename Set::Range;
using AddPtr = typename Set::AddPtr;
struct Range
{
explicit Range(const typename Set::Range& r)
: range(r)
{
settle();
}
Range() {}
bool empty() const { return range.empty(); }
const Entry& front() const { return range.front(); }
void popFront() {
range.popFront();
settle();
}
private:
typename Set::Range range;
void settle() {
while (!empty() && entryNeedsSweep(front()))
popFront();
}
};
struct Enum : public Set::Enum
{
explicit Enum(Self& cache)
: Set::Enum(cache.set)
{
// This operation is not allowed while barriers are in place as we
// may also need to enumerate the set for sweeping.
MOZ_ASSERT(!cache.needsBarrier);
}
};
bool initialized() const {
return set.initialized();
}
Ptr lookup(const Lookup& l) const {
Ptr ptr = set.lookup(l);
if (needsBarrier && ptr && entryNeedsSweep(*ptr)) {
const_cast<Set&>(set).remove(ptr);
return Ptr();
}
return ptr;
}
AddPtr lookupForAdd(const Lookup& l) const {
AddPtr ptr = set.lookupForAdd(l);
if (needsBarrier && ptr && entryNeedsSweep(*ptr)) {
const_cast<Set&>(set).remove(ptr);
return set.lookupForAdd(l);
}
return ptr;
}
Range all() const {
return Range(set.all());
}
bool empty() const {
// This operation is not currently allowed while barriers are in place
// as it would require iterating the set and the caller expects a
// constant time operation.
MOZ_ASSERT(!needsBarrier);
return set.empty();
}
uint32_t count() const {
// This operation is not currently allowed while barriers are in place
// as it would require iterating the set and the caller expects a
// constant time operation.
MOZ_ASSERT(!needsBarrier);
return set.count();
}
size_t capacity() const {
return set.capacity();
}
bool has(const Lookup& l) const {
return lookup(l).found();
}
bool initialized() const { return set.initialized(); }
Ptr lookup(const Lookup& l) const { return set.lookup(l); }
AddPtr lookupForAdd(const Lookup& l) const { return set.lookupForAdd(l); }
Range all() const { return set.all(); }
bool empty() const { return set.empty(); }
uint32_t count() const { return set.count(); }
size_t capacity() const { return set.capacity(); }
bool has(const Lookup& l) const { return set.lookup(l).found(); }
size_t sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) const {
return set.sizeOfExcludingThis(mallocSizeOf);
}
@@ -498,15 +607,37 @@ class WeakCache<GCHashSet<T, HashPolicy, AllocPolicy>>
return mallocSizeOf(this) + set.sizeOfExcludingThis(mallocSizeOf);
}
// Non-const interface.
bool init(uint32_t len = 16) {
MOZ_ASSERT(!needsBarrier);
return set.init(len);
}
struct Enum : public Set::Enum { explicit Enum(Self& o) : Set::Enum(o.set) {} };
void clear() {
// This operation is not currently allowed while barriers are in place
// since it doesn't make sense to clear a cache while it is being swept.
MOZ_ASSERT(!needsBarrier);
set.clear();
}
bool init(uint32_t len = 16) { return set.init(len); }
void clear() { set.clear(); }
void finish() { set.finish(); }
void remove(Ptr p) { set.remove(p); }
void remove(const Lookup& l) { set.remove(l); }
void finish() {
// This operation is not currently allowed while barriers are in place
// since it doesn't make sense to destroy a cache while it is being swept.
MOZ_ASSERT(!needsBarrier);
set.finish();
}
void remove(Ptr p) {
// This currently supports removing entries during incremental
// sweeping. If we allow these tables to be swept incrementally this may
// no longer be possible.
set.remove(p);
}
void remove(const Lookup& l) {
Ptr p = lookup(l);
if (p)
remove(p);
}
template<typename TInput>
bool add(AddPtr& p, TInput&& t) {
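The wrapping Range above settles past entries that the pending sweep would discard, so iteration never yields a dying entry while the barrier is active. Below is a standalone illustration of that settle pattern under assumed names (FilteredRange and isDying stand in for Set::Range and GCPolicy<T>::needsSweep()); it is a sketch of the idea, not the real class.

```cpp
#include <cassert>
#include <cstddef>
#include <vector>

// Iterates a container but skips entries the predicate marks as dying,
// mirroring Range::settle() in the patch above.
template <typename T, typename Pred>
struct FilteredRange {
    const std::vector<T>& items;
    Pred isDying;
    size_t i = 0;

    FilteredRange(const std::vector<T>& v, Pred p) : items(v), isDying(p) { settle(); }
    bool empty() const { return i == items.size(); }
    const T& front() const { return items[i]; }
    void popFront() { ++i; settle(); }

  private:
    void settle() {                       // advance past entries that would be swept
        while (!empty() && isDying(front()))
            ++i;
    }
};

int main() {
    std::vector<int> values = {1, 42, 3, 42};
    auto isDying = [](const int& v) { return v == 42; };  // pretend 42 is about to be swept
    FilteredRange<int, decltype(isDying)> r(values, isDying);

    int sum = 0;
    for (; !r.empty(); r.popFront())
        sum += r.front();
    assert(sum == 4);                     // the dying 42s were never observed
    return 0;
}
```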


@@ -37,8 +37,17 @@ class WeakCacheBase : public mozilla::LinkedListElement<WeakCacheBase>
WeakCacheBase(WeakCacheBase&& other) = default;
virtual ~WeakCacheBase() {}
virtual void sweep() = 0;
virtual size_t sweep() = 0;
virtual bool needsSweep() = 0;
virtual bool setNeedsIncrementalBarrier(bool needs) {
// Derived classes do not support incremental barriers by default.
return false;
}
virtual bool needsIncrementalBarrier() const {
// Derived classes do not support incremental barriers by default.
return false;
}
};
} // namespace detail
@@ -67,8 +76,9 @@ class WeakCache : protected detail::WeakCacheBase,
const T& get() const { return cache; }
T& get() { return cache; }
void sweep() override {
size_t sweep() override {
GCPolicy<T>::sweep(&cache);
return 0;
}
bool needsSweep() override {
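The interface change above lets sweep() report how much work it did and adds an opt-in barrier toggle (unsupported, returning false, by default). The standalone sketch below shows one way a caller could use that, with hypothetical WeakCacheLike/ToyCache types; the actual GC scheduling code is not part of this diff, and in practice the barrier stays enabled for the whole incremental sweep rather than being flipped around a single call.

```cpp
#include <cstddef>
#include <cstdio>
#include <vector>

struct WeakCacheLike {
    virtual ~WeakCacheLike() = default;
    virtual bool needsSweep() = 0;
    virtual size_t sweep() = 0;                                      // returns entries visited
    virtual bool setNeedsIncrementalBarrier(bool) { return false; }  // unsupported by default
};

struct ToyCache : WeakCacheLike {
    size_t entries;
    explicit ToyCache(size_t n) : entries(n) {}
    bool needsSweep() override { return entries != 0; }
    size_t sweep() override { size_t n = entries; entries = 0; return n; }
    bool setNeedsIncrementalBarrier(bool) override { return true; }
};

int main() {
    std::vector<ToyCache> caches;
    caches.emplace_back(3);
    caches.emplace_back(0);
    caches.emplace_back(5);

    size_t budget = 6;                        // per-slice work budget
    for (ToyCache& c : caches) {
        if (!c.needsSweep() || budget == 0)
            continue;
        c.setNeedsIncrementalBarrier(true);   // lookups now filter dying entries
        size_t steps = c.sweep();             // charge the work against the budget
        c.setNeedsIncrementalBarrier(false);
        budget = steps > budget ? 0 : budget - steps;
        std::printf("swept %zu entries, budget left %zu\n", steps, budget);
    }
    return 0;
}
```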


@@ -384,6 +384,15 @@ struct ObjectGroupCompartment::NewEntry
: clasp(clasp), proto(proto), associated(associated)
{}
explicit Lookup(const NewEntry& entry)
: clasp(entry.group.unbarrieredGet()->clasp()),
proto(entry.group.unbarrieredGet()->proto()),
associated(entry.associated)
{
if (associated && associated->is<JSFunction>())
clasp = nullptr;
}
bool hasAssocId() const {
return !associated || associated->zone()->hasUniqueId(associated);
}
@@ -437,6 +446,10 @@ struct ObjectGroupCompartment::NewEntry
return (IsAboutToBeFinalized(&group) ||
(associated && IsAboutToBeFinalizedUnbarriered(&associated)));
}
bool operator==(const NewEntry& other) const {
return group == other.group && associated == other.associated;
}
};
namespace js {
@@ -1916,12 +1929,7 @@ ObjectGroupCompartment::checkNewTableAfterMovingGC(NewTable* table)
CheckGCThingAfterMovingGC(proto.toObject());
CheckGCThingAfterMovingGC(entry.associated);
const Class* clasp = entry.group.unbarrieredGet()->clasp();
if (entry.associated && entry.associated->is<JSFunction>())
clasp = nullptr;
NewEntry::Lookup lookup(clasp, proto, entry.associated);
auto ptr = table->lookup(lookup);
auto ptr = table->lookup(NewEntry::Lookup(entry));
MOZ_RELEASE_ASSERT(ptr.found() && &*ptr == &r.front());
}
}


@@ -1120,21 +1120,6 @@ Shape::setObjectFlags(JSContext* cx, BaseShape::Flag flags, TaggedProto proto, S
return replaceLastProperty(cx, base, proto, lastRoot);
}
/* static */ inline HashNumber
StackBaseShape::hash(const Lookup& lookup)
{
HashNumber hash = lookup.flags;
hash = RotateLeft(hash, 4) ^ (uintptr_t(lookup.clasp) >> 3);
return hash;
}
/* static */ inline bool
StackBaseShape::match(const ReadBarriered<UnownedBaseShape*>& key, const Lookup& lookup)
{
return key.unbarrieredGet()->flags == lookup.flags &&
key.unbarrieredGet()->clasp_ == lookup.clasp;
}
inline
BaseShape::BaseShape(const StackBaseShape& base)
: clasp_(base.clasp),
@@ -1295,23 +1280,6 @@ InitialShapeEntry::InitialShapeEntry(Shape* shape, const Lookup::ShapeProto& pro
{
}
/* static */ inline HashNumber
InitialShapeEntry::hash(const Lookup& lookup)
{
return (RotateLeft(uintptr_t(lookup.clasp) >> 3, 4) ^ lookup.proto.hashCode()) +
lookup.nfixed;
}
/* static */ inline bool
InitialShapeEntry::match(const InitialShapeEntry& key, const Lookup& lookup)
{
const Shape* shape = key.shape.unbarrieredGet();
return lookup.clasp == shape->getObjectClass()
&& lookup.nfixed == shape->numFixedSlots()
&& lookup.baseFlags == shape->getObjectFlags()
&& lookup.proto.match(key.proto);
}
#ifdef JSGC_HASH_TABLE_CHECKS
void


@@ -659,8 +659,15 @@ struct StackBaseShape : public DefaultHasher<ReadBarriered<UnownedBaseShape*>>
}
};
static inline HashNumber hash(const Lookup& lookup);
static inline bool match(const ReadBarriered<UnownedBaseShape*>& key, const Lookup& lookup);
static HashNumber hash(const Lookup& lookup) {
HashNumber hash = lookup.flags;
hash = mozilla::RotateLeft(hash, 4) ^ (uintptr_t(lookup.clasp) >> 3);
return hash;
}
static inline bool match(const ReadBarriered<UnownedBaseShape*>& key, const Lookup& lookup) {
return key.unbarrieredGet()->flags == lookup.flags &&
key.unbarrieredGet()->clasp_ == lookup.clasp;
}
};
static MOZ_ALWAYS_INLINE js::HashNumber
@@ -1322,6 +1329,10 @@ class InitialShapeProto
void setProto(TaggedProto proto) {
proto_ = proto;
}
bool operator==(const InitialShapeProto& other) const {
return key_ == other.key_ && proto_ == other.proto_;
}
};
/*
@@ -1370,9 +1381,20 @@ struct InitialShapeEntry
inline InitialShapeEntry();
inline InitialShapeEntry(Shape* shape, const Lookup::ShapeProto& proto);
static inline HashNumber hash(const Lookup& lookup);
static inline bool match(const InitialShapeEntry& key, const Lookup& lookup);
static void rekey(InitialShapeEntry& k, const InitialShapeEntry& newKey) { k = newKey; }
static HashNumber hash(const Lookup& lookup) {
return (mozilla::RotateLeft(uintptr_t(lookup.clasp) >> 3, 4) ^ lookup.proto.hashCode()) +
lookup.nfixed;
}
static inline bool match(const InitialShapeEntry& key, const Lookup& lookup) {
const Shape* shape = key.shape.unbarrieredGet();
return lookup.clasp == shape->getObjectClass()
&& lookup.nfixed == shape->numFixedSlots()
&& lookup.baseFlags == shape->getObjectFlags()
&& lookup.proto.match(key.proto);
}
static void rekey(InitialShapeEntry& k, const InitialShapeEntry& newKey) {
k = newKey;
}
bool needsSweep() {
Shape* ushape = shape.unbarrieredGet();
@@ -1381,6 +1403,10 @@ struct InitialShapeEntry
return (gc::IsAboutToBeFinalizedUnbarriered(&ushape) ||
(uproto.isObject() && gc::IsAboutToBeFinalizedUnbarriered(&protoObj)));
}
bool operator==(const InitialShapeEntry& other) const {
return shape == other.shape && proto == other.proto;
}
};
using InitialShapeSet = JS::WeakCache<JS::GCHashSet<InitialShapeEntry,
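Finally, the operator== overloads added to NewEntry, InitialShapeProto and InitialShapeEntry support the entryNeedsSweep() helper in GCHashTable.h, which runs the sweep check on a copy and asserts the copy was not updated. A tiny self-contained sketch of that copy-check-assert pattern, with hypothetical ToyEntry/refIsDying stand-ins:

```cpp
#include <cassert>

struct ToyEntry {
    int ref;                               // stand-in for a barriered GC pointer
    bool operator==(const ToyEntry& other) const { return ref == other.ref; }
};

static bool refIsDying(ToyEntry* e) {      // stand-in for GCPolicy<T>::needsSweep()
    return e->ref == 0;                    // "dying" once the referent is gone
}

static bool entryNeedsSweep(const ToyEntry& prior) {
    ToyEntry entry(prior);                 // run the check on a copy...
    bool result = refIsDying(&entry);
    assert(prior == entry);                // ...and require that it did not update the entry
    return result;
}

int main() {
    assert(!entryNeedsSweep(ToyEntry{7}));
    assert(entryNeedsSweep(ToyEntry{0}));
    return 0;
}
```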