Bug 1464472. r=jandem

--HG--
extra : rebase_source : edabc2a07d4f08ac4fbc8c8d05f760f798e513ef
Jeff Walden 2018-05-26 03:07:36 -07:00
parent b9a5416431
commit 2fc83a4f68
6 changed files with 94 additions and 21 deletions


@@ -1979,10 +1979,8 @@ BaselineCacheIRCompiler::emitGuardAndGetIterator()
     masm.loadPtr(iterAddr, output);
     masm.loadObjPrivate(output, JSObject::ITER_CLASS_NFIXED_SLOTS, niScratch);
 
-    // Ensure the |active| and |unreusable| bits are not set.
-    masm.branchTest32(Assembler::NonZero,
-                      Address(niScratch, NativeIterator::offsetOfFlags()),
-                      Imm32(NativeIterator::Flags::All), failure->label());
+    // Ensure the iterator is reusable: see NativeIterator::isReusable.
+    masm.branchIfNativeIteratorNotReusable(niScratch, failure->label());
 
     // Pre-write barrier for store to 'objectBeingIterated_'.
     Address iterObjAddr(niScratch, NativeIterator::offsetOfObjectBeingIterated());


@@ -2308,10 +2308,8 @@ IonCacheIRCompiler::emitGuardAndGetIterator()
     masm.movePtr(ImmGCPtr(iterobj), output);
     masm.loadObjPrivate(output, JSObject::ITER_CLASS_NFIXED_SLOTS, niScratch);
 
-    // Ensure the |active| and |unreusable| bits are not set.
-    masm.branchTest32(Assembler::NonZero,
-                      Address(niScratch, NativeIterator::offsetOfFlags()),
-                      Imm32(NativeIterator::Flags::All), failure->label());
+    // Ensure the iterator is reusable: see NativeIterator::isReusable.
+    masm.branchIfNativeIteratorNotReusable(niScratch, failure->label());
 
     // Pre-write barrier for store to 'objectBeingIterated_'.
     Address iterObjAddr(niScratch, NativeIterator::offsetOfObjectBeingIterated());
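
Both CacheIR compilers previously open-coded a branchTest32 against Flags::All, which worked only because every defined flag bit meant "don't reuse this iterator". The always-set Initialized bit introduced in vm/Iteration.h below would make that test fail for every iterator, so the guard now goes through the shared masm.branchIfNativeIteratorNotReusable helper and tests only the bits that actually block reuse. A small standalone sketch of the mask arithmetic, using the constants from the Flags changes below (illustrative, not the SpiderMonkey headers):

#include <cstdint>

constexpr uint32_t Initialized = 0x1;
constexpr uint32_t Active = 0x2;
constexpr uint32_t HasUnvisitedPropertyDeletion = 0x4;
constexpr uint32_t NotReusable = Active | HasUnvisitedPropertyDeletion;

// The old guard mask (All = Active | NotReusable) covered every flag that
// existed; the new mask deliberately excludes the Initialized bit.
static_assert(NotReusable == 0x6, "guard mask covers only the reuse-blocking bits");
static_assert((Initialized & NotReusable) == 0,
              "a freshly initialized iterator passes the reusability guard");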


@@ -3785,6 +3785,29 @@ MacroAssembler::debugAssertObjHasFixedSlots(Register obj, Register scratch)
 #endif
 }
 
+void
+MacroAssembler::branchIfNativeIteratorNotReusable(Register ni, Label* notReusable)
+{
+    // See NativeIterator::isReusable.
+    Address flagsAddr(ni, NativeIterator::offsetOfFlags());
+
+#ifdef DEBUG
+    Label niIsInitialized;
+    branchTest32(Assembler::NonZero,
+                 flagsAddr,
+                 Imm32(NativeIterator::Flags::Initialized),
+                 &niIsInitialized);
+    assumeUnreachable("Expected a NativeIterator that's been completely "
+                      "initialized");
+    bind(&niIsInitialized);
+#endif
+
+    branchTest32(Assembler::NonZero,
+                 flagsAddr,
+                 Imm32(NativeIterator::Flags::NotReusable),
+                 notReusable);
+}
+
 template <typename T, size_t N, typename P>
 static bool
 AddPendingReadBarrier(Vector<T*, N, P>& list, T* value)
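
The helper's DEBUG block uses the usual MacroAssembler assertion pattern: branch over an assumeUnreachable trap when the Initialized bit is set, and fall through into the trap otherwise. A plain C++ model of the control flow (illustrative only, not MacroAssembler code):

#include <cstdint>
#include <cstdio>
#include <cstdlib>

// Returns true when the caller should take the |notReusable| branch.
bool branchIfNativeIteratorNotReusableModel(uint32_t niFlags) {
    constexpr uint32_t Initialized = 0x1;
    constexpr uint32_t NotReusable = 0x2 | 0x4;  // Active | HasUnvisitedPropertyDeletion

#ifdef DEBUG
    // Models branchTest32(NonZero, flags, Initialized, &niIsInitialized)
    // followed by assumeUnreachable(...): only initialized iterators may
    // reach this guard.
    if (!(niFlags & Initialized)) {
        std::fprintf(stderr,
                     "Expected a NativeIterator that's been completely initialized\n");
        std::abort();
    }
#endif

    // Models branchTest32(NonZero, flags, NotReusable, notReusable).
    return (niFlags & NotReusable) != 0;
}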


@@ -2192,6 +2192,8 @@ class MacroAssembler : public MacroAssemblerSpecific
     void debugAssertIsObject(const ValueOperand& val);
     void debugAssertObjHasFixedSlots(Register obj, Register scratch);
 
+    void branchIfNativeIteratorNotReusable(Register ni, Label* notReusable);
+
     using MacroAssemblerSpecific::extractTag;
     Register extractTag(const TypedOrValueRegister& reg, Register scratch) {
         if (reg.hasValue())


@@ -9,6 +9,7 @@
 #include "vm/Iteration.h"
 
 #include "mozilla/DebugOnly.h"
+#include "mozilla/Likely.h"
 #include "mozilla/Maybe.h"
 #include "mozilla/MemoryReporting.h"
 #include "mozilla/PodOperations.h"
@@ -69,9 +70,14 @@ NativeIterator::trace(JSTracer* trc)
                       guard.trace(trc);
                   });
 
-    std::for_each(propertiesBegin(), propertiesEnd(),
+    GCPtrFlatString* begin = MOZ_LIKELY(isInitialized()) ? propertiesBegin() : propertyCursor_;
+    std::for_each(begin, propertiesEnd(),
                   [trc](GCPtrFlatString& prop) {
-                      TraceNullableEdge(trc, &prop, "prop");
+                      // Properties begin life non-null and never *become*
+                      // null. (Deletion-suppression will shift trailing
+                      // properties over a deleted property in the properties
+                      // array, but it doesn't null them out.)
+                      TraceEdge(trc, &prop, "prop");
                   });
 }
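
trace() now has to tolerate a NativeIterator that GC reaches mid-construction: until markInitialized() runs (added to the constructor in the next hunk), |guardsEnd_| has not yet advanced to the start of the property array, so |propertyCursor_| marks that start instead, as the propertiesBegin() assertion added in vm/Iteration.h spells out. The switch from TraceNullableEdge to TraceEdge relies on properties never being null once written. A rough standalone model of the range selection, with stand-in types (not the real GC pointer classes):

#include <algorithm>
#include <cstdint>

using PropertyName = const char*;  // stand-in for GCPtrFlatString

struct NativeIteratorModel {
    static constexpr uint32_t Initialized = 0x1;

    uint32_t flags_ = 0;
    PropertyName* guardsEnd_ = nullptr;       // advances as guards are written
    PropertyName* propertyCursor_ = nullptr;  // set up front to the properties' start
    PropertyName* propertiesEnd_ = nullptr;   // advances as properties are written

    bool isInitialized() const { return flags_ & Initialized; }

    template <typename TraceFn>
    void traceProperties(TraceFn trace) {
        // Mirrors: begin = MOZ_LIKELY(isInitialized()) ? propertiesBegin() : propertyCursor_;
        PropertyName* begin = isInitialized() ? guardsEnd_ : propertyCursor_;
        std::for_each(begin, propertiesEnd_, trace);  // every slot in range is non-null
    }
};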
@@ -734,6 +740,8 @@ NativeIterator::NativeIterator(JSContext* cx, Handle<PropertyIteratorObject*> pr
     }
     MOZ_ASSERT(static_cast<void*>(guardsEnd_) == propertyCursor_);
 
+    markInitialized();
+
     MOZ_ASSERT(!*hadError);
 }
@@ -1299,9 +1307,7 @@ SuppressDeletedProperty(JSContext* cx, NativeIterator* ni, HandleObject obj,
                 ni->trimLastProperty();
             }
 
-            // Modified NativeIterators omit properties that possibly shouldn't
-            // be omitted, so they can't be reused.
-            ni->markNotReusable();
+            ni->markHasUnvisitedPropertyDeletion();
             return true;
         }
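
markNotReusable() becomes markHasUnvisitedPropertyDeletion(), naming the actual reason reuse is forbidden: deletion-suppression rewrites the property array under a live iterator. As the new trace() comment above puts it, the deleted entry is overwritten by shifting the trailing properties down and the end is trimmed, so no slot is ever nulled out. A toy illustration of that shifting (plain C++ containers, not the SpiderMonkey data structures):

#include <algorithm>
#include <cassert>
#include <string>
#include <vector>

// Suppress a deleted, not-yet-visited property by shifting later entries over
// it and trimming the end (the trimLastProperty() analogue).
void suppressDeleted(std::vector<std::string>& props, size_t cursor,
                     const std::string& deleted) {
    auto it = std::find(props.begin() + cursor, props.end(), deleted);
    if (it == props.end())
        return;                          // already visited; nothing to suppress
    std::move(it + 1, props.end(), it);  // shift trailing properties down
    props.pop_back();                    // trim the now-duplicated last slot
}

int main() {
    std::vector<std::string> props{"a", "b", "c", "d"};
    suppressDeleted(props, /*cursor=*/1, "c");
    assert((props == std::vector<std::string>{"a", "b", "d"}));
}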


@@ -58,9 +58,10 @@ struct NativeIterator
     // active. Not serialized by XDR.
     struct Flags
     {
-        static constexpr uint32_t Active = 0x1;
-        static constexpr uint32_t NotReusable = 0x2;
-        static constexpr uint32_t All = Active | NotReusable;
+        static constexpr uint32_t Initialized = 0x1;
+        static constexpr uint32_t Active = 0x2;
+        static constexpr uint32_t HasUnvisitedPropertyDeletion = 0x4;
+        static constexpr uint32_t NotReusable = Active | HasUnvisitedPropertyDeletion;
     };
 
   private:
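
With the new layout, exactly one bit (Initialized) is set on a freshly built iterator, so isReusable() further down reduces to flags_ == Flags::Initialized. A worked example of the lifetime transitions, assuming the marking calls shown elsewhere in this patch (illustrative, self-contained):

#include <cassert>
#include <cstdint>

constexpr uint32_t Initialized = 0x1;
constexpr uint32_t Active = 0x2;
constexpr uint32_t HasUnvisitedPropertyDeletion = 0x4;

constexpr bool isReusable(uint32_t flags) { return flags == Initialized; }

int main() {
    uint32_t flags = Initialized;            // markInitialized() at the end of construction
    assert(isReusable(flags));

    flags |= Active;                         // markActive() when iteration begins
    assert(!isReusable(flags));

    flags |= HasUnvisitedPropertyDeletion;   // markHasUnvisitedPropertyDeletion()
    flags &= ~Active;                        // markInactive() on IteratorClose
    assert(!isReusable(flags));              // the deletion bit blocks reuse for good
}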
@@ -131,6 +132,15 @@ struct NativeIterator
                       "HeapReceiverGuards are present, with no padding space "
                       "required for correct alignment");
 
+        // We *could* just check the assertion below if we wanted, but the
+        // incompletely-initialized NativeIterator case matters for so little
+        // code that we prefer not to impose the condition-check on every
+        // single user.
+        MOZ_ASSERT(isInitialized(),
+                   "NativeIterator must be initialized, or else |guardsEnd_| "
+                   "isn't necessarily the start of properties and "
+                   "|propertyCursor_| instead is");
+
         return reinterpret_cast<GCPtrFlatString*>(guardsEnd_);
     }
@@ -154,12 +164,20 @@ struct NativeIterator
     }
 
     void resetPropertyCursorForReuse() {
+        MOZ_ASSERT(isInitialized());
+
+        // This function is called unconditionally on IteratorClose, so
+        // unvisited properties might have been deleted and we can't assert
+        // that this NativeIterator is reusable. (Should we not bother
+        // resetting the cursor in that case?)
+
+        // Note: JIT code inlines |propertyCursor_| resetting when an iterator
+        // ends: see |CodeGenerator::visitIteratorEnd|.
         propertyCursor_ = propertiesBegin();
     }
 
     bool previousPropertyWas(JS::Handle<JSFlatString*> str) {
+        MOZ_ASSERT(isInitialized());
         return propertyCursor_ > propertiesBegin() && propertyCursor_[-1] == str;
     }
@@ -168,6 +186,8 @@ struct NativeIterator
     }
 
     void trimLastProperty() {
+        MOZ_ASSERT(isInitialized());
+
         propertiesEnd_--;
 
         // This invokes the pre barrier on this property, since it's no longer
@@ -189,6 +209,7 @@ struct NativeIterator
     }
 
     void incCursor() {
+        MOZ_ASSERT(isInitialized());
         propertyCursor_++;
     }
@@ -196,29 +217,52 @@ struct NativeIterator
         return guardKey_;
     }
 
+    bool isInitialized() const {
+        return flags_ & Flags::Initialized;
+    }
+
+  private:
+    void markInitialized() {
+        MOZ_ASSERT(flags_ == 0);
+        flags_ = Flags::Initialized;
+    }
+
+  public:
     bool isActive() const {
+        MOZ_ASSERT(isInitialized());
         return flags_ & Flags::Active;
     }
     void markActive() {
+        MOZ_ASSERT(isInitialized());
         flags_ |= Flags::Active;
     }
     void markInactive() {
+        MOZ_ASSERT(isInitialized());
        flags_ &= ~Flags::Active;
     }
 
     bool isReusable() const {
-        // Cached NativeIterators are reusable if they're not active and
-        // aren't marked as not reusable, i.e. if no flags are set.
-        return flags_ == 0;
+        MOZ_ASSERT(isInitialized());
+        return flags_ == Flags::Initialized;
     }
 
-    void markNotReusable() {
-        flags_ |= Flags::NotReusable;
+    void markHasUnvisitedPropertyDeletion() {
+        MOZ_ASSERT(isInitialized());
+        flags_ |= Flags::HasUnvisitedPropertyDeletion;
     }
 
     void link(NativeIterator* other) {
+        // The NativeIterator sentinel doesn't have to be linked, because it's
+        // the start of the list. Anything else added should have been
+        // initialized.
+        MOZ_ASSERT(isInitialized());
+
         /* A NativeIterator cannot appear in the enumerator list twice. */
         MOZ_ASSERT(!next_ && !prev_);
@@ -228,6 +272,8 @@ struct NativeIterator
         other->prev_ = this;
     }
     void unlink() {
+        MOZ_ASSERT(isInitialized());
+
         next_->prev_ = prev_;
         prev_->next_ = next_;
         next_ = nullptr;