Bug 1448039 - Remove UNOPTIMIZABLE bits from baseline IC fallback stubs. r=tcampbell

Differential Revision: https://phabricator.services.mozilla.com/D7794

--HG--
extra : moz-landing-system : lando
This commit is contained in:
Ashley Hauck 2018-10-11 01:49:42 +00:00
parent 8b82ddcd76
commit 8fc722d9ca
8 changed files with 91 additions and 470 deletions

View File

@ -16,6 +16,6 @@ for (var i = 0; i < 3; i++) {
assertEq(0, 1);
} catch (e) {
assertEq(e.message === `y is undefined; can't access its "length" property` ||
e.message === "undefined has no properties", true);
e.message === `can't access property "length" of undefined`, true);
}
}

View File

@ -336,6 +336,33 @@ ICStub::trace(JSTracer* trc)
}
}
// This helper handles ICState updates/transitions while attaching CacheIR stubs.
//
// IRGenerator is constructed as IRGenerator(cx, script, pc, mode, args...) and
// asked to emit CacheIR via tryAttachStub(); on success the generated IR is
// attached to |stub| as a baseline stub of the given |kind|. |name| is used
// only for JitSpew logging. Whenever nothing ends up attached (the generator
// declined, or the attach itself did not take), the miss is recorded with
// trackNotAttached() so ICState's failure heuristics can eventually move the
// IC to a more generic mode.
template<typename IRGenerator, typename... Args>
static void
TryAttachStub(const char *name, JSContext* cx, BaselineFrame* frame, ICFallbackStub* stub, BaselineCacheIRStubKind kind, Args&&... args)
{
// Possibly transition the IC state first (e.g. after repeated failures);
// a transition invalidates the currently attached stubs, so discard them.
if (stub->state().maybeTransition()) {
stub->discardStubs(cx);
}
if (stub->state().canAttachStub()) {
RootedScript script(cx, frame->script());
jsbytecode* pc = stub->icEntry()->pc(script);
bool attached = false;
// Perfect-forward the per-kind trailing arguments to the generator.
IRGenerator gen(cx, script, pc, stub->state().mode(), std::forward<Args>(args)...);
if (gen.tryAttachStub()) {
ICStub* newStub = AttachBaselineCacheIRStub(cx, gen.writerRef(), gen.cacheKind(),
kind, script, stub, &attached);
if (newStub) {
// A non-null |newStub| does not guarantee |attached| is true, so the
// spew message distinguishes the two outcomes explicitly.
JitSpew(JitSpew_BaselineIC, " %s %s CacheIR stub", attached ? "Attached" : "Failed to attach", name);
}
}
if (!attached) {
// Count this miss toward the ICState failure heuristics.
stub->state().trackNotAttached();
}
}
}
//
@ -1584,30 +1611,7 @@ DoToBoolFallback(JSContext* cx, BaselineFrame* frame, ICToBool_Fallback* stub, H
MOZ_ASSERT(!arg.isBoolean());
if (stub->state().maybeTransition()) {
stub->discardStubs(cx);
}
if (stub->state().canAttachStub()) {
RootedScript script(cx, frame->script());
jsbytecode* pc = stub->icEntry()->pc(script);
ToBoolIRGenerator gen(cx, script, pc, stub->state().mode(),
arg);
bool attached = false;
if (gen.tryAttachStub()) {
ICStub* newStub = AttachBaselineCacheIRStub(cx, gen.writerRef(), gen.cacheKind(),
BaselineCacheIRStubKind::Regular,
script, stub, &attached);
if (newStub) {
JitSpew(JitSpew_BaselineIC, " Attached ToBool CacheIR stub, attached is now %d", attached);
}
}
if (!attached) {
stub->state().trackNotAttached();
}
}
TryAttachStub<ToBoolIRGenerator>("ToBool", cx, frame, stub, BaselineCacheIRStubKind::Regular, arg);
bool cond = ToBoolean(arg);
ret.setBoolean(cond);
@ -1786,10 +1790,6 @@ DoGetElemFallback(JSContext* cx, BaselineFrame* frame, ICGetElem_Fallback* stub_
stub->noteNegativeIndex();
}
if (!attached && !isTemporarilyUnoptimizable) {
stub->noteUnoptimizableAccess();
}
return true;
}
@ -1867,10 +1867,6 @@ DoGetElemSuperFallback(JSContext* cx, BaselineFrame* frame, ICGetElem_Fallback*
stub->noteNegativeIndex();
}
if (!attached && !isTemporarilyUnoptimizable) {
stub->noteUnoptimizableAccess();
}
return true;
}
@ -2063,14 +2059,15 @@ DoSetElemFallback(JSContext* cx, BaselineFrame* frame, ICSetElem_Fallback* stub_
BaselineCacheIRStubKind::Updated,
frame->script(), stub, &attached);
if (newStub) {
JitSpew(JitSpew_BaselineIC, " Attached SetElem CacheIR stub");
SetUpdateStubData(newStub->toCacheIR_Updated(), gen.typeCheckInfo());
if (gen.shouldNotePreliminaryObjectStub()) {
newStub->toCacheIR_Updated()->notePreliminaryObject();
} else if (gen.shouldUnlinkPreliminaryObjectStubs()) {
StripPreliminaryObjectStubs(cx, stub);
}
JitSpew(JitSpew_BaselineIC, " Attached SetElem CacheIR stub");
SetUpdateStubData(newStub->toCacheIR_Updated(), gen.typeCheckInfo());
return true;
}
} else {
@ -2248,28 +2245,7 @@ DoInFallback(JSContext* cx, BaselineFrame* frame, ICIn_Fallback* stub_,
return false;
}
if (stub->state().maybeTransition()) {
stub->discardStubs(cx);
}
if (stub->state().canAttachStub()) {
RootedScript script(cx, frame->script());
jsbytecode* pc = stub->icEntry()->pc(script);
HasPropIRGenerator gen(cx, script, pc, CacheKind::In, stub->state().mode(), key, objValue);
bool attached = false;
if (gen.tryAttachStub()) {
ICStub* newStub = AttachBaselineCacheIRStub(cx, gen.writerRef(), gen.cacheKind(),
BaselineCacheIRStubKind::Regular,
script, stub, &attached);
if (newStub) {
JitSpew(JitSpew_BaselineIC, " Attached In CacheIR stub");
}
}
if (!attached) {
stub->state().trackNotAttached();
}
}
TryAttachStub<HasPropIRGenerator>("In", cx, frame, stub, BaselineCacheIRStubKind::Regular, CacheKind::In, key, objValue);
RootedObject obj(cx, &objValue.toObject());
bool cond = false;
@ -2317,29 +2293,9 @@ DoHasOwnFallback(JSContext* cx, BaselineFrame* frame, ICHasOwn_Fallback* stub_,
FallbackICSpew(cx, stub, "HasOwn");
if (stub->state().maybeTransition()) {
stub->discardStubs(cx);
}
if (stub->state().canAttachStub()) {
RootedScript script(cx, frame->script());
jsbytecode* pc = stub->icEntry()->pc(script);
HasPropIRGenerator gen(cx, script, pc, CacheKind::HasOwn,
stub->state().mode(), keyValue, objValue);
bool attached = false;
if (gen.tryAttachStub()) {
ICStub* newStub = AttachBaselineCacheIRStub(cx, gen.writerRef(), gen.cacheKind(),
BaselineCacheIRStubKind::Regular,
script, stub, &attached);
if (newStub) {
JitSpew(JitSpew_BaselineIC, " Attached HasOwn CacheIR stub");
}
}
if (!attached) {
stub->state().trackNotAttached();
}
}
TryAttachStub<HasPropIRGenerator>("HasOwn", cx, frame, stub,
BaselineCacheIRStubKind::Regular, CacheKind::HasOwn,
keyValue, objValue);
bool found;
if (!HasOwnProperty(cx, objValue, keyValue, &found)) {
@ -2393,26 +2349,8 @@ DoGetNameFallback(JSContext* cx, BaselineFrame* frame, ICGetName_Fallback* stub_
MOZ_ASSERT(op == JSOP_GETNAME || op == JSOP_GETGNAME);
RootedPropertyName name(cx, script->getName(pc));
bool attached = false;
if (stub->state().maybeTransition()) {
stub->discardStubs(cx);
}
if (stub->state().canAttachStub()) {
GetNameIRGenerator gen(cx, script, pc, stub->state().mode(), envChain, name);
if (gen.tryAttachStub()) {
ICStub* newStub = AttachBaselineCacheIRStub(cx, gen.writerRef(), gen.cacheKind(),
BaselineCacheIRStubKind::Monitored,
script, stub, &attached);
if (newStub) {
JitSpew(JitSpew_BaselineIC, " Attached GetName CacheIR stub");
}
}
if (!attached) {
stub->state().trackNotAttached();
}
}
TryAttachStub<GetNameIRGenerator>("GetName", cx, frame, stub, BaselineCacheIRStubKind::Monitored, envChain, name);
static_assert(JSOP_GETGNAME_LENGTH == JSOP_GETNAME_LENGTH,
"Otherwise our check for JSOP_TYPEOF isn't ok");
@ -2439,9 +2377,6 @@ DoGetNameFallback(JSContext* cx, BaselineFrame* frame, ICGetName_Fallback* stub_
return false;
}
if (!attached) {
stub->noteUnoptimizableAccess();
}
return true;
}
@ -2480,26 +2415,7 @@ DoBindNameFallback(JSContext* cx, BaselineFrame* frame, ICBindName_Fallback* stu
RootedPropertyName name(cx, frame->script()->getName(pc));
if (stub->state().maybeTransition()) {
stub->discardStubs(cx);
}
if (stub->state().canAttachStub()) {
bool attached = false;
RootedScript script(cx, frame->script());
BindNameIRGenerator gen(cx, script, pc, stub->state().mode(), envChain, name);
if (gen.tryAttachStub()) {
ICStub* newStub = AttachBaselineCacheIRStub(cx, gen.writerRef(), gen.cacheKind(),
BaselineCacheIRStubKind::Regular,
script, stub, &attached);
if (newStub) {
JitSpew(JitSpew_BaselineIC, " Attached BindName CacheIR stub");
}
}
if (!attached) {
stub->state().trackNotAttached();
}
}
TryAttachStub<BindNameIRGenerator>("BindName", cx, frame, stub, BaselineCacheIRStubKind::Regular, envChain, name);
RootedObject scope(cx);
if (!LookupNameUnqualified(cx, name, envChain, &scope)) {
@ -2562,26 +2478,7 @@ DoGetIntrinsicFallback(JSContext* cx, BaselineFrame* frame, ICGetIntrinsic_Fallb
return true;
}
if (stub->state().maybeTransition()) {
stub->discardStubs(cx);
}
if (stub->state().canAttachStub()) {
bool attached = false;
RootedScript script(cx, frame->script());
GetIntrinsicIRGenerator gen(cx, script, pc, stub->state().mode(), res);
if (gen.tryAttachStub()) {
ICStub* newStub = AttachBaselineCacheIRStub(cx, gen.writerRef(), gen.cacheKind(),
BaselineCacheIRStubKind::Regular,
script, stub, &attached);
if (newStub) {
JitSpew(JitSpew_BaselineIC, " Attached GetIntrinsic CacheIR stub");
}
}
if (!attached) {
stub->state().trackNotAttached();
}
}
TryAttachStub<GetIntrinsicIRGenerator>("GetIntrinsic", cx, frame, stub, BaselineCacheIRStubKind::Regular, res);
return true;
}
@ -2679,7 +2576,7 @@ DoGetPropFallback(JSContext* cx, BaselineFrame* frame, ICGetProp_Fallback* stub_
BaselineCacheIRStubKind::Monitored,
script, stub, &attached);
if (newStub) {
JitSpew(JitSpew_BaselineIC, " Attached CacheIR stub");
JitSpew(JitSpew_BaselineIC, " Attached GetProp CacheIR stub");
if (gen.shouldNotePreliminaryObjectStub()) {
newStub->toCacheIR_Monitored()->notePreliminaryObject();
} else if (gen.shouldUnlinkPreliminaryObjectStubs()) {
@ -2708,16 +2605,6 @@ DoGetPropFallback(JSContext* cx, BaselineFrame* frame, ICGetProp_Fallback* stub_
if (!stub->addMonitorStubForValue(cx, frame, types, res)) {
return false;
}
if (attached) {
return true;
}
MOZ_ASSERT(!attached);
if (!isTemporarilyUnoptimizable) {
stub->noteUnoptimizableAccess();
}
return true;
}
@ -2757,7 +2644,7 @@ DoGetPropSuperFallback(JSContext* cx, BaselineFrame* frame, ICGetProp_Fallback*
BaselineCacheIRStubKind::Monitored,
script, stub, &attached);
if (newStub) {
JitSpew(JitSpew_BaselineIC, " Attached CacheIR stub");
JitSpew(JitSpew_BaselineIC, " Attached GetPropSuper CacheIR stub");
if (gen.shouldNotePreliminaryObjectStub()) {
newStub->toCacheIR_Monitored()->notePreliminaryObject();
} else if (gen.shouldUnlinkPreliminaryObjectStubs()) {
@ -2789,15 +2676,6 @@ DoGetPropSuperFallback(JSContext* cx, BaselineFrame* frame, ICGetProp_Fallback*
return false;
}
if (attached) {
return true;
}
MOZ_ASSERT(!attached);
if (!isTemporarilyUnoptimizable) {
stub->noteUnoptimizableAccess();
}
return true;
}
@ -3025,14 +2903,15 @@ DoSetPropFallback(JSContext* cx, BaselineFrame* frame, ICSetProp_Fallback* stub_
BaselineCacheIRStubKind::Updated,
frame->script(), stub, &attached);
if (newStub) {
JitSpew(JitSpew_BaselineIC, " Attached SetProp CacheIR stub");
SetUpdateStubData(newStub->toCacheIR_Updated(), gen.typeCheckInfo());
if (gen.shouldNotePreliminaryObjectStub()) {
newStub->toCacheIR_Updated()->notePreliminaryObject();
} else if (gen.shouldUnlinkPreliminaryObjectStubs()) {
StripPreliminaryObjectStubs(cx, stub);
}
JitSpew(JitSpew_BaselineIC, " Attached SetProp CacheIR stub");
SetUpdateStubData(newStub->toCacheIR_Updated(), gen.typeCheckInfo());
}
} else {
gen.trackAttached(IRGenerator::NotAttached);
@ -3042,10 +2921,6 @@ DoSetPropFallback(JSContext* cx, BaselineFrame* frame, ICSetProp_Fallback* stub_
}
}
if (!attached && !isTemporarilyUnoptimizable) {
stub->noteUnoptimizableAccess();
}
return true;
}
@ -3843,7 +3718,6 @@ DoCallFallback(JSContext* cx, BaselineFrame* frame, ICCall_Fallback* stub_, uint
}
if (!handled) {
stub->noteUnoptimizableCall();
if (canAttachStub) {
stub->state().trackNotAttached();
}
@ -3896,9 +3770,6 @@ DoSpreadCallFallback(JSContext* cx, BaselineFrame* frame, ICCall_Fallback* stub_
return false;
}
if (!handled) {
stub->noteUnoptimizableCall();
}
return true;
}
@ -5395,28 +5266,7 @@ DoGetIteratorFallback(JSContext* cx, BaselineFrame* frame, ICGetIterator_Fallbac
{
FallbackICSpew(cx, stub, "GetIterator");
if (stub->state().maybeTransition()) {
stub->discardStubs(cx);
}
if (stub->state().canAttachStub()) {
RootedScript script(cx, frame->script());
jsbytecode* pc = stub->icEntry()->pc(script);
GetIteratorIRGenerator gen(cx, script, pc, stub->state().mode(), value);
bool attached = false;
if (gen.tryAttachStub()) {
ICStub* newStub = AttachBaselineCacheIRStub(cx, gen.writerRef(), gen.cacheKind(),
BaselineCacheIRStubKind::Regular,
script, stub, &attached);
if (newStub) {
JitSpew(JitSpew_BaselineIC, " Attached GetIterator CacheIR stub");
}
}
if (!attached) {
stub->state().trackNotAttached();
}
}
TryAttachStub<GetIteratorIRGenerator>("GetIterator", cx, frame, stub, BaselineCacheIRStubKind::Regular, value);
JSObject* iterobj = ValueToIterator(cx, value);
if (!iterobj) {
@ -5585,41 +5435,6 @@ ICIteratorClose_Fallback::Compiler::generateStubCode(MacroAssembler& masm)
// InstanceOf_Fallback
//
// Try to attach a CacheIR stub for an |instanceof| operation on |lhs| with
// right-hand side object |rhs|. On success, *attached is set to true by
// AttachBaselineCacheIRStub. Declining to attach is not an error; the return
// value reports only failures inside the attach machinery itself.
static bool
TryAttachInstanceOfStub(JSContext* cx, BaselineFrame* frame, ICInstanceOf_Fallback* stub,
                        HandleValue lhs, HandleObject rhs, bool* attached)
{
    MOZ_ASSERT(!*attached);
    FallbackICSpew(cx, stub, "InstanceOf");

    // Possibly transition the IC state first (e.g. after repeated failures);
    // a transition invalidates the currently attached stubs, so discard them.
    if (stub->state().maybeTransition()) {
        stub->discardStubs(cx);
    }

    if (stub->state().canAttachStub()) {
        RootedScript script(cx, frame->script());
        jsbytecode* pc = stub->icEntry()->pc(script);

        InstanceOfIRGenerator gen(cx, script, pc, stub->state().mode(),
                                  lhs,
                                  rhs);
        if (gen.tryAttachStub()) {
            ICStub* newStub = AttachBaselineCacheIRStub(cx, gen.writerRef(), gen.cacheKind(),
                                                        BaselineCacheIRStubKind::Regular,
                                                        script, stub, attached);
            if (newStub) {
                JitSpew(JitSpew_BaselineIC, " Attached InstanceOf CacheIR stub, attached is now %d", *attached);
            }
        }

        // Fix: test the pointee, not the (always non-null) out-pointer.
        // The previous |if (!attached)| was dead code, so attach failures
        // were never recorded against the ICState failure heuristics.
        if (!*attached) {
            stub->state().trackNotAttached();
        }
    }
    return true;
}
static bool
DoInstanceOfFallback(JSContext* cx, BaselineFrame* frame, ICInstanceOf_Fallback* stub_,
HandleValue lhs, HandleValue rhs, MutableHandleValue res)
@ -5648,7 +5463,10 @@ DoInstanceOfFallback(JSContext* cx, BaselineFrame* frame, ICInstanceOf_Fallback*
}
if (!obj->is<JSFunction>()) {
stub->noteUnoptimizableAccess();
// ensure we've recorded at least one failure, so we can detect there was a non-optimizable case
if (!stub->state().hasFailures()) {
stub->state().trackNotAttached();
}
return true;
}
@ -5656,13 +5474,7 @@ DoInstanceOfFallback(JSContext* cx, BaselineFrame* frame, ICInstanceOf_Fallback*
// for use during Ion compilation.
EnsureTrackPropertyTypes(cx, obj, NameToId(cx->names().prototype));
bool attached = false;
if (!TryAttachInstanceOfStub(cx, frame, stub, lhs, obj, &attached)) {
return false;
}
if (!attached) {
stub->noteUnoptimizableAccess();
}
TryAttachStub<InstanceOfIRGenerator>("InstanceOf", cx, frame, stub, BaselineCacheIRStubKind::Regular, lhs, obj);
return true;
}
@ -5699,28 +5511,7 @@ DoTypeOfFallback(JSContext* cx, BaselineFrame* frame, ICTypeOf_Fallback* stub, H
{
FallbackICSpew(cx, stub, "TypeOf");
if (stub->state().maybeTransition()) {
stub->discardStubs(cx);
}
if (stub->state().canAttachStub()) {
RootedScript script(cx, frame->script());
jsbytecode* pc = stub->icEntry()->pc(script);
TypeOfIRGenerator gen(cx, script, pc, stub->state().mode(), val);
bool attached = false;
if (gen.tryAttachStub()) {
ICStub* newStub = AttachBaselineCacheIRStub(cx, gen.writerRef(), gen.cacheKind(),
BaselineCacheIRStubKind::Regular,
script, stub, &attached);
if (newStub) {
JitSpew(JitSpew_BaselineIC, " Attached TypeOf CacheIR stub");
}
}
if (!attached) {
stub->state().trackNotAttached();
}
}
TryAttachStub<TypeOfIRGenerator>("TypeOf", cx, frame, stub, BaselineCacheIRStubKind::Regular, val);
JSType type = js::TypeOfValue(val);
RootedString string(cx, TypeName(type, cx->names()));
@ -6061,24 +5852,7 @@ DoUnaryArithFallback(JSContext* cx, BaselineFrame* frame, ICUnaryArith_Fallback*
stub->setSawDoubleResult();
}
if (stub->state().maybeTransition()) {
stub->discardStubs(cx);
}
if (stub->state().canAttachStub()) {
UnaryArithIRGenerator gen(cx, script, pc, stub->state().mode(),
op, val, res);
if (gen.tryAttachStub()) {
bool attached = false;
ICStub* newStub = AttachBaselineCacheIRStub(cx, gen.writerRef(), gen.cacheKind(),
BaselineCacheIRStubKind::Regular,
script, stub, &attached);
if (newStub) {
JitSpew(JitSpew_BaselineIC, " Attached UnaryArith CacheIR stub for %s", CodeName[op]);
}
}
}
TryAttachStub<UnaryArithIRGenerator>("UniaryArith", cx, frame, stub, BaselineCacheIRStubKind::Regular, op, val, res);
return true;
}
@ -6212,35 +5986,7 @@ DoBinaryArithFallback(JSContext* cx, BaselineFrame* frame, ICBinaryArith_Fallbac
stub->setSawDoubleResult();
}
// Check if debug mode toggling made the stub invalid.
if (stub.invalid()) {
return true;
}
if (ret.isDouble()) {
stub->setSawDoubleResult();
}
if (stub->state().maybeTransition()) {
stub->discardStubs(cx);
}
if (stub->state().canAttachStub()) {
BinaryArithIRGenerator gen(cx, script, pc, stub->state().mode(),
op, lhs, rhs, ret);
if (gen.tryAttachStub()) {
bool attached = false;
ICStub* newStub = AttachBaselineCacheIRStub(cx, gen.writerRef(), gen.cacheKind(),
BaselineCacheIRStubKind::Regular,
script, stub, &attached);
if (newStub) {
JitSpew(JitSpew_BaselineIC, " Attached BinaryArith CacheIR stub for %s", CodeName[op]);
}
} else {
stub->noteUnoptimizableOperands();
}
}
TryAttachStub<BinaryArithIRGenerator>("BinaryArith", cx, frame, stub, BaselineCacheIRStubKind::Regular, op, lhs, rhs, ret);
return true;
}
@ -6352,28 +6098,7 @@ DoCompareFallback(JSContext* cx, BaselineFrame* frame, ICCompare_Fallback* stub_
return true;
}
// Check to see if a new stub should be generated.
if (stub->numOptimizedStubs() >= ICCompare_Fallback::MAX_OPTIMIZED_STUBS) {
// TODO: Discard all stubs in this IC and replace with inert megamorphic stub.
// But for now we just bail.
return true;
}
if (stub->state().canAttachStub()) {
CompareIRGenerator gen(cx, script, pc, stub->state().mode(), op, lhs, rhs);
bool attached = false;
if (gen.tryAttachStub()) {
ICStub* newStub = AttachBaselineCacheIRStub(cx, gen.writerRef(), gen.cacheKind(),
BaselineCacheIRStubKind::Regular,
script, stub, &attached);
if (newStub) {
JitSpew(JitSpew_BaselineIC, " Attached CacheIR stub");
}
return true;
}
}
stub->noteUnoptimizableAccess();
TryAttachStub<CompareIRGenerator>("Compare", cx, frame, stub, BaselineCacheIRStubKind::Regular, op, lhs, rhs);
return true;
}
@ -6486,18 +6211,8 @@ DoNewObject(JSContext* cx, BaselineFrame* frame, ICNewObject_Fallback* stub, Mut
return false;
}
if (!JitOptions.disableCacheIR) {
bool attached = false;
NewObjectIRGenerator gen(cx, script, pc, stub->state().mode(), JSOp(*pc), templateObject);
if (gen.tryAttachStub()) {
ICStub* newStub = AttachBaselineCacheIRStub(cx, gen.writerRef(), gen.cacheKind(),
BaselineCacheIRStubKind::Regular,
script, stub, &attached);
if (newStub) {
JitSpew(JitSpew_BaselineIC, " NewObject Attached CacheIR stub");
}
}
}
TryAttachStub<NewObjectIRGenerator>("NewObject", cx, frame, stub, BaselineCacheIRStubKind::Regular, JSOp(*pc), templateObject);
stub->setTemplateObject(templateObject);
}
}

View File

@ -1797,7 +1797,6 @@ class ICGetElem_Fallback : public ICMonitoredFallbackStub
{ }
static const uint16_t EXTRA_NEGATIVE_INDEX = 0x1;
static const uint16_t EXTRA_UNOPTIMIZABLE_ACCESS = 0x2;
public:
void noteNegativeIndex() {
@ -1806,12 +1805,6 @@ class ICGetElem_Fallback : public ICMonitoredFallbackStub
bool hasNegativeIndex() const {
return extra_ & EXTRA_NEGATIVE_INDEX;
}
void noteUnoptimizableAccess() {
extra_ |= EXTRA_UNOPTIMIZABLE_ACCESS;
}
bool hadUnoptimizableAccess() const {
return extra_ & EXTRA_UNOPTIMIZABLE_ACCESS;
}
// Compiler for this stub kind.
class Compiler : public ICStubCompiler {
@ -1938,15 +1931,6 @@ class ICGetName_Fallback : public ICMonitoredFallbackStub
{ }
public:
static const size_t UNOPTIMIZABLE_ACCESS_BIT = 0;
void noteUnoptimizableAccess() {
extra_ |= (1u << UNOPTIMIZABLE_ACCESS_BIT);
}
bool hadUnoptimizableAccess() const {
return extra_ & (1u << UNOPTIMIZABLE_ACCESS_BIT);
}
class Compiler : public ICStubCompiler {
protected:
MOZ_MUST_USE bool generateStubCode(MacroAssembler& masm) override;
@ -2027,16 +2011,8 @@ class ICGetProp_Fallback : public ICMonitoredFallbackStub
{ }
public:
static const size_t UNOPTIMIZABLE_ACCESS_BIT = 0;
static const size_t ACCESSED_GETTER_BIT = 1;
void noteUnoptimizableAccess() {
extra_ |= (1u << UNOPTIMIZABLE_ACCESS_BIT);
}
bool hadUnoptimizableAccess() const {
return extra_ & (1u << UNOPTIMIZABLE_ACCESS_BIT);
}
void noteAccessedGetter() {
extra_ |= (1u << ACCESSED_GETTER_BIT);
}
@ -2084,14 +2060,6 @@ class ICSetProp_Fallback : public ICFallbackStub
{ }
public:
static const size_t UNOPTIMIZABLE_ACCESS_BIT = 0;
void noteUnoptimizableAccess() {
extra_ |= (1u << UNOPTIMIZABLE_ACCESS_BIT);
}
bool hadUnoptimizableAccess() const {
return extra_ & (1u << UNOPTIMIZABLE_ACCESS_BIT);
}
class Compiler : public ICStubCompiler {
protected:
CodeOffset bailoutReturnOffset_;
@ -2149,8 +2117,6 @@ class ICCall_Fallback : public ICMonitoredFallbackStub
{
friend class ICStubSpace;
public:
static const unsigned UNOPTIMIZABLE_CALL_FLAG = 0x1;
static const uint32_t MAX_OPTIMIZED_STUBS = 16;
private:
@ -2159,13 +2125,6 @@ class ICCall_Fallback : public ICMonitoredFallbackStub
{}
public:
void noteUnoptimizableCall() {
extra_ |= UNOPTIMIZABLE_CALL_FLAG;
}
bool hadUnoptimizableCall() const {
return extra_ & UNOPTIMIZABLE_CALL_FLAG;
}
bool scriptedStubsAreGeneralized() const {
return hasStub(Call_AnyScripted);
}
@ -2856,17 +2815,7 @@ class ICInstanceOf_Fallback : public ICFallbackStub
: ICFallbackStub(ICStub::InstanceOf_Fallback, stubCode)
{ }
static const uint16_t UNOPTIMIZABLE_ACCESS_BIT = 0x1;
public:
void noteUnoptimizableAccess() {
extra_ |= UNOPTIMIZABLE_ACCESS_BIT;
}
bool hadUnoptimizableAccess() const {
return extra_ & UNOPTIMIZABLE_ACCESS_BIT;
}
class Compiler : public ICStubCompiler {
protected:
MOZ_MUST_USE bool generateStubCode(MacroAssembler& masm) override;
@ -3071,16 +3020,6 @@ class ICCompare_Fallback : public ICFallbackStub
: ICFallbackStub(ICStub::Compare_Fallback, stubCode) {}
public:
static const uint32_t MAX_OPTIMIZED_STUBS = 8;
static const size_t UNOPTIMIZABLE_ACCESS_BIT = 0;
void noteUnoptimizableAccess() {
extra_ |= (1u << UNOPTIMIZABLE_ACCESS_BIT);
}
bool hadUnoptimizableAccess() const {
return extra_ & (1u << UNOPTIMIZABLE_ACCESS_BIT);
}
// Compiler for this stub kind.
class Compiler : public ICStubCompiler {
protected:
@ -3113,7 +3052,6 @@ class ICBinaryArith_Fallback : public ICFallbackStub
}
static const uint16_t SAW_DOUBLE_RESULT_BIT = 0x1;
static const uint16_t UNOPTIMIZABLE_OPERANDS_BIT = 0x2;
public:
static const uint32_t MAX_OPTIMIZED_STUBS = 8;
@ -3124,12 +3062,6 @@ class ICBinaryArith_Fallback : public ICFallbackStub
void setSawDoubleResult() {
extra_ |= SAW_DOUBLE_RESULT_BIT;
}
bool hadUnoptimizableOperands() const {
return extra_ & UNOPTIMIZABLE_OPERANDS_BIT;
}
void noteUnoptimizableOperands() {
extra_ |= UNOPTIMIZABLE_OPERANDS_BIT;
}
// Compiler for this stub kind.
class Compiler : public ICStubCompiler {

View File

@ -284,14 +284,8 @@ BaselineInspector::maybeInfoForPropertyOp(jsbytecode* pc, ReceiverVector& receiv
stub = stub->next();
}
if (stub->isGetProp_Fallback()) {
if (stub->toGetProp_Fallback()->hadUnoptimizableAccess()) {
receivers.clear();
}
} else {
if (stub->toSetProp_Fallback()->hadUnoptimizableAccess()) {
receivers.clear();
}
if (stub->toFallbackStub()->state().hasFailures()) {
receivers.clear();
}
// Don't inline if there are more than 5 receivers.
@ -619,7 +613,7 @@ BaselineInspector::expectedCompareType(jsbytecode* pc)
if (ICStub* fallback = second ? second->next() : first->next()) {
MOZ_ASSERT(fallback->isFallback());
if (fallback->toCompare_Fallback()->hadUnoptimizableAccess()) {
if (fallback->toFallbackStub()->state().hasFailures()) {
return MCompare::Compare_Unknown;
}
}
@ -697,9 +691,8 @@ BaselineInspector::expectedBinaryArithSpecialization(jsbytecode* pc)
ICStub* stubs[2];
const ICEntry& entry = icEntryFromPC(pc);
ICStub* stub = entry.fallbackStub();
if (stub->isBinaryArith_Fallback() &&
stub->toBinaryArith_Fallback()->hadUnoptimizableOperands())
ICFallbackStub* stub = entry.fallbackStub();
if (stub->state().hasFailures())
{
return MIRType::None;
}
@ -847,7 +840,7 @@ BaselineInspector::getSingleCallee(jsbytecode* pc)
const ICEntry& entry = icEntryFromPC(pc);
ICStub* stub = entry.firstStub();
if (entry.fallbackStub()->toCall_Fallback()->hadUnoptimizableCall()) {
if (entry.fallbackStub()->state().hasFailures()) {
return nullptr;
}
@ -1249,13 +1242,9 @@ BaselineInspector::commonGetPropFunction(jsbytecode* pc, bool innerized,
{
return false;
}
} else if (stub->isGetProp_Fallback()) {
} else if (stub->isFallback()) {
// If we have an unoptimizable access, don't try to optimize.
if (stub->toGetProp_Fallback()->hadUnoptimizableAccess()) {
return false;
}
} else if (stub->isGetName_Fallback()) {
if (stub->toGetName_Fallback()->hadUnoptimizableAccess()) {
if (stub->toFallbackStub()->state().hasFailures()) {
return false;
}
} else {
@ -1334,20 +1323,11 @@ BaselineInspector::megamorphicGetterSetterFunction(jsbytecode* pc, bool isGetter
*getterOrSetter = setter;
continue;
}
if (stub->isGetProp_Fallback()) {
if (stub->toGetProp_Fallback()->hadUnoptimizableAccess()) {
if (stub->isFallback()) {
if (stub->toFallbackStub()->state().hasFailures()) {
return false;
}
if (stub->toGetProp_Fallback()->state().mode() != ICState::Mode::Megamorphic) {
return false;
}
continue;
}
if (stub->isSetProp_Fallback()) {
if (stub->toSetProp_Fallback()->hadUnoptimizableAccess()) {
return false;
}
if (stub->toSetProp_Fallback()->state().mode() != ICState::Mode::Megamorphic) {
if (stub->toFallbackStub()->state().mode() != ICState::Mode::Megamorphic) {
return false;
}
continue;
@ -1487,8 +1467,8 @@ BaselineInspector::commonSetPropFunction(jsbytecode* pc, JSObject** holder, Shap
{
return false;
}
} else if (!stub->isSetProp_Fallback() ||
stub->toSetProp_Fallback()->hadUnoptimizableAccess())
} else if (!stub->isFallback() ||
stub->toFallbackStub()->state().hasFailures())
{
// We have an unoptimizable access, so don't try to optimize.
return false;
@ -1597,7 +1577,7 @@ BaselineInspector::maybeInfoForProtoReadSlot(jsbytecode* pc, ReceiverVector& rec
stub = stub->next();
}
if (stub->toGetProp_Fallback()->hadUnoptimizableAccess()) {
if (stub->toFallbackStub()->state().hasFailures()) {
receivers.clear();
}
@ -1641,28 +1621,18 @@ BaselineInspector::expectedPropertyAccessInputType(jsbytecode* pc)
MIRType type = MIRType::None;
for (ICStub* stub = entry.firstStub(); stub; stub = stub->next()) {
MIRType stubType;
switch (stub->kind()) {
case ICStub::GetProp_Fallback:
if (stub->toGetProp_Fallback()->hadUnoptimizableAccess()) {
return MIRType::Value;
}
continue;
case ICStub::GetElem_Fallback:
if (stub->toGetElem_Fallback()->hadUnoptimizableAccess()) {
return MIRType::Value;
}
continue;
case ICStub::CacheIR_Monitored:
MIRType stubType = MIRType::None;
if (stub->isCacheIR_Monitored()) {
stubType = GetCacheIRExpectedInputType(stub->toCacheIR_Monitored());
if (stubType == MIRType::Value) {
return MIRType::Value;
}
break;
default:
} else if (stub->isGetElem_Fallback() || stub->isGetProp_Fallback()) {
// If we have an unoptimizable access, don't try to optimize.
if (stub->toFallbackStub()->state().hasFailures()) {
return MIRType::Value;
}
} else {
MOZ_CRASH("Unexpected stub");
}
@ -1694,7 +1664,7 @@ BaselineInspector::instanceOfData(jsbytecode* pc, Shape** shape, uint32_t* slot,
if (!firstStub->next() ||
!firstStub->isCacheIR_Regular() ||
!firstStub->next()->isInstanceOf_Fallback() ||
firstStub->next()->toInstanceOf_Fallback()->hadUnoptimizableAccess())
firstStub->next()->toInstanceOf_Fallback()->state().hasFailures())
{
return false;
}

View File

@ -2882,7 +2882,7 @@ BindNameIRGenerator::trackAttached(const char* name)
}
HasPropIRGenerator::HasPropIRGenerator(JSContext* cx, HandleScript script, jsbytecode* pc,
CacheKind cacheKind, ICState::Mode mode,
ICState::Mode mode, CacheKind cacheKind,
HandleValue idVal, HandleValue val)
: IRGenerator(cx, script, pc, cacheKind, mode),
val_(val),

View File

@ -1824,8 +1824,8 @@ class MOZ_RAII HasPropIRGenerator : public IRGenerator
public:
// NOTE: Argument order is PROPERTY, OBJECT
HasPropIRGenerator(JSContext* cx, HandleScript script, jsbytecode* pc, CacheKind cacheKind,
ICState::Mode mode, HandleValue idVal, HandleValue val);
HasPropIRGenerator(JSContext* cx, HandleScript script, jsbytecode* pc, ICState::Mode mode,
CacheKind cacheKind, HandleValue idVal, HandleValue val);
bool tryAttachStub();
};

View File

@ -64,6 +64,7 @@ class ICState
Mode mode() const { return mode_; }
size_t numOptimizedStubs() const { return numOptimizedStubs_; }
bool hasFailures() const { return (numFailures_ != 0); }
MOZ_ALWAYS_INLINE bool canAttachStub() const {
// Note: we cannot assert that numOptimizedStubs_ <= MaxOptimizedStubs
@ -110,7 +111,10 @@ class ICState
// methods, because they are only used by CacheIR ICs.
MOZ_ASSERT(numOptimizedStubs_ < 16);
numOptimizedStubs_++;
numFailures_ = 0;
// As a heuristic, reduce the failure count after each successful attach
// to delay hitting Generic mode. Reset to 1 instead of 0 so that
// BaselineInspector can distinguish no-failures from rare-failures.
numFailures_ = std::min(numFailures_, static_cast<uint8_t>(1));
}
void trackNotAttached() {
// Note: we can't assert numFailures_ < maxFailures() because

View File

@ -480,7 +480,7 @@ IonHasOwnIC::update(JSContext* cx, HandleScript outerScript, IonHasOwnIC* ic,
if (ic->state().canAttachStub()) {
bool attached = false;
RootedScript script(cx, ic->script());
HasPropIRGenerator gen(cx, script, pc, CacheKind::HasOwn, ic->state().mode(), idVal, val);
HasPropIRGenerator gen(cx, script, pc, ic->state().mode(), CacheKind::HasOwn, idVal, val);
if (gen.tryAttachStub()) {
ic->attachCacheIRStub(cx, gen.writerRef(), gen.cacheKind(), ionScript, &attached);
}
@ -514,7 +514,7 @@ IonInIC::update(JSContext* cx, HandleScript outerScript, IonInIC* ic,
RootedScript script(cx, ic->script());
RootedValue objV(cx, ObjectValue(*obj));
jsbytecode* pc = ic->pc();
HasPropIRGenerator gen(cx, script, pc, CacheKind::In, ic->state().mode(), key, objV);
HasPropIRGenerator gen(cx, script, pc, ic->state().mode(), CacheKind::In, key, objV);
if (gen.tryAttachStub()) {
ic->attachCacheIRStub(cx, gen.writerRef(), gen.cacheKind(), ionScript, &attached);
}