Bug 1059364 - Don't emit ObjectGroupDispatch fallback path if we know it's never used. r=bhackett

Jan de Mooij 2015-03-05 15:47:13 +01:00
parent fc6c5d2e40
commit 6a87cf6fe0
5 changed files with 91 additions and 25 deletions
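
For orientation before the diffs: the patch teaches IonBuilder::inlineCalls to notice when the ObjectGroup dispatch it builds already covers every object that can reach the call site, and in that case to skip the fallback path entirely (CodeGenerator then emits only a debug-mode check instead of fallback code). Below is a minimal, self-contained C++ sketch of that coverage decision; ObjectGroup, TableEntry, hasGroup and needsFallback here are invented stand-ins for illustration, not SpiderMonkey's TypeSet or InlinePropertyTable API.

#include <vector>

struct ObjectGroup { int id; };

// One dispatch-table entry per inlineable target; only the group matters here.
struct TableEntry { const ObjectGroup *group; };

// Linear lookup, in the spirit of the new InlinePropertyTable::hasObjectGroup.
static bool hasGroup(const std::vector<TableEntry> &table, const ObjectGroup *group)
{
    for (const TableEntry &entry : table) {
        if (entry.group == group)
            return true;
    }
    return false;
}

// A fallback path is needed only if some group that can flow into the dispatch
// is missing from the table. (The real code also keeps the fallback when an
// observed object key is not a plain group at all.)
static bool needsFallback(const std::vector<TableEntry> &table,
                          const std::vector<const ObjectGroup *> &observedGroups)
{
    for (const ObjectGroup *group : observedGroups) {
        if (!hasGroup(table, group))
            return true;
    }
    return false;
}

int main()
{
    ObjectGroup a{1}, b{2};
    std::vector<TableEntry> table{{&a}, {&b}};
    // Both observed groups are in the table, so no fallback is required.
    return needsFallback(table, {&a, &b}) ? 1 : 0;
}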


@@ -754,12 +754,10 @@ CodeGenerator::visitObjectGroupDispatch(LObjectGroupDispatch *lir)
     Register input = ToRegister(lir->input());
     Register temp = ToRegister(lir->temp());
-    // Hold the incoming ObjectGroup.
+    // Load the incoming ObjectGroup in temp.
     masm.loadPtr(Address(input, JSObject::offsetOfGroup()), temp);
-    // Compare ObjectGroups.
     MacroAssembler::BranchGCPtr lastBranch;
     LBlock *lastBlock = nullptr;
     InlinePropertyTable *propTable = mir->propTable();
@@ -784,7 +782,22 @@ CodeGenerator::visitObjectGroupDispatch(LObjectGroupDispatch *lir)
         MOZ_ASSERT(found);
     }
-    // Unknown function: jump to fallback block.
+    // Jump to fallback block if we have an unknown ObjectGroup. If there's no
+    // fallback block, we should have handled all cases.
+    if (!mir->hasFallback()) {
+        MOZ_ASSERT(lastBranch.isInitialized());
+#ifdef DEBUG
+        Label ok;
+        lastBranch.relink(&ok);
+        lastBranch.emit(masm);
+        masm.assumeUnreachable("Unexpected ObjectGroup");
+        masm.bind(&ok);
+#endif
+        if (!isNextBlock(lastBlock))
+            masm.jump(lastBlock->label());
+        return;
+    }
     LBlock *fallback = skipTrivialBlocks(mir->getFallback())->lir();
     if (!lastBranch.isInitialized()) {
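
The no-fallback branch above still verifies its assumption in debug builds: it emits the final group comparison and asserts if it fails, while release builds simply jump to the last case block. A rough, plain-C++ analogue of that pattern (illustrative only, not MacroAssembler code; Kind, handleA, handleB and dispatchNoFallback are invented):

#include <cstdlib>

enum class Kind { A, B };

static int handleA() { return 1; }
static int handleB() { return 2; }

// Dispatch over a set of kinds known to be exhaustive, so no real fallback
// path is generated.
static int dispatchNoFallback(Kind kind)
{
    if (kind == Kind::A)
        return handleA();
#ifdef DEBUG
    // Debug builds still check the last case instead of assuming it, much
    // like masm.assumeUnreachable("Unexpected ObjectGroup") in the patch.
    if (kind != Kind::B)
        std::abort();
#endif
    return handleB(); // last case taken unconditionally
}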
@@ -2676,9 +2689,7 @@ CodeGenerator::visitPostWriteBarrierO(LPostWriteBarrierO *lir)
     Register temp = ToTempRegisterOrInvalid(lir->temp());
     if (lir->object()->isConstant()) {
-#ifdef DEBUG
         MOZ_ASSERT(!IsInsideNursery(&lir->object()->toConstant()->toObject()));
-#endif
     } else {
         masm.branchPtrInNurseryRange(Assembler::Equal, ToRegister(lir->object()), temp,
                                      ool->rejoin());


@@ -5354,12 +5354,6 @@ IonBuilder::inlineCalls(CallInfo &callInfo, const ObjectVector &targets, BoolVec
     }
     retPhi->reserveLength(count);
-    // During inlining the 'this' value is assigned a type set which is
-    // specialized to the groups which can generate that inlining target.
-    // After inlining the original type set is restored.
-    TemporaryTypeSet *cacheObjectTypeSet =
-        maybeCache ? maybeCache->object()->resultTypeSet() : nullptr;
     // Inline each of the inlineable targets.
     for (uint32_t i = 0; i < targets.length(); i++) {
         // Target must be inlineable.
@@ -5407,12 +5401,16 @@ IonBuilder::inlineCalls(CallInfo &callInfo, const ObjectVector &targets, BoolVec
         inlineInfo.setFun(funcDef);
         if (maybeCache) {
+            // Assign the 'this' value a TypeSet specialized to the groups that
+            // can generate this inlining target.
             MOZ_ASSERT(callInfo.thisArg() == maybeCache->object());
-            TemporaryTypeSet *targetThisTypes =
-                maybeCache->propTable()->buildTypeSetForFunction(target);
-            if (!targetThisTypes)
+            TemporaryTypeSet *thisTypes = maybeCache->propTable()->buildTypeSetForFunction(target);
+            if (!thisTypes)
                 return false;
-            maybeCache->object()->setResultTypeSet(targetThisTypes);
+            MFilterTypeSet *filter = MFilterTypeSet::New(alloc(), inlineInfo.thisArg(), thisTypes);
+            inlineBlock->add(filter);
+            inlineInfo.setThis(filter);
         }
         // Inline the call into the inlineBlock.
@@ -5447,27 +5445,68 @@ IonBuilder::inlineCalls(CallInfo &callInfo, const ObjectVector &targets, BoolVec
     }
     // Patch the InlinePropertyTable to not dispatch to vetoed paths.
+    bool useFallback;
     if (maybeCache) {
-        maybeCache->object()->setResultTypeSet(cacheObjectTypeSet);
         InlinePropertyTable *propTable = maybeCache->propTable();
         propTable->trimTo(targets, choiceSet);
-        // If all paths were vetoed, output only a generic fallback path.
         if (propTable->numEntries() == 0) {
+            // If all paths were vetoed, output only a generic fallback path.
             MOZ_ASSERT(dispatch->numCases() == 0);
             maybeCache = nullptr;
+            useFallback = true;
+        } else {
+            // We need a fallback path if the ObjectGroup dispatch does not
+            // handle all incoming objects.
+            useFallback = false;
+            TemporaryTypeSet *objectTypes = maybeCache->object()->resultTypeSet();
+            for (uint32_t i = 0; i < objectTypes->getObjectCount(); i++) {
+                TypeSet::ObjectKey *obj = objectTypes->getObject(i);
+                if (!obj)
+                    continue;
+                if (!obj->isGroup()) {
+                    useFallback = true;
+                    break;
+                }
+                if (!propTable->hasObjectGroup(obj->group())) {
+                    useFallback = true;
+                    break;
+                }
+            }
+            if (!useFallback) {
+                // The object group dispatch handles all possible incoming
+                // objects, so the cache and barrier will not be reached and
+                // can be eliminated.
+                if (callInfo.fun()->isGetPropertyCache()) {
+                    MOZ_ASSERT(callInfo.fun() == maybeCache);
+                } else {
+                    MTypeBarrier *barrier = callInfo.fun()->toTypeBarrier();
+                    MOZ_ASSERT(!barrier->hasUses());
+                    MOZ_ASSERT(barrier->type() == MIRType_Object);
+                    MOZ_ASSERT(barrier->input()->isGetPropertyCache());
+                    MOZ_ASSERT(barrier->input()->toGetPropertyCache() == maybeCache);
+                    barrier->block()->discard(barrier);
+                }
+                MOZ_ASSERT(!maybeCache->hasUses());
+                maybeCache->block()->discard(maybeCache);
+            }
         }
+    } else {
+        useFallback = dispatch->numCases() < targets.length();
     }
     // If necessary, generate a fallback path.
-    // MObjectGroupDispatch always uses a fallback path.
-    if (maybeCache || dispatch->numCases() < targets.length()) {
+    if (useFallback) {
         // Generate fallback blocks, and set |current| to the fallback return block.
         if (maybeCache) {
             MBasicBlock *fallbackTarget;
-            if (!inlineObjectGroupFallback(callInfo, dispatchBlock, (MObjectGroupDispatch *)dispatch,
-                                           maybeCache, &fallbackTarget))
+            if (!inlineObjectGroupFallback(callInfo, dispatchBlock,
+                                           dispatch->toObjectGroupDispatch(),
+                                           maybeCache, &fallbackTarget))
             {
                 return false;
             }
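
Besides the fallback decision, the hunk above changes how each inlined body gets its specialized |this| type: rather than temporarily overwriting the cache's result type set (the removed cacheObjectTypeSet dance), it builds a type set per target and pins it with an MFilterTypeSet node. A small sketch of that per-target set construction, in the spirit of buildTypeSetForFunction (illustrative only; Group, Target, Entry and groupsForTarget are invented names, not the real API):

#include <vector>

struct Group  { int id; };
struct Target { int id; };

// One dispatch-table entry: objects of |group| inline into |target|.
struct Entry { const Group *group; const Target *target; };

// Collect the groups that dispatch to one target, so the inlined body can
// assume a |this| narrowed to exactly those groups.
static std::vector<const Group *>
groupsForTarget(const std::vector<Entry> &table, const Target *target)
{
    std::vector<const Group *> groups;
    for (const Entry &entry : table) {
        if (entry.target == target)
            groups.push_back(entry.group);
    }
    return groups;
}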


@@ -2154,7 +2154,7 @@ class LFunctionDispatch : public LInstructionHelper<0, 1, 0>
         setOperand(0, in);
     }
-    MFunctionDispatch *mir() {
+    MFunctionDispatch *mir() const {
         return mir_->toFunctionDispatch();
     }
 };
@@ -2167,6 +2167,10 @@ class LObjectGroupDispatch : public LInstructionHelper<0, 1, 1>
   public:
     LIR_HEADER(ObjectGroupDispatch);
+    const char *extraName() const {
+        return mir()->hasFallback() ? "HasFallback" : "NoFallback";
+    }
     LObjectGroupDispatch(const LAllocation &in, const LDefinition &temp) {
         setOperand(0, in);
         setTemp(0, temp);
@@ -2176,7 +2180,7 @@ class LObjectGroupDispatch : public LInstructionHelper<0, 1, 1>
         return getTemp(0);
     }
-    MObjectGroupDispatch *mir() {
+    MObjectGroupDispatch *mir() const {
         return mir_->toObjectGroupDispatch();
     }
 };


@@ -4189,6 +4189,16 @@ InlinePropertyTable::hasFunction(JSFunction *func) const
     return false;
 }
+bool
+InlinePropertyTable::hasObjectGroup(ObjectGroup *group) const
+{
+    for (size_t i = 0; i < numEntries(); i++) {
+        if (entries_[i]->group == group)
+            return true;
+    }
+    return false;
+}
 TemporaryTypeSet *
 InlinePropertyTable::buildTypeSetForFunction(JSFunction *func) const
 {


@@ -9526,6 +9526,8 @@ class InlinePropertyTable : public TempObject
     }
     bool hasFunction(JSFunction *func) const;
+    bool hasObjectGroup(ObjectGroup *group) const;
     TemporaryTypeSet *buildTypeSetForFunction(JSFunction *func) const;
     // Remove targets that vetoed inlining from the InlinePropertyTable.