[RS4GC] "Constant fold" the rs4gc-split-vector-values flag
This flag was part of a migration to a new means of handling vectors-of-points which was described in the llvm-dev thread "FYI: Relocating vector of pointers". The old code path has been off by default for a while without complaints, so time to cleanup. llvm-svn: 261569
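For context, "constant folding" a cl::opt flag simply means deleting the flag and any code that is only reachable when the flag is set away from its default. Below is a minimal sketch of that pattern with hypothetical names (UseOldPath, doLegacyLowering, doNewLowering) rather than the actual pass code; only cl::opt, cl::Hidden, and cl::init are real LLVM CommandLine API.

#include "llvm/Support/CommandLine.h"
using namespace llvm;

// Hypothetical migration flag, analogous in shape to UseVectorSplit below.
static cl::opt<bool> UseOldPath("example-use-old-path", cl::Hidden,
                                cl::init(false));

void doLegacyLowering() {} // stand-in for the old, flag-guarded code path
void doNewLowering() {}    // stand-in for the replacement path

// Before the cleanup: the legacy path survives behind an off-by-default flag.
void runBefore() {
  if (UseOldPath)
    doLegacyLowering();
  doNewLowering();
}

// After "constant folding" the flag at its default (false): both the cl::opt
// and the guarded call are deleted, which is the shape of the diff below.
void runAfter() {
  doNewLowering();
}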
This commit is contained in:
parent 369e0e389f
commit f10b87b138
@@ -75,13 +75,6 @@ static cl::opt<bool>
    AllowStatepointWithNoDeoptInfo("rs4gc-allow-statepoint-with-no-deopt-info",
                                   cl::Hidden, cl::init(true));

/// Should we split vectors of pointers into their individual elements? This
/// is known to be buggy, but the alternate implementation isn't yet ready.
/// This is purely to provide a debugging and diagnostic hook until the vector
/// split is replaced with vector relocations.
static cl::opt<bool> UseVectorSplit("rs4gc-split-vector-values", cl::Hidden,
                                    cl::init(false));

namespace {
struct RewriteStatepointsForGC : public ModulePass {
  static char ID; // Pass identification, replacement for typeid
@@ -1819,139 +1812,6 @@ static void findLiveReferences(
  }
}

/// Remove any vector of pointers from the live set by scalarizing them over the
/// statepoint instruction. Adds the scalarized pieces to the live set. It
/// would be preferable to include the vector in the statepoint itself, but
/// the lowering code currently does not handle that. Extending it would be
/// slightly non-trivial since it requires a format change. Given how rare
/// such cases are (for the moment?) scalarizing is an acceptable compromise.
static void splitVectorValues(Instruction *StatepointInst,
                              StatepointLiveSetTy &LiveSet,
                              DenseMap<Value *, Value *>& PointerToBase,
                              DominatorTree &DT) {
  SmallVector<Value *, 16> ToSplit;
  for (Value *V : LiveSet)
    if (isa<VectorType>(V->getType()))
      ToSplit.push_back(V);

  if (ToSplit.empty())
    return;

  DenseMap<Value *, SmallVector<Value *, 16>> ElementMapping;

  Function &F = *(StatepointInst->getParent()->getParent());

  DenseMap<Value *, AllocaInst *> AllocaMap;
  // First is normal return, second is exceptional return (invoke only)
  DenseMap<Value *, std::pair<Value *, Value *>> Replacements;
  for (Value *V : ToSplit) {
    AllocaInst *Alloca =
        new AllocaInst(V->getType(), "", F.getEntryBlock().getFirstNonPHI());
    AllocaMap[V] = Alloca;

    VectorType *VT = cast<VectorType>(V->getType());
    IRBuilder<> Builder(StatepointInst);
    SmallVector<Value *, 16> Elements;
    for (unsigned i = 0; i < VT->getNumElements(); i++)
      Elements.push_back(Builder.CreateExtractElement(V, Builder.getInt32(i)));
    ElementMapping[V] = Elements;

    auto InsertVectorReform = [&](Instruction *IP) {
      Builder.SetInsertPoint(IP);
      Builder.SetCurrentDebugLocation(IP->getDebugLoc());
      Value *ResultVec = UndefValue::get(VT);
      for (unsigned i = 0; i < VT->getNumElements(); i++)
        ResultVec = Builder.CreateInsertElement(ResultVec, Elements[i],
                                                Builder.getInt32(i));
      return ResultVec;
    };

    if (isa<CallInst>(StatepointInst)) {
      BasicBlock::iterator Next(StatepointInst);
      Next++;
      Instruction *IP = &*(Next);
      Replacements[V].first = InsertVectorReform(IP);
      Replacements[V].second = nullptr;
    } else {
      InvokeInst *Invoke = cast<InvokeInst>(StatepointInst);
      // We've already normalized - check that we don't have shared destination
      // blocks
      BasicBlock *NormalDest = Invoke->getNormalDest();
      assert(!isa<PHINode>(NormalDest->begin()));
      BasicBlock *UnwindDest = Invoke->getUnwindDest();
      assert(!isa<PHINode>(UnwindDest->begin()));
      // Insert insert element sequences in both successors
      Instruction *IP = &*(NormalDest->getFirstInsertionPt());
      Replacements[V].first = InsertVectorReform(IP);
      IP = &*(UnwindDest->getFirstInsertionPt());
      Replacements[V].second = InsertVectorReform(IP);
    }
  }

  for (Value *V : ToSplit) {
    AllocaInst *Alloca = AllocaMap[V];

    // Capture all users before we start mutating use lists
    SmallVector<Instruction *, 16> Users;
    for (User *U : V->users())
      Users.push_back(cast<Instruction>(U));

    for (Instruction *I : Users) {
      if (auto Phi = dyn_cast<PHINode>(I)) {
        for (unsigned i = 0; i < Phi->getNumIncomingValues(); i++)
          if (V == Phi->getIncomingValue(i)) {
            LoadInst *Load = new LoadInst(
                Alloca, "", Phi->getIncomingBlock(i)->getTerminator());
            Phi->setIncomingValue(i, Load);
          }
      } else {
        LoadInst *Load = new LoadInst(Alloca, "", I);
        I->replaceUsesOfWith(V, Load);
      }
    }

    // Store the original value and the replacement value into the alloca
    StoreInst *Store = new StoreInst(V, Alloca);
    if (auto I = dyn_cast<Instruction>(V))
      Store->insertAfter(I);
    else
      Store->insertAfter(Alloca);

    // Normal return for invoke, or call return
    Instruction *Replacement = cast<Instruction>(Replacements[V].first);
    (new StoreInst(Replacement, Alloca))->insertAfter(Replacement);
    // Unwind return for invoke only
    Replacement = cast_or_null<Instruction>(Replacements[V].second);
    if (Replacement)
      (new StoreInst(Replacement, Alloca))->insertAfter(Replacement);
  }

  // apply mem2reg to promote alloca to SSA
  SmallVector<AllocaInst *, 16> Allocas;
  for (Value *V : ToSplit)
    Allocas.push_back(AllocaMap[V]);
  PromoteMemToReg(Allocas, DT);

  // Update our tracking of live pointers and base mappings to account for the
  // changes we just made.
  for (Value *V : ToSplit) {
    auto &Elements = ElementMapping[V];

    LiveSet.erase(V);
    LiveSet.insert(Elements.begin(), Elements.end());
    // We need to update the base mapping as well.
    assert(PointerToBase.count(V));
    Value *OldBase = PointerToBase[V];
    auto &BaseElements = ElementMapping[OldBase];
    PointerToBase.erase(V);
    assert(Elements.size() == BaseElements.size());
    for (unsigned i = 0; i < Elements.size(); i++) {
      Value *Elem = Elements[i];
      PointerToBase[Elem] = BaseElements[i];
    }
  }
}

// Helper function for the "rematerializeLiveValues". It walks use chain
// starting from the "CurrentValue" until it meets "BaseValue". Only "simple"
// values are visited (currently it is GEP's and casts). Returns true if it
@@ -2268,22 +2128,6 @@ static bool insertParsePoints(Function &F, DominatorTree &DT,

  Holders.clear();

  // Do a limited scalarization of any live at safepoint vector values which
  // contain pointers. This enables this pass to run after vectorization at
  // the cost of some possible performance loss. Note: This is known to not
  // handle updating of the side tables correctly which can lead to relocation
  // bugs when the same vector is live at multiple statepoints. We're in the
  // process of implementing the alternate lowering - relocating the
  // vector-of-pointers as first class item and updating the backend to
  // understand that - but that's not yet complete.
  if (UseVectorSplit)
    for (size_t i = 0; i < Records.size(); i++) {
      PartiallyConstructedSafepointRecord &Info = Records[i];
      Instruction *Statepoint = ToUpdate[i].getInstruction();
      splitVectorValues(cast<Instruction>(Statepoint), Info.LiveSet,
                        Info.PointerToBase, DT);
    }

  // In order to reduce live set of statepoint we might choose to rematerialize
  // some values instead of relocating them. This is purely an optimization and
  // does not influence correctness.

@@ -1,6 +1,6 @@
; Test that we can correctly handle vectors of pointers in statepoint
; rewriting.
; RUN: opt < %s -rewrite-statepoints-for-gc -rs4gc-split-vector-values=0 -S | FileCheck %s
; RUN: opt < %s -rewrite-statepoints-for-gc -S | FileCheck %s

; A non-vector relocation for comparison
define i64 addrspace(1)* @test(i64 addrspace(1)* %obj) gc "statepoint-example" {

@@ -1,167 +0,0 @@
; Test that we can correctly handle vectors of pointers in statepoint
; rewriting. Currently, we scalarize, but that's an implementation detail.
; RUN: opt < %s -rewrite-statepoints-for-gc -rs4gc-split-vector-values -S | FileCheck %s

; A non-vector relocation for comparison

define i64 addrspace(1)* @test(i64 addrspace(1)* %obj) gc "statepoint-example" {
; CHECK-LABEL: test
; CHECK: gc.statepoint
; CHECK-NEXT: gc.relocate
; CHECK-NEXT: bitcast
; CHECK-NEXT: ret i64 addrspace(1)* %obj.relocated.casted
; A base vector from an argument
entry:
  call void @do_safepoint() [ "deopt"() ]
  ret i64 addrspace(1)* %obj
}

define <2 x i64 addrspace(1)*> @test2(<2 x i64 addrspace(1)*> %obj) gc "statepoint-example" {
; CHECK-LABEL: test2
; CHECK: extractelement
; CHECK-NEXT: extractelement
; CHECK-NEXT: gc.statepoint
; CHECK-NEXT: gc.relocate
; CHECK-NEXT: bitcast
; CHECK-NEXT: gc.relocate
; CHECK-NEXT: bitcast
; CHECK-NEXT: insertelement
; CHECK-NEXT: insertelement
; CHECK-NEXT: ret <2 x i64 addrspace(1)*> %7
; A base vector from a load
entry:
  call void @do_safepoint() [ "deopt"() ]
  ret <2 x i64 addrspace(1)*> %obj
}

define <2 x i64 addrspace(1)*> @test3(<2 x i64 addrspace(1)*>* %ptr) gc "statepoint-example" {
; CHECK-LABEL: test3
; CHECK: load
; CHECK-NEXT: extractelement
; CHECK-NEXT: extractelement
; CHECK-NEXT: gc.statepoint
; CHECK-NEXT: gc.relocate
; CHECK-NEXT: bitcast
; CHECK-NEXT: gc.relocate
; CHECK-NEXT: bitcast
; CHECK-NEXT: insertelement
; CHECK-NEXT: insertelement
; CHECK-NEXT: ret <2 x i64 addrspace(1)*> %7
; When a statepoint is an invoke rather than a call
entry:
  %obj = load <2 x i64 addrspace(1)*>, <2 x i64 addrspace(1)*>* %ptr
  call void @do_safepoint() [ "deopt"() ]
  ret <2 x i64 addrspace(1)*> %obj
}

declare i32 @fake_personality_function()

define <2 x i64 addrspace(1)*> @test4(<2 x i64 addrspace(1)*>* %ptr) gc "statepoint-example" personality i32 ()* @fake_personality_function {
; CHECK-LABEL: test4
; CHECK: load
; CHECK-NEXT: extractelement
; CHECK-NEXT: extractelement
; CHECK-NEXT: gc.statepoint
entry:
  %obj = load <2 x i64 addrspace(1)*>, <2 x i64 addrspace(1)*>* %ptr
  invoke void @do_safepoint() [ "deopt"() ]
          to label %normal_return unwind label %exceptional_return

normal_return: ; preds = %entry
; CHECK-LABEL: normal_return:
; CHECK: gc.relocate
; CHECK-NEXT: bitcast
; CHECK-NEXT: gc.relocate
; CHECK-NEXT: bitcast
; CHECK-NEXT: insertelement
; CHECK-NEXT: insertelement
; CHECK-NEXT: ret <2 x i64 addrspace(1)*> %7
  ret <2 x i64 addrspace(1)*> %obj

exceptional_return: ; preds = %entry
; CHECK-LABEL: exceptional_return:
; CHECK: gc.relocate
; CHECK-NEXT: bitcast
; CHECK-NEXT: gc.relocate
; CHECK-NEXT: bitcast
; CHECK-NEXT: insertelement
; CHECK-NEXT: insertelement
; CHECK-NEXT: ret <2 x i64 addrspace(1)*> %13
; Can we handle an insert element with a constant offset? This effectively
; tests both the equal and inequal case since we have to relocate both indices
; in the vector.
  %landing_pad4 = landingpad token
          cleanup
  ret <2 x i64 addrspace(1)*> %obj
}

define <2 x i64 addrspace(1)*> @test5(i64 addrspace(1)* %p) gc "statepoint-example" {
; CHECK-LABEL: test5
; CHECK: insertelement
; CHECK-NEXT: insertelement
; CHECK-NEXT: extractelement
; CHECK-NEXT: extractelement
; CHECK-NEXT: extractelement
; CHECK-NEXT: extractelement
; CHECK-NEXT: gc.statepoint
; CHECK-NEXT: gc.relocate
; CHECK-NEXT: bitcast
; CHECK-NEXT: gc.relocate
; CHECK-NEXT: bitcast
; CHECK-NEXT: gc.relocate
; CHECK-NEXT: bitcast
; CHECK-NEXT: gc.relocate
; CHECK-NEXT: bitcast
; CHECK-NEXT: insertelement
; CHECK-NEXT: insertelement
; CHECK-NEXT: insertelement
; CHECK-NEXT: insertelement
; CHECK-NEXT: ret <2 x i64 addrspace(1)*>
; A base vector from a load
entry:
  %vec = insertelement <2 x i64 addrspace(1)*> undef, i64 addrspace(1)* %p, i32 0
  call void @do_safepoint() [ "deopt"() ]
  ret <2 x i64 addrspace(1)*> %vec
}

define <2 x i64 addrspace(1)*> @test6(i1 %cnd, <2 x i64 addrspace(1)*>* %ptr) gc "statepoint-example" {
; CHECK-LABEL: test6
entry:
  br i1 %cnd, label %taken, label %untaken

taken: ; preds = %entry
  %obja = load <2 x i64 addrspace(1)*>, <2 x i64 addrspace(1)*>* %ptr
  br label %merge

untaken: ; preds = %entry
  %objb = load <2 x i64 addrspace(1)*>, <2 x i64 addrspace(1)*>* %ptr
  br label %merge

merge: ; preds = %untaken, %taken
; CHECK-LABEL: merge:
; CHECK-NEXT: = phi
; CHECK-NEXT: = phi
; CHECK-NEXT: extractelement
; CHECK-NEXT: extractelement
; CHECK-NEXT: extractelement
; CHECK-NEXT: extractelement
; CHECK-NEXT: gc.statepoint
; CHECK-NEXT: gc.relocate
; CHECK-NEXT: bitcast
; CHECK-NEXT: gc.relocate
; CHECK-NEXT: bitcast
; CHECK-NEXT: gc.relocate
; CHECK-NEXT: bitcast
; CHECK-NEXT: gc.relocate
; CHECK-NEXT: bitcast
; CHECK-NEXT: insertelement
; CHECK-NEXT: insertelement
; CHECK-NEXT: insertelement
; CHECK-NEXT: insertelement
; CHECK-NEXT: ret <2 x i64 addrspace(1)*>
  %obj = phi <2 x i64 addrspace(1)*> [ %obja, %taken ], [ %objb, %untaken ]
  call void @do_safepoint() [ "deopt"() ]
  ret <2 x i64 addrspace(1)*> %obj
}

declare void @do_safepoint()