mirror of
https://github.com/RPCSX/llvm.git
synced 2025-02-01 01:54:21 +00:00
move some functions, add a comment.
git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@82444 91177308-0d34-0410-b5e6-96231b3b80d8
This commit is contained in:
parent
1ce0829e83
commit
ca74940fee
@ -1022,9 +1022,173 @@ static Value *CoerceAvailableValueToLoadType(Value *StoredVal,
|
||||
return new BitCastInst(StoredVal, LoadedTy, "bitcast", InsertPt);
|
||||
}
|
||||
|
||||
/// GetBaseWithConstantOffset - Analyze the specified pointer to see if it can
|
||||
/// be expressed as a base pointer plus a constant offset. Return the base and
|
||||
/// offset to the caller.
|
||||
static Value *GetBaseWithConstantOffset(Value *Ptr, int64_t &Offset,
|
||||
const TargetData *TD) {
|
||||
Operator *PtrOp = dyn_cast<Operator>(Ptr);
|
||||
if (PtrOp == 0) return Ptr;
|
||||
|
||||
// Just look through bitcasts.
|
||||
if (PtrOp->getOpcode() == Instruction::BitCast)
|
||||
return GetBaseWithConstantOffset(PtrOp->getOperand(0), Offset, TD);
|
||||
|
||||
// If this is a GEP with constant indices, we can look through it.
|
||||
GEPOperator *GEP = dyn_cast<GEPOperator>(PtrOp);
|
||||
if (GEP == 0 || !GEP->hasAllConstantIndices()) return Ptr;
|
||||
|
||||
gep_type_iterator GTI = gep_type_begin(GEP);
|
||||
for (User::op_iterator I = GEP->idx_begin(), E = GEP->idx_end(); I != E;
|
||||
++I, ++GTI) {
|
||||
ConstantInt *OpC = cast<ConstantInt>(*I);
|
||||
if (OpC->isZero()) continue;
|
||||
|
||||
// Handle a struct and array indices which add their offset to the pointer.
|
||||
if (const StructType *STy = dyn_cast<StructType>(*GTI)) {
|
||||
Offset += TD->getStructLayout(STy)->getElementOffset(OpC->getZExtValue());
|
||||
} else {
|
||||
uint64_t Size = TD->getTypeAllocSize(GTI.getIndexedType());
|
||||
Offset += OpC->getSExtValue()*Size;
|
||||
}
|
||||
}
|
||||
|
||||
// Re-sign extend from the pointer size if needed to get overflow edge cases
|
||||
// right.
|
||||
unsigned PtrSize = TD->getPointerSizeInBits();
|
||||
if (PtrSize < 64)
|
||||
Offset = (Offset << (64-PtrSize)) >> (64-PtrSize);
|
||||
|
||||
return GetBaseWithConstantOffset(GEP->getPointerOperand(), Offset, TD);
|
||||
}
|
||||
|
||||
|
||||
/// AnalyzeLoadFromClobberingStore - This function is called when we have a
/// memdep query of a load that ends up being a clobbering store.  This means
/// that the store *may* provide bits used by the load but we can't be sure
/// because the pointers don't mustalias.  Check this case to see if there is
/// anything more we can do before we give up.  This returns -1 if we have to
/// give up, or a byte number in the stored value of the piece that feeds the
/// load.
static int AnalyzeLoadFromClobberingStore(LoadInst *L, StoreInst *DepSI,
                                          const TargetData *TD) {
  // Decompose both pointers into (base, constant byte offset).  If the bases
  // differ, we cannot reason about the overlap at all.
  int64_t StoreOffset = 0, LoadOffset = 0;
  Value *StoreBase =
    GetBaseWithConstantOffset(DepSI->getPointerOperand(), StoreOffset, TD);
  Value *LoadBase =
    GetBaseWithConstantOffset(L->getPointerOperand(), LoadOffset, TD);
  if (StoreBase != LoadBase)
    return -1;

  // If the load and store are to the exact same address, they should have been
  // a must alias.  AA must have gotten confused.
  // FIXME: Study to see if/when this happens.
  if (LoadOffset == StoreOffset) {
#if 0
    errs() << "STORE/LOAD DEP WITH COMMON POINTER MISSED:\n"
    << "Base = " << *StoreBase << "\n"
    << "Store Ptr = " << *DepSI->getPointerOperand() << "\n"
    << "Store Offs = " << StoreOffset << " - " << *DepSI << "\n"
    << "Load Ptr = " << *L->getPointerOperand() << "\n"
    << "Load Offs = " << LoadOffset << " - " << *L << "\n\n";
    errs() << "'" << L->getParent()->getParent()->getName() << "'"
    << *L->getParent();
#endif
    return -1;
  }

  // If the load and store don't overlap at all, the store doesn't provide
  // anything to the load.  In this case, they really don't alias at all, AA
  // must have gotten confused.
  // FIXME: Investigate cases where this bails out, e.g. rdar://7238614. Then
  // remove this check, as it is duplicated with what we have below.
  uint64_t StoreSize = TD->getTypeSizeInBits(DepSI->getOperand(0)->getType());
  uint64_t LoadSize = TD->getTypeSizeInBits(L->getType());

  // Bail out unless both sizes are a whole number of bytes; the byte-offset
  // arithmetic below only works on byte quantities.  (The bitwise '|' is a
  // branch-free way of testing "either low-3-bit remainder is nonzero".)
  if ((StoreSize & 7) | (LoadSize & 7))
    return -1;
  StoreSize >>= 3;  // Convert to bytes.
  LoadSize >>= 3;

  // isAAFailure is set if the byte ranges [StoreOffset, StoreOffset+StoreSize)
  // and [LoadOffset, LoadOffset+LoadSize) are provably disjoint.
  bool isAAFailure = false;
  if (StoreOffset < LoadOffset) {
    isAAFailure = StoreOffset+int64_t(StoreSize) <= LoadOffset;
  } else {
    isAAFailure = LoadOffset+int64_t(LoadSize) <= StoreOffset;
  }
  if (isAAFailure) {
#if 0
    errs() << "STORE LOAD DEP WITH COMMON BASE:\n"
    << "Base = " << *StoreBase << "\n"
    << "Store Ptr = " << *DepSI->getPointerOperand() << "\n"
    << "Store Offs = " << StoreOffset << " - " << *DepSI << "\n"
    << "Load Ptr = " << *L->getPointerOperand() << "\n"
    << "Load Offs = " << LoadOffset << " - " << *L << "\n\n";
    errs() << "'" << L->getParent()->getParent()->getName() << "'"
    << *L->getParent();
#endif
    return -1;
  }

  // If the Load isn't completely contained within the stored bits, we don't
  // have all the bits to feed it.  We could do something crazy in the future
  // (issue a smaller load then merge the bits in) but this seems unlikely to be
  // valuable.
  if (StoreOffset > LoadOffset ||
      StoreOffset+StoreSize < LoadOffset+LoadSize)
    return -1;

  // Okay, we can do this transformation.  Return the number of bytes into the
  // store that the load is.
  return LoadOffset-StoreOffset;
}
|
||||
|
||||
|
||||
/// GetStoreValueForLoad - This function is called when we have a
|
||||
/// memdep query of a load that ends up being a clobbering store. This means
|
||||
/// that the store *may* provide bits used by the load but we can't be sure
|
||||
/// because the pointers don't mustalias. Check this case to see if there is
|
||||
/// anything more we can do before we give up.
|
||||
static Value *GetStoreValueForLoad(Value *SrcVal, int Offset,const Type *LoadTy,
|
||||
Instruction *InsertPt, const TargetData *TD){
|
||||
LLVMContext &Ctx = SrcVal->getType()->getContext();
|
||||
|
||||
uint64_t StoreSize = TD->getTypeSizeInBits(SrcVal->getType())/8;
|
||||
uint64_t LoadSize = TD->getTypeSizeInBits(LoadTy)/8;
|
||||
|
||||
|
||||
// Compute which bits of the stored value are being used by the load. Convert
|
||||
// to an integer type to start with.
|
||||
if (isa<PointerType>(SrcVal->getType()))
|
||||
SrcVal = new PtrToIntInst(SrcVal, TD->getIntPtrType(Ctx), "tmp", InsertPt);
|
||||
if (!isa<IntegerType>(SrcVal->getType()))
|
||||
SrcVal = new BitCastInst(SrcVal, IntegerType::get(Ctx, StoreSize*8),
|
||||
"tmp", InsertPt);
|
||||
|
||||
// Shift the bits to the least significant depending on endianness.
|
||||
unsigned ShiftAmt;
|
||||
if (TD->isLittleEndian()) {
|
||||
ShiftAmt = Offset*8;
|
||||
} else {
|
||||
ShiftAmt = StoreSize-LoadSize-Offset;
|
||||
}
|
||||
|
||||
SrcVal = BinaryOperator::CreateLShr(SrcVal,
|
||||
ConstantInt::get(SrcVal->getType(), ShiftAmt), "tmp", InsertPt);
|
||||
|
||||
SrcVal = new TruncInst(SrcVal, IntegerType::get(Ctx, LoadSize*8),
|
||||
"tmp", InsertPt);
|
||||
|
||||
return CoerceAvailableValueToLoadType(SrcVal, LoadTy, InsertPt, *TD);
|
||||
}
|
||||
|
||||
/// GetAvailableBlockValues - Given the ValuesPerBlock list, convert all of the
|
||||
/// available values to values of the expected LoadTy in their blocks and insert
|
||||
/// the new values into BlockReplValues.
|
||||
static void
|
||||
GetAvailableBlockValues(DenseMap<BasicBlock*, Value*> &BlockReplValues,
|
||||
SmallVector<std::pair<BasicBlock*,
|
||||
const SmallVector<std::pair<BasicBlock*,
|
||||
Value*>, 16> &ValuesPerBlock,
|
||||
const Type *LoadTy,
|
||||
const TargetData *TD) {
|
||||
@ -1343,169 +1507,6 @@ bool GVN::processNonLocalLoad(LoadInst *LI,
|
||||
return true;
|
||||
}
|
||||
|
||||
/// GetBaseWithConstantOffset - Analyze the specified pointer to see if it can
/// be expressed as a base pointer plus a constant offset.  Return the base and
/// offset to the caller.
static Value *GetBaseWithConstantOffset(Value *Ptr, int64_t &Offset,
                                        const TargetData *TD) {
  // Anything that isn't an Operator (instruction or constant expression) is
  // opaque to this analysis: it is its own base.
  Operator *PtrOp = dyn_cast<Operator>(Ptr);
  if (PtrOp == 0) return Ptr;

  // Just look through bitcasts; they never change the address.
  if (PtrOp->getOpcode() == Instruction::BitCast)
    return GetBaseWithConstantOffset(PtrOp->getOperand(0), Offset, TD);

  // If this is a GEP with constant indices, we can look through it.
  GEPOperator *GEP = dyn_cast<GEPOperator>(PtrOp);
  if (GEP == 0 || !GEP->hasAllConstantIndices()) return Ptr;

  // Accumulate the byte offset contributed by each constant index.
  gep_type_iterator GTI = gep_type_begin(GEP);
  for (User::op_iterator I = GEP->idx_begin(), E = GEP->idx_end(); I != E;
       ++I, ++GTI) {
    ConstantInt *OpC = cast<ConstantInt>(*I);
    if (OpC->isZero()) continue;

    // Handle a struct and array indices which add their offset to the pointer.
    // Struct field numbers are unsigned; array indices are signed and scale by
    // the element's allocation size.
    if (const StructType *STy = dyn_cast<StructType>(*GTI)) {
      Offset += TD->getStructLayout(STy)->getElementOffset(OpC->getZExtValue());
    } else {
      uint64_t Size = TD->getTypeAllocSize(GTI.getIndexedType());
      Offset += OpC->getSExtValue()*Size;
    }
  }

  // Re-sign extend from the pointer size if needed to get overflow edge cases
  // right.
  unsigned PtrSize = TD->getPointerSizeInBits();
  if (PtrSize < 64)
    Offset = (Offset << (64-PtrSize)) >> (64-PtrSize);

  // Recurse through the GEP's pointer operand, which may itself be a bitcast
  // or another constant-index GEP.
  return GetBaseWithConstantOffset(GEP->getPointerOperand(), Offset, TD);
}
|
||||
|
||||
|
||||
/// AnalyzeLoadFromClobberingStore - This function is called when we have a
/// memdep query of a load that ends up being a clobbering store.  This means
/// that the store *may* provide bits used by the load but we can't be sure
/// because the pointers don't mustalias.  Check this case to see if there is
/// anything more we can do before we give up.  This returns -1 if we have to
/// give up, or a byte number in the stored value of the piece that feeds the
/// load.
static int AnalyzeLoadFromClobberingStore(LoadInst *L, StoreInst *DepSI,
                                          const TargetData *TD) {
  // Decompose both pointers into (base, constant byte offset); overlap can
  // only be reasoned about when both share the same base.
  int64_t StoreOffset = 0, LoadOffset = 0;
  Value *StoreBase =
    GetBaseWithConstantOffset(DepSI->getPointerOperand(), StoreOffset, TD);
  Value *LoadBase =
    GetBaseWithConstantOffset(L->getPointerOperand(), LoadOffset, TD);
  if (StoreBase != LoadBase)
    return -1;

  // If the load and store are to the exact same address, they should have been
  // a must alias.  AA must have gotten confused.
  // FIXME: Study to see if/when this happens.
  if (LoadOffset == StoreOffset) {
#if 0
    errs() << "STORE/LOAD DEP WITH COMMON POINTER MISSED:\n"
    << "Base = " << *StoreBase << "\n"
    << "Store Ptr = " << *DepSI->getPointerOperand() << "\n"
    << "Store Offs = " << StoreOffset << " - " << *DepSI << "\n"
    << "Load Ptr = " << *L->getPointerOperand() << "\n"
    << "Load Offs = " << LoadOffset << " - " << *L << "\n\n";
    errs() << "'" << L->getParent()->getParent()->getName() << "'"
    << *L->getParent();
#endif
    return -1;
  }

  // If the load and store don't overlap at all, the store doesn't provide
  // anything to the load.  In this case, they really don't alias at all, AA
  // must have gotten confused.
  // FIXME: Investigate cases where this bails out, e.g. rdar://7238614. Then
  // remove this check, as it is duplicated with what we have below.
  uint64_t StoreSize = TD->getTypeSizeInBits(DepSI->getOperand(0)->getType());
  uint64_t LoadSize = TD->getTypeSizeInBits(L->getType());

  // The byte-offset arithmetic below only works when both sizes are a whole
  // number of bytes; the bitwise '|' tests both remainders at once.
  if ((StoreSize & 7) | (LoadSize & 7))
    return -1;
  StoreSize >>= 3;  // Convert to bytes.
  LoadSize >>= 3;

  // isAAFailure is set if the byte ranges [StoreOffset, StoreOffset+StoreSize)
  // and [LoadOffset, LoadOffset+LoadSize) are provably disjoint.
  bool isAAFailure = false;
  if (StoreOffset < LoadOffset) {
    isAAFailure = StoreOffset+int64_t(StoreSize) <= LoadOffset;
  } else {
    isAAFailure = LoadOffset+int64_t(LoadSize) <= StoreOffset;
  }
  if (isAAFailure) {
#if 0
    errs() << "STORE LOAD DEP WITH COMMON BASE:\n"
    << "Base = " << *StoreBase << "\n"
    << "Store Ptr = " << *DepSI->getPointerOperand() << "\n"
    << "Store Offs = " << StoreOffset << " - " << *DepSI << "\n"
    << "Load Ptr = " << *L->getPointerOperand() << "\n"
    << "Load Offs = " << LoadOffset << " - " << *L << "\n\n";
    errs() << "'" << L->getParent()->getParent()->getName() << "'"
    << *L->getParent();
#endif
    return -1;
  }

  // If the Load isn't completely contained within the stored bits, we don't
  // have all the bits to feed it.  We could do something crazy in the future
  // (issue a smaller load then merge the bits in) but this seems unlikely to be
  // valuable.
  if (StoreOffset > LoadOffset ||
      StoreOffset+StoreSize < LoadOffset+LoadSize)
    return -1;

  // Okay, we can do this transformation.  Return the number of bytes into the
  // store that the load is.
  return LoadOffset-StoreOffset;
}
|
||||
|
||||
|
||||
/// GetStoreValueForLoad - This function is called when we have a
|
||||
/// memdep query of a load that ends up being a clobbering store. This means
|
||||
/// that the store *may* provide bits used by the load but we can't be sure
|
||||
/// because the pointers don't mustalias. Check this case to see if there is
|
||||
/// anything more we can do before we give up.
|
||||
static Value *GetStoreValueForLoad(Value *SrcVal, int Offset,const Type *LoadTy,
|
||||
Instruction *InsertPt, const TargetData *TD){
|
||||
LLVMContext &Ctx = SrcVal->getType()->getContext();
|
||||
|
||||
uint64_t StoreSize = TD->getTypeSizeInBits(SrcVal->getType())/8;
|
||||
uint64_t LoadSize = TD->getTypeSizeInBits(LoadTy)/8;
|
||||
|
||||
|
||||
// Compute which bits of the stored value are being used by the load. Convert
|
||||
// to an integer type to start with.
|
||||
if (isa<PointerType>(SrcVal->getType()))
|
||||
SrcVal = new PtrToIntInst(SrcVal, TD->getIntPtrType(Ctx), "tmp", InsertPt);
|
||||
if (!isa<IntegerType>(SrcVal->getType()))
|
||||
SrcVal = new BitCastInst(SrcVal, IntegerType::get(Ctx, StoreSize*8),
|
||||
"tmp", InsertPt);
|
||||
|
||||
// Shift the bits to the least significant depending on endianness.
|
||||
unsigned ShiftAmt;
|
||||
if (TD->isLittleEndian()) {
|
||||
ShiftAmt = Offset*8;
|
||||
} else {
|
||||
ShiftAmt = StoreSize-LoadSize-Offset;
|
||||
}
|
||||
|
||||
SrcVal = BinaryOperator::CreateLShr(SrcVal,
|
||||
ConstantInt::get(SrcVal->getType(), ShiftAmt), "tmp", InsertPt);
|
||||
|
||||
SrcVal = new TruncInst(SrcVal, IntegerType::get(Ctx, LoadSize*8),
|
||||
"tmp", InsertPt);
|
||||
|
||||
return CoerceAvailableValueToLoadType(SrcVal, LoadTy, InsertPt, *TD);
|
||||
}
|
||||
|
||||
|
||||
|
||||
/// processLoad - Attempt to eliminate a load, first by eliminating it
|
||||
/// locally, and then attempting non-local elimination if that fails.
|
||||
bool GVN::processLoad(LoadInst *L, SmallVectorImpl<Instruction*> &toErase) {
|
||||
|
Loading…
x
Reference in New Issue
Block a user