InstCombine: Teach llvm.objectsize folding to look through GEPs.

git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@122958 91177308-0d34-0410-b5e6-96231b3b80d8
Benjamin Kramer 2011-01-06 13:07:49 +00:00
parent 5191e7cc26
commit 783a5c2b69
2 changed files with 51 additions and 50 deletions
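At the source level, the effect of this change can be seen through __builtin_object_size, which clang lowers to llvm.objectsize. The snippet below is only an illustrative sketch mirroring the new test case further down; the function name is invented and whether the call actually folds still depends on the optimization pipeline.

#include <cstddef>
#include <cstdlib>

// Hypothetical example: after this patch InstCombine can fold the
// objectsize query even though the pointer is a constant GEP away from
// the allocation it came from.
std::size_t remaining_bytes() {
  char *buf = static_cast<char *>(std::malloc(48)); // 48-byte allocation
  char *p = buf + 16;                               // becomes a constant GEP in IR
  // Type 0 asks for the maximum remaining size; looking through the GEP
  // lets this fold to 48 - 16 = 32.
  return __builtin_object_size(p, 0);
}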


@@ -250,82 +250,73 @@ Instruction *InstCombiner::visitCallInst(CallInst &CI) {
     if (!TD) break;
     const Type *ReturnTy = CI.getType();
-    bool Min = (cast<ConstantInt>(II->getArgOperand(1))->getZExtValue() == 1);
+    uint64_t DontKnow = II->getArgOperand(1) == Builder->getTrue() ? 0 : -1ULL;
     // Get to the real allocated thing and offset as fast as possible.
     Value *Op1 = II->getArgOperand(0)->stripPointerCasts();
+    uint64_t Offset = 0;
+    uint64_t Size = -1ULL;
+    // Try to look through constant GEPs.
+    if (GEPOperator *GEP = dyn_cast<GEPOperator>(Op1)) {
+      if (!GEP->hasAllConstantIndices()) break;
+      // Get the current byte offset into the thing. Use the original
+      // operand in case we're looking through a bitcast.
+      SmallVector<Value*, 8> Ops(GEP->idx_begin(), GEP->idx_end());
+      Offset = TD->getIndexedOffset(GEP->getPointerOperandType(),
+                                    Ops.data(), Ops.size());
+      Op1 = GEP->getPointerOperand()->stripPointerCasts();
+      // Make sure we're not a constant offset from an external
+      // global.
+      if (GlobalVariable *GV = dyn_cast<GlobalVariable>(Op1))
+        if (!GV->hasDefinitiveInitializer()) break;
+    }
     // If we've stripped down to a single global variable that we
     // can know the size of then just return that.
     if (GlobalVariable *GV = dyn_cast<GlobalVariable>(Op1)) {
       if (GV->hasDefinitiveInitializer()) {
         Constant *C = GV->getInitializer();
-        uint64_t GlobalSize = TD->getTypeAllocSize(C->getType());
-        return ReplaceInstUsesWith(CI, ConstantInt::get(ReturnTy, GlobalSize));
+        Size = TD->getTypeAllocSize(C->getType());
       } else {
         // Can't determine size of the GV.
-        Constant *RetVal = ConstantInt::get(ReturnTy, Min ? 0 : -1ULL);
+        Constant *RetVal = ConstantInt::get(ReturnTy, DontKnow);
         return ReplaceInstUsesWith(CI, RetVal);
       }
     } else if (AllocaInst *AI = dyn_cast<AllocaInst>(Op1)) {
       // Get alloca size.
       if (AI->getAllocatedType()->isSized()) {
-        uint64_t AllocaSize = TD->getTypeAllocSize(AI->getAllocatedType());
+        Size = TD->getTypeAllocSize(AI->getAllocatedType());
         if (AI->isArrayAllocation()) {
           const ConstantInt *C = dyn_cast<ConstantInt>(AI->getArraySize());
           if (!C) break;
-          AllocaSize *= C->getZExtValue();
+          Size *= C->getZExtValue();
         }
-        return ReplaceInstUsesWith(CI, ConstantInt::get(ReturnTy, AllocaSize));
       }
     } else if (CallInst *MI = extractMallocCall(Op1)) {
-      const Type* MallocType = getMallocAllocatedType(MI);
       // Get alloca size.
-      if (MallocType && MallocType->isSized()) {
-        if (Value *NElems = getMallocArraySize(MI, TD, true)) {
+      const Type* MallocType = getMallocAllocatedType(MI);
+      if (MallocType && MallocType->isSized())
+        if (Value *NElems = getMallocArraySize(MI, TD, true))
          if (ConstantInt *NElements = dyn_cast<ConstantInt>(NElems))
-            return ReplaceInstUsesWith(CI, ConstantInt::get(ReturnTy,
-               (NElements->getZExtValue() * TD->getTypeAllocSize(MallocType))));
-        }
-      }
-    } else if (ConstantExpr *CE = dyn_cast<ConstantExpr>(Op1)) {
-      // Only handle constant GEPs here.
-      if (CE->getOpcode() != Instruction::GetElementPtr) break;
-      GEPOperator *GEP = cast<GEPOperator>(CE);
-      // Make sure we're not a constant offset from an external
-      // global.
-      Value *Operand = GEP->getPointerOperand();
-      Operand = Operand->stripPointerCasts();
-      if (GlobalVariable *GV = dyn_cast<GlobalVariable>(Operand))
-        if (!GV->hasDefinitiveInitializer()) break;
-      // Get what we're pointing to and its size.
-      const PointerType *BaseType =
-        cast<PointerType>(Operand->getType());
-      uint64_t Size = TD->getTypeAllocSize(BaseType->getElementType());
-      // Get the current byte offset into the thing. Use the original
-      // operand in case we're looking through a bitcast.
-      SmallVector<Value*, 8> Ops(CE->op_begin()+1, CE->op_end());
-      const PointerType *OffsetType =
-        cast<PointerType>(GEP->getPointerOperand()->getType());
-      uint64_t Offset = TD->getIndexedOffset(OffsetType, &Ops[0], Ops.size());
-      if (Size < Offset) {
-        // Out of bound reference? Negative index normalized to large
-        // index? Just return "I don't know".
-        Constant *RetVal = ConstantInt::get(ReturnTy, Min ? 0 : -1ULL);
-        return ReplaceInstUsesWith(CI, RetVal);
-      }
-      Constant *RetVal = ConstantInt::get(ReturnTy, Size-Offset);
-      return ReplaceInstUsesWith(CI, RetVal);
-    }
+            Size = NElements->getZExtValue() * TD->getTypeAllocSize(MallocType);
+    }
     // Do not return "I don't know" here. Later optimization passes could
     // make it possible to evaluate objectsize to a constant.
-    break;
+    if (Size == -1ULL)
+      break;
+    if (Size < Offset) {
+      // Out of bound reference? Negative index normalized to large
+      // index? Just return "I don't know".
+      return ReplaceInstUsesWith(CI, ConstantInt::get(ReturnTy, DontKnow));
+    }
+    return ReplaceInstUsesWith(CI, ConstantInt::get(ReturnTy, Size-Offset));
   }
   case Intrinsic::bswap:
     // bswap(bswap(x)) -> x
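Condensed into plain arithmetic, the rewritten case computes a single (Size, Offset) pair and decides once at the end. The stand-alone model below is only a sketch of that control flow; the function and parameter names are invented for illustration and are not LLVM API.

#include <cstdint>

// Illustrative model: 'allocSize' is the allocation size of the object
// the pointer was traced back to (UINT64_MAX if unknown), 'gepOffset'
// the constant byte offset accumulated while looking through GEPs, and
// 'minIsTrue' the second argument of llvm.objectsize.
uint64_t foldObjectSize(uint64_t allocSize, uint64_t gepOffset, bool minIsTrue) {
  const uint64_t DontKnow = minIsTrue ? 0 : UINT64_MAX; // i1 true -> 0, false -> -1
  if (allocSize == UINT64_MAX)  // size unknown; the real code just breaks
    return DontKnow;            // out and leaves the intrinsic in place
  if (allocSize < gepOffset)    // out-of-bounds or negative index that
    return DontKnow;            // wrapped around to a huge value
  return allocSize - gepOffset; // e.g. malloc(48) at offset 16 -> 32
}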


@@ -150,3 +150,13 @@ declare i8* @__memset_chk(i8*, i32, i32, i32) nounwind
 declare noalias i8* @malloc(i32) nounwind
 declare i32 @llvm.objectsize.i32(i8*, i1) nounwind readonly
+define i32 @test7() {
+; CHECK: @test7
+  %alloc = call noalias i8* @malloc(i32 48) nounwind
+  %gep = getelementptr inbounds i8* %alloc, i32 16
+  %objsize = call i32 @llvm.objectsize.i32(i8* %gep, i1 false) nounwind readonly
+; CHECK-NEXT: ret i32 32
+  ret i32 %objsize
+}
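The test exercises the malloc path; the GlobalVariable branch folds the analogous pattern for globals with a definitive initializer. A hedged C++ sketch of that case (the global and the function are hypothetical, not taken from the test suite):

#include <cstddef>

char gbuf[60]; // hypothetical global whose size is known to the optimizer

// A constant offset into a global with a known initializer can be folded
// the same way: size of the global minus the byte offset.
std::size_t tail_bytes() {
  return __builtin_object_size(gbuf + 12, 0); // can fold to 60 - 12 = 48
}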