[ValueTracking] Don't do an unchecked shift in ComputeNumSignBits

Summary:
Previously we used to return a bogus result, 0, for IR like `ashr %val,
-1`.

I've also added an assert checking that `ComputeNumSignBits` at least
returns 1.  That assert found an already checked-in test case where we
were returning a bad result for `ashr %val, -1`.

Fixes PR32045.

Reviewers: spatel, majnemer

Reviewed By: spatel, majnemer

Subscribers: efriedma, mcrosier, llvm-commits

Differential Revision: https://reviews.llvm.org/D30311

git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@296273 91177308-0d34-0410-b5e6-96231b3b80d8
This commit is contained in:
Sanjoy Das 2017-02-25 20:30:45 +00:00
parent 95d021ba68
commit 9b2526d03d
3 changed files with 79 additions and 2 deletions

View File

@ -2113,13 +2113,29 @@ static unsigned computeNumSignBitsVectorConstant(const Value *V,
return MinSignBits;
}
static unsigned ComputeNumSignBitsImpl(const Value *V, unsigned Depth,
const Query &Q);
/// Checked wrapper around ComputeNumSignBitsImpl.
///
/// Every value replicates its own sign bit at least once (the sign bit
/// itself), so a result of 0 can only mean a bug in the implementation --
/// e.g. the unchecked shift for `ashr %val, -1` fixed by this change
/// (PR32045). The assert catches any such regression.
static unsigned ComputeNumSignBits(const Value *V, unsigned Depth,
const Query &Q) {
unsigned Result = ComputeNumSignBitsImpl(V, Depth, Q);
// At least the sign bit itself is always known; 0 is never a valid answer.
assert(Result > 0 && "At least one sign bit needs to be present!");
return Result;
}
/// Return the number of times the sign bit of the register is replicated into
/// the other bits. We know that at least 1 bit is always equal to the sign bit
/// (itself), but other cases can give us information. For example, immediately
/// after an "ashr X, 2", we know that the top 3 bits are all equal to each
/// other, so we return 3. For vectors, return the number of sign bits for the
vector element with the minimum number of known sign bits.
unsigned ComputeNumSignBits(const Value *V, unsigned Depth, const Query &Q) {
static unsigned ComputeNumSignBitsImpl(const Value *V, unsigned Depth,
const Query &Q) {
// We return the minimum number of sign bits that are guaranteed to be present
// in V, so for undef we have to conservatively return 1. We don't have the
// same behavior for poison though -- that's a FIXME today.
unsigned TyBits = Q.DL.getTypeSizeInBits(V->getType()->getScalarType());
unsigned Tmp, Tmp2;
unsigned FirstAnswer = 1;
@ -2195,7 +2211,10 @@ unsigned ComputeNumSignBits(const Value *V, unsigned Depth, const Query &Q) {
// ashr X, C -> adds C sign bits. Vectors too.
const APInt *ShAmt;
if (match(U->getOperand(1), m_APInt(ShAmt))) {
Tmp += ShAmt->getZExtValue();
unsigned ShAmtLimited = ShAmt->getZExtValue();
if (ShAmtLimited >= TyBits)
break; // Bad shift.
Tmp += ShAmtLimited;
if (Tmp > TyBits) Tmp = TyBits;
}
return Tmp;

View File

@ -0,0 +1,39 @@
; RUN: opt -S -indvars < %s | FileCheck %s
; This is not an IndVarSimplify bug, but the original symptom
; manifested as one.
;
; PR32045: ComputeNumSignBits used to return a bogus 0 for `ashr %val, -1`
; (the shift amount, read as unsigned, exceeds the bit width), which let
; later reasoning about %shr/%sub go wrong. The CHECK lines pin the correct
; output for for.end once the countdown IV %p is rewritten to its exit
; value of -1.
define i32 @foo(i32 %a, i32 %b, i32 %c, i32* %sink) {
; CHECK-LABEL: @foo(
; CHECK: for.end:
; CHECK-NEXT: [[SHR:%.*]] = ashr i32 %neg3, -1
; CHECK-NEXT: [[SUB:%.*]] = sub nsw i32 0, [[SHR]]
; CHECK-NEXT: [[SHR1:%.*]] = ashr i32 [[SUB]], [[B:%.*]]
; CHECK-NEXT: [[NEG:%.*]] = xor i32 [[SHR1]], -1
; CHECK-NEXT: store i32 [[NEG]], i32* %sink
;
entry:
%tobool2 = icmp eq i32 %a, 0
br i1 %tobool2, label %exit, label %preheader
preheader:
%neg3 = phi i32 [ %c, %entry ], [ %neg, %for.end ]
br label %for
for:
; %p counts down 1, 0, -1; the loop exits when %cmp (%p > -1) fails,
; i.e. with %p == -1, so indvars folds the shift amount below to -1.
%p = phi i32 [ %dec, %for ], [ 1, %preheader ]
%cmp = icmp sgt i32 %p, -1
%dec = add nsw i32 %p, -1
br i1 %cmp, label %for, label %for.end
for.end:
%shr = ashr i32 %neg3, %p
%sub = sub nsw i32 0, %shr
%shr1 = ashr i32 %sub, %b
%neg = xor i32 %shr1, -1
store i32 %neg, i32* %sink
br i1 false, label %exit, label %preheader
exit:
ret i32 0
}

View File

@ -239,3 +239,22 @@ TEST(ValueTracking, GuaranteedToTransferExecutionToSuccessor) {
Index++;
}
}
// PR32045: an `ashr` whose shift amount, read as unsigned, exceeds the bit
// width used to make ComputeNumSignBits return 0. Any value has at least
// one sign bit (the sign bit itself), so the correct conservative answer
// here is 1.
TEST(ValueTracking, ComputeNumSignBits_PR32045) {
StringRef Assembly = "define i32 @f(i32 %a) { "
" %val = ashr i32 %a, -1 "
" ret i32 %val "
"} ";

SMDiagnostic ParseError;
LLVMContext Ctx;
auto Mod = parseAssemblyString(Assembly, ParseError, Ctx);
assert(Mod && "Bad assembly?");

auto *Fn = Mod->getFunction("f");
assert(Fn && "Bad assembly?");

// The function's single return hands back the ashr result directly.
auto *Ret = cast<ReturnInst>(Fn->getEntryBlock().getTerminator());
EXPECT_EQ(ComputeNumSignBits(Ret->getReturnValue(), Mod->getDataLayout()), 1);
}