[InstrProf] Use atomic profile counter updates for TSan

Thread sanitizer instrumentation fails to skip all loads and stores to
profile counters. This can happen if profile counter updates are merged:

  %.sink = phi i64* ...
  %pgocount5 = load i64, i64* %.sink
  %27 = add i64 %pgocount5, 1
  %28 = bitcast i64* %.sink to i8*
  call void @__tsan_write8(i8* %28)
  store i64 %27, i64* %.sink

To suppress TSan diagnostics about racy counter updates, make the
counter updates atomic when TSan is enabled. If there's general interest
in this mode, it can be surfaced as a clang/swift driver option.
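
For reference, here is a rough sketch of how a frontend could opt into
this mode via the new InstrProfOptions::Atomic flag; the helper name and
pass manager plumbing below are illustrative only, not part of this patch:

  #include "llvm/IR/LegacyPassManager.h"
  #include "llvm/Transforms/Instrumentation.h"

  // Hypothetical frontend hook (sketch only): request atomic counter
  // increments whenever TSan is enabled, so profile counter updates are
  // not reported as data races.
  void addInstrProfPass(llvm::legacy::PassManagerBase &PM, bool TSanEnabled) {
    llvm::InstrProfOptions Options;
    Options.Atomic = TSanEnabled; // flag added by this patch
    PM.add(llvm::createInstrProfilingLegacyPass(Options));
  }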

Testing: check-{llvm,clang,profile}

rdar://40477803

Differential Revision: https://reviews.llvm.org/D50867

llvm-svn: 339955
Author: Vedant Kumar
Date:   2018-08-16 22:24:47 +00:00
Commit: 82e946227d (parent ee4811b176)
3 changed files with 33 additions and 6 deletions

@@ -111,6 +111,9 @@ struct InstrProfOptions {
   // Do counter register promotion
   bool DoCounterPromotion = false;
 
+  // Use atomic profile counter increments.
+  bool Atomic = false;
+
   // Name of the profile file to use as output
   std::string InstrProfileOutput;

@@ -96,6 +96,11 @@ cl::opt<double> NumCountersPerValueSite(
     // is usually smaller than 2.
     cl::init(1.0));
 
+cl::opt<bool> AtomicCounterUpdateAll(
+    "instrprof-atomic-counter-update-all", cl::ZeroOrMore,
+    cl::desc("Make all profile counter updates atomic (for testing only)"),
+    cl::init(false));
+
 cl::opt<bool> AtomicCounterUpdatePromoted(
     "atomic-counter-update-promoted", cl::ZeroOrMore,
     cl::desc("Do counter update using atomic fetch add "
@@ -597,12 +602,17 @@ void InstrProfiling::lowerIncrement(InstrProfIncrementInst *Inc) {
   IRBuilder<> Builder(Inc);
   uint64_t Index = Inc->getIndex()->getZExtValue();
   Value *Addr = Builder.CreateConstInBoundsGEP2_64(Counters, 0, Index);
-  Value *Load = Builder.CreateLoad(Addr, "pgocount");
-  auto *Count = Builder.CreateAdd(Load, Inc->getStep());
-  auto *Store = Builder.CreateStore(Count, Addr);
-  Inc->replaceAllUsesWith(Store);
-  if (isCounterPromotionEnabled())
-    PromotionCandidates.emplace_back(cast<Instruction>(Load), Store);
+  if (Options.Atomic || AtomicCounterUpdateAll) {
+    Builder.CreateAtomicRMW(AtomicRMWInst::Add, Addr, Inc->getStep(),
+                            AtomicOrdering::Monotonic);
+  } else {
+    Value *Load = Builder.CreateLoad(Addr, "pgocount");
+    auto *Count = Builder.CreateAdd(Load, Inc->getStep());
+    auto *Store = Builder.CreateStore(Count, Addr);
+    if (isCounterPromotionEnabled())
+      PromotionCandidates.emplace_back(cast<Instruction>(Load), Store);
+  }
   Inc->eraseFromParent();
 }

@@ -0,0 +1,14 @@
+; RUN: opt < %s -S -instrprof -instrprof-atomic-counter-update-all | FileCheck %s
+
+target triple = "x86_64-apple-macosx10.10.0"
+
+@__profn_foo = hidden constant [3 x i8] c"foo"
+
+; CHECK-LABEL: define void @foo
+; CHECK-NEXT: atomicrmw add i64* getelementptr inbounds ([1 x i64], [1 x i64]* @__profc_foo, i64 0, i64 0), i64 1 monotonic
+define void @foo() {
+  call void @llvm.instrprof.increment(i8* getelementptr inbounds ([3 x i8], [3 x i8]* @__profn_foo, i32 0, i32 0), i64 0, i32 1, i32 0)
+  ret void
+}
+
+declare void @llvm.instrprof.increment(i8*, i64, i32, i32)