mirror of
https://github.com/capstone-engine/llvm-capstone.git
synced 2024-12-27 18:28:14 +00:00
tsan: support __ATOMIC_HLE_ACQUIRE/RELEASE flags
HLE flags can be combined with memory order in atomic operations. Currently tsan runtime crashes on e.g. IsStoreOrder(mo) in atomic store if any of these additional flags are specified. Filter these flags out. See the comment as to why it is safe. llvm-svn: 298378
This commit is contained in:
parent
00ece756c3
commit
de033e6cdb
@ -450,10 +450,27 @@ static void AtomicFence(ThreadState *thr, uptr pc, morder mo) {
|
||||
|
||||
// C/C++
|
||||
|
||||
// Reduce mo to a plain C/C++ memory order, honoring the force_seq_cst_atomics
// runtime flag. (Note: name is a historical typo for "convert_morder"; kept
// because SCOPED_ATOMIC references it.)
static morder covert_morder(morder mo) {
  // When the user forces seq_cst semantics, ignore the requested order.
  if (flags()->force_seq_cst_atomics)
    return (morder)mo_seq_cst;

  // Strip additional memory order flag bits:
  //   MEMMODEL_SYNC        = 1 << 15
  //   __ATOMIC_HLE_ACQUIRE = 1 << 16
  //   __ATOMIC_HLE_RELEASE = 1 << 17
  //
  // HLE is an optimization, and we pretend that elision always fails.
  // MEMMODEL_SYNC is used when lowering __sync_ atomics; since we use
  // __sync_ atomics for actual atomic operations, we can safely ignore it
  // as well. It also subtly affects semantics, but we don't model the
  // difference.
  enum { kOrderMask = (1 << 15) - 1 };  // 0x7fff: keep only the order bits
  return (morder)(mo & kOrderMask);
}
|
||||
|
||||
// Common prologue for the interceptor entry points: capture PCs, normalize
// the memory order, and bypass instrumentation when interceptors are ignored.
// covert_morder() already applies flags()->force_seq_cst_atomics, so the
// previous explicit ternary on that flag was redundant and has been removed.
// NOTE(review): the macro continues beyond this hunk (trailing backslash
// preserved) — the unseen tail is intentionally left untouched.
#define SCOPED_ATOMIC(func, ...) \
    const uptr callpc = (uptr)__builtin_return_address(0); \
    uptr pc = StackTrace::GetCurrentPc(); \
    mo = covert_morder(mo); \
    ThreadState *const thr = cur_thread(); \
    if (thr->ignore_interceptors) \
      return NoTsanAtomic##func(__VA_ARGS__); \
|
||||
|
25
compiler-rt/test/tsan/atomic_hle.cc
Normal file
25
compiler-rt/test/tsan/atomic_hle.cc
Normal file
@ -0,0 +1,25 @@
|
||||
// RUN: %clangxx_tsan -O1 %s -o %t && %run %t 2>&1 | FileCheck %s
|
||||
#include "test.h"
|
||||
#include <sanitizer/tsan_interface_atomic.h>
|
||||
|
||||
#ifndef __ATOMIC_HLE_ACQUIRE
|
||||
#define __ATOMIC_HLE_ACQUIRE (1 << 16)
|
||||
#endif
|
||||
#ifndef __ATOMIC_HLE_RELEASE
|
||||
#define __ATOMIC_HLE_RELEASE (1 << 17)
|
||||
#endif
|
||||
|
||||
int main() {
|
||||
volatile int x = 0;
|
||||
//__atomic_fetch_add(&x, 1, __ATOMIC_ACQUIRE | __ATOMIC_HLE_ACQUIRE);
|
||||
//__atomic_store_n(&x, 0, __ATOMIC_RELEASE | __ATOMIC_HLE_RELEASE);
|
||||
__tsan_atomic32_fetch_add(&x, 1,
|
||||
(__tsan_memory_order)(__ATOMIC_ACQUIRE | __ATOMIC_HLE_ACQUIRE));
|
||||
__tsan_atomic32_store(&x, 0,
|
||||
(__tsan_memory_order)(__ATOMIC_RELEASE | __ATOMIC_HLE_RELEASE));
|
||||
fprintf(stderr, "DONE\n");
|
||||
return 0;
|
||||
}
|
||||
|
||||
// CHECK: DONE
|
||||
|
Loading…
Reference in New Issue
Block a user