[ASan] Renamed SHADOW_XYZ to ASAN_SHADOW_XYZ.

Follow up from D115271.

Reviewed By: vitalybuka

Differential Revision: https://reviews.llvm.org/D115293
Kirill Stoimenov, 2021-12-07 23:18:04 +00:00
commit ad56941a57, parent 8a1f2d6580
13 changed files with 124 additions and 119 deletions


@@ -112,7 +112,7 @@ void AsanDeactivate() {
   disabled.quarantine_size_mb = 0;
   disabled.thread_local_quarantine_size_kb = 0;
   // Redzone must be at least Max(16, granularity) bytes long.
-  disabled.min_redzone = Max(16, (int)SHADOW_GRANULARITY);
+  disabled.min_redzone = Max(16, (int)ASAN_SHADOW_GRANULARITY);
   disabled.max_redzone = disabled.min_redzone;
   disabled.alloc_dealloc_mismatch = false;
   disabled.may_return_null = true;


@@ -210,8 +210,7 @@ struct QuarantineCallback {
       CHECK_EQ(old_chunk_state, CHUNK_QUARANTINE);
     }

-    PoisonShadow(m->Beg(),
-                 RoundUpTo(m->UsedSize(), SHADOW_GRANULARITY),
+    PoisonShadow(m->Beg(), RoundUpTo(m->UsedSize(), ASAN_SHADOW_GRANULARITY),
                  kAsanHeapLeftRedzoneMagic);

     // Statistics.
@@ -357,7 +356,7 @@ struct Allocator {
     if (chunk < beg && beg < end && end <= chunk_end) {
       // Looks like a valid AsanChunk in use, poison redzones only.
       PoisonShadow(chunk, beg - chunk, kAsanHeapLeftRedzoneMagic);
-      uptr end_aligned_down = RoundDownTo(end, SHADOW_GRANULARITY);
+      uptr end_aligned_down = RoundDownTo(end, ASAN_SHADOW_GRANULARITY);
       FastPoisonShadowPartialRightRedzone(
           end_aligned_down, end - end_aligned_down,
           chunk_end - end_aligned_down, kAsanHeapLeftRedzoneMagic);
@ -482,7 +481,7 @@ struct Allocator {
}
Flags &fl = *flags();
CHECK(stack);
const uptr min_alignment = SHADOW_GRANULARITY;
const uptr min_alignment = ASAN_SHADOW_GRANULARITY;
const uptr user_requested_alignment_log =
ComputeUserRequestedAlignmentLog(alignment);
if (alignment < min_alignment)
@@ -563,7 +562,7 @@ struct Allocator {
     m->SetAllocContext(t ? t->tid() : kMainTid, StackDepotPut(*stack));

     uptr size_rounded_down_to_granularity =
-        RoundDownTo(size, SHADOW_GRANULARITY);
+        RoundDownTo(size, ASAN_SHADOW_GRANULARITY);
     // Unpoison the bulk of the memory region.
     if (size_rounded_down_to_granularity)
       PoisonShadow(user_beg, size_rounded_down_to_granularity, 0);
@@ -571,7 +570,7 @@ struct Allocator {
     if (size != size_rounded_down_to_granularity && CanPoisonMemory()) {
       u8 *shadow =
           (u8 *)MemToShadow(user_beg + size_rounded_down_to_granularity);
-      *shadow = fl.poison_partial ? (size & (SHADOW_GRANULARITY - 1)) : 0;
+      *shadow = fl.poison_partial ? (size & (ASAN_SHADOW_GRANULARITY - 1)) : 0;
     }

     AsanStats &thread_stats = GetCurrentThreadStats();
@ -641,8 +640,7 @@ struct Allocator {
}
// Poison the region.
PoisonShadow(m->Beg(),
RoundUpTo(m->UsedSize(), SHADOW_GRANULARITY),
PoisonShadow(m->Beg(), RoundUpTo(m->UsedSize(), ASAN_SHADOW_GRANULARITY),
kAsanHeapFreeMagic);
AsanStats &thread_stats = GetCurrentThreadStats();


@@ -141,7 +141,7 @@ uptr __asan_get_free_stack(uptr addr, uptr *trace, uptr size, u32 *thread_id) {
 SANITIZER_INTERFACE_ATTRIBUTE
 void __asan_get_shadow_mapping(uptr *shadow_scale, uptr *shadow_offset) {
   if (shadow_scale)
-    *shadow_scale = SHADOW_SCALE;
+    *shadow_scale = ASAN_SHADOW_SCALE;
   if (shadow_offset)
-    *shadow_offset = SHADOW_OFFSET;
+    *shadow_offset = ASAN_SHADOW_OFFSET;
 }


@@ -329,7 +329,7 @@ void ErrorBadParamsToAnnotateContiguousContainer::Print() {
       "      old_mid : %p\n"
       "      new_mid : %p\n",
       (void *)beg, (void *)end, (void *)old_mid, (void *)new_mid);
-  uptr granularity = SHADOW_GRANULARITY;
+  uptr granularity = ASAN_SHADOW_GRANULARITY;
   if (!IsAligned(beg, granularity))
     Report("ERROR: beg is not aligned by %zu\n", granularity);
   stack->Print();
@@ -410,7 +410,8 @@ ErrorGeneric::ErrorGeneric(u32 tid, uptr pc_, uptr bp_, uptr sp_, uptr addr,
   if (AddrIsInMem(addr)) {
     u8 *shadow_addr = (u8 *)MemToShadow(addr);
     // If we are accessing 16 bytes, look at the second shadow byte.
-    if (*shadow_addr == 0 && access_size > SHADOW_GRANULARITY) shadow_addr++;
+    if (*shadow_addr == 0 && access_size > ASAN_SHADOW_GRANULARITY)
+      shadow_addr++;
     // If we are in the partial right redzone, look at the next shadow byte.
     if (*shadow_addr > 0 && *shadow_addr < 128) shadow_addr++;
     bool far_from_bounds = false;
@@ -501,10 +502,11 @@ static void PrintLegend(InternalScopedString *str) {
   str->append(
       "Shadow byte legend (one shadow byte represents %d "
       "application bytes):\n",
-      (int)SHADOW_GRANULARITY);
+      (int)ASAN_SHADOW_GRANULARITY);
   PrintShadowByte(str, "  Addressable:           ", 0);
   str->append("  Partially addressable: ");
-  for (u8 i = 1; i < SHADOW_GRANULARITY; i++) PrintShadowByte(str, "", i, " ");
+  for (u8 i = 1; i < ASAN_SHADOW_GRANULARITY; i++)
+    PrintShadowByte(str, "", i, " ");
   str->append("\n");
   PrintShadowByte(str, "  Heap left redzone:       ",
                   kAsanHeapLeftRedzoneMagic);


@@ -28,8 +28,8 @@ static const u64 kAllocaRedzoneMask = 31UL;
 // For small size classes inline PoisonShadow for better performance.
 ALWAYS_INLINE void SetShadow(uptr ptr, uptr size, uptr class_id, u64 magic) {
   u64 *shadow = reinterpret_cast<u64*>(MemToShadow(ptr));
-  if (SHADOW_SCALE == 3 && class_id <= 6) {
-    // This code expects SHADOW_SCALE=3.
+  if (ASAN_SHADOW_SCALE == 3 && class_id <= 6) {
+    // This code expects ASAN_SHADOW_SCALE=3.
     for (uptr i = 0; i < (((uptr)1) << class_id); i++) {
       shadow[i] = magic;
       // Make sure this does not become memset.
@@ -294,10 +294,10 @@ void __asan_alloca_poison(uptr addr, uptr size) {
   uptr LeftRedzoneAddr = addr - kAllocaRedzoneSize;
   uptr PartialRzAddr = addr + size;
   uptr RightRzAddr = (PartialRzAddr + kAllocaRedzoneMask) & ~kAllocaRedzoneMask;
-  uptr PartialRzAligned = PartialRzAddr & ~(SHADOW_GRANULARITY - 1);
+  uptr PartialRzAligned = PartialRzAddr & ~(ASAN_SHADOW_GRANULARITY - 1);
   FastPoisonShadow(LeftRedzoneAddr, kAllocaRedzoneSize, kAsanAllocaLeftMagic);
   FastPoisonShadowPartialRightRedzone(
-      PartialRzAligned, PartialRzAddr % SHADOW_GRANULARITY,
+      PartialRzAligned, PartialRzAddr % ASAN_SHADOW_GRANULARITY,
       RightRzAddr - PartialRzAligned, kAsanAllocaRightMagic);
   FastPoisonShadow(RightRzAddr, kAllocaRedzoneSize, kAsanAllocaRightMagic);
 }
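A note on the arithmetic being renamed in the hunk above: __asan_alloca_poison carves a left redzone, a partial granule, and a right redzone around the alloca. A worked sketch of the boundary computation, using a hypothetical address and size; kAllocaRedzoneSize is assumed to be 32, consistent with the kAllocaRedzoneMask = 31UL visible earlier in this file:

    #include <cstdint>
    #include <cstdio>

    int main() {
      const uint64_t kAllocaRedzoneSize = 32;  // assumed; the mask above is 31UL
      const uint64_t kAllocaRedzoneMask = 31;
      const uint64_t kGranularity = 8;         // ASAN_SHADOW_GRANULARITY at scale 3

      uint64_t addr = 0x7fffffffd020, size = 21;  // hypothetical alloca
      uint64_t left_rz = addr - kAllocaRedzoneSize;                  // 0x7fffffffd000
      uint64_t partial_rz = addr + size;                             // 0x7fffffffd035
      uint64_t right_rz =
          (partial_rz + kAllocaRedzoneMask) & ~kAllocaRedzoneMask;   // 0x7fffffffd040
      uint64_t partial_aligned = partial_rz & ~(kGranularity - 1);   // 0x7fffffffd030

      printf("left redzone starts at    %#llx\n", (unsigned long long)left_rz);
      printf("partial granule starts at %#llx (aligned %#llx)\n",
             (unsigned long long)partial_rz, (unsigned long long)partial_aligned);
      printf("right redzone starts at   %#llx\n", (unsigned long long)right_rz);
    }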
@@ -305,7 +305,8 @@ void __asan_alloca_poison(uptr addr, uptr size) {
 SANITIZER_INTERFACE_ATTRIBUTE
 void __asan_allocas_unpoison(uptr top, uptr bottom) {
   if ((!top) || (top > bottom)) return;
-  REAL(memset)(reinterpret_cast<void*>(MemToShadow(top)), 0,
-      (bottom - top) / SHADOW_GRANULARITY);
+  REAL(memset)
+  (reinterpret_cast<void *>(MemToShadow(top)), 0,
+   (bottom - top) / ASAN_SHADOW_GRANULARITY);
 }
 }  // extern "C"


@@ -140,9 +140,9 @@ void InitializeFlags() {
         SanitizerToolName);
     Die();
   }
-  // Ensure that redzone is at least SHADOW_GRANULARITY.
-  if (f->redzone < (int)SHADOW_GRANULARITY)
-    f->redzone = SHADOW_GRANULARITY;
+  // Ensure that redzone is at least ASAN_SHADOW_GRANULARITY.
+  if (f->redzone < (int)ASAN_SHADOW_GRANULARITY)
+    f->redzone = ASAN_SHADOW_GRANULARITY;
   // Make "strict_init_order" imply "check_initialization_order".
   // TODO(samsonov): Use a single runtime flag for an init-order checker.
   if (f->strict_init_order) {


@@ -61,14 +61,13 @@ ALWAYS_INLINE void PoisonShadowForGlobal(const Global *g, u8 value) {
 }

 ALWAYS_INLINE void PoisonRedZones(const Global &g) {
-  uptr aligned_size = RoundUpTo(g.size, SHADOW_GRANULARITY);
+  uptr aligned_size = RoundUpTo(g.size, ASAN_SHADOW_GRANULARITY);
   FastPoisonShadow(g.beg + aligned_size, g.size_with_redzone - aligned_size,
                    kAsanGlobalRedzoneMagic);
   if (g.size != aligned_size) {
     FastPoisonShadowPartialRightRedzone(
-        g.beg + RoundDownTo(g.size, SHADOW_GRANULARITY),
-        g.size % SHADOW_GRANULARITY,
-        SHADOW_GRANULARITY,
+        g.beg + RoundDownTo(g.size, ASAN_SHADOW_GRANULARITY),
+        g.size % ASAN_SHADOW_GRANULARITY, ASAN_SHADOW_GRANULARITY,
         kAsanGlobalRedzoneMagic);
   }
 }


@@ -107,7 +107,7 @@ uptr FindDynamicShadowStart() {
     return FindPremappedShadowStart(shadow_size_bytes);
 #endif

-  return MapDynamicShadow(shadow_size_bytes, SHADOW_SCALE,
+  return MapDynamicShadow(shadow_size_bytes, ASAN_SHADOW_SCALE,
                           /*min_shadow_base_alignment*/ 0, kHighMemEnd);
 }


@@ -151,65 +151,66 @@
 //  || `[0x30000000, 0x35ffffff]` || LowShadow  ||
 //  || `[0x00000000, 0x2fffffff]` || LowMem     ||

-#define SHADOW_SCALE 3
+#define ASAN_SHADOW_SCALE 3

 static const u64 kDefaultShadowSentinel = ~(uptr)0;

 #if SANITIZER_FUCHSIA
-#  define SHADOW_OFFSET_CONST (0)
+#  define ASAN_SHADOW_OFFSET_CONST (0)
 #elif SANITIZER_WORDSIZE == 32
 #  if SANITIZER_ANDROID
-#    define SHADOW_OFFSET_DYNAMIC
+#    define ASAN_SHADOW_OFFSET_DYNAMIC
 #  elif defined(__mips__)
-#    define SHADOW_OFFSET_CONST 0x0aaa0000
+#    define ASAN_SHADOW_OFFSET_CONST 0x0aaa0000
 #  elif SANITIZER_FREEBSD
-#    define SHADOW_OFFSET_CONST 0x40000000
+#    define ASAN_SHADOW_OFFSET_CONST 0x40000000
 #  elif SANITIZER_NETBSD
-#    define SHADOW_OFFSET_CONST 0x40000000
+#    define ASAN_SHADOW_OFFSET_CONST 0x40000000
 #  elif SANITIZER_WINDOWS
-#    define SHADOW_OFFSET_CONST 0x30000000
+#    define ASAN_SHADOW_OFFSET_CONST 0x30000000
 #  elif SANITIZER_IOS
-#    define SHADOW_OFFSET_DYNAMIC
+#    define ASAN_SHADOW_OFFSET_DYNAMIC
 #  else
-#    define SHADOW_OFFSET_CONST 0x20000000
+#    define ASAN_SHADOW_OFFSET_CONST 0x20000000
 #  endif
 #else
 #  if SANITIZER_IOS
-#    define SHADOW_OFFSET_DYNAMIC
+#    define ASAN_SHADOW_OFFSET_DYNAMIC
 #  elif SANITIZER_MAC && defined(__aarch64__)
-#    define SHADOW_OFFSET_DYNAMIC
+#    define ASAN_SHADOW_OFFSET_DYNAMIC
 #  elif SANITIZER_RISCV64
-#    define SHADOW_OFFSET_CONST 0x0000000d55550000
+#    define ASAN_SHADOW_OFFSET_CONST 0x0000000d55550000
 #  elif defined(__aarch64__)
-#    define SHADOW_OFFSET_CONST 0x0000001000000000
+#    define ASAN_SHADOW_OFFSET_CONST 0x0000001000000000
 #  elif defined(__powerpc64__)
-#    define SHADOW_OFFSET_CONST 0x0000100000000000
+#    define ASAN_SHADOW_OFFSET_CONST 0x0000100000000000
 #  elif defined(__s390x__)
-#    define SHADOW_OFFSET_CONST 0x0010000000000000
+#    define ASAN_SHADOW_OFFSET_CONST 0x0010000000000000
 #  elif SANITIZER_FREEBSD
-#    define SHADOW_OFFSET_CONST 0x0000400000000000
+#    define ASAN_SHADOW_OFFSET_CONST 0x0000400000000000
 #  elif SANITIZER_NETBSD
-#    define SHADOW_OFFSET_CONST 0x0000400000000000
+#    define ASAN_SHADOW_OFFSET_CONST 0x0000400000000000
 #  elif SANITIZER_MAC
-#    define SHADOW_OFFSET_CONST 0x0000100000000000
+#    define ASAN_SHADOW_OFFSET_CONST 0x0000100000000000
 #  elif defined(__mips64)
-#    define SHADOW_OFFSET_CONST 0x0000002000000000
+#    define ASAN_SHADOW_OFFSET_CONST 0x0000002000000000
 #  elif defined(__sparc__)
-#    define SHADOW_OFFSET_CONST 0x0000080000000000
+#    define ASAN_SHADOW_OFFSET_CONST 0x0000080000000000
 #  elif SANITIZER_WINDOWS64
-#    define SHADOW_OFFSET_DYNAMIC
+#    define ASAN_SHADOW_OFFSET_DYNAMIC
 #  else
-#    define SHADOW_OFFSET_CONST 0x000000007FFFFFFF & (~0xFFFULL << SHADOW_SCALE)
+#    define ASAN_SHADOW_OFFSET_CONST \
+      0x000000007FFFFFFF & (~0xFFFULL << ASAN_SHADOW_SCALE)
 #  endif
 #endif

-#if defined(SHADOW_OFFSET_CONST)
-static const u64 kConstShadowOffset = SHADOW_OFFSET_CONST;
-#  define SHADOW_OFFSET kConstShadowOffset
-#elif defined(SHADOW_OFFSET_DYNAMIC)
-#  define SHADOW_OFFSET __asan_shadow_memory_dynamic_address
+#if defined(ASAN_SHADOW_OFFSET_CONST)
+static const u64 kConstShadowOffset = ASAN_SHADOW_OFFSET_CONST;
+#  define ASAN_SHADOW_OFFSET kConstShadowOffset
+#elif defined(ASAN_SHADOW_OFFSET_DYNAMIC)
+#  define ASAN_SHADOW_OFFSET __asan_shadow_memory_dynamic_address
 #else
-#  error "SHADOW_OFFSET can't be determined."
+#  error "ASAN_SHADOW_OFFSET can't be determined."
 #endif

 #if SANITIZER_ANDROID && defined(__arm__)
@@ -218,7 +219,7 @@ static const u64 kConstShadowOffset = SHADOW_OFFSET_CONST;
 #  define ASAN_PREMAP_SHADOW 0
 #endif

-#define SHADOW_GRANULARITY (1ULL << SHADOW_SCALE)
+#define ASAN_SHADOW_GRANULARITY (1ULL << ASAN_SHADOW_SCALE)

 #define DO_ASAN_MAPPING_PROFILE 0  // Set to 1 to profile the functions below.
@@ -252,36 +253,37 @@ extern uptr kHighMemEnd, kMidMemBeg, kMidMemEnd;  // Initialized in __asan_init.
 #if defined(__sparc__) && SANITIZER_WORDSIZE == 64
 #  include "asan_mapping_sparc64.h"
 #else
-#define MEM_TO_SHADOW(mem) (((mem) >> SHADOW_SCALE) + (SHADOW_OFFSET))
+#  define MEM_TO_SHADOW(mem) \
+    (((mem) >> ASAN_SHADOW_SCALE) + (ASAN_SHADOW_OFFSET))

-#define kLowMemBeg 0
-#define kLowMemEnd (SHADOW_OFFSET ? SHADOW_OFFSET - 1 : 0)
+#  define kLowMemBeg 0
+#  define kLowMemEnd (ASAN_SHADOW_OFFSET ? ASAN_SHADOW_OFFSET - 1 : 0)

-#define kLowShadowBeg SHADOW_OFFSET
-#define kLowShadowEnd MEM_TO_SHADOW(kLowMemEnd)
+#  define kLowShadowBeg ASAN_SHADOW_OFFSET
+#  define kLowShadowEnd MEM_TO_SHADOW(kLowMemEnd)

-#define kHighMemBeg (MEM_TO_SHADOW(kHighMemEnd) + 1)
+#  define kHighMemBeg (MEM_TO_SHADOW(kHighMemEnd) + 1)

-#define kHighShadowBeg MEM_TO_SHADOW(kHighMemBeg)
-#define kHighShadowEnd MEM_TO_SHADOW(kHighMemEnd)
+#  define kHighShadowBeg MEM_TO_SHADOW(kHighMemBeg)
+#  define kHighShadowEnd MEM_TO_SHADOW(kHighMemEnd)

-# define kMidShadowBeg MEM_TO_SHADOW(kMidMemBeg)
-# define kMidShadowEnd MEM_TO_SHADOW(kMidMemEnd)
+#  define kMidShadowBeg MEM_TO_SHADOW(kMidMemBeg)
+#  define kMidShadowEnd MEM_TO_SHADOW(kMidMemEnd)

 // With the zero shadow base we can not actually map pages starting from 0.
 // This constant is somewhat arbitrary.
-#define kZeroBaseShadowStart 0
-#define kZeroBaseMaxShadowStart (1 << 18)
+#  define kZeroBaseShadowStart 0
+#  define kZeroBaseMaxShadowStart (1 << 18)

-#define kShadowGapBeg (kLowShadowEnd ? kLowShadowEnd + 1 \
-                       : kZeroBaseShadowStart)
-#define kShadowGapEnd ((kMidMemBeg ? kMidShadowBeg : kHighShadowBeg) - 1)
+#  define kShadowGapBeg \
+    (kLowShadowEnd ? kLowShadowEnd + 1 : kZeroBaseShadowStart)
+#  define kShadowGapEnd ((kMidMemBeg ? kMidShadowBeg : kHighShadowBeg) - 1)

-#define kShadowGap2Beg (kMidMemBeg ? kMidShadowEnd + 1 : 0)
-#define kShadowGap2End (kMidMemBeg ? kMidMemBeg - 1 : 0)
+#  define kShadowGap2Beg (kMidMemBeg ? kMidShadowEnd + 1 : 0)
+#  define kShadowGap2End (kMidMemBeg ? kMidMemBeg - 1 : 0)

-#define kShadowGap3Beg (kMidMemBeg ? kMidMemEnd + 1 : 0)
-#define kShadowGap3End (kMidMemBeg ? kHighShadowBeg - 1 : 0)
+#  define kShadowGap3Beg (kMidMemBeg ? kMidMemEnd + 1 : 0)
+#  define kShadowGap3End (kMidMemBeg ? kHighShadowBeg - 1 : 0)

 namespace __asan {
@@ -319,13 +321,13 @@ static inline bool AddrIsInShadowGap(uptr a) {
   PROFILE_ASAN_MAPPING();
   if (kMidMemBeg) {
     if (a <= kShadowGapEnd)
-      return SHADOW_OFFSET == 0 || a >= kShadowGapBeg;
+      return ASAN_SHADOW_OFFSET == 0 || a >= kShadowGapBeg;
     return (a >= kShadowGap2Beg && a <= kShadowGap2End) ||
            (a >= kShadowGap3Beg && a <= kShadowGap3End);
   }
   // In zero-based shadow mode we treat addresses near zero as addresses
   // in shadow gap as well.
-  if (SHADOW_OFFSET == 0)
+  if (ASAN_SHADOW_OFFSET == 0)
     return a <= kShadowGapEnd;
   return a >= kShadowGapBeg && a <= kShadowGapEnd;
 }
@@ -336,7 +338,9 @@ static inline bool AddrIsInShadowGap(uptr a) {

 namespace __asan {

-static inline uptr MemToShadowSize(uptr size) { return size >> SHADOW_SCALE; }
+static inline uptr MemToShadowSize(uptr size) {
+  return size >> ASAN_SHADOW_SCALE;
+}

 static inline bool AddrIsInMem(uptr a) {
   PROFILE_ASAN_MAPPING();
@@ -357,7 +361,7 @@ static inline bool AddrIsInShadow(uptr a) {

 static inline bool AddrIsAlignedByGranularity(uptr a) {
   PROFILE_ASAN_MAPPING();
-  return (a & (SHADOW_GRANULARITY - 1)) == 0;
+  return (a & (ASAN_SHADOW_GRANULARITY - 1)) == 0;
 }

 static inline bool AddressIsPoisoned(uptr a) {
@@ -366,8 +370,8 @@ static inline bool AddressIsPoisoned(uptr a) {
   u8 *shadow_address = (u8*)MEM_TO_SHADOW(a);
   s8 shadow_value = *shadow_address;
   if (shadow_value) {
-    u8 last_accessed_byte = (a & (SHADOW_GRANULARITY - 1))
-        + kAccessSize - 1;
+    u8 last_accessed_byte =
+        (a & (ASAN_SHADOW_GRANULARITY - 1)) + kAccessSize - 1;
     return (last_accessed_byte >= shadow_value);
   }
   return false;

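The mapping itself is untouched by this rename. As a reference for the macros above, here is a minimal standalone sketch of the shadow arithmetic; the offset 0x7fff8000 is what the #else branch evaluates to with ASAN_SHADOW_SCALE = 3 (the default 64-bit Linux layout), and the example addresses and shadow byte are hypothetical (a real process must read the byte at the MEM_TO_SHADOW address):

    #include <cstdint>
    #include <cstdio>

    constexpr uint64_t kScale = 3;                     // ASAN_SHADOW_SCALE
    constexpr uint64_t kOffset = 0x7fff8000ULL;        // ASAN_SHADOW_OFFSET (x86-64 Linux)
    constexpr uint64_t kGranularity = 1ULL << kScale;  // ASAN_SHADOW_GRANULARITY

    uint64_t MemToShadow(uint64_t addr) { return (addr >> kScale) + kOffset; }

    // Given the shadow byte guarding `addr`, decide whether a 1-byte access is
    // poisoned -- the same test AddressIsPoisoned performs with kAccessSize = 1.
    bool IsPoisoned1(uint64_t addr, int8_t shadow_value) {
      if (!shadow_value) return false;  // 0 => whole granule is addressable
      uint8_t last_accessed_byte = addr & (kGranularity - 1);
      return last_accessed_byte >= shadow_value;
    }

    int main() {
      uint64_t p = 0x602000000010ULL;  // hypothetical heap address
      printf("shadow(%#llx) = %#llx\n", (unsigned long long)p,
             (unsigned long long)MemToShadow(p));
      // Shadow byte 4 => only the first 4 bytes of the granule are addressable.
      printf("offset 3 poisoned: %d\n", IsPoisoned1(p + 3, 4));  // prints 0
      printf("offset 5 poisoned: %d\n", IsPoisoned1(p + 5, 4));  // prints 1
    }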

@@ -35,7 +35,7 @@ void PoisonShadow(uptr addr, uptr size, u8 value) {
   CHECK(AddrIsAlignedByGranularity(addr));
   CHECK(AddrIsInMem(addr));
   CHECK(AddrIsAlignedByGranularity(addr + size));
-  CHECK(AddrIsInMem(addr + size - SHADOW_GRANULARITY));
+  CHECK(AddrIsInMem(addr + size - ASAN_SHADOW_GRANULARITY));
   CHECK(REAL(memset));
   FastPoisonShadow(addr, size, value);
 }
@@ -52,12 +52,12 @@ void PoisonShadowPartialRightRedzone(uptr addr,

 struct ShadowSegmentEndpoint {
   u8 *chunk;
-  s8 offset;  // in [0, SHADOW_GRANULARITY)
+  s8 offset;  // in [0, ASAN_SHADOW_GRANULARITY)
   s8 value;   // = *chunk;

   explicit ShadowSegmentEndpoint(uptr address) {
     chunk = (u8*)MemToShadow(address);
-    offset = address & (SHADOW_GRANULARITY - 1);
+    offset = address & (ASAN_SHADOW_GRANULARITY - 1);
     value = *chunk;
   }
 };
@@ -72,14 +72,14 @@ void AsanPoisonOrUnpoisonIntraObjectRedzone(uptr ptr, uptr size, bool poison) {
   }
   CHECK(size);
   CHECK_LE(size, 4096);
-  CHECK(IsAligned(end, SHADOW_GRANULARITY));
-  if (!IsAligned(ptr, SHADOW_GRANULARITY)) {
+  CHECK(IsAligned(end, ASAN_SHADOW_GRANULARITY));
+  if (!IsAligned(ptr, ASAN_SHADOW_GRANULARITY)) {
     *(u8 *)MemToShadow(ptr) =
-        poison ? static_cast<u8>(ptr % SHADOW_GRANULARITY) : 0;
-    ptr |= SHADOW_GRANULARITY - 1;
+        poison ? static_cast<u8>(ptr % ASAN_SHADOW_GRANULARITY) : 0;
+    ptr |= ASAN_SHADOW_GRANULARITY - 1;
     ptr++;
   }
-  for (; ptr < end; ptr += SHADOW_GRANULARITY)
+  for (; ptr < end; ptr += ASAN_SHADOW_GRANULARITY)
     *(u8*)MemToShadow(ptr) = poison ? kAsanIntraObjectRedzone : 0;
 }
@@ -181,12 +181,12 @@ uptr __asan_region_is_poisoned(uptr beg, uptr size) {
   if (!AddrIsInMem(end))
     return end;
   CHECK_LT(beg, end);
-  uptr aligned_b = RoundUpTo(beg, SHADOW_GRANULARITY);
-  uptr aligned_e = RoundDownTo(end, SHADOW_GRANULARITY);
+  uptr aligned_b = RoundUpTo(beg, ASAN_SHADOW_GRANULARITY);
+  uptr aligned_e = RoundDownTo(end, ASAN_SHADOW_GRANULARITY);
   uptr shadow_beg = MemToShadow(aligned_b);
   uptr shadow_end = MemToShadow(aligned_e);
   // First check the first and the last application bytes,
-  // then check the SHADOW_GRANULARITY-aligned region by calling
+  // then check the ASAN_SHADOW_GRANULARITY-aligned region by calling
   // mem_is_zero on the corresponding shadow.
   if (!__asan::AddressIsPoisoned(beg) && !__asan::AddressIsPoisoned(end - 1) &&
       (shadow_end <= shadow_beg ||
@@ -285,7 +285,7 @@ uptr __asan_load_cxx_array_cookie(uptr *p) {
 // assumes that left border of region to be poisoned is properly aligned.
 static void PoisonAlignedStackMemory(uptr addr, uptr size, bool do_poison) {
   if (size == 0) return;
-  uptr aligned_size = size & ~(SHADOW_GRANULARITY - 1);
+  uptr aligned_size = size & ~(ASAN_SHADOW_GRANULARITY - 1);
   PoisonShadow(addr, aligned_size,
                do_poison ? kAsanStackUseAfterScopeMagic : 0);
   if (size == aligned_size)
@@ -351,7 +351,7 @@ void __sanitizer_annotate_contiguous_container(const void *beg_p,
   uptr end = reinterpret_cast<uptr>(end_p);
   uptr old_mid = reinterpret_cast<uptr>(old_mid_p);
   uptr new_mid = reinterpret_cast<uptr>(new_mid_p);
-  uptr granularity = SHADOW_GRANULARITY;
+  uptr granularity = ASAN_SHADOW_GRANULARITY;
   if (!(beg <= old_mid && beg <= new_mid && old_mid <= end && new_mid <= end &&
         IsAligned(beg, granularity))) {
     GET_STACK_TRACE_FATAL_HERE;


@@ -44,8 +44,8 @@ ALWAYS_INLINE void FastPoisonShadow(uptr aligned_beg, uptr aligned_size,
                        common_flags()->clear_shadow_mmap_threshold);
 #else
   uptr shadow_beg = MEM_TO_SHADOW(aligned_beg);
-  uptr shadow_end = MEM_TO_SHADOW(
-      aligned_beg + aligned_size - SHADOW_GRANULARITY) + 1;
+  uptr shadow_end =
+      MEM_TO_SHADOW(aligned_beg + aligned_size - ASAN_SHADOW_GRANULARITY) + 1;
   // FIXME: Page states are different on Windows, so using the same interface
   // for mapping shadow and zeroing out pages doesn't "just work", so we should
   // probably provide higher-level interface for these operations.
@@ -78,11 +78,12 @@ ALWAYS_INLINE void FastPoisonShadowPartialRightRedzone(
   DCHECK(CanPoisonMemory());
   bool poison_partial = flags()->poison_partial;
   u8 *shadow = (u8*)MEM_TO_SHADOW(aligned_addr);
-  for (uptr i = 0; i < redzone_size; i += SHADOW_GRANULARITY, shadow++) {
-    if (i + SHADOW_GRANULARITY <= size) {
+  for (uptr i = 0; i < redzone_size; i += ASAN_SHADOW_GRANULARITY, shadow++) {
+    if (i + ASAN_SHADOW_GRANULARITY <= size) {
       *shadow = 0;  // fully addressable
     } else if (i >= size) {
-      *shadow = (SHADOW_GRANULARITY == 128) ? 0xff : value;  // unaddressable
+      *shadow =
+          (ASAN_SHADOW_GRANULARITY == 128) ? 0xff : value;  // unaddressable
     } else {
       // first size-i bytes are addressable
       *shadow = poison_partial ? static_cast<u8>(size - i) : 0;

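For context on FastPoisonShadowPartialRightRedzone above: each shadow byte covers one granule; 0 means fully addressable, a value in 1..7 means that many leading bytes are addressable, and granules past the object get the magic byte. A minimal sketch of the loop's encoding, with illustrative sizes, the poison_partial flag omitted, and 0xfa standing in for a redzone magic value:

    #include <cstdint>
    #include <cstdio>

    int main() {
      const uint64_t kGranularity = 8;   // ASAN_SHADOW_GRANULARITY
      const uint64_t size = 13;          // object occupies 13 of the 24 bytes
      const uint64_t redzone_size = 24;  // granule-aligned region being poisoned
      const uint8_t value = 0xfa;        // stand-in redzone magic

      for (uint64_t i = 0; i < redzone_size; i += kGranularity) {
        uint8_t shadow;
        if (i + kGranularity <= size)
          shadow = 0;                     // fully addressable granule
        else if (i >= size)
          shadow = value;                 // unaddressable granule
        else
          shadow = (uint8_t)(size - i);   // first size-i bytes addressable
        printf("granule %llu: shadow 0x%02x\n",
               (unsigned long long)(i / kGranularity), shadow);
      }
      // Prints: granule 0: 0x00, granule 1: 0x05, granule 2: 0xfa.
    }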

@@ -146,11 +146,11 @@ ASAN_REPORT_ERROR_N(store, true)

 #define ASAN_MEMORY_ACCESS_CALLBACK_BODY(type, is_write, size, exp_arg, fatal) \
   uptr sp = MEM_TO_SHADOW(addr);                                               \
-  uptr s = size <= SHADOW_GRANULARITY ? *reinterpret_cast<u8 *>(sp)            \
-                                      : *reinterpret_cast<u16 *>(sp);          \
+  uptr s = size <= ASAN_SHADOW_GRANULARITY ? *reinterpret_cast<u8 *>(sp)       \
+                                           : *reinterpret_cast<u16 *>(sp);     \
   if (UNLIKELY(s)) {                                                           \
-    if (UNLIKELY(size >= SHADOW_GRANULARITY ||                                 \
-                 ((s8)((addr & (SHADOW_GRANULARITY - 1)) + size - 1)) >=       \
+    if (UNLIKELY(size >= ASAN_SHADOW_GRANULARITY ||                            \
+                 ((s8)((addr & (ASAN_SHADOW_GRANULARITY - 1)) + size - 1)) >=  \
                      (s8)s)) {                                                 \
       ReportGenericErrorWrapper(addr, is_write, size, exp_arg, fatal);         \
     }                                                                          \
@@ -309,7 +309,7 @@ static void InitializeHighMemEnd() {
   kHighMemEnd = GetMaxUserVirtualAddress();
   // Increase kHighMemEnd to make sure it's properly
   // aligned together with kHighMemBeg:
-  kHighMemEnd |= (GetMmapGranularity() << SHADOW_SCALE) - 1;
+  kHighMemEnd |= (GetMmapGranularity() << ASAN_SHADOW_SCALE) - 1;
 #endif  // !ASAN_FIXED_MAPPING
   CHECK_EQ((kHighMemBeg % GetMmapGranularity()), 0);
 }
@@ -361,10 +361,10 @@ void PrintAddressSpaceLayout() {
   Printf("malloc_context_size=%zu\n",
          (uptr)common_flags()->malloc_context_size);

-  Printf("SHADOW_SCALE: %d\n", (int)SHADOW_SCALE);
-  Printf("SHADOW_GRANULARITY: %d\n", (int)SHADOW_GRANULARITY);
-  Printf("SHADOW_OFFSET: 0x%zx\n", (uptr)SHADOW_OFFSET);
-  CHECK(SHADOW_SCALE >= 3 && SHADOW_SCALE <= 7);
+  Printf("SHADOW_SCALE: %d\n", (int)ASAN_SHADOW_SCALE);
+  Printf("SHADOW_GRANULARITY: %d\n", (int)ASAN_SHADOW_GRANULARITY);
+  Printf("SHADOW_OFFSET: 0x%zx\n", (uptr)ASAN_SHADOW_OFFSET);
+  CHECK(ASAN_SHADOW_SCALE >= 3 && ASAN_SHADOW_SCALE <= 7);
   if (kMidMemBeg)
     CHECK(kMidShadowBeg > kLowShadowEnd &&
           kMidMemBeg > kMidShadowEnd &&
@@ -421,7 +421,7 @@ static void AsanInitInternal() {
   MaybeReexec();

   // Setup internal allocator callback.
-  SetLowLevelAllocateMinAlignment(SHADOW_GRANULARITY);
+  SetLowLevelAllocateMinAlignment(ASAN_SHADOW_GRANULARITY);
   SetLowLevelAllocateCallback(OnLowLevelAllocate);

   InitializeAsanInterceptors();
@@ -539,7 +539,7 @@ void UnpoisonStack(uptr bottom, uptr top, const char *type) {
            top - bottom);
     return;
   }
-  PoisonShadow(bottom, RoundUpTo(top - bottom, SHADOW_GRANULARITY), 0);
+  PoisonShadow(bottom, RoundUpTo(top - bottom, ASAN_SHADOW_GRANULARITY), 0);
 }

 static void UnpoisonDefaultStack() {


@@ -305,7 +305,7 @@ void AsanThread::SetThreadStackAndTls(const InitOptions *options) {
   uptr stack_size = 0;
   GetThreadStackAndTls(tid() == kMainTid, &stack_bottom_, &stack_size,
                        &tls_begin_, &tls_size);
-  stack_top_ = RoundDownTo(stack_bottom_ + stack_size, SHADOW_GRANULARITY);
+  stack_top_ = RoundDownTo(stack_bottom_ + stack_size, ASAN_SHADOW_GRANULARITY);
   tls_end_ = tls_begin_ + tls_size;
   dtls_ = DTLS_Get();
@@ -321,8 +321,8 @@ void AsanThread::ClearShadowForThreadStackAndTLS() {
   if (stack_top_ != stack_bottom_)
     PoisonShadow(stack_bottom_, stack_top_ - stack_bottom_, 0);
   if (tls_begin_ != tls_end_) {
-    uptr tls_begin_aligned = RoundDownTo(tls_begin_, SHADOW_GRANULARITY);
-    uptr tls_end_aligned = RoundUpTo(tls_end_, SHADOW_GRANULARITY);
+    uptr tls_begin_aligned = RoundDownTo(tls_begin_, ASAN_SHADOW_GRANULARITY);
+    uptr tls_end_aligned = RoundUpTo(tls_end_, ASAN_SHADOW_GRANULARITY);
     FastPoisonShadowPartialRightRedzone(tls_begin_aligned,
                                         tls_end_ - tls_begin_aligned,
                                         tls_end_aligned - tls_end_, 0);
@@ -346,27 +346,27 @@ bool AsanThread::GetStackFrameAccessByAddr(uptr addr,
     return true;
   }
   uptr aligned_addr = RoundDownTo(addr, SANITIZER_WORDSIZE / 8);  // align addr.
-  uptr mem_ptr = RoundDownTo(aligned_addr, SHADOW_GRANULARITY);
+  uptr mem_ptr = RoundDownTo(aligned_addr, ASAN_SHADOW_GRANULARITY);
   u8 *shadow_ptr = (u8*)MemToShadow(aligned_addr);
   u8 *shadow_bottom = (u8*)MemToShadow(bottom);

   while (shadow_ptr >= shadow_bottom &&
          *shadow_ptr != kAsanStackLeftRedzoneMagic) {
     shadow_ptr--;
-    mem_ptr -= SHADOW_GRANULARITY;
+    mem_ptr -= ASAN_SHADOW_GRANULARITY;
   }

   while (shadow_ptr >= shadow_bottom &&
          *shadow_ptr == kAsanStackLeftRedzoneMagic) {
     shadow_ptr--;
-    mem_ptr -= SHADOW_GRANULARITY;
+    mem_ptr -= ASAN_SHADOW_GRANULARITY;
   }

   if (shadow_ptr < shadow_bottom) {
     return false;
   }

-  uptr* ptr = (uptr*)(mem_ptr + SHADOW_GRANULARITY);
+  uptr *ptr = (uptr *)(mem_ptr + ASAN_SHADOW_GRANULARITY);
   CHECK(ptr[0] == kCurrentStackFrameMagic);
   access->offset = addr - (uptr)ptr;
   access->frame_pc = ptr[2];