Path: blob/main/contrib/llvm-project/compiler-rt/lib/asan/asan_fake_stack.cpp
//===-- asan_fake_stack.cpp -----------------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file is a part of AddressSanitizer, an address sanity checker.
//
// FakeStack is used to detect use-after-return bugs.
//===----------------------------------------------------------------------===//

#include "asan_allocator.h"
#include "asan_poisoning.h"
#include "asan_thread.h"

namespace __asan {

static const u64 kMagic1 = kAsanStackAfterReturnMagic;
static const u64 kMagic2 = (kMagic1 << 8) | kMagic1;
static const u64 kMagic4 = (kMagic2 << 16) | kMagic2;
static const u64 kMagic8 = (kMagic4 << 32) | kMagic4;

static const u64 kAllocaRedzoneSize = 32UL;
static const u64 kAllocaRedzoneMask = 31UL;

// For small size classes inline PoisonShadow for better performance.
ALWAYS_INLINE void SetShadow(uptr ptr, uptr size, uptr class_id, u64 magic) {
  u64 *shadow = reinterpret_cast<u64*>(MemToShadow(ptr));
  if (ASAN_SHADOW_SCALE == 3 && class_id <= 6) {
    // This code expects ASAN_SHADOW_SCALE=3.
    for (uptr i = 0; i < (((uptr)1) << class_id); i++) {
      shadow[i] = magic;
      // Make sure this does not become memset.
      SanitizerBreakOptimization(nullptr);
    }
  } else {
    // The size class is too big; it's cheaper to poison only size bytes.
    PoisonShadow(ptr, size, static_cast<u8>(magic));
  }
}

FakeStack *FakeStack::Create(uptr stack_size_log) {
  static uptr kMinStackSizeLog = 16;
  static uptr kMaxStackSizeLog = FIRST_32_SECOND_64(24, 28);
  if (stack_size_log < kMinStackSizeLog)
    stack_size_log = kMinStackSizeLog;
  if (stack_size_log > kMaxStackSizeLog)
    stack_size_log = kMaxStackSizeLog;
  uptr size = RequiredSize(stack_size_log);
  FakeStack *res = reinterpret_cast<FakeStack *>(
      flags()->uar_noreserve ? MmapNoReserveOrDie(size, "FakeStack")
                             : MmapOrDie(size, "FakeStack"));
  res->stack_size_log_ = stack_size_log;
  u8 *p = reinterpret_cast<u8 *>(res);
  VReport(1,
          "T%d: FakeStack created: %p -- %p stack_size_log: %zd; "
          "mmapped %zdK, noreserve=%d \n",
          GetCurrentTidOrInvalid(), (void *)p,
          (void *)(p + FakeStack::RequiredSize(stack_size_log)), stack_size_log,
          size >> 10, flags()->uar_noreserve);
  return res;
}

void FakeStack::Destroy(int tid) {
  PoisonAll(0);
  if (Verbosity() >= 2) {
    InternalScopedString str;
    for (uptr class_id = 0; class_id < kNumberOfSizeClasses; class_id++)
      str.AppendF("%zd: %zd/%zd; ", class_id, hint_position_[class_id],
                  NumberOfFrames(stack_size_log(), class_id));
    Report("T%d: FakeStack destroyed: %s\n", tid, str.data());
  }
  uptr size = RequiredSize(stack_size_log_);
  FlushUnneededASanShadowMemory(reinterpret_cast<uptr>(this), size);
  UnmapOrDie(this, size);
}

void FakeStack::PoisonAll(u8 magic) {
  PoisonShadow(reinterpret_cast<uptr>(this), RequiredSize(stack_size_log()),
               magic);
}

#if !defined(_MSC_VER) || defined(__clang__)
ALWAYS_INLINE USED
#endif
FakeFrame *FakeStack::Allocate(uptr stack_size_log, uptr class_id,
                               uptr real_stack) {
  CHECK_LT(class_id, kNumberOfSizeClasses);
  if (needs_gc_)
    GC(real_stack);
  uptr &hint_position = hint_position_[class_id];
  const int num_iter = NumberOfFrames(stack_size_log, class_id);
  u8 *flags = GetFlags(stack_size_log, class_id);
  for (int i = 0; i < num_iter; i++) {
    uptr pos = ModuloNumberOfFrames(stack_size_log, class_id, hint_position++);
    // This part is tricky. On one hand, checking and setting flags[pos]
    // should be atomic to ensure async-signal safety. But on the other hand,
    // if the signal arrives between checking and setting flags[pos], the
    // signal handler's fake stack will start from a different hint_position
    // and so will not touch this particular byte. So, it is safe to do this
    // with regular non-atomic load and store (at least I was not able to make
    // this code crash).
    if (flags[pos]) continue;
    flags[pos] = 1;
    FakeFrame *res = reinterpret_cast<FakeFrame *>(
        GetFrame(stack_size_log, class_id, pos));
    res->real_stack = real_stack;
    *SavedFlagPtr(reinterpret_cast<uptr>(res), class_id) = &flags[pos];
    return res;
  }
  return nullptr;  // We are out of fake stack.
}

uptr FakeStack::AddrIsInFakeStack(uptr ptr, uptr *frame_beg, uptr *frame_end) {
  uptr stack_size_log = this->stack_size_log();
  uptr beg = reinterpret_cast<uptr>(GetFrame(stack_size_log, 0, 0));
  uptr end = reinterpret_cast<uptr>(this) + RequiredSize(stack_size_log);
  if (ptr < beg || ptr >= end) return 0;
  uptr class_id = (ptr - beg) >> stack_size_log;
  uptr base = beg + (class_id << stack_size_log);
  CHECK_LE(base, ptr);
  CHECK_LT(ptr, base + (((uptr)1) << stack_size_log));
  uptr pos = (ptr - base) >> (kMinStackFrameSizeLog + class_id);
  uptr res = base + pos * BytesInSizeClass(class_id);
  *frame_end = res + BytesInSizeClass(class_id);
  *frame_beg = res + sizeof(FakeFrame);
  return res;
}

void FakeStack::HandleNoReturn() {
  needs_gc_ = true;
}

// Hack: The statement below is not true if we take into account sigaltstack or
// makecontext. It should be possible to make GC discard wrong stack frames if
// we use these tools. For now, let's support the simplest case and allow GC to
// discard only frames from the default stack, assuming there is no buffer on
// the stack which is used for makecontext or sigaltstack.
//
// When a throw, longjmp, or something similar happens, we don't call OnFree()
// and as a result may leak one or more fake frames, but the good news is that
// we are notified about all such events by HandleNoReturn().
// If we recently had such a no-return event, we need to collect garbage frames.
// We do it based on their 'real_stack' values -- everything that is lower
// than the current real_stack is garbage.
NOINLINE void FakeStack::GC(uptr real_stack) {
  AsanThread *curr_thread = GetCurrentThread();
  if (!curr_thread)
    return;  // Try again when we have a thread.
  auto top = curr_thread->stack_top();
  auto bottom = curr_thread->stack_bottom();
  if (real_stack < bottom || real_stack > top)
    return;  // Not the default stack.

  for (uptr class_id = 0; class_id < kNumberOfSizeClasses; class_id++) {
    u8 *flags = GetFlags(stack_size_log(), class_id);
    for (uptr i = 0, n = NumberOfFrames(stack_size_log(), class_id); i < n;
         i++) {
      if (flags[i] == 0) continue;  // not allocated.
      FakeFrame *ff = reinterpret_cast<FakeFrame *>(
          GetFrame(stack_size_log(), class_id, i));
      // GC only on the default stack.
      if (bottom < ff->real_stack && ff->real_stack < real_stack) {
        flags[i] = 0;
        // Poison the frame, so that any access will be reported as UAR.
        SetShadow(reinterpret_cast<uptr>(ff), BytesInSizeClass(class_id),
                  class_id, kMagic8);
      }
    }
  }
  needs_gc_ = false;
}

void FakeStack::ForEachFakeFrame(RangeIteratorCallback callback, void *arg) {
  for (uptr class_id = 0; class_id < kNumberOfSizeClasses; class_id++) {
    u8 *flags = GetFlags(stack_size_log(), class_id);
    for (uptr i = 0, n = NumberOfFrames(stack_size_log(), class_id); i < n;
         i++) {
      if (flags[i] == 0) continue;  // not allocated.
      FakeFrame *ff = reinterpret_cast<FakeFrame *>(
          GetFrame(stack_size_log(), class_id, i));
      uptr begin = reinterpret_cast<uptr>(ff);
      callback(begin, begin + FakeStack::BytesInSizeClass(class_id), arg);
    }
  }
}

#if (SANITIZER_LINUX && !SANITIZER_ANDROID) || SANITIZER_FUCHSIA
static THREADLOCAL FakeStack *fake_stack_tls;

FakeStack *GetTLSFakeStack() {
  return fake_stack_tls;
}
void SetTLSFakeStack(FakeStack *fs) {
  fake_stack_tls = fs;
}
#else
FakeStack *GetTLSFakeStack() { return 0; }
void SetTLSFakeStack(FakeStack *fs) { }
#endif  // (SANITIZER_LINUX && !SANITIZER_ANDROID) || SANITIZER_FUCHSIA

static FakeStack *GetFakeStack() {
  AsanThread *t = GetCurrentThread();
  if (!t) return nullptr;
  return t->get_or_create_fake_stack();
}

static FakeStack *GetFakeStackFast() {
  if (FakeStack *fs = GetTLSFakeStack())
    return fs;
  if (!__asan_option_detect_stack_use_after_return)
    return nullptr;
  return GetFakeStack();
}

static FakeStack *GetFakeStackFastAlways() {
  if (FakeStack *fs = GetTLSFakeStack())
    return fs;
  return GetFakeStack();
}

static ALWAYS_INLINE uptr OnMalloc(uptr class_id, uptr size) {
  FakeStack *fs = GetFakeStackFast();
  if (!fs)
    return 0;
  FakeFrame *ff =
      fs->Allocate(fs->stack_size_log(), class_id, GET_CURRENT_FRAME());
  if (!ff)
    return 0;  // Out of fake stack.
  uptr ptr = reinterpret_cast<uptr>(ff);
  SetShadow(ptr, size, class_id, 0);
  return ptr;
}

static ALWAYS_INLINE uptr OnMallocAlways(uptr class_id, uptr size) {
  FakeStack *fs = GetFakeStackFastAlways();
  if (!fs)
    return 0;
  FakeFrame *ff =
      fs->Allocate(fs->stack_size_log(), class_id, GET_CURRENT_FRAME());
  if (!ff)
    return 0;  // Out of fake stack.
  uptr ptr = reinterpret_cast<uptr>(ff);
  SetShadow(ptr, size, class_id, 0);
  return ptr;
}

static ALWAYS_INLINE void OnFree(uptr ptr, uptr class_id, uptr size) {
  FakeStack::Deallocate(ptr, class_id);
  SetShadow(ptr, size, class_id, kMagic8);
}

}  // namespace __asan

// ---------------------- Interface ---------------- {{{1
using namespace __asan;
#define DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(class_id)                       \
  extern "C" SANITIZER_INTERFACE_ATTRIBUTE uptr                                \
      __asan_stack_malloc_##class_id(uptr size) {                              \
    return OnMalloc(class_id, size);                                           \
  }                                                                            \
  extern "C" SANITIZER_INTERFACE_ATTRIBUTE uptr                                \
      __asan_stack_malloc_always_##class_id(uptr size) {                       \
    return OnMallocAlways(class_id, size);                                     \
  }                                                                            \
  extern "C" SANITIZER_INTERFACE_ATTRIBUTE void __asan_stack_free_##class_id(  \
      uptr ptr, uptr size) {                                                   \
    OnFree(ptr, class_id, size);                                               \
  }

DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(0)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(1)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(2)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(3)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(4)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(5)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(6)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(7)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(8)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(9)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(10)

extern "C" {
// TODO: remove this method and fix tests that use it by setting
// -asan-use-after-return=never, after modal UAR flag lands
// (https://github.com/google/sanitizers/issues/1394)
SANITIZER_INTERFACE_ATTRIBUTE
void *__asan_get_current_fake_stack() { return GetFakeStackFast(); }

SANITIZER_INTERFACE_ATTRIBUTE
void *__asan_addr_is_in_fake_stack(void *fake_stack, void *addr, void **beg,
                                   void **end) {
  FakeStack *fs = reinterpret_cast<FakeStack*>(fake_stack);
  if (!fs) return nullptr;
  uptr frame_beg, frame_end;
  FakeFrame *frame = reinterpret_cast<FakeFrame *>(fs->AddrIsInFakeStack(
      reinterpret_cast<uptr>(addr), &frame_beg, &frame_end));
  if (!frame) return nullptr;
  if (frame->magic != kCurrentStackFrameMagic)
    return nullptr;
  if (beg) *beg = reinterpret_cast<void*>(frame_beg);
  if (end) *end = reinterpret_cast<void*>(frame_end);
  return reinterpret_cast<void*>(frame->real_stack);
}

SANITIZER_INTERFACE_ATTRIBUTE
void __asan_alloca_poison(uptr addr, uptr size) {
  uptr LeftRedzoneAddr = addr - kAllocaRedzoneSize;
  uptr PartialRzAddr = addr + size;
  uptr RightRzAddr = (PartialRzAddr + kAllocaRedzoneMask) & ~kAllocaRedzoneMask;
  uptr PartialRzAligned = PartialRzAddr & ~(ASAN_SHADOW_GRANULARITY - 1);
  FastPoisonShadow(LeftRedzoneAddr, kAllocaRedzoneSize, kAsanAllocaLeftMagic);
  FastPoisonShadowPartialRightRedzone(
      PartialRzAligned, PartialRzAddr % ASAN_SHADOW_GRANULARITY,
      RightRzAddr - PartialRzAligned, kAsanAllocaRightMagic);
  FastPoisonShadow(RightRzAddr, kAllocaRedzoneSize, kAsanAllocaRightMagic);
}

SANITIZER_INTERFACE_ATTRIBUTE
void __asan_allocas_unpoison(uptr top, uptr bottom) {
  if ((!top) || (top > bottom)) return;
  REAL(memset)
  (reinterpret_cast<void *>(MemToShadow(top)), 0,
   (bottom - top) / ASAN_SHADOW_GRANULARITY);
}
}  // extern "C"
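The `__asan_get_current_fake_stack` and `__asan_addr_is_in_fake_stack` entry points defined above are part of ASan's public interface (declared in `<sanitizer/asan_interface.h>`) and are aimed at tools such as conservative garbage collectors that need to map a pointer into a fake frame back to the frame's bounds and to the real stack address saved at allocation time. Below is a minimal usage sketch, not part of this file: the helper and program are hypothetical, and it assumes the code is built with -fsanitize=address and run with use-after-return detection enabled.

// Usage sketch (illustrative only, not part of asan_fake_stack.cpp).
#include <sanitizer/asan_interface.h>

#include <cstdio>

// Hypothetical helper: report whether 'addr' lies on the current thread's
// fake stack and, if so, print the fake frame bounds and the corresponding
// real-stack address that was recorded when the fake frame was allocated.
static void DescribePossiblyFakeAddr(void *addr) {
  void *beg = nullptr, *end = nullptr;
  // May be null, e.g. if the thread has no fake stack or UAR detection is
  // off; __asan_addr_is_in_fake_stack tolerates a null fake stack.
  void *fake_stack = __asan_get_current_fake_stack();
  if (void *real_frame =
          __asan_addr_is_in_fake_stack(fake_stack, addr, &beg, &end)) {
    std::printf("%p is in fake frame [%p, %p), real frame at %p\n", addr, beg,
                end, real_frame);
  } else {
    std::printf("%p is not on the current fake stack\n", addr);
  }
}

int main() {
  int local = 0;
  // With ASAN_OPTIONS=detect_stack_use_after_return=1, an address-taken
  // local is typically relocated onto a fake frame, so the first branch
  // fires; otherwise the address is reported as not on the fake stack.
  DescribePossiblyFakeAddr(&local);
  return 0;
}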