Path: blob/main/system/lib/libcxx/src/memory.cpp
//===----------------------------------------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//

#include <__config>
#ifdef _LIBCPP_DEPRECATED_ABI_LEGACY_LIBRARY_DEFINITIONS_FOR_INLINE_FUNCTIONS
#  define _LIBCPP_SHARED_PTR_DEFINE_LEGACY_INLINE_FUNCTIONS
#endif

#include <__functional/hash.h>
#include <memory>
#include <typeinfo>

#if _LIBCPP_HAS_THREADS
#  include <mutex>
#  include <thread>
#  if defined(__ELF__) && defined(_LIBCPP_LINK_PTHREAD_LIB)
#    pragma comment(lib, "pthread")
#  endif
#endif

#include "include/atomic_support.h"

_LIBCPP_BEGIN_NAMESPACE_STD

bad_weak_ptr::~bad_weak_ptr() noexcept {}

const char* bad_weak_ptr::what() const noexcept { return "bad_weak_ptr"; }

__shared_count::~__shared_count() {}

__shared_weak_count::~__shared_weak_count() {}

#if defined(_LIBCPP_SHARED_PTR_DEFINE_LEGACY_INLINE_FUNCTIONS)
void __shared_count::__add_shared() noexcept { __libcpp_atomic_refcount_increment(__shared_owners_); }

bool __shared_count::__release_shared() noexcept {
  if (__libcpp_atomic_refcount_decrement(__shared_owners_) == -1) {
    __on_zero_shared();
    return true;
  }
  return false;
}

void __shared_weak_count::__add_shared() noexcept { __shared_count::__add_shared(); }

void __shared_weak_count::__add_weak() noexcept { __libcpp_atomic_refcount_increment(__shared_weak_owners_); }

void __shared_weak_count::__release_shared() noexcept {
  if (__shared_count::__release_shared())
    __release_weak();
}
#endif // _LIBCPP_SHARED_PTR_DEFINE_LEGACY_INLINE_FUNCTIONS

void __shared_weak_count::__release_weak() noexcept {
  // NOTE: The acquire load here is an optimization of the very
  // common case where a shared pointer is being destructed while
  // having no other contended references.
  //
  // BENEFIT: We avoid expensive atomic stores like XADD and STREX
  // in a common case. Those instructions are slow and do nasty
  // things to caches.
  //
  // IS THIS SAFE? Yes. During weak destruction, if we see that we
  // are the last reference, we know that no-one else is accessing
  // us. If someone were accessing us, then they would be doing so
  // while the last shared / weak_ptr was being destructed, and
  // that's undefined anyway.
  //
  // If we see anything other than a 0, then we have possible
  // contention, and need to use an atomicrmw primitive.
  // The same arguments don't apply for increment, where it is legal
  // (though inadvisable) to share shared_ptr references between
  // threads, and have them all get copied at once. The argument
  // also doesn't apply for __release_shared, because an outstanding
  // weak_ptr::lock() could read / modify the shared count.
  if (__libcpp_atomic_load(&__shared_weak_owners_, _AO_Acquire) == 0) {
    // no need to do this store, because we are about
    // to destroy everything.
    //__libcpp_atomic_store(&__shared_weak_owners_, -1, _AO_Release);
    __on_zero_shared_weak();
  } else if (__libcpp_atomic_refcount_decrement(__shared_weak_owners_) == -1)
    __on_zero_shared_weak();
}

__shared_weak_count* __shared_weak_count::lock() noexcept {
  long object_owners = __libcpp_atomic_load(&__shared_owners_);
  while (object_owners != -1) {
    if (__libcpp_atomic_compare_exchange(&__shared_owners_, &object_owners, object_owners + 1))
      return this;
  }
  return nullptr;
}

const void* __shared_weak_count::__get_deleter(const type_info&) const noexcept { return nullptr; }

#if _LIBCPP_HAS_THREADS

static constexpr std::size_t __sp_mut_count = 32;
static constinit __libcpp_mutex_t mut_back[__sp_mut_count] = {
    _LIBCPP_MUTEX_INITIALIZER, _LIBCPP_MUTEX_INITIALIZER, _LIBCPP_MUTEX_INITIALIZER, _LIBCPP_MUTEX_INITIALIZER,
    _LIBCPP_MUTEX_INITIALIZER, _LIBCPP_MUTEX_INITIALIZER, _LIBCPP_MUTEX_INITIALIZER, _LIBCPP_MUTEX_INITIALIZER,
    _LIBCPP_MUTEX_INITIALIZER, _LIBCPP_MUTEX_INITIALIZER, _LIBCPP_MUTEX_INITIALIZER, _LIBCPP_MUTEX_INITIALIZER,
    _LIBCPP_MUTEX_INITIALIZER, _LIBCPP_MUTEX_INITIALIZER, _LIBCPP_MUTEX_INITIALIZER, _LIBCPP_MUTEX_INITIALIZER,
    _LIBCPP_MUTEX_INITIALIZER, _LIBCPP_MUTEX_INITIALIZER, _LIBCPP_MUTEX_INITIALIZER, _LIBCPP_MUTEX_INITIALIZER,
    _LIBCPP_MUTEX_INITIALIZER, _LIBCPP_MUTEX_INITIALIZER, _LIBCPP_MUTEX_INITIALIZER, _LIBCPP_MUTEX_INITIALIZER,
    _LIBCPP_MUTEX_INITIALIZER, _LIBCPP_MUTEX_INITIALIZER, _LIBCPP_MUTEX_INITIALIZER, _LIBCPP_MUTEX_INITIALIZER,
    _LIBCPP_MUTEX_INITIALIZER, _LIBCPP_MUTEX_INITIALIZER, _LIBCPP_MUTEX_INITIALIZER, _LIBCPP_MUTEX_INITIALIZER};

constexpr __sp_mut::__sp_mut(void* p) noexcept : __lx_(p) {}

void __sp_mut::lock() noexcept {
  auto m = static_cast<__libcpp_mutex_t*>(__lx_);
  __libcpp_mutex_lock(m);
}

void __sp_mut::unlock() noexcept { __libcpp_mutex_unlock(static_cast<__libcpp_mutex_t*>(__lx_)); }

__sp_mut& __get_sp_mut(const void* p) {
  static constinit __sp_mut muts[__sp_mut_count] = {
      &mut_back[0],  &mut_back[1],  &mut_back[2],  &mut_back[3],  &mut_back[4],  &mut_back[5],  &mut_back[6],
      &mut_back[7],  &mut_back[8],  &mut_back[9],  &mut_back[10], &mut_back[11], &mut_back[12], &mut_back[13],
      &mut_back[14], &mut_back[15], &mut_back[16], &mut_back[17], &mut_back[18], &mut_back[19], &mut_back[20],
      &mut_back[21], &mut_back[22], &mut_back[23], &mut_back[24], &mut_back[25], &mut_back[26], &mut_back[27],
      &mut_back[28], &mut_back[29], &mut_back[30], &mut_back[31]};
  return muts[hash<const void*>()(p) & (__sp_mut_count - 1)];
}

#endif // _LIBCPP_HAS_THREADS

void* align(size_t alignment, size_t size, void*& ptr, size_t& space) {
  void* r = nullptr;
  if (size <= space) {
    char* p1 = static_cast<char*>(ptr);
    char* p2 = reinterpret_cast<char*>(reinterpret_cast<uintptr_t>(p1 + (alignment - 1)) & -alignment);
    size_t d = static_cast<size_t>(p2 - p1);
    if (d <= space - size) {
      r   = p2;
      ptr = r;
      space -= d;
    }
  }
  return r;
}

_LIBCPP_END_NAMESPACE_STD
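
The CAS loop in __shared_weak_count::lock() above is what std::weak_ptr<T>::lock() ultimately relies on: it only bumps the strong count if the control block has not already expired (a stored value of -1 means zero remaining shared owners, since the count is stored offset by one). The following is a minimal, standalone usage sketch of that observable behavior, written against the public shared_ptr/weak_ptr API only; it is not part of this file.

#include <cassert>
#include <memory>

int main() {
  std::shared_ptr<int> sp = std::make_shared<int>(42);
  std::weak_ptr<int> wp   = sp;

  // While a shared owner still exists, lock() atomically increments the
  // strong count (in libc++ this goes through the __shared_weak_count::lock()
  // CAS loop above) and hands back a non-empty shared_ptr.
  if (std::shared_ptr<int> locked = wp.lock())
    assert(*locked == 42);

  sp.reset(); // drop the last strong reference

  // Once the strong count has reached zero, lock() observes the expired
  // state and returns an empty shared_ptr rather than reviving the object.
  assert(wp.expired());
  assert(wp.lock() == nullptr);
  return 0;
}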
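
The align() function at the end of the file is the out-of-line definition behind std::align: on success it advances the caller's pointer to the next alignment boundary and shrinks the remaining space by that adjustment; if the aligned object cannot fit, it returns nullptr and leaves both arguments untouched. A small caller-side sketch of that contract follows; the buffer size and the 16-byte request are arbitrary example values.

#include <cassert>
#include <cstddef>
#include <cstdint>
#include <memory>

int main() {
  alignas(64) char buffer[128];
  void* ptr         = buffer + 1;          // deliberately misaligned
  std::size_t space = sizeof(buffer) - 1;

  // Request 16 bytes aligned to 16: ptr is bumped to the next 16-byte
  // boundary and space shrinks by the adjustment, as computed above.
  void* result = std::align(16, 16, ptr, space);
  assert(result != nullptr);
  assert(reinterpret_cast<std::uintptr_t>(result) % 16 == 0);
  assert(result == ptr);

  // An aligned request that cannot fit fails: nullptr is returned and
  // ptr / space are left unchanged.
  void* too_big = std::align(16, 4096, ptr, space);
  assert(too_big == nullptr);
  return 0;
}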