Path: blob/main/contrib/llvm-project/libcxx/include/__atomic/atomic_ref.h
// -*- C++ -*-
//===----------------------------------------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//                        Kokkos v. 4.0
//       Copyright (2022) National Technology & Engineering
//               Solutions of Sandia, LLC (NTESS).
//
// Under the terms of Contract DE-NA0003525 with NTESS,
// the U.S. Government retains certain rights in this software.
//
//===---------------------------------------------------------------------===//

#ifndef _LIBCPP___ATOMIC_ATOMIC_REF_H
#define _LIBCPP___ATOMIC_ATOMIC_REF_H

#include <__assert>
#include <__atomic/atomic_sync.h>
#include <__atomic/check_memory_order.h>
#include <__atomic/to_gcc_order.h>
#include <__concepts/arithmetic.h>
#include <__concepts/same_as.h>
#include <__config>
#include <__memory/addressof.h>
#include <__type_traits/has_unique_object_representation.h>
#include <__type_traits/is_trivially_copyable.h>
#include <cstddef>
#include <cstdint>
#include <cstring>

#if !defined(_LIBCPP_HAS_NO_PRAGMA_SYSTEM_HEADER)
#  pragma GCC system_header
#endif

_LIBCPP_PUSH_MACROS
#include <__undef_macros>

_LIBCPP_BEGIN_NAMESPACE_STD

#if _LIBCPP_STD_VER >= 20

// These types are required to make __atomic_is_always_lock_free work across GCC and Clang.
// The purpose of this trick is to make sure that we provide an object with the correct alignment
// to __atomic_is_always_lock_free, since that answer depends on the alignment.
template <size_t _Alignment>
struct __alignment_checker_type {
  alignas(_Alignment) char __data;
};

template <size_t _Alignment>
struct __get_aligner_instance {
  static constexpr __alignment_checker_type<_Alignment> __instance{};
};
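
// Illustrative sketch (hypothetical, not part of libc++): the lock-freedom answer depends on
// the static alignment of the pointer passed to the builtin, which is exactly what the aligner
// instance above provides. Assuming a typical 64-bit target where naturally-aligned 8-byte
// objects are lock-free, one would expect:
//
//   static_assert(__atomic_always_lock_free(
//       sizeof(long long), &__get_aligner_instance<alignof(long long)>::__instance));
//
// Without a suitably aligned fake object, the builtin would have to assume a worst-case
// alignment and could conservatively answer false for the same size.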

template <class _Tp>
struct __atomic_ref_base {
private:
  _LIBCPP_HIDE_FROM_ABI static _Tp* __clear_padding(_Tp& __val) noexcept {
    _Tp* __ptr = std::addressof(__val);
#  if __has_builtin(__builtin_clear_padding)
    __builtin_clear_padding(__ptr);
#  endif
    return __ptr;
  }

  _LIBCPP_HIDE_FROM_ABI static bool __compare_exchange(
      _Tp* __ptr, _Tp* __expected, _Tp* __desired, bool __is_weak, int __success, int __failure) noexcept {
    if constexpr (
#  if __has_builtin(__builtin_clear_padding)
        has_unique_object_representations_v<_Tp> || floating_point<_Tp>
#  else
        true // NOLINT(readability-simplify-boolean-expr)
#  endif
    ) {
      return __atomic_compare_exchange(__ptr, __expected, __desired, __is_weak, __success, __failure);
    } else { // _Tp has padding bits and __builtin_clear_padding is available
      __clear_padding(*__desired);
      _Tp __copy = *__expected;
      __clear_padding(__copy);
      // The algorithm we use here is basically to perform `__atomic_compare_exchange` on the
      // values until it has either succeeded, or failed because the value representation of the
      // objects involved was different. This is why we loop around __atomic_compare_exchange:
      // we basically loop until its failure is caused by the value representation of the objects
      // being different, not only their object representation.
      while (true) {
        _Tp __prev = __copy;
        if (__atomic_compare_exchange(__ptr, std::addressof(__copy), __desired, __is_weak, __success, __failure)) {
          return true;
        }
        _Tp __curr = __copy;
        if (std::memcmp(__clear_padding(__prev), __clear_padding(__curr), sizeof(_Tp)) != 0) {
          // The value representations (without padding bits) do not compare equal ->
          // write the current content of *__ptr into *__expected
          std::memcpy(__expected, std::addressof(__copy), sizeof(_Tp));
          return false;
        }
      }
    }
  }
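
  // Illustrative sketch (hypothetical, not part of libc++): a type for which the padding-aware
  // loop above is needed. Two objects of
  //
  //   struct _PaddedPair { char __c; /* 3 padding bytes on common ABIs */ int __i; };
  //
  // can be equal value-wise while their padding bytes differ, so a single raw
  // __atomic_compare_exchange (which compares whole object representations byte-by-byte) could
  // report failure even though the cleared-padding values match. The loop above retries until
  // the failure is attributable to the value representation alone.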

  friend struct __atomic_waitable_traits<__atomic_ref_base<_Tp>>;

  // require types that are 1, 2, 4, 8, or 16 bytes in length to be aligned to at least their size to be potentially
  // used lock-free
  static constexpr size_t __min_alignment = (sizeof(_Tp) & (sizeof(_Tp) - 1)) || (sizeof(_Tp) > 16) ? 0 : sizeof(_Tp);

public:
  using value_type = _Tp;

  static constexpr size_t required_alignment = alignof(_Tp) > __min_alignment ? alignof(_Tp) : __min_alignment;

  // The __atomic_always_lock_free builtin takes into account the alignment of the pointer if provided,
  // so we create a fake pointer with a suitable alignment when querying it. Note that we are guaranteed
  // that the pointer is going to be aligned properly at runtime because that is a (checked) precondition
  // of atomic_ref's constructor.
  static constexpr bool is_always_lock_free =
      __atomic_always_lock_free(sizeof(_Tp), &__get_aligner_instance<required_alignment>::__instance);

  _LIBCPP_HIDE_FROM_ABI bool is_lock_free() const noexcept { return __atomic_is_lock_free(sizeof(_Tp), __ptr_); }

  _LIBCPP_HIDE_FROM_ABI void store(_Tp __desired, memory_order __order = memory_order::seq_cst) const noexcept
      _LIBCPP_CHECK_STORE_MEMORY_ORDER(__order) {
    _LIBCPP_ASSERT_ARGUMENT_WITHIN_DOMAIN(
        __order == memory_order::relaxed || __order == memory_order::release || __order == memory_order::seq_cst,
        "atomic_ref: memory order argument to atomic store operation is invalid");
    __atomic_store(__ptr_, __clear_padding(__desired), std::__to_gcc_order(__order));
  }

  _LIBCPP_HIDE_FROM_ABI _Tp operator=(_Tp __desired) const noexcept {
    store(__desired);
    return __desired;
  }

  _LIBCPP_HIDE_FROM_ABI _Tp load(memory_order __order = memory_order::seq_cst) const noexcept
      _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__order) {
    _LIBCPP_ASSERT_ARGUMENT_WITHIN_DOMAIN(
        __order == memory_order::relaxed || __order == memory_order::consume || __order == memory_order::acquire ||
            __order == memory_order::seq_cst,
        "atomic_ref: memory order argument to atomic load operation is invalid");
    alignas(_Tp) byte __mem[sizeof(_Tp)];
    auto* __ret = reinterpret_cast<_Tp*>(__mem);
    __atomic_load(__ptr_, __ret, std::__to_gcc_order(__order));
    return *__ret;
  }

  _LIBCPP_HIDE_FROM_ABI operator _Tp() const noexcept { return load(); }

  _LIBCPP_HIDE_FROM_ABI _Tp exchange(_Tp __desired, memory_order __order = memory_order::seq_cst) const noexcept {
    alignas(_Tp) byte __mem[sizeof(_Tp)];
    auto* __ret = reinterpret_cast<_Tp*>(__mem);
    __atomic_exchange(__ptr_, __clear_padding(__desired), __ret, std::__to_gcc_order(__order));
    return *__ret;
  }

  _LIBCPP_HIDE_FROM_ABI bool
  compare_exchange_weak(_Tp& __expected, _Tp __desired, memory_order __success, memory_order __failure) const noexcept
      _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__success, __failure) {
    _LIBCPP_ASSERT_ARGUMENT_WITHIN_DOMAIN(
        __failure == memory_order::relaxed || __failure == memory_order::consume ||
            __failure == memory_order::acquire || __failure == memory_order::seq_cst,
        "atomic_ref: failure memory order argument to weak atomic compare-and-exchange operation is invalid");
    return __compare_exchange(
        __ptr_,
        std::addressof(__expected),
        std::addressof(__desired),
        true,
        std::__to_gcc_order(__success),
        std::__to_gcc_order(__failure));
  }
  _LIBCPP_HIDE_FROM_ABI bool
  compare_exchange_strong(_Tp& __expected, _Tp __desired, memory_order __success, memory_order __failure) const noexcept
      _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__success, __failure) {
    _LIBCPP_ASSERT_ARGUMENT_WITHIN_DOMAIN(
        __failure == memory_order::relaxed || __failure == memory_order::consume ||
            __failure == memory_order::acquire || __failure == memory_order::seq_cst,
        "atomic_ref: failure memory order argument to strong atomic compare-and-exchange operation is invalid");
    return __compare_exchange(
        __ptr_,
        std::addressof(__expected),
        std::addressof(__desired),
        false,
        std::__to_gcc_order(__success),
        std::__to_gcc_order(__failure));
  }

  _LIBCPP_HIDE_FROM_ABI bool
  compare_exchange_weak(_Tp& __expected, _Tp __desired, memory_order __order = memory_order::seq_cst) const noexcept {
    return __compare_exchange(
        __ptr_,
        std::addressof(__expected),
        std::addressof(__desired),
        true,
        std::__to_gcc_order(__order),
        std::__to_gcc_failure_order(__order));
  }
  _LIBCPP_HIDE_FROM_ABI bool
  compare_exchange_strong(_Tp& __expected, _Tp __desired, memory_order __order = memory_order::seq_cst) const noexcept {
    return __compare_exchange(
        __ptr_,
        std::addressof(__expected),
        std::addressof(__desired),
        false,
        std::__to_gcc_order(__order),
        std::__to_gcc_failure_order(__order));
  }

  _LIBCPP_HIDE_FROM_ABI void wait(_Tp __old, memory_order __order = memory_order::seq_cst) const noexcept
      _LIBCPP_CHECK_WAIT_MEMORY_ORDER(__order) {
    _LIBCPP_ASSERT_ARGUMENT_WITHIN_DOMAIN(
        __order == memory_order::relaxed || __order == memory_order::consume || __order == memory_order::acquire ||
            __order == memory_order::seq_cst,
        "atomic_ref: memory order argument to atomic wait operation is invalid");
    std::__atomic_wait(*this, __old, __order);
  }
  _LIBCPP_HIDE_FROM_ABI void notify_one() const noexcept { std::__atomic_notify_one(*this); }
  _LIBCPP_HIDE_FROM_ABI void notify_all() const noexcept { std::__atomic_notify_all(*this); }

protected:
  typedef _Tp _Aligned_Tp __attribute__((aligned(required_alignment)));
  _Aligned_Tp* __ptr_;

  _LIBCPP_HIDE_FROM_ABI __atomic_ref_base(_Tp& __obj) : __ptr_(std::addressof(__obj)) {}
};

template <class _Tp>
struct __atomic_waitable_traits<__atomic_ref_base<_Tp>> {
  static _LIBCPP_HIDE_FROM_ABI _Tp __atomic_load(const __atomic_ref_base<_Tp>& __a, memory_order __order) {
    return __a.load(__order);
  }
  static _LIBCPP_HIDE_FROM_ABI const _Tp* __atomic_contention_address(const __atomic_ref_base<_Tp>& __a) {
    return __a.__ptr_;
  }
};

template <class _Tp>
struct atomic_ref : public __atomic_ref_base<_Tp> {
  static_assert(is_trivially_copyable_v<_Tp>, "std::atomic_ref<T> requires that 'T' be a trivially copyable type");

  using __base = __atomic_ref_base<_Tp>;

  _LIBCPP_HIDE_FROM_ABI explicit atomic_ref(_Tp& __obj) : __base(__obj) {
    _LIBCPP_ASSERT_ARGUMENT_WITHIN_DOMAIN(
        reinterpret_cast<uintptr_t>(std::addressof(__obj)) % __base::required_alignment == 0,
        "atomic_ref ctor: referenced object must be aligned to required_alignment");
  }

  _LIBCPP_HIDE_FROM_ABI atomic_ref(const atomic_ref&) noexcept = default;

  _LIBCPP_HIDE_FROM_ABI _Tp operator=(_Tp __desired) const noexcept { return __base::operator=(__desired); }

  atomic_ref& operator=(const atomic_ref&) = delete;
};
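
// Illustrative usage sketch (hypothetical, not part of libc++): atomic_ref layers atomic
// operations over an existing, suitably aligned object, assuming C++20:
//
//   alignas(std::atomic_ref<int>::required_alignment) int __counter = 0;
//   std::atomic_ref<int> __ref(__counter);
//   __ref.store(42);
//   int __seen = __ref.load(std::memory_order_acquire);
//
// While any atomic_ref to __counter exists, all concurrent access to it must go through
// atomic_ref instances; afterwards the object can be used non-atomically again.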

template <class _Tp>
  requires(std::integral<_Tp> && !std::same_as<bool, _Tp>)
struct atomic_ref<_Tp> : public __atomic_ref_base<_Tp> {
  using __base = __atomic_ref_base<_Tp>;

  using difference_type = typename __base::value_type;

  _LIBCPP_HIDE_FROM_ABI explicit atomic_ref(_Tp& __obj) : __base(__obj) {
    _LIBCPP_ASSERT_ARGUMENT_WITHIN_DOMAIN(
        reinterpret_cast<uintptr_t>(std::addressof(__obj)) % __base::required_alignment == 0,
        "atomic_ref ctor: referenced object must be aligned to required_alignment");
  }

  _LIBCPP_HIDE_FROM_ABI atomic_ref(const atomic_ref&) noexcept = default;

  _LIBCPP_HIDE_FROM_ABI _Tp operator=(_Tp __desired) const noexcept { return __base::operator=(__desired); }

  atomic_ref& operator=(const atomic_ref&) = delete;

  _LIBCPP_HIDE_FROM_ABI _Tp fetch_add(_Tp __arg, memory_order __order = memory_order_seq_cst) const noexcept {
    return __atomic_fetch_add(this->__ptr_, __arg, std::__to_gcc_order(__order));
  }
  _LIBCPP_HIDE_FROM_ABI _Tp fetch_sub(_Tp __arg, memory_order __order = memory_order_seq_cst) const noexcept {
    return __atomic_fetch_sub(this->__ptr_, __arg, std::__to_gcc_order(__order));
  }
  _LIBCPP_HIDE_FROM_ABI _Tp fetch_and(_Tp __arg, memory_order __order = memory_order_seq_cst) const noexcept {
    return __atomic_fetch_and(this->__ptr_, __arg, std::__to_gcc_order(__order));
  }
  _LIBCPP_HIDE_FROM_ABI _Tp fetch_or(_Tp __arg, memory_order __order = memory_order_seq_cst) const noexcept {
    return __atomic_fetch_or(this->__ptr_, __arg, std::__to_gcc_order(__order));
  }
  _LIBCPP_HIDE_FROM_ABI _Tp fetch_xor(_Tp __arg, memory_order __order = memory_order_seq_cst) const noexcept {
    return __atomic_fetch_xor(this->__ptr_, __arg, std::__to_gcc_order(__order));
  }

  _LIBCPP_HIDE_FROM_ABI _Tp operator++(int) const noexcept { return fetch_add(_Tp(1)); }
  _LIBCPP_HIDE_FROM_ABI _Tp operator--(int) const noexcept { return fetch_sub(_Tp(1)); }
  _LIBCPP_HIDE_FROM_ABI _Tp operator++() const noexcept { return fetch_add(_Tp(1)) + _Tp(1); }
  _LIBCPP_HIDE_FROM_ABI _Tp operator--() const noexcept { return fetch_sub(_Tp(1)) - _Tp(1); }
  _LIBCPP_HIDE_FROM_ABI _Tp operator+=(_Tp __arg) const noexcept { return fetch_add(__arg) + __arg; }
  _LIBCPP_HIDE_FROM_ABI _Tp operator-=(_Tp __arg) const noexcept { return fetch_sub(__arg) - __arg; }
  _LIBCPP_HIDE_FROM_ABI _Tp operator&=(_Tp __arg) const noexcept { return fetch_and(__arg) & __arg; }
  _LIBCPP_HIDE_FROM_ABI _Tp operator|=(_Tp __arg) const noexcept { return fetch_or(__arg) | __arg; }
  _LIBCPP_HIDE_FROM_ABI _Tp operator^=(_Tp __arg) const noexcept { return fetch_xor(__arg) ^ __arg; }
};

template <class _Tp>
  requires std::floating_point<_Tp>
struct atomic_ref<_Tp> : public __atomic_ref_base<_Tp> {
  using __base = __atomic_ref_base<_Tp>;

  using difference_type = typename __base::value_type;

  _LIBCPP_HIDE_FROM_ABI explicit atomic_ref(_Tp& __obj) : __base(__obj) {
    _LIBCPP_ASSERT_ARGUMENT_WITHIN_DOMAIN(
        reinterpret_cast<uintptr_t>(std::addressof(__obj)) % __base::required_alignment == 0,
        "atomic_ref ctor: referenced object must be aligned to required_alignment");
  }

  _LIBCPP_HIDE_FROM_ABI atomic_ref(const atomic_ref&) noexcept = default;

  _LIBCPP_HIDE_FROM_ABI _Tp operator=(_Tp __desired) const noexcept { return __base::operator=(__desired); }

  atomic_ref& operator=(const atomic_ref&) = delete;

  _LIBCPP_HIDE_FROM_ABI _Tp fetch_add(_Tp __arg, memory_order __order = memory_order_seq_cst) const noexcept {
    _Tp __old = this->load(memory_order_relaxed);
    _Tp __new = __old + __arg;
    while (!this->compare_exchange_weak(__old, __new, __order, memory_order_relaxed)) {
      __new = __old + __arg;
    }
    return __old;
  }
  _LIBCPP_HIDE_FROM_ABI _Tp fetch_sub(_Tp __arg, memory_order __order = memory_order_seq_cst) const noexcept {
    _Tp __old = this->load(memory_order_relaxed);
    _Tp __new = __old - __arg;
    while (!this->compare_exchange_weak(__old, __new, __order, memory_order_relaxed)) {
      __new = __old - __arg;
    }
    return __old;
  }

  _LIBCPP_HIDE_FROM_ABI _Tp operator+=(_Tp __arg) const noexcept { return fetch_add(__arg) + __arg; }
  _LIBCPP_HIDE_FROM_ABI _Tp operator-=(_Tp __arg) const noexcept { return fetch_sub(__arg) - __arg; }
};
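
// Illustrative note (hypothetical, not part of libc++): unlike the integral specialization,
// which maps fetch_add directly onto __atomic_fetch_add, the floating-point fetch_add above is
// a compare-exchange loop:
//
//   double __total = 0.0;
//   std::atomic_ref<double> __ref(__total);
//   __ref.fetch_add(0.5); // retried whenever another thread updates __total mid-operation
//
// On failure, compare_exchange_weak reloads __old with the current value, so each retry
// recomputes __old + __arg from fresh data.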

template <class _Tp>
struct atomic_ref<_Tp*> : public __atomic_ref_base<_Tp*> {
  using __base = __atomic_ref_base<_Tp*>;

  using difference_type = ptrdiff_t;

  _LIBCPP_HIDE_FROM_ABI explicit atomic_ref(_Tp*& __ptr) : __base(__ptr) {}

  _LIBCPP_HIDE_FROM_ABI _Tp* operator=(_Tp* __desired) const noexcept { return __base::operator=(__desired); }

  atomic_ref& operator=(const atomic_ref&) = delete;

  _LIBCPP_HIDE_FROM_ABI _Tp* fetch_add(ptrdiff_t __arg, memory_order __order = memory_order_seq_cst) const noexcept {
    return __atomic_fetch_add(this->__ptr_, __arg * sizeof(_Tp), std::__to_gcc_order(__order));
  }
  _LIBCPP_HIDE_FROM_ABI _Tp* fetch_sub(ptrdiff_t __arg, memory_order __order = memory_order_seq_cst) const noexcept {
    return __atomic_fetch_sub(this->__ptr_, __arg * sizeof(_Tp), std::__to_gcc_order(__order));
  }

  _LIBCPP_HIDE_FROM_ABI _Tp* operator++(int) const noexcept { return fetch_add(1); }
  _LIBCPP_HIDE_FROM_ABI _Tp* operator--(int) const noexcept { return fetch_sub(1); }
  _LIBCPP_HIDE_FROM_ABI _Tp* operator++() const noexcept { return fetch_add(1) + 1; }
  _LIBCPP_HIDE_FROM_ABI _Tp* operator--() const noexcept { return fetch_sub(1) - 1; }
  _LIBCPP_HIDE_FROM_ABI _Tp* operator+=(ptrdiff_t __arg) const noexcept { return fetch_add(__arg) + __arg; }
  _LIBCPP_HIDE_FROM_ABI _Tp* operator-=(ptrdiff_t __arg) const noexcept { return fetch_sub(__arg) - __arg; }
};

_LIBCPP_CTAD_SUPPORTED_FOR_TYPE(atomic_ref);

#endif // _LIBCPP_STD_VER >= 20

_LIBCPP_END_NAMESPACE_STD

_LIBCPP_POP_MACROS

#endif // _LIBCPP___ATOMIC_ATOMIC_REF_H