/* Path: blob/master/Utilities/cmlibuv/src/unix/atomic-ops.h */
/* Copyright (c) 2013, Ben Noordhuis <[email protected]>1*2* Permission to use, copy, modify, and/or distribute this software for any3* purpose with or without fee is hereby granted, provided that the above4* copyright notice and this permission notice appear in all copies.5*6* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES7* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF8* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR9* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES10* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN11* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF12* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.13*/1415#ifndef UV_ATOMIC_OPS_H_16#define UV_ATOMIC_OPS_H_1718#include "internal.h" /* UV_UNUSED */1920#if defined(__SUNPRO_C) || defined(__SUNPRO_CC)21#include <atomic.h>22#endif2324UV_UNUSED(static int cmpxchgi(int* ptr, int oldval, int newval));25UV_UNUSED(static void cpu_relax(void));2627/* Prefer hand-rolled assembly over the gcc builtins because the latter also28* issue full memory barriers.29*/30UV_UNUSED(static int cmpxchgi(int* ptr, int oldval, int newval)) {31#if defined(__i386__) || defined(__x86_64__)32int out;33__asm__ __volatile__ ("lock; cmpxchg %2, %1;"34: "=a" (out), "+m" (*(volatile int*) ptr)35: "r" (newval), "0" (oldval)36: "memory");37return out;38#elif defined(_AIX) && defined(__ibmxl__)39/* FIXME: This is not actually atomic but XLClang 16.1 for AIX40does not provide __sync_val_compare_and_swap or an equivalent.41Its documentation suggests using C++11 atomics but this is C. 
*/42__compare_and_swap((volatile int*)ptr, &oldval, newval);43return oldval;44#elif defined(__MVS__)45/* Use hand-rolled assembly because codegen from builtin __plo_CSST results in46* a runtime bug.47*/48__asm(" cs %0,%2,%1 \n " : "+r"(oldval), "+m"(*ptr) : "r"(newval) :);49return oldval;50#elif defined(__SUNPRO_C) || defined(__SUNPRO_CC)51return atomic_cas_uint((uint_t *)ptr, (uint_t)oldval, (uint_t)newval);52#else53return __sync_val_compare_and_swap(ptr, oldval, newval);54#endif55}5657UV_UNUSED(static void cpu_relax(void)) {58#if defined(__i386__) || defined(__x86_64__)59__asm__ __volatile__ ("rep; nop" ::: "memory"); /* a.k.a. PAUSE */60#elif (defined(__arm__) && __ARM_ARCH >= 7) || defined(__aarch64__)61__asm__ __volatile__ ("yield" ::: "memory");62#elif (defined(__ppc__) || defined(__ppc64__)) && defined(__APPLE__)63__asm volatile ("" : : : "memory");64#elif !defined(__APPLE__) && (defined(__powerpc64__) || defined(__ppc64__) || defined(__PPC64__))65__asm__ __volatile__ ("or 1,1,1; or 2,2,2" ::: "memory");66#endif67}6869#endif /* UV_ATOMIC_OPS_H_ */707172