/* Source: Kitware/CMake, Utilities/cmlibuv/src/unix/atomic-ops.h
 * (CMake's bundled copy of libuv). */
/* Copyright (c) 2013, Ben Noordhuis <[email protected]>
 *
 * Permission to use, copy, modify, and/or distribute this software for any
 * purpose with or without fee is hereby granted, provided that the above
 * copyright notice and this permission notice appear in all copies.
 *
 * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
 * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
 * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
 * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
 * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
 * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
 * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
 */
#ifndef UV_ATOMIC_OPS_H_
#define UV_ATOMIC_OPS_H_

#include "internal.h"  /* UV_UNUSED */

#if defined(__SUNPRO_C) || defined(__SUNPRO_CC)
#include <atomic.h>
#endif

UV_UNUSED(static int cmpxchgi(int* ptr, int oldval, int newval));
UV_UNUSED(static void cpu_relax(void));
/* Prefer hand-rolled assembly over the gcc builtins because the latter also
 * issue full memory barriers.
 */
UV_UNUSED(static int cmpxchgi(int* ptr, int oldval, int newval)) {
32
#if defined(__i386__) || defined(__x86_64__)
33
int out;
34
__asm__ __volatile__ ("lock; cmpxchg %2, %1;"
35
: "=a" (out), "+m" (*(volatile int*) ptr)
36
: "r" (newval), "0" (oldval)
37
: "memory");
38
return out;
39
#elif defined(_AIX) && defined(__ibmxl__)
40
/* FIXME: This is not actually atomic but XLClang 16.1 for AIX
41
does not provide __sync_val_compare_and_swap or an equivalent.
42
Its documentation suggests using C++11 atomics but this is C. */
43
__compare_and_swap((volatile int*)ptr, &oldval, newval);
44
return oldval;
45
#elif defined(__MVS__)
46
/* Use hand-rolled assembly because codegen from builtin __plo_CSST results in
47
* a runtime bug.
48
*/
49
__asm(" cs %0,%2,%1 \n " : "+r"(oldval), "+m"(*ptr) : "r"(newval) :);
50
return oldval;
51
#elif defined(__SUNPRO_C) || defined(__SUNPRO_CC)
52
return atomic_cas_uint((uint_t *)ptr, (uint_t)oldval, (uint_t)newval);
53
#else
54
return __sync_val_compare_and_swap(ptr, oldval, newval);
55
#endif
56
}
/* Busy-wait hint: tells the CPU we are spinning so it can reduce power and
 * yield pipeline resources (e.g. to the sibling hyper-thread).  Expands to
 * nothing on platforms with no suitable instruction. */
UV_UNUSED(static void cpu_relax(void)) {
#if defined(__i386__) || defined(__x86_64__)
  __asm__ __volatile__ ("rep; nop" ::: "memory");  /* a.k.a. PAUSE */
#elif (defined(__arm__) && __ARM_ARCH >= 7) || defined(__aarch64__)
  __asm__ __volatile__ ("yield" ::: "memory");
#elif (defined(__ppc__) || defined(__ppc64__)) && defined(__APPLE__)
  /* NOTE(review): presumably Apple's PowerPC toolchain rejects the
   * `or rN,rN,rN` priority hints; a bare compiler barrier at least keeps
   * the spin loop from being optimized away — confirm against upstream. */
  __asm volatile ("" : : : "memory");
#elif !defined(__APPLE__) && (defined(__powerpc64__) || defined(__ppc64__) || defined(__PPC64__))
  /* Power ISA SMT priority nops: `or 1,1,1` lowers thread priority,
   * `or 2,2,2` restores it to normal. */
  __asm__ __volatile__ ("or 1,1,1; or 2,2,2" ::: "memory");
#endif
}
#endif /* UV_ATOMIC_OPS_H_ */