GitHub Repository: torvalds/linux
Path: blob/master/arch/s390/include/asm/atomic.h
/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Copyright IBM Corp. 1999, 2016
 * Author(s): Martin Schwidefsky <[email protected]>,
 *	      Denis Joseph Barrow,
 *	      Arnd Bergmann,
 */

#ifndef __ARCH_S390_ATOMIC__
#define __ARCH_S390_ATOMIC__

#include <linux/compiler.h>
#include <linux/types.h>
#include <asm/atomic_ops.h>
#include <asm/barrier.h>
#include <asm/cmpxchg.h>

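/*
 * 32-bit atomic_t operations. The __atomic_* primitives used below are
 * provided by asm/atomic_ops.h; read and set simply access v->counter
 * through those helpers.
 */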
static __always_inline int arch_atomic_read(const atomic_t *v)
{
	return __atomic_read(&v->counter);
}
#define arch_atomic_read arch_atomic_read

static __always_inline void arch_atomic_set(atomic_t *v, int i)
{
	__atomic_set(&v->counter, i);
}
#define arch_atomic_set arch_atomic_set

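/*
 * arch_atomic_add_return() returns the new value, arch_atomic_fetch_add()
 * the old one. Both build on __atomic_add_barrier(), which, as the name
 * suggests, is the ordered add primitive; plain arch_atomic_add() has no
 * return value and uses the unordered variant.
 */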
static __always_inline int arch_atomic_add_return(int i, atomic_t *v)
{
	return __atomic_add_barrier(i, &v->counter) + i;
}
#define arch_atomic_add_return arch_atomic_add_return

static __always_inline int arch_atomic_fetch_add(int i, atomic_t *v)
{
	return __atomic_add_barrier(i, &v->counter);
}
#define arch_atomic_fetch_add arch_atomic_fetch_add

static __always_inline void arch_atomic_add(int i, atomic_t *v)
{
	__atomic_add(i, &v->counter);
}
#define arch_atomic_add arch_atomic_add

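/*
 * inc/dec use __atomic_add_const(), the helper for compile-time constant
 * operands (presumably so that add-immediate style instructions can be
 * used where the machine provides them).
 */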
static __always_inline void arch_atomic_inc(atomic_t *v)
{
	__atomic_add_const(1, &v->counter);
}
#define arch_atomic_inc arch_atomic_inc

static __always_inline void arch_atomic_dec(atomic_t *v)
{
	__atomic_add_const(-1, &v->counter);
}
#define arch_atomic_dec arch_atomic_dec

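/*
 * The *_and_test() ops apply the update and return true if the new
 * value is zero, via the fused add-and-test barrier primitives.
 */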
static __always_inline bool arch_atomic_sub_and_test(int i, atomic_t *v)
{
	return __atomic_add_and_test_barrier(-i, &v->counter);
}
#define arch_atomic_sub_and_test arch_atomic_sub_and_test

static __always_inline bool arch_atomic_dec_and_test(atomic_t *v)
{
	return __atomic_add_const_and_test_barrier(-1, &v->counter);
}
#define arch_atomic_dec_and_test arch_atomic_dec_and_test

static __always_inline bool arch_atomic_inc_and_test(atomic_t *v)
{
	return __atomic_add_const_and_test_barrier(1, &v->counter);
}
#define arch_atomic_inc_and_test arch_atomic_inc_and_test

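/* The sub flavours are simply add with the operand negated. */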
#define arch_atomic_sub(_i, _v)		arch_atomic_add(-(int)(_i), _v)
#define arch_atomic_sub_return(_i, _v)	arch_atomic_add_return(-(int)(_i), _v)
#define arch_atomic_fetch_sub(_i, _v)	arch_atomic_fetch_add(-(int)(_i), _v)

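/*
 * ATOMIC_OPS() generates the bitwise and/or/xor ops: a void variant
 * built on the plain __atomic_##op() primitive, and a fetch variant
 * that uses the barrier primitive and returns the old value.
 */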
#define ATOMIC_OPS(op)							\
static __always_inline void arch_atomic_##op(int i, atomic_t *v)	\
{									\
	__atomic_##op(i, &v->counter);					\
}									\
static __always_inline int arch_atomic_fetch_##op(int i, atomic_t *v)	\
{									\
	return __atomic_##op##_barrier(i, &v->counter);			\
}

ATOMIC_OPS(and)
ATOMIC_OPS(or)
ATOMIC_OPS(xor)

#undef ATOMIC_OPS

#define arch_atomic_and arch_atomic_and
#define arch_atomic_or arch_atomic_or
#define arch_atomic_xor arch_atomic_xor
#define arch_atomic_fetch_and arch_atomic_fetch_and
#define arch_atomic_fetch_or arch_atomic_fetch_or
#define arch_atomic_fetch_xor arch_atomic_fetch_xor

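/*
 * xchg/cmpxchg are thin wrappers around the common arch_xchg() and
 * arch_cmpxchg() from asm/cmpxchg.h: cmpxchg returns the value that was
 * found, try_cmpxchg returns success and updates *old on failure.
 */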
static __always_inline int arch_atomic_xchg(atomic_t *v, int new)
{
	return arch_xchg(&v->counter, new);
}
#define arch_atomic_xchg arch_atomic_xchg

static __always_inline int arch_atomic_cmpxchg(atomic_t *v, int old, int new)
{
	return arch_cmpxchg(&v->counter, old, new);
}
#define arch_atomic_cmpxchg arch_atomic_cmpxchg

static __always_inline bool arch_atomic_try_cmpxchg(atomic_t *v, int *old, int new)
{
	return arch_try_cmpxchg(&v->counter, old, new);
}
#define arch_atomic_try_cmpxchg arch_atomic_try_cmpxchg

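/*
 * 64-bit atomic64_t operations; these mirror the 32-bit ops above.
 * The counter is cast to long * because the __atomic64_* primitives
 * take long pointers (long is 64 bits wide on s390).
 */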
#define ATOMIC64_INIT(i)  { (i) }

static __always_inline s64 arch_atomic64_read(const atomic64_t *v)
{
	return __atomic64_read((long *)&v->counter);
}
#define arch_atomic64_read arch_atomic64_read

static __always_inline void arch_atomic64_set(atomic64_t *v, s64 i)
{
	__atomic64_set((long *)&v->counter, i);
}
#define arch_atomic64_set arch_atomic64_set

static __always_inline s64 arch_atomic64_add_return(s64 i, atomic64_t *v)
{
	return __atomic64_add_barrier(i, (long *)&v->counter) + i;
}
#define arch_atomic64_add_return arch_atomic64_add_return

static __always_inline s64 arch_atomic64_fetch_add(s64 i, atomic64_t *v)
{
	return __atomic64_add_barrier(i, (long *)&v->counter);
}
#define arch_atomic64_fetch_add arch_atomic64_fetch_add

static __always_inline void arch_atomic64_add(s64 i, atomic64_t *v)
{
	__atomic64_add(i, (long *)&v->counter);
}
#define arch_atomic64_add arch_atomic64_add

static __always_inline void arch_atomic64_inc(atomic64_t *v)
{
	__atomic64_add_const(1, (long *)&v->counter);
}
#define arch_atomic64_inc arch_atomic64_inc

static __always_inline void arch_atomic64_dec(atomic64_t *v)
{
	__atomic64_add_const(-1, (long *)&v->counter);
}
#define arch_atomic64_dec arch_atomic64_dec

static __always_inline bool arch_atomic64_sub_and_test(s64 i, atomic64_t *v)
{
	return __atomic64_add_and_test_barrier(-i, (long *)&v->counter);
}
#define arch_atomic64_sub_and_test arch_atomic64_sub_and_test

static __always_inline bool arch_atomic64_dec_and_test(atomic64_t *v)
{
	return __atomic64_add_const_and_test_barrier(-1, (long *)&v->counter);
}
#define arch_atomic64_dec_and_test arch_atomic64_dec_and_test

static __always_inline bool arch_atomic64_inc_and_test(atomic64_t *v)
{
	return __atomic64_add_const_and_test_barrier(1, (long *)&v->counter);
}
#define arch_atomic64_inc_and_test arch_atomic64_inc_and_test

static __always_inline s64 arch_atomic64_xchg(atomic64_t *v, s64 new)
{
	return arch_xchg(&v->counter, new);
}
#define arch_atomic64_xchg arch_atomic64_xchg

static __always_inline s64 arch_atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new)
{
	return arch_cmpxchg(&v->counter, old, new);
}
#define arch_atomic64_cmpxchg arch_atomic64_cmpxchg

static __always_inline bool arch_atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
{
	return arch_try_cmpxchg(&v->counter, old, new);
}
#define arch_atomic64_try_cmpxchg arch_atomic64_try_cmpxchg

#define ATOMIC64_OPS(op)						\
static __always_inline void arch_atomic64_##op(s64 i, atomic64_t *v)	\
{									\
	__atomic64_##op(i, (long *)&v->counter);			\
}									\
static __always_inline long arch_atomic64_fetch_##op(s64 i, atomic64_t *v) \
{									\
	return __atomic64_##op##_barrier(i, (long *)&v->counter);	\
}

ATOMIC64_OPS(and)
ATOMIC64_OPS(or)
ATOMIC64_OPS(xor)

#undef ATOMIC64_OPS

#define arch_atomic64_and arch_atomic64_and
#define arch_atomic64_or arch_atomic64_or
#define arch_atomic64_xor arch_atomic64_xor
#define arch_atomic64_fetch_and arch_atomic64_fetch_and
#define arch_atomic64_fetch_or arch_atomic64_fetch_or
#define arch_atomic64_fetch_xor arch_atomic64_fetch_xor

#define arch_atomic64_sub_return(_i, _v) arch_atomic64_add_return(-(s64)(_i), _v)
#define arch_atomic64_fetch_sub(_i, _v) arch_atomic64_fetch_add(-(s64)(_i), _v)
#define arch_atomic64_sub(_i, _v) arch_atomic64_add(-(s64)(_i), _v)

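/*
 * Usage sketch (illustrative only): kernel code normally reaches these
 * arch_ ops through the generic atomic_* wrappers. free_my_object() is
 * a hypothetical cleanup callback, not a real kernel function.
 *
 *	static atomic_t refcount = ATOMIC_INIT(1);
 *
 *	atomic_inc(&refcount);			// backed by arch_atomic_inc()
 *	if (atomic_dec_and_test(&refcount))	// arch_atomic_dec_and_test()
 *		free_my_object();
 */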
#endif /* __ARCH_S390_ATOMIC__ */