GitHub Repository: torvalds/linux
Path: blob/master/arch/alpha/include/asm/cmpxchg.h
/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ALPHA_CMPXCHG_H
#define _ALPHA_CMPXCHG_H

/*
 * Atomic exchange.
 * Since it can be used to implement critical sections
 * it must clobber "memory" (also for interrupts in UP).
 */

static inline unsigned long
____xchg_u8(volatile char *m, unsigned long val)
{
	unsigned long ret, tmp, addr64;

	__asm__ __volatile__(
	"	andnot	%4,7,%3\n"
	"	insbl	%1,%4,%1\n"
	"1:	ldq_l	%2,0(%3)\n"
	"	extbl	%2,%4,%0\n"
	"	mskbl	%2,%4,%2\n"
	"	or	%1,%2,%2\n"
	"	stq_c	%2,0(%3)\n"
	"	beq	%2,2f\n"
	".subsection 2\n"
	"2:	br	1b\n"
	".previous"
	: "=&r" (ret), "=&r" (val), "=&r" (tmp), "=&r" (addr64)
	: "r" ((long)m), "1" (val) : "memory");

	return ret;
}
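
/*
 * ldq_l/stq_c only operate on aligned quadwords, so the byte variant
 * above emulates an 8-bit exchange by rewriting one byte inside the
 * containing quadword.  Roughly, in portable C (a sketch only: the
 * locked-load/store-conditional retry is omitted and Alpha's
 * little-endian byte numbering is assumed):
 *
 *	unsigned long a64 = (unsigned long)m & ~7UL;	// andnot %4,7,%3
 *	unsigned int  sh  = ((unsigned long)m & 7) * 8;
 *	unsigned long ins = (val & 0xff) << sh;		// insbl
 *	unsigned long q   = *(unsigned long *)a64;	// ldq_l
 *	unsigned long ret = (q >> sh) & 0xff;		// extbl
 *	q = (q & ~(0xffUL << sh)) | ins;		// mskbl + or
 *	*(unsigned long *)a64 = q;			// stq_c (may fail)
 */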

static inline unsigned long
____xchg_u16(volatile short *m, unsigned long val)
{
	unsigned long ret, tmp, addr64;

	__asm__ __volatile__(
	"	andnot	%4,7,%3\n"
	"	inswl	%1,%4,%1\n"
	"1:	ldq_l	%2,0(%3)\n"
	"	extwl	%2,%4,%0\n"
	"	mskwl	%2,%4,%2\n"
	"	or	%1,%2,%2\n"
	"	stq_c	%2,0(%3)\n"
	"	beq	%2,2f\n"
	".subsection 2\n"
	"2:	br	1b\n"
	".previous"
	: "=&r" (ret), "=&r" (val), "=&r" (tmp), "=&r" (addr64)
	: "r" ((long)m), "1" (val) : "memory");

	return ret;
}

static inline unsigned long
____xchg_u32(volatile int *m, unsigned long val)
{
	unsigned long dummy;

	__asm__ __volatile__(
	"1:	ldl_l %0,%4\n"
	"	bis $31,%3,%1\n"
	"	stl_c %1,%2\n"
	"	beq %1,2f\n"
	".subsection 2\n"
	"2:	br 1b\n"
	".previous"
	: "=&r" (val), "=&r" (dummy), "=m" (*m)
	: "rI" (val), "m" (*m) : "memory");

	return val;
}
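
/*
 * ldl_l/stl_c are Alpha's load-locked/store-conditional pair: stl_c
 * writes its source register with zero when the reservation has been
 * lost, so "beq %1,2f" branches to the retry stub.  Placing that stub
 * in .subsection 2 keeps the unlikely branch out of the hot path.
 * The "bis $31,%3,%1" is simply a register move ($31 always reads as
 * zero), refreshing the scratch copy of val that stl_c consumes.
 * Conceptually (load_locked/store_conditional are hypothetical
 * stand-ins for the instructions):
 *
 *	do {
 *		old = load_locked(m);
 *	} while (!store_conditional(m, val));
 */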

static inline unsigned long
____xchg_u64(volatile long *m, unsigned long val)
{
	unsigned long dummy;

	__asm__ __volatile__(
	"1:	ldq_l %0,%4\n"
	"	bis $31,%3,%1\n"
	"	stq_c %1,%2\n"
	"	beq %1,2f\n"
	".subsection 2\n"
	"2:	br 1b\n"
	".previous"
	: "=&r" (val), "=&r" (dummy), "=m" (*m)
	: "rI" (val), "m" (*m) : "memory");

	return val;
}

/* This function doesn't exist, so you'll get a linker error
   if something tries to do an invalid xchg(). */
extern void __xchg_called_with_bad_pointer(void);

static __always_inline unsigned long
____xchg(volatile void *ptr, unsigned long x, int size)
{
	return
		size == 1 ? ____xchg_u8(ptr, x) :
		size == 2 ? ____xchg_u16(ptr, x) :
		size == 4 ? ____xchg_u32(ptr, x) :
		size == 8 ? ____xchg_u64(ptr, x) :
			(__xchg_called_with_bad_pointer(), x);
}
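
/*
 * Since "size" is always the compile-time constant sizeof(*(ptr)),
 * the ?: chain above folds away and exactly one ____xchg_uN helper is
 * inlined.  Only an unsupported size leaves behind the call to the
 * undefined __xchg_called_with_bad_pointer(), turning the bug into a
 * link-time error.  For instance (hypothetical caller, sketch only):
 *
 *	int v = 1;
 *	____xchg(&v, 2, sizeof(v));	// folds to ____xchg_u32(&v, 2)
 */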

/*
 * Atomic compare and exchange. Compare OLD with MEM, if identical,
 * store NEW in MEM. Return the initial value in MEM. Success is
 * indicated by comparing RETURN with OLD.
 */
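
/*
 * The usual caller-side idiom implied by the comment above, sketched
 * here with the arch_cmpxchg() wrapper defined later in this file
 * (variable names are illustrative only):
 *
 *	long seen = arch_cmpxchg(&word, expected, desired);
 *	if (seen == expected) {
 *		// swap happened: word now holds desired
 *	} else {
 *		// lost the race: seen is what was actually in memory
 *	}
 */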

static inline unsigned long
____cmpxchg_u8(volatile char *m, unsigned char old, unsigned char new)
{
	unsigned long prev, tmp, cmp, addr64;

	__asm__ __volatile__(
	"	andnot	%5,7,%4\n"
	"	insbl	%1,%5,%1\n"
	"1:	ldq_l	%2,0(%4)\n"
	"	extbl	%2,%5,%0\n"
	"	cmpeq	%0,%6,%3\n"
	"	beq	%3,2f\n"
	"	mskbl	%2,%5,%2\n"
	"	or	%1,%2,%2\n"
	"	stq_c	%2,0(%4)\n"
	"	beq	%2,3f\n"
	"2:\n"
	".subsection 2\n"
	"3:	br	1b\n"
	".previous"
	: "=&r" (prev), "=&r" (new), "=&r" (tmp), "=&r" (cmp), "=&r" (addr64)
	: "r" ((long)m), "Ir" (old), "1" (new) : "memory");

	return prev;
}
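
/*
 * Note the two distinct failure exits above: "beq %3,2f" bails out
 * when the comparison fails (prev != old) without storing anything,
 * while "beq %2,3f" retries when stq_c loses its reservation.  In
 * C-like pseudocode (helper names are hypothetical):
 *
 *	do {
 *		q    = load_locked(addr64);		// ldq_l
 *		prev = extract_byte(q, m);		// extbl
 *		if (prev != old)
 *			break;				// "2:" exit
 *	} while (!store_conditional(addr64, merge_byte(q, new, m)));
 */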

static inline unsigned long
____cmpxchg_u16(volatile short *m, unsigned short old, unsigned short new)
{
	unsigned long prev, tmp, cmp, addr64;

	__asm__ __volatile__(
	"	andnot	%5,7,%4\n"
	"	inswl	%1,%5,%1\n"
	"1:	ldq_l	%2,0(%4)\n"
	"	extwl	%2,%5,%0\n"
	"	cmpeq	%0,%6,%3\n"
	"	beq	%3,2f\n"
	"	mskwl	%2,%5,%2\n"
	"	or	%1,%2,%2\n"
	"	stq_c	%2,0(%4)\n"
	"	beq	%2,3f\n"
	"2:\n"
	".subsection 2\n"
	"3:	br	1b\n"
	".previous"
	: "=&r" (prev), "=&r" (new), "=&r" (tmp), "=&r" (cmp), "=&r" (addr64)
	: "r" ((long)m), "Ir" (old), "1" (new) : "memory");

	return prev;
}

static inline unsigned long
____cmpxchg_u32(volatile int *m, int old, int new)
{
	unsigned long prev, cmp;

	__asm__ __volatile__(
	"1:	ldl_l %0,%5\n"
	"	cmpeq %0,%3,%1\n"
	"	beq %1,2f\n"
	"	mov %4,%1\n"
	"	stl_c %1,%2\n"
	"	beq %1,3f\n"
	"2:\n"
	".subsection 2\n"
	"3:	br 1b\n"
	".previous"
	: "=&r"(prev), "=&r"(cmp), "=m"(*m)
	: "r"((long) old), "r"(new), "m"(*m) : "memory");

	return prev;
}

static inline unsigned long
____cmpxchg_u64(volatile long *m, unsigned long old, unsigned long new)
{
	unsigned long prev, cmp;

	__asm__ __volatile__(
	"1:	ldq_l %0,%5\n"
	"	cmpeq %0,%3,%1\n"
	"	beq %1,2f\n"
	"	mov %4,%1\n"
	"	stq_c %1,%2\n"
	"	beq %1,3f\n"
	"2:\n"
	".subsection 2\n"
	"3:	br 1b\n"
	".previous"
	: "=&r"(prev), "=&r"(cmp), "=m"(*m)
	: "r"((long) old), "r"(new), "m"(*m) : "memory");

	return prev;
}

/* This function doesn't exist, so you'll get a linker error
   if something tries to do an invalid cmpxchg(). */
extern void __cmpxchg_called_with_bad_pointer(void);

static __always_inline unsigned long
____cmpxchg(volatile void *ptr, unsigned long old, unsigned long new,
	    int size)
{
	return
		size == 1 ? ____cmpxchg_u8(ptr, old, new) :
		size == 2 ? ____cmpxchg_u16(ptr, old, new) :
		size == 4 ? ____cmpxchg_u32(ptr, old, new) :
		size == 8 ? ____cmpxchg_u64(ptr, old, new) :
			(__cmpxchg_called_with_bad_pointer(), old);
}

#define xchg_local(ptr, x)						\
({									\
	__typeof__(*(ptr)) _x_ = (x);					\
	(__typeof__(*(ptr))) ____xchg((ptr), (unsigned long)_x_,	\
				      sizeof(*(ptr)));			\
})
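
/*
 * The _x_ temporary forces (x) to the pointee's type before it is
 * widened to unsigned long, and the result is cast back, so callers
 * get the width they expect.  A usage sketch (hypothetical variable;
 * no SMP ordering is implied, hence the "_local" suffix):
 *
 *	unsigned short s   = 0x1234;
 *	unsigned short old = xchg_local(&s, 0xabcd);
 *	// old == 0x1234, s == 0xabcd
 */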

#define arch_cmpxchg_local(ptr, o, n)					\
({									\
	__typeof__(*(ptr)) _o_ = (o);					\
	__typeof__(*(ptr)) _n_ = (n);					\
	(__typeof__(*(ptr))) ____cmpxchg((ptr), (unsigned long)_o_,	\
					 (unsigned long)_n_,		\
					 sizeof(*(ptr)));		\
})

#define arch_cmpxchg64_local(ptr, o, n)					\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	cmpxchg_local((ptr), (o), (n));					\
})

/*
 * The leading and the trailing memory barriers guarantee that these
 * operations are fully ordered.
 */
#define arch_xchg(ptr, x)						\
({									\
	__typeof__(*(ptr)) __ret;					\
	__typeof__(*(ptr)) _x_ = (x);					\
	smp_mb();							\
	__ret = (__typeof__(*(ptr)))					\
		____xchg((ptr), (unsigned long)_x_, sizeof(*(ptr)));	\
	smp_mb();							\
	__ret;								\
})

#define arch_cmpxchg(ptr, o, n)						\
({									\
	__typeof__(*(ptr)) __ret;					\
	__typeof__(*(ptr)) _o_ = (o);					\
	__typeof__(*(ptr)) _n_ = (n);					\
	smp_mb();							\
	__ret = (__typeof__(*(ptr))) ____cmpxchg((ptr),			\
		(unsigned long)_o_, (unsigned long)_n_, sizeof(*(ptr)));\
	smp_mb();							\
	__ret;								\
})
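
/*
 * The leading smp_mb() orders all earlier accesses before the atomic
 * operation; the trailing smp_mb() orders the operation before all
 * later accesses.  A message-passing sketch of why both matter on
 * Alpha's weak memory model (flag, data and status are hypothetical):
 *
 *	data = 42;			// ordered before the flag flips
 *	old  = arch_xchg(&flag, 1);	// fully ordered exchange
 *	r    = status;			// not hoisted before the exchange
 */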

#define arch_cmpxchg64(ptr, o, n)					\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	arch_cmpxchg((ptr), (o), (n));					\
})

#endif /* _ALPHA_CMPXCHG_H */