GitHub Repository: torvalds/linux
Path: blob/master/arch/loongarch/include/asm/atomic-amo.h
/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Atomic operations (AMO).
 *
 * Copyright (C) 2020-2025 Loongson Technology Corporation Limited
 */

#ifndef _ASM_ATOMIC_AMO_H
#define _ASM_ATOMIC_AMO_H

#include <linux/types.h>
#include <asm/barrier.h>
#include <asm/cmpxchg.h>
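
/*
 * LoongArch atomic memory operation (AMO) instructions (amadd.w, amand.w,
 * amor.w, amxor.w, ...) atomically load a value, combine it with a
 * register operand, store the result back, and return the old value in
 * the destination register.  The "_db" forms of these instructions also
 * imply full memory ordering, which the fully ordered variants below
 * rely on.
 */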

#define ATOMIC_OP(op, I, asm_op) \
static inline void arch_atomic_##op(int i, atomic_t *v) \
{ \
        __asm__ __volatile__( \
        "am"#asm_op".w" " $zero, %1, %0 \n" \
        : "+ZB" (v->counter) \
        : "r" (I) \
        : "memory"); \
}
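
/*
 * As an illustration, ATOMIC_OP(add, i, add) expands to roughly:
 *
 *        static inline void arch_atomic_add(int i, atomic_t *v)
 *        {
 *                __asm__ __volatile__(
 *                "amadd.w $zero, %1, %0 \n"
 *                : "+ZB" (v->counter)
 *                : "r" (i)
 *                : "memory");
 *        }
 *
 * The old value is written to $zero and discarded, so this is a pure
 * atomic read-modify-write with no result.  The "ZB" constraint
 * describes the counter as a memory operand addressed by a single base
 * register with zero offset, as the AMO instructions require.
 */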

#define ATOMIC_OP_RETURN(op, I, asm_op, c_op, mb, suffix) \
static inline int arch_atomic_##op##_return##suffix(int i, atomic_t *v) \
{ \
        int result; \
 \
        __asm__ __volatile__( \
        "am"#asm_op#mb".w" " %1, %2, %0 \n" \
        : "+ZB" (v->counter), "=&r" (result) \
        : "r" (I) \
        : "memory"); \
 \
        return result c_op I; \
}
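
/*
 * The AMO instruction leaves the *old* counter value in "result", so the
 * new value must be recomputed as "result c_op I" (e.g. "result + i" for
 * add).  The "_db" instantiation is fully ordered; the bare one provides
 * the _relaxed variant with no implied barrier.
 */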

#define ATOMIC_FETCH_OP(op, I, asm_op, mb, suffix) \
static inline int arch_atomic_fetch_##op##suffix(int i, atomic_t *v) \
{ \
        int result; \
 \
        __asm__ __volatile__( \
        "am"#asm_op#mb".w" " %1, %2, %0 \n" \
        : "+ZB" (v->counter), "=&r" (result) \
        : "r" (I) \
        : "memory"); \
 \
        return result; \
}
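
/*
 * The fetch_##op variants return "result" as-is: the value the counter
 * held before the operation, which is exactly what the kernel's
 * arch_atomic_fetch_*() contract requires.
 */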

#define ATOMIC_OPS(op, I, asm_op, c_op) \
        ATOMIC_OP(op, I, asm_op) \
        ATOMIC_OP_RETURN(op, I, asm_op, c_op, _db, ) \
        ATOMIC_OP_RETURN(op, I, asm_op, c_op, , _relaxed) \
        ATOMIC_FETCH_OP(op, I, asm_op, _db, ) \
        ATOMIC_FETCH_OP(op, I, asm_op, , _relaxed)

ATOMIC_OPS(add, i, add, +)
ATOMIC_OPS(sub, -i, add, +)
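
/*
 * There is no AMO subtract instruction, so subtraction is expressed as
 * amadd with the operand negated: ATOMIC_OPS(sub, -i, add, +) makes
 * arch_atomic_sub_return(i, v) compute "result + -i".
 */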

#define arch_atomic_add_return arch_atomic_add_return
#define arch_atomic_add_return_acquire arch_atomic_add_return
#define arch_atomic_add_return_release arch_atomic_add_return
#define arch_atomic_add_return_relaxed arch_atomic_add_return_relaxed
#define arch_atomic_sub_return arch_atomic_sub_return
#define arch_atomic_sub_return_acquire arch_atomic_sub_return
#define arch_atomic_sub_return_release arch_atomic_sub_return
#define arch_atomic_sub_return_relaxed arch_atomic_sub_return_relaxed
#define arch_atomic_fetch_add arch_atomic_fetch_add
#define arch_atomic_fetch_add_acquire arch_atomic_fetch_add
#define arch_atomic_fetch_add_release arch_atomic_fetch_add
#define arch_atomic_fetch_add_relaxed arch_atomic_fetch_add_relaxed
#define arch_atomic_fetch_sub arch_atomic_fetch_sub
#define arch_atomic_fetch_sub_acquire arch_atomic_fetch_sub
#define arch_atomic_fetch_sub_release arch_atomic_fetch_sub
#define arch_atomic_fetch_sub_relaxed arch_atomic_fetch_sub_relaxed
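
/*
 * The fully ordered functions above are built from the "_db" AMOs, so
 * the _acquire and _release variants simply alias them: a full barrier
 * is at least as strong as either ordering.
 */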

#undef ATOMIC_OPS

#define ATOMIC_OPS(op, I, asm_op) \
        ATOMIC_OP(op, I, asm_op) \
        ATOMIC_FETCH_OP(op, I, asm_op, _db, ) \
        ATOMIC_FETCH_OP(op, I, asm_op, , _relaxed)

ATOMIC_OPS(and, i, and)
ATOMIC_OPS(or, i, or)
ATOMIC_OPS(xor, i, xor)
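
/*
 * Only the fetch_ forms are instantiated for the bitwise operations;
 * the generic atomic API does not define _return variants for and, or
 * and xor.
 */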

#define arch_atomic_fetch_and arch_atomic_fetch_and
#define arch_atomic_fetch_and_acquire arch_atomic_fetch_and
#define arch_atomic_fetch_and_release arch_atomic_fetch_and
#define arch_atomic_fetch_and_relaxed arch_atomic_fetch_and_relaxed
#define arch_atomic_fetch_or arch_atomic_fetch_or
#define arch_atomic_fetch_or_acquire arch_atomic_fetch_or
#define arch_atomic_fetch_or_release arch_atomic_fetch_or
#define arch_atomic_fetch_or_relaxed arch_atomic_fetch_or_relaxed
#define arch_atomic_fetch_xor arch_atomic_fetch_xor
#define arch_atomic_fetch_xor_acquire arch_atomic_fetch_xor
#define arch_atomic_fetch_xor_release arch_atomic_fetch_xor
#define arch_atomic_fetch_xor_relaxed arch_atomic_fetch_xor_relaxed

#undef ATOMIC_OPS
#undef ATOMIC_FETCH_OP
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP

#ifdef CONFIG_64BIT
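
/*
 * On 64-bit kernels the atomic64_t operations mirror the 32-bit ones
 * above, using the doubleword (".d") forms of the AMO instructions.
 */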

#define ATOMIC64_OP(op, I, asm_op) \
static inline void arch_atomic64_##op(long i, atomic64_t *v) \
{ \
        __asm__ __volatile__( \
        "am"#asm_op".d " " $zero, %1, %0 \n" \
        : "+ZB" (v->counter) \
        : "r" (I) \
        : "memory"); \
}

#define ATOMIC64_OP_RETURN(op, I, asm_op, c_op, mb, suffix) \
static inline long arch_atomic64_##op##_return##suffix(long i, atomic64_t *v) \
{ \
        long result; \
        __asm__ __volatile__( \
        "am"#asm_op#mb".d " " %1, %2, %0 \n" \
        : "+ZB" (v->counter), "=&r" (result) \
        : "r" (I) \
        : "memory"); \
 \
        return result c_op I; \
}

#define ATOMIC64_FETCH_OP(op, I, asm_op, mb, suffix) \
static inline long arch_atomic64_fetch_##op##suffix(long i, atomic64_t *v) \
{ \
        long result; \
 \
        __asm__ __volatile__( \
        "am"#asm_op#mb".d " " %1, %2, %0 \n" \
        : "+ZB" (v->counter), "=&r" (result) \
        : "r" (I) \
        : "memory"); \
 \
        return result; \
}

#define ATOMIC64_OPS(op, I, asm_op, c_op) \
        ATOMIC64_OP(op, I, asm_op) \
        ATOMIC64_OP_RETURN(op, I, asm_op, c_op, _db, ) \
        ATOMIC64_OP_RETURN(op, I, asm_op, c_op, , _relaxed) \
        ATOMIC64_FETCH_OP(op, I, asm_op, _db, ) \
        ATOMIC64_FETCH_OP(op, I, asm_op, , _relaxed)

ATOMIC64_OPS(add, i, add, +)
ATOMIC64_OPS(sub, -i, add, +)

#define arch_atomic64_add_return arch_atomic64_add_return
#define arch_atomic64_add_return_acquire arch_atomic64_add_return
#define arch_atomic64_add_return_release arch_atomic64_add_return
#define arch_atomic64_add_return_relaxed arch_atomic64_add_return_relaxed
#define arch_atomic64_sub_return arch_atomic64_sub_return
#define arch_atomic64_sub_return_acquire arch_atomic64_sub_return
#define arch_atomic64_sub_return_release arch_atomic64_sub_return
#define arch_atomic64_sub_return_relaxed arch_atomic64_sub_return_relaxed
#define arch_atomic64_fetch_add arch_atomic64_fetch_add
#define arch_atomic64_fetch_add_acquire arch_atomic64_fetch_add
#define arch_atomic64_fetch_add_release arch_atomic64_fetch_add
#define arch_atomic64_fetch_add_relaxed arch_atomic64_fetch_add_relaxed
#define arch_atomic64_fetch_sub arch_atomic64_fetch_sub
#define arch_atomic64_fetch_sub_acquire arch_atomic64_fetch_sub
#define arch_atomic64_fetch_sub_release arch_atomic64_fetch_sub
#define arch_atomic64_fetch_sub_relaxed arch_atomic64_fetch_sub_relaxed

#undef ATOMIC64_OPS

#define ATOMIC64_OPS(op, I, asm_op) \
        ATOMIC64_OP(op, I, asm_op) \
        ATOMIC64_FETCH_OP(op, I, asm_op, _db, ) \
        ATOMIC64_FETCH_OP(op, I, asm_op, , _relaxed)

ATOMIC64_OPS(and, i, and)
ATOMIC64_OPS(or, i, or)
ATOMIC64_OPS(xor, i, xor)

#define arch_atomic64_fetch_and arch_atomic64_fetch_and
#define arch_atomic64_fetch_and_acquire arch_atomic64_fetch_and
#define arch_atomic64_fetch_and_release arch_atomic64_fetch_and
#define arch_atomic64_fetch_and_relaxed arch_atomic64_fetch_and_relaxed
#define arch_atomic64_fetch_or arch_atomic64_fetch_or
#define arch_atomic64_fetch_or_acquire arch_atomic64_fetch_or
#define arch_atomic64_fetch_or_release arch_atomic64_fetch_or
#define arch_atomic64_fetch_or_relaxed arch_atomic64_fetch_or_relaxed
#define arch_atomic64_fetch_xor arch_atomic64_fetch_xor
#define arch_atomic64_fetch_xor_acquire arch_atomic64_fetch_xor
#define arch_atomic64_fetch_xor_release arch_atomic64_fetch_xor
#define arch_atomic64_fetch_xor_relaxed arch_atomic64_fetch_xor_relaxed

#undef ATOMIC64_OPS
#undef ATOMIC64_FETCH_OP
#undef ATOMIC64_OP_RETURN
#undef ATOMIC64_OP

#endif /* CONFIG_64BIT */

#endif /* _ASM_ATOMIC_AMO_H */