Book a Demo!
CoCalc Logo Icon
StoreFeaturesDocsShareSupportNewsAboutPoliciesSign UpSign In
awilliam
GitHub Repository: awilliam/linux-vfio
Path: blob/master/arch/m32r/include/asm/atomic.h
10818 views
1
#ifndef _ASM_M32R_ATOMIC_H
2
#define _ASM_M32R_ATOMIC_H
3
4
/*
5
* linux/include/asm-m32r/atomic.h
6
*
7
* M32R version:
8
* Copyright (C) 2001, 2002 Hitoshi Yamamoto
9
* Copyright (C) 2004 Hirokazu Takata <takata at linux-m32r.org>
10
*/
11
12
#include <linux/types.h>
13
#include <asm/assembler.h>
14
#include <asm/system.h>
15
16
/*
17
* Atomic operations that C can't guarantee us. Useful for
18
* resource counting etc..
19
*/
20
21
/* Static initializer for an atomic_t, e.g.: static atomic_t x = ATOMIC_INIT(0); */
#define ATOMIC_INIT(i) { (i) }
22
23
/**
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 *
 * The cast through (volatile int *) forces a real load from memory on
 * every use, preventing the compiler from caching the counter in a
 * register across accesses.
 */
#define atomic_read(v) (*(volatile int *)&(v)->counter)
30
31
/**
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 *
 * A plain word-sized store; no memory barrier is implied.
 */
#define atomic_set(v,i) (((v)->counter) = (i))
39
40
/**
 * atomic_add_return - add integer to atomic variable and return it
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and return (@i + @v).
 */
static __inline__ int atomic_add_return(int i, atomic_t *v)
{
	unsigned long flags;
	int result;

	/*
	 * Interrupts are disabled around the read-modify-write so the
	 * sequence cannot be preempted locally; M32R_LOCK/M32R_UNLOCK
	 * (from <asm/assembler.h>) presumably provide the locked
	 * load/store pair for SMP — see that header to confirm.
	 */
	local_irq_save(flags);
	__asm__ __volatile__ (
		"# atomic_add_return \n\t"
		DCACHE_CLEAR("%0", "r4", "%1")
		M32R_LOCK" %0, @%1; \n\t"
		"add %0, %2; \n\t"
		M32R_UNLOCK" %0, @%1; \n\t"
		: "=&r" (result)		/* early-clobber: written before inputs consumed */
		: "r" (&v->counter), "r" (i)
		: "memory"
#ifdef CONFIG_CHIP_M32700_TS1
		/* DCACHE_CLEAR uses r4 as scratch on the TS1 chip (workaround) */
		, "r4"
#endif	/* CONFIG_CHIP_M32700_TS1 */
	);
	local_irq_restore(flags);

	return result;
}
70
71
/**
 * atomic_sub_return - subtract integer from atomic variable and return it
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and return (@v - @i).
 */
static __inline__ int atomic_sub_return(int i, atomic_t *v)
{
	unsigned long flags;
	int result;

	/* Same locked read-modify-write pattern as atomic_add_return(),
	 * with "sub" in place of "add". */
	local_irq_save(flags);
	__asm__ __volatile__ (
		"# atomic_sub_return \n\t"
		DCACHE_CLEAR("%0", "r4", "%1")
		M32R_LOCK" %0, @%1; \n\t"
		"sub %0, %2; \n\t"
		M32R_UNLOCK" %0, @%1; \n\t"
		: "=&r" (result)		/* early-clobber output */
		: "r" (&v->counter), "r" (i)
		: "memory"
#ifdef CONFIG_CHIP_M32700_TS1
		/* r4 is the DCACHE_CLEAR scratch register on TS1 */
		, "r4"
#endif	/* CONFIG_CHIP_M32700_TS1 */
	);
	local_irq_restore(flags);

	return result;
}
101
102
/**
 * atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v.  Thin wrapper over atomic_add_return();
 * the returned value is discarded.
 */
#define atomic_add(i,v) ((void) atomic_add_return((i), (v)))

/**
 * atomic_sub - subtract the atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v.  Thin wrapper over
 * atomic_sub_return(); the returned value is discarded.
 */
#define atomic_sub(i,v) ((void) atomic_sub_return((i), (v)))

/**
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic_sub_and_test(i,v) (atomic_sub_return((i), (v)) == 0)
130
131
/**
 * atomic_inc_return - increment atomic variable and return it
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1 and returns the result.
 */
static __inline__ int atomic_inc_return(atomic_t *v)
{
	unsigned long flags;
	int result;

	/* Locked load / addi #1 / locked store, under disabled interrupts;
	 * see atomic_add_return() for the pattern. */
	local_irq_save(flags);
	__asm__ __volatile__ (
		"# atomic_inc_return \n\t"
		DCACHE_CLEAR("%0", "r4", "%1")
		M32R_LOCK" %0, @%1; \n\t"
		"addi %0, #1; \n\t"
		M32R_UNLOCK" %0, @%1; \n\t"
		: "=&r" (result)		/* early-clobber output */
		: "r" (&v->counter)
		: "memory"
#ifdef CONFIG_CHIP_M32700_TS1
		/* r4 is the DCACHE_CLEAR scratch register on TS1 */
		, "r4"
#endif	/* CONFIG_CHIP_M32700_TS1 */
	);
	local_irq_restore(flags);

	return result;
}
160
161
/**
 * atomic_dec_return - decrement atomic variable and return it
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and returns the result.
 */
static __inline__ int atomic_dec_return(atomic_t *v)
{
	unsigned long flags;
	int result;

	/* Decrement is expressed as "addi #-1" — M32R has an immediate add,
	 * so no separate subtract-immediate is needed. */
	local_irq_save(flags);
	__asm__ __volatile__ (
		"# atomic_dec_return \n\t"
		DCACHE_CLEAR("%0", "r4", "%1")
		M32R_LOCK" %0, @%1; \n\t"
		"addi %0, #-1; \n\t"
		M32R_UNLOCK" %0, @%1; \n\t"
		: "=&r" (result)		/* early-clobber output */
		: "r" (&v->counter)
		: "memory"
#ifdef CONFIG_CHIP_M32700_TS1
		/* r4 is the DCACHE_CLEAR scratch register on TS1 */
		, "r4"
#endif	/* CONFIG_CHIP_M32700_TS1 */
	);
	local_irq_restore(flags);

	return result;
}
190
191
/**
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.  Wrapper over atomic_inc_return();
 * the result is discarded.
 */
#define atomic_inc(v) ((void)atomic_inc_return(v))

/**
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.  Wrapper over atomic_dec_return();
 * the result is discarded.
 */
#define atomic_dec(v) ((void)atomic_dec_return(v))

/**
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)

/**
 * atomic_dec_and_test - decrement and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all
 * other cases.
 */
#define atomic_dec_and_test(v) (atomic_dec_return(v) == 0)

/**
 * atomic_add_negative - add and test if negative
 * @v: pointer of type atomic_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic_add_negative(i,v) (atomic_add_return((i), (v)) < 0)
237
238
/*
 * Compare-and-exchange / exchange on the counter word, delegated to the
 * generic cmpxchg()/xchg() primitives (presumably from <asm/system.h>,
 * included above — confirm there).  atomic_cmpxchg() returns the value
 * of the counter observed before the operation.
 */
#define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
240
241
/**
242
* atomic_add_unless - add unless the number is a given value
243
* @v: pointer of type atomic_t
244
* @a: the amount to add to v...
245
* @u: ...unless v is equal to u.
246
*
247
* Atomically adds @a to @v, so long as it was not @u.
248
* Returns non-zero if @v was not @u, and zero otherwise.
249
*/
250
static __inline__ int atomic_add_unless(atomic_t *v, int a, int u)
251
{
252
int c, old;
253
c = atomic_read(v);
254
for (;;) {
255
if (unlikely(c == (u)))
256
break;
257
old = atomic_cmpxchg((v), c, c + (a));
258
if (likely(old == c))
259
break;
260
c = old;
261
}
262
return c != (u);
263
}
264
265
/* Increment @v by 1 unless it is currently zero; non-zero return on success. */
#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
266
267
/**
 * atomic_clear_mask - atomically clear bits in an atomic variable
 * @mask: bits to clear
 * @addr: pointer of type atomic_t
 *
 * Atomically performs *addr &= ~mask via a locked load/and/store
 * sequence under disabled interrupts.  Note the scratch register here
 * is r5 (not r4 as in the arithmetic ops above).
 */
static __inline__ void atomic_clear_mask(unsigned long mask, atomic_t *addr)
{
	unsigned long flags;
	unsigned long tmp;

	local_irq_save(flags);
	__asm__ __volatile__ (
		"# atomic_clear_mask \n\t"
		DCACHE_CLEAR("%0", "r5", "%1")
		M32R_LOCK" %0, @%1; \n\t"
		"and %0, %2; \n\t"
		M32R_UNLOCK" %0, @%1; \n\t"
		: "=&r" (tmp)
		: "r" (addr), "r" (~mask)	/* mask is complemented: and with ~mask clears the bits */
		: "memory"
#ifdef CONFIG_CHIP_M32700_TS1
		/* r5 is the DCACHE_CLEAR scratch register in this routine */
		, "r5"
#endif	/* CONFIG_CHIP_M32700_TS1 */
	);
	local_irq_restore(flags);
}
288
289
/**
 * atomic_set_mask - atomically set bits in an atomic variable
 * @mask: bits to set
 * @addr: pointer of type atomic_t
 *
 * Atomically performs *addr |= mask via a locked load/or/store
 * sequence under disabled interrupts.  Mirror image of
 * atomic_clear_mask(), using "or" with the uncomplemented mask.
 */
static __inline__ void atomic_set_mask(unsigned long mask, atomic_t *addr)
{
	unsigned long flags;
	unsigned long tmp;

	local_irq_save(flags);
	__asm__ __volatile__ (
		"# atomic_set_mask \n\t"
		DCACHE_CLEAR("%0", "r5", "%1")
		M32R_LOCK" %0, @%1; \n\t"
		"or %0, %2; \n\t"
		M32R_UNLOCK" %0, @%1; \n\t"
		: "=&r" (tmp)
		: "r" (addr), "r" (mask)
		: "memory"
#ifdef CONFIG_CHIP_M32700_TS1
		/* r5 is the DCACHE_CLEAR scratch register in this routine */
		, "r5"
#endif	/* CONFIG_CHIP_M32700_TS1 */
	);
	local_irq_restore(flags);
}
310
311
/* Atomic operations are already serializing on m32r */
/* Hence these hooks only need a compiler barrier, not a hardware one. */
#define smp_mb__before_atomic_dec() barrier()
#define smp_mb__after_atomic_dec() barrier()
#define smp_mb__before_atomic_inc() barrier()
#define smp_mb__after_atomic_inc() barrier()
316
317
#include <asm-generic/atomic-long.h>
318
#endif /* _ASM_M32R_ATOMIC_H */
319
320