GitHub Repository: torvalds/linux
Path: blob/master/arch/sparc/lib/atomic32.c
// SPDX-License-Identifier: GPL-2.0
/*
 * atomic32.c: 32-bit atomic_t implementation
 *
 * Copyright (C) 2004 Keith M Wesolowski
 * Copyright (C) 2007 Kyle McMartin
 *
 * Based on asm-parisc/atomic.h Copyright (C) 2000 Philipp Rumpf
 */

#include <linux/atomic.h>
#include <linux/spinlock.h>
#include <linux/module.h>
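
/*
 * sparc32 has no compare-and-swap instruction (only ldstub and swap),
 * so atomic_t, cmpxchg() and the locked bit operations are emulated
 * here with a small hash of IRQ-disabling spinlocks, indexed by bits
 * of the target address.
 */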
#ifdef CONFIG_SMP
#define ATOMIC_HASH_SIZE	4
#define ATOMIC_HASH(a)	(&__atomic_hash[(((unsigned long)a)>>8) & (ATOMIC_HASH_SIZE-1)])

spinlock_t __atomic_hash[ATOMIC_HASH_SIZE] = {
	[0 ... (ATOMIC_HASH_SIZE-1)] = __SPIN_LOCK_UNLOCKED(__atomic_hash)
};

#else /* SMP */

static DEFINE_SPINLOCK(dummy);
#define ATOMIC_HASH_SIZE	1
#define ATOMIC_HASH(a)		(&dummy)

#endif /* SMP */
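
/*
 * ATOMIC_FETCH_OP() generates arch_atomic_fetch_<op>(), which returns
 * the value the counter held before the operation; ATOMIC_OP_RETURN()
 * generates arch_atomic_<op>_return(), which returns the new value.
 * Both apply the operation under the spinlock hashed from the
 * atomic_t's address, with interrupts disabled.
 */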
#define ATOMIC_FETCH_OP(op, c_op)					\
int arch_atomic_fetch_##op(int i, atomic_t *v)				\
{									\
	int ret;							\
	unsigned long flags;						\
	spin_lock_irqsave(ATOMIC_HASH(v), flags);			\
									\
	ret = v->counter;						\
	v->counter c_op i;						\
									\
	spin_unlock_irqrestore(ATOMIC_HASH(v), flags);			\
	return ret;							\
}									\
EXPORT_SYMBOL(arch_atomic_fetch_##op);

#define ATOMIC_OP_RETURN(op, c_op)					\
int arch_atomic_##op##_return(int i, atomic_t *v)			\
{									\
	int ret;							\
	unsigned long flags;						\
	spin_lock_irqsave(ATOMIC_HASH(v), flags);			\
									\
	ret = (v->counter c_op i);					\
									\
	spin_unlock_irqrestore(ATOMIC_HASH(v), flags);			\
	return ret;							\
}									\
EXPORT_SYMBOL(arch_atomic_##op##_return);

ATOMIC_OP_RETURN(add, +=)

ATOMIC_FETCH_OP(add, +=)
ATOMIC_FETCH_OP(and, &=)
ATOMIC_FETCH_OP(or, |=)
ATOMIC_FETCH_OP(xor, ^=)

#undef ATOMIC_FETCH_OP
#undef ATOMIC_OP_RETURN
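
/* Unconditional exchange: install @new and return the old value. */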
int arch_atomic_xchg(atomic_t *v, int new)
{
	int ret;
	unsigned long flags;

	spin_lock_irqsave(ATOMIC_HASH(v), flags);
	ret = v->counter;
	v->counter = new;
	spin_unlock_irqrestore(ATOMIC_HASH(v), flags);
	return ret;
}
EXPORT_SYMBOL(arch_atomic_xchg);
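
/*
 * Store @new only if the counter still holds @old; either way, return
 * the value observed before any store so callers can detect failure.
 */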
int arch_atomic_cmpxchg(atomic_t *v, int old, int new)
{
	int ret;
	unsigned long flags;

	spin_lock_irqsave(ATOMIC_HASH(v), flags);
	ret = v->counter;
	if (likely(ret == old))
		v->counter = new;

	spin_unlock_irqrestore(ATOMIC_HASH(v), flags);
	return ret;
}
EXPORT_SYMBOL(arch_atomic_cmpxchg);
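
/*
 * Add @a to @v, unless @v already held @u; the old value is returned
 * either way.
 */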
int arch_atomic_fetch_add_unless(atomic_t *v, int a, int u)
{
	int ret;
	unsigned long flags;

	spin_lock_irqsave(ATOMIC_HASH(v), flags);
	ret = v->counter;
	if (ret != u)
		v->counter += a;
	spin_unlock_irqrestore(ATOMIC_HASH(v), flags);
	return ret;
}
EXPORT_SYMBOL(arch_atomic_fetch_add_unless);

/* Atomic operations are already serializing */
void arch_atomic_set(atomic_t *v, int i)
{
	unsigned long flags;

	spin_lock_irqsave(ATOMIC_HASH(v), flags);
	v->counter = i;
	spin_unlock_irqrestore(ATOMIC_HASH(v), flags);
}
EXPORT_SYMBOL(arch_atomic_set);
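
/*
 * The sp32___*_bit() helpers below back sparc32's test_and_*_bit()
 * operations; each returns the pre-update state of the bits selected
 * by @mask.
 */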
unsigned long sp32___set_bit(unsigned long *addr, unsigned long mask)
{
	unsigned long old, flags;

	spin_lock_irqsave(ATOMIC_HASH(addr), flags);
	old = *addr;
	*addr = old | mask;
	spin_unlock_irqrestore(ATOMIC_HASH(addr), flags);

	return old & mask;
}
EXPORT_SYMBOL(sp32___set_bit);

unsigned long sp32___clear_bit(unsigned long *addr, unsigned long mask)
{
	unsigned long old, flags;

	spin_lock_irqsave(ATOMIC_HASH(addr), flags);
	old = *addr;
	*addr = old & ~mask;
	spin_unlock_irqrestore(ATOMIC_HASH(addr), flags);

	return old & mask;
}
EXPORT_SYMBOL(sp32___clear_bit);

unsigned long sp32___change_bit(unsigned long *addr, unsigned long mask)
{
	unsigned long old, flags;

	spin_lock_irqsave(ATOMIC_HASH(addr), flags);
	old = *addr;
	*addr = old ^ mask;
	spin_unlock_irqrestore(ATOMIC_HASH(addr), flags);

	return old & mask;
}
EXPORT_SYMBOL(sp32___change_bit);
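
/*
 * CMPXCHG() generates the typed __cmpxchg_<size>() helpers that the
 * sparc32 cmpxchg() wrappers fall back on; same hashed-lock scheme,
 * keyed on the pointer rather than an atomic_t.
 */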
#define CMPXCHG(T)							\
	T __cmpxchg_##T(volatile T *ptr, T old, T new)			\
	{								\
		unsigned long flags;					\
		T prev;							\
									\
		spin_lock_irqsave(ATOMIC_HASH(ptr), flags);		\
		if ((prev = *ptr) == old)				\
			*ptr = new;					\
		spin_unlock_irqrestore(ATOMIC_HASH(ptr), flags);	\
									\
		return prev;						\
	}

CMPXCHG(u8)
CMPXCHG(u16)
CMPXCHG(u32)
CMPXCHG(u64)
EXPORT_SYMBOL(__cmpxchg_u8);
EXPORT_SYMBOL(__cmpxchg_u16);
EXPORT_SYMBOL(__cmpxchg_u32);
EXPORT_SYMBOL(__cmpxchg_u64);

unsigned long __xchg_u32(volatile u32 *ptr, u32 new)
{
	unsigned long flags;
	u32 prev;

	spin_lock_irqsave(ATOMIC_HASH(ptr), flags);
	prev = *ptr;
	*ptr = new;
	spin_unlock_irqrestore(ATOMIC_HASH(ptr), flags);

	return (unsigned long)prev;
}
EXPORT_SYMBOL(__xchg_u32);