GitHub Repository: torvalds/linux
Path: blob/master/arch/loongarch/include/asm/atomic.h
/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Atomic operations.
 *
 * Copyright (C) 2020-2022 Loongson Technology Corporation Limited
 */
#ifndef _ASM_ATOMIC_H
#define _ASM_ATOMIC_H

#include <linux/types.h>
#include <asm/barrier.h>
#include <asm/cmpxchg.h>
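
/*
 * Implementation selection (descriptive note, inferred from the config
 * and header names below): cores that provide the native AM* atomic
 * memory operations take the AMO-based implementations; everything else
 * falls back to LL/SC (load-linked/store-conditional) retry loops.
 */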
#ifdef CONFIG_CPU_HAS_AMO
#include <asm/atomic-amo.h>
#else
#include <asm/atomic-llsc.h>
#endif

#ifdef CONFIG_GENERIC_ATOMIC64
#include <asm-generic/atomic64.h>
#endif
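
/*
 * Width selection for the asm helpers below: ll.w/sc.w and the 32-bit
 * AM* mnemonics when long is 32-bit, the .d forms when it is 64-bit.
 * The _db suffix names the barrier-hinted variants of the AM* ops.
 */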
#if __SIZEOF_LONG__ == 4
#define __LL		"ll.w	"
#define __SC		"sc.w	"
#define __AMADD		"amadd.w	"
#define __AMOR		"amor.w	"
#define __AMAND_DB	"amand_db.w	"
#define __AMOR_DB	"amor_db.w	"
#define __AMXOR_DB	"amxor_db.w	"
#elif __SIZEOF_LONG__ == 8
#define __LL		"ll.d	"
#define __SC		"sc.d	"
#define __AMADD		"amadd.d	"
#define __AMOR		"amor.d	"
#define __AMAND_DB	"amand_db.d	"
#define __AMOR_DB	"amor_db.d	"
#define __AMXOR_DB	"amxor_db.d	"
#endif

#define ATOMIC_INIT(i)		{ (i) }

#define arch_atomic_read(v)	READ_ONCE((v)->counter)
#define arch_atomic_set(v, i)	WRITE_ONCE((v)->counter, (i))
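
/*
 * arch_atomic_fetch_add_unless - atomically add @a to @v, unless @v == @u.
 *
 * Returns the original value of @v. The LL/SC loop retries until the
 * store-conditional (sc.w) succeeds; when @v == @u the loop exits early
 * at 1: without storing, still executing __WEAK_LLSC_MB on that path.
 */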
static inline int arch_atomic_fetch_add_unless(atomic_t *v, int a, int u)
{
	int prev, rc;

	__asm__ __volatile__ (
		"0:	ll.w	%[p],  %[c]\n"
		"	beq	%[p],  %[u], 1f\n"
		"	add.w	%[rc], %[p], %[a]\n"
		"	sc.w	%[rc], %[c]\n"
		"	beqz	%[rc], 0b\n"
		"	b	2f\n"
		"1:\n"
		__WEAK_LLSC_MB
		"2:\n"
		: [p]"=&r" (prev), [rc]"=&r" (rc),
		  [c]"=ZB" (v->counter)
		: [a]"r" (a), [u]"r" (u)
		: "memory");

	return prev;
}
#define arch_atomic_fetch_add_unless arch_atomic_fetch_add_unless
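
/*
 * Usage sketch (illustrative, not part of this file): the generic atomic
 * layer derives further helpers from this primitive, e.g.
 *
 *	atomic_add_unless(v, a, u)	=> atomic_fetch_add_unless(v, a, u) != u
 *	atomic_inc_not_zero(v)		=> atomic_add_unless(v, 1, 0)
 */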
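
/*
 * arch_atomic_sub_if_positive - subtract @i from @v only when the result
 * stays non-negative.
 *
 * Returns @v - @i either way; the store is skipped (bltz to 2f) when the
 * result would be negative. The constant-@i path encodes -@i as an
 * addi.w immediate (the "I" constraint), saving a register.
 */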
static inline int arch_atomic_sub_if_positive(int i, atomic_t *v)
{
	int result;
	int temp;

	if (__builtin_constant_p(i)) {
		__asm__ __volatile__(
		"1:	ll.w	%1, %2		# atomic_sub_if_positive\n"
		"	addi.w	%0, %1, %3				\n"
		"	move	%1, %0					\n"
		"	bltz	%0, 2f					\n"
		"	sc.w	%1, %2					\n"
		"	beqz	%1, 1b					\n"
		"2:							\n"
		__WEAK_LLSC_MB
		: "=&r" (result), "=&r" (temp), "+ZC" (v->counter)
		: "I" (-i));
	} else {
		__asm__ __volatile__(
		"1:	ll.w	%1, %2		# atomic_sub_if_positive\n"
		"	sub.w	%0, %1, %3				\n"
		"	move	%1, %0					\n"
		"	bltz	%0, 2f					\n"
		"	sc.w	%1, %2					\n"
		"	beqz	%1, 1b					\n"
		"2:							\n"
		__WEAK_LLSC_MB
		: "=&r" (result), "=&r" (temp), "+ZC" (v->counter)
		: "r" (i));
	}

	return result;
}

#define arch_atomic_dec_if_positive(v)	arch_atomic_sub_if_positive(1, v)
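
/*
 * Hypothetical caller sketch (names here are illustrative only): a
 * negative return means the counter was already <= 0 and was left
 * untouched:
 *
 *	if (arch_atomic_dec_if_positive(&sem_count) < 0)
 *		go_to_sleep();
 */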

#ifdef CONFIG_64BIT

#define ATOMIC64_INIT(i)	{ (i) }

#define arch_atomic64_read(v)	READ_ONCE((v)->counter)
#define arch_atomic64_set(v, i)	WRITE_ONCE((v)->counter, (i))
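
/*
 * 64-bit counterparts of the 32-bit helpers above: same LL/SC structure,
 * using ll.d/sc.d and 64-bit arithmetic.
 */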
static inline long arch_atomic64_fetch_add_unless(atomic64_t *v, long a, long u)
{
	long prev, rc;

	__asm__ __volatile__ (
		"0:	ll.d	%[p],  %[c]\n"
		"	beq	%[p],  %[u], 1f\n"
		"	add.d	%[rc], %[p], %[a]\n"
		"	sc.d	%[rc], %[c]\n"
		"	beqz	%[rc], 0b\n"
		"	b	2f\n"
		"1:\n"
		__WEAK_LLSC_MB
		"2:\n"
		: [p]"=&r" (prev), [rc]"=&r" (rc),
		  [c]"=ZB" (v->counter)
		: [a]"r" (a), [u]"r" (u)
		: "memory");

	return prev;
}
#define arch_atomic64_fetch_add_unless arch_atomic64_fetch_add_unless
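
/*
 * As with the 32-bit op, the generic layer can derive e.g.
 * atomic64_inc_not_zero(v) from this (illustrative):
 * atomic64_add_unless(v, 1, 0).
 */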
static inline long arch_atomic64_sub_if_positive(long i, atomic64_t *v)
{
	long result;
	long temp;

	if (__builtin_constant_p(i)) {
		__asm__ __volatile__(
		"1:	ll.d	%1, %2		# atomic64_sub_if_positive\n"
		"	addi.d	%0, %1, %3				\n"
		"	move	%1, %0					\n"
		"	bltz	%0, 2f					\n"
		"	sc.d	%1, %2					\n"
		"	beqz	%1, 1b					\n"
		"2:							\n"
		__WEAK_LLSC_MB
		: "=&r" (result), "=&r" (temp), "+ZC" (v->counter)
		: "I" (-i));
	} else {
		__asm__ __volatile__(
		"1:	ll.d	%1, %2		# atomic64_sub_if_positive\n"
		"	sub.d	%0, %1, %3				\n"
		"	move	%1, %0					\n"
		"	bltz	%0, 2f					\n"
		"	sc.d	%1, %2					\n"
		"	beqz	%1, 1b					\n"
		"2:							\n"
		__WEAK_LLSC_MB
		: "=&r" (result), "=&r" (temp), "+ZC" (v->counter)
		: "r" (i));
	}

	return result;
}

#define arch_atomic64_dec_if_positive(v)	arch_atomic64_sub_if_positive(1, v)

#endif /* CONFIG_64BIT */

#endif /* _ASM_ATOMIC_H */