GitHub Repository: torvalds/linux
Path: blob/master/arch/x86/include/asm/cmpxchg_32.h
/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_X86_CMPXCHG_32_H
#define _ASM_X86_CMPXCHG_32_H

/*
 * Note: if you use __cmpxchg64() or its variants, you need to test
 * for the feature in boot_cpu_data.
 */
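
/*
 * A minimal usage sketch (illustrative only, not part of this header):
 * the feature test can be done via the system_has_cmpxchg64() helper
 * defined at the end of this file, e.g.:
 *
 *	if (system_has_cmpxchg64())
 *		old = __cmpxchg64(&val, old, new);
 *	else
 *		... take a lock-based fallback path ...
 */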

union __u64_halves {
	u64 full;
	struct {
		u32 low, high;
	};
};
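
/*
 * The union exposes the 32-bit halves of a u64 so they can be fed to
 * cmpxchg8b, which compares against EDX:EAX and, on a match, stores
 * ECX:EBX; the asm constraints below ("+a"/"+d" and "b"/"c") map
 * low/high onto exactly those register pairs.
 */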

#define __arch_cmpxchg64(_ptr, _old, _new, _lock)			\
({									\
	union __u64_halves o = { .full = (_old), },			\
			   n = { .full = (_new), };			\
									\
	asm_inline volatile(_lock "cmpxchg8b %[ptr]"			\
			    : [ptr] "+m" (*(_ptr)),			\
			      "+a" (o.low), "+d" (o.high)		\
			    : "b" (n.low), "c" (n.high)			\
			    : "memory");				\
									\
	o.full;								\
})

static __always_inline u64 __cmpxchg64(volatile u64 *ptr, u64 old, u64 new)
{
	return __arch_cmpxchg64(ptr, old, new, LOCK_PREFIX);
}

static __always_inline u64 __cmpxchg64_local(volatile u64 *ptr, u64 old, u64 new)
{
	return __arch_cmpxchg64(ptr, old, new,);
}
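
/*
 * The _local variant passes an empty _lock argument, so the cmpxchg8b
 * is issued without the LOCK prefix: cheaper, but only safe for data
 * that is not concurrently updated by other CPUs.
 */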

#define __arch_try_cmpxchg64(_ptr, _oldp, _new, _lock)			\
({									\
	union __u64_halves o = { .full = *(_oldp), },			\
			   n = { .full = (_new), };			\
	bool ret;							\
									\
	asm_inline volatile(_lock "cmpxchg8b %[ptr]"			\
			    CC_SET(e)					\
			    : CC_OUT(e) (ret),				\
			      [ptr] "+m" (*(_ptr)),			\
			      "+a" (o.low), "+d" (o.high)		\
			    : "b" (n.low), "c" (n.high)			\
			    : "memory");				\
									\
	if (unlikely(!ret))						\
		*(_oldp) = o.full;					\
									\
	likely(ret);							\
})

static __always_inline bool __try_cmpxchg64(volatile u64 *ptr, u64 *oldp, u64 new)
{
	return __arch_try_cmpxchg64(ptr, oldp, new, LOCK_PREFIX);
}

static __always_inline bool __try_cmpxchg64_local(volatile u64 *ptr, u64 *oldp, u64 new)
{
	return __arch_try_cmpxchg64(ptr, oldp, new,);
}
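
/*
 * A typical retry-loop sketch (illustrative only): on failure the try
 * variants write the value actually found back through *oldp, so the
 * loop does not need to re-read the location itself:
 *
 *	u64 old = READ_ONCE(*ptr), new;
 *
 *	do {
 *		new = old + 1;
 *	} while (!__try_cmpxchg64(ptr, &old, new));
 */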

#ifdef CONFIG_X86_CX8

#define arch_cmpxchg64 __cmpxchg64

#define arch_cmpxchg64_local __cmpxchg64_local

#define arch_try_cmpxchg64 __try_cmpxchg64

#define arch_try_cmpxchg64_local __try_cmpxchg64_local

#else

/*
 * When building a kernel capable of running on the 80386 and 80486,
 * it may be necessary to emulate cmpxchg8b, which those CPUs lack.
 */
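
/*
 * The ALTERNATIVE() in the macros below is patched at boot: CPUs with
 * X86_FEATURE_CX8 run the native (lock) cmpxchg8b, while 80386/80486
 * kernels instead call cmpxchg8b_emu, which takes the pointer in %esi
 * (hence the "S" constraint). ALT_OUTPUT_SP additionally tells the
 * compiler the asm may contain a call and so needs a valid stack
 * pointer.
 */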

#define __arch_cmpxchg64_emu(_ptr, _old, _new, _lock_loc, _lock)	\
({									\
	union __u64_halves o = { .full = (_old), },			\
			   n = { .full = (_new), };			\
									\
	asm_inline volatile(						\
		ALTERNATIVE(_lock_loc					\
			    "call cmpxchg8b_emu",			\
			    _lock "cmpxchg8b %a[ptr]", X86_FEATURE_CX8)	\
		: ALT_OUTPUT_SP("+a" (o.low), "+d" (o.high))		\
		: "b" (n.low), "c" (n.high),				\
		  [ptr] "S" (_ptr)					\
		: "memory");						\
									\
	o.full;								\
})

static __always_inline u64 arch_cmpxchg64(volatile u64 *ptr, u64 old, u64 new)
{
	return __arch_cmpxchg64_emu(ptr, old, new, LOCK_PREFIX_HERE, "lock ");
}
#define arch_cmpxchg64 arch_cmpxchg64
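
/*
 * Defining the macro to its own name is the kernel idiom that lets the
 * generic cmpxchg headers detect, via #ifdef, that this architecture
 * provides its own implementation of the operation.
 */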

static __always_inline u64 arch_cmpxchg64_local(volatile u64 *ptr, u64 old, u64 new)
{
	return __arch_cmpxchg64_emu(ptr, old, new, ,);
}
#define arch_cmpxchg64_local arch_cmpxchg64_local

#define __arch_try_cmpxchg64_emu(_ptr, _oldp, _new, _lock_loc, _lock)	\
({									\
	union __u64_halves o = { .full = *(_oldp), },			\
			   n = { .full = (_new), };			\
	bool ret;							\
									\
	asm_inline volatile(						\
		ALTERNATIVE(_lock_loc					\
			    "call cmpxchg8b_emu",			\
			    _lock "cmpxchg8b %a[ptr]", X86_FEATURE_CX8)	\
		CC_SET(e)						\
		: ALT_OUTPUT_SP(CC_OUT(e) (ret),			\
				"+a" (o.low), "+d" (o.high))		\
		: "b" (n.low), "c" (n.high),				\
		  [ptr] "S" (_ptr)					\
		: "memory");						\
									\
	if (unlikely(!ret))						\
		*(_oldp) = o.full;					\
									\
	likely(ret);							\
})

static __always_inline bool arch_try_cmpxchg64(volatile u64 *ptr, u64 *oldp, u64 new)
{
	return __arch_try_cmpxchg64_emu(ptr, oldp, new, LOCK_PREFIX_HERE, "lock ");
}
#define arch_try_cmpxchg64 arch_try_cmpxchg64

static __always_inline bool arch_try_cmpxchg64_local(volatile u64 *ptr, u64 *oldp, u64 new)
{
	return __arch_try_cmpxchg64_emu(ptr, oldp, new, ,);
}
#define arch_try_cmpxchg64_local arch_try_cmpxchg64_local

#endif

#define system_has_cmpxchg64() boot_cpu_has(X86_FEATURE_CX8)
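
/*
 * Note that this reports true native cmpxchg8b support; on
 * CONFIG_X86_CX8=n kernels the arch_cmpxchg64*() operations above
 * still work everywhere, falling back to cmpxchg8b_emu on CPUs
 * without the instruction.
 */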

#endif /* _ASM_X86_CMPXCHG_32_H */