GitHub Repository: torvalds/linux
Path: blob/master/arch/sparc/include/asm/backoff.h
/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _SPARC64_BACKOFF_H
#define _SPARC64_BACKOFF_H

/* The macros in this file implement an exponential backoff facility
 * for atomic operations.
 *
 * When multiple threads compete on an atomic operation, it is
 * possible for one thread to be continually denied a successful
 * completion of the compare-and-swap instruction. Heavily
 * threaded cpu implementations like Niagara can compound this
 * problem even further.
 *
 * When an atomic operation fails and needs to be retried, we spin a
 * certain number of times. At each subsequent failure of the same
 * operation we double the spin count, realizing an exponential
 * backoff.
 *
 * When we spin, we try to use an operation that will cause the
 * current cpu strand to block, and therefore make the core fully
 * available to any other runnable strands. There are two
 * options, based upon cpu capabilities.
 *
 * On all cpus prior to SPARC-T4 we do three dummy reads of the
 * condition code register. Each read blocks the strand for something
 * between 40 and 50 cpu cycles.
 *
 * For SPARC-T4 and later we have a special "pause" instruction
 * available. This is implemented using writes to register %asr27.
 * The cpu will block the number of cycles written into the register,
 * unless a disrupting trap happens first. SPARC-T4 specifically
 * implements pause with a granularity of 8 cycles. Each strand has
 * an internal pause counter which decrements every 8 cycles. So the
 * chip shifts the %asr27 value down by 3 bits, and writes the result
 * into the pause counter. If a value smaller than 8 is written, the
 * chip blocks for 1 cycle.
 *
 * To achieve the same amount of backoff as the three %ccr reads give
 * on earlier chips, we shift the backoff value up by 7 bits. (Three
 * %ccr reads block for about 128 cycles, 1 << 7 == 128) We write the
 * whole amount we want to block into the pause register, rather than
 * loop writing 128 each time.
 */

#define BACKOFF_LIMIT	(4 * 1024)

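/* A minimal C-level sketch of the policy described above, added for
 * exposition only and compiled out via #if 0. It is not part of the
 * real interface of this header: the actual users are hand-written
 * assembler routines that expand BACKOFF_SETUP, BACKOFF_LABEL and
 * BACKOFF_SPIN directly. demo_backoff_add() and the __atomic_*
 * compiler builtins here are stand-ins chosen just for the sketch.
 */
#if 0
static inline void demo_backoff_add(int i, int *p)
{
	unsigned long backoff = 1;	/* BACKOFF_SETUP: budget starts at 1 */
	int old;

	for (;;) {
		old = *p;
		if (__atomic_compare_exchange_n(p, &old, old + i, 0,
						__ATOMIC_SEQ_CST,
						__ATOMIC_SEQ_CST))
			return;		/* CAS succeeded, we are done */

		/* BACKOFF_SPIN: block this strand for 'backoff' delay
		 * units, then double the budget, capped at BACKOFF_LIMIT.
		 */
		for (unsigned long spins = backoff; spins; spins--)
			__asm__ __volatile__("" : : : "memory");
		if (backoff <= BACKOFF_LIMIT)
			backoff <<= 1;
	}
}
#endif
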
#ifdef CONFIG_SMP

#define BACKOFF_SETUP(reg)	\
	mov	1, reg
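
/* BACKOFF_SETUP seeds the spin budget for one atomic operation at 1;
 * BACKOFF_SPIN doubles it on each subsequent failure of that operation.
 */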

#define BACKOFF_LABEL(spin_label, continue_label) \
	spin_label
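
/* BACKOFF_LABEL selects the branch target a caller uses when its atomic
 * operation fails: under CONFIG_SMP it is the spin_label that runs
 * BACKOFF_SPIN; on UP builds (below) it is continue_label, i.e. retry
 * immediately without backing off.
 */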

#define BACKOFF_SPIN(reg, tmp, label)		\
	mov		reg, tmp;		\
88:	rd		%ccr, %g0;		\
	rd		%ccr, %g0;		\
	rd		%ccr, %g0;		\
	.section	.pause_3insn_patch,"ax";\
	.word		88b;			\
	sllx		tmp, 7, tmp;		\
	wr		tmp, 0, %asr27;		\
	clr		tmp;			\
	.previous;				\
	brnz,pt		tmp, 88b;		\
	 sub		tmp, 1, tmp;		\
	set		BACKOFF_LIMIT, tmp;	\
	cmp		reg, tmp;		\
	bg,pn		%xcc, label;		\
	 nop;					\
	ba,pt		%xcc, label;		\
	 sllx		reg, 1, reg;
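
/* What BACKOFF_SPIN does, step by step:
 *
 * 1) Copy the current budget into tmp and spin it down to zero. On cpus
 *    before SPARC-T4 each pass over the three 'rd %ccr, %g0' reads at
 *    label 88 stalls the strand for roughly 128 cycles.
 *
 * 2) The .pause_3insn_patch entry records the address of those three
 *    instructions together with a replacement sequence (sllx/wr/clr),
 *    so a kernel running on a pause-capable cpu can patch them into a
 *    single write to %asr27. The budget is shifted left by 7 (x128) to
 *    match the cycle count of the unpatched loop; the chip in turn
 *    shifts the written value right by 3 because its pause counter
 *    ticks once per 8 cycles. Worked example: a budget of 16 becomes
 *    16 << 7 = 2048 cycles written to %asr27, i.e. a pause count of
 *    2048 >> 3 = 256 ticks of 8 cycles each. The clr makes the brnz
 *    fall straight through after the single pause.
 *
 * 3) Double the budget in reg (sllx reg, 1, reg) unless it already
 *    exceeds BACKOFF_LIMIT, then branch back to the caller's label.
 */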

#else

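/* Without CONFIG_SMP there is no other cpu strand competing for the core,
 * so the backoff hooks compile away and a failed operation simply retries.
 */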
#define BACKOFF_SETUP(reg)

#define BACKOFF_LABEL(spin_label, continue_label) \
	continue_label

#define BACKOFF_SPIN(reg, tmp, label)

#endif

#endif /* _SPARC64_BACKOFF_H */