Book a Demo!
CoCalc Logo Icon
StoreFeaturesDocsShareSupportNewsAboutPoliciesSign UpSign In
freebsd
GitHub Repository: freebsd/freebsd-src
Path: blob/main/sys/riscv/include/atomic.h
39534 views
1
/*-
2
* Copyright (c) 2015-2024 Ruslan Bukin <[email protected]>
3
* All rights reserved.
4
*
5
* Portions of this software were developed by SRI International and the
6
* University of Cambridge Computer Laboratory under DARPA/AFRL contract
7
* FA8750-10-C-0237 ("CTSRD"), as part of the DARPA CRASH research programme.
8
*
9
* Portions of this software were developed by the University of Cambridge
10
* Computer Laboratory as part of the CTSRD Project, with support from the
11
* UK Higher Education Innovation Fund (HEIF).
12
*
13
* Redistribution and use in source and binary forms, with or without
14
* modification, are permitted provided that the following conditions
15
* are met:
16
* 1. Redistributions of source code must retain the above copyright
17
* notice, this list of conditions and the following disclaimer.
18
* 2. Redistributions in binary form must reproduce the above copyright
19
* notice, this list of conditions and the following disclaimer in the
20
* documentation and/or other materials provided with the distribution.
21
*
22
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
23
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
24
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
25
* ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
26
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
27
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
28
* OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
29
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
30
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
31
* OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
32
* SUCH DAMAGE.
33
*/
34
35
#ifndef _MACHINE_ATOMIC_H_
36
#define _MACHINE_ATOMIC_H_
37
38
#include <sys/atomic_common.h>
39
40
/*
 * Full memory barrier: the plain RISC-V "fence" instruction (equivalent to
 * fence iorw,iorw) plus a compiler barrier via the "memory" clobber.
 *
 * No trailing semicolon in the expansion, so that `fence();' is exactly one
 * statement in every context (e.g. an unbraced if/else branch).
 */
#define	fence()	__asm __volatile("fence" ::: "memory")

/*
 * This file only uses the full fence, so the read/write barrier flavors
 * conservatively map to it as well.
 */
#define	mb()	fence()
#define	rmb()	fence()
#define	wmb()	fence()
44
45
/*
 * Forward declarations for the 8- and 16-bit compare-and-set primitives;
 * their definitions come from <sys/_atomic_subword.h>, included near the
 * bottom of this file, but the ATOMIC_*_ACQ_REL generators above need the
 * prototypes first.
 */
static __inline int atomic_cmpset_8(__volatile uint8_t *, uint8_t, uint8_t);
static __inline int atomic_fcmpset_8(__volatile uint8_t *, uint8_t *, uint8_t);
static __inline int atomic_cmpset_16(__volatile uint16_t *, uint16_t, uint16_t);
static __inline int atomic_fcmpset_16(__volatile uint16_t *, uint16_t *,
    uint16_t);
50
51
/*
 * Generate acquire and release variants of a void atomic operation
 * (atomic_<NAME>_acq_<WIDTH> / atomic_<NAME>_rel_<WIDTH>) from the plain
 * atomic_<NAME>_<WIDTH>: a full fence after the operation gives acquire
 * semantics, a full fence before it gives release semantics.
 */
#define	ATOMIC_ACQ_REL(NAME, WIDTH)					\
static __inline  void							\
atomic_##NAME##_acq_##WIDTH(__volatile uint##WIDTH##_t *p, uint##WIDTH##_t v)\
{									\
	atomic_##NAME##_##WIDTH(p, v);					\
	fence();							\
}									\
									\
static __inline  void							\
atomic_##NAME##_rel_##WIDTH(__volatile uint##WIDTH##_t *p, uint##WIDTH##_t v)\
{									\
	fence();							\
	atomic_##NAME##_##WIDTH(p, v);					\
}
65
66
/*
 * Generate acquire and release variants of compare-and-set from the plain
 * atomic_cmpset_<WIDTH>.  The return value (nonzero on success) is passed
 * through unchanged; only the fence placement differs between the two.
 */
#define	ATOMIC_CMPSET_ACQ_REL(WIDTH)					\
static __inline  int							\
atomic_cmpset_acq_##WIDTH(__volatile uint##WIDTH##_t *p,		\
    uint##WIDTH##_t cmpval, uint##WIDTH##_t newval)			\
{									\
	int retval;							\
									\
	retval = atomic_cmpset_##WIDTH(p, cmpval, newval);		\
	fence();							\
	return (retval);						\
}									\
									\
static __inline  int							\
atomic_cmpset_rel_##WIDTH(__volatile uint##WIDTH##_t *p,		\
    uint##WIDTH##_t cmpval, uint##WIDTH##_t newval)			\
{									\
	fence();							\
	return (atomic_cmpset_##WIDTH(p, cmpval, newval));		\
}
85
86
/*
 * Generate acquire and release variants of fetch-compare-and-set from the
 * plain atomic_fcmpset_<WIDTH>.  Same fence placement scheme as
 * ATOMIC_CMPSET_ACQ_REL; *cmpval handling is left to the underlying op.
 */
#define	ATOMIC_FCMPSET_ACQ_REL(WIDTH)					\
static __inline  int							\
atomic_fcmpset_acq_##WIDTH(__volatile uint##WIDTH##_t *p,		\
    uint##WIDTH##_t *cmpval, uint##WIDTH##_t newval)			\
{									\
	int retval;							\
									\
	retval = atomic_fcmpset_##WIDTH(p, cmpval, newval);		\
	fence();							\
	return (retval);						\
}									\
									\
static __inline  int							\
atomic_fcmpset_rel_##WIDTH(__volatile uint##WIDTH##_t *p,		\
    uint##WIDTH##_t *cmpval, uint##WIDTH##_t newval)			\
{									\
	fence();							\
	return (atomic_fcmpset_##WIDTH(p, cmpval, newval));		\
}
105
106
/* Acquire/release cmpset and fcmpset variants for the 8-bit primitives. */
ATOMIC_CMPSET_ACQ_REL(8);
ATOMIC_FCMPSET_ACQ_REL(8);

/* "char" is the MI alias for the 8-bit operations. */
#define	atomic_cmpset_char		atomic_cmpset_8
#define	atomic_cmpset_acq_char		atomic_cmpset_acq_8
#define	atomic_cmpset_rel_char		atomic_cmpset_rel_8
#define	atomic_fcmpset_char		atomic_fcmpset_8
#define	atomic_fcmpset_acq_char		atomic_fcmpset_acq_8
#define	atomic_fcmpset_rel_char		atomic_fcmpset_rel_8

/* "short" is the MI alias for the 16-bit operations. */
#define	atomic_cmpset_short		atomic_cmpset_16
#define	atomic_fcmpset_short		atomic_fcmpset_16

/* Acquire/release cmpset and fcmpset variants for the 16-bit primitives. */
ATOMIC_CMPSET_ACQ_REL(16);
ATOMIC_FCMPSET_ACQ_REL(16);
121
122
/*
 * 16-bit load with acquire semantics: a plain load followed by a full
 * fence, so no later memory access can be ordered before the load.
 * The self-#define tells <sys/_atomic_subword.h> not to generate its own.
 */
#define	atomic_load_acq_16	atomic_load_acq_16
static __inline uint16_t
atomic_load_acq_16(const volatile uint16_t *p)
{
	uint16_t ret;

	ret = *p;

	fence();

	return (ret);
}
134
135
/*
 * 16-bit store with release semantics: a full fence before a plain store,
 * so no earlier memory access can be ordered after the store.
 */
static __inline void
atomic_store_rel_16(volatile uint16_t *p, uint16_t val)
{

	fence();

	*p = val;
}

/* "short" aliases for the 16-bit acquire variants. */
#define	atomic_cmpset_acq_short		atomic_cmpset_acq_16
#define	atomic_fcmpset_acq_short	atomic_fcmpset_acq_16
#define	atomic_load_acq_short		atomic_load_acq_16

/* "short" aliases for the 16-bit release variants. */
#define	atomic_cmpset_rel_short		atomic_cmpset_rel_16
#define	atomic_fcmpset_rel_short	atomic_fcmpset_rel_16
#define	atomic_store_rel_short		atomic_store_rel_16
151
152
/*
 * Atomically add val to *p using amoadd.w; rd = zero discards the old
 * value.  The "A" constraint forms an address operand for the AMO.
 */
static __inline void
atomic_add_32(volatile uint32_t *p, uint32_t val)
{

	__asm __volatile("amoadd.w zero, %1, %0"
			: "+A" (*p)
			: "r" (val)
			: "memory");
}

/*
 * Atomically subtract val from *p, implemented as an atomic add of the
 * two's-complement negation (RISC-V has no amosub).
 */
static __inline void
atomic_subtract_32(volatile uint32_t *p, uint32_t val)
{

	__asm __volatile("amoadd.w zero, %1, %0"
			: "+A" (*p)
			: "r" (-val)
			: "memory");
}

/* Atomically OR the bits of val into *p (amoor.w, old value discarded). */
static __inline void
atomic_set_32(volatile uint32_t *p, uint32_t val)
{

	__asm __volatile("amoor.w zero, %1, %0"
			: "+A" (*p)
			: "r" (val)
			: "memory");
}

/* Atomically clear the bits of val in *p by AND-ing with ~val. */
static __inline void
atomic_clear_32(volatile uint32_t *p, uint32_t val)
{

	__asm __volatile("amoand.w zero, %1, %0"
			: "+A" (*p)
			: "r" (~val)
			: "memory");
}
191
192
/*
 * 32-bit compare-and-set: if *p == cmpval, atomically store newval and
 * return nonzero; otherwise return 0.  Uses an LR/SC loop that retries on
 * SC failure (store-conditional losing the reservation), so failure is
 * reported only on a genuine value mismatch.
 *
 * cmpval is cast (long)(int32_t) because lr.w sign-extends the loaded
 * 32-bit word into the 64-bit register; the comparison operand must be
 * sign-extended the same way.  "rJ" lets the assembler use x0 for a
 * zero-valued operand (%z).
 */
static __inline int
atomic_cmpset_32(volatile uint32_t *p, uint32_t cmpval, uint32_t newval)
{
	uint32_t tmp;
	int res;

	res = 0;

	__asm __volatile(
		"0:"
			"li   %1, 1\n"		/* Preset to fail */
			"lr.w %0, %2\n"
			"bne  %0, %z3, 1f\n"
			"sc.w %1, %z4, %2\n"
			"bnez %1, 0b\n"
		"1:"
			: "=&r" (tmp), "=&r" (res), "+A" (*p)
			: "rJ" ((long)(int32_t)cmpval), "rJ" (newval)
			: "memory");

	return (!res);
}
214
215
/*
 * 32-bit fetch-compare-and-set: like atomic_cmpset_32, but on a compare
 * mismatch the observed value of *p is written back to *cmpval.  There is
 * no retry loop: a store-conditional failure also returns 0 (spurious
 * failure is permitted by the fcmpset contract, and in that case *cmpval
 * is left as the value just loaded, which equals the caller's expectation).
 */
static __inline int
atomic_fcmpset_32(volatile uint32_t *p, uint32_t *cmpval, uint32_t newval)
{
	uint32_t tmp;
	int res;

	res = 0;

	__asm __volatile(
		"0:"
			"li   %1, 1\n"		/* Preset to fail */
			"lr.w %0, %2\n"		/* Load old value */
			"bne  %0, %z4, 1f\n"	/* Compare */
			"sc.w %1, %z5, %2\n"	/* Try to store new value */
			"j 2f\n"
		"1:"
			"sw   %0, %3\n"		/* Save old value */
		"2:"
			: "=&r" (tmp), "=&r" (res), "+A" (*p), "+A" (*cmpval)
			: "rJ" ((long)(int32_t)*cmpval), "rJ" (newval)
			: "memory");

	return (!res);
}
239
240
/*
 * Atomically add val to *p and return the PREVIOUS value of *p
 * (amoadd.w with a live destination register).
 */
static __inline uint32_t
atomic_fetchadd_32(volatile uint32_t *p, uint32_t val)
{
	uint32_t ret;

	__asm __volatile("amoadd.w %0, %2, %1"
			: "=&r" (ret), "+A" (*p)
			: "r" (val)
			: "memory");

	return (ret);
}

/*
 * Atomically replace *p with 0 and return the previous value,
 * implemented as an atomic swap with zero.
 */
static __inline uint32_t
atomic_readandclear_32(volatile uint32_t *p)
{
	uint32_t ret;
	uint32_t val;

	val = 0;

	__asm __volatile("amoswap.w %0, %2, %1"
			: "=&r"(ret), "+A" (*p)
			: "r" (val)
			: "memory");

	return (ret);
}
268
269
/*
 * Atomically clear bit (val % 32) of *p and return whether that bit was
 * previously set.  The AND with the inverted mask returns the old word,
 * from which the old bit state is extracted.
 */
static __inline int
atomic_testandclear_32(volatile uint32_t *p, u_int val)
{
	uint32_t mask, old;

	mask = 1u << (val & 31);	/* bit index reduced mod 32 */
	__asm __volatile("amoand.w %0, %2, %1"
			: "=&r" (old), "+A" (*p)
			: "r" (~mask)
			: "memory");

	return ((old & mask) != 0);
}

/*
 * Atomically set bit (val % 32) of *p and return whether that bit was
 * previously set.
 */
static __inline int
atomic_testandset_32(volatile uint32_t *p, u_int val)
{
	uint32_t mask, old;

	mask = 1u << (val & 31);	/* bit index reduced mod 32 */
	__asm __volatile("amoor.w %0, %2, %1"
			: "=&r" (old), "+A" (*p)
			: "r" (mask)
			: "memory");

	return ((old & mask) != 0);
}
296
297
/* "int" is the MI alias for the 32-bit operations. */
#define	atomic_add_int		atomic_add_32
#define	atomic_clear_int	atomic_clear_32
#define	atomic_cmpset_int	atomic_cmpset_32
#define	atomic_fcmpset_int	atomic_fcmpset_32
#define	atomic_fetchadd_int	atomic_fetchadd_32
#define	atomic_readandclear_int	atomic_readandclear_32
#define	atomic_set_int		atomic_set_32
#define	atomic_subtract_int	atomic_subtract_32

/* Acquire/release variants of the 32-bit void operations. */
ATOMIC_ACQ_REL(set, 32)
ATOMIC_ACQ_REL(clear, 32)
ATOMIC_ACQ_REL(add, 32)
ATOMIC_ACQ_REL(subtract, 32)

/* Acquire/release variants of 32-bit cmpset/fcmpset. */
ATOMIC_CMPSET_ACQ_REL(32);
ATOMIC_FCMPSET_ACQ_REL(32);
313
314
/*
 * 32-bit load with acquire semantics: plain load followed by a full fence.
 */
static __inline uint32_t
atomic_load_acq_32(const volatile uint32_t *p)
{
	uint32_t ret;

	ret = *p;

	fence();

	return (ret);
}

/*
 * 32-bit store with release semantics: full fence followed by a plain store.
 */
static __inline void
atomic_store_rel_32(volatile uint32_t *p, uint32_t val)
{

	fence();

	*p = val;
}
334
335
/* "int" aliases for the 32-bit acquire variants. */
#define	atomic_add_acq_int	atomic_add_acq_32
#define	atomic_clear_acq_int	atomic_clear_acq_32
#define	atomic_cmpset_acq_int	atomic_cmpset_acq_32
#define	atomic_fcmpset_acq_int	atomic_fcmpset_acq_32
#define	atomic_load_acq_int	atomic_load_acq_32
#define	atomic_set_acq_int	atomic_set_acq_32
#define	atomic_subtract_acq_int	atomic_subtract_acq_32

/* "int" aliases for the 32-bit release variants. */
#define	atomic_add_rel_int	atomic_add_rel_32
#define	atomic_clear_rel_int	atomic_clear_rel_32
#define	atomic_cmpset_rel_int	atomic_cmpset_rel_32
#define	atomic_fcmpset_rel_int	atomic_fcmpset_rel_32
#define	atomic_set_rel_int	atomic_set_rel_32
#define	atomic_subtract_rel_int	atomic_subtract_rel_32
#define	atomic_store_rel_int	atomic_store_rel_32
350
351
/* Atomically add val to *p using amoadd.d; rd = zero discards the result. */
static __inline void
atomic_add_64(volatile uint64_t *p, uint64_t val)
{

	__asm __volatile("amoadd.d zero, %1, %0"
			: "+A" (*p)
			: "r" (val)
			: "memory");
}

/* Atomically subtract val from *p via an atomic add of -val. */
static __inline void
atomic_subtract_64(volatile uint64_t *p, uint64_t val)
{

	__asm __volatile("amoadd.d zero, %1, %0"
			: "+A" (*p)
			: "r" (-val)
			: "memory");
}

/* Atomically OR the bits of val into *p. */
static __inline void
atomic_set_64(volatile uint64_t *p, uint64_t val)
{

	__asm __volatile("amoor.d zero, %1, %0"
			: "+A" (*p)
			: "r" (val)
			: "memory");
}

/* Atomically clear the bits of val in *p by AND-ing with ~val. */
static __inline void
atomic_clear_64(volatile uint64_t *p, uint64_t val)
{

	__asm __volatile("amoand.d zero, %1, %0"
			: "+A" (*p)
			: "r" (~val)
			: "memory");
}
390
391
/*
 * 64-bit compare-and-set: if *p == cmpval, atomically store newval and
 * return nonzero; otherwise return 0.  LR/SC loop retries on SC failure.
 * No sign-extension cast is needed here: lr.d loads the full register.
 */
static __inline int
atomic_cmpset_64(volatile uint64_t *p, uint64_t cmpval, uint64_t newval)
{
	uint64_t tmp;
	int res;

	res = 0;

	__asm __volatile(
		"0:"
			"li   %1, 1\n"		/* Preset to fail */
			"lr.d %0, %2\n"
			"bne  %0, %z3, 1f\n"
			"sc.d %1, %z4, %2\n"
			"bnez %1, 0b\n"
		"1:"
			: "=&r" (tmp), "=&r" (res), "+A" (*p)
			: "rJ" (cmpval), "rJ" (newval)
			: "memory");

	return (!res);
}
413
414
/*
 * 64-bit fetch-compare-and-set: like atomic_cmpset_64, but on a compare
 * mismatch the observed value is written back to *cmpval.  Single LR/SC
 * attempt; a store-conditional failure returns 0 (spurious failure is
 * permitted by the fcmpset contract).
 */
static __inline int
atomic_fcmpset_64(volatile uint64_t *p, uint64_t *cmpval, uint64_t newval)
{
	uint64_t tmp;
	int res;

	res = 0;

	__asm __volatile(
		"0:"
			"li   %1, 1\n"		/* Preset to fail */
			"lr.d %0, %2\n"		/* Load old value */
			"bne  %0, %z4, 1f\n"	/* Compare */
			"sc.d %1, %z5, %2\n"	/* Try to store new value */
			"j 2f\n"
		"1:"
			"sd   %0, %3\n"		/* Save old value */
		"2:"
			: "=&r" (tmp), "=&r" (res), "+A" (*p), "+A" (*cmpval)
			: "rJ" (*cmpval), "rJ" (newval)
			: "memory");

	return (!res);
}
438
439
/* Atomically add val to *p and return the PREVIOUS value of *p. */
static __inline uint64_t
atomic_fetchadd_64(volatile uint64_t *p, uint64_t val)
{
	uint64_t ret;

	__asm __volatile("amoadd.d %0, %2, %1"
			: "=&r" (ret), "+A" (*p)
			: "r" (val)
			: "memory");

	return (ret);
}

/*
 * Atomically replace *p with 0 and return the previous value
 * (atomic swap with zero).
 */
static __inline uint64_t
atomic_readandclear_64(volatile uint64_t *p)
{
	uint64_t ret;
	uint64_t val;

	val = 0;

	__asm __volatile("amoswap.d %0, %2, %1"
			: "=&r"(ret), "+A" (*p)
			: "r" (val)
			: "memory");

	return (ret);
}
467
468
/*
 * Atomically clear bit (val % 64) of *p and return whether that bit was
 * previously set.
 */
static __inline int
atomic_testandclear_64(volatile uint64_t *p, u_int val)
{
	uint64_t mask, old;

	mask = 1ul << (val & 63);	/* bit index reduced mod 64 */
	__asm __volatile("amoand.d %0, %2, %1"
			: "=&r" (old), "+A" (*p)
			: "r" (~mask)
			: "memory");

	return ((old & mask) != 0);
}

/*
 * Atomically set bit (val % 64) of *p and return whether that bit was
 * previously set.
 */
static __inline int
atomic_testandset_64(volatile uint64_t *p, u_int val)
{
	uint64_t mask, old;

	mask = 1ul << (val & 63);	/* bit index reduced mod 64 */
	__asm __volatile("amoor.d %0, %2, %1"
			: "=&r" (old), "+A" (*p)
			: "r" (mask)
			: "memory");

	return ((old & mask) != 0);
}

/*
 * Acquire-ordered variant of atomic_testandset_64: uses the .aq-annotated
 * AMO instead of a separate fence.
 */
static __inline int
atomic_testandset_acq_64(volatile uint64_t *p, u_int val)
{
	uint64_t mask, old;

	mask = 1ul << (val & 63);	/* bit index reduced mod 64 */
	__asm __volatile("amoor.d.aq %0, %2, %1"
			: "=&r" (old), "+A" (*p)
			: "r" (mask)
			: "memory");

	return ((old & mask) != 0);
}
509
510
/* Atomically store val into *p and return the previous 32-bit value. */
static __inline uint32_t
atomic_swap_32(volatile uint32_t *p, uint32_t val)
{
	uint32_t old;

	__asm __volatile("amoswap.w %0, %2, %1"
			: "=&r"(old), "+A" (*p)
			: "r" (val)
			: "memory");

	return (old);
}

/* Atomically store val into *p and return the previous 64-bit value. */
static __inline uint64_t
atomic_swap_64(volatile uint64_t *p, uint64_t val)
{
	uint64_t old;

	__asm __volatile("amoswap.d %0, %2, %1"
			: "=&r"(old), "+A" (*p)
			: "r" (val)
			: "memory");

	return (old);
}
535
536
#define	atomic_swap_int			atomic_swap_32

/*
 * "long" and pointer-sized aliases map to the 64-bit operations
 * (riscv is LP64: long, pointers, and uint64_t are all 64 bits).
 */
#define	atomic_add_long			atomic_add_64
#define	atomic_clear_long		atomic_clear_64
#define	atomic_cmpset_long		atomic_cmpset_64
#define	atomic_fcmpset_long		atomic_fcmpset_64
#define	atomic_fetchadd_long		atomic_fetchadd_64
#define	atomic_readandclear_long	atomic_readandclear_64
#define	atomic_set_long			atomic_set_64
#define	atomic_subtract_long		atomic_subtract_64
#define	atomic_swap_long		atomic_swap_64
#define	atomic_testandclear_long	atomic_testandclear_64
#define	atomic_testandset_long		atomic_testandset_64
#define	atomic_testandset_acq_long	atomic_testandset_acq_64

#define	atomic_add_ptr			atomic_add_64
#define	atomic_clear_ptr		atomic_clear_64
#define	atomic_cmpset_ptr		atomic_cmpset_64
#define	atomic_fcmpset_ptr		atomic_fcmpset_64
#define	atomic_fetchadd_ptr		atomic_fetchadd_64
#define	atomic_readandclear_ptr		atomic_readandclear_64
#define	atomic_set_ptr			atomic_set_64
#define	atomic_subtract_ptr		atomic_subtract_64
#define	atomic_swap_ptr			atomic_swap_64
#define	atomic_testandclear_ptr		atomic_testandclear_64
#define	atomic_testandset_ptr		atomic_testandset_64

/* Acquire/release variants of the 64-bit void operations. */
ATOMIC_ACQ_REL(set, 64)
ATOMIC_ACQ_REL(clear, 64)
ATOMIC_ACQ_REL(add, 64)
ATOMIC_ACQ_REL(subtract, 64)

/* Acquire/release variants of 64-bit cmpset/fcmpset. */
ATOMIC_CMPSET_ACQ_REL(64);
ATOMIC_FCMPSET_ACQ_REL(64);
570
571
/*
 * 64-bit load with acquire semantics: plain load followed by a full fence.
 */
static __inline uint64_t
atomic_load_acq_64(const volatile uint64_t *p)
{
	uint64_t ret;

	ret = *p;

	fence();

	return (ret);
}

/*
 * 64-bit store with release semantics: full fence followed by a plain store.
 */
static __inline void
atomic_store_rel_64(volatile uint64_t *p, uint64_t val)
{

	fence();

	*p = val;
}
591
592
/* "long" aliases for the 64-bit acquire variants. */
#define	atomic_add_acq_long		atomic_add_acq_64
#define	atomic_clear_acq_long		atomic_clear_acq_64
#define	atomic_cmpset_acq_long		atomic_cmpset_acq_64
#define	atomic_fcmpset_acq_long		atomic_fcmpset_acq_64
#define	atomic_load_acq_long		atomic_load_acq_64
#define	atomic_set_acq_long		atomic_set_acq_64
#define	atomic_subtract_acq_long	atomic_subtract_acq_64

/* Pointer-sized aliases for the 64-bit acquire variants. */
#define	atomic_add_acq_ptr		atomic_add_acq_64
#define	atomic_clear_acq_ptr		atomic_clear_acq_64
#define	atomic_cmpset_acq_ptr		atomic_cmpset_acq_64
#define	atomic_fcmpset_acq_ptr		atomic_fcmpset_acq_64
#define	atomic_load_acq_ptr		atomic_load_acq_64
#define	atomic_set_acq_ptr		atomic_set_acq_64
#define	atomic_subtract_acq_ptr		atomic_subtract_acq_64

/* All instantiations done; the generator macro is no longer needed. */
#undef ATOMIC_ACQ_REL
609
610
/*
 * Stand-alone thread fences.  All four orderings are implemented with the
 * same full fence instruction; RISC-V's finer-grained fence variants are
 * not used here, so each is simply conservative.
 */
static __inline void
atomic_thread_fence_acq(void)
{

	fence();
}

static __inline void
atomic_thread_fence_rel(void)
{

	fence();
}

static __inline void
atomic_thread_fence_acq_rel(void)
{

	fence();
}

static __inline void
atomic_thread_fence_seq_cst(void)
{

	fence();
}
637
638
/*
 * "long" aliases for the 64-bit release variants.  (This list previously
 * defined atomic_add_rel_long and atomic_clear_rel_long twice; the
 * redundant duplicate definitions have been dropped.)
 */
#define	atomic_add_rel_long		atomic_add_rel_64
#define	atomic_clear_rel_long		atomic_clear_rel_64
#define	atomic_cmpset_rel_long		atomic_cmpset_rel_64
#define	atomic_fcmpset_rel_long		atomic_fcmpset_rel_64
#define	atomic_set_rel_long		atomic_set_rel_64
#define	atomic_subtract_rel_long	atomic_subtract_rel_64
#define	atomic_store_rel_long		atomic_store_rel_64

/* Pointer-sized aliases for the 64-bit release variants. */
#define	atomic_add_rel_ptr		atomic_add_rel_64
#define	atomic_clear_rel_ptr		atomic_clear_rel_64
#define	atomic_cmpset_rel_ptr		atomic_cmpset_rel_64
#define	atomic_fcmpset_rel_ptr		atomic_fcmpset_rel_64
#define	atomic_set_rel_ptr		atomic_set_rel_64
#define	atomic_subtract_rel_ptr		atomic_subtract_rel_64
#define	atomic_store_rel_ptr		atomic_store_rel_64
657
/* Generic 8/16-bit implementations built on the 32-bit primitives above. */
#include <sys/_atomic_subword.h>

/* "short" aliases for the subword set/clear operations. */
#define	atomic_set_short	atomic_set_16
#define	atomic_clear_short	atomic_clear_16

#endif /* _MACHINE_ATOMIC_H_ */
663
664