Path: tools/testing/selftests/bpf/bpf_atomic.h
// SPDX-License-Identifier: GPL-2.0
/* Copyright (c) 2025 Meta Platforms, Inc. and affiliates. */
#ifndef BPF_ATOMIC_H
#define BPF_ATOMIC_H

#include <vmlinux.h>
#include <bpf/bpf_helpers.h>
#include "bpf_experimental.h"

extern bool CONFIG_X86_64 __kconfig __weak;

/*
 * __unqual_typeof(x) - Declare an unqualified scalar type, leaving
 * non-scalar types unchanged,
 *
 * Prefer C11 _Generic for better compile-times and simpler code. Note: 'char'
 * is not type-compatible with 'signed char', and we define a separate case.
 *
 * This is copied verbatim from kernel's include/linux/compiler_types.h, but
 * with default expression (for pointers) changed from (x) to (typeof(x)0).
 *
 * This is because LLVM has a bug where for lvalue (x), it does not get rid of
 * an extra address_space qualifier, but does in case of rvalue (typeof(x)0).
 * Hence, for pointers, we need to create an rvalue expression to get the
 * desired type. See https://github.com/llvm/llvm-project/issues/53400.
 */
#define __scalar_type_to_expr_cases(type) \
	unsigned type : (unsigned type)0, signed type : (signed type)0

#define __unqual_typeof(x)                              \
	typeof(_Generic((x),                            \
		char: (char)0,                          \
		__scalar_type_to_expr_cases(char),      \
		__scalar_type_to_expr_cases(short),     \
		__scalar_type_to_expr_cases(int),       \
		__scalar_type_to_expr_cases(long),      \
		__scalar_type_to_expr_cases(long long), \
		default: (typeof(x))0))

/* No-op for BPF */
#define cpu_relax() ({})

#define READ_ONCE(x) (*(volatile typeof(x) *)&(x))

#define WRITE_ONCE(x, val) ((*(volatile typeof(x) *)&(x)) = (val))

#define cmpxchg(p, old, new) __sync_val_compare_and_swap((p), old, new)

#define try_cmpxchg(p, pold, new)                                 \
	({                                                        \
		__unqual_typeof(*(pold)) __o = *(pold);           \
		__unqual_typeof(*(p)) __r = cmpxchg(p, __o, new); \
		if (__r != __o)                                   \
			*(pold) = __r;                            \
		__r == __o;                                       \
	})

#define try_cmpxchg_relaxed(p, pold, new) try_cmpxchg(p, pold, new)

#define try_cmpxchg_acquire(p, pold, new) try_cmpxchg(p, pold, new)

#define smp_mb()                                 \
	({                                       \
		volatile unsigned long __val;    \
		__sync_fetch_and_add(&__val, 0); \
	})

#define smp_rmb()                   \
	({                          \
		if (!CONFIG_X86_64) \
			smp_mb();   \
		else                \
			barrier();  \
	})

#define smp_wmb()                   \
	({                          \
		if (!CONFIG_X86_64) \
			smp_mb();   \
		else                \
			barrier();  \
	})

/* Control dependency provides LOAD->STORE, provide LOAD->LOAD */
#define smp_acquire__after_ctrl_dep() ({ smp_rmb(); })

#define smp_load_acquire(p)                                  \
	({                                                   \
		__unqual_typeof(*(p)) __v = READ_ONCE(*(p)); \
		if (!CONFIG_X86_64)                          \
			smp_mb();                            \
		barrier();                                   \
		__v;                                         \
	})

#define smp_store_release(p, val)      \
	({                             \
		if (!CONFIG_X86_64)    \
			smp_mb();      \
		barrier();             \
		WRITE_ONCE(*(p), val); \
	})

#define smp_cond_load_relaxed_label(p, cond_expr, label)                \
	({                                                              \
		typeof(p) __ptr = (p);                                  \
		__unqual_typeof(*(p)) VAL;                              \
		for (;;) {                                              \
			VAL = (__unqual_typeof(*(p)))READ_ONCE(*__ptr); \
			if (cond_expr)                                  \
				break;                                  \
			cond_break_label(label);                        \
			cpu_relax();                                    \
		}                                                       \
		(typeof(*(p)))VAL;                                      \
	})

#define smp_cond_load_acquire_label(p, cond_expr, label)                  \
	({                                                                \
		__unqual_typeof(*p) __val =                               \
			smp_cond_load_relaxed_label(p, cond_expr, label); \
		smp_acquire__after_ctrl_dep();                            \
		(typeof(*(p)))__val;                                      \
	})

#define atomic_read(p) READ_ONCE((p)->counter)

#define atomic_cond_read_relaxed_label(p, cond_expr, label) \
	smp_cond_load_relaxed_label(&(p)->counter, cond_expr, label)

#define atomic_cond_read_acquire_label(p, cond_expr, label) \
	smp_cond_load_acquire_label(&(p)->counter, cond_expr, label)
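/*
 * Editorial note, not in the upstream header: like the kernel's try_cmpxchg(),
 * the atomic_try_cmpxchg_*() wrappers below return true on success and, on
 * failure, write the value actually observed back into *pold (see the
 * try_cmpxchg() definition above). A hypothetical increment loop over an
 * atomic_t *v could build on this; in real BPF code the loop must stay
 * bounded, e.g. via cond_break_label():
 *
 *	int old = atomic_read(v);
 *
 *	while (!atomic_try_cmpxchg_relaxed(v, &old, old + 1))
 *		;	// 'old' now holds the freshly observed value
 */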
#define atomic_try_cmpxchg_relaxed(p, pold, new) \
	try_cmpxchg_relaxed(&(p)->counter, pold, new)

#define atomic_try_cmpxchg_acquire(p, pold, new) \
	try_cmpxchg_acquire(&(p)->counter, pold, new)

#endif /* BPF_ATOMIC_H */
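/*
 * Usage sketch (editorial addition, not part of the upstream file): a minimal
 * producer/consumer hand-off built on the primitives above. The struct and
 * function names are hypothetical. Inside cond_expr,
 * smp_cond_load_acquire_label() binds the freshly loaded value to VAL, and
 * cond_break_label() (from bpf_experimental.h) exits to the given label when
 * the verifier's loop budget runs out. Guarded by #if 0 so the header itself
 * is unaffected.
 */
#if 0 /* example only */
#include <vmlinux.h>
#include <bpf/bpf_helpers.h>
#include "bpf_atomic.h"

struct handoff {
	int data;
	int ready;
};

/* Producer: publish data first, then set ready with release ordering. */
static __always_inline void publish(struct handoff *h, int v)
{
	WRITE_ONCE(h->data, v);
	smp_store_release(&h->ready, 1);
}

/* Consumer: spin with acquire ordering until ready == 1; bail out to the
 * 'out' label if the bounded loop is cut short.
 */
static __always_inline int consume(struct handoff *h)
{
	smp_cond_load_acquire_label(&h->ready, VAL == 1, out);
	return READ_ONCE(h->data);
out:
	return -1;
}
#endif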