arch/loongarch/include/asm/atomic-amo.h
/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Atomic operations (AMO).
 *
 * Copyright (C) 2020-2025 Loongson Technology Corporation Limited
 */

#ifndef _ASM_ATOMIC_AMO_H
#define _ASM_ATOMIC_AMO_H

#include <linux/types.h>
#include <asm/barrier.h>
#include <asm/cmpxchg.h>

#define ATOMIC_OP(op, I, asm_op) \
static inline void arch_atomic_##op(int i, atomic_t *v) \
{ \
        __asm__ __volatile__( \
        "am"#asm_op".w" " $zero, %1, %0 \n" \
        : "+ZB" (v->counter) \
        : "r" (I) \
        : "memory"); \
}

#define ATOMIC_OP_RETURN(op, I, asm_op, c_op, mb, suffix) \
static inline int arch_atomic_##op##_return##suffix(int i, atomic_t *v) \
{ \
        int result; \
        \
        __asm__ __volatile__( \
        "am"#asm_op#mb".w" " %1, %2, %0 \n" \
        : "+ZB" (v->counter), "=&r" (result) \
        : "r" (I) \
        : "memory"); \
        \
        return result c_op I; \
}

#define ATOMIC_FETCH_OP(op, I, asm_op, mb, suffix) \
static inline int arch_atomic_fetch_##op##suffix(int i, atomic_t *v) \
{ \
        int result; \
        \
        __asm__ __volatile__( \
        "am"#asm_op#mb".w" " %1, %2, %0 \n" \
        : "+ZB" (v->counter), "=&r" (result) \
        : "r" (I) \
        : "memory"); \
        \
        return result; \
}

#define ATOMIC_OPS(op, I, asm_op, c_op) \
        ATOMIC_OP(op, I, asm_op) \
        ATOMIC_OP_RETURN(op, I, asm_op, c_op, _db, ) \
        ATOMIC_OP_RETURN(op, I, asm_op, c_op, , _relaxed) \
        ATOMIC_FETCH_OP(op, I, asm_op, _db, ) \
        ATOMIC_FETCH_OP(op, I, asm_op, , _relaxed)

ATOMIC_OPS(add, i, add, +)
ATOMIC_OPS(sub, -i, add, +)

#define arch_atomic_add_return          arch_atomic_add_return
#define arch_atomic_add_return_acquire  arch_atomic_add_return
#define arch_atomic_add_return_release  arch_atomic_add_return
#define arch_atomic_add_return_relaxed  arch_atomic_add_return_relaxed
#define arch_atomic_sub_return          arch_atomic_sub_return
#define arch_atomic_sub_return_acquire  arch_atomic_sub_return
#define arch_atomic_sub_return_release  arch_atomic_sub_return
#define arch_atomic_sub_return_relaxed  arch_atomic_sub_return_relaxed
#define arch_atomic_fetch_add           arch_atomic_fetch_add
#define arch_atomic_fetch_add_acquire   arch_atomic_fetch_add
#define arch_atomic_fetch_add_release   arch_atomic_fetch_add
#define arch_atomic_fetch_add_relaxed   arch_atomic_fetch_add_relaxed
#define arch_atomic_fetch_sub           arch_atomic_fetch_sub
#define arch_atomic_fetch_sub_acquire   arch_atomic_fetch_sub
#define arch_atomic_fetch_sub_release   arch_atomic_fetch_sub
#define arch_atomic_fetch_sub_relaxed   arch_atomic_fetch_sub_relaxed

#undef ATOMIC_OPS

#define ATOMIC_OPS(op, I, asm_op) \
        ATOMIC_OP(op, I, asm_op) \
        ATOMIC_FETCH_OP(op, I, asm_op, _db, ) \
        ATOMIC_FETCH_OP(op, I, asm_op, , _relaxed)

ATOMIC_OPS(and, i, and)
ATOMIC_OPS(or, i, or)
ATOMIC_OPS(xor, i, xor)

#define arch_atomic_fetch_and           arch_atomic_fetch_and
#define arch_atomic_fetch_and_acquire   arch_atomic_fetch_and
#define arch_atomic_fetch_and_release   arch_atomic_fetch_and
#define arch_atomic_fetch_and_relaxed   arch_atomic_fetch_and_relaxed
#define arch_atomic_fetch_or            arch_atomic_fetch_or
#define arch_atomic_fetch_or_acquire    arch_atomic_fetch_or
#define arch_atomic_fetch_or_release    arch_atomic_fetch_or
#define arch_atomic_fetch_or_relaxed    arch_atomic_fetch_or_relaxed
#define arch_atomic_fetch_xor           arch_atomic_fetch_xor
#define arch_atomic_fetch_xor_acquire   arch_atomic_fetch_xor
#define arch_atomic_fetch_xor_release   arch_atomic_fetch_xor
#define arch_atomic_fetch_xor_relaxed   arch_atomic_fetch_xor_relaxed

#undef ATOMIC_OPS
#undef ATOMIC_FETCH_OP
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP

#ifdef CONFIG_64BIT

#define ATOMIC64_OP(op, I, asm_op) \
static inline void arch_atomic64_##op(long i, atomic64_t *v) \
{ \
        __asm__ __volatile__( \
        "am"#asm_op".d " " $zero, %1, %0 \n" \
        : "+ZB" (v->counter) \
        : "r" (I) \
        : "memory"); \
}

#define ATOMIC64_OP_RETURN(op, I, asm_op, c_op, mb, suffix) \
static inline long arch_atomic64_##op##_return##suffix(long i, atomic64_t *v) \
{ \
        long result; \
        __asm__ __volatile__( \
        "am"#asm_op#mb".d " " %1, %2, %0 \n" \
        : "+ZB" (v->counter), "=&r" (result) \
        : "r" (I) \
        : "memory"); \
        \
        return result c_op I; \
}

#define ATOMIC64_FETCH_OP(op, I, asm_op, mb, suffix) \
static inline long arch_atomic64_fetch_##op##suffix(long i, atomic64_t *v) \
{ \
        long result; \
        \
        __asm__ __volatile__( \
        "am"#asm_op#mb".d " " %1, %2, %0 \n" \
        : "+ZB" (v->counter), "=&r" (result) \
        : "r" (I) \
        : "memory"); \
        \
        return result; \
}

#define ATOMIC64_OPS(op, I, asm_op, c_op) \
        ATOMIC64_OP(op, I, asm_op) \
        ATOMIC64_OP_RETURN(op, I, asm_op, c_op, _db, ) \
        ATOMIC64_OP_RETURN(op, I, asm_op, c_op, , _relaxed) \
        ATOMIC64_FETCH_OP(op, I, asm_op, _db, ) \
        ATOMIC64_FETCH_OP(op, I, asm_op, , _relaxed)

ATOMIC64_OPS(add, i, add, +)
ATOMIC64_OPS(sub, -i, add, +)

#define arch_atomic64_add_return                arch_atomic64_add_return
#define arch_atomic64_add_return_acquire        arch_atomic64_add_return
#define arch_atomic64_add_return_release        arch_atomic64_add_return
#define arch_atomic64_add_return_relaxed        arch_atomic64_add_return_relaxed
#define arch_atomic64_sub_return                arch_atomic64_sub_return
#define arch_atomic64_sub_return_acquire        arch_atomic64_sub_return
#define arch_atomic64_sub_return_release        arch_atomic64_sub_return
#define arch_atomic64_sub_return_relaxed        arch_atomic64_sub_return_relaxed
#define arch_atomic64_fetch_add                 arch_atomic64_fetch_add
#define arch_atomic64_fetch_add_acquire         arch_atomic64_fetch_add
#define arch_atomic64_fetch_add_release         arch_atomic64_fetch_add
#define arch_atomic64_fetch_add_relaxed         arch_atomic64_fetch_add_relaxed
#define arch_atomic64_fetch_sub                 arch_atomic64_fetch_sub
#define arch_atomic64_fetch_sub_acquire         arch_atomic64_fetch_sub
#define arch_atomic64_fetch_sub_release         arch_atomic64_fetch_sub
#define arch_atomic64_fetch_sub_relaxed         arch_atomic64_fetch_sub_relaxed

#undef ATOMIC64_OPS

#define ATOMIC64_OPS(op, I, asm_op) \
        ATOMIC64_OP(op, I, asm_op) \
        ATOMIC64_FETCH_OP(op, I, asm_op, _db, ) \
        ATOMIC64_FETCH_OP(op, I, asm_op, , _relaxed)

ATOMIC64_OPS(and, i, and)
ATOMIC64_OPS(or, i, or)
ATOMIC64_OPS(xor, i, xor)

#define arch_atomic64_fetch_and                 arch_atomic64_fetch_and
#define arch_atomic64_fetch_and_acquire         arch_atomic64_fetch_and
#define arch_atomic64_fetch_and_release         arch_atomic64_fetch_and
#define arch_atomic64_fetch_and_relaxed         arch_atomic64_fetch_and_relaxed
#define arch_atomic64_fetch_or                  arch_atomic64_fetch_or
#define arch_atomic64_fetch_or_acquire          arch_atomic64_fetch_or
#define arch_atomic64_fetch_or_release          arch_atomic64_fetch_or
#define arch_atomic64_fetch_or_relaxed          arch_atomic64_fetch_or_relaxed
#define arch_atomic64_fetch_xor                 arch_atomic64_fetch_xor
#define arch_atomic64_fetch_xor_acquire         arch_atomic64_fetch_xor
#define arch_atomic64_fetch_xor_release         arch_atomic64_fetch_xor
#define arch_atomic64_fetch_xor_relaxed         arch_atomic64_fetch_xor_relaxed

#undef ATOMIC64_OPS
#undef ATOMIC64_FETCH_OP
#undef ATOMIC64_OP_RETURN
#undef ATOMIC64_OP

#endif

#endif /* _ASM_ATOMIC_AMO_H */
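/*
 * Illustrative sketch only: the fully-ordered 32-bit return variant comes
 * from ATOMIC_OP_RETURN(add, i, add, +, _db, ), whose string pasting
 * "am"#asm_op#mb".w" yields "amadd_db.w", so the generated function should
 * look roughly like the one below.  The AMO instruction atomically adds %2
 * to the word at %0 and writes the old value to %1 (the _db form adds
 * barrier semantics); the new value is then recomputed in C as result + i.
 *
 *	static inline int arch_atomic_add_return(int i, atomic_t *v)
 *	{
 *		int result;
 *
 *		__asm__ __volatile__(
 *		"amadd_db.w" " %1, %2, %0 \n"
 *		: "+ZB" (v->counter), "=&r" (result)
 *		: "r" (i)
 *		: "memory");
 *
 *		return result + i;
 *	}
 */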