Path: blob/master/arch/sparc/include/asm/atomic_64.h
/* atomic.h: Thankfully the V9 is at least reasonable for this
 * stuff.
 *
 * Copyright (C) 1996, 1997, 2000 David S. Miller ([email protected])
 */

#ifndef __ARCH_SPARC64_ATOMIC__
#define __ARCH_SPARC64_ATOMIC__

#include <linux/types.h>
#include <asm/system.h>

#define ATOMIC_INIT(i)		{ (i) }
#define ATOMIC64_INIT(i)	{ (i) }

#define atomic_read(v)		(*(volatile int *)&(v)->counter)
#define atomic64_read(v)	(*(volatile long *)&(v)->counter)

#define atomic_set(v, i)	(((v)->counter) = i)
#define atomic64_set(v, i)	(((v)->counter) = i)

extern void atomic_add(int, atomic_t *);
extern void atomic64_add(long, atomic64_t *);
extern void atomic_sub(int, atomic_t *);
extern void atomic64_sub(long, atomic64_t *);

extern int atomic_add_ret(int, atomic_t *);
extern long atomic64_add_ret(long, atomic64_t *);
extern int atomic_sub_ret(int, atomic_t *);
extern long atomic64_sub_ret(long, atomic64_t *);

#define atomic_dec_return(v)	atomic_sub_ret(1, v)
#define atomic64_dec_return(v)	atomic64_sub_ret(1, v)

#define atomic_inc_return(v)	atomic_add_ret(1, v)
#define atomic64_inc_return(v)	atomic64_add_ret(1, v)

#define atomic_sub_return(i, v)	atomic_sub_ret(i, v)
#define atomic64_sub_return(i, v) atomic64_sub_ret(i, v)

#define atomic_add_return(i, v)	atomic_add_ret(i, v)
#define atomic64_add_return(i, v) atomic64_add_ret(i, v)

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)

#define atomic_sub_and_test(i, v) (atomic_sub_ret(i, v) == 0)
#define atomic64_sub_and_test(i, v) (atomic64_sub_ret(i, v) == 0)

#define atomic_dec_and_test(v) (atomic_sub_ret(1, v) == 0)
#define atomic64_dec_and_test(v) (atomic64_sub_ret(1, v) == 0)

#define atomic_inc(v) atomic_add(1, v)
#define atomic64_inc(v) atomic64_add(1, v)

#define atomic_dec(v) atomic_sub(1, v)
#define atomic64_dec(v) atomic64_sub(1, v)

#define atomic_add_negative(i, v) (atomic_add_ret(i, v) < 0)
#define atomic64_add_negative(i, v) (atomic64_add_ret(i, v) < 0)

#define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))

static inline int atomic_add_unless(atomic_t *v, int a, int u)
{
	int c, old;
	c = atomic_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c != (u);
}

#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)

#define atomic64_cmpxchg(v, o, n) \
	((__typeof__((v)->counter))cmpxchg(&((v)->counter), (o), (n)))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), new))

static inline long atomic64_add_unless(atomic64_t *v, long a, long u)
{
	long c, old;
	c = atomic64_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic64_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c != (u);
}

#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)

/* Atomic operations are already serializing */
#define smp_mb__before_atomic_dec()	barrier()
#define smp_mb__after_atomic_dec()	barrier()
#define smp_mb__before_atomic_inc()	barrier()
#define smp_mb__after_atomic_inc()	barrier()

#include <asm-generic/atomic-long.h>
#endif /* !(__ARCH_SPARC64_ATOMIC__) */
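The atomic_add_unless() routine above is the classic compare-and-swap retry
loop: read the counter, bail out if it already holds the forbidden value u,
otherwise try to install c + a with cmpxchg(), and if another CPU won the
race, retry with the value cmpxchg() returned. atomic_inc_not_zero() builds
on it to take a reference only while an object is still live. Below is a
minimal userspace sketch of the same pattern, using C11 <stdatomic.h> in
place of the kernel's cmpxchg(); the add_unless() helper name is ours for
illustration, not a kernel API.

/* Userspace analogue of atomic_add_unless(), for illustration only. */
#include <stdatomic.h>
#include <stdbool.h>
#include <stdio.h>

/* Add 'a' to '*v' unless '*v' equals 'u'; returns true if the add
 * happened.  Same retry structure as atomic_add_unless() above. */
static bool add_unless(atomic_int *v, int a, int u)
{
	int c = atomic_load(v);
	for (;;) {
		if (c == u)
			return false;	/* counter hit the forbidden value */
		/* On failure, compare_exchange rewrites 'c' with the
		 * value another thread installed, so just retry. */
		if (atomic_compare_exchange_weak(v, &c, c + a))
			return true;	/* our c + a went in atomically */
	}
}

int main(void)
{
	atomic_int refcount = 1;

	/* The atomic_inc_not_zero() idiom: only take a reference if the
	 * count has not already dropped to zero (object freed). */
	if (add_unless(&refcount, 1, 0))
		printf("got reference, refcount now %d\n", atomic_load(&refcount));
	return 0;
}

Note that the weak form of compare-exchange may fail spuriously on some
architectures; the surrounding retry loop absorbs that, just as the kernel
loop absorbs a cmpxchg() lost to another CPU.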