Path: arch/powerpc/include/asm/atomic.h
#ifndef _ASM_POWERPC_ATOMIC_H_
#define _ASM_POWERPC_ATOMIC_H_

/*
 * PowerPC atomic operations
 */

#include <linux/types.h>

#ifdef __KERNEL__
#include <linux/compiler.h>
#include <asm/synch.h>
#include <asm/asm-compat.h>
#include <asm/system.h>

#define ATOMIC_INIT(i)		{ (i) }

static __inline__ int atomic_read(const atomic_t *v)
{
	int t;

	__asm__ __volatile__("lwz%U1%X1 %0,%1" : "=r"(t) : "m"(v->counter));

	return t;
}

static __inline__ void atomic_set(atomic_t *v, int i)
{
	__asm__ __volatile__("stw%U0%X0 %1,%0" : "=m"(v->counter) : "r"(i));
}

static __inline__ void atomic_add(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%3		# atomic_add\n\
	add	%0,%2,%0\n"
	PPC405_ERR77(0,%3)
"	stwcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (a), "r" (&v->counter)
	: "cc");
}

static __inline__ int atomic_add_return(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	PPC_RELEASE_BARRIER
"1:	lwarx	%0,0,%2		# atomic_add_return\n\
	add	%0,%1,%0\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	PPC_ACQUIRE_BARRIER
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}

#define atomic_add_negative(a, v)	(atomic_add_return((a), (v)) < 0)

static __inline__ void atomic_sub(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%3		# atomic_sub\n\
	subf	%0,%2,%0\n"
	PPC405_ERR77(0,%3)
"	stwcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (a), "r" (&v->counter)
	: "cc");
}

static __inline__ int atomic_sub_return(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	PPC_RELEASE_BARRIER
"1:	lwarx	%0,0,%2		# atomic_sub_return\n\
	subf	%0,%1,%0\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	PPC_ACQUIRE_BARRIER
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}

static __inline__ void atomic_inc(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2		# atomic_inc\n\
	addic	%0,%0,1\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc", "xer");
}

static __inline__ int atomic_inc_return(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	PPC_RELEASE_BARRIER
"1:	lwarx	%0,0,%1		# atomic_inc_return\n\
	addic	%0,%0,1\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1 \n\
	bne-	1b"
	PPC_ACQUIRE_BARRIER
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)

static __inline__ void atomic_dec(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2		# atomic_dec\n\
	addic	%0,%0,-1\n"
	PPC405_ERR77(0,%2)\
"	stwcx.	%0,0,%2\n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc", "xer");
}

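/*
 * Illustrative sketch, not part of the original header: each
 * lwarx/stwcx. pair above forms a load-reserve/store-conditional retry
 * loop.  In C-like pseudocode (load_reserve() and store_conditional()
 * are hypothetical helpers standing in for the two instructions),
 * atomic_add() behaves as if it were:
 *
 *	do {
 *		t = load_reserve(&v->counter);		   (lwarx)
 *		t += a;					   (add)
 *	} while (!store_conditional(&v->counter, t));	   (stwcx. + bne-)
 *
 * store_conditional() fails whenever another CPU wrote the word after
 * the load_reserve(), which is what makes the read-modify-write atomic.
 * The value-returning variants additionally bracket the loop with
 * PPC_RELEASE_BARRIER/PPC_ACQUIRE_BARRIER and clobber "memory", so they
 * also order surrounding accesses; the void variants deliberately do not.
 */
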
static __inline__ int atomic_dec_return(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	PPC_RELEASE_BARRIER
"1:	lwarx	%0,0,%1		# atomic_dec_return\n\
	addic	%0,%0,-1\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1\n\
	bne-	1b"
	PPC_ACQUIRE_BARRIER
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}

#define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))

/**
 * atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
static __inline__ int atomic_add_unless(atomic_t *v, int a, int u)
{
	int t;

	__asm__ __volatile__ (
	PPC_RELEASE_BARRIER
"1:	lwarx	%0,0,%1		# atomic_add_unless\n\
	cmpw	0,%0,%3 \n\
	beq-	2f \n\
	add	%0,%2,%0 \n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%1 \n\
	bne-	1b \n"
	PPC_ACQUIRE_BARRIER
"	subf	%0,%2,%0 \n\
2:"
	: "=&r" (t)
	: "r" (&v->counter), "r" (a), "r" (u)
	: "cc", "memory");

	return t != u;
}

#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)

#define atomic_sub_and_test(a, v)	(atomic_sub_return((a), (v)) == 0)
#define atomic_dec_and_test(v)		(atomic_dec_return((v)) == 0)

/*
 * Atomically test *v and decrement if it is greater than 0.
 * The function returns the old value of *v minus 1, even if
 * the atomic variable, v, was not decremented.
 */
static __inline__ int atomic_dec_if_positive(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	PPC_RELEASE_BARRIER
"1:	lwarx	%0,0,%1		# atomic_dec_if_positive\n\
	cmpwi	%0,1\n\
	addi	%0,%0,-1\n\
	blt-	2f\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1\n\
	bne-	1b"
	PPC_ACQUIRE_BARRIER
	"\n\
2:"	: "=&b" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}

#define smp_mb__before_atomic_dec()     smp_mb()
#define smp_mb__after_atomic_dec()      smp_mb()
#define smp_mb__before_atomic_inc()     smp_mb()
#define smp_mb__after_atomic_inc()      smp_mb()

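/*
 * Illustrative usage sketch (assumes a hypothetical struct foo with an
 * atomic_t refcnt member; nothing here is defined by this header):
 * atomic_inc_not_zero() and atomic_dec_and_test() are the usual
 * building blocks for reference counting.
 *
 *	struct foo *foo_get(struct foo *f)
 *	{
 *		return atomic_inc_not_zero(&f->refcnt) ? f : NULL;
 *	}
 *
 *	void foo_put(struct foo *f)
 *	{
 *		if (atomic_dec_and_test(&f->refcnt))
 *			kfree(f);
 *	}
 *
 * foo_get() refuses to resurrect an object whose count has already
 * dropped to zero, and foo_put() frees the object exactly once, on the
 * transition from 1 to 0.
 */
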
#ifdef __powerpc64__

#define ATOMIC64_INIT(i)	{ (i) }

static __inline__ long atomic64_read(const atomic64_t *v)
{
	long t;

	__asm__ __volatile__("ld%U1%X1 %0,%1" : "=r"(t) : "m"(v->counter));

	return t;
}

static __inline__ void atomic64_set(atomic64_t *v, long i)
{
	__asm__ __volatile__("std%U0%X0 %1,%0" : "=m"(v->counter) : "r"(i));
}

static __inline__ void atomic64_add(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%3		# atomic64_add\n\
	add	%0,%2,%0\n\
	stdcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (a), "r" (&v->counter)
	: "cc");
}

static __inline__ long atomic64_add_return(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	PPC_RELEASE_BARRIER
"1:	ldarx	%0,0,%2		# atomic64_add_return\n\
	add	%0,%1,%0\n\
	stdcx.	%0,0,%2 \n\
	bne-	1b"
	PPC_ACQUIRE_BARRIER
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}

#define atomic64_add_negative(a, v)	(atomic64_add_return((a), (v)) < 0)

static __inline__ void atomic64_sub(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%3		# atomic64_sub\n\
	subf	%0,%2,%0\n\
	stdcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (a), "r" (&v->counter)
	: "cc");
}

static __inline__ long atomic64_sub_return(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	PPC_RELEASE_BARRIER
"1:	ldarx	%0,0,%2		# atomic64_sub_return\n\
	subf	%0,%1,%0\n\
	stdcx.	%0,0,%2 \n\
	bne-	1b"
	PPC_ACQUIRE_BARRIER
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}

static __inline__ void atomic64_inc(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%2		# atomic64_inc\n\
	addic	%0,%0,1\n\
	stdcx.	%0,0,%2 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc", "xer");
}

static __inline__ long atomic64_inc_return(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	PPC_RELEASE_BARRIER
"1:	ldarx	%0,0,%1		# atomic64_inc_return\n\
	addic	%0,%0,1\n\
	stdcx.	%0,0,%1 \n\
	bne-	1b"
	PPC_ACQUIRE_BARRIER
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}

/*
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)

static __inline__ void atomic64_dec(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%2		# atomic64_dec\n\
	addic	%0,%0,-1\n\
	stdcx.	%0,0,%2\n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc", "xer");
}

static __inline__ long atomic64_dec_return(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	PPC_RELEASE_BARRIER
"1:	ldarx	%0,0,%1		# atomic64_dec_return\n\
	addic	%0,%0,-1\n\
	stdcx.	%0,0,%1\n\
	bne-	1b"
	PPC_ACQUIRE_BARRIER
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}

#define atomic64_sub_and_test(a, v)	(atomic64_sub_return((a), (v)) == 0)
#define atomic64_dec_and_test(v)	(atomic64_dec_return((v)) == 0)

/*
 * Atomically test *v and decrement if it is greater than 0.
 * The function returns the old value of *v minus 1.
 */
static __inline__ long atomic64_dec_if_positive(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	PPC_RELEASE_BARRIER
"1:	ldarx	%0,0,%1		# atomic64_dec_if_positive\n\
	addic.	%0,%0,-1\n\
	blt-	2f\n\
	stdcx.	%0,0,%1\n\
	bne-	1b"
	PPC_ACQUIRE_BARRIER
	"\n\
2:"	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}

#define atomic64_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), new))

/**
 * atomic64_add_unless - add unless the number is a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
{
	long t;

	__asm__ __volatile__ (
	PPC_RELEASE_BARRIER
"1:	ldarx	%0,0,%1		# atomic64_add_unless\n\
	cmpd	0,%0,%3 \n\
	beq-	2f \n\
	add	%0,%2,%0 \n"
"	stdcx.	%0,0,%1 \n\
	bne-	1b \n"
	PPC_ACQUIRE_BARRIER
"	subf	%0,%2,%0 \n\
2:"
	: "=&r" (t)
	: "r" (&v->counter), "r" (a), "r" (u)
	: "cc", "memory");

	return t != u;
}

#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)

#else  /* __powerpc64__ */
#include <asm-generic/atomic64.h>

#endif /* __powerpc64__ */

#include <asm-generic/atomic-long.h>
#endif /* __KERNEL__ */
#endif /* _ASM_POWERPC_ATOMIC_H_ */
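
/*
 * Illustrative sketch (not part of this header): atomic_cmpxchg() above
 * is enough to build arbitrary read-modify-write operations as a
 * compare-and-swap loop.  A hypothetical saturating add, for example
 * (a >= 0 assumed):
 *
 *	static int atomic_add_sat(atomic_t *v, int a)
 *	{
 *		int old, new;
 *
 *		do {
 *			old = atomic_read(v);
 *			new = (old > INT_MAX - a) ? INT_MAX : old + a;
 *		} while (atomic_cmpxchg(v, old, new) != old);
 *
 *		return new;
 *	}
 *
 * The loop retries whenever another CPU changed *v between the read and
 * the cmpxchg, mirroring the lwarx/stwcx. loops used natively above.
 */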