/* arch/x86/include/asm/atomic64_64.h */
#ifndef _ASM_X86_ATOMIC64_64_H1#define _ASM_X86_ATOMIC64_64_H23#include <linux/types.h>4#include <asm/alternative.h>5#include <asm/cmpxchg.h>67/* The 64-bit atomic type */89#define ATOMIC64_INIT(i) { (i) }1011/**12* atomic64_read - read atomic64 variable13* @v: pointer of type atomic64_t14*15* Atomically reads the value of @v.16* Doesn't imply a read memory barrier.17*/18static inline long atomic64_read(const atomic64_t *v)19{20return (*(volatile long *)&(v)->counter);21}2223/**24* atomic64_set - set atomic64 variable25* @v: pointer to type atomic64_t26* @i: required value27*28* Atomically sets the value of @v to @i.29*/30static inline void atomic64_set(atomic64_t *v, long i)31{32v->counter = i;33}3435/**36* atomic64_add - add integer to atomic64 variable37* @i: integer value to add38* @v: pointer to type atomic64_t39*40* Atomically adds @i to @v.41*/42static inline void atomic64_add(long i, atomic64_t *v)43{44asm volatile(LOCK_PREFIX "addq %1,%0"45: "=m" (v->counter)46: "er" (i), "m" (v->counter));47}4849/**50* atomic64_sub - subtract the atomic64 variable51* @i: integer value to subtract52* @v: pointer to type atomic64_t53*54* Atomically subtracts @i from @v.55*/56static inline void atomic64_sub(long i, atomic64_t *v)57{58asm volatile(LOCK_PREFIX "subq %1,%0"59: "=m" (v->counter)60: "er" (i), "m" (v->counter));61}6263/**64* atomic64_sub_and_test - subtract value from variable and test result65* @i: integer value to subtract66* @v: pointer to type atomic64_t67*68* Atomically subtracts @i from @v and returns69* true if the result is zero, or false for all70* other cases.71*/72static inline int atomic64_sub_and_test(long i, atomic64_t *v)73{74unsigned char c;7576asm volatile(LOCK_PREFIX "subq %2,%0; sete %1"77: "=m" (v->counter), "=qm" (c)78: "er" (i), "m" (v->counter) : "memory");79return c;80}8182/**83* atomic64_inc - increment atomic64 variable84* @v: pointer to type atomic64_t85*86* Atomically increments @v by 1.87*/88static inline void atomic64_inc(atomic64_t 
*v)89{90asm volatile(LOCK_PREFIX "incq %0"91: "=m" (v->counter)92: "m" (v->counter));93}9495/**96* atomic64_dec - decrement atomic64 variable97* @v: pointer to type atomic64_t98*99* Atomically decrements @v by 1.100*/101static inline void atomic64_dec(atomic64_t *v)102{103asm volatile(LOCK_PREFIX "decq %0"104: "=m" (v->counter)105: "m" (v->counter));106}107108/**109* atomic64_dec_and_test - decrement and test110* @v: pointer to type atomic64_t111*112* Atomically decrements @v by 1 and113* returns true if the result is 0, or false for all other114* cases.115*/116static inline int atomic64_dec_and_test(atomic64_t *v)117{118unsigned char c;119120asm volatile(LOCK_PREFIX "decq %0; sete %1"121: "=m" (v->counter), "=qm" (c)122: "m" (v->counter) : "memory");123return c != 0;124}125126/**127* atomic64_inc_and_test - increment and test128* @v: pointer to type atomic64_t129*130* Atomically increments @v by 1131* and returns true if the result is zero, or false for all132* other cases.133*/134static inline int atomic64_inc_and_test(atomic64_t *v)135{136unsigned char c;137138asm volatile(LOCK_PREFIX "incq %0; sete %1"139: "=m" (v->counter), "=qm" (c)140: "m" (v->counter) : "memory");141return c != 0;142}143144/**145* atomic64_add_negative - add and test if negative146* @i: integer value to add147* @v: pointer to type atomic64_t148*149* Atomically adds @i to @v and returns true150* if the result is negative, or false when151* result is greater than or equal to zero.152*/153static inline int atomic64_add_negative(long i, atomic64_t *v)154{155unsigned char c;156157asm volatile(LOCK_PREFIX "addq %2,%0; sets %1"158: "=m" (v->counter), "=qm" (c)159: "er" (i), "m" (v->counter) : "memory");160return c;161}162163/**164* atomic64_add_return - add and return165* @i: integer value to add166* @v: pointer to type atomic64_t167*168* Atomically adds @i to @v and returns @i + @v169*/170static inline long atomic64_add_return(long i, atomic64_t *v)171{172long __i = i;173asm volatile(LOCK_PREFIX 
"xaddq %0, %1;"174: "+r" (i), "+m" (v->counter)175: : "memory");176return i + __i;177}178179static inline long atomic64_sub_return(long i, atomic64_t *v)180{181return atomic64_add_return(-i, v);182}183184#define atomic64_inc_return(v) (atomic64_add_return(1, (v)))185#define atomic64_dec_return(v) (atomic64_sub_return(1, (v)))186187static inline long atomic64_cmpxchg(atomic64_t *v, long old, long new)188{189return cmpxchg(&v->counter, old, new);190}191192static inline long atomic64_xchg(atomic64_t *v, long new)193{194return xchg(&v->counter, new);195}196197/**198* atomic64_add_unless - add unless the number is a given value199* @v: pointer of type atomic64_t200* @a: the amount to add to v...201* @u: ...unless v is equal to u.202*203* Atomically adds @a to @v, so long as it was not @u.204* Returns non-zero if @v was not @u, and zero otherwise.205*/206static inline int atomic64_add_unless(atomic64_t *v, long a, long u)207{208long c, old;209c = atomic64_read(v);210for (;;) {211if (unlikely(c == (u)))212break;213old = atomic64_cmpxchg((v), c, c + (a));214if (likely(old == c))215break;216c = old;217}218return c != (u);219}220221#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)222223/*224* atomic64_dec_if_positive - decrement by 1 if old value positive225* @v: pointer of type atomic_t226*227* The function returns the old value of *v minus 1, even if228* the atomic variable, v, was not decremented.229*/230static inline long atomic64_dec_if_positive(atomic64_t *v)231{232long c, old, dec;233c = atomic64_read(v);234for (;;) {235dec = c - 1;236if (unlikely(dec < 0))237break;238old = atomic64_cmpxchg((v), c, dec);239if (likely(old == c))240break;241c = old;242}243return dec;244}245246#endif /* _ASM_X86_ATOMIC64_64_H */247248249