#ifndef _ASM_X86_ATOMIC_H
#define _ASM_X86_ATOMIC_H

#include <linux/compiler.h>
#include <linux/types.h>
#include <asm/processor.h>
#include <asm/alternative.h>
#include <asm/cmpxchg.h>

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc..
 */

#define ATOMIC_INIT(i)	{ (i) }

/**
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
static inline int atomic_read(const atomic_t *v)
{
	return (*(volatile int *)&(v)->counter);
}

/**
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
static inline void atomic_set(atomic_t *v, int i)
{
	v->counter = i;
}

/**
 * atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v.
 */
static inline void atomic_add(int i, atomic_t *v)
{
	asm volatile(LOCK_PREFIX "addl %1,%0"
		     : "+m" (v->counter)
		     : "ir" (i));
}

/**
 * atomic_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v.
 */
static inline void atomic_sub(int i, atomic_t *v)
{
	asm volatile(LOCK_PREFIX "subl %1,%0"
		     : "+m" (v->counter)
		     : "ir" (i));
}

/**
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static inline int atomic_sub_and_test(int i, atomic_t *v)
{
	unsigned char c;

	asm volatile(LOCK_PREFIX "subl %2,%0; sete %1"
		     : "+m" (v->counter), "=qm" (c)
		     : "ir" (i) : "memory");
	return c;
}

/**
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
static inline void atomic_inc(atomic_t *v)
{
	asm volatile(LOCK_PREFIX "incl %0"
		     : "+m" (v->counter));
}

/**
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
static inline void atomic_dec(atomic_t *v)
{
	asm volatile(LOCK_PREFIX "decl %0"
		     : "+m" (v->counter));
}

/**
 * atomic_dec_and_test - decrement and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static inline int atomic_dec_and_test(atomic_t *v)
{
	unsigned char c;

	asm volatile(LOCK_PREFIX "decl %0; sete %1"
		     : "+m" (v->counter), "=qm" (c)
		     : : "memory");
	return c != 0;
}

/**
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static inline int atomic_inc_and_test(atomic_t *v)
{
	unsigned char c;

	asm volatile(LOCK_PREFIX "incl %0; sete %1"
		     : "+m" (v->counter), "=qm" (c)
		     : : "memory");
	return c != 0;
}

/**
 * atomic_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
static inline int atomic_add_negative(int i, atomic_t *v)
{
	unsigned char c;

	asm volatile(LOCK_PREFIX "addl %2,%0; sets %1"
		     : "+m" (v->counter), "=qm" (c)
		     : "ir" (i) : "memory");
	return c;
}
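
/*
 * Illustrative sketch, not part of this header's API: the *_and_test
 * helpers above are typically used for reference counting, where only
 * the caller that drops the final reference performs cleanup.  struct
 * foo, foo_get() and foo_put() below are hypothetical names.
 *
 *	struct foo {
 *		atomic_t refcount;
 *	};
 *
 *	static void foo_get(struct foo *f)
 *	{
 *		atomic_inc(&f->refcount);
 *	}
 *
 *	static void foo_put(struct foo *f)
 *	{
 *		if (atomic_dec_and_test(&f->refcount))
 *			kfree(f);
 *	}
 *
 * Because the locked decrement and the zero test happen as one atomic
 * step, exactly one concurrent foo_put() caller sees the count reach
 * zero and frees the object.
 */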

/**
 * atomic_add_return - add integer and return
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns @i + @v
 */
static inline int atomic_add_return(int i, atomic_t *v)
{
	int __i;
#ifdef CONFIG_M386
	unsigned long flags;
	if (unlikely(boot_cpu_data.x86 <= 3))
		goto no_xadd;
#endif
	/* Modern 486+ processor */
	__i = i;
	asm volatile(LOCK_PREFIX "xaddl %0, %1"
		     : "+r" (i), "+m" (v->counter)
		     : : "memory");
	return i + __i;

#ifdef CONFIG_M386
no_xadd: /* Legacy 386 processor */
	raw_local_irq_save(flags);
	__i = atomic_read(v);
	atomic_set(v, i + __i);
	raw_local_irq_restore(flags);
	return i + __i;
#endif
}

/**
 * atomic_sub_return - subtract integer and return
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns @v - @i
 */
static inline int atomic_sub_return(int i, atomic_t *v)
{
	return atomic_add_return(-i, v);
}

#define atomic_inc_return(v)  (atomic_add_return(1, v))
#define atomic_dec_return(v)  (atomic_sub_return(1, v))

static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
{
	return cmpxchg(&v->counter, old, new);
}

static inline int atomic_xchg(atomic_t *v, int new)
{
	return xchg(&v->counter, new);
}

/**
 * atomic_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as @v was not already @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
static inline int atomic_add_unless(atomic_t *v, int a, int u)
{
	int c, old;
	c = atomic_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c != (u);
}

#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)

/*
 * atomic_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic_t
 *
 * The function returns the old value of *v minus 1, even if
 * the atomic variable, v, was not decremented.
 */
static inline int atomic_dec_if_positive(atomic_t *v)
{
	int c, old, dec;
	c = atomic_read(v);
	for (;;) {
		dec = c - 1;
		if (unlikely(dec < 0))
			break;
		old = atomic_cmpxchg((v), c, dec);
		if (likely(old == c))
			break;
		c = old;
	}
	return dec;
}
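
/*
 * Illustrative sketch, not part of this header's API:
 * atomic_add_unless() and atomic_dec_if_positive() above are both
 * instances of the same atomic_cmpxchg() retry loop, which can express
 * any conditional read-modify-write that has no dedicated helper.  A
 * hypothetical "increment unless a ceiling is reached" would follow
 * the identical pattern (atomic_inc_below is not a real interface):
 *
 *	static inline int atomic_inc_below(atomic_t *v, int ceiling)
 *	{
 *		int c, old;
 *
 *		c = atomic_read(v);
 *		for (;;) {
 *			if (unlikely(c >= ceiling))
 *				return 0;
 *			old = atomic_cmpxchg(v, c, c + 1);
 *			if (likely(old == c))
 *				return 1;
 *			c = old;
 *		}
 *	}
 *
 * When the cmpxchg loses a race, old holds the fresh value and the
 * loop retries, re-evaluating the condition against it.
 */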

/**
 * atomic_inc_short - increment of a short integer
 * @v: pointer of type short int
 *
 * Atomically adds 1 to @v.
 * Returns the new value of @v.
 */
static inline short int atomic_inc_short(short int *v)
{
	asm(LOCK_PREFIX "addw $1, %0" : "+m" (*v));
	return *v;
}

#ifdef CONFIG_X86_64
/**
 * atomic_or_long - OR of two long integers
 * @v1: pointer to type unsigned long
 * @v2: value to OR into *@v1
 *
 * Atomically ORs @v2 into *@v1.  The result is stored in *@v1;
 * nothing is returned.
 */
static inline void atomic_or_long(unsigned long *v1, unsigned long v2)
{
	asm(LOCK_PREFIX "orq %1, %0" : "+m" (*v1) : "r" (v2));
}
#endif

/* These are x86-specific, used by some header files */
#define atomic_clear_mask(mask, addr)				\
	asm volatile(LOCK_PREFIX "andl %0,%1"			\
		     : : "r" (~(mask)), "m" (*(addr)) : "memory")

#define atomic_set_mask(mask, addr)				\
	asm volatile(LOCK_PREFIX "orl %0,%1"			\
		     : : "r" ((unsigned)(mask)), "m" (*(addr))	\
		     : "memory")

/* Atomic operations are already serializing on x86 */
#define smp_mb__before_atomic_dec()	barrier()
#define smp_mb__after_atomic_dec()	barrier()
#define smp_mb__before_atomic_inc()	barrier()
#define smp_mb__after_atomic_inc()	barrier()

#ifdef CONFIG_X86_32
# include "atomic64_32.h"
#else
# include "atomic64_64.h"
#endif

#include <asm-generic/atomic-long.h>
#endif /* _ASM_X86_ATOMIC_H */