Path: blob/master/arch/x86/include/asm/cmpxchg_64.h
#ifndef _ASM_X86_CMPXCHG_64_H
#define _ASM_X86_CMPXCHG_64_H

#include <asm/alternative.h> /* Provides LOCK_PREFIX */

static inline void set_64bit(volatile u64 *ptr, u64 val)
{
        *ptr = val;
}

extern void __xchg_wrong_size(void);
extern void __cmpxchg_wrong_size(void);

/*
 * Note: no "lock" prefix even on SMP: xchg always implies lock anyway.
 * Since this is generally used to protect other memory information, we
 * use "asm volatile" and "memory" clobbers to prevent gcc from moving
 * information around.
 */
#define __xchg(x, ptr, size) \
({ \
        __typeof(*(ptr)) __x = (x); \
        switch (size) { \
        case 1: \
        { \
                volatile u8 *__ptr = (volatile u8 *)(ptr); \
                asm volatile("xchgb %0,%1" \
                             : "=q" (__x), "+m" (*__ptr) \
                             : "0" (__x) \
                             : "memory"); \
                break; \
        } \
        case 2: \
        { \
                volatile u16 *__ptr = (volatile u16 *)(ptr); \
                asm volatile("xchgw %0,%1" \
                             : "=r" (__x), "+m" (*__ptr) \
                             : "0" (__x) \
                             : "memory"); \
                break; \
        } \
        case 4: \
        { \
                volatile u32 *__ptr = (volatile u32 *)(ptr); \
                asm volatile("xchgl %0,%1" \
                             : "=r" (__x), "+m" (*__ptr) \
                             : "0" (__x) \
                             : "memory"); \
                break; \
        } \
        case 8: \
        { \
                volatile u64 *__ptr = (volatile u64 *)(ptr); \
                asm volatile("xchgq %0,%1" \
                             : "=r" (__x), "+m" (*__ptr) \
                             : "0" (__x) \
                             : "memory"); \
                break; \
        } \
        default: \
                __xchg_wrong_size(); \
        } \
        __x; \
})

#define xchg(ptr, v) \
        __xchg((v), (ptr), sizeof(*ptr))

#define __HAVE_ARCH_CMPXCHG 1

/*
 * Atomic compare and exchange.  Compare OLD with MEM, if identical,
 * store NEW in MEM.  Return the initial value in MEM.  Success is
 * indicated by comparing RETURN with OLD.
 */
#define __raw_cmpxchg(ptr, old, new, size, lock) \
({ \
        __typeof__(*(ptr)) __ret; \
        __typeof__(*(ptr)) __old = (old); \
        __typeof__(*(ptr)) __new = (new); \
        switch (size) { \
        case 1: \
        { \
                volatile u8 *__ptr = (volatile u8 *)(ptr); \
                asm volatile(lock "cmpxchgb %2,%1" \
                             : "=a" (__ret), "+m" (*__ptr) \
                             : "q" (__new), "0" (__old) \
                             : "memory"); \
                break; \
        } \
        case 2: \
        { \
                volatile u16 *__ptr = (volatile u16 *)(ptr); \
                asm volatile(lock "cmpxchgw %2,%1" \
                             : "=a" (__ret), "+m" (*__ptr) \
                             : "r" (__new), "0" (__old) \
                             : "memory"); \
                break; \
        } \
        case 4: \
        { \
                volatile u32 *__ptr = (volatile u32 *)(ptr); \
                asm volatile(lock "cmpxchgl %2,%1" \
                             : "=a" (__ret), "+m" (*__ptr) \
                             : "r" (__new), "0" (__old) \
                             : "memory"); \
                break; \
        } \
        case 8: \
        { \
                volatile u64 *__ptr = (volatile u64 *)(ptr); \
                asm volatile(lock "cmpxchgq %2,%1" \
                             : "=a" (__ret), "+m" (*__ptr) \
                             : "r" (__new), "0" (__old) \
                             : "memory"); \
                break; \
        } \
        default: \
                __cmpxchg_wrong_size(); \
        } \
        __ret; \
})

#define __cmpxchg(ptr, old, new, size) \
        __raw_cmpxchg((ptr), (old), (new), (size), LOCK_PREFIX)

#define __sync_cmpxchg(ptr, old, new, size) \
        __raw_cmpxchg((ptr), (old), (new), (size), "lock; ")

#define __cmpxchg_local(ptr, old, new, size) \
        __raw_cmpxchg((ptr), (old), (new), (size), "")

#define cmpxchg(ptr, old, new) \
        __cmpxchg((ptr), (old), (new), sizeof(*ptr))

#define sync_cmpxchg(ptr, old, new) \
        __sync_cmpxchg((ptr), (old), (new), sizeof(*ptr))

#define cmpxchg_local(ptr, old, new) \
        __cmpxchg_local((ptr), (old), (new), sizeof(*ptr))

#define cmpxchg64(ptr, o, n) \
({ \
        BUILD_BUG_ON(sizeof(*(ptr)) != 8); \
        cmpxchg((ptr), (o), (n)); \
})

#define cmpxchg64_local(ptr, o, n) \
({ \
        BUILD_BUG_ON(sizeof(*(ptr)) != 8); \
        cmpxchg_local((ptr), (o), (n)); \
})

#endif /* _ASM_X86_CMPXCHG_64_H */
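
The comment above cmpxchg() describes its calling convention: the macro returns the value that was in memory, and the caller detects success by comparing that return value with the expected old value. As a usage illustration only (not part of cmpxchg_64.h), the sketch below shows the usual retry loop built on that convention. It assumes kernel context where this header and <linux/types.h> are available; the identifiers example_counter and example_counter_inc are hypothetical.

/*
 * Illustrative sketch, not part of the header: a lock-free 64-bit
 * counter increment using the cmpxchg() semantics documented above.
 * example_counter and example_counter_inc are hypothetical names.
 */
#include <linux/types.h>

static u64 example_counter;     /* hypothetical shared counter */

static inline u64 example_counter_inc(void)
{
        u64 old, prev;

        do {
                old = example_counter;  /* snapshot the current value */
                prev = cmpxchg(&example_counter, old, old + 1);
        } while (prev != old);          /* another CPU updated it first; retry */

        return old + 1;                 /* the value this CPU installed */
}

xchg() follows the same convention of returning the previous memory contents, so something like old = xchg(&example_counter, 0) would atomically read and clear the counter in one step, with the implicit lock noted in the comment above __xchg().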