#ifndef _ASM_X86_BARRIER_H
#define _ASM_X86_BARRIER_H

#include <asm/alternative.h>
#include <asm/nops.h>
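
/*
 * Force strict CPU ordering.
 * And yes, this might be required on UP too when we're talking
 * to devices.
 *
 * On 32-bit, ALTERNATIVE() patches in the SSE2 fence instructions
 * (mfence/lfence/sfence) when the CPU advertises X86_FEATURE_XMM2;
 * older CPUs fall back to a locked add to a dead stack slot, which
 * is also fully ordered.
 */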
#ifdef CONFIG_X86_32
#define mb() asm volatile(ALTERNATIVE("lock addl $0,-4(%%esp)", "mfence", \
				      X86_FEATURE_XMM2) ::: "memory", "cc")
#define rmb() asm volatile(ALTERNATIVE("lock addl $0,-4(%%esp)", "lfence", \
				      X86_FEATURE_XMM2) ::: "memory", "cc")
#define wmb() asm volatile(ALTERNATIVE("lock addl $0,-4(%%esp)", "sfence", \
				      X86_FEATURE_XMM2) ::: "memory", "cc")
#else
#define __mb()	asm volatile("mfence" ::: "memory")
#define __rmb()	asm volatile("lfence" ::: "memory")
#define __wmb()	asm volatile("sfence" ::: "memory")
#endif
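
/*
 * array_index_mask_nospec() - generate a mask that is ~0UL when the
 *	bounds check succeeds and 0 otherwise
 * @idx: array element index
 * @sz: number of elements in array
 *
 * Returns:
 *     0 - (idx < sz)
 *
 * cmp sets the carry flag iff __idx < __sz; sbb then subtracts that
 * carry from zero, yielding all-ones for an in-bounds index and 0 for
 * an out-of-bounds one, without a branch the CPU could mispredict
 * (Spectre v1).
 */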
#define array_index_mask_nospec(idx,sz) ({	\
	typeof((idx)+(sz)) __idx = (idx);	\
	typeof(__idx) __sz = (sz);		\
	unsigned long __mask;			\
	asm volatile ("cmp %1,%2; sbb %0,%0"	\
			:"=r" (__mask)		\
			:ASM_INPUT_G (__sz),	\
			"r" (__idx)		\
			:"cc");			\
	__mask; })
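
/*
 * Illustrative use (array_index_nospec() in <linux/nospec.h> wraps this
 * pattern; the names below are only a sketch):
 *
 *	if (idx < ARRAY_SIZE(arr)) {
 *		idx &= array_index_mask_nospec(idx, ARRAY_SIZE(arr));
 *		val = arr[idx];
 *	}
 *
 * The mask clamps idx to 0 under misspeculation, so no out-of-bounds
 * load can be used to leak data through the cache.
 */

/*
 * Prevent speculative execution past this barrier: patched to LFENCE on
 * CPUs where LFENCE is serializing (X86_FEATURE_LFENCE_RDTSC), a no-op
 * otherwise.
 */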
#define barrier_nospec() alternative("", "lfence", X86_FEATURE_LFENCE_RDTSC)
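
/*
 * x86 keeps normal (write-back cacheable) memory ordered load-vs-load
 * and store-vs-store, so the DMA barriers only need to stop the
 * compiler from reordering.
 */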
#define __dma_rmb() barrier()
#define __dma_wmb() barrier()
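
/*
 * A locked add to a dead slot just below the stack pointer is a full
 * memory barrier and is cheaper than MFENCE on most CPUs, so it serves
 * as the SMP full barrier.
 */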
#define __smp_mb() asm volatile("lock addl $0,-4(%%" _ASM_SP ")" ::: "memory", "cc")
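
/*
 * TSO already forbids load/load and store/store reordering, so SMP
 * rmb/wmb reduce to compiler barriers, while __smp_store_mb() relies on
 * the implicit LOCK semantics of XCHG for a full barrier around the
 * store.
 */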
#define __smp_rmb() dma_rmb()
#define __smp_wmb() barrier()
#define __smp_store_mb(var, value) do { (void)xchg(&var, value); } while (0)
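
/*
 * Under TSO, a store-release needs only a compiler barrier before a
 * plain store, and a load-acquire only a compiler barrier after a plain
 * load; the hardware never reorders in the forbidden directions.
 */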
#define __smp_store_release(p, v)					\
do {									\
	compiletime_assert_atomic_type(*p);				\
	barrier();							\
	WRITE_ONCE(*p, v);						\
} while (0)
#define __smp_load_acquire(p)						\
({									\
	typeof(*p) ___p1 = READ_ONCE(*p);				\
	compiletime_assert_atomic_type(*p);				\
	barrier();							\
	___p1;								\
})
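
/*
 * Illustrative producer/consumer pairing (hypothetical names, only a
 * sketch):
 *
 *	data = 42;				// producer
 *	smp_store_release(&ready, 1);
 *
 *	if (smp_load_acquire(&ready))		// consumer
 *		use(data);			// guaranteed to see 42
 *
 * The generic smp_store_release()/smp_load_acquire() wrappers expand to
 * the __-prefixed versions above.
 */

/* Atomic operations are already serializing on x86. */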
#define __smp_mb__before_atomic() do { } while (0)
#define __smp_mb__after_atomic() do { } while (0)
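
/* Writing to CR3 provides a full memory barrier in switch_mm(). */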
#define smp_mb__after_switch_mm() do { } while (0)
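
/*
 * asm-generic/barrier.h turns the __-prefixed definitions above into
 * the public mb()/rmb()/wmb(), dma_*() and smp_*() APIs and supplies
 * generic fallbacks for anything not overridden here.
 */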
#include <asm-generic/barrier.h>
#endif /* _ASM_X86_BARRIER_H */