Path: blob/master/arch/blackfin/include/asm/atomic.h
/*
 * Copyright 2004-2009 Analog Devices Inc.
 *
 * Licensed under the GPL-2 or later.
 */

#ifndef __ARCH_BLACKFIN_ATOMIC__
#define __ARCH_BLACKFIN_ATOMIC__

#ifndef CONFIG_SMP
# include <asm-generic/atomic.h>
#else

#include <linux/types.h>
#include <asm/system.h>	/* local_irq_XXX() */

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc..
 */

#define ATOMIC_INIT(i)		{ (i) }
#define atomic_set(v, i)	(((v)->counter) = i)

#define atomic_read(v)	__raw_uncached_fetch_asm(&(v)->counter)

asmlinkage int __raw_uncached_fetch_asm(const volatile int *ptr);

asmlinkage int __raw_atomic_update_asm(volatile int *ptr, int value);

asmlinkage int __raw_atomic_clear_asm(volatile int *ptr, int value);

asmlinkage int __raw_atomic_set_asm(volatile int *ptr, int value);

asmlinkage int __raw_atomic_xor_asm(volatile int *ptr, int value);

asmlinkage int __raw_atomic_test_asm(const volatile int *ptr, int value);

static inline void atomic_add(int i, atomic_t *v)
{
	__raw_atomic_update_asm(&v->counter, i);
}

static inline void atomic_sub(int i, atomic_t *v)
{
	__raw_atomic_update_asm(&v->counter, -i);
}

static inline int atomic_add_return(int i, atomic_t *v)
{
	return __raw_atomic_update_asm(&v->counter, i);
}

static inline int atomic_sub_return(int i, atomic_t *v)
{
	return __raw_atomic_update_asm(&v->counter, -i);
}

static inline void atomic_inc(volatile atomic_t *v)
{
	__raw_atomic_update_asm(&v->counter, 1);
}

static inline void atomic_dec(volatile atomic_t *v)
{
	__raw_atomic_update_asm(&v->counter, -1);
}

static inline void atomic_clear_mask(int mask, atomic_t *v)
{
	__raw_atomic_clear_asm(&v->counter, mask);
}

static inline void atomic_set_mask(int mask, atomic_t *v)
{
	__raw_atomic_set_asm(&v->counter, mask);
}

static inline int atomic_test_mask(int mask, atomic_t *v)
{
	return __raw_atomic_test_asm(&v->counter, mask);
}

/* Atomic operations are already serializing */
#define smp_mb__before_atomic_dec()	barrier()
#define smp_mb__after_atomic_dec()	barrier()
#define smp_mb__before_atomic_inc()	barrier()
#define smp_mb__after_atomic_inc()	barrier()

#define atomic_add_negative(a, v)	(atomic_add_return((a), (v)) < 0)
#define atomic_dec_return(v)		atomic_sub_return(1, (v))
#define atomic_inc_return(v)		atomic_add_return(1, (v))

#define atomic_cmpxchg(v, o, n)	((int)cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new)	(xchg(&((v)->counter), new))

#define atomic_add_unless(v, a, u)				\
({								\
	int c, old;						\
	c = atomic_read(v);					\
	while (c != (u) && (old = atomic_cmpxchg((v), c, c + (a))) != c) \
		c = old;					\
	c != (u);						\
})
#define atomic_inc_not_zero(v)	atomic_add_unless((v), 1, 0)

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v)	(atomic_inc_return(v) == 0)

#define atomic_sub_and_test(i, v)	(atomic_sub_return((i), (v)) == 0)
#define atomic_dec_and_test(v)		(atomic_sub_return(1, (v)) == 0)

#include <asm-generic/atomic-long.h>

#endif

#include <asm-generic/atomic64.h>

#endif
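To make the "resource counting" use mentioned in the header comment concrete, here is a minimal refcount sketch built on the primitives above. It assumes a kernel context where this header is in effect and kfree() is available from <linux/slab.h>; struct widget, widget_get(), and widget_put() are hypothetical names invented for illustration, not part of this file.

#include <asm/atomic.h>		/* the interface defined above */
#include <linux/slab.h>		/* kfree() */

/* Hypothetical reference-counted object. */
struct widget {
	atomic_t refcount;
	/* ... payload ... */
};

/*
 * Take a reference only while the object is still live.
 * atomic_inc_not_zero() never resurrects a count that has already
 * dropped to zero, so a racing free cannot be undone; it returns
 * nonzero iff the increment actually happened.
 */
static int widget_get(struct widget *w)
{
	return atomic_inc_not_zero(&w->refcount);
}

/*
 * Drop a reference.  atomic_dec_and_test() returns true only for
 * the caller whose decrement takes the count to zero, so exactly
 * one CPU frees the object.
 */
static void widget_put(struct widget *w)
{
	if (atomic_dec_and_test(&w->refcount))
		kfree(w);
}

On SMP builds the increments and decrements go through __raw_atomic_update_asm(), which serializes against the other cores; on UP builds the #ifndef CONFIG_SMP branch pulls in the asm-generic fallback, which provides the same interface.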
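The atomic_add_unless() macro above is one instance of a general compare-and-swap loop: read the counter, compute the desired new value, and retry whenever atomic_cmpxchg() reports that another CPU updated the counter in between. The same idiom can express other conditional updates; the following saturating increment is a sketch of that shape (the name atomic_inc_below() is an assumption for illustration, not defined by this header).

/*
 * Hypothetical helper: bump @v by 1 unless it has already reached
 * @limit.  Returns the last value observed; a result below @limit
 * means the increment took effect, mirroring the way
 * atomic_add_unless() reports whether its update happened.
 */
static inline int atomic_inc_below(atomic_t *v, int limit)
{
	int c, old;

	c = atomic_read(v);
	while (c < limit && (old = atomic_cmpxchg(v, c, c + 1)) != c)
		c = old;	/* lost the race: retry with the fresh value */
	return c;
}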