/* Path: blob/master/include/asm-generic/atomic-long.h */
#ifndef _ASM_GENERIC_ATOMIC_LONG_H1#define _ASM_GENERIC_ATOMIC_LONG_H2/*3* Copyright (C) 2005 Silicon Graphics, Inc.4* Christoph Lameter5*6* Allows to provide arch independent atomic definitions without the need to7* edit all arch specific atomic.h files.8*/910#include <asm/types.h>1112/*13* Suppport for atomic_long_t14*15* Casts for parameters are avoided for existing atomic functions in order to16* avoid issues with cast-as-lval under gcc 4.x and other limitations that the17* macros of a platform may have.18*/1920#if BITS_PER_LONG == 642122typedef atomic64_t atomic_long_t;2324#define ATOMIC_LONG_INIT(i) ATOMIC64_INIT(i)2526static inline long atomic_long_read(atomic_long_t *l)27{28atomic64_t *v = (atomic64_t *)l;2930return (long)atomic64_read(v);31}3233static inline void atomic_long_set(atomic_long_t *l, long i)34{35atomic64_t *v = (atomic64_t *)l;3637atomic64_set(v, i);38}3940static inline void atomic_long_inc(atomic_long_t *l)41{42atomic64_t *v = (atomic64_t *)l;4344atomic64_inc(v);45}4647static inline void atomic_long_dec(atomic_long_t *l)48{49atomic64_t *v = (atomic64_t *)l;5051atomic64_dec(v);52}5354static inline void atomic_long_add(long i, atomic_long_t *l)55{56atomic64_t *v = (atomic64_t *)l;5758atomic64_add(i, v);59}6061static inline void atomic_long_sub(long i, atomic_long_t *l)62{63atomic64_t *v = (atomic64_t *)l;6465atomic64_sub(i, v);66}6768static inline int atomic_long_sub_and_test(long i, atomic_long_t *l)69{70atomic64_t *v = (atomic64_t *)l;7172return atomic64_sub_and_test(i, v);73}7475static inline int atomic_long_dec_and_test(atomic_long_t *l)76{77atomic64_t *v = (atomic64_t *)l;7879return atomic64_dec_and_test(v);80}8182static inline int atomic_long_inc_and_test(atomic_long_t *l)83{84atomic64_t *v = (atomic64_t *)l;8586return atomic64_inc_and_test(v);87}8889static inline int atomic_long_add_negative(long i, atomic_long_t *l)90{91atomic64_t *v = (atomic64_t *)l;9293return atomic64_add_negative(i, v);94}9596static inline long 
atomic_long_add_return(long i, atomic_long_t *l)97{98atomic64_t *v = (atomic64_t *)l;99100return (long)atomic64_add_return(i, v);101}102103static inline long atomic_long_sub_return(long i, atomic_long_t *l)104{105atomic64_t *v = (atomic64_t *)l;106107return (long)atomic64_sub_return(i, v);108}109110static inline long atomic_long_inc_return(atomic_long_t *l)111{112atomic64_t *v = (atomic64_t *)l;113114return (long)atomic64_inc_return(v);115}116117static inline long atomic_long_dec_return(atomic_long_t *l)118{119atomic64_t *v = (atomic64_t *)l;120121return (long)atomic64_dec_return(v);122}123124static inline long atomic_long_add_unless(atomic_long_t *l, long a, long u)125{126atomic64_t *v = (atomic64_t *)l;127128return (long)atomic64_add_unless(v, a, u);129}130131#define atomic_long_inc_not_zero(l) atomic64_inc_not_zero((atomic64_t *)(l))132133#define atomic_long_cmpxchg(l, old, new) \134(atomic64_cmpxchg((atomic64_t *)(l), (old), (new)))135#define atomic_long_xchg(v, new) \136(atomic64_xchg((atomic64_t *)(v), (new)))137138#else /* BITS_PER_LONG == 64 */139140typedef atomic_t atomic_long_t;141142#define ATOMIC_LONG_INIT(i) ATOMIC_INIT(i)143static inline long atomic_long_read(atomic_long_t *l)144{145atomic_t *v = (atomic_t *)l;146147return (long)atomic_read(v);148}149150static inline void atomic_long_set(atomic_long_t *l, long i)151{152atomic_t *v = (atomic_t *)l;153154atomic_set(v, i);155}156157static inline void atomic_long_inc(atomic_long_t *l)158{159atomic_t *v = (atomic_t *)l;160161atomic_inc(v);162}163164static inline void atomic_long_dec(atomic_long_t *l)165{166atomic_t *v = (atomic_t *)l;167168atomic_dec(v);169}170171static inline void atomic_long_add(long i, atomic_long_t *l)172{173atomic_t *v = (atomic_t *)l;174175atomic_add(i, v);176}177178static inline void atomic_long_sub(long i, atomic_long_t *l)179{180atomic_t *v = (atomic_t *)l;181182atomic_sub(i, v);183}184185static inline int atomic_long_sub_and_test(long i, atomic_long_t *l)186{187atomic_t *v = 
(atomic_t *)l;188189return atomic_sub_and_test(i, v);190}191192static inline int atomic_long_dec_and_test(atomic_long_t *l)193{194atomic_t *v = (atomic_t *)l;195196return atomic_dec_and_test(v);197}198199static inline int atomic_long_inc_and_test(atomic_long_t *l)200{201atomic_t *v = (atomic_t *)l;202203return atomic_inc_and_test(v);204}205206static inline int atomic_long_add_negative(long i, atomic_long_t *l)207{208atomic_t *v = (atomic_t *)l;209210return atomic_add_negative(i, v);211}212213static inline long atomic_long_add_return(long i, atomic_long_t *l)214{215atomic_t *v = (atomic_t *)l;216217return (long)atomic_add_return(i, v);218}219220static inline long atomic_long_sub_return(long i, atomic_long_t *l)221{222atomic_t *v = (atomic_t *)l;223224return (long)atomic_sub_return(i, v);225}226227static inline long atomic_long_inc_return(atomic_long_t *l)228{229atomic_t *v = (atomic_t *)l;230231return (long)atomic_inc_return(v);232}233234static inline long atomic_long_dec_return(atomic_long_t *l)235{236atomic_t *v = (atomic_t *)l;237238return (long)atomic_dec_return(v);239}240241static inline long atomic_long_add_unless(atomic_long_t *l, long a, long u)242{243atomic_t *v = (atomic_t *)l;244245return (long)atomic_add_unless(v, a, u);246}247248#define atomic_long_inc_not_zero(l) atomic_inc_not_zero((atomic_t *)(l))249250#define atomic_long_cmpxchg(l, old, new) \251(atomic_cmpxchg((atomic_t *)(l), (old), (new)))252#define atomic_long_xchg(v, new) \253(atomic_xchg((atomic_t *)(v), (new)))254255#endif /* BITS_PER_LONG == 64 */256257#endif /* _ASM_GENERIC_ATOMIC_LONG_H */258259260