Path: blob/master/arch/mn10300/include/asm/atomic.h
/* MN10300 Atomic counter operations
 *
 * Copyright (C) 2007 Red Hat, Inc. All Rights Reserved.
 * Written by David Howells ([email protected])
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public Licence
 * as published by the Free Software Foundation; either version
 * 2 of the Licence, or (at your option) any later version.
 */
#ifndef _ASM_ATOMIC_H
#define _ASM_ATOMIC_H

#include <asm/irqflags.h>

#ifndef __ASSEMBLY__

#ifdef CONFIG_SMP
#ifdef CONFIG_MN10300_HAS_ATOMIC_OPS_UNIT
static inline
unsigned long __xchg(volatile unsigned long *m, unsigned long val)
{
        unsigned long status;
        unsigned long oldval;

        asm volatile(
                "1:     mov     %4,(_AAR,%3)    \n"
                "       mov     (_ADR,%3),%1    \n"
                "       mov     %5,(_ADR,%3)    \n"
                "       mov     (_ADR,%3),%0    \n"     /* flush */
                "       mov     (_ASR,%3),%0    \n"
                "       or      %0,%0           \n"
                "       bne     1b              \n"
                : "=&r"(status), "=&r"(oldval), "=m"(*m)
                : "a"(ATOMIC_OPS_BASE_ADDR), "r"(m), "r"(val)
                : "memory", "cc");

        return oldval;
}

static inline unsigned long __cmpxchg(volatile unsigned long *m,
                                      unsigned long old, unsigned long new)
{
        unsigned long status;
        unsigned long oldval;

        asm volatile(
                "1:     mov     %4,(_AAR,%3)    \n"
                "       mov     (_ADR,%3),%1    \n"
                "       cmp     %5,%1           \n"
                "       bne     2f              \n"
                "       mov     %6,(_ADR,%3)    \n"
                "2:     mov     (_ADR,%3),%0    \n"     /* flush */
                "       mov     (_ASR,%3),%0    \n"
                "       or      %0,%0           \n"
                "       bne     1b              \n"
                : "=&r"(status), "=&r"(oldval), "=m"(*m)
                : "a"(ATOMIC_OPS_BASE_ADDR), "r"(m),
                  "r"(old), "r"(new)
                : "memory", "cc");

        return oldval;
}
#else  /* CONFIG_MN10300_HAS_ATOMIC_OPS_UNIT */
#error "No SMP atomic operation support!"
#endif /* CONFIG_MN10300_HAS_ATOMIC_OPS_UNIT */

#else  /* CONFIG_SMP */

/*
 * Emulate xchg for non-SMP MN10300
 */
struct __xchg_dummy { unsigned long a[100]; };
#define __xg(x) ((struct __xchg_dummy *)(x))

static inline
unsigned long __xchg(volatile unsigned long *m, unsigned long val)
{
        unsigned long oldval;
        unsigned long flags;

        flags = arch_local_cli_save();
        oldval = *m;
        *m = val;
        arch_local_irq_restore(flags);
        return oldval;
}

/*
 * Emulate cmpxchg for non-SMP MN10300
 */
static inline unsigned long __cmpxchg(volatile unsigned long *m,
                                      unsigned long old, unsigned long new)
{
        unsigned long oldval;
        unsigned long flags;

        flags = arch_local_cli_save();
        oldval = *m;
        if (oldval == old)
                *m = new;
        arch_local_irq_restore(flags);
        return oldval;
}

#endif /* CONFIG_SMP */

#define xchg(ptr, v)                                            \
        ((__typeof__(*(ptr))) __xchg((unsigned long *)(ptr),   \
                                     (unsigned long)(v)))

#define cmpxchg(ptr, o, n)                                      \
        ((__typeof__(*(ptr))) __cmpxchg((unsigned long *)(ptr), \
                                        (unsigned long)(o),     \
                                        (unsigned long)(n)))

#define atomic_xchg(ptr, v)             (xchg(&(ptr)->counter, (v)))
#define atomic_cmpxchg(v, old, new)     (cmpxchg(&((v)->counter), (old), (new)))
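
/*
 * Illustrative sketch, not part of the original header: a typical
 * compare-and-swap retry loop built on the cmpxchg() macro defined
 * above.  The helper name example_counter_add() is hypothetical.
 *
 *      static inline void example_counter_add(unsigned long *counter,
 *                                             unsigned long delta)
 *      {
 *              unsigned long old;
 *
 *              do {
 *                      old = *counter;
 *              } while (cmpxchg(counter, old, old + delta) != old);
 *      }
 */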

#endif /* !__ASSEMBLY__ */

#ifndef CONFIG_SMP
#include <asm-generic/atomic.h>
#else

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc.
 */

#define ATOMIC_INIT(i)  { (i) }

#ifdef __KERNEL__

/**
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.  Note that the guaranteed
 * useful range of an atomic_t is only 24 bits.
 */
#define atomic_read(v)  (ACCESS_ONCE((v)->counter))

/**
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.  Note that the guaranteed
 * useful range of an atomic_t is only 24 bits.
 */
#define atomic_set(v, i) (((v)->counter) = (i))

/**
 * atomic_add_return - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns the result.
 * Note that the guaranteed useful range of an atomic_t is only 24 bits.
 */
static inline int atomic_add_return(int i, atomic_t *v)
{
        int retval;
#ifdef CONFIG_SMP
        int status;

        asm volatile(
                "1:     mov     %4,(_AAR,%3)    \n"
                "       mov     (_ADR,%3),%1    \n"
                "       add     %5,%1           \n"
                "       mov     %1,(_ADR,%3)    \n"
                "       mov     (_ADR,%3),%0    \n"     /* flush */
                "       mov     (_ASR,%3),%0    \n"
                "       or      %0,%0           \n"
                "       bne     1b              \n"
                : "=&r"(status), "=&r"(retval), "=m"(v->counter)
                : "a"(ATOMIC_OPS_BASE_ADDR), "r"(&v->counter), "r"(i)
                : "memory", "cc");

#else
        unsigned long flags;

        flags = arch_local_cli_save();
        retval = v->counter;
        retval += i;
        v->counter = retval;
        arch_local_irq_restore(flags);
#endif
        return retval;
}

/**
 * atomic_sub_return - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns the result.
 * Note that the guaranteed useful range of an atomic_t is only 24 bits.
 */
static inline int atomic_sub_return(int i, atomic_t *v)
{
        int retval;
#ifdef CONFIG_SMP
        int status;

        asm volatile(
                "1:     mov     %4,(_AAR,%3)    \n"
                "       mov     (_ADR,%3),%1    \n"
                "       sub     %5,%1           \n"
                "       mov     %1,(_ADR,%3)    \n"
                "       mov     (_ADR,%3),%0    \n"     /* flush */
                "       mov     (_ASR,%3),%0    \n"
                "       or      %0,%0           \n"
                "       bne     1b              \n"
                : "=&r"(status), "=&r"(retval), "=m"(v->counter)
                : "a"(ATOMIC_OPS_BASE_ADDR), "r"(&v->counter), "r"(i)
                : "memory", "cc");

#else
        unsigned long flags;

        flags = arch_local_cli_save();
        retval = v->counter;
        retval -= i;
        v->counter = retval;
        arch_local_irq_restore(flags);
#endif
        return retval;
}

static inline int atomic_add_negative(int i, atomic_t *v)
{
        return atomic_add_return(i, v) < 0;
}

static inline void atomic_add(int i, atomic_t *v)
{
        atomic_add_return(i, v);
}

static inline void atomic_sub(int i, atomic_t *v)
{
        atomic_sub_return(i, v);
}

static inline void atomic_inc(atomic_t *v)
{
        atomic_add_return(1, v);
}

static inline void atomic_dec(atomic_t *v)
{
        atomic_sub_return(1, v);
}

#define atomic_dec_return(v)            atomic_sub_return(1, (v))
#define atomic_inc_return(v)            atomic_add_return(1, (v))

#define atomic_sub_and_test(i, v)       (atomic_sub_return((i), (v)) == 0)
#define atomic_dec_and_test(v)          (atomic_sub_return(1, (v)) == 0)
#define atomic_inc_and_test(v)          (atomic_add_return(1, (v)) == 0)

#define atomic_add_unless(v, a, u)                              \
({                                                              \
        int c, old;                                             \
        c = atomic_read(v);                                     \
        while (c != (u) && (old = atomic_cmpxchg((v), c, c + (a))) != c) \
                c = old;                                        \
        c != (u);                                               \
})

#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
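
/*
 * Illustrative sketch, not part of the original header: taking a
 * reference with atomic_inc_not_zero() only while an object is still
 * live.  struct example_obj and example_get() are hypothetical.
 *
 *      struct example_obj {
 *              atomic_t refcount;
 *      };
 *
 *      static inline int example_get(struct example_obj *obj)
 *      {
 *              return atomic_inc_not_zero(&obj->refcount);
 *      }
 */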
"=m"(*addr)295: "a"(ATOMIC_OPS_BASE_ADDR), "r"(addr), "r"(~mask)296: "memory", "cc");297#else298unsigned long flags;299300mask = ~mask;301flags = arch_local_cli_save();302*addr &= mask;303arch_local_irq_restore(flags);304#endif305}306307/**308* atomic_set_mask - Atomically set bits in memory309* @mask: Mask of the bits to be set310* @v: pointer to word in memory311*312* Atomically sets the bits set in mask from the memory word specified.313*/314static inline void atomic_set_mask(unsigned long mask, unsigned long *addr)315{316#ifdef CONFIG_SMP317int status;318319asm volatile(320"1: mov %3,(_AAR,%2) \n"321" mov (_ADR,%2),%0 \n"322" or %4,%0 \n"323" mov %0,(_ADR,%2) \n"324" mov (_ADR,%2),%0 \n" /* flush */325" mov (_ASR,%2),%0 \n"326" or %0,%0 \n"327" bne 1b \n"328: "=&r"(status), "=m"(*addr)329: "a"(ATOMIC_OPS_BASE_ADDR), "r"(addr), "r"(mask)330: "memory", "cc");331#else332unsigned long flags;333334flags = arch_local_cli_save();335*addr |= mask;336arch_local_irq_restore(flags);337#endif338}339340/* Atomic operations are already serializing on MN10300??? */341#define smp_mb__before_atomic_dec() barrier()342#define smp_mb__after_atomic_dec() barrier()343#define smp_mb__before_atomic_inc() barrier()344#define smp_mb__after_atomic_inc() barrier()345346#include <asm-generic/atomic-long.h>347348#endif /* __KERNEL__ */349#endif /* CONFIG_SMP */350#endif /* _ASM_ATOMIC_H */351352353