Path: blob/master/arch/mn10300/include/asm/bitops.h
/* MN10300 bit operations
 *
 * Copyright (C) 2007 Red Hat, Inc. All Rights Reserved.
 * Written by David Howells (dhowells@redhat.com)
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public Licence
 * as published by the Free Software Foundation; either version
 * 2 of the Licence, or (at your option) any later version.
 *
 * These have to be done with inline assembly: that way the bit-setting
 * is guaranteed to be atomic. All bit operations return 0 if the bit
 * was cleared before the operation and != 0 if it was not.
 *
 * bit 0 is the LSB of addr; bit 32 is the LSB of (addr+1).
 */
#ifndef __ASM_BITOPS_H
#define __ASM_BITOPS_H

#include <asm/cpu-regs.h>

#define smp_mb__before_clear_bit()	barrier()
#define smp_mb__after_clear_bit()	barrier()

/*
 * set bit
 */
#define __set_bit(nr, addr)					\
({								\
	volatile unsigned char *_a = (unsigned char *)(addr);	\
	const unsigned shift = (nr) & 7;			\
	_a += (nr) >> 3;					\
								\
	asm volatile("bset %2,(%1) # set_bit reg"		\
		     : "=m"(*_a)				\
		     : "a"(_a), "d"(1 << shift), "m"(*_a)	\
		     : "memory", "cc");				\
})

#define set_bit(nr, addr) __set_bit((nr), (addr))

/*
 * clear bit
 */
#define ___clear_bit(nr, addr)					\
({								\
	volatile unsigned char *_a = (unsigned char *)(addr);	\
	const unsigned shift = (nr) & 7;			\
	_a += (nr) >> 3;					\
								\
	asm volatile("bclr %2,(%1) # clear_bit reg"		\
		     : "=m"(*_a)				\
		     : "a"(_a), "d"(1 << shift), "m"(*_a)	\
		     : "memory", "cc");				\
})

#define clear_bit(nr, addr) ___clear_bit((nr), (addr))


static inline void __clear_bit(unsigned long nr, volatile void *addr)
{
	unsigned int *a = (unsigned int *) addr;
	int mask;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);
	*a &= ~mask;
}

/*
 * test bit
 */
static inline int test_bit(unsigned long nr, const volatile void *addr)
{
	return 1UL & (((const volatile unsigned int *) addr)[nr >> 5] >> (nr & 31));
}

/*
 * change bit
 */
static inline void __change_bit(unsigned long nr, volatile void *addr)
{
	int mask;
	unsigned int *a = (unsigned int *) addr;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);
	*a ^= mask;
}

extern void change_bit(unsigned long nr, volatile void *addr);

/*
 * test and set bit
 */
#define __test_and_set_bit(nr, addr)				\
({								\
	volatile unsigned char *_a = (unsigned char *)(addr);	\
	const unsigned shift = (nr) & 7;			\
	unsigned epsw;						\
	_a += (nr) >> 3;					\
								\
	asm volatile("bset %3,(%2) # test_set_bit reg\n"	\
		     "mov epsw,%1"				\
		     : "=m"(*_a), "=d"(epsw)			\
		     : "a"(_a), "d"(1 << shift), "m"(*_a)	\
		     : "memory", "cc");				\
								\
	!(epsw & EPSW_FLAG_Z);					\
})

#define test_and_set_bit(nr, addr) __test_and_set_bit((nr), (addr))

/*
 * test and clear bit
 */
#define __test_and_clear_bit(nr, addr)				\
({								\
	volatile unsigned char *_a = (unsigned char *)(addr);	\
	const unsigned shift = (nr) & 7;			\
	unsigned epsw;						\
	_a += (nr) >> 3;					\
								\
	asm volatile("bclr %3,(%2) # test_clear_bit reg\n"	\
		     "mov epsw,%1"				\
		     : "=m"(*_a), "=d"(epsw)			\
		     : "a"(_a), "d"(1 << shift), "m"(*_a)	\
		     : "memory", "cc");				\
								\
	!(epsw & EPSW_FLAG_Z);					\
})

#define test_and_clear_bit(nr, addr) __test_and_clear_bit((nr), (addr))

/*
 * test and change bit
 */
static inline int __test_and_change_bit(unsigned long nr, volatile void *addr)
{
	int mask, retval;
	unsigned int *a = (unsigned int *)addr;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);
	retval = (mask & *a) != 0;
	*a ^= mask;

	return retval;
}

extern int test_and_change_bit(unsigned long nr, volatile void *addr);

#include <asm-generic/bitops/lock.h>

#ifdef __KERNEL__

/**
 * __ffs - find first bit set
 * @x: the word to search
 *
 * - return 31..0 to indicate bit 31..0 least significant bit set
 * - if no bits are set in x, the result is undefined
 */
static inline __attribute__((const))
unsigned long __ffs(unsigned long x)
{
	int bit;
	asm("bsch %2,%0" : "=r"(bit) : "0"(0), "r"(x & -x) : "cc");
	return bit;
}

/*
 * special slimline version of fls() for calculating ilog2_u32()
 * - note: no protection against n == 0
 */
static inline __attribute__((const))
int __ilog2_u32(u32 n)
{
	int bit;
	asm("bsch %2,%0" : "=r"(bit) : "0"(0), "r"(n) : "cc");
	return bit;
}

/**
 * fls - find last bit set
 * @x: the word to search
 *
 * This is defined the same way as ffs:
 * - return 32..1 to indicate bit 31..0 most significant bit set
 * - return 0 to indicate no bits set
 */
static inline __attribute__((const))
int fls(int x)
{
	return (x != 0) ? __ilog2_u32(x) + 1 : 0;
}

/**
 * __fls - find last (most-significant) set bit in a long word
 * @word: the word to search
 *
 * Undefined if no set bit exists, so code should check against 0 first.
 */
static inline unsigned long __fls(unsigned long word)
{
	return __ilog2_u32(word);
}

/**
 * ffs - find first bit set
 * @x: the word to search
 *
 * - return 32..1 to indicate bit 31..0 least significant bit set
 * - return 0 to indicate no bits set
 */
static inline __attribute__((const))
int ffs(int x)
{
	/* Note: (x & -x) gives us a mask that is the least significant
	 * (rightmost) 1-bit of the value in x.
	 */
	return fls(x & -x);
}

#include <asm-generic/bitops/ffz.h>
#include <asm-generic/bitops/fls64.h>
#include <asm-generic/bitops/find.h>
#include <asm-generic/bitops/sched.h>
#include <asm-generic/bitops/hweight.h>

#define ext2_set_bit_atomic(lock, nr, addr) \
	test_and_set_bit((nr), (addr))
#define ext2_clear_bit_atomic(lock, nr, addr) \
	test_and_clear_bit((nr), (addr))

#include <asm-generic/bitops/le.h>

#endif /* __KERNEL__ */
#endif /* __ASM_BITOPS_H */
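
Illustrative sketch (editor's addition, not part of the kernel header): the comments above document the bit-numbering convention (bit 0 is the LSB of the byte at addr, bit 32 is the LSB of the next 32-bit word) and the ffs()/fls() return conventions, including the (x & -x) trick that isolates the lowest set bit. The small userspace program below demonstrates those semantics with plain C stand-ins. The demo_* helper names are made up for the example, and mixing byte and word indexing assumes a little-endian host with 32-bit int, as on the MN10300 itself; it is not the atomic bset/bclr/bsch implementation from the header.

/* bitops_demo.c - illustration only, builds with any hosted C compiler */
#include <assert.h>
#include <stdio.h>

/* Same byte-addressed indexing as __set_bit() above: byte nr >> 3, bit nr & 7. */
static void demo_set_bit(unsigned long nr, void *addr)
{
	unsigned char *a = (unsigned char *)addr + (nr >> 3);
	*a |= 1u << (nr & 7);
}

/* Same word-addressed indexing as test_bit() above: word nr >> 5, bit nr & 31. */
static int demo_test_bit(unsigned long nr, const void *addr)
{
	const unsigned int *a = (const unsigned int *)addr;
	return 1u & (a[nr >> 5] >> (nr & 31));
}

/* fls() convention: return 32..1 for the most significant set bit, 0 if none set. */
static int demo_fls(unsigned int x)
{
	int bit = 0;
	while (x) {
		bit++;
		x >>= 1;
	}
	return bit;
}

int main(void)
{
	unsigned int words[2] = { 0, 0 };

	demo_set_bit(0, words);   /* bit 0: LSB of the first byte */
	demo_set_bit(32, words);  /* bit 32: LSB of (addr + 1), i.e. words[1] */
	assert(demo_test_bit(0, words) && demo_test_bit(32, words));
	assert(words[0] == 0x1 && words[1] == 0x1);  /* holds on little-endian hosts */

	/* ffs(x) == fls(x & -x): x & -x keeps only the lowest set bit */
	unsigned int x = 0x50;          /* bits 4 and 6 set */
	assert(demo_fls(x) == 7);       /* fls: highest set bit is bit 6 -> 7 */
	assert(demo_fls(x & -x) == 5);  /* ffs: lowest set bit is bit 4 -> 5 */
	assert(demo_fls(0) == 0);       /* no bits set -> 0 */

	printf("bitops semantics demo passed\n");
	return 0;
}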