Path: blob/master/arch/tile/include/asm/bitops_64.h
/*
 * Copyright 2011 Tilera Corporation. All Rights Reserved.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation, version 2.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE, GOOD TITLE or
 * NON INFRINGEMENT.  See the GNU General Public License for
 * more details.
 */

#ifndef _ASM_TILE_BITOPS_64_H
#define _ASM_TILE_BITOPS_64_H

#include <linux/compiler.h>
#include <asm/atomic.h>
#include <asm/system.h>

/* See <asm/bitops.h> for API comments. */

static inline void set_bit(unsigned nr, volatile unsigned long *addr)
{
	unsigned long mask = (1UL << (nr % BITS_PER_LONG));
	__insn_fetchor((void *)(addr + nr / BITS_PER_LONG), mask);
}

static inline void clear_bit(unsigned nr, volatile unsigned long *addr)
{
	unsigned long mask = (1UL << (nr % BITS_PER_LONG));
	__insn_fetchand((void *)(addr + nr / BITS_PER_LONG), ~mask);
}

#define smp_mb__before_clear_bit()	smp_mb()
#define smp_mb__after_clear_bit()	smp_mb()

static inline void change_bit(unsigned nr, volatile unsigned long *addr)
{
	unsigned long mask = (1UL << (nr % BITS_PER_LONG));
	long guess, oldval;

	/* Point at the word containing the bit, then loop on cmpxchg
	 * until the XOR of the mask lands without interference. */
	addr += nr / BITS_PER_LONG;
	oldval = *addr;
	do {
		guess = oldval;
		oldval = atomic64_cmpxchg((atomic64_t *)addr,
					  guess, guess ^ mask);
	} while (guess != oldval);
}

/*
 * The test_and_xxx_bit() routines require a memory fence before we
 * start the operation, and after the operation completes.  We use
 * smp_mb() before, and rely on the "!= 0" comparison, plus a compiler
 * barrier(), to block until the atomic op is complete.
 */

static inline int test_and_set_bit(unsigned nr, volatile unsigned long *addr)
{
	int val;
	unsigned long mask = (1UL << (nr % BITS_PER_LONG));
	smp_mb();  /* barrier for proper semantics */
	val = (__insn_fetchor((void *)(addr + nr / BITS_PER_LONG), mask)
	       & mask) != 0;
	barrier();
	return val;
}

static inline int test_and_clear_bit(unsigned nr, volatile unsigned long *addr)
{
	int val;
	unsigned long mask = (1UL << (nr % BITS_PER_LONG));
	smp_mb();  /* barrier for proper semantics */
	val = (__insn_fetchand((void *)(addr + nr / BITS_PER_LONG), ~mask)
	       & mask) != 0;
	barrier();
	return val;
}

static inline int test_and_change_bit(unsigned nr,
				      volatile unsigned long *addr)
{
	unsigned long mask = (1UL << (nr % BITS_PER_LONG));
	long guess, oldval;

	addr += nr / BITS_PER_LONG;
	oldval = *addr;
	do {
		guess = oldval;
		oldval = atomic64_cmpxchg((atomic64_t *)addr,
					  guess, guess ^ mask);
	} while (guess != oldval);
	return (oldval & mask) != 0;
}

#define ext2_set_bit_atomic(lock, nr, addr)			\
	test_and_set_bit((nr), (unsigned long *)(addr))
#define ext2_clear_bit_atomic(lock, nr, addr)			\
	test_and_clear_bit((nr), (unsigned long *)(addr))

#endif /* _ASM_TILE_BITOPS_64_H */
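
For readers who want to see the word-index/mask arithmetic in isolation, the sketch below mirrors what the header does: nr / BITS_PER_LONG selects the word, 1UL << (nr % BITS_PER_LONG) selects the bit, and an atomic fetch-or/fetch-and returns the old word so the caller can test the old bit. It is a hypothetical user-space analogue, not the kernel implementation: the demo_* names are invented for illustration, and GCC's __atomic_fetch_or/__atomic_fetch_and builtins stand in for the tile-specific __insn_fetchor/__insn_fetchand instructions.

/* Hypothetical user-space analogue of the fetch-or/fetch-and bitops above. */
#include <stdio.h>
#include <limits.h>

#define DEMO_BITS_PER_LONG	(sizeof(unsigned long) * CHAR_BIT)

static int demo_test_and_set_bit(unsigned nr, volatile unsigned long *addr)
{
	unsigned long mask = 1UL << (nr % DEMO_BITS_PER_LONG);
	unsigned long old;

	/* __atomic_fetch_or returns the prior word, like __insn_fetchor. */
	old = __atomic_fetch_or(addr + nr / DEMO_BITS_PER_LONG,
				mask, __ATOMIC_SEQ_CST);
	return (old & mask) != 0;
}

static void demo_clear_bit(unsigned nr, volatile unsigned long *addr)
{
	unsigned long mask = 1UL << (nr % DEMO_BITS_PER_LONG);

	__atomic_fetch_and(addr + nr / DEMO_BITS_PER_LONG,
			   ~mask, __ATOMIC_SEQ_CST);
}

int main(void)
{
	unsigned long bitmap[2] = { 0, 0 };

	/* Bit 70 lives in bitmap[1]; first set returns 0, second returns 1. */
	printf("first set:  was %d\n", demo_test_and_set_bit(70, bitmap));
	printf("second set: was %d\n", demo_test_and_set_bit(70, bitmap));
	demo_clear_bit(70, bitmap);
	printf("after clear: was %d\n", demo_test_and_set_bit(70, bitmap));
	return 0;
}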