Book a Demo!
CoCalc Logo Icon
StoreFeaturesDocsShareSupportNewsAboutPoliciesSign UpSign In
awilliam
GitHub Repository: awilliam/linux-vfio
Path: blob/master/include/asm-generic/atomic.h
10814 views
1
/*
 * Generic C implementation of atomic counter operations
 * Originally implemented for MN10300.
 *
 * Copyright (C) 2007 Red Hat, Inc. All Rights Reserved.
 * Written by David Howells ([email protected])
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public Licence
 * as published by the Free Software Foundation; either version
 * 2 of the Licence, or (at your option) any later version.
 */
13
#ifndef __ASM_GENERIC_ATOMIC_H
#define __ASM_GENERIC_ATOMIC_H

/*
 * This implementation gets its atomicity by disabling local interrupts,
 * which is only sufficient on uniprocessor configurations.
 */
#ifdef CONFIG_SMP
#error not SMP safe
#endif
19
20
/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc..
 */

/* Static initializer for an atomic_t, e.g. atomic_t v = ATOMIC_INIT(0); */
#define ATOMIC_INIT(i)	{ (i) }
26
27
#ifdef __KERNEL__
28
29
/**
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.  The volatile cast forces a real
 * memory load each time and keeps the compiler from caching the value.
 */
#define atomic_read(v)	(*(volatile int *)&(v)->counter)
36
37
/**
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.  A plain int store is atomic
 * on the uniprocessor targets this header supports.
 */
#define atomic_set(v, i)	(((v)->counter) = (i))
45
46
#include <linux/irqflags.h>
47
#include <asm/system.h>
48
49
/**
50
* atomic_add_return - add integer to atomic variable
51
* @i: integer value to add
52
* @v: pointer of type atomic_t
53
*
54
* Atomically adds @i to @v and returns the result
55
*/
56
static inline int atomic_add_return(int i, atomic_t *v)
57
{
58
unsigned long flags;
59
int temp;
60
61
raw_local_irq_save(flags); /* Don't trace it in an irqsoff handler */
62
temp = v->counter;
63
temp += i;
64
v->counter = temp;
65
raw_local_irq_restore(flags);
66
67
return temp;
68
}
69
70
/**
71
* atomic_sub_return - subtract integer from atomic variable
72
* @i: integer value to subtract
73
* @v: pointer of type atomic_t
74
*
75
* Atomically subtracts @i from @v and returns the result
76
*/
77
static inline int atomic_sub_return(int i, atomic_t *v)
78
{
79
unsigned long flags;
80
int temp;
81
82
raw_local_irq_save(flags); /* Don't trace it in an irqsoff handler */
83
temp = v->counter;
84
temp -= i;
85
v->counter = temp;
86
raw_local_irq_restore(flags);
87
88
return temp;
89
}
90
91
/**
 * atomic_add_negative - add integer and test for a negative result
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns non-zero if the result is
 * negative, zero otherwise.
 */
static inline int atomic_add_negative(int i, atomic_t *v)
{
	return atomic_add_return(i, v) < 0;
}
95
96
/**
 * atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v, discarding the result.
 */
static inline void atomic_add(int i, atomic_t *v)
{
	atomic_add_return(i, v);
}
100
101
/**
 * atomic_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v, discarding the result.
 */
static inline void atomic_sub(int i, atomic_t *v)
{
	atomic_sub_return(i, v);
}
105
106
/**
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1, discarding the result.
 */
static inline void atomic_inc(atomic_t *v)
{
	atomic_add_return(1, v);
}
110
111
/**
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1, discarding the result.
 */
static inline void atomic_dec(atomic_t *v)
{
	atomic_sub_return(1, v);
}
115
116
/* Increment/decrement variants that return the new value. */
#define atomic_dec_return(v)		atomic_sub_return(1, (v))
#define atomic_inc_return(v)		atomic_add_return(1, (v))

/* Each of these returns true iff the new value is zero. */
#define atomic_sub_and_test(i, v)	(atomic_sub_return((i), (v)) == 0)
#define atomic_dec_and_test(v)		(atomic_sub_return(1, (v)) == 0)
#define atomic_inc_and_test(v)		(atomic_add_return(1, (v)) == 0)

/* Exchange operations, forwarded to the arch xchg()/cmpxchg() helpers. */
#define atomic_xchg(ptr, v)		(xchg(&(ptr)->counter, (v)))
#define atomic_cmpxchg(v, old, new)	(cmpxchg(&((v)->counter), (old), (new)))
125
126
/*
 * Local (non-SMP-safe) compare-and-exchange, forwarded to the generic
 * fallback; the result is cast back to the pointee's type.
 */
#define cmpxchg_local(ptr, o, n)					       \
	((__typeof__(*(ptr)))__cmpxchg_local_generic((ptr), (unsigned long)(o),\
			(unsigned long)(n), sizeof(*(ptr))))

#define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))
131
132
/**
 * atomic_add_unless - add to atomic variable unless it holds a given value
 * @v: pointer of type atomic_t
 * @a: amount to add
 * @u: value @v must not equal for the add to happen
 *
 * Atomically adds @a to @v as long as @v does not equal @u, retrying the
 * cmpxchg until it succeeds or @u is observed.  Returns non-zero if the
 * add was performed.
 */
static inline int atomic_add_unless(atomic_t *v, int a, int u)
{
	int c, old;
	c = atomic_read(v);
	while (c != u && (old = atomic_cmpxchg(v, c, c + a)) != c)
		c = old;
	return c != u;
}

#define atomic_inc_not_zero(v)		atomic_add_unless((v), 1, 0)
142
143
/**
 * atomic_clear_mask - atomically clear bits in a word
 * @mask: bits to clear
 * @addr: word to operate on
 *
 * Atomically clears the bits set in @mask from the value at @addr, with
 * local interrupts disabled around the read-modify-write.
 */
static inline void atomic_clear_mask(unsigned long mask, unsigned long *addr)
{
	unsigned long flags;

	mask = ~mask;
	raw_local_irq_save(flags); /* Don't trace it in a irqsoff handler */
	*addr &= mask;
	raw_local_irq_restore(flags);
}
152
153
/* Assume that atomic operations are already serializing */
#define smp_mb__before_atomic_dec()	barrier()
#define smp_mb__after_atomic_dec()	barrier()
#define smp_mb__before_atomic_inc()	barrier()
#define smp_mb__after_atomic_inc()	barrier()
158
159
#include <asm-generic/atomic-long.h>

#endif /* __KERNEL__ */
#endif /* __ASM_GENERIC_ATOMIC_H */
163
164