#ifndef _ASM_X86_ATOMIC_H
#define _ASM_X86_ATOMIC_H

#include <linux/compiler.h>
#include <linux/types.h>
#include <asm/processor.h>
#include <asm/alternative.h>
#include <asm/cmpxchg.h>
#include <asm/rmwcc.h>
#include <asm/barrier.h>

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc..
 */

#define ATOMIC_INIT(i)	{ (i) }

/**
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
static inline int atomic_read(const atomic_t *v)
{
        return (*(volatile int *)&(v)->counter);
}

/**
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
static inline void atomic_set(atomic_t *v, int i)
{
        v->counter = i;
}

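/*
 * Illustrative usage sketch (not part of this header): a hypothetical
 * counter declared with ATOMIC_INIT and accessed with atomic_read() and
 * atomic_set().  Both are ordinary volatile loads/stores here and imply
 * no memory barriers.
 *
 *	static atomic_t nr_widgets = ATOMIC_INIT(0);
 *
 *	void widgets_reset(void)
 *	{
 *		atomic_set(&nr_widgets, 0);
 *	}
 *
 *	int widgets_count(void)
 *	{
 *		return atomic_read(&nr_widgets);
 *	}
 */
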
/**
 * atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v.
 */
static inline void atomic_add(int i, atomic_t *v)
{
        asm volatile(LOCK_PREFIX "addl %1,%0"
                     : "+m" (v->counter)
                     : "ir" (i));
}

/**
 * atomic_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v.
 */
static inline void atomic_sub(int i, atomic_t *v)
{
        asm volatile(LOCK_PREFIX "subl %1,%0"
                     : "+m" (v->counter)
                     : "ir" (i));
}

/**
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static inline int atomic_sub_and_test(int i, atomic_t *v)
{
        GEN_BINARY_RMWcc(LOCK_PREFIX "subl", v->counter, "er", i, "%0", "e");
}

/**
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
static inline void atomic_inc(atomic_t *v)
{
        asm volatile(LOCK_PREFIX "incl %0"
                     : "+m" (v->counter));
}

/**
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
static inline void atomic_dec(atomic_t *v)
{
        asm volatile(LOCK_PREFIX "decl %0"
                     : "+m" (v->counter));
}

/**
 * atomic_dec_and_test - decrement and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static inline int atomic_dec_and_test(atomic_t *v)
{
        GEN_UNARY_RMWcc(LOCK_PREFIX "decl", v->counter, "%0", "e");
}

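/*
 * Illustrative usage sketch (not part of this header): the classic
 * reference-count release pattern built on atomic_dec_and_test().  The
 * struct and the kfree() call are hypothetical; exactly one caller sees
 * the counter reach zero, so exactly one caller frees the object.
 *
 *	struct widget {
 *		atomic_t refcnt;
 *	};
 *
 *	void widget_put(struct widget *w)
 *	{
 *		if (atomic_dec_and_test(&w->refcnt))
 *			kfree(w);
 *	}
 */
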
/**
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static inline int atomic_inc_and_test(atomic_t *v)
{
        GEN_UNARY_RMWcc(LOCK_PREFIX "incl", v->counter, "%0", "e");
}

/**
 * atomic_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
static inline int atomic_add_negative(int i, atomic_t *v)
{
        GEN_BINARY_RMWcc(LOCK_PREFIX "addl", v->counter, "er", i, "%0", "s");
}

/**
 * atomic_add_return - add integer and return
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns @i + @v
 */
static inline int atomic_add_return(int i, atomic_t *v)
{
        return i + xadd(&v->counter, i);
}

/**
 * atomic_sub_return - subtract integer and return
 * @v: pointer of type atomic_t
 * @i: integer value to subtract
 *
 * Atomically subtracts @i from @v and returns @v - @i
 */
static inline int atomic_sub_return(int i, atomic_t *v)
{
        return atomic_add_return(-i, v);
}

#define atomic_inc_return(v) (atomic_add_return(1, v))
#define atomic_dec_return(v) (atomic_sub_return(1, v))

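/*
 * Illustrative usage sketch (not part of this header): unlike
 * atomic_inc(), atomic_inc_return() hands back the post-increment
 * value, which makes it a convenient atomic sequence/ticket allocator.
 * The variable and helper names below are hypothetical.
 *
 *	static atomic_t next_id = ATOMIC_INIT(0);
 *
 *	int alloc_id(void)
 *	{
 *		return atomic_inc_return(&next_id);
 *	}
 */
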
static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
{
        return cmpxchg(&v->counter, old, new);
}

static inline int atomic_xchg(atomic_t *v, int new)
{
        return xchg(&v->counter, new);
}

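/*
 * Illustrative usage sketch (not part of this header): atomic_cmpxchg()
 * installs @new only if @v still holds @old and returns the value it
 * found, so the caller can tell whether it won the race.  The state
 * values and variable below are hypothetical.
 *
 *	#define STATE_IDLE	0
 *	#define STATE_BUSY	1
 *
 *	static atomic_t state = ATOMIC_INIT(STATE_IDLE);
 *
 *	bool try_claim(void)
 *	{
 *		return atomic_cmpxchg(&state, STATE_IDLE, STATE_BUSY) == STATE_IDLE;
 *	}
 */
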
/**
 * __atomic_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as @v was not already @u.
 * Returns the old value of @v.
 */
static inline int __atomic_add_unless(atomic_t *v, int a, int u)
{
        int c, old;
        c = atomic_read(v);
        for (;;) {
                if (unlikely(c == (u)))
                        break;
                old = atomic_cmpxchg((v), c, c + (a));
                if (likely(old == c))
                        break;
                c = old;
        }
        return c;
}

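/*
 * Illustrative usage sketch (not part of this header): this cmpxchg loop
 * is the primitive that generic code typically builds atomic_add_unless()
 * and atomic_inc_not_zero() on.  A hypothetical "take a reference only if
 * the object is still live" helper would look like:
 *
 *	bool widget_get_unless_dead(struct widget *w)
 *	{
 *		return __atomic_add_unless(&w->refcnt, 1, 0) != 0;
 *	}
 */
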
/**
 * atomic_inc_short - increment of a short integer
 * @v: pointer to type short int
 *
 * Atomically adds 1 to @v
 * Returns the new value of @v
 */
static inline short int atomic_inc_short(short int *v)
{
        asm(LOCK_PREFIX "addw $1, %0" : "+m" (*v));
        return *v;
}

#ifdef CONFIG_X86_64
/**
 * atomic_or_long - OR of two long integers
 * @v1: pointer to type unsigned long
 * @v2: value of type unsigned long
 *
 * Atomically ORs @v2 into the value pointed to by @v1.
 * The result is stored back in @v1; nothing is returned.
 */
static inline void atomic_or_long(unsigned long *v1, unsigned long v2)
{
        asm(LOCK_PREFIX "orq %1, %0" : "+m" (*v1) : "r" (v2));
}
#endif

/* These are x86-specific, used by some header files */
#define atomic_clear_mask(mask, addr) \
        asm volatile(LOCK_PREFIX "andl %0,%1" \
                     : : "r" (~(mask)), "m" (*(addr)) : "memory")

#define atomic_set_mask(mask, addr) \
        asm volatile(LOCK_PREFIX "orl %0,%1" \
                     : : "r" ((unsigned)(mask)), "m" (*(addr)) \
                     : "memory")

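/*
 * Illustrative usage sketch (not part of this header): the mask macros
 * operate on a plain 32-bit memory word rather than an atomic_t, e.g. to
 * set or clear a flag bit in a shared status word.  The variable and flag
 * below are hypothetical.
 *
 *	static unsigned int shared_flags;
 *	#define FLAG_SHUTDOWN	0x1
 *
 *	atomic_set_mask(FLAG_SHUTDOWN, &shared_flags);
 *	atomic_clear_mask(FLAG_SHUTDOWN, &shared_flags);
 */
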
#ifdef CONFIG_X86_32
# include <asm/atomic64_32.h>
#else
# include <asm/atomic64_64.h>
#endif

#endif /* _ASM_X86_ATOMIC_H */