root/src/utils/noit_atomic.h

Revision 0605837f9b51a45fe72b865e162e55b8311ccb08, 7.9 kB (checked in by Theo Schlossnagle <jesus@omniti.com>, 4 months ago)

How did this ever work without volatile?

  • Property mode set to 100644
Line 
1 /*
2  * Copyright (c) 2005-2009, OmniTI Computer Consulting, Inc.
3  * All rights reserved.
4  *
5  * Redistribution and use in source and binary forms, with or without
6  * modification, are permitted provided that the following conditions are
7  * met:
8  *
9  *    * Redistributions of source code must retain the above copyright
10  *      notice, this list of conditions and the following disclaimer.
11  *    * Redistributions in binary form must reproduce the above
12  *      copyright notice, this list of conditions and the following
13  *      disclaimer in the documentation and/or other materials provided
14  *      with the distribution.
15  *    * Neither the name OmniTI Computer Consulting, Inc. nor the names
16  *      of its contributors may be used to endorse or promote products
17  *      derived from this software without specific prior written
18  *      permission.
19  *
20  * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
21  * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
22  * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
23  * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
24  * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
25  * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
26  * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
27  * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
28  * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
29  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
30  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
31  */
32
33 #ifndef UTILS_NOIT_ATOMIC_H
34 #define UTILS_NOIT_ATOMIC_H
35
36 #include "noit_config.h"
37
38 typedef volatile int32_t noit_atomic32_t;
39 typedef volatile int64_t noit_atomic64_t;
40
41 #if defined(__GNUC__)
42
43 typedef noit_atomic32_t noit_spinlock_t;
/* Atomic 32-bit compare-and-swap: if *ptr == curr, store rpl into *ptr.
 * Returns the value *ptr held before the operation (== curr on success).
 * x86 LOCK CMPXCHG: the comparand is pinned to EAX via "0"(curr) and the
 * prior value comes back in EAX via "=a"(prev); the "memory" clobber
 * makes this a full compiler barrier. */
static inline noit_atomic32_t
noit_atomic_cas32(volatile noit_atomic32_t *ptr,
                  volatile noit_atomic32_t rpl,
                  volatile noit_atomic32_t curr) {
  noit_atomic32_t prev;
  __asm__ volatile (
      "lock; cmpxchgl %1, %2"
    : "=a" (prev)
    : "r"  (rpl), "m" (*(ptr)), "0" (curr)
    : "memory");
  return prev;
}
56
57 #if (SIZEOF_VOID_P == 4)
/* Atomic pointer compare-and-swap for 32-bit pointers (SIZEOF_VOID_P == 4):
 * if *ptr == curr, store rpl into *ptr.  Returns the prior value of *ptr
 * (== curr on success).  Same CMPXCHGL pattern as noit_atomic_cas32, just
 * typed for void*. */
static inline void *
noit_atomic_casptr(volatile void **ptr,
                   /* coverity[noescape] */
                   volatile void *rpl,
                   volatile void *curr) {
  void *prev;
  __asm__ volatile (
      "lock; cmpxchgl %1, %2"
    : "=a" (prev)
    : "r"  (rpl), "m" (*(ptr)), "0" (curr)
    : "memory");
  return prev;
}
71 #endif
72
73 #ifdef __x86_64__
/* Atomic 64-bit compare-and-swap on x86_64: if *ptr == curr, store rpl
 * into *ptr.  Returns the prior value of *ptr (== curr on success).
 * Uses LOCK CMPXCHGQ with the comparand/result in RAX. */
static inline noit_atomic64_t
noit_atomic_cas64(volatile noit_atomic64_t *ptr,
                  volatile noit_atomic64_t rpl,
                  volatile noit_atomic64_t curr) {
  noit_atomic64_t prev;
  __asm__ volatile (
      "lock; cmpxchgq %1, %2"
    : "=a" (prev)
    : "r"  (rpl), "m" (*(ptr)), "0" (curr)
    : "memory");
  return prev;
}
86 #if (SIZEOF_VOID_P == 8)
/* Atomic pointer compare-and-swap for 64-bit pointers (SIZEOF_VOID_P == 8):
 * if *ptr == curr, store rpl into *ptr.  Returns the prior value of *ptr
 * (== curr on success).  Same CMPXCHGQ pattern as the 64-bit cas above,
 * just typed for void*. */
static inline void *
noit_atomic_casptr(volatile void **ptr,
                  /* coverity[noescape] */
                  volatile void *rpl,
                  volatile void *curr) {
  void *prev;
  __asm__ volatile (
      "lock; cmpxchgq %1, %2"
    : "=a" (prev)
    : "r"  (rpl), "m" (*(ptr)), "0" (curr)
    : "memory");
  return prev;
}
100 #endif
101 #else
102
103 static inline noit_atomic64_t
104 noit_atomic_cas64_asm (volatile noit_atomic64_t* ptr,
105                        volatile u_int32_t old_high,
106                        volatile u_int32_t old_low,
107                        volatile u_int32_t new_high,
108                        volatile u_int32_t new_low) {
109   noit_atomic64_t prev;
110   u_int64_t tmp;
111   __asm__ volatile (
112       "lock; cmpxchg8b (%6);"
113     : "=a" (old_low), "=d" (old_high)
114     : "0" (old_low),  "1" (old_high),
115       "c" (new_high),  "r" (new_low),
116       "r" (ptr)
117     : "memory", "cc");
118   tmp = old_high;
119   prev = old_low | tmp << 32;
120   return prev;
121 }
122 static inline noit_atomic64_t
123 noit_atomic_cas64(volatile noit_atomic64_t *ptr,
124                   volatile noit_atomic64_t rpl,
125                   volatile noit_atomic64_t curr) {
126   noit_atomic64_t prev;
127 #ifdef __PIC__
128   __asm__ volatile (
129       "pushl %%ebx;"
130       "mov 4+%1,%%ecx;"
131       "mov %1,%%ebx;"
132       "lock;"
133       "cmpxchg8b (%3);"
134       "popl %%ebx"
135     : "=A" (prev)
136     : "m" (rpl), "A" (curr), "r" (ptr)
137     : "%ecx", "memory", "cc");
138 #else
139   /* These have to be unsigned or bit shifting doesn't work
140    * properly */
141   register u_int32_t old_high = *ptr >> 32, old_low = *ptr;
142   register u_int32_t new_high = rpl >> 32, new_low = rpl;
143   /* We need to break the 64-bit variables into 2 32-bit variables, do a
144    * compare-and-swap, then combine the results */
145   prev = noit_atomic_cas64_asm(ptr, old_high, old_low, new_high, new_low);
146 #endif
147   return prev;
148 };
149 #if (SIZEOF_VOID_P == 8)
150 /* This should never be triggered.. 8 byte pointers on 32bit machines */
151 #error "64bit pointers on a 32bit architecture?"
152 #endif
153 #endif
154
155 static inline void noit_spinlock_lock(volatile noit_spinlock_t *lock) {
156   while(noit_atomic_cas32(lock, 1, 0) != 0);
157 }
158 static inline void noit_spinlock_unlock(volatile noit_spinlock_t *lock) {
159   while(noit_atomic_cas32(lock, 0, 1) != 1);
160 }
161 static inline int noit_spinlock_trylock(volatile noit_spinlock_t *lock) {
162   return (noit_atomic_cas32(lock, 1, 0) == 0);
163 }
164
165 #elif (defined(__sparc) || defined(__sparcv9) || defined(__amd64) || defined(__i386)) && (defined(__SUNPRO_C) || defined(__SUNPRO_CC))
166
167 typedef noit_atomic32_t noit_spinlock_t;
168
169 extern noit_atomic32_t noit_atomic_cas32(volatile noit_atomic32_t *mem,
170         volatile noit_atomic32_t newval, volatile noit_atomic32_t cmpval);
171 extern noit_atomic64_t noit_atomic_cas64(volatile noit_atomic64_t *mem,
172         volatile noit_atomic64_t newval, volatile noit_atomic64_t cmpval);
173 extern void *noit_atomic_casptr(volatile void **mem,
174         volatile void *newval, volatile void *cmpval);
175
176 static inline void noit_spinlock_lock(volatile noit_spinlock_t *lock) {
177   while(noit_atomic_cas32(lock, 1, 0) != 0);
178 }
179 static inline void noit_spinlock_unlock(volatile noit_spinlock_t *lock) {
180   while(noit_atomic_cas32(lock, 0, 1) != 1);
181 }
182 static inline int noit_spinlock_trylock(volatile noit_spinlock_t *lock) {
183   return (noit_atomic_cas32(lock, 1, 0) == 0);
184 }
185
186 #else
187 #error Please stub out the atomics section for your platform
188 #endif
189
190 #ifndef noit_atomic_add32
191 static inline noit_atomic32_t noit_atomic_add32(volatile noit_atomic32_t *loc,
192                                                 volatile noit_atomic32_t diff) {
193   register noit_atomic32_t current;
194   do {
195     current = *(loc);
196   } while(noit_atomic_cas32(loc, current + diff, current) != current);
197   return current + diff;
198 }
199 #endif
200
201 #ifndef noit_atomic_add64
202 static inline noit_atomic64_t noit_atomic_add64(volatile noit_atomic64_t *loc,
203                                                 volatile noit_atomic64_t diff) {
204   register noit_atomic64_t current;
205   do {
206     current = *(loc);
207   } while(noit_atomic_cas64(loc, current + diff, current) != current);
208   return current + diff;
209 }
210 #endif
211
212 #ifndef noit_atomic_sub32
213 static inline noit_atomic32_t noit_atomic_sub32(volatile noit_atomic32_t *loc,
214                                                 volatile noit_atomic32_t diff) {
215   register noit_atomic32_t current;
216   do {
217     current = *(loc);
218   } while(noit_atomic_cas32(loc, current - diff, current) != current);
219   return current - diff;
220 }
221 #endif
222
223 #ifndef noit_atomic_sub64
224 static inline noit_atomic64_t noit_atomic_sub64(volatile noit_atomic64_t *loc,
225                                                 volatile noit_atomic64_t diff) {
226   register noit_atomic64_t current;
227   do {
228     current = *(loc);
229   } while(noit_atomic_cas64(loc, current - diff, current) != current);
230   return current - diff;
231 }
232 #endif
233
/* Increment/decrement conveniences built on the add primitives; each is
 * guarded so a platform section above may supply a native version. */
#ifndef noit_atomic_inc32
#define noit_atomic_inc32(a) noit_atomic_add32(a, 1)
#endif

#ifndef noit_atomic_inc64
#define noit_atomic_inc64(a) noit_atomic_add64(a, 1)
#endif

/* Decrement is add of -1; the atomic types are signed, so this is well
 * defined. */
#ifndef noit_atomic_dec32
#define noit_atomic_dec32(a) noit_atomic_add32(a, -1)
#endif

#ifndef noit_atomic_dec64
#define noit_atomic_dec64(a) noit_atomic_add64(a, -1)
#endif
249
250 #endif
Note: See TracBrowser for help on using the browser.