/*
 * __sync_fetch_and_add: atomically add val to *ptr and return the value
 * *ptr held BEFORE the addition (GCC builtin semantics).
 */
inline bool __sync_fetch_and_add(volatile bool* ptr, bool val)
{
    /* Solaris atomics return the new value; subtract the addend to
     * recover the old value the builtin must return. */
    return (val == true)
        ? (bool)(atomic_inc_8_nv((volatile uint8_t *)ptr) - 1)
        : (bool)atomic_add_8_nv((volatile uint8_t *)ptr, (int8_t)val);
}

inline int8_t __sync_fetch_and_add(volatile int8_t* ptr, int8_t val)
{
    return (val == 1)
        ? (int8_t)(atomic_inc_8_nv((volatile uint8_t*)ptr) - 1)
        : (int8_t)(atomic_add_8_nv((volatile uint8_t*)ptr, val) - val);
}

inline int16_t __sync_fetch_and_add(volatile int16_t* ptr, int16_t val)
{
    return (val == 1)
        ? (int16_t)(atomic_inc_16_nv((volatile uint16_t*)ptr) - 1)
        : (int16_t)(atomic_add_16_nv((volatile uint16_t*)ptr, val) - val);
}

inline int32_t __sync_fetch_and_add(volatile int32_t* ptr, int32_t val)
{
    return (val == 1)
        ? (int32_t)(atomic_inc_32_nv((volatile uint32_t*)ptr) - 1)
        : (int32_t)(atomic_add_32_nv((volatile uint32_t*)ptr, val) - val);
}

inline uint8_t __sync_fetch_and_add(volatile uint8_t* ptr, uint8_t val)
{
    return (val == 1)
        ? (uint8_t)(atomic_inc_8_nv(ptr) - 1)
        : (uint8_t)(atomic_add_8_nv(ptr, (int8_t)val) - val);
}

inline uint16_t __sync_fetch_and_add(volatile uint16_t* ptr, uint16_t val)
{
    return (val == 1)
        ? (uint16_t)(atomic_inc_16_nv(ptr) - 1)
        : (uint16_t)(atomic_add_16_nv(ptr, (int16_t)val) - val);
}

inline uint32_t __sync_fetch_and_add(volatile uint32_t* ptr, uint32_t val)
{
    return (val == 1)
        ? (uint32_t)(atomic_inc_32_nv(ptr) - 1)
        : (uint32_t)(atomic_add_32_nv(ptr, (int32_t)val) - val);
}

# if defined(_KERNEL) || defined(_INT64_TYPE)
inline uint64_t __sync_fetch_and_add(volatile uint64_t* ptr, uint64_t val)
{
    return (val == 1)
        ? atomic_inc_64_nv(ptr) - 1
        : atomic_add_64_nv(ptr, (int64_t)val) - val;
}

inline int64_t __sync_fetch_and_add(volatile int64_t* ptr, int64_t val)
{
    return (val == 1)
        ? (int64_t)(atomic_inc_64_nv((volatile uint64_t*)ptr) - 1)
        : (int64_t)(atomic_add_64_nv((volatile uint64_t*)ptr, val) - val);
}
# endif

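/*
 * __sync_fetch_and_sub: atomically subtract val from *ptr and return the
 * value *ptr held before the subtraction.
 */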
inline uint8_t __sync_fetch_and_sub(volatile uint8_t* ptr, uint8_t val)
{
    return (val == 1)
        ? (uint8_t)(atomic_dec_8_nv(ptr) + 1)
        : (uint8_t)(atomic_add_8_nv(ptr, 0 - (int8_t)val) + val);
}

inline uint16_t __sync_fetch_and_sub(volatile uint16_t* ptr, uint16_t val)
{
    return (val == 1)
        ? (uint16_t)(atomic_dec_16_nv(ptr) + 1)
        : (uint16_t)(atomic_add_16_nv(ptr, 0 - (int16_t)val) + val);
}

inline uint32_t __sync_fetch_and_sub(volatile uint32_t* ptr, uint32_t val)
{
    return (val == 1)
        ? (uint32_t)(atomic_dec_32_nv(ptr) + 1)
        : (uint32_t)(atomic_add_32_nv(ptr, 0 - (int32_t)val) + val);
}

# if defined(_KERNEL) || defined(_INT64_TYPE)
inline uint64_t __sync_fetch_and_sub(volatile uint64_t* ptr, uint64_t val)
{
    return (val == 1)
        ? atomic_dec_64_nv(ptr) + 1
        : atomic_add_64_nv(ptr, 0 - (int64_t)val) + val;
}

inline int64_t __sync_fetch_and_sub(volatile int64_t* ptr, uint64_t val)
{
    return (val == 1)
        ? (int64_t)(atomic_dec_64_nv((volatile uint64_t *) ptr) + 1)
        : (int64_t)(atomic_add_64_nv((volatile uint64_t *) ptr, 0 - (int64_t)val) + val);
}
# endif

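/*
 * __sync_add_and_fetch: atomically add val to *ptr and return the NEW
 * value; this maps directly onto the Solaris *_nv ("new value") atomics.
 */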
inline bool __sync_add_and_fetch(volatile bool* ptr, bool val)
{
    return (val == true)
        ? atomic_inc_8_nv((volatile uint8_t *)ptr)
        : atomic_add_8_nv((volatile uint8_t *)ptr, (int8_t)val);
}

inline int8_t __sync_add_and_fetch(volatile int8_t* ptr, int8_t val)
{
    return (val == 1)
        ? atomic_inc_8_nv((volatile uint8_t*)ptr)
        : atomic_add_8_nv((volatile uint8_t*)ptr, val);
}

inline int16_t __sync_add_and_fetch(volatile int16_t* ptr, int16_t val)
{
    return (val == 1)
        ? atomic_inc_16_nv((volatile uint16_t*)ptr)
        : atomic_add_16_nv((volatile uint16_t*)ptr, val);
}

inline int32_t __sync_add_and_fetch(volatile int32_t* ptr, int32_t val)
{
    return (val == 1)
        ? atomic_inc_32_nv((volatile uint32_t*)ptr)
        : atomic_add_32_nv((volatile uint32_t*)ptr, val);
}

inline uint8_t __sync_add_and_fetch(volatile uint8_t* ptr, uint8_t val)
{
    return (val == 1) ? atomic_inc_8_nv(ptr) : atomic_add_8_nv(ptr, (int8_t)val);
}

inline uint16_t __sync_add_and_fetch(volatile uint16_t* ptr, uint16_t val)
{
    return (val == 1) ? atomic_inc_16_nv(ptr) : atomic_add_16_nv(ptr, (int16_t)val);
}

inline uint32_t __sync_add_and_fetch(volatile uint32_t* ptr, uint32_t val)
{
    return (val == 1) ? atomic_inc_32_nv(ptr) : atomic_add_32_nv(ptr, (int32_t)val);
}

# if defined(_KERNEL) || defined(_INT64_TYPE)
inline uint64_t __sync_add_and_fetch(volatile uint64_t* ptr, uint64_t val)
{
    return (val == 1) ? atomic_inc_64_nv(ptr) : atomic_add_64_nv(ptr, (int64_t)val);
}

inline int64_t __sync_add_and_fetch(volatile int64_t* ptr, int64_t val)
{
    return (val == 1)
        ? atomic_inc_64_nv((volatile uint64_t*)ptr)
        : atomic_add_64_nv((volatile uint64_t*)ptr, val);
}
# endif

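/*
 * __sync_sub_and_fetch: atomically subtract val from *ptr and return the
 * new value.
 */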
inline uint8_t __sync_sub_and_fetch(volatile uint8_t* ptr, uint8_t val)
{
    return (val == 1) ? atomic_dec_8_nv(ptr) : atomic_add_8_nv(ptr, 0 - (int8_t)val);
}

inline uint16_t __sync_sub_and_fetch(volatile uint16_t* ptr, uint16_t val)
{
    return (val == 1) ? atomic_dec_16_nv(ptr) : atomic_add_16_nv(ptr, 0 - (int16_t)val);
}

inline uint32_t __sync_sub_and_fetch(volatile uint32_t* ptr, uint32_t val)
{
    return (val == 1) ? atomic_dec_32_nv(ptr) : atomic_add_32_nv(ptr, 0 - (int32_t)val);
}

# if defined(_KERNEL) || defined(_INT64_TYPE)
inline uint64_t __sync_sub_and_fetch(volatile uint64_t* ptr, uint64_t val)
{
    return (val == 1) ? atomic_dec_64_nv(ptr) : atomic_add_64_nv(ptr, 0 - (int64_t)val);
}

inline int64_t __sync_sub_and_fetch(volatile int64_t* ptr, uint64_t val)
{
    return (val == 1)
        ? atomic_dec_64_nv((volatile uint64_t *) ptr)
        : atomic_add_64_nv((volatile uint64_t *) ptr, 0 - (int64_t)val);
}
# endif

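/*
 * __sync_lock_test_and_set: atomically store val in *ptr and return the
 * previous contents (an atomic exchange, mapped onto atomic_swap_*).
 */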
inline uint8_t __sync_lock_test_and_set(volatile uint8_t* ptr, uint8_t val)
{
    return atomic_swap_8(ptr, val);
}

inline uint16_t __sync_lock_test_and_set(volatile uint16_t* ptr, uint16_t val)
{
    return atomic_swap_16(ptr, val);
}

inline uint32_t __sync_lock_test_and_set(volatile uint32_t* ptr, uint32_t val)
{
    return atomic_swap_32(ptr, val);
}

# if defined(_KERNEL) || defined(_INT64_TYPE)
inline uint64_t __sync_lock_test_and_set(volatile uint64_t* ptr, uint64_t val)
{
    return atomic_swap_64(ptr, val);
}
# endif

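/*
 * __sync_val_compare_and_swap: if *ptr equals old_val, atomically store
 * val there; either way, return the contents *ptr held before the call.
 */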
inline int8_t __sync_val_compare_and_swap(volatile int8_t* ptr,
                                          int8_t old_val, int8_t val)
{
    return (int8_t)atomic_cas_8((volatile uint8_t *)ptr, old_val, val);
}

inline uint8_t __sync_val_compare_and_swap(volatile uint8_t* ptr,
                                           uint8_t old_val, uint8_t val)
{
    return atomic_cas_8(ptr, old_val, val);
}

inline uint16_t __sync_val_compare_and_swap(volatile uint16_t* ptr,
                                            uint16_t old_val, uint16_t val)
{
    return atomic_cas_16(ptr, old_val, val);
}

inline uint32_t __sync_val_compare_and_swap(volatile uint32_t* ptr,
                                            uint32_t old_val, uint32_t val)
{
    return atomic_cas_32(ptr, old_val, val);
}

# if defined(_KERNEL) || defined(_INT64_TYPE)
inline uint64_t __sync_val_compare_and_swap(volatile uint64_t* ptr,
                                            uint64_t old_val, uint64_t val)
{
    return atomic_cas_64(ptr, old_val, val);
}
# endif

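/*
 * __sync_bool_compare_and_swap: same exchange as above, but return whether
 * the swap actually happened, i.e. whether the previous contents matched
 * old_val.
 */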
inline int8_t __sync_bool_compare_and_swap(volatile int8_t* ptr,
                                           int8_t old_val, int8_t val)
{
    /* Cast before comparing so sign extension cannot make equal
     * bit patterns compare unequal. */
    return old_val == (int8_t)atomic_cas_8((volatile uint8_t *)ptr, old_val, val);
}

inline uint8_t __sync_bool_compare_and_swap(volatile uint8_t* ptr,
                                            uint8_t old_val, uint8_t val)
{
    return old_val == atomic_cas_8(ptr, old_val, val);
}

inline uint16_t __sync_bool_compare_and_swap(volatile uint16_t* ptr,
                                             uint16_t old_val, uint16_t val)
{
    return old_val == atomic_cas_16(ptr, old_val, val);
}

inline uint32_t __sync_bool_compare_and_swap(volatile uint32_t* ptr,
                                             uint32_t old_val, uint32_t val)
{
    return old_val == atomic_cas_32(ptr, old_val, val);
}

# if defined(_KERNEL) || defined(_INT64_TYPE)
inline uint64_t __sync_bool_compare_and_swap(volatile uint64_t* ptr,
                                             uint64_t old_val, uint64_t val)
{
    return old_val == atomic_cas_64(ptr, old_val, val);
}
# endif

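/*
 * Minimal single-threaded sanity-check sketch for the wrappers above,
 * assuming a Solaris/illumos <atomic.h> and a compiler (e.g. Sun/Oracle
 * Studio) that does not already provide the __sync_* builtins. The name
 * sync_shim_selftest is illustrative, not part of the original header.
 */
static inline bool sync_shim_selftest(void)
{
    volatile uint32_t counter = 5;
    bool ok = true;
    ok = ok && (__sync_fetch_and_add(&counter, 1u) == 5u);      /* old value */
    ok = ok && (__sync_add_and_fetch(&counter, 2u) == 8u);      /* new value */
    ok = ok && (__sync_sub_and_fetch(&counter, 3u) == 5u);
    ok = ok && (__sync_lock_test_and_set(&counter, 9u) == 5u);  /* exchange */
    ok = ok && (__sync_bool_compare_and_swap(&counter, 9u, 1u) != 0);
    return ok;
}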