/* Internal macros for atomic operations for GNU C Library.
   Copyright (C) 2002-2015 Free Software Foundation, Inc.
   This file is part of the GNU C Library.
   Contributed by Ulrich Drepper <drepper@redhat.com>, 2002.

   The GNU C Library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
   License as published by the Free Software Foundation; either
   version 2.1 of the License, or (at your option) any later version.

   The GNU C Library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with the GNU C Library; if not, see
   <http://www.gnu.org/licenses/>.  */

#ifndef _ATOMIC_H
#define _ATOMIC_H 1

/* This header defines three types of macros:

   - atomic arithmetic and logic operations on memory.  They all
     have the prefix "atomic_".

   - conditionally atomic operations of the same kinds.  These
     always behave identically but can be faster when atomicity
     is not really needed since only one thread has access to
     the memory location.  In that case the code is slower in
     the multi-thread case.  The interfaces have the prefix
     "catomic_".

   - support functions like barriers.  They also have the prefix
     "atomic_".

   Architectures must provide a few lowlevel macros (the compare
   and exchange definitions).  All others are optional.  They
   should only be provided if the architecture has specific
   support for the operation.

   As <atomic.h> macros are usually heavily nested and often use local
   variables to make sure side-effects are evaluated properly, use for
   macro local variables a per-macro unique prefix.  This file uses
   __atgN_ prefix where N is different in each macro.  */

#include <stdlib.h>

#include <bits/atomic.h>

/* Wrapper macros to call pre_NN_post (mem, ...) where NN is the
   bit width of *MEM.  The calling macro puts parens around MEM
   and following args.  */
#define __atomic_val_bysize(pre, post, mem, ...) \
  ({ \
    __typeof (*mem) __atg1_result; \
    if (sizeof (*mem) == 1) \
      __atg1_result = pre##_8_##post (mem, __VA_ARGS__); \
    else if (sizeof (*mem) == 2) \
      __atg1_result = pre##_16_##post (mem, __VA_ARGS__); \
    else if (sizeof (*mem) == 4) \
      __atg1_result = pre##_32_##post (mem, __VA_ARGS__); \
    else if (sizeof (*mem) == 8) \
      __atg1_result = pre##_64_##post (mem, __VA_ARGS__); \
    else \
      abort (); \
    __atg1_result; \
  })

#define __atomic_bool_bysize(pre, post, mem, ...) \
  ({ \
    int __atg2_result; \
    if (sizeof (*mem) == 1) \
      __atg2_result = pre##_8_##post (mem, __VA_ARGS__); \
    else if (sizeof (*mem) == 2) \
      __atg2_result = pre##_16_##post (mem, __VA_ARGS__); \
    else if (sizeof (*mem) == 4) \
      __atg2_result = pre##_32_##post (mem, __VA_ARGS__); \
    else if (sizeof (*mem) == 8) \
      __atg2_result = pre##_64_##post (mem, __VA_ARGS__); \
    else \
      abort (); \
    __atg2_result; \
  })
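
/* Editor's note (illustration, not part of the original header): for a
   4-byte *MEM, a call such as

     __atomic_val_bysize (__arch_compare_and_exchange_val, acq,
                          (mem), newval, oldval)

   takes the sizeof-selected branch at compile time and token-pastes

     __arch_compare_and_exchange_val_32_acq ((mem), newval, oldval)

   i.e. one of the lowlevel macros each architecture must provide.  */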

/* Atomically store NEWVAL in *MEM if *MEM is equal to OLDVAL.
   Return the old *MEM value.  */
#if !defined atomic_compare_and_exchange_val_acq \
    && defined __arch_compare_and_exchange_val_32_acq
# define atomic_compare_and_exchange_val_acq(mem, newval, oldval) \
  __atomic_val_bysize (__arch_compare_and_exchange_val,acq, \
                       mem, newval, oldval)
#endif

#ifndef catomic_compare_and_exchange_val_acq
# ifdef __arch_c_compare_and_exchange_val_32_acq
#  define catomic_compare_and_exchange_val_acq(mem, newval, oldval) \
  __atomic_val_bysize (__arch_c_compare_and_exchange_val,acq, \
                       mem, newval, oldval)
# else
#  define catomic_compare_and_exchange_val_acq(mem, newval, oldval) \
  atomic_compare_and_exchange_val_acq (mem, newval, oldval)
# endif
#endif

#ifndef catomic_compare_and_exchange_val_rel
# ifndef atomic_compare_and_exchange_val_rel
#  define catomic_compare_and_exchange_val_rel(mem, newval, oldval) \
  catomic_compare_and_exchange_val_acq (mem, newval, oldval)
# else
#  define catomic_compare_and_exchange_val_rel(mem, newval, oldval) \
  atomic_compare_and_exchange_val_rel (mem, newval, oldval)
# endif
#endif

#ifndef atomic_compare_and_exchange_val_rel
# define atomic_compare_and_exchange_val_rel(mem, newval, oldval) \
  atomic_compare_and_exchange_val_acq (mem, newval, oldval)
#endif
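
/* Usage sketch (editor's illustration; `lock' is a hypothetical int
   that is 0 when free): a minimal spin-lock acquire built on the
   value-returning CAS.  The acquire ordering keeps the critical
   section's accesses from being moved before the lock is taken.

     while (atomic_compare_and_exchange_val_acq (&lock, 1, 0) != 0)
       atomic_delay ();
*/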

/* Atomically store NEWVAL in *MEM if *MEM is equal to OLDVAL.
   Return zero if *MEM was changed or non-zero if no exchange happened.  */
#ifndef atomic_compare_and_exchange_bool_acq
# ifdef __arch_compare_and_exchange_bool_32_acq
#  define atomic_compare_and_exchange_bool_acq(mem, newval, oldval) \
  __atomic_bool_bysize (__arch_compare_and_exchange_bool,acq, \
                        mem, newval, oldval)
# else
#  define atomic_compare_and_exchange_bool_acq(mem, newval, oldval) \
  ({ /* Cannot use __oldval here, because macros later in this file might \
        call this macro with __oldval argument.  */ \
     __typeof (oldval) __atg3_old = (oldval); \
     atomic_compare_and_exchange_val_acq (mem, newval, __atg3_old) \
       != __atg3_old; \
  })
# endif
#endif

#ifndef catomic_compare_and_exchange_bool_acq
# ifdef __arch_c_compare_and_exchange_bool_32_acq
#  define catomic_compare_and_exchange_bool_acq(mem, newval, oldval) \
  __atomic_bool_bysize (__arch_c_compare_and_exchange_bool,acq, \
                        mem, newval, oldval)
# else
#  define catomic_compare_and_exchange_bool_acq(mem, newval, oldval) \
  ({ /* Cannot use __oldval here, because macros later in this file might \
        call this macro with __oldval argument.  */ \
     __typeof (oldval) __atg4_old = (oldval); \
     catomic_compare_and_exchange_val_acq (mem, newval, __atg4_old) \
       != __atg4_old; \
  })
# endif
#endif

#ifndef catomic_compare_and_exchange_bool_rel
# ifndef atomic_compare_and_exchange_bool_rel
#  define catomic_compare_and_exchange_bool_rel(mem, newval, oldval) \
  catomic_compare_and_exchange_bool_acq (mem, newval, oldval)
# else
#  define catomic_compare_and_exchange_bool_rel(mem, newval, oldval) \
  atomic_compare_and_exchange_bool_rel (mem, newval, oldval)
# endif
#endif

#ifndef atomic_compare_and_exchange_bool_rel
# define atomic_compare_and_exchange_bool_rel(mem, newval, oldval) \
  atomic_compare_and_exchange_bool_acq (mem, newval, oldval)
#endif
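
/* Usage sketch (editor's illustration; `x', `newval', and `oldval' are
   hypothetical): the boolean form returns zero on success, which is
   what the retry loops below rely on.

     if (atomic_compare_and_exchange_bool_acq (&x, newval, oldval) == 0)
       ...the exchange happened...
     else
       ...*MEM did not contain oldval; retry or bail out...
*/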

/* Store NEWVALUE in *MEM and return the old value.  */
#ifndef atomic_exchange_acq
# define atomic_exchange_acq(mem, newvalue) \
  ({ __typeof (*(mem)) __atg5_oldval; \
     __typeof (mem) __atg5_memp = (mem); \
     __typeof (*(mem)) __atg5_value = (newvalue); \
 \
     do \
       __atg5_oldval = *__atg5_memp; \
     while (__builtin_expect \
            (atomic_compare_and_exchange_bool_acq (__atg5_memp, __atg5_value, \
                                                   __atg5_oldval), 0)); \
 \
     __atg5_oldval; })
#endif

#ifndef atomic_exchange_rel
# define atomic_exchange_rel(mem, newvalue) atomic_exchange_acq (mem, newvalue)
#endif

/* Add VALUE to *MEM and return the old value of *MEM.  */
#ifndef atomic_exchange_and_add_acq
# ifdef atomic_exchange_and_add
#  define atomic_exchange_and_add_acq(mem, value) \
  atomic_exchange_and_add (mem, value)
# else
#  define atomic_exchange_and_add_acq(mem, value) \
  ({ __typeof (*(mem)) __atg6_oldval; \
     __typeof (mem) __atg6_memp = (mem); \
     __typeof (*(mem)) __atg6_value = (value); \
 \
     do \
       __atg6_oldval = *__atg6_memp; \
     while (__builtin_expect \
            (atomic_compare_and_exchange_bool_acq (__atg6_memp, \
                                                   __atg6_oldval \
                                                   + __atg6_value, \
                                                   __atg6_oldval), 0)); \
 \
     __atg6_oldval; })
# endif
#endif

#ifndef atomic_exchange_and_add_rel
# define atomic_exchange_and_add_rel(mem, value) \
  atomic_exchange_and_add_acq(mem, value)
#endif

#ifndef atomic_exchange_and_add
# define atomic_exchange_and_add(mem, value) \
  atomic_exchange_and_add_acq(mem, value)
#endif

#ifndef catomic_exchange_and_add
# define catomic_exchange_and_add(mem, value) \
  ({ __typeof (*(mem)) __atg7_oldv; \
     __typeof (mem) __atg7_memp = (mem); \
     __typeof (*(mem)) __atg7_value = (value); \
 \
     do \
       __atg7_oldv = *__atg7_memp; \
     while (__builtin_expect \
            (catomic_compare_and_exchange_bool_acq (__atg7_memp, \
                                                    __atg7_oldv \
                                                    + __atg7_value, \
                                                    __atg7_oldv), 0)); \
 \
     __atg7_oldv; })
#endif
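
/* Usage sketch (editor's illustration; `nusers' is a hypothetical
   counter): atomic_exchange_and_add is a fetch-and-add, so the result
   is the value *MEM held before the addition.

     int old = atomic_exchange_and_add (&nusers, 1);
     ...old is the count before this increment...
     atomic_exchange_and_add (&nusers, -1);
*/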

#ifndef atomic_max
# define atomic_max(mem, value) \
  do { \
    __typeof (*(mem)) __atg8_oldval; \
    __typeof (mem) __atg8_memp = (mem); \
    __typeof (*(mem)) __atg8_value = (value); \
    do { \
      __atg8_oldval = *__atg8_memp; \
      if (__atg8_oldval >= __atg8_value) \
        break; \
    } while (__builtin_expect \
             (atomic_compare_and_exchange_bool_acq (__atg8_memp, __atg8_value, \
                                                    __atg8_oldval), 0)); \
  } while (0)
#endif

#ifndef catomic_max
# define catomic_max(mem, value) \
  do { \
    __typeof (*(mem)) __atg9_oldv; \
    __typeof (mem) __atg9_memp = (mem); \
    __typeof (*(mem)) __atg9_value = (value); \
    do { \
      __atg9_oldv = *__atg9_memp; \
      if (__atg9_oldv >= __atg9_value) \
        break; \
    } while (__builtin_expect \
             (catomic_compare_and_exchange_bool_acq (__atg9_memp, \
                                                     __atg9_value, \
                                                     __atg9_oldv), 0)); \
  } while (0)
#endif

#ifndef atomic_min
# define atomic_min(mem, value) \
  do { \
    __typeof (*(mem)) __atg10_oldval; \
    __typeof (mem) __atg10_memp = (mem); \
    __typeof (*(mem)) __atg10_value = (value); \
    do { \
      __atg10_oldval = *__atg10_memp; \
      if (__atg10_oldval <= __atg10_value) \
        break; \
    } while (__builtin_expect \
             (atomic_compare_and_exchange_bool_acq (__atg10_memp, \
                                                    __atg10_value, \
                                                    __atg10_oldval), 0)); \
  } while (0)
#endif

#ifndef atomic_add
# define atomic_add(mem, value) (void) atomic_exchange_and_add ((mem), (value))
#endif

#ifndef catomic_add
# define catomic_add(mem, value) \
  (void) catomic_exchange_and_add ((mem), (value))
#endif

#ifndef atomic_increment
# define atomic_increment(mem) atomic_add ((mem), 1)
#endif

#ifndef catomic_increment
# define catomic_increment(mem) catomic_add ((mem), 1)
#endif

#ifndef atomic_increment_val
# define atomic_increment_val(mem) (atomic_exchange_and_add ((mem), 1) + 1)
#endif

#ifndef catomic_increment_val
# define catomic_increment_val(mem) (catomic_exchange_and_add ((mem), 1) + 1)
#endif

/* Add one to *MEM and return true iff it's now zero.  */
#ifndef atomic_increment_and_test
# define atomic_increment_and_test(mem) \
  (atomic_exchange_and_add ((mem), 1) + 1 == 0)
#endif

#ifndef atomic_decrement
# define atomic_decrement(mem) atomic_add ((mem), -1)
#endif

#ifndef catomic_decrement
# define catomic_decrement(mem) catomic_add ((mem), -1)
#endif

#ifndef atomic_decrement_val
# define atomic_decrement_val(mem) (atomic_exchange_and_add ((mem), -1) - 1)
#endif

#ifndef catomic_decrement_val
# define catomic_decrement_val(mem) (catomic_exchange_and_add ((mem), -1) - 1)
#endif

/* Subtract 1 from *MEM and return true iff it's now zero.  */
#ifndef atomic_decrement_and_test
# define atomic_decrement_and_test(mem) \
  (atomic_exchange_and_add ((mem), -1) == 1)
#endif
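
/* Usage sketch (editor's illustration; `obj' with an int member `refs'
   is hypothetical): the classic reference-count release.  Exactly one
   thread, the one whose decrement takes the count from 1 to 0, sees a
   true result and may reclaim the object.

     if (atomic_decrement_and_test (&obj->refs))
       free (obj);
*/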

/* Decrement *MEM if it is > 0, and return the old value.  */
#ifndef atomic_decrement_if_positive
# define atomic_decrement_if_positive(mem) \
  ({ __typeof (*(mem)) __atg11_oldval; \
     __typeof (mem) __atg11_memp = (mem); \
 \
     do \
       { \
         __atg11_oldval = *__atg11_memp; \
         if (__builtin_expect (__atg11_oldval <= 0, 0)) \
           break; \
       } \
     while (__builtin_expect \
            (atomic_compare_and_exchange_bool_acq (__atg11_memp, \
                                                   __atg11_oldval - 1, \
                                                   __atg11_oldval), 0)); \
     __atg11_oldval; })
#endif
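
/* Usage sketch (editor's illustration; `sem_value' is a hypothetical
   count of available tokens): a semaphore-style trywait.  The macro
   never drives the count below zero, and the returned old value tells
   the caller whether it actually took a token.

     if (atomic_decrement_if_positive (&sem_value) > 0)
       ...got a token...
     else
       ...none available; would block...
*/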

#ifndef atomic_add_negative
# define atomic_add_negative(mem, value) \
  ({ __typeof (value) __atg12_value = (value); \
     atomic_exchange_and_add (mem, __atg12_value) < -__atg12_value; })
#endif

#ifndef atomic_add_zero
# define atomic_add_zero(mem, value) \
  ({ __typeof (value) __atg13_value = (value); \
     atomic_exchange_and_add (mem, __atg13_value) == -__atg13_value; })
#endif

#ifndef atomic_bit_set
# define atomic_bit_set(mem, bit) \
  (void) atomic_bit_test_set(mem, bit)
#endif

#ifndef atomic_bit_test_set
# define atomic_bit_test_set(mem, bit) \
  ({ __typeof (*(mem)) __atg14_old; \
     __typeof (mem) __atg14_memp = (mem); \
     __typeof (*(mem)) __atg14_mask = ((__typeof (*(mem))) 1 << (bit)); \
 \
     do \
       __atg14_old = (*__atg14_memp); \
     while (__builtin_expect \
            (atomic_compare_and_exchange_bool_acq (__atg14_memp, \
                                                   __atg14_old | __atg14_mask, \
                                                   __atg14_old), 0)); \
 \
     __atg14_old & __atg14_mask; })
#endif
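
/* Usage sketch (editor's illustration; `flags' is a hypothetical word,
   bit 0 a one-shot marker): a nonzero result from atomic_bit_test_set
   means the bit was already set by someone else.

     if (atomic_bit_test_set (&flags, 0) == 0)
       ...we set the bit first; do the one-time initialization...
*/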

/* Atomically *mem &= mask.  */
#ifndef atomic_and
# define atomic_and(mem, mask) \
  do { \
    __typeof (*(mem)) __atg15_old; \
    __typeof (mem) __atg15_memp = (mem); \
    __typeof (*(mem)) __atg15_mask = (mask); \
 \
    do \
      __atg15_old = (*__atg15_memp); \
    while (__builtin_expect \
           (atomic_compare_and_exchange_bool_acq (__atg15_memp, \
                                                  __atg15_old & __atg15_mask, \
                                                  __atg15_old), 0)); \
  } while (0)
#endif

#ifndef catomic_and
# define catomic_and(mem, mask) \
  do { \
    __typeof (*(mem)) __atg20_old; \
    __typeof (mem) __atg20_memp = (mem); \
    __typeof (*(mem)) __atg20_mask = (mask); \
 \
    do \
      __atg20_old = (*__atg20_memp); \
    while (__builtin_expect \
           (catomic_compare_and_exchange_bool_acq (__atg20_memp, \
                                                   __atg20_old & __atg20_mask, \
                                                   __atg20_old), 0)); \
  } while (0)
#endif

/* Atomically *mem &= mask and return the old value of *mem.  */
#ifndef atomic_and_val
# define atomic_and_val(mem, mask) \
  ({ __typeof (*(mem)) __atg16_old; \
     __typeof (mem) __atg16_memp = (mem); \
     __typeof (*(mem)) __atg16_mask = (mask); \
 \
     do \
       __atg16_old = (*__atg16_memp); \
     while (__builtin_expect \
            (atomic_compare_and_exchange_bool_acq (__atg16_memp, \
                                                   __atg16_old & __atg16_mask, \
                                                   __atg16_old), 0)); \
 \
     __atg16_old; })
#endif

/* Atomically *mem |= mask.  */
#ifndef atomic_or
# define atomic_or(mem, mask) \
  do { \
    __typeof (*(mem)) __atg17_old; \
    __typeof (mem) __atg17_memp = (mem); \
    __typeof (*(mem)) __atg17_mask = (mask); \
 \
    do \
      __atg17_old = (*__atg17_memp); \
    while (__builtin_expect \
           (atomic_compare_and_exchange_bool_acq (__atg17_memp, \
                                                  __atg17_old | __atg17_mask, \
                                                  __atg17_old), 0)); \
  } while (0)
#endif

#ifndef catomic_or
# define catomic_or(mem, mask) \
  do { \
    __typeof (*(mem)) __atg18_old; \
    __typeof (mem) __atg18_memp = (mem); \
    __typeof (*(mem)) __atg18_mask = (mask); \
 \
    do \
      __atg18_old = (*__atg18_memp); \
    while (__builtin_expect \
           (catomic_compare_and_exchange_bool_acq (__atg18_memp, \
                                                   __atg18_old | __atg18_mask, \
                                                   __atg18_old), 0)); \
  } while (0)
#endif

/* Atomically *mem |= mask and return the old value of *mem.  */
#ifndef atomic_or_val
# define atomic_or_val(mem, mask) \
  ({ __typeof (*(mem)) __atg19_old; \
     __typeof (mem) __atg19_memp = (mem); \
     __typeof (*(mem)) __atg19_mask = (mask); \
 \
     do \
       __atg19_old = (*__atg19_memp); \
     while (__builtin_expect \
            (atomic_compare_and_exchange_bool_acq (__atg19_memp, \
                                                   __atg19_old | __atg19_mask, \
                                                   __atg19_old), 0)); \
 \
     __atg19_old; })
#endif
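
/* Usage sketch (editor's illustration; `state' and CLOSED_BIT are
   hypothetical): atomic_or_val is a fetch-or, so the old value reveals
   which bits were set before this call.

     if (atomic_or_val (&state, CLOSED_BIT) & CLOSED_BIT)
       ...another thread had already closed it...
*/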

#ifndef atomic_full_barrier
# define atomic_full_barrier() __asm__ ("" ::: "memory")
#endif

#ifndef atomic_read_barrier
# define atomic_read_barrier() atomic_full_barrier ()
#endif

#ifndef atomic_write_barrier
# define atomic_write_barrier() atomic_full_barrier ()
#endif

#ifndef atomic_forced_read
# define atomic_forced_read(x) \
  ({ __typeof (x) __x; __asm__ ("" : "=r" (__x) : "0" (x)); __x; })
#endif
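
/* Editor's note: atomic_forced_read funnels X through a register with
   an empty asm (the "0" constraint ties the input to the output), so X
   is read exactly once and later uses of the result cannot be replaced
   by a fresh re-read of X.  A typical use is the initial read before a
   CAS retry loop; `head' is a hypothetical shared variable:

     __typeof (head) h = atomic_forced_read (head);
*/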

/* The following functions are a subset of the atomic operations provided by
   C11.  Usually, a function named atomic_OP_MO(args) is equivalent to C11's
   atomic_OP_explicit(args, memory_order_MO); exceptions noted below.  */

/* Each arch can request to use compiler built-ins for C11 atomics.  If it
   does, all atomics will be based on these.  */
#if defined USE_ATOMIC_COMPILER_BUILTINS

/* We require 32b atomic operations; some archs also support 64b atomic
   operations.  */
void __atomic_link_error (void);
# if defined(__HAVE_64B_ATOMICS) && __HAVE_64B_ATOMICS
#  define __atomic_check_size(mem) \
   if ((sizeof (*mem) != 4) && (sizeof (*mem) != 8)) \
     __atomic_link_error ();
# else
#  define __atomic_check_size(mem) \
   if (sizeof (*mem) != 4) \
     __atomic_link_error ();
# endif

# define atomic_thread_fence_acquire() \
  __atomic_thread_fence (__ATOMIC_ACQUIRE)
# define atomic_thread_fence_release() \
  __atomic_thread_fence (__ATOMIC_RELEASE)
# define atomic_thread_fence_seq_cst() \
  __atomic_thread_fence (__ATOMIC_SEQ_CST)

# define atomic_load_relaxed(mem) \
  ({ __atomic_check_size((mem)); __atomic_load_n ((mem), __ATOMIC_RELAXED); })
# define atomic_load_acquire(mem) \
  ({ __atomic_check_size((mem)); __atomic_load_n ((mem), __ATOMIC_ACQUIRE); })

# define atomic_store_relaxed(mem, val) \
  do { \
    __atomic_check_size((mem)); \
    __atomic_store_n ((mem), (val), __ATOMIC_RELAXED); \
  } while (0)
# define atomic_store_release(mem, val) \
  do { \
    __atomic_check_size((mem)); \
    __atomic_store_n ((mem), (val), __ATOMIC_RELEASE); \
  } while (0)
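
/* Usage sketch (editor's illustration; `data' and `ready' are
   hypothetical shared ints): the canonical release/acquire pairing.

     Producer:
       data = 42;
       atomic_store_release (&ready, 1);

     Consumer:
       while (atomic_load_acquire (&ready) == 0)
         atomic_delay ();
       ...data is guaranteed to read as 42 here...

   The release store orders the write to data before the flag store;
   the acquire load orders the flag read before the later data read.  */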

/* On failure, this CAS has memory_order_relaxed semantics.  */
# define atomic_compare_exchange_weak_relaxed(mem, expected, desired) \
  ({ __atomic_check_size((mem)); \
     __atomic_compare_exchange_n ((mem), (expected), (desired), 1, \
                                  __ATOMIC_RELAXED, __ATOMIC_RELAXED); })
# define atomic_compare_exchange_weak_acquire(mem, expected, desired) \
  ({ __atomic_check_size((mem)); \
     __atomic_compare_exchange_n ((mem), (expected), (desired), 1, \
                                  __ATOMIC_ACQUIRE, __ATOMIC_RELAXED); })
# define atomic_compare_exchange_weak_release(mem, expected, desired) \
  ({ __atomic_check_size((mem)); \
     __atomic_compare_exchange_n ((mem), (expected), (desired), 1, \
                                  __ATOMIC_RELEASE, __ATOMIC_RELAXED); })

# define atomic_exchange_acquire(mem, desired) \
  ({ __atomic_check_size((mem)); \
     __atomic_exchange_n ((mem), (desired), __ATOMIC_ACQUIRE); })
# define atomic_exchange_release(mem, desired) \
  ({ __atomic_check_size((mem)); \
     __atomic_exchange_n ((mem), (desired), __ATOMIC_RELEASE); })

# define atomic_fetch_add_relaxed(mem, operand) \
  ({ __atomic_check_size((mem)); \
     __atomic_fetch_add ((mem), (operand), __ATOMIC_RELAXED); })
# define atomic_fetch_add_acquire(mem, operand) \
  ({ __atomic_check_size((mem)); \
     __atomic_fetch_add ((mem), (operand), __ATOMIC_ACQUIRE); })
# define atomic_fetch_add_release(mem, operand) \
  ({ __atomic_check_size((mem)); \
     __atomic_fetch_add ((mem), (operand), __ATOMIC_RELEASE); })
# define atomic_fetch_add_acq_rel(mem, operand) \
  ({ __atomic_check_size((mem)); \
     __atomic_fetch_add ((mem), (operand), __ATOMIC_ACQ_REL); })

# define atomic_fetch_and_acquire(mem, operand) \
  ({ __atomic_check_size((mem)); \
     __atomic_fetch_and ((mem), (operand), __ATOMIC_ACQUIRE); })

# define atomic_fetch_or_relaxed(mem, operand) \
  ({ __atomic_check_size((mem)); \
     __atomic_fetch_or ((mem), (operand), __ATOMIC_RELAXED); })
# define atomic_fetch_or_acquire(mem, operand) \
  ({ __atomic_check_size((mem)); \
     __atomic_fetch_or ((mem), (operand), __ATOMIC_ACQUIRE); })

#else /* !USE_ATOMIC_COMPILER_BUILTINS */

/* By default, we assume that read, write, and full barriers are equivalent
   to acquire, release, and seq_cst barriers.  Archs for which this does not
   hold have to provide custom definitions of the fences.  */
# ifndef atomic_thread_fence_acquire
#  define atomic_thread_fence_acquire() atomic_read_barrier ()
# endif
# ifndef atomic_thread_fence_release
#  define atomic_thread_fence_release() atomic_write_barrier ()
# endif
# ifndef atomic_thread_fence_seq_cst
#  define atomic_thread_fence_seq_cst() atomic_full_barrier ()
# endif

# ifndef atomic_load_relaxed
#  define atomic_load_relaxed(mem) \
   ({ __typeof (*(mem)) __atg100_val; \
      __asm__ ("" : "=r" (__atg100_val) : "0" (*(mem))); \
      __atg100_val; })
# endif
# ifndef atomic_load_acquire
#  define atomic_load_acquire(mem) \
   ({ __typeof (*(mem)) __atg101_val = atomic_load_relaxed (mem); \
      atomic_thread_fence_acquire (); \
      __atg101_val; })
# endif

# ifndef atomic_store_relaxed
/* XXX Use inline asm here?  */
#  define atomic_store_relaxed(mem, val) do { *(mem) = (val); } while (0)
# endif
# ifndef atomic_store_release
#  define atomic_store_release(mem, val) \
   do { \
     atomic_thread_fence_release (); \
     atomic_store_relaxed ((mem), (val)); \
   } while (0)
# endif

/* On failure, this CAS has memory_order_relaxed semantics.  */
/* XXX This potentially has one branch more than necessary, but archs
   currently do not define a CAS that returns both the previous value and
   the success flag.  */
# ifndef atomic_compare_exchange_weak_acquire
#  define atomic_compare_exchange_weak_acquire(mem, expected, desired) \
   ({ __typeof (*(expected)) __atg102_expected = *(expected); \
      *(expected) = \
        atomic_compare_and_exchange_val_acq ((mem), (desired), *(expected)); \
      *(expected) == __atg102_expected; })
# endif
# ifndef atomic_compare_exchange_weak_relaxed
/* XXX Fall back to CAS with acquire MO because archs do not define a weaker
   CAS.  */
#  define atomic_compare_exchange_weak_relaxed(mem, expected, desired) \
   atomic_compare_exchange_weak_acquire ((mem), (expected), (desired))
# endif
# ifndef atomic_compare_exchange_weak_release
#  define atomic_compare_exchange_weak_release(mem, expected, desired) \
   ({ __typeof (*(expected)) __atg103_expected = *(expected); \
      *(expected) = \
        atomic_compare_and_exchange_val_rel ((mem), (desired), *(expected)); \
      *(expected) == __atg103_expected; })
# endif
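
/* Usage sketch (editor's illustration; `futex' is a hypothetical int):
   like the C11 operation, these weak-CAS macros update *EXPECTED on
   failure, so a retry loop gets the freshly observed value for free.

     int expected = 0;
     while (!atomic_compare_exchange_weak_acquire (&futex, &expected, 1))
       ...expected now holds what was actually seen; adjust and retry...
*/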

# ifndef atomic_exchange_acquire
#  define atomic_exchange_acquire(mem, val) \
   atomic_exchange_acq ((mem), (val))
# endif
# ifndef atomic_exchange_release
#  define atomic_exchange_release(mem, val) \
   atomic_exchange_rel ((mem), (val))
# endif

# ifndef atomic_fetch_add_acquire
#  define atomic_fetch_add_acquire(mem, operand) \
   atomic_exchange_and_add_acq ((mem), (operand))
# endif
# ifndef atomic_fetch_add_relaxed
/* XXX Fall back to acquire MO because the MO semantics of
   atomic_exchange_and_add are not documented; the generic version falls back
   to atomic_exchange_and_add_acq if atomic_exchange_and_add is not defined,
   and vice versa.  */
#  define atomic_fetch_add_relaxed(mem, operand) \
   atomic_fetch_add_acquire ((mem), (operand))
# endif
# ifndef atomic_fetch_add_release
#  define atomic_fetch_add_release(mem, operand) \
   atomic_exchange_and_add_rel ((mem), (operand))
# endif
# ifndef atomic_fetch_add_acq_rel
#  define atomic_fetch_add_acq_rel(mem, operand) \
   ({ atomic_thread_fence_release (); \
      atomic_exchange_and_add_acq ((mem), (operand)); })
# endif

/* XXX The default for atomic_and_val has acquire semantics, but this is not
   documented.  */
# ifndef atomic_fetch_and_acquire
#  define atomic_fetch_and_acquire(mem, operand) \
   atomic_and_val ((mem), (operand))
# endif

/* XXX The default for atomic_or_val has acquire semantics, but this is not
   documented.  */
# ifndef atomic_fetch_or_acquire
#  define atomic_fetch_or_acquire(mem, operand) \
   atomic_or_val ((mem), (operand))
# endif
/* XXX Fall back to acquire MO because archs do not define a weaker
   atomic_or_val.  */
# ifndef atomic_fetch_or_relaxed
#  define atomic_fetch_or_relaxed(mem, operand) \
   atomic_fetch_or_acquire ((mem), (operand))
# endif

#endif /* !USE_ATOMIC_COMPILER_BUILTINS */

#ifndef atomic_delay
# define atomic_delay() do { /* nothing */ } while (0)
#endif

#endif /* atomic.h */