/* Internal macros for atomic operations for GNU C Library.
   Copyright (C) 2002-2015 Free Software Foundation, Inc.
   This file is part of the GNU C Library.
   Contributed by Ulrich Drepper <drepper@redhat.com>, 2002.

   The GNU C Library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
   License as published by the Free Software Foundation; either
   version 2.1 of the License, or (at your option) any later version.

   The GNU C Library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with the GNU C Library; if not, see
   <http://www.gnu.org/licenses/>.  */
#ifndef _ATOMIC_H
#define _ATOMIC_H 1

/* This header defines three types of macros:

   - atomic arithmetic and logic operations on memory.  They all
     have the prefix "atomic_".

   - conditionally atomic operations of the same kinds.  These
     always behave identically but can be faster when atomicity
     is not really needed since only one thread has access to
     the memory location.  In return, the code is slower in
     the multi-thread case.  The interfaces have the prefix
     "catomic_".

   - support functions like barriers.  They also have the prefix
     "atomic_".

   Architectures must provide a few lowlevel macros (the compare
   and exchange definitions).  All others are optional.  They
   should only be provided if the architecture has specific
   support for the operation.

   As <atomic.h> macros are usually heavily nested and often use local
   variables to make sure side-effects are evaluated properly, use a
   per-macro unique prefix for macro-local variables.  This file uses
   the __atgN_ prefix, where N is different in each macro.  */

#include <stdlib.h>

#include <bits/atomic.h>
/* Wrapper macros to call pre_NN_post (mem, ...) where NN is the
   bit width of *MEM.  The calling macro puts parens around MEM
   and the following args.  */
#define __atomic_val_bysize(pre, post, mem, ...) \
  ({ \
    __typeof (*mem) __atg1_result; \
    if (sizeof (*mem) == 1) \
      __atg1_result = pre##_8_##post (mem, __VA_ARGS__); \
    else if (sizeof (*mem) == 2) \
      __atg1_result = pre##_16_##post (mem, __VA_ARGS__); \
    else if (sizeof (*mem) == 4) \
      __atg1_result = pre##_32_##post (mem, __VA_ARGS__); \
    else if (sizeof (*mem) == 8) \
      __atg1_result = pre##_64_##post (mem, __VA_ARGS__); \
    else \
      abort (); \
    __atg1_result; \
  })

#define __atomic_bool_bysize(pre, post, mem, ...) \
  ({ \
    int __atg2_result; \
    if (sizeof (*mem) == 1) \
      __atg2_result = pre##_8_##post (mem, __VA_ARGS__); \
    else if (sizeof (*mem) == 2) \
      __atg2_result = pre##_16_##post (mem, __VA_ARGS__); \
    else if (sizeof (*mem) == 4) \
      __atg2_result = pre##_32_##post (mem, __VA_ARGS__); \
    else if (sizeof (*mem) == 8) \
      __atg2_result = pre##_64_##post (mem, __VA_ARGS__); \
    else \
      abort (); \
    __atg2_result; \
  })
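
/* Illustrative note (not part of the original header): with the wrappers
   above, a call such as

     __atomic_val_bysize (__arch_compare_and_exchange_val, acq,
                          mem, newval, oldval)

   dispatches on sizeof (*mem), so for a 4-byte object it expands to
   __arch_compare_and_exchange_val_32_acq (mem, newval, oldval); an
   unsupported size aborts at run time.  */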
/* Atomically store NEWVAL in *MEM if *MEM is equal to OLDVAL.
   Return the old *MEM value.  */
#if !defined atomic_compare_and_exchange_val_acq \
    && defined __arch_compare_and_exchange_val_32_acq
# define atomic_compare_and_exchange_val_acq(mem, newval, oldval) \
  __atomic_val_bysize (__arch_compare_and_exchange_val, acq, \
                       mem, newval, oldval)
#endif

#ifndef catomic_compare_and_exchange_val_acq
# ifdef __arch_c_compare_and_exchange_val_32_acq
#  define catomic_compare_and_exchange_val_acq(mem, newval, oldval) \
  __atomic_val_bysize (__arch_c_compare_and_exchange_val, acq, \
                       mem, newval, oldval)
# else
#  define catomic_compare_and_exchange_val_acq(mem, newval, oldval) \
  atomic_compare_and_exchange_val_acq (mem, newval, oldval)
# endif
#endif

#ifndef catomic_compare_and_exchange_val_rel
# ifndef atomic_compare_and_exchange_val_rel
#  define catomic_compare_and_exchange_val_rel(mem, newval, oldval) \
  catomic_compare_and_exchange_val_acq (mem, newval, oldval)
# else
#  define catomic_compare_and_exchange_val_rel(mem, newval, oldval) \
  atomic_compare_and_exchange_val_rel (mem, newval, oldval)
# endif
#endif

#ifndef atomic_compare_and_exchange_val_rel
# define atomic_compare_and_exchange_val_rel(mem, newval, oldval) \
  atomic_compare_and_exchange_val_acq (mem, newval, oldval)
#endif

/* Atomically store NEWVAL in *MEM if *MEM is equal to OLDVAL.
   Return zero if *MEM was changed or non-zero if no exchange happened.  */
#ifndef atomic_compare_and_exchange_bool_acq
# ifdef __arch_compare_and_exchange_bool_32_acq
#  define atomic_compare_and_exchange_bool_acq(mem, newval, oldval) \
  __atomic_bool_bysize (__arch_compare_and_exchange_bool, acq, \
                        mem, newval, oldval)
# else
#  define atomic_compare_and_exchange_bool_acq(mem, newval, oldval) \
  ({ /* Cannot use __oldval here, because macros later in this file might \
        call this macro with __oldval argument.  */ \
     __typeof (oldval) __atg3_old = (oldval); \
     atomic_compare_and_exchange_val_acq (mem, newval, __atg3_old) \
       != __atg3_old; \
  })
# endif
#endif

#ifndef catomic_compare_and_exchange_bool_acq
# ifdef __arch_c_compare_and_exchange_bool_32_acq
#  define catomic_compare_and_exchange_bool_acq(mem, newval, oldval) \
  __atomic_bool_bysize (__arch_c_compare_and_exchange_bool, acq, \
                        mem, newval, oldval)
# else
#  define catomic_compare_and_exchange_bool_acq(mem, newval, oldval) \
  ({ /* Cannot use __oldval here, because macros later in this file might \
        call this macro with __oldval argument.  */ \
     __typeof (oldval) __atg4_old = (oldval); \
     catomic_compare_and_exchange_val_acq (mem, newval, __atg4_old) \
       != __atg4_old; \
  })
# endif
#endif

#ifndef catomic_compare_and_exchange_bool_rel
# ifndef atomic_compare_and_exchange_bool_rel
#  define catomic_compare_and_exchange_bool_rel(mem, newval, oldval) \
  catomic_compare_and_exchange_bool_acq (mem, newval, oldval)
# else
#  define catomic_compare_and_exchange_bool_rel(mem, newval, oldval) \
  atomic_compare_and_exchange_bool_rel (mem, newval, oldval)
# endif
#endif

#ifndef atomic_compare_and_exchange_bool_rel
# define atomic_compare_and_exchange_bool_rel(mem, newval, oldval) \
  atomic_compare_and_exchange_bool_acq (mem, newval, oldval)
#endif
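
/* Usage sketch (illustrative only; VAL is a hypothetical int): the bool
   variant suits retry loops, since it returns non-zero exactly when no
   exchange happened:

     int old;
     do
       old = val;
     while (atomic_compare_and_exchange_bool_acq (&val, old + 1, old));
*/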
/* Store NEWVALUE in *MEM and return the old value.  */
#ifndef atomic_exchange_acq
# define atomic_exchange_acq(mem, newvalue) \
  ({ __typeof (*(mem)) __atg5_oldval; \
     __typeof (mem) __atg5_memp = (mem); \
     __typeof (*(mem)) __atg5_value = (newvalue); \
     \
     do \
       __atg5_oldval = *__atg5_memp; \
     while (__builtin_expect \
            (atomic_compare_and_exchange_bool_acq (__atg5_memp, __atg5_value, \
                                                   __atg5_oldval), 0)); \
     \
     __atg5_oldval; })
#endif

#ifndef atomic_exchange_rel
# define atomic_exchange_rel(mem, newvalue) atomic_exchange_acq (mem, newvalue)
#endif
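
/* Illustrative sketch (not from the original source): a minimal
   test-and-set spinlock built on the exchange macros, assuming a
   hypothetical int LOCK word where 0 means free and 1 means taken:

     while (atomic_exchange_acq (&lock, 1) != 0)
       atomic_delay ();
     ... critical section ...
     atomic_exchange_rel (&lock, 0);
*/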
/* Add VALUE to *MEM and return the old value of *MEM.  */
#ifndef atomic_exchange_and_add_acq
# ifdef atomic_exchange_and_add
#  define atomic_exchange_and_add_acq(mem, value) \
  atomic_exchange_and_add (mem, value)
# else
#  define atomic_exchange_and_add_acq(mem, value) \
  ({ __typeof (*(mem)) __atg6_oldval; \
     __typeof (mem) __atg6_memp = (mem); \
     __typeof (*(mem)) __atg6_value = (value); \
     \
     do \
       __atg6_oldval = *__atg6_memp; \
     while (__builtin_expect \
            (atomic_compare_and_exchange_bool_acq (__atg6_memp, \
                                                   __atg6_oldval \
                                                   + __atg6_value, \
                                                   __atg6_oldval), 0)); \
     \
     __atg6_oldval; })
# endif
#endif

#ifndef atomic_exchange_and_add_rel
# define atomic_exchange_and_add_rel(mem, value) \
  atomic_exchange_and_add_acq (mem, value)
#endif

#ifndef atomic_exchange_and_add
# define atomic_exchange_and_add(mem, value) \
  atomic_exchange_and_add_acq (mem, value)
#endif

#ifndef catomic_exchange_and_add
# define catomic_exchange_and_add(mem, value) \
  ({ __typeof (*(mem)) __atg7_oldv; \
     __typeof (mem) __atg7_memp = (mem); \
     __typeof (*(mem)) __atg7_value = (value); \
     \
     do \
       __atg7_oldv = *__atg7_memp; \
     while (__builtin_expect \
            (catomic_compare_and_exchange_bool_acq (__atg7_memp, \
                                                    __atg7_oldv \
                                                    + __atg7_value, \
                                                    __atg7_oldv), 0)); \
     \
     __atg7_oldv; })
#endif
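
/* Usage sketch (illustrative): atomic_exchange_and_add returns the value
   *MEM held before the addition, so a hypothetical ticket counter is
   simply

     unsigned int my_ticket = atomic_exchange_and_add (&next_ticket, 1);

   catomic_exchange_and_add is the variant for memory that only a single
   thread is known to access.  */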
#ifndef atomic_max
# define atomic_max(mem, value) \
  do { \
    __typeof (*(mem)) __atg8_oldval; \
    __typeof (mem) __atg8_memp = (mem); \
    __typeof (*(mem)) __atg8_value = (value); \
    do { \
      __atg8_oldval = *__atg8_memp; \
      if (__atg8_oldval >= __atg8_value) \
        break; \
    } while (__builtin_expect \
             (atomic_compare_and_exchange_bool_acq (__atg8_memp, __atg8_value, \
                                                    __atg8_oldval), 0)); \
  } while (0)
#endif

#ifndef catomic_max
# define catomic_max(mem, value) \
  do { \
    __typeof (*(mem)) __atg9_oldv; \
    __typeof (mem) __atg9_memp = (mem); \
    __typeof (*(mem)) __atg9_value = (value); \
    do { \
      __atg9_oldv = *__atg9_memp; \
      if (__atg9_oldv >= __atg9_value) \
        break; \
    } while (__builtin_expect \
             (catomic_compare_and_exchange_bool_acq (__atg9_memp, \
                                                     __atg9_value, \
                                                     __atg9_oldv), 0)); \
  } while (0)
#endif

#ifndef atomic_min
# define atomic_min(mem, value) \
  do { \
    __typeof (*(mem)) __atg10_oldval; \
    __typeof (mem) __atg10_memp = (mem); \
    __typeof (*(mem)) __atg10_value = (value); \
    do { \
      __atg10_oldval = *__atg10_memp; \
      if (__atg10_oldval <= __atg10_value) \
        break; \
    } while (__builtin_expect \
             (atomic_compare_and_exchange_bool_acq (__atg10_memp, \
                                                    __atg10_value, \
                                                    __atg10_oldval), 0)); \
  } while (0)
#endif
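
/* Illustrative example: atomic_max and atomic_min retry the CAS only
   while *MEM does not yet satisfy the bound, e.g. for a hypothetical
   high-water mark

     atomic_max (&peak_usage, current_usage);

   the early break makes the already-satisfied case CAS-free.  */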
#ifndef atomic_add
# define atomic_add(mem, value) (void) atomic_exchange_and_add ((mem), (value))
#endif

#ifndef catomic_add
# define catomic_add(mem, value) \
  (void) catomic_exchange_and_add ((mem), (value))
#endif

#ifndef atomic_increment
# define atomic_increment(mem) atomic_add ((mem), 1)
#endif

#ifndef catomic_increment
# define catomic_increment(mem) catomic_add ((mem), 1)
#endif

#ifndef atomic_increment_val
# define atomic_increment_val(mem) (atomic_exchange_and_add ((mem), 1) + 1)
#endif

#ifndef catomic_increment_val
# define catomic_increment_val(mem) (catomic_exchange_and_add ((mem), 1) + 1)
#endif

/* Add one to *MEM and return true iff it's now zero.  */
#ifndef atomic_increment_and_test
# define atomic_increment_and_test(mem) \
  (atomic_exchange_and_add ((mem), 1) + 1 == 0)
#endif

#ifndef atomic_decrement
# define atomic_decrement(mem) atomic_add ((mem), -1)
#endif

#ifndef catomic_decrement
# define catomic_decrement(mem) catomic_add ((mem), -1)
#endif

#ifndef atomic_decrement_val
# define atomic_decrement_val(mem) (atomic_exchange_and_add ((mem), -1) - 1)
#endif

#ifndef catomic_decrement_val
# define catomic_decrement_val(mem) (catomic_exchange_and_add ((mem), -1) - 1)
#endif

/* Subtract 1 from *MEM and return true iff it's now zero.  */
#ifndef atomic_decrement_and_test
# define atomic_decrement_and_test(mem) \
  (atomic_exchange_and_add ((mem), -1) == 1)
#endif
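
/* Usage sketch (illustrative): a typical reference-count release for a
   hypothetical object OBJ with an int refcount:

     if (atomic_decrement_and_test (&obj->refcount))
       free_object (obj);

   Note that atomic_decrement_and_test compares the old value against 1,
   which is equivalent to testing whether the new value is zero.  */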
/* Decrement *MEM if it is > 0, and return the old value.  */
#ifndef atomic_decrement_if_positive
# define atomic_decrement_if_positive(mem) \
  ({ __typeof (*(mem)) __atg11_oldval; \
     __typeof (mem) __atg11_memp = (mem); \
     \
     do \
       { \
         __atg11_oldval = *__atg11_memp; \
         if (__builtin_expect (__atg11_oldval <= 0, 0)) \
           break; \
       } \
     while (__builtin_expect \
            (atomic_compare_and_exchange_bool_acq (__atg11_memp, \
                                                   __atg11_oldval - 1, \
                                                   __atg11_oldval), 0)); \
     __atg11_oldval; })
#endif
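
/* Illustrative sketch: atomic_decrement_if_positive is the building
   block for semaphore-style try-wait operations on a hypothetical
   counter of available resources:

     if (atomic_decrement_if_positive (&available) > 0)
       ... acquired one resource ...
     else
       ... none left; the counter was left unchanged ...
*/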
#ifndef atomic_add_negative
# define atomic_add_negative(mem, value) \
  ({ __typeof (value) __atg12_value = (value); \
     atomic_exchange_and_add (mem, __atg12_value) < -__atg12_value; })
#endif

#ifndef atomic_add_zero
# define atomic_add_zero(mem, value) \
  ({ __typeof (value) __atg13_value = (value); \
     atomic_exchange_and_add (mem, __atg13_value) == -__atg13_value; })
#endif

#ifndef atomic_bit_set
# define atomic_bit_set(mem, bit) \
  (void) atomic_bit_test_set (mem, bit)
#endif

#ifndef atomic_bit_test_set
# define atomic_bit_test_set(mem, bit) \
  ({ __typeof (*(mem)) __atg14_old; \
     __typeof (mem) __atg14_memp = (mem); \
     __typeof (*(mem)) __atg14_mask = ((__typeof (*(mem))) 1 << (bit)); \
     \
     do \
       __atg14_old = (*__atg14_memp); \
     while (__builtin_expect \
            (atomic_compare_and_exchange_bool_acq (__atg14_memp, \
                                                   __atg14_old | __atg14_mask, \
                                                   __atg14_old), 0)); \
     \
     __atg14_old & __atg14_mask; })
#endif
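
/* Usage sketch (illustrative): atomic_bit_test_set yields the old state
   of the bit (as the mask value, not as 0/1), so a hypothetical
   once-only initialization flag can be claimed with

     if (atomic_bit_test_set (&flags, 0) == 0)
       ... this thread won and performs the initialization ...
*/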
/* Atomically *mem &= mask.  */
#ifndef atomic_and
# define atomic_and(mem, mask) \
  do { \
    __typeof (*(mem)) __atg15_old; \
    __typeof (mem) __atg15_memp = (mem); \
    __typeof (*(mem)) __atg15_mask = (mask); \
    \
    do \
      __atg15_old = (*__atg15_memp); \
    while (__builtin_expect \
           (atomic_compare_and_exchange_bool_acq (__atg15_memp, \
                                                  __atg15_old & __atg15_mask, \
                                                  __atg15_old), 0)); \
  } while (0)
#endif

#ifndef catomic_and
# define catomic_and(mem, mask) \
  do { \
    __typeof (*(mem)) __atg20_old; \
    __typeof (mem) __atg20_memp = (mem); \
    __typeof (*(mem)) __atg20_mask = (mask); \
    \
    do \
      __atg20_old = (*__atg20_memp); \
    while (__builtin_expect \
           (catomic_compare_and_exchange_bool_acq (__atg20_memp, \
                                                   __atg20_old & __atg20_mask, \
                                                   __atg20_old), 0)); \
  } while (0)
#endif

/* Atomically *mem &= mask and return the old value of *mem.  */
#ifndef atomic_and_val
# define atomic_and_val(mem, mask) \
  ({ __typeof (*(mem)) __atg16_old; \
     __typeof (mem) __atg16_memp = (mem); \
     __typeof (*(mem)) __atg16_mask = (mask); \
     \
     do \
       __atg16_old = (*__atg16_memp); \
     while (__builtin_expect \
            (atomic_compare_and_exchange_bool_acq (__atg16_memp, \
                                                   __atg16_old & __atg16_mask, \
                                                   __atg16_old), 0)); \
     \
     __atg16_old; })
#endif

/* Atomically *mem |= mask.  */
#ifndef atomic_or
# define atomic_or(mem, mask) \
  do { \
    __typeof (*(mem)) __atg17_old; \
    __typeof (mem) __atg17_memp = (mem); \
    __typeof (*(mem)) __atg17_mask = (mask); \
    \
    do \
      __atg17_old = (*__atg17_memp); \
    while (__builtin_expect \
           (atomic_compare_and_exchange_bool_acq (__atg17_memp, \
                                                  __atg17_old | __atg17_mask, \
                                                  __atg17_old), 0)); \
  } while (0)
#endif

#ifndef catomic_or
# define catomic_or(mem, mask) \
  do { \
    __typeof (*(mem)) __atg18_old; \
    __typeof (mem) __atg18_memp = (mem); \
    __typeof (*(mem)) __atg18_mask = (mask); \
    \
    do \
      __atg18_old = (*__atg18_memp); \
    while (__builtin_expect \
           (catomic_compare_and_exchange_bool_acq (__atg18_memp, \
                                                   __atg18_old | __atg18_mask, \
                                                   __atg18_old), 0)); \
  } while (0)
#endif

/* Atomically *mem |= mask and return the old value of *mem.  */
#ifndef atomic_or_val
# define atomic_or_val(mem, mask) \
  ({ __typeof (*(mem)) __atg19_old; \
     __typeof (mem) __atg19_memp = (mem); \
     __typeof (*(mem)) __atg19_mask = (mask); \
     \
     do \
       __atg19_old = (*__atg19_memp); \
     while (__builtin_expect \
            (atomic_compare_and_exchange_bool_acq (__atg19_memp, \
                                                   __atg19_old | __atg19_mask, \
                                                   __atg19_old), 0)); \
     \
     __atg19_old; })
#endif
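
/* Illustrative example: the _val variants make read-modify-write flag
   updates race-free.  With a hypothetical CLOSING flag bit in STATE:

     if (atomic_or_val (&state, CLOSING) & CLOSING)
       ... another thread had already initiated the close ...

   atomic_or, by contrast, discards the old value and is a statement
   rather than an expression.  */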
#ifndef atomic_full_barrier
# define atomic_full_barrier() __asm__ ("" ::: "memory")
#endif

#ifndef atomic_read_barrier
# define atomic_read_barrier() atomic_full_barrier ()
#endif

#ifndef atomic_write_barrier
# define atomic_write_barrier() atomic_full_barrier ()
#endif

#ifndef atomic_forced_read
# define atomic_forced_read(x) \
  ({ __typeof (x) __x; __asm__ ("" : "=r" (__x) : "0" (x)); __x; })
#endif
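
/* Illustrative note: atomic_forced_read launders the loaded value through
   an empty asm, forcing exactly one real load of X whose result the
   compiler must treat as opaque, e.g. for a hypothetical shared pointer

     struct config *cfg = atomic_forced_read (global_config);

   all later uses go through CFG; the compiler cannot substitute a fresh
   (and possibly different) read of global_config.  */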
/* This is equal to 1 iff the architecture supports 64b atomic operations.  */
#define __HAVE_64B_ATOMICS 0 /* TODO: not yet used - Add these to arch bits!  */
#ifndef __HAVE_64B_ATOMICS
# error Unable to determine if 64-bit atomics are present.
#endif
/* The following functions are a subset of the atomic operations provided by
   C11.  Usually, a function named atomic_OP_MO(args) is equivalent to C11's
   atomic_OP_explicit(args, memory_order_MO); exceptions noted below.  */

/* Each arch can request to use compiler built-ins for C11 atomics.  If it
   does, all atomics will be based on these.  */
#if 0 /* not yet used: USE_ATOMIC_COMPILER_BUILTINS */

/* We require 32b atomic operations; some archs also support 64b atomic
   operations.  */
void __atomic_link_error (void);
# if __HAVE_64B_ATOMICS == 1
#  define __atomic_check_size(mem) \
  if ((sizeof (*mem) != 4) && (sizeof (*mem) != 8)) \
    __atomic_link_error ();
# else
#  define __atomic_check_size(mem) \
  if (sizeof (*mem) != 4) \
    __atomic_link_error ();
# endif

# define atomic_thread_fence_acquire() \
  __atomic_thread_fence (__ATOMIC_ACQUIRE)
# define atomic_thread_fence_release() \
  __atomic_thread_fence (__ATOMIC_RELEASE)
# define atomic_thread_fence_seq_cst() \
  __atomic_thread_fence (__ATOMIC_SEQ_CST)

# define atomic_load_relaxed(mem) \
  ({ __atomic_check_size((mem)); __atomic_load_n ((mem), __ATOMIC_RELAXED); })
# define atomic_load_acquire(mem) \
  ({ __atomic_check_size((mem)); __atomic_load_n ((mem), __ATOMIC_ACQUIRE); })

# define atomic_store_relaxed(mem, val) \
  do { \
    __atomic_check_size((mem)); \
    __atomic_store_n ((mem), (val), __ATOMIC_RELAXED); \
  } while (0)
# define atomic_store_release(mem, val) \
  do { \
    __atomic_check_size((mem)); \
    __atomic_store_n ((mem), (val), __ATOMIC_RELEASE); \
  } while (0)

/* On failure, this CAS has memory_order_relaxed semantics.  */
# define atomic_compare_exchange_weak_relaxed(mem, expected, desired) \
  ({ __atomic_check_size((mem)); \
     __atomic_compare_exchange_n ((mem), (expected), (desired), 1, \
                                  __ATOMIC_RELAXED, __ATOMIC_RELAXED); })
# define atomic_compare_exchange_weak_acquire(mem, expected, desired) \
  ({ __atomic_check_size((mem)); \
     __atomic_compare_exchange_n ((mem), (expected), (desired), 1, \
                                  __ATOMIC_ACQUIRE, __ATOMIC_RELAXED); })
# define atomic_compare_exchange_weak_release(mem, expected, desired) \
  ({ __atomic_check_size((mem)); \
     __atomic_compare_exchange_n ((mem), (expected), (desired), 1, \
                                  __ATOMIC_RELEASE, __ATOMIC_RELAXED); })

# define atomic_exchange_acquire(mem, desired) \
  ({ __atomic_check_size((mem)); \
     __atomic_exchange_n ((mem), (desired), __ATOMIC_ACQUIRE); })
# define atomic_exchange_release(mem, desired) \
  ({ __atomic_check_size((mem)); \
     __atomic_exchange_n ((mem), (desired), __ATOMIC_RELEASE); })

# define atomic_fetch_add_relaxed(mem, operand) \
  ({ __atomic_check_size((mem)); \
     __atomic_fetch_add ((mem), (operand), __ATOMIC_RELAXED); })
# define atomic_fetch_add_acquire(mem, operand) \
  ({ __atomic_check_size((mem)); \
     __atomic_fetch_add ((mem), (operand), __ATOMIC_ACQUIRE); })
# define atomic_fetch_add_release(mem, operand) \
  ({ __atomic_check_size((mem)); \
     __atomic_fetch_add ((mem), (operand), __ATOMIC_RELEASE); })
# define atomic_fetch_add_acq_rel(mem, operand) \
  ({ __atomic_check_size((mem)); \
     __atomic_fetch_add ((mem), (operand), __ATOMIC_ACQ_REL); })

# define atomic_fetch_and_acquire(mem, operand) \
  ({ __atomic_check_size((mem)); \
     __atomic_fetch_and ((mem), (operand), __ATOMIC_ACQUIRE); })

# define atomic_fetch_or_relaxed(mem, operand) \
  ({ __atomic_check_size((mem)); \
     __atomic_fetch_or ((mem), (operand), __ATOMIC_RELAXED); })
# define atomic_fetch_or_acquire(mem, operand) \
  ({ __atomic_check_size((mem)); \
     __atomic_fetch_or ((mem), (operand), __ATOMIC_ACQUIRE); })
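
/* Illustrative mapping (this branch is not compiled yet): once a port
   enables the compiler built-ins, a call such as

     atomic_fetch_add_acquire (&x, 1)

   becomes __atomic_fetch_add (&x, 1, __ATOMIC_ACQUIRE), and
   __atomic_check_size turns an unsupported operand size into a link-time
   failure via the deliberately undefined __atomic_link_error.  */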
#else /* !USE_ATOMIC_COMPILER_BUILTINS */

/* By default, we assume that read, write, and full barriers are equivalent
   to acquire, release, and seq_cst barriers.  Archs for which this does not
   hold have to provide custom definitions of the fences.  */
# ifndef atomic_thread_fence_acquire
#  define atomic_thread_fence_acquire() atomic_read_barrier ()
# endif
# ifndef atomic_thread_fence_release
#  define atomic_thread_fence_release() atomic_write_barrier ()
# endif
# ifndef atomic_thread_fence_seq_cst
#  define atomic_thread_fence_seq_cst() atomic_full_barrier ()
# endif

# ifndef atomic_load_relaxed
#  define atomic_load_relaxed(mem) \
  ({ __typeof (*(mem)) __atg100_val; \
     __asm__ ("" : "=r" (__atg100_val) : "0" (*(mem))); \
     __atg100_val; })
# endif
# ifndef atomic_load_acquire
#  define atomic_load_acquire(mem) \
  ({ __typeof (*(mem)) __atg101_val = atomic_load_relaxed (mem); \
     atomic_thread_fence_acquire (); \
     __atg101_val; })
# endif

# ifndef atomic_store_relaxed
/* XXX Use inline asm here?  */
#  define atomic_store_relaxed(mem, val) do { *(mem) = (val); } while (0)
# endif
# ifndef atomic_store_release
#  define atomic_store_release(mem, val) \
  do { \
    atomic_thread_fence_release (); \
    atomic_store_relaxed ((mem), (val)); \
  } while (0)
# endif

/* On failure, this CAS has memory_order_relaxed semantics.  */
/* XXX This potentially has one branch more than necessary, but archs
   currently do not define a CAS that returns both the previous value and
   the success flag.  */
# ifndef atomic_compare_exchange_weak_acquire
#  define atomic_compare_exchange_weak_acquire(mem, expected, desired) \
  ({ __typeof (*(expected)) __atg102_expected = *(expected); \
     *(expected) = \
       atomic_compare_and_exchange_val_acq ((mem), (desired), *(expected)); \
     *(expected) == __atg102_expected; })
# endif
# ifndef atomic_compare_exchange_weak_relaxed
/* XXX Fall back to CAS with acquire MO because archs do not define a weaker
   CAS.  */
#  define atomic_compare_exchange_weak_relaxed(mem, expected, desired) \
  atomic_compare_exchange_weak_acquire ((mem), (expected), (desired))
# endif
# ifndef atomic_compare_exchange_weak_release
#  define atomic_compare_exchange_weak_release(mem, expected, desired) \
  ({ __typeof (*(expected)) __atg103_expected = *(expected); \
     *(expected) = \
       atomic_compare_and_exchange_val_rel ((mem), (desired), *(expected)); \
     *(expected) == __atg103_expected; })
# endif
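
/* Usage sketch (illustrative): the C11-style weak CAS stores the observed
   value back into *EXPECTED on failure, so retry loops can use it
   directly.  A hypothetical increment of VAL:

     int e = atomic_load_relaxed (&val);
     while (!atomic_compare_exchange_weak_acquire (&val, &e, e + 1))
       ;
*/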
# ifndef atomic_exchange_acquire
#  define atomic_exchange_acquire(mem, val) \
  atomic_exchange_acq ((mem), (val))
# endif
# ifndef atomic_exchange_release
#  define atomic_exchange_release(mem, val) \
  atomic_exchange_rel ((mem), (val))
# endif

# ifndef atomic_fetch_add_acquire
#  define atomic_fetch_add_acquire(mem, operand) \
  atomic_exchange_and_add_acq ((mem), (operand))
# endif
# ifndef atomic_fetch_add_relaxed
/* XXX Fall back to acquire MO because the MO semantics of
   atomic_exchange_and_add are not documented; the generic version falls back
   to atomic_exchange_and_add_acq if atomic_exchange_and_add is not defined,
   and vice versa.  */
#  define atomic_fetch_add_relaxed(mem, operand) \
  atomic_fetch_add_acquire ((mem), (operand))
# endif
# ifndef atomic_fetch_add_release
#  define atomic_fetch_add_release(mem, operand) \
  atomic_exchange_and_add_rel ((mem), (operand))
# endif
# ifndef atomic_fetch_add_acq_rel
#  define atomic_fetch_add_acq_rel(mem, operand) \
  ({ atomic_thread_fence_release (); \
     atomic_exchange_and_add_acq ((mem), (operand)); })
# endif

/* XXX The default for atomic_and_val has acquire semantics, but this is not
   documented.  */
# ifndef atomic_fetch_and_acquire
#  define atomic_fetch_and_acquire(mem, operand) \
  atomic_and_val ((mem), (operand))
# endif

/* XXX The default for atomic_or_val has acquire semantics, but this is not
   documented.  */
# ifndef atomic_fetch_or_acquire
#  define atomic_fetch_or_acquire(mem, operand) \
  atomic_or_val ((mem), (operand))
# endif
/* XXX Fall back to acquire MO because archs do not define a weaker
   atomic_or_val.  */
# ifndef atomic_fetch_or_relaxed
#  define atomic_fetch_or_relaxed(mem, operand) \
  atomic_fetch_or_acquire ((mem), (operand))
# endif

#endif /* !USE_ATOMIC_COMPILER_BUILTINS */

#ifndef atomic_delay
# define atomic_delay() do { /* nothing */ } while (0)
#endif

#endif /* atomic.h */