atomic.h

/* Copyright (C) 2002, 2003, 2004 Free Software Foundation, Inc.
   This file is part of the GNU C Library.
   Contributed by Ulrich Drepper <drepper@redhat.com>, 2002.

   The GNU C Library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
   License as published by the Free Software Foundation; either
   version 2.1 of the License, or (at your option) any later version.

   The GNU C Library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with the GNU C Library; if not, see
   <http://www.gnu.org/licenses/>.  */

#include <stdint.h>
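
/* Fixed-width, "fast", pointer-sized, and maximum-width atomic type
   names used by the generic atomic operations.  */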
typedef int8_t atomic8_t;
typedef uint8_t uatomic8_t;
typedef int_fast8_t atomic_fast8_t;
typedef uint_fast8_t uatomic_fast8_t;

typedef int16_t atomic16_t;
typedef uint16_t uatomic16_t;
typedef int_fast16_t atomic_fast16_t;
typedef uint_fast16_t uatomic_fast16_t;

typedef int32_t atomic32_t;
typedef uint32_t uatomic32_t;
typedef int_fast32_t atomic_fast32_t;
typedef uint_fast32_t uatomic_fast32_t;

typedef int64_t atomic64_t;
typedef uint64_t uatomic64_t;
typedef int_fast64_t atomic_fast64_t;
typedef uint_fast64_t uatomic_fast64_t;

typedef intptr_t atomicptr_t;
typedef uintptr_t uatomicptr_t;
typedef intmax_t atomic_max_t;
typedef uintmax_t uatomic_max_t;

#ifndef LOCK_PREFIX
# ifdef UP
#  define LOCK_PREFIX   /* nothing */
# else
#  define LOCK_PREFIX "lock;"
# endif
#endif
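
/* Compare-and-swap with acquire semantics: if *MEM equals OLDVAL,
   store NEWVAL into *MEM.  CMPXCHG leaves the previous contents of
   *MEM in the accumulator either way, so the macro's value equals
   OLDVAL exactly when the exchange took place.  On uniprocessor
   builds (UP) the LOCK prefix is omitted as unnecessary.  */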
#define __arch_compare_and_exchange_val_8_acq(mem, newval, oldval) \
  ({ __typeof (*mem) ret; \
     __asm__ __volatile__ (LOCK_PREFIX "cmpxchgb %b2, %1" \
                           : "=a" (ret), "=m" (*mem) \
                           : "q" (newval), "m" (*mem), "0" (oldval)); \
     ret; })

#define __arch_compare_and_exchange_val_16_acq(mem, newval, oldval) \
  ({ __typeof (*mem) ret; \
     __asm__ __volatile__ (LOCK_PREFIX "cmpxchgw %w2, %1" \
                           : "=a" (ret), "=m" (*mem) \
                           : "r" (newval), "m" (*mem), "0" (oldval)); \
     ret; })

#define __arch_compare_and_exchange_val_32_acq(mem, newval, oldval) \
  ({ __typeof (*mem) ret; \
     __asm__ __volatile__ (LOCK_PREFIX "cmpxchgl %2, %1" \
                           : "=a" (ret), "=m" (*mem) \
                           : "r" (newval), "m" (*mem), "0" (oldval)); \
     ret; })

#define __arch_compare_and_exchange_val_64_acq(mem, newval, oldval) \
  ({ __typeof (*mem) ret; \
     __asm__ __volatile__ (LOCK_PREFIX "cmpxchgq %q2, %1" \
                           : "=a" (ret), "=m" (*mem) \
                           : "r" ((long) (newval)), "m" (*mem), \
                             "0" ((long) (oldval))); \
     ret; })
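
/* Illustrative CAS retry loop (hypothetical names): atomically add
   DELTA to the 32-bit counter CTR:

     atomic32_t old, seen;
     do
       {
         old = ctr;
         seen = __arch_compare_and_exchange_val_32_acq (&ctr, old + delta,
                                                        old);
       }
     while (seen != old);  */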

/* Note that we need no lock prefix: XCHG with a memory operand is
   implicitly locked.  */
#define atomic_exchange_acq(mem, newvalue) \
  ({ __typeof (*mem) result; \
     if (sizeof (*mem) == 1) \
       __asm__ __volatile__ ("xchgb %b0, %1" \
                             : "=r" (result), "=m" (*mem) \
                             : "0" (newvalue), "m" (*mem)); \
     else if (sizeof (*mem) == 2) \
       __asm__ __volatile__ ("xchgw %w0, %1" \
                             : "=r" (result), "=m" (*mem) \
                             : "0" (newvalue), "m" (*mem)); \
     else if (sizeof (*mem) == 4) \
       __asm__ __volatile__ ("xchgl %0, %1" \
                             : "=r" (result), "=m" (*mem) \
                             : "0" (newvalue), "m" (*mem)); \
     else \
       __asm__ __volatile__ ("xchgq %q0, %1" \
                             : "=r" (result), "=m" (*mem) \
                             : "0" ((long) (newvalue)), "m" (*mem)); \
     result; })
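
/* Atomically add VALUE to *MEM and return the contents *MEM held
   before the addition (XADD exchanges the addend register with the
   old value).  */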
#define atomic_exchange_and_add(mem, value) \
  ({ __typeof (*mem) result; \
     if (sizeof (*mem) == 1) \
       __asm__ __volatile__ (LOCK_PREFIX "xaddb %b0, %1" \
                             : "=r" (result), "=m" (*mem) \
                             : "0" (value), "m" (*mem)); \
     else if (sizeof (*mem) == 2) \
       __asm__ __volatile__ (LOCK_PREFIX "xaddw %w0, %1" \
                             : "=r" (result), "=m" (*mem) \
                             : "0" (value), "m" (*mem)); \
     else if (sizeof (*mem) == 4) \
       __asm__ __volatile__ (LOCK_PREFIX "xaddl %0, %1" \
                             : "=r" (result), "=m" (*mem) \
                             : "0" (value), "m" (*mem)); \
     else \
       __asm__ __volatile__ (LOCK_PREFIX "xaddq %q0, %1" \
                             : "=r" (result), "=m" (*mem) \
                             : "0" ((long) (value)), "m" (*mem)); \
     result; })
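
/* Atomically add VALUE to *MEM, discarding the result.  Constant
   additions of 1 and -1 are routed to the shorter INC/DEC forms
   below.  */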
#define atomic_add(mem, value) \
  (void) ({ if (__builtin_constant_p (value) && (value) == 1) \
              atomic_increment (mem); \
            else if (__builtin_constant_p (value) && (value) == -1) \
              atomic_decrement (mem); \
            else if (sizeof (*mem) == 1) \
              __asm__ __volatile__ (LOCK_PREFIX "addb %b1, %0" \
                                    : "=m" (*mem) \
                                    : "ir" (value), "m" (*mem)); \
            else if (sizeof (*mem) == 2) \
              __asm__ __volatile__ (LOCK_PREFIX "addw %w1, %0" \
                                    : "=m" (*mem) \
                                    : "ir" (value), "m" (*mem)); \
            else if (sizeof (*mem) == 4) \
              __asm__ __volatile__ (LOCK_PREFIX "addl %1, %0" \
                                    : "=m" (*mem) \
                                    : "ir" (value), "m" (*mem)); \
            else \
              __asm__ __volatile__ (LOCK_PREFIX "addq %q1, %0" \
                                    : "=m" (*mem) \
                                    : "ir" ((long) (value)), "m" (*mem)); \
            })
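
/* Atomically add VALUE to *MEM; evaluates to nonzero iff the sum is
   negative (SETS captures the sign flag of the locked ADD).  */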
#define atomic_add_negative(mem, value) \
  ({ unsigned char __result; \
     if (sizeof (*mem) == 1) \
       __asm__ __volatile__ (LOCK_PREFIX "addb %b2, %0; sets %1" \
                             : "=m" (*mem), "=qm" (__result) \
                             : "ir" (value), "m" (*mem)); \
     else if (sizeof (*mem) == 2) \
       __asm__ __volatile__ (LOCK_PREFIX "addw %w2, %0; sets %1" \
                             : "=m" (*mem), "=qm" (__result) \
                             : "ir" (value), "m" (*mem)); \
     else if (sizeof (*mem) == 4) \
       __asm__ __volatile__ (LOCK_PREFIX "addl %2, %0; sets %1" \
                             : "=m" (*mem), "=qm" (__result) \
                             : "ir" (value), "m" (*mem)); \
     else \
       __asm__ __volatile__ (LOCK_PREFIX "addq %q2, %0; sets %1" \
                             : "=m" (*mem), "=qm" (__result) \
                             : "ir" ((long) (value)), "m" (*mem)); \
     __result; })
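
/* Atomically add VALUE to *MEM; evaluates to nonzero iff the sum is
   zero (SETZ captures the zero flag).  */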
#define atomic_add_zero(mem, value) \
  ({ unsigned char __result; \
     if (sizeof (*mem) == 1) \
       __asm__ __volatile__ (LOCK_PREFIX "addb %b2, %0; setz %1" \
                             : "=m" (*mem), "=qm" (__result) \
                             : "ir" (value), "m" (*mem)); \
     else if (sizeof (*mem) == 2) \
       __asm__ __volatile__ (LOCK_PREFIX "addw %w2, %0; setz %1" \
                             : "=m" (*mem), "=qm" (__result) \
                             : "ir" (value), "m" (*mem)); \
     else if (sizeof (*mem) == 4) \
       __asm__ __volatile__ (LOCK_PREFIX "addl %2, %0; setz %1" \
                             : "=m" (*mem), "=qm" (__result) \
                             : "ir" (value), "m" (*mem)); \
     else \
       __asm__ __volatile__ (LOCK_PREFIX "addq %q2, %0; setz %1" \
                             : "=m" (*mem), "=qm" (__result) \
                             : "ir" ((long) (value)), "m" (*mem)); \
     __result; })
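
/* Atomically increment *MEM, discarding the result.  */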
#define atomic_increment(mem) \
  (void) ({ if (sizeof (*mem) == 1) \
              __asm__ __volatile__ (LOCK_PREFIX "incb %b0" \
                                    : "=m" (*mem) \
                                    : "m" (*mem)); \
            else if (sizeof (*mem) == 2) \
              __asm__ __volatile__ (LOCK_PREFIX "incw %w0" \
                                    : "=m" (*mem) \
                                    : "m" (*mem)); \
            else if (sizeof (*mem) == 4) \
              __asm__ __volatile__ (LOCK_PREFIX "incl %0" \
                                    : "=m" (*mem) \
                                    : "m" (*mem)); \
            else \
              __asm__ __volatile__ (LOCK_PREFIX "incq %q0" \
                                    : "=m" (*mem) \
                                    : "m" (*mem)); \
            })
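
/* Atomically increment *MEM; evaluates to nonzero iff the new value
   is zero, i.e. the increment wrapped around.  */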
#define atomic_increment_and_test(mem) \
  ({ unsigned char __result; \
     if (sizeof (*mem) == 1) \
       __asm__ __volatile__ (LOCK_PREFIX "incb %b0; sete %1" \
                             : "=m" (*mem), "=qm" (__result) \
                             : "m" (*mem)); \
     else if (sizeof (*mem) == 2) \
       __asm__ __volatile__ (LOCK_PREFIX "incw %w0; sete %1" \
                             : "=m" (*mem), "=qm" (__result) \
                             : "m" (*mem)); \
     else if (sizeof (*mem) == 4) \
       __asm__ __volatile__ (LOCK_PREFIX "incl %0; sete %1" \
                             : "=m" (*mem), "=qm" (__result) \
                             : "m" (*mem)); \
     else \
       __asm__ __volatile__ (LOCK_PREFIX "incq %q0; sete %1" \
                             : "=m" (*mem), "=qm" (__result) \
                             : "m" (*mem)); \
     __result; })
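
/* Atomically decrement *MEM, discarding the result.  */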
#define atomic_decrement(mem) \
  (void) ({ if (sizeof (*mem) == 1) \
              __asm__ __volatile__ (LOCK_PREFIX "decb %b0" \
                                    : "=m" (*mem) \
                                    : "m" (*mem)); \
            else if (sizeof (*mem) == 2) \
              __asm__ __volatile__ (LOCK_PREFIX "decw %w0" \
                                    : "=m" (*mem) \
                                    : "m" (*mem)); \
            else if (sizeof (*mem) == 4) \
              __asm__ __volatile__ (LOCK_PREFIX "decl %0" \
                                    : "=m" (*mem) \
                                    : "m" (*mem)); \
            else \
              __asm__ __volatile__ (LOCK_PREFIX "decq %q0" \
                                    : "=m" (*mem) \
                                    : "m" (*mem)); \
            })
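
/* Atomically decrement *MEM; evaluates to nonzero iff the new value
   is zero.  This is the usual shape of a reference-count release,
   e.g. (hypothetical names):

     if (atomic_decrement_and_test (&obj->refcnt))
       free_object (obj);  */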
#define atomic_decrement_and_test(mem) \
  ({ unsigned char __result; \
     if (sizeof (*mem) == 1) \
       __asm__ __volatile__ (LOCK_PREFIX "decb %b0; sete %1" \
                             : "=m" (*mem), "=qm" (__result) \
                             : "m" (*mem)); \
     else if (sizeof (*mem) == 2) \
       __asm__ __volatile__ (LOCK_PREFIX "decw %w0; sete %1" \
                             : "=m" (*mem), "=qm" (__result) \
                             : "m" (*mem)); \
     else if (sizeof (*mem) == 4) \
       __asm__ __volatile__ (LOCK_PREFIX "decl %0; sete %1" \
                             : "=m" (*mem), "=qm" (__result) \
                             : "m" (*mem)); \
     else \
       __asm__ __volatile__ (LOCK_PREFIX "decq %q0; sete %1" \
                             : "=m" (*mem), "=qm" (__result) \
                             : "m" (*mem)); \
     __result; })
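
/* Atomically OR the mask (1 << BIT) into *MEM.  For 64-bit objects a
   constant BIT below 32 still fits an immediate operand; otherwise
   the mask is materialized in a register.  */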
#define atomic_bit_set(mem, bit) \
  (void) ({ if (sizeof (*mem) == 1) \
              __asm__ __volatile__ (LOCK_PREFIX "orb %b2, %0" \
                                    : "=m" (*mem) \
                                    : "m" (*mem), "ir" (1L << (bit))); \
            else if (sizeof (*mem) == 2) \
              __asm__ __volatile__ (LOCK_PREFIX "orw %w2, %0" \
                                    : "=m" (*mem) \
                                    : "m" (*mem), "ir" (1L << (bit))); \
            else if (sizeof (*mem) == 4) \
              __asm__ __volatile__ (LOCK_PREFIX "orl %2, %0" \
                                    : "=m" (*mem) \
                                    : "m" (*mem), "ir" (1L << (bit))); \
            else if (__builtin_constant_p (bit) && (bit) < 32) \
              __asm__ __volatile__ (LOCK_PREFIX "orq %2, %0" \
                                    : "=m" (*mem) \
                                    : "m" (*mem), "i" (1L << (bit))); \
            else \
              __asm__ __volatile__ (LOCK_PREFIX "orq %q2, %0" \
                                    : "=m" (*mem) \
                                    : "m" (*mem), "r" (1UL << (bit))); \
            })
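
/* Atomically set bit BIT of *MEM and evaluate to its previous value
   (BTS sets the bit; SETC captures the carry flag, which receives the
   bit's old value).  */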
#define atomic_bit_test_set(mem, bit) \
  ({ unsigned char __result; \
     if (sizeof (*mem) == 1) \
       __asm__ __volatile__ (LOCK_PREFIX "btsb %3, %1; setc %0" \
                             : "=q" (__result), "=m" (*mem) \
                             : "m" (*mem), "ir" (bit)); \
     else if (sizeof (*mem) == 2) \
       __asm__ __volatile__ (LOCK_PREFIX "btsw %3, %1; setc %0" \
                             : "=q" (__result), "=m" (*mem) \
                             : "m" (*mem), "ir" (bit)); \
     else if (sizeof (*mem) == 4) \
       __asm__ __volatile__ (LOCK_PREFIX "btsl %3, %1; setc %0" \
                             : "=q" (__result), "=m" (*mem) \
                             : "m" (*mem), "ir" (bit)); \
     else \
       __asm__ __volatile__ (LOCK_PREFIX "btsq %3, %1; setc %0" \
                             : "=q" (__result), "=m" (*mem) \
                             : "m" (*mem), "ir" (bit)); \
     __result; })
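
/* Spin-wait hint: "rep; nop" encodes the PAUSE instruction, which
   tells the processor the thread is busy-waiting, saving power and
   avoiding memory-order mis-speculation penalties on loop exit.  */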
#define atomic_delay() __asm__ ("rep; nop")