atomic.h

/* Copyright (C) 2002, 2003, 2004 Free Software Foundation, Inc.
   This file is part of the GNU C Library.
   Contributed by Ulrich Drepper <drepper@redhat.com>, 2002.

   The GNU C Library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
   License as published by the Free Software Foundation; either
   version 2.1 of the License, or (at your option) any later version.

   The GNU C Library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with the GNU C Library; if not, write to the Free
   Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
   02111-1307 USA.  */
#include <stdint.h>

typedef int8_t atomic8_t;
typedef uint8_t uatomic8_t;
typedef int_fast8_t atomic_fast8_t;
typedef uint_fast8_t uatomic_fast8_t;

typedef int16_t atomic16_t;
typedef uint16_t uatomic16_t;
typedef int_fast16_t atomic_fast16_t;
typedef uint_fast16_t uatomic_fast16_t;

typedef int32_t atomic32_t;
typedef uint32_t uatomic32_t;
typedef int_fast32_t atomic_fast32_t;
typedef uint_fast32_t uatomic_fast32_t;

typedef int64_t atomic64_t;
typedef uint64_t uatomic64_t;
typedef int_fast64_t atomic_fast64_t;
typedef uint_fast64_t uatomic_fast64_t;

typedef intptr_t atomicptr_t;
typedef uintptr_t uatomicptr_t;
typedef intmax_t atomic_max_t;
typedef uintmax_t uatomic_max_t;
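
/* On a uniprocessor (UP) configuration there is no second CPU to race
   with, and a single instruction is never interrupted midway, so the
   bus-locking "lock" prefix can be omitted from the read-modify-write
   instructions below.  */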
#ifndef LOCK_PREFIX
# ifdef UP
#  define LOCK_PREFIX	/* nothing */
# else
#  define LOCK_PREFIX "lock;"
# endif
#endif

/* Compare *MEM against OLDVAL and, if they are equal, store NEWVAL.
   In either case return the value *MEM held before the operation.  */
#define __arch_compare_and_exchange_val_8_acq(mem, newval, oldval) \
  ({ __typeof (*mem) ret; \
     __asm__ __volatile__ (LOCK_PREFIX "cmpxchgb %b2, %1" \
                           : "=a" (ret), "=m" (*mem) \
                           : "q" (newval), "m" (*mem), "0" (oldval)); \
     ret; })

#define __arch_compare_and_exchange_val_16_acq(mem, newval, oldval) \
  ({ __typeof (*mem) ret; \
     __asm__ __volatile__ (LOCK_PREFIX "cmpxchgw %w2, %1" \
                           : "=a" (ret), "=m" (*mem) \
                           : "r" (newval), "m" (*mem), "0" (oldval)); \
     ret; })

#define __arch_compare_and_exchange_val_32_acq(mem, newval, oldval) \
  ({ __typeof (*mem) ret; \
     __asm__ __volatile__ (LOCK_PREFIX "cmpxchgl %2, %1" \
                           : "=a" (ret), "=m" (*mem) \
                           : "r" (newval), "m" (*mem), "0" (oldval)); \
     ret; })

#define __arch_compare_and_exchange_val_64_acq(mem, newval, oldval) \
  ({ __typeof (*mem) ret; \
     __asm__ __volatile__ (LOCK_PREFIX "cmpxchgq %q2, %1" \
                           : "=a" (ret), "=m" (*mem) \
                           : "r" ((long) (newval)), "m" (*mem), \
                             "0" ((long) (oldval))); \
     ret; })
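
/* Usage sketch (illustrative, not part of the original header): a
   minimal spinlock acquire built on the 32-bit compare-and-exchange
   above.  The function name is hypothetical; atomic_delay is defined
   at the end of this file.  */
#if 0
static inline void
example_spin_lock (int *lock)
{
  /* Swap in 1 only if we observe 0; the macro returns the old value,
     so anything other than 0 means another thread holds the lock.  */
  while (__arch_compare_and_exchange_val_32_acq (lock, 1, 0) != 0)
    atomic_delay ();
}
#endif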

/* Note that we need no lock prefix here: the xchg instruction with a
   memory operand is implicitly locked.  */
#define atomic_exchange_acq(mem, newvalue) \
  ({ __typeof (*mem) result; \
     if (sizeof (*mem) == 1) \
       __asm__ __volatile__ ("xchgb %b0, %1" \
                             : "=r" (result), "=m" (*mem) \
                             : "0" (newvalue), "m" (*mem)); \
     else if (sizeof (*mem) == 2) \
       __asm__ __volatile__ ("xchgw %w0, %1" \
                             : "=r" (result), "=m" (*mem) \
                             : "0" (newvalue), "m" (*mem)); \
     else if (sizeof (*mem) == 4) \
       __asm__ __volatile__ ("xchgl %0, %1" \
                             : "=r" (result), "=m" (*mem) \
                             : "0" (newvalue), "m" (*mem)); \
     else \
       __asm__ __volatile__ ("xchgq %q0, %1" \
                             : "=r" (result), "=m" (*mem) \
                             : "0" ((long) (newvalue)), "m" (*mem)); \
     result; })
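
/* Usage sketch (illustrative only): atomically clearing a flag while
   learning its previous value, e.g. to consume a pending-work bit.  */
#if 0
static inline int
example_test_and_clear (int *flag)
{
  /* *flag becomes 0; the old contents are returned.  */
  return atomic_exchange_acq (flag, 0);
}
#endif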

/* Atomically add VALUE to *MEM and return the value *MEM held before
   the addition (fetch-and-add, via xadd).  */
#define atomic_exchange_and_add(mem, value) \
  ({ __typeof (*mem) result; \
     if (sizeof (*mem) == 1) \
       __asm__ __volatile__ (LOCK_PREFIX "xaddb %b0, %1" \
                             : "=r" (result), "=m" (*mem) \
                             : "0" (value), "m" (*mem)); \
     else if (sizeof (*mem) == 2) \
       __asm__ __volatile__ (LOCK_PREFIX "xaddw %w0, %1" \
                             : "=r" (result), "=m" (*mem) \
                             : "0" (value), "m" (*mem)); \
     else if (sizeof (*mem) == 4) \
       __asm__ __volatile__ (LOCK_PREFIX "xaddl %0, %1" \
                             : "=r" (result), "=m" (*mem) \
                             : "0" (value), "m" (*mem)); \
     else \
       __asm__ __volatile__ (LOCK_PREFIX "xaddq %q0, %1" \
                             : "=r" (result), "=m" (*mem) \
                             : "0" ((long) (value)), "m" (*mem)); \
     result; })
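
/* Usage sketch (illustrative only): a ticket dispenser.  Because
   atomic_exchange_and_add returns the value *MEM held before the
   addition, every caller receives a distinct ticket.  */
#if 0
static inline int
example_take_ticket (int *next_ticket)
{
  return atomic_exchange_and_add (next_ticket, 1);
}
#endif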

/* Atomically add VALUE to *MEM, discarding the result.  Additions of
   the constants 1 and -1 are routed to the cheaper inc/dec forms.  */
#define atomic_add(mem, value) \
  (void) ({ if (__builtin_constant_p (value) && (value) == 1) \
              atomic_increment (mem); \
            else if (__builtin_constant_p (value) && (value) == -1) \
              atomic_decrement (mem); \
            else if (sizeof (*mem) == 1) \
              __asm__ __volatile__ (LOCK_PREFIX "addb %b1, %0" \
                                    : "=m" (*mem) \
                                    : "ir" (value), "m" (*mem)); \
            else if (sizeof (*mem) == 2) \
              __asm__ __volatile__ (LOCK_PREFIX "addw %w1, %0" \
                                    : "=m" (*mem) \
                                    : "ir" (value), "m" (*mem)); \
            else if (sizeof (*mem) == 4) \
              __asm__ __volatile__ (LOCK_PREFIX "addl %1, %0" \
                                    : "=m" (*mem) \
                                    : "ir" (value), "m" (*mem)); \
            else \
              __asm__ __volatile__ (LOCK_PREFIX "addq %q1, %0" \
                                    : "=m" (*mem) \
                                    : "ir" ((long) (value)), "m" (*mem)); \
            })

/* Atomically add VALUE to *MEM; return nonzero iff the result is
   negative (the sign flag after the addition).  */
#define atomic_add_negative(mem, value) \
  ({ unsigned char __result; \
     if (sizeof (*mem) == 1) \
       __asm__ __volatile__ (LOCK_PREFIX "addb %b2, %0; sets %1" \
                             : "=m" (*mem), "=qm" (__result) \
                             : "ir" (value), "m" (*mem)); \
     else if (sizeof (*mem) == 2) \
       __asm__ __volatile__ (LOCK_PREFIX "addw %w2, %0; sets %1" \
                             : "=m" (*mem), "=qm" (__result) \
                             : "ir" (value), "m" (*mem)); \
     else if (sizeof (*mem) == 4) \
       __asm__ __volatile__ (LOCK_PREFIX "addl %2, %0; sets %1" \
                             : "=m" (*mem), "=qm" (__result) \
                             : "ir" (value), "m" (*mem)); \
     else \
       __asm__ __volatile__ (LOCK_PREFIX "addq %q2, %0; sets %1" \
                             : "=m" (*mem), "=qm" (__result) \
                             : "ir" ((long) (value)), "m" (*mem)); \
     __result; })

/* Atomically add VALUE to *MEM; return nonzero iff the result is
   zero.  */
#define atomic_add_zero(mem, value) \
  ({ unsigned char __result; \
     if (sizeof (*mem) == 1) \
       __asm__ __volatile__ (LOCK_PREFIX "addb %b2, %0; setz %1" \
                             : "=m" (*mem), "=qm" (__result) \
                             : "ir" (value), "m" (*mem)); \
     else if (sizeof (*mem) == 2) \
       __asm__ __volatile__ (LOCK_PREFIX "addw %w2, %0; setz %1" \
                             : "=m" (*mem), "=qm" (__result) \
                             : "ir" (value), "m" (*mem)); \
     else if (sizeof (*mem) == 4) \
       __asm__ __volatile__ (LOCK_PREFIX "addl %2, %0; setz %1" \
                             : "=m" (*mem), "=qm" (__result) \
                             : "ir" (value), "m" (*mem)); \
     else \
       __asm__ __volatile__ (LOCK_PREFIX "addq %q2, %0; setz %1" \
                             : "=m" (*mem), "=qm" (__result) \
                             : "ir" ((long) (value)), "m" (*mem)); \
     __result; })
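
/* Usage sketch (illustrative only): a semaphore-style down operation
   using atomic_add_negative to detect when the count drops below
   zero, i.e. when the caller must block.  */
#if 0
static inline int
example_sem_down (int *count)
{
  return atomic_add_negative (count, -1);
}
#endif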

#define atomic_increment(mem) \
  (void) ({ if (sizeof (*mem) == 1) \
              __asm__ __volatile__ (LOCK_PREFIX "incb %b0" \
                                    : "=m" (*mem) \
                                    : "m" (*mem)); \
            else if (sizeof (*mem) == 2) \
              __asm__ __volatile__ (LOCK_PREFIX "incw %w0" \
                                    : "=m" (*mem) \
                                    : "m" (*mem)); \
            else if (sizeof (*mem) == 4) \
              __asm__ __volatile__ (LOCK_PREFIX "incl %0" \
                                    : "=m" (*mem) \
                                    : "m" (*mem)); \
            else \
              __asm__ __volatile__ (LOCK_PREFIX "incq %q0" \
                                    : "=m" (*mem) \
                                    : "m" (*mem)); \
            })

/* Atomically increment *MEM; return nonzero iff the new value is
   zero.  */
#define atomic_increment_and_test(mem) \
  ({ unsigned char __result; \
     if (sizeof (*mem) == 1) \
       __asm__ __volatile__ (LOCK_PREFIX "incb %b0; sete %1" \
                             : "=m" (*mem), "=qm" (__result) \
                             : "m" (*mem)); \
     else if (sizeof (*mem) == 2) \
       __asm__ __volatile__ (LOCK_PREFIX "incw %w0; sete %1" \
                             : "=m" (*mem), "=qm" (__result) \
                             : "m" (*mem)); \
     else if (sizeof (*mem) == 4) \
       __asm__ __volatile__ (LOCK_PREFIX "incl %0; sete %1" \
                             : "=m" (*mem), "=qm" (__result) \
                             : "m" (*mem)); \
     else \
       __asm__ __volatile__ (LOCK_PREFIX "incq %q0; sete %1" \
                             : "=m" (*mem), "=qm" (__result) \
                             : "m" (*mem)); \
     __result; })

#define atomic_decrement(mem) \
  (void) ({ if (sizeof (*mem) == 1) \
              __asm__ __volatile__ (LOCK_PREFIX "decb %b0" \
                                    : "=m" (*mem) \
                                    : "m" (*mem)); \
            else if (sizeof (*mem) == 2) \
              __asm__ __volatile__ (LOCK_PREFIX "decw %w0" \
                                    : "=m" (*mem) \
                                    : "m" (*mem)); \
            else if (sizeof (*mem) == 4) \
              __asm__ __volatile__ (LOCK_PREFIX "decl %0" \
                                    : "=m" (*mem) \
                                    : "m" (*mem)); \
            else \
              __asm__ __volatile__ (LOCK_PREFIX "decq %q0" \
                                    : "=m" (*mem) \
                                    : "m" (*mem)); \
            })

/* Atomically decrement *MEM; return nonzero iff the new value is
   zero.  */
#define atomic_decrement_and_test(mem) \
  ({ unsigned char __result; \
     if (sizeof (*mem) == 1) \
       __asm__ __volatile__ (LOCK_PREFIX "decb %b0; sete %1" \
                             : "=m" (*mem), "=qm" (__result) \
                             : "m" (*mem)); \
     else if (sizeof (*mem) == 2) \
       __asm__ __volatile__ (LOCK_PREFIX "decw %w0; sete %1" \
                             : "=m" (*mem), "=qm" (__result) \
                             : "m" (*mem)); \
     else if (sizeof (*mem) == 4) \
       __asm__ __volatile__ (LOCK_PREFIX "decl %0; sete %1" \
                             : "=m" (*mem), "=qm" (__result) \
                             : "m" (*mem)); \
     else \
       __asm__ __volatile__ (LOCK_PREFIX "decq %q0; sete %1" \
                             : "=m" (*mem), "=qm" (__result) \
                             : "m" (*mem)); \
     __result; })
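
/* Usage sketch (illustrative only): reference-count release.  Exactly
   one caller sees the counter reach zero and performs the cleanup.  */
#if 0
static inline void
example_unref (int *refcount, void (*destroy) (void *), void *obj)
{
  if (atomic_decrement_and_test (refcount))
    destroy (obj);
}
#endif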

/* Atomically set bit BIT of *MEM.  For 64-bit objects a constant bit
   below 32 still fits in a (sign-extended) immediate; otherwise the
   mask must live in a register.  */
#define atomic_bit_set(mem, bit) \
  (void) ({ if (sizeof (*mem) == 1) \
              __asm__ __volatile__ (LOCK_PREFIX "orb %b2, %0" \
                                    : "=m" (*mem) \
                                    : "m" (*mem), "ir" (1L << (bit))); \
            else if (sizeof (*mem) == 2) \
              __asm__ __volatile__ (LOCK_PREFIX "orw %w2, %0" \
                                    : "=m" (*mem) \
                                    : "m" (*mem), "ir" (1L << (bit))); \
            else if (sizeof (*mem) == 4) \
              __asm__ __volatile__ (LOCK_PREFIX "orl %2, %0" \
                                    : "=m" (*mem) \
                                    : "m" (*mem), "ir" (1L << (bit))); \
            else if (__builtin_constant_p (bit) && (bit) < 32) \
              __asm__ __volatile__ (LOCK_PREFIX "orq %2, %0" \
                                    : "=m" (*mem) \
                                    : "m" (*mem), "i" (1L << (bit))); \
            else \
              __asm__ __volatile__ (LOCK_PREFIX "orq %q2, %0" \
                                    : "=m" (*mem) \
                                    : "m" (*mem), "r" (1UL << (bit))); \
            })

/* Atomically set bit BIT of *MEM; return the bit's previous value.  */
#define atomic_bit_test_set(mem, bit) \
  ({ unsigned char __result; \
     if (sizeof (*mem) == 1) \
       __asm__ __volatile__ (LOCK_PREFIX "btsb %3, %1; setc %0" \
                             : "=q" (__result), "=m" (*mem) \
                             : "m" (*mem), "ir" (bit)); \
     else if (sizeof (*mem) == 2) \
       __asm__ __volatile__ (LOCK_PREFIX "btsw %3, %1; setc %0" \
                             : "=q" (__result), "=m" (*mem) \
                             : "m" (*mem), "ir" (bit)); \
     else if (sizeof (*mem) == 4) \
       __asm__ __volatile__ (LOCK_PREFIX "btsl %3, %1; setc %0" \
                             : "=q" (__result), "=m" (*mem) \
                             : "m" (*mem), "ir" (bit)); \
     else \
       __asm__ __volatile__ (LOCK_PREFIX "btsq %3, %1; setc %0" \
                             : "=q" (__result), "=m" (*mem) \
                             : "m" (*mem), "ir" (bit)); \
     __result; })

/* "rep; nop" encodes the pause instruction, a spin-wait hint that
   reduces power and pipeline pressure in busy-wait loops.  */
#define atomic_delay() __asm__ ("rep; nop")
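
/* Usage sketch (illustrative only): a test-and-set lock that keeps
   the lock state in bit 0 of a word and uses atomic_delay in the
   contention path.  */
#if 0
static inline void
example_bit_lock (int *word)
{
  /* atomic_bit_test_set returns the bit's previous value, so a result
     of 1 means the lock was already held.  */
  while (atomic_bit_test_set (word, 0))
    atomic_delay ();
}
#endif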