/* atomic.h */
/* Copyright (C) 2012 Free Software Foundation, Inc.
   This file is part of the GNU C Library.

   The GNU C Library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
   License as published by the Free Software Foundation; either
   version 2.1 of the License, or (at your option) any later version.

   The GNU C Library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with the GNU C Library; if not, write to the Free
   Software Foundation, Inc., 51 Franklin Street - Fifth Floor,
   Boston, MA 02110-1301, USA.  */
#ifndef _BITS_ATOMIC_H
#define _BITS_ATOMIC_H 1

/* Xtensa has only a 32-bit form of a store-conditional instruction
   (S32C1I, paired with the SCOMPARE1 special register), so just stub
   out the rest: only 4-byte atomics are provided below.  */
/* Atomically store NEWVAL in *MEM if *MEM is equal to OLDVAL.
   Return the old *MEM value.
   L32I/S32C1I loop: S32C1I stores only if *MEM still equals the value
   previously written to SCOMPARE1, and returns the actual old value in
   its source/destination register.  */
#define __arch_compare_and_exchange_val_32_acq(mem, newval, oldval) \
  ({__typeof__(*(mem)) __tmp, __value; \
    __asm__ __volatile__( \
      "1: l32i %1, %2, 0 \n"    /* __tmp = *mem */ \
      " bne %1, %4, 2f \n"      /* *mem != oldval: done, __tmp is old value */ \
      " wsr %1, SCOMPARE1 \n"   /* expected value for s32c1i */ \
      " mov %0, %1 \n"          /* __value = expected old value */ \
      " mov %1, %3 \n"          /* __tmp = newval */ \
      " s32c1i %1, %2, 0 \n"    /* try store; __tmp = actual old *mem */ \
      " bne %0, %1, 1b \n"      /* lost a race: retry */ \
      "2: \n" \
      : "=&a" (__value), "=&a" (__tmp) \
      : "a" (mem), "a" (newval), "a" (oldval) \
      : "memory" ); \
    __tmp; \
  })
/* Atomically store NEWVAL in *MEM if *MEM is equal to OLDVAL.
   Return zero if *MEM was changed or non-zero if no exchange happened.  */
#define __arch_compare_and_exchange_bool_32_acq(mem, newval, oldval) \
  ({__typeof__(*(mem)) __tmp, __value; \
    __asm__ __volatile__( \
      "1: l32i %0, %2, 0 \n"    /* __value = *mem */ \
      " sub %1, %4, %0 \n"      /* __tmp = oldval - *mem (nonzero iff unequal) */ \
      " bnez %1, 2f \n"         /* mismatch: done, result non-zero */ \
      " wsr %0, SCOMPARE1 \n"   /* expected value for s32c1i */ \
      " mov %1, %3 \n"          /* __tmp = newval */ \
      " s32c1i %1, %2, 0 \n"    /* try store; __tmp = actual old *mem */ \
      " bne %0, %1, 1b \n"      /* lost a race: retry */ \
      " movi %1, 0 \n"          /* exchange succeeded: result zero */ \
      "2: \n" \
      : "=&a" (__value), "=&a" (__tmp) \
      : "a" (mem), "a" (newval), "a" (oldval) \
      : "memory" ); \
    __tmp != 0; \
  })
/* Store NEWVALUE in *MEM and return the old value.  */
#define __arch_exchange_32_acq(mem, newval) \
  ({__typeof__(*(mem)) __tmp, __value; \
    __asm__ __volatile__( \
      "1: l32i %0, %2, 0 \n"    /* __value = *mem */ \
      " wsr %0, SCOMPARE1 \n"   /* expected value for s32c1i */ \
      " mov %1, %3 \n"          /* __tmp = newval */ \
      " s32c1i %1, %2, 0 \n"    /* try store; __tmp = actual old *mem */ \
      " bne %0, %1, 1b \n"      /* lost a race: retry */ \
      : "=&a" (__value), "=&a" (__tmp) \
      : "a" (mem), "a" (newval) \
      : "memory" ); \
    __tmp; \
  })
/* Add VALUE to *MEM and return the old value of *MEM.  */
#define __arch_atomic_exchange_and_add_32(mem, value) \
  ({__typeof__(*(mem)) __tmp, __value; \
    __asm__ __volatile__( \
      "1: l32i %0, %2, 0 \n"    /* __value = *mem */ \
      " wsr %0, SCOMPARE1 \n"   /* expected value for s32c1i */ \
      " add %1, %0, %3 \n"      /* __tmp = *mem + value */ \
      " s32c1i %1, %2, 0 \n"    /* try store; __tmp = actual old *mem */ \
      " bne %0, %1, 1b \n"      /* lost a race: retry */ \
      : "=&a" (__value), "=&a" (__tmp) \
      : "a" (mem), "a" (value) \
      : "memory" ); \
    __tmp; \
  })
/* Subtract VALUE from *MEM and return the old value of *MEM.  */
#define __arch_atomic_exchange_and_sub_32(mem, value) \
  ({__typeof__(*(mem)) __tmp, __value; \
    __asm__ __volatile__( \
      "1: l32i %0, %2, 0 \n"    /* __value = *mem */ \
      " wsr %0, SCOMPARE1 \n"   /* expected value for s32c1i */ \
      " sub %1, %0, %3 \n"      /* __tmp = *mem - value */ \
      " s32c1i %1, %2, 0 \n"    /* try store; __tmp = actual old *mem */ \
      " bne %0, %1, 1b \n"      /* lost a race: retry */ \
      : "=&a" (__value), "=&a" (__tmp) \
      : "a" (mem), "a" (value) \
      : "memory" ); \
    __tmp; \
  })
  98. /* Decrement *MEM if it is > 0, and return the old value. */
  99. #define __arch_atomic_decrement_if_positive_32(mem) \
  100. ({__typeof__(*(mem)) __tmp, __value; \
  101. __asm__ __volatile__( \
  102. "1: l32i %0, %2, 0 \n" \
  103. " blti %0, 1, 2f \n" \
  104. " wsr %0, SCOMPARE1 \n" \
  105. " addi %1, %0, -1 \n" \
  106. " s32c1i %1, %2, 0 \n" \
  107. " bne %0, %1, 1b \n" \
  108. "2: \n" \
  109. : "=&a" (__value), "=&a" (__tmp) \
  110. : "a" (mem) \
  111. : "memory" ); \
  112. __tmp; \
  113. })
  114. /* These are the preferred public interfaces: */
  115. #define atomic_compare_and_exchange_val_acq(mem, newval, oldval) \
  116. ({ \
  117. if (sizeof (*mem) != 4) \
  118. abort(); \
  119. __arch_compare_and_exchange_val_32_acq(mem, newval, oldval); \
  120. })
  121. #define atomic_exchange_acq(mem, newval) \
  122. ({ \
  123. if (sizeof(*(mem)) != 4) \
  124. abort(); \
  125. __arch_exchange_32_acq(mem, newval); \
  126. })
  127. #define atomic_exchange_and_add(mem, newval) \
  128. ({ \
  129. if (sizeof(*(mem)) != 4) \
  130. abort(); \
  131. __arch_atomic_exchange_and_add_32(mem, newval); \
  132. })
  133. #define atomic_exchange_and_sub(mem, newval) \
  134. ({ \
  135. if (sizeof(*(mem)) != 4) \
  136. abort(); \
  137. __arch_atomic_exchange_and_sub_32(mem, newval); \
  138. })
  139. #define atomic_decrement_if_positive(mem) \
  140. ({ \
  141. if (sizeof(*(mem)) != 4) \
  142. abort(); \
  143. __arch_atomic_decrement_if_positive_32(mem); \
  144. })
#endif /* _BITS_ATOMIC_H */