atomic.h 5.4 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189
  1. /*
  2. * Copyright (C) 2013 Synopsys, Inc. (www.synopsys.com)
  3. *
  4. * Licensed under LGPL v2.1 or later, see the file COPYING.LIB in this tarball.
  5. */
  6. #include <stdint.h>
  7. #include <sysdep.h>
  8. typedef int8_t atomic8_t;
  9. typedef uint8_t uatomic8_t;
  10. typedef int_fast8_t atomic_fast8_t;
  11. typedef uint_fast8_t uatomic_fast8_t;
  12. typedef int32_t atomic32_t;
  13. typedef uint32_t uatomic32_t;
  14. typedef int_fast32_t atomic_fast32_t;
  15. typedef uint_fast32_t uatomic_fast32_t;
  16. typedef intptr_t atomicptr_t;
  17. typedef uintptr_t uatomicptr_t;
  18. typedef intmax_t atomic_max_t;
  19. typedef uintmax_t uatomic_max_t;
  20. void __arc_link_error (void);
  21. #ifdef __A7__
  22. #define atomic_full_barrier() __asm__ __volatile__("": : :"memory")
  23. #define ARC_BARRIER_INSTR ""
  24. #else
  25. #define atomic_full_barrier() __asm__ __volatile__("dmb 3": : :"memory")
  26. #define ARC_BARRIER_INSTR "dmb 3"
  27. #endif
  28. /* Atomic compare and exchange. */
  29. #define __arch_compare_and_exchange_val_8_acq(mem, newval, oldval) \
  30. ({ __arc_link_error (); oldval; })
  31. #define __arch_compare_and_exchange_val_16_acq(mem, newval, oldval) \
  32. ({ __arc_link_error (); oldval; })
  33. #define __arch_compare_and_exchange_val_64_acq(mem, newval, oldval) \
  34. ({ __arc_link_error (); oldval; })
  35. #ifdef __CONFIG_ARC_HAS_ATOMICS__
  36. #ifdef __A7__
  37. #define __arch_compare_and_exchange_val_32_acq(mem, newval, oldval) \
  38. ({ \
  39. __typeof(oldval) prev; \
  40. \
  41. __asm__ __volatile__( \
  42. "1: llock %0, [%1] \n" \
  43. " brne %0, %2, 2f \n" \
  44. " scond %3, [%1] \n" \
  45. " bnz 1b \n" \
  46. "2: \n" \
  47. : "=&r"(prev) \
  48. : "r"(mem), "ir"(oldval), \
  49. "r"(newval) /* can't be "ir". scond can't take limm for "b" */\
  50. : "cc", "memory"); \
  51. \
  52. prev; \
  53. })
  54. #else /* !__A7__ */
  55. #define USE_ATOMIC_COMPILER_BUILTINS 1
  56. #define __arch_compare_and_exchange_val_32_acq(mem, newval, oldval) \
  57. ({ \
  58. __typeof(*mem) __oldval = (oldval); \
  59. __atomic_compare_exchange_n(mem, (void *) &__oldval, newval, 0, \
  60. __ATOMIC_ACQUIRE, __ATOMIC_RELAXED); \
  61. __oldval; \
  62. })
  63. #define __arch_compare_and_exchange_val_8_rel(mem, newval, oldval) \
  64. ({ __arc_link_error (); oldval; })
  65. #define __arch_compare_and_exchange_val_16_rel(mem, newval, oldval) \
  66. ({ __arc_link_error (); oldval; })
  67. #define __arch_compare_and_exchange_val_64_rel(mem, newval, oldval) \
  68. ({ __arc_link_error (); oldval; })
  69. #define __arch_compare_and_exchange_val_32_rel(mem, newval, oldval) \
  70. ({ \
  71. __typeof(*mem) __oldval = (oldval); \
  72. __atomic_compare_exchange_n(mem, (void *) &__oldval, newval, 0, \
  73. __ATOMIC_RELEASE, __ATOMIC_RELAXED); \
  74. __oldval; \
  75. })
  76. /* Compare and exchange with "acquire" semantics, ie barrier after */
  77. #define atomic_compare_and_exchange_val_acq(mem, new, old) \
  78. __atomic_val_bysize(__arch_compare_and_exchange_val, acq, \
  79. mem, new, old)
  80. /* Compare and exchange with "release" semantics, ie barrier before */
  81. #define atomic_compare_and_exchange_val_rel(mem, new, old) \
  82. __atomic_val_bysize(__arch_compare_and_exchange_val, rel, \
  83. mem, new, old)
  84. /* Explicitly define here to use release semantics*/
  85. #define atomic_compare_and_exchange_bool_rel(mem, newval, oldval) \
  86. ({ \
  87. __typeof (oldval) __atg3_old = (oldval); \
  88. atomic_compare_and_exchange_val_rel (mem, newval, __atg3_old) \
  89. != __atg3_old; \
  90. })
  91. #endif /* __A7__ */
  92. #else /* !__CONFIG_ARC_HAS_ATOMICS__ */
  93. #ifndef __NR_arc_usr_cmpxchg
  94. #error "__NR_arc_usr_cmpxchg missing: Please upgrade to kernel 4.9+ headers"
  95. #endif
  96. /* With lack of hardware assist, use kernel to do the atomic operation
  97. This will only work in a UP configuration
  98. */
  99. #define __arch_compare_and_exchange_val_32_acq(mem, newval, oldval) \
  100. ({ \
  101. /* opecode INTERNAL_SYSCALL as it lacks cc clobber */ \
  102. register int __ret __asm__("r0") = (int)(mem); \
  103. register int __a1 __asm__("r1") = (int)(oldval); \
  104. register int __a2 __asm__("r2") = (int)(newval); \
  105. register int _sys_num __asm__("r8") = __NR_arc_usr_cmpxchg; \
  106. \
  107. __asm__ volatile ( \
  108. ARC_TRAP_INSN \
  109. : "+r" (__ret) \
  110. : "r"(_sys_num), "r"(__ret), "r"(__a1), "r"(__a2) \
  111. : "memory", "cc"); \
  112. \
  113. /* syscall returns previous value */ \
  114. /* Z bit is set if cmpxchg succeeded (we don't use that yet) */ \
  115. \
  116. (__typeof(oldval)) __ret; \
  117. })
  118. #endif
  119. /* Store NEWVALUE in *MEM and return the old value.
  120. Atomic EX is present in all configurations
  121. */
  122. #define __arch_exchange_32_acq(mem, newval) \
  123. ({ \
  124. __typeof__(*(mem)) val = newval; \
  125. \
  126. __asm__ __volatile__( \
  127. "ex %0, [%1]\n" \
  128. ARC_BARRIER_INSTR \
  129. : "+r" (val) \
  130. : "r" (mem) \
  131. : "memory" ); \
  132. \
  133. val; \
  134. })
  135. #define __arch_exchange_32_rel(mem, newval) \
  136. ({ \
  137. __typeof__(*(mem)) val = newval; \
  138. \
  139. __asm__ __volatile__( \
  140. ARC_BARRIER_INSTR"\n" \
  141. "ex %0, [%1]" \
  142. : "+r" (val) \
  143. : "r" (mem) \
  144. : "memory" ); \
  145. \
  146. val; \
  147. })
  148. #define atomic_exchange_acq(mem, newval) \
  149. ({ \
  150. if (sizeof(*(mem)) != 4) \
  151. abort(); \
  152. __arch_exchange_32_acq(mem, newval); \
  153. })
  154. #define atomic_exchange_rel(mem, newval) \
  155. ({ \
  156. if (sizeof(*(mem)) != 4) \
  157. abort(); \
  158. __arch_exchange_32_rel(mem, newval); \
  159. })