atomic.h

/*
 * Copyright (C) 2013 Synopsys, Inc. (www.synopsys.com)
 *
 * Licensed under LGPL v2.1 or later, see the file COPYING.LIB in this tarball.
 */

#include <stdint.h>
#include <sysdep.h>

typedef int8_t atomic8_t;
typedef uint8_t uatomic8_t;
typedef int_fast8_t atomic_fast8_t;
typedef uint_fast8_t uatomic_fast8_t;

typedef int32_t atomic32_t;
typedef uint32_t uatomic32_t;
typedef int_fast32_t atomic_fast32_t;
typedef uint_fast32_t uatomic_fast32_t;

typedef intptr_t atomicptr_t;
typedef uintptr_t uatomicptr_t;
typedef intmax_t atomic_max_t;
typedef uintmax_t uatomic_max_t;
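
/* References to __arc_link_error() are never resolved on purpose:
   atomic operations expanded for an unsupported operand size call it,
   turning the misuse into a link-time error.  */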
void __arc_link_error (void);
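
/* ARC700 (__A7__) has no DMB instruction, so the full barrier there is
   only a compiler barrier; ARC HS uses "dmb 3", which orders both loads
   and stores.  */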
#ifdef __A7__
#define atomic_full_barrier() __asm__ __volatile__("" : : : "memory")
#else
#define atomic_full_barrier() __asm__ __volatile__("dmb 3" : : : "memory")
#endif

/* Atomic compare and exchange.  */
#define __arch_compare_and_exchange_val_8_acq(mem, newval, oldval) \
  ({ __arc_link_error (); oldval; })
#define __arch_compare_and_exchange_val_16_acq(mem, newval, oldval) \
  ({ __arc_link_error (); oldval; })
#define __arch_compare_and_exchange_val_64_acq(mem, newval, oldval) \
  ({ __arc_link_error (); oldval; })

#ifdef __CONFIG_ARC_HAS_ATOMICS__
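
/* LLOCK/SCOND retry loop: load-lock the word, bail out to 2: if it does
   not hold OLDVAL, otherwise store NEWVAL conditionally; SCOND leaves Z
   clear when the reservation was lost, in which case BNZ retries.  */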
#define __arch_compare_and_exchange_val_32_acq(mem, newval, oldval)   \
({                                                                    \
  __typeof(oldval) prev;                                              \
                                                                      \
  __asm__ __volatile__(                                               \
    "1: llock  %0, [%1]  \n"                                          \
    "   brne   %0, %2, 2f\n"                                          \
    "   scond  %3, [%1]  \n"                                          \
    "   bnz    1b        \n"                                          \
    "2:                  \n"                                          \
    : "=&r"(prev)                                                     \
    : "r"(mem), "ir"(oldval),                                         \
      "r"(newval) /* can't be "ir": scond can't take limm for "b" */  \
    : "cc", "memory");                                                \
                                                                      \
  prev;                                                               \
})

#else

#ifndef __NR_arc_usr_cmpxchg
#error "__NR_arc_usr_cmpxchg missing: Please upgrade to kernel 4.9+ headers"
#endif

/* With no hardware assist, have the kernel do the atomic operation.
   This will only work in a UP configuration.  */
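
/* arc_usr_cmpxchg does the compare-and-exchange in the kernel with
   preemption disabled, which is what makes it atomic on a single
   processor (and why it is not safe under SMP).  */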
#define __arch_compare_and_exchange_val_32_acq(mem, newval, oldval)   \
({                                                                    \
  /* open-coded INTERNAL_SYSCALL as it lacks a "cc" clobber */        \
  register int __ret __asm__("r0") = (int)(mem);                      \
  register int __a1 __asm__("r1") = (int)(oldval);                    \
  register int __a2 __asm__("r2") = (int)(newval);                    \
  register int _sys_num __asm__("r8") = __NR_arc_usr_cmpxchg;         \
                                                                      \
  __asm__ volatile (                                                  \
    ARC_TRAP_INSN                                                     \
    : "+r" (__ret)                                                    \
    : "r"(_sys_num), "r"(__ret), "r"(__a1), "r"(__a2)                 \
    : "memory", "cc");                                                \
                                                                      \
  /* the syscall returns the previous value */                        \
  /* Z bit is set if the cmpxchg succeeded (we don't use that yet) */ \
                                                                      \
  (__typeof(oldval)) __ret;                                           \
})
#endif
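
/* Illustrative sketch, not part of the original header: a CAS-based
   fetch-and-add built on the 32-bit primitive above, to make the retry
   protocol concrete.  The function name is made up for this example.  */
static __inline__ int
__arc_fetch_add_example (int *mem, int inc)
{
  int old, seen;

  do
    {
      /* Snapshot the current value and try to publish old + inc;
         the CAS returns whatever was actually in *mem.  */
      old = *mem;
      seen = __arch_compare_and_exchange_val_32_acq (mem, old + inc, old);
    }
  while (seen != old);	/* another writer got in between; retry */

  return old;
}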

/* Store NEWVALUE in *MEM and return the old value.
   The atomic EX instruction is present in all configurations.  */
#define __arch_exchange_32_acq(mem, newval)                           \
({                                                                    \
  __typeof__(*(mem)) val = newval;                                    \
                                                                      \
  __asm__ __volatile__(                                               \
    "ex %0, [%1]"                                                     \
    : "+r" (val)                                                      \
    : "r" (mem)                                                       \
    : "memory");                                                      \
                                                                      \
  val;                                                                \
})

#define atomic_exchange_acq(mem, newval)                              \
({                                                                    \
  if (sizeof (*(mem)) != 4)                                           \
    abort ();                                                         \
  __arch_exchange_32_acq (mem, newval);                               \
})
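
/* Illustrative sketch, not part of the original header: a minimal
   spinlock on top of atomic_exchange_acq, the classic use of an
   unconditional atomic swap such as ARC's EX.  Names are made up
   for this example.  */
static __inline__ void
__arc_spin_lock_example (int *lock)
{
  /* Swap in 1; a previous value of 1 means another thread holds the
     lock, so keep retrying until we observe 0.  */
  while (atomic_exchange_acq (lock, 1) != 0)
    ;
}

static __inline__ void
__arc_spin_unlock_example (int *lock)
{
  atomic_full_barrier ();	/* make the critical section visible first */
  *lock = 0;
}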