bfin_fixed_code.h 4.1 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155
/* Atomic instructions for userspace.
 *
 * The actual implementations can be found in the kernel.
 *
 * Copyright (c) 2008 Analog Devices, Inc.
 *
 * Licensed under the LGPL v2.1.
 */
  9. #ifndef __BFIN_FIXED_CODE_H__
  10. #define __BFIN_FIXED_CODE_H__
  11. #include <stdint.h>
  12. #include <asm/fixed_code.h>
  13. #ifndef __ASSEMBLY__
/* Atomically exchange: store __bfin_newval into *__bfin_ptr and return the
 * previous contents.  Dispatches to the kernel-provided fixed-address stub
 * ATOMIC_XCHG32 (see <asm/fixed_code.h>).
 */
static inline
uint32_t bfin_atomic_xchg32(uint32_t *__bfin_ptr, uint32_t __bfin_newval)
{
	uint32_t __bfin_ret;
	/* Fixed-code ABI for ATOMIC_XCHG32:
	 * Input:  P0: memory address to use
	 *         R1: value to store
	 * Output: R0: old contents of the memory address
	 */
	__asm__ __volatile__(
		"CALL (%[__bfin_func])"
		/* "=q0" pins the result to R0; "=m" tells the compiler the
		 * pointed-to word is written. */
		: "=q0" (__bfin_ret), "=m" (*__bfin_ptr)
		/* "a" places the stub address in an address register for CALL;
		 * "q1" pins the new value to R1; "qA" pins the pointer to P0. */
		: [__bfin_func] "a" (ATOMIC_XCHG32), "q1" (__bfin_newval),
		"qA" (__bfin_ptr), "m" (*__bfin_ptr)
		/* CALL overwrites RETS; "memory" orders surrounding accesses. */
		: "RETS", "memory"
	);
	return __bfin_ret;
}
/* Atomic compare-and-swap: if *__bfin_ptr equals __bfin_exp, store
 * __bfin_newval there.  Returns the old contents of *__bfin_ptr (equal to
 * __bfin_exp on success).  Dispatches to the kernel fixed-address stub
 * ATOMIC_CAS32.
 */
static inline
uint32_t bfin_atomic_cas32(uint32_t *__bfin_ptr, uint32_t __bfin_exp, uint32_t __bfin_newval)
{
	uint32_t __bfin_ret;
	/* Fixed-code ABI for ATOMIC_CAS32:
	 * Input:  P0: memory address to use
	 *         R1: compare value
	 *         R2: new value to store
	 * Output: R0: old contents of the memory address
	 */
	__asm__ __volatile__(
		"CALL (%[__bfin_func])"
		: "=q0" (__bfin_ret), "=m" (*__bfin_ptr)
		/* "q1"/"q2" pin the compare and new values to R1/R2; "qA" pins
		 * the pointer to P0; "a" holds the stub address for CALL. */
		: [__bfin_func] "a" (ATOMIC_CAS32), "q1" (__bfin_exp), "q2" (__bfin_newval),
		"qA" (__bfin_ptr), "m" (*__bfin_ptr)
		/* CALL overwrites RETS; "memory" orders surrounding accesses. */
		: "RETS", "memory"
	);
	return __bfin_ret;
}
/* Atomically add __bfin_inc to *__bfin_ptr and return the NEW value
 * (note: unlike xchg/cas, the add/sub/bitop stubs return the updated
 * contents in R0).  Dispatches to the kernel fixed-address stub
 * ATOMIC_ADD32.
 */
static inline
uint32_t bfin_atomic_add32(uint32_t *__bfin_ptr, uint32_t __bfin_inc)
{
	uint32_t __bfin_ret;
	/* Fixed-code ABI for ATOMIC_ADD32:
	 * Input:  P0: memory address to use
	 *         R0: value to add
	 * Output: R0: new contents of the memory address
	 *         R1: previous contents of the memory address
	 */
	__asm__ __volatile__(
		"CALL (%[__bfin_func])"
		/* R0 is both the input addend ("q0") and the result ("=q0"). */
		: "=q0" (__bfin_ret), "=m" (*__bfin_ptr)
		: [__bfin_func] "a" (ATOMIC_ADD32), "q0" (__bfin_inc),
		"qA" (__bfin_ptr), "m" (*__bfin_ptr)
		/* R1 is clobbered by the stub (it returns the old value there,
		 * which this wrapper discards); CALL overwrites RETS. */
		: "R1", "RETS", "memory"
	);
	return __bfin_ret;
}
/* Atomic increment: add 1 and return the new value. */
#define bfin_atomic_inc32(ptr) bfin_atomic_add32(ptr, 1)
/* Atomically subtract __bfin_dec from *__bfin_ptr and return the NEW value.
 * Dispatches to the kernel fixed-address stub ATOMIC_SUB32.
 */
static inline
uint32_t bfin_atomic_sub32(uint32_t *__bfin_ptr, uint32_t __bfin_dec)
{
	uint32_t __bfin_ret;
	/* Fixed-code ABI for ATOMIC_SUB32:
	 * Input:  P0: memory address to use
	 *         R0: value to subtract
	 * Output: R0: new contents of the memory address
	 *         R1: previous contents of the memory address
	 */
	__asm__ __volatile__(
		"CALL (%[__bfin_func])"
		/* R0 is both the input operand ("q0") and the result ("=q0"). */
		: "=q0" (__bfin_ret), "=m" (*__bfin_ptr)
		: [__bfin_func] "a" (ATOMIC_SUB32), "q0" (__bfin_dec),
		"qA" (__bfin_ptr), "m" (*__bfin_ptr)
		/* R1 is clobbered (stub returns old value there, discarded);
		 * CALL overwrites RETS. */
		: "R1", "RETS", "memory"
	);
	return __bfin_ret;
}
/* Atomic decrement: subtract 1 and return the new value. */
#define bfin_atomic_dec32(ptr) bfin_atomic_sub32(ptr, 1)
/* Atomically OR __bfin_ior into *__bfin_ptr and return the NEW value.
 * Dispatches to the kernel fixed-address stub ATOMIC_IOR32.
 */
static inline
uint32_t bfin_atomic_ior32(uint32_t *__bfin_ptr, uint32_t __bfin_ior)
{
	uint32_t __bfin_ret;
	/* Fixed-code ABI for ATOMIC_IOR32:
	 * Input:  P0: memory address to use
	 *         R0: value to ior
	 * Output: R0: new contents of the memory address
	 *         R1: previous contents of the memory address
	 */
	__asm__ __volatile__(
		"CALL (%[__bfin_func])"
		/* R0 is both the input operand ("q0") and the result ("=q0"). */
		: "=q0" (__bfin_ret), "=m" (*__bfin_ptr)
		: [__bfin_func] "a" (ATOMIC_IOR32), "q0" (__bfin_ior),
		"qA" (__bfin_ptr), "m" (*__bfin_ptr)
		/* R1 is clobbered (stub returns old value there, discarded);
		 * CALL overwrites RETS. */
		: "R1", "RETS", "memory"
	);
	return __bfin_ret;
}
/* Atomically AND __bfin_and into *__bfin_ptr and return the NEW value.
 * Dispatches to the kernel fixed-address stub ATOMIC_AND32.
 */
static inline
uint32_t bfin_atomic_and32(uint32_t *__bfin_ptr, uint32_t __bfin_and)
{
	uint32_t __bfin_ret;
	/* Fixed-code ABI for ATOMIC_AND32:
	 * Input:  P0: memory address to use
	 *         R0: value to and
	 * Output: R0: new contents of the memory address
	 *         R1: previous contents of the memory address
	 */
	__asm__ __volatile__(
		"CALL (%[__bfin_func])"
		/* R0 is both the input operand ("q0") and the result ("=q0"). */
		: "=q0" (__bfin_ret), "=m" (*__bfin_ptr)
		: [__bfin_func] "a" (ATOMIC_AND32), "q0" (__bfin_and),
		"qA" (__bfin_ptr), "m" (*__bfin_ptr)
		/* R1 is clobbered (stub returns old value there, discarded);
		 * CALL overwrites RETS. */
		: "R1", "RETS", "memory"
	);
	return __bfin_ret;
}
/* Atomically XOR __bfin_xor into *__bfin_ptr and return the NEW value.
 * Dispatches to the kernel fixed-address stub ATOMIC_XOR32.
 */
static inline
uint32_t bfin_atomic_xor32(uint32_t *__bfin_ptr, uint32_t __bfin_xor)
{
	uint32_t __bfin_ret;
	/* Fixed-code ABI for ATOMIC_XOR32:
	 * Input:  P0: memory address to use
	 *         R0: value to xor
	 * Output: R0: new contents of the memory address
	 *         R1: previous contents of the memory address
	 */
	__asm__ __volatile__(
		"CALL (%[__bfin_func])"
		/* R0 is both the input operand ("q0") and the result ("=q0"). */
		: "=q0" (__bfin_ret), "=m" (*__bfin_ptr)
		: [__bfin_func] "a" (ATOMIC_XOR32), "q0" (__bfin_xor),
		"qA" (__bfin_ptr), "m" (*__bfin_ptr)
		/* R1 is clobbered (stub returns old value there, discarded);
		 * CALL overwrites RETS. */
		: "R1", "RETS", "memory"
	);
	return __bfin_ret;
}
  141. #endif
  142. #endif