/* lowlevellock.h — x86-64 low-level lock (futex) primitives. */
/* Copyright (C) 2002-2004, 2006-2008, 2009 Free Software Foundation, Inc.
   This file is part of the GNU C Library.
   Contributed by Ulrich Drepper <drepper@redhat.com>, 2002.

   The GNU C Library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
   License as published by the Free Software Foundation; either
   version 2.1 of the License, or (at your option) any later version.

   The GNU C Library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with the GNU C Library; if not, see
   <http://www.gnu.org/licenses/>.  */
  15. #ifndef _LOWLEVELLOCK_H
  16. #define _LOWLEVELLOCK_H 1
  17. #ifndef __ASSEMBLER__
  18. # include <time.h>
  19. # include <sys/param.h>
  20. # include <bits/pthreadtypes.h>
  21. # include <bits/kernel-features.h>
  22. # include <tcb-offsets.h>
  23. # ifndef LOCK_INSTR
  24. # ifdef UP
  25. # define LOCK_INSTR /* nothing */
  26. # else
  27. # define LOCK_INSTR "lock;"
  28. # endif
  29. # endif
  30. #else
  31. # ifndef LOCK
  32. # ifdef UP
  33. # define LOCK
  34. # else
  35. # define LOCK lock
  36. # endif
  37. # endif
  38. #endif
  39. #define FUTEX_WAIT 0
  40. #define FUTEX_WAKE 1
  41. #define FUTEX_CMP_REQUEUE 4
  42. #define FUTEX_WAKE_OP 5
  43. #define FUTEX_LOCK_PI 6
  44. #define FUTEX_UNLOCK_PI 7
  45. #define FUTEX_TRYLOCK_PI 8
  46. #define FUTEX_WAIT_BITSET 9
  47. #define FUTEX_WAKE_BITSET 10
  48. #define FUTEX_WAIT_REQUEUE_PI 11
  49. #define FUTEX_CMP_REQUEUE_PI 12
  50. #define FUTEX_PRIVATE_FLAG 128
  51. #define FUTEX_CLOCK_REALTIME 256
  52. #define FUTEX_BITSET_MATCH_ANY 0xffffffff
  53. #define FUTEX_OP_CLEAR_WAKE_IF_GT_ONE ((4 << 24) | 1)
  54. /* Values for 'private' parameter of locking macros. Yes, the
  55. definition seems to be backwards. But it is not. The bit will be
  56. reversed before passing to the system call. */
  57. #define LLL_PRIVATE 0
  58. #define LLL_SHARED FUTEX_PRIVATE_FLAG
  59. #ifndef __ASSEMBLER__
  60. #if !defined NOT_IN_libc || defined IS_IN_rtld
  61. /* In libc.so or ld.so all futexes are private. */
  62. # ifdef __ASSUME_PRIVATE_FUTEX
  63. # define __lll_private_flag(fl, private) \
  64. ((fl) | FUTEX_PRIVATE_FLAG)
  65. # else
  66. # define __lll_private_flag(fl, private) \
  67. ((fl) | THREAD_GETMEM (THREAD_SELF, header.private_futex))
  68. # endif
  69. #else
  70. # ifdef __ASSUME_PRIVATE_FUTEX
  71. # define __lll_private_flag(fl, private) \
  72. (((fl) | FUTEX_PRIVATE_FLAG) ^ (private))
  73. # else
  74. # define __lll_private_flag(fl, private) \
  75. (__builtin_constant_p (private) \
  76. ? ((private) == 0 \
  77. ? ((fl) | THREAD_GETMEM (THREAD_SELF, header.private_futex)) \
  78. : (fl)) \
  79. : ({ unsigned int __fl = ((private) ^ FUTEX_PRIVATE_FLAG); \
  80. __asm__ ("andl %%fs:%P1, %0" : "+r" (__fl) \
  81. : "i" (offsetof (struct pthread, header.private_futex))); \
  82. __fl | (fl); }))
  83. # endif
  84. #endif
  85. /* Initializer for lock. */
  86. #define LLL_LOCK_INITIALIZER (0)
  87. #define LLL_LOCK_INITIALIZER_LOCKED (1)
  88. #define LLL_LOCK_INITIALIZER_WAITERS (2)
  89. /* Delay in spinlock loop. */
  90. #define BUSY_WAIT_NOP __asm__ ("rep; nop")
  91. #define LLL_STUB_UNWIND_INFO_START \
  92. ".section .eh_frame,\"a\",@progbits\n" \
  93. "7:\t" ".long 9f-8f # Length of Common Information Entry\n" \
  94. "8:\t" ".long 0x0 # CIE Identifier Tag\n\t" \
  95. ".byte 0x1 # CIE Version\n\t" \
  96. ".ascii \"zR\\0\" # CIE Augmentation\n\t" \
  97. ".uleb128 0x1 # CIE Code Alignment Factor\n\t" \
  98. ".sleb128 -8 # CIE Data Alignment Factor\n\t" \
  99. ".byte 0x10 # CIE RA Column\n\t" \
  100. ".uleb128 0x1 # Augmentation size\n\t" \
  101. ".byte 0x1b # FDE Encoding (pcrel sdata4)\n\t" \
  102. ".byte 0x12 # DW_CFA_def_cfa_sf\n\t" \
  103. ".uleb128 0x7\n\t" \
  104. ".sleb128 16\n\t" \
  105. ".align 8\n" \
  106. "9:\t" ".long 23f-10f # FDE Length\n" \
  107. "10:\t" ".long 10b-7b # FDE CIE offset\n\t" \
  108. ".long 1b-. # FDE initial location\n\t" \
  109. ".long 6b-1b # FDE address range\n\t" \
  110. ".uleb128 0x0 # Augmentation size\n\t" \
  111. ".byte 0x16 # DW_CFA_val_expression\n\t" \
  112. ".uleb128 0x10\n\t" \
  113. ".uleb128 12f-11f\n" \
  114. "11:\t" ".byte 0x80 # DW_OP_breg16\n\t" \
  115. ".sleb128 4b-1b\n"
  116. #define LLL_STUB_UNWIND_INFO_END \
  117. ".byte 0x16 # DW_CFA_val_expression\n\t" \
  118. ".uleb128 0x10\n\t" \
  119. ".uleb128 14f-13f\n" \
  120. "13:\t" ".byte 0x80 # DW_OP_breg16\n\t" \
  121. ".sleb128 4b-2b\n" \
  122. "14:\t" ".byte 0x40 + (3b-2b) # DW_CFA_advance_loc\n\t" \
  123. ".byte 0x0e # DW_CFA_def_cfa_offset\n\t" \
  124. ".uleb128 0\n\t" \
  125. ".byte 0x16 # DW_CFA_val_expression\n\t" \
  126. ".uleb128 0x10\n\t" \
  127. ".uleb128 16f-15f\n" \
  128. "15:\t" ".byte 0x80 # DW_OP_breg16\n\t" \
  129. ".sleb128 4b-3b\n" \
  130. "16:\t" ".byte 0x40 + (4b-3b-1) # DW_CFA_advance_loc\n\t" \
  131. ".byte 0x0e # DW_CFA_def_cfa_offset\n\t" \
  132. ".uleb128 128\n\t" \
  133. ".byte 0x16 # DW_CFA_val_expression\n\t" \
  134. ".uleb128 0x10\n\t" \
  135. ".uleb128 20f-17f\n" \
  136. "17:\t" ".byte 0x80 # DW_OP_breg16\n\t" \
  137. ".sleb128 19f-18f\n\t" \
  138. ".byte 0x0d # DW_OP_const4s\n" \
  139. "18:\t" ".4byte 4b-.\n\t" \
  140. ".byte 0x1c # DW_OP_minus\n\t" \
  141. ".byte 0x0d # DW_OP_const4s\n" \
  142. "19:\t" ".4byte 24f-.\n\t" \
  143. ".byte 0x22 # DW_OP_plus\n" \
  144. "20:\t" ".byte 0x40 + (5b-4b+1) # DW_CFA_advance_loc\n\t" \
  145. ".byte 0x13 # DW_CFA_def_cfa_offset_sf\n\t" \
  146. ".sleb128 16\n\t" \
  147. ".byte 0x16 # DW_CFA_val_expression\n\t" \
  148. ".uleb128 0x10\n\t" \
  149. ".uleb128 22f-21f\n" \
  150. "21:\t" ".byte 0x80 # DW_OP_breg16\n\t" \
  151. ".sleb128 4b-5b\n" \
  152. "22:\t" ".align 8\n" \
  153. "23:\t" ".previous\n"
  154. /* Unwind info for
  155. 1: leaq ..., %rdi
  156. 2: subq $128, %rsp
  157. 3: callq ...
  158. 4: addq $128, %rsp
  159. 5: jmp 24f
  160. 6:
  161. snippet. */
  162. #define LLL_STUB_UNWIND_INFO_5 \
  163. LLL_STUB_UNWIND_INFO_START \
  164. "12:\t" ".byte 0x40 + (2b-1b) # DW_CFA_advance_loc\n\t" \
  165. LLL_STUB_UNWIND_INFO_END
  166. /* Unwind info for
  167. 1: leaq ..., %rdi
  168. 0: movq ..., %rdx
  169. 2: subq $128, %rsp
  170. 3: callq ...
  171. 4: addq $128, %rsp
  172. 5: jmp 24f
  173. 6:
  174. snippet. */
  175. #define LLL_STUB_UNWIND_INFO_6 \
  176. LLL_STUB_UNWIND_INFO_START \
  177. "12:\t" ".byte 0x40 + (0b-1b) # DW_CFA_advance_loc\n\t" \
  178. ".byte 0x16 # DW_CFA_val_expression\n\t" \
  179. ".uleb128 0x10\n\t" \
  180. ".uleb128 26f-25f\n" \
  181. "25:\t" ".byte 0x80 # DW_OP_breg16\n\t" \
  182. ".sleb128 4b-0b\n" \
  183. "26:\t" ".byte 0x40 + (2b-0b) # DW_CFA_advance_loc\n\t" \
  184. LLL_STUB_UNWIND_INFO_END
  185. #define lll_futex_wait(futex, val, private) \
  186. lll_futex_timed_wait(futex, val, NULL, private)
  187. #define lll_futex_timed_wait(futex, val, timeout, private) \
  188. ({ \
  189. register const struct timespec *__to __asm__ ("r10") = timeout; \
  190. int __status; \
  191. register __typeof (val) _val __asm__ ("edx") = (val); \
  192. __asm__ __volatile__ ("syscall" \
  193. : "=a" (__status) \
  194. : "0" (SYS_futex), "D" (futex), \
  195. "S" (__lll_private_flag (FUTEX_WAIT, private)), \
  196. "d" (_val), "r" (__to) \
  197. : "memory", "cc", "r11", "cx"); \
  198. __status; \
  199. })
  200. #define lll_futex_wake(futex, nr, private) \
  201. do { \
  202. int __ignore; \
  203. register __typeof (nr) _nr __asm__ ("edx") = (nr); \
  204. __asm__ __volatile__ ("syscall" \
  205. : "=a" (__ignore) \
  206. : "0" (SYS_futex), "D" (futex), \
  207. "S" (__lll_private_flag (FUTEX_WAKE, private)), \
  208. "d" (_nr) \
  209. : "memory", "cc", "r10", "r11", "cx"); \
  210. } while (0)
  211. /* NB: in the lll_trylock macro we simply return the value in %eax
  212. after the cmpxchg instruction. In case the operation succeded this
  213. value is zero. In case the operation failed, the cmpxchg instruction
  214. has loaded the current value of the memory work which is guaranteed
  215. to be nonzero. */
  216. #if defined NOT_IN_libc || defined UP
  217. # define __lll_trylock_asm LOCK_INSTR "cmpxchgl %2, %1"
  218. #else
  219. # define __lll_trylock_asm "cmpl $0, __libc_multiple_threads(%%rip)\n\t" \
  220. "je 0f\n\t" \
  221. "lock; cmpxchgl %2, %1\n\t" \
  222. "jmp 1f\n\t" \
  223. "0:\tcmpxchgl %2, %1\n\t" \
  224. "1:"
  225. #endif
  226. #define lll_trylock(futex) \
  227. ({ int ret; \
  228. __asm__ __volatile__ (__lll_trylock_asm \
  229. : "=a" (ret), "=m" (futex) \
  230. : "r" (LLL_LOCK_INITIALIZER_LOCKED), "m" (futex), \
  231. "0" (LLL_LOCK_INITIALIZER) \
  232. : "memory"); \
  233. ret; })
  234. #define lll_robust_trylock(futex, id) \
  235. ({ int ret; \
  236. __asm__ __volatile__ (LOCK_INSTR "cmpxchgl %2, %1" \
  237. : "=a" (ret), "=m" (futex) \
  238. : "r" (id), "m" (futex), "0" (LLL_LOCK_INITIALIZER) \
  239. : "memory"); \
  240. ret; })
  241. #define lll_cond_trylock(futex) \
  242. ({ int ret; \
  243. __asm__ __volatile__ (LOCK_INSTR "cmpxchgl %2, %1" \
  244. : "=a" (ret), "=m" (futex) \
  245. : "r" (LLL_LOCK_INITIALIZER_WAITERS), \
  246. "m" (futex), "0" (LLL_LOCK_INITIALIZER) \
  247. : "memory"); \
  248. ret; })
  249. #if defined NOT_IN_libc || defined UP
  250. # define __lll_lock_asm_start LOCK_INSTR "cmpxchgl %4, %2\n\t" \
  251. "jnz 1f\n\t"
  252. #else
  253. # define __lll_lock_asm_start "cmpl $0, __libc_multiple_threads(%%rip)\n\t" \
  254. "je 0f\n\t" \
  255. "lock; cmpxchgl %4, %2\n\t" \
  256. "jnz 1f\n\t" \
  257. "jmp 24f\n" \
  258. "0:\tcmpxchgl %4, %2\n\t" \
  259. "jnz 1f\n\t"
  260. #endif
  261. #define lll_lock(futex, private) \
  262. (void) \
  263. ({ int ignore1, ignore2, ignore3; \
  264. if (__builtin_constant_p (private) && (private) == LLL_PRIVATE) \
  265. __asm__ __volatile__ (__lll_lock_asm_start \
  266. ".subsection 1\n\t" \
  267. ".type _L_lock_%=, @function\n" \
  268. "_L_lock_%=:\n" \
  269. "1:\tleaq %2, %%rdi\n" \
  270. "2:\tsubq $128, %%rsp\n" \
  271. "3:\tcallq __lll_lock_wait_private\n" \
  272. "4:\taddq $128, %%rsp\n" \
  273. "5:\tjmp 24f\n" \
  274. "6:\t.size _L_lock_%=, 6b-1b\n\t" \
  275. ".previous\n" \
  276. LLL_STUB_UNWIND_INFO_5 \
  277. "24:" \
  278. : "=S" (ignore1), "=&D" (ignore2), "=m" (futex), \
  279. "=a" (ignore3) \
  280. : "0" (1), "m" (futex), "3" (0) \
  281. : "cx", "r11", "cc", "memory"); \
  282. else \
  283. __asm__ __volatile__ (__lll_lock_asm_start \
  284. ".subsection 1\n\t" \
  285. ".type _L_lock_%=, @function\n" \
  286. "_L_lock_%=:\n" \
  287. "1:\tleaq %2, %%rdi\n" \
  288. "2:\tsubq $128, %%rsp\n" \
  289. "3:\tcallq __lll_lock_wait\n" \
  290. "4:\taddq $128, %%rsp\n" \
  291. "5:\tjmp 24f\n" \
  292. "6:\t.size _L_lock_%=, 6b-1b\n\t" \
  293. ".previous\n" \
  294. LLL_STUB_UNWIND_INFO_5 \
  295. "24:" \
  296. : "=S" (ignore1), "=D" (ignore2), "=m" (futex), \
  297. "=a" (ignore3) \
  298. : "1" (1), "m" (futex), "3" (0), "0" (private) \
  299. : "cx", "r11", "cc", "memory"); \
  300. }) \
  301. #define lll_robust_lock(futex, id, private) \
  302. ({ int __ret, ignore1, ignore2; \
  303. __asm__ __volatile__ (LOCK_INSTR "cmpxchgl %4, %2\n\t" \
  304. "jnz 1f\n\t" \
  305. ".subsection 1\n\t" \
  306. ".type _L_robust_lock_%=, @function\n" \
  307. "_L_robust_lock_%=:\n" \
  308. "1:\tleaq %2, %%rdi\n" \
  309. "2:\tsubq $128, %%rsp\n" \
  310. "3:\tcallq __lll_robust_lock_wait\n" \
  311. "4:\taddq $128, %%rsp\n" \
  312. "5:\tjmp 24f\n" \
  313. "6:\t.size _L_robust_lock_%=, 6b-1b\n\t" \
  314. ".previous\n" \
  315. LLL_STUB_UNWIND_INFO_5 \
  316. "24:" \
  317. : "=S" (ignore1), "=D" (ignore2), "=m" (futex), \
  318. "=a" (__ret) \
  319. : "1" (id), "m" (futex), "3" (0), "0" (private) \
  320. : "cx", "r11", "cc", "memory"); \
  321. __ret; })
  322. #define lll_cond_lock(futex, private) \
  323. (void) \
  324. ({ int ignore1, ignore2, ignore3; \
  325. __asm__ __volatile__ (LOCK_INSTR "cmpxchgl %4, %2\n\t" \
  326. "jnz 1f\n\t" \
  327. ".subsection 1\n\t" \
  328. ".type _L_cond_lock_%=, @function\n" \
  329. "_L_cond_lock_%=:\n" \
  330. "1:\tleaq %2, %%rdi\n" \
  331. "2:\tsubq $128, %%rsp\n" \
  332. "3:\tcallq __lll_lock_wait\n" \
  333. "4:\taddq $128, %%rsp\n" \
  334. "5:\tjmp 24f\n" \
  335. "6:\t.size _L_cond_lock_%=, 6b-1b\n\t" \
  336. ".previous\n" \
  337. LLL_STUB_UNWIND_INFO_5 \
  338. "24:" \
  339. : "=S" (ignore1), "=D" (ignore2), "=m" (futex), \
  340. "=a" (ignore3) \
  341. : "1" (2), "m" (futex), "3" (0), "0" (private) \
  342. : "cx", "r11", "cc", "memory"); \
  343. })
  344. #define lll_robust_cond_lock(futex, id, private) \
  345. ({ int __ret, ignore1, ignore2; \
  346. __asm__ __volatile__ (LOCK_INSTR "cmpxchgl %4, %2\n\t" \
  347. "jnz 1f\n\t" \
  348. ".subsection 1\n\t" \
  349. ".type _L_robust_cond_lock_%=, @function\n" \
  350. "_L_robust_cond_lock_%=:\n" \
  351. "1:\tleaq %2, %%rdi\n" \
  352. "2:\tsubq $128, %%rsp\n" \
  353. "3:\tcallq __lll_robust_lock_wait\n" \
  354. "4:\taddq $128, %%rsp\n" \
  355. "5:\tjmp 24f\n" \
  356. "6:\t.size _L_robust_cond_lock_%=, 6b-1b\n\t" \
  357. ".previous\n" \
  358. LLL_STUB_UNWIND_INFO_5 \
  359. "24:" \
  360. : "=S" (ignore1), "=D" (ignore2), "=m" (futex), \
  361. "=a" (__ret) \
  362. : "1" (id | FUTEX_WAITERS), "m" (futex), "3" (0), \
  363. "0" (private) \
  364. : "cx", "r11", "cc", "memory"); \
  365. __ret; })
  366. #define lll_timedlock(futex, timeout, private) \
  367. ({ int __ret, ignore1, ignore2, ignore3; \
  368. __asm__ __volatile__ (LOCK_INSTR "cmpxchgl %1, %4\n\t" \
  369. "jnz 1f\n\t" \
  370. ".subsection 1\n\t" \
  371. ".type _L_timedlock_%=, @function\n" \
  372. "_L_timedlock_%=:\n" \
  373. "1:\tleaq %4, %%rdi\n" \
  374. "0:\tmovq %8, %%rdx\n" \
  375. "2:\tsubq $128, %%rsp\n" \
  376. "3:\tcallq __lll_timedlock_wait\n" \
  377. "4:\taddq $128, %%rsp\n" \
  378. "5:\tjmp 24f\n" \
  379. "6:\t.size _L_timedlock_%=, 6b-1b\n\t" \
  380. ".previous\n" \
  381. LLL_STUB_UNWIND_INFO_6 \
  382. "24:" \
  383. : "=a" (__ret), "=D" (ignore1), "=S" (ignore2), \
  384. "=&d" (ignore3), "=m" (futex) \
  385. : "0" (0), "1" (1), "m" (futex), "m" (timeout), \
  386. "2" (private) \
  387. : "memory", "cx", "cc", "r10", "r11"); \
  388. __ret; })
  389. #define lll_robust_timedlock(futex, timeout, id, private) \
  390. ({ int __ret, ignore1, ignore2, ignore3; \
  391. __asm__ __volatile__ (LOCK_INSTR "cmpxchgl %1, %4\n\t" \
  392. "jnz 1f\n\t" \
  393. ".subsection 1\n\t" \
  394. ".type _L_robust_timedlock_%=, @function\n" \
  395. "_L_robust_timedlock_%=:\n" \
  396. "1:\tleaq %4, %%rdi\n" \
  397. "0:\tmovq %8, %%rdx\n" \
  398. "2:\tsubq $128, %%rsp\n" \
  399. "3:\tcallq __lll_robust_timedlock_wait\n" \
  400. "4:\taddq $128, %%rsp\n" \
  401. "5:\tjmp 24f\n" \
  402. "6:\t.size _L_robust_timedlock_%=, 6b-1b\n\t" \
  403. ".previous\n" \
  404. LLL_STUB_UNWIND_INFO_6 \
  405. "24:" \
  406. : "=a" (__ret), "=D" (ignore1), "=S" (ignore2), \
  407. "=&d" (ignore3), "=m" (futex) \
  408. : "0" (0), "1" (id), "m" (futex), "m" (timeout), \
  409. "2" (private) \
  410. : "memory", "cx", "cc", "r10", "r11"); \
  411. __ret; })
  412. #if defined NOT_IN_libc || defined UP
  413. # define __lll_unlock_asm_start LOCK_INSTR "decl %0\n\t" \
  414. "jne 1f\n\t"
  415. #else
  416. # define __lll_unlock_asm_start "cmpl $0, __libc_multiple_threads(%%rip)\n\t" \
  417. "je 0f\n\t" \
  418. "lock; decl %0\n\t" \
  419. "jne 1f\n\t" \
  420. "jmp 24f\n\t" \
  421. "0:\tdecl %0\n\t" \
  422. "jne 1f\n\t"
  423. #endif
  424. #define lll_unlock(futex, private) \
  425. (void) \
  426. ({ int ignore; \
  427. if (__builtin_constant_p (private) && (private) == LLL_PRIVATE) \
  428. __asm__ __volatile__ (__lll_unlock_asm_start \
  429. ".subsection 1\n\t" \
  430. ".type _L_unlock_%=, @function\n" \
  431. "_L_unlock_%=:\n" \
  432. "1:\tleaq %0, %%rdi\n" \
  433. "2:\tsubq $128, %%rsp\n" \
  434. "3:\tcallq __lll_unlock_wake_private\n" \
  435. "4:\taddq $128, %%rsp\n" \
  436. "5:\tjmp 24f\n" \
  437. "6:\t.size _L_unlock_%=, 6b-1b\n\t" \
  438. ".previous\n" \
  439. LLL_STUB_UNWIND_INFO_5 \
  440. "24:" \
  441. : "=m" (futex), "=&D" (ignore) \
  442. : "m" (futex) \
  443. : "ax", "cx", "r11", "cc", "memory"); \
  444. else \
  445. __asm__ __volatile__ (__lll_unlock_asm_start \
  446. ".subsection 1\n\t" \
  447. ".type _L_unlock_%=, @function\n" \
  448. "_L_unlock_%=:\n" \
  449. "1:\tleaq %0, %%rdi\n" \
  450. "2:\tsubq $128, %%rsp\n" \
  451. "3:\tcallq __lll_unlock_wake\n" \
  452. "4:\taddq $128, %%rsp\n" \
  453. "5:\tjmp 24f\n" \
  454. "6:\t.size _L_unlock_%=, 6b-1b\n\t" \
  455. ".previous\n" \
  456. LLL_STUB_UNWIND_INFO_5 \
  457. "24:" \
  458. : "=m" (futex), "=&D" (ignore) \
  459. : "m" (futex), "S" (private) \
  460. : "ax", "cx", "r11", "cc", "memory"); \
  461. })
  462. #define lll_robust_unlock(futex, private) \
  463. do \
  464. { \
  465. int ignore; \
  466. __asm__ __volatile__ (LOCK_INSTR "andl %2, %0\n\t" \
  467. "jne 1f\n\t" \
  468. ".subsection 1\n\t" \
  469. ".type _L_robust_unlock_%=, @function\n" \
  470. "_L_robust_unlock_%=:\n" \
  471. "1:\tleaq %0, %%rdi\n" \
  472. "2:\tsubq $128, %%rsp\n" \
  473. "3:\tcallq __lll_unlock_wake\n" \
  474. "4:\taddq $128, %%rsp\n" \
  475. "5:\tjmp 24f\n" \
  476. "6:\t.size _L_robust_unlock_%=, 6b-1b\n\t" \
  477. ".previous\n" \
  478. LLL_STUB_UNWIND_INFO_5 \
  479. "24:" \
  480. : "=m" (futex), "=&D" (ignore) \
  481. : "i" (FUTEX_WAITERS), "m" (futex), \
  482. "S" (private) \
  483. : "ax", "cx", "r11", "cc", "memory"); \
  484. } \
  485. while (0)
  486. #define lll_robust_dead(futex, private) \
  487. do \
  488. { \
  489. int ignore; \
  490. __asm__ __volatile__ (LOCK_INSTR "orl %3, (%2)\n\t" \
  491. "syscall" \
  492. : "=m" (futex), "=a" (ignore) \
  493. : "D" (&(futex)), "i" (FUTEX_OWNER_DIED), \
  494. "S" (__lll_private_flag (FUTEX_WAKE, private)), \
  495. "1" (__NR_futex), "d" (1) \
  496. : "cx", "r11", "cc", "memory"); \
  497. } \
  498. while (0)
  499. /* Returns non-zero if error happened, zero if success. */
  500. #define lll_futex_requeue(ftx, nr_wake, nr_move, mutex, val, private) \
  501. ({ int __res; \
  502. register int __nr_move __asm__ ("r10") = nr_move; \
  503. register void *__mutex __asm__ ("r8") = mutex; \
  504. register int __val __asm__ ("r9") = val; \
  505. __asm__ __volatile__ ("syscall" \
  506. : "=a" (__res) \
  507. : "0" (__NR_futex), "D" ((void *) ftx), \
  508. "S" (__lll_private_flag (FUTEX_CMP_REQUEUE, \
  509. private)), "d" (nr_wake), \
  510. "r" (__nr_move), "r" (__mutex), "r" (__val) \
  511. : "cx", "r11", "cc", "memory"); \
  512. __res < 0; })
  513. #define lll_islocked(futex) \
  514. (futex != LLL_LOCK_INITIALIZER)
  515. /* The kernel notifies a process with uses CLONE_CLEARTID via futex
  516. wakeup when the clone terminates. The memory location contains the
  517. thread ID while the clone is running and is reset to zero
  518. afterwards.
  519. The macro parameter must not have any side effect. */
  520. #define lll_wait_tid(tid) \
  521. do { \
  522. int __ignore; \
  523. register __typeof (tid) _tid __asm__ ("edx") = (tid); \
  524. if (_tid != 0) \
  525. __asm__ __volatile__ ("xorq %%r10, %%r10\n\t" \
  526. "1:\tmovq %2, %%rax\n\t" \
  527. "syscall\n\t" \
  528. "cmpl $0, (%%rdi)\n\t" \
  529. "jne 1b" \
  530. : "=&a" (__ignore) \
  531. : "S" (FUTEX_WAIT), "i" (SYS_futex), "D" (&tid), \
  532. "d" (_tid) \
  533. : "memory", "cc", "r10", "r11", "cx"); \
  534. } while (0)
  535. extern int __lll_timedwait_tid (int *tid, const struct timespec *abstime)
  536. attribute_hidden;
  537. #define lll_timedwait_tid(tid, abstime) \
  538. ({ \
  539. int __ret = 0; \
  540. if (tid != 0) \
  541. { \
  542. if (abstime->tv_nsec < 0 || abstime->tv_nsec >= 1000000000) \
  543. __ret = EINVAL; \
  544. else \
  545. __ret = __lll_timedwait_tid (&tid, abstime); \
  546. } \
  547. __ret; })
  548. #endif /* !__ASSEMBLER__ */
  549. #endif /* lowlevellock.h */