/* lowlevellock.h */
  1. /* Copyright (C) 2002-2004, 2006-2008, 2009 Free Software Foundation, Inc.
  2. This file is part of the GNU C Library.
  3. Contributed by Ulrich Drepper <drepper@redhat.com>, 2002.
  4. The GNU C Library is free software; you can redistribute it and/or
  5. modify it under the terms of the GNU Lesser General Public
  6. License as published by the Free Software Foundation; either
  7. version 2.1 of the License, or (at your option) any later version.
  8. The GNU C Library is distributed in the hope that it will be useful,
  9. but WITHOUT ANY WARRANTY; without even the implied warranty of
  10. MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
  11. Lesser General Public License for more details.
  12. You should have received a copy of the GNU Lesser General Public
  13. License along with the GNU C Library; if not, write to the Free
  14. Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
  15. 02111-1307 USA. */
#ifndef _LOWLEVELLOCK_H
#define _LOWLEVELLOCK_H 1

#ifndef __ASSEMBLER__
# include <time.h>
# include <sys/param.h>
# include <bits/pthreadtypes.h>
# include <bits/kernel-features.h>
# include <tcb-offsets.h>

/* LOCK_INSTR is spliced into the inline-asm strings below; on
   uniprocessor (UP) builds the bus-locking prefix is unnecessary
   and is left empty.  */
# ifndef LOCK_INSTR
#  ifdef UP
#   define LOCK_INSTR	/* nothing */
#  else
#   define LOCK_INSTR "lock;"
#  endif
# endif
#else
/* Assembler-source version of the same conditional prefix.  */
# ifndef LOCK
#  ifdef UP
#   define LOCK
#  else
#   define LOCK lock
#  endif
# endif
#endif

/* Operation codes for the futex system call.  */
#define FUTEX_WAIT		0
#define FUTEX_WAKE		1
#define FUTEX_CMP_REQUEUE	4
#define FUTEX_WAKE_OP		5
#define FUTEX_LOCK_PI		6
#define FUTEX_UNLOCK_PI		7
#define FUTEX_TRYLOCK_PI	8
#define FUTEX_WAIT_BITSET	9
#define FUTEX_WAKE_BITSET	10
#define FUTEX_WAIT_REQUEUE_PI	11
#define FUTEX_CMP_REQUEUE_PI	12
/* Flags that are ORed into the operation code.  */
#define FUTEX_PRIVATE_FLAG	128
#define FUTEX_CLOCK_REALTIME	256

#define FUTEX_BITSET_MATCH_ANY	0xffffffff

#define FUTEX_OP_CLEAR_WAKE_IF_GT_ONE	((4 << 24) | 1)

/* Values for 'private' parameter of locking macros.  Yes, the
   definition seems to be backwards.  But it is not.  The bit will be
   reversed before passing to the system call.  */
#define LLL_PRIVATE	0
#define LLL_SHARED	FUTEX_PRIVATE_FLAG
#ifndef __ASSEMBLER__

/* __lll_private_flag (FL, PRIVATE) folds the process-shared/private
   information in PRIVATE (LLL_PRIVATE or LLL_SHARED) into the futex
   operation code FL before it is passed to the kernel.  */
#if !defined NOT_IN_libc || defined IS_IN_rtld
/* In libc.so or ld.so all futexes are private.  */
# ifdef __ASSUME_PRIVATE_FUTEX
/* The kernel is assumed to support private futexes, so the flag can
   simply be ORed in.  */
#  define __lll_private_flag(fl, private) \
  ((fl) | FUTEX_PRIVATE_FLAG)
# else
/* Kernel support was probed at startup and cached in the TCB's
   header.private_futex word; pick the flag up from there.  */
#  define __lll_private_flag(fl, private) \
  ((fl) | THREAD_GETMEM (THREAD_SELF, header.private_futex))
# endif
#else
# ifdef __ASSUME_PRIVATE_FUTEX
/* PRIVATE is 0 (LLL_PRIVATE) or FUTEX_PRIVATE_FLAG (LLL_SHARED); the
   XOR reverses the bit, matching the note above LLL_PRIVATE.  */
#  define __lll_private_flag(fl, private) \
  (((fl) | FUTEX_PRIVATE_FLAG) ^ (private))
# else
/* For a compile-time-constant PRIVATE the decision is made here;
   otherwise the TCB's private_futex word (reached through the %fs
   thread pointer) is masked in at run time.  */
#  define __lll_private_flag(fl, private) \
  (__builtin_constant_p (private)					      \
   ? ((private) == 0							      \
      ? ((fl) | THREAD_GETMEM (THREAD_SELF, header.private_futex))	      \
      : (fl))								      \
   : ({ unsigned int __fl = ((private) ^ FUTEX_PRIVATE_FLAG);		      \
	__asm__ ("andl %%fs:%P1, %0" : "+r" (__fl)			      \
		 : "i" (offsetof (struct pthread, header.private_futex)));    \
	__fl | (fl); }))
# endif
#endif

/* Initializer for lock: unlocked (0), locked with no waiters (1),
   locked with possible waiters (2).  */
#define LLL_LOCK_INITIALIZER		(0)
#define LLL_LOCK_INITIALIZER_LOCKED	(1)
#define LLL_LOCK_INITIALIZER_WAITERS	(2)

/* Delay in spinlock loop ("rep; nop" is the PAUSE encoding).  */
#define BUSY_WAIT_NOP	__asm__ ("rep; nop")
/* The _L_*_%= stubs emitted by the lock/unlock macros below live in a
   subsection and are entered via a conditional jump, so the compiler
   cannot describe them with normal CFI.  These macros hand-assemble a
   DWARF CIE/FDE pair in .eh_frame covering the stub code; the numeric
   local labels (1: through 6:, plus 0: in the six-insn variant) are
   defined inside the lock macros' asm strings.  */
#define LLL_STUB_UNWIND_INFO_START \
	".section .eh_frame,\"a\",@progbits\n" \
"7:\t" ".long 9f-8f # Length of Common Information Entry\n" \
"8:\t" ".long 0x0 # CIE Identifier Tag\n\t" \
	".byte 0x1 # CIE Version\n\t" \
	".ascii \"zR\\0\" # CIE Augmentation\n\t" \
	".uleb128 0x1 # CIE Code Alignment Factor\n\t" \
	".sleb128 -8 # CIE Data Alignment Factor\n\t" \
	".byte 0x10 # CIE RA Column\n\t" \
	".uleb128 0x1 # Augmentation size\n\t" \
	".byte 0x1b # FDE Encoding (pcrel sdata4)\n\t" \
	".byte 0x12 # DW_CFA_def_cfa_sf\n\t" \
	".uleb128 0x7\n\t" \
	".sleb128 16\n\t" \
	".align 8\n" \
"9:\t" ".long 23f-10f # FDE Length\n" \
"10:\t" ".long 10b-7b # FDE CIE offset\n\t" \
	".long 1b-. # FDE initial location\n\t" \
	".long 6b-1b # FDE address range\n\t" \
	".uleb128 0x0 # Augmentation size\n\t" \
	".byte 0x16 # DW_CFA_val_expression\n\t" \
	".uleb128 0x10\n\t" \
	".uleb128 12f-11f\n" \
"11:\t" ".byte 0x80 # DW_OP_breg16\n\t" \
	".sleb128 4b-1b\n"

/* Continuation of the FDE: tracks the CFA offset across the
   subq/callq/addq sequence of the stub.  */
#define LLL_STUB_UNWIND_INFO_END \
	".byte 0x16 # DW_CFA_val_expression\n\t" \
	".uleb128 0x10\n\t" \
	".uleb128 14f-13f\n" \
"13:\t" ".byte 0x80 # DW_OP_breg16\n\t" \
	".sleb128 4b-2b\n" \
"14:\t" ".byte 0x40 + (3b-2b) # DW_CFA_advance_loc\n\t" \
	".byte 0x0e # DW_CFA_def_cfa_offset\n\t" \
	".uleb128 0\n\t" \
	".byte 0x16 # DW_CFA_val_expression\n\t" \
	".uleb128 0x10\n\t" \
	".uleb128 16f-15f\n" \
"15:\t" ".byte 0x80 # DW_OP_breg16\n\t" \
	".sleb128 4b-3b\n" \
"16:\t" ".byte 0x40 + (4b-3b-1) # DW_CFA_advance_loc\n\t" \
	".byte 0x0e # DW_CFA_def_cfa_offset\n\t" \
	".uleb128 128\n\t" \
	".byte 0x16 # DW_CFA_val_expression\n\t" \
	".uleb128 0x10\n\t" \
	".uleb128 20f-17f\n" \
"17:\t" ".byte 0x80 # DW_OP_breg16\n\t" \
	".sleb128 19f-18f\n\t" \
	".byte 0x0d # DW_OP_const4s\n" \
"18:\t" ".4byte 4b-.\n\t" \
	".byte 0x1c # DW_OP_minus\n\t" \
	".byte 0x0d # DW_OP_const4s\n" \
"19:\t" ".4byte 24f-.\n\t" \
	".byte 0x22 # DW_OP_plus\n" \
"20:\t" ".byte 0x40 + (5b-4b+1) # DW_CFA_advance_loc\n\t" \
	".byte 0x13 # DW_CFA_def_cfa_offset_sf\n\t" \
	".sleb128 16\n\t" \
	".byte 0x16 # DW_CFA_val_expression\n\t" \
	".uleb128 0x10\n\t" \
	".uleb128 22f-21f\n" \
"21:\t" ".byte 0x80 # DW_OP_breg16\n\t" \
	".sleb128 4b-5b\n" \
"22:\t" ".align 8\n" \
"23:\t" ".previous\n"

/* Unwind info for
   1: leaq ..., %rdi
   2: subq $128, %rsp
   3: callq ...
   4: addq $128, %rsp
   5: jmp 24f
   6:
   snippet.  */
#define LLL_STUB_UNWIND_INFO_5 \
LLL_STUB_UNWIND_INFO_START \
"12:\t" ".byte 0x40 + (2b-1b) # DW_CFA_advance_loc\n\t" \
LLL_STUB_UNWIND_INFO_END

/* Unwind info for
   1: leaq ..., %rdi
   0: movq ..., %rdx
   2: subq $128, %rsp
   3: callq ...
   4: addq $128, %rsp
   5: jmp 24f
   6:
   snippet.  */
#define LLL_STUB_UNWIND_INFO_6 \
LLL_STUB_UNWIND_INFO_START \
"12:\t" ".byte 0x40 + (0b-1b) # DW_CFA_advance_loc\n\t" \
	".byte 0x16 # DW_CFA_val_expression\n\t" \
	".uleb128 0x10\n\t" \
	".uleb128 26f-25f\n" \
"25:\t" ".byte 0x80 # DW_OP_breg16\n\t" \
	".sleb128 4b-0b\n" \
"26:\t" ".byte 0x40 + (2b-0b) # DW_CFA_advance_loc\n\t" \
LLL_STUB_UNWIND_INFO_END
/* Wait on FUTEX as long as it still contains VAL (no timeout).
   Evaluates to the raw syscall result in %rax.  */
#define lll_futex_wait(futex, val, private) \
  lll_futex_timed_wait(futex, val, NULL, private)

/* As lll_futex_wait, but with a TIMEOUT (passed to the kernel in %r10;
   NULL means wait forever).  The syscall instruction clobbers %rcx and
   %r11, hence the clobber list.  */
#define lll_futex_timed_wait(futex, val, timeout, private) \
  ({									      \
    register const struct timespec *__to __asm__ ("r10") = timeout;	      \
    int __status;							      \
    register __typeof (val) _val __asm__ ("edx") = (val);		      \
    __asm__ __volatile__ ("syscall"					      \
			  : "=a" (__status)				      \
			  : "0" (SYS_futex), "D" (futex),		      \
			    "S" (__lll_private_flag (FUTEX_WAIT, private)),   \
			    "d" (_val), "r" (__to)			      \
			  : "memory", "cc", "r11", "cx");		      \
    __status;								      \
  })

/* Wake up to NR threads blocked on FUTEX.  The syscall result is
   deliberately discarded.  */
#define lll_futex_wake(futex, nr, private) \
  do {									      \
    int __ignore;							      \
    register __typeof (nr) _nr __asm__ ("edx") = (nr);			      \
    __asm__ __volatile__ ("syscall"					      \
			  : "=a" (__ignore)				      \
			  : "0" (SYS_futex), "D" (futex),		      \
			    "S" (__lll_private_flag (FUTEX_WAKE, private)),   \
			    "d" (_nr)					      \
			  : "memory", "cc", "r10", "r11", "cx");	      \
  } while (0)
/* NB: in the lll_trylock macro we simply return the value in %eax
   after the cmpxchg instruction.  In case the operation succeeded this
   value is zero.  In case the operation failed, the cmpxchg instruction
   has loaded the current value of the memory word which is guaranteed
   to be nonzero.  */
#if defined NOT_IN_libc || defined UP
# define __lll_trylock_asm LOCK_INSTR "cmpxchgl %2, %1"
#else
/* Inside libc on SMP builds the costly `lock' prefix is skipped while
   __libc_multiple_threads says the process is still single-threaded.  */
# define __lll_trylock_asm "cmpl $0, __libc_multiple_threads(%%rip)\n\t"      \
			   "je 0f\n\t"					      \
			   "lock; cmpxchgl %2, %1\n\t"			      \
			   "jmp 1f\n\t"					      \
			   "0:\tcmpxchgl %2, %1\n\t"			      \
			   "1:"
#endif

/* Try to change FUTEX from 0 (unlocked) to 1 (locked) without
   blocking; evaluates to zero on success, nonzero otherwise.  */
#define lll_trylock(futex) \
  ({ int ret;								      \
     __asm__ __volatile__ (__lll_trylock_asm				      \
			   : "=a" (ret), "=m" (futex)			      \
			   : "r" (LLL_LOCK_INITIALIZER_LOCKED), "m" (futex),  \
			     "0" (LLL_LOCK_INITIALIZER)			      \
			   : "memory");					      \
     ret; })

/* Robust-mutex variant: the owner's ID, not 1, is stored on success.  */
#define lll_robust_trylock(futex, id) \
  ({ int ret;								      \
     __asm__ __volatile__ (LOCK_INSTR "cmpxchgl %2, %1"			      \
			   : "=a" (ret), "=m" (futex)			      \
			   : "r" (id), "m" (futex), "0" (LLL_LOCK_INITIALIZER)\
			   : "memory");					      \
     ret; })

/* Try to take FUTEX, setting it straight to the "locked with waiters"
   state (2) instead of 1.  */
#define lll_cond_trylock(futex) \
  ({ int ret;								      \
     __asm__ __volatile__ (LOCK_INSTR "cmpxchgl %2, %1"			      \
			   : "=a" (ret), "=m" (futex)			      \
			   : "r" (LLL_LOCK_INITIALIZER_WAITERS),	      \
			     "m" (futex), "0" (LLL_LOCK_INITIALIZER)	      \
			   : "memory");					      \
     ret; })
/* Fast-path prologue of lll_lock: cmpxchg 0 -> 1 and fall through on
   success, jump to local label 1 (the slow-path stub) on failure.  */
#if defined NOT_IN_libc || defined UP
# define __lll_lock_asm_start LOCK_INSTR "cmpxchgl %4, %2\n\t"		      \
			      "jnz 1f\n\t"
#else
/* Inside libc on SMP builds, skip the `lock' prefix while the process
   is still single-threaded (see __lll_trylock_asm above).  */
# define __lll_lock_asm_start "cmpl $0, __libc_multiple_threads(%%rip)\n\t"   \
			      "je 0f\n\t"				      \
			      "lock; cmpxchgl %4, %2\n\t"		      \
			      "jnz 1f\n\t"				      \
			      "jmp 24f\n"				      \
			      "0:\tcmpxchgl %4, %2\n\t"			      \
			      "jnz 1f\n\t"
#endif
  262. #define lll_lock(futex, private) \
  263. (void) \
  264. ({ int ignore1, ignore2, ignore3; \
  265. if (__builtin_constant_p (private) && (private) == LLL_PRIVATE) \
  266. __asm__ __volatile__ (__lll_lock_asm_start \
  267. ".subsection 1\n\t" \
  268. ".type _L_lock_%=, @function\n" \
  269. "_L_lock_%=:\n" \
  270. "1:\tleaq %2, %%rdi\n" \
  271. "2:\tsubq $128, %%rsp\n" \
  272. "3:\tcallq __lll_lock_wait_private\n" \
  273. "4:\taddq $128, %%rsp\n" \
  274. "5:\tjmp 24f\n" \
  275. "6:\t.size _L_lock_%=, 6b-1b\n\t" \
  276. ".previous\n" \
  277. LLL_STUB_UNWIND_INFO_5 \
  278. "24:" \
  279. : "=S" (ignore1), "=&D" (ignore2), "=m" (futex), \
  280. "=a" (ignore3) \
  281. : "0" (1), "m" (futex), "3" (0) \
  282. : "cx", "r11", "cc", "memory"); \
  283. else \
  284. __asm__ __volatile__ (__lll_lock_asm_start \
  285. ".subsection 1\n\t" \
  286. ".type _L_lock_%=, @function\n" \
  287. "_L_lock_%=:\n" \
  288. "1:\tleaq %2, %%rdi\n" \
  289. "2:\tsubq $128, %%rsp\n" \
  290. "3:\tcallq __lll_lock_wait\n" \
  291. "4:\taddq $128, %%rsp\n" \
  292. "5:\tjmp 24f\n" \
  293. "6:\t.size _L_lock_%=, 6b-1b\n\t" \
  294. ".previous\n" \
  295. LLL_STUB_UNWIND_INFO_5 \
  296. "24:" \
  297. : "=S" (ignore1), "=D" (ignore2), "=m" (futex), \
  298. "=a" (ignore3) \
  299. : "1" (1), "m" (futex), "3" (0), "0" (private) \
  300. : "cx", "r11", "cc", "memory"); \
  301. }) \
/* Acquire robust FUTEX, storing the caller's thread ID instead of 1.
   On contention the stub calls __lll_robust_lock_wait; the macro
   evaluates to 0 on success or that routine's error result.  */
#define lll_robust_lock(futex, id, private) \
  ({ int result, ignore1, ignore2;					      \
     __asm__ __volatile__ (LOCK_INSTR "cmpxchgl %4, %2\n\t"		      \
			   "jnz 1f\n\t"					      \
			   ".subsection 1\n\t"				      \
			   ".type _L_robust_lock_%=, @function\n"	      \
			   "_L_robust_lock_%=:\n"			      \
			   "1:\tleaq %2, %%rdi\n"			      \
			   "2:\tsubq $128, %%rsp\n"			      \
			   "3:\tcallq __lll_robust_lock_wait\n"		      \
			   "4:\taddq $128, %%rsp\n"			      \
			   "5:\tjmp 24f\n"				      \
			   "6:\t.size _L_robust_lock_%=, 6b-1b\n\t"	      \
			   ".previous\n"				      \
			   LLL_STUB_UNWIND_INFO_5			      \
			   "24:"					      \
			   : "=S" (ignore1), "=D" (ignore2), "=m" (futex),    \
			     "=a" (result)				      \
			   : "1" (id), "m" (futex), "3" (0), "0" (private)    \
			   : "cx", "r11", "cc", "memory");		      \
     result; })

/* Acquire FUTEX like lll_lock, but set it directly to the "locked with
   waiters" state 2 so the eventual unlock always wakes.  */
#define lll_cond_lock(futex, private) \
  (void)								      \
    ({ int ignore1, ignore2, ignore3;					      \
       __asm__ __volatile__ (LOCK_INSTR "cmpxchgl %4, %2\n\t"		      \
			     "jnz 1f\n\t"				      \
			     ".subsection 1\n\t"			      \
			     ".type _L_cond_lock_%=, @function\n"	      \
			     "_L_cond_lock_%=:\n"			      \
			     "1:\tleaq %2, %%rdi\n"			      \
			     "2:\tsubq $128, %%rsp\n"			      \
			     "3:\tcallq __lll_lock_wait\n"		      \
			     "4:\taddq $128, %%rsp\n"			      \
			     "5:\tjmp 24f\n"				      \
			     "6:\t.size _L_cond_lock_%=, 6b-1b\n\t"	      \
			     ".previous\n"				      \
			     LLL_STUB_UNWIND_INFO_5			      \
			     "24:"					      \
			     : "=S" (ignore1), "=D" (ignore2), "=m" (futex),  \
			       "=a" (ignore3)				      \
			     : "1" (2), "m" (futex), "3" (0), "0" (private)   \
			     : "cx", "r11", "cc", "memory");		      \
    })
/* Robust variant of lll_cond_lock: stores ID with the waiters bit
   already set.  (FUTEX_WAITERS is defined in another header —
   NOTE(review): presumably pthreadP.h; confirm.)  */
#define lll_robust_cond_lock(futex, id, private) \
  ({ int result, ignore1, ignore2;					      \
     __asm__ __volatile__ (LOCK_INSTR "cmpxchgl %4, %2\n\t"		      \
			   "jnz 1f\n\t"					      \
			   ".subsection 1\n\t"				      \
			   ".type _L_robust_cond_lock_%=, @function\n"	      \
			   "_L_robust_cond_lock_%=:\n"			      \
			   "1:\tleaq %2, %%rdi\n"			      \
			   "2:\tsubq $128, %%rsp\n"			      \
			   "3:\tcallq __lll_robust_lock_wait\n"		      \
			   "4:\taddq $128, %%rsp\n"			      \
			   "5:\tjmp 24f\n"				      \
			   "6:\t.size _L_robust_cond_lock_%=, 6b-1b\n\t"      \
			   ".previous\n"				      \
			   LLL_STUB_UNWIND_INFO_5			      \
			   "24:"					      \
			   : "=S" (ignore1), "=D" (ignore2), "=m" (futex),    \
			     "=a" (result)				      \
			   : "1" (id | FUTEX_WAITERS), "m" (futex), "3" (0),  \
			     "0" (private)				      \
			   : "cx", "r11", "cc", "memory");		      \
     result; })

/* Acquire FUTEX, giving up when TIMEOUT expires; the timeout pointer is
   handed to __lll_timedlock_wait in %rdx.  Evaluates to 0 on success or
   the wait routine's error result.  Uses the six-insn unwind info
   because of the extra movq at label 0.  */
#define lll_timedlock(futex, timeout, private) \
  ({ int result, ignore1, ignore2, ignore3;				      \
     __asm__ __volatile__ (LOCK_INSTR "cmpxchgl %1, %4\n\t"		      \
			   "jnz 1f\n\t"					      \
			   ".subsection 1\n\t"				      \
			   ".type _L_timedlock_%=, @function\n"		      \
			   "_L_timedlock_%=:\n"				      \
			   "1:\tleaq %4, %%rdi\n"			      \
			   "0:\tmovq %8, %%rdx\n"			      \
			   "2:\tsubq $128, %%rsp\n"			      \
			   "3:\tcallq __lll_timedlock_wait\n"		      \
			   "4:\taddq $128, %%rsp\n"			      \
			   "5:\tjmp 24f\n"				      \
			   "6:\t.size _L_timedlock_%=, 6b-1b\n\t"	      \
			   ".previous\n"				      \
			   LLL_STUB_UNWIND_INFO_6			      \
			   "24:"					      \
			   : "=a" (result), "=D" (ignore1), "=S" (ignore2),   \
			     "=&d" (ignore3), "=m" (futex)		      \
			   : "0" (0), "1" (1), "m" (futex), "m" (timeout),    \
			     "2" (private)				      \
			   : "memory", "cx", "cc", "r10", "r11");	      \
     result; })

/* Robust counterpart of lll_timedlock: stores ID and defers contention
   to __lll_robust_timedlock_wait.  */
#define lll_robust_timedlock(futex, timeout, id, private) \
  ({ int result, ignore1, ignore2, ignore3;				      \
     __asm__ __volatile__ (LOCK_INSTR "cmpxchgl %1, %4\n\t"		      \
			   "jnz 1f\n\t"					      \
			   ".subsection 1\n\t"				      \
			   ".type _L_robust_timedlock_%=, @function\n"	      \
			   "_L_robust_timedlock_%=:\n"			      \
			   "1:\tleaq %4, %%rdi\n"			      \
			   "0:\tmovq %8, %%rdx\n"			      \
			   "2:\tsubq $128, %%rsp\n"			      \
			   "3:\tcallq __lll_robust_timedlock_wait\n"	      \
			   "4:\taddq $128, %%rsp\n"			      \
			   "5:\tjmp 24f\n"				      \
			   "6:\t.size _L_robust_timedlock_%=, 6b-1b\n\t"      \
			   ".previous\n"				      \
			   LLL_STUB_UNWIND_INFO_6			      \
			   "24:"					      \
			   : "=a" (result), "=D" (ignore1), "=S" (ignore2),   \
			     "=&d" (ignore3), "=m" (futex)		      \
			   : "0" (0), "1" (id), "m" (futex), "m" (timeout),   \
			     "2" (private)				      \
			   : "memory", "cx", "cc", "r10", "r11");	      \
     result; })
/* Fast-path prologue of lll_unlock: atomically decrement the futex and
   jump to the wake-up stub (label 1) if the result is nonzero, i.e. the
   lock word was greater than 1 and waiters may exist.  */
#if defined NOT_IN_libc || defined UP
# define __lll_unlock_asm_start LOCK_INSTR "decl %0\n\t"		      \
				"jne 1f\n\t"
#else
/* Single-threaded fast path without the `lock' prefix, as in
   __lll_lock_asm_start.  */
# define __lll_unlock_asm_start "cmpl $0, __libc_multiple_threads(%%rip)\n\t" \
				"je 0f\n\t"				      \
				"lock; decl %0\n\t"			      \
				"jne 1f\n\t"				      \
				"jmp 24f\n\t"				      \
				"0:\tdecl %0\n\t"			      \
				"jne 1f\n\t"
#endif

/* Release FUTEX.  If the decremented value is nonzero, the out-of-line
   stub calls __lll_unlock_wake{,_private} with the futex address in
   %rdi to wake a waiter.  The constant-LLL_PRIVATE case uses the
   private wake routine and passes no flag.  */
#define lll_unlock(futex, private) \
  (void)								      \
    ({ int ignore;							      \
       if (__builtin_constant_p (private) && (private) == LLL_PRIVATE)	      \
	 __asm__ __volatile__ (__lll_unlock_asm_start			      \
			       ".subsection 1\n\t"			      \
			       ".type _L_unlock_%=, @function\n"	      \
			       "_L_unlock_%=:\n"			      \
			       "1:\tleaq %0, %%rdi\n"			      \
			       "2:\tsubq $128, %%rsp\n"			      \
			       "3:\tcallq __lll_unlock_wake_private\n"	      \
			       "4:\taddq $128, %%rsp\n"			      \
			       "5:\tjmp 24f\n"				      \
			       "6:\t.size _L_unlock_%=, 6b-1b\n\t"	      \
			       ".previous\n"				      \
			       LLL_STUB_UNWIND_INFO_5			      \
			       "24:"					      \
			       : "=m" (futex), "=&D" (ignore)		      \
			       : "m" (futex)				      \
			       : "ax", "cx", "r11", "cc", "memory");	      \
       else								      \
	 __asm__ __volatile__ (__lll_unlock_asm_start			      \
			       ".subsection 1\n\t"			      \
			       ".type _L_unlock_%=, @function\n"	      \
			       "_L_unlock_%=:\n"			      \
			       "1:\tleaq %0, %%rdi\n"			      \
			       "2:\tsubq $128, %%rsp\n"			      \
			       "3:\tcallq __lll_unlock_wake\n"		      \
			       "4:\taddq $128, %%rsp\n"			      \
			       "5:\tjmp 24f\n"				      \
			       "6:\t.size _L_unlock_%=, 6b-1b\n\t"	      \
			       ".previous\n"				      \
			       LLL_STUB_UNWIND_INFO_5			      \
			       "24:"					      \
			       : "=m" (futex), "=&D" (ignore)		      \
			       : "m" (futex), "S" (private)		      \
			       : "ax", "cx", "r11", "cc", "memory");	      \
    })
/* Release robust FUTEX: atomically AND the lock word with
   FUTEX_WAITERS (clearing the owner ID); if the waiters bit remains
   set, the stub calls __lll_unlock_wake to wake one of them.
   (FUTEX_WAITERS and FUTEX_OWNER_DIED are defined in another header —
   NOTE(review): presumably pthreadP.h; confirm.)  */
#define lll_robust_unlock(futex, private) \
  do									      \
    {									      \
      int ignore;							      \
      __asm__ __volatile__ (LOCK_INSTR "andl %2, %0\n\t"		      \
			    "jne 1f\n\t"				      \
			    ".subsection 1\n\t"				      \
			    ".type _L_robust_unlock_%=, @function\n"	      \
			    "_L_robust_unlock_%=:\n"			      \
			    "1:\tleaq %0, %%rdi\n"			      \
			    "2:\tsubq $128, %%rsp\n"			      \
			    "3:\tcallq __lll_unlock_wake\n"		      \
			    "4:\taddq $128, %%rsp\n"			      \
			    "5:\tjmp 24f\n"				      \
			    "6:\t.size _L_robust_unlock_%=, 6b-1b\n\t"	      \
			    ".previous\n"				      \
			    LLL_STUB_UNWIND_INFO_5			      \
			    "24:"					      \
			    : "=m" (futex), "=&D" (ignore)		      \
			    : "i" (FUTEX_WAITERS), "m" (futex),		      \
			      "S" (private)				      \
			    : "ax", "cx", "r11", "cc", "memory");	      \
    }									      \
  while (0)

/* Mark robust FUTEX as abandoned by a dead owner: atomically OR in
   FUTEX_OWNER_DIED, then issue a FUTEX_WAKE syscall waking one waiter
   (%edx = 1).  */
#define lll_robust_dead(futex, private) \
  do									      \
    {									      \
      int ignore;							      \
      __asm__ __volatile__ (LOCK_INSTR "orl %3, (%2)\n\t"		      \
			    "syscall"					      \
			    : "=m" (futex), "=a" (ignore)		      \
			    : "D" (&(futex)), "i" (FUTEX_OWNER_DIED),	      \
			      "S" (__lll_private_flag (FUTEX_WAKE, private)), \
			      "1" (__NR_futex), "d" (1)			      \
			    : "cx", "r11", "cc", "memory");		      \
    }									      \
  while (0)
/* Returns non-zero if error happened, zero if success.  Wakes up to
   NR_WAKE waiters on FTX and requeues up to NR_MOVE more onto MUTEX,
   provided FTX still contains VAL (FUTEX_CMP_REQUEUE); the extra
   arguments travel in %r10, %r8 and %r9 per the syscall ABI.  */
#define lll_futex_requeue(ftx, nr_wake, nr_move, mutex, val, private) \
  ({ int __res;								      \
     register int __nr_move __asm__ ("r10") = nr_move;			      \
     register void *__mutex __asm__ ("r8") = mutex;			      \
     register int __val __asm__ ("r9") = val;				      \
     __asm__ __volatile__ ("syscall"					      \
			   : "=a" (__res)				      \
			   : "0" (__NR_futex), "D" ((void *) ftx),	      \
			     "S" (__lll_private_flag (FUTEX_CMP_REQUEUE,      \
						      private)), "d" (nr_wake),\
			     "r" (__nr_move), "r" (__mutex), "r" (__val)      \
			   : "cx", "r11", "cc", "memory");		      \
     __res < 0; })
  514. #define lll_islocked(futex) \
  515. (futex != LLL_LOCK_INITIALIZER)
/* The kernel notifies a process which uses CLONE_CLEARTID via futex
   wakeup when the clone terminates.  The memory location contains the
   thread ID while the clone is running and is reset to zero
   afterwards.

   The macro parameter must not have any side effect.  */
#define lll_wait_tid(tid) \
  do {									      \
    int __ignore;							      \
    register __typeof (tid) _tid __asm__ ("edx") = (tid);		      \
    if (_tid != 0)							      \
      /* FUTEX_WAIT on &tid until the kernel clears it; re-issue the      \
	 syscall on spurious wakeups while *(&tid) is still nonzero.	      \
	 %r10 (timeout) is zeroed, i.e. wait without limit.  */		      \
      __asm__ __volatile__ ("xorq %%r10, %%r10\n\t"			      \
			    "1:\tmovq %2, %%rax\n\t"			      \
			    "syscall\n\t"				      \
			    "cmpl $0, (%%rdi)\n\t"			      \
			    "jne 1b"					      \
			    : "=&a" (__ignore)				      \
			    : "S" (FUTEX_WAIT), "i" (SYS_futex), "D" (&tid),  \
			      "d" (_tid)				      \
			    : "memory", "cc", "r10", "r11", "cx");	      \
  } while (0)

/* Out-of-line helper implementing the timed wait below.  */
extern int __lll_timedwait_tid (int *tid, const struct timespec *abstime)
     attribute_hidden;

/* As lll_wait_tid, but give up at the absolute time ABSTIME.  Evaluates
   to 0 on success, EINVAL for a malformed ABSTIME, or the helper's
   error result.  ABSTIME is evaluated more than once — the note above
   about side effects applies here too.  */
#define lll_timedwait_tid(tid, abstime) \
  ({									      \
    int __result = 0;							      \
    if (tid != 0)							      \
      {									      \
	if (abstime->tv_nsec < 0 || abstime->tv_nsec >= 1000000000)	      \
	  __result = EINVAL;						      \
	else								      \
	  __result = __lll_timedwait_tid (&tid, abstime);		      \
      }									      \
    __result; })

#endif	/* !__ASSEMBLER__ */
#endif	/* lowlevellock.h */