lowlevellock.h

/* Copyright (C) 2002-2004, 2006-2008, 2009 Free Software Foundation, Inc.
   This file is part of the GNU C Library.
   Contributed by Ulrich Drepper <drepper@redhat.com>, 2002.

   The GNU C Library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
   License as published by the Free Software Foundation; either
   version 2.1 of the License, or (at your option) any later version.

   The GNU C Library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with the GNU C Library; if not, write to the Free
   Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
   02111-1307 USA.  */
#ifndef _LOWLEVELLOCK_H
#define _LOWLEVELLOCK_H 1

#ifndef __ASSEMBLER__
# include <time.h>
# include <sys/param.h>
# include <bits/pthreadtypes.h>
# include <bits/kernel-features.h>
# include <tcb-offsets.h>
# include <atomic.h>

# ifndef LOCK_INSTR
#  ifdef UP
#   define LOCK_INSTR   /* nothing */
#  else
#   define LOCK_INSTR "lock;"
#  endif
# endif
#else
# ifndef LOCK
#  ifdef UP
#   define LOCK
#  else
#   define LOCK lock
#  endif
# endif
#endif
#define FUTEX_WAIT              0
#define FUTEX_WAKE              1
#define FUTEX_CMP_REQUEUE       4
#define FUTEX_WAKE_OP           5
#define FUTEX_LOCK_PI           6
#define FUTEX_UNLOCK_PI         7
#define FUTEX_TRYLOCK_PI        8
#define FUTEX_WAIT_BITSET       9
#define FUTEX_WAKE_BITSET       10
#define FUTEX_WAIT_REQUEUE_PI   11
#define FUTEX_CMP_REQUEUE_PI    12
#define FUTEX_PRIVATE_FLAG      128
#define FUTEX_CLOCK_REALTIME    256

#define FUTEX_BITSET_MATCH_ANY  0xffffffff

#define FUTEX_OP_CLEAR_WAKE_IF_GT_ONE   ((4 << 24) | 1)
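/* Editorial note (not in the original header), decoding the constant above
   under the futex(2) FUTEX_WAKE_OP argument encoding: (4 << 24) | 1 selects
   op = FUTEX_OP_SET with oparg = 0 and cmp = FUTEX_OP_CMP_GT with cmparg = 1,
   i.e. store 0 into the second futex word and wake its waiters only if its
   previous value was greater than 1, which is what the name
   CLEAR_WAKE_IF_GT_ONE describes.  */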
/* Values for 'private' parameter of locking macros.  Yes, the
   definition seems to be backwards.  But it is not.  The bit will be
   reversed before passing to the system call.  */
#define LLL_PRIVATE    0
#define LLL_SHARED     FUTEX_PRIVATE_FLAG
#if !defined NOT_IN_libc || defined IS_IN_rtld
/* In libc.so or ld.so all futexes are private.  */
# ifdef __ASSUME_PRIVATE_FUTEX
#  define __lll_private_flag(fl, private) \
  ((fl) | FUTEX_PRIVATE_FLAG)
# else
#  define __lll_private_flag(fl, private) \
  ((fl) | THREAD_GETMEM (THREAD_SELF, header.private_futex))
# endif
#else
# ifdef __ASSUME_PRIVATE_FUTEX
#  define __lll_private_flag(fl, private) \
  (((fl) | FUTEX_PRIVATE_FLAG) ^ (private))
# else
#  define __lll_private_flag(fl, private) \
  (__builtin_constant_p (private) \
   ? ((private) == 0 \
      ? ((fl) | THREAD_GETMEM (THREAD_SELF, header.private_futex)) \
      : (fl)) \
   : ({ unsigned int __fl = ((private) ^ FUTEX_PRIVATE_FLAG); \
        __asm__ ("andl %%gs:%P1, %0" : "+r" (__fl) \
                 : "i" (offsetof (struct pthread, header.private_futex))); \
        __fl | (fl); }))
# endif
#endif
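/* Editorial sketch (not part of the original header): outside libc/rtld and
   with __ASSUME_PRIVATE_FUTEX defined, the seemingly backwards values of
   LLL_PRIVATE and LLL_SHARED work out as follows:

     __lll_private_flag (FUTEX_WAIT, LLL_PRIVATE)
       == (FUTEX_WAIT | FUTEX_PRIVATE_FLAG) ^ 0
       == FUTEX_WAIT | FUTEX_PRIVATE_FLAG        (process-private futex op)

     __lll_private_flag (FUTEX_WAIT, LLL_SHARED)
       == (FUTEX_WAIT | FUTEX_PRIVATE_FLAG) ^ FUTEX_PRIVATE_FLAG
       == FUTEX_WAIT                             (shared futex op)

   so the bit really is reversed before reaching the system call, exactly as
   the comment above the definitions says.  */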
#ifndef __ASSEMBLER__

/* Initializer for compatibility lock.  */
#define LLL_LOCK_INITIALIZER            (0)
#define LLL_LOCK_INITIALIZER_LOCKED     (1)
#define LLL_LOCK_INITIALIZER_WAITERS    (2)

#ifdef __PIC__
# define LLL_EBX_LOAD   "xchgl %2, %%ebx\n"
# define LLL_EBX_REG    "D"
#else
# define LLL_EBX_LOAD
# define LLL_EBX_REG    "b"
#endif

#ifdef I386_USE_SYSENTER
# ifdef SHARED
#  define LLL_ENTER_KERNEL      "call *%%gs:%P6\n\t"
# else
#  define LLL_ENTER_KERNEL      "call *_dl_sysinfo\n\t"
# endif
#else
# define LLL_ENTER_KERNEL       "int $0x80\n\t"
#endif

/* Delay in spinlock loop.  */
#define BUSY_WAIT_NOP   __asm__ ("rep; nop")

#define LLL_STUB_UNWIND_INFO_START \
  ".section .eh_frame,\"a\",@progbits\n" \
  "5:\t" ".long 7f-6f # Length of Common Information Entry\n" \
  "6:\t" ".long 0x0 # CIE Identifier Tag\n\t" \
  ".byte 0x1 # CIE Version\n\t" \
  ".ascii \"zR\\0\" # CIE Augmentation\n\t" \
  ".uleb128 0x1 # CIE Code Alignment Factor\n\t" \
  ".sleb128 -4 # CIE Data Alignment Factor\n\t" \
  ".byte 0x8 # CIE RA Column\n\t" \
  ".uleb128 0x1 # Augmentation size\n\t" \
  ".byte 0x1b # FDE Encoding (pcrel sdata4)\n\t" \
  ".byte 0xc # DW_CFA_def_cfa\n\t" \
  ".uleb128 0x4\n\t" \
  ".uleb128 0x0\n\t" \
  ".align 4\n" \
  "7:\t" ".long 17f-8f # FDE Length\n" \
  "8:\t" ".long 8b-5b # FDE CIE offset\n\t" \
  ".long 1b-. # FDE initial location\n\t" \
  ".long 4b-1b # FDE address range\n\t" \
  ".uleb128 0x0 # Augmentation size\n\t" \
  ".byte 0x16 # DW_CFA_val_expression\n\t" \
  ".uleb128 0x8\n\t" \
  ".uleb128 10f-9f\n" \
  "9:\t" ".byte 0x78 # DW_OP_breg8\n\t" \
  ".sleb128 3b-1b\n"

#define LLL_STUB_UNWIND_INFO_END \
  ".byte 0x16 # DW_CFA_val_expression\n\t" \
  ".uleb128 0x8\n\t" \
  ".uleb128 12f-11f\n" \
  "11:\t" ".byte 0x78 # DW_OP_breg8\n\t" \
  ".sleb128 3b-2b\n" \
  "12:\t" ".byte 0x40 + (3b-2b-1) # DW_CFA_advance_loc\n\t" \
  ".byte 0x16 # DW_CFA_val_expression\n\t" \
  ".uleb128 0x8\n\t" \
  ".uleb128 16f-13f\n" \
  "13:\t" ".byte 0x78 # DW_OP_breg8\n\t" \
  ".sleb128 15f-14f\n\t" \
  ".byte 0x0d # DW_OP_const4s\n" \
  "14:\t" ".4byte 3b-.\n\t" \
  ".byte 0x1c # DW_OP_minus\n\t" \
  ".byte 0x0d # DW_OP_const4s\n" \
  "15:\t" ".4byte 18f-.\n\t" \
  ".byte 0x22 # DW_OP_plus\n" \
  "16:\t" ".align 4\n" \
  "17:\t" ".previous\n"

/* Unwind info for
   1: lea ..., ...
   2: call ...
   3: jmp 18f
   4:
   snippet.  */
#define LLL_STUB_UNWIND_INFO_3 \
  LLL_STUB_UNWIND_INFO_START \
  "10:\t" ".byte 0x40 + (2b-1b) # DW_CFA_advance_loc\n\t" \
  LLL_STUB_UNWIND_INFO_END

/* Unwind info for
   1: lea ..., ...
   0: movl ..., ...
   2: call ...
   3: jmp 18f
   4:
   snippet.  */
#define LLL_STUB_UNWIND_INFO_4 \
  LLL_STUB_UNWIND_INFO_START \
  "10:\t" ".byte 0x40 + (0b-1b) # DW_CFA_advance_loc\n\t" \
  ".byte 0x16 # DW_CFA_val_expression\n\t" \
  ".uleb128 0x8\n\t" \
  ".uleb128 20f-19f\n" \
  "19:\t" ".byte 0x78 # DW_OP_breg8\n\t" \
  ".sleb128 3b-0b\n" \
  "20:\t" ".byte 0x40 + (2b-0b) # DW_CFA_advance_loc\n\t" \
  LLL_STUB_UNWIND_INFO_END
#define lll_futex_wait(futex, val, private) \
  lll_futex_timed_wait (futex, val, NULL, private)

#define lll_futex_timed_wait(futex, val, timeout, private) \
  ({ \
    int __status; \
    register __typeof (val) _val __asm__ ("edx") = (val); \
    __asm__ __volatile__ (LLL_EBX_LOAD \
                          LLL_ENTER_KERNEL \
                          LLL_EBX_LOAD \
                          : "=a" (__status) \
                          : "0" (SYS_futex), LLL_EBX_REG (futex), "S" (timeout), \
                            "c" (__lll_private_flag (FUTEX_WAIT, private)), \
                            "d" (_val), "i" (offsetof (tcbhead_t, sysinfo)) \
                          : "memory"); \
    __status; \
  })

#define lll_futex_wake(futex, nr, private) \
  do { \
    int __ignore; \
    register __typeof (nr) _nr __asm__ ("edx") = (nr); \
    __asm__ __volatile__ (LLL_EBX_LOAD \
                          LLL_ENTER_KERNEL \
                          LLL_EBX_LOAD \
                          : "=a" (__ignore) \
                          : "0" (SYS_futex), LLL_EBX_REG (futex), \
                            "c" (__lll_private_flag (FUTEX_WAKE, private)), \
                            "d" (_nr), \
                            "i" (0) /* phony, to align next arg's number */, \
                            "i" (offsetof (tcbhead_t, sysinfo))); \
  } while (0)
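/* Editorial sketch (not part of the original header): a minimal wait/notify
   pair built on the two macros above.  `flag' is a hypothetical
   process-private futex word; the waiter blocks only while it still observes
   the value 0, so a wake-up that races ahead of the wait is not lost.

     int flag = 0;

     void waiter (void)
     {
       while (flag == 0)
         lll_futex_wait (&flag, 0, LLL_PRIVATE);  // sleeps only if flag is still 0
     }

     void notifier (void)
     {
       flag = 1;
       lll_futex_wake (&flag, 1, LLL_PRIVATE);    // wake at most one waiter
     }

   (A real caller would read `flag' with an atomic load; a plain int is used
   here only to keep the sketch short.)  */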
/* NB: in the lll_trylock macro we simply return the value in %eax
   after the cmpxchg instruction.  In case the operation succeeded this
   value is zero.  In case the operation failed, the cmpxchg instruction
   has loaded the current value of the memory word which is guaranteed
   to be nonzero.  */
#if defined NOT_IN_libc || defined UP
# define __lll_trylock_asm LOCK_INSTR "cmpxchgl %2, %1"
#else
# define __lll_trylock_asm "cmpl $0, %%gs:%P5\n\t" \
                           "je 0f\n\t" \
                           "lock\n" \
                           "0:\tcmpxchgl %2, %1"
#endif

#define lll_trylock(futex) \
  ({ int ret; \
     __asm__ __volatile__ (__lll_trylock_asm \
                           : "=a" (ret), "=m" (futex) \
                           : "r" (LLL_LOCK_INITIALIZER_LOCKED), "m" (futex), \
                             "0" (LLL_LOCK_INITIALIZER), \
                             "i" (MULTIPLE_THREADS_OFFSET) \
                           : "memory"); \
     ret; })
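/* Editorial sketch (not part of the original header): because lll_trylock
   evaluates to the pre-cmpxchg value of the lock word, 0 means "acquired" and
   any nonzero value means the lock was already held:

     int lock = LLL_LOCK_INITIALIZER;

     if (lll_trylock (lock) == 0)
       {
         // ... critical section ...
         lll_unlock (lock, LLL_PRIVATE);
       }
     else
       {
         // lock busy: retry, back off, or fall back to lll_lock
       }
*/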
#define lll_robust_trylock(futex, id) \
  ({ int ret; \
     __asm__ __volatile__ (LOCK_INSTR "cmpxchgl %2, %1" \
                           : "=a" (ret), "=m" (futex) \
                           : "r" (id), "m" (futex), \
                             "0" (LLL_LOCK_INITIALIZER) \
                           : "memory"); \
     ret; })

#define lll_cond_trylock(futex) \
  ({ int ret; \
     __asm__ __volatile__ (LOCK_INSTR "cmpxchgl %2, %1" \
                           : "=a" (ret), "=m" (futex) \
                           : "r" (LLL_LOCK_INITIALIZER_WAITERS), \
                             "m" (futex), "0" (LLL_LOCK_INITIALIZER) \
                           : "memory"); \
     ret; })
#if defined NOT_IN_libc || defined UP
# define __lll_lock_asm_start LOCK_INSTR "cmpxchgl %1, %2\n\t"
#else
# define __lll_lock_asm_start "cmpl $0, %%gs:%P6\n\t" \
                              "je 0f\n\t" \
                              "lock\n" \
                              "0:\tcmpxchgl %1, %2\n\t"
#endif

#define lll_lock(futex, private) \
  (void) \
    ({ int ignore1, ignore2; \
       if (__builtin_constant_p (private) && (private) == LLL_PRIVATE) \
         __asm__ __volatile__ (__lll_lock_asm_start \
                               "jnz _L_lock_%=\n\t" \
                               ".subsection 1\n\t" \
                               ".type _L_lock_%=,@function\n" \
                               "_L_lock_%=:\n" \
                               "1:\tleal %2, %%ecx\n" \
                               "2:\tcall __lll_lock_wait_private\n" \
                               "3:\tjmp 18f\n" \
                               "4:\t.size _L_lock_%=, 4b-1b\n\t" \
                               ".previous\n" \
                               LLL_STUB_UNWIND_INFO_3 \
                               "18:" \
                               : "=a" (ignore1), "=c" (ignore2), "=m" (futex) \
                               : "0" (0), "1" (1), "m" (futex), \
                                 "i" (MULTIPLE_THREADS_OFFSET) \
                               : "memory"); \
       else \
         { \
           int ignore3; \
           __asm__ __volatile__ (__lll_lock_asm_start \
                                 "jnz _L_lock_%=\n\t" \
                                 ".subsection 1\n\t" \
                                 ".type _L_lock_%=,@function\n" \
                                 "_L_lock_%=:\n" \
                                 "1:\tleal %2, %%edx\n" \
                                 "0:\tmovl %8, %%ecx\n" \
                                 "2:\tcall __lll_lock_wait\n" \
                                 "3:\tjmp 18f\n" \
                                 "4:\t.size _L_lock_%=, 4b-1b\n\t" \
                                 ".previous\n" \
                                 LLL_STUB_UNWIND_INFO_4 \
                                 "18:" \
                                 : "=a" (ignore1), "=c" (ignore2), \
                                   "=m" (futex), "=&d" (ignore3) \
                                 : "1" (1), "m" (futex), \
                                   "i" (MULTIPLE_THREADS_OFFSET), "0" (0), \
                                   "g" ((int) (private)) \
                                 : "memory"); \
         } \
    })
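/* Editorial sketch (not part of the original header): ignoring the
   out-of-line _L_lock_%= stub and its unwind data, the fast path of lll_lock
   is equivalent to the following C, using the 0 = unlocked, 1 = locked,
   2 = locked-with-waiters protocol implied by the LLL_LOCK_INITIALIZER*
   constants:

     if (atomic_compare_and_exchange_val_acq (&futex, 1, 0) != 0)
       {
         // contended: the stub ends up in __lll_lock_wait (or
         // __lll_lock_wait_private), which marks the word as 2 and sleeps
         // in FUTEX_WAIT until it can claim the lock
       }

   so an uncontended acquisition costs a single locked cmpxchg and never
   enters the kernel.  */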
#define lll_robust_lock(futex, id, private) \
  ({ int __ret, ignore1, ignore2; \
     __asm__ __volatile__ (LOCK_INSTR "cmpxchgl %1, %2\n\t" \
                           "jnz _L_robust_lock_%=\n\t" \
                           ".subsection 1\n\t" \
                           ".type _L_robust_lock_%=,@function\n" \
                           "_L_robust_lock_%=:\n" \
                           "1:\tleal %2, %%edx\n" \
                           "0:\tmovl %7, %%ecx\n" \
                           "2:\tcall __lll_robust_lock_wait\n" \
                           "3:\tjmp 18f\n" \
                           "4:\t.size _L_robust_lock_%=, 4b-1b\n\t" \
                           ".previous\n" \
                           LLL_STUB_UNWIND_INFO_4 \
                           "18:" \
                           : "=a" (__ret), "=c" (ignore1), "=m" (futex), \
                             "=&d" (ignore2) \
                           : "0" (0), "1" (id), "m" (futex), "g" ((int) (private)) \
                           : "memory"); \
     __ret; })
/* Special version of lll_lock which causes the unlock function to
   always wake up waiters.  */
#define lll_cond_lock(futex, private) \
  (void) \
    ({ int ignore1, ignore2, ignore3; \
       __asm__ __volatile__ (LOCK_INSTR "cmpxchgl %1, %2\n\t" \
                             "jnz _L_cond_lock_%=\n\t" \
                             ".subsection 1\n\t" \
                             ".type _L_cond_lock_%=,@function\n" \
                             "_L_cond_lock_%=:\n" \
                             "1:\tleal %2, %%edx\n" \
                             "0:\tmovl %7, %%ecx\n" \
                             "2:\tcall __lll_lock_wait\n" \
                             "3:\tjmp 18f\n" \
                             "4:\t.size _L_cond_lock_%=, 4b-1b\n\t" \
                             ".previous\n" \
                             LLL_STUB_UNWIND_INFO_4 \
                             "18:" \
                             : "=a" (ignore1), "=c" (ignore2), "=m" (futex), \
                               "=&d" (ignore3) \
                             : "0" (0), "1" (2), "m" (futex), "g" ((int) (private)) \
                             : "memory"); \
    })
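/* Editorial note (not part of the original header): lll_cond_lock differs
   from lll_lock only in acquiring the lock word with the value 2
   (LLL_LOCK_INITIALIZER_WAITERS) instead of 1, so a later lll_unlock always
   observes the "waiters" state and performs a futex wake.  */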
#define lll_robust_cond_lock(futex, id, private) \
  ({ int __ret, ignore1, ignore2; \
     __asm__ __volatile__ (LOCK_INSTR "cmpxchgl %1, %2\n\t" \
                           "jnz _L_robust_cond_lock_%=\n\t" \
                           ".subsection 1\n\t" \
                           ".type _L_robust_cond_lock_%=,@function\n" \
                           "_L_robust_cond_lock_%=:\n" \
                           "1:\tleal %2, %%edx\n" \
                           "0:\tmovl %7, %%ecx\n" \
                           "2:\tcall __lll_robust_lock_wait\n" \
                           "3:\tjmp 18f\n" \
                           "4:\t.size _L_robust_cond_lock_%=, 4b-1b\n\t" \
                           ".previous\n" \
                           LLL_STUB_UNWIND_INFO_4 \
                           "18:" \
                           : "=a" (__ret), "=c" (ignore1), "=m" (futex), \
                             "=&d" (ignore2) \
                           : "0" (0), "1" (id | FUTEX_WAITERS), "m" (futex), \
                             "g" ((int) (private)) \
                           : "memory"); \
     __ret; })
#define lll_timedlock(futex, timeout, private) \
  ({ int __ret, ignore1, ignore2, ignore3; \
     __asm__ __volatile__ (LOCK_INSTR "cmpxchgl %1, %3\n\t" \
                           "jnz _L_timedlock_%=\n\t" \
                           ".subsection 1\n\t" \
                           ".type _L_timedlock_%=,@function\n" \
                           "_L_timedlock_%=:\n" \
                           "1:\tleal %3, %%ecx\n" \
                           "0:\tmovl %8, %%edx\n" \
                           "2:\tcall __lll_timedlock_wait\n" \
                           "3:\tjmp 18f\n" \
                           "4:\t.size _L_timedlock_%=, 4b-1b\n\t" \
                           ".previous\n" \
                           LLL_STUB_UNWIND_INFO_4 \
                           "18:" \
                           : "=a" (__ret), "=c" (ignore1), "=&d" (ignore2), \
                             "=m" (futex), "=S" (ignore3) \
                           : "0" (0), "1" (1), "m" (futex), "m" (timeout), \
                             "4" ((int) (private)) \
                           : "memory"); \
     __ret; })
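/* Editorial sketch (not part of the original header): assuming, as in the
   NPTL callers of this macro (e.g. pthread_mutex_timedlock), that `timeout'
   is a pointer to an absolute CLOCK_REALTIME deadline, a bounded acquisition
   looks like:

     struct timespec abstime;
     clock_gettime (CLOCK_REALTIME, &abstime);
     abstime.tv_sec += 2;                        // give up after roughly 2s

     int err = lll_timedlock (lock, &abstime, LLL_PRIVATE);
     if (err == 0)
       {
         // ... critical section ...
         lll_unlock (lock, LLL_PRIVATE);
       }
     // otherwise err is an errno value such as ETIMEDOUT
*/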
#define lll_robust_timedlock(futex, timeout, id, private) \
  ({ int __ret, ignore1, ignore2, ignore3; \
     __asm__ __volatile__ (LOCK_INSTR "cmpxchgl %1, %3\n\t" \
                           "jnz _L_robust_timedlock_%=\n\t" \
                           ".subsection 1\n\t" \
                           ".type _L_robust_timedlock_%=,@function\n" \
                           "_L_robust_timedlock_%=:\n" \
                           "1:\tleal %3, %%ecx\n" \
                           "0:\tmovl %8, %%edx\n" \
                           "2:\tcall __lll_robust_timedlock_wait\n" \
                           "3:\tjmp 18f\n" \
                           "4:\t.size _L_robust_timedlock_%=, 4b-1b\n\t" \
                           ".previous\n" \
                           LLL_STUB_UNWIND_INFO_4 \
                           "18:" \
                           : "=a" (__ret), "=c" (ignore1), "=&d" (ignore2), \
                             "=m" (futex), "=S" (ignore3) \
                           : "0" (0), "1" (id), "m" (futex), "m" (timeout), \
                             "4" ((int) (private)) \
                           : "memory"); \
     __ret; })
#if defined NOT_IN_libc || defined UP
# define __lll_unlock_asm LOCK_INSTR "subl $1, %0\n\t"
#else
# define __lll_unlock_asm "cmpl $0, %%gs:%P3\n\t" \
                          "je 0f\n\t" \
                          "lock\n" \
                          "0:\tsubl $1,%0\n\t"
#endif

#define lll_unlock(futex, private) \
  (void) \
    ({ int ignore; \
       if (__builtin_constant_p (private) && (private) == LLL_PRIVATE) \
         __asm__ __volatile__ (__lll_unlock_asm \
                               "jne _L_unlock_%=\n\t" \
                               ".subsection 1\n\t" \
                               ".type _L_unlock_%=,@function\n" \
                               "_L_unlock_%=:\n" \
                               "1:\tleal %0, %%eax\n" \
                               "2:\tcall __lll_unlock_wake_private\n" \
                               "3:\tjmp 18f\n" \
                               "4:\t.size _L_unlock_%=, 4b-1b\n\t" \
                               ".previous\n" \
                               LLL_STUB_UNWIND_INFO_3 \
                               "18:" \
                               : "=m" (futex), "=&a" (ignore) \
                               : "m" (futex), "i" (MULTIPLE_THREADS_OFFSET) \
                               : "memory"); \
       else \
         { \
           int ignore2; \
           __asm__ __volatile__ (__lll_unlock_asm \
                                 "jne _L_unlock_%=\n\t" \
                                 ".subsection 1\n\t" \
                                 ".type _L_unlock_%=,@function\n" \
                                 "_L_unlock_%=:\n" \
                                 "1:\tleal %0, %%eax\n" \
                                 "0:\tmovl %5, %%ecx\n" \
                                 "2:\tcall __lll_unlock_wake\n" \
                                 "3:\tjmp 18f\n" \
                                 "4:\t.size _L_unlock_%=, 4b-1b\n\t" \
                                 ".previous\n" \
                                 LLL_STUB_UNWIND_INFO_4 \
                                 "18:" \
                                 : "=m" (futex), "=&a" (ignore), "=&c" (ignore2) \
                                 : "i" (MULTIPLE_THREADS_OFFSET), "m" (futex), \
                                   "g" ((int) (private)) \
                                 : "memory"); \
         } \
    })
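/* Editorial sketch (not part of the original header): the intended pairing of
   the lock and unlock macros above, for a hypothetical process-private lock
   word:

     static int lock = LLL_LOCK_INITIALIZER;    // 0 == unlocked

     void with_lock (void)
     {
       lll_lock (lock, LLL_PRIVATE);    // fast path: cmpxchg 0 -> 1
       // ... critical section ...
       lll_unlock (lock, LLL_PRIVATE);  // subl $1; wake only if waiters
     }

   The "subl $1" in lll_unlock turns an uncontended 1 back into 0 without a
   system call; only a contended value (2) leaves a nonzero result and takes
   the __lll_unlock_wake path, which issues the FUTEX_WAKE.  */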
#define lll_robust_unlock(futex, private) \
  (void) \
    ({ int ignore, ignore2; \
       __asm__ __volatile__ (LOCK_INSTR "andl %3, %0\n\t" \
                             "jne _L_robust_unlock_%=\n\t" \
                             ".subsection 1\n\t" \
                             ".type _L_robust_unlock_%=,@function\n" \
                             "_L_robust_unlock_%=:\n\t" \
                             "1:\tleal %0, %%eax\n" \
                             "0:\tmovl %5, %%ecx\n" \
                             "2:\tcall __lll_unlock_wake\n" \
                             "3:\tjmp 18f\n" \
                             "4:\t.size _L_robust_unlock_%=, 4b-1b\n\t" \
                             ".previous\n" \
                             LLL_STUB_UNWIND_INFO_4 \
                             "18:" \
                             : "=m" (futex), "=&a" (ignore), "=&c" (ignore2) \
                             : "i" (FUTEX_WAITERS), "m" (futex), \
                               "g" ((int) (private)) \
                             : "memory"); \
    })
#define lll_robust_dead(futex, private) \
  (void) \
    ({ int __ignore; \
       register int _nr __asm__ ("edx") = 1; \
       __asm__ __volatile__ (LOCK_INSTR "orl %5, (%2)\n\t" \
                             LLL_EBX_LOAD \
                             LLL_ENTER_KERNEL \
                             LLL_EBX_LOAD \
                             : "=a" (__ignore) \
                             : "0" (SYS_futex), LLL_EBX_REG (&(futex)), \
                               "c" (__lll_private_flag (FUTEX_WAKE, private)), \
                               "d" (_nr), "i" (FUTEX_OWNER_DIED), \
                               "i" (offsetof (tcbhead_t, sysinfo))); \
    })
#define lll_islocked(futex) \
  (futex != LLL_LOCK_INITIALIZER)

/* The kernel notifies a process which uses CLONE_CLEARTID via futex
   wakeup when the clone terminates.  The memory location contains the
   thread ID while the clone is running and is reset to zero
   afterwards.

   The macro parameter must not have any side effect.  */
#define lll_wait_tid(tid) \
  do { \
    int __ignore; \
    register __typeof (tid) _tid __asm__ ("edx") = (tid); \
    if (_tid != 0) \
      __asm__ __volatile__ (LLL_EBX_LOAD \
                            "1:\tmovl %1, %%eax\n\t" \
                            LLL_ENTER_KERNEL \
                            "cmpl $0, (%%ebx)\n\t" \
                            "jne 1b\n\t" \
                            LLL_EBX_LOAD \
                            : "=&a" (__ignore) \
                            : "i" (SYS_futex), LLL_EBX_REG (&tid), "S" (0), \
                              "c" (FUTEX_WAIT), "d" (_tid), \
                              "i" (offsetof (tcbhead_t, sysinfo)) \
                            : "memory"); \
  } while (0)
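/* Editorial sketch (not part of the original header): lll_wait_tid is the
   join primitive.  The kernel clears the registered TID word and issues a
   futex wake when the thread exits, so a pthread_join-style wait reduces to:

     // pd is a hypothetical pointer to the joined thread's descriptor;
     // pd->tid holds the thread ID while the thread runs and becomes 0
     // once it has terminated
     lll_wait_tid (pd->tid);   // returns once pd->tid == 0
*/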
extern int __lll_timedwait_tid (int *tid, const struct timespec *abstime)
     __attribute__ ((regparm (2))) attribute_hidden;

#define lll_timedwait_tid(tid, abstime) \
  ({ \
    int __ret = 0; \
    if (tid != 0) \
      { \
        if (abstime->tv_nsec < 0 || abstime->tv_nsec >= 1000000000) \
          __ret = EINVAL; \
        else \
          __ret = __lll_timedwait_tid (&tid, abstime); \
      } \
    __ret; })
#endif  /* !__ASSEMBLER__ */

#endif  /* lowlevellock.h */