lowlevellock.h
/* Copyright (C) 2002-2004, 2006-2008, 2009 Free Software Foundation, Inc.
   This file is part of the GNU C Library.
   Contributed by Ulrich Drepper <drepper@redhat.com>, 2002.

   The GNU C Library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
   License as published by the Free Software Foundation; either
   version 2.1 of the License, or (at your option) any later version.

   The GNU C Library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with the GNU C Library; if not, see
   <http://www.gnu.org/licenses/>.  */

#ifndef _LOWLEVELLOCK_H
#define _LOWLEVELLOCK_H 1

#ifndef __ASSEMBLER__
# include <time.h>
# include <sys/param.h>
# include <bits/pthreadtypes.h>
# include <bits/kernel-features.h>
# include <tcb-offsets.h>
# include <atomic.h>

# ifndef LOCK_INSTR
#  ifdef UP
#   define LOCK_INSTR /* nothing */
#  else
#   define LOCK_INSTR "lock;"
#  endif
# endif
#else
# ifndef LOCK
#  ifdef UP
#   define LOCK
#  else
#   define LOCK lock
#  endif
# endif
#endif

#define FUTEX_WAIT 0
#define FUTEX_WAKE 1
#define FUTEX_CMP_REQUEUE 4
#define FUTEX_WAKE_OP 5
#define FUTEX_LOCK_PI 6
#define FUTEX_UNLOCK_PI 7
#define FUTEX_TRYLOCK_PI 8
#define FUTEX_WAIT_BITSET 9
#define FUTEX_WAKE_BITSET 10
#define FUTEX_WAIT_REQUEUE_PI 11
#define FUTEX_CMP_REQUEUE_PI 12
#define FUTEX_PRIVATE_FLAG 128
#define FUTEX_CLOCK_REALTIME 256

#define FUTEX_BITSET_MATCH_ANY 0xffffffff

#define FUTEX_OP_CLEAR_WAKE_IF_GT_ONE ((4 << 24) | 1)

/* Values for 'private' parameter of locking macros.  Yes, the
   definition seems to be backwards.  But it is not.  The bit will be
   reversed before passing to the system call.  */
#define LLL_PRIVATE 0
#define LLL_SHARED FUTEX_PRIVATE_FLAG

#if !defined NOT_IN_libc || defined IS_IN_rtld
/* In libc.so or ld.so all futexes are private.  */
# ifdef __ASSUME_PRIVATE_FUTEX
#  define __lll_private_flag(fl, private) \
  ((fl) | FUTEX_PRIVATE_FLAG)
# else
#  define __lll_private_flag(fl, private) \
  ((fl) | THREAD_GETMEM (THREAD_SELF, header.private_futex))
# endif
#else
# ifdef __ASSUME_PRIVATE_FUTEX
#  define __lll_private_flag(fl, private) \
  (((fl) | FUTEX_PRIVATE_FLAG) ^ (private))
# else
#  define __lll_private_flag(fl, private) \
  (__builtin_constant_p (private) \
   ? ((private) == 0 \
      ? ((fl) | THREAD_GETMEM (THREAD_SELF, header.private_futex)) \
      : (fl)) \
   : ({ unsigned int __fl = ((private) ^ FUTEX_PRIVATE_FLAG); \
        __asm__ ("andl %%gs:%P1, %0" : "+r" (__fl) \
                 : "i" (offsetof (struct pthread, header.private_futex))); \
        __fl | (fl); }))
# endif
#endif
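
/* Illustrative note, not part of the original header: in the #else
   branch above (code outside libc.so/ld.so) with __ASSUME_PRIVATE_FUTEX
   defined, the seemingly backwards LLL_PRIVATE/LLL_SHARED values work
   out as follows, since LLL_PRIVATE == 0 and
   LLL_SHARED == FUTEX_PRIVATE_FLAG:

     __lll_private_flag (FUTEX_WAIT, LLL_PRIVATE)
       == (FUTEX_WAIT | FUTEX_PRIVATE_FLAG) ^ 0
       == FUTEX_WAIT | FUTEX_PRIVATE_FLAG      -- process-private wait

     __lll_private_flag (FUTEX_WAIT, LLL_SHARED)
       == (FUTEX_WAIT | FUTEX_PRIVATE_FLAG) ^ FUTEX_PRIVATE_FLAG
       == FUTEX_WAIT                           -- shared wait

   i.e. the FUTEX_PRIVATE_FLAG bit of the 'private' argument really is
   reversed before it reaches the futex system call, as the comment
   above promises.  */
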
#ifndef __ASSEMBLER__

/* Initializer for compatibility lock.  */
#define LLL_LOCK_INITIALIZER (0)
#define LLL_LOCK_INITIALIZER_LOCKED (1)
#define LLL_LOCK_INITIALIZER_WAITERS (2)

#ifdef __PIC__
# define LLL_EBX_LOAD "xchgl %2, %%ebx\n"
# define LLL_EBX_REG "D"
#else
# define LLL_EBX_LOAD
# define LLL_EBX_REG "b"
#endif

#ifdef I386_USE_SYSENTER
# ifdef SHARED
#  define LLL_ENTER_KERNEL "call *%%gs:%P6\n\t"
# else
#  define LLL_ENTER_KERNEL "call *_dl_sysinfo\n\t"
# endif
#else
# define LLL_ENTER_KERNEL "int $0x80\n\t"
#endif

/* Delay in spinlock loop.  */
#define BUSY_WAIT_NOP __asm__ ("rep; nop")

#define LLL_STUB_UNWIND_INFO_START \
  ".section .eh_frame,\"a\",@progbits\n" \
  "5:\t" ".long 7f-6f # Length of Common Information Entry\n" \
  "6:\t" ".long 0x0 # CIE Identifier Tag\n\t" \
  ".byte 0x1 # CIE Version\n\t" \
  ".ascii \"zR\\0\" # CIE Augmentation\n\t" \
  ".uleb128 0x1 # CIE Code Alignment Factor\n\t" \
  ".sleb128 -4 # CIE Data Alignment Factor\n\t" \
  ".byte 0x8 # CIE RA Column\n\t" \
  ".uleb128 0x1 # Augmentation size\n\t" \
  ".byte 0x1b # FDE Encoding (pcrel sdata4)\n\t" \
  ".byte 0xc # DW_CFA_def_cfa\n\t" \
  ".uleb128 0x4\n\t" \
  ".uleb128 0x0\n\t" \
  ".align 4\n" \
  "7:\t" ".long 17f-8f # FDE Length\n" \
  "8:\t" ".long 8b-5b # FDE CIE offset\n\t" \
  ".long 1b-. # FDE initial location\n\t" \
  ".long 4b-1b # FDE address range\n\t" \
  ".uleb128 0x0 # Augmentation size\n\t" \
  ".byte 0x16 # DW_CFA_val_expression\n\t" \
  ".uleb128 0x8\n\t" \
  ".uleb128 10f-9f\n" \
  "9:\t" ".byte 0x78 # DW_OP_breg8\n\t" \
  ".sleb128 3b-1b\n"

#define LLL_STUB_UNWIND_INFO_END \
  ".byte 0x16 # DW_CFA_val_expression\n\t" \
  ".uleb128 0x8\n\t" \
  ".uleb128 12f-11f\n" \
  "11:\t" ".byte 0x78 # DW_OP_breg8\n\t" \
  ".sleb128 3b-2b\n" \
  "12:\t" ".byte 0x40 + (3b-2b-1) # DW_CFA_advance_loc\n\t" \
  ".byte 0x16 # DW_CFA_val_expression\n\t" \
  ".uleb128 0x8\n\t" \
  ".uleb128 16f-13f\n" \
  "13:\t" ".byte 0x78 # DW_OP_breg8\n\t" \
  ".sleb128 15f-14f\n\t" \
  ".byte 0x0d # DW_OP_const4s\n" \
  "14:\t" ".4byte 3b-.\n\t" \
  ".byte 0x1c # DW_OP_minus\n\t" \
  ".byte 0x0d # DW_OP_const4s\n" \
  "15:\t" ".4byte 18f-.\n\t" \
  ".byte 0x22 # DW_OP_plus\n" \
  "16:\t" ".align 4\n" \
  "17:\t" ".previous\n"

/* Unwind info for
     1: lea ..., ...
     2: call ...
     3: jmp 18f
     4:
   snippet.  */
#define LLL_STUB_UNWIND_INFO_3 \
  LLL_STUB_UNWIND_INFO_START \
  "10:\t" ".byte 0x40 + (2b-1b) # DW_CFA_advance_loc\n\t" \
  LLL_STUB_UNWIND_INFO_END

/* Unwind info for
     1: lea ..., ...
     0: movl ..., ...
     2: call ...
     3: jmp 18f
     4:
   snippet.  */
#define LLL_STUB_UNWIND_INFO_4 \
  LLL_STUB_UNWIND_INFO_START \
  "10:\t" ".byte 0x40 + (0b-1b) # DW_CFA_advance_loc\n\t" \
  ".byte 0x16 # DW_CFA_val_expression\n\t" \
  ".uleb128 0x8\n\t" \
  ".uleb128 20f-19f\n" \
  "19:\t" ".byte 0x78 # DW_OP_breg8\n\t" \
  ".sleb128 3b-0b\n" \
  "20:\t" ".byte 0x40 + (2b-0b) # DW_CFA_advance_loc\n\t" \
  LLL_STUB_UNWIND_INFO_END

#define lll_futex_wait(futex, val, private) \
  lll_futex_timed_wait (futex, val, NULL, private)

#define lll_futex_timed_wait(futex, val, timeout, private) \
  ({ \
     int __status; \
     register __typeof (val) _val __asm__ ("edx") = (val); \
     __asm__ __volatile__ (LLL_EBX_LOAD \
            LLL_ENTER_KERNEL \
            LLL_EBX_LOAD \
            : "=a" (__status) \
            : "0" (SYS_futex), LLL_EBX_REG (futex), "S" (timeout), \
              "c" (__lll_private_flag (FUTEX_WAIT, private)), \
              "d" (_val), "i" (offsetof (tcbhead_t, sysinfo)) \
            : "memory"); \
     __status; \
  })

#define lll_futex_wake(futex, nr, private) \
  do { \
    int __ignore; \
    register __typeof (nr) _nr __asm__ ("edx") = (nr); \
    __asm__ __volatile__ (LLL_EBX_LOAD \
           LLL_ENTER_KERNEL \
           LLL_EBX_LOAD \
           : "=a" (__ignore) \
           : "0" (SYS_futex), LLL_EBX_REG (futex), \
             "c" (__lll_private_flag (FUTEX_WAKE, private)), \
             "d" (_nr), \
             "i" (0) /* phony, to align next arg's number */, \
             "i" (offsetof (tcbhead_t, sysinfo))); \
  } while (0)
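
/* Illustrative sketch, not part of the original header: the two macros
   above are hand-written equivalents of invoking the futex system call
   through the generic syscall mechanism.  The helper names below are
   made up; roughly:

     #include <unistd.h>
     #include <sys/syscall.h>
     #include <linux/futex.h>

     static int
     futex_wait_sketch (int *addr, int expected, const struct timespec *timeout)
     {
       // Sleeps while *addr == expected (or until timeout / signal).
       return syscall (SYS_futex, addr, FUTEX_WAIT, expected, timeout);
     }

     static int
     futex_wake_sketch (int *addr, int nr)
     {
       // Wakes at most 'nr' threads blocked in FUTEX_WAIT on addr.
       return syscall (SYS_futex, addr, FUTEX_WAKE, nr);
     }

   The inline assembly versions avoid the libc syscall wrapper, deal
   with the PIC restriction on %ebx via LLL_EBX_LOAD, and fold in the
   private-futex flag computed by __lll_private_flag.  */
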
/* NB: in the lll_trylock macro we simply return the value in %eax
   after the cmpxchg instruction.  In case the operation succeeded this
   value is zero.  In case the operation failed, the cmpxchg instruction
   has loaded the current value of the memory word which is guaranteed
   to be nonzero.  */
#if defined NOT_IN_libc || defined UP
# define __lll_trylock_asm LOCK_INSTR "cmpxchgl %2, %1"
#else
# define __lll_trylock_asm "cmpl $0, %%gs:%P5\n\t" \
  "je 0f\n\t" \
  "lock\n" \
  "0:\tcmpxchgl %2, %1"
#endif

#define lll_trylock(futex) \
  ({ int ret; \
     __asm__ __volatile__ (__lll_trylock_asm \
            : "=a" (ret), "=m" (futex) \
            : "r" (LLL_LOCK_INITIALIZER_LOCKED), "m" (futex), \
              "0" (LLL_LOCK_INITIALIZER), \
              "i" (MULTIPLE_THREADS_OFFSET) \
            : "memory"); \
     ret; })
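
/* Illustrative usage sketch, not part of the original header (the lock
   variable name is made up):

     int my_futex = LLL_LOCK_INITIALIZER;     // 0: unlocked

     if (lll_trylock (my_futex) == 0)
       {
         // Success: the cmpxchg moved the futex from 0 to 1.
         // ... critical section ...
         lll_unlock (my_futex, LLL_PRIVATE);
       }
     else
       {
         // Failure: the futex was already 1 or 2; cmpxchg left it
         // unchanged and returned that nonzero value in %eax.
       }
 */
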
#define lll_robust_trylock(futex, id) \
  ({ int ret; \
     __asm__ __volatile__ (LOCK_INSTR "cmpxchgl %2, %1" \
            : "=a" (ret), "=m" (futex) \
            : "r" (id), "m" (futex), \
              "0" (LLL_LOCK_INITIALIZER) \
            : "memory"); \
     ret; })

#define lll_cond_trylock(futex) \
  ({ int ret; \
     __asm__ __volatile__ (LOCK_INSTR "cmpxchgl %2, %1" \
            : "=a" (ret), "=m" (futex) \
            : "r" (LLL_LOCK_INITIALIZER_WAITERS), \
              "m" (futex), "0" (LLL_LOCK_INITIALIZER) \
            : "memory"); \
     ret; })

#if defined NOT_IN_libc || defined UP
# define __lll_lock_asm_start LOCK_INSTR "cmpxchgl %1, %2\n\t"
#else
# define __lll_lock_asm_start "cmpl $0, %%gs:%P6\n\t" \
  "je 0f\n\t" \
  "lock\n" \
  "0:\tcmpxchgl %1, %2\n\t"
#endif

#define lll_lock(futex, private) \
  (void) \
  ({ int ignore1, ignore2; \
     if (__builtin_constant_p (private) && (private) == LLL_PRIVATE) \
       __asm__ __volatile__ (__lll_lock_asm_start \
              "jnz _L_lock_%=\n\t" \
              ".subsection 1\n\t" \
              ".type _L_lock_%=,@function\n" \
              "_L_lock_%=:\n" \
              "1:\tleal %2, %%ecx\n" \
              "2:\tcall __lll_lock_wait_private\n" \
              "3:\tjmp 18f\n" \
              "4:\t.size _L_lock_%=, 4b-1b\n\t" \
              ".previous\n" \
              LLL_STUB_UNWIND_INFO_3 \
              "18:" \
              : "=a" (ignore1), "=c" (ignore2), "=m" (futex) \
              : "0" (0), "1" (1), "m" (futex), \
                "i" (MULTIPLE_THREADS_OFFSET) \
              : "memory"); \
     else \
       { \
         int ignore3; \
         __asm__ __volatile__ (__lll_lock_asm_start \
                "jnz _L_lock_%=\n\t" \
                ".subsection 1\n\t" \
                ".type _L_lock_%=,@function\n" \
                "_L_lock_%=:\n" \
                "1:\tleal %2, %%edx\n" \
                "0:\tmovl %8, %%ecx\n" \
                "2:\tcall __lll_lock_wait\n" \
                "3:\tjmp 18f\n" \
                "4:\t.size _L_lock_%=, 4b-1b\n\t" \
                ".previous\n" \
                LLL_STUB_UNWIND_INFO_4 \
                "18:" \
                : "=a" (ignore1), "=c" (ignore2), \
                  "=m" (futex), "=&d" (ignore3) \
                : "1" (1), "m" (futex), \
                  "i" (MULTIPLE_THREADS_OFFSET), "0" (0), \
                  "g" ((int) (private)) \
                : "memory"); \
       } \
  })
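
/* Illustrative sketch, not part of the original header: lll_lock is the
   acquire side of the usual three-state futex lock (0 = unlocked,
   1 = locked, 2 = locked with waiters).  Ignoring the
   MULTIPLE_THREADS_OFFSET check that drops the lock prefix in
   single-threaded processes, its logic is roughly:

     static void
     lll_lock_sketch (int *futex, int private)
     {
       int expected = 0;
       // Fast path: uncontended 0 -> 1 transition.
       if (!__atomic_compare_exchange_n (futex, &expected, 1, 0,
                                         __ATOMIC_ACQUIRE, __ATOMIC_RELAXED))
         // Slow path: the out-of-line stub (__lll_lock_wait or
         // __lll_lock_wait_private) repeatedly sets the word to 2 and
         // sleeps in FUTEX_WAIT until the lock is released.
         __lll_lock_wait_sketch (futex, private);
     }

   where __lll_lock_wait_sketch stands in for the real helpers, which
   are defined elsewhere in the library.  */
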
#define lll_robust_lock(futex, id, private) \
  ({ int __ret, ignore1, ignore2; \
     __asm__ __volatile__ (LOCK_INSTR "cmpxchgl %1, %2\n\t" \
            "jnz _L_robust_lock_%=\n\t" \
            ".subsection 1\n\t" \
            ".type _L_robust_lock_%=,@function\n" \
            "_L_robust_lock_%=:\n" \
            "1:\tleal %2, %%edx\n" \
            "0:\tmovl %7, %%ecx\n" \
            "2:\tcall __lll_robust_lock_wait\n" \
            "3:\tjmp 18f\n" \
            "4:\t.size _L_robust_lock_%=, 4b-1b\n\t" \
            ".previous\n" \
            LLL_STUB_UNWIND_INFO_4 \
            "18:" \
            : "=a" (__ret), "=c" (ignore1), "=m" (futex), \
              "=&d" (ignore2) \
            : "0" (0), "1" (id), "m" (futex), "g" ((int) (private)) \
            : "memory"); \
     __ret; })

/* Special version of lll_lock which causes the unlock function to
   always wake up waiters.  */
#define lll_cond_lock(futex, private) \
  (void) \
  ({ int ignore1, ignore2, ignore3; \
     __asm__ __volatile__ (LOCK_INSTR "cmpxchgl %1, %2\n\t" \
            "jnz _L_cond_lock_%=\n\t" \
            ".subsection 1\n\t" \
            ".type _L_cond_lock_%=,@function\n" \
            "_L_cond_lock_%=:\n" \
            "1:\tleal %2, %%edx\n" \
            "0:\tmovl %7, %%ecx\n" \
            "2:\tcall __lll_lock_wait\n" \
            "3:\tjmp 18f\n" \
            "4:\t.size _L_cond_lock_%=, 4b-1b\n\t" \
            ".previous\n" \
            LLL_STUB_UNWIND_INFO_4 \
            "18:" \
            : "=a" (ignore1), "=c" (ignore2), "=m" (futex), \
              "=&d" (ignore3) \
            : "0" (0), "1" (2), "m" (futex), "g" ((int) (private)) \
            : "memory"); \
  })

#define lll_robust_cond_lock(futex, id, private) \
  ({ int __ret, ignore1, ignore2; \
     __asm__ __volatile__ (LOCK_INSTR "cmpxchgl %1, %2\n\t" \
            "jnz _L_robust_cond_lock_%=\n\t" \
            ".subsection 1\n\t" \
            ".type _L_robust_cond_lock_%=,@function\n" \
            "_L_robust_cond_lock_%=:\n" \
            "1:\tleal %2, %%edx\n" \
            "0:\tmovl %7, %%ecx\n" \
            "2:\tcall __lll_robust_lock_wait\n" \
            "3:\tjmp 18f\n" \
            "4:\t.size _L_robust_cond_lock_%=, 4b-1b\n\t" \
            ".previous\n" \
            LLL_STUB_UNWIND_INFO_4 \
            "18:" \
            : "=a" (__ret), "=c" (ignore1), "=m" (futex), \
              "=&d" (ignore2) \
            : "0" (0), "1" (id | FUTEX_WAITERS), "m" (futex), \
              "g" ((int) (private)) \
            : "memory"); \
     __ret; })

#define lll_timedlock(futex, timeout, private) \
  ({ int __ret, ignore1, ignore2, ignore3; \
     __asm__ __volatile__ (LOCK_INSTR "cmpxchgl %1, %3\n\t" \
            "jnz _L_timedlock_%=\n\t" \
            ".subsection 1\n\t" \
            ".type _L_timedlock_%=,@function\n" \
            "_L_timedlock_%=:\n" \
            "1:\tleal %3, %%ecx\n" \
            "0:\tmovl %8, %%edx\n" \
            "2:\tcall __lll_timedlock_wait\n" \
            "3:\tjmp 18f\n" \
            "4:\t.size _L_timedlock_%=, 4b-1b\n\t" \
            ".previous\n" \
            LLL_STUB_UNWIND_INFO_4 \
            "18:" \
            : "=a" (__ret), "=c" (ignore1), "=&d" (ignore2), \
              "=m" (futex), "=S" (ignore3) \
            : "0" (0), "1" (1), "m" (futex), "m" (timeout), \
              "4" ((int) (private)) \
            : "memory"); \
     __ret; })

#define lll_robust_timedlock(futex, timeout, id, private) \
  ({ int __ret, ignore1, ignore2, ignore3; \
     __asm__ __volatile__ (LOCK_INSTR "cmpxchgl %1, %3\n\t" \
            "jnz _L_robust_timedlock_%=\n\t" \
            ".subsection 1\n\t" \
            ".type _L_robust_timedlock_%=,@function\n" \
            "_L_robust_timedlock_%=:\n" \
            "1:\tleal %3, %%ecx\n" \
            "0:\tmovl %8, %%edx\n" \
            "2:\tcall __lll_robust_timedlock_wait\n" \
            "3:\tjmp 18f\n" \
            "4:\t.size _L_robust_timedlock_%=, 4b-1b\n\t" \
            ".previous\n" \
            LLL_STUB_UNWIND_INFO_4 \
            "18:" \
            : "=a" (__ret), "=c" (ignore1), "=&d" (ignore2), \
              "=m" (futex), "=S" (ignore3) \
            : "0" (0), "1" (id), "m" (futex), "m" (timeout), \
              "4" ((int) (private)) \
            : "memory"); \
     __ret; })

#if defined NOT_IN_libc || defined UP
# define __lll_unlock_asm LOCK_INSTR "subl $1, %0\n\t"
#else
# define __lll_unlock_asm "cmpl $0, %%gs:%P3\n\t" \
  "je 0f\n\t" \
  "lock\n" \
  "0:\tsubl $1,%0\n\t"
#endif

#define lll_unlock(futex, private) \
  (void) \
  ({ int ignore; \
     if (__builtin_constant_p (private) && (private) == LLL_PRIVATE) \
       __asm__ __volatile__ (__lll_unlock_asm \
              "jne _L_unlock_%=\n\t" \
              ".subsection 1\n\t" \
              ".type _L_unlock_%=,@function\n" \
              "_L_unlock_%=:\n" \
              "1:\tleal %0, %%eax\n" \
              "2:\tcall __lll_unlock_wake_private\n" \
              "3:\tjmp 18f\n" \
              "4:\t.size _L_unlock_%=, 4b-1b\n\t" \
              ".previous\n" \
              LLL_STUB_UNWIND_INFO_3 \
              "18:" \
              : "=m" (futex), "=&a" (ignore) \
              : "m" (futex), "i" (MULTIPLE_THREADS_OFFSET) \
              : "memory"); \
     else \
       { \
         int ignore2; \
         __asm__ __volatile__ (__lll_unlock_asm \
                "jne _L_unlock_%=\n\t" \
                ".subsection 1\n\t" \
                ".type _L_unlock_%=,@function\n" \
                "_L_unlock_%=:\n" \
                "1:\tleal %0, %%eax\n" \
                "0:\tmovl %5, %%ecx\n" \
                "2:\tcall __lll_unlock_wake\n" \
                "3:\tjmp 18f\n" \
                "4:\t.size _L_unlock_%=, 4b-1b\n\t" \
                ".previous\n" \
                LLL_STUB_UNWIND_INFO_4 \
                "18:" \
                : "=m" (futex), "=&a" (ignore), "=&c" (ignore2) \
                : "i" (MULTIPLE_THREADS_OFFSET), "m" (futex), \
                  "g" ((int) (private)) \
                : "memory"); \
       } \
  })
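
/* Illustrative sketch, not part of the original header: lll_unlock is
   the release side of the same three-state protocol.  Roughly:

     static void
     lll_unlock_sketch (int *futex, int private)
     {
       // While locked the word is 1 (no waiters) or 2 (waiters).
       if (__atomic_fetch_sub (futex, 1, __ATOMIC_RELEASE) != 1)
         {
           // There were waiters: clear the word and wake one of them,
           // which is what the out-of-line __lll_unlock_wake stub does.
           *futex = 0;
           lll_futex_wake (futex, 1, private);
         }
     }

   The real macro performs the "subl $1" inline and takes the jne branch
   to the stub only in the contended case.  */
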
#define lll_robust_unlock(futex, private) \
  (void) \
  ({ int ignore, ignore2; \
     __asm__ __volatile__ (LOCK_INSTR "andl %3, %0\n\t" \
            "jne _L_robust_unlock_%=\n\t" \
            ".subsection 1\n\t" \
            ".type _L_robust_unlock_%=,@function\n" \
            "_L_robust_unlock_%=:\n\t" \
            "1:\tleal %0, %%eax\n" \
            "0:\tmovl %5, %%ecx\n" \
            "2:\tcall __lll_unlock_wake\n" \
            "3:\tjmp 18f\n" \
            "4:\t.size _L_robust_unlock_%=, 4b-1b\n\t" \
            ".previous\n" \
            LLL_STUB_UNWIND_INFO_4 \
            "18:" \
            : "=m" (futex), "=&a" (ignore), "=&c" (ignore2) \
            : "i" (FUTEX_WAITERS), "m" (futex), \
              "g" ((int) (private)) \
            : "memory"); \
  })

#define lll_robust_dead(futex, private) \
  (void) \
  ({ int __ignore; \
     register int _nr __asm__ ("edx") = 1; \
     __asm__ __volatile__ (LOCK_INSTR "orl %5, (%2)\n\t" \
            LLL_EBX_LOAD \
            LLL_ENTER_KERNEL \
            LLL_EBX_LOAD \
            : "=a" (__ignore) \
            : "0" (SYS_futex), LLL_EBX_REG (&(futex)), \
              "c" (__lll_private_flag (FUTEX_WAKE, private)), \
              "d" (_nr), "i" (FUTEX_OWNER_DIED), \
              "i" (offsetof (tcbhead_t, sysinfo))); \
  })
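
/* Illustrative note, not part of the original header: the robust
   variants keep the owner's thread ID in the futex word instead of the
   plain 0/1/2 state, with FUTEX_WAITERS and FUTEX_OWNER_DIED (flag bits
   from the kernel's robust-futex ABI) set on top of it:

     lll_robust_trylock (futex, id)    -- 0 -> id, returns 0 on success
     lll_robust_unlock (futex, priv)   -- "andl FUTEX_WAITERS" clears the
                                          TID; if the waiters bit was set,
                                          __lll_unlock_wake wakes one waiter
     lll_robust_dead (futex, priv)     -- "orl FUTEX_OWNER_DIED" marks the
                                          lock as abandoned, then FUTEX_WAKE
                                          lets a waiter recover it
 */
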
#define lll_islocked(futex) \
  (futex != LLL_LOCK_INITIALIZER)

/* The kernel notifies a process which uses CLONE_CHILD_CLEARTID via
   futex wakeup when the clone terminates.  The memory location
   contains the thread ID while the clone is running and is reset to
   zero afterwards.

   The macro parameter must not have any side effect.  */
#define lll_wait_tid(tid) \
  do { \
    int __ignore; \
    register __typeof (tid) _tid __asm__ ("edx") = (tid); \
    if (_tid != 0) \
      __asm__ __volatile__ (LLL_EBX_LOAD \
             "1:\tmovl %1, %%eax\n\t" \
             LLL_ENTER_KERNEL \
             "cmpl $0, (%%ebx)\n\t" \
             "jne 1b\n\t" \
             LLL_EBX_LOAD \
             : "=&a" (__ignore) \
             : "i" (SYS_futex), LLL_EBX_REG (&tid), "S" (0), \
               "c" (FUTEX_WAIT), "d" (_tid), \
               "i" (offsetof (tcbhead_t, sysinfo)) \
             : "memory"); \
  } while (0)
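
/* Illustrative sketch, not part of the original header: lll_wait_tid is
   the join primitive.  The thread was created with CLONE_CHILD_CLEARTID,
   so the kernel zeroes the TID word and issues a FUTEX_WAKE on it when
   the thread exits.  In plain C the loop above is roughly:

     static void
     wait_tid_sketch (int *ptid)
     {
       int tid;
       // Retry until the kernel has cleared *ptid; spurious wake-ups and
       // EINTR/EWOULDBLOCK returns simply go around the loop again.
       while ((tid = *ptid) != 0)
         syscall (SYS_futex, ptid, FUTEX_WAIT, tid, NULL);
     }
 */
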
extern int __lll_timedwait_tid (int *tid, const struct timespec *abstime)
     __attribute__ ((regparm (2))) attribute_hidden;

#define lll_timedwait_tid(tid, abstime) \
  ({ \
    int __ret = 0; \
    if (tid != 0) \
      { \
        if (abstime->tv_nsec < 0 || abstime->tv_nsec >= 1000000000) \
          __ret = EINVAL; \
        else \
          __ret = __lll_timedwait_tid (&tid, abstime); \
      } \
    __ret; })

#endif  /* !__ASSEMBLER__ */

#endif  /* lowlevellock.h */