/* Copyright (C) 2002-2018 Free Software Foundation, Inc.
   This file is part of the GNU C Library.
   Contributed by Ulrich Drepper <drepper@redhat.com>, 2002.

   The GNU C Library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
   License as published by the Free Software Foundation; either
   version 2.1 of the License, or (at your option) any later version.

   The GNU C Library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with the GNU C Library; if not, see
   <http://www.gnu.org/licenses/>.  */

#ifndef _LOWLEVELLOCK_H
#define _LOWLEVELLOCK_H 1

#include <stap-probe.h>

#ifndef __ASSEMBLER__
# include <time.h>
# include <sys/param.h>
# include <bits/pthreadtypes.h>
# include <kernel-features.h>

# ifndef LOCK_INSTR
#  ifdef UP
#   define LOCK_INSTR	/* nothing */
#  else
#   define LOCK_INSTR "lock;"
#  endif
# endif
#else
# ifndef LOCK
#  ifdef UP
#   define LOCK
#  else
#   define LOCK lock
#  endif
# endif
#endif

#include <lowlevellock-futex.h>

/* XXX Remove when no assembler code uses futexes anymore.  */
#define SYS_futex __NR_futex

#ifndef __ASSEMBLER__

/* Initializers for the lock.  The lock word holds one of three values:
   0 means unlocked, 1 means locked with no waiters, and 2 means locked
   with waiters possibly blocked in the kernel.  */
#define LLL_LOCK_INITIALIZER		(0)
#define LLL_LOCK_INITIALIZER_LOCKED	(1)
#define LLL_LOCK_INITIALIZER_WAITERS	(2)


/* NB: in the lll_trylock macro we simply return the value in %eax
   after the cmpxchg instruction.  In case the operation succeeded this
   value is zero.  In case the operation failed, the cmpxchg instruction
   has loaded the current value of the memory word, which is guaranteed
   to be nonzero.  */
#if !IS_IN (libc) || defined UP
# define __lll_trylock_asm LOCK_INSTR "cmpxchgl %2, %1"
#else
# define __lll_trylock_asm "cmpl $0, __libc_multiple_threads(%%rip)\n\t" \
			   "je 0f\n\t" \
			   "lock; cmpxchgl %2, %1\n\t" \
			   "jmp 1f\n\t" \
			   "0:\tcmpxchgl %2, %1\n\t" \
			   "1:"
#endif
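
/* Note on the second variant above: when this header is compiled into
   libc itself and not for a uniprocessor (UP) build, the code first
   tests __libc_multiple_threads and skips the expensive LOCK prefix
   while the process is still single-threaded, since no other thread
   can race for the lock word in that case.  */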

#define lll_trylock(futex) \
  ({ int ret; \
     __asm __volatile (__lll_trylock_asm \
		       : "=a" (ret), "=m" (futex) \
		       : "r" (LLL_LOCK_INITIALIZER_LOCKED), "m" (futex), \
			 "0" (LLL_LOCK_INITIALIZER) \
		       : "memory"); \
     ret; })
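
/* For reference only, a rough C-level sketch of what lll_trylock
   computes, written with the generic glibc atomic helper (the real
   definition is the inline assembly above):

     int old = atomic_compare_and_exchange_val_acq
       (&futex, LLL_LOCK_INITIALIZER_LOCKED, LLL_LOCK_INITIALIZER);

   OLD, and hence the macro's result, is 0 if the lock was free and has
   just been acquired, and nonzero if somebody else already holds it.  */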

#define lll_cond_trylock(futex) \
  ({ int ret; \
     __asm __volatile (LOCK_INSTR "cmpxchgl %2, %1" \
		       : "=a" (ret), "=m" (futex) \
		       : "r" (LLL_LOCK_INITIALIZER_WAITERS), \
			 "m" (futex), "0" (LLL_LOCK_INITIALIZER) \
		       : "memory"); \
     ret; })

#if !IS_IN (libc) || defined UP
# define __lll_lock_asm_start LOCK_INSTR "cmpxchgl %4, %2\n\t" \
			      "jz 24f\n\t"
#else
# define __lll_lock_asm_start "cmpl $0, __libc_multiple_threads(%%rip)\n\t" \
			      "je 0f\n\t" \
			      "lock; cmpxchgl %4, %2\n\t" \
			      "jnz 1f\n\t" \
			      "jmp 24f\n" \
			      "0:\tcmpxchgl %4, %2\n\t" \
			      "jz 24f\n\t"
#endif

#define lll_lock(futex, private) \
  (void) \
    ({ int ignore1, ignore2, ignore3; \
       if (__builtin_constant_p (private) && (private) == LLL_PRIVATE) \
	 __asm __volatile (__lll_lock_asm_start \
			   "1:\tlea %2, %%" RDI_LP "\n" \
			   "2:\tsub $128, %%" RSP_LP "\n" \
			   ".cfi_adjust_cfa_offset 128\n" \
			   "3:\tcallq __lll_lock_wait_private\n" \
			   "4:\tadd $128, %%" RSP_LP "\n" \
			   ".cfi_adjust_cfa_offset -128\n" \
			   "24:" \
			   : "=S" (ignore1), "=&D" (ignore2), "=m" (futex), \
			     "=a" (ignore3) \
			   : "0" (1), "m" (futex), "3" (0) \
			   : "cx", "r11", "cc", "memory"); \
       else \
	 __asm __volatile (__lll_lock_asm_start \
			   "1:\tlea %2, %%" RDI_LP "\n" \
			   "2:\tsub $128, %%" RSP_LP "\n" \
			   ".cfi_adjust_cfa_offset 128\n" \
			   "3:\tcallq __lll_lock_wait\n" \
			   "4:\tadd $128, %%" RSP_LP "\n" \
			   ".cfi_adjust_cfa_offset -128\n" \
			   "24:" \
			   : "=S" (ignore1), "=D" (ignore2), "=m" (futex), \
			     "=a" (ignore3) \
			   : "1" (1), "m" (futex), "3" (0), "0" (private) \
			   : "cx", "r11", "cc", "memory"); \
    })
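
/* Conceptually (a sketch only, not the exact code generated above),
   lll_lock does

     if (atomic_compare_and_exchange_bool_acq (&futex, 1, 0))
       __lll_lock_wait (&futex, private);

   i.e. try to move the lock word from 0 to 1 and fall back to the
   futex-based slow path if that fails.  The callq is bracketed by
   "sub $128, %rsp" / "add $128, %rsp" so that the callee cannot
   clobber the 128-byte red zone the surrounding function may still be
   using.  */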

#define lll_cond_lock(futex, private) \
  (void) \
    ({ int ignore1, ignore2, ignore3; \
       __asm __volatile (LOCK_INSTR "cmpxchgl %4, %2\n\t" \
			 "jz 24f\n" \
			 "1:\tlea %2, %%" RDI_LP "\n" \
			 "2:\tsub $128, %%" RSP_LP "\n" \
			 ".cfi_adjust_cfa_offset 128\n" \
			 "3:\tcallq __lll_lock_wait\n" \
			 "4:\tadd $128, %%" RSP_LP "\n" \
			 ".cfi_adjust_cfa_offset -128\n" \
			 "24:" \
			 : "=S" (ignore1), "=D" (ignore2), "=m" (futex), \
			   "=a" (ignore3) \
			 : "1" (2), "m" (futex), "3" (0), "0" (private) \
			 : "cx", "r11", "cc", "memory"); \
    })
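
/* lll_cond_lock differs from lll_lock only in that it acquires the lock
   with value 2 ("locked, possibly with waiters") rather than 1, so the
   matching lll_unlock will always take the wake-up path.  */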

#define lll_timedlock(futex, timeout, private) \
  ({ int result, ignore1, ignore2, ignore3; \
     __asm __volatile (LOCK_INSTR "cmpxchgl %1, %4\n\t" \
		       "jz 24f\n" \
		       "1:\tlea %4, %%" RDI_LP "\n" \
		       "0:\tmov %8, %%" RDX_LP "\n" \
		       "2:\tsub $128, %%" RSP_LP "\n" \
		       ".cfi_adjust_cfa_offset 128\n" \
		       "3:\tcallq __lll_timedlock_wait\n" \
		       "4:\tadd $128, %%" RSP_LP "\n" \
		       ".cfi_adjust_cfa_offset -128\n" \
		       "24:" \
		       : "=a" (result), "=D" (ignore1), "=S" (ignore2), \
			 "=&d" (ignore3), "=m" (futex) \
		       : "0" (0), "1" (1), "m" (futex), "m" (timeout), \
			 "2" (private) \
		       : "memory", "cx", "cc", "r10", "r11"); \
     result; })
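
/* lll_timedlock behaves like lll_lock but gives up once the absolute
   timeout TIMEOUT has passed: it evaluates to 0 when the lock was
   acquired and otherwise to the nonzero error code returned by
   __lll_timedlock_wait (ETIMEDOUT when the timeout expires).  */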

extern int __lll_timedlock_elision (int *futex, short *adapt_count,
				    const struct timespec *timeout,
				    int private) attribute_hidden;

#define lll_timedlock_elision(futex, adapt_count, timeout, private) \
  __lll_timedlock_elision(&(futex), &(adapt_count), timeout, private)

#if !IS_IN (libc) || defined UP
# define __lll_unlock_asm_start LOCK_INSTR "decl %0\n\t" \
				"je 24f\n\t"
#else
# define __lll_unlock_asm_start "cmpl $0, __libc_multiple_threads(%%rip)\n\t" \
				"je 0f\n\t" \
				"lock; decl %0\n\t" \
				"jne 1f\n\t" \
				"jmp 24f\n\t" \
				"0:\tdecl %0\n\t" \
				"je 24f\n\t"
#endif

#define lll_unlock(futex, private) \
  (void) \
    ({ int ignore; \
       if (__builtin_constant_p (private) && (private) == LLL_PRIVATE) \
	 __asm __volatile (__lll_unlock_asm_start \
			   "1:\tlea %0, %%" RDI_LP "\n" \
			   "2:\tsub $128, %%" RSP_LP "\n" \
			   ".cfi_adjust_cfa_offset 128\n" \
			   "3:\tcallq __lll_unlock_wake_private\n" \
			   "4:\tadd $128, %%" RSP_LP "\n" \
			   ".cfi_adjust_cfa_offset -128\n" \
			   "24:" \
			   : "=m" (futex), "=&D" (ignore) \
			   : "m" (futex) \
			   : "ax", "cx", "r11", "cc", "memory"); \
       else \
	 __asm __volatile (__lll_unlock_asm_start \
			   "1:\tlea %0, %%" RDI_LP "\n" \
			   "2:\tsub $128, %%" RSP_LP "\n" \
			   ".cfi_adjust_cfa_offset 128\n" \
			   "3:\tcallq __lll_unlock_wake\n" \
			   "4:\tadd $128, %%" RSP_LP "\n" \
			   ".cfi_adjust_cfa_offset -128\n" \
			   "24:" \
			   : "=m" (futex), "=&D" (ignore) \
			   : "m" (futex), "S" (private) \
			   : "ax", "cx", "r11", "cc", "memory"); \
    })
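
/* The unlock fast path above atomically decrements the lock word: if
   the old value was 1 (locked, no waiters) it is now 0 and nothing else
   needs to happen; if it was 2, __lll_unlock_wake (or its _private
   variant) resets the word to 0 and issues a FUTEX_WAKE for one waiter.
   For illustration only, the exchange-based formulation used by the
   generic lowlevellock.h is

     if (atomic_exchange_rel (&futex, 0) > 1)
       lll_futex_wake (&futex, 1, private);
*/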

#define lll_islocked(futex) \
  (futex != LLL_LOCK_INITIALIZER)


/* The kernel notifies a process which uses CLONE_CHILD_CLEARTID via futex
   wake-up when the clone terminates.  The memory location contains the
   thread ID while the clone is running and is reset to zero by the kernel
   afterwards.  The kernel up to version 3.16.3 does not use the private futex
   operations for futex wake-up when the clone terminates.  */
#define lll_wait_tid(tid) \
  do { \
    __typeof (tid) __tid; \
    while ((__tid = (tid)) != 0) \
      lll_futex_wait (&(tid), __tid, LLL_SHARED); \
  } while (0)
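
/* Typical use (a sketch along the lines of pthread_join, where PD is
   assumed to be a struct pthread whose tid field the kernel clears via
   CLONE_CHILD_CLEARTID when the thread exits):

     lll_wait_tid (pd->tid);

   The call returns only once the thread behind PD has terminated.  */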

extern int __lll_timedwait_tid (int *, const struct timespec *)
     attribute_hidden;

/* As lll_wait_tid, but with a timeout.  If the timeout occurs then return
   ETIMEDOUT.  If ABSTIME is invalid, return EINVAL.
   XXX Note that this differs from the generic version in that we do the
   error checking here and not in __lll_timedwait_tid.  */
#define lll_timedwait_tid(tid, abstime) \
  ({ \
    int __result = 0; \
    if ((tid) != 0) \
      { \
	if ((abstime)->tv_nsec < 0 || (abstime)->tv_nsec >= 1000000000) \
	  __result = EINVAL; \
	else \
	  __result = __lll_timedwait_tid (&(tid), (abstime)); \
      } \
    __result; })

extern int __lll_lock_elision (int *futex, short *adapt_count, int private)
  attribute_hidden;

extern int __lll_unlock_elision (int *lock, int private)
  attribute_hidden;

extern int __lll_trylock_elision (int *lock, short *adapt_count)
  attribute_hidden;

#define lll_lock_elision(futex, adapt_count, private) \
  __lll_lock_elision (&(futex), &(adapt_count), private)
#define lll_unlock_elision(futex, adapt_count, private) \
  __lll_unlock_elision (&(futex), private)
#define lll_trylock_elision(futex, adapt_count) \
  __lll_trylock_elision (&(futex), &(adapt_count))
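
/* The __lll_*_elision functions implement the adaptive lock-elision
   (Intel TSX) variants of the operations above; ADAPT_COUNT records
   recent transactional aborts and controls when to fall back to the
   plain futex-based code.  */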

#endif  /* !__ASSEMBLER__ */

#endif  /* lowlevellock.h */