/* Definition for thread-local data handling.  nptl/x86_64 version.
   Copyright (C) 2002-2017 Free Software Foundation, Inc.
   This file is part of the GNU C Library.

   The GNU C Library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
   License as published by the Free Software Foundation; either
   version 2.1 of the License, or (at your option) any later version.

   The GNU C Library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with the GNU C Library; if not, see
   <http://www.gnu.org/licenses/>.  */

#ifndef _TLS_H
#define _TLS_H 1

#ifndef __ASSEMBLER__
# include <asm/prctl.h>	/* For ARCH_SET_FS.  */
# include <stdbool.h>
# include <stddef.h>
# include <stdint.h>
# include <stdlib.h>
# include <sysdep.h>
# include <libc-internal.h>
# include <kernel-features.h>
# include <dl-dtv.h>

/* Replacement type for __m128 since this file is included by ld.so,
   which is compiled with -mno-sse.  It must not change the alignment
   of rtld_savespace_sse.  */
typedef struct
{
  int i[4];
} __128bits;


typedef struct
{
  void *tcb;		/* Pointer to the TCB.  Not necessarily the
			   thread descriptor used by libpthread.  */
  dtv_t *dtv;
  void *self;		/* Pointer to the thread descriptor.  */
  int multiple_threads;
  int gscope_flag;
  uintptr_t sysinfo;
  uintptr_t stack_guard;
  uintptr_t pointer_guard;
  unsigned long int vgetcpu_cache[2];
# ifndef __ASSUME_PRIVATE_FUTEX
  int private_futex;
# else
  int __glibc_reserved1;
# endif
  int __glibc_unused1;
  /* Reservation of some values for the TM ABI.  */
  void *__private_tm[4];
  /* GCC split stack support.  */
  void *__private_ss;
  long int __glibc_reserved2;
  /* Must be kept even if it is no longer used by glibc since programs,
     like AddressSanitizer, depend on the size of tcbhead_t.  */
  __128bits __glibc_unused2[8][4] __attribute__ ((aligned (32)));

  void *__padding[8];
} tcbhead_t;
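
/* Illustrative note, not part of the original header: the thread pointer
   (the %fs base on x86_64) points at this tcbhead_t, so its members are
   reached with %fs-relative addressing at fixed offsets.  For example,
   GCC's -fstack-protector reads the stack canary as %fs:0x28, which is
   the offset of stack_guard above; that is why the leading layout must
   stay stable.  A minimal sketch of such an access:

     uintptr_t __guard;
     asm ("movq %%fs:%c1, %0"
          : "=r" (__guard)
          : "i" (offsetof (tcbhead_t, stack_guard)));
*/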

#else /* __ASSEMBLER__ */
# include <tcb-offsets.h>
#endif


/* Alignment requirement for the stack.  */
#define STACK_ALIGN 16


#ifndef __ASSEMBLER__
/* Get system call information.  */
# include <sysdep.h>

#ifndef LOCK_PREFIX
# ifdef UP
#  define LOCK_PREFIX /* nothing */
# else
#  define LOCK_PREFIX "lock;"
# endif
#endif

/* This is the size of the initial TCB.  Can't be just sizeof (tcbhead_t),
   because NPTL getpid, __libc_alloca_cutoff etc. need (almost) the whole
   struct pthread even when not linked with -lpthread.  */
# define TLS_INIT_TCB_SIZE sizeof (struct pthread)

/* Alignment requirements for the initial TCB.  */
# define TLS_INIT_TCB_ALIGN __alignof__ (struct pthread)

/* This is the size of the TCB.  */
# define TLS_TCB_SIZE sizeof (struct pthread)

/* Alignment requirements for the TCB.  */
# define TLS_TCB_ALIGN __alignof__ (struct pthread)

/* The TCB can have any size and the memory following the address the
   thread pointer points to is unspecified.  Allocate the TCB there.  */
# define TLS_TCB_AT_TP 1
# define TLS_DTV_AT_TP 0

/* Get the thread descriptor definition.  */
# include <nptl/descr.h>


/* Install the dtv pointer.  The pointer passed is to the element with
   index -1 which contains the length.  */
# define INSTALL_DTV(descr, dtvp) \
  ((tcbhead_t *) (descr))->dtv = (dtvp) + 1

/* Install new dtv for current thread.  */
# define INSTALL_NEW_DTV(dtvp) \
  ({ struct pthread *__pd; \
     THREAD_SETMEM (__pd, header.dtv, (dtvp)); })

/* Return dtv of given thread descriptor.  */
# define GET_DTV(descr) \
  (((tcbhead_t *) (descr))->dtv)
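
/* Illustrative sketch, not part of the original header: the dtv pointer
   stored in the TCB is biased by one element so that dtv[-1] still holds
   the allocated length.  Assuming a freshly allocated dtv_t array
   `dtv_mem` whose element 0 carries the count:

     INSTALL_DTV (pd, &dtv_mem[0]);     stores &dtv_mem[1] in the TCB
     dtv_t *dtv = GET_DTV (pd);         yields &dtv_mem[1]
     size_t len = dtv[-1].counter;      the length lives at index -1
*/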


/* Code to perform the initial setup of the thread pointer.  This might
   need special attention since 'errno' is not yet available and, if the
   operation can fail, 'errno' must not be touched.

   We have to make the syscall for both uses of the macro since the
   address might be (and probably is) different.  */
# define TLS_INIT_TP(thrdescr) \
  ({ void *_thrdescr = (thrdescr); \
     tcbhead_t *_head = _thrdescr; \
     int _result; \
 \
     _head->tcb = _thrdescr; \
     /* For now the thread descriptor is at the same address.  */ \
     _head->self = _thrdescr; \
 \
     /* It is a simple syscall to set the %fs value for the thread.  */ \
     asm volatile ("syscall" \
                   : "=a" (_result) \
                   : "0" ((unsigned long int) __NR_arch_prctl), \
                     "D" ((unsigned long int) ARCH_SET_FS), \
                     "S" (_thrdescr) \
                   : "memory", "cc", "r11", "cx"); \
 \
    _result ? "cannot set %fs base address for thread-local storage" : 0; \
  })
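
/* Usage sketch, illustrative only: TLS_INIT_TP yields a null pointer on
   success and an error-message string on failure, so early startup code
   is expected to do something along the lines of

     const char *lossage = TLS_INIT_TP (tcbp);
     if (lossage != NULL)
       __libc_fatal (lossage);

   where `tcbp` and the fatal-error routine are stand-ins for whatever
   the actual startup code uses.  */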

# define TLS_DEFINE_INIT_TP(tp, pd) void *tp = (pd)


/* Return the address of the dtv for the current thread.  */
# define THREAD_DTV() \
  ({ struct pthread *__pd; \
     THREAD_GETMEM (__pd, header.dtv); })


/* Return the thread descriptor for the current thread.

   The contained asm must *not* be marked volatile since otherwise
   assignments like
	pthread_descr self = thread_self();
   do not get optimized away.  */
# define THREAD_SELF \
  ({ struct pthread *__self; \
     asm ("mov %%fs:%c1,%0" : "=r" (__self) \
          : "i" (offsetof (struct pthread, header.self))); \
     __self;})
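
/* Illustrative example, not part of the original header: THREAD_SELF
   performs a single %fs-relative load of the self pointer stored in the
   TCB, so code such as

     struct pthread *self = THREAD_SELF;
     if (self->header.multiple_threads)
       ...;

   compiles to one mov from %fs plus ordinary memory accesses through the
   returned descriptor.  */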

/* Magic for libthread_db to know how to do THREAD_SELF.  */
# define DB_THREAD_SELF_INCLUDE  <sys/reg.h> /* For the FS constant.  */
# define DB_THREAD_SELF CONST_THREAD_AREA (64, FS)

/* Read member of the thread descriptor directly.  */
# define THREAD_GETMEM(descr, member) \
  ({ __typeof (descr->member) __value; \
     if (sizeof (__value) == 1) \
       asm volatile ("movb %%fs:%P2,%b0" \
                     : "=q" (__value) \
                     : "0" (0), "i" (offsetof (struct pthread, member))); \
     else if (sizeof (__value) == 4) \
       asm volatile ("movl %%fs:%P1,%0" \
                     : "=r" (__value) \
                     : "i" (offsetof (struct pthread, member))); \
     else \
       { \
         if (sizeof (__value) != 8) \
           /* There should not be any value with a size other than 1, \
              4 or 8.  */ \
           abort (); \
 \
         asm volatile ("movq %%fs:%P1,%q0" \
                       : "=r" (__value) \
                       : "i" (offsetof (struct pthread, member))); \
       } \
     __value; })
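
/* Illustrative example, not part of the original header: the descr
   argument only supplies the member's type and offset; the access always
   goes through the current thread's %fs segment.  Reading a 4-byte
   member expands to a single movl, e.g.

     int oldval = THREAD_GETMEM (THREAD_SELF, cancelhandling);
*/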


/* Same as THREAD_GETMEM, but the member offset can be non-constant.  */
# define THREAD_GETMEM_NC(descr, member, idx) \
  ({ __typeof (descr->member[0]) __value; \
     if (sizeof (__value) == 1) \
       asm volatile ("movb %%fs:%P2(%q3),%b0" \
                     : "=q" (__value) \
                     : "0" (0), "i" (offsetof (struct pthread, member[0])), \
                       "r" (idx)); \
     else if (sizeof (__value) == 4) \
       asm volatile ("movl %%fs:%P1(,%q2,4),%0" \
                     : "=r" (__value) \
                     : "i" (offsetof (struct pthread, member[0])), "r" (idx)); \
     else \
       { \
         if (sizeof (__value) != 8) \
           /* There should not be any value with a size other than 1, \
              4 or 8.  */ \
           abort (); \
 \
         asm volatile ("movq %%fs:%P1(,%q2,8),%q0" \
                       : "=r" (__value) \
                       : "i" (offsetof (struct pthread, member[0])), \
                         "r" (idx)); \
       } \
     __value; })
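
/* Illustrative example, not part of the original header: the _NC variant
   is meant for array members, where the index is scaled by the element
   size in the addressing mode.  With the `specific` pointer array of
   struct pthread, a lookup would be roughly

     struct pthread_key_data *level2
       = THREAD_GETMEM_NC (THREAD_SELF, specific, idx);
*/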


/* Loading addresses of objects on x86-64 needs to be treated specially
   when generating PIC code.  */
#ifdef __pic__
# define IMM_MODE "nr"
#else
# define IMM_MODE "ir"
#endif


/* Set member of the thread descriptor directly.  */
# define THREAD_SETMEM(descr, member, value) \
  ({ if (sizeof (descr->member) == 1) \
       asm volatile ("movb %b0,%%fs:%P1" : \
                     : "iq" (value), \
                       "i" (offsetof (struct pthread, member))); \
     else if (sizeof (descr->member) == 4) \
       asm volatile ("movl %0,%%fs:%P1" : \
                     : IMM_MODE (value), \
                       "i" (offsetof (struct pthread, member))); \
     else \
       { \
         if (sizeof (descr->member) != 8) \
           /* There should not be any value with a size other than 1, \
              4 or 8.  */ \
           abort (); \
 \
         asm volatile ("movq %q0,%%fs:%P1" : \
                       : IMM_MODE ((uint64_t) cast_to_integer (value)), \
                         "i" (offsetof (struct pthread, member))); \
       }})
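
/* Illustrative example, not part of the original header: as with
   THREAD_GETMEM, the descr argument only determines the type and offset
   of the member; the store always targets the current thread via %fs.
   A typical 8-byte store would be something like

     THREAD_SETMEM (THREAD_SELF, header.pointer_guard, new_guard);
*/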


/* Same as THREAD_SETMEM, but the member offset can be non-constant.  */
# define THREAD_SETMEM_NC(descr, member, idx, value) \
  ({ if (sizeof (descr->member[0]) == 1) \
       asm volatile ("movb %b0,%%fs:%P1(%q2)" : \
                     : "iq" (value), \
                       "i" (offsetof (struct pthread, member[0])), \
                       "r" (idx)); \
     else if (sizeof (descr->member[0]) == 4) \
       asm volatile ("movl %0,%%fs:%P1(,%q2,4)" : \
                     : IMM_MODE (value), \
                       "i" (offsetof (struct pthread, member[0])), \
                       "r" (idx)); \
     else \
       { \
         if (sizeof (descr->member[0]) != 8) \
           /* There should not be any value with a size other than 1, \
              4 or 8.  */ \
           abort (); \
 \
         asm volatile ("movq %q0,%%fs:%P1(,%q2,8)" : \
                       : IMM_MODE ((uint64_t) cast_to_integer (value)), \
                         "i" (offsetof (struct pthread, member[0])), \
                         "r" (idx)); \
       }})


/* Atomic compare and exchange on TLS, returning old value.  */
# define THREAD_ATOMIC_CMPXCHG_VAL(descr, member, newval, oldval) \
  ({ __typeof (descr->member) __ret; \
     __typeof (oldval) __old = (oldval); \
     if (sizeof (descr->member) == 4) \
       asm volatile (LOCK_PREFIX "cmpxchgl %2, %%fs:%P3" \
                     : "=a" (__ret) \
                     : "0" (__old), "r" (newval), \
                       "i" (offsetof (struct pthread, member))); \
     else \
       /* Not necessary for other sizes at the moment.  */ \
       abort (); \
     __ret; })


/* Atomic logical and.  */
# define THREAD_ATOMIC_AND(descr, member, val) \
  (void) ({ if (sizeof ((descr)->member) == 4) \
              asm volatile (LOCK_PREFIX "andl %1, %%fs:%P0" \
                            :: "i" (offsetof (struct pthread, member)), \
                               "ir" (val)); \
            else \
              /* Not necessary for other sizes at the moment.  */ \
              abort (); })


/* Atomic set bit.  */
# define THREAD_ATOMIC_BIT_SET(descr, member, bit) \
  (void) ({ if (sizeof ((descr)->member) == 4) \
              asm volatile (LOCK_PREFIX "orl %1, %%fs:%P0" \
                            :: "i" (offsetof (struct pthread, member)), \
                               "ir" (1 << (bit))); \
            else \
              /* Not necessary for other sizes at the moment.  */ \
              abort (); })
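
/* Illustrative example, not part of the original header: these atomic
   helpers operate on 4-byte members of the current thread's descriptor
   through %fs using a locked instruction, e.g. setting a cancellation
   flag bit in the cancelhandling word:

     THREAD_ATOMIC_BIT_SET (THREAD_SELF, cancelhandling, CANCELED_BIT);
*/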


# define CALL_THREAD_FCT(descr) \
  ({ void *__res; \
     asm volatile ("movq %%fs:%P2, %%rdi\n\t" \
                   "callq *%%fs:%P1" \
                   : "=a" (__res) \
                   : "i" (offsetof (struct pthread, start_routine)), \
                     "i" (offsetof (struct pthread, arg)) \
                   : "di", "si", "cx", "dx", "r8", "r9", "r10", "r11", \
                     "memory", "cc"); \
     __res; })
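
/* Illustrative note, not part of the original header: this loads the
   thread's arg into %rdi and calls its start_routine indirectly through
   %fs, leaving the routine's return value in %rax (__res).  A new
   thread's trampoline would use it roughly as

     void *ret = CALL_THREAD_FCT (pd);

   with pd standing in for the thread's own descriptor.  */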


/* Set the stack guard field in TCB head.  */
# define THREAD_SET_STACK_GUARD(value) \
  THREAD_SETMEM (THREAD_SELF, header.stack_guard, value)
# define THREAD_COPY_STACK_GUARD(descr) \
  ((descr)->header.stack_guard \
   = THREAD_GETMEM (THREAD_SELF, header.stack_guard))


/* Set the pointer guard field in the TCB head.  */
# define THREAD_SET_POINTER_GUARD(value) \
  THREAD_SETMEM (THREAD_SELF, header.pointer_guard, value)
# define THREAD_COPY_POINTER_GUARD(descr) \
  ((descr)->header.pointer_guard \
   = THREAD_GETMEM (THREAD_SELF, header.pointer_guard))


/* Get and set the global scope generation counter in the TCB head.  */
# define THREAD_GSCOPE_FLAG_UNUSED 0
# define THREAD_GSCOPE_FLAG_USED   1
# define THREAD_GSCOPE_FLAG_WAIT   2
# define THREAD_GSCOPE_RESET_FLAG() \
  do \
    { int __res; \
      asm volatile ("xchgl %0, %%fs:%P1" \
                    : "=r" (__res) \
                    : "i" (offsetof (struct pthread, header.gscope_flag)), \
                      "0" (THREAD_GSCOPE_FLAG_UNUSED)); \
      if (__res == THREAD_GSCOPE_FLAG_WAIT) \
        lll_futex_wake (&THREAD_SELF->header.gscope_flag, 1, LLL_PRIVATE); \
    } \
  while (0)
# define THREAD_GSCOPE_SET_FLAG() \
  THREAD_SETMEM (THREAD_SELF, header.gscope_flag, THREAD_GSCOPE_FLAG_USED)
# define THREAD_GSCOPE_WAIT() \
  GL(dl_wait_lookup_done) ()
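
/* Illustrative note, not part of the original header: the dynamic linker
   brackets lookups in the global scope with these macros.  A reader
   roughly does

     THREAD_GSCOPE_SET_FLAG ();
     ... look up symbols in the global scope ...
     THREAD_GSCOPE_RESET_FLAG ();

   while paths that modify the global scope call THREAD_GSCOPE_WAIT to
   block until every thread whose flag was set has reset it (waking any
   waiter via the futex in RESET_FLAG).  */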

#endif /* __ASSEMBLER__ */

#endif /* tls.h */