Coverage Report

Created: 2021-11-03 07:11

/rustc/18bc4bee9710b181b440a472635150f0d6257713/library/std/src/thread/local.rs
 Line|  Count|Source
    1|       |//! Thread local storage
    2|       |
    3|       |#![unstable(feature = "thread_local_internals", issue = "none")]
    4|       |
    5|       |#[cfg(all(test, not(target_os = "emscripten")))]
    6|       |mod tests;
    7|       |
    8|       |#[cfg(test)]
    9|       |mod dynamic_tests;
   10|       |
   11|       |use crate::error::Error;
   12|       |use crate::fmt;
   13|       |
   14|       |/// A thread local storage key which owns its contents.
   15|       |///
   16|       |/// This key uses the fastest possible implementation available to it for the
   17|       |/// target platform. It is instantiated with the [`thread_local!`] macro and the
   18|       |/// primary method is the [`with`] method.
   19|       |///
   20|       |/// The [`with`] method yields a reference to the contained value which cannot be
   21|       |/// sent across threads or escape the given closure.
   22|       |///
   23|       |/// # Initialization and Destruction
   24|       |///
   25|       |/// Initialization is dynamically performed on the first call to [`with`]
   26|       |/// within a thread, and values that implement [`Drop`] get destructed when a
   27|       |/// thread exits. Some caveats apply, which are explained below.
   28|       |///
   29|       |/// A `LocalKey`'s initializer cannot recursively depend on itself, and using
   30|       |/// a `LocalKey` in this way will cause the initializer to infinitely recurse
   31|       |/// on the first call to `with`.
   32|       |///
   33|       |/// # Examples
   34|       |///
   35|       |/// ```
   36|       |/// use std::cell::RefCell;
   37|       |/// use std::thread;
   38|       |///
   39|       |/// thread_local!(static FOO: RefCell<u32> = RefCell::new(1));
   40|       |///
   41|       |/// FOO.with(|f| {
   42|       |///     assert_eq!(*f.borrow(), 1);
   43|       |///     *f.borrow_mut() = 2;
   44|       |/// });
   45|       |///
   46|       |/// // each thread starts out with the initial value of 1
   47|       |/// let t = thread::spawn(move|| {
   48|       |///     FOO.with(|f| {
   49|       |///         assert_eq!(*f.borrow(), 1);
   50|       |///         *f.borrow_mut() = 3;
   51|       |///     });
   52|       |/// });
   53|       |///
   54|       |/// // wait for the thread to complete and bail out on panic
   55|       |/// t.join().unwrap();
   56|       |///
   57|       |/// // we retain our original value of 2 despite the child thread
   58|       |/// FOO.with(|f| {
   59|       |///     assert_eq!(*f.borrow(), 2);
   60|       |/// });
   61|       |/// ```
   62|       |///
   63|       |/// # Platform-specific behavior
   64|       |///
   65|       |/// Note that a "best effort" is made to ensure that destructors for types
   66|       |/// stored in thread local storage are run, but not all platforms can guarantee
   67|       |/// that destructors will be run for all types in thread local storage. For
   68|       |/// example, there are a number of known caveats where destructors are not run:
   69|       |///
   70|       |/// 1. On Unix systems when pthread-based TLS is being used, destructors will
   71|       |///    not be run for TLS values on the main thread when it exits. Note that the
   72|       |///    application will exit immediately after the main thread exits as well.
   73|       |/// 2. On all platforms it's possible for TLS to re-initialize other TLS slots
   74|       |///    during destruction. Some platforms ensure that this cannot happen
   75|       |///    infinitely by preventing re-initialization of any slot that has been
   76|       |///    destroyed, but not all platforms have this guard. Those platforms that do
   77|       |///    not guard typically have a synthetic limit after which point no more
   78|       |///    destructors are run.
   79|       |///
   80|       |/// [`with`]: LocalKey::with
   81|       |#[stable(feature = "rust1", since = "1.0.0")]
   82|       |pub struct LocalKey<T: 'static> {
   83|       |    // This outer `LocalKey<T>` type is what's going to be stored in statics,
   84|       |    // but actual data inside will sometimes be tagged with #[thread_local].
   85|       |    // It's not valid for a true static to reference a #[thread_local] static,
   86|       |    // so we get around that by exposing an accessor through a layer of function
   87|       |    // indirection (this thunk).
   88|       |    //
   89|       |    // Note that the thunk is itself unsafe because the returned lifetime of the
   90|       |    // slot where data lives, `'static`, is not actually valid. The lifetime
   91|       |    // here is actually slightly shorter than the currently running thread!
   92|       |    //
   93|       |    // Although this is an extra layer of indirection, it should in theory be
   94|       |    // trivially devirtualizable by LLVM because the value of `inner` never
   95|       |    // changes and the constant should be readonly within a crate. This mainly
   96|       |    // only runs into problems when TLS statics are exported across crates.
   97|       |    inner: unsafe fn() -> Option<&'static T>,
   98|       |}
   99|       |
  100|       |#[stable(feature = "std_debug", since = "1.16.0")]
  101|       |impl<T: 'static> fmt::Debug for LocalKey<T> {
  102|       |    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
  103|       |        f.debug_struct("LocalKey").finish_non_exhaustive()
  104|       |    }
  105|       |}
  106|       |
  107|       |/// Declare a new thread local storage key of type [`std::thread::LocalKey`].
  108|       |///
  109|       |/// # Syntax
  110|       |///
  111|       |/// The macro wraps any number of static declarations and makes them thread local.
  112|       |/// Publicity and attributes for each static are allowed. Example:
  113|       |///
  114|       |/// ```
  115|       |/// use std::cell::RefCell;
  116|       |/// thread_local! {
  117|       |///     pub static FOO: RefCell<u32> = RefCell::new(1);
  118|       |///
  119|       |///     #[allow(unused)]
  120|       |///     static BAR: RefCell<f32> = RefCell::new(1.0);
  121|       |/// }
  122|       |/// # fn main() {}
  123|       |/// ```
  124|       |///
  125|       |/// See [`LocalKey` documentation][`std::thread::LocalKey`] for more
  126|       |/// information.
  127|       |///
  128|       |/// [`std::thread::LocalKey`]: crate::thread::LocalKey
  129|       |#[macro_export]
  130|       |#[stable(feature = "rust1", since = "1.0.0")]
  131|       |#[allow_internal_unstable(thread_local_internals)]
  132|       |macro_rules! thread_local {
  133|       |    // empty (base case for the recursion)
  134|       |    () => {};
  135|       |
  136|       |    ($(#[$attr:meta])* $vis:vis static $name:ident: $t:ty = const { $init:expr }; $($rest:tt)*) => (
  137|       |        $crate::__thread_local_inner!($(#[$attr])* $vis $name, $t, const $init);
  138|       |        $crate::thread_local!($($rest)*);
  139|       |    );
  140|       |
  141|       |    ($(#[$attr:meta])* $vis:vis static $name:ident: $t:ty = const { $init:expr }) => (
  142|       |        $crate::__thread_local_inner!($(#[$attr])* $vis $name, $t, const $init);
  143|       |    );
  144|       |
  145|       |    // process multiple declarations
  146|       |    ($(#[$attr:meta])* $vis:vis static $name:ident: $t:ty = $init:expr; $($rest:tt)*) => (
  147|       |        $crate::__thread_local_inner!($(#[$attr])* $vis $name, $t, $init);
  148|       |        $crate::thread_local!($($rest)*);
  149|       |    );
  150|       |
  151|       |    // handle a single declaration
  152|       |    ($(#[$attr:meta])* $vis:vis static $name:ident: $t:ty = $init:expr) => (
  153|       |        $crate::__thread_local_inner!($(#[$attr])* $vis $name, $t, $init);
  154|       |    );
  155|       |}
  156|       |
  157|       |#[doc(hidden)]
  158|       |#[unstable(feature = "thread_local_internals", reason = "should not be necessary", issue = "none")]
  159|       |#[macro_export]
  160|       |#[allow_internal_unstable(thread_local_internals, cfg_target_thread_local, thread_local)]
  161|       |#[allow_internal_unsafe]
  162|       |macro_rules! __thread_local_inner {
  163|       |    // used to generate the `LocalKey` value for const-initialized thread locals
  164|       |    (@key $t:ty, const $init:expr) => {{
  165|       |        #[cfg_attr(not(windows), inline)] // see comments below
  166|       |        unsafe fn __getit() -> $crate::option::Option<&'static $t> {
  167|       |            const _REQUIRE_UNSTABLE: () = $crate::thread::require_unstable_const_init_thread_local();
  168|       |
  169|       |            // wasm without atomics maps directly to `static mut`, and dtors
  170|       |            // aren't implemented because thread dtors aren't really a thing
  171|       |            // on wasm right now
  172|       |            //
  173|       |            // FIXME(#84224) this should come after the `target_thread_local`
  174|       |            // block.
  175|       |            #[cfg(all(target_arch = "wasm32", not(target_feature = "atomics")))]
  176|       |            {
  177|       |                static mut VAL: $t = $init;
  178|       |                Some(&VAL)
  179|       |            }
  180|       |
  181|       |            // If the platform has support for `#[thread_local]`, use it.
  182|       |            #[cfg(all(
  183|       |                target_thread_local,
  184|       |                not(all(target_arch = "wasm32", not(target_feature = "atomics"))),
  185|       |            ))]
  186|       |            {
  187|       |                // If a dtor isn't needed we can do something "very raw" and
  188|       |                // just get going.
  189|       |                if !$crate::mem::needs_drop::<$t>() {
  190|       |                    #[thread_local]
  191|       |                    static mut VAL: $t = $init;
  192|       |                    unsafe {
  193|       |                        return Some(&VAL)
  194|       |                    }
  195|       |                }
  196|       |
  197|       |                #[thread_local]
  198|       |                static mut VAL: $t = $init;
  199|       |                // 0 == dtor not registered
  200|       |                // 1 == dtor registered, dtor not run
  201|       |                // 2 == dtor registered and is running or has run
  202|       |                #[thread_local]
  203|       |                static mut STATE: u8 = 0;
  204|       |
  205|       |                unsafe extern "C" fn destroy(ptr: *mut u8) {
  206|       |                    let ptr = ptr as *mut $t;
  207|       |
  208|       |                    unsafe {
  209|       |                        debug_assert_eq!(STATE, 1);
  210|       |                        STATE = 2;
  211|       |                        $crate::ptr::drop_in_place(ptr);
  212|       |                    }
  213|       |                }
  214|       |
  215|       |                unsafe {
  216|       |                    match STATE {
  217|       |                        // 0 == we haven't registered a destructor, so do
  218|       |                        //   so now.
  219|       |                        0 => {
  220|       |                            $crate::thread::__FastLocalKeyInner::<$t>::register_dtor(
  221|       |                                $crate::ptr::addr_of_mut!(VAL) as *mut u8,
  222|       |                                destroy,
  223|       |                            );
  224|       |                            STATE = 1;
  225|       |                            Some(&VAL)
  226|       |                        }
  227|       |                        // 1 == the destructor is registered and the value
  228|       |                        //   is valid, so return the pointer.
  229|       |                        1 => Some(&VAL),
  230|       |                        // otherwise the destructor has already run, so we
  231|       |                        // can't give access.
  232|       |                        _ => None,
  233|       |                    }
  234|       |                }
  235|       |            }
  236|       |
  237|       |            // On platforms without `#[thread_local]` we fall back to the
  238|       |            // same implementation as below for os thread locals.
  239|       |            #[cfg(all(
  240|       |                not(target_thread_local),
  241|       |                not(all(target_arch = "wasm32", not(target_feature = "atomics"))),
  242|       |            ))]
  243|       |            {
  244|       |                #[inline]
  245|       |                const fn __init() -> $t { $init }
  246|       |                static __KEY: $crate::thread::__OsLocalKeyInner<$t> =
  247|       |                    $crate::thread::__OsLocalKeyInner::new();
  248|       |                #[allow(unused_unsafe)]
  249|       |                unsafe { __KEY.get(__init) }
  250|       |            }
  251|       |        }
  252|       |
  253|       |        unsafe {
  254|       |            $crate::thread::LocalKey::new(__getit)
  255|       |        }
  256|       |    }};
  257|       |
  258|       |    // used to generate the `LocalKey` value for `thread_local!`
  259|       |    (@key $t:ty, $init:expr) => {
  260|       |        {
  261|       |            #[inline]
  262|      2|            fn __init() -> $t { $init }
  263|       |
  264|       |            // When reading this function you might ask "why is this inlined
  265|       |            // everywhere other than Windows?", and that's a very reasonable
  266|       |            // question to ask. The short story is that it segfaults rustc if
  267|       |            // this function is inlined. The longer story is that Windows looks
  268|       |            // to not support `extern` references to thread locals across DLL
  269|       |            // boundaries. This appears to at least not be supported in the ABI
  270|       |            // that LLVM implements.
  271|       |            //
  272|       |            // Because of this we never inline on Windows, but we do inline on
  273|       |            // other platforms (where external references to thread locals
  274|       |            // across DLLs are supported). A better fix for this would be to
  275|       |            // inline this function on Windows, but only for "statically linked"
  276|       |            // components. For example if two separately compiled rlibs end up
  277|       |            // getting linked into a DLL then it's fine to inline this function
  278|       |            // across that boundary. It's only not fine to inline this function
  279|       |            // across a DLL boundary. Unfortunately rustc doesn't currently
  280|       |            // have this sort of logic available in an attribute, and it's not
  281|       |            // clear that rustc is even equipped to answer this (it's more of a
  282|       |            // Cargo question kinda). This means that, unfortunately, Windows
  283|       |            // gets the pessimistic path for now where it's never inlined.
  284|       |            //
  285|       |            // The issue of "should enable on Windows sometimes" is #84933
  286|       |            #[cfg_attr(not(windows), inline)]
  287|  63.0k|            unsafe fn __getit() -> $crate::option::Option<&'static $t> {
  288|  63.0k|                #[cfg(all(target_arch = "wasm32", not(target_feature = "atomics")))]
  289|  63.0k|                static __KEY: $crate::thread::__StaticLocalKeyInner<$t> =
  290|  63.0k|                    $crate::thread::__StaticLocalKeyInner::new();
  291|  63.0k|
  292|  63.0k|                #[thread_local]
  293|  63.0k|                #[cfg(all(
  294|  63.0k|                    target_thread_local,
  295|  63.0k|                    not(all(target_arch = "wasm32", not(target_feature = "atomics"))),
  296|  63.0k|                ))]
  297|  63.0k|                static __KEY: $crate::thread::__FastLocalKeyInner<$t> =
  298|  63.0k|                    $crate::thread::__FastLocalKeyInner::new();
  299|  63.0k|
  300|  63.0k|                #[cfg(all(
  301|  63.0k|                    not(target_thread_local),
  302|  63.0k|                    not(all(target_arch = "wasm32", not(target_feature = "atomics"))),
  303|  63.0k|                ))]
  304|  63.0k|                static __KEY: $crate::thread::__OsLocalKeyInner<$t> =
  305|  63.0k|                    $crate::thread::__OsLocalKeyInner::new();
  306|  63.0k|
  307|  63.0k|                // FIXME: remove the #[allow(...)] marker when macros don't
  308|  63.0k|                // raise warning for missing/extraneous unsafe blocks anymore.
  309|  63.0k|                // See https://github.com/rust-lang/rust/issues/74838.
  310|  63.0k|                #[allow(unused_unsafe)]
  311|  63.0k|                unsafe { __KEY.get(__init) }
  312|  63.0k|            }
  313|       |
  314|       |            unsafe {
  315|       |                $crate::thread::LocalKey::new(__getit)
  316|       |            }
  317|       |        }
  318|       |    };
  319|       |    ($(#[$attr:meta])* $vis:vis $name:ident, $t:ty, $($init:tt)*) => {
  320|       |        $(#[$attr])* $vis const $name: $crate::thread::LocalKey<$t> =
  321|       |            $crate::__thread_local_inner!(@key $t, $($init)*);
  322|       |    }
  323|       |}
  324|       |
  325|       |/// An error returned by [`LocalKey::try_with`](struct.LocalKey.html#method.try_with).
  326|       |#[stable(feature = "thread_local_try_with", since = "1.26.0")]
  327|       |#[non_exhaustive]
  328|       |#[derive(Clone, Copy, Eq, PartialEq)]
  329|       |pub struct AccessError;
  330|       |
  331|       |#[stable(feature = "thread_local_try_with", since = "1.26.0")]
  332|       |impl fmt::Debug for AccessError {
  333|       |    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
  334|       |        f.debug_struct("AccessError").finish()
  335|       |    }
  336|       |}
  337|       |
  338|       |#[stable(feature = "thread_local_try_with", since = "1.26.0")]
  339|       |impl fmt::Display for AccessError {
  340|       |    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
  341|       |        fmt::Display::fmt("already destroyed", f)
  342|       |    }
  343|       |}
  344|       |
  345|       |#[stable(feature = "thread_local_try_with", since = "1.26.0")]
  346|       |impl Error for AccessError {}
  347|       |
  348|       |impl<T: 'static> LocalKey<T> {
  349|       |    #[doc(hidden)]
  350|       |    #[unstable(
  351|       |        feature = "thread_local_internals",
  352|       |        reason = "recently added to create a key",
  353|       |        issue = "none"
  354|       |    )]
  355|       |    #[rustc_const_unstable(feature = "thread_local_internals", issue = "none")]
  356|       |    pub const unsafe fn new(inner: unsafe fn() -> Option<&'static T>) -> LocalKey<T> {
  357|       |        LocalKey { inner }
  358|       |    }
  359|       |
  360|       |    /// Acquires a reference to the value in this TLS key.
  361|       |    ///
  362|       |    /// This will lazily initialize the value if this thread has not referenced
  363|       |    /// this key yet.
  364|       |    ///
  365|       |    /// # Panics
  366|       |    ///
  367|       |    /// This function will `panic!()` if the key currently has its
  368|       |    /// destructor running, and it **may** panic if the destructor has
  369|       |    /// previously been run for this thread.
  370|       |    #[stable(feature = "rust1", since = "1.0.0")]
  371|       |    pub fn with<F, R>(&'static self, f: F) -> R
  372|       |    where
  373|       |        F: FnOnce(&T) -> R,
  374|       |    {
  375|       |        self.try_with(f).expect(
  376|       |            "cannot access a Thread Local Storage value \
  377|       |             during or after destruction",
  378|       |        )
  379|       |    }
  380|       |
  381|       |    /// Acquires a reference to the value in this TLS key.
  382|       |    ///
  383|       |    /// This will lazily initialize the value if this thread has not referenced
  384|       |    /// this key yet. If the key has been destroyed (which may happen if this is called
  385|       |    /// in a destructor), this function will return an [`AccessError`].
  386|       |    ///
  387|       |    /// # Panics
  388|       |    ///
  389|       |    /// This function will still `panic!()` if the key is uninitialized and the
  390|       |    /// key's initializer panics.
  391|       |    #[stable(feature = "thread_local_try_with", since = "1.26.0")]
  392|       |    #[inline]
  393|       |    pub fn try_with<F, R>(&'static self, f: F) -> Result<R, AccessError>
  394|       |    where
  395|       |        F: FnOnce(&T) -> R,
  396|       |    {
  397|       |        unsafe {
  398|       |            let thread_local = (self.inner)().ok_or(AccessError)?;
  399|       |            Ok(f(thread_local))
  400|       |        }
  401|       |    }
  402|       |}
  403|       |
  404|       |mod lazy {
  405|       |    use crate::cell::UnsafeCell;
  406|       |    use crate::hint;
  407|       |    use crate::mem;
  408|       |
  409|       |    pub struct LazyKeyInner<T> {
  410|       |        inner: UnsafeCell<Option<T>>,
  411|       |    }
  412|       |
  413|       |    impl<T> LazyKeyInner<T> {
  414|       |        pub const fn new() -> LazyKeyInner<T> {
  415|       |            LazyKeyInner { inner: UnsafeCell::new(None) }
  416|       |        }
  417|       |
  418|       |        pub unsafe fn get(&self) -> Option<&'static T> {
  419|       |            // SAFETY: The caller must ensure no reference is ever handed out to
  420|       |            // the inner cell nor mutable reference to the Option<T> inside said
  421|       |            // cell. This makes it safe to hand out a reference, though the lifetime
  422|       |            // of 'static is itself unsafe, making the get method unsafe.
  423|       |            unsafe { (*self.inner.get()).as_ref() }
  424|       |        }
  425|       |
  426|       |        /// The caller must ensure that no reference is active: this method
  427|       |        /// needs unique access.
  428|       |        pub unsafe fn initialize<F: FnOnce() -> T>(&self, init: F) -> &'static T {
  429|       |            // Execute the initialization up front, *then* move it into our slot,
  430|       |            // just in case initialization fails.
  431|       |            let value = init();
  432|       |            let ptr = self.inner.get();
  433|       |
  434|       |            // SAFETY:
  435|       |            //
  436|       |            // Note that this can in theory just be `*ptr = Some(value)`, but the
  437|       |            // compiler will currently codegen that pattern with something like:
  438|       |            //
  439|       |            //      ptr::drop_in_place(ptr)
  440|       |            //      ptr::write(ptr, Some(value))
  441|       |            //
  442|       |            // Due to this pattern it's possible for the destructor of the value in
  443|       |            // `ptr` (e.g., if this is being recursively initialized) to re-access
  444|       |            // TLS, in which case there will be a `&` and `&mut` pointer to the same
  445|       |            // value (an aliasing violation). To avoid setting the "I'm running a
  446|       |            // destructor" flag we just use `mem::replace` which should sequence the
  447|       |            // operations a little differently and make this safe to call.
  448|       |            //
  449|       |            // The precondition also ensures that we are the only one accessing
  450|       |            // `self` at the moment so replacing is fine.
  451|       |            unsafe {
  452|       |                let _ = mem::replace(&mut *ptr, Some(value));
  453|       |            }
  454|       |
  455|       |            // SAFETY: With the call to `mem::replace` it is guaranteed there is
  456|       |            // a `Some` behind `ptr`, not a `None` so `unreachable_unchecked`
  457|       |            // will never be reached.
  458|       |            unsafe {
  459|       |                // After storing `Some` we want to get a reference to the contents of
  460|       |                // what we just stored. While we could use `unwrap` here and it should
  461|       |                // always work it empirically doesn't seem to always get optimized away,
  462|       |                // which means that using something like `try_with` can pull in
  463|       |                // panicking code and cause a large size bloat.
  464|       |                match *ptr {
  465|       |                    Some(ref x) => x,
  466|       |                    None => hint::unreachable_unchecked(),
  467|       |                }
  468|       |            }
  469|       |        }
  470|       |
  471|       |        /// The other methods hand out references while taking &self.
  472|       |        /// As such, callers of this method must ensure no `&` and `&mut` are
  473|       |        /// available and used at the same time.
  474|       |        #[allow(unused)]
  475|       |        pub unsafe fn take(&mut self) -> Option<T> {
  476|       |            // SAFETY: See doc comment for this method.
  477|       |            unsafe { (*self.inner.get()).take() }
  478|       |        }
  479|       |    }
  480|       |}
  481|       |
  482|       |/// On some platforms like wasm32 there are no threads, so no need to generate
  483|       |/// thread locals and we can instead just use plain statics!
  484|       |#[doc(hidden)]
  485|       |#[cfg(all(target_arch = "wasm32", not(target_feature = "atomics")))]
  486|       |pub mod statik {
  487|       |    use super::lazy::LazyKeyInner;
  488|       |    use crate::fmt;
  489|       |
  490|       |    pub struct Key<T> {
  491|       |        inner: LazyKeyInner<T>,
  492|       |    }
  493|       |
  494|       |    unsafe impl<T> Sync for Key<T> {}
  495|       |
  496|       |    impl<T> fmt::Debug for Key<T> {
  497|       |        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
  498|       |            f.debug_struct("Key").finish_non_exhaustive()
  499|       |        }
  500|       |    }
  501|       |
  502|       |    impl<T> Key<T> {
  503|       |        pub const fn new() -> Key<T> {
  504|       |            Key { inner: LazyKeyInner::new() }
  505|       |        }
  506|       |
  507|       |        pub unsafe fn get(&self, init: fn() -> T) -> Option<&'static T> {
  508|       |            // SAFETY: The caller must ensure no reference is ever handed out to
  509|       |            // the inner cell nor mutable reference to the Option<T> inside said
  510|       |            // cell. This makes it safe to hand out a reference, though the lifetime
  511|       |            // of 'static is itself unsafe, making the get method unsafe.
  512|       |            let value = unsafe {
  513|       |                match self.inner.get() {
  514|       |                    Some(ref value) => value,
  515|       |                    None => self.inner.initialize(init),
  516|       |                }
  517|       |            };
  518|       |
  519|       |            Some(value)
  520|       |        }
  521|       |    }
  522|       |}
  523|       |
  524|       |#[doc(hidden)]
  525|       |#[cfg(target_thread_local)]
  526|       |pub mod fast {
  527|       |    use super::lazy::LazyKeyInner;
  528|       |    use crate::cell::Cell;
  529|       |    use crate::fmt;
  530|       |    use crate::mem;
  531|       |    use crate::sys::thread_local_dtor::register_dtor;
  532|       |
  533|       |    #[derive(Copy, Clone)]
  534|       |    enum DtorState {
  535|       |        Unregistered,
  536|       |        Registered,
  537|       |        RunningOrHasRun,
  538|       |    }
  539|       |
  540|       |    // This data structure has been carefully constructed so that the fast path
  541|       |    // only contains one branch on x86. That optimization is necessary to avoid
  542|       |    // duplicated tls lookups on OSX.
  543|       |    //
  544|       |    // LLVM issue: https://bugs.llvm.org/show_bug.cgi?id=41722
  545|       |    pub struct Key<T> {
  546|       |        // If `LazyKeyInner::get` returns `None`, that indicates either:
  547|       |        //   * The value has never been initialized
  548|       |        //   * The value is being recursively initialized
  549|       |        //   * The value has already been destroyed or is being destroyed
  550|       |        // To determine which kind of `None`, check `dtor_state`.
  551|       |        //
  552|       |        // This is very optimizer friendly for the fast path - initialized but
  553|       |        // not yet dropped.
  554|       |        inner: LazyKeyInner<T>,
  555|       |
  556|       |        // Metadata to keep track of the state of the destructor. Remember that
  557|       |        // this variable is thread-local, not global.
  558|       |        dtor_state: Cell<DtorState>,
  559|       |    }
  560|       |
  561|       |    impl<T> fmt::Debug for Key<T> {
  562|       |        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
  563|       |            f.debug_struct("Key").finish_non_exhaustive()
  564|       |        }
  565|       |    }
  566|       |
  567|       |    impl<T> Key<T> {
  568|       |        pub const fn new() -> Key<T> {
  569|       |            Key { inner: LazyKeyInner::new(), dtor_state: Cell::new(DtorState::Unregistered) }
  570|       |        }
  571|       |
  572|       |        // Note that this is just a publicly-callable function only for the
  573|       |        // const-initialized form of thread locals, basically a way to call the
  574|       |        // free `register_dtor` function defined elsewhere in libstd.
  575|       |        pub unsafe fn register_dtor(a: *mut u8, dtor: unsafe extern "C" fn(*mut u8)) {
  576|       |            unsafe {
  577|       |                register_dtor(a, dtor);
  578|       |            }
  579|       |        }
  580|       |
  581|       |        pub unsafe fn get<F: FnOnce() -> T>(&self, init: F) -> Option<&'static T> {
  582|       |            // SAFETY: See the definitions of `LazyKeyInner::get` and
  583|       |            // `try_initialize` for more information.
  584|       |            //
  585|       |            // The caller must ensure no mutable references are ever active to
  586|       |            // the inner cell or the inner T when this is called.
  587|       |            // The `try_initialize` is dependent on the passed `init` function
  588|       |            // for this.
  589|       |            unsafe {
  590|       |                match self.inner.get() {
  591|       |                    Some(val) => Some(val),
  592|       |                    None => self.try_initialize(init),
  593|       |                }
  594|       |            }
  595|       |        }
  596|       |
  597|       |        // `try_initialize` is only called once per fast thread local variable,
  598|       |        // except in corner cases where thread_local dtors reference other
  599|       |        // thread_local's, or it is being recursively initialized.
  600|       |        //
  601|       |        // Macos: Inlining this function can cause two `tlv_get_addr` calls to
  602|       |        // be performed for every call to `Key::get`.
  603|       |        // LLVM issue: https://bugs.llvm.org/show_bug.cgi?id=41722
  604|       |        #[inline(never)]
  605|       |        unsafe fn try_initialize<F: FnOnce() -> T>(&self, init: F) -> Option<&'static T> {
  606|       |            // SAFETY: See comment above (this function doc).
  607|       |            if !mem::needs_drop::<T>() || unsafe { self.try_register_dtor() } {
  608|       |                // SAFETY: See comment above (this function doc).
  609|       |                Some(unsafe { self.inner.initialize(init) })
  610|       |            } else {
  611|       |                None
  612|       |            }
  613|       |        }
  614|       |
  615|       |        // `try_register_dtor` is only called once per fast thread local
  616|       |        // variable, except in corner cases where thread_local dtors reference
  617|       |        // other thread_local's, or it is being recursively initialized.
  618|       |        unsafe fn try_register_dtor(&self) -> bool {
  619|       |            match self.dtor_state.get() {
  620|       |                DtorState::Unregistered => {
  621|       |                    // SAFETY: dtor registration happens before initialization.
  622|       |                    // Passing `self` as a pointer while using `destroy_value<T>`
  623|       |                    // is safe because the function will build a pointer to a
  624|       |                    // Key<T>, which is the type of self and so find the correct
  625|       |                    // size.
  626|       |                    unsafe { register_dtor(self as *const _ as *mut u8, destroy_value::<T>) };
  627|       |                    self.dtor_state.set(DtorState::Registered);
  628|       |                    true
  629|       |                }
  630|       |                DtorState::Registered => {
  631|       |                    // recursively initialized
  632|       |                    true
  633|       |                }
  634|       |                DtorState::RunningOrHasRun => false,
  635|       |            }
  636|       |        }
  637|       |    }
  638|       |
  639|       |    unsafe extern "C" fn destroy_value<T>(ptr: *mut u8) {
  640|       |        let ptr = ptr as *mut Key<T>;
  641|       |
  642|       |        // SAFETY:
  643|       |        //
  644|       |        // The pointer `ptr` has been built just above and comes from
  645|       |        // `try_register_dtor` where it is originally a Key<T> coming from `self`,
  646|       |        // making it non-null and of the correct type.
  647|       |        //
  648|       |        // Right before we run the user destructor be sure to set the
  649|       |        // `Option<T>` to `None`, and `dtor_state` to `RunningOrHasRun`. This
  650|       |        // causes future calls to `get` to run `try_initialize` again,
  651|       |        // which will now fail, and return `None`.
  652|       |        unsafe {
  653|       |            let value = (*ptr).inner.take();
  654|       |            (*ptr).dtor_state.set(DtorState::RunningOrHasRun);
  655|       |            drop(value);
  656|       |        }
  657|       |    }
  658|       |}
  659|       |
  660|       |#[doc(hidden)]
  661|       |pub mod os {
  662|       |    use super::lazy::LazyKeyInner;
  663|       |    use crate::cell::Cell;
  664|       |    use crate::fmt;
  665|       |    use crate::marker;
  666|       |    use crate::ptr;
  667|       |    use crate::sys_common::thread_local_key::StaticKey as OsStaticKey;
  668|       |
  669|       |    pub struct Key<T> {
  670|       |        // OS-TLS key that we'll use to key off.
  671|       |        os: OsStaticKey,
  672|       |        marker: marker::PhantomData<Cell<T>>,
  673|       |    }
  674|       |
  675|       |    impl<T> fmt::Debug for Key<T> {
  676|       |        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
  677|       |            f.debug_struct("Key").finish_non_exhaustive()
  678|       |        }
  679|       |    }
  680|       |
  681|       |    unsafe impl<T> Sync for Key<T> {}
  682|       |
  683|       |    struct Value<T: 'static> {
  684|       |        inner: LazyKeyInner<T>,
  685|       |        key: &'static Key<T>,
  686|       |    }
  687|       |
  688|       |    impl<T: 'static> Key<T> {
  689|       |        #[rustc_const_unstable(feature = "thread_local_internals", issue = "none")]
  690|       |        pub const fn new() -> Key<T> {
  691|       |            Key { os: OsStaticKey::new(Some(destroy_value::<T>)), marker: marker::PhantomData }
  692|       |        }
  693|       |
  694|       |        /// It is a requirement for the caller to ensure that no mutable
  695|       |        /// reference is active when this method is called.
  696|       |        pub unsafe fn get(&'static self, init: fn() -> T) -> Option<&'static T> {
  697|       |            // SAFETY: See the documentation for this method.
  698|       |            let ptr = unsafe { self.os.get() as *mut Value<T> };
  699|       |            if ptr as usize > 1 {
  700|       |                // SAFETY: the check ensured the pointer is safe (its destructor
  701|       |                // is not running) + it is coming from a trusted source (self).
  702|       |                if let Some(ref value) = unsafe { (*ptr).inner.get() } {
  703|       |                    return Some(value);
  704|       |                }
  705|       |            }
  706|       |            // SAFETY: At this point we are sure we have no value and so
  707|       |            // initializing (or trying to) is safe.
  708|       |            unsafe { self.try_initialize(init) }
  709|       |        }
  710|       |
  711|       |        // `try_initialize` is only called once per os thread local variable,
  712|       |        // except in corner cases where thread_local dtors reference other
  713|       |        // thread_local's, or it is being recursively initialized.
  714|       |        unsafe fn try_initialize(&'static self, init: fn() -> T) -> Option<&'static T> {
  715|       |            // SAFETY: No mutable references are ever handed out meaning getting
  716|       |            // the value is ok.
  717|       |            let ptr = unsafe { self.os.get() as *mut Value<T> };
  718|       |            if ptr as usize == 1 {
  719|       |                // destructor is running
  720|       |                return None;
  721|       |            }
  722|       |
  723|       |            let ptr = if ptr.is_null() {
  724|       |                // If the lookup returned null, we haven't initialized our own
  725|       |                // local copy, so do that now.
  726|       |                let ptr: Box<Value<T>> = box Value { inner: LazyKeyInner::new(), key: self };
  727|       |                let ptr = Box::into_raw(ptr);
  728|       |                // SAFETY: At this point we are sure there is no value inside
  729|       |                // ptr so setting it will not affect anyone else.
  730|       |                unsafe {
  731|       |                    self.os.set(ptr as *mut u8);
  732|       |                }
  733|       |                ptr
  734|       |            } else {
  735|       |                // recursive initialization
  736|       |                ptr
  737|       |            };
  738|       |
  739|       |            // SAFETY: ptr has been ensured as non-null just above and so can be
  740|       |            // dereferenced safely.
  741|       |            unsafe { Some((*ptr).inner.initialize(init)) }
  742|       |        }
  743|       |    }
  744|       |
  745|       |    unsafe extern "C" fn destroy_value<T: 'static>(ptr: *mut u8) {
  746|       |        // SAFETY:
  747|       |        //
  748|       |        // The OS TLS ensures that this key contains a null value when this
  749|       |        // destructor starts to run. We set it back to a sentinel value of 1 to
  750|       |        // ensure that any future calls to `get` for this thread will return
  751|       |        // `None`.
  752|       |        //
  753|       |        // Note that to prevent an infinite loop we reset it back to null right
  754|       |        // before we return from the destructor ourselves.
  755|       |        unsafe {
  756|       |            let ptr = Box::from_raw(ptr as *mut Value<T>);
  757|       |            let key = ptr.key;
  758|       |            key.os.set(1 as *mut u8);
  759|       |            drop(ptr);
  760|       |            key.os.set(ptr::null_mut());
  761|       |        }
  762|       |    }
  763|       |}
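
To make the public surface measured above concrete, here is a small, self-contained usage sketch of the API this file documents (`thread_local!`, `LocalKey::with`, `LocalKey::try_with`). It is illustrative only and is not part of the instrumented run:

use std::cell::Cell;
use std::thread;

thread_local! {
    // Lazily initialized the first time each thread calls `with`.
    static COUNTER: Cell<u32> = Cell::new(0);
}

fn main() {
    // `with` hands the closure a shared reference to this thread's value.
    COUNTER.with(|c| c.set(c.get() + 1));
    assert_eq!(COUNTER.with(|c| c.get()), 1);

    // A spawned thread gets its own, freshly initialized slot.
    thread::spawn(|| {
        COUNTER.with(|c| assert_eq!(c.get(), 0));
    })
    .join()
    .unwrap();

    // `try_with` is the fallible form; outside of TLS destruction it succeeds.
    assert!(COUNTER.try_with(|c| c.get()).is_ok());
}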
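
The first two `thread_local!` arms (source lines 136-143) accept a `const { ... }` initializer, which the `@key $t:ty, const $init:expr` rule expands without the lazy-initialization machinery. A sketch of that form follows; note that on the nightly this report was generated from, the `_REQUIRE_UNSTABLE` constant at source line 167 gates it behind the unstable `thread_local_const_init` feature (later stabilized), so the feature attribute may or may not be needed depending on toolchain:

// May require `#![feature(thread_local_const_init)]` on older toolchains.
use std::cell::Cell;

thread_local! {
    // The initializer is a compile-time constant, so on platforms with native
    // `#[thread_local]` support and no destructor to run, the generated
    // `__getit` can skip the lazy-initialization path entirely.
    static HITS: Cell<u64> = const { Cell::new(0) };
}

fn main() {
    HITS.with(|h| h.set(h.get() + 1));
    assert_eq!(HITS.with(|h| h.get()), 1);
}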
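
`LocalKey::with` panics once a key's destructor is running, while `try_with` reports the same condition as an `AccessError` (source lines 360-401). A hedged sketch of when that matters: a type whose `Drop` impl touches another thread-local during thread exit. Which branch prints depends on destructor ordering, which the platform-specific notes at source lines 63-78 deliberately leave unspecified:

use std::cell::RefCell;
use std::thread;

thread_local! {
    static DATA: RefCell<Vec<u32>> = RefCell::new(Vec::new());
}

struct TouchTlsOnDrop;

impl Drop for TouchTlsOnDrop {
    fn drop(&mut self) {
        // During thread exit DATA may already have been destroyed; `try_with`
        // then returns Err(AccessError) where `with` would panic.
        match DATA.try_with(|d| d.borrow().len()) {
            Ok(len) => println!("DATA still alive, len = {}", len),
            Err(_) => println!("DATA already destroyed (AccessError)"),
        }
    }
}

thread_local! {
    static GUARD: TouchTlsOnDrop = TouchTlsOnDrop;
}

fn main() {
    thread::spawn(|| {
        GUARD.with(|_| ());                    // force GUARD's initialization
        DATA.with(|d| d.borrow_mut().push(1)); // force DATA's initialization
    })
    .join()
    .unwrap();
}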
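
Finally, the `fast::Key` path (source lines 526-658) is easiest to read as a three-state machine, Unregistered -> Registered -> RunningOrHasRun, that latches once destruction starts. The following stand-alone model is not std's code: it elides the real `#[thread_local]` storage and platform destructor registration and only mirrors the decision logic of `try_initialize`, `try_register_dtor`, and `destroy_value`:

#[derive(Copy, Clone)]
enum DtorState {
    Unregistered,
    Registered,
    RunningOrHasRun,
}

// A per-thread slot reduced to its two pieces of state: the lazily
// initialized value and the destructor flag.
struct ModelKey {
    value: Option<String>,
    dtor_state: DtorState,
}

impl ModelKey {
    fn get(&mut self) -> Option<&String> {
        if self.value.is_none() {
            // Mirrors `try_initialize`/`try_register_dtor`: initialization is
            // refused once destruction has started.
            match self.dtor_state {
                DtorState::Unregistered => {
                    // The real code registers `destroy_value` with the
                    // platform here before initializing.
                    self.dtor_state = DtorState::Registered;
                    self.value = Some(String::from("init"));
                }
                DtorState::Registered => self.value = Some(String::from("init")),
                DtorState::RunningOrHasRun => return None,
            }
        }
        self.value.as_ref()
    }

    fn destroy(&mut self) {
        // Mirrors `destroy_value`: take the value out, latch the state, then
        // drop it, so re-entrant `get` calls observe `None` instead of
        // resurrecting the slot.
        let value = self.value.take();
        self.dtor_state = DtorState::RunningOrHasRun;
        drop(value);
    }
}

fn main() {
    let mut key = ModelKey { value: None, dtor_state: DtorState::Unregistered };
    assert!(key.get().is_some());
    key.destroy();
    assert!(key.get().is_none());
}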