Coverage Report

Created: 2025-11-16 07:04

/rust/registry/src/index.crates.io-1949cf8c6b5b557f/zerocopy-0.8.27/src/pointer/inner.rs
Every instrumented line in the listing below reports an execution count of 0; unexecuted generic instantiations are noted inline after the functions they belong to.
// Copyright 2024 The Fuchsia Authors
//
// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0
// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT
// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option.
// This file may not be copied, modified, or distributed except according to
// those terms.

use core::{marker::PhantomData, mem, ops::Range, ptr::NonNull};

pub use _def::PtrInner;

#[allow(unused_imports)]
use crate::util::polyfills::NumExt as _;
use crate::{
    layout::{CastType, MetadataCastError},
    util::AsAddress,
    AlignmentError, CastError, KnownLayout, MetadataOf, SizeError, SplitAt,
};

mod _def {
    use super::*;
    /// The inner pointer stored inside a [`Ptr`][crate::Ptr].
    ///
    /// `PtrInner<'a, T>` is [covariant] in `'a` and invariant in `T`.
    ///
    /// [covariant]: https://doc.rust-lang.org/reference/subtyping.html
    #[allow(missing_debug_implementations)]
    pub struct PtrInner<'a, T>
    where
        T: ?Sized,
    {
        /// # Invariants
        ///
        /// 0. If `ptr`'s referent is not zero sized, then `ptr` has valid
        ///    provenance for its referent, which is entirely contained in some
        ///    Rust allocation, `A`.
        /// 1. If `ptr`'s referent is not zero sized, `A` is guaranteed to live
        ///    for at least `'a`.
        ///
        /// # Postconditions
        ///
        /// By virtue of these invariants, code may assume the following, which
        /// are logical implications of the invariants:
        /// - `ptr`'s referent is not larger than `isize::MAX` bytes \[1\]
        /// - `ptr`'s referent does not wrap around the address space \[1\]
        ///
        /// \[1\] Per <https://doc.rust-lang.org/1.85.0/std/ptr/index.html#allocated-object>:
        ///
        ///   For any allocated object with `base` address, `size`, and a set of
        ///   `addresses`, the following are guaranteed:
        ///   ...
        ///   - `size <= isize::MAX`
        ///
        ///   As a consequence of these guarantees, given any address `a` within
        ///   the set of addresses of an allocated object:
        ///   ...
        ///   - It is guaranteed that, given `o = a - base` (i.e., the offset of
        ///     `a` within the allocated object), `base + o` will not wrap around
        ///     the address space (in other words, will not overflow `usize`)
        ptr: NonNull<T>,
        // SAFETY: `&'a UnsafeCell<T>` is covariant in `'a` and invariant in `T`
        // [1]. We use this construction rather than the equivalent `&mut T`,
        // because our MSRV of 1.65 prohibits `&mut` types in const contexts.
        //
        // [1] https://doc.rust-lang.org/1.81.0/reference/subtyping.html#variance
        _marker: PhantomData<&'a core::cell::UnsafeCell<T>>,
    }

    impl<'a, T: 'a + ?Sized> Copy for PtrInner<'a, T> {}
    impl<'a, T: 'a + ?Sized> Clone for PtrInner<'a, T> {
        #[inline(always)]
        fn clone(&self) -> PtrInner<'a, T> {
            // SAFETY: None of the invariants on `ptr` are affected by having
            // multiple copies of a `PtrInner`.
            *self
        }
    }

    impl<'a, T: 'a + ?Sized> PtrInner<'a, T> {
        /// Constructs a `PtrInner` from a [`NonNull`].
        ///
        /// # Safety
        ///
        /// The caller promises that:
        ///
        /// 0. If `ptr`'s referent is not zero sized, then `ptr` has valid
        ///    provenance for its referent, which is entirely contained in some
        ///    Rust allocation, `A`.
        /// 1. If `ptr`'s referent is not zero sized, `A` is guaranteed to live
        ///    for at least `'a`.
        #[inline(always)]
        #[must_use]
        pub const unsafe fn new(ptr: NonNull<T>) -> PtrInner<'a, T> {
            // SAFETY: The caller has promised to satisfy all safety invariants
            // of `PtrInner`.
            Self { ptr, _marker: PhantomData }
        }
Unexecuted instantiation: <zerocopy::pointer::inner::_def::PtrInner<<zerocopy::util::macro_util::Wrap<&mut _, &mut _> as zerocopy::util::macro_util::TransmuteMutDst>::transmute_mut::D<[u16]>>>::new
Unexecuted instantiation: <zerocopy::pointer::inner::_def::PtrInner<<zerocopy::util::macro_util::Wrap<&mut _, &mut _> as zerocopy::util::macro_util::TransmuteMutDst>::transmute_mut::S<[half::binary16::f16]>>>::new
Unexecuted instantiation: <zerocopy::pointer::inner::_def::PtrInner<[half::binary16::f16]>>::new
Unexecuted instantiation: <zerocopy::pointer::inner::_def::PtrInner<[u16]>>::new
Unexecuted instantiation: <zerocopy::pointer::inner::_def::PtrInner<_>>::new

        /// Converts this `PtrInner<T>` to a [`NonNull<T>`].
        ///
        /// Note that this method does not consume `self`. The caller should
        /// watch out for `unsafe` code which uses the returned `NonNull` in a
        /// way that violates the safety invariants of `self`.
        #[inline(always)]
        #[must_use]
        pub const fn as_non_null(&self) -> NonNull<T> {
            self.ptr
        }
Unexecuted instantiation: <zerocopy::pointer::inner::_def::PtrInner<<zerocopy::util::macro_util::Wrap<&mut _, &mut _> as zerocopy::util::macro_util::TransmuteMutDst>::transmute_mut::D<[u16]>>>::as_non_null
Unexecuted instantiation: <zerocopy::pointer::inner::_def::PtrInner<<zerocopy::util::macro_util::Wrap<&mut _, &mut _> as zerocopy::util::macro_util::TransmuteMutDst>::transmute_mut::S<[half::binary16::f16]>>>::as_non_null
Unexecuted instantiation: <zerocopy::pointer::inner::_def::PtrInner<[half::binary16::f16]>>::as_non_null
Unexecuted instantiation: <zerocopy::pointer::inner::_def::PtrInner<[u16]>>::as_non_null
Unexecuted instantiation: <zerocopy::pointer::inner::_def::PtrInner<_>>::as_non_null
    }
}
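
// A self-contained sketch of the variance claim in the struct documentation
// above. The `Inner` type and `shrink` function are illustrative names, not
// part of zerocopy: they only show that a `PhantomData<&'a UnsafeCell<T>>`
// marker yields covariance in `'a` while leaving `T` invariant.
struct Inner<'a, T: ?Sized> {
    ptr: core::ptr::NonNull<T>,
    _marker: core::marker::PhantomData<&'a core::cell::UnsafeCell<T>>,
}

// Compiles only because `Inner` is covariant in `'a`: a value borrowed for
// `'long` may be used where the shorter `'short` is expected. The analogous
// coercion in `T` is rejected by the compiler, since `UnsafeCell<T>` makes
// `T` invariant.
fn shrink<'short, 'long: 'short, T: ?Sized>(p: Inner<'long, T>) -> Inner<'short, T> {
    p
}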

impl<'a, T: ?Sized> PtrInner<'a, T> {
    /// Constructs a `PtrInner` from a reference.
    #[inline]
    pub(crate) fn from_ref(ptr: &'a T) -> Self {
        let ptr = NonNull::from(ptr);
        // SAFETY:
        // 0. If `ptr`'s referent is not zero sized, then `ptr`, by invariant on
        //    `&'a T` [1], has valid provenance for its referent, which is
        //    entirely contained in some Rust allocation, `A`.
        // 1. If `ptr`'s referent is not zero sized, then `A`, by invariant on
        //    `&'a T`, is guaranteed to live for at least `'a`.
        //
        // [1] Per https://doc.rust-lang.org/1.85.0/std/primitive.reference.html#safety:
        //
        //   For all types, `T: ?Sized`, and for all `t: &T` or `t: &mut T`,
        //   when such values cross an API boundary, the following invariants
        //   must generally be upheld:
        //   ...
        //   - if `size_of_val(t) > 0`, then `t` is dereferenceable for
        //     `size_of_val(t)` many bytes
        //
        //   If `t` points at address `a`, being “dereferenceable” for N bytes
        //   means that the memory range `[a, a + N)` is all contained within a
        //   single allocated object.
        unsafe { Self::new(ptr) }
    }

    /// Constructs a `PtrInner` from a mutable reference.
    #[inline]
    pub(crate) fn from_mut(ptr: &'a mut T) -> Self {
        let ptr = NonNull::from(ptr);
        // SAFETY:
        // 0. If `ptr`'s referent is not zero sized, then `ptr`, by invariant on
        //    `&'a mut T` [1], has valid provenance for its referent, which is
        //    entirely contained in some Rust allocation, `A`.
        // 1. If `ptr`'s referent is not zero sized, then `A`, by invariant on
        //    `&'a mut T`, is guaranteed to live for at least `'a`.
        //
        // [1] Per https://doc.rust-lang.org/1.85.0/std/primitive.reference.html#safety:
        //
        //   For all types, `T: ?Sized`, and for all `t: &T` or `t: &mut T`,
        //   when such values cross an API boundary, the following invariants
        //   must generally be upheld:
        //   ...
        //   - if `size_of_val(t) > 0`, then `t` is dereferenceable for
        //     `size_of_val(t)` many bytes
        //
        //   If `t` points at address `a`, being “dereferenceable” for N bytes
        //   means that the memory range `[a, a + N)` is all contained within a
        //   single allocated object.
        unsafe { Self::new(ptr) }
    }
Unexecuted instantiation: <zerocopy::pointer::inner::_def::PtrInner<[half::binary16::f16]>>::from_mut
Unexecuted instantiation: <zerocopy::pointer::inner::_def::PtrInner<_>>::from_mut

    #[must_use]
    #[inline(always)]
    pub fn cast_sized<U>(self) -> PtrInner<'a, U>
    where
        T: Sized,
    {
        static_assert!(T, U => mem::size_of::<T>() >= mem::size_of::<U>());
        // SAFETY: By the preceding assert, `U` is no larger than `T`, which is
        // the size of `self`'s referent.
        unsafe { self.cast() }
    }

    /// # Safety
    ///
    /// `U` must not be larger than the size of `self`'s referent.
    #[must_use]
    #[inline(always)]
    pub unsafe fn cast<U>(self) -> PtrInner<'a, U> {
        let ptr = self.as_non_null().cast::<U>();

        // SAFETY: The caller promises that `U` is no larger than `self`'s
        // referent. Thus, `ptr` addresses a subset of the bytes addressed by
        // `self`.
        //
        // 0. By invariant on `self`, if `self`'s referent is not zero sized,
        //    then `self` has valid provenance for its referent, which is
        //    entirely contained in some Rust allocation, `A`. Thus, the same
        //    holds of `ptr`.
        // 1. By invariant on `self`, if `self`'s referent is not zero sized,
        //    then `A` is guaranteed to live for at least `'a`.
        unsafe { PtrInner::new(ptr) }
    }
}
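
// An illustrative sketch of the size precondition that `cast_sized` checks
// statically: reinterpreting a pointer as a type no larger than the original
// referent stays in bounds. The function name is local to this sketch and
// uses only core APIs.
fn shrinking_cast_demo() {
    let x: u64 = 0x0102_0304_0506_0708;
    let ptr = core::ptr::NonNull::from(&x);
    // `u32` (4 bytes) is no larger than `u64` (8 bytes), so the cast pointer
    // still addresses memory inside `x`, and the alignment of `u64` satisfies
    // that of `u32`.
    let narrow = ptr.cast::<u32>();
    // SAFETY: `narrow` points to the first four initialized, aligned bytes of `x`.
    let first_word = unsafe { narrow.as_ptr().read() };
    // Which half of `x` this is depends on endianness; on little-endian
    // targets it is the low word.
    let _ = first_word;
}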

#[allow(clippy::needless_lifetimes)]
impl<'a, T> PtrInner<'a, T>
where
    T: ?Sized + KnownLayout,
{
    /// Extracts the metadata of this `ptr`.
    pub(crate) fn meta(self) -> MetadataOf<T> {
        let meta = T::pointer_to_metadata(self.as_non_null().as_ptr());
        // SAFETY: By invariant on `PtrInner`, `self.as_non_null()` addresses no
        // more than `isize::MAX` bytes.
        unsafe { MetadataOf::new_unchecked(meta) }
    }

    /// Produces a `PtrInner` with the same address and provenance as `self`,
    /// but with the given `meta`.
    ///
    /// # Safety
    ///
    /// The caller promises that if `self`'s referent is not zero sized, then
    /// a pointer constructed from its address with the given `meta` metadata
    /// will address a subset of the allocation pointed to by `self`.
    #[inline]
    pub(crate) unsafe fn with_meta(self, meta: T::PointerMetadata) -> Self
    where
        T: KnownLayout,
    {
        let raw = T::raw_from_ptr_len(self.as_non_null().cast(), meta);

        // SAFETY:
        //
        // Lemma 0: `raw` either addresses zero bytes, or addresses a subset of
        //          the allocation pointed to by `self` and has the same
        //          provenance as `self`. Proof: `raw` is constructed using
        //          provenance-preserving operations, and the caller has
        //          promised that, if `self`'s referent is not zero-sized, the
        //          resulting pointer addresses a subset of the allocation
        //          pointed to by `self`.
        //
        // 0. Per Lemma 0 and by invariant on `self`, if `ptr`'s referent is not
        //    zero sized, then `ptr` is derived from some valid Rust allocation,
        //    `A`.
        // 1. Per Lemma 0 and by invariant on `self`, if `ptr`'s referent is not
        //    zero sized, then `ptr` has valid provenance for `A`.
        // 2. Per Lemma 0 and by invariant on `self`, if `ptr`'s referent is not
        //    zero sized, then `ptr` addresses a byte range which is entirely
        //    contained in `A`.
        // 3. Per Lemma 0 and by invariant on `self`, `ptr` addresses a byte
        //    range whose length fits in an `isize`.
        // 4. Per Lemma 0 and by invariant on `self`, `ptr` addresses a byte
        //    range which does not wrap around the address space.
        // 5. Per Lemma 0 and by invariant on `self`, if `ptr`'s referent is not
        //    zero sized, then `A` is guaranteed to live for at least `'a`.
        unsafe { PtrInner::new(raw) }
    }

    pub(crate) fn as_bytes(self) -> PtrInner<'a, [u8]> {
        let ptr = self.as_non_null();
        let bytes = match T::size_of_val_raw(ptr) {
            Some(bytes) => bytes,
            // SAFETY: `KnownLayout::size_of_val_raw` promises to always
            // return `Some` so long as the resulting size fits in a
            // `usize`. By invariant on `PtrInner`, `self` refers to a range
            // of bytes whose size fits in an `isize`, which implies that it
            // also fits in a `usize`.
            None => unsafe { core::hint::unreachable_unchecked() },
        };

        let ptr = core::ptr::slice_from_raw_parts_mut(ptr.cast::<u8>().as_ptr(), bytes);

        // SAFETY: `ptr` has the same address as `ptr = self.as_non_null()`,
        // which is non-null by construction.
        let ptr = unsafe { NonNull::new_unchecked(ptr) };

        // SAFETY: `ptr` points to `bytes` `u8`s starting at the same address as
        // `self`'s referent. Since `bytes` is the length of `self`'s referent,
        // `ptr` addresses the same byte range as `self`. Thus, by invariant on
        // `self` (as a `PtrInner`):
        //
        // 0. If `ptr`'s referent is not zero sized, then `ptr` has valid
        //    provenance for its referent, which is entirely contained in some
        //    Rust allocation, `A`.
        // 1. If `ptr`'s referent is not zero sized, `A` is guaranteed to live
        //    for at least `'a`.
        unsafe { PtrInner::new(ptr) }
    }
}
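
// A small sketch of the byte-view idea behind `as_bytes`, using only core
// APIs: a pointer to a value can be re-described as a pointer to
// `size_of_val` bytes at the same address. The function name is illustrative.
fn bytes_view_demo() {
    let v = [0xABu8, 0xCD, 0xEF, 0x01];
    let ptr = core::ptr::NonNull::from(&v);
    let len = core::mem::size_of_val(&v);
    let bytes = core::ptr::slice_from_raw_parts(ptr.cast::<u8>().as_ptr(), len);
    // SAFETY: `bytes` covers exactly the initialized bytes of `v`, which
    // outlives this borrow.
    assert_eq!(unsafe { &*bytes }, &v[..]);
}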

#[allow(clippy::needless_lifetimes)]
impl<'a, T> PtrInner<'a, T>
where
    T: ?Sized + KnownLayout<PointerMetadata = usize>,
{
    /// Splits `T` in two.
    ///
    /// # Safety
    ///
    /// The caller promises that:
    ///  - `l_len.get() <= self.meta()`.
    ///
    /// ## (Non-)Overlap
    ///
    /// Given `let (left, right) = ptr.split_at_unchecked(l_len)`, it is
    /// guaranteed that `left` and `right` are contiguous and non-overlapping
    /// if `l_len.padding_needed_for() == 0`. This is true for all `[T]`.
    ///
    /// If `l_len.padding_needed_for() != 0`, then the left pointer will overlap
    /// the right pointer to satisfy `T`'s padding requirements.
    pub(crate) unsafe fn split_at_unchecked(
        self,
        l_len: crate::util::MetadataOf<T>,
    ) -> (Self, PtrInner<'a, [T::Elem]>)
    where
        T: SplitAt,
    {
        let l_len = l_len.get();

        // SAFETY: The caller promises that `l_len.get() <= self.meta()`.
        // Trivially, `0 <= l_len`.
        let left = unsafe { self.with_meta(l_len) };

        let right = self.trailing_slice();
        // SAFETY: The caller promises that `l_len <= self.meta()`, and
        // trivially, `self.meta() <= self.meta()`.
        let right = unsafe { right.slice_unchecked(l_len..self.meta().get()) };

        // SAFETY: If `l_len.padding_needed_for() == 0`, then `left` and `right`
        // are non-overlapping. Proof: `left` is constructed from `self` with
        // `l_len` as its (exclusive) upper bound. If
        // `l_len.padding_needed_for() == 0`, then `left` requires no trailing
        // padding following its final element. Since `right` is constructed
        // from `self`'s trailing slice with `l_len` as its (inclusive) lower
        // bound, no byte is referred to by both pointers.
        //
        // Conversely, if `l_len.padding_needed_for() == N`, where `N > 0`, then
        // `left` requires `N` bytes of trailing padding following its final
        // element. Since `right` is constructed from the trailing slice of
        // `self` with `l_len` as its (inclusive) lower bound, the first `N`
        // bytes of `right` are aliased by `left`.
        (left, right)
    }

    /// Produces the trailing slice of `self`.
    pub(crate) fn trailing_slice(self) -> PtrInner<'a, [T::Elem]>
    where
        T: SplitAt,
    {
        let offset = crate::trailing_slice_layout::<T>().offset;

        let bytes = self.as_non_null().cast::<u8>().as_ptr();

        // SAFETY:
        // - By invariant on `T: KnownLayout`, `T::LAYOUT` describes `T`'s
        //   layout. `offset` is the offset of the trailing slice within `T`,
        //   which is by definition in-bounds or one byte past the end of any
        //   `T`, regardless of metadata. By invariant on `PtrInner`, `self`
        //   (and thus `bytes`) points to a byte range of size `<= isize::MAX`,
        //   and so `offset <= isize::MAX`. Since `size_of::<u8>() == 1`,
        //   `offset * size_of::<u8>() <= isize::MAX`.
        // - If `offset > 0`, then by invariant on `PtrInner`, `self` (and thus
        //   `bytes`) points to a byte range entirely contained within the same
        //   allocated object as `self`. As explained above, this offset results
        //   in a pointer to or one byte past the end of this allocated object.
        let bytes = unsafe { bytes.add(offset) };

        // SAFETY: By the preceding safety argument, `bytes` is within or one
        // byte past the end of the same allocated object as `self`, which
        // ensures that it is non-null.
        let bytes = unsafe { NonNull::new_unchecked(bytes) };

        let ptr = KnownLayout::raw_from_ptr_len(bytes, self.meta().get());

        // SAFETY:
        // 0. If `ptr`'s referent is not zero sized, then `ptr` is derived from
        //    some valid Rust allocation, `A`, because `ptr` is derived from
        //    the same allocated object as `self`.
        // 1. If `ptr`'s referent is not zero sized, then `ptr` has valid
        //    provenance for `A` because `ptr` is derived from the same
        //    allocated object as `self` via provenance-preserving operations.
        // 2. If `ptr`'s referent is not zero sized, then `ptr` addresses a byte
        //    range which is entirely contained in `A`, by previous safety proof
        //    on `bytes`.
        // 3. `ptr` addresses a byte range whose length fits in an `isize`, by
        //    consequence of #2.
        // 4. `ptr` addresses a byte range which does not wrap around the
        //    address space, by consequence of #2.
        // 5. If `ptr`'s referent is not zero sized, then `A` is guaranteed to
        //    live for at least `'a`, because `ptr` is derived from `self`.
        unsafe { PtrInner::new(ptr) }
    }
}
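
// A sketch of the (non-)overlap guarantee documented on `split_at_unchecked`
// for the padding-free case: for `[u8]`, trailing padding is always zero, so
// the two halves are contiguous and disjoint. This uses only the standard
// slice `split_at`; the assertions mirror the property the safety comment
// relies on, and the function name is illustrative.
fn contiguous_split_demo() {
    let buf = [1u8, 2, 3, 4, 5];
    let (left, right) = buf.split_at(2);
    // The right half begins exactly where the left half ends: no gap (which
    // would require padding) and no overlap.
    assert_eq!(left.as_ptr().wrapping_add(left.len()), right.as_ptr());
    assert_eq!(left.len() + right.len(), buf.len());
}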

#[allow(clippy::needless_lifetimes)]
impl<'a, T> PtrInner<'a, [T]> {
    /// Creates a pointer which addresses the given `range` of self.
    ///
    /// # Safety
    ///
    /// `range` is a valid range (`start <= end`) and `end <= self.meta()`.
    pub(crate) unsafe fn slice_unchecked(self, range: Range<usize>) -> Self {
        let base = self.as_non_null().cast::<T>().as_ptr();

        // SAFETY: The caller promises that `start <= end <= self.meta()`. By
        // invariant, if `self`'s referent is not zero-sized, then `self` refers
        // to a byte range which is contained within a single allocation, which
        // is no more than `isize::MAX` bytes long, and which does not wrap
        // around the address space. Thus, this pointer arithmetic remains
        // in-bounds of the same allocation, and does not wrap around the
        // address space. The offset (in bytes) does not overflow `isize`.
        //
        // If `self`'s referent is zero-sized, then these conditions are
        // trivially satisfied.
        let base = unsafe { base.add(range.start) };

        // SAFETY: The caller promises that `start <= end`, and so this will not
        // underflow.
        #[allow(unstable_name_collisions)]
        let len = unsafe { range.end.unchecked_sub(range.start) };

        let ptr = core::ptr::slice_from_raw_parts_mut(base, len);

        // SAFETY: By invariant, `self`'s referent is either a ZST or lives
        // entirely in an allocation. `ptr` points inside of or one byte past
        // the end of that referent. Thus, in either case, `ptr` is non-null.
        let ptr = unsafe { NonNull::new_unchecked(ptr) };

        // SAFETY:
        //
        // Lemma 0: `ptr` addresses a subset of the bytes addressed by `self`,
        //          and has the same provenance. Proof: The caller guarantees
        //          that `start <= end <= self.meta()`. Thus, `base` is
        //          in-bounds of `self`, and `base + (end - start)` is also
        //          in-bounds of self. Finally, `ptr` is constructed using
        //          provenance-preserving operations.
        //
        // 0. Per Lemma 0 and by invariant on `self`, if `ptr`'s referent is not
        //    zero sized, then `ptr` has valid provenance for its referent,
        //    which is entirely contained in some Rust allocation, `A`.
        // 1. Per Lemma 0 and by invariant on `self`, if `ptr`'s referent is not
        //    zero sized, then `A` is guaranteed to live for at least `'a`.
        unsafe { PtrInner::new(ptr) }
    }

    /// Iteratively projects each element of `PtrInner<[T]>` as a `PtrInner<T>`.
    pub(crate) fn iter(&self) -> impl Iterator<Item = PtrInner<'a, T>> {
        // FIXME(#429): Once `NonNull::cast` documents that it preserves
        // provenance, cite those docs.
        let base = self.as_non_null().cast::<T>().as_ptr();
        (0..self.meta().get()).map(move |i| {
            // FIXME(https://github.com/rust-lang/rust/issues/74265): Use
            // `NonNull::get_unchecked_mut`.

            // SAFETY: If the following conditions are not satisfied,
            // `pointer::add` may induce Undefined Behavior [1]:
            //
            // > - The computed offset, `count * size_of::<T>()` bytes, must not
            // >   overflow `isize`.
            // > - If the computed offset is non-zero, then `self` must be
            // >   derived from a pointer to some allocated object, and the
            // >   entire memory range between `self` and the result must be in
            // >   bounds of that allocated object. In particular, this range
            // >   must not “wrap around” the edge of the address space.
            //
            // [1] https://doc.rust-lang.org/std/primitive.pointer.html#method.add
            //
            // We satisfy both of these conditions here:
            // - By invariant on `Ptr`, `self` addresses a byte range whose
            //   length fits in an `isize`. Since `elem` is contained in `self`,
            //   the computed offset of `elem` must fit within `isize`.
            // - If the computed offset is non-zero, then this means that the
            //   referent is not zero-sized. In this case, `base` points to an
            //   allocated object (by invariant on `self`). Thus:
            //   - By contract, `self.meta()` accurately reflects the number of
            //     elements in the slice. `i` is in bounds of `self.meta()` by
            //     construction, and so the result of this addition cannot
            //     overflow past the end of the allocation referred to by
            //     `self`.
            //   - By invariant on `Ptr`, `self` addresses a byte range which
            //     does not wrap around the address space. Since `elem` is
            //     contained in `self`, the computed offset of `elem` cannot
            //     wrap around the address space.
            //
            // FIXME(#429): Once `pointer::add` documents that it preserves
            // provenance, cite those docs.
            let elem = unsafe { base.add(i) };

            // SAFETY: `elem` must not be null. `base` is constructed from a
            // `NonNull` pointer, and the addition that produces `elem` must not
            // overflow or wrap around, so `elem >= base > 0`.
            //
            // FIXME(#429): Once `NonNull::new_unchecked` documents that it
            // preserves provenance, cite those docs.
            let elem = unsafe { NonNull::new_unchecked(elem) };

            // SAFETY: The safety invariants of `Ptr::new` (see definition) are
            // satisfied:
            // 0. If `elem`'s referent is not zero sized, then `elem` has valid
            //    provenance for its referent, because it is derived from `self`
            //    using a series of provenance-preserving operations, and
            //    because `self` has valid provenance for its referent. By the
            //    same argument, `elem`'s referent is entirely contained within
            //    the same allocated object as `self`'s referent.
            // 1. If `elem`'s referent is not zero sized, then the allocation of
            //    `elem` is guaranteed to live for at least `'a`, because `elem`
            //    is entirely contained in `self`, which lives for at least `'a`
            //    by invariant on `Ptr`.
            unsafe { PtrInner::new(elem) }
        })
    }
}
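
// A sketch of the element-projection loop that `iter` performs, written
// against a plain array with core pointer APIs only: starting from the base
// pointer, element `i` lives at `base.add(i)` for `i < len`, and each such
// pointer stays inside the array's allocation. The function name is
// illustrative.
fn element_pointers_demo() {
    let xs = [10u32, 20, 30];
    let base = core::ptr::NonNull::from(&xs).cast::<u32>().as_ptr();
    let sum: u32 = (0..xs.len())
        .map(|i| {
            // SAFETY: `i < xs.len()`, so `base.add(i)` is in bounds of `xs`
            // and points to an initialized, aligned `u32`.
            unsafe { base.add(i).read() }
        })
        .sum();
    assert_eq!(sum, 60);
}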

impl<'a, T, const N: usize> PtrInner<'a, [T; N]> {
    /// Casts this pointer-to-array into a slice.
    ///
    /// # Safety
    ///
    /// Callers may assume that the returned `PtrInner` references the same
    /// address and length as `self`.
    #[allow(clippy::wrong_self_convention)]
    pub(crate) fn as_slice(self) -> PtrInner<'a, [T]> {
        let start = self.as_non_null().cast::<T>().as_ptr();
        let slice = core::ptr::slice_from_raw_parts_mut(start, N);
        // SAFETY: `slice` is not null, because it is derived from `start`
        // which is non-null.
        let slice = unsafe { NonNull::new_unchecked(slice) };
        // SAFETY: Lemma: In the following safety arguments, note that `slice`
        // is derived from `self` in two steps: first, by casting `self: [T; N]`
        // to `start: T`, then by constructing a pointer to a slice starting at
        // `start` of length `N`. As a result, `slice` references exactly the
        // same allocation as `self`, if any.
        //
        // 0. By the above lemma, if `slice`'s referent is not zero sized, then
        //    `slice` has the same referent as `self`. By invariant on `self`,
        //    this referent is entirely contained within some allocation, `A`.
        //    Because `slice` was constructed using provenance-preserving
        //    operations, it has provenance for its entire referent.
        // 1. By the above lemma, if `slice`'s referent is not zero sized, then
        //    `A` is guaranteed to live for at least `'a`, because it is derived
        //    from the same allocation as `self`, which, by invariant on `Ptr`,
        //    lives for at least `'a`.
        unsafe { PtrInner::new(slice) }
    }
}
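
// A sketch of the array-to-slice step in `as_slice`, in terms of core raw
// pointers only: a pointer to `[T; N]` and a slice pointer of length `N`
// starting at the same address describe the same bytes. The function name is
// illustrative.
fn array_as_slice_demo() {
    let arr = [7u16, 8, 9];
    let start = core::ptr::NonNull::from(&arr).cast::<u16>().as_ptr();
    let slice_ptr = core::ptr::slice_from_raw_parts(start, 3);
    // SAFETY: `slice_ptr` covers exactly the three initialized elements of
    // `arr`, which is still live here.
    assert_eq!(unsafe { &*slice_ptr }, &arr[..]);
}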

impl<'a> PtrInner<'a, [u8]> {
    /// Attempts to cast `self` to a `U` using the given cast type.
    ///
    /// If `U` is a slice DST and pointer metadata (`meta`) is provided, then
    /// the cast will only succeed if it would produce an object with the given
    /// metadata.
    ///
    /// Returns an error if the resulting `U` would be invalidly-aligned, if no
    /// `U` can fit in `self`, or if the provided pointer metadata describes an
    /// invalid instance of `U`. On success, returns a pointer to the
    /// largest-possible `U` which fits in `self`.
    ///
    /// # Safety
    ///
    /// The caller may assume that this implementation is correct, and may rely
    /// on that assumption for the soundness of their code. In particular, the
    /// caller may assume that, if `try_cast_into` returns `Ok((ptr,
    /// remainder))`, then `ptr` and `remainder` refer to non-overlapping byte
    /// ranges within `self`, and that `ptr` and `remainder` entirely cover
    /// `self`. Finally:
    /// - If this is a prefix cast, `ptr` has the same address as `self`.
    /// - If this is a suffix cast, `remainder` has the same address as `self`.
    #[inline]
    pub(crate) fn try_cast_into<U>(
        self,
        cast_type: CastType,
        meta: Option<U::PointerMetadata>,
    ) -> Result<(PtrInner<'a, U>, PtrInner<'a, [u8]>), CastError<Self, U>>
    where
        U: 'a + ?Sized + KnownLayout,
    {
        // PANICS: By invariant, the byte range addressed by
        // `self.as_non_null()` does not wrap around the address space. This
        // implies that the sum of the address (represented as a `usize`) and
        // length do not overflow `usize`, as required by
        // `validate_cast_and_convert_metadata`. Thus, this call to
        // `validate_cast_and_convert_metadata` will only panic if `U` is a DST
        // whose trailing slice element is zero-sized.
        let maybe_metadata = MetadataOf::<U>::validate_cast_and_convert_metadata(
            AsAddress::addr(self.as_non_null().as_ptr()),
            self.meta(),
            cast_type,
            meta,
        );

        let (elems, split_at) = match maybe_metadata {
            Ok((elems, split_at)) => (elems, split_at),
            Err(MetadataCastError::Alignment) => {
                // SAFETY: Since `validate_cast_and_convert_metadata` returned
                // an alignment error, `U` must have an alignment requirement
                // greater than one.
                let err = unsafe { AlignmentError::<_, U>::new_unchecked(self) };
                return Err(CastError::Alignment(err));
            }
            Err(MetadataCastError::Size) => return Err(CastError::Size(SizeError::new(self))),
        };

        // SAFETY: `validate_cast_and_convert_metadata` promises to return
        // `split_at <= self.meta()`.
        //
        // Lemma 0: `l_slice` and `r_slice` are non-overlapping. Proof: By
        // contract on `PtrInner::split_at_unchecked`, the produced `PtrInner`s
        // are always non-overlapping if `self` is a `[T]`; here it is a `[u8]`.
        let (l_slice, r_slice) = unsafe { self.split_at_unchecked(split_at) };

        let (target, remainder) = match cast_type {
            CastType::Prefix => (l_slice, r_slice),
            CastType::Suffix => (r_slice, l_slice),
        };

        let base = target.as_non_null().cast::<u8>();

        let ptr = U::raw_from_ptr_len(base, elems.get());

        // SAFETY:
        // 0. By invariant, if `target`'s referent is not zero sized, then
        //    `target` has provenance valid for some Rust allocation, `A`.
        //    Because `ptr` is derived from `target` via provenance-preserving
        //    operations, `ptr` will also have provenance valid for its entire
        //    referent.
        // 1. `validate_cast_and_convert_metadata` promises that the object
        //    described by `elems` and `split_at` lives at a byte range which is
        //    a subset of the input byte range. Thus, by invariant, if
        //    `target`'s referent is not zero sized, then `target` refers to an
        //    allocation which is guaranteed to live for at least `'a`, and thus
        //    so does `ptr`.
        Ok((unsafe { PtrInner::new(ptr) }, remainder))
    }
}
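
// A sketch of the prefix-cast bookkeeping that `try_cast_into` delegates to
// `validate_cast_and_convert_metadata`, specialized to a sized `U` and
// written with core APIs only: the cast succeeds when the buffer is aligned
// for `U` and long enough, and the remainder is whatever follows the prefix.
// The function name is illustrative, not part of zerocopy.
fn try_prefix_cast_demo<U>(bytes: &[u8]) -> Option<(*const U, &[u8])> {
    let size = core::mem::size_of::<U>();
    if bytes.as_ptr().align_offset(core::mem::align_of::<U>()) != 0 || bytes.len() < size {
        return None;
    }
    // The prefix and remainder partition `bytes`, mirroring the contract that
    // `ptr` and `remainder` are non-overlapping and together cover `self`.
    Some((bytes.as_ptr().cast::<U>(), &bytes[size..]))
}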

#[cfg(test)]
mod tests {
    use super::*;
    use crate::*;

    #[test]
    fn test_meta() {
        let arr = [1; 16];
        let dst = <[u8]>::ref_from_bytes(&arr[..]).unwrap();
        let ptr = PtrInner::from_ref(dst);
        assert_eq!(ptr.meta().get(), 16);

        // SAFETY: 8 is less than 16
        let ptr = unsafe { ptr.with_meta(8) };

        assert_eq!(ptr.meta().get(), 8);
    }

    #[test]
    fn test_split_at() {
        fn test_split_at<const OFFSET: usize, const BUFFER_SIZE: usize>() {
            #[derive(FromBytes, KnownLayout, SplitAt, Immutable)]
            #[repr(C)]
            struct SliceDst<const OFFSET: usize> {
                prefix: [u8; OFFSET],
                trailing: [u8],
            }

            let n: usize = BUFFER_SIZE - OFFSET;
            let arr = [1; BUFFER_SIZE];
            let dst = SliceDst::<OFFSET>::ref_from_bytes(&arr[..]).unwrap();
            let ptr = PtrInner::from_ref(dst);
            for i in 0..=n {
                assert_eq!(ptr.meta().get(), n);
                // SAFETY: `i` is in bounds by construction.
                let i = unsafe { MetadataOf::new_unchecked(i) };
                // SAFETY: `i` is in bounds by construction.
                let (l, r) = unsafe { ptr.split_at_unchecked(i) };
                // SAFETY: Points to a valid value by construction.
                #[allow(clippy::undocumented_unsafe_blocks, clippy::as_conversions)]
                // Clippy false positive
                let l_sum: usize = l
                    .trailing_slice()
                    .iter()
                    .map(|ptr| unsafe { core::ptr::read_unaligned(ptr.as_non_null().as_ptr()) }
                        as usize)
                    .sum();
                // SAFETY: Points to a valid value by construction.
                #[allow(clippy::undocumented_unsafe_blocks, clippy::as_conversions)]
                // Clippy false positive
                let r_sum: usize = r
                    .iter()
                    .map(|ptr| unsafe { core::ptr::read_unaligned(ptr.as_non_null().as_ptr()) }
                        as usize)
                    .sum();
                assert_eq!(l_sum, i.get());
                assert_eq!(r_sum, n - i.get());
                assert_eq!(l_sum + r_sum, n);
            }
        }

        test_split_at::<0, 16>();
        test_split_at::<1, 17>();
        test_split_at::<2, 18>();
    }
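
    // A minimal companion sketch in the style of the tests above (the test
    // name is illustrative): `slice_unchecked` yields a sub-slice pointer
    // whose metadata and contents match the requested range.
    #[test]
    fn test_slice_unchecked_sketch() {
        let arr = [1u8, 2, 3, 4];
        let ptr = PtrInner::from_ref(&arr[..]);
        assert_eq!(ptr.meta().get(), 4);
        // SAFETY: `1 <= 3` and `3 <= ptr.meta() = 4`.
        let sub = unsafe { ptr.slice_unchecked(1..3) };
        assert_eq!(sub.meta().get(), 2);
        // SAFETY: Each element pointer produced by `iter` points to an
        // initialized `u8` within `arr`.
        #[allow(clippy::undocumented_unsafe_blocks)]
        let sum: usize = sub
            .iter()
            .map(|p| usize::from(unsafe { core::ptr::read_unaligned(p.as_non_null().as_ptr()) }))
            .sum();
        assert_eq!(sum, 2 + 3);
    }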

    #[test]
    fn test_trailing_slice() {
        fn test_trailing_slice<const OFFSET: usize, const BUFFER_SIZE: usize>() {
            #[derive(FromBytes, KnownLayout, SplitAt, Immutable)]
            #[repr(C)]
            struct SliceDst<const OFFSET: usize> {
                prefix: [u8; OFFSET],
                trailing: [u8],
            }

            let n: usize = BUFFER_SIZE - OFFSET;
            let arr = [1; BUFFER_SIZE];
            let dst = SliceDst::<OFFSET>::ref_from_bytes(&arr[..]).unwrap();
            let ptr = PtrInner::from_ref(dst);

            assert_eq!(ptr.meta().get(), n);
            let trailing = ptr.trailing_slice();
            assert_eq!(trailing.meta().get(), n);

            assert_eq!(
                // SAFETY: We assume this to be sound for the sake of this test,
                // which will fail, here, in miri, if the safety precondition of
                // `offset_from` is not satisfied.
                unsafe {
                    #[allow(clippy::as_conversions)]
                    let offset = (trailing.as_non_null().as_ptr() as *mut u8)
                        .offset_from(ptr.as_non_null().as_ptr() as *mut _);
                    offset
                },
                isize::try_from(OFFSET).unwrap(),
            );

            // SAFETY: Points to a valid value by construction.
            #[allow(clippy::undocumented_unsafe_blocks, clippy::as_conversions)]
            // Clippy false positive
            let trailing: usize =
                trailing
                    .iter()
                    .map(|ptr| unsafe { core::ptr::read_unaligned(ptr.as_non_null().as_ptr()) }
                        as usize)
                    .sum();

            assert_eq!(trailing, n);
        }

        test_trailing_slice::<0, 16>();
        test_trailing_slice::<1, 17>();
        test_trailing_slice::<2, 18>();
    }
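
    // Another small sketch (illustrative name): `as_slice` preserves both the
    // address and the length of the array pointer it was derived from.
    #[test]
    fn test_as_slice_sketch() {
        let arr = [5u8, 6, 7];
        let ptr = PtrInner::from_ref(&arr).as_slice();
        assert_eq!(ptr.meta().get(), 3);
        assert_eq!(ptr.as_non_null().cast::<u8>(), core::ptr::NonNull::from(&arr).cast::<u8>());
    }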
}