/rust/registry/src/index.crates.io-1949cf8c6b5b557f/bytemuck-1.24.0/src/allocation.rs
Line | Count | Source |
1 | | #![cfg(feature = "extern_crate_alloc")] |
2 | | #![allow(clippy::duplicated_attributes)] |
3 | | |
4 | | //! Stuff to boost things in the `alloc` crate. |
5 | | //! |
6 | | //! * You must enable the `extern_crate_alloc` feature of `bytemuck` or you will |
7 | | //! not be able to use this module! This is generally done by adding the |
8 | | //! feature to the dependency in Cargo.toml like so: |
9 | | //! |
10 | | //! `bytemuck = { version = "VERSION_YOU_ARE_USING", features = |
11 | | //! ["extern_crate_alloc"]}` |
12 | | |
13 | | use super::*; |
14 | | #[cfg(target_has_atomic = "ptr")] |
15 | | use alloc::sync::Arc; |
16 | | use alloc::{ |
17 | | alloc::{alloc_zeroed, Layout}, |
18 | | boxed::Box, |
19 | | rc::Rc, |
20 | | vec, |
21 | | vec::Vec, |
22 | | }; |
23 | | use core::{ |
24 | | mem::{size_of_val, ManuallyDrop}, |
25 | | ops::{Deref, DerefMut}, |
26 | | }; |
27 | | |
28 | | /// As [`try_cast_box`], but unwraps for you. |
29 | | #[inline] |
30 | 0 | pub fn cast_box<A: NoUninit, B: AnyBitPattern>(input: Box<A>) -> Box<B> { |
31 | 0 | try_cast_box(input).map_err(|(e, _v)| e).unwrap() |
32 | 0 | } |
33 | | |
34 | | /// Attempts to cast the content type of a [`Box`]. |
35 | | /// |
36 | | /// On failure you get back an error along with the starting `Box`. |
37 | | /// |
38 | | /// ## Failure |
39 | | /// |
40 | | /// * The start and end content type of the `Box` must have the exact same |
41 | | /// alignment. |
42 | | /// * The start and end size of the `Box` must have the exact same size. |
43 | | #[inline] |
44 | 0 | pub fn try_cast_box<A: NoUninit, B: AnyBitPattern>( |
45 | 0 | input: Box<A>, |
46 | 0 | ) -> Result<Box<B>, (PodCastError, Box<A>)> { |
47 | 0 | if align_of::<A>() != align_of::<B>() { |
48 | 0 | Err((PodCastError::AlignmentMismatch, input)) |
49 | 0 | } else if size_of::<A>() != size_of::<B>() { |
50 | 0 | Err((PodCastError::SizeMismatch, input)) |
51 | | } else { |
52 | | // Note(Lokathor): This is much simpler than with the Vec casting! |
53 | 0 | let ptr: *mut B = Box::into_raw(input) as *mut B; |
54 | 0 | Ok(unsafe { Box::from_raw(ptr) }) |
55 | | } |
56 | 0 | } |
57 | | |
58 | | /// Allocates a `Box<T>` with all of the contents being zeroed out. |
59 | | /// |
60 | | /// This uses the global allocator to create a zeroed allocation and _then_ |
61 | | /// turns it into a Box. In other words, it's 100% assured that the zeroed data |
62 | | /// won't be put temporarily on the stack. You can make a box of any size |
63 | | /// without fear of a stack overflow. |
64 | | /// |
65 | | /// ## Failure |
66 | | /// |
67 | | /// This fails if the allocation fails. |
68 | | #[inline] |
69 | 0 | pub fn try_zeroed_box<T: Zeroable>() -> Result<Box<T>, ()> { |
70 | 0 | if size_of::<T>() == 0 { |
71 | | // This will not allocate but simply create an arbitrary non-null |
72 | | // aligned pointer, valid for Box for a zero-sized pointee. |
73 | 0 | let ptr = core::ptr::NonNull::dangling().as_ptr(); |
74 | 0 | return Ok(unsafe { Box::from_raw(ptr) }); |
75 | 0 | } |
76 | 0 | let layout = Layout::new::<T>(); |
77 | 0 | let ptr = unsafe { alloc_zeroed(layout) }; |
78 | 0 | if ptr.is_null() { |
79 | | // we don't know what the error is because `alloc_zeroed` is a dumb API |
80 | 0 | Err(()) |
81 | | } else { |
82 | 0 | Ok(unsafe { Box::<T>::from_raw(ptr as *mut T) }) |
83 | | } |
84 | 0 | } |
85 | | |
86 | | /// As [`try_zeroed_box`], but unwraps for you. |
87 | | #[inline] |
88 | 0 | pub fn zeroed_box<T: Zeroable>() -> Box<T> { |
89 | 0 | try_zeroed_box().unwrap() |
90 | 0 | } |
91 | | |
92 | | /// Allocates a `Vec<T>` of length and capacity exactly equal to `length` and |
93 | | /// all elements zeroed. |
94 | | /// |
95 | | /// ## Failure |
96 | | /// |
97 | | /// This fails if the allocation fails, or if a layout cannot be calculated for |
98 | | /// the allocation. |
99 | 0 | pub fn try_zeroed_vec<T: Zeroable>(length: usize) -> Result<Vec<T>, ()> { |
100 | 0 | if length == 0 { |
101 | 0 | Ok(Vec::new()) |
102 | | } else { |
103 | 0 | let boxed_slice = try_zeroed_slice_box(length)?; |
104 | 0 | Ok(boxed_slice.into_vec()) |
105 | | } |
106 | 0 | } |
107 | | |
108 | | /// As [`try_zeroed_vec`] but unwraps for you |
109 | 0 | pub fn zeroed_vec<T: Zeroable>(length: usize) -> Vec<T> { |
110 | 0 | try_zeroed_vec(length).unwrap() |
111 | 0 | } |
112 | | |
113 | | /// Allocates a `Box<[T]>` with all contents being zeroed out. |
114 | | /// |
115 | | /// This uses the global allocator to create a zeroed allocation and _then_ |
116 | | /// turns it into a Box. In other words, it's 100% assured that the zeroed data |
117 | | /// won't be put temporarily on the stack. You can make a box of any size |
118 | | /// without fear of a stack overflow. |
119 | | /// |
120 | | /// ## Failure |
121 | | /// |
122 | | /// This fails if the allocation fails, or if a layout cannot be calculated for |
123 | | /// the allocation. |
124 | | #[inline] |
125 | 0 | pub fn try_zeroed_slice_box<T: Zeroable>( |
126 | 0 | length: usize, |
127 | 0 | ) -> Result<Box<[T]>, ()> { |
128 | 0 | if size_of::<T>() == 0 || length == 0 { |
129 | | // This will not allocate but simply create an arbitrary non-null aligned |
130 | | // slice pointer, valid for Box for a zero-sized pointee. |
131 | 0 | let ptr = core::ptr::NonNull::dangling().as_ptr(); |
132 | 0 | let slice_ptr = core::ptr::slice_from_raw_parts_mut(ptr, length); |
133 | 0 | return Ok(unsafe { Box::from_raw(slice_ptr) }); |
134 | 0 | } |
135 | 0 | let layout = core::alloc::Layout::array::<T>(length).map_err(|_| ())?; |
136 | 0 | let ptr = unsafe { alloc_zeroed(layout) }; |
137 | 0 | if ptr.is_null() { |
138 | | // we don't know what the error is because `alloc_zeroed` is a dumb API |
139 | 0 | Err(()) |
140 | | } else { |
141 | 0 | let slice = |
142 | 0 | unsafe { core::slice::from_raw_parts_mut(ptr as *mut T, length) }; |
143 | 0 | Ok(unsafe { Box::<[T]>::from_raw(slice) }) |
144 | | } |
145 | 0 | } |
146 | | |
147 | | /// As [`try_zeroed_slice_box`], but unwraps for you. |
148 | 0 | pub fn zeroed_slice_box<T: Zeroable>(length: usize) -> Box<[T]> { |
149 | 0 | try_zeroed_slice_box(length).unwrap() |
150 | 0 | } |
151 | | |
152 | | /// Allocates a `Arc<T>` with all contents being zeroed out. |
153 | | #[cfg(all(feature = "alloc_uninit", target_has_atomic = "ptr"))] |
154 | | pub fn zeroed_arc<T: Zeroable>() -> Arc<T> { |
155 | | let mut arc = Arc::new_uninit(); |
156 | | crate::write_zeroes(Arc::get_mut(&mut arc).unwrap()); // unwrap never fails for a newly allocated Arc |
157 | | unsafe { arc.assume_init() } |
158 | | } |
159 | | |
160 | | /// Allocates a `Arc<[T]>` with all contents being zeroed out. |
161 | | #[cfg(all(feature = "alloc_uninit", target_has_atomic = "ptr"))] |
162 | | pub fn zeroed_arc_slice<T: Zeroable>(length: usize) -> Arc<[T]> { |
163 | | let mut arc = Arc::new_uninit_slice(length); |
164 | | crate::fill_zeroes(Arc::get_mut(&mut arc).unwrap()); // unwrap never fails for a newly allocated Arc |
165 | | unsafe { arc.assume_init() } |
166 | | } |
167 | | |
168 | | /// Allocates a `Rc<T>` with all contents being zeroed out. |
169 | | #[cfg(feature = "alloc_uninit")] |
170 | | pub fn zeroed_rc<T: Zeroable>() -> Rc<T> { |
171 | | let mut rc = Rc::new_uninit(); |
172 | | crate::write_zeroes(Rc::get_mut(&mut rc).unwrap()); // unwrap never fails for a newly allocated Rc |
173 | | unsafe { rc.assume_init() } |
174 | | } |
175 | | |
176 | | /// Allocates a `Rc<[T]>` with all contents being zeroed out. |
177 | | #[cfg(feature = "alloc_uninit")] |
178 | | pub fn zeroed_rc_slice<T: Zeroable>(length: usize) -> Rc<[T]> { |
179 | | let mut rc = Rc::new_uninit_slice(length); |
180 | | crate::fill_zeroes(Rc::get_mut(&mut rc).unwrap()); // unwrap never fails for a newly allocated Rc |
181 | | unsafe { rc.assume_init() } |
182 | | } |
183 | | |
184 | | /// As [`try_cast_slice_box`], but unwraps for you. |
185 | | #[inline] |
186 | 0 | pub fn cast_slice_box<A: NoUninit, B: AnyBitPattern>( |
187 | 0 | input: Box<[A]>, |
188 | 0 | ) -> Box<[B]> { |
189 | 0 | try_cast_slice_box(input).map_err(|(e, _v)| e).unwrap() |
190 | 0 | } |
191 | | |
192 | | /// Attempts to cast the content type of a `Box<[T]>`. |
193 | | /// |
194 | | /// On failure you get back an error along with the starting `Box<[T]>`. |
195 | | /// |
196 | | /// ## Failure |
197 | | /// |
198 | | /// * The start and end content type of the `Box<[T]>` must have the exact same |
199 | | /// alignment. |
200 | | /// * The start and end content size in bytes of the `Box<[T]>` must be the |
201 | | /// exact same. |
202 | | #[inline] |
203 | 0 | pub fn try_cast_slice_box<A: NoUninit, B: AnyBitPattern>( |
204 | 0 | input: Box<[A]>, |
205 | 0 | ) -> Result<Box<[B]>, (PodCastError, Box<[A]>)> { |
206 | 0 | if align_of::<A>() != align_of::<B>() { |
207 | 0 | Err((PodCastError::AlignmentMismatch, input)) |
208 | 0 | } else if size_of::<A>() != size_of::<B>() { |
209 | 0 | let input_bytes = size_of_val::<[A]>(&*input); |
210 | 0 | if (size_of::<B>() == 0 && input_bytes != 0) |
211 | 0 | || (size_of::<B>() != 0 && input_bytes % size_of::<B>() != 0) |
212 | | { |
213 | | // If the size in bytes of the underlying buffer does not match an exact |
214 | | // multiple of the size of B, we cannot cast between them. |
215 | 0 | Err((PodCastError::OutputSliceWouldHaveSlop, input)) |
216 | | } else { |
217 | | // Because the size is an exact multiple, we can now change the length |
218 | | // of the slice and recreate the Box |
219 | | // NOTE: This is a valid operation because according to the docs of |
220 | | // std::alloc::GlobalAlloc::dealloc(), the Layout that was used to alloc |
221 | | // the block must be the same Layout that is used to dealloc the block. |
222 | | // Luckily, Layout only stores two things, the alignment, and the size in |
223 | | // bytes. So as long as both of those stay the same, the Layout will |
224 | | // remain a valid input to dealloc. |
225 | 0 | let length = |
226 | 0 | if size_of::<B>() != 0 { input_bytes / size_of::<B>() } else { 0 }; |
227 | 0 | let box_ptr: *mut A = Box::into_raw(input) as *mut A; |
228 | 0 | let ptr: *mut [B] = |
229 | 0 | unsafe { core::slice::from_raw_parts_mut(box_ptr as *mut B, length) }; |
230 | 0 | Ok(unsafe { Box::<[B]>::from_raw(ptr) }) |
231 | | } |
232 | | } else { |
233 | 0 | let box_ptr: *mut [A] = Box::into_raw(input); |
234 | 0 | let ptr: *mut [B] = box_ptr as *mut [B]; |
235 | 0 | Ok(unsafe { Box::<[B]>::from_raw(ptr) }) |
236 | | } |
237 | 0 | } |
238 | | |
239 | | /// As [`try_cast_vec`], but unwraps for you. |
240 | | #[inline] |
241 | 0 | pub fn cast_vec<A: NoUninit, B: AnyBitPattern>(input: Vec<A>) -> Vec<B> { |
242 | 0 | try_cast_vec(input).map_err(|(e, _v)| e).unwrap() |
243 | 0 | } |
244 | | |
245 | | /// Attempts to cast the content type of a [`Vec`]. |
246 | | /// |
247 | | /// On failure you get back an error along with the starting `Vec`. |
248 | | /// |
249 | | /// ## Failure |
250 | | /// |
251 | | /// * The start and end content type of the `Vec` must have the exact same |
252 | | /// alignment. |
253 | | /// * The start and end content size in bytes of the `Vec` must be the exact |
254 | | /// same. |
255 | | /// * The start and end capacity in bytes of the `Vec` must be the exact same. |
256 | | #[inline] |
257 | 1.81k | pub fn try_cast_vec<A: NoUninit, B: AnyBitPattern>( |
258 | 1.81k | input: Vec<A>, |
259 | 1.81k | ) -> Result<Vec<B>, (PodCastError, Vec<A>)> { |
260 | 1.81k | if align_of::<A>() != align_of::<B>() { |
261 | 0 | Err((PodCastError::AlignmentMismatch, input)) |
262 | 1.81k | } else if size_of::<A>() != size_of::<B>() { |
263 | 0 | let input_size = size_of_val::<[A]>(&*input); |
264 | 0 | let input_capacity = input.capacity() * size_of::<A>(); |
265 | 0 | if (size_of::<B>() == 0 && input_capacity != 0) |
266 | 0 | || (size_of::<B>() != 0 |
267 | 0 | && (input_size % size_of::<B>() != 0 |
268 | 0 | || input_capacity % size_of::<B>() != 0)) |
269 | | { |
270 | | // If the size in bytes of the underlying buffer does not match an exact |
271 | | // multiple of the size of B, we cannot cast between them. |
272 | | // Note that we have to pay special attention to make sure that both |
273 | | // length and capacity are valid under B, as we do not want to |
274 | | // change which bytes are considered part of the initialized slice |
275 | | // of the Vec |
276 | 0 | Err((PodCastError::OutputSliceWouldHaveSlop, input)) |
277 | | } else { |
278 | | // Because the size is an exact multiple, we can now change the length and |
279 | | // capacity and recreate the Vec |
280 | | // NOTE: This is a valid operation because according to the docs of |
281 | | // std::alloc::GlobalAlloc::dealloc(), the Layout that was used to alloc |
282 | | // the block must be the same Layout that is used to dealloc the block. |
283 | | // Luckily, Layout only stores two things, the alignment, and the size in |
284 | | // bytes. So as long as both of those stay the same, the Layout will |
285 | | // remain a valid input to dealloc. |
286 | | |
287 | | // Note(Lokathor): First we record the length and capacity, which don't |
288 | | // have any secret provenance metadata. |
289 | 0 | let length: usize = |
290 | 0 | if size_of::<B>() != 0 { input_size / size_of::<B>() } else { 0 }; |
291 | 0 | let capacity: usize = |
292 | 0 | if size_of::<B>() != 0 { input_capacity / size_of::<B>() } else { 0 }; |
293 | | // Note(Lokathor): Next we "pre-forget" the old Vec by wrapping with |
294 | | // ManuallyDrop, because if we used `core::mem::forget` after taking the |
295 | | // pointer then that would invalidate our pointer. In nightly there's a |
296 | | // "into raw parts" method, which we can switch this too eventually. |
297 | 0 | let mut manual_drop_vec = ManuallyDrop::new(input); |
298 | 0 | let vec_ptr: *mut A = manual_drop_vec.as_mut_ptr(); |
299 | 0 | let ptr: *mut B = vec_ptr as *mut B; |
300 | 0 | Ok(unsafe { Vec::from_raw_parts(ptr, length, capacity) }) |
301 | | } |
302 | | } else { |
303 | | // Note(Lokathor): First we record the length and capacity, which don't have |
304 | | // any secret provenance metadata. |
305 | 1.81k | let length: usize = input.len(); |
306 | 1.81k | let capacity: usize = input.capacity(); |
307 | | // Note(Lokathor): Next we "pre-forget" the old Vec by wrapping with |
308 | | // ManuallyDrop, because if we used `core::mem::forget` after taking the |
309 | | // pointer then that would invalidate our pointer. In nightly there's a |
310 | | // "into raw parts" method, which we can switch this too eventually. |
311 | 1.81k | let mut manual_drop_vec = ManuallyDrop::new(input); |
312 | 1.81k | let vec_ptr: *mut A = manual_drop_vec.as_mut_ptr(); |
313 | 1.81k | let ptr: *mut B = vec_ptr as *mut B; |
314 | 1.81k | Ok(unsafe { Vec::from_raw_parts(ptr, length, capacity) }) |
315 | | } |
316 | 1.81k | } Unexecuted instantiation: bytemuck::allocation::try_cast_vec::<f32, u8> bytemuck::allocation::try_cast_vec::<u8, u8> Line | Count | Source | 257 | 1.81k | pub fn try_cast_vec<A: NoUninit, B: AnyBitPattern>( | 258 | 1.81k | input: Vec<A>, | 259 | 1.81k | ) -> Result<Vec<B>, (PodCastError, Vec<A>)> { | 260 | 1.81k | if align_of::<A>() != align_of::<B>() { | 261 | 0 | Err((PodCastError::AlignmentMismatch, input)) | 262 | 1.81k | } else if size_of::<A>() != size_of::<B>() { | 263 | 0 | let input_size = size_of_val::<[A]>(&*input); | 264 | 0 | let input_capacity = input.capacity() * size_of::<A>(); | 265 | 0 | if (size_of::<B>() == 0 && input_capacity != 0) | 266 | 0 | || (size_of::<B>() != 0 | 267 | 0 | && (input_size % size_of::<B>() != 0 | 268 | 0 | || input_capacity % size_of::<B>() != 0)) | 269 | | { | 270 | | // If the size in bytes of the underlying buffer does not match an exact | 271 | | // multiple of the size of B, we cannot cast between them. | 272 | | // Note that we have to pay special attention to make sure that both | 273 | | // length and capacity are valid under B, as we do not want to | 274 | | // change which bytes are considered part of the initialized slice | 275 | | // of the Vec | 276 | 0 | Err((PodCastError::OutputSliceWouldHaveSlop, input)) | 277 | | } else { | 278 | | // Because the size is an exact multiple, we can now change the length and | 279 | | // capacity and recreate the Vec | 280 | | // NOTE: This is a valid operation because according to the docs of | 281 | | // std::alloc::GlobalAlloc::dealloc(), the Layout that was used to alloc | 282 | | // the block must be the same Layout that is used to dealloc the block. | 283 | | // Luckily, Layout only stores two things, the alignment, and the size in | 284 | | // bytes. So as long as both of those stay the same, the Layout will | 285 | | // remain a valid input to dealloc. 
| 286 | | | 287 | | // Note(Lokathor): First we record the length and capacity, which don't | 288 | | // have any secret provenance metadata. | 289 | 0 | let length: usize = | 290 | 0 | if size_of::<B>() != 0 { input_size / size_of::<B>() } else { 0 }; | 291 | 0 | let capacity: usize = | 292 | 0 | if size_of::<B>() != 0 { input_capacity / size_of::<B>() } else { 0 }; | 293 | | // Note(Lokathor): Next we "pre-forget" the old Vec by wrapping with | 294 | | // ManuallyDrop, because if we used `core::mem::forget` after taking the | 295 | | // pointer then that would invalidate our pointer. In nightly there's a | 296 | | // "into raw parts" method, which we can switch this too eventually. | 297 | 0 | let mut manual_drop_vec = ManuallyDrop::new(input); | 298 | 0 | let vec_ptr: *mut A = manual_drop_vec.as_mut_ptr(); | 299 | 0 | let ptr: *mut B = vec_ptr as *mut B; | 300 | 0 | Ok(unsafe { Vec::from_raw_parts(ptr, length, capacity) }) | 301 | | } | 302 | | } else { | 303 | | // Note(Lokathor): First we record the length and capacity, which don't have | 304 | | // any secret provenance metadata. | 305 | 1.81k | let length: usize = input.len(); | 306 | 1.81k | let capacity: usize = input.capacity(); | 307 | | // Note(Lokathor): Next we "pre-forget" the old Vec by wrapping with | 308 | | // ManuallyDrop, because if we used `core::mem::forget` after taking the | 309 | | // pointer then that would invalidate our pointer. In nightly there's a | 310 | | // "into raw parts" method, which we can switch this too eventually. | 311 | 1.81k | let mut manual_drop_vec = ManuallyDrop::new(input); | 312 | 1.81k | let vec_ptr: *mut A = manual_drop_vec.as_mut_ptr(); | 313 | 1.81k | let ptr: *mut B = vec_ptr as *mut B; | 314 | 1.81k | Ok(unsafe { Vec::from_raw_parts(ptr, length, capacity) }) | 315 | | } | 316 | 1.81k | } |
Unexecuted instantiation: bytemuck::allocation::try_cast_vec::<u16, u8> Unexecuted instantiation: bytemuck::allocation::try_cast_vec::<_, _> |
317 | | |
318 | | /// This "collects" a slice of pod data into a vec of a different pod type. |
319 | | /// |
320 | | /// Unlike with [`cast_slice`] and [`cast_slice_mut`], this will always work. |
321 | | /// |
322 | | /// The output vec will be of a minimal size/capacity to hold the slice given. |
323 | | /// |
324 | | /// ```rust |
325 | | /// # use bytemuck::*; |
326 | | /// let halfwords: [u16; 4] = [5, 6, 7, 8]; |
327 | | /// let vec_of_words: Vec<u32> = pod_collect_to_vec(&halfwords); |
328 | | /// if cfg!(target_endian = "little") { |
329 | | /// assert_eq!(&vec_of_words[..], &[0x0006_0005, 0x0008_0007][..]) |
330 | | /// } else { |
331 | | /// assert_eq!(&vec_of_words[..], &[0x0005_0006, 0x0007_0008][..]) |
332 | | /// } |
333 | | /// ``` |
334 | 0 | pub fn pod_collect_to_vec<A: NoUninit, B: NoUninit + AnyBitPattern>( |
335 | 0 | src: &[A], |
336 | 0 | ) -> Vec<B> { |
337 | 0 | let src_size = core::mem::size_of_val(src); |
338 | | // Note(Lokathor): dst_count is rounded up so that the dest will always be at |
339 | | // least as many bytes as the src. |
340 | 0 | let dst_count = src_size / size_of::<B>() |
341 | 0 | + if src_size % size_of::<B>() != 0 { 1 } else { 0 }; |
342 | 0 | let mut dst = vec![B::zeroed(); dst_count]; |
343 | | |
344 | 0 | let src_bytes: &[u8] = cast_slice(src); |
345 | 0 | let dst_bytes: &mut [u8] = cast_slice_mut(&mut dst[..]); |
346 | 0 | dst_bytes[..src_size].copy_from_slice(src_bytes); |
347 | 0 | dst |
348 | 0 | } Unexecuted instantiation: bytemuck::allocation::pod_collect_to_vec::<_, _> Unexecuted instantiation: bytemuck::allocation::pod_collect_to_vec::<u8, f32> Unexecuted instantiation: bytemuck::allocation::pod_collect_to_vec::<u8, u16> |
349 | | |
350 | | /// As [`try_cast_rc`], but unwraps for you. |
351 | | #[inline] |
352 | 0 | pub fn cast_rc<A: NoUninit + AnyBitPattern, B: NoUninit + AnyBitPattern>( |
353 | 0 | input: Rc<A>, |
354 | 0 | ) -> Rc<B> { |
355 | 0 | try_cast_rc(input).map_err(|(e, _v)| e).unwrap() |
356 | 0 | } |
357 | | |
358 | | /// Attempts to cast the content type of a [`Rc`]. |
359 | | /// |
360 | | /// On failure you get back an error along with the starting `Rc`. |
361 | | /// |
362 | | /// The bounds on this function are the same as [`cast_mut`], because a user |
363 | | /// could call `Rc::get_unchecked_mut` on the output, which could be observable |
364 | | /// in the input. |
365 | | /// |
366 | | /// ## Failure |
367 | | /// |
368 | | /// * The start and end content type of the `Rc` must have the exact same |
369 | | /// alignment. |
370 | | /// * The start and end size of the `Rc` must have the exact same size. |
371 | | #[inline] |
372 | 0 | pub fn try_cast_rc<A: NoUninit + AnyBitPattern, B: NoUninit + AnyBitPattern>( |
373 | 0 | input: Rc<A>, |
374 | 0 | ) -> Result<Rc<B>, (PodCastError, Rc<A>)> { |
375 | 0 | if align_of::<A>() != align_of::<B>() { |
376 | 0 | Err((PodCastError::AlignmentMismatch, input)) |
377 | 0 | } else if size_of::<A>() != size_of::<B>() { |
378 | 0 | Err((PodCastError::SizeMismatch, input)) |
379 | | } else { |
380 | | // Safety: Rc::from_raw requires size and alignment match, which is met. |
381 | 0 | let ptr: *const B = Rc::into_raw(input) as *const B; |
382 | 0 | Ok(unsafe { Rc::from_raw(ptr) }) |
383 | | } |
384 | 0 | } |
385 | | |
386 | | /// As [`try_cast_arc`], but unwraps for you. |
387 | | #[inline] |
388 | | #[cfg(target_has_atomic = "ptr")] |
389 | 0 | pub fn cast_arc<A: NoUninit + AnyBitPattern, B: NoUninit + AnyBitPattern>( |
390 | 0 | input: Arc<A>, |
391 | 0 | ) -> Arc<B> { |
392 | 0 | try_cast_arc(input).map_err(|(e, _v)| e).unwrap() |
393 | 0 | } |
394 | | |
395 | | /// Attempts to cast the content type of a [`Arc`]. |
396 | | /// |
397 | | /// On failure you get back an error along with the starting `Arc`. |
398 | | /// |
399 | | /// The bounds on this function are the same as [`cast_mut`], because a user |
400 | | /// could call `Rc::get_unchecked_mut` on the output, which could be observable |
401 | | /// in the input. |
402 | | /// |
403 | | /// ## Failure |
404 | | /// |
405 | | /// * The start and end content type of the `Arc` must have the exact same |
406 | | /// alignment. |
407 | | /// * The start and end size of the `Arc` must have the exact same size. |
408 | | #[inline] |
409 | | #[cfg(target_has_atomic = "ptr")] |
410 | 0 | pub fn try_cast_arc< |
411 | 0 | A: NoUninit + AnyBitPattern, |
412 | 0 | B: NoUninit + AnyBitPattern, |
413 | 0 | >( |
414 | 0 | input: Arc<A>, |
415 | 0 | ) -> Result<Arc<B>, (PodCastError, Arc<A>)> { |
416 | 0 | if align_of::<A>() != align_of::<B>() { |
417 | 0 | Err((PodCastError::AlignmentMismatch, input)) |
418 | 0 | } else if size_of::<A>() != size_of::<B>() { |
419 | 0 | Err((PodCastError::SizeMismatch, input)) |
420 | | } else { |
421 | | // Safety: Arc::from_raw requires size and alignment match, which is met. |
422 | 0 | let ptr: *const B = Arc::into_raw(input) as *const B; |
423 | 0 | Ok(unsafe { Arc::from_raw(ptr) }) |
424 | | } |
425 | 0 | } |
426 | | |
427 | | /// As [`try_cast_slice_rc`], but unwraps for you. |
428 | | #[inline] |
429 | 0 | pub fn cast_slice_rc< |
430 | 0 | A: NoUninit + AnyBitPattern, |
431 | 0 | B: NoUninit + AnyBitPattern, |
432 | 0 | >( |
433 | 0 | input: Rc<[A]>, |
434 | 0 | ) -> Rc<[B]> { |
435 | 0 | try_cast_slice_rc(input).map_err(|(e, _v)| e).unwrap() |
436 | 0 | } |
437 | | |
438 | | /// Attempts to cast the content type of a `Rc<[T]>`. |
439 | | /// |
440 | | /// On failure you get back an error along with the starting `Rc<[T]>`. |
441 | | /// |
442 | | /// The bounds on this function are the same as [`cast_mut`], because a user |
443 | | /// could call `Rc::get_unchecked_mut` on the output, which could be observable |
444 | | /// in the input. |
445 | | /// |
446 | | /// ## Failure |
447 | | /// |
448 | | /// * The start and end content type of the `Rc<[T]>` must have the exact same |
449 | | /// alignment. |
450 | | /// * The start and end content size in bytes of the `Rc<[T]>` must be the exact |
451 | | /// same. |
452 | | #[inline] |
453 | 0 | pub fn try_cast_slice_rc< |
454 | 0 | A: NoUninit + AnyBitPattern, |
455 | 0 | B: NoUninit + AnyBitPattern, |
456 | 0 | >( |
457 | 0 | input: Rc<[A]>, |
458 | 0 | ) -> Result<Rc<[B]>, (PodCastError, Rc<[A]>)> { |
459 | 0 | if align_of::<A>() != align_of::<B>() { |
460 | 0 | Err((PodCastError::AlignmentMismatch, input)) |
461 | 0 | } else if size_of::<A>() != size_of::<B>() { |
462 | 0 | let input_bytes = size_of_val::<[A]>(&*input); |
463 | 0 | if (size_of::<B>() == 0 && input_bytes != 0) |
464 | 0 | || (size_of::<B>() != 0 && input_bytes % size_of::<B>() != 0) |
465 | | { |
466 | | // If the size in bytes of the underlying buffer does not match an exact |
467 | | // multiple of the size of B, we cannot cast between them. |
468 | 0 | Err((PodCastError::OutputSliceWouldHaveSlop, input)) |
469 | | } else { |
470 | | // Because the size is an exact multiple, we can now change the length |
471 | | // of the slice and recreate the Rc |
472 | | // NOTE: This is a valid operation because according to the docs of |
473 | | // std::rc::Rc::from_raw(), the type U that was in the original Rc<U> |
474 | | // acquired from Rc::into_raw() must have the same size alignment and |
475 | | // size of the type T in the new Rc<T>. So as long as both the size |
476 | | // and alignment stay the same, the Rc will remain a valid Rc. |
477 | 0 | let length = |
478 | 0 | if size_of::<B>() != 0 { input_bytes / size_of::<B>() } else { 0 }; |
479 | 0 | let rc_ptr: *const A = Rc::into_raw(input) as *const A; |
480 | | // Must use ptr::slice_from_raw_parts, because we cannot make an |
481 | | // intermediate const reference, because it has mutable provenance, |
482 | | // nor an intermediate mutable reference, because it could be aliased. |
483 | 0 | let ptr = core::ptr::slice_from_raw_parts(rc_ptr as *const B, length); |
484 | 0 | Ok(unsafe { Rc::<[B]>::from_raw(ptr) }) |
485 | | } |
486 | | } else { |
487 | 0 | let rc_ptr: *const [A] = Rc::into_raw(input); |
488 | 0 | let ptr: *const [B] = rc_ptr as *const [B]; |
489 | 0 | Ok(unsafe { Rc::<[B]>::from_raw(ptr) }) |
490 | | } |
491 | 0 | } |
492 | | |
493 | | /// As [`try_cast_slice_arc`], but unwraps for you. |
494 | | #[inline] |
495 | | #[cfg(target_has_atomic = "ptr")] |
496 | 0 | pub fn cast_slice_arc< |
497 | 0 | A: NoUninit + AnyBitPattern, |
498 | 0 | B: NoUninit + AnyBitPattern, |
499 | 0 | >( |
500 | 0 | input: Arc<[A]>, |
501 | 0 | ) -> Arc<[B]> { |
502 | 0 | try_cast_slice_arc(input).map_err(|(e, _v)| e).unwrap() |
503 | 0 | } |
504 | | |
505 | | /// Attempts to cast the content type of a `Arc<[T]>`. |
506 | | /// |
507 | | /// On failure you get back an error along with the starting `Arc<[T]>`. |
508 | | /// |
509 | | /// The bounds on this function are the same as [`cast_mut`], because a user |
510 | | /// could call `Rc::get_unchecked_mut` on the output, which could be observable |
511 | | /// in the input. |
512 | | /// |
513 | | /// ## Failure |
514 | | /// |
515 | | /// * The start and end content type of the `Arc<[T]>` must have the exact same |
516 | | /// alignment. |
517 | | /// * The start and end content size in bytes of the `Arc<[T]>` must be the |
518 | | /// exact same. |
519 | | #[inline] |
520 | | #[cfg(target_has_atomic = "ptr")] |
521 | 0 | pub fn try_cast_slice_arc< |
522 | 0 | A: NoUninit + AnyBitPattern, |
523 | 0 | B: NoUninit + AnyBitPattern, |
524 | 0 | >( |
525 | 0 | input: Arc<[A]>, |
526 | 0 | ) -> Result<Arc<[B]>, (PodCastError, Arc<[A]>)> { |
527 | 0 | if align_of::<A>() != align_of::<B>() { |
528 | 0 | Err((PodCastError::AlignmentMismatch, input)) |
529 | 0 | } else if size_of::<A>() != size_of::<B>() { |
530 | 0 | let input_bytes = size_of_val::<[A]>(&*input); |
531 | 0 | if (size_of::<B>() == 0 && input_bytes != 0) |
532 | 0 | || (size_of::<B>() != 0 && input_bytes % size_of::<B>() != 0) |
533 | | { |
534 | | // If the size in bytes of the underlying buffer does not match an exact |
535 | | // multiple of the size of B, we cannot cast between them. |
536 | 0 | Err((PodCastError::OutputSliceWouldHaveSlop, input)) |
537 | | } else { |
538 | | // Because the size is an exact multiple, we can now change the length |
539 | | // of the slice and recreate the Arc |
540 | | // NOTE: This is a valid operation because according to the docs of |
541 | | // std::sync::Arc::from_raw(), the type U that was in the original Arc<U> |
542 | | // acquired from Arc::into_raw() must have the same size alignment and |
543 | | // size of the type T in the new Arc<T>. So as long as both the size |
544 | | // and alignment stay the same, the Arc will remain a valid Arc. |
545 | 0 | let length = |
546 | 0 | if size_of::<B>() != 0 { input_bytes / size_of::<B>() } else { 0 }; |
547 | 0 | let arc_ptr: *const A = Arc::into_raw(input) as *const A; |
548 | | // Must use ptr::slice_from_raw_parts, because we cannot make an |
549 | | // intermediate const reference, because it has mutable provenance, |
550 | | // nor an intermediate mutable reference, because it could be aliased. |
551 | 0 | let ptr = core::ptr::slice_from_raw_parts(arc_ptr as *const B, length); |
552 | 0 | Ok(unsafe { Arc::<[B]>::from_raw(ptr) }) |
553 | | } |
554 | | } else { |
555 | 0 | let arc_ptr: *const [A] = Arc::into_raw(input); |
556 | 0 | let ptr: *const [B] = arc_ptr as *const [B]; |
557 | 0 | Ok(unsafe { Arc::<[B]>::from_raw(ptr) }) |
558 | | } |
559 | 0 | } |
560 | | |
/// An extension trait for `TransparentWrapper` and alloc types.
///
/// Every method is a zero-copy conversion: the existing allocation is
/// re-interpreted in place, relying on the `TransparentWrapper` unsafe
/// contract that the wrapper and inner types share one representation.
pub trait TransparentWrapperAlloc<Inner: ?Sized>:
  TransparentWrapper<Inner>
{
  /// Convert a vec of the inner type into a vec of the wrapper type.
  ///
  /// The length, capacity and backing buffer are reused; nothing is
  /// reallocated or copied.
  fn wrap_vec(s: Vec<Inner>) -> Vec<Self>
  where
    Self: Sized,
    Inner: Sized,
  {
    // ManuallyDrop stops the original Vec from freeing the buffer we are
    // about to hand over to the new Vec.
    let mut s = ManuallyDrop::new(s);

    let length = s.len();
    let capacity = s.capacity();
    let ptr = s.as_mut_ptr();

    unsafe {
      // SAFETY:
      // * ptr comes from Vec (and will not be double-dropped)
      // * the two types have the identical representation
      // * the len and capacity fields are valid
      Vec::from_raw_parts(ptr as *mut Self, length, capacity)
    }
  }

  /// Convert a box to the inner type into a box to the wrapper
  /// type.
  ///
  /// # Panics
  ///
  /// Panics if `*mut Inner` and `*mut Self` differ in size — the best
  /// available stable-Rust check that their pointer metadata types match.
  #[inline]
  fn wrap_box(s: Box<Inner>) -> Box<Self> {
    // The unsafe contract requires that these two have
    // identical representations, and thus identical pointer metadata.
    // Assert that Self and Inner have the same pointer size,
    // which is the best we can do to assert their metadata is the same type
    // on stable.
    assert!(size_of::<*mut Inner>() == size_of::<*mut Self>());

    unsafe {
      // A pointer cast doesn't work here because rustc can't tell that
      // the vtables match (because of the `?Sized` restriction relaxation).
      // A `transmute` doesn't work because the sizes are unspecified.
      //
      // SAFETY:
      // * The unsafe contract requires that pointers to Inner and Self have
      //   identical representations
      // * Box is guaranteed to have representation identical to a (non-null)
      //   pointer
      // * The pointer comes from a box (and thus satisfies all safety
      //   requirements of Box)
      let inner_ptr: *mut Inner = Box::into_raw(s);
      let wrapper_ptr: *mut Self = transmute!(inner_ptr);
      Box::from_raw(wrapper_ptr)
    }
  }

  /// Convert an [`Rc`] to the inner type into an `Rc` to the wrapper type.
  ///
  /// # Panics
  ///
  /// Panics if `*mut Inner` and `*mut Self` differ in size.
  #[inline]
  fn wrap_rc(s: Rc<Inner>) -> Rc<Self> {
    // The unsafe contract requires that these two have
    // identical representations, and thus identical pointer metadata.
    // Assert that Self and Inner have the same pointer size,
    // which is the best we can do to assert their metadata is the same type
    // on stable.
    assert!(size_of::<*mut Inner>() == size_of::<*mut Self>());

    unsafe {
      // A pointer cast doesn't work here because rustc can't tell that
      // the vtables match (because of the `?Sized` restriction relaxation).
      // A `transmute` doesn't work because the layout of Rc is unspecified.
      //
      // SAFETY:
      // * The unsafe contract requires that pointers to Inner and Self have
      //   identical representations, and that the size and alignment of Inner
      //   and Self are the same, which meets the safety requirements of
      //   Rc::from_raw
      let inner_ptr: *const Inner = Rc::into_raw(s);
      let wrapper_ptr: *const Self = transmute!(inner_ptr);
      Rc::from_raw(wrapper_ptr)
    }
  }

  /// Convert an [`Arc`] to the inner type into an `Arc` to the wrapper type.
  ///
  /// # Panics
  ///
  /// Panics if `*mut Inner` and `*mut Self` differ in size.
  #[inline]
  #[cfg(target_has_atomic = "ptr")]
  fn wrap_arc(s: Arc<Inner>) -> Arc<Self> {
    // The unsafe contract requires that these two have
    // identical representations, and thus identical pointer metadata.
    // Assert that Self and Inner have the same pointer size,
    // which is the best we can do to assert their metadata is the same type
    // on stable.
    assert!(size_of::<*mut Inner>() == size_of::<*mut Self>());

    unsafe {
      // A pointer cast doesn't work here because rustc can't tell that
      // the vtables match (because of the `?Sized` restriction relaxation).
      // A `transmute` doesn't work because the layout of Arc is unspecified.
      //
      // SAFETY:
      // * The unsafe contract requires that pointers to Inner and Self have
      //   identical representations, and that the size and alignment of Inner
      //   and Self are the same, which meets the safety requirements of
      //   Arc::from_raw
      let inner_ptr: *const Inner = Arc::into_raw(s);
      let wrapper_ptr: *const Self = transmute!(inner_ptr);
      Arc::from_raw(wrapper_ptr)
    }
  }

  /// Convert a vec of the wrapper type into a vec of the inner type.
  ///
  /// The length, capacity and backing buffer are reused; nothing is
  /// reallocated or copied.
  fn peel_vec(s: Vec<Self>) -> Vec<Inner>
  where
    Self: Sized,
    Inner: Sized,
  {
    // ManuallyDrop stops the original Vec from freeing the buffer we are
    // about to hand over to the new Vec.
    let mut s = ManuallyDrop::new(s);

    let length = s.len();
    let capacity = s.capacity();
    let ptr = s.as_mut_ptr();

    unsafe {
      // SAFETY:
      // * ptr comes from Vec (and will not be double-dropped)
      // * the two types have the identical representation
      // * the len and capacity fields are valid
      Vec::from_raw_parts(ptr as *mut Inner, length, capacity)
    }
  }

  /// Convert a box to the wrapper type into a box to the inner
  /// type.
  ///
  /// # Panics
  ///
  /// Panics if `*mut Inner` and `*mut Self` differ in size.
  #[inline]
  fn peel_box(s: Box<Self>) -> Box<Inner> {
    // The unsafe contract requires that these two have
    // identical representations, and thus identical pointer metadata.
    // Assert that Self and Inner have the same pointer size,
    // which is the best we can do to assert their metadata is the same type
    // on stable.
    assert!(size_of::<*mut Inner>() == size_of::<*mut Self>());

    unsafe {
      // A pointer cast doesn't work here because rustc can't tell that
      // the vtables match (because of the `?Sized` restriction relaxation).
      // A `transmute` doesn't work because the sizes are unspecified.
      //
      // SAFETY:
      // * The unsafe contract requires that pointers to Inner and Self have
      //   identical representations
      // * Box is guaranteed to have representation identical to a (non-null)
      //   pointer
      // * The pointer comes from a box (and thus satisfies all safety
      //   requirements of Box)
      let wrapper_ptr: *mut Self = Box::into_raw(s);
      let inner_ptr: *mut Inner = transmute!(wrapper_ptr);
      Box::from_raw(inner_ptr)
    }
  }

  /// Convert an [`Rc`] to the wrapper type into an `Rc` to the inner type.
  ///
  /// # Panics
  ///
  /// Panics if `*mut Inner` and `*mut Self` differ in size.
  #[inline]
  fn peel_rc(s: Rc<Self>) -> Rc<Inner> {
    // The unsafe contract requires that these two have
    // identical representations, and thus identical pointer metadata.
    // Assert that Self and Inner have the same pointer size,
    // which is the best we can do to assert their metadata is the same type
    // on stable.
    assert!(size_of::<*mut Inner>() == size_of::<*mut Self>());

    unsafe {
      // A pointer cast doesn't work here because rustc can't tell that
      // the vtables match (because of the `?Sized` restriction relaxation).
      // A `transmute` doesn't work because the layout of Rc is unspecified.
      //
      // SAFETY:
      // * The unsafe contract requires that pointers to Inner and Self have
      //   identical representations, and that the size and alignment of Inner
      //   and Self are the same, which meets the safety requirements of
      //   Rc::from_raw
      let wrapper_ptr: *const Self = Rc::into_raw(s);
      let inner_ptr: *const Inner = transmute!(wrapper_ptr);
      Rc::from_raw(inner_ptr)
    }
  }

  /// Convert an [`Arc`] to the wrapper type into an `Arc` to the inner type.
  ///
  /// # Panics
  ///
  /// Panics if `*mut Inner` and `*mut Self` differ in size.
  #[inline]
  #[cfg(target_has_atomic = "ptr")]
  fn peel_arc(s: Arc<Self>) -> Arc<Inner> {
    // The unsafe contract requires that these two have
    // identical representations, and thus identical pointer metadata.
    // Assert that Self and Inner have the same pointer size,
    // which is the best we can do to assert their metadata is the same type
    // on stable.
    assert!(size_of::<*mut Inner>() == size_of::<*mut Self>());

    unsafe {
      // A pointer cast doesn't work here because rustc can't tell that
      // the vtables match (because of the `?Sized` restriction relaxation).
      // A `transmute` doesn't work because the layout of Arc is unspecified.
      //
      // SAFETY:
      // * The unsafe contract requires that pointers to Inner and Self have
      //   identical representations, and that the size and alignment of Inner
      //   and Self are the same, which meets the safety requirements of
      //   Arc::from_raw
      let wrapper_ptr: *const Self = Arc::into_raw(s);
      let inner_ptr: *const Inner = transmute!(wrapper_ptr);
      Arc::from_raw(inner_ptr)
    }
  }
}
771 | | |
// Blanket impl: every `TransparentWrapper<I>` gets the alloc conversion
// helpers for free. The trait consists entirely of default methods, so the
// body is intentionally empty.
impl<I: ?Sized, T: ?Sized + TransparentWrapper<I>> TransparentWrapperAlloc<I>
  for T
{
}
776 | | |
/// As `Box<[u8]>`, but remembers the original alignment.
pub struct BoxBytes {
  // SAFETY: `ptr` is aligned to `layout.align()`, points to
  // `layout.size()` initialized bytes, and, if `layout.size() > 0`,
  // is owned and was allocated with the global allocator with `layout`.
  ptr: NonNull<u8>,
  // The layout the allocation was made with; consulted on drop and when
  // converting back into a typed `Box`.
  layout: Layout,
}
785 | | |
// SAFETY: `BoxBytes` is semantically a `Box<[u8], Global>` with a different allocation alignment,
// `Box<[u8], Global>` is `Send + Sync`, and changing the allocation alignment has no thread-safety implications.
unsafe impl Send for BoxBytes {}
// SAFETY: See the `Send` impl above — the same reasoning applies to shared
// references (`&BoxBytes`).
unsafe impl Sync for BoxBytes {}
791 | | |
792 | | impl Deref for BoxBytes { |
793 | | type Target = [u8]; |
794 | | |
795 | 0 | fn deref(&self) -> &Self::Target { |
796 | | // SAFETY: See type invariant. |
797 | | unsafe { |
798 | 0 | core::slice::from_raw_parts(self.ptr.as_ptr(), self.layout.size()) |
799 | | } |
800 | 0 | } |
801 | | } |
802 | | |
803 | | impl DerefMut for BoxBytes { |
804 | 0 | fn deref_mut(&mut self) -> &mut Self::Target { |
805 | | // SAFETY: See type invariant. |
806 | 0 | unsafe { |
807 | 0 | core::slice::from_raw_parts_mut(self.ptr.as_ptr(), self.layout.size()) |
808 | 0 | } |
809 | 0 | } |
810 | | } |
811 | | |
812 | | impl Drop for BoxBytes { |
813 | 0 | fn drop(&mut self) { |
814 | 0 | if self.layout.size() != 0 { |
815 | 0 | // SAFETY: See type invariant: if `self.layout.size() != 0`, then |
816 | 0 | // `self.ptr` is owned and was allocated with `self.layout`. |
817 | 0 | unsafe { alloc::alloc::dealloc(self.ptr.as_ptr(), self.layout) }; |
818 | 0 | } |
819 | 0 | } |
820 | | } |
821 | | |
822 | | impl<T: ?Sized + sealed::BoxBytesOf> From<Box<T>> for BoxBytes { |
823 | 0 | fn from(value: Box<T>) -> Self { |
824 | 0 | value.box_bytes_of() |
825 | 0 | } |
826 | | } |
827 | | |
mod sealed {
  use crate::{BoxBytes, PodCastError};
  use alloc::boxed::Box;

  /// Types whose boxed form can be re-interpreted as a raw byte allocation.
  ///
  /// Sealed: only implemented inside this crate, so the public
  /// `box_bytes_of` free function controls exactly which `T` are accepted.
  pub trait BoxBytesOf {
    fn box_bytes_of(self: Box<Self>) -> BoxBytes;
  }

  /// Types whose boxed form can be recovered from a raw byte allocation.
  ///
  /// Sealed for the same reason as `BoxBytesOf`; on failure the `BoxBytes`
  /// is handed back to the caller alongside the error.
  pub trait FromBoxBytes {
    fn try_from_box_bytes(
      bytes: BoxBytes,
    ) -> Result<Box<Self>, (PodCastError, BoxBytes)>;
  }
}
842 | | |
843 | | impl<T: NoUninit> sealed::BoxBytesOf for T { |
844 | 0 | fn box_bytes_of(self: Box<Self>) -> BoxBytes { |
845 | 0 | let layout = Layout::new::<T>(); |
846 | 0 | let ptr = Box::into_raw(self) as *mut u8; |
847 | | // SAFETY: Box::into_raw() returns a non-null pointer. |
848 | 0 | let ptr = unsafe { NonNull::new_unchecked(ptr) }; |
849 | 0 | BoxBytes { ptr, layout } |
850 | 0 | } |
851 | | } |
852 | | |
853 | | impl<T: NoUninit> sealed::BoxBytesOf for [T] { |
854 | 0 | fn box_bytes_of(self: Box<Self>) -> BoxBytes { |
855 | 0 | let layout = Layout::for_value::<[T]>(&self); |
856 | 0 | let ptr = Box::into_raw(self) as *mut u8; |
857 | | // SAFETY: Box::into_raw() returns a non-null pointer. |
858 | 0 | let ptr = unsafe { NonNull::new_unchecked(ptr) }; |
859 | 0 | BoxBytes { ptr, layout } |
860 | 0 | } |
861 | | } |
862 | | |
863 | | impl sealed::BoxBytesOf for str { |
864 | 0 | fn box_bytes_of(self: Box<Self>) -> BoxBytes { |
865 | 0 | self.into_boxed_bytes().box_bytes_of() |
866 | 0 | } |
867 | | } |
868 | | |
869 | | impl<T: AnyBitPattern> sealed::FromBoxBytes for T { |
870 | 0 | fn try_from_box_bytes( |
871 | 0 | bytes: BoxBytes, |
872 | 0 | ) -> Result<Box<Self>, (PodCastError, BoxBytes)> { |
873 | 0 | let layout = Layout::new::<T>(); |
874 | 0 | if bytes.layout.align() != layout.align() { |
875 | 0 | Err((PodCastError::AlignmentMismatch, bytes)) |
876 | 0 | } else if bytes.layout.size() != layout.size() { |
877 | 0 | Err((PodCastError::SizeMismatch, bytes)) |
878 | | } else { |
879 | 0 | let (ptr, _) = bytes.into_raw_parts(); |
880 | | // SAFETY: See BoxBytes type invariant. |
881 | 0 | Ok(unsafe { Box::from_raw(ptr.as_ptr() as *mut T) }) |
882 | | } |
883 | 0 | } |
884 | | } |
885 | | |
886 | | impl<T: AnyBitPattern> sealed::FromBoxBytes for [T] { |
887 | 0 | fn try_from_box_bytes( |
888 | 0 | bytes: BoxBytes, |
889 | 0 | ) -> Result<Box<Self>, (PodCastError, BoxBytes)> { |
890 | 0 | let single_layout = Layout::new::<T>(); |
891 | 0 | if bytes.layout.align() != single_layout.align() { |
892 | 0 | Err((PodCastError::AlignmentMismatch, bytes)) |
893 | 0 | } else if (single_layout.size() == 0 && bytes.layout.size() != 0) |
894 | 0 | || (single_layout.size() != 0 |
895 | 0 | && bytes.layout.size() % single_layout.size() != 0) |
896 | | { |
897 | 0 | Err((PodCastError::OutputSliceWouldHaveSlop, bytes)) |
898 | | } else { |
899 | 0 | let (ptr, layout) = bytes.into_raw_parts(); |
900 | 0 | let length = if single_layout.size() != 0 { |
901 | 0 | layout.size() / single_layout.size() |
902 | | } else { |
903 | 0 | 0 |
904 | | }; |
905 | 0 | let ptr = |
906 | 0 | core::ptr::slice_from_raw_parts_mut(ptr.as_ptr() as *mut T, length); |
907 | | // SAFETY: See BoxBytes type invariant. |
908 | 0 | Ok(unsafe { Box::from_raw(ptr) }) |
909 | | } |
910 | 0 | } |
911 | | } |
912 | | |
913 | | /// Re-interprets `Box<T>` as `BoxBytes`. |
914 | | /// |
915 | | /// `T` must be either [`Sized`] and [`NoUninit`], |
916 | | /// [`[U]`](slice) where `U: NoUninit`, or [`str`]. |
917 | | #[inline] |
918 | 0 | pub fn box_bytes_of<T: sealed::BoxBytesOf + ?Sized>(input: Box<T>) -> BoxBytes { |
919 | 0 | input.box_bytes_of() |
920 | 0 | } |
921 | | |
922 | | /// Re-interprets `BoxBytes` as `Box<T>`. |
923 | | /// |
924 | | /// `T` must be either [`Sized`] + [`AnyBitPattern`], or |
925 | | /// [`[U]`](slice) where `U: AnyBitPattern`. |
926 | | /// |
927 | | /// ## Panics |
928 | | /// |
929 | | /// This is [`try_from_box_bytes`] but will panic on error and the input will be |
930 | | /// dropped. |
931 | | #[inline] |
932 | | #[cfg_attr(feature = "track_caller", track_caller)] |
933 | 0 | pub fn from_box_bytes<T: sealed::FromBoxBytes + ?Sized>( |
934 | 0 | input: BoxBytes, |
935 | 0 | ) -> Box<T> { |
936 | 0 | try_from_box_bytes(input).map_err(|(error, _)| error).unwrap() |
937 | 0 | } |
938 | | |
939 | | /// Re-interprets `BoxBytes` as `Box<T>`. |
940 | | /// |
941 | | /// `T` must be either [`Sized`] + [`AnyBitPattern`], or |
942 | | /// [`[U]`](slice) where `U: AnyBitPattern`. |
943 | | /// |
944 | | /// Returns `Err`: |
945 | | /// * If the input isn't aligned for `T`. |
946 | | /// * If `T: Sized` and the input's length isn't exactly the size of `T`. |
947 | | /// * If `T = [U]` and the input's length isn't exactly a multiple of the size |
948 | | /// of `U`. |
949 | | #[inline] |
950 | 0 | pub fn try_from_box_bytes<T: sealed::FromBoxBytes + ?Sized>( |
951 | 0 | input: BoxBytes, |
952 | 0 | ) -> Result<Box<T>, (PodCastError, BoxBytes)> { |
953 | 0 | T::try_from_box_bytes(input) |
954 | 0 | } |
955 | | |
956 | | impl BoxBytes { |
957 | | /// Constructs a `BoxBytes` from its raw parts. |
958 | | /// |
959 | | /// # Safety |
960 | | /// |
961 | | /// The pointer is owned, has been allocated with the provided layout, and |
962 | | /// points to `layout.size()` initialized bytes. |
963 | 0 | pub unsafe fn from_raw_parts(ptr: NonNull<u8>, layout: Layout) -> Self { |
964 | 0 | BoxBytes { ptr, layout } |
965 | 0 | } |
966 | | |
967 | | /// Deconstructs a `BoxBytes` into its raw parts. |
968 | | /// |
969 | | /// The pointer is owned, has been allocated with the provided layout, and |
970 | | /// points to `layout.size()` initialized bytes. |
971 | 0 | pub fn into_raw_parts(self) -> (NonNull<u8>, Layout) { |
972 | 0 | let me = ManuallyDrop::new(self); |
973 | 0 | (me.ptr, me.layout) |
974 | 0 | } |
975 | | |
976 | | /// Returns the original layout. |
977 | 0 | pub fn layout(&self) -> Layout { |
978 | 0 | self.layout |
979 | 0 | } |
980 | | } |