/rust/registry/src/index.crates.io-1949cf8c6b5b557f/bytemuck-1.24.0/src/internal.rs
Line | Count | Source |
1 | | //! Internal implementation of casting functions not bound by marker traits |
2 | | //! and therefore marked as unsafe. This is used so that we don't need to |
3 | | //! duplicate the business logic contained in these functions between the |
4 | | //! versions exported in the crate root, `checked`, and `relaxed` modules. |
5 | | #![allow(unused_unsafe)] |
6 | | |
7 | | use crate::PodCastError; |
8 | | use core::{marker::*, mem::*}; |
9 | | |
10 | | /* |
11 | | |
12 | | Note(Lokathor): We've switched all of the `unwrap` to `match` because there is |
13 | | apparently a bug: https://github.com/rust-lang/rust/issues/68667 |
14 | | and it doesn't seem to show up in simple godbolt examples but has been reported |
15 | | as having an impact when there's a cast mixed in with other more complicated |
16 | | code around it. Rustc/LLVM ends up missing that the `Err` can't ever happen for |
17 | | particular type combinations, and then it doesn't fully eliminate the panic |
18 | | possibility code branch. |
19 | | |
20 | | */ |
21 | | |
22 | | /// Immediately panics. |
23 | | #[cfg(not(target_arch = "spirv"))] |
24 | | #[cold] |
25 | | #[inline(never)] |
26 | | #[cfg_attr(feature = "track_caller", track_caller)] |
27 | 0 | pub(crate) fn something_went_wrong<D: core::fmt::Display>( |
28 | 0 | _src: &str, _err: D, |
29 | 0 | ) -> ! { |
30 | | // Note(Lokathor): Keeping the panic here makes the panic _formatting_ go |
31 | | // here too, which helps assembly readability and also helps keep down |
32 | | // the inline pressure. |
33 | 0 | panic!("{src}>{err}", src = _src, err = _err); |
Unexecuted instantiations: bytemuck::internal::something_went_wrong::<bytemuck::PodCastError>, ::<_>
34 | | } |
35 | | |
36 | | /// Immediately panics. |
37 | | #[cfg(target_arch = "spirv")] |
38 | | #[cold] |
39 | | #[inline(never)] |
40 | | pub(crate) fn something_went_wrong<D>(_src: &str, _err: D) -> ! { |
41 | | // Note: On the spirv targets from [rust-gpu](https://github.com/EmbarkStudios/rust-gpu) |
42 | | // panic formatting cannot be used. We just give a generic error message. |
43 | | // The chance that the panicking version of these functions will ever get |
44 | | // called on spir-v targets with invalid inputs is small, but giving a |
45 | | // simple error message is better than no error message at all. |
46 | | panic!("Called a panicing helper from bytemuck which paniced"); |
47 | | } |
48 | | |
49 | | /// Re-interprets `&T` as `&[u8]`. |
50 | | /// |
51 | | /// Any ZST becomes an empty slice, and in that case the pointer value of that |
52 | | /// empty slice might not match the pointer value of the input reference. |
53 | | #[inline(always)] |
54 | 0 | pub(crate) unsafe fn bytes_of<T: Copy>(t: &T) -> &[u8] { |
55 | 0 | match try_cast_slice::<T, u8>(core::slice::from_ref(t)) { |
56 | 0 | Ok(s) => s, |
57 | 0 | Err(_) => unreachable!(), |
58 | | } |
59 | 0 | } |
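A minimal usage sketch of this path through the crate-root wrapper (assuming the public `bytemuck::bytes_of`, which per the module doc shares this logic); `u32` is `Pod`, so the marker-trait bound is satisfied:

  fn main() {
    let value: u32 = 0x0102_0304;
    // One u32 viewed as its four underlying bytes, in native byte order.
    let bytes: &[u8] = bytemuck::bytes_of(&value);
    assert_eq!(bytes, &value.to_ne_bytes());
  }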
60 | | |
61 | | /// Re-interprets `&mut T` as `&mut [u8]`. |
62 | | /// |
63 | | /// Any ZST becomes an empty slice, and in that case the pointer value of that |
64 | | /// empty slice might not match the pointer value of the input reference. |
65 | | #[inline] |
66 | 0 | pub(crate) unsafe fn bytes_of_mut<T: Copy>(t: &mut T) -> &mut [u8] { |
67 | 0 | match try_cast_slice_mut::<T, u8>(core::slice::from_mut(t)) { |
68 | 0 | Ok(s) => s, |
69 | 0 | Err(_) => unreachable!(), |
70 | | } |
71 | 0 | } |
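A corresponding sketch for the mutable variant (again via the assumed crate-root wrapper `bytemuck::bytes_of_mut`): writes through the byte view are visible in the original value.

  fn main() {
    let mut value: u32 = 0;
    // Mutable byte view over `value`; the borrow ends before the assert.
    let bytes: &mut [u8] = bytemuck::bytes_of_mut(&mut value);
    bytes.copy_from_slice(&0x0A0B_0C0Du32.to_ne_bytes());
    assert_eq!(value, 0x0A0B_0C0D);
  }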
72 | | |
73 | | /// Re-interprets `&[u8]` as `&T`. |
74 | | /// |
75 | | /// ## Panics |
76 | | /// |
77 | | /// This is [`try_from_bytes`] but will panic on error. |
78 | | #[inline] |
79 | | #[cfg_attr(feature = "track_caller", track_caller)] |
80 | 0 | pub(crate) unsafe fn from_bytes<T: Copy>(s: &[u8]) -> &T { |
81 | 0 | match try_from_bytes(s) { |
82 | 0 | Ok(t) => t, |
83 | 0 | Err(e) => something_went_wrong("from_bytes", e), |
84 | | } |
85 | 0 | } |
86 | | |
87 | | /// Re-interprets `&mut [u8]` as `&mut T`. |
88 | | /// |
89 | | /// ## Panics |
90 | | /// |
91 | | /// This is [`try_from_bytes_mut`] but will panic on error. |
92 | | #[inline] |
93 | | #[cfg_attr(feature = "track_caller", track_caller)] |
94 | 0 | pub(crate) unsafe fn from_bytes_mut<T: Copy>(s: &mut [u8]) -> &mut T { |
95 | 0 | match try_from_bytes_mut(s) { |
96 | 0 | Ok(t) => t, |
97 | 0 | Err(e) => something_went_wrong("from_bytes_mut", e), |
98 | | } |
99 | 0 | } |
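A round-trip sketch through the panicking wrappers (assumed to be the crate-root `bytemuck::from_bytes` / `from_bytes_mut`). Starting from a real `u32` keeps the byte slice properly aligned, so the size and alignment checks cannot fail here:

  fn main() {
    let value: u32 = 0xDEAD_BEEF;
    // `bytes_of` borrows `value` directly, so the slice is 4-byte aligned and 4 bytes long.
    let bytes: &[u8] = bytemuck::bytes_of(&value);
    let back: &u32 = bytemuck::from_bytes(bytes);
    assert_eq!(*back, value);
  }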
100 | | |
101 | | /// Reads from the bytes as if they were a `T`. |
102 | | /// |
103 | | /// ## Failure |
104 | | /// * If the `bytes` length is not equal to `size_of::<T>()`. |
105 | | #[inline] |
106 | 32.6M | pub(crate) unsafe fn try_pod_read_unaligned<T: Copy>( |
107 | 32.6M | bytes: &[u8], |
108 | 32.6M | ) -> Result<T, PodCastError> { |
109 | 32.6M | if bytes.len() != size_of::<T>() { |
110 | 0 | Err(PodCastError::SizeMismatch) |
111 | | } else { |
112 | 32.6M | Ok(unsafe { (bytes.as_ptr() as *const T).read_unaligned() }) |
113 | | } |
114 | 32.6M | } |
Instantiations: bytemuck::internal::try_pod_read_unaligned::<[f32; 4]> (32.6M); unexecuted: ::<[f32; 3]>, ::<_>
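A sketch of both outcomes via the assumed public wrapper `bytemuck::try_pod_read_unaligned`: the only failure mode is a length that differs from `size_of::<T>()`; alignment never matters because the bytes are copied out with `read_unaligned`.

  use bytemuck::PodCastError;

  fn main() {
    let data: [u8; 5] = [0xFF, 1, 0, 0, 0];
    // Deliberately misaligned 4-byte window starting at offset 1.
    let v: u32 = bytemuck::try_pod_read_unaligned(&data[1..5]).unwrap();
    assert_eq!(v, u32::from_ne_bytes([1, 0, 0, 0]));
    // Three bytes can never be a u32, so this is a SizeMismatch.
    assert_eq!(
      bytemuck::try_pod_read_unaligned::<u32>(&data[1..4]),
      Err(PodCastError::SizeMismatch)
    );
  }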
115 | | |
116 | | /// Reads the slice into a `T` value. |
117 | | /// |
118 | | /// ## Panics |
119 | | /// * This is like `try_pod_read_unaligned` but will panic on failure. |
120 | | #[inline] |
121 | | #[cfg_attr(feature = "track_caller", track_caller)] |
122 | 32.6M | pub(crate) unsafe fn pod_read_unaligned<T: Copy>(bytes: &[u8]) -> T { |
123 | 32.6M | match try_pod_read_unaligned(bytes) { |
124 | 32.6M | Ok(t) => t, |
125 | 0 | Err(e) => something_went_wrong("pod_read_unaligned", e), |
126 | | } |
127 | 32.6M | } |
Instantiations: bytemuck::internal::pod_read_unaligned::<[f32; 4]> (32.6M); unexecuted: ::<[f32; 3]>, ::<_>
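The panicking form, via the assumed `bytemuck::pod_read_unaligned` wrapper, for the common case of pulling values out of a packed byte buffer at arbitrary offsets:

  fn main() {
    // Two u16 values packed back to back in native byte order.
    let buf: [u8; 4] = [1, 0, 2, 0];
    let a: u16 = bytemuck::pod_read_unaligned(&buf[0..2]);
    let b: u16 = bytemuck::pod_read_unaligned(&buf[2..4]);
    assert_eq!(a, u16::from_ne_bytes([1, 0]));
    assert_eq!(b, u16::from_ne_bytes([2, 0]));
  }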
128 | | |
129 | | /// Checks if `ptr` is aligned to an `align` memory boundary. |
130 | | /// |
131 | | /// ## Panics |
132 | | /// * If `align` is not a power of two. This includes when `align` is zero. |
133 | | #[inline] |
134 | | #[cfg_attr(feature = "track_caller", track_caller)] |
135 | 0 | pub(crate) fn is_aligned_to(ptr: *const (), align: usize) -> bool { |
136 | | #[cfg(feature = "align_offset")] |
137 | | { |
138 | | // This is in a way better than `ptr as usize % align == 0`, |
139 | | // because casting a pointer to an integer has the side effect that it |
140 | | // exposes the pointer's provenance, which may theoretically inhibit |
141 | | // some compiler optimizations. |
142 | | ptr.align_offset(align) == 0 |
143 | | } |
144 | | #[cfg(not(feature = "align_offset"))] |
145 | | { |
146 | 0 | ((ptr as usize) % align) == 0 |
147 | | } |
148 | 0 | } |
Unexecuted instantiations: bytemuck::internal::is_aligned_to (×3)
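A standalone sketch of the same check (a hypothetical free function, not part of the public API), usable for experimenting with the two code paths above; `align_offset` panics for a non-power-of-two `align`, matching the documented panic condition:

  fn is_aligned_to(ptr: *const (), align: usize) -> bool {
    // Zero bytes of adjustment needed means the pointer is already aligned.
    ptr.align_offset(align) == 0
  }

  fn main() {
    let x = 0u64;
    let p = &x as *const u64 as *const ();
    assert!(is_aligned_to(p, 1));
    assert!(is_aligned_to(p, core::mem::align_of::<u64>()));
  }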
149 | | |
150 | | /// Re-interprets `&[u8]` as `&T`. |
151 | | /// |
152 | | /// ## Failure |
153 | | /// |
154 | | /// * If the slice isn't aligned for the new type |
155 | | /// * If the slice's length isn’t exactly the size of the new type |
156 | | #[inline] |
157 | 0 | pub(crate) unsafe fn try_from_bytes<T: Copy>( |
158 | 0 | s: &[u8], |
159 | 0 | ) -> Result<&T, PodCastError> { |
160 | 0 | if s.len() != size_of::<T>() { |
161 | 0 | Err(PodCastError::SizeMismatch) |
162 | 0 | } else if !is_aligned_to(s.as_ptr() as *const (), align_of::<T>()) { |
163 | 0 | Err(PodCastError::TargetAlignmentGreaterAndInputNotAligned) |
164 | | } else { |
165 | 0 | Ok(unsafe { &*(s.as_ptr() as *const T) }) |
166 | | } |
167 | 0 | } |
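A sketch of the deterministic failure case via the assumed `bytemuck::try_from_bytes` wrapper (the alignment failure depends on where the buffer happens to land in memory, so it is not shown):

  use bytemuck::PodCastError;

  fn main() {
    // Three bytes can never view a u32: the length check fails before alignment matters.
    assert_eq!(
      bytemuck::try_from_bytes::<u32>(&[0u8; 3]),
      Err(PodCastError::SizeMismatch)
    );
  }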
168 | | |
169 | | /// Re-interprets `&mut [u8]` as `&mut T`. |
170 | | /// |
171 | | /// ## Failure |
172 | | /// |
173 | | /// * If the slice isn't aligned for the new type |
174 | | /// * If the slice's length isn’t exactly the size of the new type |
175 | | #[inline] |
176 | 0 | pub(crate) unsafe fn try_from_bytes_mut<T: Copy>( |
177 | 0 | s: &mut [u8], |
178 | 0 | ) -> Result<&mut T, PodCastError> { |
179 | 0 | if s.len() != size_of::<T>() { |
180 | 0 | Err(PodCastError::SizeMismatch) |
181 | 0 | } else if !is_aligned_to(s.as_ptr() as *const (), align_of::<T>()) { |
182 | 0 | Err(PodCastError::TargetAlignmentGreaterAndInputNotAligned) |
183 | | } else { |
184 | 0 | Ok(unsafe { &mut *(s.as_mut_ptr() as *mut T) }) |
185 | | } |
186 | 0 | } |
187 | | |
188 | | /// Cast `A` into `B` |
189 | | /// |
190 | | /// ## Panics |
191 | | /// |
192 | | /// * This is like [`try_cast`](try_cast), but will panic on a size mismatch. |
193 | | #[inline] |
194 | | #[cfg_attr(feature = "track_caller", track_caller)] |
195 | 5.53k | pub(crate) unsafe fn cast<A: Copy, B: Copy>(a: A) -> B { |
196 | 5.53k | if size_of::<A>() == size_of::<B>() { |
197 | 5.53k | unsafe { transmute!(a) } |
198 | | } else { |
199 | 0 | something_went_wrong("cast", PodCastError::SizeMismatch) |
200 | | } |
201 | 5.53k | } |
Instantiations: bytemuck::internal::cast::<[u8; 4], [u8; 4]> (5.53k); unexecuted: ::<[u8; 3], [u8; 4]>, ::<_, _>
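A value-cast sketch via the assumed crate-root `bytemuck::cast`: same-size types always succeed, and the result is a plain bit-for-bit reinterpretation.

  fn main() {
    // f32 -> u32 of the same size: equivalent to f32::to_bits.
    let bits: u32 = bytemuck::cast(1.0f32);
    assert_eq!(bits, 1.0f32.to_bits());
    // Arrays work too, as long as the total sizes match.
    let word: u32 = bytemuck::cast([0u8; 4]);
    assert_eq!(word, 0);
  }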
202 | | |
203 | | /// Cast `&mut A` into `&mut B`. |
204 | | /// |
205 | | /// ## Panics |
206 | | /// |
207 | | /// This is [`try_cast_mut`] but will panic on error. |
208 | | #[inline] |
209 | | #[cfg_attr(feature = "track_caller", track_caller)] |
210 | 0 | pub(crate) unsafe fn cast_mut<A: Copy, B: Copy>(a: &mut A) -> &mut B { |
211 | 0 | if size_of::<A>() == size_of::<B>() && align_of::<A>() >= align_of::<B>() { |
212 | | // Plz mr compiler, just notice that we can't ever hit Err in this case. |
213 | 0 | match try_cast_mut(a) { |
214 | 0 | Ok(b) => b, |
215 | 0 | Err(_) => unreachable!(), |
216 | | } |
217 | | } else { |
218 | 0 | match try_cast_mut(a) { |
219 | 0 | Ok(b) => b, |
220 | 0 | Err(e) => something_went_wrong("cast_mut", e), |
221 | | } |
222 | | } |
223 | 0 | } |
224 | | |
225 | | /// Cast `&A` into `&B`. |
226 | | /// |
227 | | /// ## Panics |
228 | | /// |
229 | | /// This is [`try_cast_ref`] but will panic on error. |
230 | | #[inline] |
231 | | #[cfg_attr(feature = "track_caller", track_caller)] |
232 | 0 | pub(crate) unsafe fn cast_ref<A: Copy, B: Copy>(a: &A) -> &B { |
233 | 0 | if size_of::<A>() == size_of::<B>() && align_of::<A>() >= align_of::<B>() { |
234 | | // Plz mr compiler, just notice that we can't ever hit Err in this case. |
235 | 0 | match try_cast_ref(a) { |
236 | 0 | Ok(b) => b, |
237 | 0 | Err(_) => unreachable!(), |
238 | | } |
239 | | } else { |
240 | 0 | match try_cast_ref(a) { |
241 | 0 | Ok(b) => b, |
242 | 0 | Err(e) => something_went_wrong("cast_ref", e), |
243 | | } |
244 | | } |
245 | 0 | } |
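A sketch covering both reference casts via the assumed `bytemuck::cast_ref` / `cast_mut` wrappers. Casting toward a type with equal or weaker alignment (u32 into byte or half-word arrays here) can never hit the error branch:

  fn main() {
    let x: u32 = 0x0102_0304;
    let arr: &[u8; 4] = bytemuck::cast_ref(&x);
    assert_eq!(arr, &x.to_ne_bytes());

    let mut y: u32 = 0;
    let halves: &mut [u16; 2] = bytemuck::cast_mut(&mut y);
    halves[0] = 1;
    halves[1] = 1;
    // Each half is 1, so the whole word is 0x0001_0001 on either endianness.
    assert_eq!(y, 0x0001_0001);
  }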
246 | | |
247 | | /// Cast `&[A]` into `&[B]`. |
248 | | /// |
249 | | /// ## Panics |
250 | | /// |
251 | | /// This is [`try_cast_slice`] but will panic on error. |
252 | | #[inline] |
253 | | #[cfg_attr(feature = "track_caller", track_caller)] |
254 | 99.3M | pub(crate) unsafe fn cast_slice<A: Copy, B: Copy>(a: &[A]) -> &[B] { |
255 | 99.3M | match try_cast_slice(a) { |
256 | 99.3M | Ok(b) => b, |
257 | 0 | Err(e) => something_went_wrong("cast_slice", e), |
258 | | } |
259 | 99.3M | } |
Instantiations: bytemuck::internal::cast_slice::<f32, u8> (99.3M), ::<u8, u8> (1.73k), ::<u16, u8> (28), ::<u8, [u8; 4]> (24); unexecuted: ::<i8, u8>, ::<i16, u8>, ::<i32, u8>, ::<i64, u8>, ::<u32, u8>, ::<u64, u8>, ::<f64, u8>, ::<_, _>
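A slice-cast sketch via the assumed `bytemuck::cast_slice` wrapper, matching the hot `<f32, u8>` instantiation above: the pointer is reused and only the length is rescaled.

  fn main() {
    let floats: [f32; 2] = [1.0, 2.0];
    let bytes: &[u8] = bytemuck::cast_slice(&floats);
    // Same starting address, 4x as many elements.
    assert_eq!(bytes.as_ptr() as usize, floats.as_ptr() as usize);
    assert_eq!(bytes.len(), floats.len() * core::mem::size_of::<f32>());
  }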
260 | | |
261 | | /// Cast `&mut [A]` into `&mut [B]`. |
262 | | /// |
263 | | /// ## Panics |
264 | | /// |
265 | | /// This is [`try_cast_slice_mut`] but will panic on error. |
266 | | #[inline] |
267 | | #[cfg_attr(feature = "track_caller", track_caller)] |
268 | 33.3k | pub(crate) unsafe fn cast_slice_mut<A: Copy, B: Copy>(a: &mut [A]) -> &mut [B] { |
269 | 33.3k | match try_cast_slice_mut(a) { |
270 | 33.3k | Ok(b) => b, |
271 | 0 | Err(e) => something_went_wrong("cast_slice_mut", e), |
272 | | } |
273 | 33.3k | } |
Instantiations: bytemuck::internal::cast_slice_mut::<u8, u8> (27.1k), ::<u16, u8> (3.25k), ::<f32, u8> (2.90k), ::<u8, [u8; 4]> (13), ::<u8, [u8; 3]> (11); unexecuted: ::<_, _>
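The mutable counterpart via the assumed `bytemuck::cast_slice_mut` wrapper; writes through the byte view land in the original `u16` buffer:

  fn main() {
    let mut halves = [0u16; 2];
    let bytes: &mut [u8] = bytemuck::cast_slice_mut(&mut halves);
    bytes.fill(0x01);
    // Every byte is 0x01, so each u16 is 0x0101 regardless of endianness.
    assert_eq!(halves, [0x0101u16; 2]);
  }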
274 | | |
275 | | /// Try to cast `A` into `B`. |
276 | | /// |
277 | | /// Note that for this particular type of cast, alignment isn't a factor. The |
278 | | /// input value is semantically copied into the function and then returned to a |
279 | | /// new memory location which will have whatever the required alignment of the |
280 | | /// output type is. |
281 | | /// |
282 | | /// ## Failure |
283 | | /// |
284 | | /// * If the types don't have the same size this fails. |
285 | | #[inline] |
286 | 0 | pub(crate) unsafe fn try_cast<A: Copy, B: Copy>( |
287 | 0 | a: A, |
288 | 0 | ) -> Result<B, PodCastError> { |
289 | 0 | if size_of::<A>() == size_of::<B>() { |
290 | 0 | Ok(unsafe { transmute!(a) }) |
291 | | } else { |
292 | 0 | Err(PodCastError::SizeMismatch) |
293 | | } |
294 | 0 | } |
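A sketch of both branches via the assumed `bytemuck::try_cast` wrapper; only the size comparison matters here:

  use bytemuck::PodCastError;

  fn main() {
    // Same size: succeeds.
    let ok: [u8; 4] = bytemuck::try_cast(0u32).unwrap();
    assert_eq!(ok, [0u8; 4]);
    // Different sizes: always SizeMismatch.
    assert_eq!(
      bytemuck::try_cast::<u32, u16>(1),
      Err(PodCastError::SizeMismatch)
    );
  }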
295 | | |
296 | | /// Try to convert a `&A` into `&B`. |
297 | | /// |
298 | | /// ## Failure |
299 | | /// |
300 | | /// * If the reference isn't aligned for the new type |
301 | | /// * If the source type and target type aren't the same size. |
302 | | #[inline] |
303 | 0 | pub(crate) unsafe fn try_cast_ref<A: Copy, B: Copy>( |
304 | 0 | a: &A, |
305 | 0 | ) -> Result<&B, PodCastError> { |
306 | | // Note(Lokathor): everything with `align_of` and `size_of` will optimize away |
307 | | // after monomorphization. |
308 | 0 | if align_of::<B>() > align_of::<A>() |
309 | 0 | && !is_aligned_to(a as *const A as *const (), align_of::<B>()) |
310 | | { |
311 | 0 | Err(PodCastError::TargetAlignmentGreaterAndInputNotAligned) |
312 | 0 | } else if size_of::<B>() == size_of::<A>() { |
313 | 0 | Ok(unsafe { &*(a as *const A as *const B) }) |
314 | | } else { |
315 | 0 | Err(PodCastError::SizeMismatch) |
316 | | } |
317 | 0 | } |
318 | | |
319 | | /// Try to convert a `&mut A` into `&mut B`. |
320 | | /// |
321 | | /// As [`try_cast_ref`], but `mut`. |
322 | | #[inline] |
323 | 0 | pub(crate) unsafe fn try_cast_mut<A: Copy, B: Copy>( |
324 | 0 | a: &mut A, |
325 | 0 | ) -> Result<&mut B, PodCastError> { |
326 | | // Note(Lokathor): everything with `align_of` and `size_of` will optimize away |
327 | | // after monomorphization. |
328 | 0 | if align_of::<B>() > align_of::<A>() |
329 | 0 | && !is_aligned_to(a as *const A as *const (), align_of::<B>()) |
330 | | { |
331 | 0 | Err(PodCastError::TargetAlignmentGreaterAndInputNotAligned) |
332 | 0 | } else if size_of::<B>() == size_of::<A>() { |
333 | 0 | Ok(unsafe { &mut *(a as *mut A as *mut B) }) |
334 | | } else { |
335 | 0 | Err(PodCastError::SizeMismatch) |
336 | | } |
337 | 0 | } |
338 | | |
339 | | /// Try to convert `&[A]` into `&[B]` (possibly with a change in length). |
340 | | /// |
341 | | /// * `input.as_ptr() as usize == output.as_ptr() as usize` |
342 | | /// * `input.len() * size_of::<A>() == output.len() * size_of::<B>()` |
343 | | /// |
344 | | /// ## Failure |
345 | | /// |
346 | | /// * If the target type has a greater alignment requirement and the input slice |
347 | | /// isn't aligned. |
348 | | /// * If the target element type is a different size from the current element |
349 | | /// type, and the output slice wouldn't be a whole number of elements when |
350 | | /// accounting for the size change (eg: 3 `u16` values is 1.5 `u32` values, so |
351 | | /// that's a failure). |
352 | | #[inline] |
353 | 99.3M | pub(crate) unsafe fn try_cast_slice<A: Copy, B: Copy>( |
354 | 99.3M | a: &[A], |
355 | 99.3M | ) -> Result<&[B], PodCastError> { |
356 | 99.3M | let input_bytes = core::mem::size_of_val::<[A]>(a); |
357 | | // Note(Lokathor): everything with `align_of` and `size_of` will optimize away |
358 | | // after monomorphization. |
359 | 99.3M | if align_of::<B>() > align_of::<A>() |
360 | 0 | && !is_aligned_to(a.as_ptr() as *const (), align_of::<B>()) |
361 | | { |
362 | 0 | Err(PodCastError::TargetAlignmentGreaterAndInputNotAligned) |
363 | 99.3M | } else if size_of::<B>() == size_of::<A>() { |
364 | 1.73k | Ok(unsafe { core::slice::from_raw_parts(a.as_ptr() as *const B, a.len()) }) |
365 | 99.3M | } else if (size_of::<B>() != 0 && input_bytes % size_of::<B>() == 0) |
366 | 0 | || (size_of::<B>() == 0 && input_bytes == 0) |
367 | | { |
368 | 99.3M | let new_len = |
369 | 99.3M | if size_of::<B>() != 0 { input_bytes / size_of::<B>() } else { 0 }; |
370 | 99.3M | Ok(unsafe { core::slice::from_raw_parts(a.as_ptr() as *const B, new_len) }) |
371 | | } else { |
372 | 0 | Err(PodCastError::OutputSliceWouldHaveSlop) |
373 | | } |
374 | 99.3M | } |
Instantiations: bytemuck::internal::try_cast_slice::<f32, u8> (99.3M), ::<u8, u8> (1.73k), ::<u16, u8> (28), ::<u8, [u8; 4]> (24); unexecuted: ::<i8, u8>, ::<i16, u8>, ::<i32, u8>, ::<i64, u8>, ::<u32, u8>, ::<u64, u8>, ::<f64, u8>, ::<u8, f32>, ::<u8, u16>, ::<_, _>
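A sketch of the success and slop branches via the assumed `bytemuck::try_cast_slice` wrapper. The shrinking cast to `[u8; 3]` keeps alignment trivially satisfied, so the remainder check is the only way it can fail:

  use bytemuck::PodCastError;

  fn main() {
    let words = [0u32; 2]; // 8 bytes total
    // 8 bytes is exactly four u16 elements, and u16 alignment is not stricter than u32.
    let halves: &[u16] = bytemuck::try_cast_slice(&words).unwrap();
    assert_eq!(halves.len(), 4);
    // 8 bytes is not a whole number of 3-byte chunks: slop.
    assert_eq!(
      bytemuck::try_cast_slice::<u32, [u8; 3]>(&words),
      Err(PodCastError::OutputSliceWouldHaveSlop)
    );
  }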
375 | | |
376 | | /// Try to convert `&mut [A]` into `&mut [B]` (possibly with a change in |
377 | | /// length). |
378 | | /// |
379 | | /// As [`try_cast_slice`], but `&mut`. |
380 | | #[inline] |
381 | 33.3k | pub(crate) unsafe fn try_cast_slice_mut<A: Copy, B: Copy>( |
382 | 33.3k | a: &mut [A], |
383 | 33.3k | ) -> Result<&mut [B], PodCastError> { |
384 | 33.3k | let input_bytes = core::mem::size_of_val::<[A]>(a); |
385 | | // Note(Lokathor): everything with `align_of` and `size_of` will optimize away |
386 | | // after monomorphization. |
387 | 33.3k | if align_of::<B>() > align_of::<A>() |
388 | 0 | && !is_aligned_to(a.as_ptr() as *const (), align_of::<B>()) |
389 | | { |
390 | 0 | Err(PodCastError::TargetAlignmentGreaterAndInputNotAligned) |
391 | 33.3k | } else if size_of::<B>() == size_of::<A>() { |
392 | 27.1k | Ok(unsafe { |
393 | 27.1k | core::slice::from_raw_parts_mut(a.as_mut_ptr() as *mut B, a.len()) |
394 | 27.1k | }) |
395 | 6.18k | } else if (size_of::<B>() != 0 && input_bytes % size_of::<B>() == 0) |
396 | 0 | || (size_of::<B>() == 0 && input_bytes == 0) |
397 | | { |
398 | 6.18k | let new_len = |
399 | 6.18k | if size_of::<B>() != 0 { input_bytes / size_of::<B>() } else { 0 }; |
400 | 6.18k | Ok(unsafe { |
401 | 6.18k | core::slice::from_raw_parts_mut(a.as_mut_ptr() as *mut B, new_len) |
402 | 6.18k | }) |
403 | | } else { |
404 | 0 | Err(PodCastError::OutputSliceWouldHaveSlop) |
405 | | } |
406 | 33.3k | } |
Instantiations: bytemuck::internal::try_cast_slice_mut::<u8, u8> (27.1k), ::<u16, u8> (3.25k), ::<f32, u8> (2.90k), ::<u8, [u8; 4]> (13), ::<u8, [u8; 3]> (11); unexecuted: ::<_, _>