/rust/registry/src/index.crates.io-1949cf8c6b5b557f/bitvec-1.0.1/src/ptr/range.rs
#![doc = include_str!("../../doc/ptr/range.md")]

use core::{
	fmt::{
		self,
		Debug,
		Formatter,
	},
	hash::{
		Hash,
		Hasher,
	},
	iter::FusedIterator,
	ops::{
		Bound,
		Range,
		RangeBounds,
	},
};

use wyz::comu::{
	Const,
	Mutability,
};

use super::{
	BitPtr,
	BitSpan,
};
use crate::{
	devel as dvl,
	order::{
		BitOrder,
		Lsb0,
	},
	store::BitStore,
};

#[repr(C)]
#[doc = include_str!("../../doc/ptr/BitPtrRange.md")]
pub struct BitPtrRange<M = Const, T = usize, O = Lsb0>
where
	M: Mutability,
	T: BitStore,
	O: BitOrder,
{
	/// The lower, inclusive, bound of the range. The bit to which this points
	/// is considered live.
	pub start: BitPtr<M, T, O>,
	/// The higher, exclusive, bound of the range. The bit to which this points
	/// is considered dead, and the pointer may be one bit beyond the bounds of
	/// an allocation region.
	///
	/// Because Rust and LLVM both define the address of `base + (len * width)`
	/// as being within the provenance of `base`, even though that address may
	/// itself be the base address of another region in a different provenance,
	/// and bit-pointers are always composed of an ordinary memory address and a
	/// bit-counter, the ending bit-pointer is always valid.
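	///
	/// An added illustration (it uses only the constructors from this
	/// module's other doctests): for a whole-element range, `end` addresses
	/// the bit one past the last live bit and is never yielded or contained.
	///
	/// ```rust
	/// use bitvec::prelude::*;
	/// use bitvec::ptr::BitPtrRange;
	///
	/// let data = 0u8;
	/// let bp = BitPtr::<_, _, Lsb0>::from_ref(&data);
	/// let range = BitPtrRange::from_range(bp .. bp.wrapping_add(8));
	///
	/// assert_eq!(range.len(), 8);
	/// // The last live bit is the eighth; `end` points one position past it.
	/// assert_eq!(range.clone().last(), Some(bp.wrapping_add(7)));
	/// assert!(!range.contains(&range.end));
	/// ```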
	pub end: BitPtr<M, T, O>,
}

impl<M, T, O> BitPtrRange<M, T, O>
where
	M: Mutability,
	T: BitStore,
	O: BitOrder,
{
	/// The canonical empty range. All ranges with zero length (equal `.start`
	/// and `.end`) are equally empty.
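	///
	/// A short added sketch; the annotated binding relies on the type's
	/// default parameters (`Const`, `usize`, `Lsb0`):
	///
	/// ```rust
	/// use bitvec::prelude::*;
	/// use bitvec::ptr::BitPtrRange;
	///
	/// let empty: BitPtrRange = BitPtrRange::EMPTY;
	/// assert!(empty.is_empty());
	///
	/// // Any zero-length range is just as empty, even at a real location.
	/// let data = 0u8;
	/// let bp = BitPtr::<_, _, Lsb0>::from_ref(&data);
	/// assert!(BitPtrRange::from_range(bp .. bp).is_empty());
	/// ```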
	pub const EMPTY: Self = Self {
		start: BitPtr::DANGLING,
		end: BitPtr::DANGLING,
	};

	/// Explicitly converts a `Range<BitPtr>` into a `BitPtrRange`.
	#[inline]
	pub fn from_range(Range { start, end }: Range<BitPtr<M, T, O>>) -> Self {
		Self { start, end }
	}

	/// Explicitly converts a `BitPtrRange` into a `Range<BitPtr>`.
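	///
	/// ## Examples
	///
	/// A minimal added round-trip sketch:
	///
	/// ```rust
	/// use bitvec::prelude::*;
	/// use bitvec::ptr::BitPtrRange;
	///
	/// let data = 0u8;
	/// let bp = BitPtr::<_, _, Lsb0>::from_ref(&data);
	///
	/// let range = BitPtrRange::from_range(bp .. bp.wrapping_add(4));
	/// let back = range.into_range();
	/// assert_eq!(back.start, bp);
	/// assert_eq!(back.end, bp.wrapping_add(4));
	/// ```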
	#[inline]
	pub fn into_range(self) -> Range<BitPtr<M, T, O>> {
		let Self { start, end } = self;
		start .. end
	}

	/// Tests if the range is empty (the distance between bit-pointers is `0`).
	///
	/// ## Original
	///
	/// [`Range::is_empty`](core::ops::Range::is_empty)
	///
	/// ## Examples
	///
	/// ```rust
	/// use bitvec::prelude::*;
	/// use bitvec::ptr::BitPtrRange;
	///
	/// let data = 0u8;
	/// let bp = BitPtr::<_, _, Lsb0>::from_ref(&data);
	/// let mut range = BitPtrRange::from_range(bp .. bp.wrapping_add(1));
	///
	/// assert!(!range.is_empty());
	/// assert_ne!(range.start, range.end);
	///
	/// range.next();
	///
	/// assert!(range.is_empty());
	/// assert_eq!(range.start, range.end);
	/// ```
	#[inline]
	pub fn is_empty(&self) -> bool {
		self.start == self.end
	}

	/// Tests if a given bit-pointer is contained within the range.
	///
	/// Bit-pointer ordering is defined when the types have the same exact
	/// `BitOrder` type parameter and the same `BitStore::Mem` associated type
	/// (but are free to differ in alias condition!). Inclusion in a range
	/// occurs when the bit-pointer is not strictly less than the range start,
	/// and is strictly less than the range end.
	///
	/// ## Original
	///
	/// [`Range::contains`](core::ops::Range::contains)
	///
	/// ## Examples
	///
	/// ```rust
	/// use bitvec::prelude::*;
	/// use bitvec::ptr::BitPtrRange;
	/// use core::cell::Cell;
	///
	/// let data = 0u16;
	/// let bp = BitPtr::<_, _, Lsb0>::from_ref(&data);
	///
	/// let mut range = BitPtrRange::from_range(bp .. bp.wrapping_add(16));
	/// range.nth(2);
	/// range.nth_back(2);
	///
	/// assert!(bp < range.start);
	/// assert!(!range.contains(&bp));
	///
	/// let mid = bp.wrapping_add(8);
	///
	/// let same_mem = mid.cast::<Cell<u16>>();
	/// assert!(range.contains(&mid));
	/// assert!(range.contains(&same_mem));
	/// ```
	///
	/// Casting to a different `BitStore` type whose `Mem` parameter differs
	/// from the range always results in a `false` response, even if the pointer
	/// being tested is numerically within the range.
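	///
	/// For example (an added sketch reusing the constructors and the `cast`
	/// call shown above), a `u8` store has a different `Mem` type than the
	/// range's `u16`, so the test fails despite the matching address:
	///
	/// ```rust
	/// use bitvec::prelude::*;
	/// use bitvec::ptr::BitPtrRange;
	///
	/// let data = 0u16;
	/// let bp = BitPtr::<_, _, Lsb0>::from_ref(&data);
	/// let range = BitPtrRange::from_range(bp .. bp.wrapping_add(16));
	///
	/// assert!(range.contains(&bp));
	/// // Same bit-address, but a `u8` memory type: never contained.
	/// assert!(!range.contains(&bp.cast::<u8>()));
	/// ```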
	#[inline]
	pub fn contains<M2, T2>(&self, pointer: &BitPtr<M2, T2, O>) -> bool
	where
		M2: Mutability,
		T2: BitStore,
	{
		dvl::match_store::<T::Mem, T2::Mem>()
			&& self.start <= *pointer
			&& *pointer < self.end
	}

	/// Converts the range into a span descriptor over all live bits.
	///
	/// The produced bit-span does *not* include the bit addressed by `.end`.
	///
	/// ## Safety
	///
	/// The `.start` and `.end` bit-pointers must both be derived from the same
	/// provenance region. `BitSpan` draws its provenance from the `.start`
	/// element pointer, and incorrectly extending it beyond the source
	/// provenance is undefined behavior.
	pub(crate) unsafe fn into_bitspan(self) -> BitSpan<M, T, O> {
		self.start.span_unchecked(self.len())
	}

	/// Snapshots `.start`, then increments it.
	///
	/// This method is only safe to call when the range is non-empty.
	#[inline]
	fn take_front(&mut self) -> BitPtr<M, T, O> {
		let start = self.start;
		self.start = start.wrapping_add(1);
		start
	}

	/// Decrements `.end`, then returns it.
	///
	/// The bit-pointer this method returns always points to a live bit.
	///
	/// This method is only safe to call when the range is non-empty.
	#[inline]
	fn take_back(&mut self) -> BitPtr<M, T, O> {
		let prev = self.end.wrapping_sub(1);
		self.end = prev;
		prev
	}
}

#[cfg(not(tarpaulin_include))]
impl<M, T, O> Clone for BitPtrRange<M, T, O>
where
	M: Mutability,
	T: BitStore,
	O: BitOrder,
{
	#[inline]
	fn clone(&self) -> Self {
		Self { ..*self }
	}
}

impl<M, T, O> Eq for BitPtrRange<M, T, O>
where
	M: Mutability,
	T: BitStore,
	O: BitOrder,
{
}

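/// Two ranges are equal when their `start` and `end` bit-pointers are equal
/// (and their `BitStore::Mem` types match). A brief added illustration:
///
/// ```rust
/// use bitvec::prelude::*;
/// use bitvec::ptr::BitPtrRange;
///
/// let data = 0u16;
/// let bp = BitPtr::<_, _, Lsb0>::from_ref(&data);
/// let a = BitPtrRange::from_range(bp .. bp.wrapping_add(16));
/// let mut b = a.clone();
/// assert_eq!(a, b);
///
/// // Consuming one bit moves `b.start`, so the ranges no longer agree.
/// b.next();
/// assert_ne!(a, b);
/// ```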
impl<M1, M2, O, T1, T2> PartialEq<BitPtrRange<M2, T2, O>>
	for BitPtrRange<M1, T1, O>
where
	M1: Mutability,
	M2: Mutability,
	O: BitOrder,
	T1: BitStore,
	T2: BitStore,
{
	#[inline]
	fn eq(&self, other: &BitPtrRange<M2, T2, O>) -> bool {
		// Ranges whose `BitStore::Mem` types differ are never equal.
		dvl::match_store::<T1::Mem, T2::Mem>()
			&& self.start == other.start
			&& self.end == other.end
	}
}

#[cfg(not(tarpaulin_include))]
impl<M, T, O> Default for BitPtrRange<M, T, O>
where
	M: Mutability,
	T: BitStore,
	O: BitOrder,
{
	#[inline]
	fn default() -> Self {
		Self::EMPTY
	}
}

#[cfg(not(tarpaulin_include))]
impl<M, T, O> From<Range<BitPtr<M, T, O>>> for BitPtrRange<M, T, O>
where
	M: Mutability,
	T: BitStore,
	O: BitOrder,
{
	#[inline]
	fn from(range: Range<BitPtr<M, T, O>>) -> Self {
		Self::from_range(range)
	}
}

#[cfg(not(tarpaulin_include))]
impl<M, T, O> From<BitPtrRange<M, T, O>> for Range<BitPtr<M, T, O>>
where
	M: Mutability,
	T: BitStore,
	O: BitOrder,
{
	#[inline]
	fn from(range: BitPtrRange<M, T, O>) -> Self {
		range.into_range()
	}
}

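/// Formats as the start bit-pointer, `..`, then the end bit-pointer; the
/// alternate flag (`{:#?}`) pads the `..` with spaces. A short added sketch:
///
/// ```rust
/// use bitvec::prelude::*;
/// use bitvec::ptr::BitPtrRange;
///
/// let data = 0u8;
/// let bp = BitPtr::<_, _, Lsb0>::from_ref(&data);
/// let range = BitPtrRange::from_range(bp .. bp.wrapping_add(8));
///
/// assert!(format!("{:?}", range).contains(".."));
/// assert!(format!("{:#?}", range).contains(" .. "));
/// ```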
#[cfg(not(tarpaulin_include))]
impl<M, T, O> Debug for BitPtrRange<M, T, O>
where
	M: Mutability,
	T: BitStore,
	O: BitOrder,
{
	#[inline]
	fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {
		let Range { start, end } = self.clone().into_range();
		Debug::fmt(&start, fmt)?;
		write!(fmt, "{0}..{0}", if fmt.alternate() { " " } else { "" })?;
		Debug::fmt(&end, fmt)
	}
}

#[cfg(not(tarpaulin_include))]
impl<M, T, O> Hash for BitPtrRange<M, T, O>
where
	M: Mutability,
	T: BitStore,
	O: BitOrder,
{
	#[inline]
	fn hash<H>(&self, state: &mut H)
	where H: Hasher {
		self.start.hash(state);
		self.end.hash(state);
	}
}

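/// Iteration yields each live bit-pointer from `start` up to, but excluding,
/// `end`. An added sketch of `next` and `nth` (`nth(n)` discards `n`
/// bit-pointers before yielding, and exhausts the range when `n` is out of
/// bounds):
///
/// ```rust
/// use bitvec::prelude::*;
/// use bitvec::ptr::BitPtrRange;
///
/// let data = 0u8;
/// let bp = BitPtr::<_, _, Lsb0>::from_ref(&data);
/// let mut range = BitPtrRange::from_range(bp .. bp.wrapping_add(8));
///
/// assert_eq!(range.next(), Some(bp));
/// // Skips bits 1 and 2, yields bit 3.
/// assert_eq!(range.nth(2), Some(bp.wrapping_add(3)));
/// // Asking for more bits than remain empties the range.
/// assert_eq!(range.nth(10), None);
/// assert!(range.is_empty());
/// ```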
impl<M, T, O> Iterator for BitPtrRange<M, T, O>
where
	M: Mutability,
	T: BitStore,
	O: BitOrder,
{
	type Item = BitPtr<M, T, O>;

	easy_iter!();

	#[inline]
	fn next(&mut self) -> Option<Self::Item> {
		if Self::is_empty(&*self) {
			return None;
		}
		Some(self.take_front())
	}

	#[inline]
	fn nth(&mut self, n: usize) -> Option<Self::Item> {
		if n >= self.len() {
			self.start = self.end;
			return None;
		}
		self.start = unsafe { self.start.add(n) };
		Some(self.take_front())
	}
}

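/// Iteration from the back yields the last live bit-pointer first. An added
/// sketch of `next_back` and `nth_back` (`nth_back(n)` discards `n`
/// bit-pointers from the back before yielding):
///
/// ```rust
/// use bitvec::prelude::*;
/// use bitvec::ptr::BitPtrRange;
///
/// let data = 0u8;
/// let bp = BitPtr::<_, _, Lsb0>::from_ref(&data);
/// let mut range = BitPtrRange::from_range(bp .. bp.wrapping_add(8));
///
/// assert_eq!(range.next_back(), Some(bp.wrapping_add(7)));
/// // Skips bits 6 and 5, yields bit 4.
/// assert_eq!(range.nth_back(2), Some(bp.wrapping_add(4)));
/// assert_eq!(range.len(), 4);
/// ```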
impl<M, T, O> DoubleEndedIterator for BitPtrRange<M, T, O>
where
	M: Mutability,
	T: BitStore,
	O: BitOrder,
{
	#[inline]
	fn next_back(&mut self) -> Option<Self::Item> {
		if Self::is_empty(&*self) {
			return None;
		}
		Some(self.take_back())
	}

	#[inline]
	fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
		if n >= self.len() {
			self.end = self.start;
			return None;
		}
		let out = unsafe { self.end.sub(n.wrapping_add(1)) };
		self.end = out;
		Some(out)
	}
}

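/// The length is the bit-distance between the two endpoints, measured with
/// `offset_from`. A brief added illustration:
///
/// ```rust
/// use bitvec::prelude::*;
/// use bitvec::ptr::BitPtrRange;
///
/// let data = 0u16;
/// let bp = BitPtr::<_, _, Lsb0>::from_ref(&data);
/// let mut range = BitPtrRange::from_range(bp .. bp.wrapping_add(16));
///
/// assert_eq!(range.len(), 16);
/// range.next();
/// range.next_back();
/// assert_eq!(range.len(), 14);
/// ```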
impl<M, T, O> ExactSizeIterator for BitPtrRange<M, T, O>
where
	M: Mutability,
	T: BitStore,
	O: BitOrder,
{
	#[inline]
	fn len(&self) -> usize {
		(unsafe { self.end.offset_from(self.start) }) as usize
	}
}

impl<M, T, O> FusedIterator for BitPtrRange<M, T, O>
where
	M: Mutability,
	T: BitStore,
	O: BitOrder,
{
}

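/// The range is half-open: the start bound is included and the end bound is
/// excluded. An added sketch through the `RangeBounds` API:
///
/// ```rust
/// use core::ops::{Bound, RangeBounds};
/// use bitvec::prelude::*;
/// use bitvec::ptr::BitPtrRange;
///
/// let data = 0u8;
/// let bp = BitPtr::<_, _, Lsb0>::from_ref(&data);
/// let range = BitPtrRange::from_range(bp .. bp.wrapping_add(8));
///
/// assert!(matches!(range.start_bound(), Bound::Included(p) if *p == bp));
/// assert!(matches!(range.end_bound(), Bound::Excluded(p) if *p == bp.wrapping_add(8)));
/// ```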
#[cfg(not(tarpaulin_include))]
impl<M, T, O> RangeBounds<BitPtr<M, T, O>> for BitPtrRange<M, T, O>
where
	M: Mutability,
	T: BitStore,
	O: BitOrder,
{
	#[inline]
	fn start_bound(&self) -> Bound<&BitPtr<M, T, O>> {
		Bound::Included(&self.start)
	}

	#[inline]
	fn end_bound(&self) -> Bound<&BitPtr<M, T, O>> {
		Bound::Excluded(&self.end)
	}
}