/rust/registry/src/index.crates.io-1949cf8c6b5b557f/bitvec-1.0.1/src/index.rs
Line | Count | Source |
1 | | #![doc = include_str!("../doc/index.md")] |
2 | | |
3 | | use core::{ |
4 | | any, |
5 | | fmt::{ |
6 | | self, |
7 | | Binary, |
8 | | Debug, |
9 | | Display, |
10 | | Formatter, |
11 | | }, |
12 | | iter::{ |
13 | | FusedIterator, |
14 | | Sum, |
15 | | }, |
16 | | marker::PhantomData, |
17 | | ops::{ |
18 | | BitAnd, |
19 | | BitOr, |
20 | | Not, |
21 | | }, |
22 | | }; |
23 | | |
24 | | use crate::{ |
25 | | mem::{ |
26 | | bits_of, |
27 | | BitRegister, |
28 | | }, |
29 | | order::BitOrder, |
30 | | }; |
31 | | |
32 | | #[repr(transparent)] |
33 | | #[doc = include_str!("../doc/index/BitIdx.md")] |
34 | | #[derive(Clone, Copy, Default, Eq, Hash, Ord, PartialEq, PartialOrd)] |
35 | | pub struct BitIdx<R = usize> |
36 | | where R: BitRegister |
37 | | { |
38 | | /// Semantic index counter within a register, constrained to `0 .. R::BITS`. |
39 | | idx: u8, |
40 | | /// Marker for the register type. |
41 | | _ty: PhantomData<R>, |
42 | | } |
43 | | |
44 | | impl<R> BitIdx<R> |
45 | | where R: BitRegister |
46 | | { |
47 | | /// The inclusive maximum index within an `R` element. |
48 | | pub const MAX: Self = Self { |
49 | | idx: R::MASK, |
50 | | _ty: PhantomData, |
51 | | }; |
52 | | /// The inclusive minimum index within an `R` element. |
53 | | pub const MIN: Self = Self { |
54 | | idx: 0, |
55 | | _ty: PhantomData, |
56 | | }; |
57 | | |
58 | | /// Wraps a counter value as a known-good index into an `R` register. |
59 | | /// |
60 | | /// ## Parameters |
61 | | /// |
62 | | /// - `idx`: The counter value to mark as an index. This must be in the |
63 | | /// range `0 .. R::BITS`. |
64 | | /// |
65 | | /// ## Returns |
66 | | /// |
67 | | /// This returns `idx` wrapped as a valid `BitIdx` when it is within the
68 | | /// range `0 .. R::BITS`, or as an invalid `BitIdxError` when it is not.
69 | | #[inline] |
70 | 0 | pub fn new(idx: u8) -> Result<Self, BitIdxError<R>> { |
71 | 0 | if idx >= bits_of::<R>() as u8 { |
72 | 0 | return Err(BitIdxError::new(idx)); |
73 | 0 | } |
74 | 0 | Ok(unsafe { Self::new_unchecked(idx) }) |
75 | 0 | } |
76 | | |
77 | | /// Wraps a counter value as an assumed-good index into an `R` register. |
78 | | /// |
79 | | /// ## Parameters |
80 | | /// |
81 | | /// - `idx`: The counter value to mark as an index. This must be in the |
82 | | /// range `0 .. R::BITS`. |
83 | | /// |
84 | | /// ## Returns |
85 | | /// |
86 | | /// This unconditionally marks `idx` as a valid bit-index. |
87 | | /// |
88 | | /// ## Safety |
89 | | /// |
90 | | /// If the `idx` value is outside the valid range, then the program is |
91 | | /// incorrect. Debug builds will panic; release builds do not inspect the |
92 | | /// value or specify a behavior. |
93 | | #[inline] |
94 | 2.28M | pub unsafe fn new_unchecked(idx: u8) -> Self { |
95 | 2.28M | debug_assert!( |
96 | 0 | idx < bits_of::<R>() as u8, |
97 | 0 | "Bit index {} cannot exceed type width {}", |
98 | | idx, |
99 | 0 | bits_of::<R>(), |
100 | | ); |
101 | 2.28M | Self { |
102 | 2.28M | idx, |
103 | 2.28M | _ty: PhantomData, |
104 | 2.28M | } |
105 | 2.28M | }
106 | | |
107 | | /// Removes the index wrapper, leaving the internal counter. |
108 | | #[inline] |
109 | | #[cfg(not(tarpaulin_include))] |
110 | 2.54M | pub fn into_inner(self) -> u8 { |
111 | 2.54M | self.idx |
112 | 2.54M | }
113 | | |
114 | | /// Increments an index counter, wrapping at the back edge of the register. |
115 | | /// |
116 | | /// ## Parameters |
117 | | /// |
118 | | /// - `self`: The index to increment. |
119 | | /// |
120 | | /// ## Returns |
121 | | /// |
122 | | /// - `.0`: The next index after `self`. |
123 | | /// - `.1`: Indicates whether the new index is in the next memory address. |
124 | | #[inline] |
125 | 0 | pub fn next(self) -> (Self, bool) { |
126 | 0 | let next = self.idx + 1; |
127 | 0 | ( |
128 | 0 | unsafe { Self::new_unchecked(next & R::MASK) }, |
129 | 0 | next == bits_of::<R>() as u8, |
130 | 0 | ) |
131 | 0 | } |
132 | | |
133 | | /// Decrements an index counter, wrapping at the front edge of the register. |
134 | | /// |
135 | | /// ## Parameters |
136 | | /// |
137 | | /// - `self`: The index to decrement. |
138 | | /// |
139 | | /// ## Returns |
140 | | /// |
141 | | /// - `.0`: The previous index before `self`. |
142 | | /// - `.1`: Indicates whether the new index is in the previous memory |
143 | | /// address. |
144 | | #[inline] |
145 | 0 | pub fn prev(self) -> (Self, bool) { |
146 | 0 | let prev = self.idx.wrapping_sub(1); |
147 | 0 | ( |
148 | 0 | unsafe { Self::new_unchecked(prev & R::MASK) }, |
149 | 0 | self.idx == 0, |
150 | 0 | ) |
151 | 0 | } |
152 | | |
153 | | /// Computes the bit position corresponding to `self` under some ordering. |
154 | | /// |
155 | | /// This forwards to [`O::at::<R>`], which is the only public, safe
156 | | /// constructor for a position counter.
157 | | /// |
158 | | /// [`O::at::<R>`]: crate::order::BitOrder::at |
159 | | #[inline] |
160 | | #[cfg(not(tarpaulin_include))] |
161 | 0 | pub fn position<O>(self) -> BitPos<R> |
162 | 0 | where O: BitOrder { |
163 | 0 | O::at::<R>(self) |
164 | 0 | } |
165 | | |
166 | | /// Computes the bit selector corresponding to `self` under an ordering. |
167 | | /// |
168 | | /// This forwards to [`O::select::<R>`], which is the only public, safe
169 | | /// constructor for a bit selector.
170 | | /// |
171 | | /// [`O::select::<R>`]: crate::order::BitOrder::select |
172 | | #[inline] |
173 | | #[cfg(not(tarpaulin_include))] |
174 | 67.8k | pub fn select<O>(self) -> BitSel<R> |
175 | 67.8k | where O: BitOrder { |
176 | 67.8k | O::select::<R>(self) |
177 | 67.8k | }
178 | | |
179 | | /// Computes the bit selector for `self` as an accessor mask. |
180 | | /// |
181 | | /// This is a type-cast over [`Self::select`]. |
182 | | /// |
183 | | /// [`Self::select`]: Self::select |
184 | | #[inline] |
185 | | #[cfg(not(tarpaulin_include))] |
186 | 0 | pub fn mask<O>(self) -> BitMask<R> |
187 | 0 | where O: BitOrder { |
188 | 0 | self.select::<O>().mask() |
189 | 0 | } |
190 | | |
191 | | /// Iterates over all indices between an inclusive start and exclusive end |
192 | | /// point. |
193 | | /// |
194 | | /// Because implementation details of the range type family, including the |
195 | | /// [`RangeBounds`] trait, are not yet stable, and heterogeneous ranges are |
196 | | /// not supported, this must be an opaque iterator rather than a direct |
197 | | /// [`Range<BitIdx<R>>`]. |
198 | | /// |
199 | | /// # Parameters |
200 | | /// |
201 | | /// - `from`: The inclusive low bound of the range. This will be the first |
202 | | /// index produced by the iterator. |
203 | | /// - `upto`: The exclusive high bound of the range. The iterator will halt |
204 | | /// before yielding an index of this value. |
205 | | /// |
206 | | /// # Returns |
207 | | /// |
208 | | /// An opaque iterator that is equivalent to the range `from .. upto`. |
209 | | /// |
210 | | /// # Requirements |
211 | | /// |
212 | | /// `from` must be no greater than `upto`. |
213 | | /// |
214 | | /// [`RangeBounds`]: core::ops::RangeBounds |
215 | | /// [`Range<BitIdx<R>>`]: core::ops::Range |
216 | | #[inline] |
217 | 0 | pub fn range( |
218 | 0 | self, |
219 | 0 | upto: BitEnd<R>, |
220 | 0 | ) -> impl Iterator<Item = Self> |
221 | 0 | + DoubleEndedIterator |
222 | 0 | + ExactSizeIterator |
223 | 0 | + FusedIterator { |
224 | 0 | let (from, upto) = (self.into_inner(), upto.into_inner()); |
225 | 0 | debug_assert!(from <= upto, "Ranges must run from low to high"); |
226 | 0 | (from .. upto).map(|val| unsafe { Self::new_unchecked(val) }) |
227 | 0 | } |
228 | | |
229 | | /// Iterates over all possible index values. |
230 | | #[inline] |
231 | 0 | pub fn range_all() -> impl Iterator<Item = Self> |
232 | 0 | + DoubleEndedIterator |
233 | 0 | + ExactSizeIterator |
234 | 0 | + FusedIterator { |
235 | 0 | BitIdx::MIN.range(BitEnd::MAX) |
236 | 0 | } |
237 | | |
238 | | /// Computes the jump distance for some number of bits away from a starting |
239 | | /// index. |
240 | | /// |
241 | | /// This computes the number of elements by which to adjust a base pointer, |
242 | | /// and then the bit index of the destination bit in the new referent |
243 | | /// register element. |
244 | | /// |
245 | | /// # Parameters |
246 | | /// |
247 | | /// - `self`: An index within some element, from which the offset is |
248 | | /// computed. |
249 | | /// - `by`: The distance by which to jump. Negative values move lower in the |
250 | | /// index and element-pointer space; positive values move higher. |
251 | | /// |
252 | | /// # Returns |
253 | | /// |
254 | | /// - `.0`: The number of elements `R` by which to adjust a base pointer. |
255 | | /// This value can be passed directly into [`ptr::offset`]. |
256 | | /// - `.1`: The index of the destination bit within the destination element. |
257 | | /// |
258 | | /// [`ptr::offset`]: https://doc.rust-lang.org/stable/std/primitive.pointer.html#method.offset |
259 | 446k | pub(crate) fn offset(self, by: isize) -> (isize, Self) { |
260 | | /* Signed-add `self.idx` to the jump distance. This will almost |
261 | | * certainly not wrap (as the crate imposes restrictions well below |
262 | | * `isize::MAX`), but correctness never hurts. The resulting sum is a |
263 | | * bit distance that is then broken into an element distance and final |
264 | | * bit index. |
265 | | */ |
266 | 446k | let far = by.wrapping_add(self.into_inner() as isize); |
267 | | |
268 | 446k | let (elts, head) = (far >> R::INDX, far as u8 & R::MASK); |
269 | | |
270 | 446k | (elts, unsafe { Self::new_unchecked(head) }) |
271 | 446k | }
272 | | |
273 | | /// Computes the span information for a region beginning at `self` for `len` |
274 | | /// bits. |
275 | | /// |
276 | | /// The span information is the number of elements in the region that hold |
277 | | /// live bits, and the position of the tail marker after the live bits. |
278 | | /// |
279 | | /// This forwards to [`BitEnd::span`], as the computation is identical for |
280 | | /// the two types. Beginning a span at any `Idx` is equivalent to beginning |
281 | | /// it at the tail of a previous span. |
282 | | /// |
283 | | /// # Parameters |
284 | | /// |
285 | | /// - `self`: The start bit of the span. |
286 | | /// - `len`: The number of bits in the span. |
287 | | /// |
288 | | /// # Returns |
289 | | /// |
290 | | /// - `.0`: The number of elements, starting in the element that contains |
291 | | /// `self`, that contain live bits of the span. |
292 | | /// - `.1`: The tail counter of the span’s end point. |
293 | | /// |
294 | | /// [`BitEnd::span`]: crate::index::BitEnd::span |
295 | 415k | pub(crate) fn span(self, len: usize) -> (usize, BitEnd<R>) { |
296 | 415k | unsafe { BitEnd::<R>::new_unchecked(self.into_inner()) }.span(len) |
297 | 415k | }
298 | | } |
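// A worked sketch of the arithmetic above, not part of the measured source; it
// assumes the crate-internal scope of this module, since `offset` and `span`
// are `pub(crate)`.
fn _bitidx_arithmetic_sketch() {
	// Jumping 20 bits up from bit 5 of a `u8` element lands 3 elements ahead:
	// 5 + 20 = 25 = 3 * 8 + 1.
	let start = BitIdx::<u8>::new(5).unwrap();
	let (elts, head) = start.offset(20);
	assert_eq!(elts, 3);
	assert_eq!(head, BitIdx::new(1).unwrap());

	// A 20-bit span beginning at bit 5 occupies 4 elements (3 + 8 + 8 + 1 live
	// bits) and ends at tail 1 in the last of them.
	let (elts, tail) = start.span(20);
	assert_eq!(elts, 4);
	assert_eq!(tail, BitEnd::new(1).unwrap());
}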
299 | | |
300 | | impl<R> Binary for BitIdx<R> |
301 | | where R: BitRegister |
302 | | { |
303 | | #[inline] |
304 | 0 | fn fmt(&self, fmt: &mut Formatter) -> fmt::Result { |
305 | 0 | write!(fmt, "{:0>1$b}", self.idx, R::INDX as usize) |
306 | 0 | } |
307 | | } |
308 | | |
309 | | impl<R> Debug for BitIdx<R> |
310 | | where R: BitRegister |
311 | | { |
312 | | #[inline] |
313 | 0 | fn fmt(&self, fmt: &mut Formatter) -> fmt::Result { |
314 | 0 | write!(fmt, "BitIdx<{}>({})", any::type_name::<R>(), self) |
315 | 0 | } |
316 | | } |
317 | | |
318 | | impl<R> Display for BitIdx<R> |
319 | | where R: BitRegister |
320 | | { |
321 | | #[inline] |
322 | 0 | fn fmt(&self, fmt: &mut Formatter) -> fmt::Result { |
323 | 0 | Binary::fmt(self, fmt) |
324 | 0 | } |
325 | | } |
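// An illustrative sketch of the formatting above, assuming the `std` feature
// so that `format!` is available (the `render` test below reaches for `alloc`
// instead): `Binary` zero-pads the counter to `R::INDX` digits, and `Debug`
// wraps that rendering in the type name.
#[cfg(feature = "std")]
fn _bitidx_format_sketch() {
	let five = BitIdx::<u8>::new(5).unwrap();
	assert_eq!(format!("{:b}", five), "101");
	assert_eq!(format!("{:?}", five), "BitIdx<u8>(101)");
}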
326 | | |
327 | | #[repr(transparent)] |
328 | | #[doc = include_str!("../doc/index/BitIdxError.md")] |
329 | | #[derive(Clone, Copy, Default, Eq, Hash, Ord, PartialEq, PartialOrd)] |
330 | | pub struct BitIdxError<R = usize> |
331 | | where R: BitRegister |
332 | | { |
333 | | /// The value that is invalid as a [`BitIdx<R>`]. |
334 | | /// |
335 | | /// [`BitIdx<R>`]: crate::index::BitIdx |
336 | | err: u8, |
337 | | /// Marker for the register type. |
338 | | _ty: PhantomData<R>, |
339 | | } |
340 | | |
341 | | impl<R> BitIdxError<R> |
342 | | where R: BitRegister |
343 | | { |
344 | | /// Marks a counter value as invalid to be an index for an `R` register. |
345 | | /// |
346 | | /// ## Parameters |
347 | | /// |
348 | | /// - `err`: The counter value to mark as an error. This must be at least
349 | | /// `R::BITS`.
350 | | /// |
351 | | /// ## Returns |
352 | | /// |
353 | | /// This returns `err`, marked as an invalid index for `R`. |
354 | | /// |
355 | | /// ## Panics |
356 | | /// |
357 | | /// Debug builds panic when `err` is a valid index for `R`. |
358 | 0 | pub(crate) fn new(err: u8) -> Self { |
359 | 0 | debug_assert!( |
360 | 0 | err >= bits_of::<R>() as u8, |
361 | 0 | "Bit index {} is valid for type width {}", |
362 | | err, |
363 | 0 | bits_of::<R>(), |
364 | | ); |
365 | 0 | Self { |
366 | 0 | err, |
367 | 0 | _ty: PhantomData, |
368 | 0 | } |
369 | 0 | } |
370 | | |
371 | | /// Removes the error wrapper, leaving the internal counter. |
372 | | #[inline] |
373 | | #[cfg(not(tarpaulin_include))] |
374 | 0 | pub fn into_inner(self) -> u8 { |
375 | 0 | self.err |
376 | 0 | } |
377 | | } |
378 | | |
379 | | impl<R> Debug for BitIdxError<R> |
380 | | where R: BitRegister |
381 | | { |
382 | | #[inline] |
383 | 0 | fn fmt(&self, fmt: &mut Formatter) -> fmt::Result { |
384 | 0 | write!(fmt, "BitIdxError<{}>({})", any::type_name::<R>(), self.err) |
385 | 0 | } |
386 | | } |
387 | | |
388 | | #[cfg(not(tarpaulin_include))] |
389 | | impl<R> Display for BitIdxError<R> |
390 | | where R: BitRegister |
391 | | { |
392 | | #[inline] |
393 | 0 | fn fmt(&self, fmt: &mut Formatter) -> fmt::Result { |
394 | 0 | write!( |
395 | 0 | fmt, |
396 | 0 | "the value {} is too large to index into {} ({} bits wide)", |
397 | | self.err, |
398 | 0 | any::type_name::<R>(), |
399 | 0 | bits_of::<R>(), |
400 | | ) |
401 | 0 | } |
402 | | } |
403 | | |
404 | | #[cfg(feature = "std")] |
405 | | impl<R> std::error::Error for BitIdxError<R> where R: BitRegister {} |
406 | | |
407 | | #[repr(transparent)] |
408 | | #[doc = include_str!("../doc/index/BitEnd.md")] |
409 | | #[derive(Clone, Copy, Default, Eq, Hash, Ord, PartialEq, PartialOrd)] |
410 | | pub struct BitEnd<R = usize> |
411 | | where R: BitRegister |
412 | | { |
413 | | /// Semantic tail counter within or after a register, constrained to
414 | | /// `0 ..= R::BITS`.
415 | | end: u8, |
416 | | /// Marker for the register type. |
417 | | _ty: PhantomData<R>, |
418 | | } |
419 | | |
420 | | impl<R> BitEnd<R> |
421 | | where R: BitRegister |
422 | | { |
423 | | /// The inclusive maximum tail within (or after) an `R` element. |
424 | | pub const MAX: Self = Self { |
425 | | end: bits_of::<R>() as u8, |
426 | | _ty: PhantomData, |
427 | | }; |
428 | | /// The inclusive minimum tail within (or after) an `R` element. |
429 | | pub const MIN: Self = Self { |
430 | | end: 0, |
431 | | _ty: PhantomData, |
432 | | }; |
433 | | |
434 | | /// Wraps a counter value as a known-good tail of an `R` register. |
435 | | /// |
436 | | /// ## Parameters |
437 | | /// |
438 | | /// - `end`: The counter value to mark as a tail. This must be in the range |
439 | | /// `0 ..= R::BITS`. |
440 | | /// |
441 | | /// ## Returns |
442 | | /// |
443 | | /// This returns `Some(end)` when it is in the valid range `0 ..= R::BITS`, |
444 | | /// and `None` when it is not. |
445 | | #[inline] |
446 | 0 | pub fn new(end: u8) -> Option<Self> { |
447 | 0 | if end > bits_of::<R>() as u8 { |
448 | 0 | return None; |
449 | 0 | } |
450 | 0 | Some(unsafe { Self::new_unchecked(end) }) |
451 | 0 | } |
452 | | |
453 | | /// Wraps a counter value as an assumed-good tail of an `R` register. |
454 | | /// |
455 | | /// ## Parameters |
456 | | /// |
457 | | /// - `end`: The counter value to mark as a tail. This must be in the range |
458 | | /// `0 ..= R::BITS`. |
459 | | /// |
460 | | /// ## Returns |
461 | | /// |
462 | | /// This unconditionally marks `end` as a valid tail index. |
463 | | /// |
464 | | /// ## Safety |
465 | | /// |
466 | | /// If the `end` value is outside the valid range, then the program is |
467 | | /// incorrect. Debug builds will panic; release builds do not inspect the |
468 | | /// value or specify a behavior. |
469 | 830k | pub(crate) unsafe fn new_unchecked(end: u8) -> Self { |
470 | 830k | debug_assert!( |
471 | 0 | end <= bits_of::<R>() as u8, |
472 | 0 | "Bit tail {} cannot exceed type width {}", |
473 | | end, |
474 | 0 | bits_of::<R>(), |
475 | | ); |
476 | 830k | Self { |
477 | 830k | end, |
478 | 830k | _ty: PhantomData, |
479 | 830k | } |
480 | 830k | }
481 | | |
482 | | /// Removes the tail wrapper, leaving the internal counter. |
483 | | #[inline] |
484 | | #[cfg(not(tarpaulin_include))] |
485 | 481k | pub fn into_inner(self) -> u8 { |
486 | 481k | self.end |
487 | 481k | }
488 | | |
489 | | /// Iterates over all tail indices at and after an inclusive starting point. |
490 | | /// |
491 | | /// Because implementation details of the range type family, including the |
492 | | /// [`RangeBounds`] trait, are not yet stable, and heterogeneous ranges are |
493 | | /// not yet supported, this must be an opaque iterator rather than a direct |
494 | | /// [`Range<BitEnd<R>>`]. |
495 | | /// |
496 | | /// # Parameters |
497 | | /// |
498 | | /// - `from`: The inclusive low bound of the range. This will be the first |
499 | | /// tail produced by the iterator. |
500 | | /// |
501 | | /// # Returns |
502 | | /// |
503 | | /// An opaque iterator that is equivalent to the range `from ..= |
504 | | /// Self::MAX`. |
505 | | /// |
506 | | /// [`RangeBounds`]: core::ops::RangeBounds |
507 | | /// [`Range<BitEnd<R>>`]: core::ops::Range |
508 | | #[inline] |
509 | 0 | pub fn range_from( |
510 | 0 | from: BitIdx<R>, |
511 | 0 | ) -> impl Iterator<Item = Self> |
512 | 0 | + DoubleEndedIterator |
513 | 0 | + ExactSizeIterator |
514 | 0 | + FusedIterator { |
515 | 0 | (from.idx ..= Self::MAX.end) |
516 | 0 | .map(|tail| unsafe { BitEnd::new_unchecked(tail) }) |
517 | 0 | } |
518 | | |
519 | | /// Computes the span information for a region. |
520 | | /// |
521 | | /// The computed span of `len` bits begins at `self` and extends upwards in |
522 | | /// memory. The return value is the number of memory elements that contain |
523 | | /// bits of the span, and the first dead bit after the span. |
524 | | /// |
525 | | /// ## Parameters |
526 | | /// |
527 | | /// - `self`: A dead bit which is used as the first live bit of the new |
528 | | /// span. |
529 | | /// - `len`: The number of live bits in the span starting at `self`. |
530 | | /// |
531 | | /// ## Returns |
532 | | /// |
533 | | /// - `.0`: The number of `R` elements that contain live bits in the |
534 | | /// computed span. |
535 | | /// - `.1`: The dead-bit tail index ending the computed span. |
536 | | /// |
537 | | /// ## Behavior |
538 | | /// |
539 | | /// If `len` is `0`, this returns `(0, self)`, as the span has no live bits. |
540 | | /// If `self` is [`BitEnd::MAX`], then the new region starts at |
541 | | /// [`BitIdx::MIN`] in the next element. |
542 | | /// |
543 | | /// [`BitEnd::MAX`]: Self::MAX |
544 | | /// [`BitIdx::MIN`]: crate::index::BitIdx::MIN
545 | 415k | pub(crate) fn span(self, len: usize) -> (usize, Self) { |
546 | 415k | if len == 0 { |
547 | 0 | return (0, self); |
548 | 415k | } |
549 | | |
550 | 415k | let head = self.end & R::MASK; |
551 | 415k | let bits_in_head = (bits_of::<R>() as u8 - head) as usize; |
552 | | |
553 | 415k | if len <= bits_in_head { |
554 | 17.6k | return (1, unsafe { Self::new_unchecked(head + len as u8) }); |
555 | 397k | } |
556 | | |
557 | 397k | let bits_after_head = len - bits_in_head; |
558 | 397k | let elts = bits_after_head >> R::INDX; |
559 | 397k | let tail = bits_after_head as u8 & R::MASK; |
560 | | |
561 | 397k | let is_zero = (tail == 0) as u8; |
562 | 397k | let edges = 2 - is_zero as usize; |
563 | 397k | (elts + edges, unsafe { |
564 | 397k | Self::new_unchecked((is_zero << R::INDX) | tail) |
565 | 397k | }) |
566 | 415k | }
567 | | } |
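// An illustrative sketch of `BitEnd::span`: a 10-bit region whose first live
// bit replaces the dead bit at tail 3 of a `u8` fills the remaining 5 bits of
// that element and 5 bits of the next, so it touches 2 elements and ends at
// tail 5.
fn _bitend_span_sketch() {
	let start = BitEnd::<u8>::new(3).unwrap();
	let (elts, tail) = start.span(10);
	assert_eq!(elts, 2);
	assert_eq!(tail, BitEnd::new(5).unwrap());
}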
568 | | |
569 | | impl<R> Binary for BitEnd<R> |
570 | | where R: BitRegister |
571 | | { |
572 | | #[inline] |
573 | 0 | fn fmt(&self, fmt: &mut Formatter) -> fmt::Result { |
574 | 0 | write!(fmt, "{:0>1$b}", self.end, R::INDX as usize + 1) |
575 | 0 | } |
576 | | } |
577 | | |
578 | | impl<R> Debug for BitEnd<R> |
579 | | where R: BitRegister |
580 | | { |
581 | | #[inline] |
582 | 0 | fn fmt(&self, fmt: &mut Formatter) -> fmt::Result { |
583 | 0 | write!(fmt, "BitEnd<{}>({})", any::type_name::<R>(), self) |
584 | 0 | } |
585 | | } |
586 | | |
587 | | impl<R> Display for BitEnd<R> |
588 | | where R: BitRegister |
589 | | { |
590 | | #[inline] |
591 | 0 | fn fmt(&self, fmt: &mut Formatter) -> fmt::Result { |
592 | 0 | Binary::fmt(self, fmt) |
593 | 0 | } |
594 | | } |
595 | | |
596 | | #[repr(transparent)] |
597 | | #[doc = include_str!("../doc/index/BitPos.md")] |
598 | | // #[rustc_layout_scalar_valid_range_end(R::BITS)] |
599 | | #[derive(Clone, Copy, Default, Eq, Hash, Ord, PartialEq, PartialOrd)] |
600 | | pub struct BitPos<R = usize> |
601 | | where R: BitRegister |
602 | | { |
603 | | /// Electrical position counter within a register, constrained to `0 .. |
604 | | /// R::BITS`. |
605 | | pos: u8, |
606 | | /// Marker for the register type. |
607 | | _ty: PhantomData<R>, |
608 | | } |
609 | | |
610 | | impl<R> BitPos<R> |
611 | | where R: BitRegister |
612 | | { |
613 | | /// The position value of the most significant bit in an `R` element. |
614 | | pub const MAX: Self = Self { |
615 | | pos: R::MASK as u8, |
616 | | _ty: PhantomData, |
617 | | }; |
618 | | /// The position value of the least significant bit in an `R` element. |
619 | | pub const MIN: Self = Self { |
620 | | pos: 0, |
621 | | _ty: PhantomData, |
622 | | }; |
623 | | |
624 | | /// Wraps a counter value as a known-good position within an `R` register. |
625 | | /// |
626 | | /// ## Parameters |
627 | | /// |
628 | | /// - `pos`: The counter value to mark as a position. This must be in the |
629 | | /// range `0 .. R::BITS`. |
630 | | /// |
631 | | /// ## Returns |
632 | | /// |
633 | | /// This returns `Some(pos)` when it is in the valid range `0 .. R::BITS`, |
634 | | /// and `None` when it is not. |
635 | | #[inline] |
636 | 0 | pub fn new(pos: u8) -> Option<Self> { |
637 | 0 | if pos >= bits_of::<R>() as u8 { |
638 | 0 | return None; |
639 | 0 | } |
640 | 0 | Some(unsafe { Self::new_unchecked(pos) }) |
641 | 0 | } |
642 | | |
643 | | /// Wraps a counter value as an assumed-good position within an `R` |
644 | | /// register. |
645 | | /// |
646 | | /// ## Parameters |
647 | | /// |
648 | | /// - `pos`: The counter value to mark as a position. This must be in the
649 | | /// range `0 .. R::BITS`. |
650 | | /// |
651 | | /// ## Returns |
652 | | /// |
653 | | /// This unconditionally marks `pos` as a valid bit-position. |
654 | | /// |
655 | | /// ## Safety |
656 | | /// |
657 | | /// If the `pos` value is outside the valid range, then the program is |
658 | | /// incorrect. Debug builds will panic; release builds do not inspect the |
659 | | /// value or specify a behavior. |
660 | | #[inline] |
661 | 0 | pub unsafe fn new_unchecked(pos: u8) -> Self { |
662 | 0 | debug_assert!( |
663 | 0 | pos < bits_of::<R>() as u8, |
664 | 0 | "Bit position {} cannot exceed type width {}", |
665 | | pos, |
666 | 0 | bits_of::<R>(), |
667 | | ); |
668 | 0 | Self { |
669 | 0 | pos, |
670 | 0 | _ty: PhantomData, |
671 | 0 | } |
672 | 0 | } |
673 | | |
674 | | /// Removes the position wrapper, leaving the internal counter. |
675 | | #[inline] |
676 | | #[cfg(not(tarpaulin_include))] |
677 | 0 | pub fn into_inner(self) -> u8 { |
678 | 0 | self.pos |
679 | 0 | } |
680 | | |
681 | | /// Computes the bit selector corresponding to `self`. |
682 | | /// |
683 | | /// This is always `1 << self.pos`. |
684 | | #[inline] |
685 | 0 | pub fn select(self) -> BitSel<R> { |
686 | 0 | unsafe { BitSel::new_unchecked(R::ONE << self.pos) } |
687 | 0 | } |
688 | | |
689 | | /// Computes the bit selector for `self` as an accessor mask. |
690 | | /// |
691 | | /// This is a type-cast over [`Self::select`]. |
692 | | /// |
693 | | /// [`Self::select`]: Self::select |
694 | | #[inline] |
695 | | #[cfg(not(tarpaulin_include))] |
696 | 0 | pub fn mask(self) -> BitMask<R> { |
697 | 0 | self.select().mask() |
698 | 0 | } |
699 | | |
700 | | /// Iterates over all possible position values. |
701 | 0 | pub(crate) fn range_all() -> impl Iterator<Item = Self> |
702 | 0 | + DoubleEndedIterator |
703 | 0 | + ExactSizeIterator |
704 | 0 | + FusedIterator { |
705 | 0 | BitIdx::<R>::range_all() |
706 | 0 | .map(|idx| unsafe { Self::new_unchecked(idx.into_inner()) }) |
707 | 0 | } |
708 | | } |
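// An illustrative sketch of `BitPos`: a position is the electrical bit number,
// so its selector is always the one-hot value `1 << pos`, and `mask` is the
// same value viewed as a `BitMask`.
fn _bitpos_select_sketch() {
	let pos = BitPos::<u8>::new(3).unwrap();
	assert_eq!(pos.select().into_inner(), 0b0000_1000u8);
	assert_eq!(pos.mask().into_inner(), 0b0000_1000u8);
}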
709 | | |
710 | | impl<R> Binary for BitPos<R> |
711 | | where R: BitRegister |
712 | | { |
713 | | #[inline] |
714 | 0 | fn fmt(&self, fmt: &mut Formatter) -> fmt::Result { |
715 | 0 | write!(fmt, "{:0>1$b}", self.pos, R::INDX as usize) |
716 | 0 | } |
717 | | } |
718 | | |
719 | | impl<R> Debug for BitPos<R> |
720 | | where R: BitRegister |
721 | | { |
722 | | #[inline] |
723 | 0 | fn fmt(&self, fmt: &mut Formatter) -> fmt::Result { |
724 | 0 | write!(fmt, "BitPos<{}>({})", any::type_name::<R>(), self) |
725 | 0 | } |
726 | | } |
727 | | |
728 | | impl<R> Display for BitPos<R> |
729 | | where R: BitRegister |
730 | | { |
731 | | #[inline] |
732 | 0 | fn fmt(&self, fmt: &mut Formatter) -> fmt::Result { |
733 | 0 | Binary::fmt(self, fmt) |
734 | 0 | } |
735 | | } |
736 | | |
737 | | #[repr(transparent)] |
738 | | #[doc = include_str!("../doc/index/BitSel.md")] |
739 | | // #[rustc_layout_scalar_valid_range_end(R::BITS)] |
740 | | #[derive(Clone, Copy, Default, Eq, Hash, Ord, PartialEq, PartialOrd)] |
741 | | pub struct BitSel<R = usize> |
742 | | where R: BitRegister |
743 | | { |
744 | | /// A one-hot selection mask. |
745 | | sel: R, |
746 | | } |
747 | | |
748 | | impl<R> BitSel<R> |
749 | | where R: BitRegister |
750 | | { |
751 | | /// Wraps a selector value as a known-good selection in an `R` register. |
752 | | /// |
753 | | /// ## Parameters |
754 | | /// |
755 | | /// - `sel`: A one-hot selection mask of a bit in an `R` register. |
756 | | /// |
757 | | /// ## Returns |
758 | | /// |
759 | | /// This returns `Some(sel)` when it is a power of two (exactly one bit set |
760 | | /// and all others cleared), and `None` when it is not. |
761 | | #[inline] |
762 | 0 | pub fn new(sel: R) -> Option<Self> { |
763 | 0 | if sel.count_ones() != 1 { |
764 | 0 | return None; |
765 | 0 | } |
766 | 0 | Some(unsafe { Self::new_unchecked(sel) }) |
767 | 0 | } |
768 | | |
769 | | /// Wraps a selector value as an assumed-good selection in an `R` register. |
770 | | /// |
771 | | /// ## Parameters |
772 | | /// |
773 | | /// - `sel`: A one-hot selection mask of a bit in an `R` register. |
774 | | /// |
775 | | /// ## Returns |
776 | | /// |
777 | | /// This unconditionally marks `sel` as a one-hot bit selector. |
778 | | /// |
779 | | /// ## Safety |
780 | | /// |
781 | | /// If the `sel` value has zero or multiple bits set, then it is invalid to |
782 | | /// be used as a `BitSel` and the program is incorrect. Debug builds will |
783 | | /// panic; release builds do not inspect the value or specify a behavior. |
784 | | #[inline] |
785 | 67.8k | pub unsafe fn new_unchecked(sel: R) -> Self { |
786 | 67.8k | debug_assert!( |
787 | 0 | sel.count_ones() == 1, |
788 | 0 | "Selections are required to have exactly one bit set: {:0>1$b}", |
789 | | sel, |
790 | 0 | bits_of::<R>() as usize, |
791 | | ); |
792 | 67.8k | Self { sel } |
793 | 67.8k | }
794 | | |
795 | | /// Removes the one-hot selection wrapper, leaving the internal mask. |
796 | | #[inline] |
797 | | #[cfg(not(tarpaulin_include))] |
798 | 67.8k | pub fn into_inner(self) -> R { |
799 | 67.8k | self.sel |
800 | 67.8k | }
801 | | |
802 | | /// Computes a bit-mask for `self`. This is a type-cast. |
803 | | #[inline] |
804 | | #[cfg(not(tarpaulin_include))] |
805 | 0 | pub fn mask(self) -> BitMask<R> { |
806 | 0 | BitMask::new(self.sel) |
807 | 0 | } |
808 | | |
809 | | /// Iterates over all possible selector values. |
810 | | #[inline] |
811 | 0 | pub fn range_all() -> impl Iterator<Item = Self> |
812 | 0 | + DoubleEndedIterator |
813 | 0 | + ExactSizeIterator |
814 | 0 | + FusedIterator { |
815 | 0 | BitPos::<R>::range_all().map(BitPos::select) |
816 | 0 | } |
817 | | } |
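// An illustrative sketch of the `BitSel` constructors and iterator: `new` only
// accepts one-hot values, and `range_all` walks the selectors from the least
// significant bit upward.
fn _bitsel_sketch() {
	assert!(BitSel::<u8>::new(0b0000_0100).is_some());
	assert!(BitSel::<u8>::new(0b0000_0110).is_none()); // two bits set
	let mut sels = BitSel::<u8>::range_all();
	assert_eq!(sels.next().map(BitSel::into_inner), Some(1));
	assert_eq!(sels.next_back().map(BitSel::into_inner), Some(128));
	assert_eq!(sels.count(), 6); // 2, 4, 8, 16, 32, 64 remain
}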
818 | | |
819 | | impl<R> Binary for BitSel<R> |
820 | | where R: BitRegister |
821 | | { |
822 | | #[inline] |
823 | 0 | fn fmt(&self, fmt: &mut Formatter) -> fmt::Result { |
824 | 0 | write!(fmt, "{:0>1$b}", self.sel, bits_of::<R>() as usize) |
825 | 0 | } |
826 | | } |
827 | | |
828 | | impl<R> Debug for BitSel<R> |
829 | | where R: BitRegister |
830 | | { |
831 | | #[inline] |
832 | 0 | fn fmt(&self, fmt: &mut Formatter) -> fmt::Result { |
833 | 0 | write!(fmt, "BitSel<{}>({})", any::type_name::<R>(), self) |
834 | 0 | } |
835 | | } |
836 | | |
837 | | impl<R> Display for BitSel<R> |
838 | | where R: BitRegister |
839 | | { |
840 | | #[inline] |
841 | 0 | fn fmt(&self, fmt: &mut Formatter) -> fmt::Result { |
842 | 0 | Binary::fmt(self, fmt) |
843 | 0 | } |
844 | | } |
845 | | |
846 | | #[repr(transparent)] |
847 | | #[doc = include_str!("../doc/index/BitMask.md")] |
848 | | #[derive(Clone, Copy, Default, Eq, Hash, Ord, PartialEq, PartialOrd)] |
849 | | pub struct BitMask<R = usize> |
850 | | where R: BitRegister |
851 | | { |
852 | | /// A mask of any number of bits to select. |
853 | | mask: R, |
854 | | } |
855 | | |
856 | | impl<R> BitMask<R> |
857 | | where R: BitRegister |
858 | | { |
859 | | /// A full bit-mask with every bit set. |
860 | | pub const ALL: Self = Self { mask: R::ALL }; |
861 | | /// An empty bit-mask with every bit cleared. |
862 | | pub const ZERO: Self = Self { mask: R::ZERO }; |
863 | | |
864 | | /// Wraps any `R` value as a bit-mask. |
865 | | /// |
866 | | /// This constructor is provided to explicitly declare that an operation is |
867 | | /// discarding the numeric value of an integer and instead using it only as |
868 | | /// a bit-mask. |
869 | | /// |
870 | | /// ## Parameters |
871 | | /// |
872 | | /// - `mask`: Some integer to use as a bit-mask. |
873 | | /// |
874 | | /// ## Returns |
875 | | /// |
876 | | /// The `mask` value wrapped as a bit-mask, with its numeric context |
877 | | /// discarded. |
878 | | /// |
879 | | /// Prefer accumulating [`BitSel`] values using its `Sum` implementation. |
880 | | /// |
881 | | /// ## Safety |
882 | | /// |
883 | | /// The `mask` value must be computed from a set of valid bit positions in |
884 | | /// the caller’s context. |
885 | | /// |
886 | | /// [`BitSel`]: crate::index::BitSel |
887 | | #[inline] |
888 | 372k | pub fn new(mask: R) -> Self { |
889 | 372k | Self { mask } |
890 | 372k | }
891 | | |
892 | | /// Removes the mask wrapper, leaving the internal value. |
893 | | #[inline] |
894 | | #[cfg(not(tarpaulin_include))] |
895 | 772k | pub fn into_inner(self) -> R { |
896 | 772k | self.mask |
897 | 772k | }
898 | | |
899 | | /// Tests if a mask contains a given selector bit. |
900 | | /// |
901 | | /// ## Parameters |
902 | | /// |
903 | | /// - `&self`: The mask being tested. |
904 | | /// - `sel`: A selector bit to test in `self`. |
905 | | /// |
906 | | /// ## Returns |
907 | | /// |
908 | | /// Whether `self` has set the bit that `sel` indicates. |
909 | | #[inline] |
910 | 0 | pub fn test(&self, sel: BitSel<R>) -> bool { |
911 | 0 | self.mask & sel.sel != R::ZERO |
912 | 0 | } |
913 | | |
914 | | /// Inserts a selector bit into a mask. |
915 | | /// |
916 | | /// ## Parameters |
917 | | /// |
918 | | /// - `&mut self`: The mask being modified. |
919 | | /// - `sel`: A selector bit to insert into `self`. |
920 | | /// |
921 | | /// ## Effects |
922 | | /// |
923 | | /// The `sel` bit is set in the mask. |
924 | | #[inline] |
925 | 0 | pub fn insert(&mut self, sel: BitSel<R>) { |
926 | 0 | self.mask |= sel.sel; |
927 | 0 | } |
928 | | |
929 | | /// Creates a new mask with a selector bit activated. |
930 | | /// |
931 | | /// ## Parameters |
932 | | /// |
933 | | /// - `self`: The original mask. |
934 | | /// - `sel`: The selector bit being added into the mask. |
935 | | /// |
936 | | /// ## Returns |
937 | | /// |
938 | | /// A new bit-mask with `sel` activated. |
939 | | #[inline] |
940 | 0 | pub fn combine(self, sel: BitSel<R>) -> Self { |
941 | 0 | Self { |
942 | 0 | mask: self.mask | sel.sel, |
943 | 0 | } |
944 | 0 | } |
945 | | } |
946 | | |
947 | | impl<R> Binary for BitMask<R> |
948 | | where R: BitRegister |
949 | | { |
950 | | #[inline] |
951 | 0 | fn fmt(&self, fmt: &mut Formatter) -> fmt::Result { |
952 | 0 | write!(fmt, "{:0>1$b}", self.mask, bits_of::<R>() as usize) |
953 | 0 | } |
954 | | } |
955 | | |
956 | | impl<R> Debug for BitMask<R> |
957 | | where R: BitRegister |
958 | | { |
959 | | #[inline] |
960 | 0 | fn fmt(&self, fmt: &mut Formatter) -> fmt::Result { |
961 | 0 | write!(fmt, "BitMask<{}>({})", any::type_name::<R>(), self) |
962 | 0 | } |
963 | | } |
964 | | |
965 | | impl<R> Display for BitMask<R> |
966 | | where R: BitRegister |
967 | | { |
968 | | #[inline] |
969 | 0 | fn fmt(&self, fmt: &mut Formatter) -> fmt::Result { |
970 | 0 | Binary::fmt(self, fmt) |
971 | 0 | } |
972 | | } |
973 | | |
974 | | impl<R> Sum<BitSel<R>> for BitMask<R> |
975 | | where R: BitRegister |
976 | | { |
977 | | #[inline] |
978 | 0 | fn sum<I>(iter: I) -> Self |
979 | 0 | where I: Iterator<Item = BitSel<R>> { |
980 | 0 | iter.fold(Self::ZERO, Self::combine) |
981 | 0 | } |
982 | | } |
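// An illustrative sketch of accumulating selectors into a mask through the
// `Sum` implementation above; `Lsb0` is one choice of ordering, as in the
// `mask_operators` test below.
fn _bitmask_sum_sketch() {
	let mask: BitMask<u8> = BitIdx::<u8>::new(1)
		.unwrap()
		.range(BitEnd::new(4).unwrap())
		.map(BitIdx::select::<crate::order::Lsb0>)
		.sum();
	assert_eq!(mask.into_inner(), 0b0000_1110); // bits 1, 2, and 3
	assert!(mask.test(BitSel::new(0b0000_0100).unwrap()));
	assert!(!mask.test(BitSel::new(0b0001_0000).unwrap()));
}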
983 | | |
984 | | impl<R> BitAnd<R> for BitMask<R> |
985 | | where R: BitRegister |
986 | | { |
987 | | type Output = Self; |
988 | | |
989 | | #[inline] |
990 | 197k | fn bitand(self, rhs: R) -> Self::Output { |
991 | 197k | Self { |
992 | 197k | mask: self.mask & rhs, |
993 | 197k | } |
994 | 197k | }
995 | | } |
996 | | |
997 | | impl<R> BitOr<R> for BitMask<R> |
998 | | where R: BitRegister |
999 | | { |
1000 | | type Output = Self; |
1001 | | |
1002 | | #[inline] |
1003 | 0 | fn bitor(self, rhs: R) -> Self::Output { |
1004 | 0 | Self { |
1005 | 0 | mask: self.mask | rhs, |
1006 | 0 | } |
1007 | 0 | } |
1008 | | } |
1009 | | |
1010 | | impl<R> Not for BitMask<R> |
1011 | | where R: BitRegister |
1012 | | { |
1013 | | type Output = Self; |
1014 | | |
1015 | | #[inline] |
1016 | 0 | fn not(self) -> Self::Output { |
1017 | 0 | Self { mask: !self.mask } |
1018 | 0 | } |
1019 | | } |
1020 | | |
1021 | | #[cfg(test)] |
1022 | | mod tests { |
1023 | | use super::*; |
1024 | | use crate::order::Lsb0; |
1025 | | |
1026 | | #[test] |
1027 | | fn index_ctors() { |
1028 | | for n in 0 .. 8 { |
1029 | | assert!(BitIdx::<u8>::new(n).is_ok()); |
1030 | | } |
1031 | | assert!(BitIdx::<u8>::new(8).is_err()); |
1032 | | |
1033 | | for n in 0 .. 16 { |
1034 | | assert!(BitIdx::<u16>::new(n).is_ok()); |
1035 | | } |
1036 | | assert!(BitIdx::<u16>::new(16).is_err()); |
1037 | | |
1038 | | for n in 0 .. 32 { |
1039 | | assert!(BitIdx::<u32>::new(n).is_ok()); |
1040 | | } |
1041 | | assert!(BitIdx::<u32>::new(32).is_err()); |
1042 | | |
1043 | | #[cfg(target_pointer_width = "64")] |
1044 | | { |
1045 | | for n in 0 .. 64 { |
1046 | | assert!(BitIdx::<u64>::new(n).is_ok()); |
1047 | | } |
1048 | | assert!(BitIdx::<u64>::new(64).is_err()); |
1049 | | } |
1050 | | |
1051 | | for n in 0 .. bits_of::<usize>() as u8 { |
1052 | | assert!(BitIdx::<usize>::new(n).is_ok()); |
1053 | | } |
1054 | | assert!(BitIdx::<usize>::new(bits_of::<usize>() as u8).is_err()); |
1055 | | } |
1056 | | |
1057 | | #[test] |
1058 | | fn tail_ctors() { |
1059 | | for n in 0 ..= 8 { |
1060 | | assert!(BitEnd::<u8>::new(n).is_some()); |
1061 | | } |
1062 | | assert!(BitEnd::<u8>::new(9).is_none()); |
1063 | | |
1064 | | for n in 0 ..= 16 { |
1065 | | assert!(BitEnd::<u16>::new(n).is_some()); |
1066 | | } |
1067 | | assert!(BitEnd::<u16>::new(17).is_none()); |
1068 | | |
1069 | | for n in 0 ..= 32 { |
1070 | | assert!(BitEnd::<u32>::new(n).is_some()); |
1071 | | } |
1072 | | assert!(BitEnd::<u32>::new(33).is_none()); |
1073 | | |
1074 | | #[cfg(target_pointer_width = "64")] |
1075 | | { |
1076 | | for n in 0 ..= 64 { |
1077 | | assert!(BitEnd::<u64>::new(n).is_some()); |
1078 | | } |
1079 | | assert!(BitEnd::<u64>::new(65).is_none()); |
1080 | | } |
1081 | | |
1082 | | for n in 0 ..= bits_of::<usize>() as u8 { |
1083 | | assert!(BitEnd::<usize>::new(n).is_some()); |
1084 | | } |
1085 | | assert!(BitEnd::<usize>::new(bits_of::<usize>() as u8 + 1).is_none()); |
1086 | | } |
1087 | | |
1088 | | #[test] |
1089 | | fn position_ctors() { |
1090 | | for n in 0 .. 8 { |
1091 | | assert!(BitPos::<u8>::new(n).is_some()); |
1092 | | } |
1093 | | assert!(BitPos::<u8>::new(8).is_none()); |
1094 | | |
1095 | | for n in 0 .. 16 { |
1096 | | assert!(BitPos::<u16>::new(n).is_some()); |
1097 | | } |
1098 | | assert!(BitPos::<u16>::new(16).is_none()); |
1099 | | |
1100 | | for n in 0 .. 32 { |
1101 | | assert!(BitPos::<u32>::new(n).is_some()); |
1102 | | } |
1103 | | assert!(BitPos::<u32>::new(32).is_none()); |
1104 | | |
1105 | | #[cfg(target_pointer_width = "64")] |
1106 | | { |
1107 | | for n in 0 .. 64 { |
1108 | | assert!(BitPos::<u64>::new(n).is_some()); |
1109 | | } |
1110 | | assert!(BitPos::<u64>::new(64).is_none()); |
1111 | | } |
1112 | | |
1113 | | for n in 0 .. bits_of::<usize>() as u8 { |
1114 | | assert!(BitPos::<usize>::new(n).is_some()); |
1115 | | } |
1116 | | assert!(BitPos::<usize>::new(bits_of::<usize>() as u8).is_none()); |
1117 | | } |
1118 | | |
1119 | | #[test] |
1120 | | fn select_ctors() { |
1121 | | for n in 0 .. 8 { |
1122 | | assert!(BitSel::<u8>::new(1 << n).is_some()); |
1123 | | } |
1124 | | assert!(BitSel::<u8>::new(0).is_none()); |
1125 | | assert!(BitSel::<u8>::new(3).is_none()); |
1126 | | |
1127 | | for n in 0 .. 16 { |
1128 | | assert!(BitSel::<u16>::new(1 << n).is_some()); |
1129 | | } |
1130 | | assert!(BitSel::<u16>::new(0).is_none()); |
1131 | | assert!(BitSel::<u16>::new(3).is_none()); |
1132 | | |
1133 | | for n in 0 .. 32 { |
1134 | | assert!(BitSel::<u32>::new(1 << n).is_some()); |
1135 | | } |
1136 | | assert!(BitSel::<u32>::new(0).is_none()); |
1137 | | assert!(BitSel::<u32>::new(3).is_none()); |
1138 | | |
1139 | | #[cfg(target_pointer_width = "64")] |
1140 | | { |
1141 | | for n in 0 .. 64 { |
1142 | | assert!(BitSel::<u64>::new(1 << n).is_some()); |
1143 | | } |
1144 | | assert!(BitSel::<u64>::new(0).is_none()); |
1145 | | assert!(BitSel::<u64>::new(3).is_none()); |
1146 | | } |
1147 | | |
1148 | | for n in 0 .. bits_of::<usize>() as u8 { |
1149 | | assert!(BitSel::<usize>::new(1 << n).is_some()); |
1150 | | } |
1151 | | assert!(BitSel::<usize>::new(0).is_none()); |
1152 | | assert!(BitSel::<usize>::new(3).is_none()); |
1153 | | } |
1154 | | |
1155 | | #[test] |
1156 | | fn ranges() { |
1157 | | let mut range = BitIdx::<u16>::range_all(); |
1158 | | assert_eq!(range.next(), BitIdx::new(0).ok()); |
1159 | | assert_eq!(range.next_back(), BitIdx::new(15).ok()); |
1160 | | assert_eq!(range.count(), 14); |
1161 | | |
1162 | | let mut range = BitEnd::<u8>::range_from(BitIdx::new(1).unwrap()); |
1163 | | assert_eq!(range.next(), BitEnd::new(1)); |
1164 | | assert_eq!(range.next_back(), BitEnd::new(8)); |
1165 | | assert_eq!(range.count(), 6); |
1166 | | |
1167 | | let mut range = BitPos::<u8>::range_all(); |
1168 | | assert_eq!(range.next(), BitPos::new(0)); |
1169 | | assert_eq!(range.next_back(), BitPos::new(7)); |
1170 | | assert_eq!(range.count(), 6); |
1171 | | |
1172 | | let mut range = BitSel::<u8>::range_all(); |
1173 | | assert_eq!(range.next(), BitSel::new(1)); |
1174 | | assert_eq!(range.next_back(), BitSel::new(128)); |
1175 | | assert_eq!(range.count(), 6); |
1176 | | } |
1177 | | |
1178 | | #[test] |
1179 | | fn index_cycle() { |
1180 | | let six = BitIdx::<u8>::new(6).unwrap(); |
1181 | | let (seven, step) = six.next(); |
1182 | | assert_eq!(seven, BitIdx::new(7).unwrap()); |
1183 | | assert!(!step); |
1184 | | |
1185 | | let (zero, step) = seven.next(); |
1186 | | assert_eq!(zero, BitIdx::MIN); |
1187 | | assert!(step); |
1188 | | |
1189 | | let (seven, step) = zero.prev(); |
1190 | | assert_eq!(seven, BitIdx::new(7).unwrap()); |
1191 | | assert!(step); |
1192 | | |
1193 | | let (six, step) = seven.prev(); |
1194 | | assert_eq!(six, BitIdx::new(6).unwrap()); |
1195 | | assert!(!step); |
1196 | | |
1197 | | let fourteen = BitIdx::<u16>::new(14).unwrap(); |
1198 | | let (fifteen, step) = fourteen.next(); |
1199 | | assert_eq!(fifteen, BitIdx::new(15).unwrap()); |
1200 | | assert!(!step); |
1201 | | let (zero, step) = fifteen.next(); |
1202 | | assert_eq!(zero, BitIdx::MIN); |
1203 | | assert!(step); |
1204 | | let (fifteen, step) = zero.prev(); |
1205 | | assert_eq!(fifteen, BitIdx::new(15).unwrap()); |
1206 | | assert!(step); |
1207 | | let (fourteen, step) = fifteen.prev(); |
1208 | | assert_eq!(fourteen, BitIdx::new(14).unwrap()); |
1209 | | assert!(!step); |
1210 | | } |
1211 | | |
1212 | | #[test] |
1213 | | fn jumps() { |
1214 | | let (jump, head) = BitIdx::<u8>::new(1).unwrap().offset(2); |
1215 | | assert_eq!(jump, 0); |
1216 | | assert_eq!(head, BitIdx::new(3).unwrap()); |
1217 | | |
1218 | | let (jump, head) = BitIdx::<u8>::MAX.offset(1); |
1219 | | assert_eq!(jump, 1); |
1220 | | assert_eq!(head, BitIdx::MIN); |
1221 | | |
1222 | | let (jump, head) = BitIdx::<u16>::new(10).unwrap().offset(40); |
1223 | | // 10 is in 0..16; 10+40 is in 48..64 |
1224 | | assert_eq!(jump, 3); |
1225 | | assert_eq!(head, BitIdx::new(2).unwrap()); |
1226 | | |
1227 | | // .offset() wraps at the `isize` boundary |
1228 | | let (jump, head) = BitIdx::<u8>::MAX.offset(isize::MAX); |
1229 | | assert_eq!(jump, -(((isize::MAX as usize + 1) >> 3) as isize)); |
1230 | | assert_eq!(head, BitIdx::MAX.prev().0); |
1231 | | |
1232 | | let (elts, tail) = BitIdx::<u8>::new(4).unwrap().span(0); |
1233 | | assert_eq!(elts, 0); |
1234 | | assert_eq!(tail, BitEnd::new(4).unwrap()); |
1235 | | |
1236 | | let (elts, tail) = BitIdx::<u8>::new(3).unwrap().span(3); |
1237 | | assert_eq!(elts, 1); |
1238 | | assert_eq!(tail, BitEnd::new(6).unwrap()); |
1239 | | |
1240 | | let (elts, tail) = BitIdx::<u16>::new(10).unwrap().span(40); |
1241 | | assert_eq!(elts, 4); |
1242 | | assert_eq!(tail, BitEnd::new(2).unwrap()); |
1243 | | } |
1244 | | |
1245 | | #[test] |
1246 | | fn mask_operators() { |
1247 | | let mut mask = BitIdx::<u8>::new(2) |
1248 | | .unwrap() |
1249 | | .range(BitEnd::new(5).unwrap()) |
1250 | | .map(BitIdx::select::<Lsb0>) |
1251 | | .sum::<BitMask<u8>>(); |
1252 | | assert_eq!(mask, BitMask::new(28)); |
1253 | | assert_eq!(mask & 25, BitMask::new(24)); |
1254 | | assert_eq!(mask | 32, BitMask::new(60)); |
1255 | | assert_eq!(!mask, BitMask::new(!28)); |
1256 | | let yes = BitSel::<u8>::new(16).unwrap(); |
1257 | | let no = BitSel::<u8>::new(64).unwrap(); |
1258 | | assert!(mask.test(yes)); |
1259 | | assert!(!mask.test(no)); |
1260 | | mask.insert(no); |
1261 | | assert!(mask.test(no)); |
1262 | | } |
1263 | | |
1264 | | #[test] |
1265 | | #[cfg(feature = "alloc")] |
1266 | | fn render() { |
1267 | | #[cfg(not(feature = "std"))] |
1268 | | use alloc::format; |
1269 | | |
1270 | | assert_eq!(format!("{:?}", BitIdx::<u8>::MAX), "BitIdx<u8>(111)"); |
1271 | | assert_eq!(format!("{:?}", BitIdx::<u16>::MAX), "BitIdx<u16>(1111)"); |
1272 | | assert_eq!(format!("{:?}", BitIdx::<u32>::MAX), "BitIdx<u32>(11111)"); |
1273 | | |
1274 | | assert_eq!( |
1275 | | format!("{:?}", BitIdx::<u8>::new(8).unwrap_err()), |
1276 | | "BitIdxError<u8>(8)" |
1277 | | ); |
1278 | | assert_eq!( |
1279 | | format!("{:?}", BitIdx::<u16>::new(16).unwrap_err()), |
1280 | | "BitIdxError<u16>(16)" |
1281 | | ); |
1282 | | assert_eq!( |
1283 | | format!("{:?}", BitIdx::<u32>::new(32).unwrap_err()), |
1284 | | "BitIdxError<u32>(32)" |
1285 | | ); |
1286 | | |
1287 | | assert_eq!(format!("{:?}", BitEnd::<u8>::MAX), "BitEnd<u8>(1000)"); |
1288 | | assert_eq!(format!("{:?}", BitEnd::<u16>::MAX), "BitEnd<u16>(10000)"); |
1289 | | assert_eq!(format!("{:?}", BitEnd::<u32>::MAX), "BitEnd<u32>(100000)"); |
1290 | | |
1291 | | assert_eq!(format!("{:?}", BitPos::<u8>::MAX), "BitPos<u8>(111)"); |
1292 | | assert_eq!(format!("{:?}", BitPos::<u16>::MAX), "BitPos<u16>(1111)"); |
1293 | | assert_eq!(format!("{:?}", BitPos::<u32>::MAX), "BitPos<u32>(11111)"); |
1294 | | |
1295 | | assert_eq!( |
1296 | | format!("{:?}", BitSel::<u8>::new(1).unwrap()), |
1297 | | "BitSel<u8>(00000001)", |
1298 | | ); |
1299 | | assert_eq!( |
1300 | | format!("{:?}", BitSel::<u16>::new(1).unwrap()), |
1301 | | "BitSel<u16>(0000000000000001)", |
1302 | | ); |
1303 | | assert_eq!( |
1304 | | format!("{:?}", BitSel::<u32>::new(1).unwrap()), |
1305 | | "BitSel<u32>(00000000000000000000000000000001)", |
1306 | | ); |
1307 | | |
1308 | | assert_eq!( |
1309 | | format!("{:?}", BitMask::<u8>::new(1 | 4 | 32)), |
1310 | | "BitMask<u8>(00100101)", |
1311 | | ); |
1312 | | assert_eq!( |
1313 | | format!("{:?}", BitMask::<u16>::new(1 | 4 | 32)), |
1314 | | "BitMask<u16>(0000000000100101)", |
1315 | | ); |
1316 | | assert_eq!( |
1317 | | format!("{:?}", BitMask::<u32>::new(1 | 4 | 32)), |
1318 | | "BitMask<u32>(00000000000000000000000000100101)", |
1319 | | ); |
1320 | | |
1321 | | #[cfg(target_pointer_width = "64")] |
1322 | | { |
1323 | | assert_eq!( |
1324 | | format!("{:?}", BitIdx::<u64>::MAX), |
1325 | | "BitIdx<u64>(111111)", |
1326 | | ); |
1327 | | assert_eq!( |
1328 | | format!("{:?}", BitIdx::<u64>::new(64).unwrap_err()), |
1329 | | "BitIdxError<u64>(64)", |
1330 | | ); |
1331 | | assert_eq!( |
1332 | | format!("{:?}", BitEnd::<u64>::MAX), |
1333 | | "BitEnd<u64>(1000000)", |
1334 | | ); |
1335 | | assert_eq!( |
1336 | | format!("{:?}", BitPos::<u64>::MAX), |
1337 | | "BitPos<u64>(111111)", |
1338 | | ); |
1339 | | assert_eq!( |
1340 | | format!("{:?}", BitSel::<u64>::new(1).unwrap()), |
1341 | | "BitSel<u64>(0000000000000000000000000000000000000000000000000000000000000001)", |
1342 | | ); |
1343 | | assert_eq!( |
1344 | | format!("{:?}", BitMask::<u64>::new(1 | 4 | 32)), |
1345 | | "BitMask<u64>(0000000000000000000000000000000000000000000000000000000000100101)", |
1346 | | ); |
1347 | | } |
1348 | | } |
1349 | | } |