// /rust/registry/src/index.crates.io-6f17d22bba15001f/bitvec-1.0.1/src/vec/iter.rs
#![doc = include_str!("../../doc/vec/iter.md")]

use alloc::vec::Vec;
use core::{
	fmt::{
		self,
		Debug,
		Formatter,
	},
	iter::{
		FromIterator,
		FusedIterator,
	},
	mem,
	ops::Range,
};

use tap::{
	Pipe,
	Tap,
	TapOptional,
};
use wyz::{
	comu::{
		Mut,
		Mutability,
	},
	range::RangeExt,
};

use super::BitVec;
use crate::{
	boxed::BitBox,
	mem::bits_of,
	order::BitOrder,
	ptr::{
		BitPtrRange,
		BitRef,
	},
	slice::BitSlice,
	store::BitStore,
	view::BitView,
};

#[doc = include_str!("../../doc/vec/iter/Extend_bool.md")]
impl<T, O> Extend<bool> for BitVec<T, O>
where
	T: BitStore,
	O: BitOrder,
{
	#[inline]
	fn extend<I>(&mut self, iter: I)
	where I: IntoIterator<Item = bool> {
		let mut iter = iter.into_iter();
		#[allow(irrefutable_let_patterns)] // Removing the `if` is unstable.
		if let (_, Some(n)) | (n, None) = iter.size_hint() {
			self.reserve(n);
			let len = self.len();
			// If the reservation did not panic, then this will not overflow.
			let new_len = len.wrapping_add(n);
			let new = unsafe { self.get_unchecked_mut(len .. new_len) };

			let pulled = new
				.as_mut_bitptr_range()
				.zip(iter.by_ref())
				.map(|(ptr, bit)| unsafe {
					ptr.write(bit);
				})
				.count();
			unsafe {
				self.set_len(len + pulled);
			}
		}

		// If the iterator is well-behaved and finite, this loop never runs;
		// if the iterator is infinite, it will eventually crash on allocation
		// failure.
		iter.for_each(|bit| self.push(bit));
	}
}
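// A minimal usage sketch for bit-wise `Extend` (editorial illustration, not
// part of the original source; assumes the public `bitvec` prelude):
//
//     use bitvec::prelude::*;
//     let mut bv = bitvec![u8, Msb0; 0, 1];
//     bv.extend([true, false, true]);
//     assert_eq!(bv, bitvec![u8, Msb0; 0, 1, 1, 0, 1]);
//
// The fast path above writes through raw bit-pointers for as many bits as the
// size-hint promises; any bits beyond the hint fall back to `push`.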

#[cfg(not(tarpaulin_include))]
impl<'a, T, O> Extend<&'a bool> for BitVec<T, O>
where
	T: BitStore,
	O: BitOrder,
{
	#[inline]
	fn extend<I>(&mut self, iter: I)
	where I: IntoIterator<Item = &'a bool> {
		self.extend(iter.into_iter().copied());
	}
}

#[cfg(not(tarpaulin_include))]
#[doc = include_str!("../../doc/vec/iter/Extend_BitRef.md")]
impl<'a, M, T1, T2, O1, O2> Extend<BitRef<'a, M, T2, O2>> for BitVec<T1, O1>
where
	M: Mutability,
	T1: BitStore,
	T2: BitStore,
	O1: BitOrder,
	O2: BitOrder,
{
	#[inline]
	fn extend<I>(&mut self, iter: I)
	where I: IntoIterator<Item = BitRef<'a, M, T2, O2>> {
		self.extend(iter.into_iter().map(|bit| *bit));
	}
}

impl<T, O> Extend<T> for BitVec<T, O>
where
	T: BitStore,
	O: BitOrder,
{
	#[inline]
	fn extend<I>(&mut self, iter: I)
	where I: IntoIterator<Item = T> {
		let iter = iter.into_iter();
		#[allow(irrefutable_let_patterns)]
		if let (_, Some(n)) | (n, None) = iter.size_hint() {
			self.reserve(n.checked_mul(bits_of::<T::Mem>()).unwrap());
		}
		iter.for_each(|elem| self.extend_from_bitslice(elem.view_bits::<O>()));
	}
}
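// Illustrative sketch of element-wise extension (editorial addition; assumes
// the `bitvec` prelude): each yielded element contributes all of its bits.
//
//     use bitvec::prelude::*;
//     let mut bv = BitVec::<u8, Lsb0>::new();
//     bv.extend([0x0Fu8, 0xF0u8]);
//     assert_eq!(bv.len(), 16);
//     assert!(bv[0]); // Lsb0: the least significant bit of 0x0F comes first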

#[cfg(not(tarpaulin_include))]
impl<'a, T, O> Extend<&'a T> for BitVec<T, O>
where
	T: BitStore,
	O: BitOrder,
{
	#[inline]
	fn extend<I>(&mut self, iter: I)
	where I: IntoIterator<Item = &'a T> {
		self.extend(
			iter.into_iter()
				.map(BitStore::load_value)
				.map(<T as BitStore>::new),
		);
	}
}

#[cfg(not(tarpaulin_include))]
#[doc = include_str!("../../doc/vec/iter/FromIterator_bool.md")]
impl<T, O> FromIterator<bool> for BitVec<T, O>
where
	T: BitStore,
	O: BitOrder,
{
	#[inline]
	fn from_iter<I>(iter: I) -> Self
	where I: IntoIterator<Item = bool> {
		Self::new().tap_mut(|bv| bv.extend(iter))
	}
}
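// Collecting a stream of `bool`s is the most common construction path
// (editorial sketch; assumes the `bitvec` prelude and the default type
// parameters `<usize, Lsb0>`):
//
//     use bitvec::prelude::*;
//     let bv: BitVec = [true, false, true].into_iter().collect();
//     assert_eq!(bv, bitvec![1, 0, 1]);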

#[cfg(not(tarpaulin_include))]
impl<'a, T, O> FromIterator<&'a bool> for BitVec<T, O>
where
	T: BitStore,
	O: BitOrder,
{
	#[inline]
	fn from_iter<I>(iter: I) -> Self
	where I: IntoIterator<Item = &'a bool> {
		iter.into_iter().copied().collect::<Self>()
	}
}

#[cfg(not(tarpaulin_include))]
#[doc = include_str!("../../doc/vec/iter/FromIterator_BitRef.md")]
impl<'a, M, T1, T2, O1, O2> FromIterator<BitRef<'a, M, T2, O2>>
for BitVec<T1, O1>
where
	M: Mutability,
	T1: BitStore,
	T2: BitStore,
	O1: BitOrder,
	O2: BitOrder,
{
	#[inline]
	fn from_iter<I>(iter: I) -> Self
	where I: IntoIterator<Item = BitRef<'a, M, T2, O2>> {
		iter.into_iter().map(|br| *br).pipe(Self::from_iter)
	}
}

#[cfg(not(tarpaulin_include))]
impl<T, O> FromIterator<T> for BitVec<T, O>
where
	T: BitStore,
	O: BitOrder,
{
	#[inline]
	fn from_iter<I>(iter: I) -> Self
	where I: IntoIterator<Item = T> {
		iter.into_iter().collect::<Vec<T>>().pipe(Self::from_vec)
	}
}
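// Element-wise collection routes through `Vec<T>` and `from_vec`, so every
// collected element contributes all of its bits (editorial sketch; `bitvec`
// prelude assumed):
//
//     use bitvec::prelude::*;
//     let bv: BitVec<u8, Msb0> = vec![0x80u8, 0x01].into_iter().collect();
//     assert_eq!(bv.len(), 16);
//     assert!(bv[0]); // Msb0: the most significant bit of 0x80 comes first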

#[cfg(not(tarpaulin_include))]
impl<'a, T, O> FromIterator<&'a T> for BitVec<T, O>
where
	T: BitStore,
	O: BitOrder,
{
	#[inline]
	fn from_iter<I>(iter: I) -> Self
	where I: IntoIterator<Item = &'a T> {
		iter.into_iter()
			.map(<T as BitStore>::load_value)
			.map(<T as BitStore>::new)
			.collect::<Self>()
	}
}

#[doc = include_str!("../../doc/vec/iter/IntoIterator.md")]
impl<T, O> IntoIterator for BitVec<T, O>
where
	T: BitStore,
	O: BitOrder,
{
	type IntoIter = <BitBox<T, O> as IntoIterator>::IntoIter;
	type Item = <BitBox<T, O> as IntoIterator>::Item;

	#[inline]
	fn into_iter(self) -> Self::IntoIter {
		self.into_boxed_bitslice().into_iter()
	}
}
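// By-value iteration forwards to `BitBox`'s iterator and yields plain `bool`s
// (editorial sketch; `bitvec` prelude assumed):
//
//     use bitvec::prelude::*;
//     let bv = bitvec![0, 1, 0, 1];
//     let bools: Vec<bool> = bv.into_iter().collect();
//     assert_eq!(bools, [false, true, false, true]);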

#[cfg(not(tarpaulin_include))]
/// [Original](https://doc.rust-lang.org/alloc/vec/struct.Vec.html#impl-IntoIterator-1)
impl<'a, T, O> IntoIterator for &'a BitVec<T, O>
where
	O: BitOrder,
	T: 'a + BitStore,
{
	type IntoIter = <&'a BitSlice<T, O> as IntoIterator>::IntoIter;
	type Item = <&'a BitSlice<T, O> as IntoIterator>::Item;

	#[inline]
	fn into_iter(self) -> Self::IntoIter {
		self.as_bitslice().iter()
	}
}

#[cfg(not(tarpaulin_include))]
/// [Original](https://doc.rust-lang.org/alloc/vec/struct.Vec.html#impl-IntoIterator-2)
impl<'a, T, O> IntoIterator for &'a mut BitVec<T, O>
where
	O: BitOrder,
	T: 'a + BitStore,
{
	type IntoIter = <&'a mut BitSlice<T, O> as IntoIterator>::IntoIter;
	type Item = <&'a mut BitSlice<T, O> as IntoIterator>::Item;

	#[inline]
	fn into_iter(self) -> Self::IntoIter {
		self.as_mut_bitslice().iter_mut()
	}
}

#[doc = include_str!("../../doc/vec/iter/Drain.md")]
pub struct Drain<'a, T, O>
where
	O: BitOrder,
	T: 'a + BitStore,
{
	/// Exclusive reference to the handle that created the drain.
	source: &'a mut BitVec<T, O>,
	/// The range of the source bit-vector’s buffer that is being drained.
	drain: BitPtrRange<Mut, T, O>,
	/// The range of the source bit-vector’s preserved back section. This runs
	/// from the first bit after the `.drain` region to the first bit after
	/// the end of the original bit-vector.
	tail: Range<usize>,
}
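// `Drain` is produced by `BitVec::drain`: it yields the removed bits and
// defers buffer repair to its destructor (editorial sketch; `bitvec` prelude
// assumed):
//
//     use bitvec::prelude::*;
//     let mut bv = bitvec![0, 1, 1, 0];
//     let drained: BitVec = bv.drain(1 .. 3).collect();
//     assert_eq!(drained, bitvec![1, 1]);
//     assert_eq!(bv, bitvec![0, 0]); // tail moved down when the drain dropped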

impl<'a, T, O> Drain<'a, T, O>
where
	O: BitOrder,
	T: 'a + BitStore,
{
	/// Produces a new drain over a region of a bit-vector.
	pub(super) fn new<R>(source: &'a mut BitVec<T, O>, range: R) -> Self
	where R: RangeExt<usize> {
		let len = source.len();
		let region = range.normalize(None, len);
		assert!(
			region.end <= len,
			"drains cannot extend past the length of their source bit-vector",
		);

		// The `.tail` region is everything in the bit-vector after the drain.
		let tail = region.end .. len;
		let drain = unsafe {
			// Artificially truncate the source bit-vector to before the drain
			// region. This is restored in the destructor.
			source.set_len_unchecked(region.start);
			let base = source.as_mut_bitptr();
			BitPtrRange {
				start: base.add(region.start),
				end: base.add(region.end),
			}
		};

		Self {
			source,
			drain,
			tail,
		}
	}

	/// Views the unyielded bits remaining in the drain.
	///
	/// ## Original
	///
	/// [`Drain::as_slice`](alloc::vec::Drain::as_slice)
	#[inline]
	#[cfg(not(tarpaulin_include))]
	pub fn as_bitslice(&self) -> &'a BitSlice<T, O> {
		unsafe { self.drain.clone().into_bitspan().into_bitslice_ref() }
	}

	#[inline]
	#[cfg(not(tarpaulin_include))]
	#[deprecated = "use `.as_bitslice()` instead"]
	#[allow(missing_docs, clippy::missing_docs_in_private_items)]
	pub fn as_slice(&self) -> &'a BitSlice<T, O> {
		self.as_bitslice()
	}

	/// Attempts to fill the `drain` region with the contents of another
	/// iterator.
	///
	/// The source bit-vector is extended to include each bit that the
	/// replacement iterator provides, but is *not yet* extended to include
	/// the `tail` region, even if the replacement iterator completely fills
	/// the `drain` region. That work occurs in the destructor.
	///
	/// This is only used by [`Splice`].
	///
	/// [`Splice`]: crate::vec::Splice
	#[inline]
	fn fill(&mut self, iter: &mut impl Iterator<Item = bool>) -> FillStatus {
		let bv = &mut *self.source;
		let mut len = bv.len();
		let span =
			unsafe { bv.as_mut_bitptr().add(len).range(self.tail.start - len) };

		let mut out = FillStatus::FullSpan;
		for ptr in span {
			if let Some(bit) = iter.next() {
				unsafe {
					ptr.write(bit);
				}
				len += 1;
			}
			else {
				out = FillStatus::EmptyInput;
				break;
			}
		}
		unsafe {
			bv.set_len_unchecked(len);
		}
		out
	}

	/// Reserves space for `additional` more bits at the end of the `drain`
	/// region by moving the `tail` region upwards in memory.
	///
	/// This has the same effects as [`BitVec::resize`], except that the bits
	/// are inserted between `drain` and `tail` rather than at the end.
	///
	/// This does not modify the drain iteration cursor, including its
	/// endpoint. The newly inserted bits are not available for iteration.
	///
	/// This is only used by [`Splice`], which may insert more bits than the
	/// drain removed.
	///
	/// [`BitVec::resize`]: crate::vec::BitVec::resize
	/// [`Splice`]: crate::vec::Splice
	unsafe fn move_tail(&mut self, additional: usize) {
		if additional == 0 {
			return;
		}

		let bv = &mut *self.source;
		let tail_len = self.tail.len();

		let full_len = additional + tail_len;
		bv.reserve(full_len);
		let new_tail_start = additional + self.tail.start;
		let orig_tail = mem::replace(
			&mut self.tail,
			new_tail_start .. new_tail_start + tail_len,
		);
		let len = bv.len();
		bv.set_len_unchecked(full_len);
		bv.copy_within_unchecked(orig_tail, new_tail_start);
		bv.set_len_unchecked(len);
	}
}
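// Buffer layout while a `Drain` is live (editorial diagram of the fields
// above; positions are bit indices into the source buffer):
//
//     [ kept prefix ][ drain region ][ tail region ][ spare capacity ]
//     0              ^ source.len()  ^ tail.start   ^ tail.end
//
// The source length is artificially truncated to the start of the drain
// region; `Drop` (below) copies the tail down to the current end of the kept
// section and restores the full length.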

/// [Original](https://doc.rust-lang.org/alloc/vec/struct.Drain.html#impl-AsRef%3C%5BT%5D%3E)
#[cfg(not(tarpaulin_include))]
impl<T, O> AsRef<BitSlice<T, O>> for Drain<'_, T, O>
where
	T: BitStore,
	O: BitOrder,
{
	#[inline]
	fn as_ref(&self) -> &BitSlice<T, O> {
		self.as_bitslice()
	}
}

#[cfg(not(tarpaulin_include))]
impl<T, O> Debug for Drain<'_, T, O>
where
	T: BitStore,
	O: BitOrder,
{
	#[inline]
	fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {
		fmt.debug_tuple("Drain").field(&self.as_bitslice()).finish()
	}
}

/// [Original](https://doc.rust-lang.org/alloc/vec/struct.Drain.html#impl-Iterator)
#[cfg(not(tarpaulin_include))]
impl<T, O> Iterator for Drain<'_, T, O>
where
	T: BitStore,
	O: BitOrder,
{
	type Item = bool;

	easy_iter!();

	#[inline]
	fn next(&mut self) -> Option<Self::Item> {
		self.drain.next().map(|bp| unsafe { bp.read() })
	}

	#[inline]
	fn nth(&mut self, n: usize) -> Option<Self::Item> {
		self.drain.nth(n).map(|bp| unsafe { bp.read() })
	}
}

/// [Original](https://doc.rust-lang.org/alloc/vec/struct.Drain.html#impl-DoubleEndedIterator)
#[cfg(not(tarpaulin_include))]
impl<T, O> DoubleEndedIterator for Drain<'_, T, O>
where
	T: BitStore,
	O: BitOrder,
{
	#[inline]
	fn next_back(&mut self) -> Option<Self::Item> {
		self.drain.next_back().map(|bp| unsafe { bp.read() })
	}

	#[inline]
	fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
		self.drain.nth_back(n).map(|bp| unsafe { bp.read() })
	}
}

/// [Original](https://doc.rust-lang.org/alloc/vec/struct.Drain.html#impl-ExactSizeIterator)
#[cfg(not(tarpaulin_include))]
impl<T, O> ExactSizeIterator for Drain<'_, T, O>
where
	T: BitStore,
	O: BitOrder,
{
	#[inline]
	fn len(&self) -> usize {
		self.drain.len()
	}
}

/// [Original](https://doc.rust-lang.org/alloc/vec/struct.Drain.html#impl-FusedIterator)
impl<T, O> FusedIterator for Drain<'_, T, O>
where
	T: BitStore,
	O: BitOrder,
{
}

/// [Original](https://doc.rust-lang.org/alloc/vec/struct.Drain.html#impl-Send)
// #[allow(clippy::non_send_fields_in_send_ty)]
unsafe impl<T, O> Send for Drain<'_, T, O>
where
	T: BitStore,
	O: BitOrder,
	for<'a> &'a mut BitSlice<T, O>: Send,
{
}

/// [Original](https://doc.rust-lang.org/alloc/vec/struct.Drain.html#impl-Sync)
unsafe impl<T, O> Sync for Drain<'_, T, O>
where
	T: BitStore,
	O: BitOrder,
	BitSlice<T, O>: Sync,
{
}

/// [Original](https://doc.rust-lang.org/alloc/vec/struct.Drain.html#impl-Drop)
impl<T, O> Drop for Drain<'_, T, O>
where
	T: BitStore,
	O: BitOrder,
{
	#[inline]
	fn drop(&mut self) {
		let tail = mem::take(&mut self.tail);
		let tail_len = tail.len();
		if tail_len == 0 {
			return;
		}

		let bv = &mut *self.source;
		let old_len = bv.len();
		unsafe {
			// Temporarily include the tail in the length so that the copy
			// source is in bounds, move the tail down to the end of the kept
			// region, then set the final length.
			bv.set_len_unchecked(tail.end);
			bv.copy_within_unchecked(tail, old_len);
			bv.set_len_unchecked(old_len + tail_len);
		}
	}
}

#[repr(u8)]
#[doc = include_str!("../../doc/vec/iter/FillStatus.md")]
#[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
enum FillStatus {
	/// The drain span is completely filled.
	FullSpan = 0,
	/// The replacement source is completely exhausted.
	EmptyInput = 1,
}

#[derive(Debug)]
#[doc = include_str!("../../doc/vec/iter/Splice.md")]
pub struct Splice<'a, T, O, I>
where
	O: BitOrder,
	T: 'a + BitStore,
	I: Iterator<Item = bool>,
{
	/// The region of the bit-vector being drained.
	drain: Drain<'a, T, O>,
	/// The bitstream that replaces drained bits.
	splice: I,
}

impl<'a, T, O, I> Splice<'a, T, O, I>
where
	O: BitOrder,
	T: 'a + BitStore,
	I: Iterator<Item = bool>,
{
	/// Constructs a splice out of a drain and a replacement source.
	pub(super) fn new(
		drain: Drain<'a, T, O>,
		splice: impl IntoIterator<IntoIter = I, Item = bool>,
	) -> Self {
		let splice = splice.into_iter();
		Self { drain, splice }
	}
}
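// `Splice` is produced by `BitVec::splice`: it drains a region while feeding
// replacement bits into the vacated slots (editorial sketch; `bitvec` prelude
// assumed):
//
//     use bitvec::prelude::*;
//     let mut bv = bitvec![0, 1, 1, 0];
//     let removed: BitVec = bv.splice(1 .. 3, [true; 3]).collect();
//     assert_eq!(removed, bitvec![1, 1]);
//     assert_eq!(bv, bitvec![0, 1, 1, 1, 0]);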

impl<T, O, I> Iterator for Splice<'_, T, O, I>
where
	T: BitStore,
	O: BitOrder,
	I: Iterator<Item = bool>,
{
	type Item = bool;

	easy_iter!();

	#[inline]
	fn next(&mut self) -> Option<Self::Item> {
		self.drain.next().tap_some(|_| unsafe {
			// Each drained bit vacates a slot behind the drain cursor; write
			// the next replacement bit into it, if one exists.
			if let Some(bit) = self.splice.next() {
				let bv = &mut *self.drain.source;
				let len = bv.len();
				bv.set_len_unchecked(len + 1);
				bv.set_unchecked(len, bit);
			}
		})
	}
}

#[cfg(not(tarpaulin_include))]
impl<T, O, I> DoubleEndedIterator for Splice<'_, T, O, I>
where
	T: BitStore,
	O: BitOrder,
	I: Iterator<Item = bool>,
{
	#[inline]
	fn next_back(&mut self) -> Option<Self::Item> {
		self.drain.next_back()
	}

	#[inline]
	fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
		self.drain.nth_back(n)
	}
}

#[cfg(not(tarpaulin_include))]
impl<T, O, I> ExactSizeIterator for Splice<'_, T, O, I>
where
	T: BitStore,
	O: BitOrder,
	I: Iterator<Item = bool>,
{
	#[inline]
	fn len(&self) -> usize {
		self.drain.len()
	}
}

impl<T, O, I> FusedIterator for Splice<'_, T, O, I>
where
	T: BitStore,
	O: BitOrder,
	I: Iterator<Item = bool>,
{
}

/// [Original](https://doc.rust-lang.org/alloc/vec/struct.Splice.html#impl-Drop)
impl<T, O, I> Drop for Splice<'_, T, O, I>
where
	T: BitStore,
	O: BitOrder,
	I: Iterator<Item = bool>,
{
	#[inline]
	fn drop(&mut self) {
		let tail = self.drain.tail.clone();
		let tail_len = tail.len();
		let bv = &mut *self.drain.source;

		if tail_len == 0 {
			bv.extend(self.splice.by_ref());
			return;
		}

		if let FillStatus::EmptyInput = self.drain.fill(&mut self.splice) {
			return;
		}

		let len = match self.splice.size_hint() {
			(n, None) | (_, Some(n)) => n,
		};

		unsafe {
			self.drain.move_tail(len);
		}
		if let FillStatus::EmptyInput = self.drain.fill(&mut self.splice) {
			return;
		}

		/* If the `.splice` *still* has bits to provide, then its
		 * `.size_hint()` is untrustworthy. Collect the `.splice` into a
		 * bit-vector, then insert the bit-vector into the spliced region.
		 */
		let mut collected =
			self.splice.by_ref().collect::<BitVec<T, O>>().into_iter();
		let len = collected.len();
		if len > 0 {
			unsafe {
				self.drain.move_tail(len);
			}
			let filled = self.drain.fill(collected.by_ref());
			// The tail was moved by exactly `collected.len()` bits, so the
			// fill must complete the span and exhaust the input together.
			debug_assert_eq!(filled, FillStatus::FullSpan);
			debug_assert_eq!(collected.len(), 0);
		}
	}
}