/rust/registry/src/index.crates.io-1949cf8c6b5b557f/bitvec-1.0.1/src/slice/ops.rs
Line | Count | Source |
1 | | #![doc = include_str!("../../doc/slice/ops.md")] |
2 | | |
3 | | use core::ops::{ |
4 | | BitAnd, |
5 | | BitAndAssign, |
6 | | BitOr, |
7 | | BitOrAssign, |
8 | | BitXor, |
9 | | BitXorAssign, |
10 | | Index, |
11 | | IndexMut, |
12 | | Not, |
13 | | Range, |
14 | | RangeFrom, |
15 | | RangeFull, |
16 | | RangeInclusive, |
17 | | RangeTo, |
18 | | RangeToInclusive, |
19 | | }; |
20 | | |
21 | | use super::{ |
22 | | BitSlice, |
23 | | BitSliceIndex, |
24 | | }; |
25 | | use crate::{ |
26 | | domain::Domain, |
27 | | order::{ |
28 | | BitOrder, |
29 | | Lsb0, |
30 | | Msb0, |
31 | | }, |
32 | | store::BitStore, |
33 | | }; |
34 | | |
impl<T1, T2, O1, O2> BitAndAssign<&BitSlice<T2, O2>> for BitSlice<T1, O1>
where
	T1: BitStore,
	T2: BitStore,
	O1: BitOrder,
	O2: BitOrder,
{
	#[inline]
	#[doc = include_str!("../../doc/slice/bitop_assign.md")]
	fn bitand_assign(&mut self, rhs: &BitSlice<T2, O2>) {
		// Fast path: when both slices can be proven (at runtime, via type
		// coercion) to share the storage type `T1` and the concrete `Lsb0`
		// ordering, dispatch to the specialized batch routine instead of
		// walking individual bits.
		if let (Some(this), Some(that)) =
			(self.coerce_mut::<T1, Lsb0>(), rhs.coerce::<T1, Lsb0>())
		{
			return this.sp_bitop_assign(that, BitAnd::bitand, BitAnd::bitand);
		}
		// Same fast path for the `Msb0` ordering.
		if let (Some(this), Some(that)) =
			(self.coerce_mut::<T1, Msb0>(), rhs.coerce::<T1, Msb0>())
		{
			return this.sp_bitop_assign(that, BitAnd::bitand, BitAnd::bitand);
		}
		// Slow path: AND each overlapping pair of bits through raw
		// bit-pointers. `zip` stops at the shorter of the two slices.
		for (this, that) in self.as_mut_bitptr_range().zip(rhs.as_bitptr_range())
		{
			// SAFETY: both pointers come from the live slices' own bit-pointer
			// ranges, and `zip` keeps them within bounds.
			unsafe {
				this.write(this.read() & that.read());
			}
		}
		// `&` treats bits absent from `rhs` as zero (`x & 0 == 0`), so any
		// bits of `self` past the end of `rhs` must be cleared. Note that
		// `|=` and `^=` do NOT need this step, since `x | 0 == x ^ 0 == x`.
		if let Some(rem) = self.get_mut(rhs.len() ..) {
			rem.fill(false);
		}
	}
}
66 | | |
impl<T1, T2, O1, O2> BitOrAssign<&BitSlice<T2, O2>> for BitSlice<T1, O1>
where
	T1: BitStore,
	T2: BitStore,
	O1: BitOrder,
	O2: BitOrder,
{
	#[inline]
	#[doc = include_str!("../../doc/slice/bitop_assign.md")]
	fn bitor_assign(&mut self, rhs: &BitSlice<T2, O2>) {
		// Fast path: when both slices can be coerced to storage type `T1`
		// with the concrete `Lsb0` ordering, use the specialized batch
		// routine rather than a bit-by-bit loop.
		if let (Some(this), Some(that)) =
			(self.coerce_mut::<T1, Lsb0>(), rhs.coerce::<T1, Lsb0>())
		{
			return this.sp_bitop_assign(that, BitOr::bitor, BitOr::bitor);
		}
		// Same fast path for the `Msb0` ordering.
		if let (Some(this), Some(that)) =
			(self.coerce_mut::<T1, Msb0>(), rhs.coerce::<T1, Msb0>())
		{
			return this.sp_bitop_assign(that, BitOr::bitor, BitOr::bitor);
		}
		// Slow path: OR each overlapping pair of bits through raw
		// bit-pointers. `zip` stops at the shorter slice; bits of `self`
		// past `rhs.len()` are correctly left untouched (`x | 0 == x`),
		// so no tail fix-up is needed (unlike `&=`).
		for (this, that) in self.as_mut_bitptr_range().zip(rhs.as_bitptr_range())
		{
			// SAFETY: both pointers come from the live slices' own bit-pointer
			// ranges, and `zip` keeps them within bounds.
			unsafe {
				this.write(this.read() | that.read());
			}
		}
	}
}
95 | | |
impl<T1, T2, O1, O2> BitXorAssign<&BitSlice<T2, O2>> for BitSlice<T1, O1>
where
	T1: BitStore,
	T2: BitStore,
	O1: BitOrder,
	O2: BitOrder,
{
	#[inline]
	#[doc = include_str!("../../doc/slice/bitop_assign.md")]
	fn bitxor_assign(&mut self, rhs: &BitSlice<T2, O2>) {
		// Fast path: when both slices can be coerced to storage type `T1`
		// with the concrete `Lsb0` ordering, use the specialized batch
		// routine rather than a bit-by-bit loop.
		if let (Some(this), Some(that)) =
			(self.coerce_mut::<T1, Lsb0>(), rhs.coerce::<T1, Lsb0>())
		{
			return this.sp_bitop_assign(that, BitXor::bitxor, BitXor::bitxor);
		}
		// Same fast path for the `Msb0` ordering.
		if let (Some(this), Some(that)) =
			(self.coerce_mut::<T1, Msb0>(), rhs.coerce::<T1, Msb0>())
		{
			return this.sp_bitop_assign(that, BitXor::bitxor, BitXor::bitxor);
		}
		// Slow path: XOR each overlapping pair of bits through raw
		// bit-pointers. `zip` stops at the shorter slice; bits of `self`
		// past `rhs.len()` are correctly left untouched (`x ^ 0 == x`),
		// so no tail fix-up is needed (unlike `&=`).
		for (this, that) in self.as_mut_bitptr_range().zip(rhs.as_bitptr_range())
		{
			// SAFETY: both pointers come from the live slices' own bit-pointer
			// ranges, and `zip` keeps them within bounds.
			unsafe {
				this.write(this.read() ^ that.read());
			}
		}
	}
}
124 | | |
125 | | impl<T, O> Index<usize> for BitSlice<T, O> |
126 | | where |
127 | | T: BitStore, |
128 | | O: BitOrder, |
129 | | { |
130 | | type Output = bool; |
131 | | |
132 | | /// Looks up a single bit by its semantic index. |
133 | | /// |
134 | | /// ## Examples |
135 | | /// |
136 | | /// ```rust |
137 | | /// use bitvec::prelude::*; |
138 | | /// |
139 | | /// let bits = bits![u8, Msb0; 0, 1, 0]; |
140 | | /// assert!(!bits[0]); // -----^ | | |
141 | | /// assert!( bits[1]); // --------^ | |
142 | | /// assert!(!bits[2]); // -----------^ |
143 | | /// ``` |
144 | | /// |
145 | | /// If the index is greater than or equal to the length, indexing will |
146 | | /// panic. |
147 | | /// |
148 | | /// The below test will panic when accessing index 1, as only index 0 is |
149 | | /// valid. |
150 | | /// |
151 | | /// ```rust,should_panic |
152 | | /// use bitvec::prelude::*; |
153 | | /// |
154 | | /// let bits = bits![0, ]; |
155 | | /// bits[1]; // --------^ |
156 | | /// ``` |
157 | | #[inline] |
158 | 38.5k | fn index(&self, index: usize) -> &Self::Output { |
159 | 38.5k | match *index.index(self) { |
160 | 1.22k | true => &true, |
161 | 37.3k | false => &false, |
162 | | } |
163 | 38.5k | } <bitvec::slice::BitSlice<u8, bitvec::order::Msb0> as core::ops::index::Index<usize>>::index Line | Count | Source | 158 | 38.5k | fn index(&self, index: usize) -> &Self::Output { | 159 | 38.5k | match *index.index(self) { | 160 | 1.22k | true => &true, | 161 | 37.3k | false => &false, | 162 | | } | 163 | 38.5k | } |
Unexecuted instantiation: <bitvec::slice::BitSlice<_, _> as core::ops::index::Index<usize>>::index |
164 | | } |
165 | | |
/// Implements `Index` and `IndexMut` with the given type.
///
/// Each listed range type delegates to its `BitSliceIndex` implementation,
/// producing a `BitSlice` sub-slice (not a single `bool`).
macro_rules! index {
	($($t:ty),+ $(,)?) => { $(
		impl<T, O> Index<$t> for BitSlice<T, O>
		where
			O: BitOrder,
			T: BitStore,
		{
			type Output = Self;

			#[inline]
			#[track_caller]
			fn index(&self, index: $t) -> &Self::Output {
				// Delegates to `BitSliceIndex::index`; an out-of-bounds range
				// panics, and `#[track_caller]` reports the caller's location.
				index.index(self)
			}
		}

		impl<T, O> IndexMut<$t> for BitSlice<T, O>
		where
			O: BitOrder,
			T: BitStore,
		{
			#[inline]
			#[track_caller]
			fn index_mut(&mut self, index: $t) -> &mut Self::Output {
				// Mutable counterpart of the `Index` impl above.
				index.index_mut(self)
			}
		}
	)+ };
}

// Instantiate range indexing for every standard range type.
index! {
	Range<usize>,
	RangeFrom<usize>,
	RangeFull,
	RangeInclusive<usize>,
	RangeTo<usize>,
	RangeToInclusive<usize>,
}
205 | | |
/** Inverts each bit in the bit-slice.

Unlike the `&`, `|`, and `^` operators, this implementation is guaranteed to
update each memory element only once, and is not required to traverse every live
bit in the underlying region.
**/
impl<'a, T, O> Not for &'a mut BitSlice<T, O>
where
	T: BitStore,
	O: BitOrder,
{
	type Output = Self;

	#[inline]
	fn not(self) -> Self::Output {
		// Operate on the slice's element-wise memory-domain view rather than
		// bit-by-bit, so each underlying element is touched exactly once.
		match self.domain_mut() {
			// The slice is confined to a single partially-occupied element:
			// invert it through the partial-element handle.
			Domain::Enclave(mut elem) => {
				elem.invert();
			},
			// The slice spans multiple elements: `head`/`tail` are the
			// optional partially-occupied edge elements, `body` the
			// fully-occupied interior.
			Domain::Region { head, body, tail } => {
				if let Some(mut elem) = head {
					elem.invert();
				}
				// Fully-occupied elements can be inverted with whole-element
				// loads and stores.
				for elem in body {
					elem.store_value(!elem.load_value());
				}
				if let Some(mut elem) = tail {
					elem.invert();
				}
			},
		}
		// Return the (now inverted) slice handle for chaining.
		self
	}
}