/rust/registry/src/index.crates.io-1949cf8c6b5b557f/bendy-0.3.3/src/decoding/decoder.rs
Line | Count | Source |
1 | | use alloc::format; |
2 | | use core::str; |
3 | | |
4 | | use crate::{ |
5 | | decoding::{Error, Object}, |
6 | | state_tracker::{StateTracker, StructureError, Token}, |
7 | | }; |
8 | | |
9 | | /// A bencode decoder |
10 | | /// |
11 | | /// This can be used to either get a stream of tokens (using the [`Decoder::tokens()`] method) or to |
12 | | /// read a complete object at a time (using the [`Decoder::next_object()`] method).
13 | | #[derive(Debug)] |
14 | | pub struct Decoder<'a> { |
15 | | source: &'a [u8], |
16 | | offset: usize, |
17 | | state: StateTracker<&'a [u8], Error>, |
18 | | } |
19 | | |
20 | | impl<'ser> Decoder<'ser> { |
21 | | /// Create a new decoder from the given byte array |
22 | 243k | pub fn new(buffer: &'ser [u8]) -> Self { |
23 | 243k | Decoder { |
24 | 243k | source: buffer, |
25 | 243k | offset: 0, |
26 | 243k | state: StateTracker::new(), |
27 | 243k | } |
28 | 243k | }
29 | | |
30 | | /// Set the maximum nesting depth of the decoder. An effectively unlimited-depth decoder may be
31 | | /// created using `with_max_depth(<usize>::max_value())`, but be warned that deeply nested input
32 | | /// is then likely to exhaust memory (even when only reading raw tokens).
33 | 243k | pub fn with_max_depth(mut self, new_max_depth: usize) -> Self { |
34 | 243k | self.state.set_max_depth(new_max_depth); |
35 | 243k | self |
36 | 243k | }
37 | | |
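A minimal usage sketch for the constructor and depth limit above. The `bendy::decoding::Decoder` import path is an assumption about the crate's public re-export of this module; it is not shown in this file.

use bendy::decoding::Decoder; // assumed re-export of the `Decoder` defined above

fn main() {
    // Decode from an in-memory buffer, refusing input nested more than 16 levels deep.
    let decoder = Decoder::new(b"d3:bari1e3:fooli2ei3eee").with_max_depth(16);
    // `decoder` can now drive either the token stream or the object interface shown below.
    let _ = decoder;
}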
38 | 9.21M | fn take_byte(&mut self) -> Option<u8> { |
39 | 9.21M | if self.offset < self.source.len() { |
40 | 9.21M | let ret = Some(self.source[self.offset]); |
41 | 9.21M | self.offset += 1; |
42 | 9.21M | ret |
43 | | } else { |
44 | 0 | None |
45 | | } |
46 | 9.21M | }
47 | | |
48 | 4.08M | fn take_chunk(&mut self, count: usize) -> Option<&'ser [u8]> { |
49 | 4.08M | match self.offset.checked_add(count) { |
50 | 4.08M | Some(end_pos) if end_pos <= self.source.len() => { |
51 | 4.07M | let ret = &self.source[self.offset..end_pos]; |
52 | 4.07M | self.offset = end_pos; |
53 | 4.07M | Some(ret) |
54 | | }, |
55 | 6.48k | _ => None, |
56 | | } |
57 | 4.08M | }
58 | | |
59 | 4.31M | fn take_int(&mut self, expected_terminator: char) -> Result<&'ser str, StructureError> { |
60 | | enum State { |
61 | | Start, |
62 | | Sign, |
63 | | Zero, |
64 | | Digits, |
65 | | } |
66 | | |
67 | 4.31M | let mut curpos = self.offset; |
68 | 4.31M | let mut state = State::Start; |
69 | | |
70 | 4.31M | let mut success = false; |
71 | 39.8M | while curpos < self.source.len() { |
72 | 39.8M | let c = self.source[curpos] as char; |
73 | 39.8M | match state { |
74 | | State::Start => { |
75 | 4.31M | if c == '-' { |
76 | 647 | state = State::Sign; |
77 | 4.31M | } else if c == '0' { |
78 | 339k | state = State::Zero; |
79 | 3.97M | } else if c >= '1' && c <= '9' { |
80 | 3.97M | state = State::Digits; |
81 | 3.97M | } else { |
82 | 138 | return Err(StructureError::unexpected("'-' or '0'..'9'", c, curpos)); |
83 | | } |
84 | | }, |
85 | | State::Zero => { |
86 | 339k | if c == expected_terminator { |
87 | 338k | success = true; |
88 | 338k | break; |
89 | | } else { |
90 | 690 | return Err(StructureError::unexpected( |
91 | 690 | &format!("{:?}", expected_terminator), |
92 | 690 | c, |
93 | 690 | curpos, |
94 | 690 | )); |
95 | | } |
96 | | }, |
97 | | State::Sign => { |
98 | 635 | if c >= '1' && c <= '9' { |
99 | 621 | state = State::Digits; |
100 | 621 | } else { |
101 | 14 | return Err(StructureError::unexpected("'1'..'9'", c, curpos)); |
102 | | } |
103 | | }, |
104 | | State::Digits => { |
105 | 35.2M | if c >= '0' && c <= '9' { |
106 | 31.2M | // do nothing, this is ok |
107 | 31.2M | } else if c == expected_terminator { |
108 | 3.97M | success = true; |
109 | 3.97M | break; |
110 | | } else { |
111 | 5.37k | return Err(StructureError::unexpected( |
112 | 5.37k | &format!("{:?} or '0'..'9'", expected_terminator), |
113 | 5.37k | c, |
114 | 5.37k | curpos, |
115 | 5.37k | )); |
116 | | } |
117 | | }, |
118 | | } |
119 | 35.5M | curpos += 1; |
120 | | } |
121 | | |
122 | 4.31M | if !success { |
123 | 553 | return Err(StructureError::UnexpectedEof); |
124 | 4.30M | } |
125 | | |
126 | 4.30M | let slice = &self.source[self.offset..curpos]; |
127 | 4.30M | self.offset = curpos + 1; |
128 | 4.30M | let ival = if cfg!(debug) { |
129 | 0 | str::from_utf8(slice).expect("We've already examined every byte in the string") |
130 | | } else { |
131 | | // Avoid a second UTF-8 check here |
132 | 4.30M | unsafe { str::from_utf8_unchecked(slice) } |
133 | | }; |
134 | | |
135 | 4.30M | Ok(ival) |
136 | 4.31M | }
137 | | |
138 | 9.21M | fn raw_next_token(&mut self) -> Result<Token<'ser>, Error> { |
139 | 9.21M | let token = match self.take_byte().ok_or(StructureError::UnexpectedEof)? as char { |
140 | 2.39M | 'e' => Token::End, |
141 | 1.35M | 'l' => Token::List, |
142 | 1.12M | 'd' => Token::Dict, |
143 | 226k | 'i' => Token::Num(self.take_int('e')?), |
144 | 4.10M | c if c >= '0' && c <= '9' => { |
145 | 4.09M | self.offset -= 1; |
146 | | |
147 | 4.09M | let curpos = self.offset; |
148 | 4.09M | let ival = self.take_int(':')?; |
149 | 4.08M | let len = usize::from_str_radix(ival, 10).map_err(|_| { |
150 | 91 | StructureError::SyntaxError(format!("Invalid integer at offset {}", curpos)) |
151 | 91 | })?;
152 | 4.08M | Token::String(self.take_chunk(len).ok_or(StructureError::UnexpectedEof)?) |
153 | | }, |
154 | 17.5k | tok => { |
155 | 17.5k | return Err(Error::from(StructureError::SyntaxError(format!( |
156 | 17.5k | "Invalid token starting with {:?} at offset {}", |
157 | 17.5k | tok, |
158 | 17.5k | self.offset - 1 |
159 | 17.5k | )))); |
160 | | }, |
161 | | }; |
162 | | |
163 | 9.18M | Ok(token) |
164 | 9.21M | }
165 | | |
166 | | /// Read the next token. Returns `Ok(Some(token))` if a token was successfully read, `Ok(None)` at the end of the input, or an error if the input is invalid.
167 | 9.28M | fn next_token(&mut self) -> Result<Option<Token<'ser>>, Error> { |
168 | 9.28M | self.state.check_error()?; |
169 | | |
170 | 9.21M | if self.offset == self.source.len() { |
171 | 3.76k | self.state.observe_eof()?; |
172 | 0 | return Ok(None); |
173 | 9.21M | } |
174 | | |
175 | 9.21M | let tok_result = self.raw_next_token(); |
176 | 9.21M | let tok = self.state.latch_err(tok_result)?; |
177 | | |
178 | 9.18M | self.state.observe_token(&tok)?; |
179 | 9.17M | Ok(Some(tok)) |
180 | 9.28M | }
181 | | |
182 | | /// Iterate over the tokens in the input stream. This guarantees that the resulting stream |
183 | | /// of tokens constitutes a valid bencoded structure. |
184 | 0 | pub fn tokens(self) -> Tokens<'ser> { |
185 | 0 | Tokens(self) |
186 | 0 | }
187 | | } |
188 | | |
189 | | /// Iterator over the tokens in the input stream. This guarantees that the resulting stream |
190 | | /// of tokens constitutes a valid bencoded structure. |
191 | | pub struct Tokens<'a>(Decoder<'a>); |
192 | | |
193 | | impl<'a> Iterator for Tokens<'a> { |
194 | | type Item = Result<Token<'a>, Error>; |
195 | | |
196 | 0 | fn next(&mut self) -> Option<Self::Item> { |
197 | | // Only report an error once |
198 | 0 | if self.0.state.check_error().is_err() { |
199 | 0 | return None; |
200 | 0 | } |
201 | 0 | match self.0.next_token() { |
202 | 0 | Ok(Some(token)) => Some(Ok(token)), |
203 | 0 | Ok(None) => None, |
204 | 0 | Err(err) => Some(Err(err)), |
205 | | } |
206 | 0 | }
207 | | } |
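A hedged sketch of consuming the validated token stream via `Decoder::tokens()`. The `Token` variants are exactly those produced by `raw_next_token` above; the `bendy::decoding::{Decoder, Token}` import path is an assumption.

use bendy::decoding::{Decoder, Token}; // paths assumed

fn print_tokens(buf: &[u8]) {
    for token in Decoder::new(buf).tokens() {
        match token {
            Ok(Token::List) => println!("list start"),
            Ok(Token::Dict) => println!("dict start"),
            Ok(Token::End) => println!("end of list/dict"),
            Ok(Token::Num(n)) => println!("integer {}", n),
            Ok(Token::String(s)) => println!("string of {} bytes", s.len()),
            Err(err) => {
                // The iterator reports an error once and then yields `None`.
                println!("invalid bencode: {}", err);
                break;
            },
        }
    }
}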
208 | | |
209 | | // High level interface |
210 | | |
211 | | impl<'ser> Decoder<'ser> { |
212 | | /// Read the next object from the encoded stream |
213 | | /// |
214 | | /// If the beginning of an object was successfully read, returns `Ok(Some(object))`. |
215 | | /// At the end of the input stream, this will return `Ok(None)`; otherwise, returns |
216 | | /// `Err(some_error)`. |
217 | | /// |
218 | | /// Note that complex objects (lists and dicts) are not fully validated before being |
219 | | /// returned from this method, so you may still get an error while decoding the contents |
220 | | /// of the object |
221 | 9.28M | pub fn next_object<'obj>(&'obj mut self) -> Result<Option<Object<'obj, 'ser>>, Error> { |
222 | | use self::Token::*; |
223 | 9.28M | Ok(match self.next_token()? { |
224 | 2.39M | None | Some(End) => None, |
225 | 1.35M | Some(List) => Some(Object::List(ListDecoder::new(self))), |
226 | 1.11M | Some(Dict) => Some(Object::Dict(DictDecoder::new(self))), |
227 | 4.07M | Some(String(s)) => Some(Object::Bytes(s)), |
228 | 225k | Some(Num(s)) => Some(Object::Integer(s)), |
229 | | }) |
230 | 9.28M | }
231 | | } |
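A minimal sketch of the high-level interface: pull the next object and branch on its shape. As the doc comment above notes, list and dict contents are only validated as they are consumed. Import paths assumed as before.

use bendy::decoding::{Decoder, Error, Object}; // paths assumed

fn describe(buf: &[u8]) -> Result<(), Error> {
    let mut decoder = Decoder::new(buf);
    match decoder.next_object()? {
        Some(Object::Bytes(bytes)) => println!("byte string, {} bytes", bytes.len()),
        Some(Object::Integer(text)) => println!("integer literal {}", text),
        Some(Object::List(_list)) => println!("list (contents consumed when the handle drops)"),
        Some(Object::Dict(_dict)) => println!("dict (contents consumed when the handle drops)"),
        None => println!("end of input"),
    }
    Ok(())
}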
232 | | |
233 | | /// A dictionary read from the input stream |
234 | | #[derive(Debug)] |
235 | | pub struct DictDecoder<'obj, 'ser: 'obj> { |
236 | | decoder: &'obj mut Decoder<'ser>, |
237 | | finished: bool, |
238 | | start_point: usize, |
239 | | } |
240 | | |
241 | | /// A list read from the input stream |
242 | | #[derive(Debug)] |
243 | | pub struct ListDecoder<'obj, 'ser: 'obj> { |
244 | | decoder: &'obj mut Decoder<'ser>, |
245 | | finished: bool, |
246 | | start_point: usize, |
247 | | } |
248 | | |
249 | | impl<'obj, 'ser: 'obj> DictDecoder<'obj, 'ser> { |
250 | 1.11M | fn new(decoder: &'obj mut Decoder<'ser>) -> Self { |
251 | 1.11M | let offset = decoder.offset - 1; |
252 | 1.11M | DictDecoder { |
253 | 1.11M | decoder, |
254 | 1.11M | finished: false, |
255 | 1.11M | start_point: offset, |
256 | 1.11M | } |
257 | 1.11M | }
258 | | |
259 | | /// Parse the next key/value pair from the dictionary. Returns `Ok(None)` |
260 | | /// at the end of the dictionary |
261 | 2.42M | pub fn next_pair<'item>( |
262 | 2.42M | &'item mut self, |
263 | 2.42M | ) -> Result<Option<(&'ser [u8], Object<'item, 'ser>)>, Error> { |
264 | 2.42M | if self.finished { |
265 | 276k | return Ok(None); |
266 | 2.15M | } |
267 | | |
268 | | // We convert to a token to release the mut ref to decoder |
269 | 2.15M | let key = self.decoder.next_object()?.map(Object::into_token); |
270 | | |
271 | 1.00M | if let Some(Token::String(k)) = key { |
272 | | // This unwrap should be safe because None would produce an error here |
273 | 1.00M | let v = self.decoder.next_object()?.unwrap(); |
274 | 994k | Ok(Some((k, v))) |
275 | | } else { |
276 | | // We can't have gotten anything but a string, as anything else would be |
277 | | // a state error |
278 | 1.04M | self.finished = true; |
279 | 1.04M | Ok(None) |
280 | | } |
281 | 2.42M | }
282 | | |
283 | | /// Consume (and validate the structure of) the rest of the items from the |
284 | | /// dictionary. This method should be used to check for encoding errors if |
285 | | /// [`DictDecoder::next_pair`] is not called until it returns `Ok(None)`. |
286 | 1.11M | pub fn consume_all(&mut self) -> Result<(), Error> { |
287 | 1.19M | while self.next_pair()?.is_some() { |
288 | 78.2k | // just drop the items |
289 | 78.2k | } |
290 | 1.04M | Ok(()) |
291 | 1.11M | }
292 | | |
293 | | /// Get the raw bytes that made up this dictionary |
294 | 0 | pub fn into_raw(mut self) -> Result<&'ser [u8], Error> { |
295 | 0 | self.consume_all()?; |
296 | 0 | Ok(&self.decoder.source[self.start_point..self.decoder.offset]) |
297 | 0 | }
298 | | } |
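A sketch of walking a dictionary with `next_pair` and capturing a nested dictionary's raw bytes with `into_raw`, for example to hash it without re-encoding. The `b"info"` key is purely illustrative, and the import paths are assumed as before.

use bendy::decoding::{Decoder, Error, Object}; // paths assumed

fn raw_value_of_info(buf: &[u8]) -> Result<Option<Vec<u8>>, Error> {
    let mut decoder = Decoder::new(buf);
    if let Some(Object::Dict(mut dict)) = decoder.next_object()? {
        while let Some((key, value)) = dict.next_pair()? {
            if key == b"info" {
                if let Object::Dict(inner) = value {
                    // Consumes (and validates) the rest of `inner`, then returns its bytes.
                    return Ok(Some(inner.into_raw()?.to_vec()));
                }
            }
            // Unused list/dict values are consumed by their Drop impls at the end of each iteration.
        }
    }
    Ok(None)
}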
299 | | |
300 | | impl<'obj, 'ser: 'obj> Drop for DictDecoder<'obj, 'ser> { |
301 | 1.11M | fn drop(&mut self) { |
302 | | // we don't care about errors in drop; they'll be reported again in the parent |
303 | 1.11M | self.consume_all().ok(); |
304 | 1.11M | }
305 | | } |
306 | | |
307 | | impl<'obj, 'ser: 'obj> ListDecoder<'obj, 'ser> { |
308 | 1.35M | fn new(decoder: &'obj mut Decoder<'ser>) -> Self { |
309 | 1.35M | let offset = decoder.offset - 1; |
310 | 1.35M | ListDecoder { |
311 | 1.35M | decoder, |
312 | 1.35M | finished: false, |
313 | 1.35M | start_point: offset, |
314 | 1.35M | } |
315 | 1.35M | }
316 | | |
317 | | /// Get the next item from the list. Returns `Ok(None)` at the end of the list |
318 | 6.05M | pub fn next_object<'item>(&'item mut self) -> Result<Option<Object<'item, 'ser>>, Error> { |
319 | 6.05M | if self.finished { |
320 | 159k | return Ok(None); |
321 | 5.89M | } |
322 | | |
323 | 5.89M | let item = self.decoder.next_object()?; |
324 | 5.88M | if item.is_none() { |
325 | 1.35M | self.finished = true; |
326 | 4.53M | } |
327 | | |
328 | 5.88M | Ok(item) |
329 | 6.05M | }
330 | | |
331 | | /// Consume (and validate the structure of) the rest of the items from the |
332 | | /// list. This method should be used to check for encoding errors if |
333 | | /// [`ListDecoder::next_object`] is not called until it returns [`Ok(None)`].
334 | | /// |
335 | | /// [`Ok(None)`]: https://doc.rust-lang.org/std/result/enum.Result.html#variant.Ok
336 | 1.35M | pub fn consume_all(&mut self) -> Result<(), Error> { |
337 | 2.23M | while self.next_object()?.is_some() { |
338 | 881k | // just drop the items |
339 | 881k | } |
340 | 1.35M | Ok(()) |
341 | 1.35M | }
342 | | |
343 | | /// Get the raw bytes that made up this list |
344 | 0 | pub fn into_raw(mut self) -> Result<&'ser [u8], Error> { |
345 | 0 | self.consume_all()?; |
346 | 0 | Ok(&self.decoder.source[self.start_point..self.decoder.offset]) |
347 | 0 | }
348 | | } |
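A sketch of iterating a list with `ListDecoder::next_object`, summing integer elements. `take_int` has already validated the digit syntax, so `parse` can only fail on overflow here. Import paths assumed as before.

use bendy::decoding::{Decoder, Error, Object}; // paths assumed

fn sum_list_of_ints(buf: &[u8]) -> Result<i64, Error> {
    let mut decoder = Decoder::new(buf);
    let mut total = 0i64;
    if let Some(Object::List(mut list)) = decoder.next_object()? {
        while let Some(item) = list.next_object()? {
            if let Object::Integer(text) = item {
                // Values that overflow i64 are simply skipped in this sketch.
                total += text.parse::<i64>().unwrap_or(0);
            }
        }
    }
    Ok(total)
}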
349 | | |
350 | | impl<'obj, 'ser: 'obj> Drop for ListDecoder<'obj, 'ser> { |
351 | 1.35M | fn drop(&mut self) { |
352 | | // we don't care about errors in drop; they'll be reported again in the parent |
353 | 1.35M | self.consume_all().ok(); |
354 | 1.35M | }
355 | | } |
356 | | |
357 | | #[cfg(test)] |
358 | | mod test { |
359 | | |
360 | | #[cfg(not(feature = "std"))] |
361 | | use alloc::{vec, vec::Vec}; |
362 | | use core::iter; |
363 | | |
364 | | use regex; |
365 | | |
366 | | use super::*; |
367 | | |
368 | | static SIMPLE_MSG: &'static [u8] = b"d3:bari1e3:fooli2ei3eee"; |
369 | | |
370 | | fn decode_tokens(msg: &[u8]) -> Vec<Token> { |
371 | | let tokens: Vec<Result<Token, Error>> = Decoder::new(msg).tokens().collect(); |
372 | | if tokens.iter().all(Result::is_ok) { |
373 | | tokens.into_iter().map(Result::unwrap).collect() |
374 | | } else { |
375 | | panic!( |
376 | | "Unexpected tokenization error. Received tokens: {:?}", |
377 | | tokens |
378 | | ); |
379 | | } |
380 | | } |
381 | | |
382 | | fn decode_err(msg: &[u8], err_regex: &str) { |
383 | | let mut tokens: Vec<Result<Token, Error>> = Decoder::new(msg).tokens().collect(); |
384 | | if tokens.iter().all(Result::is_ok) { |
385 | | panic!("Unexpected parse success: {:?}", tokens); |
386 | | } else { |
387 | | let err = format!("{}", tokens.pop().unwrap().err().unwrap()); |
388 | | let err_regex = regex::Regex::new(err_regex).expect("Test regexes should be valid"); |
389 | | if !err_regex.is_match(&err) { |
390 | | panic!("Unexpected error: {}", err); |
391 | | } |
392 | | } |
393 | | } |
394 | | |
395 | | #[test] |
396 | | fn simple_bdecode_tokenization() { |
397 | | use self::Token::*; |
398 | | let tokens: Vec<_> = decode_tokens(SIMPLE_MSG); |
399 | | assert_eq!( |
400 | | tokens, |
401 | | vec![ |
402 | | Dict, |
403 | | String(&b"bar"[..]), |
404 | | Num(&"1"[..]), |
405 | | String(&b"foo"[..]), |
406 | | List, |
407 | | Num(&"2"[..]), |
408 | | Num(&"3"[..]), |
409 | | End, |
410 | | End, |
411 | | ] |
412 | | ); |
413 | | } |
414 | | |
415 | | #[test] |
416 | | fn short_dict_should_fail() { |
417 | | decode_err(b"d", r"EOF"); |
418 | | } |
419 | | |
420 | | #[test] |
421 | | fn short_list_should_fail() { |
422 | | decode_err(b"l", r"EOF"); |
423 | | } |
424 | | |
425 | | #[test] |
426 | | fn short_int_should_fail() { |
427 | | decode_err(b"i12", r"EOF"); |
428 | | } |
429 | | |
430 | | #[test] |
431 | | fn negative_numbers_and_zero_should_parse() { |
432 | | use self::Token::*; |
433 | | let tokens: Vec<_> = decode_tokens(b"i0ei-1e"); |
434 | | assert_eq!(tokens, vec![Num(&"0"), Num(&"-1")],); |
435 | | } |
436 | | |
437 | | #[test] |
438 | | fn negative_zero_is_illegal() { |
439 | | decode_err(b"i-0e", "got '0'"); |
440 | | } |
441 | | |
442 | | #[test] |
443 | | fn leading_zeros_are_illegal() { |
444 | | decode_err(b"i01e", "got '1'"); |
445 | | decode_err(b"i-01e", "got '0'"); |
446 | | } |
447 | | |
448 | | #[test] |
449 | | fn map_keys_must_be_strings() { |
450 | | decode_err(b"d3:fooi1ei2ei3ee", r"Map keys must be strings"); |
451 | | } |
452 | | |
453 | | #[test] |
454 | | fn map_keys_must_ascend() { |
455 | | decode_err(b"d3:fooi1e3:bari1ee", r"Keys were not sorted"); |
456 | | } |
457 | | |
458 | | #[test] |
459 | | fn map_keys_must_be_unique() { |
460 | | decode_err(b"d3:fooi1e3:fooi1ee", r"Keys were not sorted"); |
461 | | } |
462 | | |
463 | | #[test] |
464 | | fn map_keys_must_have_values() { |
465 | | decode_err(b"d3:fooe", r"Missing map value"); |
466 | | } |
467 | | |
468 | | #[test] |
469 | | fn strings_must_have_bodies() { |
470 | | decode_err(b"3:", r"EOF"); |
471 | | } |
472 | | |
473 | | #[test] |
474 | | fn ints_must_have_bodies() { |
475 | | decode_err(b"ie", r"Expected.*got 'e'"); |
476 | | } |
477 | | |
478 | | #[test] |
479 | | fn recursion_should_be_limited() { |
480 | | let mut msg = Vec::new(); |
481 | | msg.extend(iter::repeat(b'l').take(4096)); |
482 | | msg.extend(iter::repeat(b'e').take(4096)); |
483 | | decode_err(&msg, r"nesting depth"); |
484 | | } |
485 | | |
486 | | #[test] |
487 | | fn recursion_bounds_should_be_tight() { |
488 | | let test_msg = b"lllleeee"; |
489 | | assert!(Decoder::new(test_msg) |
490 | | .with_max_depth(4) |
491 | | .tokens() |
492 | | .last() |
493 | | .unwrap() |
494 | | .is_ok()); |
495 | | assert!(Decoder::new(test_msg) |
496 | | .with_max_depth(3) |
497 | | .tokens() |
498 | | .last() |
499 | | .unwrap() |
500 | | .is_err()); |
501 | | } |
502 | | |
503 | | #[test] |
504 | | fn dict_drop_should_consume_struct() { |
505 | | let mut decoder = Decoder::new(b"d3:fooi1e3:quxi2eei1000e"); |
506 | | drop(decoder.next_object()); |
507 | | |
508 | | let token = decoder.tokens().next().unwrap().unwrap(); |
509 | | assert_eq!(token, Token::Num("1000")); |
510 | | } |
511 | | |
512 | | #[test] |
513 | | fn list_drop_should_consume_struct() { |
514 | | let mut decoder = Decoder::new(b"li1ei2ei3eei1000e"); |
515 | | drop(decoder.next_object()); |
516 | | |
517 | | let token = decoder.tokens().next().unwrap().unwrap(); |
518 | | assert_eq!(token, Token::Num("1000")); |
519 | | } |
520 | | |
521 | | #[test] |
522 | | fn bytes_or_should_work_on_bytes() { |
523 | | assert_eq!( |
524 | | Ok(&b"foo"[..]), |
525 | | Object::Bytes(b"foo").bytes_or(Err("failure")) |
526 | | ); |
527 | | } |
528 | | |
529 | | #[test] |
530 | | fn bytes_or_should_not_work_on_other_types() { |
531 | | assert_eq!( |
532 | | Err("failure"), |
533 | | Object::Integer("123").bytes_or(Err("failure")) |
534 | | ); |
535 | | |
536 | | let mut list_decoder = Decoder::new(b"le"); |
537 | | assert_eq!( |
538 | | Err("failure"), |
539 | | list_decoder |
540 | | .next_object() |
541 | | .unwrap() |
542 | | .unwrap() |
543 | | .bytes_or(Err("failure")) |
544 | | ); |
545 | | let mut dict_decoder = Decoder::new(b"de"); |
546 | | assert_eq!( |
547 | | Err("failure"), |
548 | | dict_decoder |
549 | | .next_object() |
550 | | .unwrap() |
551 | | .unwrap() |
552 | | .bytes_or(Err("failure")) |
553 | | ); |
554 | | } |
555 | | |
556 | | #[test] |
557 | | fn bytes_or_else_should_work_on_bytes() { |
558 | | assert_eq!( |
559 | | Ok(&b"foo"[..]), |
560 | | Object::Bytes(b"foo").bytes_or_else(|_| Err("failure")) |
561 | | ); |
562 | | } |
563 | | |
564 | | #[test] |
565 | | fn bytes_or_else_should_not_work_on_other_types() { |
566 | | assert_eq!( |
567 | | Err("failure"), |
568 | | Object::Integer("123").bytes_or_else(|_| Err("failure")) |
569 | | ); |
570 | | let mut list_decoder = Decoder::new(b"le"); |
571 | | assert_eq!( |
572 | | Err("failure"), |
573 | | list_decoder |
574 | | .next_object() |
575 | | .unwrap() |
576 | | .unwrap() |
577 | | .bytes_or_else(|_| Err("failure")) |
578 | | ); |
579 | | let mut dict_decoder = Decoder::new(b"de"); |
580 | | assert_eq!( |
581 | | Err("failure"), |
582 | | dict_decoder |
583 | | .next_object() |
584 | | .unwrap() |
585 | | .unwrap() |
586 | | .bytes_or_else(|_| Err("failure")) |
587 | | ); |
588 | | } |
589 | | |
590 | | #[test] |
591 | | fn integer_str_or_should_work_on_int() { |
592 | | assert_eq!( |
593 | | Ok(&"123"[..]), |
594 | | Object::Integer("123").integer_or(Err("failure")) |
595 | | ); |
596 | | } |
597 | | |
598 | | #[test] |
599 | | fn integer_str_or_should_not_work_on_other_types() { |
600 | | assert_eq!( |
601 | | Err("failure"), |
602 | | Object::Bytes(b"foo").integer_or(Err("failure")) |
603 | | ); |
604 | | let mut list_decoder = Decoder::new(b"le"); |
605 | | assert_eq!( |
606 | | Err("failure"), |
607 | | list_decoder |
608 | | .next_object() |
609 | | .unwrap() |
610 | | .unwrap() |
611 | | .integer_or(Err("failure")) |
612 | | ); |
613 | | let mut dict_decoder = Decoder::new(b"de"); |
614 | | assert_eq!( |
615 | | Err("failure"), |
616 | | dict_decoder |
617 | | .next_object() |
618 | | .unwrap() |
619 | | .unwrap() |
620 | | .integer_or(Err("failure")) |
621 | | ); |
622 | | } |
623 | | |
624 | | #[test] |
625 | | fn integer_str_or_else_should_work_on_int() { |
626 | | assert_eq!( |
627 | | Ok(&"123"[..]), |
628 | | Object::Integer("123").integer_or_else(|_| Err("failure")) |
629 | | ); |
630 | | } |
631 | | |
632 | | #[test] |
633 | | fn integer_str_or_else_should_not_work_on_other_types() { |
634 | | assert_eq!( |
635 | | Err("failure"), |
636 | | Object::Bytes(b"foo").integer_or_else(|_| Err("failure")) |
637 | | ); |
638 | | let mut list_decoder = Decoder::new(b"le"); |
639 | | assert_eq!( |
640 | | Err("failure"), |
641 | | list_decoder |
642 | | .next_object() |
643 | | .unwrap() |
644 | | .unwrap() |
645 | | .integer_or_else(|_| Err("failure")) |
646 | | ); |
647 | | let mut dict_decoder = Decoder::new(b"de"); |
648 | | assert_eq!( |
649 | | Err("failure"), |
650 | | dict_decoder |
651 | | .next_object() |
652 | | .unwrap() |
653 | | .unwrap() |
654 | | .integer_or_else(|_| Err("failure")) |
655 | | ); |
656 | | } |
657 | | |
658 | | #[test] |
659 | | fn list_or_should_work_on_list() { |
660 | | let mut list_decoder = Decoder::new(b"le"); |
661 | | assert!(list_decoder |
662 | | .next_object() |
663 | | .unwrap() |
664 | | .unwrap() |
665 | | .list_or(Err("failure")) |
666 | | .is_ok()); |
667 | | } |
668 | | #[test] |
669 | | fn list_or_should_not_work_on_other_types() { |
670 | | assert_eq!( |
671 | | "failure", |
672 | | Object::Bytes(b"foo").list_or(Err("failure")).unwrap_err() |
673 | | ); |
674 | | assert_eq!( |
675 | | "failure", |
676 | | Object::Integer("foo").list_or(Err("failure")).unwrap_err() |
677 | | ); |
678 | | |
679 | | let mut dict_decoder = Decoder::new(b"de"); |
680 | | assert_eq!( |
681 | | "failure", |
682 | | dict_decoder |
683 | | .next_object() |
684 | | .unwrap() |
685 | | .unwrap() |
686 | | .list_or(Err("failure")) |
687 | | .unwrap_err() |
688 | | ); |
689 | | } |
690 | | |
691 | | #[test] |
692 | | fn list_or_else_should_work_on_list() { |
693 | | let mut list_decoder = Decoder::new(b"le"); |
694 | | assert!(list_decoder |
695 | | .next_object() |
696 | | .unwrap() |
697 | | .unwrap() |
698 | | .list_or_else(|_| Err("failure")) |
699 | | .is_ok()); |
700 | | } |
701 | | #[test] |
702 | | fn list_or_else_should_not_work_on_other_types() { |
703 | | assert_eq!( |
704 | | "failure", |
705 | | Object::Bytes(b"foo") |
706 | | .list_or_else(|_| Err("failure")) |
707 | | .unwrap_err() |
708 | | ); |
709 | | assert_eq!( |
710 | | "failure", |
711 | | Object::Integer("foo") |
712 | | .list_or_else(|_| Err("failure")) |
713 | | .unwrap_err() |
714 | | ); |
715 | | |
716 | | let mut dict_decoder = Decoder::new(b"de"); |
717 | | assert_eq!( |
718 | | "failure", |
719 | | dict_decoder |
720 | | .next_object() |
721 | | .unwrap() |
722 | | .unwrap() |
723 | | .list_or_else(|_| Err("failure")) |
724 | | .unwrap_err() |
725 | | ); |
726 | | } |
727 | | |
728 | | #[test] |
729 | | fn dictionary_or_should_work_on_dict() { |
730 | | let mut dict_decoder = Decoder::new(b"de"); |
731 | | assert!(dict_decoder |
732 | | .next_object() |
733 | | .unwrap() |
734 | | .unwrap() |
735 | | .dictionary_or(Err("failure")) |
736 | | .is_ok()); |
737 | | } |
738 | | |
739 | | #[test] |
740 | | fn dictionary_or_should_not_work_on_other_types() { |
741 | | assert_eq!( |
742 | | "failure", |
743 | | Object::Bytes(b"foo") |
744 | | .dictionary_or(Err("failure")) |
745 | | .unwrap_err() |
746 | | ); |
747 | | assert_eq!( |
748 | | "failure", |
749 | | Object::Integer("foo") |
750 | | .dictionary_or(Err("failure")) |
751 | | .unwrap_err() |
752 | | ); |
753 | | |
754 | | let mut list_decoder = Decoder::new(b"le"); |
755 | | assert_eq!( |
756 | | "failure", |
757 | | list_decoder |
758 | | .next_object() |
759 | | .unwrap() |
760 | | .unwrap() |
761 | | .dictionary_or(Err("failure")) |
762 | | .unwrap_err() |
763 | | ); |
764 | | } |
765 | | |
766 | | #[test] |
767 | | fn dictionary_or_else_should_work_on_dict() { |
768 | | let mut dict_decoder = Decoder::new(b"de"); |
769 | | assert!(dict_decoder |
770 | | .next_object() |
771 | | .unwrap() |
772 | | .unwrap() |
773 | | .dictionary_or_else(|_| Err("failure")) |
774 | | .is_ok()); |
775 | | } |
776 | | |
777 | | #[test] |
778 | | fn dictionary_or_else_should_not_work_on_other_types() { |
779 | | assert_eq!( |
780 | | "failure", |
781 | | Object::Bytes(b"foo") |
782 | | .dictionary_or_else(|_| Err("failure")) |
783 | | .unwrap_err() |
784 | | ); |
785 | | assert_eq!( |
786 | | "failure", |
787 | | Object::Integer("foo") |
788 | | .dictionary_or_else(|_| Err("failure")) |
789 | | .unwrap_err() |
790 | | ); |
791 | | |
792 | | let mut list_decoder = Decoder::new(b"le"); |
793 | | assert_eq!( |
794 | | "failure", |
795 | | list_decoder |
796 | | .next_object() |
797 | | .unwrap() |
798 | | .unwrap() |
799 | | .dictionary_or_else(|_| Err("failure")) |
800 | | .unwrap_err() |
801 | | ); |
802 | | } |
803 | | } |