/rust/registry/src/index.crates.io-1949cf8c6b5b557f/image-webp-0.2.4/src/decoder.rs
Line | Count | Source |
1 | | use byteorder_lite::{LittleEndian, ReadBytesExt}; |
2 | | use quick_error::quick_error; |
3 | | |
4 | | use std::collections::HashMap; |
5 | | use std::io::{self, BufRead, Cursor, Read, Seek}; |
6 | | use std::num::NonZeroU16; |
7 | | use std::ops::Range; |
8 | | |
9 | | use crate::extended::{self, get_alpha_predictor, read_alpha_chunk, WebPExtendedInfo}; |
10 | | |
11 | | use super::lossless::LosslessDecoder; |
12 | | use super::vp8::Vp8Decoder; |
13 | | |
14 | | quick_error! { |
15 | | /// Errors that can occur when attempting to decode a WebP image |
16 | | #[derive(Debug)] |
17 | | #[non_exhaustive] |
18 | | pub enum DecodingError { |
19 | | /// An IO error occurred while reading the file |
20 | | IoError(err: io::Error) { |
21 | | from() |
22 | | display("IO Error: {}", err) |
23 | | source(err) |
24 | | } |
25 | | |
26 | | /// RIFF's "RIFF" signature not found or invalid |
27 | | RiffSignatureInvalid(err: [u8; 4]) { |
28 | | display("Invalid RIFF signature: {err:x?}") |
29 | | } |
30 | | |
31 | | /// WebP's "WEBP" signature not found or invalid |
32 | | WebpSignatureInvalid(err: [u8; 4]) { |
33 | | display("Invalid WebP signature: {err:x?}") |
34 | | } |
35 | | |
36 | | /// An expected chunk was missing |
37 | | ChunkMissing { |
38 | | display("An expected chunk was missing") |
39 | | } |
40 | | |
41 | | /// Chunk Header was incorrect or invalid in its usage |
42 | | ChunkHeaderInvalid(err: [u8; 4]) { |
43 | | display("Invalid Chunk header: {err:x?}") |
44 | | } |
45 | | |
46 | | #[allow(deprecated)] |
47 | | #[deprecated] |
48 | | /// Some bits were invalid |
49 | | ReservedBitSet { |
50 | | display("Reserved bits set") |
51 | | } |
52 | | |
53 | | /// The ALPH chunk preprocessing info flag was invalid |
54 | | InvalidAlphaPreprocessing { |
55 | | display("Alpha chunk preprocessing flag invalid") |
56 | | } |
57 | | |
58 | | /// Invalid compression method |
59 | | InvalidCompressionMethod { |
60 | | display("Invalid compression method") |
61 | | } |
62 | | |
63 | | /// Alpha chunk doesn't match the frame's size |
64 | | AlphaChunkSizeMismatch { |
65 | | display("Alpha chunk size mismatch") |
66 | | } |
67 | | |
68 | | /// Image is too large, either for the platform's pointer size or generally |
69 | | ImageTooLarge { |
70 | | display("Image too large") |
71 | | } |
72 | | |
73 | | /// Frame would go out of the canvas |
74 | | FrameOutsideImage { |
75 | | display("Frame outside image") |
76 | | } |
77 | | |
78 | | /// Signature of 0x2f not found |
79 | | LosslessSignatureInvalid(err: u8) { |
80 | | display("Invalid lossless signature: {err:x?}") |
81 | | } |
82 | | |
83 | | /// Version Number was not zero |
84 | | VersionNumberInvalid(err: u8) { |
85 | | display("Invalid lossless version number: {err}") |
86 | | } |
87 | | |
88 | | /// Invalid color cache bits |
89 | | InvalidColorCacheBits(err: u8) { |
90 | | display("Invalid color cache bits: {err}") |
91 | | } |
92 | | |
93 | | /// An invalid Huffman code was encountered |
94 | | HuffmanError { |
95 | | display("Invalid Huffman code") |
96 | | } |
97 | | |
98 | | /// The bitstream was somehow corrupt |
99 | | BitStreamError { |
100 | | display("Corrupt bitstream") |
101 | | } |
102 | | |
103 | | /// The transforms specified were invalid |
104 | | TransformError { |
105 | | display("Invalid transform") |
106 | | } |
107 | | |
108 | | /// VP8's `[0x9D, 0x01, 0x2A]` magic not found or invalid |
109 | | Vp8MagicInvalid(err: [u8; 3]) { |
110 | | display("Invalid VP8 magic: {err:x?}") |
111 | | } |
112 | | |
113 | | /// VP8 Decoder initialisation wasn't provided with enough data |
114 | | NotEnoughInitData { |
115 | | display("Not enough VP8 init data") |
116 | | } |
117 | | |
118 | | /// At the time of writing, only the YUV colour-space encoded as `0` is specified |
119 | | ColorSpaceInvalid(err: u8) { |
120 | | display("Invalid VP8 color space: {err}") |
121 | | } |
122 | | |
123 | | /// LUMA prediction mode was not recognised |
124 | | LumaPredictionModeInvalid(err: i8) { |
125 | | display("Invalid VP8 luma prediction mode: {err}") |
126 | | } |
127 | | |
128 | | /// Intra-prediction mode was not recognised |
129 | | IntraPredictionModeInvalid(err: i8) { |
130 | | display("Invalid VP8 intra prediction mode: {err}") |
131 | | } |
132 | | |
133 | | /// Chroma prediction mode was not recognised |
134 | | ChromaPredictionModeInvalid(err: i8) { |
135 | | display("Invalid VP8 chroma prediction mode: {err}") |
136 | | } |
137 | | |
138 | | /// Inconsistent image sizes |
139 | | InconsistentImageSizes { |
140 | | display("Inconsistent image sizes") |
141 | | } |
142 | | |
143 | | /// The file may be valid, but this crate doesn't support decoding it. |
144 | | UnsupportedFeature(err: String) { |
145 | | display("Unsupported feature: {err}") |
146 | | } |
147 | | |
148 | | /// Invalid function call or parameter |
149 | | InvalidParameter(err: String) { |
150 | | display("Invalid parameter: {err}") |
151 | | } |
152 | | |
153 | | /// Memory limit exceeded |
154 | | MemoryLimitExceeded { |
155 | | display("Memory limit exceeded") |
156 | | } |
157 | | |
158 | | /// Invalid chunk size |
159 | | InvalidChunkSize { |
160 | | display("Invalid chunk size") |
161 | | } |
162 | | |
163 | | /// No more frames in image |
164 | | NoMoreFrames { |
165 | | display("No more frames") |
166 | | } |
167 | | } |
168 | | } |
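A hedged sketch of how a caller might branch on these variants; it assumes `WebPDecoder` and `DecodingError` are re-exported at the crate root, as the fully qualified names elsewhere in this report suggest. Because the enum is `#[non_exhaustive]`, a catch-all arm is required.

    use std::io::Cursor;
    use image_webp::{DecodingError, WebPDecoder};

    fn describe_failure(bytes: &[u8]) -> String {
        match WebPDecoder::new(Cursor::new(bytes)) {
            Ok(_) => "header parsed successfully".to_owned(),
            Err(DecodingError::IoError(e)) => format!("I/O problem: {e}"),
            Err(DecodingError::RiffSignatureInvalid(sig)) => format!("not a RIFF file: {sig:x?}"),
            Err(DecodingError::UnsupportedFeature(what)) => format!("unsupported: {what}"),
            // Catch-all for the remaining (and any future) variants.
            Err(other) => format!("other decoding error: {other}"),
        }
    }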
169 | | |
170 | | /// All possible RIFF chunks in a WebP image file |
171 | | #[allow(clippy::upper_case_acronyms)] |
172 | | #[derive(Debug, Clone, Copy, PartialEq, Hash, Eq)] |
173 | | pub(crate) enum WebPRiffChunk { |
174 | | RIFF, |
175 | | WEBP, |
176 | | VP8, |
177 | | VP8L, |
178 | | VP8X, |
179 | | ANIM, |
180 | | ANMF, |
181 | | ALPH, |
182 | | ICCP, |
183 | | EXIF, |
184 | | XMP, |
185 | | Unknown([u8; 4]), |
186 | | } |
187 | | |
188 | | impl WebPRiffChunk { |
189 | 32.9k | pub(crate) const fn from_fourcc(chunk_fourcc: [u8; 4]) -> Self { |
190 | 32.9k | match &chunk_fourcc { |
191 | 4.21k | b"RIFF" => Self::RIFF, |
192 | 4.21k | b"WEBP" => Self::WEBP, |
193 | 1.38k | b"VP8 " => Self::VP8, |
194 | 2.58k | b"VP8L" => Self::VP8L, |
195 | 1.49k | b"VP8X" => Self::VP8X, |
196 | 1.64k | b"ANIM" => Self::ANIM, |
197 | 3.45k | b"ANMF" => Self::ANMF, |
198 | 1.91k | b"ALPH" => Self::ALPH, |
199 | 1.73k | b"ICCP" => Self::ICCP, |
200 | 914 | b"EXIF" => Self::EXIF, |
201 | 339 | b"XMP " => Self::XMP, |
202 | 9.04k | _ => Self::Unknown(chunk_fourcc), |
203 | | } |
204 | 32.9k | } |
205 | | |
206 | 47 | pub(crate) const fn to_fourcc(self) -> [u8; 4] { |
207 | 47 | match self { |
208 | 0 | Self::RIFF => *b"RIFF", |
209 | 0 | Self::WEBP => *b"WEBP", |
210 | 1 | Self::VP8 => *b"VP8 ", |
211 | 1 | Self::VP8L => *b"VP8L", |
212 | 0 | Self::VP8X => *b"VP8X", |
213 | 0 | Self::ANIM => *b"ANIM", |
214 | 3 | Self::ANMF => *b"ANMF", |
215 | 4 | Self::ALPH => *b"ALPH", |
216 | 0 | Self::ICCP => *b"ICCP", |
217 | 0 | Self::EXIF => *b"EXIF", |
218 | 0 | Self::XMP => *b"XMP ", |
219 | 38 | Self::Unknown(fourcc) => fourcc, |
220 | | } |
221 | 47 | } |
222 | | |
223 | 15.8k | pub(crate) const fn is_unknown(self) -> bool { |
224 | 15.8k | matches!(self, Self::Unknown(_)) |
225 | 15.8k | } |
226 | | } |
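`WebPRiffChunk` is crate-private, so a round-trip check of the fourcc mapping would live inside the crate, e.g. as a unit test; a minimal sketch:

    #[cfg(test)]
    mod fourcc_tests {
        use super::WebPRiffChunk;

        #[test]
        fn fourcc_round_trip() {
            // Known chunks map back to their original four-byte code...
            assert_eq!(WebPRiffChunk::from_fourcc(*b"VP8L").to_fourcc(), *b"VP8L");
            // ...and unrecognized codes are preserved inside `Unknown`.
            assert_eq!(WebPRiffChunk::from_fourcc(*b"ABCD"), WebPRiffChunk::Unknown(*b"ABCD"));
            assert!(WebPRiffChunk::from_fourcc(*b"ABCD").is_unknown());
        }
    }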
227 | | |
228 | | // enum WebPImage { |
229 | | // Lossy(VP8Frame), |
230 | | // Lossless(LosslessFrame), |
231 | | // Extended(ExtendedImage), |
232 | | // } |
233 | | |
234 | | enum ImageKind { |
235 | | Lossy, |
236 | | Lossless, |
237 | | Extended(WebPExtendedInfo), |
238 | | } |
239 | | |
240 | | struct AnimationState { |
241 | | next_frame: u32, |
242 | | next_frame_start: u64, |
243 | | dispose_next_frame: bool, |
244 | | previous_frame_width: u32, |
245 | | previous_frame_height: u32, |
246 | | previous_frame_x_offset: u32, |
247 | | previous_frame_y_offset: u32, |
248 | | canvas: Option<Vec<u8>>, |
249 | | } |
250 | | impl Default for AnimationState { |
251 | 4.83k | fn default() -> Self { |
252 | 4.83k | Self { |
253 | 4.83k | next_frame: 0, |
254 | 4.83k | next_frame_start: 0, |
255 | 4.83k | dispose_next_frame: true, |
256 | 4.83k | previous_frame_width: 0, |
257 | 4.83k | previous_frame_height: 0, |
258 | 4.83k | previous_frame_x_offset: 0, |
259 | 4.83k | previous_frame_y_offset: 0, |
260 | 4.83k | canvas: None, |
261 | 4.83k | } |
262 | 4.83k | } |
263 | | } |
264 | | |
265 | | /// Number of times that an animation loops. |
266 | | #[derive(Copy, Clone, Debug, Eq, PartialEq)] |
267 | | pub enum LoopCount { |
268 | | /// The animation loops forever. |
269 | | Forever, |
270 | | /// Each frame of the animation is displayed the specified number of times. |
271 | | Times(NonZeroU16), |
272 | | } |
273 | | |
274 | | /// WebP decoder configuration options |
275 | | #[derive(Clone)] |
276 | | #[non_exhaustive] |
277 | | pub struct WebPDecodeOptions { |
278 | | /// The upsampling method used in the conversion from lossy YUV to RGB |
279 | | /// |
280 | | /// Defaults to `Bilinear`. |
281 | | pub lossy_upsampling: UpsamplingMethod, |
282 | | } |
283 | | |
284 | | impl Default for WebPDecodeOptions { |
285 | 4.21k | fn default() -> Self { |
286 | 4.21k | Self { |
287 | 4.21k | lossy_upsampling: UpsamplingMethod::Bilinear, |
288 | 4.21k | } |
289 | 4.21k | } |
290 | | } |
291 | | |
292 | | /// Methods for upsampling the chroma values in lossy decoding |
293 | | /// |
294 | | /// The chroma red and blue planes are encoded in VP8 at half the size of the luma plane, |
295 | | /// so these values must be upsampled to cover each pixel in the image. |
296 | | #[derive(Clone, Copy, Default)] |
297 | | pub enum UpsamplingMethod { |
298 | | /// Fancy upsampling |
299 | | /// |
300 | | /// Does bilinear interpolation using the 4 values nearest to the pixel, weighting based on the distance |
301 | | /// from the pixel. |
302 | | #[default] |
303 | | Bilinear, |
304 | | /// Simple upsampling: uses the closest u/v value to the pixel. |
305 | | /// |
306 | | /// Matches the -nofancy option in dwebp. |
307 | | /// Should be faster but may lead to slightly jagged edges. |
308 | | Simple, |
309 | | } |
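A sketch of opting into the simple (`-nofancy`-style) upsampling via `new_with_options`; it assumes `WebPDecodeOptions` and `UpsamplingMethod` are re-exported at the crate root like the other public types. The same switch can also be flipped after construction with `set_lossy_upsampling`.

    use std::io::Cursor;
    use image_webp::{DecodingError, UpsamplingMethod, WebPDecodeOptions, WebPDecoder};

    fn open_with_simple_upsampling(bytes: &[u8]) -> Result<WebPDecoder<Cursor<&[u8]>>, DecodingError> {
        // Start from the defaults (Bilinear) and switch to Simple upsampling.
        // The struct is #[non_exhaustive], so it must be built from `default()`.
        let mut options = WebPDecodeOptions::default();
        options.lossy_upsampling = UpsamplingMethod::Simple;
        WebPDecoder::new_with_options(Cursor::new(bytes), options)
    }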
310 | | |
311 | | /// WebP image format decoder. |
312 | | pub struct WebPDecoder<R> { |
313 | | r: R, |
314 | | memory_limit: usize, |
315 | | |
316 | | width: u32, |
317 | | height: u32, |
318 | | |
319 | | kind: ImageKind, |
320 | | animation: AnimationState, |
321 | | |
322 | | is_lossy: bool, |
323 | | has_alpha: bool, |
324 | | num_frames: u32, |
325 | | loop_count: LoopCount, |
326 | | loop_duration: u64, |
327 | | |
328 | | chunks: HashMap<WebPRiffChunk, Range<u64>>, |
329 | | |
330 | | webp_decode_options: WebPDecodeOptions, |
331 | | } |
332 | | |
333 | | impl<R: BufRead + Seek> WebPDecoder<R> { |
334 | | /// Create a new `WebPDecoder` from the reader `r`. The decoder performs many small reads, so the |
335 | | /// reader should be buffered. |
336 | 4.21k | pub fn new(r: R) -> Result<Self, DecodingError> { |
337 | 4.21k | Self::new_with_options(r, WebPDecodeOptions::default()) |
338 | 4.21k | } |
339 | | |
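A minimal usage sketch, assuming the decoder is driven from an in-memory buffer (any reader that is `BufRead + Seek` works):

    use std::io::Cursor;
    use image_webp::{DecodingError, WebPDecoder};

    fn inspect(webp_bytes: &[u8]) -> Result<(), DecodingError> {
        let decoder = WebPDecoder::new(Cursor::new(webp_bytes))?;
        let (width, height) = decoder.dimensions();
        println!(
            "{width}x{height}, alpha: {}, animated: {}",
            decoder.has_alpha(),
            decoder.is_animated()
        );
        Ok(())
    }
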
340 | | /// Create a new `WebPDecoder` from the reader `r` with the given `WebPDecodeOptions`. The decoder |
341 | | /// performs many small reads, so the reader should be buffered. |
342 | 4.21k | pub fn new_with_options( |
343 | 4.21k | r: R, |
344 | 4.21k | webp_decode_options: WebPDecodeOptions, |
345 | 4.21k | ) -> Result<Self, DecodingError> { |
346 | 4.21k | let mut decoder = Self { |
347 | 4.21k | r, |
348 | 4.21k | width: 0, |
349 | 4.21k | height: 0, |
350 | 4.21k | num_frames: 0, |
351 | 4.21k | kind: ImageKind::Lossy, |
352 | 4.21k | chunks: HashMap::new(), |
353 | 4.21k | animation: Default::default(), |
354 | 4.21k | memory_limit: usize::MAX, |
355 | 4.21k | is_lossy: false, |
356 | 4.21k | has_alpha: false, |
357 | 4.21k | loop_count: LoopCount::Times(NonZeroU16::new(1).unwrap()), |
358 | 4.21k | loop_duration: 0, |
359 | 4.21k | webp_decode_options, |
360 | 4.21k | }; |
361 | 4.21k | decoder.read_data()?; |
362 | 4.08k | Ok(decoder) |
363 | 4.21k | } |
364 | | |
365 | 4.21k | fn read_data(&mut self) -> Result<(), DecodingError> { |
366 | 4.21k | let (WebPRiffChunk::RIFF, riff_size, _) = read_chunk_header(&mut self.r)? else { |
367 | 0 | return Err(DecodingError::ChunkHeaderInvalid(*b"RIFF")); |
368 | | }; |
369 | | |
370 | 4.21k | match &read_fourcc(&mut self.r)? { |
371 | 4.21k | WebPRiffChunk::WEBP => {} |
372 | 0 | fourcc => return Err(DecodingError::WebpSignatureInvalid(fourcc.to_fourcc())), |
373 | | } |
374 | | |
375 | 4.21k | let (chunk, chunk_size, chunk_size_rounded) = read_chunk_header(&mut self.r)?; |
376 | 4.21k | let start = self.r.stream_position()?; |
377 | | |
378 | 4.21k | match chunk { |
379 | | WebPRiffChunk::VP8 => { |
380 | 793 | let tag = self.r.read_u24::<LittleEndian>()?; |
381 | | |
382 | 793 | let keyframe = tag & 1 == 0; |
383 | 793 | if !keyframe { |
384 | 0 | return Err(DecodingError::UnsupportedFeature( |
385 | 0 | "Non-keyframe frames".to_owned(), |
386 | 0 | )); |
387 | 793 | } |
388 | | |
389 | 793 | let mut tag = [0u8; 3]; |
390 | 793 | self.r.read_exact(&mut tag)?; |
391 | 793 | if tag != [0x9d, 0x01, 0x2a] { |
392 | 0 | return Err(DecodingError::Vp8MagicInvalid(tag)); |
393 | 793 | } |
394 | | |
395 | 793 | let w = self.r.read_u16::<LittleEndian>()?; |
396 | 793 | let h = self.r.read_u16::<LittleEndian>()?; |
397 | | |
398 | 793 | self.width = u32::from(w & 0x3FFF); |
399 | 793 | self.height = u32::from(h & 0x3FFF); |
400 | 793 | if self.width == 0 || self.height == 0 { |
401 | 0 | return Err(DecodingError::InconsistentImageSizes); |
402 | 793 | } |
403 | | |
404 | 793 | self.chunks |
405 | 793 | .insert(WebPRiffChunk::VP8, start..start + chunk_size); |
406 | 793 | self.kind = ImageKind::Lossy; |
407 | 793 | self.is_lossy = true; |
408 | | } |
409 | | WebPRiffChunk::VP8L => { |
410 | 2.29k | let signature = self.r.read_u8()?; |
411 | 2.29k | if signature != 0x2f { |
412 | 0 | return Err(DecodingError::LosslessSignatureInvalid(signature)); |
413 | 2.29k | } |
414 | | |
415 | 2.29k | let header = self.r.read_u32::<LittleEndian>()?; |
416 | 2.29k | let version = header >> 29; |
417 | 2.29k | if version != 0 { |
418 | 0 | return Err(DecodingError::VersionNumberInvalid(version as u8)); |
419 | 2.29k | } |
420 | | |
421 | 2.29k | self.width = (1 + header) & 0x3FFF; |
422 | 2.29k | self.height = (1 + (header >> 14)) & 0x3FFF; |
423 | 2.29k | self.chunks |
424 | 2.29k | .insert(WebPRiffChunk::VP8L, start..start + chunk_size); |
425 | 2.29k | self.kind = ImageKind::Lossless; |
426 | 2.29k | self.has_alpha = (header >> 28) & 1 != 0; |
427 | | } |
428 | | WebPRiffChunk::VP8X => { |
429 | 1.12k | let mut info = extended::read_extended_header(&mut self.r)?; |
430 | 1.12k | self.width = info.canvas_width; |
431 | 1.12k | self.height = info.canvas_height; |
432 | | |
433 | 1.12k | let mut position = start + chunk_size_rounded; |
434 | 1.12k | let max_position = position + riff_size.saturating_sub(12); |
435 | 1.12k | self.r.seek(io::SeekFrom::Start(position))?; |
436 | | |
437 | 16.9k | while position < max_position { |
438 | 16.4k | match read_chunk_header(&mut self.r) { |
439 | 15.8k | Ok((chunk, chunk_size, chunk_size_rounded)) => { |
440 | 15.8k | let range = position + 8..position + 8 + chunk_size; |
441 | 15.8k | position += 8 + chunk_size_rounded; |
442 | | |
443 | 15.8k | if !chunk.is_unknown() { |
444 | 8.72k | self.chunks.entry(chunk).or_insert(range); |
445 | 8.72k | } |
446 | | |
447 | 15.8k | if chunk == WebPRiffChunk::ANMF { |
448 | 2.82k | self.num_frames += 1; |
449 | 2.82k | if chunk_size < 24 { |
450 | 0 | return Err(DecodingError::InvalidChunkSize); |
451 | 2.82k | } |
452 | | |
453 | 2.82k | self.r.seek_relative(12)?; |
454 | 2.82k | let duration = self.r.read_u32::<LittleEndian>()? & 0xffffff; |
455 | 2.82k | self.loop_duration = |
456 | 2.82k | self.loop_duration.wrapping_add(u64::from(duration)); |
457 | | |
458 | | // If the image is animated, the image data chunk will be inside the |
459 | | // ANMF chunks, so we must inspect them to determine whether the |
460 | | // image contains any lossy image data. VP8 chunks store lossy data |
461 | | // and the spec says that lossless images SHOULD NOT contain ALPH |
462 | | // chunks, so we treat both as indicators of lossy images. |
463 | 2.82k | if !self.is_lossy { |
464 | 1.99k | let (subchunk, ..) = read_chunk_header(&mut self.r)?; |
465 | 1.99k | if let WebPRiffChunk::VP8 | WebPRiffChunk::ALPH = subchunk { |
466 | 538 | self.is_lossy = true; |
467 | 1.45k | } |
468 | 1.99k | self.r.seek_relative(chunk_size_rounded as i64 - 24)?; |
469 | | } else { |
470 | 834 | self.r.seek_relative(chunk_size_rounded as i64 - 16)?; |
471 | | } |
472 | | |
473 | 2.82k | continue; |
474 | 12.9k | } |
475 | | |
476 | 12.9k | self.r.seek_relative(chunk_size_rounded as i64)?; |
477 | | } |
478 | 620 | Err(DecodingError::IoError(e)) |
479 | 620 | if e.kind() == io::ErrorKind::UnexpectedEof => |
480 | | { |
481 | 620 | break; |
482 | | } |
483 | 0 | Err(e) => return Err(e), |
484 | | } |
485 | | } |
486 | 1.12k | self.is_lossy = self.is_lossy || self.chunks.contains_key(&WebPRiffChunk::VP8); |
487 | | |
488 | | // NOTE: We allow malformed images that have `info.icc_profile` set without a ICCP chunk, |
489 | | // because this is relatively common. |
490 | 1.12k | if info.animation |
491 | 684 | && (!self.chunks.contains_key(&WebPRiffChunk::ANIM) |
492 | 641 | || !self.chunks.contains_key(&WebPRiffChunk::ANMF)) |
493 | 1.06k | || info.exif_metadata && !self.chunks.contains_key(&WebPRiffChunk::EXIF) |
494 | 1.06k | || info.xmp_metadata && !self.chunks.contains_key(&WebPRiffChunk::XMP) |
495 | 1.05k | || !info.animation |
496 | 436 | && self.chunks.contains_key(&WebPRiffChunk::VP8) |
497 | 436 | == self.chunks.contains_key(&WebPRiffChunk::VP8L) |
498 | | { |
499 | 126 | return Err(DecodingError::ChunkMissing); |
500 | 997 | } |
501 | | |
502 | | // Decode ANIM chunk. |
503 | 997 | if info.animation { |
504 | 621 | match self.read_chunk(WebPRiffChunk::ANIM, 6) { |
505 | 621 | Ok(Some(chunk)) => { |
506 | 621 | let mut cursor = Cursor::new(chunk); |
507 | 621 | cursor.read_exact(&mut info.background_color_hint)?; |
508 | 621 | self.loop_count = match cursor.read_u16::<LittleEndian>()? { |
509 | 271 | 0 => LoopCount::Forever, |
510 | 350 | n => LoopCount::Times(NonZeroU16::new(n).unwrap()), |
511 | | }; |
512 | 621 | self.animation.next_frame_start = |
513 | 621 | self.chunks.get(&WebPRiffChunk::ANMF).unwrap().start - 8; |
514 | | } |
515 | 0 | Ok(None) => return Err(DecodingError::ChunkMissing), |
516 | | Err(DecodingError::MemoryLimitExceeded) => { |
517 | 0 | return Err(DecodingError::InvalidChunkSize) |
518 | | } |
519 | 0 | Err(e) => return Err(e), |
520 | | } |
521 | 376 | } |
522 | | |
523 | | // If the image is animated, the image data chunk will be inside the ANMF chunks. We |
524 | | // store the ALPH, VP8, and VP8L chunks (as applicable) of the first frame in the |
525 | | // hashmap so that we can read them later. |
526 | 997 | if let Some(range) = self.chunks.get(&WebPRiffChunk::ANMF).cloned() { |
527 | 623 | let mut position = range.start + 16; |
528 | 623 | self.r.seek(io::SeekFrom::Start(position))?; |
529 | 1.20k | for _ in 0..2 { |
530 | 1.14k | let (subchunk, subchunk_size, subchunk_size_rounded) = |
531 | 1.14k | read_chunk_header(&mut self.r)?; |
532 | 1.14k | let subrange = position + 8..position + 8 + subchunk_size; |
533 | 1.14k | self.chunks.entry(subchunk).or_insert(subrange.clone()); |
534 | | |
535 | 1.14k | position += 8 + subchunk_size_rounded; |
536 | 1.14k | if position + 8 > range.end { |
537 | 557 | break; |
538 | 584 | } |
539 | | } |
540 | 374 | } |
541 | | |
542 | 997 | self.has_alpha = info.alpha; |
543 | 997 | self.kind = ImageKind::Extended(info); |
544 | | } |
545 | 1 | _ => return Err(DecodingError::ChunkHeaderInvalid(chunk.to_fourcc())), |
546 | | }; |
547 | | |
548 | 4.08k | Ok(()) |
549 | 4.21k | } |
550 | | |
551 | | /// Sets the maximum amount of memory that the decoder is allowed to allocate at once. |
552 | | /// |
553 | | /// TODO: Some allocations currently ignore this limit. |
554 | 0 | pub fn set_memory_limit(&mut self, limit: usize) { |
555 | 0 | self.memory_limit = limit; |
556 | 0 | } |
557 | | |
559 | | /// Returns the background color specified in the image file if the image is an extended and animated WebP. |
559 | 0 | pub fn background_color_hint(&self) -> Option<[u8; 4]> { |
560 | 0 | if let ImageKind::Extended(info) = &self.kind { |
561 | 0 | Some(info.background_color_hint) |
562 | | } else { |
563 | 0 | None |
564 | | } |
565 | 0 | } |
566 | | |
568 | | /// Sets the background color if the image is an extended and animated WebP. |
568 | 0 | pub fn set_background_color(&mut self, color: [u8; 4]) -> Result<(), DecodingError> { |
569 | 0 | if let ImageKind::Extended(info) = &mut self.kind { |
570 | 0 | info.background_color = Some(color); |
571 | 0 | Ok(()) |
572 | | } else { |
573 | 0 | Err(DecodingError::InvalidParameter( |
574 | 0 | "Background color can only be set on animated webp".to_owned(), |
575 | 0 | )) |
576 | | } |
577 | 0 | } |
578 | | |
579 | | /// Returns the (width, height) of the image in pixels. |
580 | 20.4k | pub fn dimensions(&self) -> (u32, u32) { |
581 | 20.4k | (self.width, self.height) |
582 | 20.4k | } |
583 | | |
584 | | /// Returns whether the image has an alpha channel. If so, the pixel format is Rgba8; |
585 | | /// otherwise it is Rgb8. |
586 | 21.7k | pub fn has_alpha(&self) -> bool { |
587 | 21.7k | self.has_alpha |
588 | 21.7k | } |
589 | | |
590 | | /// Returns true if the image is animated. |
591 | 4.69k | pub fn is_animated(&self) -> bool { |
592 | 4.69k | match &self.kind { |
593 | 3.08k | ImageKind::Lossy | ImageKind::Lossless => false, |
594 | 1.61k | ImageKind::Extended(extended) => extended.animation, |
595 | | } |
596 | 4.69k | } |
597 | | |
598 | | /// Returns whether the image is lossy. For animated images, this is true if any frame is lossy. |
599 | 0 | pub fn is_lossy(&mut self) -> bool { |
600 | 0 | self.is_lossy |
601 | 0 | } |
602 | | |
603 | | /// Returns the number of frames of a single loop of the animation, or zero if the image is not |
604 | | /// animated. |
605 | 0 | pub fn num_frames(&self) -> u32 { |
606 | 0 | self.num_frames |
607 | 0 | } |
608 | | |
609 | | /// Returns the number of times the animation should loop. |
610 | 0 | pub fn loop_count(&self) -> LoopCount { |
611 | 0 | self.loop_count |
612 | 0 | } |
613 | | |
614 | | /// Returns the total duration of one loop through the animation in milliseconds, or zero if the |
615 | | /// image is not animated. |
616 | | /// |
617 | | /// This is the sum of the durations of all individual frames of the image. |
618 | 0 | pub fn loop_duration(&self) -> u64 { |
619 | 0 | self.loop_duration |
620 | 0 | } |
621 | | |
622 | 621 | fn read_chunk( |
623 | 621 | &mut self, |
624 | 621 | chunk: WebPRiffChunk, |
625 | 621 | max_size: usize, |
626 | 621 | ) -> Result<Option<Vec<u8>>, DecodingError> { |
627 | 621 | match self.chunks.get(&chunk) { |
628 | 621 | Some(range) => { |
629 | 621 | if range.end - range.start > max_size as u64 { |
630 | 0 | return Err(DecodingError::MemoryLimitExceeded); |
631 | 621 | } |
632 | | |
633 | 621 | self.r.seek(io::SeekFrom::Start(range.start))?; |
634 | 621 | let mut data = vec![0; (range.end - range.start) as usize]; |
635 | 621 | self.r.read_exact(&mut data)?; |
636 | 621 | Ok(Some(data)) |
637 | | } |
638 | 0 | None => Ok(None), |
639 | | } |
640 | 621 | } |
641 | | |
642 | | /// Returns the raw bytes of the ICC profile, or None if there is no ICC profile. |
643 | 0 | pub fn icc_profile(&mut self) -> Result<Option<Vec<u8>>, DecodingError> { |
644 | 0 | self.read_chunk(WebPRiffChunk::ICCP, self.memory_limit) |
645 | 0 | } |
646 | | |
647 | | /// Returns the raw bytes of the EXIF metadata, or None if there is no EXIF metadata. |
648 | 0 | pub fn exif_metadata(&mut self) -> Result<Option<Vec<u8>>, DecodingError> { |
649 | 0 | self.read_chunk(WebPRiffChunk::EXIF, self.memory_limit) |
650 | 0 | } |
651 | | |
652 | | /// Returns the raw bytes of the XMP metadata, or None if there is no XMP metadata. |
653 | 0 | pub fn xmp_metadata(&mut self) -> Result<Option<Vec<u8>>, DecodingError> { |
654 | 0 | self.read_chunk(WebPRiffChunk::XMP, self.memory_limit) |
655 | 0 | } |
656 | | |
657 | | /// Returns the number of bytes required to store the image or a single frame, or None if that |
658 | | /// would take more than `usize::MAX` bytes. |
659 | 4.69k | pub fn output_buffer_size(&self) -> Option<usize> { |
660 | 4.69k | let bytes_per_pixel = if self.has_alpha() { 4 } else { 3 }; |
661 | 4.69k | (self.width as usize) |
662 | 4.69k | .checked_mul(self.height as usize)? |
663 | 4.69k | .checked_mul(bytes_per_pixel) |
664 | 4.69k | } |
665 | | |
666 | | /// Returns the raw bytes of the image. For animated images, this is the first frame. |
667 | | /// |
668 | | /// Fails with `ImageTooLarge` if `buf` has a length different from `output_buffer_size()`. |
669 | 4.08k | pub fn read_image(&mut self, buf: &mut [u8]) -> Result<(), DecodingError> { |
670 | 4.08k | if Some(buf.len()) != self.output_buffer_size() { |
671 | 0 | return Err(DecodingError::ImageTooLarge); |
672 | 4.08k | } |
673 | | |
674 | 4.08k | if self.is_animated() { |
675 | 618 | let saved = std::mem::take(&mut self.animation); |
676 | 618 | self.animation.next_frame_start = |
677 | 618 | self.chunks.get(&WebPRiffChunk::ANMF).unwrap().start - 8; |
678 | 618 | let result = self.read_frame(buf); |
679 | 618 | self.animation = saved; |
680 | 618 | result?; |
681 | 3.46k | } else if let Some(range) = self.chunks.get(&WebPRiffChunk::VP8L) { |
682 | 2.29k | let mut decoder = LosslessDecoder::new(range_reader(&mut self.r, range.clone())?); |
683 | | |
684 | 2.29k | if self.has_alpha { |
685 | 1.85k | decoder.decode_frame(self.width, self.height, false, buf)?; |
686 | | } else { |
687 | 437 | let mut data = vec![0; self.width as usize * self.height as usize * 4]; |
688 | 437 | decoder.decode_frame(self.width, self.height, false, &mut data)?; |
689 | 77.2M | for (rgba_val, chunk) in data.chunks_exact(4).zip(buf.chunks_exact_mut(3)) { |
690 | 77.2M | chunk.copy_from_slice(&rgba_val[..3]); |
691 | 77.2M | } |
692 | | } |
693 | | } else { |
694 | 1.16k | let range = self |
695 | 1.16k | .chunks |
696 | 1.16k | .get(&WebPRiffChunk::VP8) |
697 | 1.16k | .ok_or(DecodingError::ChunkMissing)?; |
698 | 1.16k | let reader = range_reader(&mut self.r, range.start..range.end)?; |
699 | 1.16k | let frame = Vp8Decoder::decode_frame(reader)?; |
700 | 579 | if u32::from(frame.width) != self.width || u32::from(frame.height) != self.height { |
701 | 12 | return Err(DecodingError::InconsistentImageSizes); |
702 | 567 | } |
703 | | |
704 | 567 | if self.has_alpha() { |
705 | 313 | frame.fill_rgba(buf, self.webp_decode_options.lossy_upsampling); |
706 | | |
707 | 313 | let range = self |
708 | 313 | .chunks |
709 | 313 | .get(&WebPRiffChunk::ALPH) |
710 | 313 | .ok_or(DecodingError::ChunkMissing)? |
711 | 308 | .clone(); |
712 | 308 | let alpha_chunk = read_alpha_chunk( |
713 | 308 | &mut range_reader(&mut self.r, range)?, |
714 | 308 | self.width as u16, |
715 | 308 | self.height as u16, |
716 | 244 | )?; |
717 | | |
718 | 8.17k | for y in 0..frame.height { |
719 | 1.65M | for x in 0..frame.width { |
720 | 1.65M | let predictor: u8 = get_alpha_predictor( |
721 | 1.65M | x.into(), |
722 | 1.65M | y.into(), |
723 | 1.65M | frame.width.into(), |
724 | 1.65M | alpha_chunk.filtering_method, |
725 | 1.65M | buf, |
726 | 1.65M | ); |
727 | 1.65M | |
728 | 1.65M | let alpha_index = |
729 | 1.65M | usize::from(y) * usize::from(frame.width) + usize::from(x); |
730 | 1.65M | let buffer_index = alpha_index * 4 + 3; |
731 | 1.65M | |
732 | 1.65M | buf[buffer_index] = predictor.wrapping_add(alpha_chunk.data[alpha_index]); |
733 | 1.65M | } |
734 | | } |
735 | 254 | } else { |
736 | 254 | frame.fill_rgb(buf, self.webp_decode_options.lossy_upsampling); |
737 | 254 | } |
738 | | } |
739 | | |
740 | 2.29k | Ok(()) |
741 | 4.08k | } |
742 | | |
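A sketch of sizing the output buffer from `output_buffer_size()` before calling `read_image`, since the buffer length must match exactly:

    use std::io::Cursor;
    use image_webp::{DecodingError, WebPDecoder};

    fn decode_to_pixels(webp_bytes: &[u8]) -> Result<Vec<u8>, DecodingError> {
        let mut decoder = WebPDecoder::new(Cursor::new(webp_bytes))?;
        // `None` means the byte count overflows `usize`; treat it as "too large".
        let len = decoder.output_buffer_size().ok_or(DecodingError::ImageTooLarge)?;
        let mut pixels = vec![0u8; len];
        // Packed Rgb8 (3 bytes/pixel) or Rgba8 (4 bytes/pixel), depending on `has_alpha()`.
        decoder.read_image(&mut pixels)?;
        Ok(pixels)
    }
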
743 | | /// Reads the next frame of the animation. |
744 | | /// |
745 | | /// The frame contents are written into `buf` and the method returns the duration of the frame |
746 | | /// in milliseconds. If there are no more frames, the method returns |
747 | | /// `DecodingError::NoMoreFrames` and `buf` is left unchanged. |
748 | | /// |
749 | | /// # Panics |
750 | | /// |
751 | | /// Panics if the image is not animated. |
752 | 618 | pub fn read_frame(&mut self, buf: &mut [u8]) -> Result<u32, DecodingError> { |
753 | 618 | assert!(self.is_animated()); |
754 | 618 | assert_eq!(Some(buf.len()), self.output_buffer_size()); |
755 | | |
756 | 618 | if self.animation.next_frame == self.num_frames { |
757 | 0 | return Err(DecodingError::NoMoreFrames); |
758 | 618 | } |
759 | | |
760 | 618 | let ImageKind::Extended(info) = &self.kind else { |
761 | 0 | unreachable!() |
762 | | }; |
763 | | |
764 | 618 | self.r |
765 | 618 | .seek(io::SeekFrom::Start(self.animation.next_frame_start))?; |
766 | | |
767 | 618 | let anmf_size = match read_chunk_header(&mut self.r)? { |
768 | 618 | (WebPRiffChunk::ANMF, size, _) if size >= 32 => size, |
769 | 25 | _ => return Err(DecodingError::ChunkHeaderInvalid(*b"ANMF")), |
770 | | }; |
771 | | |
772 | | // Read ANMF chunk |
773 | 593 | let frame_x = extended::read_3_bytes(&mut self.r)? * 2; |
774 | 593 | let frame_y = extended::read_3_bytes(&mut self.r)? * 2; |
775 | 593 | let frame_width = extended::read_3_bytes(&mut self.r)? + 1; |
776 | 593 | let frame_height = extended::read_3_bytes(&mut self.r)? + 1; |
777 | 593 | if frame_width > 16384 || frame_height > 16384 { |
778 | 5 | return Err(DecodingError::ImageTooLarge); |
779 | 588 | } |
780 | 588 | if frame_x + frame_width > self.width || frame_y + frame_height > self.height { |
781 | 12 | return Err(DecodingError::FrameOutsideImage); |
782 | 576 | } |
783 | 576 | let duration = extended::read_3_bytes(&mut self.r)?; |
784 | 576 | let frame_info = self.r.read_u8()?; |
785 | 576 | let use_alpha_blending = frame_info & 0b00000010 == 0; |
786 | 576 | let dispose = frame_info & 0b00000001 != 0; |
787 | | |
788 | 576 | let clear_color = if self.animation.dispose_next_frame { |
789 | 576 | info.background_color |
790 | | } else { |
791 | 0 | None |
792 | | }; |
793 | | |
794 | | // Read normal bitstream now |
795 | 576 | let (chunk, chunk_size, chunk_size_rounded) = read_chunk_header(&mut self.r)?; |
796 | 576 | if chunk_size_rounded + 24 > anmf_size { |
797 | 14 | return Err(DecodingError::ChunkHeaderInvalid(chunk.to_fourcc())); |
798 | 562 | } |
799 | | |
800 | 562 | let (frame, frame_has_alpha): (Vec<u8>, bool) = match chunk { |
801 | | WebPRiffChunk::VP8 => { |
802 | 22 | let reader = (&mut self.r).take(chunk_size); |
803 | 22 | let raw_frame = Vp8Decoder::decode_frame(reader)?; |
804 | 22 | if u32::from(raw_frame.width) != frame_width |
805 | 15 | || u32::from(raw_frame.height) != frame_height |
806 | | { |
807 | 8 | return Err(DecodingError::InconsistentImageSizes); |
808 | 14 | } |
809 | 14 | let mut rgb_frame = vec![0; frame_width as usize * frame_height as usize * 3]; |
810 | 14 | raw_frame.fill_rgb(&mut rgb_frame, self.webp_decode_options.lossy_upsampling); |
811 | 14 | (rgb_frame, false) |
812 | | } |
813 | | WebPRiffChunk::VP8L => { |
814 | 64 | let reader = (&mut self.r).take(chunk_size); |
815 | 64 | let mut lossless_decoder = LosslessDecoder::new(reader); |
816 | 64 | let mut rgba_frame = vec![0; frame_width as usize * frame_height as usize * 4]; |
817 | 64 | lossless_decoder.decode_frame(frame_width, frame_height, false, &mut rgba_frame)?; |
818 | 61 | (rgba_frame, true) |
819 | | } |
820 | | WebPRiffChunk::ALPH => { |
821 | 474 | if chunk_size_rounded + 32 > anmf_size { |
822 | 1 | return Err(DecodingError::ChunkHeaderInvalid(chunk.to_fourcc())); |
823 | 473 | } |
824 | | |
825 | | // read alpha |
826 | 473 | let next_chunk_start = self.r.stream_position()? + chunk_size_rounded; |
827 | 473 | let mut reader = (&mut self.r).take(chunk_size); |
828 | 137 | let alpha_chunk = |
829 | 473 | read_alpha_chunk(&mut reader, frame_width as u16, frame_height as u16)?; |
830 | | |
831 | | // read opaque |
832 | 137 | self.r.seek(io::SeekFrom::Start(next_chunk_start))?; |
833 | 137 | let (next_chunk, next_chunk_size, _) = read_chunk_header(&mut self.r)?; |
834 | 128 | if chunk_size + next_chunk_size + 32 > anmf_size { |
835 | 29 | return Err(DecodingError::ChunkHeaderInvalid(next_chunk.to_fourcc())); |
836 | 99 | } |
837 | | |
838 | 99 | let frame = Vp8Decoder::decode_frame((&mut self.r).take(next_chunk_size))?; |
839 | | |
840 | 89 | let mut rgba_frame = vec![0; frame_width as usize * frame_height as usize * 4]; |
841 | 89 | frame.fill_rgba(&mut rgba_frame, self.webp_decode_options.lossy_upsampling); |
842 | | |
843 | 11.5k | for y in 0..frame.height { |
844 | 2.12M | for x in 0..frame.width { |
845 | 2.12M | let predictor: u8 = get_alpha_predictor( |
846 | 2.12M | x.into(), |
847 | 2.12M | y.into(), |
848 | 2.12M | frame.width.into(), |
849 | 2.12M | alpha_chunk.filtering_method, |
850 | 2.12M | &rgba_frame, |
851 | 2.12M | ); |
852 | 2.12M | |
853 | 2.12M | let alpha_index = |
854 | 2.12M | usize::from(y) * usize::from(frame.width) + usize::from(x); |
855 | 2.12M | let buffer_index = alpha_index * 4 + 3; |
856 | 2.12M | |
857 | 2.12M | rgba_frame[buffer_index] = |
858 | 2.12M | predictor.wrapping_add(alpha_chunk.data[alpha_index]); |
859 | 2.12M | } |
860 | | } |
861 | | |
862 | 89 | (rgba_frame, true) |
863 | | } |
864 | 2 | _ => return Err(DecodingError::ChunkHeaderInvalid(chunk.to_fourcc())), |
865 | | }; |
866 | | |
867 | | // fill starting canvas with clear color |
868 | 164 | if self.animation.canvas.is_none() { |
869 | 164 | self.animation.canvas = { |
870 | 164 | let mut canvas = vec![0; (self.width * self.height * 4) as usize]; |
871 | 164 | if let Some(color) = info.background_color.as_ref() { |
872 | 0 | canvas |
873 | 0 | .chunks_exact_mut(4) |
874 | 0 | .for_each(|c| c.copy_from_slice(color)) |
875 | 164 | } |
876 | 164 | Some(canvas) |
877 | | } |
878 | 0 | } |
879 | 164 | extended::composite_frame( |
880 | 164 | self.animation.canvas.as_mut().unwrap(), |
881 | 164 | self.width, |
882 | 164 | self.height, |
883 | 164 | clear_color, |
884 | 164 | &frame, |
885 | 164 | frame_x, |
886 | 164 | frame_y, |
887 | 164 | frame_width, |
888 | 164 | frame_height, |
889 | 164 | frame_has_alpha, |
890 | 164 | use_alpha_blending, |
891 | 164 | self.animation.previous_frame_width, |
892 | 164 | self.animation.previous_frame_height, |
893 | 164 | self.animation.previous_frame_x_offset, |
894 | 164 | self.animation.previous_frame_y_offset, |
895 | | ); |
896 | | |
897 | 164 | self.animation.previous_frame_width = frame_width; |
898 | 164 | self.animation.previous_frame_height = frame_height; |
899 | 164 | self.animation.previous_frame_x_offset = frame_x; |
900 | 164 | self.animation.previous_frame_y_offset = frame_y; |
901 | | |
902 | 164 | self.animation.dispose_next_frame = dispose; |
903 | 164 | self.animation.next_frame_start += anmf_size + 8; |
904 | 164 | self.animation.next_frame += 1; |
905 | | |
906 | 164 | if self.has_alpha() { |
907 | 140 | buf.copy_from_slice(self.animation.canvas.as_ref().unwrap()); |
908 | 140 | } else { |
909 | 794M | for (b, c) in buf |
910 | 24 | .chunks_exact_mut(3) |
911 | 24 | .zip(self.animation.canvas.as_ref().unwrap().chunks_exact(4)) |
912 | 794M | { |
913 | 794M | b.copy_from_slice(&c[..3]); |
914 | 794M | } |
915 | | } |
916 | | |
917 | 164 | Ok(duration) |
918 | 618 | } |
919 | | |
920 | | /// Resets the animation to the first frame. |
921 | | /// |
922 | | /// # Panics |
923 | | /// |
924 | | /// Panics if the image is not animated. |
925 | 0 | pub fn reset_animation(&mut self) { |
926 | 0 | assert!(self.is_animated()); |
927 | | |
928 | 0 | self.animation.next_frame = 0; |
929 | 0 | self.animation.next_frame_start = self.chunks.get(&WebPRiffChunk::ANMF).unwrap().start - 8; |
930 | 0 | self.animation.dispose_next_frame = true; |
931 | 0 | } |
932 | | |
933 | | /// Sets the upsampling method that is used in lossy decoding |
934 | 0 | pub fn set_lossy_upsampling(&mut self, upsampling_method: UpsamplingMethod) { |
935 | 0 | self.webp_decode_options.lossy_upsampling = upsampling_method; |
936 | 0 | } |
937 | | } |
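Editor's sketch (not part of decoder.rs): a minimal, hypothetical driver for the animation API covered above. It assumes the crate's public WebPDecoder::new, is_animated(), has_alpha(), read_frame() and reset_animation() behave as the listing suggests; dimensions() is an assumed accessor that does not appear in this excerpt.

use std::io::Cursor;
use image_webp::WebPDecoder;

fn first_frame_twice(webp_bytes: &[u8]) -> Result<(), Box<dyn std::error::Error>> {
    let mut decoder = WebPDecoder::new(Cursor::new(webp_bytes))?;
    if decoder.is_animated() {
        // The canvas is RGBA when the image has alpha, RGB otherwise (see read_frame above).
        let (width, height) = decoder.dimensions();
        let bpp = if decoder.has_alpha() { 4 } else { 3 };
        let mut frame = vec![0u8; width as usize * height as usize * bpp];
        let _duration = decoder.read_frame(&mut frame)?; // decodes frame 0
        decoder.reset_animation(); // rewinds, so the next call decodes frame 0 again
        let _duration = decoder.read_frame(&mut frame)?;
    }
    Ok(())
}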
938 | | |
939 | 3.77k | pub(crate) fn range_reader<R: BufRead + Seek>( |
940 | 3.77k | mut r: R, |
941 | 3.77k | range: Range<u64>, |
942 | 3.77k | ) -> Result<impl BufRead, DecodingError> { |
943 | 3.77k | r.seek(io::SeekFrom::Start(range.start))?; |
944 | 3.77k | Ok(r.take(range.end - range.start)) |
945 | 3.77k | }
image_webp::decoder::range_reader::<&mut std::io::cursor::Cursor<&[u8]>>
Line | Count | Source |
939 | 3.77k | pub(crate) fn range_reader<R: BufRead + Seek>( |
940 | 3.77k | mut r: R, |
941 | 3.77k | range: Range<u64>, |
942 | 3.77k | ) -> Result<impl BufRead, DecodingError> { |
943 | 3.77k | r.seek(io::SeekFrom::Start(range.start))?; |
944 | 3.77k | Ok(r.take(range.end - range.start)) |
945 | 3.77k | }
Unexecuted instantiation: image_webp::decoder::range_reader::<_> |
946 | | |
947 | 33.5k | pub(crate) fn read_fourcc<R: BufRead>(mut r: R) -> Result<WebPRiffChunk, DecodingError> { |
948 | 33.5k | let mut chunk_fourcc = [0; 4]; |
949 | 33.5k | r.read_exact(&mut chunk_fourcc)?; |
950 | 32.9k | Ok(WebPRiffChunk::from_fourcc(chunk_fourcc)) |
951 | 33.5k | }
image_webp::decoder::read_fourcc::<&mut std::io::cursor::Cursor<&[u8]>>
Line | Count | Source |
947 | 4.21k | pub(crate) fn read_fourcc<R: BufRead>(mut r: R) -> Result<WebPRiffChunk, DecodingError> { |
948 | 4.21k | let mut chunk_fourcc = [0; 4]; |
949 | 4.21k | r.read_exact(&mut chunk_fourcc)?; |
950 | 4.21k | Ok(WebPRiffChunk::from_fourcc(chunk_fourcc)) |
951 | 4.21k | }
image_webp::decoder::read_fourcc::<&mut &mut std::io::cursor::Cursor<&[u8]>>
Line | Count | Source |
947 | 29.3k | pub(crate) fn read_fourcc<R: BufRead>(mut r: R) -> Result<WebPRiffChunk, DecodingError> { |
948 | 29.3k | let mut chunk_fourcc = [0; 4]; |
949 | 29.3k | r.read_exact(&mut chunk_fourcc)?; |
950 | 28.7k | Ok(WebPRiffChunk::from_fourcc(chunk_fourcc)) |
951 | 29.3k | }
Unexecuted instantiation: image_webp::decoder::read_fourcc::<_> |
952 | | |
953 | 29.3k | pub(crate) fn read_chunk_header<R: BufRead>( |
954 | 29.3k | mut r: R, |
955 | 29.3k | ) -> Result<(WebPRiffChunk, u64, u64), DecodingError> { |
956 | 29.3k | let chunk = read_fourcc(&mut r)?; |
957 | 28.7k | let chunk_size = r.read_u32::<LittleEndian>()?; |
958 | 28.7k | let chunk_size_rounded = chunk_size.saturating_add(chunk_size & 1); |
959 | 28.7k | Ok((chunk, chunk_size.into(), chunk_size_rounded.into())) |
960 | 29.3k | }
image_webp::decoder::read_chunk_header::<&mut std::io::cursor::Cursor<&[u8]>>
Line | Count | Source |
953 | 29.3k | pub(crate) fn read_chunk_header<R: BufRead>( |
954 | 29.3k | mut r: R, |
955 | 29.3k | ) -> Result<(WebPRiffChunk, u64, u64), DecodingError> { |
956 | 29.3k | let chunk = read_fourcc(&mut r)?; |
957 | 28.7k | let chunk_size = r.read_u32::<LittleEndian>()?; |
958 | 28.7k | let chunk_size_rounded = chunk_size.saturating_add(chunk_size & 1); |
959 | 28.7k | Ok((chunk, chunk_size.into(), chunk_size_rounded.into())) |
960 | 29.3k | }
Unexecuted instantiation: image_webp::decoder::read_chunk_header::<_> |
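read_chunk_header above returns both the declared chunk size and that size rounded up to an even byte count, because RIFF pads odd-sized chunk payloads with one byte. A self-contained sketch of the same arithmetic (not part of decoder.rs; the ALPH header bytes are fabricated for illustration):

fn main() {
    // Hypothetical 8-byte RIFF chunk header: fourcc "ALPH", then little-endian u32 size 5.
    let header = [b'A', b'L', b'P', b'H', 5, 0, 0, 0];
    let fourcc: [u8; 4] = header[..4].try_into().unwrap();
    let chunk_size = u32::from_le_bytes(header[4..8].try_into().unwrap());
    // Same rounding as read_chunk_header: an odd payload occupies one extra pad byte.
    let chunk_size_rounded = chunk_size.saturating_add(chunk_size & 1);
    assert_eq!(&fourcc, b"ALPH");
    assert_eq!((chunk_size, chunk_size_rounded), (5, 6));
}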
961 | | |
962 | | #[cfg(test)] |
963 | | mod tests { |
964 | | use super::*; |
965 | | const RGB_BPP: usize = 3; |
966 | | |
967 | | #[test] |
968 | | fn add_with_overflow_size() { |
969 | | let bytes = vec![ |
970 | | 0x52, 0x49, 0x46, 0x46, 0xaf, 0x37, 0x80, 0x47, 0x57, 0x45, 0x42, 0x50, 0x6c, 0x64, |
971 | | 0x00, 0x00, 0xff, 0xff, 0xff, 0xff, 0xfb, 0x7e, 0x73, 0x00, 0x06, 0x00, 0x00, 0x00, |
972 | | 0x00, 0x00, 0x00, 0x00, 0x05, 0x00, 0x00, 0x00, 0x65, 0x65, 0x65, 0x65, 0x65, 0x65, |
973 | | 0x40, 0xfb, 0xff, 0xff, 0x65, 0x65, 0x65, 0x65, 0x65, 0x65, 0x65, 0x65, 0x65, 0x65, |
974 | | 0x00, 0x00, 0x00, 0x00, 0x62, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x49, |
975 | | 0x49, 0x54, 0x55, 0x50, 0x4c, 0x54, 0x59, 0x50, 0x45, 0x33, 0x37, 0x44, 0x4d, 0x46, |
976 | | ]; |
977 | | |
978 | | let data = std::io::Cursor::new(bytes); |
979 | | |
980 | | let _ = WebPDecoder::new(data); |
981 | | } |
982 | | |
983 | | #[test] |
984 | | fn decode_2x2_single_color_image() { |
985 | | // Image data created from imagemagick and output of xxd: |
986 | | // $ convert -size 2x2 xc:#f00 red.webp |
987 | | // $ xxd -g 1 red.webp | head |
988 | | |
989 | | const NUM_PIXELS: usize = 2 * 2 * RGB_BPP; |
990 | | // 2x2 red pixel image |
991 | | let bytes = [ |
992 | | 0x52, 0x49, 0x46, 0x46, 0x3c, 0x00, 0x00, 0x00, 0x57, 0x45, 0x42, 0x50, 0x56, 0x50, |
993 | | 0x38, 0x20, 0x30, 0x00, 0x00, 0x00, 0xd0, 0x01, 0x00, 0x9d, 0x01, 0x2a, 0x02, 0x00, |
994 | | 0x02, 0x00, 0x02, 0x00, 0x34, 0x25, 0xa0, 0x02, 0x74, 0xba, 0x01, 0xf8, 0x00, 0x03, |
995 | | 0xb0, 0x00, 0xfe, 0xf0, 0xc4, 0x0b, 0xff, 0x20, 0xb9, 0x61, 0x75, 0xc8, 0xd7, 0xff, |
996 | | 0x20, 0x3f, 0xe4, 0x07, 0xfc, 0x80, 0xff, 0xf8, 0xf2, 0x00, 0x00, 0x00, |
997 | | ]; |
998 | | |
999 | | let mut data = [0; NUM_PIXELS]; |
1000 | | let mut decoder = WebPDecoder::new(std::io::Cursor::new(bytes)).unwrap(); |
1001 | | decoder.read_image(&mut data).unwrap(); |
1002 | | |
1003 | | // All pixels are the same value |
1004 | | let first_pixel = &data[..RGB_BPP]; |
1005 | | assert!(data.chunks_exact(3).all(|ch| ch.iter().eq(first_pixel))); |
1006 | | } |
1007 | | |
1008 | | #[test] |
1009 | | fn decode_3x3_single_color_image() { |
1010 | | // Test that any odd pixel "tail" is decoded properly |
1011 | | |
1012 | | const NUM_PIXELS: usize = 3 * 3 * RGB_BPP; |
1013 | | // 3x3 red pixel image |
1014 | | let bytes = [ |
1015 | | 0x52, 0x49, 0x46, 0x46, 0x3c, 0x00, 0x00, 0x00, 0x57, 0x45, 0x42, 0x50, 0x56, 0x50, |
1016 | | 0x38, 0x20, 0x30, 0x00, 0x00, 0x00, 0xd0, 0x01, 0x00, 0x9d, 0x01, 0x2a, 0x03, 0x00, |
1017 | | 0x03, 0x00, 0x02, 0x00, 0x34, 0x25, 0xa0, 0x02, 0x74, 0xba, 0x01, 0xf8, 0x00, 0x03, |
1018 | | 0xb0, 0x00, 0xfe, 0xf0, 0xc4, 0x0b, 0xff, 0x20, 0xb9, 0x61, 0x75, 0xc8, 0xd7, 0xff, |
1019 | | 0x20, 0x3f, 0xe4, 0x07, 0xfc, 0x80, 0xff, 0xf8, 0xf2, 0x00, 0x00, 0x00, |
1020 | | ]; |
1021 | | |
1022 | | let mut data = [0; NUM_PIXELS]; |
1023 | | let mut decoder = WebPDecoder::new(std::io::Cursor::new(bytes)).unwrap(); |
1024 | | decoder.read_image(&mut data).unwrap(); |
1025 | | |
1026 | | // All pixels are the same value |
1027 | | let first_pixel = &data[..RGB_BPP]; |
1028 | | assert!(data.chunks_exact(3).all(|ch| ch.iter().eq(first_pixel))); |
1029 | | } |
1030 | | } |
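Editor's sketch (not part of decoder.rs): the decode pattern exercised by the tests above, generalized into a hypothetical helper. read_image() and has_alpha() appear in this listing; dimensions() is an assumed public accessor.

use std::io::Cursor;
use image_webp::WebPDecoder;

fn decode_rgb(webp_bytes: &[u8]) -> Result<Vec<u8>, Box<dyn std::error::Error>> {
    let mut decoder = WebPDecoder::new(Cursor::new(webp_bytes))?;
    let (width, height) = decoder.dimensions();
    // The tests above use 3 bytes per pixel; bail out if the image carries alpha.
    assert!(!decoder.has_alpha(), "sketch only handles RGB output");
    let mut rgb = vec![0u8; width as usize * height as usize * 3];
    decoder.read_image(&mut rgb)?;
    Ok(rgb)
}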