/rust/registry/src/index.crates.io-1949cf8c6b5b557f/av-scenechange-0.14.1/src/lib.rs
// Documentation lints
// FIXME: add docs and turn this to warn
#![allow(missing_docs)]
#![warn(clippy::doc_link_with_quotes)]
#![warn(clippy::doc_markdown)]
#![warn(clippy::missing_errors_doc)]
#![warn(clippy::missing_panics_doc)]

pub mod decoder;

mod analyze;
#[macro_use]
mod cpu;
mod data;
#[cfg(feature = "ffmpeg")]
pub mod ffmpeg;
#[cfg(feature = "vapoursynth")]
pub mod vapoursynth;
mod y4m;

use std::{
    collections::{BTreeMap, BTreeSet},
    io::Read,
    sync::Arc,
    time::Instant,
};

pub use ::y4m::Decoder as Y4mDecoder;
use decoder::Decoder;
pub use num_rational::Rational32;
use v_frame::pixel::Pixel;

pub use crate::{analyze::SceneChangeDetector, cpu::CpuFeatureLevel};

/// Options determining how to run scene change detection.
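///
/// # Examples
///
/// A minimal sketch of overriding the defaults with struct update syntax
/// (the field values here are illustrative, not recommendations):
///
/// ```
/// use av_scenechange::{DetectionOptions, SceneDetectionSpeed};
///
/// // Fast analysis, keeping every raw scenecut (no flash filtering).
/// let opts = DetectionOptions {
///     analysis_speed: SceneDetectionSpeed::Fast,
///     detect_flashes: false,
///     ..DetectionOptions::default()
/// };
/// assert_eq!(opts.lookahead_distance, 5);
/// ```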
#[derive(Debug, Clone, Copy)]
pub struct DetectionOptions {
    /// The speed of the detection algorithm to use.
    /// Slower algorithms are more accurate and better suited for use in
    /// encoders.
    pub analysis_speed: SceneDetectionSpeed,
    /// Enabling this will use heuristics to avoid scenecuts
    /// that are too close to each other.
    /// This is generally useful if you want scenecut detection
    /// for use in an encoder.
    /// If you want a raw list of scene changes, you should disable this.
    pub detect_flashes: bool,
    /// The minimum distance between two scene changes.
    pub min_scenecut_distance: Option<usize>,
    /// The maximum distance between two scene changes.
    pub max_scenecut_distance: Option<usize>,
    /// The distance to look ahead in the video
    /// for scene flash detection.
    ///
    /// Not used if `detect_flashes` is `false`.
    pub lookahead_distance: usize,
}

impl Default for DetectionOptions {
    #[inline]
    fn default() -> Self {
        DetectionOptions {
            analysis_speed: SceneDetectionSpeed::Standard,
            detect_flashes: true,
            lookahead_distance: 5,
            min_scenecut_distance: None,
            max_scenecut_distance: None,
        }
    }
}

/// Results from a scene change detection pass.
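///
/// # Examples
///
/// A small sketch of consuming the results (the values here are made up
/// for illustration):
///
/// ```
/// use av_scenechange::DetectionResults;
///
/// let results = DetectionResults {
///     scene_changes: vec![0, 48, 121],
///     frame_count: 240,
///     speed: 312.5,
/// };
/// // Each detected scene change marks the first frame of a new scene.
/// assert_eq!(results.scene_changes.len(), 3);
/// ```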
#[derive(Debug, Clone)]
#[cfg_attr(feature = "serialize", derive(serde::Serialize))]
pub struct DetectionResults {
    /// The 0-indexed frame numbers where scene changes were detected.
    pub scene_changes: Vec<usize>,
    /// The total number of frames read.
    pub frame_count: usize,
    /// The average analysis speed, in frames per second.
    pub speed: f64,
}

/// Builds a [`SceneChangeDetector`] configured according to `opts`,
/// using the video properties reported by the decoder.
///
/// # Errors
///
/// - If using a Vapoursynth script that contains an unsupported video format.
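///
/// # Examples
///
/// A minimal sketch, assuming the caller has already opened a
/// [`decoder::Decoder`] (for example over a y4m stream) and wants an
/// 8-bit detector; `build_detector` is a hypothetical helper, not part
/// of this crate:
///
/// ```no_run
/// use std::io::Read;
///
/// use av_scenechange::{
///     decoder::Decoder, new_detector, DetectionOptions, SceneChangeDetector,
/// };
///
/// // Hypothetical wrapper: the caller supplies an already-opened decoder.
/// fn build_detector<R: Read>(
///     dec: &mut Decoder<R>,
/// ) -> anyhow::Result<SceneChangeDetector<u8>> {
///     new_detector::<R, u8>(dec, DetectionOptions::default())
/// }
/// ```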
#[inline]
pub fn new_detector<R: Read, T: Pixel>(
    dec: &mut Decoder<R>,
    opts: DetectionOptions,
) -> anyhow::Result<SceneChangeDetector<T>> {
    let video_details = dec.get_video_details()?;

    Ok(SceneChangeDetector::new(
        (video_details.width, video_details.height),
        video_details.bit_depth,
        video_details.time_base.recip(),
        video_details.chroma_sampling,
        if opts.detect_flashes {
            opts.lookahead_distance
        } else {
            1
        },
        opts.analysis_speed,
        opts.min_scenecut_distance.unwrap_or(0),
        opts.max_scenecut_distance.unwrap_or(u32::MAX as usize),
        CpuFeatureLevel::default(),
    ))
}

/// Runs through a video clip, detecting where scene changes occur.
/// Detection behavior can be tuned via the `opts` parameter.
///
/// This is the preferred, simplified interface
/// for analyzing a whole clip for scene changes.
///
/// # Arguments
///
/// - `progress_callback`: An optional callback that will fire after each frame
///   is analyzed. Arguments passed in will be, in order, the number of frames
///   analyzed, and the number of keyframes detected. This is generally useful
///   for displaying progress, etc.
///
/// # Errors
///
/// - If using a Vapoursynth script that contains an unsupported video format.
///
/// # Panics
///
/// - If `opts.lookahead_distance` is 0.
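///
/// # Examples
///
/// A minimal sketch, assuming the caller has already opened a
/// [`decoder::Decoder`] over the clip and wants 8-bit analysis with a
/// simple progress printout; `analyze_clip` is a hypothetical helper,
/// not part of this crate:
///
/// ```no_run
/// use std::io::Read;
///
/// use av_scenechange::{
///     decoder::Decoder, detect_scene_changes, DetectionOptions, DetectionResults,
/// };
///
/// fn analyze_clip<R: Read>(dec: &mut Decoder<R>) -> anyhow::Result<DetectionResults> {
///     // Print progress after every analyzed frame.
///     let progress: &dyn Fn(usize, usize) = &|frames: usize, keyframes: usize| {
///         eprintln!("{frames} frames analyzed, {keyframes} scene changes found");
///     };
///     let results = detect_scene_changes::<R, u8>(
///         dec,
///         DetectionOptions::default(),
///         None, // no frame limit
///         Some(progress),
///     )?;
///     println!("scene changes at frames: {:?}", results.scene_changes);
///     Ok(results)
/// }
/// ```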
#[inline]
pub fn detect_scene_changes<R: Read, T: Pixel>(
    dec: &mut Decoder<R>,
    opts: DetectionOptions,
    frame_limit: Option<usize>,
    progress_callback: Option<&dyn Fn(usize, usize)>,
) -> anyhow::Result<DetectionResults> {
    assert!(opts.lookahead_distance >= 1);

    let mut detector = new_detector::<R, T>(dec, opts)?;
    let video_details = dec.get_video_details()?;
    let mut frame_queue = BTreeMap::new();
    let mut keyframes = BTreeSet::new();
    keyframes.insert(0);

    let start_time = Instant::now();
    let mut frameno = 0;
    loop {
        // Read ahead until the queue holds the current frame plus
        // `lookahead_distance` frames after it (or until end of input
        // or the frame limit).
        let mut next_input_frameno = frame_queue.keys().last().copied().map_or(0, |key| key + 1);
        while next_input_frameno
            < (frameno + opts.lookahead_distance + 1).min(frame_limit.unwrap_or(usize::MAX))
        {
            let frame = dec.read_video_frame(&video_details);
            if let Ok(frame) = frame {
                frame_queue.insert(next_input_frameno, Arc::new(frame));
                next_input_frameno += 1;
            } else {
                // End of input
                break;
            }
        }

        // `frame_queue` starts at the previously analyzed frame, so this
        // window covers the previous frame, the current frame, and the
        // lookahead frames.
        let frame_set = frame_queue
            .values()
            .take(opts.lookahead_distance + 2)
            .collect::<Vec<_>>();
        if frame_set.len() < 2 {
            // End of video
            break;
        }
        if frameno == 0
            || detector.analyze_next_frame(
                &frame_set,
                frameno,
                *keyframes
                    .iter()
                    .last()
                    .expect("at least 1 keyframe should exist"),
            )
        {
            keyframes.insert(frameno);
        }

        // Drop the frame before the one just analyzed; it is no longer needed.
        if frameno > 0 {
            frame_queue.remove(&(frameno - 1));
        }

        frameno += 1;
        if let Some(progress_fn) = progress_callback {
            progress_fn(frameno, keyframes.len());
        }
        if let Some(frame_limit) = frame_limit {
            if frameno == frame_limit {
                break;
            }
        }
    }
    Ok(DetectionResults {
        scene_changes: keyframes.into_iter().collect(),
        frame_count: frameno,
        speed: frameno as f64 / start_time.elapsed().as_secs_f64(),
    })
}

/// The scene detection algorithm to use.
#[derive(Clone, Copy, Debug, PartialOrd, PartialEq, Eq)]
pub enum SceneDetectionSpeed {
    /// Fastest scene detection, using pixel-wise comparison.
    Fast,
    /// Scene detection using frame costs and motion vectors.
    Standard,
    /// Do not perform scenecut detection; only place keyframes at fixed
    /// intervals.
    None,
}