/src/aom/av1/encoder/encode_strategy.c
Line | Count | Source (jump to first uncovered line) |
1 | | /* |
2 | | * Copyright (c) 2019, Alliance for Open Media. All rights reserved. |
3 | | * |
4 | | * This source code is subject to the terms of the BSD 2 Clause License and |
5 | | * the Alliance for Open Media Patent License 1.0. If the BSD 2 Clause License |
6 | | * was not distributed with this source code in the LICENSE file, you can |
7 | | * obtain it at www.aomedia.org/license/software. If the Alliance for Open |
8 | | * Media Patent License 1.0 was not distributed with this source code in the |
9 | | * PATENTS file, you can obtain it at www.aomedia.org/license/patent. |
10 | | */ |
11 | | |
12 | | #include <stdint.h> |
13 | | |
14 | | #include "av1/common/blockd.h" |
15 | | #include "config/aom_config.h" |
16 | | #include "config/aom_scale_rtcd.h" |
17 | | |
18 | | #include "aom/aom_codec.h" |
19 | | #include "aom/aom_encoder.h" |
20 | | |
21 | | #if CONFIG_MISMATCH_DEBUG |
22 | | #include "aom_util/debug_util.h" |
23 | | #endif // CONFIG_MISMATCH_DEBUG |
24 | | |
25 | | #include "av1/common/av1_common_int.h" |
26 | | #include "av1/common/reconinter.h" |
27 | | |
28 | | #include "av1/encoder/encoder.h" |
29 | | #include "av1/encoder/encode_strategy.h" |
30 | | #include "av1/encoder/encodeframe.h" |
31 | | #include "av1/encoder/encoder_alloc.h" |
32 | | #include "av1/encoder/firstpass.h" |
33 | | #include "av1/encoder/gop_structure.h" |
34 | | #include "av1/encoder/pass2_strategy.h" |
35 | | #include "av1/encoder/temporal_filter.h" |
36 | | #if CONFIG_THREE_PASS |
37 | | #include "av1/encoder/thirdpass.h" |
38 | | #endif // CONFIG_THREE_PASS |
39 | | #include "av1/encoder/tpl_model.h" |
40 | | |
41 | | #if CONFIG_TUNE_VMAF |
42 | | #include "av1/encoder/tune_vmaf.h" |
43 | | #endif |
44 | | |
45 | | #define TEMPORAL_FILTER_KEY_FRAME (CONFIG_REALTIME_ONLY ? 0 : 1) |
46 | | |
47 | | static INLINE void set_refresh_frame_flags( |
48 | | RefreshFrameInfo *const refresh_frame, bool refresh_gf, bool refresh_bwdref, |
49 | 0 | bool refresh_arf) { |
50 | 0 | refresh_frame->golden_frame = refresh_gf; |
51 | 0 | refresh_frame->bwd_ref_frame = refresh_bwdref; |
52 | 0 | refresh_frame->alt_ref_frame = refresh_arf; |
53 | 0 | } |
54 | | |
// Decide which reference buffers (golden / bwdref / altref) the upcoming
// frame refreshes, based on its FRAME_UPDATE_TYPE within the GF group, and
// record in cpi->rc whether the source frame is being re-shown as an
// existing alt-ref (overlay). Pending external refresh-flag overrides take
// precedence (outside stats generation), and force_refresh_all refreshes
// everything unconditionally.
void av1_configure_buffer_updates(AV1_COMP *const cpi,
                                  RefreshFrameInfo *const refresh_frame,
                                  const FRAME_UPDATE_TYPE type,
                                  const REFBUF_STATE refbuf_state,
                                  int force_refresh_all) {
  // NOTE(weitinglin): Should we define another function to take care of
  // cpi->rc.is_$Source_Type to make this function as it is in the comment?
  const ExtRefreshFrameFlagsInfo *const ext_refresh_frame_flags =
      &cpi->ext_flags.refresh_frame;
  cpi->rc.is_src_frame_alt_ref = 0;

  switch (type) {
    case KF_UPDATE:
      // Keyframes refresh the whole reference structure.
      set_refresh_frame_flags(refresh_frame, true, true, true);
      break;

    case LF_UPDATE:
      set_refresh_frame_flags(refresh_frame, false, false, false);
      break;

    case GF_UPDATE:
      set_refresh_frame_flags(refresh_frame, true, false, false);
      break;

    case OVERLAY_UPDATE:
      // A REFBUF_RESET overlay additionally clears bwdref/altref slots.
      if (refbuf_state == REFBUF_RESET)
        set_refresh_frame_flags(refresh_frame, true, true, true);
      else
        set_refresh_frame_flags(refresh_frame, true, false, false);

      cpi->rc.is_src_frame_alt_ref = 1;
      break;

    case ARF_UPDATE:
      // NOTE: BWDREF does not get updated along with ALTREF_FRAME.
      if (refbuf_state == REFBUF_RESET)
        set_refresh_frame_flags(refresh_frame, true, true, true);
      else
        set_refresh_frame_flags(refresh_frame, false, false, true);

      break;

    case INTNL_OVERLAY_UPDATE:
      set_refresh_frame_flags(refresh_frame, false, false, false);
      cpi->rc.is_src_frame_alt_ref = 1;
      break;

    case INTNL_ARF_UPDATE:
      set_refresh_frame_flags(refresh_frame, false, true, false);
      break;

    default: assert(0); break;
  }

  // Externally supplied refresh flags override the GF-group decision and
  // retag this frame's update type to match the requested refresh.
  if (ext_refresh_frame_flags->update_pending &&
      (!is_stat_generation_stage(cpi))) {
    set_refresh_frame_flags(refresh_frame,
                            ext_refresh_frame_flags->golden_frame,
                            ext_refresh_frame_flags->bwd_ref_frame,
                            ext_refresh_frame_flags->alt_ref_frame);
    GF_GROUP *gf_group = &cpi->ppi->gf_group;
    if (ext_refresh_frame_flags->golden_frame)
      gf_group->update_type[cpi->gf_frame_index] = GF_UPDATE;
    if (ext_refresh_frame_flags->alt_ref_frame)
      gf_group->update_type[cpi->gf_frame_index] = ARF_UPDATE;
    if (ext_refresh_frame_flags->bwd_ref_frame)
      gf_group->update_type[cpi->gf_frame_index] = INTNL_ARF_UPDATE;
  }

  if (force_refresh_all)
    set_refresh_frame_flags(refresh_frame, true, true, true);
}
127 | | |
128 | | static void set_additional_frame_flags(const AV1_COMMON *const cm, |
129 | 0 | unsigned int *const frame_flags) { |
130 | 0 | if (frame_is_intra_only(cm)) { |
131 | 0 | *frame_flags |= FRAMEFLAGS_INTRAONLY; |
132 | 0 | } |
133 | 0 | if (frame_is_sframe(cm)) { |
134 | 0 | *frame_flags |= FRAMEFLAGS_SWITCH; |
135 | 0 | } |
136 | 0 | if (cm->features.error_resilient_mode) { |
137 | 0 | *frame_flags |= FRAMEFLAGS_ERROR_RESILIENT; |
138 | 0 | } |
139 | 0 | } |
140 | | |
141 | | static void set_ext_overrides(AV1_COMMON *const cm, |
142 | | EncodeFrameParams *const frame_params, |
143 | 0 | ExternalFlags *const ext_flags) { |
144 | | // Overrides the defaults with the externally supplied values with |
145 | | // av1_update_reference() and av1_update_entropy() calls |
146 | | // Note: The overrides are valid only for the next frame passed |
147 | | // to av1_encode_lowlevel() |
148 | |
|
149 | 0 | if (ext_flags->use_s_frame) { |
150 | 0 | frame_params->frame_type = S_FRAME; |
151 | 0 | } |
152 | |
|
153 | 0 | if (ext_flags->refresh_frame_context_pending) { |
154 | 0 | cm->features.refresh_frame_context = ext_flags->refresh_frame_context; |
155 | 0 | ext_flags->refresh_frame_context_pending = 0; |
156 | 0 | } |
157 | 0 | cm->features.allow_ref_frame_mvs = ext_flags->use_ref_frame_mvs; |
158 | |
|
159 | 0 | frame_params->error_resilient_mode = ext_flags->use_error_resilient; |
160 | | // A keyframe is already error resilient and keyframes with |
161 | | // error_resilient_mode interferes with the use of show_existing_frame |
162 | | // when forward reference keyframes are enabled. |
163 | 0 | frame_params->error_resilient_mode &= frame_params->frame_type != KEY_FRAME; |
164 | | // For bitstream conformance, s-frames must be error-resilient |
165 | 0 | frame_params->error_resilient_mode |= frame_params->frame_type == S_FRAME; |
166 | 0 | } |
167 | | |
// Choose the primary reference frame whose frame context the current frame
// inherits. Returns an index relative to LAST_FRAME, or PRIMARY_REF_NONE
// when no context should be reused (intra-only, error-resilient, or
// externally requested).
static int choose_primary_ref_frame(
    AV1_COMP *const cpi, const EncodeFrameParams *const frame_params) {
  const AV1_COMMON *const cm = &cpi->common;

  const int intra_only = frame_params->frame_type == KEY_FRAME ||
                         frame_params->frame_type == INTRA_ONLY_FRAME;
  if (intra_only || frame_params->error_resilient_mode ||
      cpi->ext_flags.use_primary_ref_none) {
    return PRIMARY_REF_NONE;
  }

#if !CONFIG_REALTIME_ONLY
  if (cpi->use_ducky_encode) {
    // Ducky encode prescribes the primary reference buffer directly through
    // the GF group's precomputed primary_ref_idx table.
    int wanted_fb = cpi->ppi->gf_group.primary_ref_idx[cpi->gf_frame_index];
    for (int ref_frame = LAST_FRAME; ref_frame <= ALTREF_FRAME; ref_frame++) {
      if (get_ref_frame_map_idx(cm, ref_frame) == wanted_fb)
        return ref_frame - LAST_FRAME;
    }

    return PRIMARY_REF_NONE;
  }
#endif  // !CONFIG_REALTIME_ONLY

  // In large scale case, always use Last frame's frame contexts.
  // Note(yunqing): In other cases, primary_ref_frame is chosen based on
  // cpi->ppi->gf_group.layer_depth[cpi->gf_frame_index], which also controls
  // frame bit allocation.
  if (cm->tiles.large_scale) return (LAST_FRAME - LAST_FRAME);

  if (cpi->ppi->use_svc || cpi->ppi->rtc_ref.set_ref_frame_config)
    return av1_svc_primary_ref_frame(cpi);

  // Find the most recent reference frame with the same reference type as the
  // current frame
  const int current_ref_type = get_current_frame_ref_type(cpi);
  int wanted_fb = cpi->ppi->fb_of_context_type[current_ref_type];
#if CONFIG_FPMT_TEST
  if (cpi->ppi->fpmt_unit_test_cfg == PARALLEL_SIMULATION_ENCODE) {
    GF_GROUP *const gf_group = &cpi->ppi->gf_group;
    if (gf_group->update_type[cpi->gf_frame_index] == INTNL_ARF_UPDATE) {
      int frame_level = gf_group->frame_parallel_level[cpi->gf_frame_index];
      // Book keep wanted_fb of frame_parallel_level 1 frame in an FP2 set.
      if (frame_level == 1) {
        cpi->wanted_fb = wanted_fb;
      }
      // Use the wanted_fb of level 1 frame in an FP2 for a level 2 frame in the
      // set.
      if (frame_level == 2 &&
          gf_group->update_type[cpi->gf_frame_index - 1] == INTNL_ARF_UPDATE) {
        assert(gf_group->frame_parallel_level[cpi->gf_frame_index - 1] == 1);
        wanted_fb = cpi->wanted_fb;
      }
    }
  }
#endif  // CONFIG_FPMT_TEST
  int primary_ref_frame = PRIMARY_REF_NONE;
  // Map the wanted frame buffer back to a reference slot. Note the loop does
  // not break early: if several slots map to the same buffer, the last one
  // wins.
  for (int ref_frame = LAST_FRAME; ref_frame <= ALTREF_FRAME; ref_frame++) {
    if (get_ref_frame_map_idx(cm, ref_frame) == wanted_fb) {
      primary_ref_frame = ref_frame - LAST_FRAME;
    }
  }

  return primary_ref_frame;
}
232 | | |
// Re-estimate the encoder's frame rate from source timestamps. ts_start and
// ts_end bound the current frame's presentation interval in 10 MHz ticks
// (so framerate = 1e7 / duration). Small duration drift is smoothed into a
// running average; a >=10% change triggers an immediate step update.
static void adjust_frame_rate(AV1_COMP *cpi, int64_t ts_start, int64_t ts_end) {
  TimeStamps *time_stamps = &cpi->time_stamps;
  int64_t this_duration;
  int step = 0;

  // Clear down mmx registers

  if (cpi->ppi->use_svc && cpi->ppi->rtc_ref.set_ref_frame_config &&
      cpi->svc.number_spatial_layers > 1) {
    // ts_start is the timestamp for the current frame and ts_end is the
    // expected next timestamp given the duration passed into codec_encode().
    // See the setting in encoder_encode() in av1_cx_iface.c:
    // ts_start = timebase_units_to_ticks(cpi_data.timestamp_ratio, ptsvol),
    // ts_end = timebase_units_to_ticks(cpi_data.timestamp_ratio, ptsvol +
    // duration). So the difference ts_end - ts_start is the duration passed
    // in by the user. For spatial layers SVC set the framerate based directly
    // on the duration, and bypass the adjustments below.
    this_duration = ts_end - ts_start;
    if (this_duration > 0) {
      cpi->new_framerate = 10000000.0 / this_duration;
      av1_new_framerate(cpi, cpi->new_framerate);
      time_stamps->prev_ts_start = ts_start;
      time_stamps->prev_ts_end = ts_end;
      return;
    }
  }

  if (ts_start == time_stamps->first_ts_start) {
    // Very first frame: its own duration is the only information available,
    // so always take a step update.
    this_duration = ts_end - ts_start;
    step = 1;
  } else {
    int64_t last_duration =
        time_stamps->prev_ts_end - time_stamps->prev_ts_start;

    this_duration = ts_end - time_stamps->prev_ts_end;

    // do a step update if the duration changes by 10%
    if (last_duration)
      step = (int)((this_duration - last_duration) * 10 / last_duration);
  }

  if (this_duration) {
    if (step) {
      cpi->new_framerate = 10000000.0 / this_duration;
      av1_new_framerate(cpi, cpi->new_framerate);
    } else {
      // Average this frame's rate into the last second's average
      // frame rate. If we haven't seen 1 second yet, then average
      // over the whole interval seen.
      const double interval =
          AOMMIN((double)(ts_end - time_stamps->first_ts_start), 10000000.0);
      double avg_duration = 10000000.0 / cpi->framerate;
      avg_duration *= (interval - avg_duration + this_duration);
      avg_duration /= interval;
      cpi->new_framerate = (10000000.0 / avg_duration);
      // For parallel frames update cpi->framerate with new_framerate
      // during av1_post_encode_updates()
      double framerate =
          (cpi->ppi->gf_group.frame_parallel_level[cpi->gf_frame_index] > 0)
              ? cpi->framerate
              : cpi->new_framerate;
      av1_new_framerate(cpi, framerate);
    }
  }

  time_stamps->prev_ts_start = ts_start;
  time_stamps->prev_ts_end = ts_end;
}
301 | | |
302 | | // Determine whether there is a forced keyframe pending in the lookahead buffer |
303 | | int is_forced_keyframe_pending(struct lookahead_ctx *lookahead, |
304 | | const int up_to_index, |
305 | 0 | const COMPRESSOR_STAGE compressor_stage) { |
306 | 0 | for (int i = 0; i <= up_to_index; i++) { |
307 | 0 | const struct lookahead_entry *e = |
308 | 0 | av1_lookahead_peek(lookahead, i, compressor_stage); |
309 | 0 | if (e == NULL) { |
310 | | // We have reached the end of the lookahead buffer and not early-returned |
311 | | // so there isn't a forced key-frame pending. |
312 | 0 | return -1; |
313 | 0 | } else if (e->flags == AOM_EFLAG_FORCE_KF) { |
314 | 0 | return i; |
315 | 0 | } else { |
316 | 0 | continue; |
317 | 0 | } |
318 | 0 | } |
319 | 0 | return -1; // Never reached |
320 | 0 | } |
321 | | |
// Check if we should encode an ARF or internal ARF. If not, try a LAST
// Do some setup associated with the chosen source
// temporal_filtered, flush, and frame_update_type are outputs.
// Return the frame source, or NULL if we couldn't find one
static struct lookahead_entry *choose_frame_source(
    AV1_COMP *const cpi, int *const flush, int *pop_lookahead,
    struct lookahead_entry **last_source, int *const show_frame) {
  AV1_COMMON *const cm = &cpi->common;
  const GF_GROUP *const gf_group = &cpi->ppi->gf_group;
  struct lookahead_entry *source = NULL;

  // Source index in lookahead buffer.
  int src_index = gf_group->arf_src_offset[cpi->gf_frame_index];

  // TODO(Aasaipriya): Forced key frames need to be fixed when rc_mode != AOM_Q
  if (src_index &&
      (is_forced_keyframe_pending(cpi->ppi->lookahead, src_index,
                                  cpi->compressor_stage) != -1) &&
      cpi->oxcf.rc_cfg.mode != AOM_Q && !is_stat_generation_stage(cpi)) {
    // A forced keyframe falls inside the planned ARF window: abandon the ARF
    // and flush so the keyframe is reached without further lookahead.
    src_index = 0;
    *flush = 1;
  }

  // If the current frame is arf, then we should not pop from the lookahead
  // buffer. If the current frame is not arf, then pop it. This assumes the
  // first frame in the GF group is not arf. May need to change if it is not
  // true.
  *pop_lookahead = (src_index == 0);
  // If this is a key frame and keyframe filtering is enabled with overlay,
  // then do not pop.
  if (*pop_lookahead && cpi->oxcf.kf_cfg.enable_keyframe_filtering > 1 &&
      gf_group->update_type[cpi->gf_frame_index] == ARF_UPDATE &&
      !is_stat_generation_stage(cpi) && cpi->ppi->lookahead) {
    if (cpi->ppi->lookahead->read_ctxs[cpi->compressor_stage].sz &&
        (*flush ||
         cpi->ppi->lookahead->read_ctxs[cpi->compressor_stage].sz ==
             cpi->ppi->lookahead->read_ctxs[cpi->compressor_stage].pop_sz)) {
      *pop_lookahead = 0;
    }
  }

  // LAP stage does not have ARFs or forward key-frames,
  // hence, always pop_lookahead here.
  if (is_stat_generation_stage(cpi)) {
    *pop_lookahead = 1;
    src_index = 0;
  }

  *show_frame = *pop_lookahead;

#if CONFIG_FPMT_TEST
  if (cpi->ppi->fpmt_unit_test_cfg == PARALLEL_ENCODE) {
#else
  {
#endif  // CONFIG_FPMT_TEST
    // Future frame in parallel encode set
    if (gf_group->src_offset[cpi->gf_frame_index] != 0 &&
        !is_stat_generation_stage(cpi))
      src_index = gf_group->src_offset[cpi->gf_frame_index];
  }
  if (*show_frame) {
    // show frame, pop from buffer
    // Get last frame source.
    if (cm->current_frame.frame_number > 0) {
      *last_source = av1_lookahead_peek(cpi->ppi->lookahead, src_index - 1,
                                        cpi->compressor_stage);
    }
    // Read in the source frame.
    source = av1_lookahead_peek(cpi->ppi->lookahead, src_index,
                                cpi->compressor_stage);
  } else {
    // no show frames are arf frames
    source = av1_lookahead_peek(cpi->ppi->lookahead, src_index,
                                cpi->compressor_stage);
    if (source != NULL) {
      cm->showable_frame = 1;
    }
  }
  return source;
}
402 | | |
403 | | // Don't allow a show_existing_frame to coincide with an error resilient or |
404 | | // S-Frame. An exception can be made in the case of a keyframe, since it does |
405 | | // not depend on any previous frames. |
406 | | static int allow_show_existing(const AV1_COMP *const cpi, |
407 | 0 | unsigned int frame_flags) { |
408 | 0 | if (cpi->common.current_frame.frame_number == 0) return 0; |
409 | | |
410 | 0 | const struct lookahead_entry *lookahead_src = |
411 | 0 | av1_lookahead_peek(cpi->ppi->lookahead, 0, cpi->compressor_stage); |
412 | 0 | if (lookahead_src == NULL) return 1; |
413 | | |
414 | 0 | const int is_error_resilient = |
415 | 0 | cpi->oxcf.tool_cfg.error_resilient_mode || |
416 | 0 | (lookahead_src->flags & AOM_EFLAG_ERROR_RESILIENT); |
417 | 0 | const int is_s_frame = cpi->oxcf.kf_cfg.enable_sframe || |
418 | 0 | (lookahead_src->flags & AOM_EFLAG_SET_S_FRAME); |
419 | 0 | const int is_key_frame = |
420 | 0 | (cpi->rc.frames_to_key == 0) || (frame_flags & FRAMEFLAGS_KEY); |
421 | 0 | return !(is_error_resilient || is_s_frame) || is_key_frame; |
422 | 0 | } |
423 | | |
424 | | // Update frame_flags to tell the encoder's caller what sort of frame was |
425 | | // encoded. |
426 | | static void update_frame_flags(const AV1_COMMON *const cm, |
427 | | const RefreshFrameInfo *const refresh_frame, |
428 | 0 | unsigned int *frame_flags) { |
429 | 0 | if (encode_show_existing_frame(cm)) { |
430 | 0 | *frame_flags &= ~(uint32_t)FRAMEFLAGS_GOLDEN; |
431 | 0 | *frame_flags &= ~(uint32_t)FRAMEFLAGS_BWDREF; |
432 | 0 | *frame_flags &= ~(uint32_t)FRAMEFLAGS_ALTREF; |
433 | 0 | *frame_flags &= ~(uint32_t)FRAMEFLAGS_KEY; |
434 | 0 | return; |
435 | 0 | } |
436 | | |
437 | 0 | if (refresh_frame->golden_frame) { |
438 | 0 | *frame_flags |= FRAMEFLAGS_GOLDEN; |
439 | 0 | } else { |
440 | 0 | *frame_flags &= ~(uint32_t)FRAMEFLAGS_GOLDEN; |
441 | 0 | } |
442 | |
|
443 | 0 | if (refresh_frame->alt_ref_frame) { |
444 | 0 | *frame_flags |= FRAMEFLAGS_ALTREF; |
445 | 0 | } else { |
446 | 0 | *frame_flags &= ~(uint32_t)FRAMEFLAGS_ALTREF; |
447 | 0 | } |
448 | |
|
449 | 0 | if (refresh_frame->bwd_ref_frame) { |
450 | 0 | *frame_flags |= FRAMEFLAGS_BWDREF; |
451 | 0 | } else { |
452 | 0 | *frame_flags &= ~(uint32_t)FRAMEFLAGS_BWDREF; |
453 | 0 | } |
454 | |
|
455 | 0 | if (cm->current_frame.frame_type == KEY_FRAME) { |
456 | 0 | *frame_flags |= FRAMEFLAGS_KEY; |
457 | 0 | } else { |
458 | 0 | *frame_flags &= ~(uint32_t)FRAMEFLAGS_KEY; |
459 | 0 | } |
460 | 0 | } |
461 | | |
462 | | #define DUMP_REF_FRAME_IMAGES 0 |
463 | | |
#if DUMP_REF_FRAME_IMAGES == 1
// Debug helper: write one reference frame's raw planes (Y full size, U/V at
// half resolution, cropped to cm->width/height) to file_name as I420.
// Returns AOM_CODEC_OK, or AOM_CODEC_MEM_ERROR if the buffer is NULL or the
// file cannot be opened.
// NOTE(review): assumes 8-bit, 4:2:0 buffers — high-bitdepth data would need
// 2 bytes per sample; confirm before enabling for hbd streams.
static int dump_one_image(AV1_COMMON *cm,
                          const YV12_BUFFER_CONFIG *const ref_buf,
                          char *file_name) {
  int h;
  FILE *f_ref = NULL;

  if (ref_buf == NULL) {
    printf("Frame data buffer is NULL.\n");
    return AOM_CODEC_MEM_ERROR;
  }

  if ((f_ref = fopen(file_name, "wb")) == NULL) {
    printf("Unable to open file %s to write.\n", file_name);
    return AOM_CODEC_MEM_ERROR;
  }

  // --- Y ---
  for (h = 0; h < cm->height; ++h) {
    fwrite(&ref_buf->y_buffer[h * ref_buf->y_stride], 1, cm->width, f_ref);
  }
  // --- U ---
  for (h = 0; h < (cm->height >> 1); ++h) {
    fwrite(&ref_buf->u_buffer[h * ref_buf->uv_stride], 1, (cm->width >> 1),
           f_ref);
  }
  // --- V ---
  for (h = 0; h < (cm->height >> 1); ++h) {
    fwrite(&ref_buf->v_buffer[h * ref_buf->uv_stride], 1, (cm->width >> 1),
           f_ref);
  }

  fclose(f_ref);

  return AOM_CODEC_OK;
}

// Debug helper: dump every reference frame (LAST..ALTREF) of the current
// frame to /tmp/enc_F<frame_number>_ref_<ref>.yuv. Compiled in only when
// DUMP_REF_FRAME_IMAGES == 1; dump_one_image errors are ignored.
static void dump_ref_frame_images(AV1_COMP *cpi) {
  AV1_COMMON *const cm = &cpi->common;
  MV_REFERENCE_FRAME ref_frame;

  for (ref_frame = LAST_FRAME; ref_frame <= ALTREF_FRAME; ++ref_frame) {
    char file_name[256] = "";
    snprintf(file_name, sizeof(file_name), "/tmp/enc_F%d_ref_%d.yuv",
             cm->current_frame.frame_number, ref_frame);
    dump_one_image(cm, get_ref_frame_yv12_buf(cpi, ref_frame), file_name);
  }
}
#endif  // DUMP_REF_FRAME_IMAGES == 1
513 | | |
514 | 0 | int av1_get_refresh_ref_frame_map(int refresh_frame_flags) { |
515 | 0 | int ref_map_index; |
516 | |
|
517 | 0 | for (ref_map_index = 0; ref_map_index < REF_FRAMES; ++ref_map_index) |
518 | 0 | if ((refresh_frame_flags >> ref_map_index) & 1) break; |
519 | |
|
520 | 0 | if (ref_map_index == REF_FRAMES) ref_map_index = INVALID_IDX; |
521 | 0 | return ref_map_index; |
522 | 0 | } |
523 | | |
524 | 0 | static int get_free_ref_map_index(RefFrameMapPair ref_map_pairs[REF_FRAMES]) { |
525 | 0 | for (int idx = 0; idx < REF_FRAMES; ++idx) |
526 | 0 | if (ref_map_pairs[idx].disp_order == -1) return idx; |
527 | 0 | return INVALID_IDX; |
528 | 0 | } |
529 | | |
// Choose which reference-map slot to evict for the current frame.
// Only slots whose display order is at least 3 frames behind cur_frame_disp
// are eligible (future frames and the three closest past frames are kept).
// Among eligible slots: if the current frame is an ARF (update_arf) and more
// than two pyramid-level-1 references exist, evict the oldest of those;
// otherwise evict the oldest non-level-1 reference, falling back to the
// oldest level-1 reference.
static int get_refresh_idx(RefFrameMapPair ref_frame_map_pairs[REF_FRAMES],
                           int update_arf, GF_GROUP *gf_group, int gf_index,
                           int enable_refresh_skip, int cur_frame_disp) {
  int arf_count = 0;
  int oldest_arf_order = INT32_MAX;
  int oldest_arf_idx = -1;

  int oldest_frame_order = INT32_MAX;
  int oldest_idx = -1;

  for (int map_idx = 0; map_idx < REF_FRAMES; map_idx++) {
    RefFrameMapPair ref_pair = ref_frame_map_pairs[map_idx];
    if (ref_pair.disp_order == -1) continue;  // empty slot
    const int frame_order = ref_pair.disp_order;
    const int reference_frame_level = ref_pair.pyr_level;
    // Keep future frames and three closest previous frames in output order.
    if (frame_order > cur_frame_disp - 3) continue;

    if (enable_refresh_skip) {
      int skip_frame = 0;
      // Prevent refreshing a frame in gf_group->skip_frame_refresh.
      for (int i = 0; i < REF_FRAMES; i++) {
        int frame_to_skip = gf_group->skip_frame_refresh[gf_index][i];
        if (frame_to_skip == INVALID_IDX) break;  // list is INVALID_IDX-terminated
        if (frame_order == frame_to_skip) {
          skip_frame = 1;
          break;
        }
      }
      if (skip_frame) continue;
    }

    // Keep track of the oldest level 1 frame if the current frame is also level
    // 1.
    if (reference_frame_level == 1) {
      // If there are more than 2 level 1 frames in the reference list,
      // discard the oldest.
      if (frame_order < oldest_arf_order) {
        oldest_arf_order = frame_order;
        oldest_arf_idx = map_idx;
      }
      arf_count++;
      continue;
    }

    // Update the overall oldest reference frame.
    if (frame_order < oldest_frame_order) {
      oldest_frame_order = frame_order;
      oldest_idx = map_idx;
    }
  }
  if (update_arf && arf_count > 2) return oldest_arf_idx;
  if (oldest_idx >= 0) return oldest_idx;
  if (oldest_arf_idx >= 0) return oldest_arf_idx;
  // NOTE(review): at this point both oldest_idx and oldest_arf_idx are -1
  // (every slot was future/close/skipped), so this branch returns -1 after
  // the assert; callers appear to rely on it never firing — TODO confirm.
  if (oldest_idx == -1) {
    assert(arf_count > 2 && enable_refresh_skip);
    return oldest_arf_idx;
  }
  assert(0 && "No valid refresh index found");
  return -1;
}
591 | | |
592 | | // Computes the reference refresh index for INTNL_ARF_UPDATE frame. |
593 | | int av1_calc_refresh_idx_for_intnl_arf( |
594 | | AV1_COMP *cpi, RefFrameMapPair ref_frame_map_pairs[REF_FRAMES], |
595 | 0 | int gf_index) { |
596 | 0 | GF_GROUP *const gf_group = &cpi->ppi->gf_group; |
597 | | |
598 | | // Search for the open slot to store the current frame. |
599 | 0 | int free_fb_index = get_free_ref_map_index(ref_frame_map_pairs); |
600 | | |
601 | | // Use a free slot if available. |
602 | 0 | if (free_fb_index != INVALID_IDX) { |
603 | 0 | return free_fb_index; |
604 | 0 | } else { |
605 | 0 | int enable_refresh_skip = !is_one_pass_rt_params(cpi); |
606 | 0 | int refresh_idx = |
607 | 0 | get_refresh_idx(ref_frame_map_pairs, 0, gf_group, gf_index, |
608 | 0 | enable_refresh_skip, gf_group->display_idx[gf_index]); |
609 | 0 | return refresh_idx; |
610 | 0 | } |
611 | 0 | } |
612 | | |
// Compute the refresh_frame_flags bitmask for the current frame: bit i set
// means reference slot i will be overwritten. Handled in priority order:
// GF-group buffer resets and S-frames refresh every slot;
// show_existing_frame and droppable frames refresh nothing; ducky encode and
// pending external flag overrides prescribe slots directly; otherwise a free
// slot is used, falling back to get_refresh_idx()'s eviction choice.
int av1_get_refresh_frame_flags(
    const AV1_COMP *const cpi, const EncodeFrameParams *const frame_params,
    FRAME_UPDATE_TYPE frame_update_type, int gf_index, int cur_disp_order,
    RefFrameMapPair ref_frame_map_pairs[REF_FRAMES]) {
  const AV1_COMMON *const cm = &cpi->common;
  const ExtRefreshFrameFlagsInfo *const ext_refresh_frame_flags =
      &cpi->ext_flags.refresh_frame;

  GF_GROUP *gf_group = &cpi->ppi->gf_group;
  if (gf_group->refbuf_state[gf_index] == REFBUF_RESET)
    return SELECT_ALL_BUF_SLOTS;

  // TODO(jingning): Deprecate the following operations.
  // Switch frames and shown key-frames overwrite all reference slots
  if (frame_params->frame_type == S_FRAME) return SELECT_ALL_BUF_SLOTS;

  // show_existing_frames don't actually send refresh_frame_flags so set the
  // flags to 0 to keep things consistent.
  if (frame_params->show_existing_frame) return 0;

  const RTC_REF *const rtc_ref = &cpi->ppi->rtc_ref;
  if (is_frame_droppable(rtc_ref, ext_refresh_frame_flags)) return 0;

#if !CONFIG_REALTIME_ONLY
  if (cpi->use_ducky_encode &&
      cpi->ducky_encode_info.frame_info.gop_mode == DUCKY_ENCODE_GOP_MODE_RCL) {
    // Ducky encode (rate-control-library GOP mode) prescribes the slot
    // directly through the GF group.
    int new_fb_map_idx = cpi->ppi->gf_group.update_ref_idx[gf_index];
    if (new_fb_map_idx == INVALID_IDX) return 0;
    return 1 << new_fb_map_idx;
  }
#endif  // !CONFIG_REALTIME_ONLY

  int refresh_mask = 0;
  if (ext_refresh_frame_flags->update_pending) {
    if (rtc_ref->set_ref_frame_config ||
        use_rtc_reference_structure_one_layer(cpi)) {
      // RTC reference configuration carries per-slot refresh bits directly.
      for (unsigned int i = 0; i < INTER_REFS_PER_FRAME; i++) {
        int ref_frame_map_idx = rtc_ref->ref_idx[i];
        refresh_mask |= rtc_ref->refresh[ref_frame_map_idx]
                        << ref_frame_map_idx;
      }
      return refresh_mask;
    }
    // Unfortunately the encoder interface reflects the old refresh_*_frame
    // flags so we have to replicate the old refresh_frame_flags logic here in
    // order to preserve the behaviour of the flag overrides.
    int ref_frame_map_idx = get_ref_frame_map_idx(cm, LAST_FRAME);
    if (ref_frame_map_idx != INVALID_IDX)
      refresh_mask |= ext_refresh_frame_flags->last_frame << ref_frame_map_idx;

    ref_frame_map_idx = get_ref_frame_map_idx(cm, EXTREF_FRAME);
    if (ref_frame_map_idx != INVALID_IDX)
      refresh_mask |= ext_refresh_frame_flags->bwd_ref_frame
                      << ref_frame_map_idx;

    ref_frame_map_idx = get_ref_frame_map_idx(cm, ALTREF2_FRAME);
    if (ref_frame_map_idx != INVALID_IDX)
      refresh_mask |= ext_refresh_frame_flags->alt2_ref_frame
                      << ref_frame_map_idx;

    if (frame_update_type == OVERLAY_UPDATE) {
      // For an overlay, the golden override targets the ALTREF slot that is
      // being shown.
      ref_frame_map_idx = get_ref_frame_map_idx(cm, ALTREF_FRAME);
      if (ref_frame_map_idx != INVALID_IDX)
        refresh_mask |= ext_refresh_frame_flags->golden_frame
                        << ref_frame_map_idx;
    } else {
      ref_frame_map_idx = get_ref_frame_map_idx(cm, GOLDEN_FRAME);
      if (ref_frame_map_idx != INVALID_IDX)
        refresh_mask |= ext_refresh_frame_flags->golden_frame
                        << ref_frame_map_idx;

      ref_frame_map_idx = get_ref_frame_map_idx(cm, ALTREF_FRAME);
      if (ref_frame_map_idx != INVALID_IDX)
        refresh_mask |= ext_refresh_frame_flags->alt_ref_frame
                        << ref_frame_map_idx;
    }
    return refresh_mask;
  }

  // Search for the open slot to store the current frame.
  int free_fb_index = get_free_ref_map_index(ref_frame_map_pairs);

  // No refresh necessary for these frame types.
  if (frame_update_type == OVERLAY_UPDATE ||
      frame_update_type == INTNL_OVERLAY_UPDATE)
    return refresh_mask;

  // If there is an open slot, refresh that one instead of replacing a
  // reference.
  if (free_fb_index != INVALID_IDX) {
    refresh_mask = 1 << free_fb_index;
    return refresh_mask;
  }
  const int enable_refresh_skip = !is_one_pass_rt_params(cpi);
  const int update_arf = frame_update_type == ARF_UPDATE;
  const int refresh_idx =
      get_refresh_idx(ref_frame_map_pairs, update_arf, &cpi->ppi->gf_group,
                      gf_index, enable_refresh_skip, cur_disp_order);
  return 1 << refresh_idx;
}
713 | | |
714 | | #if !CONFIG_REALTIME_ONLY |
// Apply temporal filtering to source frames and encode the filtered frame.
// If the current frame does not require filtering, this function is identical
// to av1_encode() except that tpl is not performed.
//
// On success writes the coded frame into |dest| / |frame_results| and returns
// AOM_CODEC_OK; returns AOM_CODEC_ERROR if the underlying av1_encode() call
// fails. May replace frame_input->source with a filtered buffer as a side
// effect (the original pointer is restored for PSNR computation at the end).
static int denoise_and_encode(AV1_COMP *const cpi, uint8_t *const dest,
                              EncodeFrameInput *const frame_input,
                              const EncodeFrameParams *const frame_params,
                              EncodeFrameResults *const frame_results) {
#if CONFIG_COLLECT_COMPONENT_TIMING
  if (cpi->oxcf.pass == 2) start_timing(cpi, denoise_and_encode_time);
#endif
  const AV1EncoderConfig *const oxcf = &cpi->oxcf;
  AV1_COMMON *const cm = &cpi->common;

  GF_GROUP *const gf_group = &cpi->ppi->gf_group;
  FRAME_UPDATE_TYPE update_type =
      get_frame_update_type(&cpi->ppi->gf_group, cpi->gf_frame_index);
  const int is_second_arf =
      av1_gop_is_second_arf(gf_group, cpi->gf_frame_index);

  // Decide whether to apply temporal filtering to the source frame.
  // Filtering is only ever considered for key frames, ARFs, and second ARFs,
  // and never during the stats-generation (first) pass.
  int apply_filtering =
      av1_is_temporal_filter_on(oxcf) && !is_stat_generation_stage(cpi);
  if (update_type != KF_UPDATE && update_type != ARF_UPDATE && !is_second_arf) {
    apply_filtering = 0;
  }
  if (apply_filtering) {
    if (frame_params->frame_type == KEY_FRAME) {
      // TODO(angiebird): Move the noise level check to av1_tf_info_filtering.
      // Decide whether it is allowed to perform key frame filtering
      int allow_kf_filtering = oxcf->kf_cfg.enable_keyframe_filtering &&
                               !frame_params->show_existing_frame &&
                               !is_lossless_requested(&oxcf->rc_cfg);
      if (allow_kf_filtering) {
        // Only filter the key frame when the luma plane actually contains
        // measurable noise.
        double y_noise_level = 0.0;
        av1_estimate_noise_level(
            frame_input->source, &y_noise_level, AOM_PLANE_Y, AOM_PLANE_Y,
            cm->seq_params->bit_depth, NOISE_ESTIMATION_EDGE_THRESHOLD);
        apply_filtering = y_noise_level > 0;
      } else {
        apply_filtering = 0;
      }
      // If we are doing kf filtering, set up a few things.
      if (apply_filtering) {
        av1_setup_past_independence(cm);
      }
    } else if (is_second_arf) {
      apply_filtering = cpi->sf.hl_sf.second_alt_ref_filtering;
    }
  }

#if CONFIG_COLLECT_COMPONENT_TIMING
  if (cpi->oxcf.pass == 2) start_timing(cpi, apply_filtering_time);
#endif
  // Save the pointer to the original source image.
  YV12_BUFFER_CONFIG *source_buffer = frame_input->source;
  // apply filtering to frame
  if (apply_filtering) {
    int show_existing_alt_ref = 0;
    FRAME_DIFF frame_diff;
    int top_index = 0;
    int bottom_index = 0;
    // Pick a q index for this frame; used below to decide whether the
    // filtered frame is close enough to the source to be shown directly.
    const int q_index = av1_rc_pick_q_and_bounds(
        cpi, cpi->oxcf.frm_dim_cfg.width, cpi->oxcf.frm_dim_cfg.height,
        cpi->gf_frame_index, &bottom_index, &top_index);

    // TODO(bohanli): figure out why we need frame_type in cm here.
    cm->current_frame.frame_type = frame_params->frame_type;
    if (update_type == KF_UPDATE || update_type == ARF_UPDATE) {
      // KF/ARF frames were filtered ahead of time; fetch the cached result.
      YV12_BUFFER_CONFIG *tf_buf = av1_tf_info_get_filtered_buf(
          &cpi->ppi->tf_info, cpi->gf_frame_index, &frame_diff);
      if (tf_buf != NULL) {
        frame_input->source = tf_buf;
        show_existing_alt_ref = av1_check_show_filtered_frame(
            tf_buf, &frame_diff, q_index, cm->seq_params->bit_depth);
        if (show_existing_alt_ref) {
          cpi->common.showable_frame |= 1;
        } else {
          cpi->common.showable_frame = 0;
        }
      }
      if (gf_group->frame_type[cpi->gf_frame_index] != KEY_FRAME) {
        cpi->ppi->show_existing_alt_ref = show_existing_alt_ref;
      }
    }

    if (is_second_arf) {
      // Allocate the memory for tf_buf_second_arf buffer, only when it is
      // required.
      int ret = aom_realloc_frame_buffer(
          &cpi->ppi->tf_info.tf_buf_second_arf, oxcf->frm_dim_cfg.width,
          oxcf->frm_dim_cfg.height, cm->seq_params->subsampling_x,
          cm->seq_params->subsampling_y, cm->seq_params->use_highbitdepth,
          cpi->oxcf.border_in_pixels, cm->features.byte_alignment, NULL, NULL,
          NULL, cpi->alloc_pyramid, 0);
      if (ret)
        aom_internal_error(cm->error, AOM_CODEC_MEM_ERROR,
                           "Failed to allocate tf_buf_second_arf");

      YV12_BUFFER_CONFIG *tf_buf_second_arf =
          &cpi->ppi->tf_info.tf_buf_second_arf;
      // We didn't apply temporal filtering for second arf ahead in
      // av1_tf_info_filtering().
      const int arf_src_index = gf_group->arf_src_offset[cpi->gf_frame_index];
      // Right now, we are still using tf_buf_second_arf due to
      // implementation complexity.
      // TODO(angiebird): Reuse tf_info->tf_buf here.
      av1_temporal_filter(cpi, arf_src_index, cpi->gf_frame_index, &frame_diff,
                          tf_buf_second_arf);
      show_existing_alt_ref = av1_check_show_filtered_frame(
          tf_buf_second_arf, &frame_diff, q_index, cm->seq_params->bit_depth);
      if (show_existing_alt_ref) {
        aom_extend_frame_borders(tf_buf_second_arf, av1_num_planes(cm));
        frame_input->source = tf_buf_second_arf;
      }
      // Currently INTNL_ARF_UPDATE only do show_existing.
      cpi->common.showable_frame |= 1;
    }

    // Copy source metadata to the temporal filtered frame
    if (source_buffer->metadata &&
        aom_copy_metadata_to_frame_buffer(frame_input->source,
                                          source_buffer->metadata)) {
      aom_internal_error(
          cm->error, AOM_CODEC_MEM_ERROR,
          "Failed to copy source metadata to the temporal filtered frame");
    }
  }
#if CONFIG_COLLECT_COMPONENT_TIMING
  if (cpi->oxcf.pass == 2) end_timing(cpi, apply_filtering_time);
#endif

  // MV search parameters are only refreshed on key frames and on ARF/GF
  // updates; other frame types inherit the previously computed parameters.
  int set_mv_params = frame_params->frame_type == KEY_FRAME ||
                      update_type == ARF_UPDATE || update_type == GF_UPDATE;
  cm->show_frame = frame_params->show_frame;
  cm->current_frame.frame_type = frame_params->frame_type;
  // TODO(bohanli): Why is this? what part of it is necessary?
  av1_set_frame_size(cpi, cm->width, cm->height);
  if (set_mv_params) av1_set_mv_search_params(cpi);

#if CONFIG_RD_COMMAND
  if (frame_params->frame_type == KEY_FRAME) {
    char filepath[] = "rd_command.txt";
    av1_read_rd_command(filepath, &cpi->rd_command);
  }
#endif  // CONFIG_RD_COMMAND
  // TPL model setup: only done at the start of a GOP (gf_frame_index == 0)
  // and never during stats generation.
  if (cpi->gf_frame_index == 0 && !is_stat_generation_stage(cpi)) {
    // perform tpl after filtering
    int allow_tpl =
        oxcf->gf_cfg.lag_in_frames > 1 && oxcf->algo_cfg.enable_tpl_model;
    if (gf_group->size > MAX_LENGTH_TPL_FRAME_STATS) {
      allow_tpl = 0;
    }
    if (frame_params->frame_type != KEY_FRAME) {
      // In rare case, it's possible to have non ARF/GF update_type here.
      // We should set allow_tpl to zero in the situation
      allow_tpl =
          allow_tpl && (update_type == ARF_UPDATE || update_type == GF_UPDATE ||
                        (cpi->use_ducky_encode &&
                         cpi->ducky_encode_info.frame_info.gop_mode ==
                             DUCKY_ENCODE_GOP_MODE_RCL));
    }

    if (allow_tpl) {
      if (!cpi->skip_tpl_setup_stats) {
        av1_tpl_preload_rc_estimate(cpi, frame_params);
        av1_tpl_setup_stats(cpi, 0, frame_params);
#if CONFIG_BITRATE_ACCURACY && !CONFIG_THREE_PASS
        assert(cpi->gf_frame_index == 0);
        av1_vbr_rc_update_q_index_list(&cpi->vbr_rc_info, &cpi->ppi->tpl_data,
                                       gf_group, cm->seq_params->bit_depth);
#endif
      }
    } else {
      // TPL disabled for this GOP: reset the stats so stale data from a
      // previous GOP cannot leak into later decisions.
      av1_init_tpl_stats(&cpi->ppi->tpl_data);
    }
#if CONFIG_BITRATE_ACCURACY && CONFIG_THREE_PASS
    if (cpi->oxcf.pass == AOM_RC_SECOND_PASS &&
        cpi->second_pass_log_stream != NULL) {
      TPL_INFO *tpl_info;
      AOM_CHECK_MEM_ERROR(cm->error, tpl_info, aom_malloc(sizeof(*tpl_info)));
      av1_pack_tpl_info(tpl_info, gf_group, &cpi->ppi->tpl_data);
      av1_write_tpl_info(tpl_info, cpi->second_pass_log_stream,
                         cpi->common.error);
      aom_free(tpl_info);
    }
#endif  // CONFIG_BITRATE_ACCURACY && CONFIG_THREE_PASS
  }

  if (av1_encode(cpi, dest, frame_input, frame_params, frame_results) !=
      AOM_CODEC_OK) {
    return AOM_CODEC_ERROR;
  }

  // Set frame_input source to true source for psnr calculation.
  if (apply_filtering && is_psnr_calc_enabled(cpi)) {
    cpi->source = av1_realloc_and_scale_if_required(
        cm, source_buffer, &cpi->scaled_source, cm->features.interp_filter, 0,
        false, true, cpi->oxcf.border_in_pixels, cpi->alloc_pyramid);
    cpi->unscaled_source = source_buffer;
  }
#if CONFIG_COLLECT_COMPONENT_TIMING
  if (cpi->oxcf.pass == 2) end_timing(cpi, denoise_and_encode_time);
#endif
  return AOM_CODEC_OK;
}
920 | | #endif // !CONFIG_REALTIME_ONLY |
921 | | |
/*!\cond */
// Struct to keep track of relevant reference frame data.
typedef struct {
  int map_idx;     // Index of this buffer in the reference frame map.
  int disp_order;  // Display order of the frame held in the buffer.
  int pyr_level;   // Pyramid level of the frame held in the buffer.
  int used;        // Nonzero once the buffer has been assigned to a slot.
} RefBufMapData;
/*!\endcond */

// Comparison function to sort reference frames in ascending display order.
// qsort-compatible: returns -1/0/+1. Uses explicit comparisons rather than a
// subtraction so the result cannot overflow, and casts to const-qualified
// pointers (the original equality branch cast away const from the qsort
// arguments).
static int compare_map_idx_pair_asc(const void *a, const void *b) {
  const RefBufMapData *const lhs = (const RefBufMapData *)a;
  const RefBufMapData *const rhs = (const RefBufMapData *)b;
  if (lhs->disp_order == rhs->disp_order) {
    return 0;
  } else if (lhs->disp_order > rhs->disp_order) {
    return 1;
  } else {
    return -1;
  }
}
943 | | |
944 | | // Checks to see if a particular reference frame is already in the reference |
945 | | // frame map. |
946 | 0 | static int is_in_ref_map(RefBufMapData *map, int disp_order, int n_frames) { |
947 | 0 | for (int i = 0; i < n_frames; i++) { |
948 | 0 | if (disp_order == map[i].disp_order) return 1; |
949 | 0 | } |
950 | 0 | return 0; |
951 | 0 | } |
952 | | |
953 | | // Add a reference buffer index to a named reference slot. |
954 | | static void add_ref_to_slot(RefBufMapData *ref, int *const remapped_ref_idx, |
955 | 0 | int frame) { |
956 | 0 | remapped_ref_idx[frame - LAST_FRAME] = ref->map_idx; |
957 | 0 | ref->used = 1; |
958 | 0 | } |
959 | | |
960 | | // Threshold dictating when we are allowed to start considering |
961 | | // leaving lowest level frames unmapped. |
962 | 0 | #define LOW_LEVEL_FRAMES_TR 5 |
963 | | |
964 | | // Find which reference buffer should be left out of the named mapping. |
965 | | // This is because there are 8 reference buffers and only 7 named slots. |
966 | | static void set_unmapped_ref(RefBufMapData *buffer_map, int n_bufs, |
967 | | int n_min_level_refs, int min_level, |
968 | 0 | int cur_frame_disp) { |
969 | 0 | int max_dist = 0; |
970 | 0 | int unmapped_idx = -1; |
971 | 0 | if (n_bufs <= ALTREF_FRAME) return; |
972 | 0 | for (int i = 0; i < n_bufs; i++) { |
973 | 0 | if (buffer_map[i].used) continue; |
974 | 0 | if (buffer_map[i].pyr_level != min_level || |
975 | 0 | n_min_level_refs >= LOW_LEVEL_FRAMES_TR) { |
976 | 0 | int dist = abs(cur_frame_disp - buffer_map[i].disp_order); |
977 | 0 | if (dist > max_dist) { |
978 | 0 | max_dist = dist; |
979 | 0 | unmapped_idx = i; |
980 | 0 | } |
981 | 0 | } |
982 | 0 | } |
983 | 0 | assert(unmapped_idx >= 0 && "Unmapped reference not found"); |
984 | 0 | buffer_map[unmapped_idx].used = 1; |
985 | 0 | } |
986 | | |
// Assign the physical reference buffers described by |ref_frame_map_pairs|
// to the named reference slots (LAST_FRAME .. ALTREF_FRAME) for the frame at
// display order |cur_frame_disp|, writing the chosen buffer index for each
// slot into |remapped_ref_idx|. There are 8 reference buffers but only 7
// named slots, so one buffer may be deliberately left unmapped (see
// set_unmapped_ref()). Any slot still unassigned at the end falls back to
// buffer 0.
void av1_get_ref_frames(RefFrameMapPair ref_frame_map_pairs[REF_FRAMES],
                        int cur_frame_disp, const AV1_COMP *cpi, int gf_index,
                        int is_parallel_encode,
                        int remapped_ref_idx[REF_FRAMES]) {
  int buf_map_idx = 0;

  // Initialize reference frame mappings.
  for (int i = 0; i < REF_FRAMES; ++i) remapped_ref_idx[i] = INVALID_IDX;

#if !CONFIG_REALTIME_ONLY
  // Ducky-encode RCL mode: the GF group carries a precomputed reference list,
  // so copy it directly instead of running the assignment heuristics below.
  if (cpi->use_ducky_encode &&
      cpi->ducky_encode_info.frame_info.gop_mode == DUCKY_ENCODE_GOP_MODE_RCL) {
    for (int rf = LAST_FRAME; rf < REF_FRAMES; ++rf) {
      if (cpi->ppi->gf_group.ref_frame_list[gf_index][rf] != INVALID_IDX) {
        remapped_ref_idx[rf - LAST_FRAME] =
            cpi->ppi->gf_group.ref_frame_list[gf_index][rf];
      }
    }

    // Fill unassigned slots with the first valid index found in this
    // preference order.
    int valid_rf_idx = 0;
    static const int ref_frame_type_order[REF_FRAMES - LAST_FRAME] = {
      GOLDEN_FRAME, ALTREF_FRAME, LAST_FRAME, BWDREF_FRAME,
      ALTREF2_FRAME, LAST2_FRAME, LAST3_FRAME
    };
    for (int i = 0; i < REF_FRAMES - LAST_FRAME; i++) {
      int rf = ref_frame_type_order[i];
      if (remapped_ref_idx[rf - LAST_FRAME] != INVALID_IDX) {
        valid_rf_idx = remapped_ref_idx[rf - LAST_FRAME];
        break;
      }
    }

    for (int i = 0; i < REF_FRAMES; ++i) {
      if (remapped_ref_idx[i] == INVALID_IDX) {
        remapped_ref_idx[i] = valid_rf_idx;
      }
    }

    return;
  }
#endif  // !CONFIG_REALTIME_ONLY

  RefBufMapData buffer_map[REF_FRAMES];
  int n_bufs = 0;
  memset(buffer_map, 0, REF_FRAMES * sizeof(buffer_map[0]));
  int min_level = MAX_ARF_LAYERS;
  int max_level = 0;
  GF_GROUP *gf_group = &cpi->ppi->gf_group;
  int skip_ref_unmapping = 0;
  int is_one_pass_rt = is_one_pass_rt_params(cpi);

  // Go through current reference buffers and store display order, pyr level,
  // and map index.
  for (int map_idx = 0; map_idx < REF_FRAMES; map_idx++) {
    // Get reference frame buffer.
    RefFrameMapPair ref_pair = ref_frame_map_pairs[map_idx];
    if (ref_pair.disp_order == -1) continue;
    const int frame_order = ref_pair.disp_order;
    // Avoid duplicates.
    if (is_in_ref_map(buffer_map, frame_order, n_bufs)) continue;
    const int reference_frame_level = ref_pair.pyr_level;

    // Keep track of the lowest and highest levels that currently exist.
    if (reference_frame_level < min_level) min_level = reference_frame_level;
    if (reference_frame_level > max_level) max_level = reference_frame_level;

    buffer_map[n_bufs].map_idx = map_idx;
    buffer_map[n_bufs].disp_order = frame_order;
    buffer_map[n_bufs].pyr_level = reference_frame_level;
    buffer_map[n_bufs].used = 0;
    n_bufs++;
  }

  // Sort frames in ascending display order.
  qsort(buffer_map, n_bufs, sizeof(buffer_map[0]), compare_map_idx_pair_asc);

  int n_min_level_refs = 0;
  int closest_past_ref = -1;
  int golden_idx = -1;
  int altref_idx = -1;

  // Find the GOLDEN_FRAME and BWDREF_FRAME.
  // Also collect various stats about the reference frames for the remaining
  // mappings.
  for (int i = n_bufs - 1; i >= 0; i--) {
    if (buffer_map[i].pyr_level == min_level) {
      // Keep track of the number of lowest level frames.
      n_min_level_refs++;
      if (buffer_map[i].disp_order < cur_frame_disp && golden_idx == -1 &&
          remapped_ref_idx[GOLDEN_FRAME - LAST_FRAME] == INVALID_IDX) {
        // Save index for GOLDEN.
        golden_idx = i;
      } else if (buffer_map[i].disp_order > cur_frame_disp &&
                 altref_idx == -1 &&
                 remapped_ref_idx[ALTREF_FRAME - LAST_FRAME] == INVALID_IDX) {
        // Save index for ALTREF.
        altref_idx = i;
      }
    } else if (buffer_map[i].disp_order == cur_frame_disp) {
      // Map the BWDREF_FRAME if this is the show_existing_frame.
      add_ref_to_slot(&buffer_map[i], remapped_ref_idx, BWDREF_FRAME);
    }

    // During parallel encodes of lower layer frames, exclude the first frame
    // (frame_parallel_level 1) from being used for the reference assignment of
    // the second frame (frame_parallel_level 2).
    if (!is_one_pass_rt && gf_group->frame_parallel_level[gf_index] == 2 &&
        gf_group->frame_parallel_level[gf_index - 1] == 1 &&
        gf_group->update_type[gf_index - 1] == INTNL_ARF_UPDATE) {
      assert(gf_group->update_type[gf_index] == INTNL_ARF_UPDATE);
#if CONFIG_FPMT_TEST
      is_parallel_encode = (cpi->ppi->fpmt_unit_test_cfg == PARALLEL_ENCODE)
                               ? is_parallel_encode
                               : 0;
#endif  // CONFIG_FPMT_TEST
      // If parallel cpis are active, use ref_idx_to_skip, else, use display
      // index.
      assert(IMPLIES(is_parallel_encode, cpi->ref_idx_to_skip != INVALID_IDX));
      assert(IMPLIES(!is_parallel_encode,
                     gf_group->skip_frame_as_ref[gf_index] != INVALID_IDX));
      buffer_map[i].used = is_parallel_encode
                               ? (buffer_map[i].map_idx == cpi->ref_idx_to_skip)
                               : (buffer_map[i].disp_order ==
                                  gf_group->skip_frame_as_ref[gf_index]);
      // In case a ref frame is excluded from being used during assignment,
      // skip the call to set_unmapped_ref(). Applicable in steady state.
      if (buffer_map[i].used) skip_ref_unmapping = 1;
    }

    // Keep track of where the frames change from being past frames to future
    // frames.
    if (buffer_map[i].disp_order < cur_frame_disp && closest_past_ref < 0)
      closest_past_ref = i;
  }

  // Do not map GOLDEN and ALTREF based on their pyramid level if all reference
  // frames have the same level.
  if (n_min_level_refs <= n_bufs) {
    // Map the GOLDEN_FRAME.
    if (golden_idx > -1)
      add_ref_to_slot(&buffer_map[golden_idx], remapped_ref_idx, GOLDEN_FRAME);
    // Map the ALTREF_FRAME.
    if (altref_idx > -1)
      add_ref_to_slot(&buffer_map[altref_idx], remapped_ref_idx, ALTREF_FRAME);
  }

  // Find the buffer to be excluded from the mapping.
  if (!skip_ref_unmapping)
    set_unmapped_ref(buffer_map, n_bufs, n_min_level_refs, min_level,
                     cur_frame_disp);

  // Place past frames in LAST_FRAME, LAST2_FRAME, and LAST3_FRAME.
  for (int frame = LAST_FRAME; frame < GOLDEN_FRAME; frame++) {
    // Continue if the current ref slot is already full.
    if (remapped_ref_idx[frame - LAST_FRAME] != INVALID_IDX) continue;
    // Find the next unmapped reference buffer
    // in decreasing output order relative to current picture.
    int next_buf_max = 0;
    int next_disp_order = INT_MIN;
    for (buf_map_idx = n_bufs - 1; buf_map_idx >= 0; buf_map_idx--) {
      if (!buffer_map[buf_map_idx].used &&
          buffer_map[buf_map_idx].disp_order < cur_frame_disp &&
          buffer_map[buf_map_idx].disp_order > next_disp_order) {
        next_disp_order = buffer_map[buf_map_idx].disp_order;
        next_buf_max = buf_map_idx;
      }
    }
    buf_map_idx = next_buf_max;
    if (buf_map_idx < 0) break;
    if (buffer_map[buf_map_idx].used) break;
    add_ref_to_slot(&buffer_map[buf_map_idx], remapped_ref_idx, frame);
  }

  // Place future frames (if there are any) in BWDREF_FRAME and ALTREF2_FRAME.
  for (int frame = BWDREF_FRAME; frame < REF_FRAMES; frame++) {
    // Continue if the current ref slot is already full.
    if (remapped_ref_idx[frame - LAST_FRAME] != INVALID_IDX) continue;
    // Find the next unmapped reference buffer
    // in increasing output order relative to current picture.
    int next_buf_max = 0;
    int next_disp_order = INT_MAX;
    for (buf_map_idx = n_bufs - 1; buf_map_idx >= 0; buf_map_idx--) {
      if (!buffer_map[buf_map_idx].used &&
          buffer_map[buf_map_idx].disp_order > cur_frame_disp &&
          buffer_map[buf_map_idx].disp_order < next_disp_order) {
        next_disp_order = buffer_map[buf_map_idx].disp_order;
        next_buf_max = buf_map_idx;
      }
    }
    buf_map_idx = next_buf_max;
    if (buf_map_idx < 0) break;
    if (buffer_map[buf_map_idx].used) break;
    add_ref_to_slot(&buffer_map[buf_map_idx], remapped_ref_idx, frame);
  }

  // Place remaining past frames.
  buf_map_idx = closest_past_ref;
  for (int frame = LAST_FRAME; frame < REF_FRAMES; frame++) {
    // Continue if the current ref slot is already full.
    if (remapped_ref_idx[frame - LAST_FRAME] != INVALID_IDX) continue;
    // Find the next unmapped reference buffer.
    for (; buf_map_idx >= 0; buf_map_idx--) {
      if (!buffer_map[buf_map_idx].used) break;
    }
    if (buf_map_idx < 0) break;
    if (buffer_map[buf_map_idx].used) break;
    add_ref_to_slot(&buffer_map[buf_map_idx], remapped_ref_idx, frame);
  }

  // Place remaining future frames.
  buf_map_idx = n_bufs - 1;
  for (int frame = ALTREF_FRAME; frame >= LAST_FRAME; frame--) {
    // Continue if the current ref slot is already full.
    if (remapped_ref_idx[frame - LAST_FRAME] != INVALID_IDX) continue;
    // Find the next unmapped reference buffer.
    for (; buf_map_idx > closest_past_ref; buf_map_idx--) {
      if (!buffer_map[buf_map_idx].used) break;
    }
    if (buf_map_idx < 0) break;
    if (buffer_map[buf_map_idx].used) break;
    add_ref_to_slot(&buffer_map[buf_map_idx], remapped_ref_idx, frame);
  }

  // Fill any slots that are empty (should only happen for the first 7 frames).
  for (int i = 0; i < REF_FRAMES; ++i)
    if (remapped_ref_idx[i] == INVALID_IDX) remapped_ref_idx[i] = 0;
}
1214 | | |
1215 | | int av1_encode_strategy(AV1_COMP *const cpi, size_t *const size, |
1216 | | uint8_t *const dest, unsigned int *frame_flags, |
1217 | | int64_t *const time_stamp, int64_t *const time_end, |
1218 | | const aom_rational64_t *const timestamp_ratio, |
1219 | 0 | int *const pop_lookahead, int flush) { |
1220 | 0 | AV1EncoderConfig *const oxcf = &cpi->oxcf; |
1221 | 0 | AV1_COMMON *const cm = &cpi->common; |
1222 | 0 | GF_GROUP *gf_group = &cpi->ppi->gf_group; |
1223 | 0 | ExternalFlags *const ext_flags = &cpi->ext_flags; |
1224 | 0 | GFConfig *const gf_cfg = &oxcf->gf_cfg; |
1225 | |
|
1226 | 0 | EncodeFrameInput frame_input; |
1227 | 0 | EncodeFrameParams frame_params; |
1228 | 0 | EncodeFrameResults frame_results; |
1229 | 0 | memset(&frame_input, 0, sizeof(frame_input)); |
1230 | 0 | memset(&frame_params, 0, sizeof(frame_params)); |
1231 | 0 | memset(&frame_results, 0, sizeof(frame_results)); |
1232 | |
|
1233 | | #if CONFIG_BITRATE_ACCURACY && CONFIG_THREE_PASS |
1234 | | VBR_RATECTRL_INFO *vbr_rc_info = &cpi->vbr_rc_info; |
1235 | | if (oxcf->pass == AOM_RC_THIRD_PASS && vbr_rc_info->ready == 0) { |
1236 | | THIRD_PASS_FRAME_INFO frame_info[MAX_THIRD_PASS_BUF]; |
1237 | | av1_open_second_pass_log(cpi, 1); |
1238 | | FILE *second_pass_log_stream = cpi->second_pass_log_stream; |
1239 | | fseek(second_pass_log_stream, 0, SEEK_END); |
1240 | | size_t file_size = ftell(second_pass_log_stream); |
1241 | | rewind(second_pass_log_stream); |
1242 | | size_t read_size = 0; |
1243 | | while (read_size < file_size) { |
1244 | | THIRD_PASS_GOP_INFO gop_info; |
1245 | | struct aom_internal_error_info *error = cpi->common.error; |
1246 | | // Read in GOP information from the second pass file. |
1247 | | av1_read_second_pass_gop_info(second_pass_log_stream, &gop_info, error); |
1248 | | TPL_INFO *tpl_info; |
1249 | | AOM_CHECK_MEM_ERROR(cm->error, tpl_info, aom_malloc(sizeof(*tpl_info))); |
1250 | | av1_read_tpl_info(tpl_info, second_pass_log_stream, error); |
1251 | | // Read in per-frame info from second-pass encoding |
1252 | | av1_read_second_pass_per_frame_info(second_pass_log_stream, frame_info, |
1253 | | gop_info.num_frames, error); |
1254 | | av1_vbr_rc_append_tpl_info(vbr_rc_info, tpl_info); |
1255 | | read_size = ftell(second_pass_log_stream); |
1256 | | aom_free(tpl_info); |
1257 | | } |
1258 | | av1_close_second_pass_log(cpi); |
1259 | | if (cpi->oxcf.rc_cfg.mode == AOM_Q) { |
1260 | | vbr_rc_info->base_q_index = cpi->oxcf.rc_cfg.cq_level; |
1261 | | av1_vbr_rc_compute_q_indices( |
1262 | | vbr_rc_info->base_q_index, vbr_rc_info->total_frame_count, |
1263 | | vbr_rc_info->qstep_ratio_list, cm->seq_params->bit_depth, |
1264 | | vbr_rc_info->q_index_list); |
1265 | | } else { |
1266 | | vbr_rc_info->base_q_index = av1_vbr_rc_info_estimate_base_q( |
1267 | | vbr_rc_info->total_bit_budget, cm->seq_params->bit_depth, |
1268 | | vbr_rc_info->scale_factors, vbr_rc_info->total_frame_count, |
1269 | | vbr_rc_info->update_type_list, vbr_rc_info->qstep_ratio_list, |
1270 | | vbr_rc_info->txfm_stats_list, vbr_rc_info->q_index_list, NULL); |
1271 | | } |
1272 | | vbr_rc_info->ready = 1; |
1273 | | #if CONFIG_RATECTRL_LOG |
1274 | | rc_log_record_chunk_info(&cpi->rc_log, vbr_rc_info->base_q_index, |
1275 | | vbr_rc_info->total_frame_count); |
1276 | | #endif // CONFIG_RATECTRL_LOG |
1277 | | } |
1278 | | #endif // CONFIG_BITRATE_ACCURACY && CONFIG_THREE_PASS |
1279 | | |
1280 | | // Check if we need to stuff more src frames |
1281 | 0 | if (flush == 0) { |
1282 | 0 | int srcbuf_size = |
1283 | 0 | av1_lookahead_depth(cpi->ppi->lookahead, cpi->compressor_stage); |
1284 | 0 | int pop_size = |
1285 | 0 | av1_lookahead_pop_sz(cpi->ppi->lookahead, cpi->compressor_stage); |
1286 | | |
1287 | | // Continue buffering look ahead buffer. |
1288 | 0 | if (srcbuf_size < pop_size) return -1; |
1289 | 0 | } |
1290 | | |
1291 | 0 | if (!av1_lookahead_peek(cpi->ppi->lookahead, 0, cpi->compressor_stage)) { |
1292 | 0 | #if !CONFIG_REALTIME_ONLY |
1293 | 0 | if (flush && oxcf->pass == AOM_RC_FIRST_PASS && |
1294 | 0 | !cpi->ppi->twopass.first_pass_done) { |
1295 | 0 | av1_end_first_pass(cpi); /* get last stats packet */ |
1296 | 0 | cpi->ppi->twopass.first_pass_done = 1; |
1297 | 0 | } |
1298 | 0 | #endif |
1299 | 0 | return -1; |
1300 | 0 | } |
1301 | | |
1302 | | // TODO(sarahparker) finish bit allocation for one pass pyramid |
1303 | 0 | if (has_no_stats_stage(cpi)) { |
1304 | 0 | gf_cfg->gf_max_pyr_height = |
1305 | 0 | AOMMIN(gf_cfg->gf_max_pyr_height, USE_ALTREF_FOR_ONE_PASS); |
1306 | 0 | gf_cfg->gf_min_pyr_height = |
1307 | 0 | AOMMIN(gf_cfg->gf_min_pyr_height, gf_cfg->gf_max_pyr_height); |
1308 | 0 | } |
1309 | | |
1310 | | // Allocation of mi buffers. |
1311 | 0 | alloc_mb_mode_info_buffers(cpi); |
1312 | |
|
1313 | 0 | cpi->skip_tpl_setup_stats = 0; |
1314 | 0 | #if !CONFIG_REALTIME_ONLY |
1315 | 0 | if (oxcf->pass != AOM_RC_FIRST_PASS) { |
1316 | 0 | TplParams *const tpl_data = &cpi->ppi->tpl_data; |
1317 | 0 | if (tpl_data->tpl_stats_pool[0] == NULL) { |
1318 | 0 | av1_setup_tpl_buffers(cpi->ppi, &cm->mi_params, oxcf->frm_dim_cfg.width, |
1319 | 0 | oxcf->frm_dim_cfg.height, 0, |
1320 | 0 | oxcf->gf_cfg.lag_in_frames); |
1321 | 0 | } |
1322 | 0 | } |
1323 | 0 | cpi->twopass_frame.this_frame = NULL; |
1324 | 0 | const int use_one_pass_rt_params = is_one_pass_rt_params(cpi); |
1325 | 0 | if (!use_one_pass_rt_params && !is_stat_generation_stage(cpi)) { |
1326 | | #if CONFIG_COLLECT_COMPONENT_TIMING |
1327 | | start_timing(cpi, av1_get_second_pass_params_time); |
1328 | | #endif |
1329 | | |
1330 | | // Initialise frame_level_rate_correction_factors with value previous |
1331 | | // to the parallel frames. |
1332 | 0 | if (cpi->ppi->gf_group.frame_parallel_level[cpi->gf_frame_index] > 0) { |
1333 | 0 | for (int i = 0; i < RATE_FACTOR_LEVELS; i++) { |
1334 | 0 | cpi->rc.frame_level_rate_correction_factors[i] = |
1335 | | #if CONFIG_FPMT_TEST |
1336 | | (cpi->ppi->fpmt_unit_test_cfg == PARALLEL_SIMULATION_ENCODE) |
1337 | | ? cpi->ppi->p_rc.temp_rate_correction_factors[i] |
1338 | | : |
1339 | | #endif // CONFIG_FPMT_TEST |
1340 | 0 | cpi->ppi->p_rc.rate_correction_factors[i]; |
1341 | 0 | } |
1342 | 0 | } |
1343 | | |
1344 | | // copy mv_stats from ppi to frame_level cpi. |
1345 | 0 | cpi->mv_stats = cpi->ppi->mv_stats; |
1346 | 0 | av1_get_second_pass_params(cpi, &frame_params, *frame_flags); |
1347 | | #if CONFIG_COLLECT_COMPONENT_TIMING |
1348 | | end_timing(cpi, av1_get_second_pass_params_time); |
1349 | | #endif |
1350 | 0 | } |
1351 | 0 | #endif |
1352 | |
|
1353 | 0 | if (!is_stat_generation_stage(cpi)) { |
1354 | | // TODO(jingning): fwd key frame always uses show existing frame? |
1355 | 0 | if (gf_group->update_type[cpi->gf_frame_index] == OVERLAY_UPDATE && |
1356 | 0 | gf_group->refbuf_state[cpi->gf_frame_index] == REFBUF_RESET) { |
1357 | 0 | frame_params.show_existing_frame = 1; |
1358 | 0 | } else { |
1359 | 0 | frame_params.show_existing_frame = |
1360 | 0 | (cpi->ppi->show_existing_alt_ref && |
1361 | 0 | gf_group->update_type[cpi->gf_frame_index] == OVERLAY_UPDATE) || |
1362 | 0 | gf_group->update_type[cpi->gf_frame_index] == INTNL_OVERLAY_UPDATE; |
1363 | 0 | } |
1364 | 0 | frame_params.show_existing_frame &= allow_show_existing(cpi, *frame_flags); |
1365 | | |
1366 | | // Special handling to reset 'show_existing_frame' in case of dropped |
1367 | | // frames. |
1368 | 0 | if (oxcf->rc_cfg.drop_frames_water_mark && |
1369 | 0 | (gf_group->update_type[cpi->gf_frame_index] == OVERLAY_UPDATE || |
1370 | 0 | gf_group->update_type[cpi->gf_frame_index] == INTNL_OVERLAY_UPDATE)) { |
1371 | | // During the encode of an OVERLAY_UPDATE/INTNL_OVERLAY_UPDATE frame, loop |
1372 | | // over the gf group to check if the corresponding |
1373 | | // ARF_UPDATE/INTNL_ARF_UPDATE frame was dropped. |
1374 | 0 | int cur_disp_idx = gf_group->display_idx[cpi->gf_frame_index]; |
1375 | 0 | for (int idx = 0; idx < cpi->gf_frame_index; idx++) { |
1376 | 0 | if (cur_disp_idx == gf_group->display_idx[idx]) { |
1377 | 0 | assert(IMPLIES( |
1378 | 0 | gf_group->update_type[cpi->gf_frame_index] == OVERLAY_UPDATE, |
1379 | 0 | gf_group->update_type[idx] == ARF_UPDATE)); |
1380 | 0 | assert(IMPLIES(gf_group->update_type[cpi->gf_frame_index] == |
1381 | 0 | INTNL_OVERLAY_UPDATE, |
1382 | 0 | gf_group->update_type[idx] == INTNL_ARF_UPDATE)); |
1383 | | // Reset show_existing_frame and set cpi->is_dropped_frame to true if |
1384 | | // the frame was dropped during its first encode. |
1385 | 0 | if (gf_group->is_frame_dropped[idx]) { |
1386 | 0 | frame_params.show_existing_frame = 0; |
1387 | 0 | assert(!cpi->is_dropped_frame); |
1388 | 0 | cpi->is_dropped_frame = true; |
1389 | 0 | } |
1390 | 0 | break; |
1391 | 0 | } |
1392 | 0 | } |
1393 | 0 | } |
1394 | | |
1395 | | // Reset show_existing_alt_ref decision to 0 after it is used. |
1396 | 0 | if (gf_group->update_type[cpi->gf_frame_index] == OVERLAY_UPDATE) { |
1397 | 0 | cpi->ppi->show_existing_alt_ref = 0; |
1398 | 0 | } |
1399 | 0 | } else { |
1400 | 0 | frame_params.show_existing_frame = 0; |
1401 | 0 | } |
1402 | |
|
1403 | 0 | struct lookahead_entry *source = NULL; |
1404 | 0 | struct lookahead_entry *last_source = NULL; |
1405 | 0 | if (frame_params.show_existing_frame) { |
1406 | 0 | source = av1_lookahead_peek(cpi->ppi->lookahead, 0, cpi->compressor_stage); |
1407 | 0 | *pop_lookahead = 1; |
1408 | 0 | frame_params.show_frame = 1; |
1409 | 0 | } else { |
1410 | 0 | source = choose_frame_source(cpi, &flush, pop_lookahead, &last_source, |
1411 | 0 | &frame_params.show_frame); |
1412 | 0 | } |
1413 | |
|
1414 | 0 | if (source == NULL) { // If no source was found, we can't encode a frame. |
1415 | 0 | #if !CONFIG_REALTIME_ONLY |
1416 | 0 | if (flush && oxcf->pass == AOM_RC_FIRST_PASS && |
1417 | 0 | !cpi->ppi->twopass.first_pass_done) { |
1418 | 0 | av1_end_first_pass(cpi); /* get last stats packet */ |
1419 | 0 | cpi->ppi->twopass.first_pass_done = 1; |
1420 | 0 | } |
1421 | 0 | #endif |
1422 | 0 | return -1; |
1423 | 0 | } |
1424 | | |
1425 | | // reset src_offset to allow actual encode call for this frame to get its |
1426 | | // source. |
1427 | 0 | gf_group->src_offset[cpi->gf_frame_index] = 0; |
1428 | | |
1429 | | // Source may be changed if temporal filtered later. |
1430 | 0 | frame_input.source = &source->img; |
1431 | 0 | if ((cpi->ppi->use_svc || cpi->rc.prev_frame_is_dropped) && |
1432 | 0 | last_source != NULL) |
1433 | 0 | av1_svc_set_last_source(cpi, &frame_input, &last_source->img); |
1434 | 0 | else |
1435 | 0 | frame_input.last_source = last_source != NULL ? &last_source->img : NULL; |
1436 | 0 | frame_input.ts_duration = source->ts_end - source->ts_start; |
1437 | | // Save unfiltered source. It is used in av1_get_second_pass_params(). |
1438 | 0 | cpi->unfiltered_source = frame_input.source; |
1439 | |
|
1440 | 0 | *time_stamp = source->ts_start; |
1441 | 0 | *time_end = source->ts_end; |
1442 | 0 | if (source->ts_start < cpi->time_stamps.first_ts_start) { |
1443 | 0 | cpi->time_stamps.first_ts_start = source->ts_start; |
1444 | 0 | cpi->time_stamps.prev_ts_end = source->ts_start; |
1445 | 0 | } |
1446 | |
|
1447 | 0 | av1_apply_encoding_flags(cpi, source->flags); |
1448 | 0 | *frame_flags = (source->flags & AOM_EFLAG_FORCE_KF) ? FRAMEFLAGS_KEY : 0; |
1449 | |
|
1450 | | #if CONFIG_FPMT_TEST |
1451 | | if (cpi->ppi->fpmt_unit_test_cfg == PARALLEL_SIMULATION_ENCODE) { |
1452 | | if (cpi->ppi->gf_group.frame_parallel_level[cpi->gf_frame_index] > 0) { |
1453 | | cpi->framerate = cpi->temp_framerate; |
1454 | | } |
1455 | | } |
1456 | | #endif // CONFIG_FPMT_TEST |
1457 | | |
1458 | | // Shown frames and arf-overlay frames need frame-rate considering |
1459 | 0 | if (frame_params.show_frame) |
1460 | 0 | adjust_frame_rate(cpi, source->ts_start, source->ts_end); |
1461 | |
|
1462 | 0 | if (!frame_params.show_existing_frame) { |
1463 | 0 | if (cpi->film_grain_table) { |
1464 | 0 | cm->cur_frame->film_grain_params_present = aom_film_grain_table_lookup( |
1465 | 0 | cpi->film_grain_table, *time_stamp, *time_end, 0 /* =erase */, |
1466 | 0 | &cm->film_grain_params); |
1467 | 0 | } else { |
1468 | 0 | cm->cur_frame->film_grain_params_present = |
1469 | 0 | cm->seq_params->film_grain_params_present; |
1470 | 0 | } |
1471 | | // only one operating point supported now |
1472 | 0 | const int64_t pts64 = ticks_to_timebase_units(timestamp_ratio, *time_stamp); |
1473 | 0 | if (pts64 < 0 || pts64 > UINT32_MAX) return AOM_CODEC_ERROR; |
1474 | | |
1475 | 0 | cm->frame_presentation_time = (uint32_t)pts64; |
1476 | 0 | } |
1477 | | |
1478 | | #if CONFIG_COLLECT_COMPONENT_TIMING |
1479 | | start_timing(cpi, av1_get_one_pass_rt_params_time); |
1480 | | #endif |
1481 | | #if CONFIG_REALTIME_ONLY |
1482 | | av1_get_one_pass_rt_params(cpi, &frame_params.frame_type, &frame_input, |
1483 | | *frame_flags); |
1484 | | if (use_rtc_reference_structure_one_layer(cpi)) |
1485 | | av1_set_rtc_reference_structure_one_layer(cpi, cpi->gf_frame_index == 0); |
1486 | | #else |
1487 | 0 | if (use_one_pass_rt_params) { |
1488 | 0 | av1_get_one_pass_rt_params(cpi, &frame_params.frame_type, &frame_input, |
1489 | 0 | *frame_flags); |
1490 | 0 | if (use_rtc_reference_structure_one_layer(cpi)) |
1491 | 0 | av1_set_rtc_reference_structure_one_layer(cpi, cpi->gf_frame_index == 0); |
1492 | 0 | } |
1493 | 0 | #endif |
1494 | | #if CONFIG_COLLECT_COMPONENT_TIMING |
1495 | | end_timing(cpi, av1_get_one_pass_rt_params_time); |
1496 | | #endif |
1497 | |
|
1498 | 0 | FRAME_UPDATE_TYPE frame_update_type = |
1499 | 0 | get_frame_update_type(gf_group, cpi->gf_frame_index); |
1500 | |
|
1501 | 0 | if (frame_params.show_existing_frame && |
1502 | 0 | frame_params.frame_type != KEY_FRAME) { |
1503 | | // Force show-existing frames to be INTER, except forward keyframes |
1504 | 0 | frame_params.frame_type = INTER_FRAME; |
1505 | 0 | } |
1506 | | |
1507 | | // Per-frame encode speed. In theory this can vary, but things may have |
1508 | | // been written assuming speed-level will not change within a sequence, so |
1509 | | // this parameter should be used with caution. |
1510 | 0 | frame_params.speed = oxcf->speed; |
1511 | |
|
1512 | 0 | #if !CONFIG_REALTIME_ONLY |
1513 | | // Set forced key frames when necessary. For two-pass encoding / lap mode, |
1514 | | // this is already handled by av1_get_second_pass_params. However when no |
1515 | | // stats are available, we still need to check if the new frame is a keyframe. |
1516 | | // For one pass rt, this is already checked in av1_get_one_pass_rt_params. |
1517 | 0 | if (!use_one_pass_rt_params && |
1518 | 0 | (is_stat_generation_stage(cpi) || has_no_stats_stage(cpi))) { |
1519 | | // Current frame is coded as a key-frame for any of the following cases: |
1520 | | // 1) First frame of a video |
1521 | | // 2) For all-intra frame encoding |
1522 | | // 3) When a key-frame is forced |
1523 | 0 | const int kf_requested = |
1524 | 0 | (cm->current_frame.frame_number == 0 || |
1525 | 0 | oxcf->kf_cfg.key_freq_max == 0 || (*frame_flags & FRAMEFLAGS_KEY)); |
1526 | 0 | if (kf_requested && frame_update_type != OVERLAY_UPDATE && |
1527 | 0 | frame_update_type != INTNL_OVERLAY_UPDATE) { |
1528 | 0 | frame_params.frame_type = KEY_FRAME; |
1529 | 0 | } else if (is_stat_generation_stage(cpi)) { |
1530 | | // For stats generation, set the frame type to inter here. |
1531 | 0 | frame_params.frame_type = INTER_FRAME; |
1532 | 0 | } |
1533 | 0 | } |
1534 | 0 | #endif |
1535 | | |
1536 | | // Work out some encoding parameters specific to the pass: |
1537 | 0 | if (has_no_stats_stage(cpi) && oxcf->q_cfg.aq_mode == CYCLIC_REFRESH_AQ) { |
1538 | 0 | av1_cyclic_refresh_update_parameters(cpi); |
1539 | 0 | } else if (is_stat_generation_stage(cpi)) { |
1540 | 0 | cpi->td.mb.e_mbd.lossless[0] = is_lossless_requested(&oxcf->rc_cfg); |
1541 | 0 | } else if (is_stat_consumption_stage(cpi)) { |
1542 | | #if CONFIG_MISMATCH_DEBUG |
1543 | | mismatch_move_frame_idx_w(); |
1544 | | #endif |
1545 | | #if TXCOEFF_COST_TIMER |
1546 | | cm->txcoeff_cost_timer = 0; |
1547 | | cm->txcoeff_cost_count = 0; |
1548 | | #endif |
1549 | 0 | } |
1550 | |
|
1551 | 0 | if (!is_stat_generation_stage(cpi)) |
1552 | 0 | set_ext_overrides(cm, &frame_params, ext_flags); |
1553 | | |
1554 | | // Shown keyframes and S frames refresh all reference buffers |
1555 | 0 | const int force_refresh_all = |
1556 | 0 | ((frame_params.frame_type == KEY_FRAME && frame_params.show_frame) || |
1557 | 0 | frame_params.frame_type == S_FRAME) && |
1558 | 0 | !frame_params.show_existing_frame; |
1559 | |
|
1560 | 0 | av1_configure_buffer_updates( |
1561 | 0 | cpi, &frame_params.refresh_frame, frame_update_type, |
1562 | 0 | gf_group->refbuf_state[cpi->gf_frame_index], force_refresh_all); |
1563 | |
|
1564 | 0 | if (!is_stat_generation_stage(cpi)) { |
1565 | 0 | const YV12_BUFFER_CONFIG *ref_frame_buf[INTER_REFS_PER_FRAME]; |
1566 | |
|
1567 | 0 | RefFrameMapPair ref_frame_map_pairs[REF_FRAMES]; |
1568 | 0 | init_ref_map_pair(cpi, ref_frame_map_pairs); |
1569 | 0 | const int order_offset = gf_group->arf_src_offset[cpi->gf_frame_index]; |
1570 | 0 | const int cur_frame_disp = |
1571 | 0 | cpi->common.current_frame.frame_number + order_offset; |
1572 | |
|
1573 | 0 | int get_ref_frames = 0; |
1574 | | #if CONFIG_FPMT_TEST |
1575 | | get_ref_frames = |
1576 | | (cpi->ppi->fpmt_unit_test_cfg == PARALLEL_SIMULATION_ENCODE) ? 1 : 0; |
1577 | | #endif // CONFIG_FPMT_TEST |
1578 | 0 | if (get_ref_frames || |
1579 | 0 | gf_group->frame_parallel_level[cpi->gf_frame_index] == 0) { |
1580 | 0 | if (!ext_flags->refresh_frame.update_pending) { |
1581 | 0 | av1_get_ref_frames(ref_frame_map_pairs, cur_frame_disp, cpi, |
1582 | 0 | cpi->gf_frame_index, 1, cm->remapped_ref_idx); |
1583 | 0 | } else if (cpi->ppi->rtc_ref.set_ref_frame_config || |
1584 | 0 | use_rtc_reference_structure_one_layer(cpi)) { |
1585 | 0 | for (unsigned int i = 0; i < INTER_REFS_PER_FRAME; i++) |
1586 | 0 | cm->remapped_ref_idx[i] = cpi->ppi->rtc_ref.ref_idx[i]; |
1587 | 0 | } |
1588 | 0 | } |
1589 | | |
1590 | | // Get the reference frames |
1591 | 0 | bool has_ref_frames = false; |
1592 | 0 | for (int i = 0; i < INTER_REFS_PER_FRAME; ++i) { |
1593 | 0 | const RefCntBuffer *ref_frame = |
1594 | 0 | get_ref_frame_buf(cm, ref_frame_priority_order[i]); |
1595 | 0 | ref_frame_buf[i] = ref_frame != NULL ? &ref_frame->buf : NULL; |
1596 | 0 | if (ref_frame != NULL) has_ref_frames = true; |
1597 | 0 | } |
1598 | 0 | if (!has_ref_frames && (frame_params.frame_type == INTER_FRAME || |
1599 | 0 | frame_params.frame_type == S_FRAME)) { |
1600 | 0 | return AOM_CODEC_ERROR; |
1601 | 0 | } |
1602 | | |
1603 | | // Work out which reference frame slots may be used. |
1604 | 0 | frame_params.ref_frame_flags = |
1605 | 0 | get_ref_frame_flags(&cpi->sf, is_one_pass_rt_params(cpi), ref_frame_buf, |
1606 | 0 | ext_flags->ref_frame_flags); |
1607 | | |
1608 | | // Set primary_ref_frame of non-reference frames as PRIMARY_REF_NONE. |
1609 | 0 | if (cpi->ppi->gf_group.is_frame_non_ref[cpi->gf_frame_index]) { |
1610 | 0 | frame_params.primary_ref_frame = PRIMARY_REF_NONE; |
1611 | 0 | } else { |
1612 | 0 | frame_params.primary_ref_frame = |
1613 | 0 | choose_primary_ref_frame(cpi, &frame_params); |
1614 | 0 | } |
1615 | |
|
1616 | 0 | frame_params.order_offset = gf_group->arf_src_offset[cpi->gf_frame_index]; |
1617 | | |
1618 | | // Call av1_get_refresh_frame_flags() if refresh index not available. |
1619 | 0 | if (!cpi->refresh_idx_available) { |
1620 | 0 | frame_params.refresh_frame_flags = av1_get_refresh_frame_flags( |
1621 | 0 | cpi, &frame_params, frame_update_type, cpi->gf_frame_index, |
1622 | 0 | cur_frame_disp, ref_frame_map_pairs); |
1623 | 0 | } else { |
1624 | 0 | assert(cpi->ref_refresh_index != INVALID_IDX); |
1625 | 0 | frame_params.refresh_frame_flags = (1 << cpi->ref_refresh_index); |
1626 | 0 | } |
1627 | | |
1628 | | // Make the frames marked as is_frame_non_ref to non-reference frames. |
1629 | 0 | if (gf_group->is_frame_non_ref[cpi->gf_frame_index]) |
1630 | 0 | frame_params.refresh_frame_flags = 0; |
1631 | |
|
1632 | 0 | frame_params.existing_fb_idx_to_show = INVALID_IDX; |
1633 | | // Find the frame buffer to show based on display order. |
1634 | 0 | if (frame_params.show_existing_frame) { |
1635 | 0 | for (int frame = 0; frame < REF_FRAMES; frame++) { |
1636 | 0 | const RefCntBuffer *const buf = cm->ref_frame_map[frame]; |
1637 | 0 | if (buf == NULL) continue; |
1638 | 0 | const int frame_order = (int)buf->display_order_hint; |
1639 | 0 | if (frame_order == cur_frame_disp) |
1640 | 0 | frame_params.existing_fb_idx_to_show = frame; |
1641 | 0 | } |
1642 | 0 | } |
1643 | 0 | } |
1644 | | |
1645 | | // The way frame_params->remapped_ref_idx is setup is a placeholder. |
1646 | | // Currently, reference buffer assignment is done by update_ref_frame_map() |
1647 | | // which is called by high-level strategy AFTER encoding a frame. It |
1648 | | // modifies cm->remapped_ref_idx. If you want to use an alternative method |
1649 | | // to determine reference buffer assignment, just put your assignments into |
1650 | | // frame_params->remapped_ref_idx here and they will be used when encoding |
1651 | | // this frame. If frame_params->remapped_ref_idx is setup independently of |
1652 | | // cm->remapped_ref_idx then update_ref_frame_map() will have no effect. |
1653 | 0 | memcpy(frame_params.remapped_ref_idx, cm->remapped_ref_idx, |
1654 | 0 | REF_FRAMES * sizeof(*cm->remapped_ref_idx)); |
1655 | |
|
1656 | 0 | cpi->td.mb.rdmult_delta_qindex = cpi->td.mb.delta_qindex = 0; |
1657 | |
|
1658 | 0 | if (!frame_params.show_existing_frame) { |
1659 | 0 | cm->quant_params.using_qmatrix = oxcf->q_cfg.using_qm; |
1660 | 0 | } |
1661 | |
|
1662 | 0 | const int is_intra_frame = frame_params.frame_type == KEY_FRAME || |
1663 | 0 | frame_params.frame_type == INTRA_ONLY_FRAME; |
1664 | 0 | FeatureFlags *const features = &cm->features; |
1665 | 0 | if (!is_stat_generation_stage(cpi) && |
1666 | 0 | (oxcf->pass == AOM_RC_ONE_PASS || oxcf->pass >= AOM_RC_SECOND_PASS) && |
1667 | 0 | is_intra_frame) { |
1668 | 0 | av1_set_screen_content_options(cpi, features); |
1669 | 0 | } |
1670 | |
|
1671 | | #if CONFIG_REALTIME_ONLY |
1672 | | if (av1_encode(cpi, dest, &frame_input, &frame_params, &frame_results) != |
1673 | | AOM_CODEC_OK) { |
1674 | | return AOM_CODEC_ERROR; |
1675 | | } |
1676 | | #else |
1677 | 0 | if (has_no_stats_stage(cpi) && oxcf->mode == REALTIME && |
1678 | 0 | gf_cfg->lag_in_frames == 0) { |
1679 | 0 | if (av1_encode(cpi, dest, &frame_input, &frame_params, &frame_results) != |
1680 | 0 | AOM_CODEC_OK) { |
1681 | 0 | return AOM_CODEC_ERROR; |
1682 | 0 | } |
1683 | 0 | } else if (denoise_and_encode(cpi, dest, &frame_input, &frame_params, |
1684 | 0 | &frame_results) != AOM_CODEC_OK) { |
1685 | 0 | return AOM_CODEC_ERROR; |
1686 | 0 | } |
1687 | 0 | #endif // CONFIG_REALTIME_ONLY |
1688 | | |
1689 | | // This is used in rtc temporal filter case. Use true source in the PSNR |
1690 | | // calculation. |
1691 | 0 | if (is_psnr_calc_enabled(cpi) && cpi->sf.rt_sf.use_rtc_tf) { |
1692 | 0 | assert(cpi->orig_source.buffer_alloc_sz > 0); |
1693 | 0 | cpi->source = &cpi->orig_source; |
1694 | 0 | } |
1695 | |
|
1696 | 0 | if (!is_stat_generation_stage(cpi)) { |
1697 | | // First pass doesn't modify reference buffer assignment or produce frame |
1698 | | // flags |
1699 | 0 | update_frame_flags(&cpi->common, &cpi->refresh_frame, frame_flags); |
1700 | 0 | set_additional_frame_flags(cm, frame_flags); |
1701 | 0 | } |
1702 | |
|
1703 | 0 | #if !CONFIG_REALTIME_ONLY |
1704 | | #if TXCOEFF_COST_TIMER |
1705 | | if (!is_stat_generation_stage(cpi)) { |
1706 | | cm->cum_txcoeff_cost_timer += cm->txcoeff_cost_timer; |
1707 | | fprintf(stderr, |
1708 | | "\ntxb coeff cost block number: %ld, frame time: %ld, cum time %ld " |
1709 | | "in us\n", |
1710 | | cm->txcoeff_cost_count, cm->txcoeff_cost_timer, |
1711 | | cm->cum_txcoeff_cost_timer); |
1712 | | } |
1713 | | #endif |
1714 | 0 | #endif // !CONFIG_REALTIME_ONLY |
1715 | |
|
1716 | | #if CONFIG_TUNE_VMAF |
1717 | | if (!is_stat_generation_stage(cpi) && |
1718 | | (oxcf->tune_cfg.tuning >= AOM_TUNE_VMAF_WITH_PREPROCESSING && |
1719 | | oxcf->tune_cfg.tuning <= AOM_TUNE_VMAF_NEG_MAX_GAIN)) { |
1720 | | av1_update_vmaf_curve(cpi); |
1721 | | } |
1722 | | #endif |
1723 | | |
1724 | | // Unpack frame_results: |
1725 | 0 | *size = frame_results.size; |
1726 | | |
1727 | | // Leave a signal for a higher level caller about if this frame is droppable |
1728 | 0 | if (*size > 0) { |
1729 | 0 | cpi->droppable = |
1730 | 0 | is_frame_droppable(&cpi->ppi->rtc_ref, &ext_flags->refresh_frame); |
1731 | 0 | } |
1732 | | |
1733 | | // For SVC, or when frame-dropper is enabled: |
1734 | | // keep track of the (unscaled) source corresponding to the refresh of LAST |
1735 | | // reference (base temporal layer - TL0). Copy only for the |
1736 | | // top spatial enhancement layer so all spatial layers of the next |
1737 | | // superframe have last_source to be aligned with previous TL0 superframe. |
1738 | | // Avoid cases where resolution changes for unscaled source (top spatial |
1739 | | // layer). Only needs to be done for frame that are encoded (size > 0). |
1740 | 0 | if (*size > 0 && |
1741 | 0 | (cpi->ppi->use_svc || cpi->oxcf.rc_cfg.drop_frames_water_mark > 0) && |
1742 | 0 | cpi->svc.spatial_layer_id == cpi->svc.number_spatial_layers - 1 && |
1743 | 0 | cpi->svc.temporal_layer_id == 0 && |
1744 | 0 | cpi->unscaled_source->y_width == cpi->svc.source_last_TL0.y_width && |
1745 | 0 | cpi->unscaled_source->y_height == cpi->svc.source_last_TL0.y_height) { |
1746 | 0 | aom_yv12_copy_y(cpi->unscaled_source, &cpi->svc.source_last_TL0, 1); |
1747 | 0 | aom_yv12_copy_u(cpi->unscaled_source, &cpi->svc.source_last_TL0, 1); |
1748 | 0 | aom_yv12_copy_v(cpi->unscaled_source, &cpi->svc.source_last_TL0, 1); |
1749 | 0 | } |
1750 | |
|
1751 | 0 | return AOM_CODEC_OK; |
1752 | 0 | } |