/src/aom/av1/encoder/encode_strategy.c
Line | Count | Source (jump to first uncovered line) |
1 | | /* |
2 | | * Copyright (c) 2019, Alliance for Open Media. All rights reserved |
3 | | * |
4 | | * This source code is subject to the terms of the BSD 2 Clause License and |
5 | | * the Alliance for Open Media Patent License 1.0. If the BSD 2 Clause License |
6 | | * was not distributed with this source code in the LICENSE file, you can |
7 | | * obtain it at www.aomedia.org/license/software. If the Alliance for Open |
8 | | * Media Patent License 1.0 was not distributed with this source code in the |
9 | | * PATENTS file, you can obtain it at www.aomedia.org/license/patent. |
10 | | */ |
11 | | |
12 | | #include <stdint.h> |
13 | | |
14 | | #include "av1/common/blockd.h" |
15 | | #include "config/aom_config.h" |
16 | | #include "config/aom_scale_rtcd.h" |
17 | | |
18 | | #include "aom/aom_codec.h" |
19 | | #include "aom/aom_encoder.h" |
20 | | |
21 | | #if CONFIG_MISMATCH_DEBUG |
22 | | #include "aom_util/debug_util.h" |
23 | | #endif // CONFIG_MISMATCH_DEBUG |
24 | | |
25 | | #include "av1/common/av1_common_int.h" |
26 | | #include "av1/common/reconinter.h" |
27 | | |
28 | | #include "av1/encoder/encoder.h" |
29 | | #include "av1/encoder/encode_strategy.h" |
30 | | #include "av1/encoder/encodeframe.h" |
31 | | #include "av1/encoder/encoder_alloc.h" |
32 | | #include "av1/encoder/firstpass.h" |
33 | | #include "av1/encoder/gop_structure.h" |
34 | | #include "av1/encoder/pass2_strategy.h" |
35 | | #include "av1/encoder/temporal_filter.h" |
36 | | #include "av1/encoder/tpl_model.h" |
37 | | |
38 | | #if CONFIG_TUNE_VMAF |
39 | | #include "av1/encoder/tune_vmaf.h" |
40 | | #endif |
41 | | |
42 | | #define TEMPORAL_FILTER_KEY_FRAME (CONFIG_REALTIME_ONLY ? 0 : 1) |
43 | | |
44 | | static INLINE void set_refresh_frame_flags( |
45 | | RefreshFrameInfo *const refresh_frame, bool refresh_gf, bool refresh_bwdref, |
46 | 0 | bool refresh_arf) { |
47 | 0 | refresh_frame->golden_frame = refresh_gf; |
48 | 0 | refresh_frame->bwd_ref_frame = refresh_bwdref; |
49 | 0 | refresh_frame->alt_ref_frame = refresh_arf; |
50 | 0 | } |
51 | | |
52 | | void av1_configure_buffer_updates(AV1_COMP *const cpi, |
53 | | RefreshFrameInfo *const refresh_frame, |
54 | | const FRAME_UPDATE_TYPE type, |
55 | | const REFBUF_STATE refbuf_state, |
56 | 0 | int force_refresh_all) { |
57 | | // NOTE(weitinglin): Should we define another function to take care of |
58 | | // cpi->rc.is_$Source_Type to make this function as it is in the comment? |
59 | 0 | const ExtRefreshFrameFlagsInfo *const ext_refresh_frame_flags = |
60 | 0 | &cpi->ext_flags.refresh_frame; |
61 | 0 | cpi->rc.is_src_frame_alt_ref = 0; |
62 | |
|
63 | 0 | switch (type) { |
64 | 0 | case KF_UPDATE: |
65 | 0 | set_refresh_frame_flags(refresh_frame, true, true, true); |
66 | 0 | break; |
67 | | |
68 | 0 | case LF_UPDATE: |
69 | 0 | set_refresh_frame_flags(refresh_frame, false, false, false); |
70 | 0 | break; |
71 | | |
72 | 0 | case GF_UPDATE: |
73 | 0 | set_refresh_frame_flags(refresh_frame, true, false, false); |
74 | 0 | break; |
75 | | |
76 | 0 | case OVERLAY_UPDATE: |
77 | 0 | if (refbuf_state == REFBUF_RESET) |
78 | 0 | set_refresh_frame_flags(refresh_frame, true, true, true); |
79 | 0 | else |
80 | 0 | set_refresh_frame_flags(refresh_frame, true, false, false); |
81 | |
|
82 | 0 | cpi->rc.is_src_frame_alt_ref = 1; |
83 | 0 | break; |
84 | | |
85 | 0 | case ARF_UPDATE: |
86 | | // NOTE: BWDREF does not get updated along with ALTREF_FRAME. |
87 | 0 | if (refbuf_state == REFBUF_RESET) |
88 | 0 | set_refresh_frame_flags(refresh_frame, true, true, true); |
89 | 0 | else |
90 | 0 | set_refresh_frame_flags(refresh_frame, false, false, true); |
91 | |
|
92 | 0 | break; |
93 | | |
94 | 0 | case INTNL_OVERLAY_UPDATE: |
95 | 0 | set_refresh_frame_flags(refresh_frame, false, false, false); |
96 | 0 | cpi->rc.is_src_frame_alt_ref = 1; |
97 | 0 | break; |
98 | | |
99 | 0 | case INTNL_ARF_UPDATE: |
100 | 0 | set_refresh_frame_flags(refresh_frame, false, true, false); |
101 | 0 | break; |
102 | | |
103 | 0 | default: assert(0); break; |
104 | 0 | } |
105 | | |
106 | 0 | if (ext_refresh_frame_flags->update_pending && |
107 | 0 | (!is_stat_generation_stage(cpi))) { |
108 | 0 | set_refresh_frame_flags(refresh_frame, |
109 | 0 | ext_refresh_frame_flags->golden_frame, |
110 | 0 | ext_refresh_frame_flags->bwd_ref_frame, |
111 | 0 | ext_refresh_frame_flags->alt_ref_frame); |
112 | 0 | GF_GROUP *gf_group = &cpi->ppi->gf_group; |
113 | 0 | if (ext_refresh_frame_flags->golden_frame) |
114 | 0 | gf_group->update_type[cpi->gf_frame_index] = GF_UPDATE; |
115 | 0 | if (ext_refresh_frame_flags->alt_ref_frame) |
116 | 0 | gf_group->update_type[cpi->gf_frame_index] = ARF_UPDATE; |
117 | 0 | if (ext_refresh_frame_flags->bwd_ref_frame) |
118 | 0 | gf_group->update_type[cpi->gf_frame_index] = INTNL_ARF_UPDATE; |
119 | 0 | } |
120 | |
|
121 | 0 | if (force_refresh_all) |
122 | 0 | set_refresh_frame_flags(refresh_frame, true, true, true); |
123 | 0 | } |
124 | | |
125 | | static void set_additional_frame_flags(const AV1_COMMON *const cm, |
126 | 0 | unsigned int *const frame_flags) { |
127 | 0 | if (frame_is_intra_only(cm)) { |
128 | 0 | *frame_flags |= FRAMEFLAGS_INTRAONLY; |
129 | 0 | } |
130 | 0 | if (frame_is_sframe(cm)) { |
131 | 0 | *frame_flags |= FRAMEFLAGS_SWITCH; |
132 | 0 | } |
133 | 0 | if (cm->features.error_resilient_mode) { |
134 | 0 | *frame_flags |= FRAMEFLAGS_ERROR_RESILIENT; |
135 | 0 | } |
136 | 0 | } |
137 | | |
138 | | static void set_ext_overrides(AV1_COMMON *const cm, |
139 | | EncodeFrameParams *const frame_params, |
140 | 0 | ExternalFlags *const ext_flags) { |
141 | | // Overrides the defaults with the externally supplied values with |
142 | | // av1_update_reference() and av1_update_entropy() calls |
143 | | // Note: The overrides are valid only for the next frame passed |
144 | | // to av1_encode_lowlevel() |
145 | |
|
146 | 0 | if (ext_flags->use_s_frame) { |
147 | 0 | frame_params->frame_type = S_FRAME; |
148 | 0 | } |
149 | |
|
150 | 0 | if (ext_flags->refresh_frame_context_pending) { |
151 | 0 | cm->features.refresh_frame_context = ext_flags->refresh_frame_context; |
152 | 0 | ext_flags->refresh_frame_context_pending = 0; |
153 | 0 | } |
154 | 0 | cm->features.allow_ref_frame_mvs = ext_flags->use_ref_frame_mvs; |
155 | |
|
156 | 0 | frame_params->error_resilient_mode = ext_flags->use_error_resilient; |
157 | | // A keyframe is already error resilient and keyframes with |
158 | | // error_resilient_mode interferes with the use of show_existing_frame |
159 | | // when forward reference keyframes are enabled. |
160 | 0 | frame_params->error_resilient_mode &= frame_params->frame_type != KEY_FRAME; |
161 | | // For bitstream conformance, s-frames must be error-resilient |
162 | 0 | frame_params->error_resilient_mode |= frame_params->frame_type == S_FRAME; |
163 | 0 | } |
164 | | |
165 | | static int choose_primary_ref_frame( |
166 | 0 | AV1_COMP *const cpi, const EncodeFrameParams *const frame_params) { |
167 | 0 | const AV1_COMMON *const cm = &cpi->common; |
168 | |
|
169 | 0 | const int intra_only = frame_params->frame_type == KEY_FRAME || |
170 | 0 | frame_params->frame_type == INTRA_ONLY_FRAME; |
171 | 0 | if (intra_only || frame_params->error_resilient_mode || |
172 | 0 | cpi->ext_flags.use_primary_ref_none) { |
173 | 0 | return PRIMARY_REF_NONE; |
174 | 0 | } |
175 | | |
176 | | // In large scale case, always use Last frame's frame contexts. |
177 | | // Note(yunqing): In other cases, primary_ref_frame is chosen based on |
178 | | // cpi->ppi->gf_group.layer_depth[cpi->gf_frame_index], which also controls |
179 | | // frame bit allocation. |
180 | 0 | if (cm->tiles.large_scale) return (LAST_FRAME - LAST_FRAME); |
181 | | |
182 | 0 | if (cpi->ppi->use_svc) return av1_svc_primary_ref_frame(cpi); |
183 | | |
184 | | // Find the most recent reference frame with the same reference type as the |
185 | | // current frame |
186 | 0 | const int current_ref_type = get_current_frame_ref_type(cpi); |
187 | 0 | int wanted_fb = cpi->ppi->fb_of_context_type[current_ref_type]; |
188 | | #if CONFIG_FRAME_PARALLEL_ENCODE && CONFIG_FRAME_PARALLEL_ENCODE_2 && \ |
189 | | CONFIG_FPMT_TEST |
190 | | if (cpi->ppi->fpmt_unit_test_cfg == PARALLEL_SIMULATION_ENCODE) { |
191 | | GF_GROUP *const gf_group = &cpi->ppi->gf_group; |
192 | | if (gf_group->update_type[cpi->gf_frame_index] == INTNL_ARF_UPDATE) { |
193 | | int frame_level = gf_group->frame_parallel_level[cpi->gf_frame_index]; |
194 | | // Book keep wanted_fb of frame_parallel_level 1 frame in an FP2 set. |
195 | | if (frame_level == 1) { |
196 | | cpi->wanted_fb = wanted_fb; |
197 | | } |
198 | | // Use the wanted_fb of level 1 frame in an FP2 for a level 2 frame in the |
199 | | // set. |
200 | | if (frame_level == 2 && |
201 | | gf_group->update_type[cpi->gf_frame_index - 1] == INTNL_ARF_UPDATE) { |
202 | | assert(gf_group->frame_parallel_level[cpi->gf_frame_index - 1] == 1); |
203 | | wanted_fb = cpi->wanted_fb; |
204 | | } |
205 | | } |
206 | | } |
207 | | #endif // CONFIG_FRAME_PARALLEL_ENCODE && CONFIG_FRAME_PARALLEL_ENCODE_2 && |
208 | | // CONFIG_FPMT_TEST |
209 | 0 | int primary_ref_frame = PRIMARY_REF_NONE; |
210 | 0 | for (int ref_frame = LAST_FRAME; ref_frame <= ALTREF_FRAME; ref_frame++) { |
211 | 0 | if (get_ref_frame_map_idx(cm, ref_frame) == wanted_fb) { |
212 | 0 | primary_ref_frame = ref_frame - LAST_FRAME; |
213 | 0 | } |
214 | 0 | } |
215 | |
|
216 | 0 | return primary_ref_frame; |
217 | 0 | } |
218 | | |
219 | 0 | static void adjust_frame_rate(AV1_COMP *cpi, int64_t ts_start, int64_t ts_end) { |
220 | 0 | TimeStamps *time_stamps = &cpi->time_stamps; |
221 | 0 | int64_t this_duration; |
222 | 0 | int step = 0; |
223 | | |
224 | | // Clear down mmx registers |
225 | |
|
226 | 0 | if (cpi->ppi->use_svc && cpi->svc.spatial_layer_id > 0) { |
227 | 0 | cpi->framerate = cpi->svc.base_framerate; |
228 | 0 | av1_rc_update_framerate(cpi, cpi->common.width, cpi->common.height); |
229 | 0 | return; |
230 | 0 | } |
231 | | |
232 | 0 | if (ts_start == time_stamps->first_ts_start) { |
233 | 0 | this_duration = ts_end - ts_start; |
234 | 0 | step = 1; |
235 | 0 | } else { |
236 | 0 | int64_t last_duration = |
237 | 0 | time_stamps->prev_ts_end - time_stamps->prev_ts_start; |
238 | |
|
239 | 0 | this_duration = ts_end - time_stamps->prev_ts_end; |
240 | | |
241 | | // do a step update if the duration changes by 10% |
242 | 0 | if (last_duration) |
243 | 0 | step = (int)((this_duration - last_duration) * 10 / last_duration); |
244 | 0 | } |
245 | |
|
246 | 0 | if (this_duration) { |
247 | 0 | if (step) { |
248 | | #if CONFIG_FRAME_PARALLEL_ENCODE |
249 | | cpi->new_framerate = 10000000.0 / this_duration; |
250 | | #endif |
251 | 0 | av1_new_framerate(cpi, 10000000.0 / this_duration); |
252 | 0 | } else { |
253 | 0 | double framerate; |
254 | | // Average this frame's rate into the last second's average |
255 | | // frame rate. If we haven't seen 1 second yet, then average |
256 | | // over the whole interval seen. |
257 | 0 | const double interval = |
258 | 0 | AOMMIN((double)(ts_end - time_stamps->first_ts_start), 10000000.0); |
259 | 0 | double avg_duration = 10000000.0 / cpi->framerate; |
260 | 0 | avg_duration *= (interval - avg_duration + this_duration); |
261 | 0 | avg_duration /= interval; |
262 | | #if CONFIG_FRAME_PARALLEL_ENCODE |
263 | | cpi->new_framerate = (10000000.0 / avg_duration); |
264 | | // For parallel frames update cpi->framerate with new_framerate |
265 | | // during av1_post_encode_updates() |
266 | | framerate = |
267 | | (cpi->ppi->gf_group.frame_parallel_level[cpi->gf_frame_index] > 0) |
268 | | ? cpi->framerate |
269 | | : cpi->new_framerate; |
270 | | #else |
271 | 0 | framerate = (10000000.0 / avg_duration); |
272 | 0 | #endif |
273 | 0 | av1_new_framerate(cpi, framerate); |
274 | 0 | } |
275 | 0 | } |
276 | |
|
277 | 0 | time_stamps->prev_ts_start = ts_start; |
278 | 0 | time_stamps->prev_ts_end = ts_end; |
279 | 0 | } |
280 | | |
281 | | // Determine whether there is a forced keyframe pending in the lookahead buffer |
282 | | int is_forced_keyframe_pending(struct lookahead_ctx *lookahead, |
283 | | const int up_to_index, |
284 | 0 | const COMPRESSOR_STAGE compressor_stage) { |
285 | 0 | for (int i = 0; i <= up_to_index; i++) { |
286 | 0 | const struct lookahead_entry *e = |
287 | 0 | av1_lookahead_peek(lookahead, i, compressor_stage); |
288 | 0 | if (e == NULL) { |
289 | | // We have reached the end of the lookahead buffer and not early-returned |
290 | | // so there isn't a forced key-frame pending. |
291 | 0 | return -1; |
292 | 0 | } else if (e->flags == AOM_EFLAG_FORCE_KF) { |
293 | 0 | return i; |
294 | 0 | } else { |
295 | 0 | continue; |
296 | 0 | } |
297 | 0 | } |
298 | 0 | return -1; // Never reached |
299 | 0 | } |
300 | | |
301 | | // Check if we should encode an ARF or internal ARF. If not, try a LAST |
302 | | // Do some setup associated with the chosen source |
303 | | // temporal_filtered, flush, and frame_update_type are outputs. |
304 | | // Return the frame source, or NULL if we couldn't find one |
305 | | static struct lookahead_entry *choose_frame_source( |
306 | | AV1_COMP *const cpi, int *const flush, int *pop_lookahead, |
307 | | struct lookahead_entry **last_source, |
308 | 0 | EncodeFrameParams *const frame_params) { |
309 | 0 | AV1_COMMON *const cm = &cpi->common; |
310 | 0 | const GF_GROUP *const gf_group = &cpi->ppi->gf_group; |
311 | 0 | struct lookahead_entry *source = NULL; |
312 | | |
313 | | // Source index in lookahead buffer. |
314 | 0 | int src_index = gf_group->arf_src_offset[cpi->gf_frame_index]; |
315 | | |
316 | | // TODO(Aasaipriya): Forced key frames need to be fixed when rc_mode != AOM_Q |
317 | 0 | if (src_index && |
318 | 0 | (is_forced_keyframe_pending(cpi->ppi->lookahead, src_index, |
319 | 0 | cpi->compressor_stage) != -1) && |
320 | 0 | cpi->oxcf.rc_cfg.mode != AOM_Q && !is_stat_generation_stage(cpi)) { |
321 | 0 | src_index = 0; |
322 | 0 | *flush = 1; |
323 | 0 | } |
324 | | |
325 | | // If the current frame is arf, then we should not pop from the lookahead |
326 | | // buffer. If the current frame is not arf, then pop it. This assumes the |
327 | | // first frame in the GF group is not arf. May need to change if it is not |
328 | | // true. |
329 | 0 | *pop_lookahead = (src_index == 0); |
330 | | // If this is a key frame and keyframe filtering is enabled with overlay, |
331 | | // then do not pop. |
332 | 0 | if (*pop_lookahead && cpi->oxcf.kf_cfg.enable_keyframe_filtering > 1 && |
333 | 0 | gf_group->update_type[cpi->gf_frame_index] == ARF_UPDATE && |
334 | 0 | !is_stat_generation_stage(cpi) && cpi->ppi->lookahead) { |
335 | 0 | if (cpi->ppi->lookahead->read_ctxs[cpi->compressor_stage].sz && |
336 | 0 | (*flush || |
337 | 0 | cpi->ppi->lookahead->read_ctxs[cpi->compressor_stage].sz == |
338 | 0 | cpi->ppi->lookahead->read_ctxs[cpi->compressor_stage].pop_sz)) { |
339 | 0 | *pop_lookahead = 0; |
340 | 0 | } |
341 | 0 | } |
342 | | |
343 | | // LAP stage does not have ARFs or forward key-frames, |
344 | | // hence, always pop_lookahead here. |
345 | 0 | if (is_stat_generation_stage(cpi)) { |
346 | 0 | *pop_lookahead = 1; |
347 | 0 | src_index = 0; |
348 | 0 | } |
349 | |
|
350 | 0 | frame_params->show_frame = *pop_lookahead; |
351 | |
|
352 | | #if CONFIG_FRAME_PARALLEL_ENCODE |
353 | | #if CONFIG_FPMT_TEST |
354 | | if (cpi->ppi->fpmt_unit_test_cfg == PARALLEL_ENCODE) { |
355 | | #else |
356 | | { |
357 | | #endif // CONFIG_FPMT_TEST |
358 | | // Future frame in parallel encode set |
359 | | if (gf_group->src_offset[cpi->gf_frame_index] != 0 && |
360 | | !is_stat_generation_stage(cpi)) |
361 | | src_index = gf_group->src_offset[cpi->gf_frame_index]; |
362 | | } |
363 | | #endif // CONFIG_FRAME_PARALLEL_ENCODE |
364 | 0 | if (frame_params->show_frame) { |
365 | | // show frame, pop from buffer |
366 | | // Get last frame source. |
367 | 0 | if (cm->current_frame.frame_number > 0) { |
368 | 0 | *last_source = av1_lookahead_peek(cpi->ppi->lookahead, src_index - 1, |
369 | 0 | cpi->compressor_stage); |
370 | 0 | } |
371 | | // Read in the source frame. |
372 | 0 | source = av1_lookahead_peek(cpi->ppi->lookahead, src_index, |
373 | 0 | cpi->compressor_stage); |
374 | 0 | } else { |
375 | | // no show frames are arf frames |
376 | 0 | source = av1_lookahead_peek(cpi->ppi->lookahead, src_index, |
377 | 0 | cpi->compressor_stage); |
378 | 0 | if (source != NULL) { |
379 | 0 | cm->showable_frame = 1; |
380 | 0 | } |
381 | 0 | } |
382 | 0 | return source; |
383 | 0 | } |
384 | | |
385 | | // Don't allow a show_existing_frame to coincide with an error resilient or |
386 | | // S-Frame. An exception can be made in the case of a keyframe, since it does |
387 | | // not depend on any previous frames. |
388 | | static int allow_show_existing(const AV1_COMP *const cpi, |
389 | 0 | unsigned int frame_flags) { |
390 | 0 | if (cpi->common.current_frame.frame_number == 0) return 0; |
391 | | |
392 | 0 | const struct lookahead_entry *lookahead_src = |
393 | 0 | av1_lookahead_peek(cpi->ppi->lookahead, 0, cpi->compressor_stage); |
394 | 0 | if (lookahead_src == NULL) return 1; |
395 | | |
396 | 0 | const int is_error_resilient = |
397 | 0 | cpi->oxcf.tool_cfg.error_resilient_mode || |
398 | 0 | (lookahead_src->flags & AOM_EFLAG_ERROR_RESILIENT); |
399 | 0 | const int is_s_frame = cpi->oxcf.kf_cfg.enable_sframe || |
400 | 0 | (lookahead_src->flags & AOM_EFLAG_SET_S_FRAME); |
401 | 0 | const int is_key_frame = |
402 | 0 | (cpi->rc.frames_to_key == 0) || (frame_flags & FRAMEFLAGS_KEY); |
403 | 0 | return !(is_error_resilient || is_s_frame) || is_key_frame; |
404 | 0 | } |
405 | | |
406 | | // Update frame_flags to tell the encoder's caller what sort of frame was |
407 | | // encoded. |
408 | | static void update_frame_flags(const AV1_COMMON *const cm, |
409 | | const RefreshFrameInfo *const refresh_frame, |
410 | 0 | unsigned int *frame_flags) { |
411 | 0 | if (encode_show_existing_frame(cm)) { |
412 | 0 | *frame_flags &= ~FRAMEFLAGS_GOLDEN; |
413 | 0 | *frame_flags &= ~FRAMEFLAGS_BWDREF; |
414 | 0 | *frame_flags &= ~FRAMEFLAGS_ALTREF; |
415 | 0 | *frame_flags &= ~FRAMEFLAGS_KEY; |
416 | 0 | return; |
417 | 0 | } |
418 | | |
419 | 0 | if (refresh_frame->golden_frame) { |
420 | 0 | *frame_flags |= FRAMEFLAGS_GOLDEN; |
421 | 0 | } else { |
422 | 0 | *frame_flags &= ~FRAMEFLAGS_GOLDEN; |
423 | 0 | } |
424 | |
|
425 | 0 | if (refresh_frame->alt_ref_frame) { |
426 | 0 | *frame_flags |= FRAMEFLAGS_ALTREF; |
427 | 0 | } else { |
428 | 0 | *frame_flags &= ~FRAMEFLAGS_ALTREF; |
429 | 0 | } |
430 | |
|
431 | 0 | if (refresh_frame->bwd_ref_frame) { |
432 | 0 | *frame_flags |= FRAMEFLAGS_BWDREF; |
433 | 0 | } else { |
434 | 0 | *frame_flags &= ~FRAMEFLAGS_BWDREF; |
435 | 0 | } |
436 | |
|
437 | 0 | if (cm->current_frame.frame_type == KEY_FRAME) { |
438 | 0 | *frame_flags |= FRAMEFLAGS_KEY; |
439 | 0 | } else { |
440 | 0 | *frame_flags &= ~FRAMEFLAGS_KEY; |
441 | 0 | } |
442 | 0 | } |
443 | | |
444 | | #define DUMP_REF_FRAME_IMAGES 0
445 | |
446 | | #if DUMP_REF_FRAME_IMAGES == 1
 | | // Debug helper: writes the Y, U and V planes of |ref_buf| as raw bytes to
 | | // |file_name|. NOTE(review): assumes 8-bit samples and 4:2:0 chroma
 | | // (1-byte fwrite units, half-resolution U/V) — confirm before using with
 | | // high bit-depth or other subsampling.
447 | | static int dump_one_image(AV1_COMMON *cm,
448 | | const YV12_BUFFER_CONFIG *const ref_buf,
449 | | char *file_name) {
450 | | int h;
451 | | FILE *f_ref = NULL;
452 | |
453 | | if (ref_buf == NULL) {
454 | | printf("Frame data buffer is NULL.\n");
455 | | return AOM_CODEC_MEM_ERROR;
456 | | }
457 | |
458 | | if ((f_ref = fopen(file_name, "wb")) == NULL) {
459 | | printf("Unable to open file %s to write.\n", file_name);
460 | | return AOM_CODEC_MEM_ERROR;
461 | | }
462 | |
463 | | // --- Y ---
464 | | for (h = 0; h < cm->height; ++h) {
465 | | fwrite(&ref_buf->y_buffer[h * ref_buf->y_stride], 1, cm->width, f_ref);
466 | | }
467 | | // --- U ---
468 | | for (h = 0; h < (cm->height >> 1); ++h) {
469 | | fwrite(&ref_buf->u_buffer[h * ref_buf->uv_stride], 1, (cm->width >> 1),
470 | | f_ref);
471 | | }
472 | | // --- V ---
473 | | for (h = 0; h < (cm->height >> 1); ++h) {
474 | | fwrite(&ref_buf->v_buffer[h * ref_buf->uv_stride], 1, (cm->width >> 1),
475 | | f_ref);
476 | | }
477 | |
478 | | fclose(f_ref);
479 | |
480 | | return AOM_CODEC_OK;
481 | | }
482 | |
 | | // Debug helper: dumps every inter reference (LAST..ALTREF) of the current
 | | // frame to /tmp/enc_F<frame>_ref_<ref>.yuv via dump_one_image().
483 | | static void dump_ref_frame_images(AV1_COMP *cpi) {
484 | | AV1_COMMON *const cm = &cpi->common;
485 | | MV_REFERENCE_FRAME ref_frame;
486 | |
487 | | for (ref_frame = LAST_FRAME; ref_frame <= ALTREF_FRAME; ++ref_frame) {
488 | | char file_name[256] = "";
489 | | snprintf(file_name, sizeof(file_name), "/tmp/enc_F%d_ref_%d.yuv",
490 | | cm->current_frame.frame_number, ref_frame);
491 | | dump_one_image(cm, get_ref_frame_yv12_buf(cpi, ref_frame), file_name);
492 | | }
493 | | }
494 | | #endif // DUMP_REF_FRAME_IMAGES == 1
495 | | |
496 | 0 | int av1_get_refresh_ref_frame_map(int refresh_frame_flags) { |
497 | 0 | int ref_map_index; |
498 | |
|
499 | 0 | for (ref_map_index = 0; ref_map_index < REF_FRAMES; ++ref_map_index) |
500 | 0 | if ((refresh_frame_flags >> ref_map_index) & 1) break; |
501 | |
|
502 | 0 | if (ref_map_index == REF_FRAMES) ref_map_index = INVALID_IDX; |
503 | 0 | return ref_map_index; |
504 | 0 | } |
505 | | |
506 | | static void update_arf_stack(int ref_map_index, |
507 | 0 | RefBufferStack *ref_buffer_stack) { |
508 | 0 | if (ref_buffer_stack->arf_stack_size >= 0) { |
509 | 0 | if (ref_buffer_stack->arf_stack[0] == ref_map_index) |
510 | 0 | stack_pop(ref_buffer_stack->arf_stack, &ref_buffer_stack->arf_stack_size); |
511 | 0 | } |
512 | |
|
513 | 0 | if (ref_buffer_stack->lst_stack_size) { |
514 | 0 | for (int i = ref_buffer_stack->lst_stack_size - 1; i >= 0; --i) { |
515 | 0 | if (ref_buffer_stack->lst_stack[i] == ref_map_index) { |
516 | 0 | for (int idx = i; idx < ref_buffer_stack->lst_stack_size - 1; ++idx) |
517 | 0 | ref_buffer_stack->lst_stack[idx] = |
518 | 0 | ref_buffer_stack->lst_stack[idx + 1]; |
519 | 0 | ref_buffer_stack->lst_stack[ref_buffer_stack->lst_stack_size - 1] = |
520 | 0 | INVALID_IDX; |
521 | 0 | --ref_buffer_stack->lst_stack_size; |
522 | 0 | } |
523 | 0 | } |
524 | 0 | } |
525 | |
|
526 | 0 | if (ref_buffer_stack->gld_stack_size) { |
527 | 0 | for (int i = ref_buffer_stack->gld_stack_size - 1; i >= 0; --i) { |
528 | 0 | if (ref_buffer_stack->gld_stack[i] == ref_map_index) { |
529 | 0 | for (int idx = i; idx < ref_buffer_stack->gld_stack_size - 1; ++idx) |
530 | 0 | ref_buffer_stack->gld_stack[idx] = |
531 | 0 | ref_buffer_stack->gld_stack[idx + 1]; |
532 | 0 | ref_buffer_stack->gld_stack[ref_buffer_stack->gld_stack_size - 1] = |
533 | 0 | INVALID_IDX; |
534 | 0 | --ref_buffer_stack->gld_stack_size; |
535 | 0 | } |
536 | 0 | } |
537 | 0 | } |
538 | 0 | } |
539 | | |
540 | | // Update reference frame stack info. |
541 | | void av1_update_ref_frame_map(const AV1_COMP *cpi, |
542 | | FRAME_UPDATE_TYPE frame_update_type, |
543 | | REFBUF_STATE refbuf_state, int ref_map_index, |
544 | 0 | RefBufferStack *ref_buffer_stack) { |
545 | 0 | const AV1_COMMON *const cm = &cpi->common; |
546 | | |
547 | | // TODO(jingning): Consider the S-frame same as key frame for the |
548 | | // reference frame tracking purpose. The logic might be better |
549 | | // expressed than converting the frame update type. |
550 | 0 | if (frame_is_sframe(cm)) frame_update_type = KF_UPDATE; |
551 | 0 | if (is_frame_droppable(&cpi->svc, &cpi->ext_flags.refresh_frame)) return; |
552 | | |
553 | 0 | switch (frame_update_type) { |
554 | 0 | case KF_UPDATE: |
555 | 0 | stack_reset(ref_buffer_stack->lst_stack, |
556 | 0 | &ref_buffer_stack->lst_stack_size); |
557 | 0 | stack_reset(ref_buffer_stack->gld_stack, |
558 | 0 | &ref_buffer_stack->gld_stack_size); |
559 | 0 | stack_reset(ref_buffer_stack->arf_stack, |
560 | 0 | &ref_buffer_stack->arf_stack_size); |
561 | 0 | stack_push(ref_buffer_stack->gld_stack, &ref_buffer_stack->gld_stack_size, |
562 | 0 | ref_map_index); |
563 | 0 | break; |
564 | 0 | case GF_UPDATE: |
565 | 0 | update_arf_stack(ref_map_index, ref_buffer_stack); |
566 | 0 | stack_push(ref_buffer_stack->gld_stack, &ref_buffer_stack->gld_stack_size, |
567 | 0 | ref_map_index); |
568 | | // For nonrd_mode: update LAST as well on GF_UPDATE frame. |
569 | | // TODO(jingning, marpan): Why replacing both reference frames with the |
570 | | // same decoded frame? |
571 | 0 | if (cpi->sf.rt_sf.use_nonrd_pick_mode) |
572 | 0 | stack_push(ref_buffer_stack->lst_stack, |
573 | 0 | &ref_buffer_stack->lst_stack_size, ref_map_index); |
574 | 0 | break; |
575 | 0 | case LF_UPDATE: |
576 | 0 | update_arf_stack(ref_map_index, ref_buffer_stack); |
577 | 0 | stack_push(ref_buffer_stack->lst_stack, &ref_buffer_stack->lst_stack_size, |
578 | 0 | ref_map_index); |
579 | 0 | break; |
580 | 0 | case ARF_UPDATE: |
581 | 0 | case INTNL_ARF_UPDATE: |
582 | 0 | if (refbuf_state == REFBUF_RESET) { |
583 | 0 | stack_reset(ref_buffer_stack->lst_stack, |
584 | 0 | &ref_buffer_stack->lst_stack_size); |
585 | 0 | stack_reset(ref_buffer_stack->gld_stack, |
586 | 0 | &ref_buffer_stack->gld_stack_size); |
587 | 0 | stack_reset(ref_buffer_stack->arf_stack, |
588 | 0 | &ref_buffer_stack->arf_stack_size); |
589 | 0 | } else { |
590 | 0 | update_arf_stack(ref_map_index, ref_buffer_stack); |
591 | 0 | } |
592 | 0 | stack_push(ref_buffer_stack->arf_stack, &ref_buffer_stack->arf_stack_size, |
593 | 0 | ref_map_index); |
594 | 0 | break; |
595 | 0 | case OVERLAY_UPDATE: |
596 | 0 | if (refbuf_state == REFBUF_RESET) { |
597 | 0 | ref_map_index = stack_pop(ref_buffer_stack->arf_stack, |
598 | 0 | &ref_buffer_stack->arf_stack_size); |
599 | 0 | stack_reset(ref_buffer_stack->lst_stack, |
600 | 0 | &ref_buffer_stack->lst_stack_size); |
601 | 0 | stack_reset(ref_buffer_stack->gld_stack, |
602 | 0 | &ref_buffer_stack->gld_stack_size); |
603 | 0 | stack_reset(ref_buffer_stack->arf_stack, |
604 | 0 | &ref_buffer_stack->arf_stack_size); |
605 | 0 | stack_push(ref_buffer_stack->gld_stack, |
606 | 0 | &ref_buffer_stack->gld_stack_size, ref_map_index); |
607 | 0 | } else { |
608 | 0 | if (ref_map_index != INVALID_IDX) { |
609 | 0 | update_arf_stack(ref_map_index, ref_buffer_stack); |
610 | 0 | stack_push(ref_buffer_stack->lst_stack, |
611 | 0 | &ref_buffer_stack->lst_stack_size, ref_map_index); |
612 | 0 | } |
613 | 0 | ref_map_index = stack_pop(ref_buffer_stack->arf_stack, |
614 | 0 | &ref_buffer_stack->arf_stack_size); |
615 | 0 | stack_push(ref_buffer_stack->gld_stack, |
616 | 0 | &ref_buffer_stack->gld_stack_size, ref_map_index); |
617 | 0 | } |
618 | 0 | break; |
619 | 0 | case INTNL_OVERLAY_UPDATE: |
620 | 0 | ref_map_index = stack_pop(ref_buffer_stack->arf_stack, |
621 | 0 | &ref_buffer_stack->arf_stack_size); |
622 | 0 | stack_push(ref_buffer_stack->lst_stack, &ref_buffer_stack->lst_stack_size, |
623 | 0 | ref_map_index); |
624 | 0 | break; |
625 | 0 | default: assert(0 && "unknown type"); |
626 | 0 | } |
627 | 0 | return; |
628 | 0 | } |
629 | | |
630 | | static int get_free_ref_map_index( |
631 | | #if CONFIG_FRAME_PARALLEL_ENCODE |
632 | | RefFrameMapPair ref_map_pairs[REF_FRAMES], |
633 | | #endif // CONFIG_FRAME_PARALLEL_ENCODE |
634 | 0 | const RefBufferStack *ref_buffer_stack) { |
635 | | #if CONFIG_FRAME_PARALLEL_ENCODE |
636 | | (void)ref_buffer_stack; |
637 | | for (int idx = 0; idx < REF_FRAMES; ++idx) |
638 | | if (ref_map_pairs[idx].disp_order == -1) return idx; |
639 | | return INVALID_IDX; |
640 | | #else |
641 | 0 | for (int idx = 0; idx < REF_FRAMES; ++idx) { |
642 | 0 | int is_free = 1; |
643 | 0 | for (int i = 0; i < ref_buffer_stack->arf_stack_size; ++i) { |
644 | 0 | if (ref_buffer_stack->arf_stack[i] == idx) { |
645 | 0 | is_free = 0; |
646 | 0 | break; |
647 | 0 | } |
648 | 0 | } |
649 | |
|
650 | 0 | for (int i = 0; i < ref_buffer_stack->lst_stack_size; ++i) { |
651 | 0 | if (ref_buffer_stack->lst_stack[i] == idx) { |
652 | 0 | is_free = 0; |
653 | 0 | break; |
654 | 0 | } |
655 | 0 | } |
656 | |
|
657 | 0 | for (int i = 0; i < ref_buffer_stack->gld_stack_size; ++i) { |
658 | 0 | if (ref_buffer_stack->gld_stack[i] == idx) { |
659 | 0 | is_free = 0; |
660 | 0 | break; |
661 | 0 | } |
662 | 0 | } |
663 | |
|
664 | 0 | if (is_free) return idx; |
665 | 0 | } |
666 | 0 | return INVALID_IDX; |
667 | 0 | #endif // CONFIG_FRAME_PARALLEL_ENCODE |
668 | 0 | } |
669 | | |
670 | | #if CONFIG_FRAME_PARALLEL_ENCODE
 | | // Choose which occupied reference-map slot to evict for the frame shown
 | | // at |cur_frame_disp|. Frames displayed in the future are never evicted.
 | | // Preference order: the overall oldest non-level-1 reference; otherwise
 | | // the oldest level-1 (ARF) reference. When |update_arf| is set and more
 | | // than two level-1 frames are mapped, the oldest level-1 frame is
 | | // evicted immediately.
671 | | static int get_refresh_idx(RefFrameMapPair ref_frame_map_pairs[REF_FRAMES],
672 | | int update_arf,
673 | | #if CONFIG_FRAME_PARALLEL_ENCODE_2
674 | | GF_GROUP *gf_group, int gf_index,
675 | | int enable_refresh_skip,
676 | | #endif // CONFIG_FRAME_PARALLEL_ENCODE_2
677 | | int cur_frame_disp) {
678 | | int arf_count = 0;
679 | | int oldest_arf_order = INT32_MAX;
680 | | int oldest_arf_idx = -1;
681 | |
682 | | int oldest_frame_order = INT32_MAX;
683 | | int oldest_idx = -1;
684 | |
685 | | for (int map_idx = 0; map_idx < REF_FRAMES; map_idx++) {
686 | | RefFrameMapPair ref_pair = ref_frame_map_pairs[map_idx];
687 | | if (ref_pair.disp_order == -1) continue;
688 | | const int frame_order = ref_pair.disp_order;
689 | | const int reference_frame_level = ref_pair.pyr_level;
690 | | // Do not refresh a future frame.
691 | | if (frame_order > cur_frame_disp) continue;
692 | |
693 | | #if CONFIG_FRAME_PARALLEL_ENCODE_2
694 | | if (enable_refresh_skip) {
695 | | int skip_frame = 0;
696 | | // Prevent refreshing a frame in gf_group->skip_frame_refresh.
697 | | for (int i = 0; i < REF_FRAMES; i++) {
698 | | int frame_to_skip = gf_group->skip_frame_refresh[gf_index][i];
699 | | if (frame_to_skip == INVALID_IDX) break;
700 | | if (frame_order == frame_to_skip) {
701 | | skip_frame = 1;
702 | | break;
703 | | }
704 | | }
705 | | if (skip_frame) continue;
706 | | }
707 | | #endif // CONFIG_FRAME_PARALLEL_ENCODE_2
708 | |
709 | | // Keep track of the oldest level 1 frame if the current frame is also level
710 | | // 1.
711 | | if (reference_frame_level == 1) {
712 | | // If there are more than 2 level 1 frames in the reference list,
713 | | // discard the oldest.
714 | | if (frame_order < oldest_arf_order) {
715 | | oldest_arf_order = frame_order;
716 | | oldest_arf_idx = map_idx;
717 | | }
718 | | arf_count++;
719 | | continue;
720 | | }
721 | |
722 | | // Update the overall oldest reference frame.
723 | | if (frame_order < oldest_frame_order) {
724 | | oldest_frame_order = frame_order;
725 | | oldest_idx = map_idx;
726 | | }
727 | | }
728 | | if (update_arf && arf_count > 2) return oldest_arf_idx;
729 | | if (oldest_idx >= 0) return oldest_idx;
730 | | if (oldest_arf_idx >= 0) return oldest_arf_idx;
731 | | #if CONFIG_FRAME_PARALLEL_ENCODE_2
732 | | if (oldest_idx == -1) {
733 | | assert(arf_count > 2 && enable_refresh_skip);
734 | | return oldest_arf_idx;
735 | | }
736 | | #endif // CONFIG_FRAME_PARALLEL_ENCODE_2
737 | | assert(0 && "No valid refresh index found");
738 | | return -1;
739 | | }
740 | | |
741 | | #if CONFIG_FRAME_PARALLEL_ENCODE_2 |
742 | | // Computes the reference refresh index for INTNL_ARF_UPDATE frame. |
743 | | int av1_calc_refresh_idx_for_intnl_arf( |
744 | | AV1_COMP *cpi, RefFrameMapPair ref_frame_map_pairs[REF_FRAMES], |
745 | | int gf_index) { |
746 | | GF_GROUP *const gf_group = &cpi->ppi->gf_group; |
747 | | |
748 | | // Search for the open slot to store the current frame. |
749 | | int free_fb_index = get_free_ref_map_index(ref_frame_map_pairs, NULL); |
750 | | |
751 | | // Use a free slot if available. |
752 | | if (free_fb_index != INVALID_IDX) { |
753 | | return free_fb_index; |
754 | | } else { |
755 | | int enable_refresh_skip = !is_one_pass_rt_params(cpi); |
756 | | int refresh_idx = |
757 | | get_refresh_idx(ref_frame_map_pairs, 0, gf_group, gf_index, |
758 | | enable_refresh_skip, gf_group->display_idx[gf_index]); |
759 | | return refresh_idx; |
760 | | } |
761 | | } |
762 | | #endif // CONFIG_FRAME_PARALLEL_ENCODE_2 |
763 | | #endif // CONFIG_FRAME_PARALLEL_ENCODE |
764 | | |
// Computes the refresh_frame_flags bitmask for the current frame: bit i set
// means reference buffer slot i will be overwritten by this frame.
// Returns SELECT_ALL_BUF_SLOTS for frames that reset all slots (S-frames,
// REFBUF_RESET states), 0 for frames that refresh nothing (show_existing,
// droppable frames), otherwise a single- or multi-bit mask derived from
// either the external refresh-flag overrides or the internal slot-selection
// logic (open slot first, then eviction).
int av1_get_refresh_frame_flags(const AV1_COMP *const cpi,
                                const EncodeFrameParams *const frame_params,
                                FRAME_UPDATE_TYPE frame_update_type,
                                int gf_index,
#if CONFIG_FRAME_PARALLEL_ENCODE
                                int cur_disp_order,
                                RefFrameMapPair ref_frame_map_pairs[REF_FRAMES],
#endif  // CONFIG_FRAME_PARALLEL_ENCODE
                                const RefBufferStack *const ref_buffer_stack) {
  const AV1_COMMON *const cm = &cpi->common;
  const ExtRefreshFrameFlagsInfo *const ext_refresh_frame_flags =
      &cpi->ext_flags.refresh_frame;

  GF_GROUP *gf_group = &cpi->ppi->gf_group;
  // A refbuf reset (e.g. at a key-frame-like boundary) invalidates every slot.
  if (gf_group->refbuf_state[gf_index] == REFBUF_RESET)
    return SELECT_ALL_BUF_SLOTS;

  // TODO(jingning): Deprecate the following operations.
  // Switch frames and shown key-frames overwrite all reference slots
  if (frame_params->frame_type == S_FRAME) return SELECT_ALL_BUF_SLOTS;

  // show_existing_frames don't actually send refresh_frame_flags so set the
  // flags to 0 to keep things consistent.
  if (frame_params->show_existing_frame) return 0;

  const SVC *const svc = &cpi->svc;
  if (is_frame_droppable(svc, ext_refresh_frame_flags)) return 0;

  int refresh_mask = 0;

  if (ext_refresh_frame_flags->update_pending) {
    // Externally-controlled (SVC) per-slot refresh configuration.
    if (svc->set_ref_frame_config) {
      for (unsigned int i = 0; i < INTER_REFS_PER_FRAME; i++) {
        int ref_frame_map_idx = svc->ref_idx[i];
        refresh_mask |= svc->refresh[ref_frame_map_idx] << ref_frame_map_idx;
      }
      return refresh_mask;
    }
    // Unfortunately the encoder interface reflects the old refresh_*_frame
    // flags so we have to replicate the old refresh_frame_flags logic here in
    // order to preserve the behaviour of the flag overrides.
    int ref_frame_map_idx = get_ref_frame_map_idx(cm, LAST_FRAME);
    if (ref_frame_map_idx != INVALID_IDX)
      refresh_mask |= ext_refresh_frame_flags->last_frame << ref_frame_map_idx;

    ref_frame_map_idx = get_ref_frame_map_idx(cm, EXTREF_FRAME);
    if (ref_frame_map_idx != INVALID_IDX)
      refresh_mask |= ext_refresh_frame_flags->bwd_ref_frame
                      << ref_frame_map_idx;

    ref_frame_map_idx = get_ref_frame_map_idx(cm, ALTREF2_FRAME);
    if (ref_frame_map_idx != INVALID_IDX)
      refresh_mask |= ext_refresh_frame_flags->alt2_ref_frame
                      << ref_frame_map_idx;

    if (frame_update_type == OVERLAY_UPDATE) {
      // An overlay replaces the ALTREF it overlays; the golden_frame override
      // flag therefore maps onto the ALTREF slot here.
      ref_frame_map_idx = get_ref_frame_map_idx(cm, ALTREF_FRAME);
      if (ref_frame_map_idx != INVALID_IDX)
        refresh_mask |= ext_refresh_frame_flags->golden_frame
                        << ref_frame_map_idx;
    } else {
      ref_frame_map_idx = get_ref_frame_map_idx(cm, GOLDEN_FRAME);
      if (ref_frame_map_idx != INVALID_IDX)
        refresh_mask |= ext_refresh_frame_flags->golden_frame
                        << ref_frame_map_idx;

      ref_frame_map_idx = get_ref_frame_map_idx(cm, ALTREF_FRAME);
      if (ref_frame_map_idx != INVALID_IDX)
        refresh_mask |= ext_refresh_frame_flags->alt_ref_frame
                        << ref_frame_map_idx;
    }
    return refresh_mask;
  }

  // Search for the open slot to store the current frame.
  int free_fb_index = get_free_ref_map_index(
#if CONFIG_FRAME_PARALLEL_ENCODE
      ref_frame_map_pairs,
#endif  // CONFIG_FRAME_PARALLEL_ENCODE
      ref_buffer_stack);

#if CONFIG_FRAME_PARALLEL_ENCODE
  // No refresh necessary for these frame types.
  if (frame_update_type == OVERLAY_UPDATE ||
      frame_update_type == INTNL_OVERLAY_UPDATE)
    return refresh_mask;

  // If there is an open slot, refresh that one instead of replacing a
  // reference.
  if (free_fb_index != INVALID_IDX) {
    refresh_mask = 1 << free_fb_index;
    return refresh_mask;
  }
#if CONFIG_FRAME_PARALLEL_ENCODE_2
  const int enable_refresh_skip = !is_one_pass_rt_params(cpi);
#endif  // CONFIG_FRAME_PARALLEL_ENCODE_2
  const int update_arf = frame_update_type == ARF_UPDATE;
  // All slots occupied: select the buffer to evict.
  const int refresh_idx =
      get_refresh_idx(ref_frame_map_pairs, update_arf,
#if CONFIG_FRAME_PARALLEL_ENCODE_2
                      &cpi->ppi->gf_group, gf_index, enable_refresh_skip,
#endif  // CONFIG_FRAME_PARALLEL_ENCODE_2
                      cur_disp_order);
  return 1 << refresh_idx;
#else
  // Legacy (non frame-parallel) path: choose the slot to refresh from the
  // golden/last/arf reference stacks depending on the update type.
  switch (frame_update_type) {
    case KF_UPDATE:
    case GF_UPDATE:
      if (free_fb_index != INVALID_IDX) {
        refresh_mask = 1 << free_fb_index;
      } else {
        // Replace the oldest entry of whichever stack is non-empty.
        if (ref_buffer_stack->gld_stack_size)
          refresh_mask =
              1 << ref_buffer_stack
                       ->gld_stack[ref_buffer_stack->gld_stack_size - 1];
        else
          refresh_mask =
              1 << ref_buffer_stack
                       ->lst_stack[ref_buffer_stack->lst_stack_size - 1];
      }
      break;
    case LF_UPDATE:
      if (free_fb_index != INVALID_IDX) {
        refresh_mask = 1 << free_fb_index;
      } else {
        // Keep at least one entry in each stack when evicting.
        if (ref_buffer_stack->lst_stack_size >= 2)
          refresh_mask =
              1 << ref_buffer_stack
                       ->lst_stack[ref_buffer_stack->lst_stack_size - 1];
        else if (ref_buffer_stack->gld_stack_size >= 2)
          refresh_mask =
              1 << ref_buffer_stack
                       ->gld_stack[ref_buffer_stack->gld_stack_size - 1];
        else
          assert(0 && "No ref map index found");
      }
      break;
    case ARF_UPDATE:
      if (free_fb_index != INVALID_IDX) {
        refresh_mask = 1 << free_fb_index;
      } else {
        if (ref_buffer_stack->gld_stack_size >= 3)
          refresh_mask =
              1 << ref_buffer_stack
                       ->gld_stack[ref_buffer_stack->gld_stack_size - 1];
        else if (ref_buffer_stack->lst_stack_size >= 2)
          refresh_mask =
              1 << ref_buffer_stack
                       ->lst_stack[ref_buffer_stack->lst_stack_size - 1];
        else
          assert(0 && "No ref map index found");
      }
      break;
    case INTNL_ARF_UPDATE:
      if (free_fb_index != INVALID_IDX) {
        refresh_mask = 1 << free_fb_index;
      } else {
        refresh_mask =
            1 << ref_buffer_stack
                     ->lst_stack[ref_buffer_stack->lst_stack_size - 1];
      }
      break;
    case OVERLAY_UPDATE:
      if (free_fb_index != INVALID_IDX) refresh_mask = 1 << free_fb_index;
      break;
    case INTNL_OVERLAY_UPDATE: break;
    default: assert(0); break;
  }

  return refresh_mask;
#endif  // CONFIG_FRAME_PARALLEL_ENCODE
}
937 | | |
938 | | #if !CONFIG_REALTIME_ONLY |
939 | 0 | void setup_mi(AV1_COMP *const cpi, YV12_BUFFER_CONFIG *src) { |
940 | 0 | AV1_COMMON *const cm = &cpi->common; |
941 | 0 | const int num_planes = av1_num_planes(cm); |
942 | 0 | MACROBLOCK *const x = &cpi->td.mb; |
943 | 0 | MACROBLOCKD *const xd = &x->e_mbd; |
944 | |
|
945 | 0 | av1_setup_src_planes(x, src, 0, 0, num_planes, cm->seq_params->sb_size); |
946 | |
|
947 | 0 | av1_setup_block_planes(xd, cm->seq_params->subsampling_x, |
948 | 0 | cm->seq_params->subsampling_y, num_planes); |
949 | |
|
950 | 0 | set_mi_offsets(&cm->mi_params, xd, 0, 0); |
951 | 0 | } |
952 | | |
// Apply temporal filtering to source frames and encode the filtered frame.
// If the current frame does not require filtering, this function is identical
// to av1_encode() except that tpl is not performed.
//
// Filtering is only considered for KF_UPDATE, ARF_UPDATE, and "second ARF"
// frames; key frames additionally require a measurable noise level. When the
// filtered frame is close enough to the original (av1_check_show_filtered
// _frame), it may be shown directly via show_existing.
//
// Returns AOM_CODEC_OK on success, AOM_CODEC_ERROR if av1_encode() fails.
static int denoise_and_encode(AV1_COMP *const cpi, uint8_t *const dest,
                              EncodeFrameInput *const frame_input,
                              EncodeFrameParams *const frame_params,
                              EncodeFrameResults *const frame_results) {
#if CONFIG_COLLECT_COMPONENT_TIMING
  if (cpi->oxcf.pass == 2) start_timing(cpi, denoise_and_encode_time);
#endif
  const AV1EncoderConfig *const oxcf = &cpi->oxcf;
  AV1_COMMON *const cm = &cpi->common;
  GF_GROUP *const gf_group = &cpi->ppi->gf_group;
  FRAME_UPDATE_TYPE update_type =
      get_frame_update_type(&cpi->ppi->gf_group, cpi->gf_frame_index);
  const int is_second_arf =
      av1_gop_is_second_arf(gf_group, cpi->gf_frame_index);

  // Decide whether to apply temporal filtering to the source frame.
  int apply_filtering =
      av1_is_temporal_filter_on(oxcf) && !is_stat_generation_stage(cpi);
  // Filtering is only relevant for key frames, ARFs, and second ARFs.
  if (update_type != KF_UPDATE && update_type != ARF_UPDATE && !is_second_arf) {
    apply_filtering = 0;
  }
  if (apply_filtering) {
    if (frame_params->frame_type == KEY_FRAME) {
      // TODO(angiebird): Move the noise level check to av1_tf_info_filtering.
      // Decide whether it is allowed to perform key frame filtering
      int allow_kf_filtering = oxcf->kf_cfg.enable_keyframe_filtering &&
                               !frame_params->show_existing_frame &&
                               !is_lossless_requested(&oxcf->rc_cfg);
      if (allow_kf_filtering) {
        // Only filter the key frame if the luma plane actually has noise.
        const double y_noise_level = av1_estimate_noise_from_single_plane(
            frame_input->source, 0, cm->seq_params->bit_depth,
            NOISE_ESTIMATION_EDGE_THRESHOLD);
        apply_filtering = y_noise_level > 0;
      } else {
        apply_filtering = 0;
      }
      // If we are doing kf filtering, set up a few things.
      if (apply_filtering) {
        av1_setup_past_independence(cm);
      }
    } else if (is_second_arf) {
      apply_filtering = cpi->sf.hl_sf.second_alt_ref_filtering;
    }
  }
#if CONFIG_COLLECT_COMPONENT_TIMING
  if (cpi->oxcf.pass == 2) start_timing(cpi, apply_filtering_time);
#endif
  // Save the pointer to the original source image.
  YV12_BUFFER_CONFIG *source_buffer = frame_input->source;
  // apply filtering to frame
  if (apply_filtering) {
    int show_existing_alt_ref = 0;
    FRAME_DIFF frame_diff;
    int top_index = 0;
    int bottom_index = 0;
    // q_index is used below to judge whether the filtered frame is close
    // enough to the source to be shown directly (show_existing).
    const int q_index = av1_rc_pick_q_and_bounds(
        cpi, cpi->oxcf.frm_dim_cfg.width, cpi->oxcf.frm_dim_cfg.height,
        cpi->gf_frame_index, &bottom_index, &top_index);

    // TODO(bohanli): figure out why we need frame_type in cm here.
    cm->current_frame.frame_type = frame_params->frame_type;
    if (update_type == KF_UPDATE || update_type == ARF_UPDATE) {
      // KF/ARF filtering was performed ahead of time; fetch the result.
      YV12_BUFFER_CONFIG *tf_buf = av1_tf_info_get_filtered_buf(
          &cpi->ppi->tf_info, cpi->gf_frame_index, &frame_diff);
      if (tf_buf != NULL) {
        frame_input->source = tf_buf;
        show_existing_alt_ref = av1_check_show_filtered_frame(
            tf_buf, &frame_diff, q_index, cm->seq_params->bit_depth);
        if (show_existing_alt_ref) {
          cpi->common.showable_frame |= 1;
        }
      }
      if (gf_group->frame_type[cpi->gf_frame_index] != KEY_FRAME) {
        cpi->ppi->show_existing_alt_ref = show_existing_alt_ref;
      }
    }

    if (is_second_arf) {
      YV12_BUFFER_CONFIG *tf_buf_second_arf =
          &cpi->ppi->tf_info.tf_buf_second_arf;
      // We didn't apply temporal filtering for second arf ahead in
      // av1_tf_info_filtering().
      const int arf_src_index = gf_group->arf_src_offset[cpi->gf_frame_index];
      // Right now, we are still using tf_buf_second_arf due to
      // implementation complexity.
      // TODO(angiebird): Reuse tf_info->tf_buf here.
      av1_temporal_filter(cpi, arf_src_index, cpi->gf_frame_index, &frame_diff,
                          tf_buf_second_arf);
      show_existing_alt_ref = av1_check_show_filtered_frame(
          tf_buf_second_arf, &frame_diff, q_index, cm->seq_params->bit_depth);
      if (show_existing_alt_ref) {
        aom_extend_frame_borders(tf_buf_second_arf, av1_num_planes(cm));
        frame_input->source = tf_buf_second_arf;
        aom_copy_metadata_to_frame_buffer(frame_input->source,
                                          source_buffer->metadata);
      }
      // Currently INTNL_ARF_UPDATE only do show_existing.
      cpi->common.showable_frame |= 1;
    }
  }
#if CONFIG_COLLECT_COMPONENT_TIMING
  if (cpi->oxcf.pass == 2) end_timing(cpi, apply_filtering_time);
#endif

  int set_mv_params = frame_params->frame_type == KEY_FRAME ||
                      update_type == ARF_UPDATE || update_type == GF_UPDATE;
  cm->show_frame = frame_params->show_frame;
  cm->current_frame.frame_type = frame_params->frame_type;
  // TODO(bohanli): Why is this? what part of it is necessary?
  av1_set_frame_size(cpi, cm->superres_upscaled_width,
                     cm->superres_upscaled_height);
  if (set_mv_params) av1_set_mv_search_params(cpi);

#if CONFIG_RD_COMMAND
  if (frame_params->frame_type == KEY_FRAME) {
    char filepath[] = "rd_command.txt";
    av1_read_rd_command(filepath, &cpi->rd_command);
  }
#endif  // CONFIG_RD_COMMAND
  // TPL is only run at the start of a GOP (gf_frame_index == 0), after any
  // filtering, so that the model sees the frames as they will be encoded.
  if (cpi->gf_frame_index == 0 && !is_stat_generation_stage(cpi)) {
    // perform tpl after filtering
    int allow_tpl =
        oxcf->gf_cfg.lag_in_frames > 1 && oxcf->algo_cfg.enable_tpl_model;
    if (gf_group->size > MAX_LENGTH_TPL_FRAME_STATS) {
      allow_tpl = 0;
    }
    if (frame_params->frame_type == KEY_FRAME) {
      // TODO(angiebird): handle disable_filtered_key_tpl properly
      allow_tpl = allow_tpl && !cpi->sf.tpl_sf.disable_filtered_key_tpl;
    } else {
      // In rare case, it's possible to have non ARF/GF update_type here.
      // We should set allow_tpl to zero in the situation
      allow_tpl =
          allow_tpl && (update_type == ARF_UPDATE || update_type == GF_UPDATE);
    }

    if (allow_tpl) {
      if (!cpi->skip_tpl_setup_stats) {
        av1_tpl_preload_rc_estimate(cpi, frame_params);
        av1_tpl_setup_stats(cpi, 0, frame_params);
#if CONFIG_BITRATE_ACCURACY
        assert(cpi->gf_frame_index == 0);
        av1_vbr_rc_update_q_index_list(&cpi->vbr_rc_info, &cpi->ppi->tpl_data,
                                       gf_group, cm->seq_params->bit_depth);
#endif
      }
    } else {
      av1_init_tpl_stats(&cpi->ppi->tpl_data);
    }
  }

  if (av1_encode(cpi, dest, frame_input, frame_params, frame_results) !=
      AOM_CODEC_OK) {
    return AOM_CODEC_ERROR;
  }

  // Set frame_input source to true source for psnr calculation.
  if (apply_filtering && is_psnr_calc_enabled(cpi)) {
    cpi->source = av1_realloc_and_scale_if_required(
        cm, source_buffer, &cpi->scaled_source, cm->features.interp_filter, 0,
        false, true, cpi->oxcf.border_in_pixels,
        cpi->oxcf.tool_cfg.enable_global_motion);
    cpi->unscaled_source = source_buffer;
  }
#if CONFIG_COLLECT_COMPONENT_TIMING
  if (cpi->oxcf.pass == 2) end_timing(cpi, denoise_and_encode_time);
#endif
  return AOM_CODEC_OK;
}
1125 | | #endif // !CONFIG_REALTIME_ONLY |
1126 | | |
1127 | | #if !CONFIG_FRAME_PARALLEL_ENCODE |
1128 | | static INLINE int find_unused_ref_frame(const int *used_ref_frames, |
1129 | 0 | const int *stack, int stack_size) { |
1130 | 0 | for (int i = 0; i < stack_size; ++i) { |
1131 | 0 | const int this_ref = stack[i]; |
1132 | 0 | int ref_idx = 0; |
1133 | 0 | for (ref_idx = 0; ref_idx <= ALTREF_FRAME - LAST_FRAME; ++ref_idx) { |
1134 | 0 | if (this_ref == used_ref_frames[ref_idx]) break; |
1135 | 0 | } |
1136 | | |
1137 | | // not in use |
1138 | 0 | if (ref_idx > ALTREF_FRAME - LAST_FRAME) return this_ref; |
1139 | 0 | } |
1140 | | |
1141 | 0 | return INVALID_IDX; |
1142 | 0 | } |
1143 | | #endif // CONFIG_FRAME_PARALLEL_ENCODE |
1144 | | |
1145 | | #if CONFIG_FRAME_PARALLEL_ENCODE |
1146 | | /*!\cond */ |
// Struct to keep track of relevant reference frame data.
typedef struct {
  int map_idx;     // Index of this buffer in the reference frame map.
  int disp_order;  // Display order hint of the frame held in this buffer.
  int pyr_level;   // Pyramid (temporal) level of the frame in this buffer.
  int used;        // Nonzero once this buffer has been handled/assigned.
} RefBufMapData;
1154 | | /*!\endcond */ |
1155 | | |
1156 | | // Comparison function to sort reference frames in ascending display order. |
1157 | | static int compare_map_idx_pair_asc(const void *a, const void *b) { |
1158 | | if (((RefBufMapData *)a)->disp_order == ((RefBufMapData *)b)->disp_order) { |
1159 | | return 0; |
1160 | | } else if (((const RefBufMapData *)a)->disp_order > |
1161 | | ((const RefBufMapData *)b)->disp_order) { |
1162 | | return 1; |
1163 | | } else { |
1164 | | return -1; |
1165 | | } |
1166 | | } |
1167 | | |
1168 | | // Checks to see if a particular reference frame is already in the reference |
1169 | | // frame map. |
1170 | | static int is_in_ref_map(RefBufMapData *map, int disp_order, int n_frames) { |
1171 | | for (int i = 0; i < n_frames; i++) { |
1172 | | if (disp_order == map[i].disp_order) return 1; |
1173 | | } |
1174 | | return 0; |
1175 | | } |
1176 | | |
1177 | | // Add a reference buffer index to a named reference slot. |
1178 | | static void add_ref_to_slot(RefBufMapData *ref, int *const remapped_ref_idx, |
1179 | | int frame) { |
1180 | | remapped_ref_idx[frame - LAST_FRAME] = ref->map_idx; |
1181 | | ref->used = 1; |
1182 | | } |
1183 | | |
1184 | | // Threshold dictating when we are allowed to start considering |
1185 | | // leaving lowest level frames unmapped. |
1186 | | #define LOW_LEVEL_FRAMES_TR 5 |
1187 | | |
1188 | | // Find which reference buffer should be left out of the named mapping. |
1189 | | // This is because there are 8 reference buffers and only 7 named slots. |
1190 | | static void set_unmapped_ref(RefBufMapData *buffer_map, int n_bufs, |
1191 | | int n_min_level_refs, int min_level, |
1192 | | int cur_frame_disp) { |
1193 | | int max_dist = 0; |
1194 | | int unmapped_idx = -1; |
1195 | | if (n_bufs <= ALTREF_FRAME) return; |
1196 | | for (int i = 0; i < n_bufs; i++) { |
1197 | | if (buffer_map[i].used) continue; |
1198 | | if (buffer_map[i].pyr_level != min_level || |
1199 | | n_min_level_refs >= LOW_LEVEL_FRAMES_TR) { |
1200 | | int dist = abs(cur_frame_disp - buffer_map[i].disp_order); |
1201 | | if (dist > max_dist) { |
1202 | | max_dist = dist; |
1203 | | unmapped_idx = i; |
1204 | | } |
1205 | | } |
1206 | | } |
1207 | | assert(unmapped_idx >= 0 && "Unmapped reference not found"); |
1208 | | buffer_map[unmapped_idx].used = 1; |
1209 | | } |
1210 | | |
// Maps the 8 physical reference buffers onto the 7 named reference slots
// (LAST..ALTREF) for the frame at display order `cur_frame_disp`, writing the
// result into remapped_ref_idx[]. Assignment order: BWDREF for a same-display
// -order buffer, then GOLDEN (nearest past low-level) and ALTREF (nearest
// future low-level), then LAST/LAST2/LAST3 from past frames, BWDREF/ALTREF2
// from future frames, and finally any leftovers. Exactly one buffer is left
// unmapped via set_unmapped_ref() when all 8 buffers are valid.
static void get_ref_frames(RefFrameMapPair ref_frame_map_pairs[REF_FRAMES],
#if CONFIG_FRAME_PARALLEL_ENCODE_2
                           const AV1_COMP *const cpi, int gf_index,
                           int is_parallel_encode,
#endif  // CONFIG_FRAME_PARALLEL_ENCODE_2
                           int cur_frame_disp,
                           int remapped_ref_idx[REF_FRAMES]) {
  int buf_map_idx = 0;

  // Initialize reference frame mappings.
  for (int i = 0; i < REF_FRAMES; ++i) remapped_ref_idx[i] = INVALID_IDX;

  RefBufMapData buffer_map[REF_FRAMES];
  int n_bufs = 0;
  memset(buffer_map, 0, REF_FRAMES * sizeof(buffer_map[0]));
  int min_level = MAX_ARF_LAYERS;
  // NOTE(review): max_level is tracked below but not consumed in this
  // function.
  int max_level = 0;
#if CONFIG_FRAME_PARALLEL_ENCODE_2
  GF_GROUP *gf_group = &cpi->ppi->gf_group;
  int skip_ref_unmapping = 0;
  int is_one_pass_rt = is_one_pass_rt_params(cpi);
#endif  // CONFIG_FRAME_PARALLEL_ENCODE_2

  // Go through current reference buffers and store display order, pyr level,
  // and map index.
  for (int map_idx = 0; map_idx < REF_FRAMES; map_idx++) {
    // Get reference frame buffer.
    RefFrameMapPair ref_pair = ref_frame_map_pairs[map_idx];
    if (ref_pair.disp_order == -1) continue;
    const int frame_order = ref_pair.disp_order;
    // Avoid duplicates.
    if (is_in_ref_map(buffer_map, frame_order, n_bufs)) continue;
    const int reference_frame_level = ref_pair.pyr_level;

    // Keep track of the lowest and highest levels that currently exist.
    if (reference_frame_level < min_level) min_level = reference_frame_level;
    if (reference_frame_level > max_level) max_level = reference_frame_level;

    buffer_map[n_bufs].map_idx = map_idx;
    buffer_map[n_bufs].disp_order = frame_order;
    buffer_map[n_bufs].pyr_level = reference_frame_level;
    buffer_map[n_bufs].used = 0;
    n_bufs++;
  }

  // Sort frames in ascending display order.
  qsort(buffer_map, n_bufs, sizeof(buffer_map[0]), compare_map_idx_pair_asc);

  int n_min_level_refs = 0;
  int n_past_high_level = 0;
  int closest_past_ref = -1;
  int golden_idx = -1;
  int altref_idx = -1;

  // Find the GOLDEN_FRAME and BWDREF_FRAME.
  // Also collect various stats about the reference frames for the remaining
  // mappings.
  for (int i = n_bufs - 1; i >= 0; i--) {
    if (buffer_map[i].pyr_level == min_level) {
      // Keep track of the number of lowest level frames.
      n_min_level_refs++;
      if (buffer_map[i].disp_order < cur_frame_disp && golden_idx == -1 &&
          remapped_ref_idx[GOLDEN_FRAME - LAST_FRAME] == INVALID_IDX) {
        // Save index for GOLDEN.
        golden_idx = i;
      } else if (buffer_map[i].disp_order > cur_frame_disp &&
                 altref_idx == -1 &&
                 remapped_ref_idx[ALTREF_FRAME - LAST_FRAME] == INVALID_IDX) {
        // Save index for ALTREF.
        altref_idx = i;
      }
    } else if (buffer_map[i].disp_order == cur_frame_disp) {
      // Map the BWDREF_FRAME if this is the show_existing_frame.
      add_ref_to_slot(&buffer_map[i], remapped_ref_idx, BWDREF_FRAME);
    }

    // Keep track of the number of past frames that are not at the lowest level.
    if (buffer_map[i].disp_order < cur_frame_disp &&
        buffer_map[i].pyr_level != min_level)
      n_past_high_level++;

#if CONFIG_FRAME_PARALLEL_ENCODE_2
    // During parallel encodes of lower layer frames, exclude the first frame
    // (frame_parallel_level 1) from being used for the reference assignment of
    // the second frame (frame_parallel_level 2).
    if (!is_one_pass_rt && gf_group->frame_parallel_level[gf_index] == 2 &&
        gf_group->frame_parallel_level[gf_index - 1] == 1 &&
        gf_group->update_type[gf_index - 1] == INTNL_ARF_UPDATE) {
      assert(gf_group->update_type[gf_index] == INTNL_ARF_UPDATE);
#if CONFIG_FPMT_TEST
      is_parallel_encode = (cpi->ppi->fpmt_unit_test_cfg == PARALLEL_ENCODE)
                               ? is_parallel_encode
                               : 0;
#endif  // CONFIG_FPMT_TEST
      // If parallel cpis are active, use ref_idx_to_skip, else, use display
      // index.
      assert(IMPLIES(is_parallel_encode, cpi->ref_idx_to_skip != INVALID_IDX));
      assert(IMPLIES(!is_parallel_encode,
                     gf_group->skip_frame_as_ref[gf_index] != INVALID_IDX));
      buffer_map[i].used = is_parallel_encode
                               ? (buffer_map[i].map_idx == cpi->ref_idx_to_skip)
                               : (buffer_map[i].disp_order ==
                                  gf_group->skip_frame_as_ref[gf_index]);
      // In case a ref frame is excluded from being used during assignment,
      // skip the call to set_unmapped_ref(). Applicable in steady state.
      if (buffer_map[i].used) skip_ref_unmapping = 1;
    }
#endif  // CONFIG_FRAME_PARALLEL_ENCODE_2

    // Keep track of where the frames change from being past frames to future
    // frames.
    if (buffer_map[i].disp_order < cur_frame_disp && closest_past_ref < 0)
      closest_past_ref = i;
  }

  // Do not map GOLDEN and ALTREF based on their pyramid level if all reference
  // frames have the same level.
  if (n_min_level_refs <= n_bufs) {
    // Map the GOLDEN_FRAME.
    if (golden_idx > -1)
      add_ref_to_slot(&buffer_map[golden_idx], remapped_ref_idx, GOLDEN_FRAME);
    // Map the ALTREF_FRAME.
    if (altref_idx > -1)
      add_ref_to_slot(&buffer_map[altref_idx], remapped_ref_idx, ALTREF_FRAME);
  }

  // Find the buffer to be excluded from the mapping.
#if CONFIG_FRAME_PARALLEL_ENCODE_2
  if (!skip_ref_unmapping)
#endif  // CONFIG_FRAME_PARALLEL_ENCODE_2
    set_unmapped_ref(buffer_map, n_bufs, n_min_level_refs, min_level,
                     cur_frame_disp);

  // Place past frames in LAST_FRAME, LAST2_FRAME, and LAST3_FRAME.
  for (int frame = LAST_FRAME; frame < GOLDEN_FRAME; frame++) {
    // Continue if the current ref slot is already full.
    if (remapped_ref_idx[frame - LAST_FRAME] != INVALID_IDX) continue;
    // Find the next unmapped reference buffer
    // in decreasing ouptut order relative to current picture.
    int next_buf_max = 0;
    int next_disp_order = INT_MIN;
    for (buf_map_idx = n_bufs - 1; buf_map_idx >= 0; buf_map_idx--) {
      if (!buffer_map[buf_map_idx].used &&
          buffer_map[buf_map_idx].disp_order < cur_frame_disp &&
          buffer_map[buf_map_idx].disp_order > next_disp_order) {
        next_disp_order = buffer_map[buf_map_idx].disp_order;
        next_buf_max = buf_map_idx;
      }
    }
    buf_map_idx = next_buf_max;
    if (buf_map_idx < 0) break;
    if (buffer_map[buf_map_idx].used) break;
    add_ref_to_slot(&buffer_map[buf_map_idx], remapped_ref_idx, frame);
  }

  // Place future frames (if there are any) in BWDREF_FRAME and ALTREF2_FRAME.
  for (int frame = BWDREF_FRAME; frame < REF_FRAMES; frame++) {
    // Continue if the current ref slot is already full.
    if (remapped_ref_idx[frame - LAST_FRAME] != INVALID_IDX) continue;
    // Find the next unmapped reference buffer
    // in increasing ouptut order relative to current picture.
    int next_buf_max = 0;
    int next_disp_order = INT_MAX;
    for (buf_map_idx = n_bufs - 1; buf_map_idx >= 0; buf_map_idx--) {
      if (!buffer_map[buf_map_idx].used &&
          buffer_map[buf_map_idx].disp_order > cur_frame_disp &&
          buffer_map[buf_map_idx].disp_order < next_disp_order) {
        next_disp_order = buffer_map[buf_map_idx].disp_order;
        next_buf_max = buf_map_idx;
      }
    }
    buf_map_idx = next_buf_max;
    if (buf_map_idx < 0) break;
    if (buffer_map[buf_map_idx].used) break;
    add_ref_to_slot(&buffer_map[buf_map_idx], remapped_ref_idx, frame);
  }

  // Place remaining past frames.
  buf_map_idx = closest_past_ref;
  for (int frame = LAST_FRAME; frame < REF_FRAMES; frame++) {
    // Continue if the current ref slot is already full.
    if (remapped_ref_idx[frame - LAST_FRAME] != INVALID_IDX) continue;
    // Find the next unmapped reference buffer.
    for (; buf_map_idx >= 0; buf_map_idx--) {
      if (!buffer_map[buf_map_idx].used) break;
    }
    if (buf_map_idx < 0) break;
    if (buffer_map[buf_map_idx].used) break;
    add_ref_to_slot(&buffer_map[buf_map_idx], remapped_ref_idx, frame);
  }

  // Place remaining future frames.
  buf_map_idx = n_bufs - 1;
  for (int frame = ALTREF_FRAME; frame >= LAST_FRAME; frame--) {
    // Continue if the current ref slot is already full.
    if (remapped_ref_idx[frame - LAST_FRAME] != INVALID_IDX) continue;
    // Find the next unmapped reference buffer.
    for (; buf_map_idx > closest_past_ref; buf_map_idx--) {
      if (!buffer_map[buf_map_idx].used) break;
    }
    if (buf_map_idx < 0) break;
    if (buffer_map[buf_map_idx].used) break;
    add_ref_to_slot(&buffer_map[buf_map_idx], remapped_ref_idx, frame);
  }

  // Fill any slots that are empty (should only happen for the first 7 frames).
  for (int i = 0; i < REF_FRAMES; ++i)
    if (remapped_ref_idx[i] == INVALID_IDX) remapped_ref_idx[i] = 0;
}
1420 | | #endif // CONFIG_FRAME_PARALLEL_ENCODE |
1421 | | |
// Fills remapped_ref_idx[] with the reference-map index for each named
// reference slot (LAST..ALTREF). With CONFIG_FRAME_PARALLEL_ENCODE this
// delegates to the display-order based get_ref_frames(); otherwise the slots
// are assigned from the legacy arf/lst/gld reference stacks, with unused
// slots backfilled by find_unused_ref_frame() or a stack-top fallback.
void av1_get_ref_frames(const RefBufferStack *ref_buffer_stack,
#if CONFIG_FRAME_PARALLEL_ENCODE
                        RefFrameMapPair ref_frame_map_pairs[REF_FRAMES],
                        int cur_frame_disp,
#if CONFIG_FRAME_PARALLEL_ENCODE_2
                        const AV1_COMP *cpi, int gf_index,
                        int is_parallel_encode,
#endif  // CONFIG_FRAME_PARALLEL_ENCODE_2
#endif  // CONFIG_FRAME_PARALLEL_ENCODE
                        int remapped_ref_idx[REF_FRAMES]) {
#if CONFIG_FRAME_PARALLEL_ENCODE
  (void)ref_buffer_stack;
  get_ref_frames(ref_frame_map_pairs,
#if CONFIG_FRAME_PARALLEL_ENCODE_2
                 cpi, gf_index, is_parallel_encode,
#endif  // CONFIG_FRAME_PARALLEL_ENCODE_2
                 cur_frame_disp, remapped_ref_idx);
  return;
#else
  const int *const arf_stack = ref_buffer_stack->arf_stack;
  const int *const lst_stack = ref_buffer_stack->lst_stack;
  const int *const gld_stack = ref_buffer_stack->gld_stack;
  const int arf_stack_size = ref_buffer_stack->arf_stack_size;
  const int lst_stack_size = ref_buffer_stack->lst_stack_size;
  const int gld_stack_size = ref_buffer_stack->gld_stack_size;

  // Initialization
  for (int i = 0; i < REF_FRAMES; ++i) remapped_ref_idx[i] = INVALID_IDX;

  // ARF stack supplies ALTREF (newest), BWDREF (oldest), ALTREF2.
  if (arf_stack_size) {
    remapped_ref_idx[ALTREF_FRAME - LAST_FRAME] = arf_stack[arf_stack_size - 1];

    if (arf_stack_size > 1)
      remapped_ref_idx[BWDREF_FRAME - LAST_FRAME] = arf_stack[0];

    if (arf_stack_size > 2)
      remapped_ref_idx[ALTREF2_FRAME - LAST_FRAME] = arf_stack[1];
  }

  // LAST stack supplies LAST and LAST2.
  if (lst_stack_size) {
    remapped_ref_idx[LAST_FRAME - LAST_FRAME] = lst_stack[0];

    if (lst_stack_size > 1)
      remapped_ref_idx[LAST2_FRAME - LAST_FRAME] = lst_stack[1];
  }

  if (gld_stack_size) {
    remapped_ref_idx[GOLDEN_FRAME - LAST_FRAME] = gld_stack[0];

    // If there are more frames in the golden stack, assign them to BWDREF,
    // ALTREF2, or LAST3.
    if (gld_stack_size > 1) {
      if (arf_stack_size <= 2) {
        if (arf_stack_size <= 1) {
          remapped_ref_idx[BWDREF_FRAME - LAST_FRAME] = gld_stack[1];
          if (gld_stack_size > 2)
            remapped_ref_idx[ALTREF2_FRAME - LAST_FRAME] = gld_stack[2];
        } else {
          remapped_ref_idx[ALTREF2_FRAME - LAST_FRAME] = gld_stack[1];
        }
      } else {
        remapped_ref_idx[LAST3_FRAME - LAST_FRAME] = gld_stack[1];
      }
    }
  }

  // Backfill any still-unmapped slot with a stack entry that no named slot
  // uses yet, searching arf, then gld, then lst stacks.
  for (int idx = ALTREF_FRAME - LAST_FRAME; idx >= 0; --idx) {
    int ref_map_index = remapped_ref_idx[idx];

    if (ref_map_index != INVALID_IDX) continue;

    ref_map_index =
        find_unused_ref_frame(remapped_ref_idx, arf_stack, arf_stack_size);

    if (ref_map_index == INVALID_IDX) {
      ref_map_index =
          find_unused_ref_frame(remapped_ref_idx, gld_stack, gld_stack_size);
    }

    if (ref_map_index == INVALID_IDX) {
      ref_map_index =
          find_unused_ref_frame(remapped_ref_idx, lst_stack, lst_stack_size);
    }

    if (ref_map_index != INVALID_IDX) {
      remapped_ref_idx[idx] = ref_map_index;
    } else if (!gld_stack_size && arf_stack_size) {
      // No unused buffer anywhere: fall back to duplicating a stack top.
      remapped_ref_idx[idx] = ref_buffer_stack->arf_stack[0];
    } else {
      remapped_ref_idx[idx] = ref_buffer_stack->gld_stack[0];
    }
  }
#endif  // CONFIG_FRAME_PARALLEL_ENCODE
}
1516 | | |
// Top-level per-frame encode driver.  Decides what the next coded frame is
// (new source, show-existing, key/inter), fills in EncodeFrameParams, runs
// the encode (directly or via denoise_and_encode()), and then updates
// reference-buffer state and frame flags.  Returns AOM_CODEC_OK on success,
// AOM_CODEC_ERROR on encode failure, and -1 when no frame can be produced
// yet (lookahead still filling, or input exhausted).
int av1_encode_strategy(AV1_COMP *const cpi, size_t *const size,
                        uint8_t *const dest, unsigned int *frame_flags,
                        int64_t *const time_stamp, int64_t *const time_end,
                        const aom_rational64_t *const timestamp_ratio,
                        int *const pop_lookahead, int flush) {
  AV1EncoderConfig *const oxcf = &cpi->oxcf;
  AV1_COMMON *const cm = &cpi->common;
  GF_GROUP *gf_group = &cpi->ppi->gf_group;
  ExternalFlags *const ext_flags = &cpi->ext_flags;
  GFConfig *const gf_cfg = &oxcf->gf_cfg;

  // Per-call working state; zeroed so untouched fields are deterministic.
  EncodeFrameInput frame_input;
  EncodeFrameParams frame_params;
  EncodeFrameResults frame_results;
  memset(&frame_input, 0, sizeof(frame_input));
  memset(&frame_params, 0, sizeof(frame_params));
  memset(&frame_results, 0, sizeof(frame_results));

  // Check if we need to stuff more src frames
  if (flush == 0) {
    int srcbuf_size =
        av1_lookahead_depth(cpi->ppi->lookahead, cpi->compressor_stage);
    int pop_size =
        av1_lookahead_pop_sz(cpi->ppi->lookahead, cpi->compressor_stage);

    // Continue buffering look ahead buffer.
    if (srcbuf_size < pop_size) return -1;
  }

  // Nothing left in the lookahead: if flushing at the end of a first pass,
  // emit the final stats packet exactly once before giving up.
  if (!av1_lookahead_peek(cpi->ppi->lookahead, 0, cpi->compressor_stage)) {
#if !CONFIG_REALTIME_ONLY
    if (flush && oxcf->pass == AOM_RC_FIRST_PASS &&
        !cpi->ppi->twopass.first_pass_done) {
      av1_end_first_pass(cpi); /* get last stats packet */
      cpi->ppi->twopass.first_pass_done = 1;
    }
#endif
    return -1;
  }

  // TODO(sarahparker) finish bit allocation for one pass pyramid
  if (has_no_stats_stage(cpi)) {
    // Without first-pass stats, clamp the GF pyramid height for one-pass use.
    gf_cfg->gf_max_pyr_height =
        AOMMIN(gf_cfg->gf_max_pyr_height, USE_ALTREF_FOR_ONE_PASS);
    gf_cfg->gf_min_pyr_height =
        AOMMIN(gf_cfg->gf_min_pyr_height, gf_cfg->gf_max_pyr_height);
  }

  // Allocation of mi buffers.
  alloc_mb_mode_info_buffers(cpi);

  cpi->skip_tpl_setup_stats = 0;
#if !CONFIG_REALTIME_ONLY
  cpi->twopass_frame.this_frame = NULL;
  const int use_one_pass_rt_params = is_one_pass_rt_params(cpi);
  // Two-pass (stats-consumption) path: derive this frame's parameters from
  // the first-pass statistics.
  if (!use_one_pass_rt_params && !is_stat_generation_stage(cpi)) {
#if CONFIG_COLLECT_COMPONENT_TIMING
    start_timing(cpi, av1_get_second_pass_params_time);
#endif

#if CONFIG_FRAME_PARALLEL_ENCODE
    // Initialise frame_level_rate_correction_factors with value previous
    // to the parallel frames.
    if (cpi->ppi->gf_group.frame_parallel_level[cpi->gf_frame_index] > 0) {
      for (int i = 0; i < RATE_FACTOR_LEVELS; i++) {
        cpi->rc.frame_level_rate_correction_factors[i] =
#if CONFIG_FPMT_TEST
            (cpi->ppi->fpmt_unit_test_cfg == PARALLEL_SIMULATION_ENCODE)
                ? cpi->ppi->p_rc.temp_rate_correction_factors[i]
                :
#endif  // CONFIG_FPMT_TEST
                cpi->ppi->p_rc.rate_correction_factors[i];
      }
    }
    // copy mv_stats from ppi to frame_level cpi.
    cpi->mv_stats = cpi->ppi->mv_stats;
#endif
    av1_get_second_pass_params(cpi, &frame_params, *frame_flags);
#if CONFIG_COLLECT_COMPONENT_TIMING
    end_timing(cpi, av1_get_second_pass_params_time);
#endif
  }
#endif

  // Decide whether this frame is coded as a "show existing frame" (re-show
  // of an already-coded buffer, e.g. an alt-ref overlay).
  if (!is_stat_generation_stage(cpi)) {
    // TODO(jingning): fwd key frame always uses show existing frame?
    if (gf_group->update_type[cpi->gf_frame_index] == OVERLAY_UPDATE &&
        gf_group->refbuf_state[cpi->gf_frame_index] == REFBUF_RESET) {
      frame_params.show_existing_frame = 1;
    } else {
      frame_params.show_existing_frame =
          (cpi->ppi->show_existing_alt_ref &&
           gf_group->update_type[cpi->gf_frame_index] == OVERLAY_UPDATE) ||
          gf_group->update_type[cpi->gf_frame_index] == INTNL_OVERLAY_UPDATE;
    }
    frame_params.show_existing_frame &= allow_show_existing(cpi, *frame_flags);

    // Reset show_existing_alt_ref decision to 0 after it is used.
    if (gf_group->update_type[cpi->gf_frame_index] == OVERLAY_UPDATE) {
      cpi->ppi->show_existing_alt_ref = 0;
    }
  } else {
    frame_params.show_existing_frame = 0;
  }

  // Pick the source frame: a show-existing frame re-uses the head of the
  // lookahead (and is always shown); otherwise choose_frame_source() selects
  // per the GOP structure.
  struct lookahead_entry *source = NULL;
  struct lookahead_entry *last_source = NULL;
  if (frame_params.show_existing_frame) {
    source = av1_lookahead_peek(cpi->ppi->lookahead, 0, cpi->compressor_stage);
    *pop_lookahead = 1;
    frame_params.show_frame = 1;
  } else {
    source = choose_frame_source(cpi, &flush, pop_lookahead, &last_source,
                                 &frame_params);
  }

  if (source == NULL) {  // If no source was found, we can't encode a frame.
#if !CONFIG_REALTIME_ONLY
    if (flush && oxcf->pass == AOM_RC_FIRST_PASS &&
        !cpi->ppi->twopass.first_pass_done) {
      av1_end_first_pass(cpi); /* get last stats packet */
      cpi->ppi->twopass.first_pass_done = 1;
    }
#endif
    return -1;
  }

#if CONFIG_FRAME_PARALLEL_ENCODE
  // reset src_offset to allow actual encode call for this frame to get its
  // source.
  gf_group->src_offset[cpi->gf_frame_index] = 0;
#endif

  // Source may be changed if temporal filtered later.
  frame_input.source = &source->img;
  frame_input.last_source = last_source != NULL ? &last_source->img : NULL;
  frame_input.ts_duration = source->ts_end - source->ts_start;
  // Save unfiltered source. It is used in av1_get_second_pass_params().
  cpi->unfiltered_source = frame_input.source;

  // Report timestamps to the caller and track the earliest start seen.
  *time_stamp = source->ts_start;
  *time_end = source->ts_end;
  if (source->ts_start < cpi->time_stamps.first_ts_start) {
    cpi->time_stamps.first_ts_start = source->ts_start;
    cpi->time_stamps.prev_ts_end = source->ts_start;
  }

  av1_apply_encoding_flags(cpi, source->flags);
  *frame_flags = (source->flags & AOM_EFLAG_FORCE_KF) ? FRAMEFLAGS_KEY : 0;

#if CONFIG_FRAME_PARALLEL_ENCODE && CONFIG_FPMT_TEST
  if (cpi->ppi->fpmt_unit_test_cfg == PARALLEL_SIMULATION_ENCODE) {
    if (cpi->ppi->gf_group.frame_parallel_level[cpi->gf_frame_index] > 0) {
      cpi->framerate = cpi->temp_framerate;
    }
  }
#endif  // CONFIG_FRAME_PARALLEL_ENCODE && CONFIG_FPMT_TEST

  // Shown frames and arf-overlay frames need frame-rate considering
  if (frame_params.show_frame)
    adjust_frame_rate(cpi, source->ts_start, source->ts_end);

  if (!frame_params.show_existing_frame) {
    // Film grain: prefer an explicit table entry for this display interval,
    // else fall back to the sequence-level setting.
    if (cpi->film_grain_table) {
      cm->cur_frame->film_grain_params_present = aom_film_grain_table_lookup(
          cpi->film_grain_table, *time_stamp, *time_end, 0 /* =erase */,
          &cm->film_grain_params);
    } else {
      cm->cur_frame->film_grain_params_present =
          cm->seq_params->film_grain_params_present;
    }
    // only one operating point supported now
    const int64_t pts64 = ticks_to_timebase_units(timestamp_ratio, *time_stamp);
    // Presentation time is carried as uint32_t; out-of-range is an error.
    if (pts64 < 0 || pts64 > UINT32_MAX) return AOM_CODEC_ERROR;
    cm->frame_presentation_time = (uint32_t)pts64;
  }

#if CONFIG_COLLECT_COMPONENT_TIMING
  start_timing(cpi, av1_get_one_pass_rt_params_time);
#endif
#if CONFIG_REALTIME_ONLY
  av1_get_one_pass_rt_params(cpi, &frame_params, *frame_flags);
  if (cpi->oxcf.speed >= 5 && cpi->ppi->number_spatial_layers == 1 &&
      cpi->ppi->number_temporal_layers == 1)
    av1_set_reference_structure_one_pass_rt(cpi, cpi->gf_frame_index == 0);
#else
  if (use_one_pass_rt_params) {
    av1_get_one_pass_rt_params(cpi, &frame_params, *frame_flags);
    if (cpi->oxcf.speed >= 5 && cpi->ppi->number_spatial_layers == 1 &&
        cpi->ppi->number_temporal_layers == 1)
      av1_set_reference_structure_one_pass_rt(cpi, cpi->gf_frame_index == 0);
  }
#endif
#if CONFIG_COLLECT_COMPONENT_TIMING
  end_timing(cpi, av1_get_one_pass_rt_params_time);
#endif

  FRAME_UPDATE_TYPE frame_update_type =
      get_frame_update_type(gf_group, cpi->gf_frame_index);

  if (frame_params.show_existing_frame &&
      frame_params.frame_type != KEY_FRAME) {
    // Force show-existing frames to be INTER, except forward keyframes
    frame_params.frame_type = INTER_FRAME;
  }

  // TODO(david.turner@argondesign.com): Move all the encode strategy
  // (largely near av1_get_compressed_data) in here

  // TODO(david.turner@argondesign.com): Change all the encode strategy to
  // modify frame_params instead of cm or cpi.

  // Per-frame encode speed.  In theory this can vary, but things may have
  // been written assuming speed-level will not change within a sequence, so
  // this parameter should be used with caution.
  frame_params.speed = oxcf->speed;

  // Work out some encoding parameters specific to the pass:
  if (has_no_stats_stage(cpi) && oxcf->q_cfg.aq_mode == CYCLIC_REFRESH_AQ) {
    av1_cyclic_refresh_update_parameters(cpi);
  } else if (is_stat_generation_stage(cpi)) {
    cpi->td.mb.e_mbd.lossless[0] = is_lossless_requested(&oxcf->rc_cfg);
    // Current frame is coded as a key-frame for any of the following cases:
    // 1) First frame of a video
    // 2) For all-intra frame encoding
    // 3) When a key-frame is forced
    const int kf_requested =
        (cm->current_frame.frame_number == 0 ||
         oxcf->kf_cfg.key_freq_max == 0 || (*frame_flags & FRAMEFLAGS_KEY));
    if (kf_requested && frame_update_type != OVERLAY_UPDATE &&
        frame_update_type != INTNL_OVERLAY_UPDATE) {
      frame_params.frame_type = KEY_FRAME;
    } else {
      frame_params.frame_type = INTER_FRAME;
    }
  } else if (is_stat_consumption_stage(cpi)) {
#if CONFIG_MISMATCH_DEBUG
    mismatch_move_frame_idx_w();
#endif
#if TXCOEFF_COST_TIMER
    cm->txcoeff_cost_timer = 0;
    cm->txcoeff_cost_count = 0;
#endif
  }

  if (!is_stat_generation_stage(cpi))
    set_ext_overrides(cm, &frame_params, ext_flags);

  // Shown keyframes and S frames refresh all reference buffers
  const int force_refresh_all =
      ((frame_params.frame_type == KEY_FRAME && frame_params.show_frame) ||
       frame_params.frame_type == S_FRAME) &&
      !frame_params.show_existing_frame;

  av1_configure_buffer_updates(
      cpi, &frame_params.refresh_frame, frame_update_type,
      gf_group->refbuf_state[cpi->gf_frame_index], force_refresh_all);

  // Reference-buffer selection: only done when consuming/producing a real
  // bitstream frame (skipped in the stats-generation first pass).
  if (!is_stat_generation_stage(cpi)) {
    const RefCntBuffer *ref_frames[INTER_REFS_PER_FRAME];
    const YV12_BUFFER_CONFIG *ref_frame_buf[INTER_REFS_PER_FRAME];

#if CONFIG_FRAME_PARALLEL_ENCODE
    RefFrameMapPair ref_frame_map_pairs[REF_FRAMES];
    init_ref_map_pair(cpi, ref_frame_map_pairs);
    const int order_offset = gf_group->arf_src_offset[cpi->gf_frame_index];
    const int cur_frame_disp =
        cpi->common.current_frame.frame_number + order_offset;
#endif  // CONFIG_FRAME_PARALLEL_ENCODE

#if CONFIG_FRAME_PARALLEL_ENCODE
    int get_ref_frames = 0;
#if CONFIG_FPMT_TEST
    get_ref_frames =
        (cpi->ppi->fpmt_unit_test_cfg == PARALLEL_SIMULATION_ENCODE) ? 1 : 0;
#endif  // CONFIG_FPMT_TEST
    if (get_ref_frames ||
        gf_group->frame_parallel_level[cpi->gf_frame_index] == 0) {
#else
    {
#endif  // CONFIG_FRAME_PARALLEL_ENCODE
      // External refresh overrides take priority; otherwise derive the
      // remapped reference indices, or copy SVC-configured ones.
      if (!ext_flags->refresh_frame.update_pending) {
        av1_get_ref_frames(&cpi->ref_buffer_stack,
#if CONFIG_FRAME_PARALLEL_ENCODE
                           ref_frame_map_pairs, cur_frame_disp,
#if CONFIG_FRAME_PARALLEL_ENCODE_2
                           cpi, cpi->gf_frame_index, 1,
#endif  // CONFIG_FRAME_PARALLEL_ENCODE_2
#endif  // CONFIG_FRAME_PARALLEL_ENCODE
                           cm->remapped_ref_idx);
      } else if (cpi->svc.set_ref_frame_config) {
        for (unsigned int i = 0; i < INTER_REFS_PER_FRAME; i++)
          cm->remapped_ref_idx[i] = cpi->svc.ref_idx[i];
      }
    }

    // Get the reference frames
    for (int i = 0; i < INTER_REFS_PER_FRAME; ++i) {
      ref_frames[i] = get_ref_frame_buf(cm, ref_frame_priority_order[i]);
      ref_frame_buf[i] = ref_frames[i] != NULL ? &ref_frames[i]->buf : NULL;
    }

    // Work out which reference frame slots may be used.
    frame_params.ref_frame_flags =
        get_ref_frame_flags(&cpi->sf, is_one_pass_rt_params(cpi), ref_frame_buf,
                            ext_flags->ref_frame_flags);

#if CONFIG_FRAME_PARALLEL_ENCODE
    // Set primary_ref_frame of non-reference frames as PRIMARY_REF_NONE.
    if (cpi->ppi->gf_group.is_frame_non_ref[cpi->gf_frame_index]) {
      frame_params.primary_ref_frame = PRIMARY_REF_NONE;
    } else {
      frame_params.primary_ref_frame =
          choose_primary_ref_frame(cpi, &frame_params);
    }
#else
    frame_params.primary_ref_frame =
        choose_primary_ref_frame(cpi, &frame_params);
#endif  // CONFIG_FRAME_PARALLEL_ENCODE

    frame_params.order_offset = gf_group->arf_src_offset[cpi->gf_frame_index];

#if CONFIG_FRAME_PARALLEL_ENCODE
#if CONFIG_FRAME_PARALLEL_ENCODE_2
    // Call av1_get_refresh_frame_flags() if refresh index not available.
    if (!cpi->refresh_idx_available) {
#endif
#endif
      frame_params.refresh_frame_flags = av1_get_refresh_frame_flags(
          cpi, &frame_params, frame_update_type, cpi->gf_frame_index,
#if CONFIG_FRAME_PARALLEL_ENCODE
          cur_frame_disp, ref_frame_map_pairs,
#endif  // CONFIG_FRAME_PARALLEL_ENCODE
          &cpi->ref_buffer_stack);
#if CONFIG_FRAME_PARALLEL_ENCODE
#if CONFIG_FRAME_PARALLEL_ENCODE_2
    } else {
      // A refresh index was pre-assigned; refresh exactly that slot.
      assert(cpi->ref_refresh_index != INVALID_IDX);
      frame_params.refresh_frame_flags = (1 << cpi->ref_refresh_index);
    }
    // NOTE(review): the two trailing comments below were previously swapped;
    // the inner #endif closes CONFIG_FRAME_PARALLEL_ENCODE_2.
#endif  // CONFIG_FRAME_PARALLEL_ENCODE_2
#endif  // CONFIG_FRAME_PARALLEL_ENCODE

#if CONFIG_FRAME_PARALLEL_ENCODE
    // Make the frames marked as is_frame_non_ref to non-reference frames.
    if (gf_group->is_frame_non_ref[cpi->gf_frame_index])
      frame_params.refresh_frame_flags = 0;
#endif  // CONFIG_FRAME_PARALLEL_ENCODE

#if CONFIG_FRAME_PARALLEL_ENCODE
    frame_params.existing_fb_idx_to_show = INVALID_IDX;
    // Find the frame buffer to show based on display order.
    if (frame_params.show_existing_frame) {
      for (int frame = 0; frame < REF_FRAMES; frame++) {
        const RefCntBuffer *const buf = cm->ref_frame_map[frame];
        if (buf == NULL) continue;
        const int frame_order = (int)buf->display_order_hint;
        if (frame_order == cur_frame_disp)
          frame_params.existing_fb_idx_to_show = frame;
      }
    }
#else
    // Non-parallel path: an internal overlay re-shows BWDREF, a regular
    // overlay re-shows ALTREF.
    frame_params.existing_fb_idx_to_show =
        frame_params.show_existing_frame
            ? (frame_update_type == INTNL_OVERLAY_UPDATE
                   ? get_ref_frame_map_idx(cm, BWDREF_FRAME)
                   : get_ref_frame_map_idx(cm, ALTREF_FRAME))
            : INVALID_IDX;
#endif  // CONFIG_FRAME_PARALLEL_ENCODE
  }

  // The way frame_params->remapped_ref_idx is setup is a placeholder.
  // Currently, reference buffer assignment is done by update_ref_frame_map()
  // which is called by high-level strategy AFTER encoding a frame.  It
  // modifies cm->remapped_ref_idx.  If you want to use an alternative method
  // to determine reference buffer assignment, just put your assignments into
  // frame_params->remapped_ref_idx here and they will be used when encoding
  // this frame.  If frame_params->remapped_ref_idx is setup independently of
  // cm->remapped_ref_idx then update_ref_frame_map() will have no effect.
  memcpy(frame_params.remapped_ref_idx, cm->remapped_ref_idx,
         REF_FRAMES * sizeof(*cm->remapped_ref_idx));

  cpi->td.mb.delta_qindex = 0;

  if (!frame_params.show_existing_frame) {
    cm->quant_params.using_qmatrix = oxcf->q_cfg.using_qm;
  }

  // Run the encode: low-latency real-time frames go straight to av1_encode();
  // otherwise denoise_and_encode() may temporally filter the source first.
#if CONFIG_REALTIME_ONLY
  if (av1_encode(cpi, dest, &frame_input, &frame_params, &frame_results) !=
      AOM_CODEC_OK) {
    return AOM_CODEC_ERROR;
  }
#else
  if (has_no_stats_stage(cpi) && oxcf->mode == REALTIME &&
      gf_cfg->lag_in_frames == 0) {
    if (av1_encode(cpi, dest, &frame_input, &frame_params, &frame_results) !=
        AOM_CODEC_OK) {
      return AOM_CODEC_ERROR;
    }
  } else if (denoise_and_encode(cpi, dest, &frame_input, &frame_params,
                                &frame_results) != AOM_CODEC_OK) {
    return AOM_CODEC_ERROR;
  }
#endif  // CONFIG_REALTIME_ONLY

  // As the frame_update_type can get modified as part of
  // av1_adjust_gf_refresh_qp_one_pass_rt
  frame_update_type = get_frame_update_type(gf_group, cpi->gf_frame_index);
  if (!is_stat_generation_stage(cpi)) {
    // First pass doesn't modify reference buffer assignment or produce frame
    // flags
    update_frame_flags(&cpi->common, &cpi->refresh_frame, frame_flags);
    set_additional_frame_flags(cm, frame_flags);
#if !CONFIG_FRAME_PARALLEL_ENCODE
    if (!ext_flags->refresh_frame.update_pending) {
      int ref_map_index =
          av1_get_refresh_ref_frame_map(cm->current_frame.refresh_frame_flags);
      av1_update_ref_frame_map(cpi, frame_update_type,
                               gf_group->refbuf_state[cpi->gf_frame_index],
                               ref_map_index, &cpi->ref_buffer_stack);
    }
#endif  // CONFIG_FRAME_PARALLEL_ENCODE
  }

#if !CONFIG_REALTIME_ONLY
#if TXCOEFF_COST_TIMER
  if (!is_stat_generation_stage(cpi)) {
    cm->cum_txcoeff_cost_timer += cm->txcoeff_cost_timer;
    fprintf(stderr,
            "\ntxb coeff cost block number: %ld, frame time: %ld, cum time %ld "
            "in us\n",
            cm->txcoeff_cost_count, cm->txcoeff_cost_timer,
            cm->cum_txcoeff_cost_timer);
  }
#endif
#endif  // !CONFIG_REALTIME_ONLY

#if CONFIG_TUNE_VMAF
  if (!is_stat_generation_stage(cpi) &&
      (oxcf->tune_cfg.tuning >= AOM_TUNE_VMAF_WITH_PREPROCESSING &&
       oxcf->tune_cfg.tuning <= AOM_TUNE_VMAF_NEG_MAX_GAIN)) {
    av1_update_vmaf_curve(cpi);
  }
#endif

  // Unpack frame_results:
  *size = frame_results.size;

  // Leave a signal for a higher level caller about if this frame is droppable
  if (*size > 0) {
    cpi->droppable = is_frame_droppable(&cpi->svc, &ext_flags->refresh_frame);
  }

  return AOM_CODEC_OK;
}