/src/aom/av1/encoder/aq_cyclicrefresh.c
Line | Count | Source |
1 | | /* |
2 | | * Copyright (c) 2016, Alliance for Open Media. All rights reserved |
3 | | * |
4 | | * This source code is subject to the terms of the BSD 2 Clause License and |
5 | | * the Alliance for Open Media Patent License 1.0. If the BSD 2 Clause License |
6 | | * was not distributed with this source code in the LICENSE file, you can |
7 | | * obtain it at www.aomedia.org/license/software. If the Alliance for Open |
8 | | * Media Patent License 1.0 was not distributed with this source code in the |
9 | | * PATENTS file, you can obtain it at www.aomedia.org/license/patent. |
10 | | */ |
11 | | |
12 | | #include <limits.h> |
13 | | #include <math.h> |
14 | | |
15 | | #include "av1/common/pred_common.h" |
16 | | #include "av1/common/seg_common.h" |
17 | | #include "av1/encoder/aq_cyclicrefresh.h" |
18 | | #include "av1/encoder/ratectrl.h" |
19 | | #include "av1/encoder/segmentation.h" |
20 | | #include "av1/encoder/tokenize.h" |
21 | | #include "aom_dsp/aom_dsp_common.h" |
22 | | |
23 | 0 | CYCLIC_REFRESH *av1_cyclic_refresh_alloc(int mi_rows, int mi_cols) { |
24 | 0 | CYCLIC_REFRESH *const cr = aom_calloc(1, sizeof(*cr)); |
25 | 0 | if (cr == NULL) return NULL; |
26 | | |
27 | 0 | cr->map = aom_calloc(mi_rows * mi_cols, sizeof(*cr->map)); |
28 | 0 | if (cr->map == NULL) { |
29 | 0 | av1_cyclic_refresh_free(cr); |
30 | 0 | return NULL; |
31 | 0 | } |
32 | 0 | return cr; |
33 | 0 | } |
34 | | |
35 | 0 | void av1_cyclic_refresh_free(CYCLIC_REFRESH *cr) { |
36 | 0 | if (cr != NULL) { |
37 | 0 | aom_free(cr->map); |
38 | 0 | aom_free(cr); |
39 | 0 | } |
40 | 0 | } |
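The pair of functions above establishes a simple ownership contract: the CYCLIC_REFRESH struct owns its map array, and av1_cyclic_refresh_free() accepts NULL as well as a partially constructed object, which is why a failed map allocation can be unwound through it. A minimal standalone sketch of the same pattern, using plain calloc/free and a hypothetical refresh_state type rather than the real encoder structs:

    #include <stdlib.h>

    /* Stand-in for CYCLIC_REFRESH: the struct owns its map. */
    typedef struct { signed char *map; } refresh_state;

    static void refresh_free(refresh_state *rs) {
      if (rs != NULL) {
        free(rs->map);  /* free(NULL) is a no-op, so partial objects are fine */
        free(rs);
      }
    }

    static refresh_state *refresh_alloc(int mi_rows, int mi_cols) {
      refresh_state *rs = calloc(1, sizeof(*rs));
      if (rs == NULL) return NULL;
      rs->map = calloc((size_t)mi_rows * (size_t)mi_cols, sizeof(*rs->map));
      if (rs->map == NULL) {
        refresh_free(rs);  /* unwind through the same free path */
        return NULL;
      }
      return rs;
    }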
41 | | |
42 | | // Check if this coding block, of size bsize, should be considered for |
43 | | // refresh (lower-qp coding). The decision can be based on various factors, |
44 | | // such as the size of the coding block (blocks below the minimum size are |
45 | | // rejected), the coding mode, and the rate/distortion. |
46 | | static int candidate_refresh_aq(const CYCLIC_REFRESH *cr, |
47 | | const MB_MODE_INFO *mbmi, int64_t rate, |
48 | 0 | int64_t dist, int bsize, int noise_level) { |
49 | 0 | MV mv = mbmi->mv[0].as_mv; |
50 | 0 | int is_compound = has_second_ref(mbmi); |
51 | | // Reject the block for lower-qp coding for non-compound mode if |
52 | | // projected distortion is above the threshold, and any of the following |
53 | | // is true: |
54 | | // 1) mode uses large mv |
55 | | // 2) mode is an intra-mode |
56 | | // Otherwise accept for refresh. |
57 | 0 | if (!is_compound && dist > cr->thresh_dist_sb && |
58 | 0 | (mv.row > cr->motion_thresh || mv.row < -cr->motion_thresh || |
59 | 0 | mv.col > cr->motion_thresh || mv.col < -cr->motion_thresh || |
60 | 0 | !is_inter_block(mbmi))) |
61 | 0 | return CR_SEGMENT_ID_BASE; |
62 | 0 | else if ((is_compound && noise_level < kMedium) || |
63 | 0 | (bsize >= BLOCK_16X16 && rate < cr->thresh_rate_sb && |
64 | 0 | is_inter_block(mbmi) && mbmi->mv[0].as_int == 0 && |
65 | 0 | cr->rate_boost_fac > 10)) |
66 | | // More aggressive delta-q for bigger blocks with zero motion. |
67 | 0 | return CR_SEGMENT_ID_BOOST2; |
68 | 0 | else |
69 | 0 | return CR_SEGMENT_ID_BOOST1; |
70 | 0 | } |
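In effect, candidate_refresh_aq() maps each block to one of three segments: BASE when a non-compound block has high projected distortion together with a large motion vector or an intra mode, BOOST2 when the block is a low-noise compound block or a large zero-motion inter block with low rate (and rate_boost_fac permits the extra boost), and BOOST1 otherwise. A condensed standalone sketch of that three-way decision, with plain flags standing in for the MB_MODE_INFO and threshold queries (the SEG_* names and values are illustrative, not the real CR_SEGMENT_ID_* constants):

    /* Hypothetical stand-ins for CR_SEGMENT_ID_BASE/BOOST1/BOOST2. */
    enum { SEG_BASE = 0, SEG_BOOST1 = 1, SEG_BOOST2 = 2 };

    static int classify_block(int is_compound, int high_dist, int large_mv,
                              int is_intra, int low_noise,
                              int big_zero_mv_low_rate) {
      if (!is_compound && high_dist && (large_mv || is_intra)) return SEG_BASE;
      if ((is_compound && low_noise) || big_zero_mv_low_rate) return SEG_BOOST2;
      return SEG_BOOST1;
    }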
71 | | |
72 | | // Compute delta-q for the segment. |
73 | 0 | static int compute_deltaq(const AV1_COMP *cpi, int q, double rate_factor) { |
74 | 0 | const CYCLIC_REFRESH *const cr = cpi->cyclic_refresh; |
75 | 0 | const RATE_CONTROL *const rc = &cpi->rc; |
76 | 0 | int deltaq = av1_compute_qdelta_by_rate( |
77 | 0 | rc, cpi->common.current_frame.frame_type, q, rate_factor, |
78 | 0 | cpi->is_screen_content_type, cpi->common.seq_params->bit_depth); |
79 | 0 | if ((-deltaq) > cr->max_qdelta_perc * q / 100) { |
80 | 0 | deltaq = -cr->max_qdelta_perc * q / 100; |
81 | 0 | } |
82 | 0 | return deltaq; |
83 | 0 | } |
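compute_deltaq() turns the requested rate ratio into a (negative) qindex delta and then caps its magnitude at max_qdelta_perc percent of the current q. A small standalone sketch of just that cap, with illustrative numbers rather than values from a real encode:

    #include <stdio.h>

    /* Cap a negative qindex delta at -(max_qdelta_perc percent of q). */
    static int clamp_deltaq(int deltaq, int q, int max_qdelta_perc) {
      if (-deltaq > max_qdelta_perc * q / 100) deltaq = -max_qdelta_perc * q / 100;
      return deltaq;
    }

    int main(void) {
      /* With q = 120 and max_qdelta_perc = 60, a raw delta of -90 is capped to -72. */
      printf("%d\n", clamp_deltaq(-90, 120, 60)); /* prints -72 */
      return 0;
    }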
84 | | |
85 | | int av1_cyclic_refresh_estimate_bits_at_q(const AV1_COMP *cpi, |
86 | 0 | double correction_factor) { |
87 | 0 | const AV1_COMMON *const cm = &cpi->common; |
88 | 0 | const FRAME_TYPE frame_type = cm->current_frame.frame_type; |
89 | 0 | const int base_qindex = cm->quant_params.base_qindex; |
90 | 0 | const int bit_depth = cm->seq_params->bit_depth; |
91 | 0 | const CYCLIC_REFRESH *const cr = cpi->cyclic_refresh; |
92 | 0 | const int mbs = cm->mi_params.MBs; |
93 | 0 | const int num4x4bl = mbs << 4; |
94 | | // Weight for non-base segments: use actual number of blocks refreshed in |
95 | | // previous/just encoded frame. Note number of blocks here is in 4x4 units. |
96 | 0 | double weight_segment1 = (double)cr->actual_num_seg1_blocks / num4x4bl; |
97 | 0 | double weight_segment2 = (double)cr->actual_num_seg2_blocks / num4x4bl; |
98 | 0 | if (cpi->rc.rtc_external_ratectrl) { |
99 | 0 | weight_segment1 = (double)(cr->percent_refresh * cm->mi_params.mi_rows * |
100 | 0 | cm->mi_params.mi_cols / 100) / |
101 | 0 | num4x4bl; |
102 | 0 | weight_segment2 = 0; |
103 | 0 | } |
104 | | // Take segment weighted average for estimated bits. |
105 | 0 | const int estimated_bits = |
106 | 0 | (int)((1.0 - weight_segment1 - weight_segment2) * |
107 | 0 | av1_estimate_bits_at_q(frame_type, base_qindex, mbs, |
108 | 0 | correction_factor, bit_depth, |
109 | 0 | cpi->is_screen_content_type) + |
110 | 0 | weight_segment1 * av1_estimate_bits_at_q( |
111 | 0 | frame_type, base_qindex + cr->qindex_delta[1], |
112 | 0 | mbs, correction_factor, bit_depth, |
113 | 0 | cpi->is_screen_content_type) + |
114 | 0 | weight_segment2 * av1_estimate_bits_at_q( |
115 | 0 | frame_type, base_qindex + cr->qindex_delta[2], |
116 | 0 | mbs, correction_factor, bit_depth, |
117 | 0 | cpi->is_screen_content_type)); |
118 | 0 | return estimated_bits; |
119 | 0 | } |
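The returned estimate is a convex combination of three per-segment bit estimates, weighted by the fraction of 4x4 blocks that the BOOST1 and BOOST2 segments covered in the previously encoded frame (or by the nominal refresh fraction when an external rate controller drives the encoder). In sketch form, with bits_* standing in for the three av1_estimate_bits_at_q() calls at base_qindex, base + qindex_delta[1], and base + qindex_delta[2]:

    /* Sketch: segment-weighted bit estimate. w1/w2 are the fractions of 4x4
     * blocks in segments BOOST1/BOOST2. */
    static double weighted_bits(double w1, double w2, double bits_base,
                                double bits_seg1, double bits_seg2) {
      return (1.0 - w1 - w2) * bits_base + w1 * bits_seg1 + w2 * bits_seg2;
    }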
120 | | |
121 | | int av1_cyclic_refresh_rc_bits_per_mb(const AV1_COMP *cpi, int i, |
122 | 0 | double correction_factor) { |
123 | 0 | const AV1_COMMON *const cm = &cpi->common; |
124 | 0 | CYCLIC_REFRESH *const cr = cpi->cyclic_refresh; |
125 | 0 | int bits_per_mb; |
126 | 0 | int num4x4bl = cm->mi_params.MBs << 4; |
127 | | // Weight for segment prior to encoding: take the average of the target |
128 | | // number for the frame to be encoded and the actual from the previous frame. |
129 | 0 | double weight_segment = |
130 | 0 | (double)((cr->target_num_seg_blocks + cr->actual_num_seg1_blocks + |
131 | 0 | cr->actual_num_seg2_blocks) >> |
132 | 0 | 1) / |
133 | 0 | num4x4bl; |
134 | 0 | if (cpi->rc.rtc_external_ratectrl) { |
135 | 0 | weight_segment = (double)((cr->target_num_seg_blocks + |
136 | 0 | cr->percent_refresh * cm->mi_params.mi_rows * |
137 | 0 | cm->mi_params.mi_cols / 100) >> |
138 | 0 | 1) / |
139 | 0 | num4x4bl; |
140 | 0 | } |
141 | | // Compute delta-q corresponding to qindex i. |
142 | 0 | int deltaq = compute_deltaq(cpi, i, cr->rate_ratio_qdelta); |
143 | | // Take segment weighted average for bits per mb. |
144 | 0 | bits_per_mb = |
145 | 0 | (int)((1.0 - weight_segment) * |
146 | 0 | av1_rc_bits_per_mb(cm->current_frame.frame_type, i, |
147 | 0 | correction_factor, cm->seq_params->bit_depth, |
148 | 0 | cpi->is_screen_content_type) + |
149 | 0 | weight_segment * av1_rc_bits_per_mb(cm->current_frame.frame_type, |
150 | 0 | i + deltaq, correction_factor, |
151 | 0 | cm->seq_params->bit_depth, |
152 | 0 | cpi->is_screen_content_type)); |
153 | 0 | return bits_per_mb; |
154 | 0 | } |
155 | | |
156 | | void av1_cyclic_reset_segment_skip(const AV1_COMP *cpi, MACROBLOCK *const x, |
157 | 0 | int mi_row, int mi_col, BLOCK_SIZE bsize) { |
158 | 0 | int cdf_num; |
159 | 0 | const AV1_COMMON *const cm = &cpi->common; |
160 | 0 | MACROBLOCKD *const xd = &x->e_mbd; |
161 | 0 | MB_MODE_INFO *const mbmi = xd->mi[0]; |
162 | 0 | const int prev_segment_id = mbmi->segment_id; |
163 | 0 | CYCLIC_REFRESH *const cr = cpi->cyclic_refresh; |
164 | 0 | const int bw = mi_size_wide[bsize]; |
165 | 0 | const int bh = mi_size_high[bsize]; |
166 | 0 | const int xmis = AOMMIN(cm->mi_params.mi_cols - mi_col, bw); |
167 | 0 | const int ymis = AOMMIN(cm->mi_params.mi_rows - mi_row, bh); |
168 | 0 | if (!cr->skip_over4x4) { |
169 | 0 | mbmi->segment_id = av1_get_spatial_seg_pred(cm, xd, &cdf_num); |
170 | 0 | if (prev_segment_id != mbmi->segment_id) { |
171 | 0 | const int block_index = mi_row * cm->mi_params.mi_cols + mi_col; |
172 | 0 | for (int mi_y = 0; mi_y < ymis; mi_y++) { |
173 | 0 | for (int mi_x = 0; mi_x < xmis; mi_x++) { |
174 | 0 | const int map_offset = |
175 | 0 | block_index + mi_y * cm->mi_params.mi_cols + mi_x; |
176 | 0 | cr->map[map_offset] = 0; |
177 | 0 | cpi->enc_seg.map[map_offset] = mbmi->segment_id; |
178 | 0 | cm->cur_frame->seg_map[map_offset] = mbmi->segment_id; |
179 | 0 | } |
180 | 0 | } |
181 | 0 | } |
182 | 0 | } |
183 | 0 | if (cyclic_refresh_segment_id(prev_segment_id) == CR_SEGMENT_ID_BOOST1) |
184 | 0 | x->actual_num_seg1_blocks -= xmis * ymis; |
185 | 0 | else if (cyclic_refresh_segment_id(prev_segment_id) == CR_SEGMENT_ID_BOOST2) |
186 | 0 | x->actual_num_seg2_blocks -= xmis * ymis; |
187 | 0 | } |
188 | | |
189 | | void av1_cyclic_refresh_update_segment(const AV1_COMP *cpi, MACROBLOCK *const x, |
190 | | int mi_row, int mi_col, BLOCK_SIZE bsize, |
191 | | int64_t rate, int64_t dist, int skip, |
192 | 0 | RUN_TYPE dry_run) { |
193 | 0 | const AV1_COMMON *const cm = &cpi->common; |
194 | 0 | MACROBLOCKD *const xd = &x->e_mbd; |
195 | 0 | MB_MODE_INFO *const mbmi = xd->mi[0]; |
196 | 0 | CYCLIC_REFRESH *const cr = cpi->cyclic_refresh; |
197 | 0 | const int bw = mi_size_wide[bsize]; |
198 | 0 | const int bh = mi_size_high[bsize]; |
199 | 0 | const int xmis = AOMMIN(cm->mi_params.mi_cols - mi_col, bw); |
200 | 0 | const int ymis = AOMMIN(cm->mi_params.mi_rows - mi_row, bh); |
201 | 0 | const int block_index = mi_row * cm->mi_params.mi_cols + mi_col; |
202 | 0 | int noise_level = 0; |
203 | 0 | if (cpi->noise_estimate.enabled) noise_level = cpi->noise_estimate.level; |
204 | 0 | const int refresh_this_block = |
205 | 0 | candidate_refresh_aq(cr, mbmi, rate, dist, bsize, noise_level); |
206 | 0 | int sh = cpi->cyclic_refresh->skip_over4x4 ? 2 : 1; |
207 | | // Default is to not update the refresh map. |
208 | 0 | int new_map_value = cr->map[block_index]; |
209 | | |
210 | | // If this block is labeled for refresh, check if we should reset the |
211 | | // segment_id. |
212 | 0 | if (cyclic_refresh_segment_id_boosted(mbmi->segment_id)) { |
213 | 0 | mbmi->segment_id = refresh_this_block; |
214 | | // Reset segment_id if the block will be skipped. |
215 | 0 | if (skip) mbmi->segment_id = CR_SEGMENT_ID_BASE; |
216 | 0 | } |
217 | | |
218 | | // Update the cyclic refresh map, to be used for setting segmentation map |
219 | | // for the next frame. If the block will be refreshed this frame, mark it |
220 | | // as clean. The magnitude of the negative value determines how long we |
221 | | // wait before considering the block for refresh again. |
222 | 0 | if (cyclic_refresh_segment_id_boosted(mbmi->segment_id)) { |
223 | 0 | new_map_value = -cr->time_for_refresh; |
224 | 0 | } else if (refresh_this_block) { |
225 | | // Else if it is accepted as candidate for refresh, and has not already |
226 | | // been refreshed (marked as 1) then mark it as a candidate for cleanup |
227 | | // for future time (marked as 0), otherwise don't update it. |
228 | 0 | if (cr->map[block_index] == 1) new_map_value = 0; |
229 | 0 | } else { |
230 | | // Leave it marked as block that is not candidate for refresh. |
231 | 0 | new_map_value = 1; |
232 | 0 | } |
233 | | |
234 | | // Update entries in the cyclic refresh map with new_map_value, and |
235 | | // copy mbmi->segment_id into global segmentation map. |
236 | 0 | for (int mi_y = 0; mi_y < ymis; mi_y += sh) { |
237 | 0 | for (int mi_x = 0; mi_x < xmis; mi_x += sh) { |
238 | 0 | const int map_offset = block_index + mi_y * cm->mi_params.mi_cols + mi_x; |
239 | 0 | cr->map[map_offset] = new_map_value; |
240 | 0 | cpi->enc_seg.map[map_offset] = mbmi->segment_id; |
241 | 0 | cm->cur_frame->seg_map[map_offset] = mbmi->segment_id; |
242 | 0 | } |
243 | 0 | } |
244 | | // Accumulate cyclic refresh update counters. |
245 | 0 | if (!dry_run) { |
246 | 0 | if (cyclic_refresh_segment_id(mbmi->segment_id) == CR_SEGMENT_ID_BOOST1) |
247 | 0 | x->actual_num_seg1_blocks += xmis * ymis; |
248 | 0 | else if (cyclic_refresh_segment_id(mbmi->segment_id) == |
249 | 0 | CR_SEGMENT_ID_BOOST2) |
250 | 0 | x->actual_num_seg2_blocks += xmis * ymis; |
251 | 0 | } |
252 | 0 | } |
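The values written to cr->map encode a simple per-block lifecycle: a block refreshed this frame is pushed down to -time_for_refresh (and counted back up toward 0 on later passes through the map), 0 marks a candidate for a future refresh, and 1 parks a block that was rejected. A compact sketch of that update rule, with plain ints standing in for the encoder state:

    /* Sketch of the per-block refresh-map update. Map values:
     *   < 0 : recently refreshed, incremented toward 0 on later refresh passes
     *     0 : candidate for refresh on a future pass
     *     1 : not a candidate for refresh                                     */
    static int next_map_value(int cur_value, int segment_is_boosted,
                              int refresh_candidate, int time_for_refresh) {
      if (segment_is_boosted) return -time_for_refresh; /* refreshed this frame */
      if (refresh_candidate) return (cur_value == 1) ? 0 : cur_value;
      return 1;                                         /* rejected for refresh */
    }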
253 | | |
254 | | // Initializes counters used for cyclic refresh. |
255 | 0 | void av1_init_cyclic_refresh_counters(MACROBLOCK *const x) { |
256 | 0 | x->actual_num_seg1_blocks = 0; |
257 | 0 | x->actual_num_seg2_blocks = 0; |
258 | 0 | x->cnt_zeromv = 0; |
259 | 0 | } |
260 | | |
261 | | // Accumulate cyclic refresh counters. |
262 | | void av1_accumulate_cyclic_refresh_counters( |
263 | 0 | CYCLIC_REFRESH *const cyclic_refresh, const MACROBLOCK *const x) { |
264 | 0 | cyclic_refresh->actual_num_seg1_blocks += x->actual_num_seg1_blocks; |
265 | 0 | cyclic_refresh->actual_num_seg2_blocks += x->actual_num_seg2_blocks; |
266 | 0 | cyclic_refresh->cnt_zeromv += x->cnt_zeromv; |
267 | 0 | } |
268 | | |
269 | 0 | void av1_cyclic_refresh_postencode(AV1_COMP *const cpi) { |
270 | 0 | AV1_COMMON *const cm = &cpi->common; |
271 | 0 | const CommonModeInfoParams *const mi_params = &cm->mi_params; |
272 | 0 | CYCLIC_REFRESH *const cr = cpi->cyclic_refresh; |
273 | 0 | RATE_CONTROL *const rc = &cpi->rc; |
274 | 0 | SVC *const svc = &cpi->svc; |
275 | 0 | const int avg_cnt_zeromv = |
276 | 0 | 100 * cr->cnt_zeromv / (mi_params->mi_rows * mi_params->mi_cols); |
277 | |
278 | 0 | if (!cpi->ppi->use_svc || |
279 | 0 | (cpi->ppi->use_svc && |
280 | 0 | !cpi->svc.layer_context[cpi->svc.temporal_layer_id].is_key_frame && |
281 | 0 | cpi->svc.spatial_layer_id == cpi->svc.number_spatial_layers - 1)) { |
282 | 0 | rc->avg_frame_low_motion = |
283 | 0 | (rc->avg_frame_low_motion == 0) |
284 | 0 | ? avg_cnt_zeromv |
285 | 0 | : (3 * rc->avg_frame_low_motion + avg_cnt_zeromv) / 4; |
286 | | // For SVC: set avg_frame_low_motion (only computed on top spatial layer) |
287 | | // to all lower spatial layers. |
288 | 0 | if (cpi->ppi->use_svc && |
289 | 0 | svc->spatial_layer_id == svc->number_spatial_layers - 1) { |
290 | 0 | for (int i = 0; i < svc->number_spatial_layers - 1; ++i) { |
291 | 0 | const int layer = LAYER_IDS_TO_IDX(i, svc->temporal_layer_id, |
292 | 0 | svc->number_temporal_layers); |
293 | 0 | LAYER_CONTEXT *const lc = &svc->layer_context[layer]; |
294 | 0 | RATE_CONTROL *const lrc = &lc->rc; |
295 | 0 | lrc->avg_frame_low_motion = rc->avg_frame_low_motion; |
296 | 0 | } |
297 | 0 | } |
298 | 0 | } |
299 | 0 | } |
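rc->avg_frame_low_motion is a running average of the per-frame percentage of zero-motion blocks: it is seeded with the first observation and otherwise updated with a 3/4 old, 1/4 new filter. A tiny standalone sketch of that update:

    /* Sketch: 3/4 old + 1/4 new running average of the zero-motion percentage. */
    static int update_low_motion(int avg_frame_low_motion, int avg_cnt_zeromv) {
      return (avg_frame_low_motion == 0)
                 ? avg_cnt_zeromv
                 : (3 * avg_frame_low_motion + avg_cnt_zeromv) / 4;
    }
    /* e.g. update_low_motion(60, 20) == 50, update_low_motion(0, 80) == 80. */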
300 | | |
301 | 0 | void av1_cyclic_refresh_set_golden_update(AV1_COMP *const cpi) { |
302 | 0 | RATE_CONTROL *const rc = &cpi->rc; |
303 | 0 | PRIMARY_RATE_CONTROL *const p_rc = &cpi->ppi->p_rc; |
304 | 0 | CYCLIC_REFRESH *const cr = cpi->cyclic_refresh; |
305 | | // Set minimum gf_interval for GF update to a multiple of the refresh period, |
306 | | // with some max limit. Depending on past encoding stats, GF flag may be |
307 | | // reset and update may not occur until next baseline_gf_interval. |
308 | 0 | const int gf_length_mult[2] = { 8, 4 }; |
309 | 0 | if (cr->percent_refresh > 0) |
310 | 0 | p_rc->baseline_gf_interval = |
311 | 0 | AOMMIN(gf_length_mult[cpi->sf.rt_sf.gf_length_lvl] * |
312 | 0 | (100 / cr->percent_refresh), |
313 | 0 | MAX_GF_INTERVAL_RT); |
314 | 0 | else |
315 | 0 | p_rc->baseline_gf_interval = FIXED_GF_INTERVAL_RT; |
316 | 0 | if (rc->avg_frame_low_motion && rc->avg_frame_low_motion < 40) |
317 | 0 | p_rc->baseline_gf_interval = 16; |
318 | 0 | } |
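With cyclic refresh active, the golden-frame interval is tied to the refresh period: gf_length_mult[gf_length_lvl] times (100 / percent_refresh) frames, capped by MAX_GF_INTERVAL_RT, and overridden down to 16 when recent frames show little low motion. A small sketch of that computation; the cap is left as a parameter since its value is defined elsewhere:

    /* Sketch: GF interval as a multiple of the refresh period, with a cap.
     * max_interval stands in for MAX_GF_INTERVAL_RT (value not assumed here). */
    static int gf_interval(int percent_refresh, int length_mult, int max_interval) {
      const int period = 100 / percent_refresh;  /* frames per refresh cycle  */
      const int interval = length_mult * period; /* e.g. 8 * (100/10) = 80    */
      return interval < max_interval ? interval : max_interval;
    }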
319 | | |
320 | | // Update the segmentation map, and related quantities: cyclic refresh map, |
321 | | // refresh sb_index, and target number of blocks to be refreshed. |
322 | | // The map is set to either 0/CR_SEGMENT_ID_BASE (no refresh) or to |
323 | | // 1/CR_SEGMENT_ID_BOOST1 (refresh) for each superblock. |
324 | | // Blocks labeled as BOOST1 may later get set to BOOST2 (during the |
325 | | // encoding of the superblock). |
326 | 0 | static void cyclic_refresh_update_map(AV1_COMP *const cpi) { |
327 | 0 | AV1_COMMON *const cm = &cpi->common; |
328 | 0 | const CommonModeInfoParams *const mi_params = &cm->mi_params; |
329 | 0 | CYCLIC_REFRESH *const cr = cpi->cyclic_refresh; |
330 | 0 | unsigned char *const seg_map = cpi->enc_seg.map; |
331 | 0 | int i, block_count, bl_index, sb_rows, sb_cols, sbs_in_frame; |
332 | 0 | int xmis, ymis, x, y; |
333 | 0 | memset(seg_map, CR_SEGMENT_ID_BASE, mi_params->mi_rows * mi_params->mi_cols); |
334 | 0 | sb_cols = (mi_params->mi_cols + cm->seq_params->mib_size - 1) / |
335 | 0 | cm->seq_params->mib_size; |
336 | 0 | sb_rows = (mi_params->mi_rows + cm->seq_params->mib_size - 1) / |
337 | 0 | cm->seq_params->mib_size; |
338 | 0 | sbs_in_frame = sb_cols * sb_rows; |
339 | | // Number of target blocks to get the q delta (segment 1). |
340 | 0 | block_count = |
341 | 0 | cr->percent_refresh * mi_params->mi_rows * mi_params->mi_cols / 100; |
342 | | // Set the segmentation map: cycle through the superblocks, starting at |
343 | | // cr->sb_index, and stopping when either block_count blocks have been |
344 | | // found to be refreshed, or we have passed through the whole frame. |
345 | 0 | if (cr->sb_index >= sbs_in_frame) cr->sb_index = 0; |
346 | 0 | assert(cr->sb_index < sbs_in_frame); |
347 | 0 | i = cr->sb_index; |
348 | 0 | cr->target_num_seg_blocks = 0; |
349 | 0 | do { |
350 | 0 | int sum_map = 0; |
351 | | // Get the mi_row/mi_col corresponding to superblock index i. |
352 | 0 | int sb_row_index = (i / sb_cols); |
353 | 0 | int sb_col_index = i - sb_row_index * sb_cols; |
354 | 0 | int mi_row = sb_row_index * cm->seq_params->mib_size; |
355 | 0 | int mi_col = sb_col_index * cm->seq_params->mib_size; |
356 | 0 | assert(mi_row >= 0 && mi_row < mi_params->mi_rows); |
357 | 0 | assert(mi_col >= 0 && mi_col < mi_params->mi_cols); |
358 | 0 | bl_index = mi_row * mi_params->mi_cols + mi_col; |
359 | | // Loop through all MI blocks in superblock and update map. |
360 | 0 | xmis = AOMMIN(mi_params->mi_cols - mi_col, cm->seq_params->mib_size); |
361 | 0 | ymis = AOMMIN(mi_params->mi_rows - mi_row, cm->seq_params->mib_size); |
362 | | // The cyclic refresh map is only needed at 8x8 granularity. |
363 | 0 | for (y = 0; y < ymis; y += 2) { |
364 | 0 | for (x = 0; x < xmis; x += 2) { |
365 | 0 | const int bl_index2 = bl_index + y * mi_params->mi_cols + x; |
366 | | // If the block is a candidate for cleanup, then mark it |
367 | | // for possible boost/refresh (segment 1). The segment id may get |
368 | | // reset to 0 later if the block gets coded as anything other than GLOBALMV. |
369 | 0 | if (cr->map[bl_index2] == 0) { |
370 | 0 | sum_map += 4; |
371 | 0 | } else if (cr->map[bl_index2] < 0) { |
372 | 0 | cr->map[bl_index2]++; |
373 | 0 | } |
374 | 0 | } |
375 | 0 | } |
376 | | // Enforce a constant segment over the superblock. |
377 | | // If at least half of the superblock is flagged, set the whole superblock to 1. |
378 | 0 | if (sum_map >= (xmis * ymis) >> 1) { |
379 | 0 | for (y = 0; y < ymis; y++) |
380 | 0 | for (x = 0; x < xmis; x++) { |
381 | 0 | seg_map[bl_index + y * mi_params->mi_cols + x] = CR_SEGMENT_ID_BOOST1; |
382 | 0 | } |
383 | 0 | cr->target_num_seg_blocks += xmis * ymis; |
384 | 0 | } |
385 | 0 | i++; |
386 | 0 | if (i == sbs_in_frame) { |
387 | 0 | i = 0; |
388 | 0 | } |
389 | 0 | } while (cr->target_num_seg_blocks < block_count && i != cr->sb_index); |
390 | 0 | cr->sb_index = i; |
391 | 0 | } |
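The scan is cyclic across frames: it resumes at the stored cr->sb_index, walks superblocks in raster order with wrap-around at the end of the frame, and stops once enough blocks have been flagged or every superblock has been visited, saving the stopping point for the next frame. A standalone sketch of just that walk, with a hypothetical visit_sb() callback in place of the per-superblock map update:

    /* Sketch of the wrap-around superblock scan; visit_sb() is a hypothetical
     * callback returning how many blocks it flagged for refresh in that SB. */
    static int cyclic_scan(int start_sb, int sbs_in_frame, int block_target,
                           int (*visit_sb)(int sb_index)) {
      if (start_sb >= sbs_in_frame) start_sb = 0;
      int flagged = 0;
      int i = start_sb;
      do {
        flagged += visit_sb(i);
        if (++i == sbs_in_frame) i = 0;  /* wrap back to the top of the frame */
      } while (flagged < block_target && i != start_sb);
      return i;  /* resume point for the next frame */
    }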
392 | | |
393 | | // Set cyclic refresh parameters. |
394 | 0 | void av1_cyclic_refresh_update_parameters(AV1_COMP *const cpi) { |
395 | | // TODO(marpan): Parameters need to be tuned. |
396 | 0 | const RATE_CONTROL *const rc = &cpi->rc; |
397 | 0 | const PRIMARY_RATE_CONTROL *const p_rc = &cpi->ppi->p_rc; |
398 | 0 | const AV1_COMMON *const cm = &cpi->common; |
399 | 0 | CYCLIC_REFRESH *const cr = cpi->cyclic_refresh; |
400 | 0 | int num4x4bl = cm->mi_params.MBs << 4; |
401 | 0 | int target_refresh = 0; |
402 | 0 | double weight_segment_target = 0; |
403 | 0 | double weight_segment = 0; |
404 | 0 | int qp_thresh = AOMMIN(20, rc->best_quality << 1); |
405 | 0 | int qp_max_thresh = 118 * MAXQ >> 7; |
406 | | // Although this segment feature for RTC is only used for |
407 | | // blocks >= 8X8, for more efficient coding of the seg map |
408 | | // cur_frame->seg_map needs to be set at 4x4 along with the |
409 | | // function av1_cyclic_reset_segment_skip(). Skipping over |
410 | | // 4x4 will therefore have a small bdrate loss (~0.2%), so |
411 | | // we use it only for speed > 9 for now. |
412 | | // Also, if loop-filter deltas are applied via the segment, then |
413 | | // we need to set cr->skip_over4x4 = 1. |
414 | 0 | cr->skip_over4x4 = (cpi->oxcf.speed > 9) ? 1 : 0; |
415 | 0 | cr->apply_cyclic_refresh = 1; |
416 | 0 | if (frame_is_intra_only(cm) || is_lossless_requested(&cpi->oxcf.rc_cfg) || |
417 | 0 | cpi->svc.temporal_layer_id > 0 || |
418 | 0 | p_rc->avg_frame_qindex[INTER_FRAME] < qp_thresh || |
419 | 0 | (cpi->svc.number_spatial_layers > 1 && |
420 | 0 | cpi->svc.layer_context[cpi->svc.temporal_layer_id].is_key_frame) || |
421 | 0 | (rc->frames_since_key > 20 && |
422 | 0 | p_rc->avg_frame_qindex[INTER_FRAME] > qp_max_thresh) || |
423 | 0 | (rc->avg_frame_low_motion && rc->avg_frame_low_motion < 30 && |
424 | 0 | rc->frames_since_key > 40)) { |
425 | 0 | cr->apply_cyclic_refresh = 0; |
426 | 0 | return; |
427 | 0 | } |
428 | 0 | cr->percent_refresh = 10; |
429 | 0 | if (cpi->svc.number_temporal_layers > 2) cr->percent_refresh = 15; |
430 | 0 | cr->max_qdelta_perc = 60; |
431 | 0 | cr->time_for_refresh = 0; |
432 | 0 | cr->motion_thresh = 32; |
433 | 0 | cr->rate_boost_fac = 15; |
434 | | // Use larger delta-qp (increase rate_ratio_qdelta) for first few (~4) |
435 | | // periods of the refresh cycle, after a key frame. |
436 | | // Account for larger interval on base layer for temporal layers. |
437 | 0 | if (cr->percent_refresh > 0 && |
438 | 0 | rc->frames_since_key < |
439 | 0 | (4 * cpi->svc.number_temporal_layers) * (100 / cr->percent_refresh)) { |
440 | 0 | cr->rate_ratio_qdelta = 3.0; |
441 | 0 | } else { |
442 | 0 | cr->rate_ratio_qdelta = 2.0; |
443 | 0 | } |
444 | | // Adjust some parameters for low resolutions. |
445 | 0 | if (cm->width * cm->height <= 352 * 288) { |
446 | 0 | if (rc->avg_frame_bandwidth < 3000) { |
447 | 0 | cr->motion_thresh = 16; |
448 | 0 | cr->rate_boost_fac = 13; |
449 | 0 | } else { |
450 | 0 | cr->max_qdelta_perc = 50; |
451 | 0 | cr->rate_ratio_qdelta = AOMMAX(cr->rate_ratio_qdelta, 2.0); |
452 | 0 | } |
453 | 0 | } |
454 | 0 | if (cpi->oxcf.rc_cfg.mode == AOM_VBR) { |
455 | | // To be adjusted for VBR mode, e.g., based on gf period and boost. |
456 | | // For now use a smaller qp-delta (than CBR), no second boosted segment, |
457 | | // and turn off refresh on golden-frame updates (since they are already boosted). |
458 | 0 | cr->percent_refresh = 10; |
459 | 0 | cr->rate_ratio_qdelta = 1.5; |
460 | 0 | cr->rate_boost_fac = 10; |
461 | 0 | if (cpi->refresh_frame.golden_frame) { |
462 | 0 | cr->percent_refresh = 0; |
463 | 0 | cr->rate_ratio_qdelta = 1.0; |
464 | 0 | } |
465 | 0 | } |
466 | | // Weight for the segment prior to encoding: take the average of the target |
467 | | // number for the frame to be encoded and the actual from the previous frame. |
468 | | // Use the target if it is less. To be used for setting the base qp for the |
469 | | // frame in av1_rc_regulate_q. |
470 | 0 | target_refresh = |
471 | 0 | cr->percent_refresh * cm->mi_params.mi_rows * cm->mi_params.mi_cols / 100; |
472 | 0 | weight_segment_target = (double)(target_refresh) / num4x4bl; |
473 | 0 | weight_segment = (double)((target_refresh + cr->actual_num_seg1_blocks + |
474 | 0 | cr->actual_num_seg2_blocks) >> |
475 | 0 | 1) / |
476 | 0 | num4x4bl; |
477 | 0 | if (weight_segment_target < 7 * weight_segment / 8) |
478 | 0 | weight_segment = weight_segment_target; |
479 | 0 | cr->weight_segment = weight_segment; |
480 | 0 | if (rc->rtc_external_ratectrl) { |
481 | 0 | cr->actual_num_seg1_blocks = cr->percent_refresh * cm->mi_params.mi_rows * |
482 | 0 | cm->mi_params.mi_cols / 100; |
483 | 0 | cr->actual_num_seg2_blocks = 0; |
484 | 0 | cr->weight_segment = (double)(cr->actual_num_seg1_blocks) / num4x4bl; |
485 | 0 | } |
486 | 0 | } |
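cr->weight_segment, used later when regulating the base q, averages the target number of refreshed blocks with the number actually refreshed in the previous frame, then falls back to the target alone when that average exceeds the target by more than a factor of 8/7. A small sketch of the computation, in 4x4-block units:

    /* Sketch: segment weight used when setting the base q (fractions of the
     * frame's 4x4 blocks). */
    static double segment_weight(int target_blocks, int actual_seg1,
                                 int actual_seg2, int num4x4bl) {
      const double w_target = (double)target_blocks / num4x4bl;
      double w =
          (double)((target_blocks + actual_seg1 + actual_seg2) >> 1) / num4x4bl;
      if (w_target < 7 * w / 8) w = w_target;  /* trust the smaller target */
      return w;
    }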
487 | | |
488 | | // Setup cyclic background refresh: set delta q and segmentation map. |
489 | 0 | void av1_cyclic_refresh_setup(AV1_COMP *const cpi) { |
490 | 0 | AV1_COMMON *const cm = &cpi->common; |
491 | 0 | const RATE_CONTROL *const rc = &cpi->rc; |
492 | 0 | CYCLIC_REFRESH *const cr = cpi->cyclic_refresh; |
493 | 0 | struct segmentation *const seg = &cm->seg; |
494 | 0 | int resolution_change = |
495 | 0 | cm->prev_frame && (cm->width != cm->prev_frame->width || |
496 | 0 | cm->height != cm->prev_frame->height); |
497 | 0 | if (resolution_change) av1_cyclic_refresh_reset_resize(cpi); |
498 | 0 | if (!cr->apply_cyclic_refresh) { |
499 | | // Set segmentation map to 0 and disable. |
500 | 0 | unsigned char *const seg_map = cpi->enc_seg.map; |
501 | 0 | memset(seg_map, 0, cm->mi_params.mi_rows * cm->mi_params.mi_cols); |
502 | 0 | av1_disable_segmentation(&cm->seg); |
503 | 0 | if (cm->current_frame.frame_type == KEY_FRAME) { |
504 | 0 | cr->sb_index = 0; |
505 | 0 | } |
506 | 0 | return; |
507 | 0 | } else { |
508 | 0 | const double q = av1_convert_qindex_to_q(cm->quant_params.base_qindex, |
509 | 0 | cm->seq_params->bit_depth); |
510 | | // Set the rate threshold to a multiple of the target rate (the target is |
511 | | // given by sb64_target_rate, scaled by 256); the shift by 2 makes it 4x. |
512 | 0 | cr->thresh_rate_sb = ((int64_t)(rc->sb64_target_rate) << 8) << 2; |
513 | | // Distortion threshold, quadratic in Q, scale factor to be adjusted. |
514 | | // q will not exceed 457, so (q * q) is within 32bit; see: |
515 | | // av1_convert_qindex_to_q(), av1_ac_quant(), ac_qlookup*[]. |
516 | 0 | cr->thresh_dist_sb = ((int64_t)(q * q)) << 2; |
517 | | // For low resolutions or lower speeds, the rate/dist thresholds need to be |
518 | | // tuned/updated. |
519 | 0 | if (cpi->oxcf.speed <= 7 || (cm->width * cm->height < 640 * 360)) { |
520 | 0 | cr->thresh_dist_sb = 0; |
521 | 0 | cr->thresh_rate_sb = INT64_MAX; |
522 | 0 | } |
523 | | // Set up segmentation. |
524 | | // Clear down the segment map. |
525 | 0 | av1_enable_segmentation(&cm->seg); |
526 | 0 | av1_clearall_segfeatures(seg); |
527 | | |
528 | | // Note: setting temporal_update has no effect, as the seg-map coding method |
529 | | // (temporal or spatial) is determined in |
530 | | // av1_choose_segmap_coding_method(), |
531 | | // based on the coding cost of each method. For error_resilient mode, the |
532 | | // last_frame_seg_map is set to 0, so if temporal coding is used, it is |
533 | | // relative to a zero previous map. |
534 | | // seg->temporal_update = 0; |
535 | | |
536 | | // Segment BASE "Q" feature is disabled so it defaults to the baseline Q. |
537 | 0 | av1_disable_segfeature(seg, CR_SEGMENT_ID_BASE, SEG_LVL_ALT_Q); |
538 | | // Use segment BOOST1 for in-frame Q adjustment. |
539 | 0 | av1_enable_segfeature(seg, CR_SEGMENT_ID_BOOST1, SEG_LVL_ALT_Q); |
540 | | // Use segment BOOST2 for more aggressive in-frame Q adjustment. |
541 | 0 | av1_enable_segfeature(seg, CR_SEGMENT_ID_BOOST2, SEG_LVL_ALT_Q); |
542 | | |
543 | | // Set the q delta for segment BOOST1. |
544 | 0 | const CommonQuantParams *const quant_params = &cm->quant_params; |
545 | 0 | int qindex_delta = |
546 | 0 | compute_deltaq(cpi, quant_params->base_qindex, cr->rate_ratio_qdelta); |
547 | 0 | cr->qindex_delta[1] = qindex_delta; |
548 | | |
549 | | // Compute rd-mult for segment BOOST1. |
550 | 0 | const int qindex2 = clamp( |
551 | 0 | quant_params->base_qindex + quant_params->y_dc_delta_q + qindex_delta, |
552 | 0 | 0, MAXQ); |
553 | 0 | cr->rdmult = av1_compute_rd_mult(cpi, qindex2); |
554 | 0 |
555 | 0 | av1_set_segdata(seg, CR_SEGMENT_ID_BOOST1, SEG_LVL_ALT_Q, qindex_delta); |
556 | | |
557 | | // Set a more aggressive (higher) q delta for segment BOOST2. |
558 | 0 | qindex_delta = compute_deltaq( |
559 | 0 | cpi, quant_params->base_qindex, |
560 | 0 | AOMMIN(CR_MAX_RATE_TARGET_RATIO, |
561 | 0 | 0.1 * cr->rate_boost_fac * cr->rate_ratio_qdelta)); |
562 | 0 | cr->qindex_delta[2] = qindex_delta; |
563 | 0 | av1_set_segdata(seg, CR_SEGMENT_ID_BOOST2, SEG_LVL_ALT_Q, qindex_delta); |
564 | | |
565 | | // Update the segmentation and refresh map. |
566 | 0 | cyclic_refresh_update_map(cpi); |
567 | 0 | } |
568 | 0 | } |
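Segment BOOST2 gets its delta from the same compute_deltaq() helper, but with a larger rate factor, 0.1 * rate_boost_fac * rate_ratio_qdelta, capped at CR_MAX_RATE_TARGET_RATIO. A small arithmetic sketch with the defaults set earlier in this file (rate_boost_fac = 15 and rate_ratio_qdelta = 2.0 give a factor of 3.0); the cap is left as a parameter since its value is defined in the header:

    /* Sketch: rate factor handed to compute_deltaq() for segment BOOST2.
     * max_ratio stands in for CR_MAX_RATE_TARGET_RATIO (value not assumed). */
    static double boost2_rate_factor(int rate_boost_fac, double rate_ratio_qdelta,
                                     double max_ratio) {
      const double f = 0.1 * rate_boost_fac * rate_ratio_qdelta; /* 15, 2.0 -> 3.0 */
      return f < max_ratio ? f : max_ratio;
    }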
569 | | |
570 | 0 | int av1_cyclic_refresh_get_rdmult(const CYCLIC_REFRESH *cr) { |
571 | 0 | return cr->rdmult; |
572 | 0 | } |
573 | | |
574 | 0 | void av1_cyclic_refresh_reset_resize(AV1_COMP *const cpi) { |
575 | 0 | const AV1_COMMON *const cm = &cpi->common; |
576 | 0 | CYCLIC_REFRESH *const cr = cpi->cyclic_refresh; |
577 | 0 | memset(cr->map, 0, cm->mi_params.mi_rows * cm->mi_params.mi_cols); |
578 | 0 | cr->sb_index = 0; |
579 | 0 | cpi->refresh_frame.golden_frame = true; |
580 | 0 | cr->apply_cyclic_refresh = 0; |
581 | 0 | } |