/src/libavif/ext/aom/av1/encoder/encoder_alloc.h
Line | Count | Source |
1 | | /* |
2 | | * Copyright (c) 2020, Alliance for Open Media. All rights reserved. |
3 | | * |
4 | | * This source code is subject to the terms of the BSD 2 Clause License and |
5 | | * the Alliance for Open Media Patent License 1.0. If the BSD 2 Clause License |
6 | | * was not distributed with this source code in the LICENSE file, you can |
7 | | * obtain it at www.aomedia.org/license/software. If the Alliance for Open |
8 | | * Media Patent License 1.0 was not distributed with this source code in the |
9 | | * PATENTS file, you can obtain it at www.aomedia.org/license/patent. |
10 | | */ |
11 | | |
12 | | #ifndef AOM_AV1_ENCODER_ENCODER_ALLOC_H_ |
13 | | #define AOM_AV1_ENCODER_ENCODER_ALLOC_H_ |
14 | | |
15 | | #include "av1/encoder/block.h" |
16 | | #include "av1/encoder/encodeframe_utils.h" |
17 | | #include "av1/encoder/encoder.h" |
18 | | #include "av1/encoder/encodetxb.h" |
19 | | #include "av1/encoder/ethread.h" |
20 | | #include "av1/encoder/global_motion_facade.h" |
21 | | #include "av1/encoder/intra_mode_search_utils.h" |
22 | | #include "av1/encoder/pickcdef.h" |
23 | | |
24 | | #ifdef __cplusplus |
25 | | extern "C" { |
26 | | #endif |
27 | | |
28 | | static inline void dealloc_context_buffers_ext( |
29 | 185k | MBMIExtFrameBufferInfo *mbmi_ext_info) { |
30 | 185k | aom_free(mbmi_ext_info->frame_base); |
31 | 185k | mbmi_ext_info->frame_base = NULL; |
32 | 185k | mbmi_ext_info->alloc_size = 0; |
33 | 185k | } Unexecuted instantiation: av1_cx_iface.c:dealloc_context_buffers_ext encoder.c:dealloc_context_buffers_ext Line | Count | Source | 29 | 96.2k | MBMIExtFrameBufferInfo *mbmi_ext_info) { | 30 | 96.2k | aom_free(mbmi_ext_info->frame_base); | 31 | | mbmi_ext_info->frame_base = NULL; | 32 | 96.2k | mbmi_ext_info->alloc_size = 0; | 33 | 96.2k | } |
Unexecuted instantiation: encoder_utils.c:dealloc_context_buffers_ext Unexecuted instantiation: ethread.c:dealloc_context_buffers_ext Unexecuted instantiation: superres_scale.c:dealloc_context_buffers_ext Unexecuted instantiation: svc_layercontext.c:dealloc_context_buffers_ext Unexecuted instantiation: compound_type.c:dealloc_context_buffers_ext encode_strategy.c:dealloc_context_buffers_ext Line | Count | Source | 29 | 89.3k | MBMIExtFrameBufferInfo *mbmi_ext_info) { | 30 | 89.3k | aom_free(mbmi_ext_info->frame_base); | 31 | | mbmi_ext_info->frame_base = NULL; | 32 | 89.3k | mbmi_ext_info->alloc_size = 0; | 33 | 89.3k | } |
|
34 | | |
35 | | static inline void alloc_context_buffers_ext( |
36 | 131k | AV1_COMMON *cm, MBMIExtFrameBufferInfo *mbmi_ext_info) { |
37 | 131k | const CommonModeInfoParams *const mi_params = &cm->mi_params; |
38 | | |
39 | 131k | const int mi_alloc_size_1d = mi_size_wide[mi_params->mi_alloc_bsize]; |
40 | 131k | const int mi_alloc_rows = |
41 | 131k | (mi_params->mi_rows + mi_alloc_size_1d - 1) / mi_alloc_size_1d; |
42 | 131k | const int mi_alloc_cols = |
43 | 131k | (mi_params->mi_cols + mi_alloc_size_1d - 1) / mi_alloc_size_1d; |
44 | 131k | const int new_ext_mi_size = mi_alloc_rows * mi_alloc_cols; |
45 | | |
46 | 131k | if (new_ext_mi_size > mbmi_ext_info->alloc_size) { |
47 | 89.3k | dealloc_context_buffers_ext(mbmi_ext_info); |
48 | 89.3k | CHECK_MEM_ERROR( |
49 | 89.3k | cm, mbmi_ext_info->frame_base, |
50 | 89.3k | aom_malloc(new_ext_mi_size * sizeof(*mbmi_ext_info->frame_base))); |
51 | 89.3k | mbmi_ext_info->alloc_size = new_ext_mi_size; |
52 | 89.3k | } |
53 | | // The stride needs to be updated regardless of whether new allocation |
54 | | // happened or not. |
55 | 131k | mbmi_ext_info->stride = mi_alloc_cols; |
56 | 131k | } Unexecuted instantiation: av1_cx_iface.c:alloc_context_buffers_ext Unexecuted instantiation: encoder.c:alloc_context_buffers_ext Unexecuted instantiation: encoder_utils.c:alloc_context_buffers_ext Unexecuted instantiation: ethread.c:alloc_context_buffers_ext Unexecuted instantiation: superres_scale.c:alloc_context_buffers_ext Unexecuted instantiation: svc_layercontext.c:alloc_context_buffers_ext Unexecuted instantiation: compound_type.c:alloc_context_buffers_ext encode_strategy.c:alloc_context_buffers_ext Line | Count | Source | 36 | 131k | AV1_COMMON *cm, MBMIExtFrameBufferInfo *mbmi_ext_info) { | 37 | 131k | const CommonModeInfoParams *const mi_params = &cm->mi_params; | 38 | | | 39 | 131k | const int mi_alloc_size_1d = mi_size_wide[mi_params->mi_alloc_bsize]; | 40 | 131k | const int mi_alloc_rows = | 41 | 131k | (mi_params->mi_rows + mi_alloc_size_1d - 1) / mi_alloc_size_1d; | 42 | 131k | const int mi_alloc_cols = | 43 | 131k | (mi_params->mi_cols + mi_alloc_size_1d - 1) / mi_alloc_size_1d; | 44 | 131k | const int new_ext_mi_size = mi_alloc_rows * mi_alloc_cols; | 45 | | | 46 | 131k | if (new_ext_mi_size > mbmi_ext_info->alloc_size) { | 47 | 89.3k | dealloc_context_buffers_ext(mbmi_ext_info); | 48 | 89.3k | CHECK_MEM_ERROR( | 49 | 89.3k | cm, mbmi_ext_info->frame_base, | 50 | 89.3k | aom_malloc(new_ext_mi_size * sizeof(*mbmi_ext_info->frame_base))); | 51 | 89.3k | mbmi_ext_info->alloc_size = new_ext_mi_size; | 52 | 89.3k | } | 53 | | // The stride needs to be updated regardless of whether new allocation | 54 | | // happened or not. | 55 | 131k | mbmi_ext_info->stride = mi_alloc_cols; | 56 | 131k | } |
|
57 | | |
58 | 185k | static inline void alloc_compressor_data(AV1_COMP *cpi) { |
59 | 185k | AV1_COMMON *cm = &cpi->common; |
60 | 185k | CommonModeInfoParams *const mi_params = &cm->mi_params; |
61 | | |
62 | | // Setup mi_params |
63 | 185k | mi_params->set_mb_mi(mi_params, cm->width, cm->height, |
64 | 185k | cpi->sf.part_sf.default_min_partition_size); |
65 | | |
66 | 185k | if (!is_stat_generation_stage(cpi)) av1_alloc_txb_buf(cpi); |
67 | | |
68 | 185k | aom_free(cpi->td.mv_costs_alloc); |
69 | 185k | cpi->td.mv_costs_alloc = NULL; |
70 | | // Avoid the memory allocation of 'mv_costs_alloc' for allintra encoding |
71 | | // mode. |
72 | 185k | if (cpi->oxcf.kf_cfg.key_freq_max != 0) { |
73 | 50.9k | CHECK_MEM_ERROR(cm, cpi->td.mv_costs_alloc, |
74 | 50.9k | (MvCosts *)aom_calloc(1, sizeof(*cpi->td.mv_costs_alloc))); |
75 | 50.9k | cpi->td.mb.mv_costs = cpi->td.mv_costs_alloc; |
76 | 50.9k | } |
77 | | |
78 | 185k | av1_setup_shared_coeff_buffer(cm->seq_params, &cpi->td.shared_coeff_buf, |
79 | 185k | cm->error); |
80 | 185k | if (av1_setup_sms_tree(cpi, &cpi->td)) { |
81 | 0 | aom_internal_error(cm->error, AOM_CODEC_MEM_ERROR, |
82 | 0 | "Failed to allocate SMS tree"); |
83 | 0 | } |
84 | 185k | cpi->td.firstpass_ctx = |
85 | 185k | av1_alloc_pmc(cpi, BLOCK_16X16, &cpi->td.shared_coeff_buf); |
86 | 185k | if (!cpi->td.firstpass_ctx) |
87 | 0 | aom_internal_error(cm->error, AOM_CODEC_MEM_ERROR, |
88 | 0 | "Failed to allocate PICK_MODE_CONTEXT"); |
89 | 185k | } Unexecuted instantiation: av1_cx_iface.c:alloc_compressor_data encoder.c:alloc_compressor_data Line | Count | Source | 58 | 185k | static inline void alloc_compressor_data(AV1_COMP *cpi) { | 59 | 185k | AV1_COMMON *cm = &cpi->common; | 60 | 185k | CommonModeInfoParams *const mi_params = &cm->mi_params; | 61 | | | 62 | | // Setup mi_params | 63 | 185k | mi_params->set_mb_mi(mi_params, cm->width, cm->height, | 64 | 185k | cpi->sf.part_sf.default_min_partition_size); | 65 | | | 66 | 185k | if (!is_stat_generation_stage(cpi)) av1_alloc_txb_buf(cpi); | 67 | | | 68 | 185k | aom_free(cpi->td.mv_costs_alloc); | 69 | 185k | cpi->td.mv_costs_alloc = NULL; | 70 | | // Avoid the memory allocation of 'mv_costs_alloc' for allintra encoding | 71 | | // mode. | 72 | 185k | if (cpi->oxcf.kf_cfg.key_freq_max != 0) { | 73 | 50.9k | CHECK_MEM_ERROR(cm, cpi->td.mv_costs_alloc, | 74 | 50.9k | (MvCosts *)aom_calloc(1, sizeof(*cpi->td.mv_costs_alloc))); | 75 | 50.9k | cpi->td.mb.mv_costs = cpi->td.mv_costs_alloc; | 76 | 50.9k | } | 77 | | | 78 | 185k | av1_setup_shared_coeff_buffer(cm->seq_params, &cpi->td.shared_coeff_buf, | 79 | 185k | cm->error); | 80 | 185k | if (av1_setup_sms_tree(cpi, &cpi->td)) { | 81 | 0 | aom_internal_error(cm->error, AOM_CODEC_MEM_ERROR, | 82 | 0 | "Failed to allocate SMS tree"); | 83 | 0 | } | 84 | 185k | cpi->td.firstpass_ctx = | 85 | 185k | av1_alloc_pmc(cpi, BLOCK_16X16, &cpi->td.shared_coeff_buf); | 86 | 185k | if (!cpi->td.firstpass_ctx) | 87 | 0 | aom_internal_error(cm->error, AOM_CODEC_MEM_ERROR, | 88 | 0 | "Failed to allocate PICK_MODE_CONTEXT"); | 89 | 185k | } |
Unexecuted instantiation: encoder_utils.c:alloc_compressor_data Unexecuted instantiation: ethread.c:alloc_compressor_data Unexecuted instantiation: superres_scale.c:alloc_compressor_data Unexecuted instantiation: svc_layercontext.c:alloc_compressor_data Unexecuted instantiation: compound_type.c:alloc_compressor_data Unexecuted instantiation: encode_strategy.c:alloc_compressor_data |
90 | | |
91 | | // Allocate mbmi buffers which are used to store mode information at block |
92 | | // level. |
93 | 149k | static inline void alloc_mb_mode_info_buffers(AV1_COMP *const cpi) { |
94 | 149k | AV1_COMMON *const cm = &cpi->common; |
95 | 149k | if (av1_alloc_context_buffers(cm, cm->width, cm->height, |
96 | 149k | cpi->sf.part_sf.default_min_partition_size)) { |
97 | 0 | aom_internal_error(cm->error, AOM_CODEC_MEM_ERROR, |
98 | 0 | "Failed to allocate context buffers"); |
99 | 0 | } |
100 | | |
101 | 149k | if (!is_stat_generation_stage(cpi)) |
102 | 131k | alloc_context_buffers_ext(cm, &cpi->mbmi_ext_info); |
103 | 149k | } Unexecuted instantiation: av1_cx_iface.c:alloc_mb_mode_info_buffers Unexecuted instantiation: encoder.c:alloc_mb_mode_info_buffers Unexecuted instantiation: encoder_utils.c:alloc_mb_mode_info_buffers Unexecuted instantiation: ethread.c:alloc_mb_mode_info_buffers Unexecuted instantiation: superres_scale.c:alloc_mb_mode_info_buffers Unexecuted instantiation: svc_layercontext.c:alloc_mb_mode_info_buffers Unexecuted instantiation: compound_type.c:alloc_mb_mode_info_buffers encode_strategy.c:alloc_mb_mode_info_buffers Line | Count | Source | 93 | 149k | static inline void alloc_mb_mode_info_buffers(AV1_COMP *const cpi) { | 94 | 149k | AV1_COMMON *const cm = &cpi->common; | 95 | 149k | if (av1_alloc_context_buffers(cm, cm->width, cm->height, | 96 | 149k | cpi->sf.part_sf.default_min_partition_size)) { | 97 | 0 | aom_internal_error(cm->error, AOM_CODEC_MEM_ERROR, | 98 | 0 | "Failed to allocate context buffers"); | 99 | 0 | } | 100 | | | 101 | 149k | if (!is_stat_generation_stage(cpi)) | 102 | 131k | alloc_context_buffers_ext(cm, &cpi->mbmi_ext_info); | 103 | 149k | } |
|
104 | | |
105 | 185k | static inline void realloc_segmentation_maps(AV1_COMP *cpi) { |
106 | 185k | AV1_COMMON *const cm = &cpi->common; |
107 | 185k | CommonModeInfoParams *const mi_params = &cm->mi_params; |
108 | | |
109 | | // Create the encoder segmentation map and set all entries to 0 |
110 | 185k | aom_free(cpi->enc_seg.map); |
111 | 185k | CHECK_MEM_ERROR(cm, cpi->enc_seg.map, |
112 | 185k | aom_calloc(mi_params->mi_rows * mi_params->mi_cols, 1)); |
113 | | |
114 | | // Create a map used for cyclic background refresh. |
115 | 185k | if (cpi->cyclic_refresh) av1_cyclic_refresh_free(cpi->cyclic_refresh); |
116 | 185k | CHECK_MEM_ERROR( |
117 | 185k | cm, cpi->cyclic_refresh, |
118 | 185k | av1_cyclic_refresh_alloc(mi_params->mi_rows, mi_params->mi_cols)); |
119 | | |
120 | | // Create a map used to mark inactive areas. |
121 | 185k | aom_free(cpi->active_map.map); |
122 | 185k | CHECK_MEM_ERROR(cm, cpi->active_map.map, |
123 | 185k | aom_calloc(mi_params->mi_rows * mi_params->mi_cols, 1)); |
124 | 185k | } Unexecuted instantiation: av1_cx_iface.c:realloc_segmentation_maps encoder.c:realloc_segmentation_maps Line | Count | Source | 105 | 185k | static inline void realloc_segmentation_maps(AV1_COMP *cpi) { | 106 | 185k | AV1_COMMON *const cm = &cpi->common; | 107 | 185k | CommonModeInfoParams *const mi_params = &cm->mi_params; | 108 | | | 109 | | // Create the encoder segmentation map and set all entries to 0 | 110 | 185k | aom_free(cpi->enc_seg.map); | 111 | 185k | CHECK_MEM_ERROR(cm, cpi->enc_seg.map, | 112 | 185k | aom_calloc(mi_params->mi_rows * mi_params->mi_cols, 1)); | 113 | | | 114 | | // Create a map used for cyclic background refresh. | 115 | 185k | if (cpi->cyclic_refresh) av1_cyclic_refresh_free(cpi->cyclic_refresh); | 116 | 185k | CHECK_MEM_ERROR( | 117 | 185k | cm, cpi->cyclic_refresh, | 118 | 185k | av1_cyclic_refresh_alloc(mi_params->mi_rows, mi_params->mi_cols)); | 119 | | | 120 | | // Create a map used to mark inactive areas. | 121 | 185k | aom_free(cpi->active_map.map); | 122 | 185k | CHECK_MEM_ERROR(cm, cpi->active_map.map, | 123 | 185k | aom_calloc(mi_params->mi_rows * mi_params->mi_cols, 1)); | 124 | 185k | } |
Unexecuted instantiation: encoder_utils.c:realloc_segmentation_maps Unexecuted instantiation: ethread.c:realloc_segmentation_maps Unexecuted instantiation: superres_scale.c:realloc_segmentation_maps Unexecuted instantiation: svc_layercontext.c:realloc_segmentation_maps Unexecuted instantiation: compound_type.c:realloc_segmentation_maps Unexecuted instantiation: encode_strategy.c:realloc_segmentation_maps |
125 | | |
126 | | static inline void alloc_obmc_buffers(OBMCBuffer *obmc_buffer, |
127 | 44.6k | struct aom_internal_error_info *error) { |
128 | 44.6k | AOM_CHECK_MEM_ERROR( |
129 | 44.6k | error, obmc_buffer->wsrc, |
130 | 44.6k | (int32_t *)aom_memalign(16, MAX_SB_SQUARE * sizeof(*obmc_buffer->wsrc))); |
131 | 44.6k | AOM_CHECK_MEM_ERROR( |
132 | 44.6k | error, obmc_buffer->mask, |
133 | 44.6k | (int32_t *)aom_memalign(16, MAX_SB_SQUARE * sizeof(*obmc_buffer->mask))); |
134 | 44.6k | AOM_CHECK_MEM_ERROR( |
135 | 44.6k | error, obmc_buffer->above_pred, |
136 | 44.6k | (uint8_t *)aom_memalign( |
137 | 44.6k | 16, MAX_MB_PLANE * MAX_SB_SQUARE * sizeof(*obmc_buffer->above_pred))); |
138 | 44.6k | AOM_CHECK_MEM_ERROR( |
139 | 44.6k | error, obmc_buffer->left_pred, |
140 | 44.6k | (uint8_t *)aom_memalign( |
141 | 44.6k | 16, MAX_MB_PLANE * MAX_SB_SQUARE * sizeof(*obmc_buffer->left_pred))); |
142 | 44.6k | } Unexecuted instantiation: av1_cx_iface.c:alloc_obmc_buffers encoder.c:alloc_obmc_buffers Line | Count | Source | 127 | 29.0k | struct aom_internal_error_info *error) { | 128 | 29.0k | AOM_CHECK_MEM_ERROR( | 129 | 29.0k | error, obmc_buffer->wsrc, | 130 | 29.0k | (int32_t *)aom_memalign(16, MAX_SB_SQUARE * sizeof(*obmc_buffer->wsrc))); | 131 | 29.0k | AOM_CHECK_MEM_ERROR( | 132 | 29.0k | error, obmc_buffer->mask, | 133 | 29.0k | (int32_t *)aom_memalign(16, MAX_SB_SQUARE * sizeof(*obmc_buffer->mask))); | 134 | 29.0k | AOM_CHECK_MEM_ERROR( | 135 | 29.0k | error, obmc_buffer->above_pred, | 136 | 29.0k | (uint8_t *)aom_memalign( | 137 | 29.0k | 16, MAX_MB_PLANE * MAX_SB_SQUARE * sizeof(*obmc_buffer->above_pred))); | 138 | 29.0k | AOM_CHECK_MEM_ERROR( | 139 | 29.0k | error, obmc_buffer->left_pred, | 140 | 29.0k | (uint8_t *)aom_memalign( | 141 | 29.0k | 16, MAX_MB_PLANE * MAX_SB_SQUARE * sizeof(*obmc_buffer->left_pred))); | 142 | 29.0k | } |
Unexecuted instantiation: encoder_utils.c:alloc_obmc_buffers ethread.c:alloc_obmc_buffers Line | Count | Source | 127 | 15.5k | struct aom_internal_error_info *error) { | 128 | 15.5k | AOM_CHECK_MEM_ERROR( | 129 | 15.5k | error, obmc_buffer->wsrc, | 130 | 15.5k | (int32_t *)aom_memalign(16, MAX_SB_SQUARE * sizeof(*obmc_buffer->wsrc))); | 131 | 15.5k | AOM_CHECK_MEM_ERROR( | 132 | 15.5k | error, obmc_buffer->mask, | 133 | 15.5k | (int32_t *)aom_memalign(16, MAX_SB_SQUARE * sizeof(*obmc_buffer->mask))); | 134 | 15.5k | AOM_CHECK_MEM_ERROR( | 135 | 15.5k | error, obmc_buffer->above_pred, | 136 | 15.5k | (uint8_t *)aom_memalign( | 137 | 15.5k | 16, MAX_MB_PLANE * MAX_SB_SQUARE * sizeof(*obmc_buffer->above_pred))); | 138 | 15.5k | AOM_CHECK_MEM_ERROR( | 139 | 15.5k | error, obmc_buffer->left_pred, | 140 | 15.5k | (uint8_t *)aom_memalign( | 141 | 15.5k | 16, MAX_MB_PLANE * MAX_SB_SQUARE * sizeof(*obmc_buffer->left_pred))); | 142 | 15.5k | } |
Unexecuted instantiation: superres_scale.c:alloc_obmc_buffers Unexecuted instantiation: svc_layercontext.c:alloc_obmc_buffers Unexecuted instantiation: compound_type.c:alloc_obmc_buffers Unexecuted instantiation: encode_strategy.c:alloc_obmc_buffers |
143 | | |
144 | 719k | static inline void release_obmc_buffers(OBMCBuffer *obmc_buffer) { |
145 | 719k | aom_free(obmc_buffer->mask); |
146 | 719k | aom_free(obmc_buffer->above_pred); |
147 | 719k | aom_free(obmc_buffer->left_pred); |
148 | 719k | aom_free(obmc_buffer->wsrc); |
149 | | |
150 | 719k | obmc_buffer->mask = NULL; |
151 | 719k | obmc_buffer->above_pred = NULL; |
152 | 719k | obmc_buffer->left_pred = NULL; |
153 | 719k | obmc_buffer->wsrc = NULL; |
154 | 719k | } Unexecuted instantiation: av1_cx_iface.c:release_obmc_buffers encoder.c:release_obmc_buffers Line | Count | Source | 144 | 719k | static inline void release_obmc_buffers(OBMCBuffer *obmc_buffer) { | 145 | 719k | aom_free(obmc_buffer->mask); | 146 | 719k | aom_free(obmc_buffer->above_pred); | 147 | 719k | aom_free(obmc_buffer->left_pred); | 148 | 719k | aom_free(obmc_buffer->wsrc); | 149 | | | 150 | 719k | obmc_buffer->mask = NULL; | 151 | 719k | obmc_buffer->above_pred = NULL; | 152 | 719k | obmc_buffer->left_pred = NULL; | 153 | | obmc_buffer->wsrc = NULL; | 154 | 719k | } |
Unexecuted instantiation: encoder_utils.c:release_obmc_buffers Unexecuted instantiation: ethread.c:release_obmc_buffers Unexecuted instantiation: superres_scale.c:release_obmc_buffers Unexecuted instantiation: svc_layercontext.c:release_obmc_buffers Unexecuted instantiation: compound_type.c:release_obmc_buffers Unexecuted instantiation: encode_strategy.c:release_obmc_buffers |
155 | | |
156 | | static inline void alloc_compound_type_rd_buffers( |
157 | 44.6k | struct aom_internal_error_info *error, CompoundTypeRdBuffers *const bufs) { |
158 | 44.6k | AOM_CHECK_MEM_ERROR( |
159 | 44.6k | error, bufs->pred0, |
160 | 44.6k | (uint8_t *)aom_memalign(16, 2 * MAX_SB_SQUARE * sizeof(*bufs->pred0))); |
161 | 44.6k | AOM_CHECK_MEM_ERROR( |
162 | 44.6k | error, bufs->pred1, |
163 | 44.6k | (uint8_t *)aom_memalign(16, 2 * MAX_SB_SQUARE * sizeof(*bufs->pred1))); |
164 | 44.6k | AOM_CHECK_MEM_ERROR( |
165 | 44.6k | error, bufs->residual1, |
166 | 44.6k | (int16_t *)aom_memalign(32, MAX_SB_SQUARE * sizeof(*bufs->residual1))); |
167 | 44.6k | AOM_CHECK_MEM_ERROR( |
168 | 44.6k | error, bufs->diff10, |
169 | 44.6k | (int16_t *)aom_memalign(32, MAX_SB_SQUARE * sizeof(*bufs->diff10))); |
170 | 44.6k | AOM_CHECK_MEM_ERROR(error, bufs->tmp_best_mask_buf, |
171 | 44.6k | (uint8_t *)aom_malloc(2 * MAX_SB_SQUARE * |
172 | 44.6k | sizeof(*bufs->tmp_best_mask_buf))); |
173 | 44.6k | } Unexecuted instantiation: av1_cx_iface.c:alloc_compound_type_rd_buffers encoder.c:alloc_compound_type_rd_buffers Line | Count | Source | 157 | 29.0k | struct aom_internal_error_info *error, CompoundTypeRdBuffers *const bufs) { | 158 | 29.0k | AOM_CHECK_MEM_ERROR( | 159 | 29.0k | error, bufs->pred0, | 160 | 29.0k | (uint8_t *)aom_memalign(16, 2 * MAX_SB_SQUARE * sizeof(*bufs->pred0))); | 161 | 29.0k | AOM_CHECK_MEM_ERROR( | 162 | 29.0k | error, bufs->pred1, | 163 | 29.0k | (uint8_t *)aom_memalign(16, 2 * MAX_SB_SQUARE * sizeof(*bufs->pred1))); | 164 | 29.0k | AOM_CHECK_MEM_ERROR( | 165 | 29.0k | error, bufs->residual1, | 166 | 29.0k | (int16_t *)aom_memalign(32, MAX_SB_SQUARE * sizeof(*bufs->residual1))); | 167 | 29.0k | AOM_CHECK_MEM_ERROR( | 168 | 29.0k | error, bufs->diff10, | 169 | 29.0k | (int16_t *)aom_memalign(32, MAX_SB_SQUARE * sizeof(*bufs->diff10))); | 170 | 29.0k | AOM_CHECK_MEM_ERROR(error, bufs->tmp_best_mask_buf, | 171 | 29.0k | (uint8_t *)aom_malloc(2 * MAX_SB_SQUARE * | 172 | 29.0k | sizeof(*bufs->tmp_best_mask_buf))); | 173 | 29.0k | } |
Unexecuted instantiation: encoder_utils.c:alloc_compound_type_rd_buffers ethread.c:alloc_compound_type_rd_buffers Line | Count | Source | 157 | 15.5k | struct aom_internal_error_info *error, CompoundTypeRdBuffers *const bufs) { | 158 | 15.5k | AOM_CHECK_MEM_ERROR( | 159 | 15.5k | error, bufs->pred0, | 160 | 15.5k | (uint8_t *)aom_memalign(16, 2 * MAX_SB_SQUARE * sizeof(*bufs->pred0))); | 161 | 15.5k | AOM_CHECK_MEM_ERROR( | 162 | 15.5k | error, bufs->pred1, | 163 | 15.5k | (uint8_t *)aom_memalign(16, 2 * MAX_SB_SQUARE * sizeof(*bufs->pred1))); | 164 | 15.5k | AOM_CHECK_MEM_ERROR( | 165 | 15.5k | error, bufs->residual1, | 166 | 15.5k | (int16_t *)aom_memalign(32, MAX_SB_SQUARE * sizeof(*bufs->residual1))); | 167 | 15.5k | AOM_CHECK_MEM_ERROR( | 168 | 15.5k | error, bufs->diff10, | 169 | 15.5k | (int16_t *)aom_memalign(32, MAX_SB_SQUARE * sizeof(*bufs->diff10))); | 170 | 15.5k | AOM_CHECK_MEM_ERROR(error, bufs->tmp_best_mask_buf, | 171 | 15.5k | (uint8_t *)aom_malloc(2 * MAX_SB_SQUARE * | 172 | 15.5k | sizeof(*bufs->tmp_best_mask_buf))); | 173 | 15.5k | } |
Unexecuted instantiation: superres_scale.c:alloc_compound_type_rd_buffers Unexecuted instantiation: svc_layercontext.c:alloc_compound_type_rd_buffers Unexecuted instantiation: compound_type.c:alloc_compound_type_rd_buffers Unexecuted instantiation: encode_strategy.c:alloc_compound_type_rd_buffers |
174 | | |
175 | | static inline void release_compound_type_rd_buffers( |
176 | 719k | CompoundTypeRdBuffers *const bufs) { |
177 | 719k | aom_free(bufs->pred0); |
178 | 719k | aom_free(bufs->pred1); |
179 | 719k | aom_free(bufs->residual1); |
180 | 719k | aom_free(bufs->diff10); |
181 | 719k | aom_free(bufs->tmp_best_mask_buf); |
182 | 719k | av1_zero(*bufs); // Set all pointers to NULL for safety. |
183 | 719k | } Unexecuted instantiation: av1_cx_iface.c:release_compound_type_rd_buffers encoder.c:release_compound_type_rd_buffers Line | Count | Source | 176 | 719k | CompoundTypeRdBuffers *const bufs) { | 177 | 719k | aom_free(bufs->pred0); | 178 | 719k | aom_free(bufs->pred1); | 179 | 719k | aom_free(bufs->residual1); | 180 | 719k | aom_free(bufs->diff10); | 181 | 719k | aom_free(bufs->tmp_best_mask_buf); | 182 | 719k | av1_zero(*bufs); // Set all pointers to NULL for safety. | 183 | 719k | } |
Unexecuted instantiation: encoder_utils.c:release_compound_type_rd_buffers Unexecuted instantiation: ethread.c:release_compound_type_rd_buffers Unexecuted instantiation: superres_scale.c:release_compound_type_rd_buffers Unexecuted instantiation: svc_layercontext.c:release_compound_type_rd_buffers Unexecuted instantiation: compound_type.c:release_compound_type_rd_buffers Unexecuted instantiation: encode_strategy.c:release_compound_type_rd_buffers |
184 | | |
185 | 96.2k | static inline void dealloc_compressor_data(AV1_COMP *cpi) { |
186 | 96.2k | AV1_COMMON *const cm = &cpi->common; |
187 | 96.2k | TokenInfo *token_info = &cpi->token_info; |
188 | 96.2k | const int num_planes = av1_num_planes(cm); |
189 | 96.2k | dealloc_context_buffers_ext(&cpi->mbmi_ext_info); |
190 | | |
191 | 96.2k | aom_free(cpi->tile_data); |
192 | 96.2k | cpi->tile_data = NULL; |
193 | 96.2k | cpi->allocated_tiles = 0; |
194 | | |
195 | | // Delete sementation map |
196 | 96.2k | aom_free(cpi->enc_seg.map); |
197 | 96.2k | cpi->enc_seg.map = NULL; |
198 | | |
199 | 96.2k | av1_cyclic_refresh_free(cpi->cyclic_refresh); |
200 | 96.2k | cpi->cyclic_refresh = NULL; |
201 | | |
202 | 96.2k | aom_free(cpi->active_map.map); |
203 | 96.2k | cpi->active_map.map = NULL; |
204 | | |
205 | 96.2k | aom_free(cpi->roi.roi_map); |
206 | 96.2k | cpi->roi.roi_map = NULL; |
207 | | |
208 | 96.2k | aom_free(cpi->ssim_rdmult_scaling_factors); |
209 | 96.2k | cpi->ssim_rdmult_scaling_factors = NULL; |
210 | | |
211 | 96.2k | aom_free(cpi->tpl_rdmult_scaling_factors); |
212 | 96.2k | cpi->tpl_rdmult_scaling_factors = NULL; |
213 | | |
214 | | #if CONFIG_TUNE_VMAF |
215 | | aom_free(cpi->vmaf_info.rdmult_scaling_factors); |
216 | | cpi->vmaf_info.rdmult_scaling_factors = NULL; |
217 | | aom_close_vmaf_model(cpi->vmaf_info.vmaf_model); |
218 | | #endif |
219 | | |
220 | | #if CONFIG_TUNE_BUTTERAUGLI |
221 | | aom_free(cpi->butteraugli_info.rdmult_scaling_factors); |
222 | | cpi->butteraugli_info.rdmult_scaling_factors = NULL; |
223 | | aom_free_frame_buffer(&cpi->butteraugli_info.source); |
224 | | aom_free_frame_buffer(&cpi->butteraugli_info.resized_source); |
225 | | #endif |
226 | | |
227 | | #if CONFIG_SALIENCY_MAP |
228 | | aom_free(cpi->saliency_map); |
229 | | aom_free(cpi->sm_scaling_factor); |
230 | | #endif |
231 | | |
232 | 96.2k | release_obmc_buffers(&cpi->td.mb.obmc_buffer); |
233 | | |
234 | 96.2k | aom_free(cpi->td.mv_costs_alloc); |
235 | 96.2k | cpi->td.mv_costs_alloc = NULL; |
236 | 96.2k | aom_free(cpi->td.dv_costs_alloc); |
237 | 96.2k | cpi->td.dv_costs_alloc = NULL; |
238 | | |
239 | 96.2k | aom_free(cpi->td.mb.sb_stats_cache); |
240 | 96.2k | cpi->td.mb.sb_stats_cache = NULL; |
241 | | |
242 | 96.2k | aom_free(cpi->td.mb.sb_fp_stats); |
243 | 96.2k | cpi->td.mb.sb_fp_stats = NULL; |
244 | | |
245 | | #if CONFIG_PARTITION_SEARCH_ORDER |
246 | | aom_free(cpi->td.mb.rdcost); |
247 | | cpi->td.mb.rdcost = NULL; |
248 | | #endif |
249 | | |
250 | 96.2k | av1_free_pc_tree_recursive(cpi->td.pc_root, num_planes, 0, 0, |
251 | 96.2k | cpi->sf.part_sf.partition_search_type); |
252 | 96.2k | cpi->td.pc_root = NULL; |
253 | | |
254 | 288k | for (int i = 0; i < 2; i++) { |
255 | 192k | aom_free(cpi->td.mb.intrabc_hash_info.hash_value_buffer[i]); |
256 | 192k | cpi->td.mb.intrabc_hash_info.hash_value_buffer[i] = NULL; |
257 | 192k | } |
258 | | |
259 | 96.2k | av1_hash_table_destroy(&cpi->td.mb.intrabc_hash_info.intrabc_hash_table); |
260 | | |
261 | 96.2k | aom_free(cm->tpl_mvs); |
262 | 96.2k | cm->tpl_mvs = NULL; |
263 | | |
264 | 96.2k | aom_free(cpi->td.pixel_gradient_info); |
265 | 96.2k | cpi->td.pixel_gradient_info = NULL; |
266 | | |
267 | 96.2k | aom_free(cpi->td.src_var_info_of_4x4_sub_blocks); |
268 | 96.2k | cpi->td.src_var_info_of_4x4_sub_blocks = NULL; |
269 | | |
270 | 96.2k | aom_free(cpi->td.vt64x64); |
271 | 96.2k | cpi->td.vt64x64 = NULL; |
272 | | |
273 | 96.2k | av1_free_pmc(cpi->td.firstpass_ctx, num_planes); |
274 | 96.2k | cpi->td.firstpass_ctx = NULL; |
275 | | |
276 | 96.2k | const int is_highbitdepth = cpi->tf_ctx.is_highbitdepth; |
277 | | // This call ensures that the buffers allocated by tf_alloc_and_reset_data() |
278 | | // in av1_temporal_filter() for single-threaded encode are freed in case an |
279 | | // error is encountered during temporal filtering (due to early termination |
280 | | // tf_dealloc_data() in av1_temporal_filter() would not be invoked). |
281 | 96.2k | tf_dealloc_data(&cpi->td.tf_data, is_highbitdepth); |
282 | | |
283 | | // This call ensures that tpl_tmp_buffers for single-threaded encode are freed |
284 | | // in case of an error during tpl. |
285 | 96.2k | tpl_dealloc_temp_buffers(&cpi->td.tpl_tmp_buffers); |
286 | | |
287 | | // This call ensures that the global motion (gm) data buffers for |
288 | | // single-threaded encode are freed in case of an error during gm. |
289 | 96.2k | gm_dealloc_data(&cpi->td.gm_data); |
290 | | |
291 | | // This call ensures that CDEF search context buffers are deallocated in case |
292 | | // of an error during cdef search. |
293 | 96.2k | av1_cdef_dealloc_data(cpi->cdef_search_ctx); |
294 | 96.2k | aom_free(cpi->cdef_search_ctx); |
295 | 96.2k | cpi->cdef_search_ctx = NULL; |
296 | | |
297 | 96.2k | av1_dealloc_mb_data(&cpi->td.mb, num_planes); |
298 | | |
299 | 96.2k | av1_dealloc_mb_wiener_var_pred_buf(&cpi->td); |
300 | | |
301 | 96.2k | av1_free_txb_buf(cpi); |
302 | 96.2k | av1_free_context_buffers(cm); |
303 | | |
304 | 96.2k | aom_free_frame_buffer(&cpi->last_frame_uf); |
305 | 96.2k | #if !CONFIG_REALTIME_ONLY |
306 | 96.2k | av1_free_restoration_buffers(cm); |
307 | 96.2k | av1_free_firstpass_data(&cpi->firstpass_data); |
308 | 96.2k | #endif |
309 | | |
310 | 96.2k | if (!is_stat_generation_stage(cpi)) { |
311 | 89.3k | av1_free_cdef_buffers(cm, &cpi->ppi->p_mt_info.cdef_worker, |
312 | 89.3k | &cpi->mt_info.cdef_sync); |
313 | 89.3k | } |
314 | | |
315 | 268k | for (int plane = 0; plane < num_planes; plane++) { |
316 | 171k | aom_free(cpi->pick_lr_ctxt.rusi[plane]); |
317 | 171k | cpi->pick_lr_ctxt.rusi[plane] = NULL; |
318 | 171k | } |
319 | 96.2k | aom_free(cpi->pick_lr_ctxt.dgd_avg); |
320 | 96.2k | cpi->pick_lr_ctxt.dgd_avg = NULL; |
321 | | |
322 | 96.2k | aom_free_frame_buffer(&cpi->trial_frame_rst); |
323 | 96.2k | aom_free_frame_buffer(&cpi->scaled_source); |
324 | 96.2k | aom_free_frame_buffer(&cpi->scaled_last_source); |
325 | 96.2k | aom_free_frame_buffer(&cpi->orig_source); |
326 | 96.2k | aom_free_frame_buffer(&cpi->svc.source_last_TL0); |
327 | | |
328 | 96.2k | free_token_info(token_info); |
329 | | |
330 | 96.2k | av1_free_shared_coeff_buffer(&cpi->td.shared_coeff_buf); |
331 | 96.2k | av1_free_sms_tree(&cpi->td); |
332 | | |
333 | 96.2k | aom_free(cpi->td.mb.palette_buffer); |
334 | 96.2k | release_compound_type_rd_buffers(&cpi->td.mb.comp_rd_buffer); |
335 | 96.2k | aom_free(cpi->td.mb.tmp_conv_dst); |
336 | 288k | for (int j = 0; j < 2; ++j) { |
337 | 192k | aom_free(cpi->td.mb.tmp_pred_bufs[j]); |
338 | 192k | } |
339 | | |
340 | 96.2k | #if CONFIG_DENOISE && !CONFIG_REALTIME_ONLY |
341 | 96.2k | if (cpi->denoise_and_model) { |
342 | 0 | aom_denoise_and_model_free(cpi->denoise_and_model); |
343 | 0 | cpi->denoise_and_model = NULL; |
344 | 0 | } |
345 | 96.2k | #endif |
346 | 96.2k | #if !CONFIG_REALTIME_ONLY |
347 | 96.2k | if (cpi->film_grain_table) { |
348 | 0 | aom_film_grain_table_free(cpi->film_grain_table); |
349 | 0 | aom_free(cpi->film_grain_table); |
350 | 0 | cpi->film_grain_table = NULL; |
351 | 0 | } |
352 | 96.2k | #endif |
353 | | |
354 | 96.2k | if (cpi->ppi->use_svc) av1_free_svc_cyclic_refresh(cpi); |
355 | 96.2k | aom_free(cpi->svc.layer_context); |
356 | 96.2k | cpi->svc.layer_context = NULL; |
357 | | |
358 | 96.2k | aom_free(cpi->consec_zero_mv); |
359 | 96.2k | cpi->consec_zero_mv = NULL; |
360 | 96.2k | cpi->consec_zero_mv_alloc_size = 0; |
361 | | |
362 | 96.2k | aom_free(cpi->src_sad_blk_64x64); |
363 | 96.2k | cpi->src_sad_blk_64x64 = NULL; |
364 | | |
365 | 96.2k | aom_free(cpi->mb_weber_stats); |
366 | 96.2k | cpi->mb_weber_stats = NULL; |
367 | | |
368 | 96.2k | if (cpi->oxcf.enable_rate_guide_deltaq) { |
369 | 0 | aom_free(cpi->prep_rate_estimates); |
370 | 0 | cpi->prep_rate_estimates = NULL; |
371 | |
|
372 | 0 | aom_free(cpi->ext_rate_distribution); |
373 | 0 | cpi->ext_rate_distribution = NULL; |
374 | 0 | } |
375 | | |
376 | 96.2k | aom_free(cpi->mb_delta_q); |
377 | 96.2k | cpi->mb_delta_q = NULL; |
378 | 96.2k | } Unexecuted instantiation: av1_cx_iface.c:dealloc_compressor_data encoder.c:dealloc_compressor_data Line | Count | Source | 185 | 96.2k | static inline void dealloc_compressor_data(AV1_COMP *cpi) { | 186 | 96.2k | AV1_COMMON *const cm = &cpi->common; | 187 | 96.2k | TokenInfo *token_info = &cpi->token_info; | 188 | 96.2k | const int num_planes = av1_num_planes(cm); | 189 | 96.2k | dealloc_context_buffers_ext(&cpi->mbmi_ext_info); | 190 | | | 191 | 96.2k | aom_free(cpi->tile_data); | 192 | 96.2k | cpi->tile_data = NULL; | 193 | 96.2k | cpi->allocated_tiles = 0; | 194 | | | 195 | | // Delete sementation map | 196 | 96.2k | aom_free(cpi->enc_seg.map); | 197 | 96.2k | cpi->enc_seg.map = NULL; | 198 | | | 199 | 96.2k | av1_cyclic_refresh_free(cpi->cyclic_refresh); | 200 | 96.2k | cpi->cyclic_refresh = NULL; | 201 | | | 202 | 96.2k | aom_free(cpi->active_map.map); | 203 | 96.2k | cpi->active_map.map = NULL; | 204 | | | 205 | 96.2k | aom_free(cpi->roi.roi_map); | 206 | 96.2k | cpi->roi.roi_map = NULL; | 207 | | | 208 | 96.2k | aom_free(cpi->ssim_rdmult_scaling_factors); | 209 | 96.2k | cpi->ssim_rdmult_scaling_factors = NULL; | 210 | | | 211 | 96.2k | aom_free(cpi->tpl_rdmult_scaling_factors); | 212 | 96.2k | cpi->tpl_rdmult_scaling_factors = NULL; | 213 | | | 214 | | #if CONFIG_TUNE_VMAF | 215 | | aom_free(cpi->vmaf_info.rdmult_scaling_factors); | 216 | | cpi->vmaf_info.rdmult_scaling_factors = NULL; | 217 | | aom_close_vmaf_model(cpi->vmaf_info.vmaf_model); | 218 | | #endif | 219 | | | 220 | | #if CONFIG_TUNE_BUTTERAUGLI | 221 | | aom_free(cpi->butteraugli_info.rdmult_scaling_factors); | 222 | | cpi->butteraugli_info.rdmult_scaling_factors = NULL; | 223 | | aom_free_frame_buffer(&cpi->butteraugli_info.source); | 224 | | aom_free_frame_buffer(&cpi->butteraugli_info.resized_source); | 225 | | #endif | 226 | | | 227 | | #if CONFIG_SALIENCY_MAP | 228 | | aom_free(cpi->saliency_map); | 229 | | aom_free(cpi->sm_scaling_factor); | 230 | | #endif | 231 | | | 232 
| 96.2k | release_obmc_buffers(&cpi->td.mb.obmc_buffer); | 233 | | | 234 | 96.2k | aom_free(cpi->td.mv_costs_alloc); | 235 | 96.2k | cpi->td.mv_costs_alloc = NULL; | 236 | 96.2k | aom_free(cpi->td.dv_costs_alloc); | 237 | 96.2k | cpi->td.dv_costs_alloc = NULL; | 238 | | | 239 | 96.2k | aom_free(cpi->td.mb.sb_stats_cache); | 240 | 96.2k | cpi->td.mb.sb_stats_cache = NULL; | 241 | | | 242 | 96.2k | aom_free(cpi->td.mb.sb_fp_stats); | 243 | 96.2k | cpi->td.mb.sb_fp_stats = NULL; | 244 | | | 245 | | #if CONFIG_PARTITION_SEARCH_ORDER | 246 | | aom_free(cpi->td.mb.rdcost); | 247 | | cpi->td.mb.rdcost = NULL; | 248 | | #endif | 249 | | | 250 | 96.2k | av1_free_pc_tree_recursive(cpi->td.pc_root, num_planes, 0, 0, | 251 | 96.2k | cpi->sf.part_sf.partition_search_type); | 252 | 96.2k | cpi->td.pc_root = NULL; | 253 | | | 254 | 288k | for (int i = 0; i < 2; i++) { | 255 | 192k | aom_free(cpi->td.mb.intrabc_hash_info.hash_value_buffer[i]); | 256 | 192k | cpi->td.mb.intrabc_hash_info.hash_value_buffer[i] = NULL; | 257 | 192k | } | 258 | | | 259 | 96.2k | av1_hash_table_destroy(&cpi->td.mb.intrabc_hash_info.intrabc_hash_table); | 260 | | | 261 | 96.2k | aom_free(cm->tpl_mvs); | 262 | 96.2k | cm->tpl_mvs = NULL; | 263 | | | 264 | 96.2k | aom_free(cpi->td.pixel_gradient_info); | 265 | 96.2k | cpi->td.pixel_gradient_info = NULL; | 266 | | | 267 | 96.2k | aom_free(cpi->td.src_var_info_of_4x4_sub_blocks); | 268 | 96.2k | cpi->td.src_var_info_of_4x4_sub_blocks = NULL; | 269 | | | 270 | 96.2k | aom_free(cpi->td.vt64x64); | 271 | 96.2k | cpi->td.vt64x64 = NULL; | 272 | | | 273 | 96.2k | av1_free_pmc(cpi->td.firstpass_ctx, num_planes); | 274 | 96.2k | cpi->td.firstpass_ctx = NULL; | 275 | | | 276 | 96.2k | const int is_highbitdepth = cpi->tf_ctx.is_highbitdepth; | 277 | | // This call ensures that the buffers allocated by tf_alloc_and_reset_data() | 278 | | // in av1_temporal_filter() for single-threaded encode are freed in case an | 279 | | // error is encountered during temporal 
filtering (due to early termination | 280 | | // tf_dealloc_data() in av1_temporal_filter() would not be invoked). | 281 | 96.2k | tf_dealloc_data(&cpi->td.tf_data, is_highbitdepth); | 282 | | | 283 | | // This call ensures that tpl_tmp_buffers for single-threaded encode are freed | 284 | | // in case of an error during tpl. | 285 | 96.2k | tpl_dealloc_temp_buffers(&cpi->td.tpl_tmp_buffers); | 286 | | | 287 | | // This call ensures that the global motion (gm) data buffers for | 288 | | // single-threaded encode are freed in case of an error during gm. | 289 | 96.2k | gm_dealloc_data(&cpi->td.gm_data); | 290 | | | 291 | | // This call ensures that CDEF search context buffers are deallocated in case | 292 | | // of an error during cdef search. | 293 | 96.2k | av1_cdef_dealloc_data(cpi->cdef_search_ctx); | 294 | 96.2k | aom_free(cpi->cdef_search_ctx); | 295 | 96.2k | cpi->cdef_search_ctx = NULL; | 296 | | | 297 | 96.2k | av1_dealloc_mb_data(&cpi->td.mb, num_planes); | 298 | | | 299 | 96.2k | av1_dealloc_mb_wiener_var_pred_buf(&cpi->td); | 300 | | | 301 | 96.2k | av1_free_txb_buf(cpi); | 302 | 96.2k | av1_free_context_buffers(cm); | 303 | | | 304 | 96.2k | aom_free_frame_buffer(&cpi->last_frame_uf); | 305 | 96.2k | #if !CONFIG_REALTIME_ONLY | 306 | 96.2k | av1_free_restoration_buffers(cm); | 307 | 96.2k | av1_free_firstpass_data(&cpi->firstpass_data); | 308 | 96.2k | #endif | 309 | | | 310 | 96.2k | if (!is_stat_generation_stage(cpi)) { | 311 | 89.3k | av1_free_cdef_buffers(cm, &cpi->ppi->p_mt_info.cdef_worker, | 312 | 89.3k | &cpi->mt_info.cdef_sync); | 313 | 89.3k | } | 314 | | | 315 | 268k | for (int plane = 0; plane < num_planes; plane++) { | 316 | 171k | aom_free(cpi->pick_lr_ctxt.rusi[plane]); | 317 | 171k | cpi->pick_lr_ctxt.rusi[plane] = NULL; | 318 | 171k | } | 319 | 96.2k | aom_free(cpi->pick_lr_ctxt.dgd_avg); | 320 | 96.2k | cpi->pick_lr_ctxt.dgd_avg = NULL; | 321 | | | 322 | 96.2k | aom_free_frame_buffer(&cpi->trial_frame_rst); | 323 | 96.2k | 
aom_free_frame_buffer(&cpi->scaled_source); | 324 | 96.2k | aom_free_frame_buffer(&cpi->scaled_last_source); | 325 | 96.2k | aom_free_frame_buffer(&cpi->orig_source); | 326 | 96.2k | aom_free_frame_buffer(&cpi->svc.source_last_TL0); | 327 | | | 328 | 96.2k | free_token_info(token_info); | 329 | | | 330 | 96.2k | av1_free_shared_coeff_buffer(&cpi->td.shared_coeff_buf); | 331 | 96.2k | av1_free_sms_tree(&cpi->td); | 332 | | | 333 | 96.2k | aom_free(cpi->td.mb.palette_buffer); | 334 | 96.2k | release_compound_type_rd_buffers(&cpi->td.mb.comp_rd_buffer); | 335 | 96.2k | aom_free(cpi->td.mb.tmp_conv_dst); | 336 | 288k | for (int j = 0; j < 2; ++j) { | 337 | 192k | aom_free(cpi->td.mb.tmp_pred_bufs[j]); | 338 | 192k | } | 339 | | | 340 | 96.2k | #if CONFIG_DENOISE && !CONFIG_REALTIME_ONLY | 341 | 96.2k | if (cpi->denoise_and_model) { | 342 | 0 | aom_denoise_and_model_free(cpi->denoise_and_model); | 343 | 0 | cpi->denoise_and_model = NULL; | 344 | 0 | } | 345 | 96.2k | #endif | 346 | 96.2k | #if !CONFIG_REALTIME_ONLY | 347 | 96.2k | if (cpi->film_grain_table) { | 348 | 0 | aom_film_grain_table_free(cpi->film_grain_table); | 349 | 0 | aom_free(cpi->film_grain_table); | 350 | 0 | cpi->film_grain_table = NULL; | 351 | 0 | } | 352 | 96.2k | #endif | 353 | | | 354 | 96.2k | if (cpi->ppi->use_svc) av1_free_svc_cyclic_refresh(cpi); | 355 | 96.2k | aom_free(cpi->svc.layer_context); | 356 | 96.2k | cpi->svc.layer_context = NULL; | 357 | | | 358 | 96.2k | aom_free(cpi->consec_zero_mv); | 359 | 96.2k | cpi->consec_zero_mv = NULL; | 360 | 96.2k | cpi->consec_zero_mv_alloc_size = 0; | 361 | | | 362 | 96.2k | aom_free(cpi->src_sad_blk_64x64); | 363 | 96.2k | cpi->src_sad_blk_64x64 = NULL; | 364 | | | 365 | 96.2k | aom_free(cpi->mb_weber_stats); | 366 | 96.2k | cpi->mb_weber_stats = NULL; | 367 | | | 368 | 96.2k | if (cpi->oxcf.enable_rate_guide_deltaq) { | 369 | 0 | aom_free(cpi->prep_rate_estimates); | 370 | 0 | cpi->prep_rate_estimates = NULL; | 371 | |
| 372 | 0 | aom_free(cpi->ext_rate_distribution); | 373 | 0 | cpi->ext_rate_distribution = NULL; | 374 | 0 | } | 375 | | | 376 | 96.2k | aom_free(cpi->mb_delta_q); | 377 | | cpi->mb_delta_q = NULL; | 378 | 96.2k | } |
Unexecuted instantiation: encoder_utils.c:dealloc_compressor_data Unexecuted instantiation: ethread.c:dealloc_compressor_data Unexecuted instantiation: superres_scale.c:dealloc_compressor_data Unexecuted instantiation: svc_layercontext.c:dealloc_compressor_data Unexecuted instantiation: compound_type.c:dealloc_compressor_data Unexecuted instantiation: encode_strategy.c:dealloc_compressor_data |
379 | | |
380 | 131k | static inline void allocate_gradient_info_for_hog(AV1_COMP *cpi) { |
381 | 131k | if (!is_gradient_caching_for_hog_enabled(cpi)) return; |
382 | | |
383 | 62.9k | PixelLevelGradientInfo *pixel_gradient_info = cpi->td.pixel_gradient_info; |
384 | 62.9k | if (!pixel_gradient_info) { |
385 | 52.6k | const AV1_COMMON *const cm = &cpi->common; |
386 | 52.6k | const int plane_types = PLANE_TYPES >> cm->seq_params->monochrome; |
387 | 52.6k | CHECK_MEM_ERROR( |
388 | 52.6k | cm, pixel_gradient_info, |
389 | 52.6k | aom_malloc(sizeof(*pixel_gradient_info) * plane_types * MAX_SB_SQUARE)); |
390 | 52.6k | cpi->td.pixel_gradient_info = pixel_gradient_info; |
391 | 52.6k | } |
392 | | |
393 | 62.9k | cpi->td.mb.pixel_gradient_info = pixel_gradient_info; |
394 | 62.9k | } Unexecuted instantiation: av1_cx_iface.c:allocate_gradient_info_for_hog encoder.c:allocate_gradient_info_for_hog Line | Count | Source | 380 | 131k | static inline void allocate_gradient_info_for_hog(AV1_COMP *cpi) { | 381 | 131k | if (!is_gradient_caching_for_hog_enabled(cpi)) return; | 382 | | | 383 | 62.9k | PixelLevelGradientInfo *pixel_gradient_info = cpi->td.pixel_gradient_info; | 384 | 62.9k | if (!pixel_gradient_info) { | 385 | 52.6k | const AV1_COMMON *const cm = &cpi->common; | 386 | 52.6k | const int plane_types = PLANE_TYPES >> cm->seq_params->monochrome; | 387 | 52.6k | CHECK_MEM_ERROR( | 388 | 52.6k | cm, pixel_gradient_info, | 389 | 52.6k | aom_malloc(sizeof(*pixel_gradient_info) * plane_types * MAX_SB_SQUARE)); | 390 | 52.6k | cpi->td.pixel_gradient_info = pixel_gradient_info; | 391 | 52.6k | } | 392 | | | 393 | 62.9k | cpi->td.mb.pixel_gradient_info = pixel_gradient_info; | 394 | 62.9k | } |
Unexecuted instantiation: encoder_utils.c:allocate_gradient_info_for_hog Unexecuted instantiation: ethread.c:allocate_gradient_info_for_hog Unexecuted instantiation: superres_scale.c:allocate_gradient_info_for_hog Unexecuted instantiation: svc_layercontext.c:allocate_gradient_info_for_hog Unexecuted instantiation: compound_type.c:allocate_gradient_info_for_hog Unexecuted instantiation: encode_strategy.c:allocate_gradient_info_for_hog |
395 | | |
396 | 131k | static inline void allocate_src_var_of_4x4_sub_block_buf(AV1_COMP *cpi) { |
397 | 131k | if (!is_src_var_for_4x4_sub_blocks_caching_enabled(cpi)) return; |
398 | | |
399 | 37.6k | Block4x4VarInfo *source_variance_info = |
400 | 37.6k | cpi->td.src_var_info_of_4x4_sub_blocks; |
401 | 37.6k | if (!source_variance_info) { |
402 | 37.6k | const AV1_COMMON *const cm = &cpi->common; |
403 | 37.6k | const BLOCK_SIZE sb_size = cm->seq_params->sb_size; |
404 | 37.6k | const int mi_count_in_sb = mi_size_wide[sb_size] * mi_size_high[sb_size]; |
405 | 37.6k | CHECK_MEM_ERROR(cm, source_variance_info, |
406 | 37.6k | aom_malloc(sizeof(*source_variance_info) * mi_count_in_sb)); |
407 | 37.6k | cpi->td.src_var_info_of_4x4_sub_blocks = source_variance_info; |
408 | 37.6k | } |
409 | | |
410 | 37.6k | cpi->td.mb.src_var_info_of_4x4_sub_blocks = source_variance_info; |
411 | 37.6k | } Unexecuted instantiation: av1_cx_iface.c:allocate_src_var_of_4x4_sub_block_buf encoder.c:allocate_src_var_of_4x4_sub_block_buf Line | Count | Source | 396 | 131k | static inline void allocate_src_var_of_4x4_sub_block_buf(AV1_COMP *cpi) { | 397 | 131k | if (!is_src_var_for_4x4_sub_blocks_caching_enabled(cpi)) return; | 398 | | | 399 | 37.6k | Block4x4VarInfo *source_variance_info = | 400 | 37.6k | cpi->td.src_var_info_of_4x4_sub_blocks; | 401 | 37.6k | if (!source_variance_info) { | 402 | 37.6k | const AV1_COMMON *const cm = &cpi->common; | 403 | 37.6k | const BLOCK_SIZE sb_size = cm->seq_params->sb_size; | 404 | 37.6k | const int mi_count_in_sb = mi_size_wide[sb_size] * mi_size_high[sb_size]; | 405 | 37.6k | CHECK_MEM_ERROR(cm, source_variance_info, | 406 | 37.6k | aom_malloc(sizeof(*source_variance_info) * mi_count_in_sb)); | 407 | 37.6k | cpi->td.src_var_info_of_4x4_sub_blocks = source_variance_info; | 408 | 37.6k | } | 409 | | | 410 | 37.6k | cpi->td.mb.src_var_info_of_4x4_sub_blocks = source_variance_info; | 411 | 37.6k | } |
Unexecuted instantiation: encoder_utils.c:allocate_src_var_of_4x4_sub_block_buf Unexecuted instantiation: ethread.c:allocate_src_var_of_4x4_sub_block_buf Unexecuted instantiation: superres_scale.c:allocate_src_var_of_4x4_sub_block_buf Unexecuted instantiation: svc_layercontext.c:allocate_src_var_of_4x4_sub_block_buf Unexecuted instantiation: compound_type.c:allocate_src_var_of_4x4_sub_block_buf Unexecuted instantiation: encode_strategy.c:allocate_src_var_of_4x4_sub_block_buf |
412 | | |
413 | 51.0k | static inline void variance_partition_alloc(AV1_COMP *cpi) { |
414 | 51.0k | AV1_COMMON *const cm = &cpi->common; |
415 | 51.0k | const int num_64x64_blocks = (cm->seq_params->sb_size == BLOCK_64X64) ? 1 : 4; |
416 | 51.0k | if (cpi->td.vt64x64) { |
417 | 14.3k | if (num_64x64_blocks != cpi->td.num_64x64_blocks) { |
418 | 0 | aom_free(cpi->td.vt64x64); |
419 | 0 | cpi->td.vt64x64 = NULL; |
420 | 0 | } |
421 | 14.3k | } |
422 | 51.0k | if (!cpi->td.vt64x64) { |
423 | 36.7k | CHECK_MEM_ERROR(cm, cpi->td.vt64x64, |
424 | 36.7k | aom_malloc(sizeof(*cpi->td.vt64x64) * num_64x64_blocks)); |
425 | 36.7k | cpi->td.num_64x64_blocks = num_64x64_blocks; |
426 | 36.7k | } |
427 | 51.0k | } Unexecuted instantiation: av1_cx_iface.c:variance_partition_alloc encoder.c:variance_partition_alloc Line | Count | Source | 413 | 51.0k | static inline void variance_partition_alloc(AV1_COMP *cpi) { | 414 | 51.0k | AV1_COMMON *const cm = &cpi->common; | 415 | 51.0k | const int num_64x64_blocks = (cm->seq_params->sb_size == BLOCK_64X64) ? 1 : 4; | 416 | 51.0k | if (cpi->td.vt64x64) { | 417 | 14.3k | if (num_64x64_blocks != cpi->td.num_64x64_blocks) { | 418 | 0 | aom_free(cpi->td.vt64x64); | 419 | 0 | cpi->td.vt64x64 = NULL; | 420 | 0 | } | 421 | 14.3k | } | 422 | 51.0k | if (!cpi->td.vt64x64) { | 423 | 36.7k | CHECK_MEM_ERROR(cm, cpi->td.vt64x64, | 424 | 36.7k | aom_malloc(sizeof(*cpi->td.vt64x64) * num_64x64_blocks)); | 425 | 36.7k | cpi->td.num_64x64_blocks = num_64x64_blocks; | 426 | 36.7k | } | 427 | 51.0k | } |
Unexecuted instantiation: encoder_utils.c:variance_partition_alloc Unexecuted instantiation: ethread.c:variance_partition_alloc Unexecuted instantiation: superres_scale.c:variance_partition_alloc Unexecuted instantiation: svc_layercontext.c:variance_partition_alloc Unexecuted instantiation: compound_type.c:variance_partition_alloc Unexecuted instantiation: encode_strategy.c:variance_partition_alloc |
428 | | |
429 | | static inline YV12_BUFFER_CONFIG *realloc_and_scale_source(AV1_COMP *cpi, |
430 | | int scaled_width, |
431 | 0 | int scaled_height) { |
432 | 0 | AV1_COMMON *cm = &cpi->common; |
433 | 0 | const int num_planes = av1_num_planes(cm); |
434 | |
|
435 | 0 | if (scaled_width == cpi->unscaled_source->y_crop_width && |
436 | 0 | scaled_height == cpi->unscaled_source->y_crop_height) { |
437 | 0 | return cpi->unscaled_source; |
438 | 0 | } |
439 | | |
440 | 0 | if (aom_realloc_frame_buffer( |
441 | 0 | &cpi->scaled_source, scaled_width, scaled_height, |
442 | 0 | cm->seq_params->subsampling_x, cm->seq_params->subsampling_y, |
443 | 0 | cm->seq_params->use_highbitdepth, AOM_BORDER_IN_PIXELS, |
444 | 0 | cm->features.byte_alignment, NULL, NULL, NULL, cpi->alloc_pyramid, 0)) |
445 | 0 | aom_internal_error(cm->error, AOM_CODEC_MEM_ERROR, |
446 | 0 | "Failed to reallocate scaled source buffer"); |
447 | 0 | assert(cpi->scaled_source.y_crop_width == scaled_width); |
448 | 0 | assert(cpi->scaled_source.y_crop_height == scaled_height); |
449 | 0 | if (!av1_resize_and_extend_frame_nonnormative( |
450 | 0 | cpi->unscaled_source, &cpi->scaled_source, |
451 | 0 | (int)cm->seq_params->bit_depth, num_planes)) |
452 | 0 | aom_internal_error(cm->error, AOM_CODEC_MEM_ERROR, |
453 | 0 | "Failed to reallocate buffers during resize"); |
454 | 0 | return &cpi->scaled_source; |
455 | 0 | } Unexecuted instantiation: av1_cx_iface.c:realloc_and_scale_source Unexecuted instantiation: encoder.c:realloc_and_scale_source Unexecuted instantiation: encoder_utils.c:realloc_and_scale_source Unexecuted instantiation: ethread.c:realloc_and_scale_source Unexecuted instantiation: superres_scale.c:realloc_and_scale_source Unexecuted instantiation: svc_layercontext.c:realloc_and_scale_source Unexecuted instantiation: compound_type.c:realloc_and_scale_source Unexecuted instantiation: encode_strategy.c:realloc_and_scale_source |
456 | | |
457 | | // Deallocate allocated thread_data. |
// Deallocate allocated thread_data.
// Tears down every worker thread's ThreadData (workers 1..num_workers-1;
// worker 0's data lives in cpi->td and is freed elsewhere). The free order
// is significant: all buffers owned by *td must be released before the
// final aom_free(td).
static inline void free_thread_data(AV1_PRIMARY *ppi) {
  PrimaryMultiThreadInfo *const p_mt_info = &ppi->p_mt_info;
  // Only the first num_tf_workers / num_tpl_workers threads were given
  // temporal-filter / TPL scratch buffers, so only those are freed below.
  const int num_tf_workers =
      AOMMIN(p_mt_info->num_mod_workers[MOD_TF], p_mt_info->num_workers);
  const int num_tpl_workers =
      AOMMIN(p_mt_info->num_mod_workers[MOD_TPL], p_mt_info->num_workers);
  const int is_highbitdepth = ppi->seq_params.use_highbitdepth;
  const int num_planes = ppi->seq_params.monochrome ? 1 : MAX_MB_PLANE;
  for (int t = 1; t < p_mt_info->num_workers; ++t) {
    EncWorkerData *const thread_data = &p_mt_info->tile_thr_data[t];
    // Restore the canonical ThreadData pointer in case thread_data->td was
    // temporarily redirected during multi-threaded encoding.
    thread_data->td = thread_data->original_td;
    ThreadData *const td = thread_data->td;
    if (!td) continue;
    aom_free(td->tctx);
    aom_free(td->palette_buffer);
    aom_free(td->tmp_conv_dst);
    release_compound_type_rd_buffers(&td->comp_rd_buffer);
    for (int j = 0; j < 2; ++j) {
      aom_free(td->tmp_pred_bufs[j]);
    }
    aom_free(td->pixel_gradient_info);
    aom_free(td->src_var_info_of_4x4_sub_blocks);
    release_obmc_buffers(&td->obmc_buffer);
    aom_free(td->vt64x64);

    // Intra block-copy hash buffers (two hash levels).
    for (int x = 0; x < 2; x++) {
      aom_free(td->hash_value_buffer[x]);
      td->hash_value_buffer[x] = NULL;
    }
    aom_free(td->mv_costs_alloc);
    td->mv_costs_alloc = NULL;
    aom_free(td->dv_costs_alloc);
    td->dv_costs_alloc = NULL;
    aom_free(td->counts);
    av1_free_pmc(td->firstpass_ctx, num_planes);
    td->firstpass_ctx = NULL;
    av1_free_shared_coeff_buffer(&td->shared_coeff_buf);
    av1_free_sms_tree(td);
    // This call ensures that the buffers allocated by tf_alloc_and_reset_data()
    // in prepare_tf_workers() for MT encode are freed in case an error is
    // encountered during temporal filtering (due to early termination
    // tf_dealloc_thread_data() in av1_tf_do_filtering_mt() would not be
    // invoked).
    if (t < num_tf_workers) tf_dealloc_data(&td->tf_data, is_highbitdepth);
    // This call ensures that tpl_tmp_buffers for MT encode are freed in case of
    // an error during tpl.
    if (t < num_tpl_workers) tpl_dealloc_temp_buffers(&td->tpl_tmp_buffers);
    // This call ensures that the buffers in gm_data for MT encode are freed in
    // case of an error during gm.
    gm_dealloc_data(&td->gm_data);
    av1_dealloc_mb_data(&td->mb, num_planes);
    aom_free(td->mb.sb_stats_cache);
    td->mb.sb_stats_cache = NULL;
    aom_free(td->mb.sb_fp_stats);
    td->mb.sb_fp_stats = NULL;
#if CONFIG_PARTITION_SEARCH_ORDER
    aom_free(td->mb.rdcost);
    td->mb.rdcost = NULL;
#endif
    av1_free_pc_tree_recursive(td->pc_root, num_planes, 0, 0, SEARCH_PARTITION);
    td->pc_root = NULL;
    av1_dealloc_mb_wiener_var_pred_buf(td);
    // All owned buffers released above; now free the ThreadData itself and
    // clear both aliases so no dangling pointer remains.
    aom_free(td);
    thread_data->td = NULL;
    thread_data->original_td = NULL;
  }
}
Unexecuted instantiation: encoder_utils.c:free_thread_data Unexecuted instantiation: ethread.c:free_thread_data Unexecuted instantiation: superres_scale.c:free_thread_data Unexecuted instantiation: svc_layercontext.c:free_thread_data Unexecuted instantiation: compound_type.c:free_thread_data Unexecuted instantiation: encode_strategy.c:free_thread_data |
525 | | |
526 | | #ifdef __cplusplus |
527 | | } // extern "C" |
528 | | #endif |
529 | | |
530 | | #endif // AOM_AV1_ENCODER_ENCODER_ALLOC_H_ |