Coverage Report

Created: 2025-12-31 07:57

/src/ffmpeg/libavcodec/apv_decode.c
 Line|  Count|Source
    1|       |/*
    2|       | * This file is part of FFmpeg.
    3|       | *
    4|       | * FFmpeg is free software; you can redistribute it and/or
    5|       | * modify it under the terms of the GNU Lesser General Public
    6|       | * License as published by the Free Software Foundation; either
    7|       | * version 2.1 of the License, or (at your option) any later version.
    8|       | *
    9|       | * FFmpeg is distributed in the hope that it will be useful,
   10|       | * but WITHOUT ANY WARRANTY; without even the implied warranty of
   11|       | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   12|       | * Lesser General Public License for more details.
   13|       | *
   14|       | * You should have received a copy of the GNU Lesser General Public
   15|       | * License along with FFmpeg; if not, write to the Free Software
   16|       | * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
   17|       | */
   18|       |
   19|       |#include <stdatomic.h>
   20|       |
   21|       |#include "libavutil/attributes.h"
   22|       |#include "libavutil/mastering_display_metadata.h"
   23|       |#include "libavutil/mem_internal.h"
   24|       |#include "libavutil/pixdesc.h"
   25|       |#include "libavutil/thread.h"
   26|       |
   27|       |#include "apv.h"
   28|       |#include "apv_decode.h"
   29|       |#include "apv_dsp.h"
   30|       |#include "avcodec.h"
   31|       |#include "cbs.h"
   32|       |#include "cbs_apv.h"
   33|       |#include "codec_internal.h"
   34|       |#include "decode.h"
   35|       |#include "internal.h"
   36|       |#include "thread.h"
   37|       |
   38|       |
   39|       |typedef struct APVDerivedTileInfo {
   40|       |    uint8_t  tile_cols;
   41|       |    uint8_t  tile_rows;
   42|       |    uint16_t num_tiles;
   43|       |    // The spec uses an extra element on the end of these arrays
   44|       |    // not corresponding to any tile.
   45|       |    uint16_t col_starts[APV_MAX_TILE_COLS + 1];
   46|       |    uint16_t row_starts[APV_MAX_TILE_ROWS + 1];
   47|       |} APVDerivedTileInfo;
   48|       |
   49|       |typedef struct APVDecodeContext {
   50|       |    CodedBitstreamContext *cbc;
   51|       |    APVDSPContext dsp;
   52|       |
   53|       |    CodedBitstreamFragment au;
   54|       |    APVDerivedTileInfo tile_info;
   55|       |
   56|       |    AVPacket *pkt;
   57|       |    AVFrame *output_frame;
   58|       |    atomic_int tile_errors;
   59|       |
   60|       |    int nb_unit;
   61|       |
   62|       |    uint8_t warned_additional_frames;
   63|       |    uint8_t warned_unknown_pbu_types;
   64|       |} APVDecodeContext;
   65|       |
   66|       |static const enum AVPixelFormat apv_format_table[5][5] = {
   67|       |    { AV_PIX_FMT_GRAY8,    AV_PIX_FMT_GRAY10,     AV_PIX_FMT_GRAY12,     AV_PIX_FMT_GRAY14,    AV_PIX_FMT_GRAY16    },
   68|       |    { 0 }, // 4:2:0 is not valid.
   69|       |    { AV_PIX_FMT_YUV422P,  AV_PIX_FMT_YUV422P10,  AV_PIX_FMT_YUV422P12,  AV_PIX_FMT_YUV422P14, AV_PIX_FMT_YUV422P16 },
   70|       |    { AV_PIX_FMT_YUV444P,  AV_PIX_FMT_YUV444P10,  AV_PIX_FMT_YUV444P12,  AV_PIX_FMT_YUV444P14, AV_PIX_FMT_YUV444P16 },
   71|       |    { AV_PIX_FMT_YUVA444P, AV_PIX_FMT_YUVA444P10, AV_PIX_FMT_YUVA444P12, 0,                    AV_PIX_FMT_YUVA444P16 },
   72|       |};
   73|       |
   74|       |static APVVLCLUT decode_lut;
   75|       |
   76|       |static int apv_decode_check_format(AVCodecContext *avctx,
   77|       |                                   const APVRawFrameHeader *header)
   78|  20.3k|{
   79|  20.3k|    int err, bit_depth;
   80|       |
   81|  20.3k|    avctx->profile = header->frame_info.profile_idc;
   82|  20.3k|    avctx->level   = header->frame_info.level_idc;
   83|       |
   84|  20.3k|    bit_depth = header->frame_info.bit_depth_minus8 + 8;
   85|  20.3k|    if (bit_depth < 8 || bit_depth > 16 || bit_depth % 2) {
   86|  1.74k|        avpriv_request_sample(avctx, "Bit depth %d", bit_depth);
   87|  1.74k|        return AVERROR_PATCHWELCOME;
   88|  1.74k|    }
   89|  18.6k|    avctx->pix_fmt =
   90|  18.6k|        apv_format_table[header->frame_info.chroma_format_idc][bit_depth - 4 >> 2];
   91|       |
   92|  18.6k|    if (!avctx->pix_fmt) {
   93|    242|        avpriv_request_sample(avctx, "YUVA444P14");
   94|    242|        return AVERROR_PATCHWELCOME;
   95|    242|    }
   96|       |
   97|  18.4k|    err = ff_set_dimensions(avctx,
   98|  18.4k|                            FFALIGN(header->frame_info.frame_width,  16),
   99|  18.4k|                            FFALIGN(header->frame_info.frame_height, 16));
  100|  18.4k|    if (err < 0) {
  101|       |        // Unsupported frame size.
  102|  2.53k|        return err;
  103|  2.53k|    }
  104|  15.8k|    avctx->width  = header->frame_info.frame_width;
  105|  15.8k|    avctx->height = header->frame_info.frame_height;
  106|       |
  107|  15.8k|    avctx->sample_aspect_ratio = (AVRational){ 1, 1 };
  108|       |
  109|  15.8k|    avctx->color_primaries = header->color_primaries;
  110|  15.8k|    avctx->color_trc       = header->transfer_characteristics;
  111|  15.8k|    avctx->colorspace      = header->matrix_coefficients;
  112|  15.8k|    avctx->color_range     = header->full_range_flag ? AVCOL_RANGE_JPEG
  113|  15.8k|                                                     : AVCOL_RANGE_MPEG;
  114|  15.8k|    avctx->chroma_sample_location = AVCHROMA_LOC_TOPLEFT;
  115|       |
  116|  15.8k|    avctx->refs = 0;
  117|  15.8k|    avctx->has_b_frames = 0;
  118|       |
  119|  15.8k|    return 0;
  120|  18.4k|}
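
Aside: the apv_format_table lookup above pairs a chroma_format_idc row (0 = 4:0:0, 1 = 4:2:0 which is invalid, 2 = 4:2:2, 3 = 4:4:4, 4 = 4:4:4:4) with a bit-depth column. The standalone sketch below is illustrative only (apv_format_name is a hypothetical helper, not decoder code); it assumes the column is selected as (bit_depth - 8) / 2 for the even depths 8..16, which matches the declared column order, whereas the listing itself computes the index as bit_depth - 4 >> 2.

    #include <stdio.h>

    /* Hypothetical helper, for illustration only: shows how the rows and
     * columns of apv_format_table are laid out.  Rows follow
     * chroma_format_idc, columns follow the even bit depths 8..16,
     * with the column assumed to be (bit_depth - 8) / 2. */
    static const char *apv_format_name(int chroma_format_idc, int bit_depth)
    {
        static const char *const names[5][5] = {
            { "gray8",    "gray10",     "gray12",     "gray14",     "gray16"     },
            { NULL }, /* 4:2:0 is not valid. */
            { "yuv422p",  "yuv422p10",  "yuv422p12",  "yuv422p14",  "yuv422p16"  },
            { "yuv444p",  "yuv444p10",  "yuv444p12",  "yuv444p14",  "yuv444p16"  },
            { "yuva444p", "yuva444p10", "yuva444p12", NULL,         "yuva444p16" },
        };
        if (chroma_format_idc < 0 || chroma_format_idc > 4 ||
            bit_depth < 8 || bit_depth > 16 || bit_depth % 2)
            return NULL;
        return names[chroma_format_idc][(bit_depth - 8) / 2];
    }

    int main(void)
    {
        const char *fourteen_bit_alpha = apv_format_name(4, 14);
        printf("4:2:2 10-bit   -> %s\n", apv_format_name(2, 10)); /* yuv422p10 */
        printf("4:4:4:4 14-bit -> %s\n",
               fourteen_bit_alpha ? fourteen_bit_alpha : "unsupported");
        return 0;
    }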
  121|       |
  122|       |static const CodedBitstreamUnitType apv_decompose_unit_types[] = {
  123|       |    APV_PBU_PRIMARY_FRAME,
  124|       |    APV_PBU_METADATA,
  125|       |};
  126|       |
  127|       |static AVOnce apv_entropy_once = AV_ONCE_INIT;
  128|       |
  129|       |static av_cold void apv_entropy_build_decode_lut(void)
  130|      1|{
  131|      1|    ff_apv_entropy_build_decode_lut(&decode_lut);
  132|      1|}
  133|       |
  134|       |static av_cold int apv_decode_init(AVCodecContext *avctx)
  135|  2.95k|{
  136|  2.95k|    APVDecodeContext *apv = avctx->priv_data;
  137|  2.95k|    int err;
  138|       |
  139|  2.95k|    ff_thread_once(&apv_entropy_once, apv_entropy_build_decode_lut);
  140|       |
  141|  2.95k|    err = ff_cbs_init(&apv->cbc, AV_CODEC_ID_APV, avctx);
  142|  2.95k|    if (err < 0)
  143|      0|        return err;
  144|       |
  145|  2.95k|    apv->cbc->decompose_unit_types =
  146|  2.95k|        apv_decompose_unit_types;
  147|  2.95k|    apv->cbc->nb_decompose_unit_types =
  148|  2.95k|        FF_ARRAY_ELEMS(apv_decompose_unit_types);
  149|       |
  150|       |    // Extradata could be set here, but is ignored by the decoder.
  151|       |
  152|  2.95k|    apv->pkt = avctx->internal->in_pkt;
  153|  2.95k|    ff_apv_dsp_init(&apv->dsp);
  154|       |
  155|  2.95k|    atomic_init(&apv->tile_errors, 0);
  156|       |
  157|  2.95k|    return 0;
  158|  2.95k|}
  159|       |
  160|       |static av_cold void apv_decode_flush(AVCodecContext *avctx)
  161|   164k|{
  162|   164k|    APVDecodeContext *apv = avctx->priv_data;
  163|       |
  164|   164k|    apv->nb_unit = 0;
  165|   164k|    av_packet_unref(apv->pkt);
  166|   164k|    ff_cbs_fragment_reset(&apv->au);
  167|   164k|    ff_cbs_flush(apv->cbc);
  168|   164k|}
  169|       |
  170|       |static av_cold int apv_decode_close(AVCodecContext *avctx)
  171|  2.95k|{
  172|  2.95k|    APVDecodeContext *apv = avctx->priv_data;
  173|       |
  174|  2.95k|    ff_cbs_fragment_free(&apv->au);
  175|  2.95k|    ff_cbs_close(&apv->cbc);
  176|       |
  177|  2.95k|    return 0;
  178|  2.95k|}
  179|       |
  180|       |static int apv_decode_block(AVCodecContext *avctx,
  181|       |                            void *output,
  182|       |                            ptrdiff_t pitch,
  183|       |                            GetBitContext *gbc,
  184|       |                            APVEntropyState *entropy_state,
  185|       |                            int bit_depth,
  186|       |                            int qp_shift,
  187|       |                            const uint16_t *qmatrix)
  188|  7.68M|{
  189|  7.68M|    APVDecodeContext *apv = avctx->priv_data;
  190|  7.68M|    int err;
  191|       |
  192|  7.68M|    LOCAL_ALIGNED_32(int16_t, coeff, [64]);
  193|  7.68M|    memset(coeff, 0, 64 * sizeof(int16_t));
  194|       |
  195|  7.68M|    err = ff_apv_entropy_decode_block(coeff, gbc, entropy_state);
  196|  7.68M|    if (err < 0)
  197|  11.8k|        return err;
  198|       |
  199|  7.67M|    apv->dsp.decode_transquant(output, pitch,
  200|  7.67M|                               coeff, qmatrix,
  201|  7.67M|                               bit_depth, qp_shift);
  202|       |
  203|  7.67M|    return 0;
  204|  7.68M|}
  205|       |
  206|       |static int apv_decode_tile_component(AVCodecContext *avctx, void *data,
  207|       |                                     int job, int thread)
  208|  22.7k|{
  209|  22.7k|    APVRawFrame                      *input = data;
  210|  22.7k|    APVDecodeContext                   *apv = avctx->priv_data;
  211|  22.7k|    const CodedBitstreamAPVContext *apv_cbc = apv->cbc->priv_data;
  212|  22.7k|    const APVDerivedTileInfo     *tile_info = &apv->tile_info;
  213|       |
  214|  22.7k|    int tile_index = job / apv_cbc->num_comp;
  215|  22.7k|    int comp_index = job % apv_cbc->num_comp;
  216|       |
  217|  22.7k|    const AVPixFmtDescriptor *pix_fmt_desc =
  218|  22.7k|        av_pix_fmt_desc_get(avctx->pix_fmt);
  219|       |
  220|  22.7k|    int sub_w_shift = comp_index == 0 ? 0 : pix_fmt_desc->log2_chroma_w;
  221|  22.7k|    int sub_h_shift = comp_index == 0 ? 0 : pix_fmt_desc->log2_chroma_h;
  222|       |
  223|  22.7k|    APVRawTile *tile = &input->tile[tile_index];
  224|       |
  225|  22.7k|    int tile_y = tile_index / tile_info->tile_cols;
  226|  22.7k|    int tile_x = tile_index % tile_info->tile_cols;
  227|       |
  228|  22.7k|    int tile_start_x = tile_info->col_starts[tile_x];
  229|  22.7k|    int tile_start_y = tile_info->row_starts[tile_y];
  230|       |
  231|  22.7k|    int tile_width  = tile_info->col_starts[tile_x + 1] - tile_start_x;
  232|  22.7k|    int tile_height = tile_info->row_starts[tile_y + 1] - tile_start_y;
  233|       |
  234|  22.7k|    int tile_mb_width  = tile_width  / APV_MB_WIDTH;
  235|  22.7k|    int tile_mb_height = tile_height / APV_MB_HEIGHT;
  236|       |
  237|  22.7k|    int blk_mb_width  = 2 >> sub_w_shift;
  238|  22.7k|    int blk_mb_height = 2 >> sub_h_shift;
  239|       |
  240|  22.7k|    int bit_depth;
  241|  22.7k|    int qp_shift;
  242|  22.7k|    LOCAL_ALIGNED_32(uint16_t, qmatrix_scaled, [64]);
  243|       |
  244|  22.7k|    GetBitContext gbc;
  245|       |
  246|  22.7k|    APVEntropyState entropy_state = {
  247|  22.7k|        .log_ctx           = avctx,
  248|  22.7k|        .decode_lut        = &decode_lut,
  249|  22.7k|        .prev_dc           = 0,
  250|  22.7k|        .prev_k_dc         = 5,
  251|  22.7k|        .prev_k_level      = 0,
  252|  22.7k|    };
  253|       |
  254|  22.7k|    int err;
  255|       |
  256|  22.7k|    err = init_get_bits8(&gbc, tile->tile_data[comp_index],
  257|  22.7k|                         tile->tile_header.tile_data_size[comp_index]);
  258|  22.7k|    if (err < 0)
  259|    194|        goto fail;
  260|       |
  261|       |    // Combine the bitstream quantisation matrix with the qp scaling
  262|       |    // in advance.  (Including qp_shift as well would overflow 16 bits.)
  263|       |    // Fix the row ordering at the same time.
  264|  22.5k|    {
  265|  22.5k|        static const uint8_t apv_level_scale[6] = { 40, 45, 51, 57, 64, 71 };
  266|  22.5k|        int qp = tile->tile_header.tile_qp[comp_index];
  267|  22.5k|        int level_scale = apv_level_scale[qp % 6];
  268|       |
  269|  22.5k|        bit_depth = apv_cbc->bit_depth;
  270|  22.5k|        qp_shift  = qp / 6;
  271|       |
  272|   202k|        for (int y = 0; y < 8; y++) {
  273|  1.62M|            for (int x = 0; x < 8; x++)
  274|  1.44M|                qmatrix_scaled[y * 8 + x] = level_scale *
  275|  1.44M|                    input->frame_header.quantization_matrix.q_matrix[comp_index][x][y];
  276|   180k|        }
  277|  22.5k|    }
  278|       |
  279|   155k|    for (int mb_y = 0; mb_y < tile_mb_height; mb_y++) {
  280|  2.22M|        for (int mb_x = 0; mb_x < tile_mb_width; mb_x++) {
  281|  6.24M|            for (int blk_y = 0; blk_y < blk_mb_height; blk_y++) {
  282|  11.8M|                for (int blk_x = 0; blk_x < blk_mb_width; blk_x++) {
  283|  7.68M|                    int frame_y = (tile_start_y +
  284|  7.68M|                                   APV_MB_HEIGHT * mb_y +
  285|  7.68M|                                   APV_TR_SIZE * blk_y) >> sub_h_shift;
  286|  7.68M|                    int frame_x = (tile_start_x +
  287|  7.68M|                                   APV_MB_WIDTH * mb_x +
  288|  7.68M|                                   APV_TR_SIZE * blk_x) >> sub_w_shift;
  289|       |
  290|  7.68M|                    ptrdiff_t frame_pitch = apv->output_frame->linesize[comp_index];
  291|  7.68M|                    uint8_t  *block_start = apv->output_frame->data[comp_index] +
  292|  7.68M|                                            frame_y * frame_pitch + 2 * frame_x;
  293|       |
  294|  7.68M|                    err = apv_decode_block(avctx,
  295|  7.68M|                                           block_start, frame_pitch,
  296|  7.68M|                                           &gbc, &entropy_state,
  297|  7.68M|                                           bit_depth,
  298|  7.68M|                                           qp_shift,
  299|  7.68M|                                           qmatrix_scaled);
  300|  7.68M|                    if (err < 0) {
  301|       |                        // Error in block decode means entropy desync,
  302|       |                        // so this is not recoverable.
  303|  11.8k|                        goto fail;
  304|  11.8k|                    }
  305|  7.68M|                }
  306|  4.17M|            }
  307|  2.08M|        }
  308|   144k|    }
  309|       |
  310|  10.6k|    av_log(avctx, AV_LOG_DEBUG,
  311|  10.6k|           "Decoded tile %d component %d: %dx%d MBs starting at (%d,%d)\n",
  312|  10.6k|           tile_index, comp_index, tile_mb_width, tile_mb_height,
  313|  10.6k|           tile_start_x, tile_start_y);
  314|       |
  315|  10.6k|    return 0;
  316|       |
  317|  12.0k|fail:
  318|  12.0k|    av_log(avctx, AV_LOG_VERBOSE,
  319|  12.0k|           "Decode error in tile %d component %d.\n",
  320|  12.0k|           tile_index, comp_index);
  321|  12.0k|    atomic_fetch_add_explicit(&apv->tile_errors, 1, memory_order_relaxed);
  322|  12.0k|    return err;
  323|  22.5k|}
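
Aside: the quantisation block above folds the per-tile QP into the matrix in two parts, a level_scale multiplier taken from qp % 6 and a later shift by qp / 6. A minimal standalone sketch of that arithmetic (scale_qmatrix and the flat example matrix are illustrative, not decoder API):

    #include <stdint.h>
    #include <stdio.h>

    /* Illustrative only: mirrors the qmatrix scaling block in
     * apv_decode_tile_component.  level_scale comes from qp % 6 and the
     * remaining power-of-two factor is applied later as qp_shift = qp / 6. */
    static void scale_qmatrix(uint16_t dst[64], const uint8_t src[8][8], int qp)
    {
        static const uint8_t apv_level_scale[6] = { 40, 45, 51, 57, 64, 71 };
        int level_scale = apv_level_scale[qp % 6];

        for (int y = 0; y < 8; y++)
            for (int x = 0; x < 8; x++)
                dst[y * 8 + x] = level_scale * src[x][y]; /* note transposed read */
    }

    int main(void)
    {
        uint8_t  q[8][8];
        uint16_t scaled[64];

        for (int y = 0; y < 8; y++)
            for (int x = 0; x < 8; x++)
                q[y][x] = 16; /* flat example matrix */

        scale_qmatrix(scaled, q, 27);  /* qp 27: 27 % 6 = 3 -> scale 57 */
        printf("level_scale applied: %d, qp_shift would be %d\n",
               scaled[0] / 16, 27 / 6);
        return 0;
    }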
  324|       |
  325|       |static void apv_derive_tile_info(APVDerivedTileInfo *ti,
  326|       |                                 const APVRawFrameHeader *fh)
  327|  15.6k|{
  328|  15.6k|    int frame_width_in_mbs  = (fh->frame_info.frame_width  + (APV_MB_WIDTH  - 1)) >> 4;
  329|  15.6k|    int frame_height_in_mbs = (fh->frame_info.frame_height + (APV_MB_HEIGHT - 1)) >> 4;
  330|  15.6k|    int start_mb, i;
  331|       |
  332|  15.6k|    start_mb = 0;
  333|  31.3k|    for (i = 0; start_mb < frame_width_in_mbs; i++) {
  334|  15.6k|        ti->col_starts[i] = start_mb * APV_MB_WIDTH;
  335|  15.6k|        start_mb += fh->tile_info.tile_width_in_mbs;
  336|  15.6k|    }
  337|  15.6k|    ti->col_starts[i] = frame_width_in_mbs * APV_MB_WIDTH;
  338|  15.6k|    ti->tile_cols = i;
  339|       |
  340|  15.6k|    start_mb = 0;
  341|  31.3k|    for (i = 0; start_mb < frame_height_in_mbs; i++) {
  342|  15.6k|        ti->row_starts[i] = start_mb * APV_MB_HEIGHT;
  343|  15.6k|        start_mb += fh->tile_info.tile_height_in_mbs;
  344|  15.6k|    }
  345|  15.6k|    ti->row_starts[i] = frame_height_in_mbs * APV_MB_HEIGHT;
  346|  15.6k|    ti->tile_rows = i;
  347|       |
  348|  15.6k|    ti->num_tiles = ti->tile_cols * ti->tile_rows;
  349|  15.6k|}
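
Aside: apv_derive_tile_info walks the frame one tile stride at a time and appends a sentinel entry, so tile widths can later be taken as differences of consecutive col_starts. A standalone sketch of the same derivation with example numbers (the 3840-pixel width and 16-macroblock tile stride are made up for illustration):

    #include <stdio.h>

    /* Illustrative re-derivation of the tile column grid, matching the loops
     * in apv_derive_tile_info.  16 is the APV macroblock size. */
    int main(void)
    {
        int frame_width = 3840, tile_width_in_mbs = 16; /* example values */
        int frame_width_in_mbs = (frame_width + 15) >> 4;
        int col_starts[64];
        int i, start_mb = 0;

        for (i = 0; start_mb < frame_width_in_mbs; i++) {
            col_starts[i] = start_mb * 16;
            start_mb += tile_width_in_mbs;
        }
        col_starts[i] = frame_width_in_mbs * 16; /* sentinel column, as in the spec */

        printf("tile_cols = %d\n", i);           /* 240 MBs in 16-MB strides -> 15 tiles */
        for (int c = 0; c <= i; c++)
            printf("col_starts[%d] = %d\n", c, col_starts[c]);
        return 0;
    }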
  350|       |
  351|       |static int apv_decode(AVCodecContext *avctx, AVFrame *output,
  352|       |                      APVRawFrame *input)
  353|  20.3k|{
  354|  20.3k|    APVDecodeContext                   *apv = avctx->priv_data;
  355|  20.3k|    const AVPixFmtDescriptor          *desc = NULL;
  356|  20.3k|    APVDerivedTileInfo           *tile_info = &apv->tile_info;
  357|  20.3k|    int err, job_count;
  358|       |
  359|  20.3k|    err = apv_decode_check_format(avctx, &input->frame_header);
  360|  20.3k|    if (err < 0) {
  361|  4.52k|        av_log(avctx, AV_LOG_ERROR, "Unsupported format parameters.\n");
  362|  4.52k|        return err;
  363|  4.52k|    }
  364|       |
  365|  15.8k|    if (avctx->skip_frame == AVDISCARD_ALL)
  366|    204|        return 0;
  367|       |
  368|  15.6k|    desc = av_pix_fmt_desc_get(avctx->pix_fmt);
  369|  15.6k|    av_assert0(desc);
  370|       |
  371|  15.6k|    err = ff_thread_get_buffer(avctx, output, 0);
  372|  15.6k|    if (err < 0)
  373|      0|        return err;
  374|       |
  375|  15.6k|    apv->output_frame = output;
  376|  15.6k|    atomic_store_explicit(&apv->tile_errors, 0, memory_order_relaxed);
  377|       |
  378|  15.6k|    apv_derive_tile_info(tile_info, &input->frame_header);
  379|       |
  380|       |    // Each component within a tile is independent of every other,
  381|       |    // so we can decode all in parallel.
  382|  15.6k|    job_count = tile_info->num_tiles * desc->nb_components;
  383|       |
  384|  15.6k|    avctx->execute2(avctx, apv_decode_tile_component,
  385|  15.6k|                    input, NULL, job_count);
  386|       |
  387|  15.6k|    err = atomic_load_explicit(&apv->tile_errors, memory_order_relaxed);
  388|  15.6k|    if (err > 0) {
  389|  8.04k|        av_log(avctx, AV_LOG_ERROR,
  390|  8.04k|               "Decode errors in %d tile components.\n", err);
  391|  8.04k|        if (avctx->flags & AV_CODEC_FLAG_OUTPUT_CORRUPT) {
  392|       |            // Output the frame anyway.
  393|      0|            output->flags |= AV_FRAME_FLAG_CORRUPT;
  394|  8.04k|        } else {
  395|  8.04k|            return AVERROR_INVALIDDATA;
  396|  8.04k|        }
  397|  8.04k|    }
  398|       |
  399|  7.62k|    return 0;
  400|  15.6k|}
  401|       |
  402|       |static int apv_decode_metadata(AVCodecContext *avctx, AVFrame *frame,
  403|       |                               const APVRawMetadata *md)
  404|  2.15k|{
  405|  2.15k|    int err;
  406|       |
  407|  14.5k|    for (int i = 0; i < md->metadata_count; i++) {
  408|  12.4k|        const APVRawMetadataPayload *pl = &md->payloads[i];
  409|       |
  410|  12.4k|        switch (pl->payload_type) {
  411|  1.87k|        case APV_METADATA_MDCV:
  412|  1.87k|            {
  413|  1.87k|                const APVRawMetadataMDCV *mdcv = &pl->mdcv;
  414|  1.87k|                AVMasteringDisplayMetadata *mdm;
  415|       |
  416|  1.87k|                err = ff_decode_mastering_display_new(avctx, frame, &mdm);
  417|  1.87k|                if (err < 0)
  418|      0|                    return err;
  419|       |
  420|  1.87k|                if (mdm) {
  421|  7.50k|                    for (int j = 0; j < 3; j++) {
  422|  5.62k|                        mdm->display_primaries[j][0] =
  423|  5.62k|                            av_make_q(mdcv->primary_chromaticity_x[j], 1 << 16);
  424|  5.62k|                        mdm->display_primaries[j][1] =
  425|  5.62k|                            av_make_q(mdcv->primary_chromaticity_y[j], 1 << 16);
  426|  5.62k|                    }
  427|       |
  428|  1.87k|                    mdm->white_point[0] =
  429|  1.87k|                        av_make_q(mdcv->white_point_chromaticity_x, 1 << 16);
  430|  1.87k|                    mdm->white_point[1] =
  431|  1.87k|                        av_make_q(mdcv->white_point_chromaticity_y, 1 << 16);
  432|       |
  433|  1.87k|                    mdm->max_luminance =
  434|  1.87k|                        av_make_q(mdcv->max_mastering_luminance, 1 << 8);
  435|  1.87k|                    mdm->min_luminance =
  436|  1.87k|                        av_make_q(mdcv->min_mastering_luminance, 1 << 14);
  437|       |
  438|  1.87k|                    mdm->has_primaries = 1;
  439|  1.87k|                    mdm->has_luminance = 1;
  440|  1.87k|                }
  441|  1.87k|            }
  442|      0|            break;
  443|  2.49k|        case APV_METADATA_CLL:
  444|  2.49k|            {
  445|  2.49k|                const APVRawMetadataCLL *cll = &pl->cll;
  446|  2.49k|                AVContentLightMetadata *clm;
  447|       |
  448|  2.49k|                err = ff_decode_content_light_new(avctx, frame, &clm);
  449|  2.49k|                if (err < 0)
  450|      0|                    return err;
  451|       |
  452|  2.49k|                if (clm) {
  453|  2.49k|                    clm->MaxCLL  = cll->max_cll;
  454|  2.49k|                    clm->MaxFALL = cll->max_fall;
  455|  2.49k|                }
  456|  2.49k|            }
  457|      0|            break;
  458|  8.03k|        default:
  459|       |            // Ignore other types of metadata.
  460|  8.03k|            break;
  461|  12.4k|        }
  462|  12.4k|    }
  463|       |
  464|  2.15k|    return 0;
  465|  2.15k|}
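
Aside: the av_make_q denominators above imply the MDCV payload carries chromaticities in 0.16 fixed point and the mastering luminances in 1/256 and 1/16384 units. A tiny worked example with made-up payload values (not from any real stream):

    #include <stdio.h>
    #include <libavutil/rational.h>

    int main(void)
    {
        unsigned white_x = 20493;  /* ~0.3127 in 0.16 fixed point      */
        unsigned max_lum = 256000; /* 1000 cd/m^2 in 1/256 units       */
        unsigned min_lum = 82;     /* ~0.005 cd/m^2 in 1/16384 units   */

        AVRational wx  = av_make_q(white_x, 1 << 16);
        AVRational max = av_make_q(max_lum, 1 << 8);
        AVRational min = av_make_q(min_lum, 1 << 14);

        printf("white x ~ %.4f, luminance %.4f .. %.1f cd/m^2\n",
               av_q2d(wx), av_q2d(min), av_q2d(max));
        return 0;
    }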
  466|       |
  467|       |static int apv_receive_frame_internal(AVCodecContext *avctx, AVFrame *frame)
  468|  98.6k|{
  469|  98.6k|    APVDecodeContext      *apv = avctx->priv_data;
  470|  98.6k|    CodedBitstreamFragment *au = &apv->au;
  471|  98.6k|    int i, err;
  472|       |
  473|   107k|    for (i = apv->nb_unit; i < au->nb_units; i++) {
  474|  29.7k|        CodedBitstreamUnit *pbu = &au->units[i];
  475|       |
  476|  29.7k|        switch (pbu->type) {
  477|  20.3k|        case APV_PBU_PRIMARY_FRAME:
  478|  20.3k|            err = apv_decode(avctx, frame, pbu->content);
  479|  20.3k|            i++;
  480|  20.3k|            goto end;
  481|  2.15k|        case APV_PBU_METADATA:
  482|  2.15k|            apv_decode_metadata(avctx, frame, pbu->content);
  483|  2.15k|            break;
  484|    262|        case APV_PBU_NON_PRIMARY_FRAME:
  485|    895|        case APV_PBU_PREVIEW_FRAME:
  486|  2.28k|        case APV_PBU_DEPTH_FRAME:
  487|  2.48k|        case APV_PBU_ALPHA_FRAME:
  488|  2.48k|            if (!avctx->internal->is_copy &&
  489|  2.48k|                !apv->warned_additional_frames) {
  490|     78|                av_log(avctx, AV_LOG_WARNING,
  491|     78|                       "Stream contains additional non-primary frames "
  492|     78|                       "which will be ignored by the decoder.\n");
  493|     78|                apv->warned_additional_frames = 1;
  494|     78|            }
  495|  2.48k|            break;
  496|    950|        case APV_PBU_ACCESS_UNIT_INFORMATION:
  497|  1.16k|        case APV_PBU_FILLER:
  498|       |            // Not relevant to the decoder.
  499|  1.16k|            break;
  500|  3.57k|        default:
  501|  3.57k|            if (!avctx->internal->is_copy &&
  502|  3.57k|                !apv->warned_unknown_pbu_types) {
  503|    134|                av_log(avctx, AV_LOG_WARNING,
  504|    134|                       "Stream contains PBUs with unknown types "
  505|    134|                       "which will be ignored by the decoder.\n");
  506|    134|                apv->warned_unknown_pbu_types = 1;
  507|    134|            }
  508|  3.57k|            break;
  509|  29.7k|        }
  510|  29.7k|    }
  511|       |
  512|  78.2k|    err = AVERROR(EAGAIN);
  513|  98.6k|end:
  514|  98.6k|    av_assert0(i <= apv->au.nb_units);
  515|  98.6k|    apv->nb_unit = i;
  516|       |
  517|  98.6k|    if ((err < 0 && err != AVERROR(EAGAIN)) || apv->au.nb_units == i) {
  518|  97.0k|        av_packet_unref(apv->pkt);
  519|  97.0k|        ff_cbs_fragment_reset(&apv->au);
  520|  97.0k|        apv->nb_unit = 0;
  521|  97.0k|    }
  522|  98.6k|    if (!err && !frame->buf[0])
  523|    204|        err = AVERROR(EAGAIN);
  524|       |
  525|  98.6k|    return err;
  526|  98.6k|}
  527|       |
  528|       |static int apv_receive_frame(AVCodecContext *avctx, AVFrame *frame)
  529|   423k|{
  530|   423k|    APVDecodeContext *apv = avctx->priv_data;
  531|   423k|    int err;
  532|       |
  533|   501k|    do {
  534|   501k|        if (!apv->au.nb_units) {
  535|   500k|            err = ff_decode_get_packet(avctx, apv->pkt);
  536|   500k|            if (err < 0)
  537|   163k|                return err;
  538|       |
  539|   337k|            err = ff_cbs_read_packet(apv->cbc, &apv->au, apv->pkt);
  540|   337k|            if (err < 0) {
  541|   239k|                ff_cbs_fragment_reset(&apv->au);
  542|   239k|                av_packet_unref(apv->pkt);
  543|   239k|                av_log(avctx, AV_LOG_ERROR, "Failed to read packet.\n");
  544|   239k|                return err;
  545|   239k|            }
  546|       |
  547|  97.4k|            apv->nb_unit = 0;
  548|  97.4k|            av_log(avctx, AV_LOG_DEBUG, "Total PBUs on this packet: %d.\n",
  549|  97.4k|                   apv->au.nb_units);
  550|  97.4k|        }
  551|       |
  552|  98.6k|        err = apv_receive_frame_internal(avctx, frame);
  553|  98.6k|    } while (err == AVERROR(EAGAIN));
  554|       |
  555|  20.1k|    return err;
  556|   423k|}
  557|       |
  558|       |const FFCodec ff_apv_decoder = {
  559|       |    .p.name                = "apv",
  560|       |    CODEC_LONG_NAME("Advanced Professional Video"),
  561|       |    .p.type                = AVMEDIA_TYPE_VIDEO,
  562|       |    .p.id                  = AV_CODEC_ID_APV,
  563|       |    .priv_data_size        = sizeof(APVDecodeContext),
  564|       |    .init                  = apv_decode_init,
  565|       |    .flush                 = apv_decode_flush,
  566|       |    .close                 = apv_decode_close,
  567|       |    FF_CODEC_RECEIVE_FRAME_CB(apv_receive_frame),
  568|       |    .p.capabilities        = AV_CODEC_CAP_DR1 |
  569|       |                             AV_CODEC_CAP_SLICE_THREADS |
  570|       |                             AV_CODEC_CAP_FRAME_THREADS,
  571|       |    .caps_internal         = FF_CODEC_CAP_SKIP_FRAME_FILL_PARAM,
  572|       |};
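
Aside: since ff_apv_decoder registers a standard receive-frame callback, it can be exercised through the public libavcodec API. A minimal sketch, assuming the caller already has a demuxed APV packet and an allocated AVFrame (error handling trimmed; in a real program the packets would come from libavformat):

    #include <libavcodec/avcodec.h>

    /* Illustrative only: decode a single already-demuxed APV access unit. */
    static int decode_one_apv_packet(const AVPacket *pkt, AVFrame *frame)
    {
        const AVCodec *codec = avcodec_find_decoder(AV_CODEC_ID_APV);
        AVCodecContext *ctx;
        int ret;

        if (!codec)
            return AVERROR_DECODER_NOT_FOUND;
        ctx = avcodec_alloc_context3(codec);
        if (!ctx)
            return AVERROR(ENOMEM);

        ret = avcodec_open2(ctx, codec, NULL);
        if (ret >= 0)
            ret = avcodec_send_packet(ctx, pkt);
        if (ret >= 0)
            ret = avcodec_receive_frame(ctx, frame);

        avcodec_free_context(&ctx);
        return ret;
    }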