Line | Count | Source (jump to first uncovered line) |
1 | | // Copyright 2019 Joe Drago. All rights reserved. |
2 | | // SPDX-License-Identifier: BSD-2-Clause |
3 | | |
4 | | #include "avif/internal.h" |
5 | | |
6 | | #include <assert.h> |
7 | | #include <ctype.h> |
8 | | #include <inttypes.h> |
9 | | #include <limits.h> |
10 | | #include <math.h> |
11 | | #include <stdio.h> |
12 | | #include <string.h> |
13 | | |
14 | | #define AUXTYPE_SIZE 64 |
15 | | #define CONTENTTYPE_SIZE 64 |
16 | | |
17 | | // class VisualSampleEntry(codingname) extends SampleEntry(codingname) { |
18 | | // unsigned int(16) pre_defined = 0; |
19 | | // const unsigned int(16) reserved = 0; |
20 | | // unsigned int(32)[3] pre_defined = 0; |
21 | | // unsigned int(16) width; |
22 | | // unsigned int(16) height; |
23 | | // template unsigned int(32) horizresolution = 0x00480000; // 72 dpi |
24 | | // template unsigned int(32) vertresolution = 0x00480000; // 72 dpi |
25 | | // const unsigned int(32) reserved = 0; |
26 | | // template unsigned int(16) frame_count = 1; |
27 | | // string[32] compressorname; |
28 | | // template unsigned int(16) depth = 0x0018; |
29 | | // int(16) pre_defined = -1; |
30 | | // // other boxes from derived specifications |
31 | | // CleanApertureBox clap; // optional |
32 | | // PixelAspectRatioBox pasp; // optional |
33 | | // } |
34 | | static const size_t VISUALSAMPLEENTRY_SIZE = 78; |
35 | | |
36 | | // The only supported ipma box values for both version and flags are [0,1], so there technically |
37 | | // can't be more than 4 unique tuples right now. |
38 | 14.9k | #define MAX_IPMA_VERSION_AND_FLAGS_SEEN 4 |
39 | | |
40 | | // --------------------------------------------------------------------------- |
41 | | // AVIF codec type (AV1 or AV2) |
42 | | |
43 | | static avifCodecType avifGetCodecType(const uint8_t * fourcc) |
44 | 79.6k | { |
45 | 79.6k | if (!memcmp(fourcc, "av01", 4)) { |
46 | 75.2k | return AVIF_CODEC_TYPE_AV1; |
47 | 75.2k | } |
48 | | #if defined(AVIF_CODEC_AVM) |
49 | | if (!memcmp(fourcc, "av02", 4)) { |
50 | | return AVIF_CODEC_TYPE_AV2; |
51 | | } |
52 | | #endif |
53 | 4.36k | return AVIF_CODEC_TYPE_UNKNOWN; |
54 | 79.6k | } |
55 | | |
56 | | static const char * avifGetConfigurationPropertyName(avifCodecType codecType) |
57 | 28.7k | { |
58 | 28.7k | switch (codecType) { |
59 | 28.7k | case AVIF_CODEC_TYPE_AV1: |
60 | 28.7k | return "av1C"; |
61 | | #if defined(AVIF_CODEC_AVM) |
62 | | case AVIF_CODEC_TYPE_AV2: |
63 | | return "av2C"; |
64 | | #endif |
65 | 0 | default: |
66 | 0 | assert(AVIF_FALSE); |
67 | 0 | return NULL; |
68 | 28.7k | } |
69 | 28.7k | } |
70 | | |
71 | | // --------------------------------------------------------------------------- |
72 | | // Box data structures |
73 | | |
74 | | typedef uint8_t avifBrand[4]; |
75 | | AVIF_ARRAY_DECLARE(avifBrandArray, avifBrand, brand); |
76 | | |
77 | | // ftyp |
78 | | typedef struct avifFileType |
79 | | { |
80 | | uint8_t majorBrand[4]; |
81 | | uint8_t minorVersion[4]; |
82 | | // If not null, points to a memory block of 4 * compatibleBrandsCount bytes. |
83 | | const uint8_t * compatibleBrands; |
84 | | int compatibleBrandsCount; |
85 | | } avifFileType; |
86 | | |
87 | | // ispe |
88 | | typedef struct avifImageSpatialExtents |
89 | | { |
90 | | uint32_t width; |
91 | | uint32_t height; |
92 | | } avifImageSpatialExtents; |
93 | | |
94 | | // auxC |
95 | | typedef struct avifAuxiliaryType |
96 | | { |
97 | | char auxType[AUXTYPE_SIZE]; |
98 | | } avifAuxiliaryType; |
99 | | |
100 | | // infe mime content_type |
101 | | typedef struct avifContentType |
102 | | { |
103 | | char contentType[CONTENTTYPE_SIZE]; |
104 | | } avifContentType; |
105 | | |
106 | | // colr |
107 | | typedef struct avifColourInformationBox |
108 | | { |
109 | | avifBool hasICC; |
110 | | uint64_t iccOffset; |
111 | | size_t iccSize; |
112 | | |
113 | | avifBool hasNCLX; |
114 | | avifColorPrimaries colorPrimaries; |
115 | | avifTransferCharacteristics transferCharacteristics; |
116 | | avifMatrixCoefficients matrixCoefficients; |
117 | | avifRange range; |
118 | | } avifColourInformationBox; |
119 | | |
120 | 2.43k | #define MAX_PIXI_PLANE_DEPTHS 4 |
121 | | typedef struct avifPixelInformationProperty |
122 | | { |
123 | | uint8_t planeDepths[MAX_PIXI_PLANE_DEPTHS]; |
124 | | uint8_t planeCount; |
125 | | #if defined(AVIF_ENABLE_EXPERIMENTAL_EXTENDED_PIXI) |
126 | | avifBool hasExtendedFields; // The fields below were signaled if this is true. |
127 | | uint8_t subsamplingFlag[MAX_PIXI_PLANE_DEPTHS]; // The fields below were signaled if this is true for a given channel. |
128 | | uint8_t subsamplingType[MAX_PIXI_PLANE_DEPTHS]; |
129 | | uint8_t subsamplingLocation[MAX_PIXI_PLANE_DEPTHS]; |
130 | | #endif // AVIF_ENABLE_EXPERIMENTAL_EXTENDED_PIXI |
131 | | } avifPixelInformationProperty; |
132 | | |
133 | | typedef struct avifOperatingPointSelectorProperty |
134 | | { |
135 | | uint8_t opIndex; |
136 | | } avifOperatingPointSelectorProperty; |
137 | | |
138 | | typedef struct avifLayerSelectorProperty |
139 | | { |
140 | | uint16_t layerID; |
141 | | } avifLayerSelectorProperty; |
142 | | |
143 | | typedef struct avifAV1LayeredImageIndexingProperty |
144 | | { |
145 | | uint32_t layerSize[3]; |
146 | | } avifAV1LayeredImageIndexingProperty; |
147 | | |
148 | | typedef struct avifOpaqueProperty |
149 | | { |
150 | | uint8_t usertype[16]; // Same as in avifImageItemProperty. |
151 | | avifRWData boxPayload; // Same as in avifImageItemProperty. |
152 | | } avifOpaqueProperty; |
153 | | |
154 | | // Array of item or track ids. |
155 | | AVIF_ARRAY_DECLARE(avifCodecEntityIDs, uint32_t, ids); |
156 | | |
157 | | // Content of a box inside a 'grpl' box, representing a group of entities. |
158 | | typedef struct avifEntityToGroup |
159 | | { |
160 | | uint8_t groupingType[4]; |
161 | | uint32_t groupID; |
162 | | avifCodecEntityIDs entityIDs; |
163 | | } avifEntityToGroup; |
164 | | AVIF_ARRAY_DECLARE(avifEntityToGroups, avifEntityToGroup, groups); |
165 | | |
166 | | // --------------------------------------------------------------------------- |
167 | | // Top-level structures |
168 | | |
169 | | struct avifMeta; |
170 | | |
171 | | // Temporary storage for ipco/stsd contents until they can be associated and memcpy'd to an avifDecoderItem |
172 | | typedef struct avifProperty |
173 | | { |
174 | | uint8_t type[4]; |
175 | | avifBool isOpaque; |
176 | | union |
177 | | { |
178 | | avifImageSpatialExtents ispe; |
179 | | avifAuxiliaryType auxC; // Contents of 'auxC' for items, or 'auxi' for tracks |
180 | | avifColourInformationBox colr; |
181 | | avifCodecConfigurationBox av1C; // TODO(yguyon): Rename or add av2C |
182 | | avifPixelAspectRatioBox pasp; |
183 | | avifCleanApertureBox clap; |
184 | | avifImageRotation irot; |
185 | | avifImageMirror imir; |
186 | | avifPixelInformationProperty pixi; |
187 | | avifOperatingPointSelectorProperty a1op; |
188 | | avifLayerSelectorProperty lsel; |
189 | | avifAV1LayeredImageIndexingProperty a1lx; |
190 | | avifContentLightLevelInformationBox clli; |
191 | | avifOpaqueProperty opaque; |
192 | | } u; |
193 | | } avifProperty; |
194 | | AVIF_ARRAY_DECLARE(avifPropertyArray, avifProperty, prop); |
195 | | |
196 | | // Finds the first property of a given type. |
197 | | static const avifProperty * avifPropertyArrayFind(const avifPropertyArray * properties, const char * type) |
198 | 175k | { |
199 | 708k | for (uint32_t propertyIndex = 0; propertyIndex < properties->count; ++propertyIndex) { |
200 | 580k | const avifProperty * prop = &properties->prop[propertyIndex]; |
201 | 580k | if (!memcmp(prop->type, type, 4)) { |
202 | 47.1k | return prop; |
203 | 47.1k | } |
204 | 580k | } |
205 | 128k | return NULL; |
206 | 175k | } |
207 | | |
208 | | AVIF_ARRAY_DECLARE(avifExtentArray, avifExtent, extent); |
209 | | |
210 | | // one "item" worth for decoding (all iref, iloc, iprp, etc refer to one of these) |
211 | | typedef struct avifDecoderItem |
212 | | { |
213 | | uint32_t id; |
214 | | struct avifMeta * meta; // Unowned; A back-pointer for convenience |
215 | | uint8_t type[4]; |
216 | | size_t size; |
217 | | avifBool idatStored; // If true, offset is relative to the associated meta box's idat box (iloc construction_method==1) |
218 | | uint32_t width; // Set from this item's ispe property, if present |
219 | | uint32_t height; // Set from this item's ispe property, if present |
220 | | avifContentType contentType; |
221 | | avifPropertyArray properties; |
222 | | avifExtentArray extents; // All extent offsets/sizes |
223 | | avifRWData mergedExtents; // if set, is a single contiguous block of this item's extents (unused when extents.count == 1) |
224 | | avifBool ownsMergedExtents; // if true, mergedExtents must be freed when this item is destroyed |
225 | | avifBool partialMergedExtents; // If true, mergedExtents doesn't have all of the item data yet |
226 | | uint32_t thumbnailForID; // if non-zero, this item is a thumbnail for Item #{thumbnailForID} |
227 | | uint32_t auxForID; // if non-zero, this item is an auxC plane for Item #{auxForID} |
228 | | uint32_t descForID; // if non-zero, this item is a content description for Item #{descForID} |
229 | | uint32_t dimgForID; // if non-zero, this item is an input of derived Item #{dimgForID} |
230 | | uint32_t dimgIdx; // If dimgForId is non-zero, this is the zero-based index of this item in the list of Item #{dimgForID}'s dimg. |
231 | | avifBool hasDimgFrom; // whether there is a 'dimg' box with this item's id as 'fromID' |
232 | | uint32_t premByID; // if non-zero, this item is premultiplied by Item #{premByID} |
233 | | avifBool hasUnsupportedEssentialProperty; // If true, this item cites a property flagged as 'essential' that libavif doesn't support (yet). Ignore the item, if so. |
234 | | avifBool ipmaSeen; // if true, this item already received a property association |
235 | | avifBool progressive; // if true, this item has progressive layers (a1lx), but does not select a specific layer (the layer_id value in lsel is set to 0xFFFF) |
236 | | #if defined(AVIF_ENABLE_EXPERIMENTAL_MINI) |
237 | | avifPixelFormat miniBoxPixelFormat; // Set from the MinimizedImageBox, if present (AVIF_PIXEL_FORMAT_NONE otherwise) |
238 | | avifChromaSamplePosition miniBoxChromaSamplePosition; // Set from the MinimizedImageBox, if present (AVIF_CHROMA_SAMPLE_POSITION_UNKNOWN otherwise) |
239 | | #endif |
240 | | } avifDecoderItem; |
241 | | AVIF_ARRAY_DECLARE(avifDecoderItemArray, avifDecoderItem *, item); |
242 | | |
243 | | // grid storage |
244 | | typedef struct avifImageGrid |
245 | | { |
246 | | uint32_t rows; // Legal range: [1-256] |
247 | | uint32_t columns; // Legal range: [1-256] |
248 | | uint32_t outputWidth; |
249 | | uint32_t outputHeight; |
250 | | } avifImageGrid; |
251 | | |
252 | | // --------------------------------------------------------------------------- |
253 | | // avifTrack |
254 | | |
255 | | typedef struct avifSampleTableChunk |
256 | | { |
257 | | uint64_t offset; |
258 | | } avifSampleTableChunk; |
259 | | AVIF_ARRAY_DECLARE(avifSampleTableChunkArray, avifSampleTableChunk, chunk); |
260 | | |
261 | | typedef struct avifSampleTableSampleToChunk |
262 | | { |
263 | | uint32_t firstChunk; |
264 | | uint32_t samplesPerChunk; |
265 | | uint32_t sampleDescriptionIndex; |
266 | | } avifSampleTableSampleToChunk; |
267 | | AVIF_ARRAY_DECLARE(avifSampleTableSampleToChunkArray, avifSampleTableSampleToChunk, sampleToChunk); |
268 | | |
269 | | typedef struct avifSampleTableSampleSize |
270 | | { |
271 | | uint32_t size; |
272 | | } avifSampleTableSampleSize; |
273 | | AVIF_ARRAY_DECLARE(avifSampleTableSampleSizeArray, avifSampleTableSampleSize, sampleSize); |
274 | | |
275 | | typedef struct avifSampleTableTimeToSample |
276 | | { |
277 | | uint32_t sampleCount; |
278 | | uint32_t sampleDelta; |
279 | | } avifSampleTableTimeToSample; |
280 | | AVIF_ARRAY_DECLARE(avifSampleTableTimeToSampleArray, avifSampleTableTimeToSample, timeToSample); |
281 | | |
282 | | typedef struct avifSyncSample |
283 | | { |
284 | | uint32_t sampleNumber; |
285 | | } avifSyncSample; |
286 | | AVIF_ARRAY_DECLARE(avifSyncSampleArray, avifSyncSample, syncSample); |
287 | | |
288 | | typedef struct avifSampleDescription |
289 | | { |
290 | | uint8_t format[4]; |
291 | | avifPropertyArray properties; |
292 | | } avifSampleDescription; |
293 | | AVIF_ARRAY_DECLARE(avifSampleDescriptionArray, avifSampleDescription, description); |
294 | | |
295 | | typedef struct avifSampleTable |
296 | | { |
297 | | avifSampleTableChunkArray chunks; |
298 | | avifSampleDescriptionArray sampleDescriptions; |
299 | | avifSampleTableSampleToChunkArray sampleToChunks; |
300 | | avifSampleTableSampleSizeArray sampleSizes; |
301 | | avifSampleTableTimeToSampleArray timeToSamples; |
302 | | avifSyncSampleArray syncSamples; |
303 | | uint32_t allSamplesSize; // If this is non-zero, sampleSizes will be empty and all samples will be this size |
304 | | } avifSampleTable; |
305 | | |
306 | | static void avifSampleTableDestroy(avifSampleTable * sampleTable); |
307 | | |
308 | | static avifSampleTable * avifSampleTableCreate(void) |
309 | 498 | { |
310 | 498 | avifSampleTable * sampleTable = (avifSampleTable *)avifAlloc(sizeof(avifSampleTable)); |
311 | 498 | if (sampleTable == NULL) { |
312 | 0 | return NULL; |
313 | 0 | } |
314 | 498 | memset(sampleTable, 0, sizeof(avifSampleTable)); |
315 | 498 | if (!avifArrayCreate(&sampleTable->chunks, sizeof(avifSampleTableChunk), 16) || |
316 | 498 | !avifArrayCreate(&sampleTable->sampleDescriptions, sizeof(avifSampleDescription), 2) || |
317 | 498 | !avifArrayCreate(&sampleTable->sampleToChunks, sizeof(avifSampleTableSampleToChunk), 16) || |
318 | 498 | !avifArrayCreate(&sampleTable->sampleSizes, sizeof(avifSampleTableSampleSize), 16) || |
319 | 498 | !avifArrayCreate(&sampleTable->timeToSamples, sizeof(avifSampleTableTimeToSample), 16) || |
320 | 498 | !avifArrayCreate(&sampleTable->syncSamples, sizeof(avifSyncSample), 16)) { |
321 | 0 | avifSampleTableDestroy(sampleTable); |
322 | 0 | return NULL; |
323 | 0 | } |
324 | 498 | return sampleTable; |
325 | 498 | } |
326 | | |
327 | | static void avifPropertyArrayDestroy(avifPropertyArray * array) |
328 | 38.6k | { |
329 | 162k | for (size_t i = 0; i < array->count; ++i) { |
330 | 124k | if (array->prop[i].isOpaque) { |
331 | 42.8k | avifRWDataFree(&array->prop[i].u.opaque.boxPayload); |
332 | 42.8k | } |
333 | 124k | } |
334 | 38.6k | avifArrayDestroy(array); |
335 | 38.6k | } |
336 | | |
337 | | static void avifSampleTableDestroy(avifSampleTable * sampleTable) |
338 | 498 | { |
339 | 498 | avifArrayDestroy(&sampleTable->chunks); |
340 | 665 | for (uint32_t i = 0; i < sampleTable->sampleDescriptions.count; ++i) { |
341 | 167 | avifSampleDescription * description = &sampleTable->sampleDescriptions.description[i]; |
342 | 167 | avifPropertyArrayDestroy(&description->properties); |
343 | 167 | } |
344 | 498 | avifArrayDestroy(&sampleTable->sampleDescriptions); |
345 | 498 | avifArrayDestroy(&sampleTable->sampleToChunks); |
346 | 498 | avifArrayDestroy(&sampleTable->sampleSizes); |
347 | 498 | avifArrayDestroy(&sampleTable->timeToSamples); |
348 | 498 | avifArrayDestroy(&sampleTable->syncSamples); |
349 | 498 | avifFree(sampleTable); |
350 | 498 | } |
351 | | |
352 | | static uint32_t avifSampleTableGetImageDelta(const avifSampleTable * sampleTable, uint32_t imageIndex) |
353 | 2 | { |
354 | 2 | uint32_t maxSampleIndex = 0; |
355 | 2 | for (uint32_t i = 0; i < sampleTable->timeToSamples.count; ++i) { |
356 | 0 | const avifSampleTableTimeToSample * timeToSample = &sampleTable->timeToSamples.timeToSample[i]; |
357 | 0 | maxSampleIndex += timeToSample->sampleCount; |
358 | 0 | if ((imageIndex < maxSampleIndex) || (i == (sampleTable->timeToSamples.count - 1))) { |
359 | 0 | return timeToSample->sampleDelta; |
360 | 0 | } |
361 | 0 | } |
362 | | |
363 | | // TODO: fail here? |
364 | 2 | return 1; |
365 | 2 | } |
366 | | |
367 | | static avifCodecType avifSampleTableGetCodecType(const avifSampleTable * sampleTable) |
368 | 116 | { |
369 | 125 | for (uint32_t i = 0; i < sampleTable->sampleDescriptions.count; ++i) { |
370 | 121 | const avifCodecType codecType = avifGetCodecType(sampleTable->sampleDescriptions.description[i].format); |
371 | 121 | if (codecType != AVIF_CODEC_TYPE_UNKNOWN) { |
372 | 112 | return codecType; |
373 | 112 | } |
374 | 121 | } |
375 | 4 | return AVIF_CODEC_TYPE_UNKNOWN; |
376 | 116 | } |
377 | | |
378 | | static uint32_t avifCodecConfigurationBoxGetDepth(const avifCodecConfigurationBox * av1C) |
379 | 16.0k | { |
380 | 16.0k | if (av1C->twelveBit) { |
381 | 3.38k | return 12; |
382 | 12.7k | } else if (av1C->highBitdepth) { |
383 | 3.93k | return 10; |
384 | 3.93k | } |
385 | 8.77k | return 8; |
386 | 16.0k | } |
387 | | |
388 | | #if defined(AVIF_ENABLE_EXPERIMENTAL_EXTENDED_PIXI) |
389 | | uint8_t avifCodecConfigurationBoxGetSubsamplingType(const avifCodecConfigurationBox * av1C, uint8_t channelIndex) |
390 | | { |
391 | | if (channelIndex == 0) { |
392 | | return AVIF_PIXI_444; |
393 | | } |
394 | | if (av1C->chromaSubsamplingX == 0) { |
395 | | if (av1C->chromaSubsamplingY == 0) { |
396 | | return AVIF_PIXI_444; |
397 | | } |
398 | | return AVIF_PIXI_440; |
399 | | } |
400 | | if (av1C->chromaSubsamplingY == 0) { |
401 | | return AVIF_PIXI_422; |
402 | | } |
403 | | return AVIF_PIXI_420; |
404 | | } |
405 | | |
406 | | // Mapping from PixelInformationBox subsampling_type and subsampling_location as defined in ISO/IEC 23008-12:2024/CDAM 2:2025 section 6.5.6.3 |
407 | | // to chroma_sample_position as defined in AV1 specification Section 6.4.2. |
408 | | static uint8_t avifSubsamplingLocationToChromaSamplePosition(uint8_t subsamplingType, uint8_t subsamplingLocation) |
409 | | { |
410 | | if (subsamplingType == AVIF_PIXI_444) { |
411 | | return AVIF_CHROMA_SAMPLE_POSITION_COLOCATED; |
412 | | } |
413 | | if (subsamplingType == AVIF_PIXI_422) { |
414 | | if (subsamplingLocation == 0 || subsamplingLocation == 2 || subsamplingLocation == 4) { |
415 | | return AVIF_CHROMA_SAMPLE_POSITION_COLOCATED; |
416 | | } |
417 | | } |
418 | | if (subsamplingType == AVIF_PIXI_420) { |
419 | | if (subsamplingLocation == 0) { |
420 | | return AVIF_CHROMA_SAMPLE_POSITION_VERTICAL; |
421 | | } |
422 | | if (subsamplingLocation == 2) { |
423 | | return AVIF_CHROMA_SAMPLE_POSITION_COLOCATED; |
424 | | } |
425 | | } |
426 | | if (subsamplingType == AVIF_PIXI_411) { |
427 | | if (subsamplingLocation == 0 || subsamplingLocation == 2 || subsamplingLocation == 4) { |
428 | | return AVIF_CHROMA_SAMPLE_POSITION_COLOCATED; |
429 | | } |
430 | | } |
431 | | if (subsamplingType == AVIF_PIXI_440) { |
432 | | if (subsamplingLocation == 0 || subsamplingLocation == 1) { |
433 | | return AVIF_CHROMA_SAMPLE_POSITION_VERTICAL; |
434 | | } |
435 | | if (subsamplingLocation == 2 || subsamplingLocation == 3) { |
436 | | return AVIF_CHROMA_SAMPLE_POSITION_COLOCATED; |
437 | | } |
438 | | } |
439 | | return AVIF_CHROMA_SAMPLE_POSITION_UNKNOWN; |
440 | | } |
441 | | #endif // AVIF_ENABLE_EXPERIMENTAL_EXTENDED_PIXI |
442 | | |
443 | | static const avifPropertyArray * avifSampleTableGetProperties(const avifSampleTable * sampleTable, avifCodecType codecType) |
444 | 112 | { |
445 | 112 | for (uint32_t i = 0; i < sampleTable->sampleDescriptions.count; ++i) { |
446 | 112 | const avifSampleDescription * description = &sampleTable->sampleDescriptions.description[i]; |
447 | 112 | if (avifGetCodecType(description->format) == codecType) { |
448 | 112 | return &description->properties; |
449 | 112 | } |
450 | 112 | } |
451 | 0 | return NULL; |
452 | 112 | } |
453 | | |
454 | | // one video track ("trak" contents) |
455 | | typedef struct avifTrack |
456 | | { |
457 | | uint32_t id; |
458 | | uint8_t handlerType[4]; |
459 | | uint32_t auxForID; // if non-zero, this track is an auxC plane for Track #{auxForID} |
460 | | uint32_t premByID; // if non-zero, this track is premultiplied by Track #{premByID} |
461 | | uint32_t mediaTimescale; |
462 | | uint64_t mediaDuration; |
463 | | uint64_t trackDuration; |
464 | | uint64_t segmentDuration; |
465 | | avifBool isRepeating; |
466 | | int repetitionCount; |
467 | | uint32_t width; |
468 | | uint32_t height; |
469 | | avifSampleTable * sampleTable; |
470 | | struct avifMeta * meta; |
471 | | } avifTrack; |
472 | | AVIF_ARRAY_DECLARE(avifTrackArray, avifTrack, track); |
473 | | |
474 | | // --------------------------------------------------------------------------- |
475 | | // avifCodecDecodeInput |
476 | | |
477 | | avifCodecDecodeInput * avifCodecDecodeInputCreate(void) |
478 | 14.7k | { |
479 | 14.7k | avifCodecDecodeInput * decodeInput = (avifCodecDecodeInput *)avifAlloc(sizeof(avifCodecDecodeInput)); |
480 | 14.7k | if (decodeInput == NULL) { |
481 | 0 | return NULL; |
482 | 0 | } |
483 | 14.7k | memset(decodeInput, 0, sizeof(avifCodecDecodeInput)); |
484 | 14.7k | if (!avifArrayCreate(&decodeInput->samples, sizeof(avifDecodeSample), 1)) { |
485 | 0 | avifFree(decodeInput); |
486 | 0 | return NULL; |
487 | 0 | } |
488 | 14.7k | return decodeInput; |
489 | 14.7k | } |
490 | | |
491 | | void avifCodecDecodeInputDestroy(avifCodecDecodeInput * decodeInput) |
492 | 14.7k | { |
493 | 30.4k | for (uint32_t sampleIndex = 0; sampleIndex < decodeInput->samples.count; ++sampleIndex) { |
494 | 15.6k | avifDecodeSample * sample = &decodeInput->samples.sample[sampleIndex]; |
495 | 15.6k | if (sample->ownsData) { |
496 | 0 | avifRWDataFree((avifRWData *)&sample->data); |
497 | 0 | } |
498 | 15.6k | } |
499 | 14.7k | avifArrayDestroy(&decodeInput->samples); |
500 | 14.7k | avifFree(decodeInput); |
501 | 14.7k | } |
502 | | |
503 | | // Returns how many samples are in the chunk. |
504 | | static uint32_t avifGetSampleCountOfChunk(const avifSampleTableSampleToChunkArray * sampleToChunks, uint32_t chunkIndex) |
505 | 180 | { |
506 | 180 | uint32_t sampleCount = 0; |
507 | 358 | for (int sampleToChunkIndex = sampleToChunks->count - 1; sampleToChunkIndex >= 0; --sampleToChunkIndex) { |
508 | 357 | const avifSampleTableSampleToChunk * sampleToChunk = &sampleToChunks->sampleToChunk[sampleToChunkIndex]; |
509 | 357 | if (sampleToChunk->firstChunk <= (chunkIndex + 1)) { |
510 | 179 | sampleCount = sampleToChunk->samplesPerChunk; |
511 | 179 | break; |
512 | 179 | } |
513 | 357 | } |
514 | 180 | return sampleCount; |
515 | 180 | } |
516 | | |
// Appends one avifDecodeSample per sample in the sample table to decodeInput,
// computing each sample's absolute offset and size from the chunk offsets and
// size tables, then marks sync samples from the stss box.
// Returns AVIF_RESULT_BMFF_PARSE_FAILED on malformed tables, on exceeding
// imageCountLimit (if non-zero), or on exceeding sizeHint (if non-zero).
static avifResult avifCodecDecodeInputFillFromSampleTable(avifCodecDecodeInput * decodeInput,
                                                          avifSampleTable * sampleTable,
                                                          const uint32_t imageCountLimit,
                                                          const uint64_t sizeHint,
                                                          avifDiagnostics * diag)
{
    if (imageCountLimit) {
        // Verify that we're not about to exceed the frame count limit.

        uint32_t imageCountLeft = imageCountLimit;
        for (uint32_t chunkIndex = 0; chunkIndex < sampleTable->chunks.count; ++chunkIndex) {
            // First, figure out how many samples are in this chunk
            uint32_t sampleCount = avifGetSampleCountOfChunk(&sampleTable->sampleToChunks, chunkIndex);
            if (sampleCount == 0) {
                // chunks with 0 samples are invalid
                avifDiagnosticsPrintf(diag, "Sample table contains a chunk with 0 samples");
                return AVIF_RESULT_BMFF_PARSE_FAILED;
            }

            if (sampleCount > imageCountLeft) {
                // This file exceeds the imageCountLimit, bail out
                avifDiagnosticsPrintf(diag, "Exceeded avifDecoder's imageCountLimit");
                return AVIF_RESULT_BMFF_PARSE_FAILED;
            }
            imageCountLeft -= sampleCount;
        }
    }

    uint32_t sampleSizeIndex = 0;
    for (uint32_t chunkIndex = 0; chunkIndex < sampleTable->chunks.count; ++chunkIndex) {
        avifSampleTableChunk * chunk = &sampleTable->chunks.chunk[chunkIndex];

        // First, figure out how many samples are in this chunk
        uint32_t sampleCount = avifGetSampleCountOfChunk(&sampleTable->sampleToChunks, chunkIndex);
        if (sampleCount == 0) {
            // chunks with 0 samples are invalid
            avifDiagnosticsPrintf(diag, "Sample table contains a chunk with 0 samples");
            return AVIF_RESULT_BMFF_PARSE_FAILED;
        }

        // Samples within a chunk are laid out contiguously starting at the chunk's offset.
        uint64_t sampleOffset = chunk->offset;
        for (uint32_t sampleIndex = 0; sampleIndex < sampleCount; ++sampleIndex) {
            // allSamplesSize != 0 means the stsz box declared a single constant sample size.
            uint32_t sampleSize = sampleTable->allSamplesSize;
            if (sampleSize == 0) {
                if (sampleSizeIndex >= sampleTable->sampleSizes.count) {
                    // We've run out of samples to sum
                    avifDiagnosticsPrintf(diag, "Truncated sample table");
                    return AVIF_RESULT_BMFF_PARSE_FAILED;
                }
                avifSampleTableSampleSize * sampleSizePtr = &sampleTable->sampleSizes.sampleSize[sampleSizeIndex];
                sampleSize = sampleSizePtr->size;
            }

            avifDecodeSample * sample = (avifDecodeSample *)avifArrayPush(&decodeInput->samples);
            AVIF_CHECKERR(sample != NULL, AVIF_RESULT_OUT_OF_MEMORY);
            sample->offset = sampleOffset;
            sample->size = sampleSize;
            sample->spatialID = AVIF_SPATIAL_ID_UNSET; // Not filtering by spatial_id
            sample->sync = AVIF_FALSE;                 // to potentially be set to true following the outer loop

            // Reject offset+size combinations that would overflow uint64_t.
            if (sampleSize > UINT64_MAX - sampleOffset) {
                avifDiagnosticsPrintf(diag,
                                      "Sample table contains an offset/size pair which overflows: [%" PRIu64 " / %u]",
                                      sampleOffset,
                                      sampleSize);
                return AVIF_RESULT_BMFF_PARSE_FAILED;
            }
            if (sizeHint && ((sampleOffset + sampleSize) > sizeHint)) {
                avifDiagnosticsPrintf(diag, "Exceeded avifIO's sizeHint, possibly truncated data");
                return AVIF_RESULT_BMFF_PARSE_FAILED;
            }

            sampleOffset += sampleSize;
            ++sampleSizeIndex;
        }
    }

    // Mark appropriate samples as sync
    for (uint32_t syncSampleIndex = 0; syncSampleIndex < sampleTable->syncSamples.count; ++syncSampleIndex) {
        uint32_t frameIndex = sampleTable->syncSamples.syncSample[syncSampleIndex].sampleNumber - 1; // sampleNumber is 1-based
        if (frameIndex < decodeInput->samples.count) {
            decodeInput->samples.sample[frameIndex].sync = AVIF_TRUE;
        }
    }

    // Assume frame 0 is sync, just in case the stss box is absent in the BMFF. (Unnecessary?)
    if (decodeInput->samples.count > 0) {
        decodeInput->samples.sample[0].sync = AVIF_TRUE;
    }
    return AVIF_RESULT_OK;
}
608 | | |
// Appends decode samples for a single item to decodeInput, handling three
// cases: layer selection via 'lsel' (one sample covering the selected layers),
// progressive decoding of all 'a1lx' layers (one sample per layer), or the
// typical single-sample case covering the whole item payload.
// Layer sizes are taken from the item's 'a1lx' property when present and
// validated against the item size.
static avifResult avifCodecDecodeInputFillFromDecoderItem(avifCodecDecodeInput * decodeInput,
                                                          avifDecoderItem * item,
                                                          avifBool allowProgressive,
                                                          const uint32_t imageCountLimit,
                                                          const uint64_t sizeHint,
                                                          avifDiagnostics * diag)
{
    if (sizeHint && (item->size > sizeHint)) {
        avifDiagnosticsPrintf(diag, "Exceeded avifIO's sizeHint, possibly truncated data");
        return AVIF_RESULT_BMFF_PARSE_FAILED;
    }

    uint8_t layerCount = 0;
    size_t layerSizes[4] = { 0 };
    const avifProperty * a1lxProp = avifPropertyArrayFind(&item->properties, "a1lx");
    if (a1lxProp) {
        // Calculate layer count and all layer sizes from the a1lx box, and then validate

        size_t remainingSize = item->size;
        for (int i = 0; i < 3; ++i) {
            ++layerCount;

            const size_t layerSize = (size_t)a1lxProp->u.a1lx.layerSize[i];
            if (layerSize) {
                if (layerSize >= remainingSize) { // >= instead of > because there must be room for the last layer
                    avifDiagnosticsPrintf(diag, "a1lx layer index [%d] does not fit in item size", i);
                    return AVIF_RESULT_BMFF_PARSE_FAILED;
                }
                layerSizes[i] = layerSize;
                remainingSize -= layerSize;
            } else {
                // A zero layer size means this layer consumes all remaining bytes.
                layerSizes[i] = remainingSize;
                remainingSize = 0;
                break;
            }
        }
        if (remainingSize > 0) {
            // All three explicit sizes were used; the leftover bytes form a fourth layer.
            AVIF_ASSERT_OR_RETURN(layerCount == 3);
            ++layerCount;
            layerSizes[3] = remainingSize;
        }
    }

    const avifProperty * lselProp = avifPropertyArrayFind(&item->properties, "lsel");
    // Progressive images offer layers via the a1lxProp, but don't specify a layer selection with lsel.
    //
    // For backward compatibility with earlier drafts of AVIF spec v1.1.0, treat an absent lsel as
    // equivalent to layer_id == 0xFFFF during the transitional period. Remove !lselProp when the test
    // images have been updated to the v1.1.0 spec.
    item->progressive = (a1lxProp && (!lselProp || (lselProp->u.lsel.layerID == 0xFFFF)));
    if (lselProp && (lselProp->u.lsel.layerID != 0xFFFF)) {
        // Layer selection. This requires that the underlying AV1 codec decodes all layers,
        // and then only returns the requested layer as a single frame. To the user of libavif,
        // this appears to be a single frame.

        decodeInput->allLayers = AVIF_TRUE;

        size_t sampleSize = 0;
        if (layerCount > 0) {
            // Optimization: If we're selecting a layer that doesn't require the entire image's payload (hinted via the a1lx box)

            if (lselProp->u.lsel.layerID >= layerCount) {
                avifDiagnosticsPrintf(diag,
                                      "lsel property requests layer index [%u] which isn't present in a1lx property ([%u] layers)",
                                      lselProp->u.lsel.layerID,
                                      layerCount);
                return AVIF_RESULT_BMFF_PARSE_FAILED;
            }

            // The selected layer requires all layers up to and including itself.
            for (uint8_t i = 0; i <= lselProp->u.lsel.layerID; ++i) {
                sampleSize += layerSizes[i];
            }
        } else {
            // This layer's payload subsection is unknown, just use the whole payload
            sampleSize = item->size;
        }

        avifDecodeSample * sample = (avifDecodeSample *)avifArrayPush(&decodeInput->samples);
        AVIF_CHECKERR(sample != NULL, AVIF_RESULT_OUT_OF_MEMORY);
        sample->itemID = item->id;
        sample->offset = 0;
        sample->size = sampleSize;
        AVIF_ASSERT_OR_RETURN(lselProp->u.lsel.layerID < AVIF_MAX_AV1_LAYER_COUNT);
        sample->spatialID = (uint8_t)lselProp->u.lsel.layerID;
        sample->sync = AVIF_TRUE;
    } else if (allowProgressive && item->progressive) {
        // Progressive image. Decode all layers and expose them all to the user.

        if (imageCountLimit && (layerCount > imageCountLimit)) {
            avifDiagnosticsPrintf(diag, "Exceeded avifDecoder's imageCountLimit (progressive)");
            return AVIF_RESULT_BMFF_PARSE_FAILED;
        }

        decodeInput->allLayers = AVIF_TRUE;

        size_t offset = 0;
        for (int i = 0; i < layerCount; ++i) {
            avifDecodeSample * sample = (avifDecodeSample *)avifArrayPush(&decodeInput->samples);
            AVIF_CHECKERR(sample != NULL, AVIF_RESULT_OUT_OF_MEMORY);
            sample->itemID = item->id;
            sample->offset = offset;
            sample->size = layerSizes[i];
            sample->spatialID = AVIF_SPATIAL_ID_UNSET;
            sample->sync = (i == 0); // Assume all layers depend on the first layer

            offset += layerSizes[i];
        }
    } else {
        // Typical case: Use the entire item's payload for a single frame output

        avifDecodeSample * sample = (avifDecodeSample *)avifArrayPush(&decodeInput->samples);
        AVIF_CHECKERR(sample != NULL, AVIF_RESULT_OUT_OF_MEMORY);
        sample->itemID = item->id;
        sample->offset = 0;
        sample->size = item->size;
        sample->spatialID = AVIF_SPATIAL_ID_UNSET;
        sample->sync = AVIF_TRUE;
    }
    return AVIF_RESULT_OK;
}
729 | | |
730 | | // --------------------------------------------------------------------------- |
731 | | // Helper macros / functions |
732 | | |
// Declares a local avifROStream named VARNAME wrapping the read-only buffer [PTR, PTR+SIZE)
// and starts it with the given diagnostics object and context string.
// NOTE: expands to multiple statements (not wrapped in do/while), so it must only be used
// where a sequence of declarations/statements is valid.
#define BEGIN_STREAM(VARNAME, PTR, SIZE, DIAG, CONTEXT) \
    avifROStream VARNAME;                               \
    avifROData VARNAME##_roData;                        \
    VARNAME##_roData.data = PTR;                        \
    VARNAME##_roData.size = SIZE;                       \
    avifROStreamStart(&VARNAME, &VARNAME##_roData, DIAG, CONTEXT)
739 | | |
// One bit position per child box type that may occur at most once within its parent box.
typedef enum avifUniqueBoxFlag
{
    AVIF_UNIQUE_ILOC = 0, // ItemLocationBox
    AVIF_UNIQUE_PITM,     // PrimaryItemBox
    AVIF_UNIQUE_IDAT,     // ItemDataBox
    AVIF_UNIQUE_IPRP,     // ItemPropertiesBox
    AVIF_UNIQUE_IINF,     // ItemInfoBox
    AVIF_UNIQUE_IREF,     // ItemReferenceBox
    AVIF_UNIQUE_GRPL,     // GroupsListBox
} avifUniqueBoxFlag;
750 | | // Use this to keep track of whether or not a child box that must be unique (0 or 1 present) has |
751 | | // been seen yet, when parsing a parent box. If the "seen" bit is already set for a given box when |
752 | | // it is encountered during parse, an error is thrown. Which bit corresponds to which box is |
753 | | // dictated entirely by the calling function. |
754 | | static avifBool uniqueBoxSeen(uint32_t * uniqueBoxFlags, |
755 | | avifUniqueBoxFlag whichFlag, |
756 | | const char * parentBoxType, |
757 | | const char * boxType, |
758 | | avifDiagnostics * diagnostics) |
759 | 62.3k | { |
760 | 62.3k | const uint32_t flag = 1 << whichFlag; |
761 | 62.3k | if (*uniqueBoxFlags & flag) { |
762 | | // This box has already been seen. Error! |
763 | 7 | avifDiagnosticsPrintf(diagnostics, "Box[%s] contains a duplicate unique box of type '%s'", parentBoxType, boxType); |
764 | 7 | return AVIF_FALSE; |
765 | 7 | } |
766 | | |
767 | | // Mark this box as seen. |
768 | 62.3k | *uniqueBoxFlags |= flag; |
769 | 62.3k | return AVIF_TRUE; |
770 | 62.3k | } |
771 | | |
772 | | // --------------------------------------------------------------------------- |
773 | | // avifDecoderData |
774 | | |
typedef struct avifTile
{
    avifCodecDecodeInput * input; // Samples (frames/layers) to feed to the codec; owned by this tile.
    avifCodecType codecType;      // AV1 (or AV2 when AVIF_CODEC_AVM is enabled).
    // This may point to a codec that it owns or point to a shared codec that it does not own. In the shared case, this will
    // point to one of the avifCodec instances in avifDecoderData.
    struct avifCodec * codec;
    avifImage * image;      // Decoded output for this tile; owned by this tile.
    uint32_t width;         // Either avifTrack.width or avifDecoderItem.width
    uint32_t height;        // Either avifTrack.height or avifDecoderItem.height
    uint8_t operatingPoint; // AV1 operating point to decode (from the 'a1op' property; 0 by default).
} avifTile;
AVIF_ARRAY_DECLARE(avifTileArray, avifTile, tile);
788 | | |
789 | | // This holds one "meta" box (from the BMFF and HEIF standards) worth of relevant-to-AVIF information. |
790 | | // * If a meta box is parsed from the root level of the BMFF, it can contain the information about |
791 | | // "items" which might be color planes, alpha planes, or EXIF or XMP metadata. |
792 | | // * If a meta box is parsed from inside of a track ("trak") box, any metadata (EXIF/XMP) items inside |
793 | | // of that box are implicitly associated with that track. |
typedef struct avifMeta
{
    // Items (from HEIF) are the generic storage for any data that does not require timed processing
    // (single image color planes, alpha planes, EXIF, XMP, etc). Each item has a unique integer ID >1,
    // and is defined by a series of child boxes in a meta box:
    //  * iloc - location: byte offset to item data, item size in bytes
    //  * iinf - information: type of item (color planes, alpha plane, EXIF, XMP)
    //  * ipco - properties: dimensions, aspect ratio, image transformations, references to other items
    //  * ipma - associations: Attaches an item in the properties list to a given item
    //
    // Items are lazily created in this array when any of the above boxes refer to one by a new (unseen) ID,
    // and are then further modified/updated as new information for an item's ID is parsed.
    // The array stores pointers; each avifDecoderItem is heap-allocated and freed in avifMetaDestroy().
    avifDecoderItemArray items;

    // Any ipco boxes explained above are populated into this array as a staging area, which are
    // then duplicated into the appropriate items upon encountering an item property association
    // (ipma) box.
    avifPropertyArray properties;

    // Filled with the contents of this meta box's "idat" box, which is raw data that an item can
    // directly refer to in its item location box (iloc) instead of just giving an offset into the
    // overall file. If all items' iloc boxes simply point at an offset/length in the file itself,
    // this buffer will likely be empty.
    avifRWData idat;

    // Ever-incrementing ID for uniquely identifying which 'meta' box contains an idat (when
    // multiple meta boxes exist as BMFF siblings). Each time avifParseMetaBox() is called on an
    // avifMeta struct, this value is incremented. Any time an additional meta box is detected at
    // the same "level" (root level, trak level, etc), this ID helps distinguish which meta box's
    // "idat" is which, as items implicitly reference idat boxes that exist in the same meta
    // box.
    uint32_t idatID;

    // Contents of a pitm box, which signal which of the items in this file is the main image. For
    // AVIF, this should point at an image item containing color planes, and all other items
    // are ignored unless they refer to this item in some way (alpha plane, EXIF/XMP metadata).
    uint32_t primaryItemID;

    // Contents of grpl box, which signal groups of entities (items or tracks).
    // Each group's entityIDs array is owned here and released in avifMetaDestroy().
    avifEntityToGroups entityToGroups;

#if defined(AVIF_ENABLE_EXPERIMENTAL_MINI)
    // If true, the fields above were extracted from a MinimizedImageBox.
    avifBool fromMiniBox;
#endif

#if defined(AVIF_ENABLE_EXPERIMENTAL_SAMPLE_TRANSFORM)
    // Parsed from Sample Transform metadata if present, otherwise empty.
    avifSampleTransformExpression sampleTransformExpression;
    // Bit depth extracted from the pixi property of the Sample Transform derived image item, if any.
    uint32_t sampleTransformDepth;
#endif
} avifMeta;
847 | | |
848 | | static void avifMetaDestroy(avifMeta * meta); |
849 | | |
850 | | static avifMeta * avifMetaCreate(void) |
851 | 18.1k | { |
852 | 18.1k | avifMeta * meta = (avifMeta *)avifAlloc(sizeof(avifMeta)); |
853 | 18.1k | if (meta == NULL) { |
854 | 0 | return NULL; |
855 | 0 | } |
856 | 18.1k | memset(meta, 0, sizeof(avifMeta)); |
857 | 18.1k | if (!avifArrayCreate(&meta->items, sizeof(avifDecoderItem *), 8) || !avifArrayCreate(&meta->properties, sizeof(avifProperty), 16) || |
858 | 18.1k | !avifArrayCreate(&meta->entityToGroups, sizeof(avifEntityToGroup), 1)) { |
859 | 0 | avifMetaDestroy(meta); |
860 | 0 | return NULL; |
861 | 0 | } |
862 | 18.1k | return meta; |
863 | 18.1k | } |
864 | | |
865 | | static void avifMetaDestroy(avifMeta * meta) |
866 | 18.1k | { |
867 | 38.5k | for (uint32_t i = 0; i < meta->items.count; ++i) { |
868 | 20.3k | avifDecoderItem * item = meta->items.item[i]; |
869 | 20.3k | avifPropertyArrayDestroy(&item->properties); |
870 | 20.3k | avifArrayDestroy(&item->extents); |
871 | 20.3k | if (item->ownsMergedExtents) { |
872 | 16 | avifRWDataFree(&item->mergedExtents); |
873 | 16 | } |
874 | 20.3k | avifFree(item); |
875 | 20.3k | } |
876 | 18.1k | avifArrayDestroy(&meta->items); |
877 | 18.1k | avifPropertyArrayDestroy(&meta->properties); |
878 | 18.1k | avifRWDataFree(&meta->idat); |
879 | | #if defined(AVIF_ENABLE_EXPERIMENTAL_SAMPLE_TRANSFORM) |
880 | | avifArrayDestroy(&meta->sampleTransformExpression); |
881 | | #endif |
882 | 18.2k | for (uint32_t i = 0; i < meta->entityToGroups.count; ++i) { |
883 | 100 | avifArrayDestroy(&meta->entityToGroups.groups[i].entityIDs); |
884 | 100 | } |
885 | 18.1k | avifArrayDestroy(&meta->entityToGroups); |
886 | 18.1k | avifFree(meta); |
887 | 18.1k | } |
888 | | |
889 | | static avifResult avifCheckItemID(const char * boxFourcc, uint32_t itemID, avifDiagnostics * diag) |
890 | 60.8k | { |
891 | | // Section 8.11.1.1 of ISO/IEC 14496-12 about MetaBox definition: |
892 | | // The item_ID value of 0 should not be used |
893 | | // Section 8.11.6 of ISO/IEC 14496-12 about ItemInfoEntry syntax and semantics: |
894 | | // item_ID contains either 0 for the primary resource (e.g. the XML contained in an XMLBox) |
895 | | // or the ID of the item for which the following information is defined. |
896 | | // Assuming 'infe' is the only way to properly define an item in AVIF, a compliant item cannot have an ID of zero. |
897 | | // One way to bypass that rule would be to have 'infe' with item_ID being 0, referring to "the primary resource", |
898 | | // and 'pitm' defining "the primary resource" as the item with an item_ID of 0. libavif considers that as invalid. |
899 | 60.8k | if (itemID == 0) { |
900 | 35 | avifDiagnosticsPrintf(diag, "Box[%.4s] has an invalid item ID [%u]", boxFourcc, itemID); |
901 | 35 | return AVIF_RESULT_BMFF_PARSE_FAILED; |
902 | 35 | } |
903 | 60.8k | return AVIF_RESULT_OK; |
904 | 60.8k | } |
905 | | |
906 | | static avifResult avifMetaFindOrCreateItem(avifMeta * meta, uint32_t itemID, avifDecoderItem ** item) |
907 | 90.3k | { |
908 | 90.3k | *item = NULL; |
909 | 90.3k | AVIF_ASSERT_OR_RETURN(itemID != 0); |
910 | | |
911 | 152k | for (uint32_t i = 0; i < meta->items.count; ++i) { |
912 | 131k | if (meta->items.item[i]->id == itemID) { |
913 | 69.9k | *item = meta->items.item[i]; |
914 | 69.9k | return AVIF_RESULT_OK; |
915 | 69.9k | } |
916 | 131k | } |
917 | | |
918 | 20.3k | avifDecoderItem ** itemPtr = (avifDecoderItem **)avifArrayPush(&meta->items); |
919 | 20.3k | AVIF_CHECKERR(itemPtr != NULL, AVIF_RESULT_OUT_OF_MEMORY); |
920 | 20.3k | *item = (avifDecoderItem *)avifAlloc(sizeof(avifDecoderItem)); |
921 | 20.3k | if (*item == NULL) { |
922 | 0 | avifArrayPop(&meta->items); |
923 | 0 | return AVIF_RESULT_OUT_OF_MEMORY; |
924 | 0 | } |
925 | 20.3k | memset(*item, 0, sizeof(avifDecoderItem)); |
926 | | |
927 | 20.3k | *itemPtr = *item; |
928 | 20.3k | if (!avifArrayCreate(&(*item)->properties, sizeof(avifProperty), 16)) { |
929 | 0 | avifFree(*item); |
930 | 0 | *item = NULL; |
931 | 0 | avifArrayPop(&meta->items); |
932 | 0 | return AVIF_RESULT_OUT_OF_MEMORY; |
933 | 0 | } |
934 | 20.3k | if (!avifArrayCreate(&(*item)->extents, sizeof(avifExtent), 1)) { |
935 | 0 | avifPropertyArrayDestroy(&(*item)->properties); |
936 | 0 | avifFree(*item); |
937 | 0 | *item = NULL; |
938 | 0 | avifArrayPop(&meta->items); |
939 | 0 | return AVIF_RESULT_OUT_OF_MEMORY; |
940 | 0 | } |
941 | 20.3k | (*item)->id = itemID; |
942 | 20.3k | (*item)->meta = meta; |
943 | 20.3k | return AVIF_RESULT_OK; |
944 | 20.3k | } |
945 | | |
946 | | // A group of AVIF tiles in an image item, such as a single tile or a grid of multiple tiles. |
typedef struct avifTileInfo
{
    unsigned int tileCount;        // Number of tiles in this group.
    unsigned int decodedTileCount; // Number of tiles decoded so far; reset when codecs are reset/cleared.
    unsigned int firstTileIndex;   // Within avifDecoderData.tiles.
    avifImageGrid grid;            // Grid layout, when the group is a grid of multiple tiles.
} avifTileInfo;
954 | | |
typedef struct avifDecoderData
{
    avifMeta * meta;      // The root-level meta box
    avifTrackArray tracks; // All parsed tracks; each owns its own avifMeta and sample table.
    avifTileArray tiles;   // Flat list of all tiles; grouped per category via |tileInfos|.
    avifTileInfo tileInfos[AVIF_ITEM_CATEGORY_COUNT]; // One tile group per item category, indexing into |tiles|.
    avifDecoderSource source;
    // When decoding AVIF images with grid, use a single decoder instance for all the tiles instead of creating a decoder instance
    // for each tile. If that is the case, |codec| will be used by all the tiles.
    //
    // There are some edge cases where we will still need multiple decoder instances:
    // * For animated AVIF with alpha, we will need two instances (one for the color planes and one for the alpha plane since they are both
    //   encoded as separate video sequences). In this case, |codec| will be used for the color planes and |codecAlpha| will be
    //   used for the alpha plane.
    // * For grid images with multiple layers. In this case, each tile will need its own decoder instance since there would be
    //   multiple layers in each tile. In this case, |codec| and |codecAlpha| are not used and each tile will have its own
    //   decoder instance.
    // * For grid images where the operating points of all the tiles are not the same. In this case, each tile needs its own
    //   decoder instance (same as above).
    avifCodec * codec;
    avifCodec * codecAlpha;
    uint8_t majorBrand[4];                     // From the file's ftyp, used by AVIF_DECODER_SOURCE_AUTO
    avifBrandArray compatibleBrands;           // From the file's ftyp
    avifDiagnostics * diag;                    // Shallow copy; owned by avifDecoder
    const avifSampleTable * sourceSampleTable; // NULL unless (source == AVIF_DECODER_SOURCE_TRACKS), owned by an avifTrack
    avifBool cicpSet;                          // True if avifDecoder's image has had its CICP set correctly yet.
                                               // This allows nclx colr boxes to override AV1 CICP, as specified in the MIAF
                                               // standard (ISO/IEC 23000-22:2019), section 7.3.6.4:
                                               // The colour information property takes precedence over any colour information
                                               // in the image bitstream, i.e. if the property is present, colour information in
                                               // the bitstream shall be ignored.

#if defined(AVIF_ENABLE_EXPERIMENTAL_SAMPLE_TRANSFORM)
    // Remember the dimg association order to the Sample Transform derived image item.
    // Colour items only. The alpha items are implicit.
    uint8_t sampleTransformNumInputImageItems; // At most AVIF_SAMPLE_TRANSFORM_MAX_NUM_INPUT_IMAGE_ITEMS.
    avifItemCategory sampleTransformInputImageItems[AVIF_SAMPLE_TRANSFORM_MAX_NUM_INPUT_IMAGE_ITEMS];
#endif
} avifDecoderData;
994 | | |
995 | | static void avifDecoderDataDestroy(avifDecoderData * data); |
996 | | |
997 | | static avifDecoderData * avifDecoderDataCreate(void) |
998 | 17.2k | { |
999 | 17.2k | avifDecoderData * data = (avifDecoderData *)avifAlloc(sizeof(avifDecoderData)); |
1000 | 17.2k | if (data == NULL) { |
1001 | 0 | return NULL; |
1002 | 0 | } |
1003 | 17.2k | memset(data, 0, sizeof(avifDecoderData)); |
1004 | 17.2k | data->meta = avifMetaCreate(); |
1005 | 17.2k | if (data->meta == NULL || !avifArrayCreate(&data->tracks, sizeof(avifTrack), 2) || |
1006 | 17.2k | !avifArrayCreate(&data->tiles, sizeof(avifTile), 8)) { |
1007 | 0 | avifDecoderDataDestroy(data); |
1008 | 0 | return NULL; |
1009 | 0 | } |
1010 | 17.2k | return data; |
1011 | 17.2k | } |
1012 | | |
1013 | | static void avifDecoderDataResetCodec(avifDecoderData * data) |
1014 | 14.1k | { |
1015 | 28.3k | for (unsigned int i = 0; i < data->tiles.count; ++i) { |
1016 | 14.2k | avifTile * tile = &data->tiles.tile[i]; |
1017 | 14.2k | if (tile->image) { |
1018 | 14.2k | avifImageFreePlanes(tile->image, AVIF_PLANES_ALL); // forget any pointers into codec image buffers |
1019 | 14.2k | } |
1020 | 14.2k | if (tile->codec) { |
1021 | | // Check if tile->codec was created separately and destroy it in that case. |
1022 | 0 | if (tile->codec != data->codec && tile->codec != data->codecAlpha) { |
1023 | 0 | avifCodecDestroy(tile->codec); |
1024 | 0 | } |
1025 | 0 | tile->codec = NULL; |
1026 | 0 | } |
1027 | 14.2k | } |
1028 | 56.5k | for (int c = 0; c < AVIF_ITEM_CATEGORY_COUNT; ++c) { |
1029 | 42.4k | data->tileInfos[c].decodedTileCount = 0; |
1030 | 42.4k | } |
1031 | 14.1k | if (data->codec) { |
1032 | 0 | avifCodecDestroy(data->codec); |
1033 | 0 | data->codec = NULL; |
1034 | 0 | } |
1035 | 14.1k | if (data->codecAlpha) { |
1036 | 0 | avifCodecDestroy(data->codecAlpha); |
1037 | 0 | data->codecAlpha = NULL; |
1038 | 0 | } |
1039 | 14.1k | } |
1040 | | |
// Appends a new tile to data->tiles and initializes its image and decode input.
// Returns NULL on allocation failure, in which case the partially-constructed
// tile is popped back off the array. The tile's |codec| is left unset here.
static avifTile * avifDecoderDataCreateTile(avifDecoderData * data, avifCodecType codecType, uint32_t width, uint32_t height, uint8_t operatingPoint)
{
    avifTile * tile = (avifTile *)avifArrayPush(&data->tiles);
    if (tile == NULL) {
        return NULL;
    }
    tile->codecType = codecType;
    tile->image = avifImageCreateEmpty();
    if (!tile->image) {
        goto error;
    }
    tile->input = avifCodecDecodeInputCreate();
    if (!tile->input) {
        goto error;
    }
    tile->width = width;
    tile->height = height;
    tile->operatingPoint = operatingPoint;
    return tile;

error:
    // NOTE(review): when avifImageCreateEmpty() fails, tile->input is read here
    // before ever being assigned — this assumes avifArrayPush() returns
    // zero-initialized memory; confirm against the array implementation.
    if (tile->input) {
        avifCodecDecodeInputDestroy(tile->input);
    }
    if (tile->image) {
        avifImageDestroy(tile->image);
    }
    avifArrayPop(&data->tiles);
    return NULL;
}
1071 | | |
1072 | | static avifTrack * avifDecoderDataCreateTrack(avifDecoderData * data) |
1073 | 848 | { |
1074 | 848 | avifTrack * track = (avifTrack *)avifArrayPush(&data->tracks); |
1075 | 848 | if (track == NULL) { |
1076 | 0 | return NULL; |
1077 | 0 | } |
1078 | 848 | track->meta = avifMetaCreate(); |
1079 | 848 | if (track->meta == NULL) { |
1080 | 0 | avifArrayPop(&data->tracks); |
1081 | 0 | return NULL; |
1082 | 0 | } |
1083 | 848 | return track; |
1084 | 848 | } |
1085 | | |
1086 | | static void avifDecoderDataClearTiles(avifDecoderData * data) |
1087 | 32.2k | { |
1088 | 47.0k | for (unsigned int i = 0; i < data->tiles.count; ++i) { |
1089 | 14.7k | avifTile * tile = &data->tiles.tile[i]; |
1090 | 14.7k | if (tile->input) { |
1091 | 14.7k | avifCodecDecodeInputDestroy(tile->input); |
1092 | 14.7k | tile->input = NULL; |
1093 | 14.7k | } |
1094 | 14.7k | if (tile->codec) { |
1095 | | // Check if tile->codec was created separately and destroy it in that case. |
1096 | 14.2k | if (tile->codec != data->codec && tile->codec != data->codecAlpha) { |
1097 | 104 | avifCodecDestroy(tile->codec); |
1098 | 104 | } |
1099 | 14.2k | tile->codec = NULL; |
1100 | 14.2k | } |
1101 | 14.7k | if (tile->image) { |
1102 | 14.7k | avifImageDestroy(tile->image); |
1103 | 14.7k | tile->image = NULL; |
1104 | 14.7k | } |
1105 | 14.7k | } |
1106 | 32.2k | data->tiles.count = 0; |
1107 | 129k | for (int c = 0; c < AVIF_ITEM_CATEGORY_COUNT; ++c) { |
1108 | 96.7k | data->tileInfos[c].tileCount = 0; |
1109 | 96.7k | data->tileInfos[c].decodedTileCount = 0; |
1110 | 96.7k | } |
1111 | 32.2k | if (data->codec) { |
1112 | 14.0k | avifCodecDestroy(data->codec); |
1113 | 14.0k | data->codec = NULL; |
1114 | 14.0k | } |
1115 | 32.2k | if (data->codecAlpha) { |
1116 | 0 | avifCodecDestroy(data->codecAlpha); |
1117 | 0 | data->codecAlpha = NULL; |
1118 | 0 | } |
1119 | 32.2k | } |
1120 | | |
1121 | | static void avifDecoderDataDestroy(avifDecoderData * data) |
1122 | 17.2k | { |
1123 | 17.2k | if (data->meta) { |
1124 | 17.2k | avifMetaDestroy(data->meta); |
1125 | 17.2k | } |
1126 | 18.1k | for (uint32_t i = 0; i < data->tracks.count; ++i) { |
1127 | 848 | avifTrack * track = &data->tracks.track[i]; |
1128 | 848 | if (track->sampleTable) { |
1129 | 498 | avifSampleTableDestroy(track->sampleTable); |
1130 | 498 | } |
1131 | 848 | if (track->meta) { |
1132 | 848 | avifMetaDestroy(track->meta); |
1133 | 848 | } |
1134 | 848 | } |
1135 | 17.2k | avifArrayDestroy(&data->tracks); |
1136 | 17.2k | avifDecoderDataClearTiles(data); |
1137 | 17.2k | avifArrayDestroy(&data->tiles); |
1138 | 17.2k | avifArrayDestroy(&data->compatibleBrands); |
1139 | 17.2k | avifFree(data); |
1140 | 17.2k | } |
1141 | | |
1142 | | // This returns the max extent that has to be read in order to decode this item. If |
1143 | | // the item is stored in an idat, the data has already been read during Parse() and |
1144 | | // this function will return AVIF_RESULT_OK with a 0-byte extent. |
// Computes, into *outExtent, the single contiguous [offset, offset+size) file
// range covering all extents needed to decode |sample| of |item|. Returns a
// 0-byte extent when the data lives in an already-read idat. Errors:
// AVIF_RESULT_TRUNCATED_DATA when the extents cannot supply sample->size bytes,
// AVIF_RESULT_NO_CONTENT when idat storage is indicated but no idat exists,
// AVIF_RESULT_BMFF_PARSE_FAILED on arithmetic overflow of offsets/sizes.
static avifResult avifDecoderItemMaxExtent(const avifDecoderItem * item, const avifDecodeSample * sample, avifExtent * outExtent)
{
    if (item->extents.count == 0) {
        return AVIF_RESULT_TRUNCATED_DATA;
    }

    if (item->idatStored) {
        // construction_method: idat(1)

        if (item->meta->idat.size > 0) {
            // Already read from a meta box during Parse()
            memset(outExtent, 0, sizeof(avifExtent));
            return AVIF_RESULT_OK;
        }

        // no associated idat box was found in the meta box, bail out
        return AVIF_RESULT_NO_CONTENT;
    }

    // construction_method: file(0)

    if (sample->size == 0) {
        return AVIF_RESULT_TRUNCATED_DATA;
    }
    // The sample may start partway into the item's extent chain: skip
    // |remainingOffset| bytes first, then gather |remainingBytes| bytes.
    uint64_t remainingOffset = sample->offset;
    size_t remainingBytes = sample->size; // This may be smaller than item->size if the item is progressive

    // Assert that the for loop below will execute at least one iteration.
    AVIF_ASSERT_OR_RETURN(item->extents.count != 0);
    uint64_t minOffset = UINT64_MAX;
    uint64_t maxOffset = 0;
    for (uint32_t extentIter = 0; extentIter < item->extents.count; ++extentIter) {
        avifExtent * extent = &item->extents.extent[extentIter];

        // Make local copies of extent->offset and extent->size as they might need to be adjusted
        // due to the sample's offset.
        uint64_t startOffset = extent->offset;
        size_t extentSize = extent->size;
        if (remainingOffset) {
            if (remainingOffset >= extentSize) {
                // This whole extent lies before the sample; skip it entirely.
                remainingOffset -= extentSize;
                continue;
            } else {
                // The sample starts inside this extent; trim its front.
                if (remainingOffset > UINT64_MAX - startOffset) {
                    // Guard against uint64_t overflow of startOffset + remainingOffset.
                    return AVIF_RESULT_BMFF_PARSE_FAILED;
                }
                startOffset += remainingOffset;
                extentSize -= (size_t)remainingOffset;
                remainingOffset = 0;
            }
        }

        // Only count the part of this extent the sample actually uses.
        const size_t usedExtentSize = (extentSize < remainingBytes) ? extentSize : remainingBytes;

        if (usedExtentSize > UINT64_MAX - startOffset) {
            // Guard against uint64_t overflow of startOffset + usedExtentSize.
            return AVIF_RESULT_BMFF_PARSE_FAILED;
        }
        const uint64_t endOffset = startOffset + usedExtentSize;

        // Grow the [minOffset, maxOffset) hull to include this extent's range.
        if (minOffset > startOffset) {
            minOffset = startOffset;
        }
        if (maxOffset < endOffset) {
            maxOffset = endOffset;
        }

        remainingBytes -= usedExtentSize;
        if (remainingBytes == 0) {
            // We've got enough bytes for this sample.
            break;
        }
    }

    if (remainingBytes != 0) {
        // The extent chain ended before supplying the full sample.
        return AVIF_RESULT_TRUNCATED_DATA;
    }

    outExtent->offset = minOffset;
    const uint64_t extentLength = maxOffset - minOffset;
#if UINT64_MAX > SIZE_MAX
    if (extentLength > SIZE_MAX) {
        // The hull does not fit in size_t on this platform.
        return AVIF_RESULT_BMFF_PARSE_FAILED;
    }
#endif
    outExtent->size = (size_t)extentLength;
    return AVIF_RESULT_OK;
}
1232 | | |
1233 | | static uint8_t avifDecoderItemOperatingPoint(const avifDecoderItem * item) |
1234 | 14.7k | { |
1235 | 14.7k | const avifProperty * a1opProp = avifPropertyArrayFind(&item->properties, "a1op"); |
1236 | 14.7k | if (a1opProp) { |
1237 | 59 | return a1opProp->u.a1op.opIndex; |
1238 | 59 | } |
1239 | 14.6k | return 0; // default |
1240 | 14.7k | } |
1241 | | |
1242 | | static avifResult avifDecoderItemValidateProperties(const avifDecoderItem * item, |
1243 | | const char * configPropName, |
1244 | | avifDiagnostics * diag, |
1245 | | const avifStrictFlags strictFlags) |
1246 | 14.5k | { |
1247 | 14.5k | const avifProperty * const configProp = avifPropertyArrayFind(&item->properties, configPropName); |
1248 | 14.5k | if (!configProp) { |
1249 | | // An item configuration property box is mandatory in all valid AVIF configurations. Bail out. |
1250 | 5 | avifDiagnosticsPrintf(diag, "Item ID %u of type '%.4s' is missing mandatory %s property", item->id, (const char *)item->type, configPropName); |
1251 | 5 | return AVIF_RESULT_BMFF_PARSE_FAILED; |
1252 | 5 | } |
1253 | | |
1254 | 14.5k | if (!memcmp(item->type, "grid", 4)) { |
1255 | 293 | for (uint32_t i = 0; i < item->meta->items.count; ++i) { |
1256 | 260 | avifDecoderItem * tile = item->meta->items.item[i]; |
1257 | 260 | if (tile->dimgForID != item->id) { |
1258 | 151 | continue; |
1259 | 151 | } |
1260 | | // Tile item types were checked in avifDecoderGenerateImageTiles(), no need to do it here. |
1261 | | |
1262 | | // MIAF (ISO 23000-22:2019), Section 7.3.11.4.1: |
1263 | | // All input images of a grid image item shall use the same [...] chroma sampling format, |
1264 | | // and the same decoder configuration (see 7.3.6.2). |
1265 | | |
1266 | | // The chroma sampling format is part of the decoder configuration. |
1267 | 109 | const avifProperty * tileConfigProp = avifPropertyArrayFind(&tile->properties, configPropName); |
1268 | 109 | if (!tileConfigProp) { |
1269 | 10 | avifDiagnosticsPrintf(diag, |
1270 | 10 | "Tile item ID %u of type '%.4s' is missing mandatory %s property", |
1271 | 10 | tile->id, |
1272 | 10 | (const char *)tile->type, |
1273 | 10 | configPropName); |
1274 | 10 | return AVIF_RESULT_BMFF_PARSE_FAILED; |
1275 | 10 | } |
1276 | | // configProp was copied from a tile item to the grid item. Comparing tileConfigProp with it |
1277 | | // is equivalent to comparing tileConfigProp with the configPropName from the first tile. |
1278 | 99 | if ((tileConfigProp->u.av1C.seqProfile != configProp->u.av1C.seqProfile) || |
1279 | 99 | (tileConfigProp->u.av1C.seqLevelIdx0 != configProp->u.av1C.seqLevelIdx0) || |
1280 | 99 | (tileConfigProp->u.av1C.seqTier0 != configProp->u.av1C.seqTier0) || |
1281 | 99 | (tileConfigProp->u.av1C.highBitdepth != configProp->u.av1C.highBitdepth) || |
1282 | 99 | (tileConfigProp->u.av1C.twelveBit != configProp->u.av1C.twelveBit) || |
1283 | 99 | (tileConfigProp->u.av1C.monochrome != configProp->u.av1C.monochrome) || |
1284 | 99 | (tileConfigProp->u.av1C.chromaSubsamplingX != configProp->u.av1C.chromaSubsamplingX) || |
1285 | 99 | (tileConfigProp->u.av1C.chromaSubsamplingY != configProp->u.av1C.chromaSubsamplingY) || |
1286 | 99 | (tileConfigProp->u.av1C.chromaSamplePosition != configProp->u.av1C.chromaSamplePosition)) { |
1287 | 10 | avifDiagnosticsPrintf(diag, |
1288 | 10 | "The fields of the %s property of tile item ID %u of type '%.4s' differs from other tiles", |
1289 | 10 | configPropName, |
1290 | 10 | tile->id, |
1291 | 10 | (const char *)tile->type); |
1292 | 10 | return AVIF_RESULT_BMFF_PARSE_FAILED; |
1293 | 10 | } |
1294 | 99 | } |
1295 | 53 | } |
1296 | | |
1297 | 14.4k | const avifProperty * pixiProp = avifPropertyArrayFind(&item->properties, "pixi"); |
1298 | 14.4k | if (!pixiProp && (strictFlags & AVIF_STRICT_PIXI_REQUIRED)) { |
1299 | | // A pixi box is mandatory in all valid AVIF configurations. Bail out. |
1300 | 0 | avifDiagnosticsPrintf(diag, |
1301 | 0 | "[Strict] Item ID %u of type '%.4s' is missing mandatory pixi property", |
1302 | 0 | item->id, |
1303 | 0 | (const char *)item->type); |
1304 | 0 | return AVIF_RESULT_BMFF_PARSE_FAILED; |
1305 | 0 | } |
1306 | | |
1307 | 14.4k | if (pixiProp) { |
1308 | 1.94k | const uint32_t configDepth = avifCodecConfigurationBoxGetDepth(&configProp->u.av1C); |
1309 | 6.50k | for (uint8_t i = 0; i < pixiProp->u.pixi.planeCount; ++i) { |
1310 | 4.56k | if (pixiProp->u.pixi.planeDepths[i] != configDepth) { |
1311 | | // pixi depth must match configuration property depth |
1312 | 2 | avifDiagnosticsPrintf(diag, |
1313 | 2 | "Item ID %u depth specified by pixi property [%u] does not match %s property depth [%u]", |
1314 | 2 | item->id, |
1315 | 2 | pixiProp->u.pixi.planeDepths[i], |
1316 | 2 | configPropName, |
1317 | 2 | configDepth); |
1318 | 2 | return AVIF_RESULT_BMFF_PARSE_FAILED; |
1319 | 2 | } |
1320 | | #if defined(AVIF_ENABLE_EXPERIMENTAL_EXTENDED_PIXI) |
1321 | | if (pixiProp->u.pixi.subsamplingFlag[i]) { |
1322 | | if (pixiProp->u.pixi.subsamplingType[i] != avifCodecConfigurationBoxGetSubsamplingType(&configProp->u.av1C, i)) { |
1323 | | avifDiagnosticsPrintf(diag, |
1324 | | "Item ID %u subsampling type specified by pixi property [%u] for channel %u does not match %s property [%u,%u]", |
1325 | | item->id, |
1326 | | pixiProp->u.pixi.subsamplingType[i], |
1327 | | i, |
1328 | | configPropName, |
1329 | | configProp->u.av1C.chromaSubsamplingX, |
1330 | | configProp->u.av1C.chromaSubsamplingY); |
1331 | | return AVIF_RESULT_BMFF_PARSE_FAILED; |
1332 | | } |
1333 | | if (configProp->u.av1C.chromaSamplePosition != AVIF_CHROMA_SAMPLE_POSITION_UNKNOWN) { |
1334 | | const avifChromaSamplePosition expectedChromaSamplePosition = |
1335 | | i == AVIF_CHAN_Y ? AVIF_CHROMA_SAMPLE_POSITION_COLOCATED : configProp->u.av1C.chromaSamplePosition; |
1336 | | if (avifSubsamplingLocationToChromaSamplePosition(pixiProp->u.pixi.subsamplingType[i], |
1337 | | pixiProp->u.pixi.subsamplingLocation[i]) != |
1338 | | expectedChromaSamplePosition) { |
1339 | | avifDiagnosticsPrintf(diag, |
1340 | | "Item ID %u subsampling type and location specified by pixi property [%u,%u] for channel %u does not match %s property chroma sample position [%u]", |
1341 | | item->id, |
1342 | | pixiProp->u.pixi.subsamplingType[i], |
1343 | | pixiProp->u.pixi.subsamplingLocation[i], |
1344 | | i, |
1345 | | configPropName, |
1346 | | configProp->u.av1C.chromaSamplePosition); |
1347 | | return AVIF_RESULT_BMFF_PARSE_FAILED; |
1348 | | } |
1349 | | } |
1350 | | } |
1351 | | #endif // AVIF_ENABLE_EXPERIMENTAL_EXTENDED_PIXI |
1352 | 4.56k | } |
1353 | 1.94k | } |
1354 | | |
1355 | | #if defined(AVIF_ENABLE_EXPERIMENTAL_MINI) |
1356 | | if (item->miniBoxPixelFormat != AVIF_PIXEL_FORMAT_NONE) { |
1357 | | // This is a MinimizedImageBox ('mini'). |
1358 | | |
1359 | | avifPixelFormat av1CPixelFormat; |
1360 | | if (configProp->u.av1C.monochrome) { |
1361 | | av1CPixelFormat = AVIF_PIXEL_FORMAT_YUV400; |
1362 | | } else if (configProp->u.av1C.chromaSubsamplingY == 1) { |
1363 | | av1CPixelFormat = AVIF_PIXEL_FORMAT_YUV420; |
1364 | | } else if (configProp->u.av1C.chromaSubsamplingX == 1) { |
1365 | | av1CPixelFormat = AVIF_PIXEL_FORMAT_YUV422; |
1366 | | } else { |
1367 | | av1CPixelFormat = AVIF_PIXEL_FORMAT_YUV444; |
1368 | | } |
1369 | | if (item->miniBoxPixelFormat != av1CPixelFormat) { |
1370 | | if (!memcmp(configPropName, "av2C", 4) && item->miniBoxPixelFormat == AVIF_PIXEL_FORMAT_YUV400 && |
1371 | | av1CPixelFormat == AVIF_PIXEL_FORMAT_YUV420) { |
1372 | | // avm does not handle monochrome as of research-v8.0.0. |
1373 | | // 4:2:0 is used instead. |
1374 | | } else { |
1375 | | avifDiagnosticsPrintf(diag, |
1376 | | "Item ID %u format [%s] specified by MinimizedImageBox does not match %s property format [%s]", |
1377 | | item->id, |
1378 | | avifPixelFormatToString(item->miniBoxPixelFormat), |
1379 | | configPropName, |
1380 | | avifPixelFormatToString(av1CPixelFormat)); |
1381 | | return AVIF_RESULT_BMFF_PARSE_FAILED; |
1382 | | } |
1383 | | } |
1384 | | |
1385 | | if (configProp->u.av1C.chromaSamplePosition == /*CSP_UNKNOWN=*/0) { |
1386 | | // Section 6.4.2. Color config semantics of AV1 specification says: |
1387 | | // CSP_UNKNOWN - the source video transfer function must be signaled outside the AV1 bitstream |
1388 | | // See https://aomediacodec.github.io/av1-spec/#color-config-semantics |
1389 | | |
1390 | | // So item->miniBoxChromaSamplePosition can differ and will override the AV1 value. |
1391 | | } else if ((uint8_t)item->miniBoxChromaSamplePosition != configProp->u.av1C.chromaSamplePosition) { |
1392 | | avifDiagnosticsPrintf(diag, |
1393 | | "Item ID %u chroma sample position [%u] specified by MinimizedImageBox does not match %s property chroma sample position [%u]", |
1394 | | item->id, |
1395 | | (uint32_t)item->miniBoxChromaSamplePosition, |
1396 | | configPropName, |
1397 | | configProp->u.av1C.chromaSamplePosition); |
1398 | | return AVIF_RESULT_BMFF_PARSE_FAILED; |
1399 | | } |
1400 | | } |
1401 | | #endif // AVIF_ENABLE_EXPERIMENTAL_MINI |
1402 | | |
1403 | 14.4k | if (strictFlags & AVIF_STRICT_CLAP_VALID) { |
1404 | 0 | const avifProperty * clapProp = avifPropertyArrayFind(&item->properties, "clap"); |
1405 | 0 | if (clapProp) { |
1406 | 0 | const avifProperty * ispeProp = avifPropertyArrayFind(&item->properties, "ispe"); |
1407 | 0 | if (!ispeProp) { |
1408 | 0 | avifDiagnosticsPrintf(diag, |
1409 | 0 | "[Strict] Item ID %u is missing an ispe property, so its clap property cannot be validated", |
1410 | 0 | item->id); |
1411 | 0 | return AVIF_RESULT_BMFF_PARSE_FAILED; |
1412 | 0 | } |
1413 | | |
1414 | 0 | avifCropRect cropRect; |
1415 | 0 | const uint32_t imageW = ispeProp->u.ispe.width; |
1416 | 0 | const uint32_t imageH = ispeProp->u.ispe.height; |
1417 | 0 | const avifBool validClap = avifCropRectFromCleanApertureBox(&cropRect, &clapProp->u.clap, imageW, imageH, diag); |
1418 | 0 | if (!validClap) { |
1419 | 0 | return AVIF_RESULT_BMFF_PARSE_FAILED; |
1420 | 0 | } |
1421 | 0 | } |
1422 | 0 | } |
1423 | 14.4k | return AVIF_RESULT_OK; |
1424 | 14.4k | } |
1425 | | |
// Reads the payload of an item into outData, starting `offset` bytes into the item and
// spanning `partialByteCount` bytes (or all remaining bytes when partialByteCount is 0 or
// larger than what remains). Multiple extents are merged into item->mergedExtents, which
// either owns a copy of the data or aliases a persistent source buffer; outData points
// into that storage and stays owned by the decoder. On failure, diag is filled in.
static avifResult avifDecoderItemRead(avifDecoderItem * item,
                                      avifIO * io,
                                      avifROData * outData,
                                      size_t offset,
                                      size_t partialByteCount,
                                      avifDiagnostics * diag)
{
    if (item->mergedExtents.data && !item->partialMergedExtents) {
        // Multiple extents have already been concatenated for this item, just return it
        if (offset >= item->mergedExtents.size) {
            avifDiagnosticsPrintf(diag, "Item ID %u read has overflowing offset", item->id);
            return AVIF_RESULT_TRUNCATED_DATA;
        }
        outData->data = item->mergedExtents.data + offset;
        outData->size = item->mergedExtents.size - offset;
        return AVIF_RESULT_OK;
    }

    if (item->extents.count == 0) {
        avifDiagnosticsPrintf(diag, "Item ID %u has zero extents", item->id);
        return AVIF_RESULT_TRUNCATED_DATA;
    }

    // Find this item's source of all extents' data, based on the construction method
    const avifRWData * idatBuffer = NULL;
    if (item->idatStored) {
        // construction_method: idat(1)

        if (item->meta->idat.size > 0) {
            idatBuffer = &item->meta->idat;
        } else {
            // no associated idat box was found in the meta box, bail out
            avifDiagnosticsPrintf(diag, "Item ID %u is stored in an idat, but no associated idat box was found", item->id);
            return AVIF_RESULT_NO_CONTENT;
        }
    }

    // Merge extents into a single contiguous buffer
    if ((io->sizeHint > 0) && (item->size > io->sizeHint)) {
        // Sanity check: somehow the sum of extents exceeds the entire file or idat size!
        avifDiagnosticsPrintf(diag, "Item ID %u reported size failed size hint sanity check. Truncated data?", item->id);
        return AVIF_RESULT_TRUNCATED_DATA;
    }

    if (offset >= item->size) {
        avifDiagnosticsPrintf(diag, "Item ID %u read has overflowing offset", item->id);
        return AVIF_RESULT_TRUNCATED_DATA;
    }
    // Clamp the requested byte count to what the item actually holds past `offset`.
    const size_t maxOutputSize = item->size - offset;
    const size_t readOutputSize = (partialByteCount && (partialByteCount < maxOutputSize)) ? partialByteCount : maxOutputSize;
    const size_t totalBytesToRead = offset + readOutputSize;

    // If there is a single extent for this item and the source of the read buffer is going to be
    // persistent for the lifetime of the avifDecoder (whether it comes from its own internal
    // idatBuffer or from a known-persistent IO), we can avoid buffer duplication and just use the
    // preexisting buffer.
    avifBool singlePersistentBuffer = ((item->extents.count == 1) && (idatBuffer || io->persistent));
    if (!singlePersistentBuffer) {
        // Always allocate the item's full size here, as progressive image decodes will do partial
        // reads into this buffer and begin feeding the buffer to the underlying AV1 decoder, but
        // will then write more into this buffer without flushing the AV1 decoder (which is still
        // holding the address of the previous allocation of this buffer). This strategy avoids
        // use-after-free issues in the AV1 decoder and unnecessary reallocs as a typical
        // progressive decode use case will eventually decode the final layer anyway.
        AVIF_CHECKRES(avifRWDataRealloc(&item->mergedExtents, item->size));
        item->ownsMergedExtents = AVIF_TRUE;
    }

    // Set this until we manage to fill the entire mergedExtents buffer
    item->partialMergedExtents = AVIF_TRUE;

    uint8_t * front = item->mergedExtents.data;
    size_t remainingBytes = totalBytesToRead;
    for (uint32_t extentIter = 0; extentIter < item->extents.count; ++extentIter) {
        avifExtent * extent = &item->extents.extent[extentIter];

        // Read at most the remaining requested bytes from this extent.
        size_t bytesToRead = extent->size;
        if (bytesToRead > remainingBytes) {
            bytesToRead = remainingBytes;
        }

        avifROData offsetBuffer;
        if (idatBuffer) {
            if (extent->offset > idatBuffer->size) {
                avifDiagnosticsPrintf(diag, "Item ID %u has impossible extent offset in idat buffer", item->id);
                return AVIF_RESULT_BMFF_PARSE_FAILED;
            }
            // Since extent->offset (a uint64_t) is not bigger than idatBuffer->size (a size_t),
            // it is safe to cast extent->offset to size_t.
            const size_t extentOffset = (size_t)extent->offset;
            if (extent->size > idatBuffer->size - extentOffset) {
                avifDiagnosticsPrintf(diag, "Item ID %u has impossible extent size in idat buffer", item->id);
                return AVIF_RESULT_BMFF_PARSE_FAILED;
            }
            offsetBuffer.data = idatBuffer->data + extentOffset;
            offsetBuffer.size = idatBuffer->size - extentOffset;
        } else {
            // construction_method: file(0)

            if ((io->sizeHint > 0) && (extent->offset > io->sizeHint)) {
                avifDiagnosticsPrintf(diag, "Item ID %u extent offset failed size hint sanity check. Truncated data?", item->id);
                return AVIF_RESULT_BMFF_PARSE_FAILED;
            }
            avifResult readResult = io->read(io, 0, extent->offset, bytesToRead, &offsetBuffer);
            if (readResult != AVIF_RESULT_OK) {
                return readResult;
            }
            if (bytesToRead != offsetBuffer.size) {
                avifDiagnosticsPrintf(diag,
                                      "Item ID %u tried to read %zu bytes, but only received %zu bytes",
                                      item->id,
                                      bytesToRead,
                                      offsetBuffer.size);
                return AVIF_RESULT_TRUNCATED_DATA;
            }
        }

        if (singlePersistentBuffer) {
            // Single extent from a persistent source: alias the source buffer directly
            // instead of copying its bytes into a freshly-owned allocation.
            memcpy(&item->mergedExtents, &offsetBuffer, sizeof(avifRWData));
            item->mergedExtents.size = bytesToRead;
        } else {
            AVIF_ASSERT_OR_RETURN(item->ownsMergedExtents);
            AVIF_ASSERT_OR_RETURN(front);
            memcpy(front, offsetBuffer.data, bytesToRead);
            front += bytesToRead;
        }

        remainingBytes -= bytesToRead;
        if (remainingBytes == 0) {
            // This happens when partialByteCount is set
            break;
        }
    }
    if (remainingBytes != 0) {
        // This should be impossible?
        avifDiagnosticsPrintf(diag, "Item ID %u has %zu unexpected trailing bytes", item->id, remainingBytes);
        return AVIF_RESULT_TRUNCATED_DATA;
    }

    outData->data = item->mergedExtents.data + offset;
    outData->size = readOutputSize;
    // Remember whether the whole item has been merged; a later full-size read finishes the job.
    item->partialMergedExtents = (item->size != totalBytesToRead);
    return AVIF_RESULT_OK;
}
1570 | | |
1571 | | // Returns the avifCodecType of the first tile of the gridItem. |
1572 | | static avifCodecType avifDecoderItemGetGridCodecType(const avifDecoderItem * gridItem) |
1573 | 110 | { |
1574 | 377 | for (uint32_t i = 0; i < gridItem->meta->items.count; ++i) { |
1575 | 375 | avifDecoderItem * item = gridItem->meta->items.item[i]; |
1576 | 375 | const avifCodecType tileCodecType = avifGetCodecType(item->type); |
1577 | 375 | if ((item->dimgForID == gridItem->id) && (tileCodecType != AVIF_CODEC_TYPE_UNKNOWN)) { |
1578 | 108 | return tileCodecType; |
1579 | 108 | } |
1580 | 375 | } |
1581 | 2 | return AVIF_CODEC_TYPE_UNKNOWN; |
1582 | 110 | } |
1583 | | |
1584 | | // Fills the dimgIdxToItemIdx array with a mapping from each 0-based tile index in the 'dimg' reference |
1585 | | // to its corresponding 0-based index in the avifMeta::items array. |
1586 | | static avifResult avifFillDimgIdxToItemIdxArray(uint32_t * dimgIdxToItemIdx, uint32_t numExpectedTiles, const avifDecoderItem * gridItem) |
1587 | 165 | { |
1588 | 165 | const uint32_t itemIndexNotSet = UINT32_MAX; |
1589 | 589 | for (uint32_t dimgIdx = 0; dimgIdx < numExpectedTiles; ++dimgIdx) { |
1590 | 424 | dimgIdxToItemIdx[dimgIdx] = itemIndexNotSet; |
1591 | 424 | } |
1592 | 165 | uint32_t numTiles = 0; |
1593 | 1.42k | for (uint32_t i = 0; i < gridItem->meta->items.count; ++i) { |
1594 | 1.26k | if (gridItem->meta->items.item[i]->dimgForID == gridItem->id) { |
1595 | 424 | const uint32_t tileItemDimgIdx = gridItem->meta->items.item[i]->dimgIdx; |
1596 | 424 | AVIF_CHECKERR(tileItemDimgIdx < numExpectedTiles, AVIF_RESULT_INVALID_IMAGE_GRID); |
1597 | 424 | AVIF_CHECKERR(dimgIdxToItemIdx[tileItemDimgIdx] == itemIndexNotSet, AVIF_RESULT_INVALID_IMAGE_GRID); |
1598 | 424 | dimgIdxToItemIdx[tileItemDimgIdx] = i; |
1599 | 424 | ++numTiles; |
1600 | 424 | } |
1601 | 1.26k | } |
1602 | | // The number of tiles has been verified in avifDecoderItemReadAndParse(). |
1603 | 165 | AVIF_ASSERT_OR_RETURN(numTiles == numExpectedTiles); |
1604 | 165 | return AVIF_RESULT_OK; |
1605 | 165 | } |
1606 | | |
1607 | | // Copies the codec type property (av1C or av2C) from the first grid tile to the grid item. |
1608 | | // Also checks that all tiles have the same codec type and that it's valid. |
1609 | | static avifResult avifDecoderAdoptGridTileCodecType(avifDecoder * decoder, |
1610 | | avifDecoderItem * gridItem, |
1611 | | const uint32_t * dimgIdxToItemIdx, |
1612 | | uint32_t numTiles) |
1613 | 102 | { |
1614 | 102 | avifDecoderItem * firstTileItem = NULL; |
1615 | 277 | for (uint32_t dimgIdx = 0; dimgIdx < numTiles; ++dimgIdx) { |
1616 | 214 | const uint32_t itemIdx = dimgIdxToItemIdx[dimgIdx]; |
1617 | 214 | AVIF_ASSERT_OR_RETURN(itemIdx < gridItem->meta->items.count); |
1618 | 214 | avifDecoderItem * item = gridItem->meta->items.item[itemIdx]; |
1619 | | |
1620 | | // According to HEIF (ISO 14496-12), Section 6.6.2.3.1, the SingleItemTypeReferenceBox of type 'dimg' |
1621 | | // identifies the input images of the derived image item of type 'grid'. Since the reference_count |
1622 | | // shall be equal to rows*columns, unknown tile item types cannot be skipped but must be considered |
1623 | | // as errors. |
1624 | 214 | const avifCodecType tileCodecType = avifGetCodecType(item->type); |
1625 | 214 | if (tileCodecType == AVIF_CODEC_TYPE_UNKNOWN) { |
1626 | 25 | char type[4]; |
1627 | 125 | for (int j = 0; j < 4; j++) { |
1628 | 100 | if (isprint((unsigned char)item->type[j])) { |
1629 | 69 | type[j] = item->type[j]; |
1630 | 69 | } else { |
1631 | 31 | type[j] = '.'; |
1632 | 31 | } |
1633 | 100 | } |
1634 | 25 | avifDiagnosticsPrintf(&decoder->diag, |
1635 | 25 | "Tile item ID %u has an unknown item type '%.4s' (%02x%02x%02x%02x)", |
1636 | 25 | item->id, |
1637 | 25 | type, |
1638 | 25 | item->type[0], |
1639 | 25 | item->type[1], |
1640 | 25 | item->type[2], |
1641 | 25 | item->type[3]); |
1642 | 25 | return AVIF_RESULT_INVALID_IMAGE_GRID; |
1643 | 25 | } |
1644 | | |
1645 | 189 | if (item->hasUnsupportedEssentialProperty) { |
1646 | | // An essential property isn't supported by libavif; can't |
1647 | | // decode a grid image if any tile in the grid isn't supported. |
1648 | 10 | avifDiagnosticsPrintf(&decoder->diag, "Grid image contains tile with an unsupported property marked as essential"); |
1649 | 10 | return AVIF_RESULT_INVALID_IMAGE_GRID; |
1650 | 10 | } |
1651 | | |
1652 | 179 | if (firstTileItem == NULL) { |
1653 | 72 | firstTileItem = item; |
1654 | | // Adopt the configuration property of the first image item tile, so that it can be queried from |
1655 | | // the top-level color/alpha item during avifDecoderReset(). |
1656 | 72 | const avifCodecType codecType = avifGetCodecType(item->type); |
1657 | 72 | const char * configPropName = avifGetConfigurationPropertyName(codecType); |
1658 | 72 | const avifProperty * srcProp = avifPropertyArrayFind(&item->properties, configPropName); |
1659 | 72 | if (!srcProp) { |
1660 | 4 | avifDiagnosticsPrintf(&decoder->diag, "Grid image's first tile is missing an %s property", configPropName); |
1661 | 4 | return AVIF_RESULT_INVALID_IMAGE_GRID; |
1662 | 4 | } |
1663 | 68 | avifProperty * dstProp = (avifProperty *)avifArrayPush(&gridItem->properties); |
1664 | 68 | AVIF_CHECKERR(dstProp != NULL, AVIF_RESULT_OUT_OF_MEMORY); |
1665 | 68 | *dstProp = *srcProp; |
1666 | | |
1667 | 107 | } else if (memcmp(item->type, firstTileItem->type, 4)) { |
1668 | | // MIAF (ISO 23000-22:2019), Section 7.3.11.4.1: |
1669 | | // All input images of a grid image item shall use the same coding format [...] |
1670 | | // The coding format is defined by the item type. |
1671 | 0 | avifDiagnosticsPrintf(&decoder->diag, |
1672 | 0 | "Tile item ID %u of type '%.4s' differs from other tile type '%.4s'", |
1673 | 0 | item->id, |
1674 | 0 | (const char *)item->type, |
1675 | 0 | (const char *)firstTileItem->type); |
1676 | 0 | return AVIF_RESULT_INVALID_IMAGE_GRID; |
1677 | 0 | } |
1678 | 179 | } |
1679 | 63 | return AVIF_RESULT_OK; |
1680 | 102 | } |
1681 | | |
1682 | | // If the item is a grid, copies the codec type property (av1C or av2C) from the first grid tile to the grid item. |
1683 | | // Also checks that all tiles have the same codec type and that it's valid. |
1684 | | static avifResult avifDecoderAdoptGridTileCodecTypeIfNeeded(avifDecoder * decoder, avifDecoderItem * item, const avifTileInfo * info) |
1685 | 14.6k | { |
1686 | 14.6k | if ((info->grid.rows > 0) && (info->grid.columns > 0)) { |
1687 | | // The number of tiles was verified in avifDecoderItemReadAndParse(). |
1688 | 102 | const uint32_t numTiles = info->grid.rows * info->grid.columns; |
1689 | 102 | uint32_t * dimgIdxToItemIdx = (uint32_t *)avifAlloc(numTiles * sizeof(uint32_t)); |
1690 | 102 | AVIF_CHECKERR(dimgIdxToItemIdx != NULL, AVIF_RESULT_OUT_OF_MEMORY); |
1691 | 102 | avifResult result = avifFillDimgIdxToItemIdxArray(dimgIdxToItemIdx, numTiles, item); |
1692 | 102 | if (result == AVIF_RESULT_OK) { |
1693 | 102 | result = avifDecoderAdoptGridTileCodecType(decoder, item, dimgIdxToItemIdx, numTiles); |
1694 | 102 | } |
1695 | 102 | avifFree(dimgIdxToItemIdx); |
1696 | 102 | AVIF_CHECKRES(result); |
1697 | 102 | } |
1698 | 14.6k | return AVIF_RESULT_OK; |
1699 | 14.6k | } |
1700 | | |
1701 | | // Creates the tiles and associate them to the items in the order of the 'dimg' association. |
1702 | | static avifResult avifDecoderGenerateImageGridTiles(avifDecoder * decoder, |
1703 | | avifDecoderItem * gridItem, |
1704 | | avifItemCategory itemCategory, |
1705 | | const uint32_t * dimgIdxToItemIdx, |
1706 | | uint32_t numTiles) |
1707 | 63 | { |
1708 | 63 | avifBool progressive = AVIF_TRUE; |
1709 | 205 | for (uint32_t dimgIdx = 0; dimgIdx < numTiles; ++dimgIdx) { |
1710 | 152 | const uint32_t itemIdx = dimgIdxToItemIdx[dimgIdx]; |
1711 | 152 | AVIF_ASSERT_OR_RETURN(itemIdx < gridItem->meta->items.count); |
1712 | 152 | avifDecoderItem * item = gridItem->meta->items.item[itemIdx]; |
1713 | | |
1714 | 152 | const avifCodecType tileCodecType = avifGetCodecType(item->type); |
1715 | 152 | AVIF_CHECKERR(tileCodecType != AVIF_CODEC_TYPE_UNKNOWN, AVIF_RESULT_INVALID_IMAGE_GRID); |
1716 | 152 | const avifTile * tile = |
1717 | 152 | avifDecoderDataCreateTile(decoder->data, tileCodecType, item->width, item->height, avifDecoderItemOperatingPoint(item)); |
1718 | 152 | AVIF_CHECKERR(tile != NULL, AVIF_RESULT_OUT_OF_MEMORY); |
1719 | 152 | AVIF_CHECKRES(avifCodecDecodeInputFillFromDecoderItem(tile->input, |
1720 | 152 | item, |
1721 | 152 | decoder->allowProgressive, |
1722 | 152 | decoder->imageCountLimit, |
1723 | 152 | decoder->io->sizeHint, |
1724 | 152 | &decoder->diag)); |
1725 | 142 | tile->input->itemCategory = itemCategory; |
1726 | | |
1727 | 142 | if (!item->progressive) { |
1728 | 128 | progressive = AVIF_FALSE; |
1729 | 128 | } |
1730 | 142 | } |
1731 | 53 | if (itemCategory == AVIF_ITEM_COLOR && progressive) { |
1732 | | // If all the items that make up the grid are progressive, then propagate that status to the top-level grid item. |
1733 | 1 | gridItem->progressive = AVIF_TRUE; |
1734 | 1 | } |
1735 | 53 | return AVIF_RESULT_OK; |
1736 | 63 | } |
1737 | | |
// Allocates the dstImage. Also verifies some spec compliance rules for grids, if relevant.
// The first tile's decoded properties (dimensions, depth, YUV format, CICP) drive the
// allocation; planes are only (re)allocated when those properties change between frames.
static avifResult avifDecoderDataAllocateImagePlanes(avifDecoderData * data, const avifTileInfo * info, avifImage * dstImage)
{
    const avifTile * tile = &data->tiles.tile[info->firstTileIndex];
    uint32_t dstWidth;
    uint32_t dstHeight;

    if (info->grid.rows > 0 && info->grid.columns > 0) {
        const avifImageGrid * grid = &info->grid;
        // Validate grid image size and tile size.
        //
        // HEIF (ISO/IEC 23008-12:2017), Section 6.6.2.3.1:
        //   The tiled input images shall completely "cover" the reconstructed image grid canvas, ...
        if (((tile->image->width * grid->columns) < grid->outputWidth) || ((tile->image->height * grid->rows) < grid->outputHeight)) {
            avifDiagnosticsPrintf(data->diag,
                                  "Grid image tiles do not completely cover the image (HEIF (ISO/IEC 23008-12:2017), Section 6.6.2.3.1)");
            return AVIF_RESULT_INVALID_IMAGE_GRID;
        }
        // Tiles in the rightmost column and bottommost row must overlap the reconstructed image grid canvas. See MIAF (ISO/IEC 23000-22:2019), Section 7.3.11.4.2, Figure 2.
        if (((tile->image->width * (grid->columns - 1)) >= grid->outputWidth) ||
            ((tile->image->height * (grid->rows - 1)) >= grid->outputHeight)) {
            avifDiagnosticsPrintf(data->diag,
                                  "Grid image tiles in the rightmost column and bottommost row do not overlap the reconstructed image grid canvas. See MIAF (ISO/IEC 23000-22:2019), Section 7.3.11.4.2, Figure 2");
            return AVIF_RESULT_INVALID_IMAGE_GRID;
        }
        // Chroma subsampling imposes evenness constraints on the grid layout.
        if (!avifAreGridDimensionsValid(tile->image->yuvFormat,
                                        grid->outputWidth,
                                        grid->outputHeight,
                                        tile->image->width,
                                        tile->image->height,
                                        data->diag)) {
            return AVIF_RESULT_INVALID_IMAGE_GRID;
        }
        dstWidth = grid->outputWidth;
        dstHeight = grid->outputHeight;
    } else {
        // Only one tile. Width and height are inherited from the 'ispe' property of the corresponding avifDecoderItem.
        dstWidth = tile->width;
        dstHeight = tile->height;
    }

    const avifBool alpha = avifIsAlpha(tile->input->itemCategory);
    if (alpha) {
        // An alpha tile does not contain any YUV pixels.
        AVIF_ASSERT_OR_RETURN(tile->image->yuvFormat == AVIF_PIXEL_FORMAT_NONE);
    }

    const uint32_t dstDepth = tile->image->depth;

    // Lazily populate dstImage with the new frame's properties.
    const avifBool dimsOrDepthIsDifferent = (dstImage->width != dstWidth) || (dstImage->height != dstHeight) ||
                                            (dstImage->depth != dstDepth);
    const avifBool yuvFormatIsDifferent = !alpha && (dstImage->yuvFormat != tile->image->yuvFormat);
    if (dimsOrDepthIsDifferent || yuvFormatIsDifferent) {
        if (alpha) {
            // Alpha doesn't match size, just bail out
            avifDiagnosticsPrintf(data->diag, "Alpha plane dimensions do not match color plane dimensions");
            return AVIF_RESULT_INVALID_IMAGE_GRID;
        }

        // Free only what is stale: all planes on a dims/depth change, YUV planes on a format change.
        if (dimsOrDepthIsDifferent) {
            avifImageFreePlanes(dstImage, AVIF_PLANES_ALL);
            dstImage->width = dstWidth;
            dstImage->height = dstHeight;
            dstImage->depth = dstDepth;
        }
        if (yuvFormatIsDifferent) {
            avifImageFreePlanes(dstImage, AVIF_PLANES_YUV);
            dstImage->yuvFormat = tile->image->yuvFormat;
        }
        // Keep dstImage->yuvRange which is already set to its correct value
        // (extracted from the 'colr' box if parsed or from a Sequence Header OBU otherwise).

        // CICP values are only adopted from the tile once; later frames must not overwrite them.
        if (!data->cicpSet) {
            data->cicpSet = AVIF_TRUE;
            dstImage->colorPrimaries = tile->image->colorPrimaries;
            dstImage->transferCharacteristics = tile->image->transferCharacteristics;
            dstImage->matrixCoefficients = tile->image->matrixCoefficients;
        }
    }

    if (avifImageAllocatePlanes(dstImage, alpha ? AVIF_PLANES_A : AVIF_PLANES_YUV) != AVIF_RESULT_OK) {
        avifDiagnosticsPrintf(data->diag, "Image allocation failure");
        return AVIF_RESULT_OUT_OF_MEMORY;
    }
    return AVIF_RESULT_OK;
}
1825 | | |
1826 | | // Copies over the pixels from the tile into dstImage. |
1827 | | // Verifies that the relevant properties of the tile match those of the first tile in case of a grid. |
1828 | | static avifResult avifDecoderDataCopyTileToImage(avifDecoderData * data, |
1829 | | const avifTileInfo * info, |
1830 | | avifImage * dstImage, |
1831 | | const avifTile * tile, |
1832 | | unsigned int tileIndex) |
1833 | 23 | { |
1834 | 23 | const avifTile * firstTile = &data->tiles.tile[info->firstTileIndex]; |
1835 | 23 | if (tile != firstTile) { |
1836 | | // Check for tile consistency. All tiles in a grid image should match the first tile in the properties checked below. |
1837 | 8 | if ((tile->image->width != firstTile->image->width) || (tile->image->height != firstTile->image->height) || |
1838 | 8 | (tile->image->depth != firstTile->image->depth) || (tile->image->yuvFormat != firstTile->image->yuvFormat) || |
1839 | 8 | (tile->image->yuvRange != firstTile->image->yuvRange) || (tile->image->colorPrimaries != firstTile->image->colorPrimaries) || |
1840 | 8 | (tile->image->transferCharacteristics != firstTile->image->transferCharacteristics) || |
1841 | 8 | (tile->image->matrixCoefficients != firstTile->image->matrixCoefficients)) { |
1842 | 5 | avifDiagnosticsPrintf(data->diag, "Grid image contains mismatched tiles"); |
1843 | 5 | return AVIF_RESULT_INVALID_IMAGE_GRID; |
1844 | 5 | } |
1845 | 8 | } |
1846 | | |
1847 | 18 | avifImage srcView; |
1848 | 18 | avifImageSetDefaults(&srcView); |
1849 | 18 | avifImage dstView; |
1850 | 18 | avifImageSetDefaults(&dstView); |
1851 | 18 | avifCropRect dstViewRect = { 0, 0, firstTile->image->width, firstTile->image->height }; |
1852 | 18 | if (info->grid.rows > 0 && info->grid.columns > 0) { |
1853 | 18 | unsigned int rowIndex = tileIndex / info->grid.columns; |
1854 | 18 | unsigned int colIndex = tileIndex % info->grid.columns; |
1855 | 18 | dstViewRect.x = firstTile->image->width * colIndex; |
1856 | 18 | dstViewRect.y = firstTile->image->height * rowIndex; |
1857 | 18 | if (dstViewRect.x + dstViewRect.width > info->grid.outputWidth) { |
1858 | 18 | dstViewRect.width = info->grid.outputWidth - dstViewRect.x; |
1859 | 18 | } |
1860 | 18 | if (dstViewRect.y + dstViewRect.height > info->grid.outputHeight) { |
1861 | 3 | dstViewRect.height = info->grid.outputHeight - dstViewRect.y; |
1862 | 3 | } |
1863 | 18 | } |
1864 | 18 | const avifCropRect srcViewRect = { 0, 0, dstViewRect.width, dstViewRect.height }; |
1865 | 18 | AVIF_ASSERT_OR_RETURN(avifImageSetViewRect(&dstView, dstImage, &dstViewRect) == AVIF_RESULT_OK && |
1866 | 18 | avifImageSetViewRect(&srcView, tile->image, &srcViewRect) == AVIF_RESULT_OK); |
1867 | 18 | avifImageCopySamples(&dstView, &srcView, avifIsAlpha(tile->input->itemCategory) ? AVIF_PLANES_A : AVIF_PLANES_YUV); |
1868 | 18 | return AVIF_RESULT_OK; |
1869 | 18 | } |
1870 | | |
// If colorId == 0 (a sentinel value as item IDs must be nonzero), accept any found EXIF/XMP metadata. Passing in 0
// is used when finding metadata in a meta box embedded in a trak box, as any items inside of a meta box that is
// inside of a trak box are implicitly associated to the track.
// On success, any found Exif payload is stored in image->exif and any XMP payload in image->xmp.
static avifResult avifDecoderFindMetadata(avifDecoder * decoder, avifMeta * meta, avifImage * image, uint32_t colorId)
{
    if (decoder->ignoreExif && decoder->ignoreXMP) {
        // Nothing to do!
        return AVIF_RESULT_OK;
    }

    for (uint32_t itemIndex = 0; itemIndex < meta->items.count; ++itemIndex) {
        avifDecoderItem * item = meta->items.item[itemIndex];
        if (!item->size) {
            // Empty payload; nothing to extract.
            continue;
        }
        if (item->hasUnsupportedEssentialProperty) {
            // An essential property isn't supported by libavif; ignore the item.
            continue;
        }

        if ((colorId > 0) && (item->descForID != colorId)) {
            // Not a content description (metadata) for the colorOBU, skip it
            continue;
        }

        if (!decoder->ignoreExif && !memcmp(item->type, "Exif", 4)) {
            avifROData exifContents;
            avifResult readResult = avifDecoderItemRead(item, decoder->io, &exifContents, 0, 0, &decoder->diag);
            if (readResult != AVIF_RESULT_OK) {
                return readResult;
            }

            // Advance past Annex A.2.1's header
            BEGIN_STREAM(exifBoxStream, exifContents.data, exifContents.size, &decoder->diag, "Exif header");
#if defined(AVIF_ENABLE_EXPERIMENTAL_MINI)
            // The MinimizedImageBox does not signal the exifTiffHeaderOffset.
            if (!meta->fromMiniBox)
#endif
            {
                uint32_t exifTiffHeaderOffset;
                AVIF_CHECKERR(avifROStreamReadU32(&exifBoxStream, &exifTiffHeaderOffset),
                              AVIF_RESULT_INVALID_EXIF_PAYLOAD); // unsigned int(32) exif_tiff_header_offset;
                size_t expectedExifTiffHeaderOffset;
                AVIF_CHECKRES(avifGetExifTiffHeaderOffset(avifROStreamCurrent(&exifBoxStream),
                                                          avifROStreamRemainingBytes(&exifBoxStream),
                                                          &expectedExifTiffHeaderOffset));
                // The offset stored in the Exif box must agree with the actual TIFF header position.
                AVIF_CHECKERR(exifTiffHeaderOffset == expectedExifTiffHeaderOffset, AVIF_RESULT_INVALID_EXIF_PAYLOAD);
            }

            AVIF_CHECKRES(avifRWDataSet(&image->exif, avifROStreamCurrent(&exifBoxStream), avifROStreamRemainingBytes(&exifBoxStream)));
        } else if (!decoder->ignoreXMP && !memcmp(item->type, "mime", 4) &&
                   !strcmp(item->contentType.contentType, AVIF_CONTENT_TYPE_XMP)) {
            avifROData xmpContents;
            avifResult readResult = avifDecoderItemRead(item, decoder->io, &xmpContents, 0, 0, &decoder->diag);
            if (readResult != AVIF_RESULT_OK) {
                return readResult;
            }

            AVIF_CHECKRES(avifImageSetMetadataXMP(image, xmpContents.data, xmpContents.size));
        }
    }
    return AVIF_RESULT_OK;
}
1934 | | |
1935 | | // --------------------------------------------------------------------------- |
1936 | | // URN |
1937 | | |
1938 | | static avifBool isAlphaURN(const char * urn) |
1939 | 332 | { |
1940 | 332 | return !strcmp(urn, AVIF_URN_ALPHA0) || !strcmp(urn, AVIF_URN_ALPHA1); |
1941 | 332 | } |
1942 | | |
1943 | | // --------------------------------------------------------------------------- |
1944 | | // BMFF Parsing |
1945 | | |
1946 | | static avifBool avifParseHandlerBox(const uint8_t * raw, size_t rawLen, uint8_t handlerType[4], avifDiagnostics * diag) |
1947 | 16.2k | { |
1948 | 16.2k | BEGIN_STREAM(s, raw, rawLen, diag, "Box[hdlr]"); |
1949 | | |
1950 | 16.2k | AVIF_CHECK(avifROStreamReadAndEnforceVersion(&s, /*enforcedVersion=*/0, /*flags=*/NULL)); |
1951 | | |
1952 | 16.2k | uint32_t predefined; |
1953 | 16.2k | AVIF_CHECK(avifROStreamReadU32(&s, &predefined)); // unsigned int(32) pre_defined = 0; |
1954 | 16.2k | if (predefined != 0) { |
1955 | 49 | avifDiagnosticsPrintf(diag, "Box[hdlr] contains a pre_defined value that is nonzero"); |
1956 | 49 | return AVIF_FALSE; |
1957 | 49 | } |
1958 | | |
1959 | 16.2k | AVIF_CHECK(avifROStreamRead(&s, handlerType, 4)); // unsigned int(32) handler_type; |
1960 | | |
1961 | 64.7k | for (int i = 0; i < 3; ++i) { |
1962 | 48.5k | uint32_t reserved; |
1963 | 48.5k | AVIF_CHECK(avifROStreamReadU32(&s, &reserved)); // const unsigned int(32)[3] reserved = 0; |
1964 | 48.5k | } |
1965 | | |
1966 | | // Verify that a valid string is here, but don't bother to store it |
1967 | 16.1k | AVIF_CHECK(avifROStreamReadString(&s, NULL, 0)); // string name; |
1968 | 16.1k | return AVIF_TRUE; |
1969 | 16.1k | } |
1970 | | |
// Parses an ItemLocationBox ('iloc'), giving each referenced item its extents
// (offset + size pairs) within the file or within the 'idat' box.
// See Section 8.11.3 of ISO/IEC 14496-12.
static avifResult avifParseItemLocationBox(avifMeta * meta, const uint8_t * raw, size_t rawLen, avifDiagnostics * diag)
{
    BEGIN_STREAM(s, raw, rawLen, diag, "Box[iloc]");

    // Section 8.11.3.2 of ISO/IEC 14496-12.
    uint8_t version;
    AVIF_CHECKERR(avifROStreamReadVersionAndFlags(&s, &version, NULL), AVIF_RESULT_BMFF_PARSE_FAILED);
    if (version > 2) {
        avifDiagnosticsPrintf(diag, "Box[iloc] has an unsupported version [%u]", version);
        return AVIF_RESULT_BMFF_PARSE_FAILED;
    }

    uint8_t offsetSize, lengthSize, baseOffsetSize, indexSize = 0;
    uint32_t reserved;
    AVIF_CHECKERR(avifROStreamReadBitsU8(&s, &offsetSize, /*bitCount=*/4), AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(4) offset_size;
    AVIF_CHECKERR(avifROStreamReadBitsU8(&s, &lengthSize, /*bitCount=*/4), AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(4) length_size;
    AVIF_CHECKERR(avifROStreamReadBitsU8(&s, &baseOffsetSize, /*bitCount=*/4), AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(4) base_offset_size;
    if (version == 1 || version == 2) {
        AVIF_CHECKERR(avifROStreamReadBitsU8(&s, &indexSize, /*bitCount=*/4), AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(4) index_size;
    } else {
        AVIF_CHECKERR(avifROStreamReadBitsU32(&s, &reserved, /*bitCount=*/4), AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(4) reserved;
    }

    // Section 8.11.3.3 of ISO/IEC 14496-12.
    // Each field size is a byte count and may only be 0, 4 or 8.
    if ((offsetSize != 0 && offsetSize != 4 && offsetSize != 8) || (lengthSize != 0 && lengthSize != 4 && lengthSize != 8) ||
        (baseOffsetSize != 0 && baseOffsetSize != 4 && baseOffsetSize != 8) || (indexSize != 0 && indexSize != 4 && indexSize != 8)) {
        avifDiagnosticsPrintf(diag, "Box[iloc] has an invalid size");
        return AVIF_RESULT_BMFF_PARSE_FAILED;
    }

    uint16_t tmp16;
    uint32_t itemCount;
    if (version < 2) {
        AVIF_CHECKERR(avifROStreamReadU16(&s, &tmp16), AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(16) item_count;
        itemCount = tmp16;
    } else {
        AVIF_CHECKERR(avifROStreamReadU32(&s, &itemCount), AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(32) item_count;
    }
    for (uint32_t i = 0; i < itemCount; ++i) {
        uint32_t itemID;
        if (version < 2) {
            AVIF_CHECKERR(avifROStreamReadU16(&s, &tmp16), AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(16) item_ID;
            itemID = tmp16;
        } else {
            AVIF_CHECKERR(avifROStreamReadU32(&s, &itemID), AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(32) item_ID;
        }
        AVIF_CHECKRES(avifCheckItemID("iloc", itemID, diag));

        avifDecoderItem * item;
        AVIF_CHECKRES(avifMetaFindOrCreateItem(meta, itemID, &item));
        if (item->extents.count > 0) {
            // This item has already been given extents via this iloc box. This is invalid.
            avifDiagnosticsPrintf(diag, "Item ID [%u] contains duplicate sets of extents", itemID);
            return AVIF_RESULT_BMFF_PARSE_FAILED;
        }

        if (version == 1 || version == 2) {
            AVIF_CHECKERR(avifROStreamReadBitsU32(&s, &reserved, /*bitCount=*/12), AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(12) reserved = 0;
            if (reserved) {
                avifDiagnosticsPrintf(diag, "Box[iloc] has a non null reserved field [%u]", reserved);
                return AVIF_RESULT_BMFF_PARSE_FAILED;
            }
            uint8_t constructionMethod;
            AVIF_CHECKERR(avifROStreamReadBitsU8(&s, &constructionMethod, /*bitCount=*/4),
                          AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(4) construction_method;
            if (constructionMethod != 0 /* file offset */ && constructionMethod != 1 /* idat offset */) {
                // construction method 2 (item offset) unsupported
                avifDiagnosticsPrintf(diag, "Box[iloc] has an unsupported construction method [%u]", constructionMethod);
                return AVIF_RESULT_BMFF_PARSE_FAILED;
            }
            if (constructionMethod == 1) {
                // Extent offsets for this item are relative to the meta box's 'idat' payload,
                // not to the start of the file.
                item->idatStored = AVIF_TRUE;
            }
        }

        uint16_t dataReferenceIndex;
        AVIF_CHECKERR(avifROStreamReadU16(&s, &dataReferenceIndex), AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(16) data_reference_index;
        uint64_t baseOffset;
        AVIF_CHECKERR(avifROStreamReadUX8(&s, &baseOffset, baseOffsetSize), AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(base_offset_size*8) base_offset;
        uint16_t extentCount;
        AVIF_CHECKERR(avifROStreamReadU16(&s, &extentCount), AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(16) extent_count;
        for (int extentIter = 0; extentIter < extentCount; ++extentIter) {
            if ((version == 1 || version == 2) && indexSize > 0) {
                // Section 8.11.3.1 of ISO/IEC 14496-12:
                // The item_reference_index is only used for the method item_offset; it indicates the 1-based index
                // of the item reference with referenceType 'iloc' linked from this item. If index_size is 0, then
                // the value 1 is implied; the value 0 is reserved.
                uint64_t itemReferenceIndex; // Ignored unless construction_method=2 which is unsupported, but still read it.
                AVIF_CHECKERR(avifROStreamReadUX8(&s, &itemReferenceIndex, indexSize),
                              AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(index_size*8) item_reference_index;
            }

            uint64_t extentOffset;
            AVIF_CHECKERR(avifROStreamReadUX8(&s, &extentOffset, offsetSize), AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(offset_size*8) extent_offset;
            uint64_t extentLength;
            AVIF_CHECKERR(avifROStreamReadUX8(&s, &extentLength, lengthSize), AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(length_size*8) extent_length;

            avifExtent * extent = (avifExtent *)avifArrayPush(&item->extents);
            AVIF_CHECKERR(extent != NULL, AVIF_RESULT_OUT_OF_MEMORY);
            // Guard the unsigned 64-bit addition below against wrap-around.
            if (extentOffset > UINT64_MAX - baseOffset) {
                avifDiagnosticsPrintf(diag,
                                      "Item ID [%u] contains an extent offset which overflows: [base: %" PRIu64 " offset:%" PRIu64 "]",
                                      itemID,
                                      baseOffset,
                                      extentOffset);
                return AVIF_RESULT_BMFF_PARSE_FAILED;
            }
            uint64_t offset = baseOffset + extentOffset;
            extent->offset = offset;
#if UINT64_MAX > SIZE_MAX
            // On 32-bit targets a 64-bit extent length may not fit in size_t.
            if (extentLength > SIZE_MAX) {
                avifDiagnosticsPrintf(diag, "Item ID [%u] contains an extent length which overflows: [%" PRIu64 "]", itemID, extentLength);
                return AVIF_RESULT_BMFF_PARSE_FAILED;
            }
#endif
            extent->size = (size_t)extentLength;
            // Guard the accumulation of the item's total size against size_t wrap-around.
            if (extent->size > SIZE_MAX - item->size) {
                avifDiagnosticsPrintf(diag,
                                      "Item ID [%u] contains an extent length which overflows the item size: [%zu, %zu]",
                                      itemID,
                                      extent->size,
                                      item->size);
                return AVIF_RESULT_BMFF_PARSE_FAILED;
            }
            item->size += extent->size;
        }
    }
    return AVIF_RESULT_OK;
}
2100 | | |
// Parses an ImageGrid ('grid') derived image item payload into *grid and
// validates that the output dimensions are nonzero and within the decoder limits.
static avifBool avifParseImageGridBox(avifImageGrid * grid,
                                      const uint8_t * raw,
                                      size_t rawLen,
                                      uint32_t imageSizeLimit,
                                      uint32_t imageDimensionLimit,
                                      avifDiagnostics * diag)
{
    BEGIN_STREAM(s, raw, rawLen, diag, "Box[grid]");

    uint8_t version, flags;
    AVIF_CHECK(avifROStreamRead(&s, &version, 1)); // unsigned int(8) version = 0;
    if (version != 0) {
        avifDiagnosticsPrintf(diag, "Box[grid] has unsupported version [%u]", version);
        return AVIF_FALSE;
    }
    uint8_t rowsMinusOne, columnsMinusOne;
    AVIF_CHECK(avifROStreamRead(&s, &flags, 1));           // unsigned int(8) flags;
    AVIF_CHECK(avifROStreamRead(&s, &rowsMinusOne, 1));    // unsigned int(8) rows_minus_one;
    AVIF_CHECK(avifROStreamRead(&s, &columnsMinusOne, 1)); // unsigned int(8) columns_minus_one;
    grid->rows = (uint32_t)rowsMinusOne + 1;
    grid->columns = (uint32_t)columnsMinusOne + 1;

    // Bit 0 of flags selects whether the output dimensions are stored as 16 or 32 bits.
    uint32_t fieldLength = ((flags & 1) + 1) * 16;
    if (fieldLength == 16) {
        uint16_t outputWidth16, outputHeight16;
        AVIF_CHECK(avifROStreamReadU16(&s, &outputWidth16));  // unsigned int(FieldLength) output_width;
        AVIF_CHECK(avifROStreamReadU16(&s, &outputHeight16)); // unsigned int(FieldLength) output_height;
        grid->outputWidth = outputWidth16;
        grid->outputHeight = outputHeight16;
    } else {
        if (fieldLength != 32) {
            // This should be impossible
            avifDiagnosticsPrintf(diag, "Grid box contains illegal field length: [%u]", fieldLength);
            return AVIF_FALSE;
        }
        AVIF_CHECK(avifROStreamReadU32(&s, &grid->outputWidth));  // unsigned int(FieldLength) output_width;
        AVIF_CHECK(avifROStreamReadU32(&s, &grid->outputHeight)); // unsigned int(FieldLength) output_height;
    }
    if ((grid->outputWidth == 0) || (grid->outputHeight == 0)) {
        avifDiagnosticsPrintf(diag, "Grid box contains illegal dimensions: [%u x %u]", grid->outputWidth, grid->outputHeight);
        return AVIF_FALSE;
    }
    if (avifDimensionsTooLarge(grid->outputWidth, grid->outputHeight, imageSizeLimit, imageDimensionLimit)) {
        avifDiagnosticsPrintf(diag, "Grid box dimensions are too large: [%u x %u]", grid->outputWidth, grid->outputHeight);
        return AVIF_FALSE;
    }
    // Trailing bytes after the grid payload are not allowed.
    return avifROStreamRemainingBytes(&s) == 0;
}
2149 | | |
// Parses GainMapMetadata fields from the stream into *gainMap. The stream is
// expected to be positioned at the is_multichannel bit. Single-channel metadata
// is expanded so that all three channels of *gainMap are populated.
static avifBool avifParseGainMapMetadata(avifGainMap * gainMap, avifROStream * s)
{
    uint32_t isMultichannel;
    AVIF_CHECK(avifROStreamReadBitsU32(s, &isMultichannel, 1)); // unsigned int(1) is_multichannel;
    const uint8_t channelCount = isMultichannel ? 3 : 1;

    uint32_t useBaseColorSpace;
    AVIF_CHECK(avifROStreamReadBitsU32(s, &useBaseColorSpace, 1)); // unsigned int(1) use_base_colour_space;
    gainMap->useBaseColorSpace = useBaseColorSpace ? AVIF_TRUE : AVIF_FALSE;

    uint32_t reserved;
    AVIF_CHECK(avifROStreamReadBitsU32(s, &reserved, 6)); // unsigned int(6) reserved;

    AVIF_CHECK(avifROStreamReadU32(s, &gainMap->baseHdrHeadroom.n)); // unsigned int(32) base_hdr_headroom_numerator;
    AVIF_CHECK(avifROStreamReadU32(s, &gainMap->baseHdrHeadroom.d)); // unsigned int(32) base_hdr_headroom_denominator;
    AVIF_CHECK(avifROStreamReadU32(s, &gainMap->alternateHdrHeadroom.n)); // unsigned int(32) alternate_hdr_headroom_numerator;
    AVIF_CHECK(avifROStreamReadU32(s, &gainMap->alternateHdrHeadroom.d)); // unsigned int(32) alternate_hdr_headroom_denominator;

    for (int c = 0; c < channelCount; ++c) {
        // Signed numerators are read as uint32_t and reinterpreted via the cast
        // (two's complement representation is assumed).
        AVIF_CHECK(avifROStreamReadU32(s, (uint32_t *)&gainMap->gainMapMin[c].n)); // int(32) gain_map_min_numerator;
        AVIF_CHECK(avifROStreamReadU32(s, &gainMap->gainMapMin[c].d)); // unsigned int(32) gain_map_min_denominator;
        AVIF_CHECK(avifROStreamReadU32(s, (uint32_t *)&gainMap->gainMapMax[c].n)); // int(32) gain_map_max_numerator;
        AVIF_CHECK(avifROStreamReadU32(s, &gainMap->gainMapMax[c].d)); // unsigned int(32) gain_map_max_denominator;
        AVIF_CHECK(avifROStreamReadU32(s, &gainMap->gainMapGamma[c].n)); // unsigned int(32) gamma_numerator;
        AVIF_CHECK(avifROStreamReadU32(s, &gainMap->gainMapGamma[c].d)); // unsigned int(32) gamma_denominator;
        AVIF_CHECK(avifROStreamReadU32(s, (uint32_t *)&gainMap->baseOffset[c].n)); // int(32) base_offset_numerator;
        AVIF_CHECK(avifROStreamReadU32(s, &gainMap->baseOffset[c].d)); // unsigned int(32) base_offset_denominator;
        AVIF_CHECK(avifROStreamReadU32(s, (uint32_t *)&gainMap->alternateOffset[c].n)); // int(32) alternate_offset_numerator;
        AVIF_CHECK(avifROStreamReadU32(s, &gainMap->alternateOffset[c].d)); // unsigned int(32) alternate_offset_denominator;
    }

    // Fill the remaining values by copying those from the first channel.
    for (int c = channelCount; c < 3; ++c) {
        gainMap->gainMapMin[c] = gainMap->gainMapMin[0];
        gainMap->gainMapMax[c] = gainMap->gainMapMax[0];
        gainMap->gainMapGamma[c] = gainMap->gainMapGamma[0];
        gainMap->baseOffset[c] = gainMap->baseOffset[0];
        gainMap->alternateOffset[c] = gainMap->alternateOffset[0];
    }
    return AVIF_TRUE;
}
2191 | | |
// Parses a ToneMappedImage ('tmap') derived image item payload into *gainMap.
// If the gain map's version or minimum_version tag is not supported, returns AVIF_RESULT_NOT_IMPLEMENTED.
static avifResult avifParseToneMappedImageBox(avifGainMap * gainMap, const uint8_t * raw, size_t rawLen, avifDiagnostics * diag)
{
    BEGIN_STREAM(s, raw, rawLen, diag, "Box[tmap]");

    uint8_t version;
    AVIF_CHECKERR(avifROStreamRead(&s, &version, 1), AVIF_RESULT_INVALID_TONE_MAPPED_IMAGE); // unsigned int(8) version = 0;
    if (version != 0) {
        avifDiagnosticsPrintf(diag, "Box[tmap] has unsupported version [%u]", version);
        return AVIF_RESULT_NOT_IMPLEMENTED;
    }

    uint16_t minimumVersion;
    AVIF_CHECKERR(avifROStreamReadU16(&s, &minimumVersion), AVIF_RESULT_INVALID_TONE_MAPPED_IMAGE); // unsigned int(16) minimum_version;
    const uint16_t supportedMetadataVersion = 0;
    if (minimumVersion > supportedMetadataVersion) {
        avifDiagnosticsPrintf(diag, "Box[tmap] has unsupported minimum version [%u]", minimumVersion);
        return AVIF_RESULT_NOT_IMPLEMENTED;
    }
    uint16_t writerVersion;
    AVIF_CHECKERR(avifROStreamReadU16(&s, &writerVersion), AVIF_RESULT_INVALID_TONE_MAPPED_IMAGE); // unsigned int(16) writer_version;
    // A writer_version below minimum_version is malformed.
    AVIF_CHECKERR(writerVersion >= minimumVersion, AVIF_RESULT_INVALID_TONE_MAPPED_IMAGE);

    AVIF_CHECKERR(avifParseGainMapMetadata(gainMap, &s), AVIF_RESULT_INVALID_TONE_MAPPED_IMAGE);

    // Trailing bytes are only forbidden when the writer's version is fully
    // understood; a newer writer_version may legitimately append unknown fields.
    if (writerVersion <= supportedMetadataVersion) {
        AVIF_CHECKERR(avifROStreamRemainingBytes(&s) == 0, AVIF_RESULT_INVALID_TONE_MAPPED_IMAGE);
    }

    if (avifGainMapValidateMetadata(gainMap, diag) != AVIF_RESULT_OK) {
        return AVIF_RESULT_INVALID_TONE_MAPPED_IMAGE;
    }

    return AVIF_RESULT_OK;
}
2227 | | |
2228 | | #if defined(AVIF_ENABLE_EXPERIMENTAL_SAMPLE_TRANSFORM) |
// Parses the token list of a Sample Transform expression into *expression.
// On failure, the caller is responsible for destroying *expression.
// bit_depth is assumed to be 2 (32-bit).
static avifResult avifParseSampleTransformTokens(avifROStream * s, avifSampleTransformExpression * expression)
{
    uint8_t tokenCount;
    AVIF_CHECKERR(avifROStreamRead(s, &tokenCount, /*size=*/1), AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(8) token_count;
    AVIF_CHECKERR(tokenCount != 0, AVIF_RESULT_BMFF_PARSE_FAILED);
    AVIF_CHECKERR(avifArrayCreate(expression, sizeof(expression->tokens[0]), tokenCount), AVIF_RESULT_OUT_OF_MEMORY);

    for (uint32_t t = 0; t < tokenCount; ++t) {
        avifSampleTransformToken * token = (avifSampleTransformToken *)avifArrayPush(expression);
        AVIF_CHECKERR(token != NULL, AVIF_RESULT_OUT_OF_MEMORY);

        uint8_t tokenValue;
        AVIF_CHECKERR(avifROStreamRead(s, &tokenValue, /*size=*/1), AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(8) token;
        // Classify the token by its numeric range: constant, input image item
        // index, unary operator, binary operator, or reserved.
        if (tokenValue == AVIF_SAMPLE_TRANSFORM_CONSTANT) {
            token->type = AVIF_SAMPLE_TRANSFORM_CONSTANT;
            // Two's complement representation is assumed here.
            uint32_t constant;
            AVIF_CHECKERR(avifROStreamReadU32(s, &constant), AVIF_RESULT_BMFF_PARSE_FAILED); // signed int(1<<(bit_depth+3)) constant;
            token->constant = (int32_t)constant;
        } else if (tokenValue <= AVIF_SAMPLE_TRANSFORM_LAST_INPUT_IMAGE_ITEM_INDEX) {
            AVIF_ASSERT_OR_RETURN(tokenValue >= AVIF_SAMPLE_TRANSFORM_FIRST_INPUT_IMAGE_ITEM_INDEX);
            token->type = AVIF_SAMPLE_TRANSFORM_INPUT_IMAGE_ITEM_INDEX;
            token->inputImageItemIndex = tokenValue;
        } else if (tokenValue >= AVIF_SAMPLE_TRANSFORM_FIRST_UNARY_OPERATOR && tokenValue <= AVIF_SAMPLE_TRANSFORM_LAST_UNARY_OPERATOR) {
            token->type = (avifSampleTransformTokenType)tokenValue; // unary operator
        } else if (tokenValue >= AVIF_SAMPLE_TRANSFORM_FIRST_BINARY_OPERATOR && tokenValue <= AVIF_SAMPLE_TRANSFORM_LAST_BINARY_OPERATOR) {
            token->type = (avifSampleTransformTokenType)tokenValue; // binary operator
        } else {
            token->type = AVIF_SAMPLE_TRANSFORM_RESERVED;
        }
    }
    // The token list must consume the entire payload.
    AVIF_CHECKERR(avifROStreamRemainingBytes(s) == 0, AVIF_RESULT_BMFF_PARSE_FAILED);
    return AVIF_RESULT_OK;
}
2264 | | |
// Parses the raw bitstream of the 'sato' Sample Transform derived image item and extracts the expression.
// Only version 0 with 32-bit intermediate depth is implemented.
static avifResult avifParseSampleTransformImageBox(const uint8_t * raw,
                                                   size_t rawLen,
                                                   uint32_t numInputImageItems,
                                                   avifSampleTransformExpression * expression,
                                                   avifDiagnostics * diag)
{
    BEGIN_STREAM(s, raw, rawLen, diag, "Box[sato]");

    uint8_t version, reserved, bitDepth;
    AVIF_CHECKERR(avifROStreamReadBitsU8(&s, &version, /*bitCount=*/2), AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(2) version = 0;
    AVIF_CHECKERR(avifROStreamReadBitsU8(&s, &reserved, /*bitCount=*/4), AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(4) reserved;
    AVIF_CHECKERR(avifROStreamReadBitsU8(&s, &bitDepth, /*bitCount=*/2), AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(2) bit_depth;
    AVIF_CHECKERR(version == 0, AVIF_RESULT_NOT_IMPLEMENTED);
    AVIF_CHECKERR(bitDepth == AVIF_SAMPLE_TRANSFORM_BIT_DEPTH_32, AVIF_RESULT_NOT_IMPLEMENTED);

    // On any failure past this point, destroy the partially filled expression
    // so the caller does not leak it.
    const avifResult result = avifParseSampleTransformTokens(&s, expression);
    if (result != AVIF_RESULT_OK) {
        avifArrayDestroy(expression);
        return result;
    }
    if (!avifSampleTransformExpressionIsValid(expression, numInputImageItems)) {
        avifArrayDestroy(expression);
        return AVIF_RESULT_BMFF_PARSE_FAILED;
    }
    return AVIF_RESULT_OK;
}
2292 | | |
// Validates that a Sample Transform derived image item carries the mandatory
// properties (pixi, ispe) and that each of its input image items (the items
// whose dimgForID points at it) shares its spatial extents.
static avifResult avifDecoderSampleTransformItemValidateProperties(const avifDecoderItem * item, avifDiagnostics * diag)
{
    const avifProperty * pixiProp = avifPropertyArrayFind(&item->properties, "pixi");
    if (!pixiProp) {
        avifDiagnosticsPrintf(diag, "Item ID %u of type '%.4s' is missing mandatory pixi property", item->id, (const char *)item->type);
        return AVIF_RESULT_BMFF_PARSE_FAILED;
    }
    // All planes must share a single depth; mixed-depth planes are not implemented.
    for (uint8_t i = 0; i < pixiProp->u.pixi.planeCount; ++i) {
        if (pixiProp->u.pixi.planeDepths[i] != pixiProp->u.pixi.planeDepths[0]) {
            avifDiagnosticsPrintf(diag,
                                  "Item ID %u of type '%.4s' has different depths specified by pixi property [%u, %u], this is not supported",
                                  item->id,
                                  (const char *)item->type,
                                  pixiProp->u.pixi.planeDepths[0],
                                  pixiProp->u.pixi.planeDepths[i]);
            return AVIF_RESULT_NOT_IMPLEMENTED;
        }
    }

    const avifProperty * ispeProp = avifPropertyArrayFind(&item->properties, "ispe");
    if (!ispeProp) {
        avifDiagnosticsPrintf(diag, "Item ID %u of type '%.4s' is missing mandatory ispe property", item->id, (const char *)item->type);
        return AVIF_RESULT_BMFF_PARSE_FAILED;
    }

    for (uint32_t i = 0; i < item->meta->items.count; ++i) {
        avifDecoderItem * inputImageItem = item->meta->items.item[i];
        if (inputImageItem->dimgForID != item->id) {
            continue;
        }
        // Even if inputImageItem is a grid, the ispe property from its first tile should have been copied to the grid item.
        const avifProperty * inputImageItemIspeProp = avifPropertyArrayFind(&inputImageItem->properties, "ispe");
        AVIF_ASSERT_OR_RETURN(inputImageItemIspeProp != NULL);
        if (inputImageItemIspeProp->u.ispe.width != ispeProp->u.ispe.width ||
            inputImageItemIspeProp->u.ispe.height != ispeProp->u.ispe.height) {
            avifDiagnosticsPrintf(diag,
                                  "The fields of the ispe property of item ID %u of type '%.4s' differs from item ID %u",
                                  inputImageItem->id,
                                  (const char *)inputImageItem->type,
                                  item->id);
            return AVIF_RESULT_BMFF_PARSE_FAILED;
        }
        // TODO(yguyon): Check that all input image items share the same codec config (except for the bit depth value).
    }

    // Cropping a Sample Transform item with 'clap' is not implemented.
    AVIF_CHECKERR(avifPropertyArrayFind(&item->properties, "clap") == NULL, AVIF_RESULT_NOT_IMPLEMENTED);
    return AVIF_RESULT_OK;
}
2341 | | #endif // AVIF_ENABLE_EXPERIMENTAL_SAMPLE_TRANSFORM |
2342 | | |
2343 | | // Extracts the codecType from the item type or from its children. |
2344 | | // Also parses and outputs grid information if the item is a grid. |
2345 | | // isItemInInput must be false if the item is a made-up structure |
2346 | | // (and thus not part of the parseable input bitstream). |
static avifResult avifDecoderItemReadAndParse(const avifDecoder * decoder,
                                              avifDecoderItem * item,
                                              avifBool isItemInInput,
                                              avifImageGrid * grid,
                                              avifCodecType * codecType)
{
    // Grid items carry their layout in the item payload; parse and validate it.
    if (!memcmp(item->type, "grid", 4)) {
        if (isItemInInput) {
            avifROData readData;
            AVIF_CHECKRES(avifDecoderItemRead(item, decoder->io, &readData, 0, 0, decoder->data->diag));
            AVIF_CHECKERR(avifParseImageGridBox(grid,
                                                readData.data,
                                                readData.size,
                                                decoder->imageSizeLimit,
                                                decoder->imageDimensionLimit,
                                                decoder->data->diag),
                          AVIF_RESULT_INVALID_IMAGE_GRID);
            // Validate that there are exactly the same number of dimg items to form the grid.
            uint32_t dimgItemCount = 0;
            for (uint32_t i = 0; i < item->meta->items.count; ++i) {
                if (item->meta->items.item[i]->dimgForID == item->id) {
                    ++dimgItemCount;
                }
            }
            AVIF_CHECKERR(dimgItemCount == grid->rows * grid->columns, AVIF_RESULT_INVALID_IMAGE_GRID);
        } else {
            // item was generated for convenience and is not part of the bitstream.
            // grid information should already be set.
            AVIF_ASSERT_OR_RETURN(grid->rows > 0 && grid->columns > 0);
        }
        // A grid's codec type comes from its tiles, not from the 'grid' fourcc itself.
        *codecType = avifDecoderItemGetGridCodecType(item);
        AVIF_CHECKERR(*codecType != AVIF_CODEC_TYPE_UNKNOWN, AVIF_RESULT_INVALID_IMAGE_GRID);
    } else {
        *codecType = avifGetCodecType(item->type);
        AVIF_ASSERT_OR_RETURN(*codecType != AVIF_CODEC_TYPE_UNKNOWN);
    }
    // TODO(yguyon): If AVIF_ENABLE_EXPERIMENTAL_SAMPLE_TRANSFORM is defined, backward-incompatible
    //               files with a primary 'sato' Sample Transform derived image item could be
    //               handled here (compared to backward-compatible files with a 'sato' item in the
    //               same 'altr' group as the primary regular color item which are handled in
    //               avifDecoderDataFindSampleTransformImageItem() below).
    return AVIF_RESULT_OK;
}
2390 | | |
2391 | | static avifBool avifParseImageSpatialExtentsProperty(avifProperty * prop, const uint8_t * raw, size_t rawLen, avifDiagnostics * diag) |
2392 | 15.5k | { |
2393 | 15.5k | BEGIN_STREAM(s, raw, rawLen, diag, "Box[ispe]"); |
2394 | 15.5k | AVIF_CHECK(avifROStreamReadAndEnforceVersion(&s, /*enforcedVersion=*/0, /*flags=*/NULL)); |
2395 | | |
2396 | 15.5k | avifImageSpatialExtents * ispe = &prop->u.ispe; |
2397 | 15.5k | AVIF_CHECK(avifROStreamReadU32(&s, &ispe->width)); |
2398 | 15.5k | AVIF_CHECK(avifROStreamReadU32(&s, &ispe->height)); |
2399 | 15.5k | return AVIF_TRUE; |
2400 | 15.5k | } |
2401 | | |
2402 | | static avifBool avifParseAuxiliaryTypeProperty(avifProperty * prop, const uint8_t * raw, size_t rawLen, avifDiagnostics * diag) |
2403 | 354 | { |
2404 | 354 | BEGIN_STREAM(s, raw, rawLen, diag, "Box[auxC]"); |
2405 | 354 | AVIF_CHECK(avifROStreamReadAndEnforceVersion(&s, /*enforcedVersion=*/0, /*flags=*/NULL)); |
2406 | | |
2407 | 353 | AVIF_CHECK(avifROStreamReadString(&s, prop->u.auxC.auxType, AUXTYPE_SIZE)); |
2408 | 352 | return AVIF_TRUE; |
2409 | 353 | } |
2410 | | |
// Parses a ColourInformationBox ('colr') into prop->u.colr.
// Unknown colour_type values are ignored: the function returns AVIF_TRUE with
// neither hasICC nor hasNCLX set.
static avifBool avifParseColourInformationBox(avifProperty * prop, uint64_t rawOffset, const uint8_t * raw, size_t rawLen, avifDiagnostics * diag)
{
    BEGIN_STREAM(s, raw, rawLen, diag, "Box[colr]");

    avifColourInformationBox * colr = &prop->u.colr;
    colr->hasICC = AVIF_FALSE;
    colr->hasNCLX = AVIF_FALSE;

    uint8_t colorType[4]; // unsigned int(32) colour_type;
    AVIF_CHECK(avifROStreamRead(&s, colorType, 4));
    if (!memcmp(colorType, "rICC", 4) || !memcmp(colorType, "prof", 4)) {
        colr->hasICC = AVIF_TRUE;
        // Remember the offset of the ICC payload relative to the beginning of the stream. A direct pointer cannot be stored
        // because decoder->io->persistent could have been AVIF_FALSE when obtaining raw through decoder->io->read().
        // The bytes could be copied now instead of remembering the offset, but it is as invasive as passing rawOffset everywhere.
        colr->iccOffset = rawOffset + avifROStreamOffset(&s);
        colr->iccSize = avifROStreamRemainingBytes(&s);
    } else if (!memcmp(colorType, "nclx", 4)) {
        AVIF_CHECK(avifROStreamReadU16(&s, &colr->colorPrimaries)); // unsigned int(16) colour_primaries;
        AVIF_CHECK(avifROStreamReadU16(&s, &colr->transferCharacteristics)); // unsigned int(16) transfer_characteristics;
        AVIF_CHECK(avifROStreamReadU16(&s, &colr->matrixCoefficients)); // unsigned int(16) matrix_coefficients;
        uint8_t full_range_flag;
        AVIF_CHECK(avifROStreamReadBitsU8(&s, &full_range_flag, /*bitCount=*/1)); // unsigned int(1) full_range_flag;
        colr->range = full_range_flag ? AVIF_RANGE_FULL : AVIF_RANGE_LIMITED;
        uint8_t reserved;
        AVIF_CHECK(avifROStreamReadBitsU8(&s, &reserved, /*bitCount=*/7)); // unsigned int(7) reserved = 0;
        if (reserved) {
            avifDiagnosticsPrintf(diag, "Box[colr] contains nonzero reserved bits [%u]", reserved);
            return AVIF_FALSE;
        }
        colr->hasNCLX = AVIF_TRUE;
    }
    return AVIF_TRUE;
}
2445 | | |
// Reads the two 16-bit ContentLightLevelInformation fields from the stream into *clli.
static avifResult avifParseContentLightLevelInformation(avifROStream * s, avifContentLightLevelInformationBox * clli)
{
    AVIF_CHECKERR(avifROStreamReadBitsU16(s, &clli->maxCLL, 16), AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(16) max_content_light_level
    AVIF_CHECKERR(avifROStreamReadBitsU16(s, &clli->maxPALL, 16), AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(16) max_pic_average_light_level
    return AVIF_RESULT_OK;
}
2452 | | static avifResult avifParseContentLightLevelInformationBox(avifProperty * prop, const uint8_t * raw, size_t rawLen, avifDiagnostics * diag) |
2453 | 209 | { |
2454 | 209 | BEGIN_STREAM(s, raw, rawLen, diag, "Box[clli]"); |
2455 | 209 | AVIF_CHECKRES(avifParseContentLightLevelInformation(&s, &prop->u.clli)); |
2456 | 205 | return AVIF_RESULT_OK; |
2457 | 209 | } |
2458 | | |
2459 | | #if defined(AVIF_ENABLE_EXPERIMENTAL_MINI) |
2460 | | static avifResult avifSkipMasteringDisplayColourVolume(avifROStream * s) |
2461 | | { |
2462 | | for (int c = 0; c < 3; c++) { |
2463 | | AVIF_CHECKERR(avifROStreamSkipBits(s, 16), AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(16) display_primaries_x; |
2464 | | AVIF_CHECKERR(avifROStreamSkipBits(s, 16), AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(16) display_primaries_y; |
2465 | | } |
2466 | | AVIF_CHECKERR(avifROStreamSkipBits(s, 16), AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(16) white_point_x; |
2467 | | AVIF_CHECKERR(avifROStreamSkipBits(s, 16), AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(16) white_point_y; |
2468 | | AVIF_CHECKERR(avifROStreamSkipBits(s, 32), AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(32) max_display_mastering_luminance; |
2469 | | AVIF_CHECKERR(avifROStreamSkipBits(s, 32), AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(32) min_display_mastering_luminance; |
2470 | | return AVIF_RESULT_OK; |
2471 | | } |
2472 | | |
// Skips over a ContentColourVolume payload without storing it, honoring the
// presence flags that gate the optional trailing fields.
static avifResult avifSkipContentColourVolume(avifROStream * s)
{
    AVIF_CHECKERR(avifROStreamSkipBits(s, 1), AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(1) reserved = 0; // ccv_cancel_flag
    AVIF_CHECKERR(avifROStreamSkipBits(s, 1), AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(1) reserved = 0; // ccv_persistence_flag
    uint8_t ccvPrimariesPresent;
    AVIF_CHECKERR(avifROStreamReadBitsU8(s, &ccvPrimariesPresent, 1), AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(1) ccv_primaries_present_flag;
    uint8_t ccvMinLuminanceValuePresent, ccvMaxLuminanceValuePresent, ccvAvgLuminanceValuePresent;
    AVIF_CHECKERR(avifROStreamReadBitsU8(s, &ccvMinLuminanceValuePresent, 1),
                  AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(1) ccv_min_luminance_value_present_flag;
    AVIF_CHECKERR(avifROStreamReadBitsU8(s, &ccvMaxLuminanceValuePresent, 1),
                  AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(1) ccv_max_luminance_value_present_flag;
    AVIF_CHECKERR(avifROStreamReadBitsU8(s, &ccvAvgLuminanceValuePresent, 1),
                  AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(1) ccv_avg_luminance_value_present_flag;
    AVIF_CHECKERR(avifROStreamSkipBits(s, 2), AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(2) reserved = 0;

    if (ccvPrimariesPresent) {
        for (int c = 0; c < 3; c++) {
            AVIF_CHECKERR(avifROStreamSkipBits(s, 32), AVIF_RESULT_BMFF_PARSE_FAILED); // signed int(32) ccv_primaries_x[[c]];
            AVIF_CHECKERR(avifROStreamSkipBits(s, 32), AVIF_RESULT_BMFF_PARSE_FAILED); // signed int(32) ccv_primaries_y[[c]];
        }
    }
    if (ccvMinLuminanceValuePresent) {
        AVIF_CHECKERR(avifROStreamSkipBits(s, 32), AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(32) ccv_min_luminance_value;
    }
    if (ccvMaxLuminanceValuePresent) {
        AVIF_CHECKERR(avifROStreamSkipBits(s, 32), AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(32) ccv_max_luminance_value;
    }
    if (ccvAvgLuminanceValuePresent) {
        AVIF_CHECKERR(avifROStreamSkipBits(s, 32), AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(32) ccv_avg_luminance_value;
    }
    return AVIF_RESULT_OK;
}
2505 | | |
2506 | | static avifResult avifSkipAmbientViewingEnvironment(avifROStream * s) |
2507 | | { |
2508 | | AVIF_CHECKERR(avifROStreamSkipBits(s, 32), AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(32) ambient_illuminance; |
2509 | | AVIF_CHECKERR(avifROStreamSkipBits(s, 16), AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(16) ambient_light_x; |
2510 | | AVIF_CHECKERR(avifROStreamSkipBits(s, 16), AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(16) ambient_light_y; |
2511 | | return AVIF_RESULT_OK; |
2512 | | } |
2513 | | |
2514 | | static avifResult avifSkipReferenceViewingEnvironment(avifROStream * s) |
2515 | | { |
2516 | | AVIF_CHECKERR(avifROStreamSkipBits(s, 32), AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(32) surround_luminance; |
2517 | | AVIF_CHECKERR(avifROStreamSkipBits(s, 16), AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(16) surround_light_x; |
2518 | | AVIF_CHECKERR(avifROStreamSkipBits(s, 16), AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(16) surround_light_y; |
2519 | | AVIF_CHECKERR(avifROStreamSkipBits(s, 32), AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(32) periphery_luminance; |
2520 | | AVIF_CHECKERR(avifROStreamSkipBits(s, 16), AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(16) periphery_light_x; |
2521 | | AVIF_CHECKERR(avifROStreamSkipBits(s, 16), AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(16) periphery_light_y; |
2522 | | return AVIF_RESULT_OK; |
2523 | | } |
2524 | | |
2525 | | static avifResult avifSkipNominalDiffuseWhite(avifROStream * s) |
2526 | | { |
2527 | | AVIF_CHECKERR(avifROStreamSkipBits(s, 32), AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(32) diffuse_white_luminance; |
2528 | | return AVIF_RESULT_OK; |
2529 | | } |
2530 | | |
2531 | | static avifResult avifParseMiniHDRProperties(avifROStream * s, uint32_t * hasClli, avifContentLightLevelInformationBox * clli) |
2532 | | { |
2533 | | AVIF_CHECKERR(avifROStreamReadBitsU32(s, hasClli, 1), AVIF_RESULT_BMFF_PARSE_FAILED); // bit(1) clli_flag; |
2534 | | uint32_t hasMdcv, hasCclv, hasAmve, hasReve, hasNdwt; |
2535 | | AVIF_CHECKERR(avifROStreamReadBitsU32(s, &hasMdcv, 1), AVIF_RESULT_BMFF_PARSE_FAILED); // bit(1) mdcv_flag; |
2536 | | AVIF_CHECKERR(avifROStreamReadBitsU32(s, &hasCclv, 1), AVIF_RESULT_BMFF_PARSE_FAILED); // bit(1) cclv_flag; |
2537 | | AVIF_CHECKERR(avifROStreamReadBitsU32(s, &hasAmve, 1), AVIF_RESULT_BMFF_PARSE_FAILED); // bit(1) amve_flag; |
2538 | | AVIF_CHECKERR(avifROStreamReadBitsU32(s, &hasReve, 1), AVIF_RESULT_BMFF_PARSE_FAILED); // bit(1) reve_flag; |
2539 | | AVIF_CHECKERR(avifROStreamReadBitsU32(s, &hasNdwt, 1), AVIF_RESULT_BMFF_PARSE_FAILED); // bit(1) ndwt_flag; |
2540 | | if (*hasClli) { |
2541 | | AVIF_CHECKRES(avifParseContentLightLevelInformation(s, clli)); // ContentLightLevel clli; |
2542 | | } |
2543 | | if (hasMdcv) { |
2544 | | AVIF_CHECKRES(avifSkipMasteringDisplayColourVolume(s)); // MasteringDisplayColourVolume mdcv; |
2545 | | } |
2546 | | if (hasCclv) { |
2547 | | AVIF_CHECKRES(avifSkipContentColourVolume(s)); // ContentColourVolume cclv; |
2548 | | } |
2549 | | if (hasAmve) { |
2550 | | AVIF_CHECKRES(avifSkipAmbientViewingEnvironment(s)); // AmbientViewingEnvironment amve; |
2551 | | } |
2552 | | if (hasReve) { |
2553 | | AVIF_CHECKRES(avifSkipReferenceViewingEnvironment(s)); // ReferenceViewingEnvironment reve; |
2554 | | } |
2555 | | if (hasNdwt) { |
2556 | | AVIF_CHECKRES(avifSkipNominalDiffuseWhite(s)); // NominalDiffuseWhite ndwt; |
2557 | | } |
2558 | | return AVIF_RESULT_OK; |
2559 | | } |
2560 | | #endif // AVIF_ENABLE_EXPERIMENTAL_MINI |
2561 | | |
2562 | | // Implementation of section 2.3.3 of AV1 Codec ISO Media File Format Binding specification v1.2.0. |
2563 | | // See https://aomediacodec.github.io/av1-isobmff/v1.2.0.html#av1codecconfigurationbox-syntax. |
2564 | | static avifBool avifParseCodecConfiguration(avifROStream * s, avifCodecConfigurationBox * config, const char * configPropName, avifDiagnostics * diag) |
2565 | 15.3k | { |
2566 | 15.3k | const size_t av1COffset = s->offset; |
2567 | | |
2568 | 15.3k | uint32_t marker, version; |
2569 | 15.3k | AVIF_CHECK(avifROStreamReadBitsU32(s, &marker, /*bitCount=*/1)); // unsigned int (1) marker = 1; |
2570 | 15.3k | if (!marker) { |
2571 | 1 | avifDiagnosticsPrintf(diag, "%.4s contains illegal marker: [%u]", configPropName, marker); |
2572 | 1 | return AVIF_FALSE; |
2573 | 1 | } |
2574 | 15.3k | AVIF_CHECK(avifROStreamReadBitsU32(s, &version, /*bitCount=*/7)); // unsigned int (7) version = 1; |
2575 | 15.3k | if (version != 1) { |
2576 | 2 | avifDiagnosticsPrintf(diag, "%.4s contains illegal version: [%u]", configPropName, version); |
2577 | 2 | return AVIF_FALSE; |
2578 | 2 | } |
2579 | | |
2580 | 15.3k | AVIF_CHECK(avifROStreamReadBitsU8(s, &config->seqProfile, /*bitCount=*/3)); // unsigned int (3) seq_profile; |
2581 | 15.3k | AVIF_CHECK(avifROStreamReadBitsU8(s, &config->seqLevelIdx0, /*bitCount=*/5)); // unsigned int (5) seq_level_idx_0; |
2582 | 15.3k | AVIF_CHECK(avifROStreamReadBitsU8(s, &config->seqTier0, /*bitCount=*/1)); // unsigned int (1) seq_tier_0; |
2583 | 15.3k | AVIF_CHECK(avifROStreamReadBitsU8(s, &config->highBitdepth, /*bitCount=*/1)); // unsigned int (1) high_bitdepth; |
2584 | 15.3k | AVIF_CHECK(avifROStreamReadBitsU8(s, &config->twelveBit, /*bitCount=*/1)); // unsigned int (1) twelve_bit; |
2585 | 15.3k | AVIF_CHECK(avifROStreamReadBitsU8(s, &config->monochrome, /*bitCount=*/1)); // unsigned int (1) monochrome; |
2586 | 15.3k | AVIF_CHECK(avifROStreamReadBitsU8(s, &config->chromaSubsamplingX, /*bitCount=*/1)); // unsigned int (1) chroma_subsampling_x; |
2587 | 15.3k | AVIF_CHECK(avifROStreamReadBitsU8(s, &config->chromaSubsamplingY, /*bitCount=*/1)); // unsigned int (1) chroma_subsampling_y; |
2588 | 15.3k | AVIF_CHECK(avifROStreamReadBitsU8(s, &config->chromaSamplePosition, /*bitCount=*/2)); // unsigned int (2) chroma_sample_position; |
2589 | | |
2590 | | // unsigned int (3) reserved = 0; |
2591 | | // unsigned int (1) initial_presentation_delay_present; |
2592 | | // if (initial_presentation_delay_present) { |
2593 | | // unsigned int (4) initial_presentation_delay_minus_one; |
2594 | | // } else { |
2595 | | // unsigned int (4) reserved = 0; |
2596 | | // } |
2597 | 15.3k | AVIF_CHECK(avifROStreamSkip(s, /*byteCount=*/1)); |
2598 | | |
2599 | | // According to section 2.2.1 of AV1 Image File Format specification v1.1.0: |
2600 | | // - Sequence Header OBUs should not be present in the AV1CodecConfigurationBox. |
2601 | | // - If a Sequence Header OBU is present in the AV1CodecConfigurationBox, |
2602 | | // it shall match the Sequence Header OBU in the AV1 Image Item Data. |
2603 | | // - Metadata OBUs, if present, shall match the values given in other item properties, |
2604 | | // such as the PixelInformationProperty or ColourInformationBox. |
2605 | | // See https://aomediacodec.github.io/av1-avif/v1.1.0.html#av1-configuration-item-property. |
2606 | | // For simplicity, the constraints above are not enforced. |
2607 | | // The following is skipped by avifParseItemPropertyContainerBox(). |
2608 | | // unsigned int (8) configOBUs[]; |
2609 | | |
2610 | 15.3k | AVIF_CHECK(s->offset - av1COffset == 4); // Make sure avifParseCodecConfiguration() reads exactly 4 bytes. |
2611 | 15.3k | return AVIF_TRUE; |
2612 | 15.3k | } |
2613 | | |
2614 | | static avifBool avifParseCodecConfigurationBoxProperty(avifProperty * prop, |
2615 | | const uint8_t * raw, |
2616 | | size_t rawLen, |
2617 | | const char * configPropName, |
2618 | | avifDiagnostics * diag) |
2619 | 15.3k | { |
2620 | 15.3k | char diagContext[10]; |
2621 | 15.3k | snprintf(diagContext, sizeof(diagContext), "Box[%.4s]", configPropName); // "Box[av1C]" or "Box[av2C]" |
2622 | 15.3k | BEGIN_STREAM(s, raw, rawLen, diag, diagContext); |
2623 | 15.3k | return avifParseCodecConfiguration(&s, &prop->u.av1C, configPropName, diag); |
2624 | 15.3k | } |
2625 | | |
2626 | | static avifBool avifParsePixelAspectRatioBoxProperty(avifProperty * prop, const uint8_t * raw, size_t rawLen, avifDiagnostics * diag) |
2627 | 262 | { |
2628 | 262 | BEGIN_STREAM(s, raw, rawLen, diag, "Box[pasp]"); |
2629 | | |
2630 | 262 | avifPixelAspectRatioBox * pasp = &prop->u.pasp; |
2631 | 262 | AVIF_CHECK(avifROStreamReadU32(&s, &pasp->hSpacing)); // unsigned int(32) hSpacing; |
2632 | 261 | AVIF_CHECK(avifROStreamReadU32(&s, &pasp->vSpacing)); // unsigned int(32) vSpacing; |
2633 | 260 | return AVIF_TRUE; |
2634 | 261 | } |
2635 | | |
2636 | | static avifBool avifParseCleanApertureBoxProperty(avifProperty * prop, const uint8_t * raw, size_t rawLen, avifDiagnostics * diag) |
2637 | 11 | { |
2638 | 11 | BEGIN_STREAM(s, raw, rawLen, diag, "Box[clap]"); |
2639 | | |
2640 | 11 | avifCleanApertureBox * clap = &prop->u.clap; |
2641 | 11 | AVIF_CHECK(avifROStreamReadU32(&s, &clap->widthN)); // unsigned int(32) cleanApertureWidthN; |
2642 | 10 | AVIF_CHECK(avifROStreamReadU32(&s, &clap->widthD)); // unsigned int(32) cleanApertureWidthD; |
2643 | 9 | AVIF_CHECK(avifROStreamReadU32(&s, &clap->heightN)); // unsigned int(32) cleanApertureHeightN; |
2644 | 8 | AVIF_CHECK(avifROStreamReadU32(&s, &clap->heightD)); // unsigned int(32) cleanApertureHeightD; |
2645 | 7 | AVIF_CHECK(avifROStreamReadU32(&s, &clap->horizOffN)); // unsigned int(32) horizOffN; |
2646 | 6 | AVIF_CHECK(avifROStreamReadU32(&s, &clap->horizOffD)); // unsigned int(32) horizOffD; |
2647 | 5 | AVIF_CHECK(avifROStreamReadU32(&s, &clap->vertOffN)); // unsigned int(32) vertOffN; |
2648 | 4 | AVIF_CHECK(avifROStreamReadU32(&s, &clap->vertOffD)); // unsigned int(32) vertOffD; |
2649 | 3 | return AVIF_TRUE; |
2650 | 4 | } |
2651 | | |
2652 | | static avifBool avifParseImageRotationProperty(avifProperty * prop, const uint8_t * raw, size_t rawLen, avifDiagnostics * diag) |
2653 | 243 | { |
2654 | 243 | BEGIN_STREAM(s, raw, rawLen, diag, "Box[irot]"); |
2655 | | |
2656 | 243 | avifImageRotation * irot = &prop->u.irot; |
2657 | 243 | uint8_t reserved; |
2658 | 243 | AVIF_CHECK(avifROStreamReadBitsU8(&s, &reserved, /*bitCount=*/6)); // unsigned int (6) reserved = 0; |
2659 | 242 | if (reserved) { |
2660 | 4 | avifDiagnosticsPrintf(diag, "Box[irot] contains nonzero reserved bits [%u]", reserved); |
2661 | 4 | return AVIF_FALSE; |
2662 | 4 | } |
2663 | 238 | AVIF_CHECK(avifROStreamReadBitsU8(&s, &irot->angle, /*bitCount=*/2)); // unsigned int (2) angle; |
2664 | 238 | return AVIF_TRUE; |
2665 | 238 | } |
2666 | | |
2667 | | static avifBool avifParseImageMirrorProperty(avifProperty * prop, const uint8_t * raw, size_t rawLen, avifDiagnostics * diag) |
2668 | 111 | { |
2669 | 111 | BEGIN_STREAM(s, raw, rawLen, diag, "Box[imir]"); |
2670 | | |
2671 | 111 | avifImageMirror * imir = &prop->u.imir; |
2672 | 111 | uint8_t reserved; |
2673 | 111 | AVIF_CHECK(avifROStreamReadBitsU8(&s, &reserved, /*bitCount=*/7)); // unsigned int(7) reserved = 0; |
2674 | 110 | if (reserved) { |
2675 | 2 | avifDiagnosticsPrintf(diag, "Box[imir] contains nonzero reserved bits [%u]", reserved); |
2676 | 2 | return AVIF_FALSE; |
2677 | 2 | } |
2678 | 108 | AVIF_CHECK(avifROStreamReadBitsU8(&s, &imir->axis, /*bitCount=*/1)); // unsigned int(1) axis; |
2679 | 108 | return AVIF_TRUE; |
2680 | 108 | } |
2681 | | |
// Parses a PixelInformationProperty ('pixi') payload into prop->u.pixi.
// Only 1..MAX_PIXI_PLANE_DEPTHS channels are accepted, and all channels must
// share the same bit depth; other inputs return AVIF_RESULT_NOT_IMPLEMENTED.
static avifResult avifParsePixelInformationProperty(avifProperty * prop, const uint8_t * raw, size_t rawLen, avifDiagnostics * diag)
{
    BEGIN_STREAM(s, raw, rawLen, diag, "Box[pixi]");
    uint32_t flags = 0; // px_flags
    AVIF_CHECKERR(avifROStreamReadAndEnforceVersion(&s, /*enforcedVersion=*/0, &flags), AVIF_RESULT_BMFF_PARSE_FAILED);

    avifPixelInformationProperty * pixi = &prop->u.pixi;
    AVIF_CHECKERR(avifROStreamRead(&s, &pixi->planeCount, 1), AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int (8) num_channels;
    if (pixi->planeCount < 1 || pixi->planeCount > MAX_PIXI_PLANE_DEPTHS) {
        avifDiagnosticsPrintf(diag, "Box[pixi] contains unsupported plane count [%u]", pixi->planeCount);
        return AVIF_RESULT_NOT_IMPLEMENTED;
    }
    for (uint8_t i = 0; i < pixi->planeCount; ++i) {
        AVIF_CHECKERR(avifROStreamRead(&s, &pixi->planeDepths[i], 1), AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int (8) bits_per_channel;
#if defined(AVIF_ENABLE_EXPERIMENTAL_EXTENDED_PIXI)
        if (pixi->planeDepths[i] == 0) {
            avifDiagnosticsPrintf(diag, "Box[pixi] plane depth shall not be 0 for channel %u", i);
            return AVIF_RESULT_BMFF_PARSE_FAILED;
        }
#endif // AVIF_ENABLE_EXPERIMENTAL_EXTENDED_PIXI
        // libavif only implements a single depth shared by all planes.
        if (pixi->planeDepths[i] != pixi->planeDepths[0]) {
            avifDiagnosticsPrintf(diag,
                                  "Box[pixi] contains unsupported mismatched plane depths [%u != %u]",
                                  pixi->planeDepths[i],
                                  pixi->planeDepths[0]);
            return AVIF_RESULT_NOT_IMPLEMENTED;
        }
    }
#if defined(AVIF_ENABLE_EXPERIMENTAL_EXTENDED_PIXI)
    // Extended pixi (flags bit 0 set): per-channel content/format/subsampling info.
    if (flags & 1) {
        for (uint8_t i = 0; i < pixi->planeCount; ++i) {
            uint8_t channelIdc, reserved, componentFormat, channelLabelFlag;
            AVIF_CHECKERR(avifROStreamReadBitsU8(&s, &channelIdc, /*bitCount=*/3), AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(3) channel_idc;
            AVIF_CHECKERR(avifROStreamReadBitsU8(&s, &reserved, /*bitCount=*/1), AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(1) reserved = 0;
            AVIF_CHECKERR(avifROStreamReadBitsU8(&s, &componentFormat, /*bitCount=*/2), AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(2) component_format;
            AVIF_CHECKERR(avifROStreamReadBitsU8(&s, &pixi->subsamplingFlag[i], /*bitCount=*/1),
                          AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(1) subsampling_flag;
            AVIF_CHECKERR(avifROStreamReadBitsU8(&s, &channelLabelFlag, /*bitCount=*/1),
                          AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(1) channel_label_flag;
            if (pixi->subsamplingFlag[i]) {
                AVIF_CHECKERR(avifROStreamReadBitsU8(&s, &pixi->subsamplingType[i], /*bitCount=*/4),
                              AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(4) subsampling_type;
                AVIF_CHECKERR(avifROStreamReadBitsU8(&s, &pixi->subsamplingLocation[i], /*bitCount=*/4),
                              AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(4) subsampling_location;
            }

            // ISO/IEC 23008-12:2024/CDAM 2:2025 section 6.5.6.3:
            // This field indicates the contents of the channel. A value of 0 indicates colour/grayscale. A value of
            // 1 indicates alpha. A value of 2 indicates depth. Values 3-7 are reserved for future use. At most one
            // channel shall have a channel_idc of 1.
            if (channelIdc != 0) {
                avifDiagnosticsPrintf(diag, "Box[pixi] contains unsupported channel_idc %u for channel %u", channelIdc, i);
                return AVIF_RESULT_NOT_IMPLEMENTED;
            }
            if (reserved != 0) {
                avifDiagnosticsPrintf(diag, "Box[pixi] contains non-zero reserved field %u for channel %u", reserved, i);
                return AVIF_RESULT_BMFF_PARSE_FAILED;
            }
            // ISO/IEC 23008-12:2024/CDAM 2:2025 section 6.5.6.3:
            // component_format: This field indicates the data type of the channel as defined by the component_format
            // values in ISO/IEC 23001-17 where component_bit_depth is considered to be equal to bits_per_channel.
            // ISO/IEC 23001-17 section 5.2.1.2:
            // component_format: When equal to 0, component value is an unsigned integer coded on component_bit_depth bits.
            if (componentFormat != 0) {
                avifDiagnosticsPrintf(diag, "Box[pixi] contains unsupported component_format %u for channel %u", componentFormat, i);
                return AVIF_RESULT_NOT_IMPLEMENTED;
            }
            if (pixi->subsamplingFlag[i]) {
                if (pixi->subsamplingType[i] >= AVIF_PIXI_SUBSAMPLING_RESERVED) {
                    avifDiagnosticsPrintf(diag,
                                          "Box[pixi] contains reserved subsampling_type %u for channel %u",
                                          pixi->subsamplingType[i],
                                          i);
                    return AVIF_RESULT_BMFF_PARSE_FAILED;
                }
                if (pixi->subsamplingLocation[i] > 4) {
                    avifDiagnosticsPrintf(diag,
                                          "Box[pixi] contains reserved subsampling_location %u for channel %u",
                                          pixi->subsamplingLocation[i],
                                          i);
                    return AVIF_RESULT_BMFF_PARSE_FAILED;
                }
            }
            if (channelLabelFlag) {
                AVIF_CHECKERR(avifROStreamReadString(&s, NULL, 0), AVIF_RESULT_BMFF_PARSE_FAILED); // utf8string channel_label; (skipped)
            }
        }
    }
#endif // AVIF_ENABLE_EXPERIMENTAL_EXTENDED_PIXI
    return AVIF_RESULT_OK;
}
2773 | | |
2774 | | static avifBool avifParseOperatingPointSelectorProperty(avifProperty * prop, const uint8_t * raw, size_t rawLen, avifDiagnostics * diag) |
2775 | 82 | { |
2776 | 82 | BEGIN_STREAM(s, raw, rawLen, diag, "Box[a1op]"); |
2777 | | |
2778 | 82 | avifOperatingPointSelectorProperty * a1op = &prop->u.a1op; |
2779 | 82 | AVIF_CHECK(avifROStreamRead(&s, &a1op->opIndex, 1)); |
2780 | 81 | if (a1op->opIndex > 31) { // 31 is AV1's max operating point value |
2781 | 1 | avifDiagnosticsPrintf(diag, "Box[a1op] contains an unsupported operating point [%u]", a1op->opIndex); |
2782 | 1 | return AVIF_FALSE; |
2783 | 1 | } |
2784 | 80 | return AVIF_TRUE; |
2785 | 81 | } |
2786 | | |
2787 | | static avifBool avifParseLayerSelectorProperty(avifProperty * prop, const uint8_t * raw, size_t rawLen, avifDiagnostics * diag) |
2788 | 149 | { |
2789 | 149 | BEGIN_STREAM(s, raw, rawLen, diag, "Box[lsel]"); |
2790 | | |
2791 | 149 | avifLayerSelectorProperty * lsel = &prop->u.lsel; |
2792 | 149 | AVIF_CHECK(avifROStreamReadU16(&s, &lsel->layerID)); |
2793 | 148 | if ((lsel->layerID != 0xFFFF) && (lsel->layerID >= AVIF_MAX_AV1_LAYER_COUNT)) { |
2794 | 14 | avifDiagnosticsPrintf(diag, "Box[lsel] contains an unsupported layer [%u]", lsel->layerID); |
2795 | 14 | return AVIF_FALSE; |
2796 | 14 | } |
2797 | 134 | return AVIF_TRUE; |
2798 | 148 | } |
2799 | | |
2800 | | static avifBool avifParseAV1LayeredImageIndexingProperty(avifProperty * prop, const uint8_t * raw, size_t rawLen, avifDiagnostics * diag) |
2801 | 57 | { |
2802 | 57 | BEGIN_STREAM(s, raw, rawLen, diag, "Box[a1lx]"); |
2803 | | |
2804 | 57 | avifAV1LayeredImageIndexingProperty * a1lx = &prop->u.a1lx; |
2805 | | |
2806 | 57 | uint8_t largeSize = 0; |
2807 | 57 | AVIF_CHECK(avifROStreamRead(&s, &largeSize, 1)); |
2808 | 56 | if (largeSize & 0xFE) { |
2809 | 5 | avifDiagnosticsPrintf(diag, "Box[a1lx] has bits set in the reserved section [%u]", largeSize); |
2810 | 5 | return AVIF_FALSE; |
2811 | 5 | } |
2812 | | |
2813 | 192 | for (int i = 0; i < 3; ++i) { |
2814 | 147 | if (largeSize) { |
2815 | 12 | AVIF_CHECK(avifROStreamReadU32(&s, &a1lx->layerSize[i])); |
2816 | 135 | } else { |
2817 | 135 | uint16_t layerSize16; |
2818 | 135 | AVIF_CHECK(avifROStreamReadU16(&s, &layerSize16)); |
2819 | 132 | a1lx->layerSize[i] = (uint32_t)layerSize16; |
2820 | 132 | } |
2821 | 147 | } |
2822 | | |
2823 | | // Layer sizes will be validated later (when the item's size is known) |
2824 | 45 | return AVIF_TRUE; |
2825 | 51 | } |
2826 | | |
// Parses an ItemPropertyContainerBox ('ipco'): a sequence of property boxes,
// each appended to 'properties' in file order. Order matters: the 'ipma' box
// refers to these properties by 1-based index into this array.
// 'rawOffset' is the absolute file position of 'raw' (needed so nested boxes
// such as 'colr' can record where their payload lives in the file).
// 'isTrack' selects the track flavor ('auxi') vs the item flavor ('auxC') of
// the auxiliary type property. Boxes with types libavif does not parse are
// kept as opaque blobs so they can be forwarded to the user.
static avifResult avifParseItemPropertyContainerBox(avifPropertyArray * properties,
                                                    uint64_t rawOffset,
                                                    const uint8_t * raw,
                                                    size_t rawLen,
                                                    avifBool isTrack,
                                                    avifDiagnostics * diag)
{
    BEGIN_STREAM(s, raw, rawLen, diag, "Box[ipco]");

    while (avifROStreamHasBytesLeft(&s, 1)) {
        avifBoxHeader header;
        AVIF_CHECKERR(avifROStreamReadBoxHeader(&s, &header), AVIF_RESULT_BMFF_PARSE_FAILED);

        // Every child box becomes one entry in 'properties', parsed or opaque.
        avifProperty * prop = (avifProperty *)avifArrayPush(properties);
        AVIF_CHECKERR(prop != NULL, AVIF_RESULT_OUT_OF_MEMORY);
        memcpy(prop->type, header.type, 4);
        prop->isOpaque = AVIF_FALSE;
        if (!memcmp(header.type, "ispe", 4)) {
            AVIF_CHECKERR(avifParseImageSpatialExtentsProperty(prop, avifROStreamCurrent(&s), header.size, diag),
                          AVIF_RESULT_BMFF_PARSE_FAILED);
        } else if ((!memcmp(header.type, "auxC", 4) && !isTrack) || (!memcmp(header.type, "auxi", 4) && isTrack)) {
            AVIF_CHECKERR(avifParseAuxiliaryTypeProperty(prop, avifROStreamCurrent(&s), header.size, diag), AVIF_RESULT_BMFF_PARSE_FAILED);
        } else if (!memcmp(header.type, "colr", 4)) {
            AVIF_CHECKERR(avifParseColourInformationBox(prop, rawOffset + avifROStreamOffset(&s), avifROStreamCurrent(&s), header.size, diag),
                          AVIF_RESULT_BMFF_PARSE_FAILED);
        } else if (!memcmp(header.type, "av1C", 4)) {
            AVIF_CHECKERR(avifParseCodecConfigurationBoxProperty(prop, avifROStreamCurrent(&s), header.size, "av1C", diag),
                          AVIF_RESULT_BMFF_PARSE_FAILED);
#if defined(AVIF_CODEC_AVM)
        } else if (!memcmp(header.type, "av2C", 4)) {
            AVIF_CHECKERR(avifParseCodecConfigurationBoxProperty(prop, avifROStreamCurrent(&s), header.size, "av2C", diag),
                          AVIF_RESULT_BMFF_PARSE_FAILED);
#endif
        } else if (!memcmp(header.type, "pasp", 4)) {
            AVIF_CHECKERR(avifParsePixelAspectRatioBoxProperty(prop, avifROStreamCurrent(&s), header.size, diag),
                          AVIF_RESULT_BMFF_PARSE_FAILED);
        } else if (!memcmp(header.type, "clap", 4)) {
            AVIF_CHECKERR(avifParseCleanApertureBoxProperty(prop, avifROStreamCurrent(&s), header.size, diag),
                          AVIF_RESULT_BMFF_PARSE_FAILED);
        } else if (!memcmp(header.type, "irot", 4)) {
            AVIF_CHECKERR(avifParseImageRotationProperty(prop, avifROStreamCurrent(&s), header.size, diag), AVIF_RESULT_BMFF_PARSE_FAILED);
        } else if (!memcmp(header.type, "imir", 4)) {
            AVIF_CHECKERR(avifParseImageMirrorProperty(prop, avifROStreamCurrent(&s), header.size, diag), AVIF_RESULT_BMFF_PARSE_FAILED);
        } else if (!memcmp(header.type, "pixi", 4)) {
            AVIF_CHECKRES(avifParsePixelInformationProperty(prop, avifROStreamCurrent(&s), header.size, diag));
        } else if (!memcmp(header.type, "a1op", 4)) {
            AVIF_CHECKERR(avifParseOperatingPointSelectorProperty(prop, avifROStreamCurrent(&s), header.size, diag),
                          AVIF_RESULT_BMFF_PARSE_FAILED);
        } else if (!memcmp(header.type, "lsel", 4)) {
            AVIF_CHECKERR(avifParseLayerSelectorProperty(prop, avifROStreamCurrent(&s), header.size, diag), AVIF_RESULT_BMFF_PARSE_FAILED);
        } else if (!memcmp(header.type, "a1lx", 4)) {
            AVIF_CHECKERR(avifParseAV1LayeredImageIndexingProperty(prop, avifROStreamCurrent(&s), header.size, diag),
                          AVIF_RESULT_BMFF_PARSE_FAILED);
        } else if (!memcmp(header.type, "clli", 4)) {
            AVIF_CHECKRES(avifParseContentLightLevelInformationBox(prop, avifROStreamCurrent(&s), header.size, diag));
        } else {
            // Unknown type: keep the raw payload so it can be exposed to the user as-is.
            prop->isOpaque = AVIF_TRUE;
            memset(&prop->u.opaque, 0, sizeof(prop->u.opaque));
            memcpy(prop->u.opaque.usertype, header.usertype, sizeof(prop->u.opaque.usertype));
            AVIF_CHECKRES(avifRWDataSet(&prop->u.opaque.boxPayload, avifROStreamCurrent(&s), header.size));
        }

        // The sub-parsers above operate on a copy of the stream cursor; advance
        // past the whole child box here regardless of which branch was taken.
        AVIF_CHECKERR(avifROStreamSkip(&s, header.size), AVIF_RESULT_BMFF_PARSE_FAILED);
    }
    return AVIF_RESULT_OK;
}
2893 | | |
// Parses an ItemPropertyAssociationBox ('ipma'). For each listed item, copies
// the referenced properties (1-based indices into meta->properties, as filled
// by the 'ipco' parser) onto that item, enforcing the per-type rules on the
// 'essential' flag. *outVersionAndFlags receives (version << 24) | flags so
// the caller can track which version/flags combinations have been seen.
static avifResult avifParseItemPropertyAssociation(avifMeta * meta, const uint8_t * raw, size_t rawLen, avifDiagnostics * diag, uint32_t * outVersionAndFlags)
{
    // NOTE: If this function ever adds support for versions other than [0,1] or flags other than
    // [0,1], please increase the value of MAX_IPMA_VERSION_AND_FLAGS_SEEN accordingly.

    BEGIN_STREAM(s, raw, rawLen, diag, "Box[ipma]");

    uint8_t version;
    uint32_t flags;
    AVIF_CHECKERR(avifROStreamReadVersionAndFlags(&s, &version, &flags), AVIF_RESULT_BMFF_PARSE_FAILED);
    avifBool propertyIndexIsU15 = ((flags & 0x1) != 0); // flag bit 0 widens property_index from 7 to 15 bits
    *outVersionAndFlags = ((uint32_t)version << 24) | flags;

    uint32_t entryCount;
    AVIF_CHECKERR(avifROStreamReadU32(&s, &entryCount), AVIF_RESULT_BMFF_PARSE_FAILED);
    unsigned int prevItemID = 0;
    for (uint32_t entryIndex = 0; entryIndex < entryCount; ++entryIndex) {
        // ISO/IEC 14496-12, Seventh edition, 2022-01, Section 8.11.14.1:
        // Each ItemPropertyAssociationBox shall be ordered by increasing item_ID, and there shall
        // be at most one occurrence of a given item_ID, in the set of ItemPropertyAssociationBox
        // boxes.
        unsigned int itemID;
        if (version < 1) {
            // item_ID is 16 bits wide in version 0, 32 bits otherwise.
            uint16_t tmp;
            AVIF_CHECKERR(avifROStreamReadU16(&s, &tmp), AVIF_RESULT_BMFF_PARSE_FAILED);
            itemID = tmp;
        } else {
            AVIF_CHECKERR(avifROStreamReadU32(&s, &itemID), AVIF_RESULT_BMFF_PARSE_FAILED);
        }
        AVIF_CHECKRES(avifCheckItemID("ipma", itemID, diag));
        if (itemID <= prevItemID) {
            avifDiagnosticsPrintf(diag, "Box[ipma] item IDs are not ordered by increasing ID");
            return AVIF_RESULT_BMFF_PARSE_FAILED;
        }
        prevItemID = itemID;

        avifDecoderItem * item;
        AVIF_CHECKRES(avifMetaFindOrCreateItem(meta, itemID, &item));
        if (item->ipmaSeen) {
            avifDiagnosticsPrintf(diag, "Duplicate Box[ipma] for item ID [%u]", itemID);
            return AVIF_RESULT_BMFF_PARSE_FAILED;
        }
        item->ipmaSeen = AVIF_TRUE;

        uint8_t associationCount;
        AVIF_CHECKERR(avifROStreamRead(&s, &associationCount, 1), AVIF_RESULT_BMFF_PARSE_FAILED);
        for (uint8_t associationIndex = 0; associationIndex < associationCount; ++associationIndex) {
            uint8_t essential;
            AVIF_CHECKERR(avifROStreamReadBitsU8(&s, &essential, /*bitCount=*/1), AVIF_RESULT_BMFF_PARSE_FAILED); // bit(1) essential;
            uint32_t propertyIndex;
            AVIF_CHECKERR(avifROStreamReadBitsU32(&s, &propertyIndex, /*bitCount=*/propertyIndexIsU15 ? 15 : 7),
                          AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(7/15) property_index;

            // ISO/IEC 14496-12 Section 8.11.14.3:
            // 0 indicating that no property is associated (the essential indicator shall also be 0)
            if (propertyIndex == 0) {
                if (essential) {
                    avifDiagnosticsPrintf(diag, "Box[ipma] for item ID [%u] contains an illegal essential property index 0", itemID);
                    return AVIF_RESULT_BMFF_PARSE_FAILED;
                }
                continue;
            }
            --propertyIndex; // 1-indexed

            if (propertyIndex >= meta->properties.count) {
                avifDiagnosticsPrintf(diag,
                                      "Box[ipma] for item ID [%u] contains an illegal property index [%u] (out of [%u] properties)",
                                      itemID,
                                      propertyIndex,
                                      meta->properties.count);
                return AVIF_RESULT_BMFF_PARSE_FAILED;
            }

            // Copy property to item
            const avifProperty * srcProp = &meta->properties.prop[propertyIndex];

            // Some properties are supported and parsed by libavif.
            // Other properties are forwarded to the user as opaque blobs.
            const avifBool supportedType = !srcProp->isOpaque;
            if (supportedType) {
                if (essential) {
                    // Verify that it is legal for this property to be flagged as essential. Any
                    // types in this list are *required* in the spec to not be flagged as essential
                    // when associated with an item.
                    static const char * const nonessentialTypes[] = {

                        // AVIF: Section 2.3.2.3.2: "If associated, it shall not be marked as essential."
                        "a1lx"

                    };
                    size_t nonessentialTypesCount = sizeof(nonessentialTypes) / sizeof(nonessentialTypes[0]);
                    for (size_t i = 0; i < nonessentialTypesCount; ++i) {
                        if (!memcmp(srcProp->type, nonessentialTypes[i], 4)) {
                            avifDiagnosticsPrintf(diag,
                                                  "Item ID [%u] has a %s property association which must not be marked essential, but is",
                                                  itemID,
                                                  nonessentialTypes[i]);
                            return AVIF_RESULT_BMFF_PARSE_FAILED;
                        }
                    }
                } else {
                    // Verify that it is legal for this property to not be flagged as essential. Any
                    // types in this list are *required* in the spec to be flagged as essential when
                    // associated with an item.
                    static const char * const essentialTypes[] = {

                        // AVIF: Section 2.3.2.1.1: "If associated, it shall be marked as essential."
                        "a1op",

                        // HEIF: Section 6.5.11.1: "essential shall be equal to 1 for an 'lsel' item property."
                        "lsel",

                        // MIAF 2019/Amd. 2:2021: Section 7.3.9:
                        // All transformative properties associated with coded and derived images shall be
                        // marked as essential
                        // It makes no sense to allow for non-essential crop/orientation associated with an item
                        // that is not a coded or derived image, so for simplicity 'item' is not checked here.
                        "clap",
                        "irot",
                        "imir"

                    };
                    size_t essentialTypesCount = sizeof(essentialTypes) / sizeof(essentialTypes[0]);
                    for (size_t i = 0; i < essentialTypesCount; ++i) {
                        if (!memcmp(srcProp->type, essentialTypes[i], 4)) {
                            avifDiagnosticsPrintf(diag,
                                                  "Item ID [%u] has a %s property association which must be marked essential, but is not",
                                                  itemID,
                                                  essentialTypes[i]);
                            return AVIF_RESULT_BMFF_PARSE_FAILED;
                        }
                    }
                }

                // Supported and valid; associate it with this item.
                avifProperty * dstProp = (avifProperty *)avifArrayPush(&item->properties);
                AVIF_CHECKERR(dstProp != NULL, AVIF_RESULT_OUT_OF_MEMORY);
                *dstProp = *srcProp;
            } else {
                if (essential) {
                    // ISO/IEC 23008-12 Section 10.2.1:
                    // Under any brand, the primary item (or an alternative if alternative support is required)
                    // shall be processable by a reader implementing only the required features of that brand.
                    // Specifically, given that each brand has a set of properties that a reader is required to
                    // support: the item shall not have properties that are marked as essential and are outside
                    // this set.
                    // It is assumed that this rule also applies to items the primary item depends on (such as
                    // the cells of a grid).

                    // Discovered an essential item property that libavif doesn't support!
                    // Make a note to ignore this item later.
                    item->hasUnsupportedEssentialProperty = AVIF_TRUE;
                }

                // Will be forwarded to the user through avifImage::properties.
                avifProperty * dstProp = (avifProperty *)avifArrayPush(&item->properties);
                AVIF_CHECKERR(dstProp != NULL, AVIF_RESULT_OUT_OF_MEMORY);
                dstProp->isOpaque = AVIF_TRUE;
                memcpy(dstProp->type, srcProp->type, sizeof(dstProp->type));
                memcpy(dstProp->u.opaque.usertype, srcProp->u.opaque.usertype, sizeof(dstProp->u.opaque.usertype));
                AVIF_CHECKRES(
                    avifRWDataSet(&dstProp->u.opaque.boxPayload, srcProp->u.opaque.boxPayload.data, srcProp->u.opaque.boxPayload.size));
            }
        }
    }
    return AVIF_RESULT_OK;
}
3061 | | |
3062 | | static avifBool avifParsePrimaryItemBox(avifMeta * meta, const uint8_t * raw, size_t rawLen, avifDiagnostics * diag) |
3063 | 15.1k | { |
3064 | 15.1k | if (meta->primaryItemID > 0) { |
3065 | | // Illegal to have multiple pitm boxes, bail out |
3066 | 1 | avifDiagnosticsPrintf(diag, "Multiple boxes of unique Box[pitm] found"); |
3067 | 1 | return AVIF_FALSE; |
3068 | 1 | } |
3069 | | |
3070 | 15.1k | BEGIN_STREAM(s, raw, rawLen, diag, "Box[pitm]"); |
3071 | | |
3072 | 15.1k | uint8_t version; |
3073 | 15.1k | AVIF_CHECK(avifROStreamReadVersionAndFlags(&s, &version, NULL)); |
3074 | | |
3075 | 15.1k | if (version == 0) { |
3076 | 15.0k | uint16_t tmp16; |
3077 | 15.0k | AVIF_CHECK(avifROStreamReadU16(&s, &tmp16)); // unsigned int(16) item_ID; |
3078 | 15.0k | meta->primaryItemID = tmp16; |
3079 | 15.0k | } else { |
3080 | 20 | AVIF_CHECK(avifROStreamReadU32(&s, &meta->primaryItemID)); // unsigned int(32) item_ID; |
3081 | 20 | } |
3082 | 15.1k | return AVIF_TRUE; |
3083 | 15.1k | } |
3084 | | |
3085 | | static avifBool avifParseItemDataBox(avifMeta * meta, const uint8_t * raw, size_t rawLen, avifDiagnostics * diag) |
3086 | 51 | { |
3087 | | // Check to see if we've already seen an idat box for this meta box. If so, bail out |
3088 | 51 | if (meta->idat.size > 0) { |
3089 | 0 | avifDiagnosticsPrintf(diag, "Meta box contains multiple idat boxes"); |
3090 | 0 | return AVIF_FALSE; |
3091 | 0 | } |
3092 | 51 | if (rawLen == 0) { |
3093 | 1 | avifDiagnosticsPrintf(diag, "idat box has a length of 0"); |
3094 | 1 | return AVIF_FALSE; |
3095 | 1 | } |
3096 | | |
3097 | 50 | if (avifRWDataSet(&meta->idat, raw, rawLen) != AVIF_RESULT_OK) { |
3098 | 0 | return AVIF_FALSE; |
3099 | 0 | } |
3100 | 50 | return AVIF_TRUE; |
3101 | 50 | } |
3102 | | |
// Parses an ItemPropertiesBox ('iprp'): a single ItemPropertyContainerBox
// ('ipco') holding the property definitions, followed by one or more
// ItemPropertyAssociation ('ipma') boxes tying those properties to item IDs.
// rawOffset is the absolute file offset of 'raw', forwarded so property byte
// positions can be recorded. Returns AVIF_RESULT_OK on success.
static avifResult avifParseItemPropertiesBox(avifMeta * meta, uint64_t rawOffset, const uint8_t * raw, size_t rawLen, avifDiagnostics * diag)
{
    BEGIN_STREAM(s, raw, rawLen, diag, "Box[iprp]");

    // The first child of 'iprp' must be 'ipco'; anything else is a parse error.
    avifBoxHeader ipcoHeader;
    AVIF_CHECKERR(avifROStreamReadBoxHeader(&s, &ipcoHeader), AVIF_RESULT_BMFF_PARSE_FAILED);
    if (memcmp(ipcoHeader.type, "ipco", 4)) {
        avifDiagnosticsPrintf(diag, "Failed to find Box[ipco] as the first box in Box[iprp]");
        return AVIF_RESULT_BMFF_PARSE_FAILED;
    }

    // Read all item properties inside of ItemPropertyContainerBox
    AVIF_CHECKRES(avifParseItemPropertyContainerBox(&meta->properties,
                                                    rawOffset + avifROStreamOffset(&s),
                                                    avifROStreamCurrent(&s),
                                                    ipcoHeader.size,
                                                    /*isTrack=*/AVIF_FALSE,
                                                    diag));
    AVIF_CHECKERR(avifROStreamSkip(&s, ipcoHeader.size), AVIF_RESULT_BMFF_PARSE_FAILED);

    // Record the (version, flags) pair of each ipma box seen so far so that
    // duplicates can be rejected; only MAX_IPMA_VERSION_AND_FLAGS_SEEN unique
    // pairs are possible.
    uint32_t versionAndFlagsSeen[MAX_IPMA_VERSION_AND_FLAGS_SEEN];
    uint32_t versionAndFlagsSeenCount = 0;

    // Now read all ItemPropertyAssociation until the end of the box, and make associations
    while (avifROStreamHasBytesLeft(&s, 1)) {
        avifBoxHeader ipmaHeader;
        AVIF_CHECKERR(avifROStreamReadBoxHeader(&s, &ipmaHeader), AVIF_RESULT_BMFF_PARSE_FAILED);

        if (!memcmp(ipmaHeader.type, "ipma", 4)) {
            uint32_t versionAndFlags;
            AVIF_CHECKRES(avifParseItemPropertyAssociation(meta, avifROStreamCurrent(&s), ipmaHeader.size, diag, &versionAndFlags));
            // Reject a second ipma carrying a (version, flags) pair already seen.
            for (uint32_t i = 0; i < versionAndFlagsSeenCount; ++i) {
                if (versionAndFlagsSeen[i] == versionAndFlags) {
                    // BMFF (ISO/IEC 14496-12:2022) 8.11.14.1 - There shall be at most one
                    // ItemPropertyAssociationBox with a given pair of values of version and
                    // flags.
                    avifDiagnosticsPrintf(diag, "Multiple Box[ipma] with a given pair of values of version and flags. See BMFF (ISO/IEC 14496-12:2022) 8.11.14.1");
                    return AVIF_RESULT_BMFF_PARSE_FAILED;
                }
            }
            if (versionAndFlagsSeenCount == MAX_IPMA_VERSION_AND_FLAGS_SEEN) {
                avifDiagnosticsPrintf(diag, "Exceeded possible count of unique ipma version and flags tuples");
                return AVIF_RESULT_BMFF_PARSE_FAILED;
            }
            versionAndFlagsSeen[versionAndFlagsSeenCount] = versionAndFlags;
            ++versionAndFlagsSeenCount;
        } else {
            // These must all be type ipma
            avifDiagnosticsPrintf(diag, "Box[iprp] contains a box that isn't type 'ipma'");
            return AVIF_RESULT_BMFF_PARSE_FAILED;
        }

        AVIF_CHECKERR(avifROStreamSkip(&s, ipmaHeader.size), AVIF_RESULT_BMFF_PARSE_FAILED);
    }
    return AVIF_RESULT_OK;
}
3159 | | |
// Parses an ItemInfoEntry ('infe'), creating (or updating) the avifDecoderItem
// with the parsed item ID and storing its 4CC item_type and, for 'mime' items,
// its content type string. Returns AVIF_RESULT_OK on success.
static avifResult avifParseItemInfoEntry(avifMeta * meta, const uint8_t * raw, size_t rawLen, avifDiagnostics * diag)
{
    // Section 8.11.6.2 of ISO/IEC 14496-12.
    BEGIN_STREAM(s, raw, rawLen, diag, "Box[infe]");

    uint8_t version;
    uint32_t flags;
    AVIF_CHECKERR(avifROStreamReadVersionAndFlags(&s, &version, &flags), AVIF_RESULT_BMFF_PARSE_FAILED);
    // Version 2+ is required for item_type
    if (version != 2 && version != 3) {
        avifDiagnosticsPrintf(s.diag, "%s: Expecting box version 2 or 3, got version %u", s.diagContext, version);
        return AVIF_RESULT_BMFF_PARSE_FAILED;
    }
    // TODO: check flags. ISO/IEC 23008-12:2017, Section 9.2 says:
    //   The flags field of ItemInfoEntry with version greater than or equal to 2 is specified as
    //   follows:
    //
    //   (flags & 1) equal to 1 indicates that the item is not intended to be a part of the
    //   presentation. For example, when (flags & 1) is equal to 1 for an image item, the image
    //   item should not be displayed.
    //   (flags & 1) equal to 0 indicates that the item is intended to be a part of the
    //   presentation.
    //
    // See also Section 6.4.2.

    // item_ID is 16 bits wide in version 2 and 32 bits wide in version 3.
    uint32_t itemID;
    if (version == 2) {
        uint16_t tmp;
        AVIF_CHECKERR(avifROStreamReadU16(&s, &tmp), AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(16) item_ID;
        itemID = tmp;
    } else {
        AVIF_ASSERT_OR_RETURN(version == 3);
        AVIF_CHECKERR(avifROStreamReadU32(&s, &itemID), AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(32) item_ID;
    }
    AVIF_CHECKRES(avifCheckItemID("infe", itemID, diag));
    uint16_t itemProtectionIndex;
    AVIF_CHECKERR(avifROStreamReadU16(&s, &itemProtectionIndex), AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(16) item_protection_index;
    uint8_t itemType[4];
    AVIF_CHECKERR(avifROStreamRead(&s, itemType, 4), AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(32) item_type;
    AVIF_CHECKERR(avifROStreamReadString(&s, NULL, 0), AVIF_RESULT_BMFF_PARSE_FAILED); // utf8string item_name; (skipped)
    // Only 'mime' items carry a content_type string; other types get a zeroed
    // (empty) content type.
    avifContentType contentType;
    if (!memcmp(itemType, "mime", 4)) {
        AVIF_CHECKERR(avifROStreamReadString(&s, contentType.contentType, CONTENTTYPE_SIZE), AVIF_RESULT_BMFF_PARSE_FAILED); // utf8string content_type;
        // utf8string content_encoding; //optional
    } else {
        // if (item_type == 'uri ') {
        //  utf8string item_uri_type;
        // }
        memset(&contentType, 0, sizeof(contentType));
    }

    // The item may already exist (e.g. created by an earlier iref or ipma).
    avifDecoderItem * item;
    AVIF_CHECKRES(avifMetaFindOrCreateItem(meta, itemID, &item));

    memcpy(item->type, itemType, sizeof(itemType));
    item->contentType = contentType;
    return AVIF_RESULT_OK;
}
3218 | | |
3219 | | static avifResult avifParseItemInfoBox(avifMeta * meta, const uint8_t * raw, size_t rawLen, avifDiagnostics * diag) |
3220 | 15.1k | { |
3221 | 15.1k | BEGIN_STREAM(s, raw, rawLen, diag, "Box[iinf]"); |
3222 | | |
3223 | 15.1k | uint8_t version; |
3224 | 15.1k | AVIF_CHECKERR(avifROStreamReadVersionAndFlags(&s, &version, NULL), AVIF_RESULT_BMFF_PARSE_FAILED); |
3225 | 15.1k | uint32_t entryCount; |
3226 | 15.1k | if (version == 0) { |
3227 | 14.7k | uint16_t tmp; |
3228 | 14.7k | AVIF_CHECKERR(avifROStreamReadU16(&s, &tmp), AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(16) entry_count; |
3229 | 14.7k | entryCount = tmp; |
3230 | 14.7k | } else if (version == 1) { |
3231 | 393 | AVIF_CHECKERR(avifROStreamReadU32(&s, &entryCount), AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(32) entry_count; |
3232 | 393 | } else { |
3233 | 8 | avifDiagnosticsPrintf(diag, "Box[iinf] has an unsupported version %u", version); |
3234 | 8 | return AVIF_RESULT_BMFF_PARSE_FAILED; |
3235 | 8 | } |
3236 | | |
3237 | 33.2k | for (uint32_t entryIndex = 0; entryIndex < entryCount; ++entryIndex) { |
3238 | 18.2k | avifBoxHeader infeHeader; |
3239 | 18.2k | AVIF_CHECKERR(avifROStreamReadBoxHeader(&s, &infeHeader), AVIF_RESULT_BMFF_PARSE_FAILED); |
3240 | | |
3241 | 18.1k | if (!memcmp(infeHeader.type, "infe", 4)) { |
3242 | 18.1k | AVIF_CHECKRES(avifParseItemInfoEntry(meta, avifROStreamCurrent(&s), infeHeader.size, diag)); |
3243 | 18.1k | } else { |
3244 | | // These must all be type infe |
3245 | 1 | avifDiagnosticsPrintf(diag, "Box[iinf] contains a box that isn't type 'infe'"); |
3246 | 1 | return AVIF_RESULT_BMFF_PARSE_FAILED; |
3247 | 1 | } |
3248 | | |
3249 | 18.1k | AVIF_CHECKERR(avifROStreamSkip(&s, infeHeader.size), AVIF_RESULT_BMFF_PARSE_FAILED); |
3250 | 18.1k | } |
3251 | | |
3252 | 15.0k | return AVIF_RESULT_OK; |
3253 | 15.1k | } |
3254 | | |
// Parses an ItemReferenceBox ('iref'): a sequence of SingleItemTypeReferenceBox
// children, each declaring references "{fromID} is a {type} for {toID}".
// Recognized types: 'thmb' (thumbnail), 'auxl' (auxiliary image), 'cdsc'
// (content description), 'dimg' (derived image source, stored in the opposite
// direction on the referenced item), and 'prem' (premultiplied alpha base).
// Unrecognized reference types are skipped. Versions > 1 are ignored entirely.
static avifResult avifParseItemReferenceBox(avifMeta * meta, const uint8_t * raw, size_t rawLen, avifDiagnostics * diag)
{
    BEGIN_STREAM(s, raw, rawLen, diag, "Box[iref]");

    uint8_t version;
    AVIF_CHECKERR(avifROStreamReadVersionAndFlags(&s, &version, NULL), AVIF_RESULT_BMFF_PARSE_FAILED);
    if (version > 1) {
        // iref versions > 1 are not supported. Skip it.
        return AVIF_RESULT_OK;
    }

    while (avifROStreamHasBytesLeft(&s, 1)) {
        avifBoxHeader irefHeader;
        AVIF_CHECKERR(avifROStreamReadBoxHeader(&s, &irefHeader), AVIF_RESULT_BMFF_PARSE_FAILED);

        // from_item_ID is 16 bits wide in version 0 and 32 bits wide in version 1.
        uint32_t fromID = 0;
        if (version == 0) {
            uint16_t tmp;
            AVIF_CHECKERR(avifROStreamReadU16(&s, &tmp), AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(16) from_item_ID;
            fromID = tmp;
        } else {
            // version == 1
            AVIF_CHECKERR(avifROStreamReadU32(&s, &fromID), AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(32) from_item_ID;
        }
        // ISO 14496-12 section 8.11.12.1: "index values start at 1"
        AVIF_CHECKRES(avifCheckItemID("iref", fromID, diag));

        avifDecoderItem * item;
        AVIF_CHECKRES(avifMetaFindOrCreateItem(meta, fromID, &item));
        if (!memcmp(irefHeader.type, "dimg", 4)) {
            if (item->hasDimgFrom) {
                // ISO/IEC 23008-12 (HEIF) 6.6.1: The number of SingleItemTypeReferenceBoxes with the box type 'dimg'
                // and with the same value of from_item_ID shall not be greater than 1.
                avifDiagnosticsPrintf(diag, "Box[iinf] contains duplicate boxes of type 'dimg' with the same from_item_ID value %u", fromID);
                return AVIF_RESULT_BMFF_PARSE_FAILED;
            }
            item->hasDimgFrom = AVIF_TRUE;
        }

        uint16_t referenceCount = 0;
        AVIF_CHECKERR(avifROStreamReadU16(&s, &referenceCount), AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(16) reference_count;

        for (uint16_t refIndex = 0; refIndex < referenceCount; ++refIndex) {
            // to_item_ID is 16 bits wide in version 0 and 32 bits wide in version 1.
            uint32_t toID = 0;
            if (version == 0) {
                uint16_t tmp;
                AVIF_CHECKERR(avifROStreamReadU16(&s, &tmp), AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(16) to_item_ID;
                toID = tmp;
            } else {
                // version == 1
                AVIF_CHECKERR(avifROStreamReadU32(&s, &toID), AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(32) to_item_ID;
            }
            AVIF_CHECKRES(avifCheckItemID("iref", toID, diag));

            // Read this reference as "{fromID} is a {irefType} for {toID}"
            if (!memcmp(irefHeader.type, "thmb", 4)) {
                item->thumbnailForID = toID;
            } else if (!memcmp(irefHeader.type, "auxl", 4)) {
                item->auxForID = toID;
            } else if (!memcmp(irefHeader.type, "cdsc", 4)) {
                item->descForID = toID;
            } else if (!memcmp(irefHeader.type, "dimg", 4)) {
                // derived images refer in the opposite direction
                avifDecoderItem * dimg;
                AVIF_CHECKRES(avifMetaFindOrCreateItem(meta, toID, &dimg));

                // Section 8.11.12.1 of ISO/IEC 14496-12:
                //   The items linked to are then represented by an array of to_item_IDs;
                //   within a given array, a given value shall occur at most once.
                AVIF_CHECKERR(dimg->dimgForID != fromID, AVIF_RESULT_INVALID_IMAGE_GRID);
                // A given value may occur within multiple arrays but this is not supported by libavif.
                AVIF_CHECKERR(dimg->dimgForID == 0, AVIF_RESULT_NOT_IMPLEMENTED);
                dimg->dimgForID = fromID;
                dimg->dimgIdx = refIndex;
            } else if (!memcmp(irefHeader.type, "prem", 4)) {
                item->premByID = toID;
            }
        }
    }

    return AVIF_RESULT_OK;
}
3337 | | |
3338 | | static avifResult avifParseGroupsListBox(avifMeta * meta, const uint8_t * raw, size_t rawLen, avifDiagnostics * diag) |
3339 | 58 | { |
3340 | 58 | BEGIN_STREAM(s, raw, rawLen, diag, "Box[grpl]"); |
3341 | | |
3342 | 113 | while (avifROStreamHasBytesLeft(&s, 1)) { |
3343 | 110 | avifBoxHeader groupHeader; |
3344 | 110 | AVIF_CHECKERR(avifROStreamReadBoxHeader(&s, &groupHeader), AVIF_RESULT_BMFF_PARSE_FAILED); |
3345 | | // We don't check the flag or version as they depend on the grouping type (and for simplicity). |
3346 | | // ISO/IEC 14496-12:2024 Section 8.15.3.2 |
3347 | | // version shall be 0 unless defined otherwise for the grouping_type. Any values of flags such that |
3348 | | // (flags & 0x000FFF) is not equal to 0 are reserved. The values of flags shall be such that (flags |
3349 | | // & 0xFFF000) is equal to 0 unless defined otherwise for the grouping_type. |
3350 | 101 | AVIF_CHECKERR(avifROStreamReadVersionAndFlags(&s, NULL, NULL), AVIF_RESULT_BMFF_PARSE_FAILED); |
3351 | | |
3352 | 100 | avifEntityToGroup * group = avifArrayPush(&meta->entityToGroups); |
3353 | 100 | AVIF_CHECKERR(group != NULL, AVIF_RESULT_OUT_OF_MEMORY); |
3354 | 100 | AVIF_CHECKERR(avifArrayCreate(&group->entityIDs, sizeof(uint32_t), 2), AVIF_RESULT_OUT_OF_MEMORY); |
3355 | | |
3356 | 100 | memcpy(group->groupingType, groupHeader.type, 4); |
3357 | 100 | AVIF_CHECKERR(avifROStreamReadU32(&s, &group->groupID), AVIF_RESULT_BMFF_PARSE_FAILED); |
3358 | 99 | uint32_t numEntitiesInGroup; |
3359 | 99 | AVIF_CHECKERR(avifROStreamReadU32(&s, &numEntitiesInGroup), AVIF_RESULT_BMFF_PARSE_FAILED); |
3360 | 1.07k | for (uint32_t i = 0; i < numEntitiesInGroup; ++i) { |
3361 | 1.01k | uint32_t * entityId = avifArrayPush(&group->entityIDs); |
3362 | 1.01k | AVIF_CHECKERR(entityId != NULL, AVIF_RESULT_OUT_OF_MEMORY); |
3363 | 1.01k | AVIF_CHECKERR(avifROStreamReadU32(&s, entityId), AVIF_RESULT_BMFF_PARSE_FAILED); |
3364 | 1.01k | } |
3365 | 98 | } |
3366 | | |
3367 | 3 | return AVIF_RESULT_OK; |
3368 | 58 | } |
3369 | | |
// Parses a MetaBox ('meta') and dispatches each child box (hdlr, iloc, pitm,
// idat, iprp, iinf, iref, grpl) to its dedicated parser. Enforces that the
// first child is a 'hdlr' with handler_type 'pict' and that each unique box
// type appears at most once. Unknown child boxes are skipped.
static avifResult avifParseMetaBox(avifMeta * meta, uint64_t rawOffset, const uint8_t * raw, size_t rawLen, avifDiagnostics * diag)
{
    BEGIN_STREAM(s, raw, rawLen, diag, "Box[meta]");

    uint32_t flags;
    AVIF_CHECKERR(avifROStreamReadAndEnforceVersion(&s, 0, &flags), AVIF_RESULT_BMFF_PARSE_FAILED);

    ++meta->idatID; // for tracking idat

    // firstBox stays true until a valid 'hdlr' is parsed; uniqueBoxFlags
    // accumulates one bit per unique child box type seen (via uniqueBoxSeen).
    avifBool firstBox = AVIF_TRUE;
    uint32_t uniqueBoxFlags = 0;
    while (avifROStreamHasBytesLeft(&s, 1)) {
        avifBoxHeader header;
        AVIF_CHECKERR(avifROStreamReadBoxHeader(&s, &header), AVIF_RESULT_BMFF_PARSE_FAILED);

        if (firstBox) {
            if (!memcmp(header.type, "hdlr", 4)) {
                uint8_t handlerType[4];
                AVIF_CHECKERR(avifParseHandlerBox(avifROStreamCurrent(&s), header.size, handlerType, diag), AVIF_RESULT_BMFF_PARSE_FAILED);
                // HEIF (ISO/IEC 23008-12:2022), Section 6.2:
                //   The handler type for the MetaBox shall be 'pict'.
                if (memcmp(handlerType, "pict", 4) != 0) {
                    avifDiagnosticsPrintf(diag, "Box[hdlr] handler_type is not 'pict'");
                    return AVIF_RESULT_BMFF_PARSE_FAILED;
                }
                firstBox = AVIF_FALSE;
            } else {
                // hdlr must be the first box!
                avifDiagnosticsPrintf(diag, "Box[meta] does not have a Box[hdlr] as its first child box");
                return AVIF_RESULT_BMFF_PARSE_FAILED;
            }
        } else if (!memcmp(header.type, "hdlr", 4)) {
            avifDiagnosticsPrintf(diag, "Box[meta] contains a duplicate unique box of type 'hdlr'");
            return AVIF_RESULT_BMFF_PARSE_FAILED;
        } else if (!memcmp(header.type, "iloc", 4)) {
            AVIF_CHECKERR(uniqueBoxSeen(&uniqueBoxFlags, AVIF_UNIQUE_ILOC, "meta", "iloc", diag), AVIF_RESULT_BMFF_PARSE_FAILED);
            AVIF_CHECKRES(avifParseItemLocationBox(meta, avifROStreamCurrent(&s), header.size, diag));
        } else if (!memcmp(header.type, "pitm", 4)) {
            AVIF_CHECKERR(uniqueBoxSeen(&uniqueBoxFlags, AVIF_UNIQUE_PITM, "meta", "pitm", diag), AVIF_RESULT_BMFF_PARSE_FAILED);
            AVIF_CHECKERR(avifParsePrimaryItemBox(meta, avifROStreamCurrent(&s), header.size, diag), AVIF_RESULT_BMFF_PARSE_FAILED);
        } else if (!memcmp(header.type, "idat", 4)) {
            AVIF_CHECKERR(uniqueBoxSeen(&uniqueBoxFlags, AVIF_UNIQUE_IDAT, "meta", "idat", diag), AVIF_RESULT_BMFF_PARSE_FAILED);
            AVIF_CHECKERR(avifParseItemDataBox(meta, avifROStreamCurrent(&s), header.size, diag), AVIF_RESULT_BMFF_PARSE_FAILED);
        } else if (!memcmp(header.type, "iprp", 4)) {
            AVIF_CHECKERR(uniqueBoxSeen(&uniqueBoxFlags, AVIF_UNIQUE_IPRP, "meta", "iprp", diag), AVIF_RESULT_BMFF_PARSE_FAILED);
            AVIF_CHECKRES(avifParseItemPropertiesBox(meta, rawOffset + avifROStreamOffset(&s), avifROStreamCurrent(&s), header.size, diag));
        } else if (!memcmp(header.type, "iinf", 4)) {
            AVIF_CHECKERR(uniqueBoxSeen(&uniqueBoxFlags, AVIF_UNIQUE_IINF, "meta", "iinf", diag), AVIF_RESULT_BMFF_PARSE_FAILED);
            AVIF_CHECKRES(avifParseItemInfoBox(meta, avifROStreamCurrent(&s), header.size, diag));
        } else if (!memcmp(header.type, "iref", 4)) {
            AVIF_CHECKERR(uniqueBoxSeen(&uniqueBoxFlags, AVIF_UNIQUE_IREF, "meta", "iref", diag), AVIF_RESULT_BMFF_PARSE_FAILED);
            AVIF_CHECKRES(avifParseItemReferenceBox(meta, avifROStreamCurrent(&s), header.size, diag));
        } else if (!memcmp(header.type, "grpl", 4)) {
            AVIF_CHECKERR(uniqueBoxSeen(&uniqueBoxFlags, AVIF_UNIQUE_GRPL, "meta", "grpl", diag), AVIF_RESULT_BMFF_PARSE_FAILED);
            AVIF_CHECKRES(avifParseGroupsListBox(meta, avifROStreamCurrent(&s), header.size, diag));
        }

        AVIF_CHECKERR(avifROStreamSkip(&s, header.size), AVIF_RESULT_BMFF_PARSE_FAILED);
    }
    if (firstBox) {
        // The meta box must not be empty (it must contain at least a hdlr box)
        avifDiagnosticsPrintf(diag, "Box[meta] has no child boxes");
        return AVIF_RESULT_BMFF_PARSE_FAILED;
    }
    return AVIF_RESULT_OK;
}
3436 | | |
// Parses a TrackHeaderBox ('tkhd'), filling in the track's ID, duration, and
// 16.16 fixed-point width/height (truncated to their integer parts). Only
// versions 0 and 1 are supported. Returns AVIF_FALSE on truncated data or an
// unsupported version.
static avifBool avifParseTrackHeaderBox(avifTrack * track, const uint8_t * raw, size_t rawLen, avifDiagnostics * diag)
{
    BEGIN_STREAM(s, raw, rawLen, diag, "Box[tkhd]");

    uint8_t version;
    AVIF_CHECK(avifROStreamReadVersionAndFlags(&s, &version, NULL));

    // Version 1 uses 64-bit times/duration; version 0 uses 32-bit fields.
    uint32_t ignored32, trackID;
    uint64_t ignored64;
    if (version == 1) {
        AVIF_CHECK(avifROStreamReadU64(&s, &ignored64));            // unsigned int(64) creation_time;
        AVIF_CHECK(avifROStreamReadU64(&s, &ignored64));            // unsigned int(64) modification_time;
        AVIF_CHECK(avifROStreamReadU32(&s, &trackID));              // unsigned int(32) track_ID;
        AVIF_CHECK(avifROStreamReadU32(&s, &ignored32));            // const unsigned int(32) reserved = 0;
        AVIF_CHECK(avifROStreamReadU64(&s, &track->trackDuration)); // unsigned int(64) duration;
    } else if (version == 0) {
        uint32_t trackDuration;
        AVIF_CHECK(avifROStreamReadU32(&s, &ignored32));     // unsigned int(32) creation_time;
        AVIF_CHECK(avifROStreamReadU32(&s, &ignored32));     // unsigned int(32) modification_time;
        AVIF_CHECK(avifROStreamReadU32(&s, &trackID));       // unsigned int(32) track_ID;
        AVIF_CHECK(avifROStreamReadU32(&s, &ignored32));     // const unsigned int(32) reserved = 0;
        AVIF_CHECK(avifROStreamReadU32(&s, &trackDuration)); // unsigned int(32) duration;
        // Normalize the 32-bit "indefinite" sentinel to the 64-bit one.
        track->trackDuration = (trackDuration == AVIF_INDEFINITE_DURATION32) ? AVIF_INDEFINITE_DURATION64 : trackDuration;
    } else {
        // Unsupported version
        avifDiagnosticsPrintf(diag, "Box[tkhd] has an unsupported version [%u]", version);
        return AVIF_FALSE;
    }
    track->id = trackID;

    // Skipping the following 52 bytes here:
    // ------------------------------------
    // const unsigned int(32)[2] reserved = 0;
    // template int(16) layer = 0;
    // template int(16) alternate_group = 0;
    // template int(16) volume = {if track_is_audio 0x0100 else 0};
    // const unsigned int(16) reserved = 0;
    // template int(32)[9] matrix= { 0x00010000,0,0,0,0x00010000,0,0,0,0x40000000 }; // unity matrix
    AVIF_CHECK(avifROStreamSkip(&s, 52));

    // width and height are stored as 16.16 fixed point; keep the integer part.
    uint32_t width, height;
    AVIF_CHECK(avifROStreamReadU32(&s, &width));  // unsigned int(32) width;
    AVIF_CHECK(avifROStreamReadU32(&s, &height)); // unsigned int(32) height;
    track->width = width >> 16;
    track->height = height >> 16;

    // TODO: support scaling based on width/height track header info?

    return AVIF_TRUE;
}
3487 | | |
3488 | | static avifBool avifParseMediaHeaderBox(avifTrack * track, const uint8_t * raw, size_t rawLen, avifDiagnostics * diag) |
3489 | 136 | { |
3490 | 136 | BEGIN_STREAM(s, raw, rawLen, diag, "Box[mdhd]"); |
3491 | | |
3492 | 136 | uint8_t version; |
3493 | 136 | AVIF_CHECK(avifROStreamReadVersionAndFlags(&s, &version, NULL)); |
3494 | | |
3495 | 135 | uint32_t ignored32, mediaTimescale, mediaDuration32; |
3496 | 135 | uint64_t ignored64, mediaDuration64; |
3497 | 135 | if (version == 1) { |
3498 | 6 | AVIF_CHECK(avifROStreamReadU64(&s, &ignored64)); // unsigned int(64) creation_time; |
3499 | 4 | AVIF_CHECK(avifROStreamReadU64(&s, &ignored64)); // unsigned int(64) modification_time; |
3500 | 3 | AVIF_CHECK(avifROStreamReadU32(&s, &mediaTimescale)); // unsigned int(32) timescale; |
3501 | 2 | AVIF_CHECK(avifROStreamReadU64(&s, &mediaDuration64)); // unsigned int(64) duration; |
3502 | 1 | track->mediaDuration = mediaDuration64; |
3503 | 129 | } else if (version == 0) { |
3504 | 123 | AVIF_CHECK(avifROStreamReadU32(&s, &ignored32)); // unsigned int(32) creation_time; |
3505 | 122 | AVIF_CHECK(avifROStreamReadU32(&s, &ignored32)); // unsigned int(32) modification_time; |
3506 | 121 | AVIF_CHECK(avifROStreamReadU32(&s, &mediaTimescale)); // unsigned int(32) timescale; |
3507 | 120 | AVIF_CHECK(avifROStreamReadU32(&s, &mediaDuration32)); // unsigned int(32) duration; |
3508 | 119 | track->mediaDuration = (uint64_t)mediaDuration32; |
3509 | 119 | } else { |
3510 | | // Unsupported version |
3511 | 6 | avifDiagnosticsPrintf(diag, "Box[mdhd] has an unsupported version [%u]", version); |
3512 | 6 | return AVIF_FALSE; |
3513 | 6 | } |
3514 | | |
3515 | 120 | track->mediaTimescale = mediaTimescale; |
3516 | 120 | return AVIF_TRUE; |
3517 | 135 | } |
3518 | | |
3519 | | static avifResult avifParseChunkOffsetBox(avifSampleTable * sampleTable, avifBool largeOffsets, const uint8_t * raw, size_t rawLen, avifDiagnostics * diag) |
3520 | 156 | { |
3521 | 156 | BEGIN_STREAM(s, raw, rawLen, diag, largeOffsets ? "Box[co64]" : "Box[stco]"); |
3522 | | |
3523 | 156 | AVIF_CHECKERR(avifROStreamReadAndEnforceVersion(&s, /*enforcedVersion=*/0, /*flags=*/NULL), AVIF_RESULT_BMFF_PARSE_FAILED); |
3524 | | |
3525 | 154 | uint32_t entryCount; |
3526 | 154 | AVIF_CHECKERR(avifROStreamReadU32(&s, &entryCount), AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(32) entry_count; |
3527 | 504 | for (uint32_t i = 0; i < entryCount; ++i) { |
3528 | 400 | uint64_t offset; |
3529 | 400 | if (largeOffsets) { |
3530 | 64 | AVIF_CHECKERR(avifROStreamReadU64(&s, &offset), AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(64) chunk_offset; |
3531 | 336 | } else { |
3532 | 336 | uint32_t offset32; |
3533 | 336 | AVIF_CHECKERR(avifROStreamReadU32(&s, &offset32), AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(32) chunk_offset; |
3534 | 301 | offset = (uint64_t)offset32; |
3535 | 301 | } |
3536 | | |
3537 | 351 | avifSampleTableChunk * chunk = (avifSampleTableChunk *)avifArrayPush(&sampleTable->chunks); |
3538 | 351 | AVIF_CHECKERR(chunk != NULL, AVIF_RESULT_OUT_OF_MEMORY); |
3539 | 351 | chunk->offset = offset; |
3540 | 351 | } |
3541 | 104 | return AVIF_RESULT_OK; |
3542 | 153 | } |
3543 | | |
3544 | | static avifResult avifParseSampleToChunkBox(avifSampleTable * sampleTable, const uint8_t * raw, size_t rawLen, avifDiagnostics * diag) |
3545 | 150 | { |
3546 | 150 | BEGIN_STREAM(s, raw, rawLen, diag, "Box[stsc]"); |
3547 | | |
3548 | 150 | AVIF_CHECKERR(avifROStreamReadAndEnforceVersion(&s, /*enforcedVersion=*/0, /*flags=*/NULL), AVIF_RESULT_BMFF_PARSE_FAILED); |
3549 | | |
3550 | 149 | uint32_t entryCount; |
3551 | 149 | AVIF_CHECKERR(avifROStreamReadU32(&s, &entryCount), AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(32) entry_count; |
3552 | 148 | uint32_t prevFirstChunk = 0; |
3553 | 322 | for (uint32_t i = 0; i < entryCount; ++i) { |
3554 | 226 | avifSampleTableSampleToChunk * sampleToChunk = (avifSampleTableSampleToChunk *)avifArrayPush(&sampleTable->sampleToChunks); |
3555 | 226 | AVIF_CHECKERR(sampleToChunk != NULL, AVIF_RESULT_OUT_OF_MEMORY); |
3556 | 226 | AVIF_CHECKERR(avifROStreamReadU32(&s, &sampleToChunk->firstChunk), AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(32) first_chunk; |
3557 | 210 | AVIF_CHECKERR(avifROStreamReadU32(&s, &sampleToChunk->samplesPerChunk), AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(32) samples_per_chunk; |
3558 | 197 | AVIF_CHECKERR(avifROStreamReadU32(&s, &sampleToChunk->sampleDescriptionIndex), |
3559 | 197 | AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(32) sample_description_index; |
3560 | | // The first_chunk fields should start with 1 and be strictly increasing. |
3561 | 181 | if (i == 0) { |
3562 | 93 | if (sampleToChunk->firstChunk != 1) { |
3563 | 3 | avifDiagnosticsPrintf(diag, "Box[stsc] does not begin with chunk 1 [%u]", sampleToChunk->firstChunk); |
3564 | 3 | return AVIF_RESULT_BMFF_PARSE_FAILED; |
3565 | 3 | } |
3566 | 93 | } else { |
3567 | 88 | if (sampleToChunk->firstChunk <= prevFirstChunk) { |
3568 | 4 | avifDiagnosticsPrintf(diag, "Box[stsc] chunks are not strictly increasing"); |
3569 | 4 | return AVIF_RESULT_BMFF_PARSE_FAILED; |
3570 | 4 | } |
3571 | 88 | } |
3572 | 174 | prevFirstChunk = sampleToChunk->firstChunk; |
3573 | 174 | } |
3574 | 96 | return AVIF_RESULT_OK; |
3575 | 148 | } |
3576 | | |
3577 | | static avifResult avifParseSampleSizeBox(avifSampleTable * sampleTable, const uint8_t * raw, size_t rawLen, avifDiagnostics * diag) |
3578 | 177 | { |
3579 | 177 | BEGIN_STREAM(s, raw, rawLen, diag, "Box[stsz]"); |
3580 | | |
3581 | 177 | AVIF_CHECKERR(avifROStreamReadAndEnforceVersion(&s, /*enforcedVersion=*/0, /*flags=*/NULL), AVIF_RESULT_BMFF_PARSE_FAILED); |
3582 | | |
3583 | 176 | uint32_t allSamplesSize, sampleCount; |
3584 | 176 | AVIF_CHECKERR(avifROStreamReadU32(&s, &allSamplesSize), AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(32) sample_size; |
3585 | 175 | AVIF_CHECKERR(avifROStreamReadU32(&s, &sampleCount), AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(32) sample_count; |
3586 | | |
3587 | 174 | if (allSamplesSize > 0) { |
3588 | 104 | sampleTable->allSamplesSize = allSamplesSize; |
3589 | 104 | } else { |
3590 | 217 | for (uint32_t i = 0; i < sampleCount; ++i) { |
3591 | 194 | avifSampleTableSampleSize * sampleSize = (avifSampleTableSampleSize *)avifArrayPush(&sampleTable->sampleSizes); |
3592 | 194 | AVIF_CHECKERR(sampleSize != NULL, AVIF_RESULT_OUT_OF_MEMORY); |
3593 | 194 | AVIF_CHECKERR(avifROStreamReadU32(&s, &sampleSize->size), AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(32) entry_size; |
3594 | 194 | } |
3595 | 70 | } |
3596 | 127 | return AVIF_RESULT_OK; |
3597 | 174 | } |
3598 | | |
3599 | | static avifResult avifParseSyncSampleBox(avifSampleTable * sampleTable, const uint8_t * raw, size_t rawLen, avifDiagnostics * diag) |
3600 | 147 | { |
3601 | 147 | BEGIN_STREAM(s, raw, rawLen, diag, "Box[stss]"); |
3602 | | |
3603 | 147 | AVIF_CHECKERR(avifROStreamReadAndEnforceVersion(&s, /*enforcedVersion=*/0, /*flags=*/NULL), AVIF_RESULT_BMFF_PARSE_FAILED); |
3604 | | |
3605 | 146 | uint32_t entryCount; |
3606 | 146 | AVIF_CHECKERR(avifROStreamReadU32(&s, &entryCount), AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(32) entry_count; |
3607 | | |
3608 | 411 | for (uint32_t i = 0; i < entryCount; ++i) { |
3609 | 314 | uint32_t sampleNumber = 0; |
3610 | 314 | AVIF_CHECKERR(avifROStreamReadU32(&s, &sampleNumber), AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(32) sample_number; |
3611 | 266 | avifSyncSample * syncSample = (avifSyncSample *)avifArrayPush(&sampleTable->syncSamples); |
3612 | 266 | AVIF_CHECKERR(syncSample != NULL, AVIF_RESULT_OUT_OF_MEMORY); |
3613 | 266 | syncSample->sampleNumber = sampleNumber; |
3614 | 266 | } |
3615 | 97 | return AVIF_RESULT_OK; |
3616 | 145 | } |
3617 | | |
3618 | | static avifResult avifParseTimeToSampleBox(avifSampleTable * sampleTable, const uint8_t * raw, size_t rawLen, avifDiagnostics * diag) |
3619 | 84 | { |
3620 | 84 | BEGIN_STREAM(s, raw, rawLen, diag, "Box[stts]"); |
3621 | | |
3622 | 84 | AVIF_CHECKERR(avifROStreamReadAndEnforceVersion(&s, /*enforcedVersion=*/0, /*flags=*/NULL), AVIF_RESULT_BMFF_PARSE_FAILED); |
3623 | | |
3624 | 82 | uint32_t entryCount; |
3625 | 82 | AVIF_CHECKERR(avifROStreamReadU32(&s, &entryCount), AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(32) entry_count; |
3626 | | |
3627 | 226 | for (uint32_t i = 0; i < entryCount; ++i) { |
3628 | 201 | avifSampleTableTimeToSample * timeToSample = (avifSampleTableTimeToSample *)avifArrayPush(&sampleTable->timeToSamples); |
3629 | 201 | AVIF_CHECKERR(timeToSample != NULL, AVIF_RESULT_OUT_OF_MEMORY); |
3630 | 201 | AVIF_CHECKERR(avifROStreamReadU32(&s, &timeToSample->sampleCount), AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(32) sample_count; |
3631 | 154 | AVIF_CHECKERR(avifROStreamReadU32(&s, &timeToSample->sampleDelta), AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(32) sample_delta; |
3632 | 154 | } |
3633 | 25 | return AVIF_RESULT_OK; |
3634 | 81 | } |
3635 | | |
// Parses a SampleDescriptionBox ('stsd') into sampleTable->sampleDescriptions.
// Each child box is one sample entry; for entries whose 4CC maps to a known
// codec (see avifGetCodecType), the boxes following the fixed 78-byte
// VisualSampleEntry fields are parsed as item properties (av1C, colr, ...).
// rawOffset is the absolute position of 'raw' in the file, used to compute
// absolute offsets for the nested property parsing.
static avifResult avifParseSampleDescriptionBox(avifSampleTable * sampleTable,
                                                uint64_t rawOffset,
                                                const uint8_t * raw,
                                                size_t rawLen,
                                                avifDiagnostics * diag)
{
    BEGIN_STREAM(s, raw, rawLen, diag, "Box[stsd]");

    uint8_t version;
    AVIF_CHECKERR(avifROStreamReadVersionAndFlags(&s, &version, NULL), AVIF_RESULT_BMFF_PARSE_FAILED);

    // Section 8.5.2.3 of ISO/IEC 14496-12:
    // version is set to zero. A version number of 1 shall be treated as a version of 0.
    if (version != 0 && version != 1) {
        avifDiagnosticsPrintf(diag, "Box[stsd]: Expecting box version 0 or 1, got version %u", version);
        return AVIF_RESULT_BMFF_PARSE_FAILED;
    }

    uint32_t entryCount;
    AVIF_CHECKERR(avifROStreamReadU32(&s, &entryCount), AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(32) entry_count;

    for (uint32_t i = 0; i < entryCount; ++i) {
        avifBoxHeader sampleEntryHeader;
        AVIF_CHECKERR(avifROStreamReadBoxHeader(&s, &sampleEntryHeader), AVIF_RESULT_BMFF_PARSE_FAILED);

        avifSampleDescription * description = (avifSampleDescription *)avifArrayPush(&sampleTable->sampleDescriptions);
        AVIF_CHECKERR(description != NULL, AVIF_RESULT_OUT_OF_MEMORY);
        if (!avifArrayCreate(&description->properties, sizeof(avifProperty), 16)) {
            // Drop the half-initialized entry so the array stays consistent.
            avifArrayPop(&sampleTable->sampleDescriptions);
            return AVIF_RESULT_OUT_OF_MEMORY;
        }
        memcpy(description->format, sampleEntryHeader.type, sizeof(description->format));
        const size_t sampleEntryBytes = sampleEntryHeader.size;
        if (avifGetCodecType(description->format) != AVIF_CODEC_TYPE_UNKNOWN) {
            if (sampleEntryBytes < VISUALSAMPLEENTRY_SIZE) {
                avifDiagnosticsPrintf(diag, "Not enough bytes to parse VisualSampleEntry");
                return AVIF_RESULT_BMFF_PARSE_FAILED;
            }
            // Properties start right after the fixed VisualSampleEntry fields,
            // both in the in-memory buffer and in absolute file offset terms.
            AVIF_CHECKRES(avifParseItemPropertyContainerBox(&description->properties,
                                                            rawOffset + avifROStreamOffset(&s) + VISUALSAMPLEENTRY_SIZE,
                                                            avifROStreamCurrent(&s) + VISUALSAMPLEENTRY_SIZE,
                                                            sampleEntryBytes - VISUALSAMPLEENTRY_SIZE,
                                                            /*isTrack=*/AVIF_TRUE,
                                                            diag));
        }

        // Advance past the whole sample entry regardless of its type.
        AVIF_CHECKERR(avifROStreamSkip(&s, sampleEntryBytes), AVIF_RESULT_BMFF_PARSE_FAILED);
    }
    return AVIF_RESULT_OK;
}
3686 | | |
3687 | | static avifResult avifParseSampleTableBox(avifTrack * track, uint64_t rawOffset, const uint8_t * raw, size_t rawLen, avifDiagnostics * diag) |
3688 | 499 | { |
3689 | 499 | if (track->sampleTable) { |
3690 | | // A TrackBox may only have one SampleTable |
3691 | 1 | avifDiagnosticsPrintf(diag, "Duplicate Box[stbl] for a single track detected"); |
3692 | 1 | return AVIF_RESULT_BMFF_PARSE_FAILED; |
3693 | 1 | } |
3694 | 498 | track->sampleTable = avifSampleTableCreate(); |
3695 | 498 | AVIF_CHECKERR(track->sampleTable != NULL, AVIF_RESULT_OUT_OF_MEMORY); |
3696 | | |
3697 | 498 | BEGIN_STREAM(s, raw, rawLen, diag, "Box[stbl]"); |
3698 | | |
3699 | 1.26k | while (avifROStreamHasBytesLeft(&s, 1)) { |
3700 | 1.12k | avifBoxHeader header; |
3701 | 1.12k | AVIF_CHECKERR(avifROStreamReadBoxHeader(&s, &header), AVIF_RESULT_BMFF_PARSE_FAILED); |
3702 | | |
3703 | 1.09k | if (!memcmp(header.type, "stco", 4)) { |
3704 | 139 | AVIF_CHECKRES(avifParseChunkOffsetBox(track->sampleTable, AVIF_FALSE, avifROStreamCurrent(&s), header.size, diag)); |
3705 | 951 | } else if (!memcmp(header.type, "co64", 4)) { |
3706 | 17 | AVIF_CHECKRES(avifParseChunkOffsetBox(track->sampleTable, AVIF_TRUE, avifROStreamCurrent(&s), header.size, diag)); |
3707 | 934 | } else if (!memcmp(header.type, "stsc", 4)) { |
3708 | 150 | AVIF_CHECKRES(avifParseSampleToChunkBox(track->sampleTable, avifROStreamCurrent(&s), header.size, diag)); |
3709 | 784 | } else if (!memcmp(header.type, "stsz", 4)) { |
3710 | 177 | AVIF_CHECKRES(avifParseSampleSizeBox(track->sampleTable, avifROStreamCurrent(&s), header.size, diag)); |
3711 | 607 | } else if (!memcmp(header.type, "stss", 4)) { |
3712 | 147 | AVIF_CHECKRES(avifParseSyncSampleBox(track->sampleTable, avifROStreamCurrent(&s), header.size, diag)); |
3713 | 460 | } else if (!memcmp(header.type, "stts", 4)) { |
3714 | 84 | AVIF_CHECKRES(avifParseTimeToSampleBox(track->sampleTable, avifROStreamCurrent(&s), header.size, diag)); |
3715 | 376 | } else if (!memcmp(header.type, "stsd", 4)) { |
3716 | 183 | AVIF_CHECKRES(avifParseSampleDescriptionBox(track->sampleTable, |
3717 | 183 | rawOffset + avifROStreamOffset(&s), |
3718 | 183 | avifROStreamCurrent(&s), |
3719 | 183 | header.size, |
3720 | 183 | diag)); |
3721 | 183 | } |
3722 | | |
3723 | 762 | AVIF_CHECKERR(avifROStreamSkip(&s, header.size), AVIF_RESULT_BMFF_PARSE_FAILED); |
3724 | 762 | } |
3725 | 137 | return AVIF_RESULT_OK; |
3726 | 498 | } |
3727 | | |
3728 | | static avifResult avifParseMediaInformationBox(avifTrack * track, uint64_t rawOffset, const uint8_t * raw, size_t rawLen, avifDiagnostics * diag) |
3729 | 526 | { |
3730 | 526 | BEGIN_STREAM(s, raw, rawLen, diag, "Box[minf]"); |
3731 | | |
3732 | 1.09k | while (avifROStreamHasBytesLeft(&s, 1)) { |
3733 | 930 | avifBoxHeader header; |
3734 | 930 | AVIF_CHECKERR(avifROStreamReadBoxHeader(&s, &header), AVIF_RESULT_BMFF_PARSE_FAILED); |
3735 | | |
3736 | 926 | if (!memcmp(header.type, "stbl", 4)) { |
3737 | 499 | AVIF_CHECKRES(avifParseSampleTableBox(track, rawOffset + avifROStreamOffset(&s), avifROStreamCurrent(&s), header.size, diag)); |
3738 | 499 | } |
3739 | | |
3740 | 564 | AVIF_CHECKERR(avifROStreamSkip(&s, header.size), AVIF_RESULT_BMFF_PARSE_FAILED); |
3741 | 564 | } |
3742 | 160 | return AVIF_RESULT_OK; |
3743 | 526 | } |
3744 | | |
3745 | | static avifResult avifParseMediaBox(avifTrack * track, uint64_t rawOffset, const uint8_t * raw, size_t rawLen, avifDiagnostics * diag) |
3746 | 568 | { |
3747 | 568 | BEGIN_STREAM(s, raw, rawLen, diag, "Box[mdia]"); |
3748 | | |
3749 | 1.14k | while (avifROStreamHasBytesLeft(&s, 1)) { |
3750 | 972 | avifBoxHeader header; |
3751 | 972 | AVIF_CHECKERR(avifROStreamReadBoxHeader(&s, &header), AVIF_RESULT_BMFF_PARSE_FAILED); |
3752 | | |
3753 | 963 | if (!memcmp(header.type, "mdhd", 4)) { |
3754 | 136 | AVIF_CHECKERR(avifParseMediaHeaderBox(track, avifROStreamCurrent(&s), header.size, diag), AVIF_RESULT_BMFF_PARSE_FAILED); |
3755 | 827 | } else if (!memcmp(header.type, "minf", 4)) { |
3756 | 526 | AVIF_CHECKRES( |
3757 | 526 | avifParseMediaInformationBox(track, rawOffset + avifROStreamOffset(&s), avifROStreamCurrent(&s), header.size, diag)); |
3758 | 526 | } else if (!memcmp(header.type, "hdlr", 4)) { |
3759 | 98 | AVIF_CHECKERR(avifParseHandlerBox(avifROStreamCurrent(&s), header.size, track->handlerType, diag), |
3760 | 98 | AVIF_RESULT_BMFF_PARSE_FAILED); |
3761 | 98 | } |
3762 | | |
3763 | 580 | AVIF_CHECKERR(avifROStreamSkip(&s, header.size), AVIF_RESULT_BMFF_PARSE_FAILED); |
3764 | 580 | } |
3765 | 176 | return AVIF_RESULT_OK; |
3766 | 568 | } |
3767 | | |
3768 | | static avifBool avifTrackReferenceBox(avifTrack * track, const uint8_t * raw, size_t rawLen, avifDiagnostics * diag) |
3769 | 67 | { |
3770 | 67 | BEGIN_STREAM(s, raw, rawLen, diag, "Box[tref]"); |
3771 | | |
3772 | 119 | while (avifROStreamHasBytesLeft(&s, 1)) { |
3773 | 64 | avifBoxHeader header; |
3774 | 64 | AVIF_CHECK(avifROStreamReadBoxHeader(&s, &header)); |
3775 | | |
3776 | 58 | if (!memcmp(header.type, "auxl", 4)) { |
3777 | 10 | uint32_t toID; |
3778 | 10 | AVIF_CHECK(avifROStreamReadU32(&s, &toID)); // unsigned int(32) track_IDs[]; |
3779 | 9 | AVIF_CHECK(avifROStreamSkip(&s, header.size - sizeof(uint32_t))); // just take the first one |
3780 | 7 | track->auxForID = toID; |
3781 | 48 | } else if (!memcmp(header.type, "prem", 4)) { |
3782 | 6 | uint32_t byID; |
3783 | 6 | AVIF_CHECK(avifROStreamReadU32(&s, &byID)); // unsigned int(32) track_IDs[]; |
3784 | 5 | AVIF_CHECK(avifROStreamSkip(&s, header.size - sizeof(uint32_t))); // just take the first one |
3785 | 3 | track->premByID = byID; |
3786 | 42 | } else { |
3787 | 42 | AVIF_CHECK(avifROStreamSkip(&s, header.size)); |
3788 | 42 | } |
3789 | 58 | } |
3790 | 55 | return AVIF_TRUE; |
3791 | 67 | } |
3792 | | |
3793 | | static avifBool avifParseEditListBox(avifTrack * track, const uint8_t * raw, size_t rawLen, avifDiagnostics * diag) |
3794 | 216 | { |
3795 | 216 | BEGIN_STREAM(s, raw, rawLen, diag, "Box[elst]"); |
3796 | | |
3797 | 216 | uint8_t version; |
3798 | 216 | uint32_t flags; |
3799 | 216 | AVIF_CHECK(avifROStreamReadVersionAndFlags(&s, &version, &flags)); |
3800 | | |
3801 | 215 | if ((flags & 1) == 0) { |
3802 | 110 | track->isRepeating = AVIF_FALSE; |
3803 | 110 | return AVIF_TRUE; |
3804 | 110 | } |
3805 | | |
3806 | 105 | track->isRepeating = AVIF_TRUE; |
3807 | 105 | uint32_t entryCount; |
3808 | 105 | AVIF_CHECK(avifROStreamReadU32(&s, &entryCount)); // unsigned int(32) entry_count; |
3809 | 104 | if (entryCount != 1) { |
3810 | 27 | avifDiagnosticsPrintf(diag, "Box[elst] contains an entry_count != 1 [%u]", entryCount); |
3811 | 27 | return AVIF_FALSE; |
3812 | 27 | } |
3813 | | |
3814 | 77 | if (version == 1) { |
3815 | 69 | AVIF_CHECK(avifROStreamReadU64(&s, &track->segmentDuration)); // unsigned int(64) segment_duration; |
3816 | 69 | } else if (version == 0) { |
3817 | 3 | uint32_t segmentDuration; |
3818 | 3 | AVIF_CHECK(avifROStreamReadU32(&s, &segmentDuration)); // unsigned int(32) segment_duration; |
3819 | 2 | track->segmentDuration = segmentDuration; |
3820 | 5 | } else { |
3821 | | // Unsupported version |
3822 | 5 | avifDiagnosticsPrintf(diag, "Box[elst] has an unsupported version [%u]", version); |
3823 | 5 | return AVIF_FALSE; |
3824 | 5 | } |
3825 | 70 | if (track->segmentDuration == 0) { |
3826 | 1 | avifDiagnosticsPrintf(diag, "Box[elst] Invalid value for segment_duration (0)."); |
3827 | 1 | return AVIF_FALSE; |
3828 | 1 | } |
3829 | 69 | return AVIF_TRUE; |
3830 | 70 | } |
3831 | | |
3832 | | static avifBool avifParseEditBox(avifTrack * track, const uint8_t * raw, size_t rawLen, avifDiagnostics * diag) |
3833 | 226 | { |
3834 | 226 | BEGIN_STREAM(s, raw, rawLen, diag, "Box[edts]"); |
3835 | | |
3836 | 226 | avifBool elstBoxSeen = AVIF_FALSE; |
3837 | 450 | while (avifROStreamHasBytesLeft(&s, 1)) { |
3838 | 267 | avifBoxHeader header; |
3839 | 267 | AVIF_CHECK(avifROStreamReadBoxHeader(&s, &header)); |
3840 | | |
3841 | 261 | if (!memcmp(header.type, "elst", 4)) { |
3842 | 216 | if (elstBoxSeen) { |
3843 | 0 | avifDiagnosticsPrintf(diag, "More than one [elst] Box was found."); |
3844 | 0 | return AVIF_FALSE; |
3845 | 0 | } |
3846 | 216 | AVIF_CHECK(avifParseEditListBox(track, avifROStreamCurrent(&s), header.size, diag)); |
3847 | 179 | elstBoxSeen = AVIF_TRUE; |
3848 | 179 | } |
3849 | 224 | AVIF_CHECK(avifROStreamSkip(&s, header.size)); |
3850 | 224 | } |
3851 | 183 | if (!elstBoxSeen) { |
3852 | 5 | avifDiagnosticsPrintf(diag, "Box[edts] contains no [elst] Box."); |
3853 | 5 | return AVIF_FALSE; |
3854 | 5 | } |
3855 | 178 | return AVIF_TRUE; |
3856 | 183 | } |
3857 | | |
// Parses a TrackBox ('trak') into a newly appended entry of data->tracks.
// Child boxes: 'tkhd' (mandatory, unique), 'meta', 'mdia', 'tref', and 'edts'
// (unique). After parsing, derives track->repetitionCount from the edit list
// state per Section 9.6.1 of ISO/IEC 23008-12.
// rawOffset is the absolute position of 'raw' in the file, forwarded to child
// parsers that need absolute offsets.
static avifResult avifParseTrackBox(avifDecoderData * data, uint64_t rawOffset, const uint8_t * raw, size_t rawLen)
{
    BEGIN_STREAM(s, raw, rawLen, data->diag, "Box[trak]");

    avifTrack * track = avifDecoderDataCreateTrack(data);
    AVIF_CHECKERR(track != NULL, AVIF_RESULT_OUT_OF_MEMORY);

    avifBool edtsBoxSeen = AVIF_FALSE;
    avifBool tkhdSeen = AVIF_FALSE;
    while (avifROStreamHasBytesLeft(&s, 1)) {
        avifBoxHeader header;
        AVIF_CHECKERR(avifROStreamReadBoxHeader(&s, &header), AVIF_RESULT_BMFF_PARSE_FAILED);

        if (!memcmp(header.type, "tkhd", 4)) {
            if (tkhdSeen) {
                avifDiagnosticsPrintf(data->diag, "Box[trak] contains a duplicate unique box of type 'tkhd'");
                return AVIF_RESULT_BMFF_PARSE_FAILED;
            }
            AVIF_CHECKERR(avifParseTrackHeaderBox(track, avifROStreamCurrent(&s), header.size, data->diag), AVIF_RESULT_BMFF_PARSE_FAILED);
            tkhdSeen = AVIF_TRUE;
        } else if (!memcmp(header.type, "meta", 4)) {
            AVIF_CHECKRES(
                avifParseMetaBox(track->meta, rawOffset + avifROStreamOffset(&s), avifROStreamCurrent(&s), header.size, data->diag));
        } else if (!memcmp(header.type, "mdia", 4)) {
            AVIF_CHECKRES(avifParseMediaBox(track, rawOffset + avifROStreamOffset(&s), avifROStreamCurrent(&s), header.size, data->diag));
        } else if (!memcmp(header.type, "tref", 4)) {
            AVIF_CHECKERR(avifTrackReferenceBox(track, avifROStreamCurrent(&s), header.size, data->diag), AVIF_RESULT_BMFF_PARSE_FAILED);
        } else if (!memcmp(header.type, "edts", 4)) {
            if (edtsBoxSeen) {
                avifDiagnosticsPrintf(data->diag, "Box[trak] contains a duplicate unique box of type 'edts'");
                return AVIF_RESULT_BMFF_PARSE_FAILED;
            }
            AVIF_CHECKERR(avifParseEditBox(track, avifROStreamCurrent(&s), header.size, data->diag), AVIF_RESULT_BMFF_PARSE_FAILED);
            edtsBoxSeen = AVIF_TRUE;
        }

        // Unknown child boxes are skipped without failing.
        AVIF_CHECKERR(avifROStreamSkip(&s, header.size), AVIF_RESULT_BMFF_PARSE_FAILED);
    }
    if (!tkhdSeen) {
        avifDiagnosticsPrintf(data->diag, "Box[trak] does not contain a mandatory [tkhd] box");
        return AVIF_RESULT_BMFF_PARSE_FAILED;
    }
    // Derive the repetition count from the edit list (if any).
    if (!edtsBoxSeen) {
        track->repetitionCount = AVIF_REPETITION_COUNT_UNKNOWN;
    } else if (track->isRepeating) {
        if (track->trackDuration == AVIF_INDEFINITE_DURATION64) {
            // If isRepeating is true and the track duration is unknown/indefinite, then set the repetition count to infinite
            // (Section 9.6.1 of ISO/IEC 23008-12 Part 12).
            track->repetitionCount = AVIF_REPETITION_COUNT_INFINITE;
        } else {
            // Section 9.6.1. of ISO/IEC 23008-12 Part 12: 1, the entire edit list is repeated a sufficient number of times to
            // equal the track duration.
            //
            // Since libavif uses repetitionCount (which is 0-based), we subtract the value by 1 to derive the number of
            // repetitions.
            AVIF_ASSERT_OR_RETURN(track->segmentDuration != 0);
            // We specifically check for trackDuration == 0 here and not when it is actually read in order to accept files which
            // inadvertently has a trackDuration of 0 without any edit lists.
            if (track->trackDuration == 0) {
                avifDiagnosticsPrintf(data->diag, "Invalid track duration 0.");
                return AVIF_RESULT_BMFF_PARSE_FAILED;
            }
            // Ceiling division (partial repetitions round up), then -1 to make the count 0-based.
            const uint64_t repetitionCount =
                (track->trackDuration / track->segmentDuration) + (track->trackDuration % track->segmentDuration != 0) - 1;
            if (repetitionCount > INT_MAX) {
                // repetitionCount does not fit in an integer and hence it is
                // likely to be a very large value. So, we just set it to
                // infinite.
                track->repetitionCount = AVIF_REPETITION_COUNT_INFINITE;
            } else {
                track->repetitionCount = (int)repetitionCount;
            }
        }
    } else {
        // An edit list without the repeat flag: play once.
        track->repetitionCount = 0;
    }

    return AVIF_RESULT_OK;
}
3937 | | |
3938 | | static avifResult avifParseMovieBox(avifDecoderData * data, |
3939 | | uint64_t rawOffset, |
3940 | | const uint8_t * raw, |
3941 | | size_t rawLen, |
3942 | | uint32_t imageSizeLimit, |
3943 | | uint32_t imageDimensionLimit) |
3944 | 836 | { |
3945 | 836 | BEGIN_STREAM(s, raw, rawLen, data->diag, "Box[moov]"); |
3946 | | |
3947 | 836 | avifBool hasTrak = AVIF_FALSE; |
3948 | 1.58k | while (avifROStreamHasBytesLeft(&s, 1)) { |
3949 | 1.50k | avifBoxHeader header; |
3950 | 1.50k | AVIF_CHECKERR(avifROStreamReadBoxHeader(&s, &header), AVIF_RESULT_BMFF_PARSE_FAILED); |
3951 | | |
3952 | 1.44k | if (!memcmp(header.type, "trak", 4)) { |
3953 | 848 | AVIF_CHECKRES(avifParseTrackBox(data, rawOffset + avifROStreamOffset(&s), avifROStreamCurrent(&s), header.size)); |
3954 | 159 | hasTrak = AVIF_TRUE; |
3955 | | |
3956 | 159 | const avifTrack * track = &data->tracks.track[data->tracks.count - 1]; |
3957 | 159 | if (!memcmp(track->handlerType, "pict", 4) || !memcmp(track->handlerType, "vide", 4) || |
3958 | 159 | !memcmp(track->handlerType, "auxv", 4)) { |
3959 | 42 | if ((track->width == 0) || (track->height == 0)) { |
3960 | 3 | avifDiagnosticsPrintf(data->diag, "Track ID [%u] has an invalid size [%ux%u]", track->id, track->width, track->height); |
3961 | 3 | return AVIF_RESULT_BMFF_PARSE_FAILED; |
3962 | 3 | } |
3963 | 39 | if (avifDimensionsTooLarge(track->width, track->height, imageSizeLimit, imageDimensionLimit)) { |
3964 | 2 | avifDiagnosticsPrintf(data->diag, |
3965 | 2 | "Track ID [%u] dimensions are too large [%ux%u]", |
3966 | 2 | track->id, |
3967 | 2 | track->width, |
3968 | 2 | track->height); |
3969 | 2 | return AVIF_RESULT_BMFF_PARSE_FAILED; |
3970 | 2 | } |
3971 | 39 | } |
3972 | 159 | } |
3973 | | |
3974 | 746 | AVIF_CHECKERR(avifROStreamSkip(&s, header.size), AVIF_RESULT_BMFF_PARSE_FAILED); |
3975 | 746 | } |
3976 | 76 | if (!hasTrak) { |
3977 | 4 | avifDiagnosticsPrintf(data->diag, "moov box does not contain any tracks"); |
3978 | 4 | return AVIF_RESULT_BMFF_PARSE_FAILED; |
3979 | 4 | } |
3980 | 72 | return AVIF_RESULT_OK; |
3981 | 76 | } |
3982 | | |
3983 | | #if defined(AVIF_ENABLE_EXPERIMENTAL_MINI) |
3984 | | static avifProperty * avifMetaCreateProperty(avifMeta * meta, const char * propertyType) |
3985 | | { |
3986 | | avifProperty * metaProperty = avifArrayPush(&meta->properties); |
3987 | | AVIF_CHECK(metaProperty); |
3988 | | memcpy(metaProperty->type, propertyType, 4); |
3989 | | return metaProperty; |
3990 | | } |
3991 | | |
3992 | | static avifProperty * avifDecoderItemAddProperty(avifDecoderItem * item, const avifProperty * metaProperty) |
3993 | | { |
3994 | | avifProperty * itemProperty = avifArrayPush(&item->properties); |
3995 | | AVIF_CHECK(itemProperty); |
3996 | | *itemProperty = *metaProperty; |
3997 | | return itemProperty; |
3998 | | } |
3999 | | |
4000 | | static avifResult avifParseMinimizedImageBox(avifDecoderData * data, |
4001 | | uint64_t rawOffset, |
4002 | | const uint8_t * raw, |
4003 | | size_t rawLen, |
4004 | | avifBool isAvifAccordingToMinorVersion, |
4005 | | avifDiagnostics * diag) |
4006 | | { |
4007 | | avifMeta * meta = data->meta; |
4008 | | BEGIN_STREAM(s, raw, rawLen, diag, "Box[mini]"); |
4009 | | |
4010 | | meta->fromMiniBox = AVIF_TRUE; |
4011 | | |
4012 | | uint32_t version; |
4013 | | AVIF_CHECKERR(avifROStreamReadBitsU32(&s, &version, 2), AVIF_RESULT_BMFF_PARSE_FAILED); // bit(2) version = 0; |
4014 | | AVIF_CHECKERR(version == 0, AVIF_RESULT_BMFF_PARSE_FAILED); |
4015 | | |
4016 | | // flags |
4017 | | uint32_t hasExplicitCodecTypes, floatFlag, fullRange, hasAlpha, hasExplicitCicp, hasHdr, hasIcc, hasExif, hasXmp; |
4018 | | AVIF_CHECKERR(avifROStreamReadBitsU32(&s, &hasExplicitCodecTypes, 1), AVIF_RESULT_BMFF_PARSE_FAILED); // bit(1) explicit_codec_types_flag; |
4019 | | AVIF_CHECKERR(avifROStreamReadBitsU32(&s, &floatFlag, 1), AVIF_RESULT_BMFF_PARSE_FAILED); // bit(1) float_flag; |
4020 | | AVIF_CHECKERR(avifROStreamReadBitsU32(&s, &fullRange, 1), AVIF_RESULT_BMFF_PARSE_FAILED); // bit(1) full_range_flag; |
4021 | | AVIF_CHECKERR(avifROStreamReadBitsU32(&s, &hasAlpha, 1), AVIF_RESULT_BMFF_PARSE_FAILED); // bit(1) alpha_flag; |
4022 | | AVIF_CHECKERR(avifROStreamReadBitsU32(&s, &hasExplicitCicp, 1), AVIF_RESULT_BMFF_PARSE_FAILED); // bit(1) explicit_cicp_flag; |
4023 | | AVIF_CHECKERR(avifROStreamReadBitsU32(&s, &hasHdr, 1), AVIF_RESULT_BMFF_PARSE_FAILED); // bit(1) hdr_flag; |
4024 | | AVIF_CHECKERR(avifROStreamReadBitsU32(&s, &hasIcc, 1), AVIF_RESULT_BMFF_PARSE_FAILED); // bit(1) icc_flag; |
4025 | | AVIF_CHECKERR(avifROStreamReadBitsU32(&s, &hasExif, 1), AVIF_RESULT_BMFF_PARSE_FAILED); // bit(1) exif_flag; |
4026 | | AVIF_CHECKERR(avifROStreamReadBitsU32(&s, &hasXmp, 1), AVIF_RESULT_BMFF_PARSE_FAILED); // bit(1) xmp_flag; |
4027 | | |
4028 | | uint32_t chromaSubsampling, orientation; |
4029 | | AVIF_CHECKERR(avifROStreamReadBitsU32(&s, &chromaSubsampling, 2), AVIF_RESULT_BMFF_PARSE_FAILED); // bit(2) chroma_subsampling; |
4030 | | AVIF_CHECKERR(avifROStreamReadBitsU32(&s, &orientation, 3), AVIF_RESULT_BMFF_PARSE_FAILED); // bit(3) orientation_minus1; |
4031 | | ++orientation; |
4032 | | |
4033 | | // Spatial extents |
4034 | | uint32_t largeDimensionsFlag, width, height; |
4035 | | AVIF_CHECKERR(avifROStreamReadBitsU32(&s, &largeDimensionsFlag, 1), AVIF_RESULT_BMFF_PARSE_FAILED); // bit(1) large_dimensions_flag; |
4036 | | AVIF_CHECKERR(avifROStreamReadBitsU32(&s, &width, largeDimensionsFlag ? 15 : 7), |
4037 | | AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(large_dimensions_flag ? 15 : 7) width_minus1; |
4038 | | ++width; |
4039 | | AVIF_CHECKERR(avifROStreamReadBitsU32(&s, &height, largeDimensionsFlag ? 15 : 7), |
4040 | | AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(large_dimensions_flag ? 15 : 7) height_minus1; |
4041 | | ++height; |
4042 | | |
4043 | | // Pixel information |
4044 | | uint32_t chromaIsHorizontallyCentered = 0, chromaIsVerticallyCentered = 0; |
4045 | | if (chromaSubsampling == 1 || chromaSubsampling == 2) { |
4046 | | AVIF_CHECKERR(avifROStreamReadBitsU32(&s, &chromaIsHorizontallyCentered, 1), |
4047 | | AVIF_RESULT_BMFF_PARSE_FAILED); // bit(1) chroma_is_horizontally_centered; |
4048 | | } |
4049 | | if (chromaSubsampling == 1) { |
4050 | | AVIF_CHECKERR(avifROStreamReadBitsU32(&s, &chromaIsVerticallyCentered, 1), AVIF_RESULT_BMFF_PARSE_FAILED); // bit(1) chroma_is_vertically_centered; |
4051 | | } |
4052 | | |
4053 | | uint32_t bitDepth; |
4054 | | if (floatFlag) { |
4055 | | // bit(2) bit_depth_log2_minus4; |
4056 | | return AVIF_RESULT_BMFF_PARSE_FAILED; // Either invalid AVIF or unsupported non-AVIF. |
4057 | | } else { |
4058 | | uint32_t highBitDepthFlag; |
4059 | | AVIF_CHECKERR(avifROStreamReadBitsU32(&s, &highBitDepthFlag, 1), AVIF_RESULT_BMFF_PARSE_FAILED); // bit(1) high_bit_depth_flag; |
4060 | | if (highBitDepthFlag) { |
4061 | | AVIF_CHECKERR(avifROStreamReadBitsU32(&s, &bitDepth, 3), AVIF_RESULT_BMFF_PARSE_FAILED); // bit(3) bit_depth_minus9; |
4062 | | bitDepth += 9; |
4063 | | } else { |
4064 | | bitDepth = 8; |
4065 | | } |
4066 | | } |
4067 | | |
4068 | | uint32_t alphaIsPremultiplied = 0; |
4069 | | if (hasAlpha) { |
4070 | | AVIF_CHECKERR(avifROStreamReadBitsU32(&s, &alphaIsPremultiplied, 1), AVIF_RESULT_BMFF_PARSE_FAILED); // bit(1) alpha_is_premultiplied; |
4071 | | } |
4072 | | |
4073 | | // Colour properties |
4074 | | uint8_t colorPrimaries; |
4075 | | uint8_t transferCharacteristics; |
4076 | | uint8_t matrixCoefficients; |
4077 | | if (hasExplicitCicp) { |
4078 | | AVIF_CHECKERR(avifROStreamReadBitsU8(&s, &colorPrimaries, 8), AVIF_RESULT_BMFF_PARSE_FAILED); // bit(8) colour_primaries; |
4079 | | AVIF_CHECKERR(avifROStreamReadBitsU8(&s, &transferCharacteristics, 8), AVIF_RESULT_BMFF_PARSE_FAILED); // bit(8) transfer_characteristics; |
4080 | | if (chromaSubsampling != 0) { |
4081 | | AVIF_CHECKERR(avifROStreamReadBitsU8(&s, &matrixCoefficients, 8), AVIF_RESULT_BMFF_PARSE_FAILED); // bit(8) matrix_coefficients; |
4082 | | } else { |
4083 | | matrixCoefficients = AVIF_MATRIX_COEFFICIENTS_UNSPECIFIED; // 2 |
4084 | | } |
4085 | | } else { |
4086 | | colorPrimaries = hasIcc ? AVIF_COLOR_PRIMARIES_UNSPECIFIED // 2 |
4087 | | : AVIF_COLOR_PRIMARIES_BT709; // 1 |
4088 | | transferCharacteristics = hasIcc ? AVIF_TRANSFER_CHARACTERISTICS_UNSPECIFIED // 2 |
4089 | | : AVIF_TRANSFER_CHARACTERISTICS_SRGB; // 13 |
4090 | | matrixCoefficients = chromaSubsampling == 0 ? AVIF_MATRIX_COEFFICIENTS_UNSPECIFIED // 2 |
4091 | | : AVIF_MATRIX_COEFFICIENTS_BT601; // 6 |
4092 | | } |
4093 | | |
4094 | | uint8_t infeType[4]; |
4095 | | uint8_t codecConfigType[4]; |
4096 | | if (hasExplicitCodecTypes) { |
4097 | | // bit(32) infe_type; |
4098 | | for (int i = 0; i < 4; ++i) { |
4099 | | AVIF_CHECKERR(avifROStreamReadBitsU8(&s, &infeType[i], 8), AVIF_RESULT_BMFF_PARSE_FAILED); |
4100 | | } |
4101 | | // bit(32) codec_config_type; |
4102 | | for (int i = 0; i < 4; ++i) { |
4103 | | AVIF_CHECKERR(avifROStreamReadBitsU8(&s, &codecConfigType[i], 8), AVIF_RESULT_BMFF_PARSE_FAILED); |
4104 | | } |
4105 | | #if defined(AVIF_CODEC_AVM) |
4106 | | AVIF_CHECKERR((!memcmp(infeType, "av01", 4) && !memcmp(codecConfigType, "av1C", 4)) || |
4107 | | (!memcmp(infeType, "av02", 4) && !memcmp(codecConfigType, "av2C", 4)), |
4108 | | AVIF_RESULT_BMFF_PARSE_FAILED); |
4109 | | #else |
4110 | | AVIF_CHECKERR(!memcmp(infeType, "av01", 4) && !memcmp(codecConfigType, "av1C", 4), AVIF_RESULT_BMFF_PARSE_FAILED); |
4111 | | #endif |
4112 | | } else { |
4113 | | AVIF_CHECKERR(isAvifAccordingToMinorVersion, AVIF_RESULT_BMFF_PARSE_FAILED); |
4114 | | memcpy(infeType, "av01", 4); |
4115 | | memcpy(codecConfigType, "av1C", 4); |
4116 | | } |
4117 | | |
4118 | | // High Dynamic Range properties |
4119 | | uint32_t hasGainmap = AVIF_FALSE; |
4120 | | uint32_t tmapHasIcc = AVIF_FALSE; |
4121 | | uint32_t gainmapWidth = 0, gainmapHeight = 0; |
4122 | | uint8_t gainmapMatrixCoefficients = 0; |
4123 | | uint32_t gainmapFullRange = 0; |
4124 | | uint32_t gainmapChromaSubsampling = 0; |
4125 | | uint32_t gainmapBitDepth = 0; |
4126 | | uint32_t tmapHasExplicitCicp = AVIF_FALSE; |
4127 | | uint8_t tmapColorPrimaries = AVIF_COLOR_PRIMARIES_UNKNOWN; |
4128 | | uint8_t tmapTransferCharacteristics = AVIF_TRANSFER_CHARACTERISTICS_UNKNOWN; |
4129 | | uint8_t tmapMatrixCoefficients = AVIF_MATRIX_COEFFICIENTS_IDENTITY; |
4130 | | uint32_t tmapFullRange = AVIF_FALSE; |
4131 | | uint32_t hasClli = AVIF_FALSE, tmapHasClli = AVIF_FALSE; |
4132 | | avifContentLightLevelInformationBox clli = { 0 }, tmapClli = { 0 }; |
4133 | | if (hasHdr) { |
4134 | | AVIF_CHECKERR(avifROStreamReadBitsU32(&s, &hasGainmap, 1), AVIF_RESULT_BMFF_PARSE_FAILED); // bit(1) gainmap_flag; |
4135 | | if (hasGainmap) { |
4136 | | // avifDecoderReset() requires the 'tmap' brand to be registered for the tone mapping derived image item to be parsed. |
4137 | | if (data->compatibleBrands.capacity == 0) { |
4138 | | AVIF_CHECKERR(avifArrayCreate(&data->compatibleBrands, sizeof(avifBrand), 1), AVIF_RESULT_OUT_OF_MEMORY); |
4139 | | } |
4140 | | avifBrand * brand = avifArrayPush(&data->compatibleBrands); |
4141 | | AVIF_CHECKERR(brand != NULL, AVIF_RESULT_OUT_OF_MEMORY); |
4142 | | memcpy(brand, "tmap", sizeof(avifBrand)); |
4143 | | |
4144 | | AVIF_CHECKERR(avifROStreamReadBitsU32(&s, &gainmapWidth, largeDimensionsFlag ? 15 : 7), |
4145 | | AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(large_dimensions_flag ? 15 : 7) gainmap_width_minus1; |
4146 | | ++gainmapWidth; |
4147 | | AVIF_CHECKERR(avifROStreamReadBitsU32(&s, &gainmapHeight, largeDimensionsFlag ? 15 : 7), |
4148 | | AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(large_dimensions_flag ? 15 : 7) gainmap_height_minus1; |
4149 | | ++gainmapHeight; |
4150 | | AVIF_CHECKERR(avifROStreamReadBitsU8(&s, &gainmapMatrixCoefficients, 8), AVIF_RESULT_BMFF_PARSE_FAILED); // bit(8) gainmap_matrix_coefficients; |
4151 | | AVIF_CHECKERR(avifROStreamReadBitsU32(&s, &gainmapFullRange, 1), AVIF_RESULT_BMFF_PARSE_FAILED); // bit(1) gainmap_full_range_flag; |
4152 | | |
4153 | | AVIF_CHECKERR(avifROStreamReadBitsU32(&s, &gainmapChromaSubsampling, 2), AVIF_RESULT_BMFF_PARSE_FAILED); // bit(2) gainmap_chroma_subsampling; |
4154 | | uint32_t gainmapChromaIsHorizontallyCentered = 0, gainmapChromaIsVerticallyCentered = 0; |
4155 | | if (gainmapChromaSubsampling == 1 || gainmapChromaSubsampling == 2) { |
4156 | | AVIF_CHECKERR(avifROStreamReadBitsU32(&s, &gainmapChromaIsHorizontallyCentered, 1), |
4157 | | AVIF_RESULT_BMFF_PARSE_FAILED); // bit(1) gainmap_chroma_is_horizontally_centered; |
4158 | | } |
4159 | | if (gainmapChromaSubsampling == 1) { |
4160 | | AVIF_CHECKERR(avifROStreamReadBitsU32(&s, &gainmapChromaIsVerticallyCentered, 1), |
4161 | | AVIF_RESULT_BMFF_PARSE_FAILED); // bit(1) gainmap_chroma_is_vertically_centered; |
4162 | | } |
4163 | | |
4164 | | uint32_t gainmapFloatFlag; |
4165 | | AVIF_CHECKERR(avifROStreamReadBitsU32(&s, &gainmapFloatFlag, 1), AVIF_RESULT_BMFF_PARSE_FAILED); // bit(1) gainmap_float_flag; |
4166 | | if (gainmapFloatFlag) { |
4167 | | // bit(2) gainmap_bit_depth_log2_minus4; |
4168 | | return AVIF_RESULT_BMFF_PARSE_FAILED; // Either invalid AVIF or unsupported non-AVIF. |
4169 | | } else { |
4170 | | uint32_t gainmapHighBitDepthFlag; |
4171 | | AVIF_CHECKERR(avifROStreamReadBitsU32(&s, &gainmapHighBitDepthFlag, 1), AVIF_RESULT_BMFF_PARSE_FAILED); // bit(1) gainmap_high_bit_depth_flag; |
4172 | | if (gainmapHighBitDepthFlag) { |
4173 | | AVIF_CHECKERR(avifROStreamReadBitsU32(&s, &gainmapBitDepth, 3), AVIF_RESULT_BMFF_PARSE_FAILED); // bit(3) gainmap_bit_depth_minus9; |
4174 | | gainmapBitDepth += 9; |
4175 | | } else { |
4176 | | gainmapBitDepth = 8; |
4177 | | } |
4178 | | } |
4179 | | |
4180 | | AVIF_CHECKERR(avifROStreamReadBitsU32(&s, &tmapHasIcc, 1), AVIF_RESULT_BMFF_PARSE_FAILED); // bit(1) tmap_icc_flag; |
4181 | | AVIF_CHECKERR(avifROStreamReadBitsU32(&s, &tmapHasExplicitCicp, 1), AVIF_RESULT_BMFF_PARSE_FAILED); // bit(1) tmap_explicit_cicp_flag; |
4182 | | if (tmapHasExplicitCicp) { |
4183 | | AVIF_CHECKERR(avifROStreamReadBitsU8(&s, &tmapColorPrimaries, 8), AVIF_RESULT_BMFF_PARSE_FAILED); // bit(8) tmap_colour_primaries; |
4184 | | AVIF_CHECKERR(avifROStreamReadBitsU8(&s, &tmapTransferCharacteristics, 8), |
4185 | | AVIF_RESULT_BMFF_PARSE_FAILED); // bit(8) tmap_transfer_characteristics; |
4186 | | AVIF_CHECKERR(avifROStreamReadBitsU8(&s, &tmapMatrixCoefficients, 8), AVIF_RESULT_BMFF_PARSE_FAILED); // bit(8) tmap_matrix_coefficients; |
4187 | | AVIF_CHECKERR(avifROStreamReadBitsU32(&s, &tmapFullRange, 1), AVIF_RESULT_BMFF_PARSE_FAILED); // bit(1) tmap_full_range_flag; |
4188 | | } else { |
4189 | | tmapColorPrimaries = AVIF_COLOR_PRIMARIES_BT709; // 1 |
4190 | | tmapTransferCharacteristics = AVIF_TRANSFER_CHARACTERISTICS_SRGB; // 13 |
4191 | | tmapMatrixCoefficients = AVIF_MATRIX_COEFFICIENTS_BT601; // 6 |
4192 | | tmapFullRange = 1; |
4193 | | } |
4194 | | } |
4195 | | AVIF_CHECKRES(avifParseMiniHDRProperties(&s, &hasClli, &clli)); |
4196 | | if (hasGainmap) { |
4197 | | AVIF_CHECKRES(avifParseMiniHDRProperties(&s, &tmapHasClli, &tmapClli)); |
4198 | | } |
4199 | | } |
4200 | | |
4201 | | // Chunk sizes |
4202 | | uint32_t largeMetadataFlag = 0, largeCodecConfigFlag = 0, largeItemDataFlag = 0; |
4203 | | if (hasIcc || hasExif || hasXmp || (hasHdr && hasGainmap)) { |
4204 | | AVIF_CHECKERR(avifROStreamReadBitsU32(&s, &largeMetadataFlag, 1), AVIF_RESULT_BMFF_PARSE_FAILED); // bit(1) large_metadata_flag; |
4205 | | } |
4206 | | AVIF_CHECKERR(avifROStreamReadBitsU32(&s, &largeCodecConfigFlag, 1), AVIF_RESULT_BMFF_PARSE_FAILED); // bit(1) large_codec_config_flag; |
4207 | | AVIF_CHECKERR(avifROStreamReadBitsU32(&s, &largeItemDataFlag, 1), AVIF_RESULT_BMFF_PARSE_FAILED); // bit(1) large_item_data_flag; |
4208 | | |
4209 | | uint32_t iccDataSize = 0; |
4210 | | if (hasIcc) { |
4211 | | AVIF_CHECKERR(avifROStreamReadBitsU32(&s, &iccDataSize, largeMetadataFlag ? 20 : 10), |
4212 | | AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(large_metadata_flag ? 20 : 10) icc_data_size_minus1; |
4213 | | ++iccDataSize; |
4214 | | } |
4215 | | uint32_t tmapIccDataSize = 0; |
4216 | | if (hasHdr && hasGainmap && tmapHasIcc) { |
4217 | | AVIF_CHECKERR(avifROStreamReadBitsU32(&s, &tmapIccDataSize, largeMetadataFlag ? 20 : 10), |
4218 | | AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(large_metadata_flag ? 20 : 10) tmap_icc_data_size_minus1; |
4219 | | ++tmapIccDataSize; |
4220 | | } |
4221 | | |
4222 | | uint32_t gainmapMetadataSize = 0, gainmapItemDataSize = 0, gainmapItemCodecConfigSize = 0; |
4223 | | if (hasHdr && hasGainmap) { |
4224 | | AVIF_CHECKERR(avifROStreamReadBitsU32(&s, &gainmapMetadataSize, largeMetadataFlag ? 20 : 10), |
4225 | | AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(large_metadata_flag ? 20 : 10) gainmap_metadata_size; |
4226 | | AVIF_CHECKERR(avifROStreamReadBitsU32(&s, &gainmapItemDataSize, largeItemDataFlag ? 28 : 15), |
4227 | | AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(large_item_data_flag ? 28 : 15) gainmap_item_data_size; |
4228 | | if (gainmapItemDataSize > 0) { |
4229 | | AVIF_CHECKERR(avifROStreamReadBitsU32(&s, &gainmapItemCodecConfigSize, largeCodecConfigFlag ? 12 : 3), |
4230 | | AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(large_codec_config_flag ? 12 : 3) gainmap_item_codec_config_size; |
4231 | | } |
4232 | | } |
4233 | | |
4234 | | uint32_t mainItemCodecConfigSize, mainItemDataSize; |
4235 | | AVIF_CHECKERR(avifROStreamReadBitsU32(&s, &mainItemCodecConfigSize, largeCodecConfigFlag ? 12 : 3), |
4236 | | AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(large_codec_config_flag ? 12 : 3) main_item_codec_config_size; |
4237 | | AVIF_CHECKERR(avifROStreamReadBitsU32(&s, &mainItemDataSize, largeItemDataFlag ? 28 : 15), |
4238 | | AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(large_item_data_flag ? 28 : 15) main_item_data_size_minus1; |
4239 | | ++mainItemDataSize; |
4240 | | |
4241 | | uint32_t alphaItemCodecConfigSize = 0, alphaItemDataSize = 0; |
4242 | | if (hasAlpha) { |
4243 | | AVIF_CHECKERR(avifROStreamReadBitsU32(&s, &alphaItemDataSize, largeItemDataFlag ? 28 : 15), |
4244 | | AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(large_item_data_flag ? 28 : 15) alpha_item_data_size; |
4245 | | } |
4246 | | if (hasAlpha && alphaItemDataSize != 0) { |
4247 | | AVIF_CHECKERR(avifROStreamReadBitsU32(&s, &alphaItemCodecConfigSize, largeCodecConfigFlag ? 12 : 3), |
4248 | | AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(large_codec_config_flag ? 12 : 3) alpha_item_codec_config_size; |
4249 | | } |
4250 | | |
4251 | | if (hasExif || hasXmp) { |
4252 | | uint8_t exifXmpCompressedFlag; |
4253 | | AVIF_CHECKERR(avifROStreamReadBitsU8(&s, &exifXmpCompressedFlag, 1), |
4254 | | AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(1) exif_xmp_compressed_flag; |
4255 | | AVIF_CHECKERR(!exifXmpCompressedFlag, AVIF_RESULT_NOT_IMPLEMENTED); |
4256 | | } |
4257 | | uint32_t exifDataSize = 0; |
4258 | | if (hasExif) { |
4259 | | AVIF_CHECKERR(avifROStreamReadBitsU32(&s, &exifDataSize, largeMetadataFlag ? 20 : 10), |
4260 | | AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(large_metadata_flag ? 20 : 10) exif_data_size_minus_one; |
4261 | | ++exifDataSize; |
4262 | | } |
4263 | | uint32_t xmpDataSize = 0; |
4264 | | if (hasXmp) { |
4265 | | AVIF_CHECKERR(avifROStreamReadBitsU32(&s, &xmpDataSize, largeMetadataFlag ? 20 : 10), |
4266 | | AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(large_metadata_flag ? 20 : 10) xmp_data_size_minus_one; |
4267 | | ++xmpDataSize; |
4268 | | } |
4269 | | |
4270 | | // trailing_bits(); // bit padding till byte alignment |
4271 | | if (s.numUsedBitsInPartialByte) { |
4272 | | uint32_t padding; |
4273 | | AVIF_CHECKERR(avifROStreamReadBitsU32(&s, &padding, 8 - s.numUsedBitsInPartialByte), AVIF_RESULT_BMFF_PARSE_FAILED); |
4274 | | AVIF_CHECKERR(padding == 0, AVIF_RESULT_BMFF_PARSE_FAILED); // Only accept zeros as padding. |
4275 | | } |
4276 | | |
4277 | | // Codec configuration ('av1C' always uses 4 bytes) |
4278 | | avifCodecConfigurationBox mainItemCodecConfig; |
4279 | | AVIF_CHECKERR(mainItemCodecConfigSize == 4, AVIF_RESULT_BMFF_PARSE_FAILED); |
4280 | | AVIF_CHECKERR(avifParseCodecConfiguration(&s, &mainItemCodecConfig, (const char *)codecConfigType, diag), |
4281 | | AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(8) main_item_codec_config[main_item_codec_config_size]; |
4282 | | avifCodecConfigurationBox alphaItemCodecConfig = { 0 }; |
4283 | | if (hasAlpha && alphaItemDataSize != 0) { |
4284 | | if (alphaItemCodecConfigSize == 0) { |
4285 | | alphaItemCodecConfigSize = mainItemCodecConfigSize; |
4286 | | alphaItemCodecConfig = mainItemCodecConfig; |
4287 | | } else { |
4288 | | AVIF_CHECKERR(alphaItemCodecConfigSize == 4, AVIF_RESULT_BMFF_PARSE_FAILED); |
4289 | | AVIF_CHECKERR(avifParseCodecConfiguration(&s, &alphaItemCodecConfig, (const char *)codecConfigType, diag), |
4290 | | AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(8) alpha_item_codec_config[alpha_item_codec_config_size]; |
4291 | | } |
4292 | | } |
4293 | | avifCodecConfigurationBox gainmapItemCodecConfig = { 0 }; |
4294 | | if (hasHdr && hasGainmap) { |
4295 | | if (gainmapItemCodecConfigSize == 0) { |
4296 | | gainmapItemCodecConfigSize = mainItemCodecConfigSize; |
4297 | | gainmapItemCodecConfig = mainItemCodecConfig; |
4298 | | } else { |
4299 | | AVIF_CHECKERR(gainmapItemCodecConfigSize == 4, AVIF_RESULT_BMFF_PARSE_FAILED); |
4300 | | AVIF_CHECKERR(avifParseCodecConfiguration(&s, &gainmapItemCodecConfig, (const char *)codecConfigType, diag), |
4301 | | AVIF_RESULT_BMFF_PARSE_FAILED); // unsigned int(8) gainmap_item_codec_config[gainmap_item_codec_config_size]; |
4302 | | } |
4303 | | } |
4304 | | |
4305 | | // Make sure all metadata and coded chunks fit into the 'meta' box whose size is rawLen. |
4306 | | // There should be no missing nor unused byte. |
4307 | | |
4308 | | AVIF_CHECKERR(avifROStreamRemainingBytes(&s) == (uint64_t)iccDataSize + tmapIccDataSize + gainmapMetadataSize + alphaItemDataSize + |
4309 | | gainmapItemDataSize + mainItemDataSize + exifDataSize + xmpDataSize, |
4310 | | AVIF_RESULT_BMFF_PARSE_FAILED); |
4311 | | |
4312 | | // Create the items and properties generated by the MinimizedImageBox. |
4313 | | // The MinimizedImageBox always creates 8 properties for specification easiness. |
4314 | | // Use FreeSpaceBoxes as no-op placeholder properties when necessary. |
4315 | | // There is no need to use placeholder items because item IDs do not have to |
4316 | | // be contiguous, whereas property indices shall be 1, 2, 3, 4, 5 etc. |
4317 | | |
4318 | | meta->primaryItemID = 1; |
4319 | | avifDecoderItem * colorItem; |
4320 | | AVIF_CHECKRES(avifMetaFindOrCreateItem(meta, meta->primaryItemID, &colorItem)); |
4321 | | memcpy(colorItem->type, infeType, 4); |
4322 | | colorItem->width = width; |
4323 | | colorItem->height = height; |
4324 | | colorItem->miniBoxPixelFormat = chromaSubsampling == 0 ? AVIF_PIXEL_FORMAT_YUV400 |
4325 | | : chromaSubsampling == 1 ? AVIF_PIXEL_FORMAT_YUV420 |
4326 | | : chromaSubsampling == 2 ? AVIF_PIXEL_FORMAT_YUV422 |
4327 | | : AVIF_PIXEL_FORMAT_YUV444; |
4328 | | if (colorItem->miniBoxPixelFormat == AVIF_PIXEL_FORMAT_YUV422) { |
4329 | | // In AV1, the chroma_sample_position syntax element is not present for the YUV 4:2:2 format. |
4330 | | // Assume that AV1 uses the same 4:2:2 chroma sample location as HEVC and VVC (colocated). |
4331 | | AVIF_CHECKERR(!chromaIsHorizontallyCentered, AVIF_RESULT_BMFF_PARSE_FAILED); |
4332 | | // chromaIsVerticallyCentered: Ignored unless chroma_subsampling is 1. |
4333 | | colorItem->miniBoxChromaSamplePosition = AVIF_CHROMA_SAMPLE_POSITION_UNKNOWN; |
4334 | | } else if (colorItem->miniBoxPixelFormat == AVIF_PIXEL_FORMAT_YUV420) { |
4335 | | if (chromaIsHorizontallyCentered) { |
4336 | | // There is no way to describe this with AV1's chroma_sample_position enum besides CSP_UNKNOWN. |
4337 | | // There is a proposal to assign the reserved value 3 (CSP_RESERVED) to the center chroma sample position. |
4338 | | colorItem->miniBoxChromaSamplePosition = AVIF_CHROMA_SAMPLE_POSITION_UNKNOWN; |
4339 | | } else { |
4340 | | colorItem->miniBoxChromaSamplePosition = chromaIsVerticallyCentered ? AVIF_CHROMA_SAMPLE_POSITION_VERTICAL |
4341 | | : AVIF_CHROMA_SAMPLE_POSITION_COLOCATED; |
4342 | | } |
4343 | | } else { |
4344 | | // chromaIsHorizontallyCentered: Ignored unless chroma_subsampling is 1 or 2. |
4345 | | // chromaIsVerticallyCentered: Ignored unless chroma_subsampling is 1. |
4346 | | colorItem->miniBoxChromaSamplePosition = AVIF_CHROMA_SAMPLE_POSITION_UNKNOWN; |
4347 | | } |
4348 | | |
4349 | | avifDecoderItem * alphaItem = NULL; |
4350 | | if (hasAlpha) { |
4351 | | AVIF_CHECKRES(avifMetaFindOrCreateItem(meta, /*itemID=*/2, &alphaItem)); |
4352 | | memcpy(alphaItem->type, infeType, 4); |
4353 | | alphaItem->width = width; |
4354 | | alphaItem->height = height; |
4355 | | alphaItem->miniBoxPixelFormat = AVIF_PIXEL_FORMAT_YUV400; |
4356 | | alphaItem->miniBoxChromaSamplePosition = AVIF_CHROMA_SAMPLE_POSITION_UNKNOWN; |
4357 | | } |
4358 | | |
4359 | | avifDecoderItem * tmapItem = NULL; |
4360 | | if (hasGainmap) { |
4361 | | AVIF_CHECKRES(avifMetaFindOrCreateItem(meta, /*itemID=*/3, &tmapItem)); |
4362 | | memcpy(tmapItem->type, "tmap", 4); |
4363 | | colorItem->dimgForID = tmapItem->id; |
4364 | | colorItem->dimgIdx = 0; |
4365 | | |
4366 | | // avifDecoderReset() requires the 'tmap' item to be an alternative to the primary item. |
4367 | | avifEntityToGroup * group = avifArrayPush(&data->meta->entityToGroups); |
4368 | | AVIF_CHECKERR(group != NULL, AVIF_RESULT_OUT_OF_MEMORY); |
4369 | | memcpy(group->groupingType, "altr", 4); |
4370 | | AVIF_CHECKERR(avifArrayCreate(&group->entityIDs, sizeof(uint32_t), 2), AVIF_RESULT_OUT_OF_MEMORY); |
4371 | | uint32_t * groupEntityId = avifArrayPush(&group->entityIDs); |
4372 | | AVIF_CHECKERR(groupEntityId != NULL, AVIF_RESULT_OUT_OF_MEMORY); |
4373 | | *groupEntityId = tmapItem->id; |
4374 | | groupEntityId = avifArrayPush(&group->entityIDs); |
4375 | | AVIF_CHECKERR(groupEntityId != NULL, AVIF_RESULT_OUT_OF_MEMORY); |
4376 | | *groupEntityId = colorItem->id; |
4377 | | } |
4378 | | avifDecoderItem * gainmapItem = NULL; |
4379 | | if (gainmapItemDataSize != 0) { |
4380 | | AVIF_CHECKRES(avifMetaFindOrCreateItem(meta, /*itemID=*/4, &gainmapItem)); |
4381 | | memcpy(gainmapItem->type, infeType, 4); |
4382 | | gainmapItem->width = gainmapWidth; |
4383 | | gainmapItem->height = gainmapHeight; |
4384 | | gainmapItem->dimgForID = tmapItem->id; |
4385 | | gainmapItem->dimgIdx = 1; |
4386 | | } |
4387 | | |
4388 | | // Property with fixed index 1. |
4389 | | avifProperty * colorCodecConfigProp = avifMetaCreateProperty(meta, (const char *)codecConfigType); |
4390 | | AVIF_CHECKERR(colorCodecConfigProp, AVIF_RESULT_OUT_OF_MEMORY); |
4391 | | colorCodecConfigProp->u.av1C = mainItemCodecConfig; |
4392 | | AVIF_CHECKERR(avifDecoderItemAddProperty(colorItem, colorCodecConfigProp), AVIF_RESULT_OUT_OF_MEMORY); |
4393 | | |
4394 | | // Property with fixed index 2. |
4395 | | avifProperty * ispeProp = avifMetaCreateProperty(meta, "ispe"); |
4396 | | AVIF_CHECKERR(ispeProp, AVIF_RESULT_OUT_OF_MEMORY); |
4397 | | ispeProp->u.ispe.width = width; |
4398 | | ispeProp->u.ispe.height = height; |
4399 | | AVIF_CHECKERR(avifDecoderItemAddProperty(colorItem, ispeProp), AVIF_RESULT_OUT_OF_MEMORY); |
4400 | | |
4401 | | // Property with fixed index 3. |
4402 | | avifProperty * pixiProp = avifMetaCreateProperty(meta, "pixi"); |
4403 | | AVIF_CHECKERR(pixiProp, AVIF_RESULT_OUT_OF_MEMORY); |
4404 | | pixiProp->u.pixi.planeCount = chromaSubsampling == 0 ? 1 : 3; |
4405 | | for (uint8_t plane = 0; plane < pixiProp->u.pixi.planeCount; ++plane) { |
4406 | | pixiProp->u.pixi.planeDepths[plane] = (uint8_t)bitDepth; |
4407 | | } |
4408 | | AVIF_CHECKERR(avifDecoderItemAddProperty(colorItem, pixiProp), AVIF_RESULT_OUT_OF_MEMORY); |
4409 | | |
4410 | | // Property with fixed index 4. |
4411 | | avifProperty * colrPropNCLX = avifMetaCreateProperty(meta, "colr"); |
4412 | | AVIF_CHECKERR(colrPropNCLX, AVIF_RESULT_OUT_OF_MEMORY); |
4413 | | colrPropNCLX->u.colr.hasNCLX = AVIF_TRUE; // colour_type "nclx" |
4414 | | colrPropNCLX->u.colr.colorPrimaries = (avifColorPrimaries)colorPrimaries; |
4415 | | colrPropNCLX->u.colr.transferCharacteristics = (avifTransferCharacteristics)transferCharacteristics; |
4416 | | colrPropNCLX->u.colr.matrixCoefficients = (avifMatrixCoefficients)matrixCoefficients; |
4417 | | colrPropNCLX->u.colr.range = fullRange ? AVIF_RANGE_FULL : AVIF_RANGE_LIMITED; |
4418 | | AVIF_CHECKERR(avifDecoderItemAddProperty(colorItem, colrPropNCLX), AVIF_RESULT_OUT_OF_MEMORY); |
4419 | | |
4420 | | // Property with fixed index 5. |
4421 | | if (iccDataSize != 0) { |
4422 | | avifProperty * colrPropICC = avifMetaCreateProperty(meta, "colr"); |
4423 | | AVIF_CHECKERR(colrPropICC, AVIF_RESULT_OUT_OF_MEMORY); |
4424 | | colrPropICC->u.colr.hasICC = AVIF_TRUE; // colour_type "rICC" or "prof" |
4425 | | colrPropICC->u.colr.iccOffset = rawOffset + avifROStreamOffset(&s); |
4426 | | colrPropICC->u.colr.iccSize = (size_t)iccDataSize; |
4427 | | AVIF_CHECKERR(avifROStreamSkip(&s, colrPropICC->u.colr.iccSize), AVIF_RESULT_BMFF_PARSE_FAILED); |
4428 | | AVIF_CHECKERR(avifDecoderItemAddProperty(colorItem, colrPropICC), AVIF_RESULT_OUT_OF_MEMORY); |
4429 | | } else { |
4430 | | AVIF_CHECKERR(avifMetaCreateProperty(meta, "skip"), AVIF_RESULT_OUT_OF_MEMORY); // Placeholder. |
4431 | | } |
4432 | | |
4433 | | if (hasAlpha) { |
4434 | | // Property with fixed index 6. |
4435 | | avifProperty * alphaCodecConfigProp = avifMetaCreateProperty(meta, (const char *)codecConfigType); |
4436 | | AVIF_CHECKERR(alphaCodecConfigProp, AVIF_RESULT_OUT_OF_MEMORY); |
4437 | | alphaCodecConfigProp->u.av1C = alphaItemCodecConfig; |
4438 | | AVIF_CHECKERR(avifDecoderItemAddProperty(alphaItem, alphaCodecConfigProp), AVIF_RESULT_OUT_OF_MEMORY); |
4439 | | |
4440 | | // Property with fixed index 7. |
4441 | | alphaItem->auxForID = colorItem->id; |
4442 | | colorItem->premByID = alphaIsPremultiplied; |
4443 | | avifProperty * alphaAuxProp = avifMetaCreateProperty(meta, "auxC"); |
4444 | | AVIF_CHECKERR(alphaAuxProp, AVIF_RESULT_OUT_OF_MEMORY); |
4445 | | strcpy(alphaAuxProp->u.auxC.auxType, AVIF_URN_ALPHA0); |
4446 | | AVIF_CHECKERR(avifDecoderItemAddProperty(alphaItem, alphaAuxProp), AVIF_RESULT_OUT_OF_MEMORY); |
4447 | | |
4448 | | // Property with fixed index 2 (reused). |
4449 | | AVIF_CHECKERR(avifDecoderItemAddProperty(alphaItem, ispeProp), AVIF_RESULT_OUT_OF_MEMORY); |
4450 | | |
4451 | | // Property with fixed index 8. |
4452 | | avifProperty * alphaPixiProp = avifMetaCreateProperty(meta, "pixi"); |
4453 | | AVIF_CHECKERR(alphaPixiProp, AVIF_RESULT_OUT_OF_MEMORY); |
4454 | | memcpy(alphaPixiProp->type, "pixi", 4); |
4455 | | alphaPixiProp->u.pixi.planeCount = 1; |
4456 | | alphaPixiProp->u.pixi.planeDepths[0] = (uint8_t)bitDepth; |
4457 | | AVIF_CHECKERR(avifDecoderItemAddProperty(alphaItem, alphaPixiProp), AVIF_RESULT_OUT_OF_MEMORY); |
4458 | | } else { |
4459 | | // Placeholders 6, 7 and 8. |
4460 | | AVIF_CHECKERR(avifMetaCreateProperty(meta, "skip"), AVIF_RESULT_OUT_OF_MEMORY); |
4461 | | AVIF_CHECKERR(avifMetaCreateProperty(meta, "skip"), AVIF_RESULT_OUT_OF_MEMORY); |
4462 | | AVIF_CHECKERR(avifMetaCreateProperty(meta, "skip"), AVIF_RESULT_OUT_OF_MEMORY); |
4463 | | } |
4464 | | |
4465 | | // Same behavior as avifImageExtractExifOrientationToIrotImir(). |
4466 | | if (orientation == 3 || orientation == 5 || orientation == 6 || orientation == 7 || orientation == 8) { |
4467 | | // Property with fixed index 9. |
4468 | | avifProperty * irotProp = avifMetaCreateProperty(meta, "irot"); |
4469 | | AVIF_CHECKERR(irotProp, AVIF_RESULT_OUT_OF_MEMORY); |
4470 | | irotProp->u.irot.angle = orientation == 3 ? 2 : (orientation == 5 || orientation == 8) ? 1 : 3; |
4471 | | AVIF_CHECKERR(avifDecoderItemAddProperty(colorItem, irotProp), AVIF_RESULT_OUT_OF_MEMORY); |
4472 | | } else { |
4473 | | AVIF_CHECKERR(avifMetaCreateProperty(meta, "skip"), AVIF_RESULT_OUT_OF_MEMORY); // Placeholder. |
4474 | | } |
4475 | | if (orientation == 2 || orientation == 4 || orientation == 5 || orientation == 7) { |
4476 | | // Property with fixed index 10. |
4477 | | avifProperty * imirProp = avifMetaCreateProperty(meta, "imir"); |
4478 | | AVIF_CHECKERR(imirProp, AVIF_RESULT_OUT_OF_MEMORY); |
4479 | | imirProp->u.imir.axis = orientation == 2 ? 1 : 0; |
4480 | | AVIF_CHECKERR(avifDecoderItemAddProperty(colorItem, imirProp), AVIF_RESULT_OUT_OF_MEMORY); |
4481 | | } else { |
4482 | | AVIF_CHECKERR(avifMetaCreateProperty(meta, "skip"), AVIF_RESULT_OUT_OF_MEMORY); // Placeholder. |
4483 | | } |
4484 | | |
4485 | | if (hasClli) { |
4486 | | // Property with fixed index 11. |
4487 | | avifProperty * clliProp = avifMetaCreateProperty(meta, "clli"); |
4488 | | AVIF_CHECKERR(clliProp, AVIF_RESULT_OUT_OF_MEMORY); |
4489 | | clliProp->u.clli = clli; |
4490 | | AVIF_CHECKERR(avifDecoderItemAddProperty(colorItem, clliProp), AVIF_RESULT_OUT_OF_MEMORY); |
4491 | | } else { |
4492 | | AVIF_CHECKERR(avifMetaCreateProperty(meta, "skip"), AVIF_RESULT_OUT_OF_MEMORY); // Placeholder. |
4493 | | } |
4494 | | // Properties with fixed indices 12 to 16 are ignored by libavif (mdcv, cclv, amve, reve and ndwt). |
4495 | | for (int i = 12; i <= 16; ++i) { |
4496 | | AVIF_CHECKERR(avifMetaCreateProperty(meta, "skip"), AVIF_RESULT_OUT_OF_MEMORY); // Placeholder. |
4497 | | } |
4498 | | |
4499 | | if (gainmapItemCodecConfigSize != 0) { |
4500 | | // Property with fixed index 17. |
4501 | | avifProperty * gainmapCodecConfigProp = avifMetaCreateProperty(meta, (const char *)codecConfigType); |
4502 | | AVIF_CHECKERR(gainmapCodecConfigProp, AVIF_RESULT_OUT_OF_MEMORY); |
4503 | | gainmapCodecConfigProp->u.av1C = gainmapItemCodecConfig; |
4504 | | AVIF_CHECKERR(avifDecoderItemAddProperty(gainmapItem, gainmapCodecConfigProp), AVIF_RESULT_OUT_OF_MEMORY); |
4505 | | } else { |
4506 | | AVIF_CHECKERR(avifMetaCreateProperty(meta, "skip"), AVIF_RESULT_OUT_OF_MEMORY); // Placeholder. |
4507 | | } |
4508 | | |
4509 | | if (gainmapItemDataSize != 0) { |
4510 | | // Property with fixed index 18. |
4511 | | avifProperty * gainmapIspeProp = avifMetaCreateProperty(meta, "ispe"); |
4512 | | AVIF_CHECKERR(gainmapIspeProp, AVIF_RESULT_OUT_OF_MEMORY); |
4513 | | gainmapIspeProp->u.ispe.width = gainmapWidth; |
4514 | | gainmapIspeProp->u.ispe.height = gainmapHeight; |
4515 | | AVIF_CHECKERR(avifDecoderItemAddProperty(gainmapItem, gainmapIspeProp), AVIF_RESULT_OUT_OF_MEMORY); |
4516 | | |
4517 | | // Property with fixed index 19. |
4518 | | avifProperty * gainmapPixiProp = avifMetaCreateProperty(meta, "pixi"); |
4519 | | AVIF_CHECKERR(gainmapPixiProp, AVIF_RESULT_OUT_OF_MEMORY); |
4520 | | memcpy(gainmapPixiProp->type, "pixi", 4); |
4521 | | gainmapPixiProp->u.pixi.planeCount = gainmapChromaSubsampling == 0 ? 1 : 3; |
4522 | | for (uint8_t plane = 0; plane < gainmapPixiProp->u.pixi.planeCount; ++plane) { |
4523 | | gainmapPixiProp->u.pixi.planeDepths[plane] = (uint8_t)gainmapBitDepth; |
4524 | | } |
4525 | | AVIF_CHECKERR(avifDecoderItemAddProperty(gainmapItem, gainmapPixiProp), AVIF_RESULT_OUT_OF_MEMORY); |
4526 | | |
4527 | | // Property with fixed index 20. |
4528 | | avifProperty * gainmapColrPropNCLX = avifMetaCreateProperty(meta, "colr"); |
4529 | | AVIF_CHECKERR(gainmapColrPropNCLX, AVIF_RESULT_OUT_OF_MEMORY); |
4530 | | gainmapColrPropNCLX->u.colr.hasNCLX = AVIF_TRUE; // colour_type "nclx" |
4531 | | gainmapColrPropNCLX->u.colr.colorPrimaries = AVIF_COLOR_PRIMARIES_UNSPECIFIED; // 2 |
4532 | | gainmapColrPropNCLX->u.colr.transferCharacteristics = AVIF_TRANSFER_CHARACTERISTICS_UNSPECIFIED; // 2 |
4533 | | gainmapColrPropNCLX->u.colr.matrixCoefficients = (avifMatrixCoefficients)gainmapMatrixCoefficients; |
4534 | | gainmapColrPropNCLX->u.colr.range = gainmapFullRange ? AVIF_RANGE_FULL : AVIF_RANGE_LIMITED; |
4535 | | AVIF_CHECKERR(avifDecoderItemAddProperty(gainmapItem, gainmapColrPropNCLX), AVIF_RESULT_OUT_OF_MEMORY); |
4536 | | } else { |
4537 | | // Placeholders 18, 19 and 20. |
4538 | | AVIF_CHECKERR(avifMetaCreateProperty(meta, "skip"), AVIF_RESULT_OUT_OF_MEMORY); |
4539 | | AVIF_CHECKERR(avifMetaCreateProperty(meta, "skip"), AVIF_RESULT_OUT_OF_MEMORY); |
4540 | | AVIF_CHECKERR(avifMetaCreateProperty(meta, "skip"), AVIF_RESULT_OUT_OF_MEMORY); |
4541 | | } |
4542 | | |
4543 | | if (hasGainmap) { |
4544 | | // Property with fixed index 21. |
4545 | | avifProperty * tmapIspeProp = avifMetaCreateProperty(meta, "ispe"); |
4546 | | AVIF_CHECKERR(tmapIspeProp, AVIF_RESULT_OUT_OF_MEMORY); |
4547 | | tmapIspeProp->u.ispe.width = orientation <= 4 ? width : height; |
4548 | | tmapIspeProp->u.ispe.height = orientation <= 4 ? height : width; |
4549 | | AVIF_CHECKERR(avifDecoderItemAddProperty(tmapItem, tmapIspeProp), AVIF_RESULT_OUT_OF_MEMORY); |
4550 | | } else { |
4551 | | AVIF_CHECKERR(avifMetaCreateProperty(meta, "skip"), AVIF_RESULT_OUT_OF_MEMORY); // Placeholder. |
4552 | | } |
4553 | | |
4554 | | if (hasGainmap && (tmapHasExplicitCicp || !tmapHasIcc)) { |
4555 | | // Property with fixed index 22. |
4556 | | avifProperty * tmapColrPropNCLX = avifMetaCreateProperty(meta, "colr"); |
4557 | | AVIF_CHECKERR(tmapColrPropNCLX, AVIF_RESULT_OUT_OF_MEMORY); |
4558 | | tmapColrPropNCLX->u.colr.hasNCLX = AVIF_TRUE; // colour_type "nclx" |
4559 | | tmapColrPropNCLX->u.colr.colorPrimaries = (avifColorPrimaries)tmapColorPrimaries; |
4560 | | tmapColrPropNCLX->u.colr.transferCharacteristics = (avifTransferCharacteristics)tmapTransferCharacteristics; |
4561 | | tmapColrPropNCLX->u.colr.matrixCoefficients = (avifMatrixCoefficients)tmapMatrixCoefficients; |
4562 | | tmapColrPropNCLX->u.colr.range = tmapFullRange ? AVIF_RANGE_FULL : AVIF_RANGE_LIMITED; |
4563 | | AVIF_CHECKERR(avifDecoderItemAddProperty(tmapItem, tmapColrPropNCLX), AVIF_RESULT_OUT_OF_MEMORY); |
4564 | | } else { |
4565 | | AVIF_CHECKERR(avifMetaCreateProperty(meta, "skip"), AVIF_RESULT_OUT_OF_MEMORY); // Placeholder. |
4566 | | } |
4567 | | |
4568 | | if (tmapIccDataSize != 0) { |
4569 | | // Property with fixed index 23. |
4570 | | avifProperty * tmapColrPropICC = avifMetaCreateProperty(meta, "colr"); |
4571 | | AVIF_CHECKERR(tmapColrPropICC, AVIF_RESULT_OUT_OF_MEMORY); |
4572 | | tmapColrPropICC->u.colr.hasICC = AVIF_TRUE; // colour_type "rICC" or "prof" |
4573 | | tmapColrPropICC->u.colr.iccOffset = rawOffset + avifROStreamOffset(&s); |
4574 | | tmapColrPropICC->u.colr.iccSize = tmapIccDataSize; |
4575 | | AVIF_CHECKERR(avifROStreamSkip(&s, tmapColrPropICC->u.colr.iccSize), AVIF_RESULT_BMFF_PARSE_FAILED); |
4576 | | AVIF_CHECKERR(avifDecoderItemAddProperty(colorItem, tmapColrPropICC), AVIF_RESULT_OUT_OF_MEMORY); |
4577 | | } else { |
4578 | | AVIF_CHECKERR(avifMetaCreateProperty(meta, "skip"), AVIF_RESULT_OUT_OF_MEMORY); // Placeholder. |
4579 | | } |
4580 | | |
4581 | | if (tmapHasClli) { |
4582 | | // Property with fixed index 24. |
4583 | | avifProperty * tmapClliProp = avifMetaCreateProperty(meta, "clli"); |
4584 | | AVIF_CHECKERR(tmapClliProp, AVIF_RESULT_OUT_OF_MEMORY); |
4585 | | tmapClliProp->u.clli = tmapClli; |
4586 | | AVIF_CHECKERR(avifDecoderItemAddProperty(tmapItem, tmapClliProp), AVIF_RESULT_OUT_OF_MEMORY); |
4587 | | } else { |
4588 | | AVIF_CHECKERR(avifMetaCreateProperty(meta, "skip"), AVIF_RESULT_OUT_OF_MEMORY); // Placeholder. |
4589 | | } |
4590 | | // Properties with fixed indices 25 to 29 are ignored by libavif (mdcv, cclv, amve, reve and ndwt). |
4591 | | for (int i = 25; i <= 29; ++i) { |
4592 | | AVIF_CHECKERR(avifMetaCreateProperty(meta, "skip"), AVIF_RESULT_OUT_OF_MEMORY); // Placeholder. |
4593 | | } |
4594 | | AVIF_ASSERT_OR_RETURN(meta->properties.count == 29); |
4595 | | |
4596 | | // Extents. |
4597 | | |
4598 | | if (gainmapMetadataSize != 0) { |
4599 | | // Prepend the version field to the GainMapMetadata to form the ToneMapImage syntax. |
4600 | | tmapItem->size = gainmapMetadataSize + 1; |
4601 | | AVIF_CHECKRES(avifRWDataRealloc(&tmapItem->mergedExtents, tmapItem->size)); |
4602 | | tmapItem->ownsMergedExtents = AVIF_TRUE; |
4603 | | tmapItem->mergedExtents.data[0] = 0; // unsigned int(8) version = 0; |
4604 | | AVIF_CHECKERR(avifROStreamRead(&s, tmapItem->mergedExtents.data + 1, gainmapMetadataSize), AVIF_RESULT_BMFF_PARSE_FAILED); |
4605 | | } |
4606 | | |
4607 | | if (hasAlpha) { |
4608 | | avifExtent * alphaExtent = (avifExtent *)avifArrayPush(&alphaItem->extents); |
4609 | | AVIF_CHECKERR(alphaExtent, AVIF_RESULT_OUT_OF_MEMORY); |
4610 | | alphaExtent->offset = rawOffset + avifROStreamOffset(&s); |
4611 | | alphaExtent->size = alphaItemDataSize; |
4612 | | AVIF_CHECKERR(avifROStreamSkip(&s, alphaExtent->size), AVIF_RESULT_BMFF_PARSE_FAILED); |
4613 | | alphaItem->size = alphaExtent->size; |
4614 | | } |
4615 | | |
4616 | | if (gainmapItemDataSize != 0) { |
4617 | | avifExtent * gainmapExtent = (avifExtent *)avifArrayPush(&gainmapItem->extents); |
4618 | | AVIF_CHECKERR(gainmapExtent, AVIF_RESULT_OUT_OF_MEMORY); |
4619 | | gainmapExtent->offset = rawOffset + avifROStreamOffset(&s); |
4620 | | gainmapExtent->size = gainmapItemDataSize; |
4621 | | AVIF_CHECKERR(avifROStreamSkip(&s, gainmapExtent->size), AVIF_RESULT_BMFF_PARSE_FAILED); |
4622 | | gainmapItem->size = gainmapExtent->size; |
4623 | | } |
4624 | | |
4625 | | avifExtent * colorExtent = (avifExtent *)avifArrayPush(&colorItem->extents); |
4626 | | AVIF_CHECKERR(colorExtent, AVIF_RESULT_OUT_OF_MEMORY); |
4627 | | colorExtent->offset = rawOffset + avifROStreamOffset(&s); |
4628 | | colorExtent->size = mainItemDataSize; |
4629 | | AVIF_CHECKERR(avifROStreamSkip(&s, colorExtent->size), AVIF_RESULT_BMFF_PARSE_FAILED); |
4630 | | colorItem->size = colorExtent->size; |
4631 | | |
4632 | | if (hasExif) { |
4633 | | avifDecoderItem * exifItem; |
4634 | | AVIF_CHECKRES(avifMetaFindOrCreateItem(meta, /*itemID=*/6, &exifItem)); |
4635 | | memcpy(exifItem->type, "Exif", 4); |
4636 | | exifItem->descForID = colorItem->id; // 'cdsc' |
4637 | | |
4638 | | avifExtent * exifExtent = (avifExtent *)avifArrayPush(&exifItem->extents); |
4639 | | AVIF_CHECKERR(exifExtent, AVIF_RESULT_OUT_OF_MEMORY); |
4640 | | exifExtent->offset = rawOffset + avifROStreamOffset(&s); |
4641 | | exifExtent->size = exifDataSize; // Does not include unsigned int(32) exif_tiff_header_offset; |
4642 | | AVIF_CHECKERR(avifROStreamSkip(&s, exifExtent->size), AVIF_RESULT_BMFF_PARSE_FAILED); |
4643 | | exifItem->size = exifExtent->size; |
4644 | | } |
4645 | | |
4646 | | if (hasXmp) { |
4647 | | avifDecoderItem * xmpItem; |
4648 | | AVIF_CHECKRES(avifMetaFindOrCreateItem(meta, /*itemID=*/7, &xmpItem)); |
4649 | | memcpy(xmpItem->type, "mime", 4); |
4650 | | memcpy(xmpItem->contentType.contentType, AVIF_CONTENT_TYPE_XMP, sizeof(AVIF_CONTENT_TYPE_XMP)); |
4651 | | xmpItem->descForID = colorItem->id; // 'cdsc' |
4652 | | |
4653 | | avifExtent * xmpExtent = (avifExtent *)avifArrayPush(&xmpItem->extents); |
4654 | | AVIF_CHECKERR(xmpExtent, AVIF_RESULT_OUT_OF_MEMORY); |
4655 | | xmpExtent->offset = rawOffset + avifROStreamOffset(&s); |
4656 | | xmpExtent->size = xmpDataSize; |
4657 | | AVIF_CHECKERR(avifROStreamSkip(&s, xmpExtent->size), AVIF_RESULT_BMFF_PARSE_FAILED); |
4658 | | xmpItem->size = xmpExtent->size; |
4659 | | } |
4660 | | return AVIF_RESULT_OK; |
4661 | | } |
4662 | | #endif // AVIF_ENABLE_EXPERIMENTAL_MINI |
4663 | | |
4664 | | static avifBool avifParseFileTypeBox(avifFileType * ftyp, const uint8_t * raw, size_t rawLen, avifDiagnostics * diag) |
4665 | 51.9k | { |
4666 | 51.9k | BEGIN_STREAM(s, raw, rawLen, diag, "Box[ftyp]"); |
4667 | | |
4668 | 51.9k | AVIF_CHECK(avifROStreamRead(&s, ftyp->majorBrand, 4)); |
4669 | 51.9k | AVIF_CHECK(avifROStreamRead(&s, ftyp->minorVersion, 4)); |
4670 | | |
4671 | 51.9k | size_t compatibleBrandsBytes = avifROStreamRemainingBytes(&s); |
4672 | 51.9k | if ((compatibleBrandsBytes % 4) != 0) { |
4673 | 6 | avifDiagnosticsPrintf(diag, "Box[ftyp] contains a compatible brands section that isn't divisible by 4 [%zu]", compatibleBrandsBytes); |
4674 | 6 | return AVIF_FALSE; |
4675 | 6 | } |
4676 | 51.8k | ftyp->compatibleBrands = avifROStreamCurrent(&s); |
4677 | 51.8k | AVIF_CHECK(avifROStreamSkip(&s, compatibleBrandsBytes)); |
4678 | 51.8k | ftyp->compatibleBrandsCount = (int)compatibleBrandsBytes / 4; |
4679 | | |
4680 | 51.8k | return AVIF_TRUE; |
4681 | 51.8k | } |
4682 | | |
4683 | | static avifBool avifFileTypeHasBrand(avifFileType * ftyp, const char * brand); |
4684 | | static avifBool avifFileTypeIsCompatible(avifFileType * ftyp); |
4685 | | |
// Reads and parses the top-level BMFF boxes of the file exposed through decoder->io,
// populating decoder->data. Returns AVIF_RESULT_OK early as soon as every box required
// by the brands listed in the FileTypeBox (ftyp) has been successfully parsed.
static avifResult avifParse(avifDecoder * decoder)
{
    // Note: this top-level function is the only avifParse*() function that returns avifResult instead of avifBool.
    // Be sure to use AVIF_CHECKERR() in this function with an explicit error result instead of simply using AVIF_CHECK().

    avifResult readResult;
    uint64_t parseOffset = 0; // absolute file offset of the next byte to read
    avifDecoderData * data = decoder->data;
    avifBool ftypSeen = AVIF_FALSE;
    avifBool metaSeen = AVIF_FALSE;
    avifBool metaIsSizeZero = AVIF_FALSE;
    avifBool moovSeen = AVIF_FALSE;
    avifBool needsMeta = AVIF_FALSE;
    avifBool needsMoov = AVIF_FALSE;
#if defined(AVIF_ENABLE_EXPERIMENTAL_MINI)
    avifBool miniSeen = AVIF_FALSE;
    avifBool needsMini = AVIF_FALSE;
#endif
    avifBool needsTmap = AVIF_FALSE;
    avifBool tmapSeen = AVIF_FALSE;
    avifFileType ftyp = { 0 };

    for (;;) {
        // Read just enough to get the next box header (a max of 32 bytes)
        avifROData headerContents;
        if ((decoder->io->sizeHint > 0) && (parseOffset > decoder->io->sizeHint)) {
            return AVIF_RESULT_BMFF_PARSE_FAILED;
        }
        readResult = decoder->io->read(decoder->io, 0, parseOffset, 32, &headerContents);
        if (readResult != AVIF_RESULT_OK) {
            return readResult;
        }
        if (!headerContents.size) {
            // If we got AVIF_RESULT_OK from the reader but received 0 bytes,
            // we've reached the end of the file with no errors. Hooray!
            break;
        }

        // Parse the header, and find out how many bytes it actually was
        BEGIN_STREAM(headerStream, headerContents.data, headerContents.size, &decoder->diag, "File-level box header");
        avifBoxHeader header;
        AVIF_CHECKERR(avifROStreamReadBoxHeaderPartial(&headerStream, &header, /*topLevel=*/AVIF_TRUE), AVIF_RESULT_BMFF_PARSE_FAILED);
        parseOffset += headerStream.offset;
        AVIF_ASSERT_OR_RETURN(decoder->io->sizeHint == 0 || parseOffset <= decoder->io->sizeHint);

        // Try to get the remainder of the box, if necessary
        uint64_t boxOffset = 0;
        avifROData boxContents = AVIF_DATA_EMPTY;

        // Classify the box type. Only ftyp/meta/moov (and mini when enabled) have their
        // bodies read and parsed here; all other box types are skipped over.
        avifBool isFtyp = AVIF_FALSE, isMeta = AVIF_FALSE, isMoov = AVIF_FALSE;
        avifBool isNonSkippableVariableLengthBox = AVIF_FALSE;
        if (!memcmp(header.type, "ftyp", 4)) {
            isFtyp = AVIF_TRUE;
            isNonSkippableVariableLengthBox = AVIF_TRUE;
        } else if (!memcmp(header.type, "meta", 4)) {
            isMeta = AVIF_TRUE;
            isNonSkippableVariableLengthBox = AVIF_TRUE;
            metaIsSizeZero = header.isSizeZeroBox;
        } else if (!memcmp(header.type, "moov", 4)) {
            isMoov = AVIF_TRUE;
            isNonSkippableVariableLengthBox = AVIF_TRUE;
        }
#if defined(AVIF_ENABLE_EXPERIMENTAL_MINI)
        avifBool isMini = AVIF_FALSE;
        if (!isNonSkippableVariableLengthBox && !memcmp(header.type, "mini", 4)) {
            isMini = AVIF_TRUE;
            isNonSkippableVariableLengthBox = AVIF_TRUE;
        }
#endif

        if (!isFtyp && (isNonSkippableVariableLengthBox || !memcmp(header.type, "free", 4) || !memcmp(header.type, "skip", 4) ||
                        !memcmp(header.type, "mdat", 4))) {
            // Section 6.3.4 of ISO/IEC 14496-12:
            //   The FileTypeBox shall occur before any variable-length box (e.g. movie, free space, media data).
            AVIF_CHECKERR(ftypSeen, AVIF_RESULT_BMFF_PARSE_FAILED);
        }

        if (isNonSkippableVariableLengthBox) {
            boxOffset = parseOffset;
            size_t sizeToRead;
            if (header.isSizeZeroBox) {
                // The box body goes till the end of the file.
                if (decoder->io->sizeHint != 0 && decoder->io->sizeHint - parseOffset < SIZE_MAX) {
                    sizeToRead = (size_t)(decoder->io->sizeHint - parseOffset);
                } else {
                    sizeToRead = SIZE_MAX; // This will get truncated. See the documentation of avifIOReadFunc.
                }
            } else {
                sizeToRead = header.size;
            }
            readResult = decoder->io->read(decoder->io, 0, parseOffset, sizeToRead, &boxContents);
            if (readResult != AVIF_RESULT_OK) {
                return readResult;
            }
            if (header.isSizeZeroBox) {
                header.size = boxContents.size;
            } else if (boxContents.size != header.size) {
                // A truncated box, bail out
                return AVIF_RESULT_TRUNCATED_DATA;
            }
        } else if (header.isSizeZeroBox) {
            // An unknown top level box with size 0 was found. If we reach here it means we haven't completed parsing successfully
            // since there are no further boxes left.
            return AVIF_RESULT_BMFF_PARSE_FAILED;
        } else if (header.size > (UINT64_MAX - parseOffset)) {
            // Advancing past this box would overflow the 64-bit parse offset.
            return AVIF_RESULT_BMFF_PARSE_FAILED;
        }
        parseOffset += header.size;

        if (isFtyp) {
            AVIF_CHECKERR(!ftypSeen, AVIF_RESULT_BMFF_PARSE_FAILED);
            AVIF_CHECKERR(avifParseFileTypeBox(&ftyp, boxContents.data, boxContents.size, data->diag), AVIF_RESULT_BMFF_PARSE_FAILED);
            AVIF_CHECKERR(avifFileTypeIsCompatible(&ftyp), AVIF_RESULT_INVALID_FTYP);
            ftypSeen = AVIF_TRUE;
            memcpy(data->majorBrand, ftyp.majorBrand, 4); // Remember the major brand for future AVIF_DECODER_SOURCE_AUTO decisions
            if (ftyp.compatibleBrandsCount > 0) {
                AVIF_CHECKERR(avifArrayCreate(&data->compatibleBrands, sizeof(avifBrand), ftyp.compatibleBrandsCount),
                              AVIF_RESULT_OUT_OF_MEMORY);
                memcpy(data->compatibleBrands.brand, ftyp.compatibleBrands, sizeof(avifBrand) * ftyp.compatibleBrandsCount);
                data->compatibleBrands.count = ftyp.compatibleBrandsCount;
            }
            // The brands decide which top-level boxes must appear before parsing is complete.
            needsMeta = avifFileTypeHasBrand(&ftyp, "avif");
            needsMoov = avifFileTypeHasBrand(&ftyp, "avis");
#if defined(AVIF_ENABLE_EXPERIMENTAL_MINI)
            needsMini = avifFileTypeHasBrand(&ftyp, "mif3");
            if (needsMini) {
                AVIF_CHECKERR(!needsMeta, AVIF_RESULT_INVALID_FTYP);
                // Section O.2.1.2 of ISO/IEC 23008-12:2014, CDAM 2:
                //   When the 'mif3' brand is present as the major_brand of the FileTypeBox,
                //   the minor_version of the FileTypeBox shall be 0 or a brand that is either
                //   structurally compatible with the 'mif3' brand, such as a codec brand
                //   complying with the 'mif3' structural brand, or a brand to which the file
                //   conforms after the equivalent MetaBox has been transformed from
                //   MinimizedImageBox as specified in Clause O.4.
                AVIF_CHECKERR(!memcmp(ftyp.minorVersion, "\0\0\0\0", 4) || !memcmp(ftyp.minorVersion, "avif", 4),
                              AVIF_RESULT_BMFF_PARSE_FAILED);
            }
#endif // AVIF_ENABLE_EXPERIMENTAL_MINI
            needsTmap = avifFileTypeHasBrand(&ftyp, "tmap");
            if (needsTmap) {
                needsMeta = AVIF_TRUE;
            }
        } else if (isMeta) {
            AVIF_CHECKERR(!metaSeen, AVIF_RESULT_BMFF_PARSE_FAILED);
#if defined(AVIF_ENABLE_EXPERIMENTAL_MINI)
            AVIF_CHECKERR(!miniSeen, AVIF_RESULT_BMFF_PARSE_FAILED);
#endif
            AVIF_CHECKRES(avifParseMetaBox(data->meta, boxOffset, boxContents.data, boxContents.size, data->diag));
            metaSeen = AVIF_TRUE;

            // Detect a tone-mapping derived image item ('tmap') among the parsed items.
            for (uint32_t itemIndex = 0; itemIndex < data->meta->items.count; ++itemIndex) {
                if (!memcmp(data->meta->items.item[itemIndex]->type, "tmap", 4)) {
                    tmapSeen = AVIF_TRUE;
                    break;
                }
            }

#if defined(AVIF_ENABLE_EXPERIMENTAL_MINI)
        } else if (isMini) {
            AVIF_CHECKERR(!metaSeen, AVIF_RESULT_BMFF_PARSE_FAILED);
            AVIF_CHECKERR(!miniSeen, AVIF_RESULT_BMFF_PARSE_FAILED);
            const avifBool isAvifAccordingToMinorVersion = !memcmp(ftyp.minorVersion, "avif", 4);
            AVIF_CHECKRES(
                avifParseMinimizedImageBox(data, boxOffset, boxContents.data, boxContents.size, isAvifAccordingToMinorVersion, data->diag));
            miniSeen = AVIF_TRUE;
#endif
        } else if (isMoov) {
            AVIF_CHECKERR(!moovSeen, AVIF_RESULT_BMFF_PARSE_FAILED);
            AVIF_CHECKRES(
                avifParseMovieBox(data, boxOffset, boxContents.data, boxContents.size, decoder->imageSizeLimit, decoder->imageDimensionLimit));
            moovSeen = AVIF_TRUE;
            decoder->imageSequenceTrackPresent = AVIF_TRUE;
        }

#if defined(AVIF_ENABLE_EXPERIMENTAL_MINI)
        if (ftypSeen && !needsMini) {
            // When MinimizedImageBox is present in a file, the 'mif3' brand or a derived brand that implies the 'mif3'
            // brand shall be the major brand or present among the compatible brands in the FileTypeBox.
            AVIF_CHECKERR(!miniSeen, AVIF_RESULT_BMFF_PARSE_FAILED);
        }
#endif // AVIF_ENABLE_EXPERIMENTAL_MINI

        // See if there is enough information to consider Parse() a success and early-out:
        // * If the brand 'avif' is present, require a meta box
        // * If the brand 'avis' is present, require a moov box
        // * If AVIF_ENABLE_EXPERIMENTAL_MINI is defined and the brand 'mif3' is present, require a mini box
        avifBool sawEverythingNeeded = ftypSeen && (!needsMeta || metaSeen) && (!needsMoov || moovSeen) && (!needsTmap || tmapSeen);
#if defined(AVIF_ENABLE_EXPERIMENTAL_MINI)
        sawEverythingNeeded = sawEverythingNeeded && (!needsMini || miniSeen);
#endif
        if (sawEverythingNeeded) {
            return AVIF_RESULT_OK;
        }
    }
    // End of file reached without early-out; report which required box is missing (if any).
    if (!ftypSeen) {
        return AVIF_RESULT_INVALID_FTYP;
    }
    if ((needsMeta && !metaSeen) || (needsMoov && !moovSeen)) {
        return AVIF_RESULT_TRUNCATED_DATA;
    }
    if (needsTmap && !tmapSeen) {
        // A size-zero meta box implies the file may simply have been cut short.
        return metaIsSizeZero ? AVIF_RESULT_TRUNCATED_DATA : AVIF_RESULT_BMFF_PARSE_FAILED;
    }
#if defined(AVIF_ENABLE_EXPERIMENTAL_MINI)
    if (needsMini && !miniSeen) {
        return AVIF_RESULT_TRUNCATED_DATA;
    }
#endif
    return AVIF_RESULT_OK;
}
4896 | | |
4897 | | // --------------------------------------------------------------------------- |
4898 | | |
4899 | | static avifBool avifFileTypeHasBrand(avifFileType * ftyp, const char * brand) |
4900 | 106k | { |
4901 | 106k | if (!memcmp(ftyp->majorBrand, brand, 4)) { |
4902 | 51.7k | return AVIF_TRUE; |
4903 | 51.7k | } |
4904 | | |
4905 | 161k | for (int compatibleBrandIndex = 0; compatibleBrandIndex < ftyp->compatibleBrandsCount; ++compatibleBrandIndex) { |
4906 | 123k | const uint8_t * compatibleBrand = &ftyp->compatibleBrands[4 * compatibleBrandIndex]; |
4907 | 123k | if (!memcmp(compatibleBrand, brand, 4)) { |
4908 | 17.4k | return AVIF_TRUE; |
4909 | 17.4k | } |
4910 | 123k | } |
4911 | 37.3k | return AVIF_FALSE; |
4912 | 54.8k | } |
4913 | | |
4914 | | static avifBool avifFileTypeIsCompatible(avifFileType * ftyp) |
4915 | 51.8k | { |
4916 | 51.8k | return avifFileTypeHasBrand(ftyp, "avif") || avifFileTypeHasBrand(ftyp, "avis") |
4917 | | #if defined(AVIF_ENABLE_EXPERIMENTAL_MINI) |
4918 | | || avifFileTypeHasBrand(ftyp, "mif3") |
4919 | | #endif // AVIF_ENABLE_EXPERIMENTAL_MINI |
4920 | 51.8k | ; |
4921 | 51.8k | } |
4922 | | |
4923 | | avifBool avifPeekCompatibleFileType(const avifROData * input) |
4924 | 35.0k | { |
4925 | 35.0k | BEGIN_STREAM(s, input->data, input->size, NULL, NULL); |
4926 | | |
4927 | 35.0k | avifBoxHeader header; |
4928 | 35.0k | if (!avifROStreamReadBoxHeaderPartial(&s, &header, /*topLevel=*/AVIF_TRUE) || memcmp(header.type, "ftyp", 4)) { |
4929 | 243 | return AVIF_FALSE; |
4930 | 243 | } |
4931 | 34.8k | if (header.isSizeZeroBox) { |
4932 | | // The ftyp box goes on till the end of the file. Either there is no brand requiring anything in the file but a |
4933 | | // FileTypebox (so not AVIF), or it is invalid. |
4934 | 3 | return AVIF_FALSE; |
4935 | 3 | } |
4936 | 34.8k | AVIF_CHECK(avifROStreamHasBytesLeft(&s, header.size)); |
4937 | | |
4938 | 34.6k | avifFileType ftyp; |
4939 | 34.6k | memset(&ftyp, 0, sizeof(avifFileType)); |
4940 | 34.6k | avifBool parsed = avifParseFileTypeBox(&ftyp, avifROStreamCurrent(&s), header.size, NULL); |
4941 | 34.6k | if (!parsed) { |
4942 | 22 | return AVIF_FALSE; |
4943 | 22 | } |
4944 | 34.6k | return avifFileTypeIsCompatible(&ftyp); |
4945 | 34.6k | } |
4946 | | |
4947 | | static avifBool avifBrandArrayHasBrand(avifBrandArray * brands, const char * brand) |
4948 | 14.6k | { |
4949 | 61.2k | for (uint32_t brandIndex = 0; brandIndex < brands->count; ++brandIndex) { |
4950 | 46.6k | if (!memcmp(brands->brand[brandIndex], brand, 4)) { |
4951 | 13 | return AVIF_TRUE; |
4952 | 13 | } |
4953 | 46.6k | } |
4954 | 14.6k | return AVIF_FALSE; |
4955 | 14.6k | } |
4956 | | |
4957 | | // --------------------------------------------------------------------------- |
4958 | | |
4959 | | avifDecoder * avifDecoderCreate(void) |
4960 | 17.2k | { |
4961 | 17.2k | avifDecoder * decoder = (avifDecoder *)avifAlloc(sizeof(avifDecoder)); |
4962 | 17.2k | if (decoder == NULL) { |
4963 | 0 | return NULL; |
4964 | 0 | } |
4965 | 17.2k | memset(decoder, 0, sizeof(avifDecoder)); |
4966 | 17.2k | decoder->maxThreads = 1; |
4967 | 17.2k | decoder->imageSizeLimit = AVIF_DEFAULT_IMAGE_SIZE_LIMIT; |
4968 | 17.2k | decoder->imageDimensionLimit = AVIF_DEFAULT_IMAGE_DIMENSION_LIMIT; |
4969 | 17.2k | decoder->imageCountLimit = AVIF_DEFAULT_IMAGE_COUNT_LIMIT; |
4970 | 17.2k | decoder->strictFlags = AVIF_STRICT_ENABLED; |
4971 | 17.2k | decoder->imageContentToDecode = AVIF_IMAGE_CONTENT_DECODE_DEFAULT; |
4972 | 17.2k | return decoder; |
4973 | 17.2k | } |
4974 | | |
4975 | | static void avifDecoderCleanup(avifDecoder * decoder) |
4976 | 34.5k | { |
4977 | 34.5k | if (decoder->data) { |
4978 | 17.2k | avifDecoderDataDestroy(decoder->data); |
4979 | 17.2k | decoder->data = NULL; |
4980 | 17.2k | } |
4981 | | |
4982 | 34.5k | if (decoder->image) { |
4983 | 14.9k | avifImageDestroy(decoder->image); |
4984 | 14.9k | decoder->image = NULL; |
4985 | 14.9k | } |
4986 | 34.5k | avifDiagnosticsClearError(&decoder->diag); |
4987 | 34.5k | } |
4988 | | |
// Frees |decoder| and everything it owns, including its attached avifIO.
void avifDecoderDestroy(avifDecoder * decoder)
{
    // Release parse/decode state first, then the IO, then the decoder itself.
    avifDecoderCleanup(decoder);
    avifIODestroy(decoder->io);
    avifFree(decoder);
}
4995 | | |
// Records the requested decoding source (items vs. tracks) and resets the
// decoder so the new source takes effect. Returns the result of the reset.
avifResult avifDecoderSetSource(avifDecoder * decoder, avifDecoderSource source)
{
    decoder->requestedSource = source;
    return avifDecoderReset(decoder);
}
5001 | | |
// Attaches |io| to |decoder|, destroying any previously attached avifIO.
// The decoder takes ownership of |io| (it is destroyed in avifDecoderDestroy()).
void avifDecoderSetIO(avifDecoder * decoder, avifIO * io)
{
    avifIODestroy(decoder->io);
    decoder->io = io;
}
5007 | | |
5008 | | avifResult avifDecoderSetIOMemory(avifDecoder * decoder, const uint8_t * data, size_t size) |
5009 | 17.2k | { |
5010 | 17.2k | avifIO * io = avifIOCreateMemoryReader(data, size); |
5011 | 17.2k | AVIF_CHECKERR(io != NULL, AVIF_RESULT_OUT_OF_MEMORY); |
5012 | 17.2k | avifDecoderSetIO(decoder, io); |
5013 | 17.2k | return AVIF_RESULT_OK; |
5014 | 17.2k | } |
5015 | | |
5016 | | avifResult avifDecoderSetIOFile(avifDecoder * decoder, const char * filename) |
5017 | 0 | { |
5018 | 0 | avifIO * io = avifIOCreateFileReader(filename); |
5019 | 0 | if (!io) { |
5020 | 0 | return AVIF_RESULT_IO_ERROR; |
5021 | 0 | } |
5022 | 0 | avifDecoderSetIO(decoder, io); |
5023 | 0 | return AVIF_RESULT_OK; |
5024 | 0 | } |
5025 | | |
5026 | | // 0-byte extents are ignored/overwritten during the merge, as they are the signal from helper |
5027 | | // functions that no extent was necessary for this given sample. If both provided extents are |
5028 | | // >0 bytes, this will set dst to be an extent that bounds both supplied extents. |
5029 | | static avifResult avifExtentMerge(avifExtent * dst, const avifExtent * src) |
5030 | 0 | { |
5031 | 0 | if (!dst->size) { |
5032 | 0 | *dst = *src; |
5033 | 0 | return AVIF_RESULT_OK; |
5034 | 0 | } |
5035 | 0 | if (!src->size) { |
5036 | 0 | return AVIF_RESULT_OK; |
5037 | 0 | } |
5038 | | |
5039 | 0 | const uint64_t minExtent1 = dst->offset; |
5040 | 0 | const uint64_t maxExtent1 = dst->offset + dst->size; |
5041 | 0 | const uint64_t minExtent2 = src->offset; |
5042 | 0 | const uint64_t maxExtent2 = src->offset + src->size; |
5043 | 0 | dst->offset = AVIF_MIN(minExtent1, minExtent2); |
5044 | 0 | const uint64_t extentLength = AVIF_MAX(maxExtent1, maxExtent2) - dst->offset; |
5045 | | #if UINT64_MAX > SIZE_MAX |
5046 | | if (extentLength > SIZE_MAX) { |
5047 | | return AVIF_RESULT_BMFF_PARSE_FAILED; |
5048 | | } |
5049 | | #endif |
5050 | 0 | dst->size = (size_t)extentLength; |
5051 | 0 | return AVIF_RESULT_OK; |
5052 | 0 | } |
5053 | | |
// Computes, into |outExtent|, a single byte range of the file that covers all of the
// sample data needed to decode frame |frameIndex|: every tile of every frame from the
// nearest preceding keyframe up to and including |frameIndex|.
// Requires avifDecoderParse() to have been called (decoder->data must exist).
avifResult avifDecoderNthImageMaxExtent(const avifDecoder * decoder, uint32_t frameIndex, avifExtent * outExtent)
{
    if (!decoder->data) {
        // Nothing has been parsed yet
        return AVIF_RESULT_NO_CONTENT;
    }

    memset(outExtent, 0, sizeof(avifExtent));

    // Decoding frameIndex requires every frame from its nearest keyframe onward.
    uint32_t startFrameIndex = avifDecoderNearestKeyframe(decoder, frameIndex);
    uint32_t endFrameIndex = frameIndex;
    for (uint32_t currentFrameIndex = startFrameIndex; currentFrameIndex <= endFrameIndex; ++currentFrameIndex) {
        for (unsigned int tileIndex = 0; tileIndex < decoder->data->tiles.count; ++tileIndex) {
            avifTile * tile = &decoder->data->tiles.tile[tileIndex];
            if (currentFrameIndex >= tile->input->samples.count) {
                return AVIF_RESULT_NO_IMAGES_REMAINING;
            }

            avifDecodeSample * sample = &tile->input->samples.sample[currentFrameIndex];
            avifExtent sampleExtent;
            if (sample->itemID) {
                // The data comes from an item. Let avifDecoderItemMaxExtent() do the heavy lifting.

                avifDecoderItem * item;
                AVIF_CHECKRES(avifMetaFindOrCreateItem(decoder->data->meta, sample->itemID, &item));
                avifResult maxExtentResult = avifDecoderItemMaxExtent(item, sample, &sampleExtent);
                if (maxExtentResult != AVIF_RESULT_OK) {
                    return maxExtentResult;
                }
            } else {
                // The data likely comes from a sample table. Use the sample position directly.

                sampleExtent.offset = sample->offset;
                sampleExtent.size = sample->size;
            }

            // Reject extents whose end would overflow a 64-bit offset.
            if (sampleExtent.size > UINT64_MAX - sampleExtent.offset) {
                return AVIF_RESULT_BMFF_PARSE_FAILED;
            }

            avifResult extentMergeResult = avifExtentMerge(outExtent, &sampleExtent);
            if (extentMergeResult != AVIF_RESULT_OK) {
                return extentMergeResult;
            }
        }
    }
    return AVIF_RESULT_OK;
}
5102 | | |
// Ensures |sample->data| points at the sample's bytes, reading them through decoder->io
// if they have not been fully fetched yet. If |partialByteCount| is non-zero, at most
// that many bytes are fetched and the sample may be flagged as partial. No-op when the
// sample already holds complete data.
static avifResult avifDecoderPrepareSample(avifDecoder * decoder, avifDecodeSample * sample, size_t partialByteCount)
{
    if (!sample->data.size || sample->partialData) {
        // This sample hasn't been read from IO or had its extents fully merged yet.

        size_t bytesToRead = sample->size;
        if (partialByteCount && (bytesToRead > partialByteCount)) {
            bytesToRead = partialByteCount;
        }

        if (sample->itemID) {
            // The data comes from an item. Let avifDecoderItemRead() do the heavy lifting.

            avifDecoderItem * item;
            AVIF_CHECKRES(avifMetaFindOrCreateItem(decoder->data->meta, sample->itemID, &item));
            avifROData itemContents;
#if UINT64_MAX > SIZE_MAX
            // On 32-bit targets a 64-bit sample offset may not be representable as size_t.
            if (sample->offset > SIZE_MAX) {
                return AVIF_RESULT_BMFF_PARSE_FAILED;
            }
#endif
            size_t offset = (size_t)sample->offset;
            avifResult readResult = avifDecoderItemRead(item, decoder->io, &itemContents, offset, bytesToRead, &decoder->diag);
            if (readResult != AVIF_RESULT_OK) {
                return readResult;
            }

            // avifDecoderItemRead is guaranteed to already be persisted by either the underlying IO
            // or by mergedExtents; just reuse the buffer here.
            sample->data = itemContents;
            sample->ownsData = AVIF_FALSE;
            sample->partialData = item->partialMergedExtents;
        } else {
            // The data likely comes from a sample table. Pull the sample and make a copy if necessary.

            avifROData sampleContents;
            if ((decoder->io->sizeHint > 0) && (sample->offset > decoder->io->sizeHint)) {
                return AVIF_RESULT_BMFF_PARSE_FAILED;
            }
            avifResult readResult = decoder->io->read(decoder->io, 0, sample->offset, bytesToRead, &sampleContents);
            if (readResult != AVIF_RESULT_OK) {
                return readResult;
            }
            if (sampleContents.size != bytesToRead) {
                return AVIF_RESULT_TRUNCATED_DATA;
            }

            // Non-persistent IO may reuse its buffer, so the bytes must be copied.
            sample->ownsData = !decoder->io->persistent;
            sample->partialData = (bytesToRead != sample->size);
            if (decoder->io->persistent) {
                sample->data = sampleContents;
            } else {
                AVIF_CHECKRES(avifRWDataSet((avifRWData *)&sample->data, sampleContents.data, sampleContents.size));
            }
        }
    }
    return AVIF_RESULT_OK;
}
5161 | | |
5162 | | // Returns AVIF_TRUE if the item should be skipped. Items should be skipped for one of the following reasons: |
5163 | | // * Size is 0. |
5164 | | // * Has an essential property that isn't supported by libavif. |
5165 | | // * Item is not a single image or a grid. |
5166 | | // * Item is a thumbnail. |
5167 | | static avifBool avifDecoderItemShouldBeSkipped(const avifDecoderItem * item) |
5168 | 51.0k | { |
5169 | 51.0k | return !item->size || item->hasUnsupportedEssentialProperty || |
5170 | 51.0k | (avifGetCodecType(item->type) == AVIF_CODEC_TYPE_UNKNOWN && memcmp(item->type, "grid", 4)) || item->thumbnailForID != 0; |
5171 | 51.0k | } |
5172 | | |
// Parses the file attached to |decoder| (IO must already be set): validates decoder
// settings, runs the top-level BMFF parse, harvests each usable item's 'ispe'
// dimensions, and finishes with avifDecoderReset() to prepare for decoding.
avifResult avifDecoderParse(avifDecoder * decoder)
{
    avifDiagnosticsClearError(&decoder->diag);

    // An imageSizeLimit greater than AVIF_DEFAULT_IMAGE_SIZE_LIMIT and the special value of 0 to
    // disable the limit are not yet implemented.
    if ((decoder->imageSizeLimit > AVIF_DEFAULT_IMAGE_SIZE_LIMIT) || (decoder->imageSizeLimit == 0)) {
        return AVIF_RESULT_NOT_IMPLEMENTED;
    }
    if (!decoder->io || !decoder->io->read) {
        return AVIF_RESULT_IO_NOT_SET;
    }

    // Cleanup anything lingering in the decoder
    avifDecoderCleanup(decoder);

    // -----------------------------------------------------------------------
    // Parse BMFF boxes

    decoder->data = avifDecoderDataCreate();
    AVIF_CHECKERR(decoder->data != NULL, AVIF_RESULT_OUT_OF_MEMORY);
    decoder->data->diag = &decoder->diag;

    AVIF_CHECKRES(avifParse(decoder));

    // Walk the decoded items (if any) and harvest ispe
    avifDecoderData * data = decoder->data;
    for (uint32_t itemIndex = 0; itemIndex < data->meta->items.count; ++itemIndex) {
        avifDecoderItem * item = data->meta->items.item[itemIndex];
        if (avifDecoderItemShouldBeSkipped(item)) {
            continue;
        }

        const avifProperty * ispeProp = avifPropertyArrayFind(&item->properties, "ispe");
        if (ispeProp) {
            item->width = ispeProp->u.ispe.width;
            item->height = ispeProp->u.ispe.height;

            if ((item->width == 0) || (item->height == 0)) {
                avifDiagnosticsPrintf(data->diag, "Item ID [%u] has an invalid size [%ux%u]", item->id, item->width, item->height);
                return AVIF_RESULT_BMFF_PARSE_FAILED;
            }
            if (avifDimensionsTooLarge(item->width, item->height, decoder->imageSizeLimit, decoder->imageDimensionLimit)) {
                avifDiagnosticsPrintf(data->diag, "Item ID [%u] dimensions are too large [%ux%u]", item->id, item->width, item->height);
                return AVIF_RESULT_BMFF_PARSE_FAILED;
            }
        } else {
            // 'ispe' is mandatory; a missing one is only tolerated for alpha auxiliary
            // items when strict alpha-ispe checking is disabled.
            const avifProperty * auxCProp = avifPropertyArrayFind(&item->properties, "auxC");
            if (auxCProp && isAlphaURN(auxCProp->u.auxC.auxType)) {
                if (decoder->strictFlags & AVIF_STRICT_ALPHA_ISPE_REQUIRED) {
                    avifDiagnosticsPrintf(data->diag,
                                          "[Strict] Alpha auxiliary image item ID [%u] is missing a mandatory ispe property",
                                          item->id);
                    return AVIF_RESULT_BMFF_PARSE_FAILED;
                }
            } else {
                avifDiagnosticsPrintf(data->diag, "Item ID [%u] is missing a mandatory ispe property", item->id);
                return AVIF_RESULT_BMFF_PARSE_FAILED;
            }
        }
    }
    return avifDecoderReset(decoder);
}
5236 | | |
// Creates a decoding codec instance for |tile| into |*codec|, honoring the user's
// |choice| and verifying that the chosen implementation can decode the tile's
// coding format. The new codec inherits the tile's operating point and allLayers.
static avifResult avifCodecCreateInternal(avifCodecChoice choice, const avifTile * tile, avifDiagnostics * diag, avifCodec ** codec)
{
#if defined(AVIF_CODEC_AVM)
    // AVIF_CODEC_CHOICE_AUTO leads to AVIF_CODEC_TYPE_AV1 by default. Reroute correctly.
    if (choice == AVIF_CODEC_CHOICE_AUTO && tile->codecType == AVIF_CODEC_TYPE_AV2) {
        choice = AVIF_CODEC_CHOICE_AVM;
    }
#endif

    const avifCodecType codecTypeFromChoice = avifCodecTypeFromChoice(choice, AVIF_CODEC_FLAG_CAN_DECODE);
    if (codecTypeFromChoice == AVIF_CODEC_TYPE_UNKNOWN) {
        avifDiagnosticsPrintf(diag,
                              "Tile type is %s but there is no compatible codec available to decode it",
                              avifGetConfigurationPropertyName(tile->codecType));
        return AVIF_RESULT_NO_CODEC_AVAILABLE;
    } else if (choice != AVIF_CODEC_CHOICE_AUTO && codecTypeFromChoice != tile->codecType) {
        // An explicitly requested codec that cannot decode this tile's format is an error.
        avifDiagnosticsPrintf(diag,
                              "Tile type is %s but incompatible %s codec was explicitly set as decoding implementation",
                              avifGetConfigurationPropertyName(tile->codecType),
                              avifCodecName(choice, AVIF_CODEC_FLAG_CAN_DECODE));
        return AVIF_RESULT_DECODE_COLOR_FAILED;
    }

    AVIF_CHECKRES(avifCodecCreate(choice, AVIF_CODEC_FLAG_CAN_DECODE, codec));
    AVIF_CHECKERR(*codec, AVIF_RESULT_OUT_OF_MEMORY);
    (*codec)->diag = diag;
    (*codec)->operatingPoint = tile->operatingPoint;
    (*codec)->allLayers = tile->input->allLayers;
    return AVIF_RESULT_OK;
}
5267 | | |
// Returns AVIF_TRUE if all tiles in |data| can share one codec instance: no plane
// "stealing" conflicts between single-tile categories, and identical operating
// point / allLayers settings across tiles.
static avifBool avifTilesCanBeDecodedWithSameCodecInstance(const avifDecoderData * data)
{
    int32_t numImageBuffers = 0, numStolenImageBuffers = 0;
    // Count the categories (color/alpha/gain map) that produce an image buffer, and
    // those whose single tile would steal the codec's internal buffer.
    for (int c = 0; c < AVIF_ITEM_CATEGORY_COUNT; ++c) {
        if (data->tileInfos[c].tileCount > 0) {
            ++numImageBuffers;
        }
#if defined(AVIF_ENABLE_EXPERIMENTAL_SAMPLE_TRANSFORM)
        // The sample operations require multiple buffers for compositing so no plane is stolen
        // when there is a 'sato' Sample Transform derived image item.
        if (c >= AVIF_SAMPLE_TRANSFORM_MIN_CATEGORY && c <= AVIF_SAMPLE_TRANSFORM_MAX_CATEGORY && data->tileInfos[c].tileCount > 0) {
            continue;
        }
#endif
        if (data->tileInfos[c].tileCount == 1) {
            ++numStolenImageBuffers;
        }
    }
    if (numStolenImageBuffers > 0 && numImageBuffers > 1) {
        // Single tile image with single tile alpha plane or gain map. In this case each tile needs its own decoder since the planes will be
        // "stolen". Stealing either the color or the alpha plane (or gain map) will invalidate the other ones when decode is called the second
        // (or third) time.
        return AVIF_FALSE;
    }
    // All tiles must also agree on operating point and allLayers, because codecs take
    // these settings once at initialization and do not allow changing them later.
    const uint8_t firstTileOperatingPoint = data->tiles.tile[0].operatingPoint;
    const avifBool firstTileAllLayers = data->tiles.tile[0].input->allLayers;
    for (unsigned int i = 1; i < data->tiles.count; ++i) {
        const avifTile * tile = &data->tiles.tile[i];
        if (tile->operatingPoint != firstTileOperatingPoint || tile->input->allLayers != firstTileAllLayers) {
            return AVIF_FALSE;
        }
        // avifDecoderItemValidateProperties() verified during avifDecoderParse() that all tiles
        // share the same coding format so no need to check for codecType equality here.
    }
    return AVIF_TRUE;
}
5304 | | |
// Destroys any existing codec instances and creates the codec(s) needed to decode
// the current tiles: either one shared instance, one per color/alpha track, or one
// per tile, depending on the source and tile configuration.
static avifResult avifDecoderCreateCodecs(avifDecoder * decoder)
{
    avifDecoderData * data = decoder->data;
    avifDecoderDataResetCodec(data);

    if (data->source == AVIF_DECODER_SOURCE_TRACKS) {
        // In this case, we will use at most two codec instances (one for the color planes and one for the alpha plane).
        // Gain maps are not supported.
        AVIF_CHECKRES(avifCodecCreateInternal(decoder->codecChoice, &decoder->data->tiles.tile[0], &decoder->diag, &data->codec));
        data->tiles.tile[0].codec = data->codec;
        if (data->tiles.count > 1) {
            AVIF_CHECKRES(avifCodecCreateInternal(decoder->codecChoice, &decoder->data->tiles.tile[1], &decoder->diag, &data->codecAlpha));
            data->tiles.tile[1].codec = data->codecAlpha;
        }
    } else {
        // In this case, we will use one codec instance when there is only one tile or when all of the following conditions are
        // met:
        //   - The image must have exactly one layer (i.e.) decoder->imageCount == 1.
        //   - All the tiles must have the same operating point (because the codecs take operating point once at initialization
        //     and do not allow it to be changed later).
        //   - All the tiles must have the same value for allLayers (because the codecs take allLayers once at initialization
        //     and do not allow it to be changed later).
        //   - If the image has a single tile, it must not have a single tile alpha plane (in this case we will steal the planes
        //     from the decoder, so we cannot use the same decoder for both the color and the alpha planes).
        //   - All tiles have the same type (AV1 or AV2).
        // Otherwise, we will use |tiles.count| decoder instances (one instance for each tile).
        avifBool canUseSingleCodecInstance = (data->tiles.count == 1) ||
                                             (decoder->imageCount == 1 && avifTilesCanBeDecodedWithSameCodecInstance(data));
        if (canUseSingleCodecInstance) {
            AVIF_CHECKRES(avifCodecCreateInternal(decoder->codecChoice, &decoder->data->tiles.tile[0], &decoder->diag, &data->codec));
            for (unsigned int i = 0; i < decoder->data->tiles.count; ++i) {
                decoder->data->tiles.tile[i].codec = data->codec;
            }
        } else {
            // One dedicated codec instance per tile.
            for (unsigned int i = 0; i < decoder->data->tiles.count; ++i) {
                avifTile * tile = &decoder->data->tiles.tile[i];
                AVIF_CHECKRES(avifCodecCreateInternal(decoder->codecChoice, tile, &decoder->diag, &tile->codec));
            }
        }
    }
    return AVIF_RESULT_OK;
}
5347 | | |
5348 | | // Returns the primary color item if found, or NULL. |
5349 | | static avifDecoderItem * avifMetaFindColorItem(avifMeta * meta) |
5350 | 14.8k | { |
5351 | 15.2k | for (uint32_t itemIndex = 0; itemIndex < meta->items.count; ++itemIndex) { |
5352 | 15.1k | avifDecoderItem * item = meta->items.item[itemIndex]; |
5353 | 15.1k | if (avifDecoderItemShouldBeSkipped(item)) { |
5354 | 367 | continue; |
5355 | 367 | } |
5356 | 14.7k | if (item->id == meta->primaryItemID) { |
5357 | 14.7k | return item; |
5358 | 14.7k | } |
5359 | 14.7k | } |
5360 | 106 | return NULL; |
5361 | 14.8k | } |
5362 | | |
5363 | | // Returns AVIF_TRUE if item is an alpha auxiliary item of the parent color |
5364 | | // item. |
5365 | | static avifBool avifDecoderItemIsAlphaAux(const avifDecoderItem * item, uint32_t colorItemId) |
5366 | 15.8k | { |
5367 | 15.8k | if (item->auxForID != colorItemId) |
5368 | 15.6k | return AVIF_FALSE; |
5369 | 215 | const avifProperty * auxCProp = avifPropertyArrayFind(&item->properties, "auxC"); |
5370 | 215 | return auxCProp && isAlphaURN(auxCProp->u.auxC.auxType); |
5371 | 15.8k | } |
5372 | | |
5373 | | // Finds the alpha item whose parent item is colorItem and sets it in the alphaItem output parameter. Returns AVIF_RESULT_OK on |
5374 | | // success. Note that *alphaItem can be NULL even if the return value is AVIF_RESULT_OK. If the colorItem is a grid and the alpha |
5375 | | // item is represented as a set of auxl items to each color tile, then a fake item will be created and *isAlphaItemInInput will be |
5376 | | // set to AVIF_FALSE. In this case, the alpha item merely exists to hold the locations of the alpha tile items. The data of this |
5377 | | // item need not be read and the pixi property cannot be validated. Otherwise, *isAlphaItemInInput will be set to AVIF_TRUE when |
5378 | | // *alphaItem is not NULL. |
static avifResult avifMetaFindAlphaItem(avifMeta * meta,
                                        const avifDecoderItem * colorItem,
                                        const avifTileInfo * colorInfo,
                                        avifDecoderItem ** alphaItem,
                                        avifTileInfo * alphaInfo,
                                        avifBool * isAlphaItemInInput)
{
    // First, look for an alpha auxiliary item attached directly to the color item.
    for (uint32_t itemIndex = 0; itemIndex < meta->items.count; ++itemIndex) {
        avifDecoderItem * item = meta->items.item[itemIndex];
        if (avifDecoderItemShouldBeSkipped(item)) {
            continue;
        }
        if (avifDecoderItemIsAlphaAux(item, colorItem->id)) {
            *alphaItem = item;
            *isAlphaItemInInput = AVIF_TRUE;
            return AVIF_RESULT_OK;
        }
    }
    // No direct alpha auxiliary item. Unless the color item is a grid, there is no alpha.
    if (memcmp(colorItem->type, "grid", 4)) {
        *alphaItem = NULL;
        *isAlphaItemInInput = AVIF_FALSE;
        return AVIF_RESULT_OK;
    }
    // If color item is a grid, check if there is an alpha channel which is represented as an auxl item to each color tile item.
    const uint32_t tileCount = colorInfo->grid.rows * colorInfo->grid.columns;
    if (tileCount == 0) {
        *alphaItem = NULL;
        *isAlphaItemInInput = AVIF_FALSE;
        return AVIF_RESULT_OK;
    }
    // Keep the same 'dimg' order as it defines where each tile is located in the reconstructed image.
    // dimgIdxToAlphaItemIdx[dimgIdx] maps a color tile's position in the 'dimg' array to the index
    // (within meta->items) of its alpha auxiliary item.
    uint32_t * dimgIdxToAlphaItemIdx = (uint32_t *)avifAlloc(tileCount * sizeof(uint32_t));
    AVIF_CHECKERR(dimgIdxToAlphaItemIdx != NULL, AVIF_RESULT_OUT_OF_MEMORY);
    const uint32_t itemIndexNotSet = UINT32_MAX; // sentinel: no alpha item recorded for this tile yet
    for (uint32_t dimgIdx = 0; dimgIdx < tileCount; ++dimgIdx) {
        dimgIdxToAlphaItemIdx[dimgIdx] = itemIndexNotSet;
    }
    uint32_t alphaItemCount = 0;
    // For every color tile (item with dimgForID == colorItem->id), find its unique alpha auxiliary item.
    for (uint32_t i = 0; i < meta->items.count; ++i) {
        const avifDecoderItem * const item = meta->items.item[i];
        if (item->dimgForID == colorItem->id) {
            avifBool seenAlphaForCurrentItem = AVIF_FALSE;
            for (uint32_t j = 0; j < meta->items.count; ++j) {
                avifDecoderItem * auxlItem = meta->items.item[j];
                if (avifDecoderItemIsAlphaAux(auxlItem, item->id)) {
                    if (seenAlphaForCurrentItem || auxlItem->dimgForID != 0 || item->dimgIdx >= tileCount ||
                        dimgIdxToAlphaItemIdx[item->dimgIdx] != itemIndexNotSet) {
                        // One of the following invalid cases:
                        // * Multiple items are claiming to be the alpha auxiliary of the current item.
                        // * Alpha auxiliary is dimg for another item.
                        // * There are too many items in the dimg array (also checked later in avifFillDimgIdxToItemIdxArray()).
                        // * There is a repetition in the dimg array (also checked later in avifFillDimgIdxToItemIdxArray()).
                        avifFree(dimgIdxToAlphaItemIdx);
                        return AVIF_RESULT_INVALID_IMAGE_GRID;
                    }
                    dimgIdxToAlphaItemIdx[item->dimgIdx] = j;
                    ++alphaItemCount;
                    seenAlphaForCurrentItem = AVIF_TRUE;
                }
            }
            if (!seenAlphaForCurrentItem) {
                // No alpha auxiliary item was found for the current item. Treat this as an image without alpha.
                avifFree(dimgIdxToAlphaItemIdx);
                *alphaItem = NULL;
                *isAlphaItemInInput = AVIF_FALSE;
                return AVIF_RESULT_OK;
            }
        }
    }
    // Every tile must have exactly one alpha auxiliary item for a full alpha grid.
    if (alphaItemCount != tileCount) {
        avifFree(dimgIdxToAlphaItemIdx);
        return AVIF_RESULT_INVALID_IMAGE_GRID;
    }
    // Find an unused ID.
    avifResult result;
    if (meta->items.count >= UINT32_MAX - 1) {
        // In the improbable case where all IDs are used.
        result = AVIF_RESULT_DECODE_ALPHA_FAILED;
    } else {
        // Linear search for the smallest item ID not already taken.
        uint32_t newItemID = 0;
        avifBool isUsed;
        do {
            ++newItemID;
            isUsed = AVIF_FALSE;
            for (uint32_t i = 0; i < meta->items.count; ++i) {
                if (meta->items.item[i]->id == newItemID) {
                    isUsed = AVIF_TRUE;
                    break;
                }
            }
        } while (isUsed && newItemID != 0); // newItemID == 0 would mean the counter wrapped around
        result = avifMetaFindOrCreateItem(meta, newItemID, alphaItem); // Create new empty item.
    }
    if (result != AVIF_RESULT_OK) {
        avifFree(dimgIdxToAlphaItemIdx);
        return result;
    }
    memcpy((*alphaItem)->type, "grid", 4); // Make it a grid and register alpha items as its tiles.
    (*alphaItem)->width = colorItem->width;
    (*alphaItem)->height = colorItem->height;
    // Re-parent each alpha tile item to the synthesized alpha grid item, preserving tile order.
    for (uint32_t dimgIdx = 0; dimgIdx < tileCount; ++dimgIdx) {
        if (dimgIdxToAlphaItemIdx[dimgIdx] >= meta->items.count) {
            avifFree(dimgIdxToAlphaItemIdx);
            AVIF_ASSERT_OR_RETURN(AVIF_FALSE);
        }
        avifDecoderItem * alphaTileItem = meta->items.item[dimgIdxToAlphaItemIdx[dimgIdx]];
        alphaTileItem->dimgForID = (*alphaItem)->id;
        alphaTileItem->dimgIdx = dimgIdx;
    }
    avifFree(dimgIdxToAlphaItemIdx);
    // The synthesized item only holds tile locations; it has no payload of its own in the input.
    *isAlphaItemInInput = AVIF_FALSE;
    alphaInfo->grid = colorInfo->grid;
    return AVIF_RESULT_OK;
}
5493 | | |
5494 | | // On success, this function returns AVIF_RESULT_OK and does the following: |
5495 | | // * If a nclx property was found in |properties|: |
5496 | | // - Set |*colorPrimaries|, |*transferCharacteristics|, |*matrixCoefficients| |
5497 | | // and |*yuvRange|. |
5498 | | // - If cicpSet is not NULL, set |*cicpSet| to AVIF_TRUE. |
5499 | | // This function fails if more than one nclx property is found in |properties|. |
5500 | | // The output parameters may be populated even in case of failure and must be |
5501 | | // ignored. |
5502 | | static avifResult avifReadColorNclxProperty(const avifPropertyArray * properties, |
5503 | | avifColorPrimaries * colorPrimaries, |
5504 | | avifTransferCharacteristics * transferCharacteristics, |
5505 | | avifMatrixCoefficients * matrixCoefficients, |
5506 | | avifRange * yuvRange, |
5507 | | avifBool * cicpSet) |
5508 | 14.4k | { |
5509 | 14.4k | avifBool colrNCLXSeen = AVIF_FALSE; |
5510 | 67.7k | for (uint32_t propertyIndex = 0; propertyIndex < properties->count; ++propertyIndex) { |
5511 | 53.3k | avifProperty * prop = &properties->prop[propertyIndex]; |
5512 | 53.3k | if (!memcmp(prop->type, "colr", 4) && prop->u.colr.hasNCLX) { |
5513 | 1.16k | if (colrNCLXSeen) { |
5514 | 2 | return AVIF_RESULT_BMFF_PARSE_FAILED; |
5515 | 2 | } |
5516 | 1.16k | colrNCLXSeen = AVIF_TRUE; |
5517 | 1.16k | if (cicpSet != NULL) { |
5518 | 1.16k | *cicpSet = AVIF_TRUE; |
5519 | 1.16k | } |
5520 | 1.16k | *colorPrimaries = prop->u.colr.colorPrimaries; |
5521 | 1.16k | *transferCharacteristics = prop->u.colr.transferCharacteristics; |
5522 | 1.16k | *matrixCoefficients = prop->u.colr.matrixCoefficients; |
5523 | 1.16k | *yuvRange = prop->u.colr.range; |
5524 | 1.16k | } |
5525 | 53.3k | } |
5526 | 14.4k | return AVIF_RESULT_OK; |
5527 | 14.4k | } |
5528 | | |
5529 | | // On success, this function returns AVIF_RESULT_OK and does the following: |
5530 | | // * If a colr property was found in |properties|: |
5531 | | // - Read the icc data into |icc| from |io|. |
5532 | | // - Sets the CICP values as documented in avifReadColorNclxProperty(). |
5533 | | // This function fails if more than one icc or nclx property is found in |
5534 | | // |properties|. The output parameters may be populated even in case of failure |
5535 | | // and must be ignored (and the |icc| object may need to be freed). |
5536 | | static avifResult avifReadColorProperties(avifIO * io, |
5537 | | const avifPropertyArray * properties, |
5538 | | avifRWData * icc, |
5539 | | avifColorPrimaries * colorPrimaries, |
5540 | | avifTransferCharacteristics * transferCharacteristics, |
5541 | | avifMatrixCoefficients * matrixCoefficients, |
5542 | | avifRange * yuvRange, |
5543 | | avifBool * cicpSet) |
5544 | 14.4k | { |
5545 | | // Find and adopt all colr boxes "at most one for a given value of colour type" (HEIF 6.5.5.1, from Amendment 3) |
5546 | | // Accept one of each type, and bail out if more than one of a given type is provided. |
5547 | 14.4k | avifBool colrICCSeen = AVIF_FALSE; |
5548 | 67.7k | for (uint32_t propertyIndex = 0; propertyIndex < properties->count; ++propertyIndex) { |
5549 | 53.3k | avifProperty * prop = &properties->prop[propertyIndex]; |
5550 | 53.3k | if (!memcmp(prop->type, "colr", 4) && prop->u.colr.hasICC) { |
5551 | 932 | if (colrICCSeen) { |
5552 | 1 | return AVIF_RESULT_BMFF_PARSE_FAILED; |
5553 | 1 | } |
5554 | 931 | avifROData iccRead; |
5555 | 931 | AVIF_CHECKRES(io->read(io, 0, prop->u.colr.iccOffset, prop->u.colr.iccSize, &iccRead)); |
5556 | 931 | colrICCSeen = AVIF_TRUE; |
5557 | 931 | AVIF_CHECKRES(avifRWDataSet(icc, iccRead.data, iccRead.size)); |
5558 | 931 | } |
5559 | 53.3k | } |
5560 | 14.4k | return avifReadColorNclxProperty(properties, colorPrimaries, transferCharacteristics, matrixCoefficients, yuvRange, cicpSet); |
5561 | 14.4k | } |
5562 | | |
5563 | | // Finds a 'tmap' (tone mapped image item) box associated with the given 'colorItem'. |
5564 | | // If found, fills 'toneMappedImageItem' and sets 'gainMapItemID' to the id of the gain map |
5565 | | // item associated with the box. Otherwise, sets 'toneMappedImageItem' to NULL. |
5566 | | // Returns AVIF_RESULT_OK if no errors were encountered (whether or not a tmap box was found). |
5567 | | // Assumes that there is a single tmap item, and not, e.g., a grid of tmap items. |
5568 | | // TODO(maryla): add support for files with multiple tmap items if it gets allowed by the spec. |
static avifResult avifDecoderDataFindToneMappedImageItem(const avifDecoderData * data,
                                                         const avifDecoderItem * colorItem,
                                                         avifDecoderItem ** toneMappedImageItem,
                                                         uint32_t * gainMapItemID)
{
    for (uint32_t itemIndex = 0; itemIndex < data->meta->items.count; ++itemIndex) {
        avifDecoderItem * item = data->meta->items.item[itemIndex];
        // Skip empty items, items with unsupported essential properties, and thumbnails.
        if (!item->size || item->hasUnsupportedEssentialProperty || item->thumbnailForID != 0) {
            continue;
        }
        if (!memcmp(item->type, "tmap", 4)) {
            // The tmap box should be associated (via 'iref'->'dimg') to two items:
            // the first one is the base image, the second one is the gain map.
            uint32_t dimgItemIDs[2] = { 0, 0 };
            uint32_t numDimgItemIDs = 0;
            // Collect the ids of the items referenced by this tmap item's 'dimg' box,
            // in 'dimg' order (dimgIdx 0 = base image, dimgIdx 1 = gain map).
            for (uint32_t otherItemIndex = 0; otherItemIndex < data->meta->items.count; ++otherItemIndex) {
                avifDecoderItem * otherItem = data->meta->items.item[otherItemIndex];
                if (otherItem->dimgForID != item->id) {
                    continue;
                }
                if (otherItem->dimgIdx < 2) {
                    AVIF_ASSERT_OR_RETURN(dimgItemIDs[otherItem->dimgIdx] == 0);
                    dimgItemIDs[otherItem->dimgIdx] = otherItem->id;
                }
                numDimgItemIDs++;
            }
            // Even with numDimgItemIDs == 2, one of the ids could be 0 if there are duplicate entries in the 'dimg' box.
            if (numDimgItemIDs != 2 || dimgItemIDs[0] == 0 || dimgItemIDs[1] == 0) {
                avifDiagnosticsPrintf(data->diag, "box[dimg] for 'tmap' item %d must have exactly 2 entries with distinct ids", item->id);
                return AVIF_RESULT_INVALID_TONE_MAPPED_IMAGE;
            }
            // This tmap item is based on a different color item; keep looking.
            if (dimgItemIDs[0] != colorItem->id) {
                continue;
            }

            *toneMappedImageItem = item;
            *gainMapItemID = dimgItemIDs[1];
            return AVIF_RESULT_OK;
        }
    }
    // No tmap item associated with colorItem was found; not an error.
    *toneMappedImageItem = NULL;
    *gainMapItemID = 0;
    return AVIF_RESULT_OK;
}
5613 | | |
5614 | | // Returns AVIF_TRUE if the two entity ids (usually item ids) are part of an |
5615 | | // 'altr' group (representing entities that are alternatives of each other) |
5616 | | // with 'id1' appearing before 'id2' (meaning that 'id1' should be preferred). |
5617 | | static avifBool avifIsPreferredAlternativeTo(const avifDecoderData * data, uint32_t id1, uint32_t id2) |
5618 | 0 | { |
5619 | 0 | for (uint32_t i = 0; i < data->meta->entityToGroups.count; ++i) { |
5620 | 0 | avifEntityToGroup * group = &data->meta->entityToGroups.groups[i]; |
5621 | 0 | if (memcmp(group->groupingType, "altr", 4) != 0) { |
5622 | 0 | continue; |
5623 | 0 | } |
5624 | 0 | avifBool id1Found = AVIF_FALSE; |
5625 | 0 | for (uint32_t j = 0; j < group->entityIDs.count; ++j) { |
5626 | 0 | if (group->entityIDs.ids[j] == id1) { |
5627 | 0 | id1Found = AVIF_TRUE; |
5628 | 0 | } else if (group->entityIDs.ids[j] == id2) { |
5629 | | // Assume id2 is only present in one altr group, as per ISO/IEC 14496-12:2022 |
5630 | | // Section 8.15.3.1: |
5631 | | // Any entity_id value shall be mapped to only one grouping of type 'altr'. |
5632 | 0 | return id1Found; |
5633 | 0 | } |
5634 | 0 | } |
5635 | 0 | } |
5636 | 0 | return AVIF_FALSE; |
5637 | 0 | } |
5638 | | |
5639 | | // Finds a 'tmap' (tone mapped image item) box associated with the given 'colorItem', |
5640 | | // then finds the associated gain map image. |
5641 | | // If found, fills 'toneMappedImageItem', 'gainMapItem' and 'gainMapCodecType', and |
5642 | | // allocates and fills metadata in decoder->image->gainMap. |
5643 | | // Otherwise, sets 'toneMappedImageItem' and 'gainMapItem' to NULL. |
5644 | | // Returns AVIF_RESULT_OK if no errors were encountered (whether or not a gain map was found). |
5645 | | // Assumes that there is a single tmap item, and not, e.g., a grid of tmap items. |
static avifResult avifDecoderFindGainMapItem(const avifDecoder * decoder,
                                             const avifDecoderItem * colorItem,
                                             avifDecoderItem ** toneMappedImageItem,
                                             avifDecoderItem ** gainMapItem,
                                             avifCodecType * gainMapCodecType)
{
    // Default outputs: no tone-mapped item / gain map found.
    *toneMappedImageItem = NULL;
    *gainMapItem = NULL;
    *gainMapCodecType = AVIF_CODEC_TYPE_UNKNOWN;

    avifDecoderData * data = decoder->data;

    uint32_t gainMapItemID;
    avifDecoderItem * toneMappedImageItemTmp;
    AVIF_CHECKRES(avifDecoderDataFindToneMappedImageItem(data, colorItem, &toneMappedImageItemTmp, &gainMapItemID));
    if (!toneMappedImageItemTmp) {
        // No tmap item for this color item; not an error.
        return AVIF_RESULT_OK;
    }

    // Only use the tmap item if it is marked as preferred over the base image
    // via an 'altr' group; otherwise ignore it.
    if (!avifIsPreferredAlternativeTo(data, toneMappedImageItemTmp->id, colorItem->id)) {
        return AVIF_RESULT_OK;
    }

    AVIF_ASSERT_OR_RETURN(gainMapItemID != 0);
    avifDecoderItem * gainMapItemTmp;
    AVIF_CHECKRES(avifMetaFindOrCreateItem(data->meta, gainMapItemID, &gainMapItemTmp));
    if (avifDecoderItemShouldBeSkipped(gainMapItemTmp)) {
        avifDiagnosticsPrintf(data->diag, "Box[tmap] gain map item %d is not a supported image type", gainMapItemID);
        return AVIF_RESULT_INVALID_TONE_MAPPED_IMAGE;
    }

    // Parse the gain map item (possibly a grid) and determine its codec type.
    AVIF_CHECKRES(avifDecoderItemReadAndParse(decoder,
                                              gainMapItemTmp,
                                              /*isItemInInput=*/AVIF_TRUE,
                                              &data->tileInfos[AVIF_ITEM_GAIN_MAP].grid,
                                              gainMapCodecType));

    decoder->image->gainMap = avifGainMapCreate();
    AVIF_CHECKERR(decoder->image->gainMap, AVIF_RESULT_OUT_OF_MEMORY);

    // The tmap item's color properties describe the fully tone-mapped
    // "alternate" rendition; copy them into the gain map's alt* fields.
    avifGainMap * const gainMap = decoder->image->gainMap;
    AVIF_CHECKRES(avifReadColorProperties(decoder->io,
                                          &toneMappedImageItemTmp->properties,
                                          &gainMap->altICC,
                                          &gainMap->altColorPrimaries,
                                          &gainMap->altTransferCharacteristics,
                                          &gainMap->altMatrixCoefficients,
                                          &gainMap->altYUVRange,
                                          /*cicpSet=*/NULL));

    // Optional content light level info for the alternate rendition.
    const avifProperty * clliProp = avifPropertyArrayFind(&toneMappedImageItemTmp->properties, "clli");
    if (clliProp) {
        gainMap->altCLLI = clliProp->u.clli;
    }

    // Optional pixel information (plane count / bit depth) for the alternate rendition.
    const avifProperty * pixiProp = avifPropertyArrayFind(&toneMappedImageItemTmp->properties, "pixi");
    if (pixiProp) {
        gainMap->altPlaneCount = pixiProp->u.pixi.planeCount;
        gainMap->altDepth = pixiProp->u.pixi.planeDepths[0];
    }

    const avifProperty * ispeProp = avifPropertyArrayFind(&toneMappedImageItemTmp->properties, "ispe");
    if (!ispeProp) {
        // HEIF (ISO/IEC 23008-12:2022), Section 6.5.3.1:
        // Every image item shall be associated with one property of this type, prior to the association
        // of all transformative properties.
        avifDiagnosticsPrintf(data->diag, "Box[tmap] missing mandatory ispe property");
        return AVIF_RESULT_BMFF_PARSE_FAILED;
    }
    // The tmap item's declared dimensions must match the base image.
    if (ispeProp->u.ispe.width != colorItem->width || ispeProp->u.ispe.height != colorItem->height) {
        avifDiagnosticsPrintf(data->diag, "Box[tmap] ispe property width/height does not match base image");
        return AVIF_RESULT_BMFF_PARSE_FAILED;
    }

    if (avifPropertyArrayFind(&toneMappedImageItemTmp->properties, "pasp") ||
        avifPropertyArrayFind(&toneMappedImageItemTmp->properties, "clap") ||
        avifPropertyArrayFind(&toneMappedImageItemTmp->properties, "irot") ||
        avifPropertyArrayFind(&toneMappedImageItemTmp->properties, "imir")) {
        // libavif requires the bitstream contain the same pasp, clap, irot, imir
        // properties for both the base and gain map image items used as input to
        // the tone-mapped derived image item. libavif also requires the tone-mapped
        // derived image item itself not be associated with these properties. This is
        // enforced at encoding. Other patterns are rejected at decoding.
        avifDiagnosticsPrintf(data->diag,
                              "Box[tmap] 'pasp', 'clap', 'irot' and 'imir' properties must be associated with base and gain map items instead of 'tmap'");
        return AVIF_RESULT_INVALID_TONE_MAPPED_IMAGE;
    }

    if (decoder->imageContentToDecode & AVIF_IMAGE_CONTENT_GAIN_MAP) {
        gainMap->image = avifImageCreateEmpty();
        AVIF_CHECKERR(gainMap->image, AVIF_RESULT_OUT_OF_MEMORY);

        // Look for a colr nclx box. Other colr box types (e.g. ICC) are not supported.
        AVIF_CHECKRES(avifReadColorNclxProperty(&gainMapItemTmp->properties,
                                                &gainMap->image->colorPrimaries,
                                                &gainMap->image->transferCharacteristics,
                                                &gainMap->image->matrixCoefficients,
                                                &gainMap->image->yuvRange,
                                                /*cicpSet=*/NULL));
    }

    // Only set the output parameters after everything has been validated.
    *toneMappedImageItem = toneMappedImageItemTmp;
    *gainMapItem = gainMapItemTmp;
    return AVIF_RESULT_OK;
}
5752 | | |
// Checks that the transformative properties ('pasp', 'clap', 'irot', 'imir')
// associated with the gain map item exactly match those already applied to the
// decoded base image. Returns AVIF_RESULT_DECODE_GAIN_MAP_FAILED on mismatch.
// NOTE(review): the "avi" (not "avif") prefix in the name looks like a typo,
// but renaming would break callers elsewhere in the file.
static avifResult aviDecoderCheckGainMapProperties(avifDecoder * decoder, const avifPropertyArray * gainMapProperties)
{
    const avifImage * image = decoder->image;
    // libavif requires the bitstream contain the same 'pasp', 'clap', 'irot', 'imir'
    // properties for both the base and gain map image items used as input to
    // the tone-mapped derived image item. libavif also requires the tone-mapped
    // derived image item itself not be associated with these properties. This is
    // enforced at encoding. Other patterns are rejected at decoding.
    // Each check below verifies both presence (property present on one side but
    // not the other) and, when present on both, value equality.
    const avifProperty * paspProp = avifPropertyArrayFind(gainMapProperties, "pasp");
    if (!paspProp != !(image->transformFlags & AVIF_TRANSFORM_PASP) ||
        (paspProp && (paspProp->u.pasp.hSpacing != image->pasp.hSpacing || paspProp->u.pasp.vSpacing != image->pasp.vSpacing))) {
        avifDiagnosticsPrintf(&decoder->diag,
                              "Pixel aspect ratio property mismatch between input items of tone-mapping derived image item");
        return AVIF_RESULT_DECODE_GAIN_MAP_FAILED;
    }
    const avifProperty * clapProp = avifPropertyArrayFind(gainMapProperties, "clap");
    if (!clapProp != !(image->transformFlags & AVIF_TRANSFORM_CLAP) ||
        (clapProp && (clapProp->u.clap.widthN != image->clap.widthN || clapProp->u.clap.widthD != image->clap.widthD ||
                      clapProp->u.clap.heightN != image->clap.heightN || clapProp->u.clap.heightD != image->clap.heightD ||
                      clapProp->u.clap.horizOffN != image->clap.horizOffN || clapProp->u.clap.horizOffD != image->clap.horizOffD ||
                      clapProp->u.clap.vertOffN != image->clap.vertOffN || clapProp->u.clap.vertOffD != image->clap.vertOffD))) {
        avifDiagnosticsPrintf(&decoder->diag, "Clean aperture property mismatch between input items of tone-mapping derived image item");
        return AVIF_RESULT_DECODE_GAIN_MAP_FAILED;
    }
    const avifProperty * irotProp = avifPropertyArrayFind(gainMapProperties, "irot");
    if (!irotProp != !(image->transformFlags & AVIF_TRANSFORM_IROT) || (irotProp && irotProp->u.irot.angle != image->irot.angle)) {
        avifDiagnosticsPrintf(&decoder->diag, "Rotation property mismatch between input items of tone-mapping derived image item");
        return AVIF_RESULT_DECODE_GAIN_MAP_FAILED;
    }
    const avifProperty * imirProp = avifPropertyArrayFind(gainMapProperties, "imir");
    if (!imirProp != !(image->transformFlags & AVIF_TRANSFORM_IMIR) || (imirProp && imirProp->u.imir.axis != image->imir.axis)) {
        avifDiagnosticsPrintf(&decoder->diag, "Mirroring property mismatch between input items of tone-mapping derived image item");
        return AVIF_RESULT_DECODE_GAIN_MAP_FAILED;
    }
    return AVIF_RESULT_OK;
}
5789 | | |
5790 | | #if defined(AVIF_ENABLE_EXPERIMENTAL_SAMPLE_TRANSFORM) |
5791 | | // Finds a 'sato' Sample Transform derived image item box. |
5792 | | // If found, fills 'sampleTransformItem'. Otherwise, sets 'sampleTransformItem' to NULL. |
5793 | | // Returns AVIF_RESULT_OK on success (whether or not a 'sato' box was found). |
5794 | | // Assumes that there is a single 'sato' item. |
5795 | | // Assumes that the 'sato' item is not the primary item and that both the primary item and 'sato' |
5796 | | // are in the same 'altr' group. |
5797 | | // TODO(yguyon): Check instead of assuming. |
5798 | | static avifResult avifDecoderDataFindSampleTransformImageItem(avifDecoderData * data, avifDecoderItem ** sampleTransformItem) |
5799 | | { |
5800 | | for (uint32_t itemIndex = 0; itemIndex < data->meta->items.count; ++itemIndex) { |
5801 | | avifDecoderItem * item = data->meta->items.item[itemIndex]; |
5802 | | if (!item->size || item->hasUnsupportedEssentialProperty || item->thumbnailForID != 0) { |
5803 | | continue; |
5804 | | } |
5805 | | if (!memcmp(item->type, "sato", 4)) { |
5806 | | *sampleTransformItem = item; |
5807 | | return AVIF_RESULT_OK; |
5808 | | } |
5809 | | } |
5810 | | *sampleTransformItem = NULL; |
5811 | | return AVIF_RESULT_OK; |
5812 | | } |
5813 | | #endif // AVIF_ENABLE_EXPERIMENTAL_SAMPLE_TRANSFORM |
5814 | | |
// Creates the decode tiles for 'item' (either one tile per grid cell, or a single
// tile for a plain coded item) and records how many tiles were added in info->tileCount.
static avifResult avifDecoderGenerateImageTiles(avifDecoder * decoder, avifTileInfo * info, avifDecoderItem * item, avifItemCategory itemCategory)
{
    // Remember the global tile count so the number of tiles added here can be derived at the end.
    const uint32_t previousTileCount = decoder->data->tiles.count;
    if ((info->grid.rows > 0) && (info->grid.columns > 0)) {
        // Grid item: one tile per cell, in 'dimg' order.
        // The number of tiles was verified in avifDecoderItemReadAndParse().
        const uint32_t numTiles = info->grid.rows * info->grid.columns;
        uint32_t * dimgIdxToItemIdx = (uint32_t *)avifAlloc(numTiles * sizeof(uint32_t));
        AVIF_CHECKERR(dimgIdxToItemIdx != NULL, AVIF_RESULT_OUT_OF_MEMORY);
        // Defer the error return until after the buffer is freed.
        avifResult result = avifFillDimgIdxToItemIdxArray(dimgIdxToItemIdx, numTiles, item);
        if (result == AVIF_RESULT_OK) {
            result = avifDecoderGenerateImageGridTiles(decoder, item, itemCategory, dimgIdxToItemIdx, numTiles);
        }
        avifFree(dimgIdxToItemIdx);
        AVIF_CHECKRES(result);
    } else {
        // Plain (non-grid) coded item: create a single tile fed by the item's data.
        AVIF_CHECKERR(item->size != 0, AVIF_RESULT_MISSING_IMAGE_ITEM);

        const avifCodecType codecType = avifGetCodecType(item->type);
        AVIF_ASSERT_OR_RETURN(codecType != AVIF_CODEC_TYPE_UNKNOWN);
        avifTile * tile =
            avifDecoderDataCreateTile(decoder->data, codecType, item->width, item->height, avifDecoderItemOperatingPoint(item));
        AVIF_CHECKERR(tile, AVIF_RESULT_OUT_OF_MEMORY);
        AVIF_CHECKRES(avifCodecDecodeInputFillFromDecoderItem(tile->input,
                                                              item,
                                                              decoder->allowProgressive,
                                                              decoder->imageCountLimit,
                                                              decoder->io->sizeHint,
                                                              &decoder->diag));
        tile->input->itemCategory = itemCategory;
    }
    info->tileCount = decoder->data->tiles.count - previousTileCount;
    return AVIF_RESULT_OK;
}
5848 | | |
5849 | | // Populates depth, yuvFormat and yuvChromaSamplePosition fields on 'image' based on data from the codec config property (e.g. "av1C"). |
5850 | | static avifResult avifReadCodecConfigProperty(avifImage * image, const avifPropertyArray * properties, avifCodecType codecType) |
5851 | 14.1k | { |
5852 | 14.1k | const avifProperty * configProp = avifPropertyArrayFind(properties, avifGetConfigurationPropertyName(codecType)); |
5853 | 14.1k | if (configProp) { |
5854 | 14.1k | image->depth = avifCodecConfigurationBoxGetDepth(&configProp->u.av1C); |
5855 | 14.1k | if (configProp->u.av1C.monochrome) { |
5856 | 1.68k | image->yuvFormat = AVIF_PIXEL_FORMAT_YUV400; |
5857 | 12.4k | } else { |
5858 | 12.4k | if (configProp->u.av1C.chromaSubsamplingX && configProp->u.av1C.chromaSubsamplingY) { |
5859 | 903 | image->yuvFormat = AVIF_PIXEL_FORMAT_YUV420; |
5860 | 11.5k | } else if (configProp->u.av1C.chromaSubsamplingX) { |
5861 | 864 | image->yuvFormat = AVIF_PIXEL_FORMAT_YUV422; |
5862 | 10.7k | } else { |
5863 | 10.7k | image->yuvFormat = AVIF_PIXEL_FORMAT_YUV444; |
5864 | 10.7k | } |
5865 | 12.4k | } |
5866 | 14.1k | image->yuvChromaSamplePosition = (avifChromaSamplePosition)configProp->u.av1C.chromaSamplePosition; |
5867 | 14.1k | } else { |
5868 | | // A configuration property box is mandatory in all valid AVIF configurations. Bail out. |
5869 | 16 | return AVIF_RESULT_BMFF_PARSE_FAILED; |
5870 | 16 | } |
5871 | 14.1k | return AVIF_RESULT_OK; |
5872 | 14.1k | } |
5873 | | |
5874 | | avifResult avifDecoderReset(avifDecoder * decoder) |
5875 | 14.9k | { |
5876 | 14.9k | avifDiagnosticsClearError(&decoder->diag); |
5877 | | |
5878 | 14.9k | avifDecoderData * data = decoder->data; |
5879 | 14.9k | if (!data) { |
5880 | | // Nothing to reset. |
5881 | 0 | return AVIF_RESULT_OK; |
5882 | 0 | } |
5883 | | |
5884 | 59.8k | for (int c = 0; c < AVIF_ITEM_CATEGORY_COUNT; ++c) { |
5885 | 44.8k | memset(&data->tileInfos[c].grid, 0, sizeof(data->tileInfos[c].grid)); |
5886 | 44.8k | } |
5887 | 14.9k | avifDecoderDataClearTiles(data); |
5888 | | |
5889 | | // Prepare / cleanup decoded image state |
5890 | 14.9k | if (decoder->image) { |
5891 | 0 | avifImageDestroy(decoder->image); |
5892 | 0 | } |
5893 | 14.9k | decoder->image = avifImageCreateEmpty(); |
5894 | 14.9k | AVIF_CHECKERR(decoder->image, AVIF_RESULT_OUT_OF_MEMORY); |
5895 | 14.9k | decoder->progressiveState = AVIF_PROGRESSIVE_STATE_UNAVAILABLE; |
5896 | 14.9k | data->cicpSet = AVIF_FALSE; |
5897 | | |
5898 | 14.9k | memset(&decoder->ioStats, 0, sizeof(decoder->ioStats)); |
5899 | | |
5900 | | // ----------------------------------------------------------------------- |
5901 | | // Build decode input |
5902 | | |
5903 | 14.9k | data->sourceSampleTable = NULL; // Reset |
5904 | 14.9k | if (decoder->requestedSource == AVIF_DECODER_SOURCE_AUTO) { |
5905 | | // Honor the major brand (avif or avis) if present, otherwise prefer avis (tracks) if possible. |
5906 | 14.9k | if (!memcmp(data->majorBrand, "avis", 4)) { |
5907 | 14 | data->source = AVIF_DECODER_SOURCE_TRACKS; |
5908 | 14.9k | } else if (!memcmp(data->majorBrand, "avif", 4)) { |
5909 | 10.9k | data->source = AVIF_DECODER_SOURCE_PRIMARY_ITEM; |
5910 | 10.9k | } else if (data->tracks.count > 0) { |
5911 | 51 | data->source = AVIF_DECODER_SOURCE_TRACKS; |
5912 | 3.95k | } else { |
5913 | 3.95k | data->source = AVIF_DECODER_SOURCE_PRIMARY_ITEM; |
5914 | 3.95k | } |
5915 | 14.9k | } else { |
5916 | 0 | data->source = decoder->requestedSource; |
5917 | 0 | } |
5918 | | |
5919 | 14.9k | avifCodecType colorCodecType = AVIF_CODEC_TYPE_UNKNOWN; |
5920 | 14.9k | const avifPropertyArray * colorProperties = NULL; |
5921 | 14.9k | const avifPropertyArray * gainMapProperties = NULL; |
5922 | 14.9k | if (data->source == AVIF_DECODER_SOURCE_TRACKS) { |
5923 | 65 | avifTrack * colorTrack = NULL; |
5924 | 65 | avifTrack * alphaTrack = NULL; |
5925 | | |
5926 | | // Find primary track - this probably needs some better detection |
5927 | 65 | uint32_t colorTrackIndex = 0; |
5928 | 76 | for (; colorTrackIndex < data->tracks.count; ++colorTrackIndex) { |
5929 | 67 | avifTrack * track = &data->tracks.track[colorTrackIndex]; |
5930 | 67 | if (!track->sampleTable) { |
5931 | 5 | continue; |
5932 | 5 | } |
5933 | 62 | if (!track->id) { // trak box might be missing a tkhd box inside, skip it |
5934 | 1 | continue; |
5935 | 1 | } |
5936 | 61 | if (!track->sampleTable->chunks.count) { |
5937 | 1 | continue; |
5938 | 1 | } |
5939 | 60 | colorCodecType = avifSampleTableGetCodecType(track->sampleTable); |
5940 | 60 | if (colorCodecType == AVIF_CODEC_TYPE_UNKNOWN) { |
5941 | 4 | continue; |
5942 | 4 | } |
5943 | 56 | if (track->auxForID != 0) { |
5944 | 0 | continue; |
5945 | 0 | } |
5946 | | // HEIF (ISO/IEC 23008-12:2022), Section 7.1: |
5947 | | // In order to distinguish image sequences from video, the handler type in the |
5948 | | // HandlerBox of the track is 'pict' to indicate an image sequence track. |
5949 | | // But we do not check the handler type because it may break some existing files. |
5950 | | |
5951 | | // Found one! |
5952 | 56 | break; |
5953 | 56 | } |
5954 | 65 | if (colorTrackIndex == data->tracks.count) { |
5955 | 9 | avifDiagnosticsPrintf(&decoder->diag, "Failed to find AV1 color track"); |
5956 | 9 | return AVIF_RESULT_NO_CONTENT; |
5957 | 9 | } |
5958 | 56 | colorTrack = &data->tracks.track[colorTrackIndex]; |
5959 | | |
5960 | 56 | colorProperties = avifSampleTableGetProperties(colorTrack->sampleTable, colorCodecType); |
5961 | 56 | if (!colorProperties) { |
5962 | 0 | avifDiagnosticsPrintf(&decoder->diag, "Failed to find AV1 color track's color properties"); |
5963 | 0 | return AVIF_RESULT_BMFF_PARSE_FAILED; |
5964 | 0 | } |
5965 | | |
5966 | | // Find Exif and/or XMP metadata, if any |
5967 | 56 | if (colorTrack->meta) { |
5968 | | // See the comment above avifDecoderFindMetadata() for the explanation of using 0 here |
5969 | 56 | avifResult findResult = avifDecoderFindMetadata(decoder, colorTrack->meta, decoder->image, 0); |
5970 | 56 | if (findResult != AVIF_RESULT_OK) { |
5971 | 0 | return findResult; |
5972 | 0 | } |
5973 | 56 | } |
5974 | | |
5975 | 56 | uint32_t alphaTrackIndex = 0; |
5976 | 56 | avifCodecType alphaCodecType = AVIF_CODEC_TYPE_UNKNOWN; |
5977 | 133 | for (; alphaTrackIndex < data->tracks.count; ++alphaTrackIndex) { |
5978 | 77 | avifTrack * track = &data->tracks.track[alphaTrackIndex]; |
5979 | 77 | if (!track->sampleTable) { |
5980 | 21 | continue; |
5981 | 21 | } |
5982 | 56 | if (!track->id) { |
5983 | 0 | continue; |
5984 | 0 | } |
5985 | 56 | if (!track->sampleTable->chunks.count) { |
5986 | 0 | continue; |
5987 | 0 | } |
5988 | 56 | alphaCodecType = avifSampleTableGetCodecType(track->sampleTable); |
5989 | 56 | if (alphaCodecType == AVIF_CODEC_TYPE_UNKNOWN) { |
5990 | 0 | continue; |
5991 | 0 | } |
5992 | 56 | const avifPropertyArray * alphaProperties = avifSampleTableGetProperties(track->sampleTable, alphaCodecType); |
5993 | 56 | const avifProperty * auxiProp = alphaProperties ? avifPropertyArrayFind(alphaProperties, "auxi") : NULL; |
5994 | | // If auxi is present, check that it contains the alpha URN. |
5995 | | // If auxi is not present, assume that the track is alpha. This is for backward compatibility with |
5996 | | // old versions of libavif that did not write this property, see |
5997 | | // https://github.com/AOMediaCodec/libavif/commit/98faa17 |
5998 | 56 | if (auxiProp && !isAlphaURN(auxiProp->u.auxC.auxType)) { |
5999 | 2 | continue; |
6000 | 2 | } |
6001 | | // Do not check the track's handlerType. It should be "auxv" according to |
6002 | | // HEIF (ISO/IEC 23008-12:2022), Section 7.5.3.1, but old versions of libavif used to write |
6003 | | // "pict" instead. See https://github.com/AOMediaCodec/libavif/commit/65d0af9 |
6004 | | |
6005 | 54 | if (track->auxForID == colorTrack->id) { |
6006 | | // Found it! |
6007 | 0 | break; |
6008 | 0 | } |
6009 | 54 | } |
6010 | 56 | if (alphaTrackIndex != data->tracks.count) { |
6011 | 0 | alphaTrack = &data->tracks.track[alphaTrackIndex]; |
6012 | 0 | } |
6013 | | |
6014 | 56 | const uint8_t operatingPoint = 0; // No way to set operating point via tracks |
6015 | 56 | avifTile * colorTile = avifDecoderDataCreateTile(data, colorCodecType, colorTrack->width, colorTrack->height, operatingPoint); |
6016 | 56 | AVIF_CHECKERR(colorTile != NULL, AVIF_RESULT_OUT_OF_MEMORY); |
6017 | 56 | AVIF_CHECKRES(avifCodecDecodeInputFillFromSampleTable(colorTile->input, |
6018 | 56 | colorTrack->sampleTable, |
6019 | 56 | decoder->imageCountLimit, |
6020 | 56 | decoder->io->sizeHint, |
6021 | 56 | data->diag)); |
6022 | 24 | data->tileInfos[AVIF_ITEM_COLOR].tileCount = 1; |
6023 | | |
6024 | 24 | if (alphaTrack) { |
6025 | 0 | avifTile * alphaTile = avifDecoderDataCreateTile(data, alphaCodecType, alphaTrack->width, alphaTrack->height, operatingPoint); |
6026 | 0 | AVIF_CHECKERR(alphaTile != NULL, AVIF_RESULT_OUT_OF_MEMORY); |
6027 | 0 | AVIF_CHECKRES(avifCodecDecodeInputFillFromSampleTable(alphaTile->input, |
6028 | 0 | alphaTrack->sampleTable, |
6029 | 0 | decoder->imageCountLimit, |
6030 | 0 | decoder->io->sizeHint, |
6031 | 0 | data->diag)); |
6032 | 0 | alphaTile->input->itemCategory = AVIF_ITEM_ALPHA; |
6033 | 0 | data->tileInfos[AVIF_ITEM_ALPHA].tileCount = 1; |
6034 | 0 | } |
6035 | | |
6036 | | // Stash off sample table for future timing information |
6037 | 24 | data->sourceSampleTable = colorTrack->sampleTable; |
6038 | | |
6039 | | // Image sequence timing |
6040 | 24 | decoder->imageIndex = -1; |
6041 | 24 | decoder->imageCount = (int)colorTile->input->samples.count; |
6042 | 24 | decoder->timescale = colorTrack->mediaTimescale; |
6043 | 24 | decoder->durationInTimescales = colorTrack->mediaDuration; |
6044 | 24 | if (colorTrack->mediaTimescale) { |
6045 | 14 | decoder->duration = (double)decoder->durationInTimescales / (double)colorTrack->mediaTimescale; |
6046 | 14 | } else { |
6047 | 10 | decoder->duration = 0; |
6048 | 10 | } |
6049 | | // If the alphaTrack->repetitionCount and colorTrack->repetitionCount are different, we will simply use the |
6050 | | // colorTrack's repetitionCount. |
6051 | 24 | decoder->repetitionCount = colorTrack->repetitionCount; |
6052 | | |
6053 | 24 | memset(&decoder->imageTiming, 0, sizeof(decoder->imageTiming)); // to be set in avifDecoderNextImage() |
6054 | | |
6055 | 24 | decoder->image->width = colorTrack->width; |
6056 | 24 | decoder->image->height = colorTrack->height; |
6057 | 24 | decoder->alphaPresent = (alphaTrack != NULL); |
6058 | 24 | decoder->image->alphaPremultiplied = decoder->alphaPresent && (colorTrack->premByID == alphaTrack->id); |
6059 | 14.8k | } else { |
6060 | | // Create from items |
6061 | | |
6062 | 14.8k | if (data->meta->primaryItemID == 0) { |
6063 | | // A primary item is required |
6064 | 69 | avifDiagnosticsPrintf(&decoder->diag, "Primary item not specified"); |
6065 | 69 | return AVIF_RESULT_MISSING_IMAGE_ITEM; |
6066 | 69 | } |
6067 | | |
6068 | | // Main item of each group category (top-level item such as grid or single tile), if any. |
6069 | 14.8k | avifDecoderItem * mainItems[AVIF_ITEM_CATEGORY_COUNT]; |
6070 | 14.8k | avifCodecType codecType[AVIF_ITEM_CATEGORY_COUNT]; |
6071 | 59.2k | for (int c = 0; c < AVIF_ITEM_CATEGORY_COUNT; ++c) { |
6072 | 44.4k | mainItems[c] = NULL; |
6073 | 44.4k | codecType[c] = AVIF_CODEC_TYPE_UNKNOWN; |
6074 | 44.4k | } |
6075 | | |
6076 | | // Mandatory primary color item |
6077 | 14.8k | mainItems[AVIF_ITEM_COLOR] = avifMetaFindColorItem(data->meta); |
6078 | 14.8k | if (!mainItems[AVIF_ITEM_COLOR]) { |
6079 | 106 | avifDiagnosticsPrintf(&decoder->diag, "Primary item not found"); |
6080 | 106 | return AVIF_RESULT_MISSING_IMAGE_ITEM; |
6081 | 106 | } |
6082 | 14.7k | AVIF_CHECKRES(avifDecoderItemReadAndParse(decoder, |
6083 | 14.7k | mainItems[AVIF_ITEM_COLOR], |
6084 | 14.7k | /*isItemInInput=*/AVIF_TRUE, |
6085 | 14.7k | &data->tileInfos[AVIF_ITEM_COLOR].grid, |
6086 | 14.7k | &codecType[AVIF_ITEM_COLOR])); |
6087 | 14.6k | colorProperties = &mainItems[AVIF_ITEM_COLOR]->properties; |
6088 | 14.6k | colorCodecType = codecType[AVIF_ITEM_COLOR]; |
6089 | | |
6090 | | // Optional alpha auxiliary item |
6091 | 14.6k | avifBool isAlphaItemInInput; |
6092 | 14.6k | AVIF_CHECKRES(avifMetaFindAlphaItem(data->meta, |
6093 | 14.6k | mainItems[AVIF_ITEM_COLOR], |
6094 | 14.6k | &data->tileInfos[AVIF_ITEM_COLOR], |
6095 | 14.6k | &mainItems[AVIF_ITEM_ALPHA], |
6096 | 14.6k | &data->tileInfos[AVIF_ITEM_ALPHA], |
6097 | 14.6k | &isAlphaItemInInput)); |
6098 | 14.6k | if (mainItems[AVIF_ITEM_ALPHA]) { |
6099 | 72 | AVIF_CHECKRES(avifDecoderItemReadAndParse(decoder, |
6100 | 72 | mainItems[AVIF_ITEM_ALPHA], |
6101 | 72 | isAlphaItemInInput, |
6102 | 72 | &data->tileInfos[AVIF_ITEM_ALPHA].grid, |
6103 | 72 | &codecType[AVIF_ITEM_ALPHA])); |
6104 | 72 | } |
6105 | | |
6106 | | // Section 10.2.6 of 23008-12:2024/AMD 1:2024(E): |
6107 | | // 'tmap' brand |
6108 | | // This brand enables file players to identify and decode HEIF files containing tone-map derived image |
6109 | | // items. When present, this brand shall be among the brands included in the compatible_brands |
6110 | | // array of the FileTypeBox. |
6111 | | // |
6112 | | // If the file contains a 'tmap' item but doesn't have the 'tmap' brand, it is technically invalid. |
6113 | | // However, we don't report any error because in order to do detect this case consistently, we would |
6114 | | // need to remove the early exit in avifParse() to check if a 'tmap' item might be present |
6115 | | // further down the file. Instead, we simply ignore tmap items in files that lack the 'tmap' brand. |
6116 | 14.6k | if (avifBrandArrayHasBrand(&data->compatibleBrands, "tmap")) { |
6117 | 13 | avifDecoderItem * toneMappedImageItem; |
6118 | 13 | avifDecoderItem * gainMapItem; |
6119 | 13 | avifCodecType gainMapCodecType; |
6120 | 13 | AVIF_CHECKRES( |
6121 | 13 | avifDecoderFindGainMapItem(decoder, mainItems[AVIF_ITEM_COLOR], &toneMappedImageItem, &gainMapItem, &gainMapCodecType)); |
6122 | 9 | if (toneMappedImageItem != NULL) { |
6123 | | // Read the gain map's metadata. |
6124 | 0 | avifROData tmapData; |
6125 | 0 | AVIF_CHECKRES(avifDecoderItemRead(toneMappedImageItem, decoder->io, &tmapData, 0, 0, data->diag)); |
6126 | 0 | AVIF_ASSERT_OR_RETURN(decoder->image->gainMap != NULL); |
6127 | 0 | const avifResult tmapParsingRes = |
6128 | 0 | avifParseToneMappedImageBox(decoder->image->gainMap, tmapData.data, tmapData.size, data->diag); |
6129 | 0 | if (tmapParsingRes == AVIF_RESULT_NOT_IMPLEMENTED) { |
6130 | | // Unsupported gain map version. Simply ignore the gain map. |
6131 | 0 | avifGainMapDestroy(decoder->image->gainMap); |
6132 | 0 | decoder->image->gainMap = NULL; |
6133 | 0 | } else { |
6134 | 0 | AVIF_CHECKRES(tmapParsingRes); |
6135 | 0 | if (decoder->imageContentToDecode & AVIF_IMAGE_CONTENT_GAIN_MAP) { |
6136 | 0 | mainItems[AVIF_ITEM_GAIN_MAP] = gainMapItem; |
6137 | 0 | codecType[AVIF_ITEM_GAIN_MAP] = gainMapCodecType; |
6138 | 0 | } |
6139 | 0 | } |
6140 | 0 | } |
6141 | 9 | } |
6142 | | |
6143 | | #if defined(AVIF_ENABLE_EXPERIMENTAL_SAMPLE_TRANSFORM) |
6144 | | // AVIF_ITEM_SAMPLE_TRANSFORM (not used through mainItems because not a coded item (well grids are not coded items either but it's different)). |
6145 | | avifDecoderItem * sampleTransformItem = NULL; |
6146 | | AVIF_CHECKRES(avifDecoderDataFindSampleTransformImageItem(data, &sampleTransformItem)); |
6147 | | if (sampleTransformItem != NULL) { |
6148 | | AVIF_ASSERT_OR_RETURN(data->sampleTransformNumInputImageItems == 0); |
6149 | | |
6150 | | for (uint32_t i = 0; i < data->meta->items.count; ++i) { |
6151 | | avifDecoderItem * inputImageItem = data->meta->items.item[i]; |
6152 | | if (inputImageItem->dimgForID == sampleTransformItem->id) { |
6153 | | ++data->sampleTransformNumInputImageItems; |
6154 | | } |
6155 | | } |
6156 | | // Check max number of input items allowed by the format. |
6157 | | if (data->sampleTransformNumInputImageItems > 32) { |
6158 | | avifDiagnosticsPrintf(data->diag, |
6159 | | "Box[sato] too many input items, format allows up to 32, got %d", |
6160 | | data->sampleTransformNumInputImageItems); |
6161 | | return AVIF_RESULT_BMFF_PARSE_FAILED; |
6162 | | } |
6163 | | // Check max number of input items supported by this implementation. |
6164 | | AVIF_CHECKERR(data->sampleTransformNumInputImageItems <= AVIF_SAMPLE_TRANSFORM_MAX_NUM_INPUT_IMAGE_ITEMS, |
6165 | | AVIF_RESULT_NOT_IMPLEMENTED); |
6166 | | |
6167 | | uint32_t numExtraInputImageItems = 0; |
6168 | | for (uint32_t i = 0; i < data->meta->items.count; ++i) { |
6169 | | avifDecoderItem * inputImageItem = data->meta->items.item[i]; |
6170 | | if (inputImageItem->dimgForID != sampleTransformItem->id) { |
6171 | | continue; |
6172 | | } |
6173 | | if (avifDecoderItemShouldBeSkipped(inputImageItem)) { |
6174 | | avifDiagnosticsPrintf(data->diag, "Box[sato] input item %u is not a supported image type", inputImageItem->id); |
6175 | | return AVIF_RESULT_DECODE_SAMPLE_TRANSFORM_FAILED; |
6176 | | } |
6177 | | |
6178 | | AVIF_ASSERT_OR_RETURN(inputImageItem->dimgIdx < AVIF_SAMPLE_TRANSFORM_MAX_NUM_INPUT_IMAGE_ITEMS); |
6179 | | avifItemCategory * category = &data->sampleTransformInputImageItems[inputImageItem->dimgIdx]; |
6180 | | avifBool foundItem = AVIF_FALSE; |
6181 | | avifItemCategory alphaCategory = AVIF_ITEM_CATEGORY_COUNT; |
6182 | | for (int c = AVIF_ITEM_COLOR; c < AVIF_ITEM_CATEGORY_COUNT; ++c) { |
6183 | | if (mainItems[c] && inputImageItem->id == mainItems[c]->id) { |
6184 | | *category = c; |
6185 | | AVIF_CHECKERR(*category == AVIF_ITEM_COLOR, AVIF_RESULT_NOT_IMPLEMENTED); |
6186 | | alphaCategory = AVIF_ITEM_ALPHA; |
6187 | | foundItem = AVIF_TRUE; |
6188 | | break; |
6189 | | } |
6190 | | } |
6191 | | if (!foundItem) { |
6192 | | AVIF_CHECKERR(numExtraInputImageItems < AVIF_SAMPLE_TRANSFORM_MAX_NUM_EXTRA_INPUT_IMAGE_ITEMS, |
6193 | | AVIF_RESULT_NOT_IMPLEMENTED); |
6194 | | *category = (avifItemCategory)(AVIF_ITEM_SAMPLE_TRANSFORM_INPUT_0_COLOR + numExtraInputImageItems); |
6195 | | alphaCategory = (avifItemCategory)(AVIF_ITEM_SAMPLE_TRANSFORM_INPUT_0_ALPHA + numExtraInputImageItems); |
6196 | | mainItems[*category] = inputImageItem; |
6197 | | ++numExtraInputImageItems; |
6198 | | |
6199 | | AVIF_CHECKRES(avifDecoderItemReadAndParse(decoder, |
6200 | | inputImageItem, |
6201 | | /*isItemInInput=*/AVIF_TRUE, |
6202 | | &data->tileInfos[*category].grid, |
6203 | | &codecType[*category])); |
6204 | | |
6205 | | // Optional alpha auxiliary item |
6206 | | avifBool isAlphaInputImageItemInInput = AVIF_FALSE; |
6207 | | AVIF_CHECKRES(avifMetaFindAlphaItem(data->meta, |
6208 | | mainItems[*category], |
6209 | | &data->tileInfos[*category], |
6210 | | &mainItems[alphaCategory], |
6211 | | &data->tileInfos[alphaCategory], |
6212 | | &isAlphaInputImageItemInInput)); |
6213 | | |
6214 | | AVIF_CHECKERR(!mainItems[alphaCategory] == !mainItems[AVIF_ITEM_ALPHA], AVIF_RESULT_NOT_IMPLEMENTED); |
6215 | | if (mainItems[alphaCategory] != NULL) { |
6216 | | AVIF_CHECKERR(isAlphaInputImageItemInInput == isAlphaItemInInput, AVIF_RESULT_NOT_IMPLEMENTED); |
6217 | | AVIF_CHECKERR((mainItems[*category]->premByID == mainItems[alphaCategory]->id) == |
6218 | | (mainItems[AVIF_ITEM_COLOR]->premByID == mainItems[AVIF_ITEM_ALPHA]->id), |
6219 | | AVIF_RESULT_NOT_IMPLEMENTED); |
6220 | | AVIF_CHECKRES(avifDecoderItemReadAndParse(decoder, |
6221 | | mainItems[alphaCategory], |
6222 | | isAlphaInputImageItemInInput, |
6223 | | &data->tileInfos[alphaCategory].grid, |
6224 | | &codecType[alphaCategory])); |
6225 | | } |
6226 | | } |
6227 | | } |
6228 | | |
6229 | | AVIF_ASSERT_OR_RETURN(data->meta->sampleTransformExpression.tokens == NULL); |
6230 | | avifROData satoData; |
6231 | | AVIF_CHECKRES(avifDecoderItemRead(sampleTransformItem, decoder->io, &satoData, 0, 0, data->diag)); |
6232 | | AVIF_CHECKRES(avifParseSampleTransformImageBox(satoData.data, |
6233 | | satoData.size, |
6234 | | data->sampleTransformNumInputImageItems, |
6235 | | &data->meta->sampleTransformExpression, |
6236 | | data->diag)); |
6237 | | AVIF_CHECKRES(avifDecoderSampleTransformItemValidateProperties(sampleTransformItem, data->diag)); |
6238 | | const avifProperty * pixiProp = avifPropertyArrayFind(&sampleTransformItem->properties, "pixi"); |
6239 | | AVIF_ASSERT_OR_RETURN(pixiProp != NULL); |
6240 | | data->meta->sampleTransformDepth = pixiProp->u.pixi.planeDepths[0]; |
6241 | | } |
6242 | | #endif // AVIF_ENABLE_EXPERIMENTAL_SAMPLE_TRANSFORM |
6243 | | |
6244 | | // Find Exif and/or XMP metadata, if any |
6245 | 14.6k | AVIF_CHECKRES(avifDecoderFindMetadata(decoder, data->meta, decoder->image, mainItems[AVIF_ITEM_COLOR]->id)); |
6246 | | |
6247 | | // Set all counts and timing to safe-but-uninteresting values |
6248 | 14.6k | decoder->imageIndex = -1; |
6249 | 14.6k | decoder->imageCount = 1; |
6250 | 14.6k | decoder->imageTiming.timescale = 1; |
6251 | 14.6k | decoder->imageTiming.pts = 0; |
6252 | 14.6k | decoder->imageTiming.ptsInTimescales = 0; |
6253 | 14.6k | decoder->imageTiming.duration = 1; |
6254 | 14.6k | decoder->imageTiming.durationInTimescales = 1; |
6255 | 14.6k | decoder->timescale = 1; |
6256 | 14.6k | decoder->duration = 1; |
6257 | 14.6k | decoder->durationInTimescales = 1; |
6258 | | |
6259 | 57.8k | for (int c = AVIF_ITEM_COLOR; c < AVIF_ITEM_CATEGORY_COUNT; ++c) { |
6260 | 43.4k | if (!mainItems[c]) { |
6261 | 28.7k | continue; |
6262 | 28.7k | } |
6263 | | |
6264 | 14.6k | if (avifIsAlpha((avifItemCategory)c) && !mainItems[c]->width && !mainItems[c]->height) { |
6265 | | // NON-STANDARD: Alpha subimage does not have an ispe property; adopt width/height from color item |
6266 | 36 | AVIF_ASSERT_OR_RETURN(!(decoder->strictFlags & AVIF_STRICT_ALPHA_ISPE_REQUIRED)); |
6267 | 36 | mainItems[c]->width = mainItems[AVIF_ITEM_COLOR]->width; |
6268 | 36 | mainItems[c]->height = mainItems[AVIF_ITEM_COLOR]->height; |
6269 | 36 | } |
6270 | | |
6271 | 14.6k | AVIF_CHECKRES(avifDecoderAdoptGridTileCodecTypeIfNeeded(decoder, mainItems[c], &data->tileInfos[c])); |
6272 | | |
6273 | 14.6k | if (!(decoder->imageContentToDecode & AVIF_IMAGE_CONTENT_COLOR_AND_ALPHA) && (c == AVIF_ITEM_COLOR || c == AVIF_ITEM_ALPHA)) { |
6274 | 0 | continue; |
6275 | 0 | } |
6276 | | |
6277 | 14.6k | AVIF_CHECKRES(avifDecoderGenerateImageTiles(decoder, &data->tileInfos[c], mainItems[c], (avifItemCategory)c)); |
6278 | | |
6279 | 14.5k | avifStrictFlags strictFlags = decoder->strictFlags; |
6280 | 14.5k | if (avifIsAlpha((avifItemCategory)c) && !isAlphaItemInInput) { |
6281 | | // In this case, the made up grid item will not have an associated pixi property. So validate everything else |
6282 | | // but the pixi property. |
6283 | 0 | strictFlags &= ~(avifStrictFlags)AVIF_STRICT_PIXI_REQUIRED; |
6284 | 0 | } |
6285 | 14.5k | AVIF_CHECKRES( |
6286 | 14.5k | avifDecoderItemValidateProperties(mainItems[c], avifGetConfigurationPropertyName(codecType[c]), &decoder->diag, strictFlags)); |
6287 | 14.5k | } |
6288 | | |
6289 | 14.4k | if (mainItems[AVIF_ITEM_COLOR]->progressive) { |
6290 | 7 | decoder->progressiveState = AVIF_PROGRESSIVE_STATE_AVAILABLE; |
6291 | | // data->tileInfos[AVIF_ITEM_COLOR].firstTileIndex is not yet defined but will be set to 0 a few lines below. |
6292 | 7 | const avifTile * colorTile = &data->tiles.tile[0]; |
6293 | 7 | if (colorTile->input->samples.count > 1) { |
6294 | 0 | decoder->progressiveState = AVIF_PROGRESSIVE_STATE_ACTIVE; |
6295 | 0 | decoder->imageCount = (int)colorTile->input->samples.count; |
6296 | 0 | } |
6297 | 7 | } |
6298 | | |
6299 | 14.4k | decoder->image->width = mainItems[AVIF_ITEM_COLOR]->width; |
6300 | 14.4k | decoder->image->height = mainItems[AVIF_ITEM_COLOR]->height; |
6301 | 14.4k | decoder->alphaPresent = (mainItems[AVIF_ITEM_ALPHA] != NULL); |
6302 | 14.4k | decoder->image->alphaPremultiplied = decoder->alphaPresent && |
6303 | 14.4k | (mainItems[AVIF_ITEM_COLOR]->premByID == mainItems[AVIF_ITEM_ALPHA]->id); |
6304 | | |
6305 | 14.4k | if (mainItems[AVIF_ITEM_GAIN_MAP]) { |
6306 | 0 | AVIF_ASSERT_OR_RETURN(decoder->image->gainMap && decoder->image->gainMap->image); |
6307 | 0 | decoder->image->gainMap->image->width = mainItems[AVIF_ITEM_GAIN_MAP]->width; |
6308 | 0 | decoder->image->gainMap->image->height = mainItems[AVIF_ITEM_GAIN_MAP]->height; |
6309 | | // Must be called after avifDecoderGenerateImageTiles() which among other things copies the |
6310 | | // codec config property from the first tile of a grid to the grid item (when grids are used). |
6311 | 0 | AVIF_CHECKRES(avifReadCodecConfigProperty(decoder->image->gainMap->image, |
6312 | 0 | &mainItems[AVIF_ITEM_GAIN_MAP]->properties, |
6313 | 0 | codecType[AVIF_ITEM_GAIN_MAP])); |
6314 | 0 | gainMapProperties = &mainItems[AVIF_ITEM_GAIN_MAP]->properties; |
6315 | 0 | } |
6316 | 14.4k | } |
6317 | | |
6318 | 14.4k | uint32_t firstTileIndex = 0; |
6319 | 57.7k | for (int c = 0; c < AVIF_ITEM_CATEGORY_COUNT; ++c) { |
6320 | 43.3k | data->tileInfos[c].firstTileIndex = firstTileIndex; |
6321 | 43.3k | firstTileIndex += data->tileInfos[c].tileCount; |
6322 | 43.3k | } |
6323 | | |
6324 | | // Sanity check tiles |
6325 | 28.9k | for (uint32_t tileIndex = 0; tileIndex < data->tiles.count; ++tileIndex) { |
6326 | 14.5k | avifTile * tile = &data->tiles.tile[tileIndex]; |
6327 | 30.0k | for (uint32_t sampleIndex = 0; sampleIndex < tile->input->samples.count; ++sampleIndex) { |
6328 | 15.5k | avifDecodeSample * sample = &tile->input->samples.sample[sampleIndex]; |
6329 | 15.5k | if (!sample->size) { |
6330 | | // Every sample must have some data |
6331 | 2 | return AVIF_RESULT_BMFF_PARSE_FAILED; |
6332 | 2 | } |
6333 | | |
6334 | 15.5k | if (tile->input->itemCategory == AVIF_ITEM_COLOR) { |
6335 | 15.4k | decoder->ioStats.colorOBUSize += sample->size; |
6336 | 15.4k | } else if (tile->input->itemCategory == AVIF_ITEM_ALPHA) { |
6337 | 66 | decoder->ioStats.alphaOBUSize += sample->size; |
6338 | 66 | } |
6339 | 15.5k | } |
6340 | 14.5k | } |
6341 | | |
6342 | 14.4k | AVIF_CHECKRES(avifReadColorProperties(decoder->io, |
6343 | 14.4k | colorProperties, |
6344 | 14.4k | &decoder->image->icc, |
6345 | 14.4k | &decoder->image->colorPrimaries, |
6346 | 14.4k | &decoder->image->transferCharacteristics, |
6347 | 14.4k | &decoder->image->matrixCoefficients, |
6348 | 14.4k | &decoder->image->yuvRange, |
6349 | 14.4k | &data->cicpSet)); |
6350 | | |
6351 | 14.4k | const avifProperty * clliProp = avifPropertyArrayFind(colorProperties, "clli"); |
6352 | 14.4k | if (clliProp) { |
6353 | 123 | decoder->image->clli = clliProp->u.clli; |
6354 | 123 | } |
6355 | | |
6356 | | // Transformations |
6357 | 14.4k | const avifProperty * paspProp = avifPropertyArrayFind(colorProperties, "pasp"); |
6358 | 14.4k | if (paspProp) { |
6359 | 244 | decoder->image->transformFlags |= AVIF_TRANSFORM_PASP; |
6360 | 244 | decoder->image->pasp = paspProp->u.pasp; |
6361 | 244 | } |
6362 | 14.4k | const avifProperty * clapProp = avifPropertyArrayFind(colorProperties, "clap"); |
6363 | 14.4k | if (clapProp) { |
6364 | 0 | decoder->image->transformFlags |= AVIF_TRANSFORM_CLAP; |
6365 | 0 | decoder->image->clap = clapProp->u.clap; |
6366 | 0 | } |
6367 | 14.4k | const avifProperty * irotProp = avifPropertyArrayFind(colorProperties, "irot"); |
6368 | 14.4k | if (irotProp) { |
6369 | 168 | decoder->image->transformFlags |= AVIF_TRANSFORM_IROT; |
6370 | 168 | decoder->image->irot = irotProp->u.irot; |
6371 | 168 | } |
6372 | 14.4k | const avifProperty * imirProp = avifPropertyArrayFind(colorProperties, "imir"); |
6373 | 14.4k | if (imirProp) { |
6374 | 77 | decoder->image->transformFlags |= AVIF_TRANSFORM_IMIR; |
6375 | 77 | decoder->image->imir = imirProp->u.imir; |
6376 | 77 | } |
6377 | 14.4k | if (gainMapProperties != NULL) { |
6378 | 0 | AVIF_CHECKRES(aviDecoderCheckGainMapProperties(decoder, gainMapProperties)); |
6379 | 0 | } |
6380 | | |
6381 | 14.4k | if (!data->cicpSet && (data->tiles.count > 0)) { |
6382 | 13.2k | avifTile * firstTile = &data->tiles.tile[0]; |
6383 | 13.2k | if (firstTile->input->samples.count > 0) { |
6384 | 13.2k | avifDecodeSample * sample = &firstTile->input->samples.sample[0]; |
6385 | | |
6386 | | // Harvest CICP from the AV1's sequence header, which should be very close to the front |
6387 | | // of the first sample. Read in successively larger chunks until we successfully parse the sequence. |
6388 | 13.2k | static const size_t searchSampleChunkIncrement = 64; |
6389 | 13.2k | static const size_t searchSampleSizeMax = 4096; |
6390 | 13.2k | size_t searchSampleSize = 0; |
6391 | 20.4k | do { |
6392 | 20.4k | searchSampleSize += searchSampleChunkIncrement; |
6393 | 20.4k | if (searchSampleSize > sample->size) { |
6394 | 1.67k | searchSampleSize = sample->size; |
6395 | 1.67k | } |
6396 | | |
6397 | 20.4k | avifResult prepareResult = avifDecoderPrepareSample(decoder, sample, searchSampleSize); |
6398 | 20.4k | if (prepareResult != AVIF_RESULT_OK) { |
6399 | 273 | return prepareResult; |
6400 | 273 | } |
6401 | | |
6402 | 20.2k | avifSequenceHeader sequenceHeader; |
6403 | 20.2k | if (avifSequenceHeaderParse(&sequenceHeader, &sample->data, firstTile->codecType)) { |
6404 | 12.4k | data->cicpSet = AVIF_TRUE; |
6405 | 12.4k | decoder->image->colorPrimaries = sequenceHeader.colorPrimaries; |
6406 | 12.4k | decoder->image->transferCharacteristics = sequenceHeader.transferCharacteristics; |
6407 | 12.4k | decoder->image->matrixCoefficients = sequenceHeader.matrixCoefficients; |
6408 | 12.4k | decoder->image->yuvRange = sequenceHeader.range; |
6409 | 12.4k | break; |
6410 | 12.4k | } |
6411 | 20.2k | } while (searchSampleSize != sample->size && searchSampleSize < searchSampleSizeMax); |
6412 | 13.2k | } |
6413 | 13.2k | } |
6414 | | |
6415 | 14.1k | AVIF_CHECKRES(avifReadCodecConfigProperty(decoder->image, colorProperties, colorCodecType)); |
6416 | | |
6417 | | // Expose as raw bytes all other properties that libavif does not care about. |
6418 | 66.4k | for (size_t i = 0; i < colorProperties->count; ++i) { |
6419 | 52.2k | const avifProperty * property = &colorProperties->prop[i]; |
6420 | 52.2k | if (property->isOpaque) { |
6421 | 16.9k | AVIF_CHECKRES(avifImagePushProperty(decoder->image, |
6422 | 16.9k | property->type, |
6423 | 16.9k | property->u.opaque.usertype, |
6424 | 16.9k | property->u.opaque.boxPayload.data, |
6425 | 16.9k | property->u.opaque.boxPayload.size)); |
6426 | 16.9k | } |
6427 | 52.2k | } |
6428 | | |
6429 | 14.1k | if (gainMapProperties) { |
6430 | 0 | for (size_t i = 0; i < gainMapProperties->count; ++i) { |
6431 | 0 | const avifProperty * property = &gainMapProperties->prop[i]; |
6432 | 0 | if (property->isOpaque) { |
6433 | 0 | AVIF_CHECKRES(avifImagePushProperty(decoder->image->gainMap->image, |
6434 | 0 | property->type, |
6435 | 0 | property->u.opaque.usertype, |
6436 | 0 | property->u.opaque.boxPayload.data, |
6437 | 0 | property->u.opaque.boxPayload.size)); |
6438 | 0 | } |
6439 | 0 | } |
6440 | 0 | } |
6441 | 14.1k | return AVIF_RESULT_OK; |
6442 | 14.1k | } |
6443 | | |
6444 | | static avifResult avifDecoderPrepareTiles(avifDecoder * decoder, uint32_t nextImageIndex, const avifTileInfo * info) |
6445 | 42.2k | { |
6446 | 56.4k | for (unsigned int tileIndex = info->decodedTileCount; tileIndex < info->tileCount; ++tileIndex) { |
6447 | 14.2k | avifTile * tile = &decoder->data->tiles.tile[info->firstTileIndex + tileIndex]; |
6448 | | |
6449 | 14.2k | if (nextImageIndex >= tile->input->samples.count) { |
6450 | 0 | return AVIF_RESULT_NO_IMAGES_REMAINING; |
6451 | 0 | } |
6452 | | |
6453 | 14.2k | avifDecodeSample * sample = &tile->input->samples.sample[nextImageIndex]; |
6454 | 14.2k | avifResult prepareResult = avifDecoderPrepareSample(decoder, sample, 0); |
6455 | 14.2k | if (prepareResult != AVIF_RESULT_OK) { |
6456 | 69 | return prepareResult; |
6457 | 69 | } |
6458 | 14.2k | } |
6459 | 42.2k | return AVIF_RESULT_OK; |
6460 | 42.2k | } |
6461 | | |
6462 | | static avifResult avifImageLimitedToFullAlpha(avifImage * image) |
6463 | 27 | { |
6464 | 27 | if (image->imageOwnsAlphaPlane) { |
6465 | 0 | return AVIF_RESULT_NOT_IMPLEMENTED; |
6466 | 0 | } |
6467 | | |
6468 | 27 | const uint8_t * alphaPlane = image->alphaPlane; |
6469 | 27 | const uint32_t alphaRowBytes = image->alphaRowBytes; |
6470 | | |
6471 | | // We cannot do the range conversion in place since it will modify the |
6472 | | // codec's internal frame buffers. Allocate memory for the conversion. |
6473 | 27 | image->alphaPlane = NULL; |
6474 | 27 | image->alphaRowBytes = 0; |
6475 | 27 | const avifResult allocationResult = avifImageAllocatePlanes(image, AVIF_PLANES_A); |
6476 | 27 | if (allocationResult != AVIF_RESULT_OK) { |
6477 | 0 | return allocationResult; |
6478 | 0 | } |
6479 | | |
6480 | 27 | if (image->depth > 8) { |
6481 | 347 | for (uint32_t j = 0; j < image->height; ++j) { |
6482 | 336 | const uint8_t * srcRow = &alphaPlane[j * alphaRowBytes]; |
6483 | 336 | uint8_t * dstRow = &image->alphaPlane[j * image->alphaRowBytes]; |
6484 | 9.19k | for (uint32_t i = 0; i < image->width; ++i) { |
6485 | 8.86k | int srcAlpha = *((const uint16_t *)&srcRow[i * 2]); |
6486 | 8.86k | int dstAlpha = avifLimitedToFullY(image->depth, srcAlpha); |
6487 | 8.86k | *((uint16_t *)&dstRow[i * 2]) = (uint16_t)dstAlpha; |
6488 | 8.86k | } |
6489 | 336 | } |
6490 | 16 | } else { |
6491 | 570 | for (uint32_t j = 0; j < image->height; ++j) { |
6492 | 554 | const uint8_t * srcRow = &alphaPlane[j * alphaRowBytes]; |
6493 | 554 | uint8_t * dstRow = &image->alphaPlane[j * image->alphaRowBytes]; |
6494 | 23.1k | for (uint32_t i = 0; i < image->width; ++i) { |
6495 | 22.5k | int srcAlpha = srcRow[i]; |
6496 | 22.5k | int dstAlpha = avifLimitedToFullY(image->depth, srcAlpha); |
6497 | 22.5k | dstRow[i] = (uint8_t)dstAlpha; |
6498 | 22.5k | } |
6499 | 554 | } |
6500 | 16 | } |
6501 | 27 | return AVIF_RESULT_OK; |
6502 | 27 | } |
6503 | | |
6504 | | static avifResult avifGetErrorForItemCategory(avifItemCategory itemCategory) |
6505 | 11.8k | { |
6506 | 11.8k | if (itemCategory == AVIF_ITEM_GAIN_MAP) { |
6507 | 0 | return AVIF_RESULT_DECODE_GAIN_MAP_FAILED; |
6508 | 0 | } |
6509 | | #if defined(AVIF_ENABLE_EXPERIMENTAL_SAMPLE_TRANSFORM) |
6510 | | if (itemCategory >= AVIF_SAMPLE_TRANSFORM_MIN_CATEGORY && itemCategory <= AVIF_SAMPLE_TRANSFORM_MAX_CATEGORY) { |
6511 | | return AVIF_RESULT_DECODE_SAMPLE_TRANSFORM_FAILED; |
6512 | | } |
6513 | | #endif |
6514 | 11.8k | return avifIsAlpha(itemCategory) ? AVIF_RESULT_DECODE_ALPHA_FAILED : AVIF_RESULT_DECODE_COLOR_FAILED; |
6515 | 11.8k | } |
6516 | | |
// Decodes all not-yet-decoded tiles of one item category (info) for the frame at
// nextImageIndex, then transfers each decoded tile into decoder->image (or its gain map
// sub-image) either by copying (grids) or by stealing the planes (single-tile images).
// With decoder->allowIncremental, returns AVIF_RESULT_OK early when sample bytes are
// still missing, leaving info->decodedTileCount reflecting the progress so far.
static avifResult avifDecoderDecodeTiles(avifDecoder * decoder, uint32_t nextImageIndex, avifTileInfo * info)
{
    const unsigned int oldDecodedTileCount = info->decodedTileCount;
    for (unsigned int tileIndex = oldDecodedTileCount; tileIndex < info->tileCount; ++tileIndex) {
        avifTile * tile = &decoder->data->tiles.tile[info->firstTileIndex + tileIndex];

        const avifDecodeSample * sample = &tile->input->samples.sample[nextImageIndex];
        if (sample->data.size < sample->size) {
            AVIF_ASSERT_OR_RETURN(decoder->allowIncremental);
            // Data is missing but there is no error yet. Output available pixel rows.
            return AVIF_RESULT_OK;
        }

        // Feed the sample to the underlying codec; the codec reports whether the alpha
        // plane was (non-conformantly) encoded with limited range.
        avifBool isLimitedRangeAlpha = AVIF_FALSE;
        tile->codec->maxThreads = decoder->maxThreads;
        tile->codec->imageSizeLimit = decoder->imageSizeLimit;
        if (!tile->codec->getNextImage(tile->codec, sample, avifIsAlpha(tile->input->itemCategory), &isLimitedRangeAlpha, tile->image)) {
            avifDiagnosticsPrintf(&decoder->diag, "tile->codec->getNextImage() failed");
            return avifGetErrorForItemCategory(tile->input->itemCategory);
        }

        // Section 2.3.4 of AV1 Codec ISO Media File Format Binding v1.2.0 says:
        //   the full_range_flag in the colr box shall match the color_range
        //   flag in the Sequence Header OBU.
        // See https://aomediacodec.github.io/av1-isobmff/v1.2.0.html#av1codecconfigurationbox-semantics.
        // If a 'colr' box of colour_type 'nclx' was parsed, a mismatch between
        // the 'colr' decoder->image->yuvRange and the AV1 OBU
        // tile->image->yuvRange should be treated as an error.
        // However codec_svt.c was not encoding the color_range field for
        // multiple years, so there probably are files in the wild that will
        // fail decoding if this is enforced. Thus this pattern is allowed.
        // Section 12.1.5.1 of ISO 14496-12 (ISOBMFF) says:
        //   If colour information is supplied in both this [colr] box, and also
        //   in the video bitstream, this box takes precedence, and over-rides
        //   the information in the bitstream.
        // So decoder->image->yuvRange is kept because it was either the 'colr'
        // value set when the 'colr' box was parsed, or it was the AV1 OBU value
        // extracted from the sequence header OBU of the first tile of the first
        // frame (if no 'colr' box of colour_type 'nclx' was found).

        // Alpha plane with limited range is not allowed by the latest revision
        // of the specification. However, it was allowed in version 1.0.0 of the
        // specification. To allow such files, simply convert the alpha plane to
        // full range.
        if (avifIsAlpha(tile->input->itemCategory) && isLimitedRangeAlpha) {
            avifResult result = avifImageLimitedToFullAlpha(tile->image);
            if (result != AVIF_RESULT_OK) {
                avifDiagnosticsPrintf(&decoder->diag, "avifImageLimitedToFullAlpha failed");
                return result;
            }
        }

        // Scale the decoded image so that it corresponds to this tile's output dimensions
        if ((tile->width != tile->image->width) || (tile->height != tile->image->height)) {
            if (avifImageScaleWithLimit(tile->image,
                                        tile->width,
                                        tile->height,
                                        decoder->imageSizeLimit,
                                        decoder->imageDimensionLimit,
                                        &decoder->diag) != AVIF_RESULT_OK) {
                return avifGetErrorForItemCategory(tile->input->itemCategory);
            }
        }

#if defined(AVIF_CODEC_AVM)
        // Look up the item this sample belongs to, so that its 'pixi' property can be
        // consulted below.
        avifDecoderItem * tileItem = NULL;
        for (uint32_t itemIndex = 0; itemIndex < decoder->data->meta->items.count; ++itemIndex) {
            avifDecoderItem * item = decoder->data->meta->items.item[itemIndex];
            if (avifDecoderItemShouldBeSkipped(item)) {
                continue;
            }
            if (item->id == sample->itemID) {
                tileItem = item;
                break;
            }
        }
        if (tileItem != NULL) {
            const avifProperty * prop = avifPropertyArrayFind(&tileItem->properties, "pixi");
            // Match the decoded image format with the number of planes specified in 'pixi'.
            if (prop != NULL && prop->u.pixi.planeCount == 1 && tile->image->yuvFormat == AVIF_PIXEL_FORMAT_YUV420) {
                // Codecs such as avm do not support monochrome so samples were encoded as 4:2:0.
                // Ignore the UV planes at decoding.
                tile->image->yuvFormat = AVIF_PIXEL_FORMAT_YUV400;
                if (tile->image->imageOwnsYUVPlanes) {
                    avifFree(tile->image->yuvPlanes[AVIF_CHAN_U]);
                    avifFree(tile->image->yuvPlanes[AVIF_CHAN_V]);
                }
                tile->image->yuvPlanes[AVIF_CHAN_U] = NULL;
                tile->image->yuvRowBytes[AVIF_CHAN_U] = 0;
                tile->image->yuvPlanes[AVIF_CHAN_V] = NULL;
                tile->image->yuvRowBytes[AVIF_CHAN_V] = 0;
            }
        }
#endif

        ++info->decodedTileCount;

        // Grids must be copied tile by tile into the output image; single-tile images can
        // have their planes stolen (moved) instead to avoid a copy.
        const avifBool isGrid = (info->grid.rows > 0) && (info->grid.columns > 0);
        avifBool stealPlanes = !isGrid;
#if defined(AVIF_ENABLE_EXPERIMENTAL_SAMPLE_TRANSFORM)
        if (decoder->data->meta->sampleTransformExpression.count > 0) {
            // Keep everything as a copy for now.
            stealPlanes = AVIF_FALSE;
        }
        if (tile->input->itemCategory >= AVIF_SAMPLE_TRANSFORM_MIN_CATEGORY &&
            tile->input->itemCategory <= AVIF_SAMPLE_TRANSFORM_MAX_CATEGORY) {
            // Keep Sample Transform input image item samples in tiles.
            // The expression will be applied in avifDecoderNextImage() below instead, once all the tiles are available.
            continue;
        }
#endif

        if (!stealPlanes) {
            // Copy path: allocate the destination planes on the first tile, then blit this
            // tile into its grid cell.
            avifImage * dstImage = decoder->image;
            if (tile->input->itemCategory == AVIF_ITEM_GAIN_MAP) {
                AVIF_ASSERT_OR_RETURN(dstImage->gainMap && dstImage->gainMap->image);
                dstImage = dstImage->gainMap->image;
            }
            if (tileIndex == 0) {
                AVIF_CHECKRES(avifDecoderDataAllocateImagePlanes(decoder->data, info, dstImage));
            }
            AVIF_CHECKRES(avifDecoderDataCopyTileToImage(decoder->data, info, dstImage, tile, tileIndex));
        } else {
            // Steal path: only valid for single-tile categories.
            AVIF_ASSERT_OR_RETURN(info->tileCount == 1);
            AVIF_ASSERT_OR_RETURN(tileIndex == 0);
            avifImage * src = tile->image;

            if (tile->input->itemCategory == AVIF_ITEM_GAIN_MAP) {
                AVIF_ASSERT_OR_RETURN(decoder->image->gainMap && decoder->image->gainMap->image);
                decoder->image->gainMap->image->width = src->width;
                decoder->image->gainMap->image->height = src->height;
                decoder->image->gainMap->image->depth = src->depth;
            } else {
                // Color and alpha items must agree on dimensions and depth; a mismatch is
                // only tolerated by adopting the color item's geometry (alpha mismatch fails).
                if ((decoder->image->width != src->width) || (decoder->image->height != src->height) ||
                    (decoder->image->depth != src->depth)) {
                    if (avifIsAlpha(tile->input->itemCategory)) {
                        avifDiagnosticsPrintf(&decoder->diag,
                                              "The color image item does not match the alpha image item in width, height, or bit depth");
                        return AVIF_RESULT_DECODE_ALPHA_FAILED;
                    }
                    avifImageFreePlanes(decoder->image, AVIF_PLANES_ALL);

                    decoder->image->width = src->width;
                    decoder->image->height = src->height;
                    decoder->image->depth = src->depth;
                }
            }

            if (avifIsAlpha(tile->input->itemCategory)) {
                avifImageStealPlanes(decoder->image, src, AVIF_PLANES_A);
            } else if (tile->input->itemCategory == AVIF_ITEM_GAIN_MAP) {
                AVIF_ASSERT_OR_RETURN(decoder->image->gainMap && decoder->image->gainMap->image);
                avifImageStealPlanes(decoder->image->gainMap->image, src, AVIF_PLANES_YUV);
            } else { // AVIF_ITEM_COLOR
                avifImageStealPlanes(decoder->image, src, AVIF_PLANES_YUV);
            }
        }
    }
    return AVIF_RESULT_OK;
}
6677 | | |
6678 | | // Returns AVIF_FALSE if there is currently a partially decoded frame. |
6679 | | static avifBool avifDecoderDataFrameFullyDecoded(const avifDecoderData * data) |
6680 | 16.3k | { |
6681 | 23.0k | for (int c = 0; c < AVIF_ITEM_CATEGORY_COUNT; ++c) { |
6682 | 20.8k | if (data->tileInfos[c].decodedTileCount != data->tileInfos[c].tileCount) { |
6683 | 14.1k | return AVIF_FALSE; |
6684 | 14.1k | } |
6685 | 20.8k | } |
6686 | 2.23k | return AVIF_TRUE; |
6687 | 16.3k | } |
6688 | | |
#if defined(AVIF_ENABLE_EXPERIMENTAL_SAMPLE_TRANSFORM)
// Evaluates the parsed Sample Transform expression over the decoded input image items and
// writes the result into dstImage (YUV planes, plus alpha when present). If dstImage's
// depth differs from the expression's output depth, the result is first produced in a
// temporary image of the correct depth, then moved into dstImage.
static avifResult avifDecoderApplySampleTransform(const avifDecoder * decoder, avifImage * dstImage)
{
    if (dstImage->depth != decoder->data->meta->sampleTransformDepth) {
        AVIF_ASSERT_OR_RETURN(dstImage->yuvPlanes[0] != NULL);
        AVIF_ASSERT_OR_RETURN(dstImage->imageOwnsYUVPlanes);

        // Use a temporary buffer because dstImage may point to decoder->image, which could be an input image.
        avifImage * dstImageWithCorrectDepth =
            avifImageCreate(dstImage->width, dstImage->height, decoder->data->meta->sampleTransformDepth, dstImage->yuvFormat);
        AVIF_CHECKERR(dstImageWithCorrectDepth != NULL, AVIF_RESULT_OUT_OF_MEMORY);
        avifResult result =
            avifImageAllocatePlanes(dstImageWithCorrectDepth, dstImage->alphaPlane != NULL ? AVIF_PLANES_ALL : AVIF_PLANES_YUV);
        if (result == AVIF_RESULT_OK) {
            // Recurse once: the temporary image's depth matches sampleTransformDepth, so
            // the recursive call takes the expression-evaluation path below.
            result = avifDecoderApplySampleTransform(decoder, dstImageWithCorrectDepth);
            if (result == AVIF_RESULT_OK) {
                // Keep the same dstImage object rather than swapping decoder->image, in case the user already accessed it.
                avifImageFreePlanes(dstImage, AVIF_PLANES_ALL);
                dstImage->depth = dstImageWithCorrectDepth->depth;
                avifImageStealPlanes(dstImage, dstImageWithCorrectDepth, AVIF_PLANES_ALL);
            }
        }
        avifImageDestroy(dstImageWithCorrectDepth);
        return result;
    }

    // One pass for the YUV planes, plus a second pass for alpha when present.
    for (int pass = 0; pass < (decoder->alphaPresent ? 2 : 1); ++pass) {
        avifBool alpha = (pass == 0) ? AVIF_FALSE : AVIF_TRUE;
        AVIF_ASSERT_OR_RETURN(decoder->data->sampleTransformNumInputImageItems <= AVIF_SAMPLE_TRANSFORM_MAX_NUM_INPUT_IMAGE_ITEMS);
        // Gather the expression's operands: either the main decoded image, or a retained
        // extra input image item tile.
        const avifImage * inputImages[AVIF_SAMPLE_TRANSFORM_MAX_NUM_INPUT_IMAGE_ITEMS];
        for (uint32_t i = 0; i < decoder->data->sampleTransformNumInputImageItems; ++i) {
            avifItemCategory category = decoder->data->sampleTransformInputImageItems[i];
            if (category == AVIF_ITEM_COLOR) {
                inputImages[i] = decoder->image;
            } else {
                AVIF_ASSERT_OR_RETURN(category >= AVIF_ITEM_SAMPLE_TRANSFORM_INPUT_0_COLOR &&
                                      category < AVIF_ITEM_SAMPLE_TRANSFORM_INPUT_0_COLOR +
                                                     AVIF_SAMPLE_TRANSFORM_MAX_NUM_EXTRA_INPUT_IMAGE_ITEMS);
                if (alpha) {
                    // Alpha variants of the extra input categories are offset by this constant.
                    category += AVIF_SAMPLE_TRANSFORM_MAX_NUM_EXTRA_INPUT_IMAGE_ITEMS;
                }
                const avifTileInfo * tileInfo = &decoder->data->tileInfos[category];
                AVIF_CHECKERR(tileInfo->tileCount == 1, AVIF_RESULT_NOT_IMPLEMENTED); // TODO(yguyon): Implement Sample Transform grids
                inputImages[i] = decoder->data->tiles.tile[tileInfo->firstTileIndex].image;
                AVIF_ASSERT_OR_RETURN(inputImages[i] != NULL);
            }
        }
        AVIF_CHECKRES(avifImageApplyExpression(dstImage,
                                               AVIF_SAMPLE_TRANSFORM_BIT_DEPTH_32,
                                               &decoder->data->meta->sampleTransformExpression,
                                               decoder->data->sampleTransformNumInputImageItems,
                                               inputImages,
                                               alpha ? AVIF_PLANES_A : AVIF_PLANES_YUV));
    }
    return AVIF_RESULT_OK;
}
#endif // AVIF_ENABLE_EXPERIMENTAL_SAMPLE_TRANSFORM
6746 | | |
// Decodes the next frame into decoder->image: prepares (fetches) the sample data for every
// item category, decodes all available tiles per category, optionally applies the Sample
// Transform expression, and finally advances decoder->imageIndex and fills in timing.
// With decoder->allowIncremental, may return AVIF_RESULT_WAITING_ON_IO while leaving a
// partially decoded frame accessible via avifDecoderDecodedRowCount().
avifResult avifDecoderNextImage(avifDecoder * decoder)
{
    avifDiagnosticsClearError(&decoder->diag);

    if (!decoder->data || decoder->data->tiles.count == 0) {
        // Nothing has been parsed yet
        return AVIF_RESULT_NO_CONTENT;
    }

    if (!decoder->io || !decoder->io->read) {
        return AVIF_RESULT_IO_NOT_SET;
    }

    if (avifDecoderDataFrameFullyDecoded(decoder->data)) {
        // A frame was decoded during the last avifDecoderNextImage() call.
        // Reset per-category progress so this call decodes a fresh frame.
        for (int c = 0; c < AVIF_ITEM_CATEGORY_COUNT; ++c) {
            decoder->data->tileInfos[c].decodedTileCount = 0;
        }
    }

    // Sanity check: the last category's tile range must end exactly at the total tile count.
    AVIF_ASSERT_OR_RETURN(decoder->data->tiles.count == (decoder->data->tileInfos[AVIF_ITEM_CATEGORY_COUNT - 1].firstTileIndex +
                                                         decoder->data->tileInfos[AVIF_ITEM_CATEGORY_COUNT - 1].tileCount));

    const uint32_t nextImageIndex = (uint32_t)(decoder->imageIndex + 1);

    // Ensure that we have created the codecs before proceeding with the decoding.
    if (!decoder->data->tiles.tile[0].codec) {
        AVIF_CHECKRES(avifDecoderCreateCodecs(decoder));
    }

    // Acquire all sample data for the current image first, allowing for any read call to bail out
    // with AVIF_RESULT_WAITING_ON_IO harmlessly / idempotently, unless decoder->allowIncremental.
    avifResult prepareTileResult[AVIF_ITEM_CATEGORY_COUNT];
    for (int c = 0; c < AVIF_ITEM_CATEGORY_COUNT; ++c) {
        prepareTileResult[c] = avifDecoderPrepareTiles(decoder, nextImageIndex, &decoder->data->tileInfos[c]);
        if (!decoder->allowIncremental || (prepareTileResult[c] != AVIF_RESULT_WAITING_ON_IO)) {
            AVIF_CHECKRES(prepareTileResult[c]);
        }
    }

    // Decode all available color tiles now, then all available alpha tiles, then all available bit
    // depth extension tiles. The order of appearance of the tiles in the bitstream is left to the
    // encoder's choice, and decoding as many as possible of each category in parallel is beneficial
    // for incremental decoding, as pixel rows need all channels to be decoded before being
    // accessible to the user.
    for (int c = 0; c < AVIF_ITEM_CATEGORY_COUNT; ++c) {
        AVIF_CHECKRES(avifDecoderDecodeTiles(decoder, nextImageIndex, &decoder->data->tileInfos[c]));
    }

    if (!avifDecoderDataFrameFullyDecoded(decoder->data)) {
        AVIF_ASSERT_OR_RETURN(decoder->allowIncremental);
        // The image is not completely decoded. There should be no error unrelated to missing bytes,
        // and at least some missing bytes.
        avifResult firstNonOkResult = AVIF_RESULT_OK;
        for (int c = 0; c < AVIF_ITEM_CATEGORY_COUNT; ++c) {
            AVIF_ASSERT_OR_RETURN(prepareTileResult[c] == AVIF_RESULT_OK || prepareTileResult[c] == AVIF_RESULT_WAITING_ON_IO);
            if (firstNonOkResult == AVIF_RESULT_OK) {
                firstNonOkResult = prepareTileResult[c];
            }
        }
        AVIF_ASSERT_OR_RETURN(firstNonOkResult != AVIF_RESULT_OK);
        // Return the "not enough bytes" status now instead of moving on to the next frame.
        return AVIF_RESULT_WAITING_ON_IO;
    }
    for (int c = 0; c < AVIF_ITEM_CATEGORY_COUNT; ++c) {
        AVIF_ASSERT_OR_RETURN(prepareTileResult[c] == AVIF_RESULT_OK);
    }

#if defined(AVIF_ENABLE_EXPERIMENTAL_SAMPLE_TRANSFORM)
    if (decoder->data->meta->sampleTransformExpression.count > 0) {
        // TODO(yguyon): Add a field in avifDecoder and only perform sample transformations upon request.
        AVIF_CHECKRES(avifDecoderApplySampleTransform(decoder, decoder->image));
    }
#endif // AVIF_ENABLE_EXPERIMENTAL_SAMPLE_TRANSFORM

    // Only advance decoder->imageIndex once the image is completely decoded, so that
    // avifDecoderNthImage(decoder, decoder->imageIndex + 1) is equivalent to avifDecoderNextImage(decoder)
    // if the previous call to avifDecoderNextImage() returned AVIF_RESULT_WAITING_ON_IO.
    decoder->imageIndex = (int)nextImageIndex;
    // The decoded tile counts will be reset to 0 the next time avifDecoderNextImage() is called,
    // for avifDecoderDecodedRowCount() to work until then.
    if (decoder->data->sourceSampleTable) {
        // Decoding from a track! Provide timing information.

        avifResult timingResult = avifDecoderNthImageTiming(decoder, decoder->imageIndex, &decoder->imageTiming);
        if (timingResult != AVIF_RESULT_OK) {
            return timingResult;
        }
    }
    return AVIF_RESULT_OK;
}
6838 | | |
6839 | | avifResult avifDecoderNthImageTiming(const avifDecoder * decoder, uint32_t frameIndex, avifImageTiming * outTiming) |
6840 | 2 | { |
6841 | 2 | if (!decoder->data) { |
6842 | | // Nothing has been parsed yet |
6843 | 0 | return AVIF_RESULT_NO_CONTENT; |
6844 | 0 | } |
6845 | | |
6846 | 2 | if ((frameIndex > INT_MAX) || ((int)frameIndex >= decoder->imageCount)) { |
6847 | | // Impossible index |
6848 | 0 | return AVIF_RESULT_NO_IMAGES_REMAINING; |
6849 | 0 | } |
6850 | | |
6851 | 2 | if (!decoder->data->sourceSampleTable) { |
6852 | | // There isn't any real timing associated with this decode, so |
6853 | | // just hand back the defaults chosen in avifDecoderReset(). |
6854 | 0 | *outTiming = decoder->imageTiming; |
6855 | 0 | return AVIF_RESULT_OK; |
6856 | 0 | } |
6857 | | |
6858 | 2 | outTiming->timescale = decoder->timescale; |
6859 | 2 | outTiming->ptsInTimescales = 0; |
6860 | 2 | for (uint32_t imageIndex = 0; imageIndex < frameIndex; ++imageIndex) { |
6861 | 0 | outTiming->ptsInTimescales += avifSampleTableGetImageDelta(decoder->data->sourceSampleTable, imageIndex); |
6862 | 0 | } |
6863 | 2 | outTiming->durationInTimescales = avifSampleTableGetImageDelta(decoder->data->sourceSampleTable, frameIndex); |
6864 | | |
6865 | 2 | if (outTiming->timescale > 0) { |
6866 | 1 | outTiming->pts = (double)outTiming->ptsInTimescales / (double)outTiming->timescale; |
6867 | 1 | outTiming->duration = (double)outTiming->durationInTimescales / (double)outTiming->timescale; |
6868 | 1 | } else { |
6869 | 1 | outTiming->pts = 0.0; |
6870 | 1 | outTiming->duration = 0.0; |
6871 | 1 | } |
6872 | 2 | return AVIF_RESULT_OK; |
6873 | 2 | } |
6874 | | |
6875 | | avifResult avifDecoderNthImage(avifDecoder * decoder, uint32_t frameIndex) |
6876 | 0 | { |
6877 | 0 | avifDiagnosticsClearError(&decoder->diag); |
6878 | |
|
6879 | 0 | if (!decoder->data) { |
6880 | | // Nothing has been parsed yet |
6881 | 0 | return AVIF_RESULT_NO_CONTENT; |
6882 | 0 | } |
6883 | | |
6884 | 0 | if ((frameIndex > INT_MAX) || ((int)frameIndex >= decoder->imageCount)) { |
6885 | | // Impossible index |
6886 | 0 | return AVIF_RESULT_NO_IMAGES_REMAINING; |
6887 | 0 | } |
6888 | | |
6889 | 0 | int requestedIndex = (int)frameIndex; |
6890 | 0 | if (requestedIndex == (decoder->imageIndex + 1)) { |
6891 | | // It's just the next image (already partially decoded or not at all), nothing special here |
6892 | 0 | return avifDecoderNextImage(decoder); |
6893 | 0 | } |
6894 | | |
6895 | 0 | if (requestedIndex == decoder->imageIndex) { |
6896 | 0 | if (avifDecoderDataFrameFullyDecoded(decoder->data)) { |
6897 | | // The current fully decoded image (decoder->imageIndex) is requested, nothing to do |
6898 | 0 | return AVIF_RESULT_OK; |
6899 | 0 | } |
6900 | | // The next image (decoder->imageIndex + 1) is partially decoded but |
6901 | | // the previous image (decoder->imageIndex) is requested. |
6902 | | // Fall through to resetting the decoder data and start decoding from |
6903 | | // the nearest key frame. |
6904 | 0 | } |
6905 | | |
6906 | 0 | int nearestKeyFrame = (int)avifDecoderNearestKeyframe(decoder, frameIndex); |
6907 | 0 | if ((nearestKeyFrame > (decoder->imageIndex + 1)) || (requestedIndex <= decoder->imageIndex)) { |
6908 | | // If we get here, we need to start decoding from the nearest key frame. |
6909 | | // So discard the unused decoder state and its previous frames. This |
6910 | | // will force the setup of new AV1 decoder (avifCodec) instances in |
6911 | | // avifDecoderNextImage(). |
6912 | 0 | decoder->imageIndex = nearestKeyFrame - 1; // prepare to read nearest keyframe |
6913 | 0 | avifDecoderDataResetCodec(decoder->data); |
6914 | 0 | } |
6915 | 0 | for (;;) { |
6916 | 0 | avifResult result = avifDecoderNextImage(decoder); |
6917 | 0 | if (result != AVIF_RESULT_OK) { |
6918 | 0 | return result; |
6919 | 0 | } |
6920 | | |
6921 | 0 | if (requestedIndex == decoder->imageIndex) { |
6922 | 0 | break; |
6923 | 0 | } |
6924 | 0 | } |
6925 | 0 | return AVIF_RESULT_OK; |
6926 | 0 | } |
6927 | | |
6928 | | avifBool avifDecoderIsKeyframe(const avifDecoder * decoder, uint32_t frameIndex) |
6929 | 0 | { |
6930 | 0 | if (!decoder->data || (decoder->data->tiles.count == 0)) { |
6931 | | // Nothing has been parsed yet |
6932 | 0 | return AVIF_FALSE; |
6933 | 0 | } |
6934 | | |
6935 | | // *All* tiles for the requested frameIndex must be keyframes in order for |
6936 | | // avifDecoderIsKeyframe() to return true, otherwise we may seek to a frame in which the color |
6937 | | // planes are a keyframe but the alpha plane isn't a keyframe, which will cause an alpha plane |
6938 | | // decode failure. |
6939 | 0 | for (unsigned int i = 0; i < decoder->data->tiles.count; ++i) { |
6940 | 0 | const avifTile * tile = &decoder->data->tiles.tile[i]; |
6941 | 0 | if ((frameIndex >= tile->input->samples.count) || !tile->input->samples.sample[frameIndex].sync) { |
6942 | 0 | return AVIF_FALSE; |
6943 | 0 | } |
6944 | 0 | } |
6945 | 0 | return AVIF_TRUE; |
6946 | 0 | } |
6947 | | |
6948 | | uint32_t avifDecoderNearestKeyframe(const avifDecoder * decoder, uint32_t frameIndex) |
6949 | 0 | { |
6950 | 0 | if (!decoder->data) { |
6951 | | // Nothing has been parsed yet |
6952 | 0 | return 0; |
6953 | 0 | } |
6954 | | |
6955 | 0 | for (; frameIndex != 0; --frameIndex) { |
6956 | 0 | if (avifDecoderIsKeyframe(decoder, frameIndex)) { |
6957 | 0 | break; |
6958 | 0 | } |
6959 | 0 | } |
6960 | 0 | return frameIndex; |
6961 | 0 | } |
6962 | | |
6963 | | // Returns the number of available rows in decoder->image given a color or alpha subimage. |
6964 | | static uint32_t avifGetDecodedRowCount(const avifDecoder * decoder, const avifTileInfo * info, const avifImage * image) |
6965 | 0 | { |
6966 | 0 | if (info->decodedTileCount == info->tileCount) { |
6967 | 0 | return image->height; |
6968 | 0 | } |
6969 | 0 | if (info->decodedTileCount == 0) { |
6970 | 0 | return 0; |
6971 | 0 | } |
6972 | | |
6973 | | #if defined(AVIF_ENABLE_EXPERIMENTAL_SAMPLE_TRANSFORM) |
6974 | | if (decoder->data->meta->sampleTransformExpression.count > 0) { |
6975 | | // TODO(yguyon): Support incremental Sample Transforms |
6976 | | return 0; |
6977 | | } |
6978 | | #endif |
6979 | | |
6980 | 0 | if ((info->grid.rows > 0) && (info->grid.columns > 0)) { |
6981 | | // Grid of AVIF tiles (not to be confused with AV1 tiles). |
6982 | 0 | const uint32_t tileHeight = decoder->data->tiles.tile[info->firstTileIndex].height; |
6983 | 0 | return AVIF_MIN((info->decodedTileCount / info->grid.columns) * tileHeight, image->height); |
6984 | 0 | } else { |
6985 | | // Non-grid image. |
6986 | 0 | return image->height; |
6987 | 0 | } |
6988 | 0 | } |
6989 | | |
// Returns the minimum number of rows of decoder->image that are fully available across all
// decoded item categories, for use during incremental decoding. The gain map's row count is
// rescaled to the main image's height before taking the minimum.
uint32_t avifDecoderDecodedRowCount(const avifDecoder * decoder)
{
    uint32_t minRowCount = decoder->image->height;
    for (int c = 0; c < AVIF_ITEM_CATEGORY_COUNT; ++c) {
        if (c == AVIF_ITEM_GAIN_MAP) {
            // The gain map may have different dimensions than the main image, so its
            // decoded row count is scaled into the main image's coordinate space.
            const avifImage * const gainMap = decoder->image->gainMap ? decoder->image->gainMap->image : NULL;
            if ((decoder->imageContentToDecode & AVIF_IMAGE_CONTENT_GAIN_MAP) && gainMap != NULL && gainMap->height != 0) {
                uint32_t gainMapRowCount = avifGetDecodedRowCount(decoder, &decoder->data->tileInfos[AVIF_ITEM_GAIN_MAP], gainMap);
                if (gainMap->height != decoder->image->height) {
                    const uint32_t scaledGainMapRowCount =
                        (uint32_t)floorf((float)gainMapRowCount / gainMap->height * decoder->image->height);
                    // Make sure it matches the formula described in the comment of avifDecoderDecodedRowCount() in avif.h.
                    // On inconsistency, AVIF_CHECKERR makes this function return 0 (no rows available).
                    AVIF_CHECKERR((uint32_t)lround((double)scaledGainMapRowCount / decoder->image->height *
                                                   decoder->image->gainMap->image->height) <= gainMapRowCount,
                                  0);
                    gainMapRowCount = scaledGainMapRowCount;
                }
                minRowCount = AVIF_MIN(minRowCount, gainMapRowCount);
            }
            continue;
        }
        const uint32_t rowCount = avifGetDecodedRowCount(decoder, &decoder->data->tileInfos[c], decoder->image);
        minRowCount = AVIF_MIN(minRowCount, rowCount);
    }
    return minRowCount;
}
7016 | | |
7017 | | avifResult avifDecoderRead(avifDecoder * decoder, avifImage * image) |
7018 | 0 | { |
7019 | 0 | avifResult result = avifDecoderParse(decoder); |
7020 | 0 | if (result != AVIF_RESULT_OK) { |
7021 | 0 | return result; |
7022 | 0 | } |
7023 | 0 | result = avifDecoderNextImage(decoder); |
7024 | 0 | if (result != AVIF_RESULT_OK) { |
7025 | 0 | return result; |
7026 | 0 | } |
7027 | | // If decoder->image->imageOwnsYUVPlanes is true and decoder->image is not used after this call, |
7028 | | // the ownership of the planes in decoder->image could be transferred here instead of copied. |
7029 | | // However most codec_*.c implementations allocate the output buffer themselves and return a |
7030 | | // view, unless some postprocessing is applied (container-level grid reconstruction for |
7031 | | // example), so the first condition rarely holds. |
7032 | | // The second condition does not hold either: it is not required by the documentation in avif.h. |
7033 | 0 | return avifImageCopy(image, decoder->image, AVIF_PLANES_ALL); |
7034 | 0 | } |
7035 | | |
7036 | | avifResult avifDecoderReadMemory(avifDecoder * decoder, avifImage * image, const uint8_t * data, size_t size) |
7037 | 0 | { |
7038 | 0 | avifDiagnosticsClearError(&decoder->diag); |
7039 | 0 | avifResult result = avifDecoderSetIOMemory(decoder, data, size); |
7040 | 0 | if (result != AVIF_RESULT_OK) { |
7041 | 0 | return result; |
7042 | 0 | } |
7043 | 0 | return avifDecoderRead(decoder, image); |
7044 | 0 | } |
7045 | | |
7046 | | avifResult avifDecoderReadFile(avifDecoder * decoder, avifImage * image, const char * filename) |
7047 | 0 | { |
7048 | 0 | avifDiagnosticsClearError(&decoder->diag); |
7049 | 0 | avifResult result = avifDecoderSetIOFile(decoder, filename); |
7050 | 0 | if (result != AVIF_RESULT_OK) { |
7051 | 0 | return result; |
7052 | 0 | } |
7053 | 0 | return avifDecoderRead(decoder, image); |
7054 | 0 | } |