Line | Count | Source (jump to first uncovered line) |
1 | | // Copyright 2019 Joe Drago. All rights reserved. |
2 | | // SPDX-License-Identifier: BSD-2-Clause |
3 | | |
4 | | #include "avif/internal.h" |
5 | | |
6 | | #include <assert.h> |
7 | | #include <limits.h> |
8 | | #include <stdint.h> |
9 | | #include <string.h> |
10 | | |
// Stringification helpers: STR_HELPER pastes its argument as a literal token;
// STR expands macro arguments first so STR(AVIF_VERSION_MAJOR) yields e.g. "1".
#define STR_HELPER(x) #x
#define STR(x) STR_HELPER(x)
// "major.minor.patch" version string, assembled at compile time.
#define AVIF_VERSION_STRING (STR(AVIF_VERSION_MAJOR) "." STR(AVIF_VERSION_MINOR) "." STR(AVIF_VERSION_PATCH))

// Returns the library version as a static "major.minor.patch" string.
const char * avifVersion(void)
{
    return AVIF_VERSION_STRING;
}
19 | | |
20 | | const char * avifPixelFormatToString(avifPixelFormat format) |
21 | 0 | { |
22 | 0 | switch (format) { |
23 | 0 | case AVIF_PIXEL_FORMAT_YUV444: |
24 | 0 | return "YUV444"; |
25 | 0 | case AVIF_PIXEL_FORMAT_YUV420: |
26 | 0 | return "YUV420"; |
27 | 0 | case AVIF_PIXEL_FORMAT_YUV422: |
28 | 0 | return "YUV422"; |
29 | 0 | case AVIF_PIXEL_FORMAT_YUV400: |
30 | 0 | return "YUV400"; |
31 | 0 | case AVIF_PIXEL_FORMAT_NONE: |
32 | 0 | case AVIF_PIXEL_FORMAT_COUNT: |
33 | 0 | default: |
34 | 0 | break; |
35 | 0 | } |
36 | 0 | return "Unknown"; |
37 | 0 | } |
38 | | |
39 | | void avifGetPixelFormatInfo(avifPixelFormat format, avifPixelFormatInfo * info) |
40 | 10.8M | { |
41 | 10.8M | memset(info, 0, sizeof(avifPixelFormatInfo)); |
42 | | |
43 | 10.8M | switch (format) { |
44 | 1.75M | case AVIF_PIXEL_FORMAT_YUV444: |
45 | 1.75M | info->chromaShiftX = 0; |
46 | 1.75M | info->chromaShiftY = 0; |
47 | 1.75M | break; |
48 | | |
49 | 1.70M | case AVIF_PIXEL_FORMAT_YUV422: |
50 | 1.70M | info->chromaShiftX = 1; |
51 | 1.70M | info->chromaShiftY = 0; |
52 | 1.70M | break; |
53 | | |
54 | 965k | case AVIF_PIXEL_FORMAT_YUV420: |
55 | 965k | info->chromaShiftX = 1; |
56 | 965k | info->chromaShiftY = 1; |
57 | 965k | break; |
58 | | |
59 | 6.45M | case AVIF_PIXEL_FORMAT_YUV400: |
60 | 6.45M | info->monochrome = AVIF_TRUE; |
61 | | // The nonexistent chroma is considered as subsampled in each dimension |
62 | | // according to the AV1 specification. See sections 5.5.2 and 6.4.2. |
63 | 6.45M | info->chromaShiftX = 1; |
64 | 6.45M | info->chromaShiftY = 1; |
65 | 6.45M | break; |
66 | | |
67 | 8.55k | case AVIF_PIXEL_FORMAT_NONE: |
68 | 8.55k | case AVIF_PIXEL_FORMAT_COUNT: |
69 | 8.55k | default: |
70 | 8.55k | break; |
71 | 10.8M | } |
72 | 10.8M | } |
73 | | |
74 | | const char * avifResultToString(avifResult result) |
75 | 0 | { |
76 | | // clang-format off |
77 | 0 | switch (result) { |
78 | 0 | case AVIF_RESULT_OK: return "OK"; |
79 | 0 | case AVIF_RESULT_INVALID_FTYP: return "Invalid ftyp"; |
80 | 0 | case AVIF_RESULT_NO_CONTENT: return "No content"; |
81 | 0 | case AVIF_RESULT_NO_YUV_FORMAT_SELECTED: return "No YUV format selected"; |
82 | 0 | case AVIF_RESULT_REFORMAT_FAILED: return "Reformat failed"; |
83 | 0 | case AVIF_RESULT_UNSUPPORTED_DEPTH: return "Unsupported depth"; |
84 | 0 | case AVIF_RESULT_ENCODE_COLOR_FAILED: return "Encoding of color planes failed"; |
85 | 0 | case AVIF_RESULT_ENCODE_ALPHA_FAILED: return "Encoding of alpha plane failed"; |
86 | 0 | case AVIF_RESULT_BMFF_PARSE_FAILED: return "BMFF parsing failed"; |
87 | 0 | case AVIF_RESULT_MISSING_IMAGE_ITEM: return "Missing or empty image item"; |
88 | 0 | case AVIF_RESULT_DECODE_COLOR_FAILED: return "Decoding of color planes failed"; |
89 | 0 | case AVIF_RESULT_DECODE_ALPHA_FAILED: return "Decoding of alpha plane failed"; |
90 | 0 | case AVIF_RESULT_COLOR_ALPHA_SIZE_MISMATCH: return "Color and alpha planes size mismatch"; |
91 | 0 | case AVIF_RESULT_ISPE_SIZE_MISMATCH: return "Plane sizes don't match ispe values"; |
92 | 0 | case AVIF_RESULT_NO_CODEC_AVAILABLE: return "No codec available"; |
93 | 0 | case AVIF_RESULT_NO_IMAGES_REMAINING: return "No images remaining"; |
94 | 0 | case AVIF_RESULT_INVALID_EXIF_PAYLOAD: return "Invalid Exif payload"; |
95 | 0 | case AVIF_RESULT_INVALID_IMAGE_GRID: return "Invalid image grid"; |
96 | 0 | case AVIF_RESULT_INVALID_CODEC_SPECIFIC_OPTION: return "Invalid codec-specific option"; |
97 | 0 | case AVIF_RESULT_TRUNCATED_DATA: return "Truncated data"; |
98 | 0 | case AVIF_RESULT_IO_NOT_SET: return "IO not set"; |
99 | 0 | case AVIF_RESULT_IO_ERROR: return "IO Error"; |
100 | 0 | case AVIF_RESULT_WAITING_ON_IO: return "Waiting on IO"; |
101 | 0 | case AVIF_RESULT_INVALID_ARGUMENT: return "Invalid argument"; |
102 | 0 | case AVIF_RESULT_NOT_IMPLEMENTED: return "Not implemented"; |
103 | 0 | case AVIF_RESULT_OUT_OF_MEMORY: return "Out of memory"; |
104 | 0 | case AVIF_RESULT_CANNOT_CHANGE_SETTING: return "Cannot change some setting during encoding"; |
105 | 0 | case AVIF_RESULT_INCOMPATIBLE_IMAGE: return "The image is incompatible with already encoded images"; |
106 | 0 | case AVIF_RESULT_INTERNAL_ERROR: return "Internal error"; |
107 | 0 | case AVIF_RESULT_ENCODE_GAIN_MAP_FAILED: return "Encoding of gain map planes failed"; |
108 | 0 | case AVIF_RESULT_DECODE_GAIN_MAP_FAILED: return "Decoding of gain map planes failed"; |
109 | 0 | case AVIF_RESULT_INVALID_TONE_MAPPED_IMAGE: return "Invalid tone mapped image item"; |
110 | | #if defined(AVIF_ENABLE_EXPERIMENTAL_SAMPLE_TRANSFORM) |
111 | | case AVIF_RESULT_ENCODE_SAMPLE_TRANSFORM_FAILED: return "Encoding of sample transformed image failed"; |
112 | | case AVIF_RESULT_DECODE_SAMPLE_TRANSFORM_FAILED: return "Decoding of sample transformed image failed"; |
113 | | #endif |
114 | 0 | case AVIF_RESULT_UNKNOWN_ERROR: |
115 | 0 | default: |
116 | 0 | break; |
117 | 0 | } |
118 | | // clang-format on |
119 | 0 | return "Unknown Error"; |
120 | 0 | } |
121 | | |
122 | | const char * avifProgressiveStateToString(avifProgressiveState progressiveState) |
123 | 0 | { |
124 | | // clang-format off |
125 | 0 | switch (progressiveState) { |
126 | 0 | case AVIF_PROGRESSIVE_STATE_UNAVAILABLE: return "Unavailable"; |
127 | 0 | case AVIF_PROGRESSIVE_STATE_AVAILABLE: return "Available"; |
128 | 0 | case AVIF_PROGRESSIVE_STATE_ACTIVE: return "Active"; |
129 | 0 | default: |
130 | 0 | break; |
131 | 0 | } |
132 | | // clang-format on |
133 | 0 | return "Unknown"; |
134 | 0 | } |
135 | | |
136 | | void avifImageSetDefaults(avifImage * image) |
137 | 1.75M | { |
138 | 1.75M | memset(image, 0, sizeof(avifImage)); |
139 | 1.75M | image->yuvRange = AVIF_RANGE_FULL; |
140 | 1.75M | image->colorPrimaries = AVIF_COLOR_PRIMARIES_UNSPECIFIED; |
141 | 1.75M | image->transferCharacteristics = AVIF_TRANSFER_CHARACTERISTICS_UNSPECIFIED; |
142 | 1.75M | image->matrixCoefficients = AVIF_MATRIX_COEFFICIENTS_UNSPECIFIED; |
143 | 1.75M | } |
144 | | |
145 | | avifImage * avifImageCreate(uint32_t width, uint32_t height, uint32_t depth, avifPixelFormat yuvFormat) |
146 | 1.70M | { |
147 | | // width and height are checked when actually used, for example by avifImageAllocatePlanes(). |
148 | 1.70M | AVIF_CHECKERR(depth <= 16, NULL); // avifImage only supports up to 16 bits per sample. See avifImageUsesU16(). |
149 | | // Cast to silence "comparison of unsigned expression is always true" warning. |
150 | 1.70M | AVIF_CHECKERR((int)yuvFormat >= AVIF_PIXEL_FORMAT_NONE && yuvFormat < AVIF_PIXEL_FORMAT_COUNT, NULL); |
151 | | |
152 | 1.70M | avifImage * image = (avifImage *)avifAlloc(sizeof(avifImage)); |
153 | 1.70M | AVIF_CHECKERR(image, NULL); |
154 | 1.70M | avifImageSetDefaults(image); |
155 | 1.70M | image->width = width; |
156 | 1.70M | image->height = height; |
157 | 1.70M | image->depth = depth; |
158 | 1.70M | image->yuvFormat = yuvFormat; |
159 | 1.70M | return image; |
160 | 1.70M | } |
161 | | |
// Convenience wrapper: creates a zero-sized image with no pixel format.
// Returns NULL on allocation failure.
avifImage * avifImageCreateEmpty(void)
{
    return avifImageCreate(0, 0, 0, AVIF_PIXEL_FORMAT_NONE);
}
166 | | |
167 | | void avifImageCopyNoAlloc(avifImage * dstImage, const avifImage * srcImage) |
168 | 166k | { |
169 | 166k | dstImage->width = srcImage->width; |
170 | 166k | dstImage->height = srcImage->height; |
171 | 166k | dstImage->depth = srcImage->depth; |
172 | 166k | dstImage->yuvFormat = srcImage->yuvFormat; |
173 | 166k | dstImage->yuvRange = srcImage->yuvRange; |
174 | 166k | dstImage->yuvChromaSamplePosition = srcImage->yuvChromaSamplePosition; |
175 | 166k | dstImage->alphaPremultiplied = srcImage->alphaPremultiplied; |
176 | | |
177 | 166k | dstImage->colorPrimaries = srcImage->colorPrimaries; |
178 | 166k | dstImage->transferCharacteristics = srcImage->transferCharacteristics; |
179 | 166k | dstImage->matrixCoefficients = srcImage->matrixCoefficients; |
180 | 166k | dstImage->clli = srcImage->clli; |
181 | | |
182 | 166k | dstImage->transformFlags = srcImage->transformFlags; |
183 | 166k | dstImage->pasp = srcImage->pasp; |
184 | 166k | dstImage->clap = srcImage->clap; |
185 | 166k | dstImage->irot = srcImage->irot; |
186 | 166k | dstImage->imir = srcImage->imir; |
187 | 166k | } |
188 | | |
// Copies the sample data of the planes selected by |planes| from srcImage to
// dstImage, row by row. Both images must already share the same depth, and
// (for the planes being copied) the same plane geometry; no allocation is
// performed here.
void avifImageCopySamples(avifImage * dstImage, const avifImage * srcImage, avifPlanesFlags planes)
{
    assert(srcImage->depth == dstImage->depth);
    if (planes & AVIF_PLANES_YUV) {
        assert(srcImage->yuvFormat == dstImage->yuvFormat);
        // Note that there may be a mismatch between srcImage->yuvRange and dstImage->yuvRange
        // because libavif allows for 'colr' and AV1 OBU video range values to differ.
    }
    const size_t bytesPerPixel = avifImageUsesU16(srcImage) ? 2 : 1;

    const avifBool skipColor = !(planes & AVIF_PLANES_YUV);
    const avifBool skipAlpha = !(planes & AVIF_PLANES_A);
    for (int c = AVIF_CHAN_Y; c <= AVIF_CHAN_A; ++c) {
        const avifBool alpha = c == AVIF_CHAN_A;
        if ((skipColor && !alpha) || (skipAlpha && alpha)) {
            continue;
        }

        const uint32_t planeWidth = avifImagePlaneWidth(srcImage, c);
        const uint32_t planeHeight = avifImagePlaneHeight(srcImage, c);
        const uint8_t * srcRow = avifImagePlane(srcImage, c);
        uint8_t * dstRow = avifImagePlane(dstImage, c);
        const uint32_t srcRowBytes = avifImagePlaneRowBytes(srcImage, c);
        const uint32_t dstRowBytes = avifImagePlaneRowBytes(dstImage, c);
        // A plane must either be present in both images or absent from both.
        assert(!srcRow == !dstRow);
        if (!srcRow) {
            continue;
        }
        assert(planeWidth == avifImagePlaneWidth(dstImage, c));
        assert(planeHeight == avifImagePlaneHeight(dstImage, c));

        // Rows may be padded (rowBytes can exceed the used width in bytes),
        // so copy each row individually rather than one big memcpy.
        const size_t planeWidthBytes = planeWidth * bytesPerPixel;
        for (uint32_t y = 0; y < planeHeight; ++y) {
            memcpy(dstRow, srcRow, planeWidthBytes);
            srcRow += srcRowBytes;
            dstRow += dstRowBytes;
        }
    }
}
228 | | |
229 | | static avifResult avifImageCopyProperties(avifImage * dstImage, const avifImage * srcImage) |
230 | 82.4k | { |
231 | 82.4k | for (size_t i = 0; i < dstImage->numProperties; ++i) { |
232 | 0 | avifRWDataFree(&dstImage->properties[i].boxPayload); |
233 | 0 | } |
234 | 82.4k | avifFree(dstImage->properties); |
235 | 82.4k | dstImage->properties = NULL; |
236 | 82.4k | dstImage->numProperties = 0; |
237 | | |
238 | 82.4k | if (srcImage->numProperties != 0) { |
239 | 5.67k | dstImage->properties = (avifImageItemProperty *)avifAlloc(srcImage->numProperties * sizeof(srcImage->properties[0])); |
240 | 5.67k | AVIF_CHECKERR(dstImage->properties != NULL, AVIF_RESULT_OUT_OF_MEMORY); |
241 | 5.67k | memset(dstImage->properties, 0, srcImage->numProperties * sizeof(srcImage->properties[0])); |
242 | 5.67k | dstImage->numProperties = srcImage->numProperties; |
243 | 13.7k | for (size_t i = 0; i < srcImage->numProperties; ++i) { |
244 | 8.10k | memcpy(dstImage->properties[i].boxtype, srcImage->properties[i].boxtype, sizeof(srcImage->properties[i].boxtype)); |
245 | 8.10k | memcpy(dstImage->properties[i].usertype, srcImage->properties[i].usertype, sizeof(srcImage->properties[i].usertype)); |
246 | 8.10k | AVIF_CHECKRES(avifRWDataSet(&dstImage->properties[i].boxPayload, |
247 | 8.10k | srcImage->properties[i].boxPayload.data, |
248 | 8.10k | srcImage->properties[i].boxPayload.size)); |
249 | 8.10k | } |
250 | 5.67k | } |
251 | 82.4k | return AVIF_RESULT_OK; |
252 | 82.4k | } |
253 | | |
// Deep-copies srcImage into dstImage: metadata (ICC/Exif/XMP/properties),
// the planes selected by |planes| (allocating them in dstImage as needed),
// and the gain map (recursively, including its image). Any previous contents
// of dstImage are released first. Returns AVIF_RESULT_OK on success.
avifResult avifImageCopy(avifImage * dstImage, const avifImage * srcImage, avifPlanesFlags planes)
{
    avifImageFreePlanes(dstImage, AVIF_PLANES_ALL);
    avifImageCopyNoAlloc(dstImage, srcImage);

    AVIF_CHECKRES(avifImageSetProfileICC(dstImage, srcImage->icc.data, srcImage->icc.size));

    AVIF_CHECKRES(avifRWDataSet(&dstImage->exif, srcImage->exif.data, srcImage->exif.size));
    AVIF_CHECKRES(avifImageSetMetadataXMP(dstImage, srcImage->xmp.data, srcImage->xmp.size));

    AVIF_CHECKRES(avifImageCopyProperties(dstImage, srcImage));

    if ((planes & AVIF_PLANES_YUV) && srcImage->yuvPlanes[AVIF_CHAN_Y]) {
        // A non-monochrome source with a Y plane must also carry U and V.
        if ((srcImage->yuvFormat != AVIF_PIXEL_FORMAT_YUV400) &&
            (!srcImage->yuvPlanes[AVIF_CHAN_U] || !srcImage->yuvPlanes[AVIF_CHAN_V])) {
            return AVIF_RESULT_INVALID_ARGUMENT;
        }
        const avifResult allocationResult = avifImageAllocatePlanes(dstImage, AVIF_PLANES_YUV);
        if (allocationResult != AVIF_RESULT_OK) {
            return allocationResult;
        }
    }
    if ((planes & AVIF_PLANES_A) && srcImage->alphaPlane) {
        const avifResult allocationResult = avifImageAllocatePlanes(dstImage, AVIF_PLANES_A);
        if (allocationResult != AVIF_RESULT_OK) {
            return allocationResult;
        }
    }
    // Copies only the planes that were allocated above.
    avifImageCopySamples(dstImage, srcImage, planes);

    if (srcImage->gainMap) {
        if (!dstImage->gainMap) {
            dstImage->gainMap = avifGainMapCreate();
            AVIF_CHECKERR(dstImage->gainMap, AVIF_RESULT_OUT_OF_MEMORY);
        }
        // Per-channel gain map parameters.
        for (int c = 0; c < 3; ++c) {
            dstImage->gainMap->gainMapMin[c] = srcImage->gainMap->gainMapMin[c];
            dstImage->gainMap->gainMapMax[c] = srcImage->gainMap->gainMapMax[c];
            dstImage->gainMap->gainMapGamma[c] = srcImage->gainMap->gainMapGamma[c];
            dstImage->gainMap->baseOffset[c] = srcImage->gainMap->baseOffset[c];
            dstImage->gainMap->alternateOffset[c] = srcImage->gainMap->alternateOffset[c];
        }
        dstImage->gainMap->baseHdrHeadroom = srcImage->gainMap->baseHdrHeadroom;
        dstImage->gainMap->alternateHdrHeadroom = srcImage->gainMap->alternateHdrHeadroom;
        dstImage->gainMap->useBaseColorSpace = srcImage->gainMap->useBaseColorSpace;
        AVIF_CHECKRES(avifRWDataSet(&dstImage->gainMap->altICC, srcImage->gainMap->altICC.data, srcImage->gainMap->altICC.size));
        dstImage->gainMap->altColorPrimaries = srcImage->gainMap->altColorPrimaries;
        dstImage->gainMap->altTransferCharacteristics = srcImage->gainMap->altTransferCharacteristics;
        dstImage->gainMap->altMatrixCoefficients = srcImage->gainMap->altMatrixCoefficients;
        dstImage->gainMap->altDepth = srcImage->gainMap->altDepth;
        dstImage->gainMap->altPlaneCount = srcImage->gainMap->altPlaneCount;
        dstImage->gainMap->altCLLI = srcImage->gainMap->altCLLI;

        if (srcImage->gainMap->image) {
            if (!dstImage->gainMap->image) {
                dstImage->gainMap->image = avifImageCreateEmpty();
            }
            // Recursive deep copy of the gain map's own image.
            AVIF_CHECKRES(avifImageCopy(dstImage->gainMap->image, srcImage->gainMap->image, planes));
        } else if (dstImage->gainMap->image) {
            // Source has no gain map image: drop any stale destination one.
            avifImageDestroy(dstImage->gainMap->image);
            dstImage->gainMap->image = NULL;
        }
    } else if (dstImage->gainMap) {
        // Source has no gain map at all: drop any stale destination one.
        avifGainMapDestroy(dstImage->gainMap);
        dstImage->gainMap = NULL;
    }

    return AVIF_RESULT_OK;
}
323 | | |
// Makes dstImage a non-owning view into srcImage restricted to |rect|:
// dstImage's plane pointers are offset into srcImage's buffers, with no pixel
// data copied or allocated. The rect must lie within the source image and,
// for subsampled color formats, be aligned to the chroma grid.
// Returns AVIF_RESULT_INVALID_ARGUMENT otherwise.
avifResult avifImageSetViewRect(avifImage * dstImage, const avifImage * srcImage, const avifCropRect * rect)
{
    avifPixelFormatInfo formatInfo;
    avifGetPixelFormatInfo(srcImage->yuvFormat, &formatInfo);
    // Reject rects extending past the source image. The subtractions are safe
    // because the first two comparisons guarantee width/height fit.
    if ((rect->width > srcImage->width) || (rect->height > srcImage->height) || (rect->x > (srcImage->width - rect->width)) ||
        (rect->y > (srcImage->height - rect->height))) {
        return AVIF_RESULT_INVALID_ARGUMENT;
    }
    // For subsampled formats (chromaShift == 1), x/y must be even so the view
    // starts on a chroma sample boundary. (x & 1) tests oddness.
    if (!formatInfo.monochrome && ((rect->x & formatInfo.chromaShiftX) || (rect->y & formatInfo.chromaShiftY))) {
        return AVIF_RESULT_INVALID_ARGUMENT;
    }
    avifImageFreePlanes(dstImage, AVIF_PLANES_ALL); // dstImage->imageOwnsYUVPlanes and dstImage->imageOwnsAlphaPlane set to AVIF_FALSE.
    avifImageCopyNoAlloc(dstImage, srcImage);
    dstImage->width = rect->width;
    dstImage->height = rect->height;
    const uint32_t pixelBytes = (srcImage->depth > 8) ? 2 : 1;
    if (srcImage->yuvPlanes[AVIF_CHAN_Y]) {
        for (int yuvPlane = AVIF_CHAN_Y; yuvPlane <= AVIF_CHAN_V; ++yuvPlane) {
            if (srcImage->yuvRowBytes[yuvPlane]) {
                // Chroma planes are addressed in subsampled coordinates.
                const size_t planeX = (yuvPlane == AVIF_CHAN_Y) ? rect->x : (rect->x >> formatInfo.chromaShiftX);
                const size_t planeY = (yuvPlane == AVIF_CHAN_Y) ? rect->y : (rect->y >> formatInfo.chromaShiftY);
                dstImage->yuvPlanes[yuvPlane] =
                    srcImage->yuvPlanes[yuvPlane] + planeY * srcImage->yuvRowBytes[yuvPlane] + planeX * pixelBytes;
                dstImage->yuvRowBytes[yuvPlane] = srcImage->yuvRowBytes[yuvPlane];
            }
        }
    }
    if (srcImage->alphaPlane) {
        dstImage->alphaPlane = srcImage->alphaPlane + (size_t)rect->y * srcImage->alphaRowBytes + (size_t)rect->x * pixelBytes;
        dstImage->alphaRowBytes = srcImage->alphaRowBytes;
    }
    return AVIF_RESULT_OK;
}
357 | | |
358 | | void avifImageDestroy(avifImage * image) |
359 | 1.70M | { |
360 | 1.70M | if (image->gainMap) { |
361 | 21.1k | avifGainMapDestroy(image->gainMap); |
362 | 21.1k | } |
363 | 1.70M | avifImageFreePlanes(image, AVIF_PLANES_ALL); |
364 | 1.70M | avifRWDataFree(&image->icc); |
365 | 1.70M | avifRWDataFree(&image->exif); |
366 | 1.70M | avifRWDataFree(&image->xmp); |
367 | 1.77M | for (size_t i = 0; i < image->numProperties; ++i) { |
368 | 70.2k | avifRWDataFree(&image->properties[i].boxPayload); |
369 | 70.2k | } |
370 | 1.70M | avifFree(image->properties); |
371 | 1.70M | image->properties = NULL; |
372 | 1.70M | image->numProperties = 0; |
373 | 1.70M | avifFree(image); |
374 | 1.70M | } |
375 | | |
// Replaces the image's ICC profile with a copy of |icc| (iccSize bytes).
// Passing a size of 0 clears the profile. Returns the result of the copy.
avifResult avifImageSetProfileICC(avifImage * image, const uint8_t * icc, size_t iccSize)
{
    return avifRWDataSet(&image->icc, icc, iccSize);
}
380 | | |
// Replaces the image's XMP metadata with a copy of |xmp| (xmpSize bytes).
// Passing a size of 0 clears the metadata. Returns the result of the copy.
avifResult avifImageSetMetadataXMP(avifImage * image, const uint8_t * xmp, size_t xmpSize)
{
    return avifRWDataSet(&image->xmp, xmp, xmpSize);
}
385 | | |
386 | | avifResult avifImagePushProperty(avifImage * image, const uint8_t boxtype[4], const uint8_t usertype[16], const uint8_t * boxPayload, size_t boxPayloadSize) |
387 | 62.1k | { |
388 | 62.1k | AVIF_CHECKERR(image->numProperties < SIZE_MAX / sizeof(avifImageItemProperty), AVIF_RESULT_INVALID_ARGUMENT); |
389 | | // Shallow copy the current properties. |
390 | 62.1k | const size_t numProperties = image->numProperties + 1; |
391 | 62.1k | avifImageItemProperty * const properties = (avifImageItemProperty *)avifAlloc(numProperties * sizeof(properties[0])); |
392 | 62.1k | AVIF_CHECKERR(properties != NULL, AVIF_RESULT_OUT_OF_MEMORY); |
393 | 62.1k | if (image->numProperties != 0) { |
394 | 19.6k | memcpy(properties, image->properties, image->numProperties * sizeof(properties[0])); |
395 | 19.6k | } |
396 | | // Free the old array and replace it by the new one. |
397 | 62.1k | avifFree(image->properties); |
398 | 62.1k | image->properties = properties; |
399 | 62.1k | image->numProperties = numProperties; |
400 | | // Set the new property. |
401 | 62.1k | avifImageItemProperty * const property = &image->properties[image->numProperties - 1]; |
402 | 62.1k | memset(property, 0, sizeof(*property)); |
403 | 62.1k | memcpy(property->boxtype, boxtype, sizeof(property->boxtype)); |
404 | 62.1k | memcpy(property->usertype, usertype, sizeof(property->usertype)); |
405 | 62.1k | AVIF_CHECKRES(avifRWDataSet(&property->boxPayload, boxPayload, boxPayloadSize)); |
406 | 62.1k | return AVIF_RESULT_OK; |
407 | 62.1k | } |
408 | | |
409 | | avifResult avifImageAddOpaqueProperty(avifImage * image, const uint8_t boxtype[4], const uint8_t * data, size_t dataSize) |
410 | 1.85k | { |
411 | 1.85k | const uint8_t uuid[16] = { 0 }; |
412 | | // Do not allow adding properties that are also handled by libavif |
413 | 1.85k | if (avifIsKnownPropertyType(boxtype)) { |
414 | 0 | return AVIF_RESULT_INVALID_ARGUMENT; |
415 | 0 | } |
416 | 1.85k | return avifImagePushProperty(image, boxtype, uuid, data, dataSize); |
417 | 1.85k | } |
418 | | |
419 | | avifResult avifImageAddUUIDProperty(avifImage * image, const uint8_t uuid[16], const uint8_t * data, size_t dataSize) |
420 | 2.25k | { |
421 | 2.25k | const uint8_t boxtype[4] = { 'u', 'u', 'i', 'd' }; |
422 | | // Do not allow adding invalid UUIDs, or using uuid representation of properties that are also handled by libavif |
423 | 2.25k | if (!avifIsValidUUID(uuid)) { |
424 | 1.68k | return AVIF_RESULT_INVALID_ARGUMENT; |
425 | 1.68k | } |
426 | 574 | return avifImagePushProperty(image, boxtype, uuid, data, dataSize); |
427 | 2.25k | } |
428 | | |
// Allocates the pixel planes selected by |planes| for the image's current
// width/height/depth/format. Planes that already exist are left untouched.
// The image takes ownership of planes allocated here. Returns
// AVIF_RESULT_INVALID_ARGUMENT for zero or overflowing dimensions and
// AVIF_RESULT_OUT_OF_MEMORY on allocation failure.
avifResult avifImageAllocatePlanes(avifImage * image, avifPlanesFlags planes)
{
    if (image->width == 0 || image->height == 0) {
        return AVIF_RESULT_INVALID_ARGUMENT;
    }
    const uint32_t channelSize = avifImageUsesU16(image) ? 2 : 1;
    // Overflow checks: rowBytes must fit in uint32_t, and the full plane size
    // must fit in ptrdiff_t so pointer arithmetic over the plane stays valid.
    if (image->width > UINT32_MAX / channelSize) {
        return AVIF_RESULT_INVALID_ARGUMENT;
    }
    const uint32_t fullRowBytes = channelSize * image->width;
    if (image->height > PTRDIFF_MAX / fullRowBytes) {
        return AVIF_RESULT_INVALID_ARGUMENT;
    }
    const size_t fullSize = (size_t)fullRowBytes * image->height;

    if ((planes & AVIF_PLANES_YUV) && (image->yuvFormat != AVIF_PIXEL_FORMAT_NONE)) {
        avifPixelFormatInfo info;
        avifGetPixelFormatInfo(image->yuvFormat, &info);

        image->imageOwnsYUVPlanes = AVIF_TRUE;
        if (!image->yuvPlanes[AVIF_CHAN_Y]) {
            image->yuvPlanes[AVIF_CHAN_Y] = (uint8_t *)avifAlloc(fullSize);
            if (!image->yuvPlanes[AVIF_CHAN_Y]) {
                return AVIF_RESULT_OUT_OF_MEMORY;
            }
            image->yuvRowBytes[AVIF_CHAN_Y] = fullRowBytes;
        }

        if (!info.monochrome) {
            // Intermediary computation as 64 bits in case width or height is exactly UINT32_MAX.
            const uint32_t shiftedW = (uint32_t)(((uint64_t)image->width + info.chromaShiftX) >> info.chromaShiftX);
            const uint32_t shiftedH = (uint32_t)(((uint64_t)image->height + info.chromaShiftY) >> info.chromaShiftY);

            // These are less than or equal to fullRowBytes/fullSize. No need to check overflows.
            const uint32_t uvRowBytes = channelSize * shiftedW;
            const size_t uvSize = (size_t)uvRowBytes * shiftedH;

            for (int uvPlane = AVIF_CHAN_U; uvPlane <= AVIF_CHAN_V; ++uvPlane) {
                if (!image->yuvPlanes[uvPlane]) {
                    image->yuvPlanes[uvPlane] = (uint8_t *)avifAlloc(uvSize);
                    if (!image->yuvPlanes[uvPlane]) {
                        return AVIF_RESULT_OUT_OF_MEMORY;
                    }
                    image->yuvRowBytes[uvPlane] = uvRowBytes;
                }
            }
        }
    }
    if (planes & AVIF_PLANES_A) {
        image->imageOwnsAlphaPlane = AVIF_TRUE;
        if (!image->alphaPlane) {
            // The alpha plane has the same geometry as the Y plane.
            image->alphaPlane = (uint8_t *)avifAlloc(fullSize);
            if (!image->alphaPlane) {
                return AVIF_RESULT_OUT_OF_MEMORY;
            }
            image->alphaRowBytes = fullRowBytes;
        }
    }
    return AVIF_RESULT_OK;
}
489 | | |
490 | | void avifImageFreePlanes(avifImage * image, avifPlanesFlags planes) |
491 | 2.27M | { |
492 | 2.27M | if ((planes & AVIF_PLANES_YUV) && (image->yuvFormat != AVIF_PIXEL_FORMAT_NONE)) { |
493 | 586k | if (image->imageOwnsYUVPlanes) { |
494 | 139k | avifFree(image->yuvPlanes[AVIF_CHAN_Y]); |
495 | 139k | avifFree(image->yuvPlanes[AVIF_CHAN_U]); |
496 | 139k | avifFree(image->yuvPlanes[AVIF_CHAN_V]); |
497 | 139k | } |
498 | 586k | image->yuvPlanes[AVIF_CHAN_Y] = NULL; |
499 | 586k | image->yuvRowBytes[AVIF_CHAN_Y] = 0; |
500 | 586k | image->yuvPlanes[AVIF_CHAN_U] = NULL; |
501 | 586k | image->yuvRowBytes[AVIF_CHAN_U] = 0; |
502 | 586k | image->yuvPlanes[AVIF_CHAN_V] = NULL; |
503 | 586k | image->yuvRowBytes[AVIF_CHAN_V] = 0; |
504 | 586k | image->imageOwnsYUVPlanes = AVIF_FALSE; |
505 | 586k | } |
506 | 2.27M | if (planes & AVIF_PLANES_A) { |
507 | 2.08M | if (image->imageOwnsAlphaPlane) { |
508 | 45.3k | avifFree(image->alphaPlane); |
509 | 45.3k | } |
510 | 2.08M | image->alphaPlane = NULL; |
511 | 2.08M | image->alphaRowBytes = 0; |
512 | 2.08M | image->imageOwnsAlphaPlane = AVIF_FALSE; |
513 | 2.08M | } |
514 | 2.27M | } |
515 | | |
516 | | void avifImageStealPlanes(avifImage * dstImage, avifImage * srcImage, avifPlanesFlags planes) |
517 | 112k | { |
518 | 112k | avifImageFreePlanes(dstImage, planes); |
519 | | |
520 | 112k | if (planes & AVIF_PLANES_YUV) { |
521 | 85.1k | dstImage->yuvPlanes[AVIF_CHAN_Y] = srcImage->yuvPlanes[AVIF_CHAN_Y]; |
522 | 85.1k | dstImage->yuvRowBytes[AVIF_CHAN_Y] = srcImage->yuvRowBytes[AVIF_CHAN_Y]; |
523 | 85.1k | dstImage->yuvPlanes[AVIF_CHAN_U] = srcImage->yuvPlanes[AVIF_CHAN_U]; |
524 | 85.1k | dstImage->yuvRowBytes[AVIF_CHAN_U] = srcImage->yuvRowBytes[AVIF_CHAN_U]; |
525 | 85.1k | dstImage->yuvPlanes[AVIF_CHAN_V] = srcImage->yuvPlanes[AVIF_CHAN_V]; |
526 | 85.1k | dstImage->yuvRowBytes[AVIF_CHAN_V] = srcImage->yuvRowBytes[AVIF_CHAN_V]; |
527 | | |
528 | 85.1k | srcImage->yuvPlanes[AVIF_CHAN_Y] = NULL; |
529 | 85.1k | srcImage->yuvRowBytes[AVIF_CHAN_Y] = 0; |
530 | 85.1k | srcImage->yuvPlanes[AVIF_CHAN_U] = NULL; |
531 | 85.1k | srcImage->yuvRowBytes[AVIF_CHAN_U] = 0; |
532 | 85.1k | srcImage->yuvPlanes[AVIF_CHAN_V] = NULL; |
533 | 85.1k | srcImage->yuvRowBytes[AVIF_CHAN_V] = 0; |
534 | | |
535 | 85.1k | dstImage->yuvFormat = srcImage->yuvFormat; |
536 | 85.1k | dstImage->imageOwnsYUVPlanes = srcImage->imageOwnsYUVPlanes; |
537 | 85.1k | srcImage->imageOwnsYUVPlanes = AVIF_FALSE; |
538 | 85.1k | } |
539 | 112k | if (planes & AVIF_PLANES_A) { |
540 | 27.1k | dstImage->alphaPlane = srcImage->alphaPlane; |
541 | 27.1k | dstImage->alphaRowBytes = srcImage->alphaRowBytes; |
542 | | |
543 | 27.1k | srcImage->alphaPlane = NULL; |
544 | 27.1k | srcImage->alphaRowBytes = 0; |
545 | | |
546 | 27.1k | dstImage->imageOwnsAlphaPlane = srcImage->imageOwnsAlphaPlane; |
547 | 27.1k | srcImage->imageOwnsAlphaPlane = AVIF_FALSE; |
548 | 27.1k | } |
549 | 112k | } |
550 | | |
// Returns whether samples are stored as uint16_t (depth > 8) rather than uint8_t.
avifBool avifImageUsesU16(const avifImage * image)
{
    return (image->depth > 8);
}
555 | | |
556 | | avifBool avifImageIsOpaque(const avifImage * image) |
557 | 26.4k | { |
558 | 26.4k | if (!image->alphaPlane) { |
559 | 13.3k | return AVIF_TRUE; |
560 | 13.3k | } |
561 | | |
562 | 13.1k | const uint32_t opaqueValue = (1u << image->depth) - 1u; |
563 | 13.1k | const uint8_t * row = image->alphaPlane; |
564 | 13.4k | for (uint32_t y = 0; y < image->height; ++y) { |
565 | 13.3k | if (avifImageUsesU16(image)) { |
566 | 4.81k | const uint16_t * row16 = (const uint16_t *)row; |
567 | 5.95k | for (uint32_t x = 0; x < image->width; ++x) { |
568 | 5.67k | if (row16[x] != opaqueValue) { |
569 | 4.53k | return AVIF_FALSE; |
570 | 4.53k | } |
571 | 5.67k | } |
572 | 8.58k | } else { |
573 | 8.63k | for (uint32_t x = 0; x < image->width; ++x) { |
574 | 8.61k | if (row[x] != opaqueValue) { |
575 | 8.55k | return AVIF_FALSE; |
576 | 8.55k | } |
577 | 8.61k | } |
578 | 8.58k | } |
579 | 297 | row += image->alphaRowBytes; |
580 | 297 | } |
581 | 27 | return AVIF_TRUE; |
582 | 13.1k | } |
583 | | |
584 | | uint8_t * avifImagePlane(const avifImage * image, int channel) |
585 | 21.9M | { |
586 | 21.9M | if ((channel == AVIF_CHAN_Y) || (channel == AVIF_CHAN_U) || (channel == AVIF_CHAN_V)) { |
587 | 20.1M | return image->yuvPlanes[channel]; |
588 | 20.1M | } |
589 | 1.78M | if (channel == AVIF_CHAN_A) { |
590 | 1.78M | return image->alphaPlane; |
591 | 1.78M | } |
592 | 0 | return NULL; |
593 | 1.78M | } |
594 | | |
595 | | uint32_t avifImagePlaneRowBytes(const avifImage * image, int channel) |
596 | 21.9M | { |
597 | 21.9M | if ((channel == AVIF_CHAN_Y) || (channel == AVIF_CHAN_U) || (channel == AVIF_CHAN_V)) { |
598 | 20.1M | return image->yuvRowBytes[channel]; |
599 | 20.1M | } |
600 | 1.78M | if (channel == AVIF_CHAN_A) { |
601 | 1.78M | return image->alphaRowBytes; |
602 | 1.78M | } |
603 | 0 | return 0; |
604 | 1.78M | } |
605 | | |
606 | | uint32_t avifImagePlaneWidth(const avifImage * image, int channel) |
607 | 10.9M | { |
608 | 10.9M | if (channel == AVIF_CHAN_Y) { |
609 | 3.37M | return image->width; |
610 | 3.37M | } |
611 | 7.58M | if ((channel == AVIF_CHAN_U) || (channel == AVIF_CHAN_V)) { |
612 | 6.73M | avifPixelFormatInfo formatInfo; |
613 | 6.73M | avifGetPixelFormatInfo(image->yuvFormat, &formatInfo); |
614 | 6.73M | if (formatInfo.monochrome) { |
615 | 4.07M | return 0; |
616 | 4.07M | } |
617 | 2.65M | return (image->width + formatInfo.chromaShiftX) >> formatInfo.chromaShiftX; |
618 | 6.73M | } |
619 | 851k | if ((channel == AVIF_CHAN_A) && image->alphaPlane) { |
620 | 825k | return image->width; |
621 | 825k | } |
622 | 25.6k | return 0; |
623 | 851k | } |
624 | | |
625 | | uint32_t avifImagePlaneHeight(const avifImage * image, int channel) |
626 | 689k | { |
627 | 689k | if (channel == AVIF_CHAN_Y) { |
628 | 193k | return image->height; |
629 | 193k | } |
630 | 495k | if ((channel == AVIF_CHAN_U) || (channel == AVIF_CHAN_V)) { |
631 | 368k | avifPixelFormatInfo formatInfo; |
632 | 368k | avifGetPixelFormatInfo(image->yuvFormat, &formatInfo); |
633 | 368k | if (formatInfo.monochrome) { |
634 | 135k | return 0; |
635 | 135k | } |
636 | 232k | return (image->height + formatInfo.chromaShiftY) >> formatInfo.chromaShiftY; |
637 | 368k | } |
638 | 127k | if ((channel == AVIF_CHAN_A) && image->alphaPlane) { |
639 | 49.4k | return image->height; |
640 | 49.4k | } |
641 | 78.3k | return 0; |
642 | 127k | } |
643 | | |
644 | | avifBool avifDimensionsTooLarge(uint32_t width, uint32_t height, uint32_t imageSizeLimit, uint32_t imageDimensionLimit) |
645 | 2.72M | { |
646 | 2.72M | if (width > (imageSizeLimit / height)) { |
647 | 2.51k | return AVIF_TRUE; |
648 | 2.51k | } |
649 | 2.71M | if ((imageDimensionLimit != 0) && ((width > imageDimensionLimit) || (height > imageDimensionLimit))) { |
650 | 1.99k | return AVIF_TRUE; |
651 | 1.99k | } |
652 | 2.71M | return AVIF_FALSE; |
653 | 2.71M | } |
654 | | |
655 | | // avifCodecCreate*() functions are in their respective codec_*.c files |
656 | | |
657 | | void avifCodecDestroy(avifCodec * codec) |
658 | 192k | { |
659 | 192k | if (codec && codec->destroyInternal) { |
660 | 192k | codec->destroyInternal(codec); |
661 | 192k | } |
662 | 192k | avifFree(codec); |
663 | 192k | } |
664 | | |
665 | | // --------------------------------------------------------------------------- |
666 | | // avifRGBImage |
667 | | |
668 | | avifBool avifRGBFormatIsGray(avifRGBFormat format) |
669 | 11.2k | { |
670 | 11.2k | return (format == AVIF_RGB_FORMAT_GRAY) || (format == AVIF_RGB_FORMAT_GRAYA) || (format == AVIF_RGB_FORMAT_AGRAY); |
671 | 11.2k | } |
672 | | |
673 | | avifBool avifRGBFormatHasAlpha(avifRGBFormat format) |
674 | 38.6k | { |
675 | 38.6k | return (format != AVIF_RGB_FORMAT_RGB) && (format != AVIF_RGB_FORMAT_BGR) && (format != AVIF_RGB_FORMAT_RGB_565) && |
676 | 38.6k | (format != AVIF_RGB_FORMAT_GRAY); |
677 | 38.6k | } |
678 | | |
679 | | uint32_t avifRGBFormatChannelCount(avifRGBFormat format) |
680 | 20.9k | { |
681 | 20.9k | if (format == AVIF_RGB_FORMAT_GRAY) { |
682 | 1.00k | return 1; |
683 | 1.00k | } |
684 | 19.9k | if ((format == AVIF_RGB_FORMAT_GRAYA) || (format == AVIF_RGB_FORMAT_AGRAY)) { |
685 | 1.13k | return 2; |
686 | 1.13k | } |
687 | 18.8k | return avifRGBFormatHasAlpha(format) ? 4 : 3; |
688 | 19.9k | } |
689 | | |
690 | | uint32_t avifRGBImagePixelSize(const avifRGBImage * rgb) |
691 | 20.8k | { |
692 | 20.8k | if (rgb->format == AVIF_RGB_FORMAT_RGB_565) { |
693 | 486 | return 2; |
694 | 486 | } |
695 | 20.3k | return avifRGBFormatChannelCount(rgb->format) * ((rgb->depth > 8) ? 2 : 1); |
696 | 20.8k | } |
697 | | |
698 | | void avifRGBImageSetDefaults(avifRGBImage * rgb, const avifImage * image) |
699 | 10.2k | { |
700 | 10.2k | rgb->width = image->width; |
701 | 10.2k | rgb->height = image->height; |
702 | 10.2k | rgb->depth = image->depth; |
703 | 10.2k | rgb->format = AVIF_RGB_FORMAT_RGBA; |
704 | 10.2k | rgb->chromaUpsampling = AVIF_CHROMA_UPSAMPLING_AUTOMATIC; |
705 | 10.2k | rgb->chromaDownsampling = AVIF_CHROMA_DOWNSAMPLING_AUTOMATIC; |
706 | 10.2k | rgb->avoidLibYUV = AVIF_FALSE; |
707 | 10.2k | rgb->ignoreAlpha = AVIF_FALSE; |
708 | 10.2k | rgb->pixels = NULL; |
709 | 10.2k | rgb->rowBytes = 0; |
710 | 10.2k | rgb->alphaPremultiplied = AVIF_FALSE; // Most expect RGBA output to *not* be premultiplied. Those that do can opt-in by |
711 | | // setting this to match image->alphaPremultiplied or forcing this to true |
712 | | // after calling avifRGBImageSetDefaults(), |
713 | 10.2k | rgb->isFloat = AVIF_FALSE; |
714 | 10.2k | rgb->maxThreads = 1; |
715 | 10.2k | } |
716 | | |
717 | | avifResult avifRGBImageAllocatePixels(avifRGBImage * rgb) |
718 | 10.2k | { |
719 | 10.2k | avifRGBImageFreePixels(rgb); |
720 | 10.2k | const uint32_t pixelSize = avifRGBImagePixelSize(rgb); |
721 | 10.2k | if (rgb->width > UINT32_MAX / pixelSize) { |
722 | 0 | return AVIF_RESULT_INVALID_ARGUMENT; |
723 | 0 | } |
724 | 10.2k | const uint32_t rowBytes = rgb->width * pixelSize; |
725 | 10.2k | if (rgb->height > PTRDIFF_MAX / rowBytes) { |
726 | 0 | return AVIF_RESULT_INVALID_ARGUMENT; |
727 | 0 | } |
728 | 10.2k | rgb->pixels = (uint8_t *)avifAlloc((size_t)rowBytes * rgb->height); |
729 | 10.2k | AVIF_CHECKERR(rgb->pixels, AVIF_RESULT_OUT_OF_MEMORY); |
730 | 10.2k | rgb->rowBytes = rowBytes; |
731 | 10.2k | return AVIF_RESULT_OK; |
732 | 10.2k | } |
733 | | |
734 | | void avifRGBImageFreePixels(avifRGBImage * rgb) |
735 | 23.5k | { |
736 | 23.5k | if (rgb->pixels) { |
737 | 10.2k | avifFree(rgb->pixels); |
738 | 10.2k | } |
739 | | |
740 | 23.5k | rgb->pixels = NULL; |
741 | 23.5k | rgb->rowBytes = 0; |
742 | 23.5k | } |
743 | | |
744 | | // --------------------------------------------------------------------------- |
745 | | // avifCropRect |
746 | | |
747 | | static avifFraction calcCenter(int32_t dim) |
748 | 464 | { |
749 | 464 | avifFraction f; |
750 | 464 | f.n = dim >> 1; |
751 | 464 | f.d = 1; |
752 | 464 | if ((dim % 2) != 0) { |
753 | 73 | f.n = dim; |
754 | 73 | f.d = 2; |
755 | 73 | } |
756 | 464 | return f; |
757 | 464 | } |
758 | | |
759 | | static avifBool overflowsInt32(int64_t x) |
760 | 0 | { |
761 | 0 | return (x < INT32_MIN) || (x > INT32_MAX); |
762 | 0 | } |
763 | | |
764 | | static avifBool avifCropRectIsValid(const avifCropRect * cropRect, uint32_t imageW, uint32_t imageH, avifDiagnostics * diag) |
765 | 46 | { |
766 | 46 | if ((cropRect->width == 0) || (cropRect->height == 0)) { |
767 | 1 | avifDiagnosticsPrintf(diag, "[Strict] crop rect width and height must be nonzero"); |
768 | 1 | return AVIF_FALSE; |
769 | 1 | } |
770 | 45 | if ((cropRect->x > (UINT32_MAX - cropRect->width)) || ((cropRect->x + cropRect->width) > imageW) || |
771 | 45 | (cropRect->y > (UINT32_MAX - cropRect->height)) || ((cropRect->y + cropRect->height) > imageH)) { |
772 | 45 | avifDiagnosticsPrintf(diag, "[Strict] crop rect is out of the image's bounds"); |
773 | 45 | return AVIF_FALSE; |
774 | 45 | } |
775 | 0 | return AVIF_TRUE; |
776 | 45 | } |
777 | | |
// Converts a CleanApertureBox ('clap') into an integer pixel crop rect for an
// imageW x imageH image. The clap stores a fractional width/height and a
// fractional offset of the clean-aperture center relative to the image center;
// this routine validates each fraction, resolves the offsets into a top-left
// corner, and requires every intermediate result to be a non-negative integer.
// On any violation a diagnostic is emitted and AVIF_FALSE is returned.
avifBool avifCropRectFromCleanApertureBox(avifCropRect * cropRect,
                                          const avifCleanApertureBox * clap,
                                          uint32_t imageW,
                                          uint32_t imageH,
                                          avifDiagnostics * diag)
{
    avifDiagnosticsClearError(diag);

    // ISO/IEC 14496-12:2022, Section 12.1.4.1:
    // For horizOff and vertOff, D shall be strictly positive and N may be
    // positive or negative. For cleanApertureWidth and cleanApertureHeight,
    // N shall be positive and D shall be strictly positive.

    // The box fields are stored unsigned; reinterpret them as signed because the
    // spec allows negative offset numerators.
    const int32_t widthN = (int32_t)clap->widthN;
    const int32_t widthD = (int32_t)clap->widthD;
    const int32_t heightN = (int32_t)clap->heightN;
    const int32_t heightD = (int32_t)clap->heightD;
    const int32_t horizOffN = (int32_t)clap->horizOffN;
    const int32_t horizOffD = (int32_t)clap->horizOffD;
    const int32_t vertOffN = (int32_t)clap->vertOffN;
    const int32_t vertOffD = (int32_t)clap->vertOffD;
    if ((widthD <= 0) || (heightD <= 0) || (horizOffD <= 0) || (vertOffD <= 0)) {
        avifDiagnosticsPrintf(diag, "[Strict] clap contains a denominator that is not strictly positive");
        return AVIF_FALSE;
    }
    if ((widthN < 0) || (heightN < 0)) {
        avifDiagnosticsPrintf(diag, "[Strict] clap width or height is negative");
        return AVIF_FALSE;
    }

    // ISO/IEC 23000-22:2019/Amd. 2:2021, Section 7.3.6.7:
    // - cleanApertureWidth and cleanApertureHeight shall be integers;
    // - The leftmost pixel and the topmost line of the clean aperture as
    //   defined in ISO/IEC 14496-12:2020, Section 12.1.4.1 shall be integers;
    // ...

    if ((widthN % widthD) != 0) {
        avifDiagnosticsPrintf(diag, "[Strict] clap width %d/%d is not an integer", widthN, widthD);
        return AVIF_FALSE;
    }
    if ((heightN % heightD) != 0) {
        avifDiagnosticsPrintf(diag, "[Strict] clap height %d/%d is not an integer", heightN, heightD);
        return AVIF_FALSE;
    }
    const int32_t clapW = widthN / widthD;
    const int32_t clapH = heightN / heightD;

    // The casts to int32_t below must not change the values.
    if ((imageW > INT32_MAX) || (imageH > INT32_MAX)) {
        avifDiagnosticsPrintf(diag, "[Strict] image width %u or height %u is greater than INT32_MAX", imageW, imageH);
        return AVIF_FALSE;
    }
    avifFraction uncroppedCenterX = calcCenter((int32_t)imageW);
    avifFraction uncroppedCenterY = calcCenter((int32_t)imageH);

    // The clean-aperture center is the image center plus the clap offset.
    avifFraction horizOff;
    horizOff.n = horizOffN;
    horizOff.d = horizOffD;
    avifFraction croppedCenterX;
    if (!avifFractionAdd(uncroppedCenterX, horizOff, &croppedCenterX)) {
        avifDiagnosticsPrintf(diag, "[Strict] croppedCenterX overflowed");
        return AVIF_FALSE;
    }

    avifFraction vertOff;
    vertOff.n = vertOffN;
    vertOff.d = vertOffD;
    avifFraction croppedCenterY;
    if (!avifFractionAdd(uncroppedCenterY, vertOff, &croppedCenterY)) {
        avifDiagnosticsPrintf(diag, "[Strict] croppedCenterY overflowed");
        return AVIF_FALSE;
    }

    // Top-left corner = center - half the clean-aperture size, and it must
    // resolve to an exact integer.
    avifFraction halfW;
    halfW.n = clapW;
    halfW.d = 2;
    avifFraction cropX;
    if (!avifFractionSub(croppedCenterX, halfW, &cropX)) {
        avifDiagnosticsPrintf(diag, "[Strict] cropX overflowed");
        return AVIF_FALSE;
    }
    if ((cropX.n % cropX.d) != 0) {
        avifDiagnosticsPrintf(diag, "[Strict] calculated crop X offset %d/%d is not an integer", cropX.n, cropX.d);
        return AVIF_FALSE;
    }

    avifFraction halfH;
    halfH.n = clapH;
    halfH.d = 2;
    avifFraction cropY;
    if (!avifFractionSub(croppedCenterY, halfH, &cropY)) {
        avifDiagnosticsPrintf(diag, "[Strict] cropY overflowed");
        return AVIF_FALSE;
    }
    if ((cropY.n % cropY.d) != 0) {
        avifDiagnosticsPrintf(diag, "[Strict] calculated crop Y offset %d/%d is not an integer", cropY.n, cropY.d);
        return AVIF_FALSE;
    }

    if ((cropX.n < 0) || (cropY.n < 0)) {
        avifDiagnosticsPrintf(diag, "[Strict] at least one crop offset is not positive");
        return AVIF_FALSE;
    }

    // The casts below are safe: all values were validated as non-negative ints.
    cropRect->x = (uint32_t)(cropX.n / cropX.d);
    cropRect->y = (uint32_t)(cropY.n / cropY.d);
    cropRect->width = (uint32_t)clapW;
    cropRect->height = (uint32_t)clapH;
    return avifCropRectIsValid(cropRect, imageW, imageH, diag);
}
887 | | |
// Inverse of avifCropRectFromCleanApertureBox(): derives a CleanApertureBox
// from an integer crop rect inside an imageW x imageH image. Width/height are
// stored with denominator 1; the offsets become the (possibly fractional)
// difference between the crop-rect center and the image center. Emits a
// diagnostic and returns AVIF_FALSE on invalid input or arithmetic overflow.
avifBool avifCleanApertureBoxFromCropRect(avifCleanApertureBox * clap,
                                          const avifCropRect * cropRect,
                                          uint32_t imageW,
                                          uint32_t imageH,
                                          avifDiagnostics * diag)
{
    avifDiagnosticsClearError(diag);

    if (!avifCropRectIsValid(cropRect, imageW, imageH, diag)) {
        return AVIF_FALSE;
    }

    // The casts to int32_t below must not change the values.
    if ((imageW > INT32_MAX) || (imageH > INT32_MAX)) {
        avifDiagnosticsPrintf(diag, "[Strict] image width %u or height %u is greater than INT32_MAX", imageW, imageH);
        return AVIF_FALSE;
    }
    avifFraction uncroppedCenterX = calcCenter((int32_t)imageW);
    avifFraction uncroppedCenterY = calcCenter((int32_t)imageH);

    if ((cropRect->width > INT32_MAX) || (cropRect->height > INT32_MAX)) {
        avifDiagnosticsPrintf(diag,
                              "[Strict] crop rect width %u or height %u is greater than INT32_MAX",
                              cropRect->width,
                              cropRect->height);
        return AVIF_FALSE;
    }
    // Crop-rect center = rect origin + half the rect size, computed in 64 bits
    // and then checked to fit back into the 32-bit fraction numerator.
    avifFraction croppedCenterX = calcCenter((int32_t)cropRect->width);
    const int64_t croppedCenterXN = croppedCenterX.n + (int64_t)cropRect->x * croppedCenterX.d;
    if (overflowsInt32(croppedCenterXN)) {
        avifDiagnosticsPrintf(diag, "[Strict] croppedCenterX overflowed");
        return AVIF_FALSE;
    }
    croppedCenterX.n = (int32_t)croppedCenterXN;
    avifFraction croppedCenterY = calcCenter((int32_t)cropRect->height);
    const int64_t croppedCenterYN = croppedCenterY.n + (int64_t)cropRect->y * croppedCenterY.d;
    if (overflowsInt32(croppedCenterYN)) {
        avifDiagnosticsPrintf(diag, "[Strict] croppedCenterY overflowed");
        return AVIF_FALSE;
    }
    croppedCenterY.n = (int32_t)croppedCenterYN;

    // The clap offset is the crop center relative to the image center.
    avifFraction horizOff;
    if (!avifFractionSub(croppedCenterX, uncroppedCenterX, &horizOff)) {
        avifDiagnosticsPrintf(diag, "[Strict] horizOff overflowed");
        return AVIF_FALSE;
    }
    avifFraction vertOff;
    if (!avifFractionSub(croppedCenterY, uncroppedCenterY, &vertOff)) {
        avifDiagnosticsPrintf(diag, "[Strict] vertOff overflowed");
        return AVIF_FALSE;
    }

    clap->widthN = cropRect->width;
    clap->widthD = 1;
    clap->heightN = cropRect->height;
    clap->heightD = 1;
    clap->horizOffN = horizOff.n;
    clap->horizOffD = horizOff.d;
    clap->vertOffN = vertOff.n;
    clap->vertOffD = vertOff.d;
    return AVIF_TRUE;
}
950 | | |
951 | | avifBool avifCropRectRequiresUpsampling(const avifCropRect * cropRect, avifPixelFormat yuvFormat) |
952 | 0 | { |
953 | | // ISO/IEC 23000-22:2024 FDIS, Section 7.3.6.7: |
954 | | // - If any of the following conditions hold true, the image is first implicitly upsampled to 4:4:4: |
955 | | // - chroma is subsampled horizontally (i.e., 4:2:2 and 4:2:0) and cleanApertureWidth is odd |
956 | | // - chroma is subsampled horizontally (i.e., 4:2:2 and 4:2:0) and left-most pixel is on an odd position |
957 | | // - chroma is subsampled vertically (i.e., 4:2:0) and cleanApertureHeight is odd |
958 | | // - chroma is subsampled vertically (i.e., 4:2:0) and topmost line is on an odd position |
959 | | |
960 | | // AV1 supports odd dimensions with chroma subsampling in those directions, so only look for x and y. |
961 | 0 | return ((yuvFormat == AVIF_PIXEL_FORMAT_YUV420 || yuvFormat == AVIF_PIXEL_FORMAT_YUV422) && (cropRect->x % 2)) || |
962 | 0 | (yuvFormat == AVIF_PIXEL_FORMAT_YUV420 && (cropRect->y % 2)); |
963 | 0 | } |
964 | | |
965 | | avifBool avifCropRectConvertCleanApertureBox(avifCropRect * cropRect, |
966 | | const avifCleanApertureBox * clap, |
967 | | uint32_t imageW, |
968 | | uint32_t imageH, |
969 | | avifPixelFormat yuvFormat, |
970 | | avifDiagnostics * diag) |
971 | 0 | { |
972 | 0 | if (!avifCropRectFromCleanApertureBox(cropRect, clap, imageW, imageH, diag)) { |
973 | 0 | return AVIF_FALSE; |
974 | 0 | } |
975 | | // Keep the same pre-deprecation behavior. |
976 | | |
977 | | // ISO/IEC 23000-22:2019/Amd. 2:2021, Section 7.3.6.7: |
978 | | // - If chroma is subsampled horizontally (i.e., 4:2:2 and 4:2:0), |
979 | | // the leftmost pixel of the clean aperture shall be even numbers; |
980 | | // - If chroma is subsampled vertically (i.e., 4:2:0), |
981 | | // the topmost line of the clean aperture shall be even numbers. |
982 | | |
983 | 0 | if (avifCropRectRequiresUpsampling(cropRect, yuvFormat)) { |
984 | 0 | avifDiagnosticsPrintf(diag, "[Strict] crop rect X and Y offsets must be even due to this image's YUV subsampling"); |
985 | 0 | return AVIF_FALSE; |
986 | 0 | } |
987 | 0 | return AVIF_TRUE; |
988 | 0 | } |
989 | | |
990 | | avifBool avifCleanApertureBoxConvertCropRect(avifCleanApertureBox * clap, |
991 | | const avifCropRect * cropRect, |
992 | | uint32_t imageW, |
993 | | uint32_t imageH, |
994 | | avifPixelFormat yuvFormat, |
995 | | avifDiagnostics * diag) |
996 | 0 | { |
997 | | // Keep the same pre-deprecation behavior. |
998 | | |
999 | | // ISO/IEC 23000-22:2019/Amd. 2:2021, Section 7.3.6.7: |
1000 | | // - If chroma is subsampled horizontally (i.e., 4:2:2 and 4:2:0), |
1001 | | // the leftmost pixel of the clean aperture shall be even numbers; |
1002 | | // - If chroma is subsampled vertically (i.e., 4:2:0), |
1003 | | // the topmost line of the clean aperture shall be even numbers. |
1004 | |
|
1005 | 0 | if (avifCropRectRequiresUpsampling(cropRect, yuvFormat)) { |
1006 | 0 | avifDiagnosticsPrintf(diag, "[Strict] crop rect X and Y offsets must be even due to this image's YUV subsampling"); |
1007 | 0 | return AVIF_FALSE; |
1008 | 0 | } |
1009 | | |
1010 | 0 | return avifCleanApertureBoxFromCropRect(clap, cropRect, imageW, imageH, diag); |
1011 | 0 | } |
1012 | | |
1013 | | // --------------------------------------------------------------------------- |
1014 | | |
1015 | | avifBool avifIsAlpha(avifItemCategory itemCategory) |
1016 | 1.81M | { |
1017 | 1.81M | if (itemCategory == AVIF_ITEM_ALPHA) { |
1018 | 323k | return AVIF_TRUE; |
1019 | 323k | } |
1020 | | #if defined(AVIF_ENABLE_EXPERIMENTAL_SAMPLE_TRANSFORM) |
1021 | | if (itemCategory >= AVIF_ITEM_SAMPLE_TRANSFORM_INPUT_0_ALPHA && |
1022 | | itemCategory < AVIF_ITEM_SAMPLE_TRANSFORM_INPUT_0_ALPHA + AVIF_SAMPLE_TRANSFORM_MAX_NUM_EXTRA_INPUT_IMAGE_ITEMS) { |
1023 | | return AVIF_TRUE; |
1024 | | } |
1025 | | #endif |
1026 | 1.49M | return AVIF_FALSE; |
1027 | 1.81M | } |
1028 | | |
1029 | | // --------------------------------------------------------------------------- |
1030 | | |
// Validates grid (tiled) image dimensions against the MIAF constraints:
// minimum tile size, and evenness requirements when chroma is subsampled.
// Emits a diagnostic and returns AVIF_FALSE on violation.
avifBool avifAreGridDimensionsValid(avifPixelFormat yuvFormat, uint32_t imageW, uint32_t imageH, uint32_t tileW, uint32_t tileH, avifDiagnostics * diag)
{
    // ISO/IEC 23000-22:2019, Section 7.3.11.4.2:
    // - the tile_width shall be greater than or equal to 64, and should be a multiple of 64
    // - the tile_height shall be greater than or equal to 64, and should be a multiple of 64
    // The "should" part is ignored here.
    if ((tileW < 64) || (tileH < 64)) {
        avifDiagnosticsPrintf(diag,
                              "Grid image tile width (%u) or height (%u) cannot be smaller than 64. "
                              "See MIAF (ISO/IEC 23000-22:2019), Section 7.3.11.4.2",
                              tileW,
                              tileH);
        return AVIF_FALSE;
    }

    // ISO/IEC 23000-22:2019, Section 7.3.11.4.2:
    // - when the images are in the 4:2:2 chroma sampling format the horizontal tile offsets and widths,
    //   and the output width, shall be even numbers;
    // - when the images are in the 4:2:0 chroma sampling format both the horizontal and vertical tile
    //   offsets and widths, and the output width and height, shall be even numbers.
    // If the rules above were not respected, the following problematic situation may happen:
    //   Some 4:2:0 image is 650 pixels wide and has 10 cell columns, each being 65 pixels wide.
    //   The chroma plane of the whole image is 325 pixels wide. The chroma plane of each cell is 33 pixels wide.
    //   33*10 - 325 gives 5 extra pixels with no specified destination in the reconstructed image.

    // Tile offsets are not enforced since they depend on tile size (ISO/IEC 23008-12:2017, Section 6.6.2.3.1):
    //   The reconstructed image is formed by tiling the input images into a grid [...] without gap or overlap
    if ((((yuvFormat == AVIF_PIXEL_FORMAT_YUV420) || (yuvFormat == AVIF_PIXEL_FORMAT_YUV422)) &&
         (((imageW % 2) != 0) || ((tileW % 2) != 0))) ||
        ((yuvFormat == AVIF_PIXEL_FORMAT_YUV420) && (((imageH % 2) != 0) || ((tileH % 2) != 0)))) {
        avifDiagnosticsPrintf(diag,
                              "Grid image width (%u) or height (%u) or tile width (%u) or height (%u) "
                              "shall be even if chroma is subsampled in that dimension. "
                              "See MIAF (ISO/IEC 23000-22:2019), Section 7.3.11.4.2",
                              imageW,
                              imageH,
                              tileW,
                              tileH);
        return AVIF_FALSE;
    }
    return AVIF_TRUE;
}
1073 | | |
1074 | | // --------------------------------------------------------------------------- |
1075 | | // avifCodecSpecificOption |
1076 | | |
// Duplicates a NUL-terminated string using avifAlloc().
// Returns NULL if the allocation failed; the caller owns (and must avifFree) the result.
static char * avifStrdup(const char * str)
{
    const size_t size = strlen(str) + 1; // include the terminating NUL
    char * copy = (char *)avifAlloc(size);
    if (copy != NULL) {
        memcpy(copy, str, size);
    }
    return copy;
}
1088 | | |
1089 | | avifCodecSpecificOptions * avifCodecSpecificOptionsCreate(void) |
1090 | 44.9k | { |
1091 | 44.9k | avifCodecSpecificOptions * ava = (avifCodecSpecificOptions *)avifAlloc(sizeof(avifCodecSpecificOptions)); |
1092 | 44.9k | if (!ava || !avifArrayCreate(ava, sizeof(avifCodecSpecificOption), 4)) { |
1093 | 0 | goto error; |
1094 | 0 | } |
1095 | 44.9k | return ava; |
1096 | | |
1097 | 0 | error: |
1098 | 0 | avifFree(ava); |
1099 | 0 | return NULL; |
1100 | 44.9k | } |
1101 | | |
1102 | | void avifCodecSpecificOptionsClear(avifCodecSpecificOptions * csOptions) |
1103 | 121k | { |
1104 | 121k | for (uint32_t i = 0; i < csOptions->count; ++i) { |
1105 | 0 | avifCodecSpecificOption * entry = &csOptions->entries[i]; |
1106 | 0 | avifFree(entry->key); |
1107 | 0 | avifFree(entry->value); |
1108 | 0 | } |
1109 | | |
1110 | 121k | csOptions->count = 0; |
1111 | 121k | } |
1112 | | |
1113 | | void avifCodecSpecificOptionsDestroy(avifCodecSpecificOptions * csOptions) |
1114 | 44.9k | { |
1115 | 44.9k | avifCodecSpecificOptionsClear(csOptions); |
1116 | 44.9k | avifArrayDestroy(csOptions); |
1117 | 44.9k | avifFree(csOptions); |
1118 | 44.9k | } |
1119 | | |
// Sets, replaces, or deletes the codec-specific option named 'key'.
// A non-NULL 'value' inserts or updates the entry; value == NULL deletes the
// entry if present (and is a no-op otherwise).
// Returns AVIF_RESULT_OUT_OF_MEMORY if duplicating a string or growing the
// array fails.
avifResult avifCodecSpecificOptionsSet(avifCodecSpecificOptions * csOptions, const char * key, const char * value)
{
    // Check to see if a key must be replaced
    for (uint32_t i = 0; i < csOptions->count; ++i) {
        avifCodecSpecificOption * entry = &csOptions->entries[i];
        if (!strcmp(entry->key, key)) {
            if (value) {
                // Update the value
                avifFree(entry->value);
                entry->value = avifStrdup(value);
                AVIF_CHECKERR(entry->value, AVIF_RESULT_OUT_OF_MEMORY);
            } else {
                // Delete the value
                avifFree(entry->key);
                avifFree(entry->value);
                --csOptions->count;
                // count was already decremented, so (count - i) is exactly the
                // number of entries that follow the removed one.
                if (csOptions->count > 0) {
                    memmove(&csOptions->entries[i], &csOptions->entries[i + 1], (csOptions->count - i) * (size_t)csOptions->elementSize);
                }
            }
            return AVIF_RESULT_OK;
        }
    }

    if (value) {
        // Add a new key
        avifCodecSpecificOption * entry = (avifCodecSpecificOption *)avifArrayPush(csOptions);
        AVIF_CHECKERR(entry, AVIF_RESULT_OUT_OF_MEMORY);
        entry->key = avifStrdup(key);
        AVIF_CHECKERR(entry->key, AVIF_RESULT_OUT_OF_MEMORY);
        entry->value = avifStrdup(value);
        AVIF_CHECKERR(entry->value, AVIF_RESULT_OUT_OF_MEMORY);
    }
    return AVIF_RESULT_OK;
}
1155 | | |
1156 | | // --------------------------------------------------------------------------- |
1157 | | // Codec availability and versions |
1158 | | |
// Returns a codec implementation's version string.
typedef const char * (*versionFunc)(void);
// Factory that instantiates an avifCodec for one implementation.
typedef avifCodec * (*avifCodecCreateFunc)(void);

// One row of the codec table: a compiled-in codec implementation and its capabilities.
struct AvailableCodec
{
    avifCodecChoice choice;     // selector used by the public API to pick this codec
    avifCodecType type;         // bitstream family (AV1 or AV2)
    const char * name;          // short name, e.g. "aom", "dav1d"
    versionFunc version;        // version-string callback
    avifCodecCreateFunc create; // instance factory
    uint32_t flags;             // avifCodecFlags capability bits (CAN_DECODE / CAN_ENCODE)
};
1171 | | |
// This is the main codec table; it determines all usage/availability in libavif.
// Entries are compiled in only when the matching AVIF_CODEC_* macro is defined.

static struct AvailableCodec availableCodecs[] = {
    // Ordered by preference (for AUTO)

#if defined(AVIF_CODEC_DAV1D)
    { AVIF_CODEC_CHOICE_DAV1D, AVIF_CODEC_TYPE_AV1, "dav1d", avifCodecVersionDav1d, avifCodecCreateDav1d, AVIF_CODEC_FLAG_CAN_DECODE },
#endif
#if defined(AVIF_CODEC_LIBGAV1)
    { AVIF_CODEC_CHOICE_LIBGAV1, AVIF_CODEC_TYPE_AV1, "libgav1", avifCodecVersionGav1, avifCodecCreateGav1, AVIF_CODEC_FLAG_CAN_DECODE },
#endif
#if defined(AVIF_CODEC_AOM)
    // aom is the only codec here whose capability flags depend on how it was built.
    { AVIF_CODEC_CHOICE_AOM,
      AVIF_CODEC_TYPE_AV1,
      "aom",
      avifCodecVersionAOM,
      avifCodecCreateAOM,
#if defined(AVIF_CODEC_AOM_DECODE) && defined(AVIF_CODEC_AOM_ENCODE)
      AVIF_CODEC_FLAG_CAN_DECODE | AVIF_CODEC_FLAG_CAN_ENCODE
#elif defined(AVIF_CODEC_AOM_DECODE)
      AVIF_CODEC_FLAG_CAN_DECODE
#elif defined(AVIF_CODEC_AOM_ENCODE)
      AVIF_CODEC_FLAG_CAN_ENCODE
#else
#error AVIF_CODEC_AOM_DECODE or AVIF_CODEC_AOM_ENCODE must be defined
#endif
    },
#endif
#if defined(AVIF_CODEC_RAV1E)
    { AVIF_CODEC_CHOICE_RAV1E, AVIF_CODEC_TYPE_AV1, "rav1e", avifCodecVersionRav1e, avifCodecCreateRav1e, AVIF_CODEC_FLAG_CAN_ENCODE },
#endif
#if defined(AVIF_CODEC_SVT)
    { AVIF_CODEC_CHOICE_SVT, AVIF_CODEC_TYPE_AV1, "svt", avifCodecVersionSvt, avifCodecCreateSvt, AVIF_CODEC_FLAG_CAN_ENCODE },
#endif
#if defined(AVIF_CODEC_AVM)
    { AVIF_CODEC_CHOICE_AVM, AVIF_CODEC_TYPE_AV2, "avm", avifCodecVersionAVM, avifCodecCreateAVM, AVIF_CODEC_FLAG_CAN_DECODE | AVIF_CODEC_FLAG_CAN_ENCODE },
#endif
    // Sentinel terminator; excluded from availableCodecsCount below.
    { AVIF_CODEC_CHOICE_AUTO, AVIF_CODEC_TYPE_UNKNOWN, NULL, NULL, NULL, 0 }
};

// Number of usable entries (the terminating sentinel is not counted).
static const int availableCodecsCount = (sizeof(availableCodecs) / sizeof(availableCodecs[0])) - 1;
1213 | | |
1214 | | static struct AvailableCodec * findAvailableCodec(avifCodecChoice choice, avifCodecFlags requiredFlags) |
1215 | 508k | { |
1216 | 878k | for (int i = 0; i < availableCodecsCount; ++i) { |
1217 | 878k | if ((choice != AVIF_CODEC_CHOICE_AUTO) && (availableCodecs[i].choice != choice)) { |
1218 | 233k | continue; |
1219 | 233k | } |
1220 | 645k | if (requiredFlags && ((availableCodecs[i].flags & requiredFlags) != requiredFlags)) { |
1221 | 136k | continue; |
1222 | 136k | } |
1223 | 508k | if ((choice == AVIF_CODEC_CHOICE_AUTO) && (availableCodecs[i].choice == AVIF_CODEC_CHOICE_AVM)) { |
1224 | | // AV2 is experimental and cannot be the default, it must be explicitly selected. |
1225 | 0 | continue; |
1226 | 0 | } |
1227 | 508k | return &availableCodecs[i]; |
1228 | 508k | } |
1229 | 0 | return NULL; |
1230 | 508k | } |
1231 | | |
1232 | | const char * avifCodecName(avifCodecChoice choice, avifCodecFlags requiredFlags) |
1233 | 76.9k | { |
1234 | 76.9k | struct AvailableCodec * availableCodec = findAvailableCodec(choice, requiredFlags); |
1235 | 76.9k | if (availableCodec) { |
1236 | 76.9k | return availableCodec->name; |
1237 | 76.9k | } |
1238 | 0 | return NULL; |
1239 | 76.9k | } |
1240 | | |
1241 | | avifCodecType avifCodecTypeFromChoice(avifCodecChoice choice, avifCodecFlags requiredFlags) |
1242 | 239k | { |
1243 | 239k | struct AvailableCodec * availableCodec = findAvailableCodec(choice, requiredFlags); |
1244 | 239k | if (availableCodec) { |
1245 | 239k | return availableCodec->type; |
1246 | 239k | } |
1247 | 0 | return AVIF_CODEC_TYPE_UNKNOWN; |
1248 | 239k | } |
1249 | | |
1250 | | avifCodecChoice avifCodecChoiceFromName(const char * name) |
1251 | 0 | { |
1252 | 0 | for (int i = 0; i < availableCodecsCount; ++i) { |
1253 | 0 | if (!strcmp(availableCodecs[i].name, name)) { |
1254 | 0 | return availableCodecs[i].choice; |
1255 | 0 | } |
1256 | 0 | } |
1257 | 0 | return AVIF_CODEC_CHOICE_AUTO; |
1258 | 0 | } |
1259 | | |
1260 | | avifResult avifCodecCreate(avifCodecChoice choice, avifCodecFlags requiredFlags, avifCodec ** codec) |
1261 | 192k | { |
1262 | 192k | *codec = NULL; |
1263 | 192k | struct AvailableCodec * availableCodec = findAvailableCodec(choice, requiredFlags); |
1264 | 192k | AVIF_CHECKERR(availableCodec != NULL, AVIF_RESULT_NO_CODEC_AVAILABLE); |
1265 | 192k | *codec = availableCodec->create(); |
1266 | 192k | AVIF_CHECKERR(*codec != NULL, AVIF_RESULT_OUT_OF_MEMORY); |
1267 | 192k | return AVIF_RESULT_OK; |
1268 | 192k | } |
1269 | | |
// Appends appendStr to *writePos, truncating to *remainingLen bytes, and keeps
// the destination NUL-terminated. Advances *writePos past the copied bytes and
// shrinks *remainingLen accordingly.
static void append(char ** writePos, size_t * remainingLen, const char * appendStr)
{
    size_t copyLen = strlen(appendStr);
    if (copyLen > *remainingLen) {
        copyLen = *remainingLen; // truncate to the space left
    }
    memcpy(*writePos, appendStr, copyLen);
    *writePos += copyLen;
    *remainingLen -= copyLen;
    **writePos = '\0'; // the caller reserves one byte beyond remainingLen for this NUL
}
1282 | | |
// Writes a comma-separated list of the compiled-in codecs, their capabilities,
// and their versions into outBuffer, e.g. "aom [enc/dec]:v3.8.0, dav1d [dec]:1.3.0".
// Output is truncated to fit and always NUL-terminated.
void avifCodecVersions(char outBuffer[256])
{
    size_t remainingLen = 255; // reserve one byte for the terminating NUL
    char * writePos = outBuffer;
    *writePos = 0;

    for (int i = 0; i < availableCodecsCount; ++i) {
        if (i > 0) {
            append(&writePos, &remainingLen, ", ");
        }
        append(&writePos, &remainingLen, availableCodecs[i].name);
        // Tag each codec with its capability set.
        if ((availableCodecs[i].flags & (AVIF_CODEC_FLAG_CAN_ENCODE | AVIF_CODEC_FLAG_CAN_DECODE)) ==
            (AVIF_CODEC_FLAG_CAN_ENCODE | AVIF_CODEC_FLAG_CAN_DECODE)) {
            append(&writePos, &remainingLen, " [enc/dec]");
        } else if (availableCodecs[i].flags & AVIF_CODEC_FLAG_CAN_ENCODE) {
            append(&writePos, &remainingLen, " [enc]");
        } else if (availableCodecs[i].flags & AVIF_CODEC_FLAG_CAN_DECODE) {
            append(&writePos, &remainingLen, " [dec]");
        }
        append(&writePos, &remainingLen, ":");
        append(&writePos, &remainingLen, availableCodecs[i].version());
    }
}
1306 | | |
1307 | | avifGainMap * avifGainMapCreate(void) |
1308 | 21.1k | { |
1309 | 21.1k | avifGainMap * gainMap = (avifGainMap *)avifAlloc(sizeof(avifGainMap)); |
1310 | 21.1k | if (!gainMap) { |
1311 | 0 | return NULL; |
1312 | 0 | } |
1313 | 21.1k | avifGainMapSetDefaults(gainMap); |
1314 | | // Note that some functions like avifDecoderFindGainMapItem() allocate avifGainMap directly on |
1315 | | // the stack instead of calling avifGainMapCreate() to simplify error handling. This works under |
1316 | | // the assumption that no complex initialization (such as dynamic allocation of fields) takes |
1317 | | // place here. If this function becomes more complex than one alloc + setDefaults, such code |
1318 | | // might need to be changed. |
1319 | 21.1k | return gainMap; |
1320 | 21.1k | } |
1321 | | |
1322 | | void avifGainMapSetDefaults(avifGainMap * gainMap) |
1323 | 67.1k | { |
1324 | 67.1k | memset(gainMap, 0, sizeof(avifGainMap)); |
1325 | 67.1k | gainMap->altColorPrimaries = AVIF_COLOR_PRIMARIES_UNSPECIFIED; |
1326 | 67.1k | gainMap->altTransferCharacteristics = AVIF_TRANSFER_CHARACTERISTICS_UNSPECIFIED; |
1327 | 67.1k | gainMap->altMatrixCoefficients = AVIF_MATRIX_COEFFICIENTS_UNSPECIFIED; |
1328 | 67.1k | gainMap->altYUVRange = AVIF_RANGE_FULL; |
1329 | 67.1k | gainMap->useBaseColorSpace = AVIF_TRUE; |
1330 | | // Set all denominators to valid values (1). |
1331 | 268k | for (int i = 0; i < 3; ++i) { |
1332 | 201k | gainMap->gainMapMin[i].d = 1; |
1333 | 201k | gainMap->gainMapMax[i].d = 1; |
1334 | 201k | gainMap->gainMapGamma[i].n = 1; |
1335 | 201k | gainMap->gainMapGamma[i].d = 1; |
1336 | 201k | gainMap->baseOffset[i].d = 1; |
1337 | 201k | gainMap->alternateOffset[i].d = 1; |
1338 | 201k | } |
1339 | 67.1k | gainMap->baseHdrHeadroom.d = 1; |
1340 | 67.1k | gainMap->alternateHdrHeadroom.d = 1; |
1341 | 67.1k | } |
1342 | | |
1343 | | void avifGainMapDestroy(avifGainMap * gainMap) |
1344 | 21.1k | { |
1345 | 21.1k | if (gainMap->image) { |
1346 | 11.1k | avifImageDestroy(gainMap->image); |
1347 | 11.1k | } |
1348 | 21.1k | avifRWDataFree(&gainMap->altICC); |
1349 | 21.1k | avifFree(gainMap); |
1350 | 21.1k | } |