/src/libavif/apps/shared/y4m.c
Line | Count | Source |
1 | | // Copyright 2019 Joe Drago. All rights reserved. |
2 | | // SPDX-License-Identifier: BSD-2-Clause |
3 | | |
4 | | // This is a barebones y4m reader/writer for basic libavif testing. It is NOT comprehensive! |
5 | | |
6 | | #include "y4m.h" |
7 | | |
8 | | #include <assert.h> |
9 | | #include <inttypes.h> |
10 | | #include <limits.h> |
11 | | #include <stdio.h> |
12 | | #include <stdlib.h> |
13 | | #include <string.h> |
14 | | |
15 | | #include "avif/avif.h" |
16 | | #include "avifexif.h" |
17 | | #include "avifutil.h" |
18 | | |
19 | 86 | #define Y4M_MAX_LINE_SIZE 2048 // Arbitrary limit. Y4M headers should be much smaller than this |
20 | | |
// Iteration state for reading successive FRAMEs from a single y4m stream.
// Filled from the stream header on the first y4mRead() call and carried
// between calls via the heap-allocated iterator.
struct y4mFrameIterator
{
    int width;  // frame width in pixels; -1 until parsed from the header
    int height; // frame height in pixels; -1 until parsed from the header
    int depth;  // bit depth per sample: 8, 10 or 12
    avifBool hasAlpha; // AVIF_TRUE only for the "C444alpha" color space
    avifPixelFormat format;
    avifRange range; // full vs limited; set from the XCOLORRANGE header tag
    avifChromaSamplePosition chromaSamplePosition;
    avifAppSourceTiming sourceTiming; // framerate parsed from the F header tag

    FILE * inputFile;             // open input stream; stdin when no filename was given
    const char * displayFilename; // name used in diagnostics ("(stdin)" for stdin)
};
35 | | |
36 | | // Sets frame->format, frame->depth, frame->hasAlpha, and frame->chromaSamplePosition. |
37 | | static avifBool y4mColorSpaceParse(const char * formatString, struct y4mFrameIterator * frame) |
38 | 31 | { |
39 | 31 | frame->hasAlpha = AVIF_FALSE; |
40 | 31 | frame->chromaSamplePosition = AVIF_CHROMA_SAMPLE_POSITION_UNKNOWN; |
41 | | |
42 | 31 | if (!strcmp(formatString, "C420jpeg")) { |
43 | 17 | frame->format = AVIF_PIXEL_FORMAT_YUV420; |
44 | 17 | frame->depth = 8; |
45 | | // Chroma sample position is center. |
46 | 17 | return AVIF_TRUE; |
47 | 17 | } |
48 | 14 | if (!strcmp(formatString, "C420mpeg2")) { |
49 | 0 | frame->format = AVIF_PIXEL_FORMAT_YUV420; |
50 | 0 | frame->depth = 8; |
51 | 0 | frame->chromaSamplePosition = AVIF_CHROMA_SAMPLE_POSITION_VERTICAL; |
52 | 0 | return AVIF_TRUE; |
53 | 0 | } |
54 | 14 | if (!strcmp(formatString, "C420paldv")) { |
55 | 0 | frame->format = AVIF_PIXEL_FORMAT_YUV420; |
56 | 0 | frame->depth = 8; |
57 | 0 | frame->chromaSamplePosition = AVIF_CHROMA_SAMPLE_POSITION_COLOCATED; |
58 | 0 | return AVIF_TRUE; |
59 | 0 | } |
60 | 14 | if (!strcmp(formatString, "C444p10")) { |
61 | 0 | frame->format = AVIF_PIXEL_FORMAT_YUV444; |
62 | 0 | frame->depth = 10; |
63 | 0 | return AVIF_TRUE; |
64 | 0 | } |
65 | 14 | if (!strcmp(formatString, "C422p10")) { |
66 | 0 | frame->format = AVIF_PIXEL_FORMAT_YUV422; |
67 | 0 | frame->depth = 10; |
68 | 0 | return AVIF_TRUE; |
69 | 0 | } |
70 | 14 | if (!strcmp(formatString, "C420p10")) { |
71 | 0 | frame->format = AVIF_PIXEL_FORMAT_YUV420; |
72 | 0 | frame->depth = 10; |
73 | 0 | return AVIF_TRUE; |
74 | 0 | } |
75 | 14 | if (!strcmp(formatString, "C444p12")) { |
76 | 0 | frame->format = AVIF_PIXEL_FORMAT_YUV444; |
77 | 0 | frame->depth = 12; |
78 | 0 | return AVIF_TRUE; |
79 | 0 | } |
80 | 14 | if (!strcmp(formatString, "C422p12")) { |
81 | 0 | frame->format = AVIF_PIXEL_FORMAT_YUV422; |
82 | 0 | frame->depth = 12; |
83 | 0 | return AVIF_TRUE; |
84 | 0 | } |
85 | 14 | if (!strcmp(formatString, "C420p12")) { |
86 | 0 | frame->format = AVIF_PIXEL_FORMAT_YUV420; |
87 | 0 | frame->depth = 12; |
88 | 0 | return AVIF_TRUE; |
89 | 0 | } |
90 | 14 | if (!strcmp(formatString, "C444")) { |
91 | 6 | frame->format = AVIF_PIXEL_FORMAT_YUV444; |
92 | 6 | frame->depth = 8; |
93 | 6 | return AVIF_TRUE; |
94 | 6 | } |
95 | 8 | if (!strcmp(formatString, "C444alpha")) { |
96 | 0 | frame->format = AVIF_PIXEL_FORMAT_YUV444; |
97 | 0 | frame->depth = 8; |
98 | 0 | frame->hasAlpha = AVIF_TRUE; |
99 | 0 | return AVIF_TRUE; |
100 | 0 | } |
101 | 8 | if (!strcmp(formatString, "C422")) { |
102 | 0 | frame->format = AVIF_PIXEL_FORMAT_YUV422; |
103 | 0 | frame->depth = 8; |
104 | 0 | return AVIF_TRUE; |
105 | 0 | } |
106 | 8 | if (!strcmp(formatString, "C420")) { |
107 | 0 | frame->format = AVIF_PIXEL_FORMAT_YUV420; |
108 | 0 | frame->depth = 8; |
109 | | // Chroma sample position is center. |
110 | 0 | return AVIF_TRUE; |
111 | 0 | } |
112 | 8 | if (!strcmp(formatString, "Cmono")) { |
113 | 0 | frame->format = AVIF_PIXEL_FORMAT_YUV400; |
114 | 0 | frame->depth = 8; |
115 | 0 | return AVIF_TRUE; |
116 | 0 | } |
117 | 8 | if (!strcmp(formatString, "Cmono10")) { |
118 | 0 | frame->format = AVIF_PIXEL_FORMAT_YUV400; |
119 | 0 | frame->depth = 10; |
120 | 0 | return AVIF_TRUE; |
121 | 0 | } |
122 | 8 | if (!strcmp(formatString, "Cmono12")) { |
123 | 0 | frame->format = AVIF_PIXEL_FORMAT_YUV400; |
124 | 0 | frame->depth = 12; |
125 | 0 | return AVIF_TRUE; |
126 | 0 | } |
127 | 8 | return AVIF_FALSE; |
128 | 8 | } |
129 | | |
// Parses a base-10 unsigned integer from [start:end[.
// Parsing stops at the first non-digit character or at end.
// Returns -1 if no digit is present at start or if the value overflows int.
static int y4mReadUnsignedInt(const char * start, const char * end)
{
    int64_t accumulated = 0;
    const char * cursor;
    for (cursor = start; (cursor < end) && (*cursor >= '0') && (*cursor <= '9'); ++cursor) {
        // Accumulate in 64 bits so the overflow check itself cannot overflow.
        accumulated = accumulated * 10 + (*cursor - '0');
        if (accumulated > INT_MAX) {
            return -1;
        }
    }
    if (cursor == start) {
        return -1; // no digits at all
    }
    return (int)accumulated;
}
144 | | |
145 | | // Note: this modifies framerateString |
146 | | static avifBool y4mFramerateParse(char * framerateString, avifAppSourceTiming * sourceTiming) |
147 | 18 | { |
148 | 18 | if (framerateString[0] != 'F') { |
149 | 0 | return AVIF_FALSE; |
150 | 0 | } |
151 | 18 | ++framerateString; // skip past 'F' |
152 | | |
153 | 18 | char * colonLocation = strchr(framerateString, ':'); |
154 | 18 | if (!colonLocation) { |
155 | 0 | return AVIF_FALSE; |
156 | 0 | } |
157 | 18 | *colonLocation = 0; |
158 | 18 | ++colonLocation; |
159 | | |
160 | 18 | int numerator = atoi(framerateString); |
161 | 18 | int denominator = atoi(colonLocation); |
162 | 18 | if ((numerator < 1) || (denominator < 1)) { |
163 | 2 | return AVIF_FALSE; |
164 | 2 | } |
165 | | |
166 | 16 | sourceTiming->timescale = (uint64_t)numerator; |
167 | 16 | sourceTiming->duration = (uint64_t)denominator; |
168 | 16 | return AVIF_TRUE; |
169 | 18 | } |
170 | | |
171 | | static avifBool getHeaderString(uint8_t * p, uint8_t * end, char * out, size_t maxChars) |
172 | 94 | { |
173 | 94 | uint8_t * headerEnd = p; |
174 | 1.08k | while ((*headerEnd != ' ') && (*headerEnd != '\n')) { |
175 | 990 | if (headerEnd >= end) { |
176 | 0 | return AVIF_FALSE; |
177 | 0 | } |
178 | 990 | ++headerEnd; |
179 | 990 | } |
180 | 94 | size_t formatLen = headerEnd - p; |
181 | 94 | if (formatLen > maxChars) { |
182 | 0 | return AVIF_FALSE; |
183 | 0 | } |
184 | | |
185 | 94 | strncpy(out, (const char *)p, formatLen); |
186 | 94 | out[formatLen] = 0; |
187 | 94 | return AVIF_TRUE; |
188 | 94 | } |
189 | | |
190 | | static int y4mReadLine(FILE * inputFile, avifRWData * raw, const char * displayFilename) |
191 | 54 | { |
192 | 54 | static const int maxBytes = Y4M_MAX_LINE_SIZE; |
193 | 54 | int bytesRead = 0; |
194 | 54 | uint8_t * front = raw->data; |
195 | | |
196 | 4.46k | for (;;) { |
197 | 4.46k | if (fread(front, 1, 1, inputFile) != 1) { |
198 | 0 | fprintf(stderr, "Failed to read line: %s\n", displayFilename); |
199 | 0 | break; |
200 | 0 | } |
201 | | |
202 | 4.46k | ++bytesRead; |
203 | 4.46k | if (bytesRead >= maxBytes) { |
204 | 1 | break; |
205 | 1 | } |
206 | | |
207 | 4.46k | if (*front == '\n') { |
208 | 53 | return bytesRead; |
209 | 53 | } |
210 | 4.41k | ++front; |
211 | 4.41k | } |
212 | 1 | return -1; |
213 | 54 | } |
214 | | |
215 | | // Limits each sample value to fit into avif->depth bits. |
216 | | // Returns AVIF_TRUE if any sample was clamped this way. |
217 | | static avifBool y4mClampSamples(avifImage * avif) |
218 | 6 | { |
219 | 6 | if (!avifImageUsesU16(avif)) { |
220 | 6 | assert(avif->depth == 8); |
221 | 6 | return AVIF_FALSE; |
222 | 6 | } |
223 | 6 | assert(avif->depth < 16); // Otherwise it could be skipped too. |
224 | | |
225 | | // AV1 encoders and decoders do not care whether the samples are full range or limited range |
226 | | // for the internal computation: it is only passed as an informative tag, so ignore avif->yuvRange. |
227 | 0 | const uint16_t maxSampleValue = (uint16_t)((1u << avif->depth) - 1u); |
228 | |
|
229 | 0 | avifBool samplesWereClamped = AVIF_FALSE; |
230 | 0 | for (int plane = AVIF_CHAN_Y; plane <= AVIF_CHAN_A; ++plane) { |
231 | 0 | uint32_t planeHeight = avifImagePlaneHeight(avif, plane); // 0 for UV if 4:0:0. |
232 | 0 | uint32_t planeWidth = avifImagePlaneWidth(avif, plane); |
233 | 0 | uint8_t * row = avifImagePlane(avif, plane); |
234 | 0 | uint32_t rowBytes = avifImagePlaneRowBytes(avif, plane); |
235 | 0 | for (uint32_t y = 0; y < planeHeight; ++y) { |
236 | 0 | uint16_t * row16 = (uint16_t *)row; |
237 | 0 | for (uint32_t x = 0; x < planeWidth; ++x) { |
238 | 0 | if (row16[x] > maxSampleValue) { |
239 | 0 | row16[x] = maxSampleValue; |
240 | 0 | samplesWereClamped = AVIF_TRUE; |
241 | 0 | } |
242 | 0 | } |
243 | 0 | row += rowBytes; |
244 | 0 | } |
245 | 0 | } |
246 | 0 | return samplesWereClamped; |
247 | 6 | } |
248 | | |
// Advances the header-parsing cursor `p` by BYTES. Jumps to the enclosing
// function's `cleanup` label (i.e. treats it as a parse failure) if the
// cursor reaches or passes `end`. Requires locals `p` and `end` and a
// `cleanup` label in the function that uses it.
#define ADVANCE(BYTES)    \
    do {                  \
        p += BYTES;       \
        if (p >= end)     \
            goto cleanup; \
    } while (0)
255 | | |
// Reads the next frame from the y4m file inputFilename (or stdin when NULL)
// into avif. On the first call *iter must be NULL (or iter may be NULL for
// single-frame use); if more frames remain after this one, *iter receives
// heap-allocated state so the next call resumes at the following frame, and
// the file stays open. Returns AVIF_FALSE on any parse/alloc/I/O failure.
avifBool y4mRead(const char * inputFilename,
                 uint32_t imageSizeLimit,
                 avifImage * avif,
                 avifAppSourceTiming * sourceTiming,
                 struct y4mFrameIterator ** iter)
{
    avifBool result = AVIF_FALSE;

    struct y4mFrameIterator frame;
    frame.width = -1;  // -1 marks "not yet parsed from the header"
    frame.height = -1;
    // Default to the color space "C420" to match the defaults of aomenc and ffmpeg.
    frame.depth = 8;
    frame.hasAlpha = AVIF_FALSE;
    frame.format = AVIF_PIXEL_FORMAT_YUV420;
    frame.range = AVIF_RANGE_LIMITED;
    frame.chromaSamplePosition = AVIF_CHROMA_SAMPLE_POSITION_UNKNOWN;
    memset(&frame.sourceTiming, 0, sizeof(avifAppSourceTiming));
    frame.inputFile = NULL;
    frame.displayFilename = inputFilename;

    // Scratch buffer reused for both the stream header and frame header lines.
    avifRWData raw = AVIF_DATA_EMPTY;
    if (avifRWDataRealloc(&raw, Y4M_MAX_LINE_SIZE) != AVIF_RESULT_OK) {
        fprintf(stderr, "Out of memory\n");
        goto cleanup;
    }

    if (iter && *iter) {
        // Continue reading FRAMEs from this y4m stream
        frame = **iter;
    } else {
        // Open a fresh y4m and read its header

        if (inputFilename) {
            frame.inputFile = fopen(inputFilename, "rb");
            if (!frame.inputFile) {
                fprintf(stderr, "Cannot open file for read: %s\n", inputFilename);
                goto cleanup;
            }
        } else {
            frame.inputFile = stdin;
            frame.displayFilename = "(stdin)";
        }

        int headerBytes = y4mReadLine(frame.inputFile, &raw, frame.displayFilename);
        if (headerBytes < 0) {
            fprintf(stderr, "Y4M header too large: %s\n", frame.displayFilename);
            goto cleanup;
        }
        if (headerBytes < 10) { // 10 is the length of the "YUV4MPEG2 " magic
            fprintf(stderr, "Y4M header too small: %s\n", frame.displayFilename);
            goto cleanup;
        }

        uint8_t * end = raw.data + headerBytes;
        uint8_t * p = raw.data;

        if (memcmp(p, "YUV4MPEG2 ", 10) != 0) {
            fprintf(stderr, "Not a y4m file: %s\n", frame.displayFilename);
            goto cleanup;
        }
        ADVANCE(10); // skip past header

        char tmpBuffer[32];

        // Parse the space-separated header tags, dispatching on each tag's
        // first letter. ADVANCE() jumps to cleanup (parse failure) if the
        // header runs out before the terminating newline.
        while (p != end) {
            switch (*p) {
                case 'W': // width
                    frame.width = y4mReadUnsignedInt((const char *)p + 1, (const char *)end);
                    break;
                case 'H': // height
                    frame.height = y4mReadUnsignedInt((const char *)p + 1, (const char *)end);
                    break;
                case 'C': // color space
                    if (!getHeaderString(p, end, tmpBuffer, 31)) {
                        fprintf(stderr, "Bad y4m header: %s\n", frame.displayFilename);
                        goto cleanup;
                    }
                    if (!y4mColorSpaceParse(tmpBuffer, &frame)) {
                        fprintf(stderr, "Unsupported y4m pixel format: %s\n", frame.displayFilename);
                        goto cleanup;
                    }
                    break;
                case 'F': // framerate
                    if (!getHeaderString(p, end, tmpBuffer, 31)) {
                        fprintf(stderr, "Bad y4m header: %s\n", frame.displayFilename);
                        goto cleanup;
                    }
                    if (!y4mFramerateParse(tmpBuffer, &frame.sourceTiming)) {
                        fprintf(stderr, "Unsupported framerate: %s\n", frame.displayFilename);
                        goto cleanup;
                    }
                    break;
                case 'X': // extension tag; only XCOLORRANGE is recognized
                    if (!getHeaderString(p, end, tmpBuffer, 31)) {
                        fprintf(stderr, "Bad y4m header: %s\n", frame.displayFilename);
                        goto cleanup;
                    }
                    if (!strcmp(tmpBuffer, "XCOLORRANGE=FULL")) {
                        frame.range = AVIF_RANGE_FULL;
                    }
                    break;
                default: // unrecognized tags are silently skipped
                    break;
            }

            // Advance past header section
            while ((*p != '\n') && (*p != ' ')) {
                ADVANCE(1);
            }
            if (*p == '\n') {
                // Done with y4m header
                break;
            }

            ADVANCE(1);
        }

        if (*p != '\n') {
            fprintf(stderr, "Truncated y4m header (no newline): %s\n", frame.displayFilename);
            goto cleanup;
        }
    }

    // Every frame, including the first, is preceded by a "FRAME..." line.
    int frameHeaderBytes = y4mReadLine(frame.inputFile, &raw, frame.displayFilename);
    if (frameHeaderBytes < 0) {
        fprintf(stderr, "Y4M frame header too large: %s\n", frame.displayFilename);
        goto cleanup;
    }
    if (frameHeaderBytes < 6) { // "FRAME" plus the newline
        fprintf(stderr, "Y4M frame header too small: %s\n", frame.displayFilename);
        goto cleanup;
    }
    if (memcmp(raw.data, "FRAME", 5) != 0) {
        fprintf(stderr, "Truncated y4m (no frame): %s\n", frame.displayFilename);
        goto cleanup;
    }

    if ((frame.width < 1) || (frame.height < 1) || ((frame.depth != 8) && (frame.depth != 10) && (frame.depth != 12))) {
        fprintf(stderr, "Failed to parse y4m header (not enough information): %s\n", frame.displayFilename);
        goto cleanup;
    }
    // Division instead of multiplication avoids overflowing width * height.
    if ((uint32_t)frame.width > imageSizeLimit / (uint32_t)frame.height) {
        fprintf(stderr, "Too big y4m dimensions (%d x %d > %u px): %s\n", frame.width, frame.height, imageSizeLimit, frame.displayFilename);
        goto cleanup;
    }

    if (sourceTiming) {
        *sourceTiming = frame.sourceTiming;
    }

    avifImageFreePlanes(avif, AVIF_PLANES_ALL);
    avif->width = frame.width;
    avif->height = frame.height;
    avif->depth = frame.depth;
    avif->yuvFormat = frame.format;
    avif->yuvRange = frame.range;
    avif->yuvChromaSamplePosition = frame.chromaSamplePosition;
    avifResult allocationResult = avifImageAllocatePlanes(avif, frame.hasAlpha ? AVIF_PLANES_ALL : AVIF_PLANES_YUV);
    if (allocationResult != AVIF_RESULT_OK) {
        fprintf(stderr, "Failed to allocate the planes: %s\n", avifResultToString(allocationResult));
        goto cleanup;
    }

    // Copy the packed y4m plane data row by row (libavif rows may be padded,
    // so a single plane-sized fread would not line up).
    for (int plane = AVIF_CHAN_Y; plane <= AVIF_CHAN_A; ++plane) {
        uint32_t planeHeight = avifImagePlaneHeight(avif, plane); // 0 for A if no alpha and 0 for UV if 4:0:0.
        uint32_t planeWidthBytes = avifImagePlaneWidth(avif, plane) << (avif->depth > 8); // 2 bytes per sample above 8 bits
        uint8_t * row = avifImagePlane(avif, plane);
        uint32_t rowBytes = avifImagePlaneRowBytes(avif, plane);
        for (uint32_t y = 0; y < planeHeight; ++y) {
            uint32_t bytesRead = (uint32_t)fread(row, 1, planeWidthBytes, frame.inputFile);
            if (bytesRead != planeWidthBytes) {
                fprintf(stderr,
                        "Failed to read y4m row (not enough data, wanted %" PRIu32 ", got %" PRIu32 "): %s\n",
                        planeWidthBytes,
                        bytesRead,
                        frame.displayFilename);
                goto cleanup;
            }
            row += rowBytes;
        }
    }

    // libavif API does not guarantee the absence of undefined behavior if samples exceed the specified avif->depth.
    // Avoid that by making sure input values are within the correct range.
    if (y4mClampSamples(avif)) {
        fprintf(stderr, "WARNING: some samples were clamped to fit into %u bits per sample\n", avif->depth);
    }

    result = AVIF_TRUE;
cleanup:
    if (iter) {
        // The previous iterator state (if any) is always consumed.
        if (*iter) {
            free(*iter);
            *iter = NULL;
        }

        if (result && frame.inputFile) {
            ungetc(fgetc(frame.inputFile), frame.inputFile); // Kick frame.inputFile to force EOF

            if (!feof(frame.inputFile)) {
                // Remember y4m state for next time
                *iter = malloc(sizeof(struct y4mFrameIterator));
                if (*iter == NULL) {
                    fprintf(stderr, "Inter-frame state memory allocation failure\n");
                    result = AVIF_FALSE;
                } else {
                    **iter = frame;
                }
            }
        }
    }

    // Close the file unless iteration state keeps it open for the next call
    // (stdin is never closed).
    if (inputFilename && frame.inputFile && (!iter || !(*iter))) {
        fclose(frame.inputFile);
    }
    avifRWDataFree(&raw);
    return result;
}
475 | | |
476 | | avifBool y4mWrite(const char * outputFilename, const avifImage * avif) |
477 | 0 | { |
478 | 0 | avifBool hasAlpha = (avif->alphaPlane != NULL) && (avif->alphaRowBytes > 0); |
479 | 0 | avifBool writeAlpha = AVIF_FALSE; |
480 | 0 | char * y4mHeaderFormat = NULL; |
481 | |
|
482 | 0 | if (hasAlpha && ((avif->depth != 8) || (avif->yuvFormat != AVIF_PIXEL_FORMAT_YUV444))) { |
483 | 0 | fprintf(stderr, "WARNING: writing alpha is currently only supported in 8bpc YUV444, ignoring alpha channel: %s\n", outputFilename); |
484 | 0 | } |
485 | |
|
486 | 0 | if (avif->transformFlags & AVIF_TRANSFORM_CLAP) { |
487 | 0 | avifCropRect cropRect; |
488 | 0 | avifDiagnostics diag; |
489 | 0 | if (avifCropRectFromCleanApertureBox(&cropRect, &avif->clap, avif->width, avif->height, &diag) && |
490 | 0 | (cropRect.x != 0 || cropRect.y != 0 || cropRect.width != avif->width || cropRect.height != avif->height)) { |
491 | | // TODO: https://github.com/AOMediaCodec/libavif/issues/2427 - Implement. |
492 | 0 | fprintf(stderr, |
493 | 0 | "Warning: Clean Aperture values were ignored, the output image was NOT cropped to rectangle {%u,%u,%u,%u}\n", |
494 | 0 | cropRect.x, |
495 | 0 | cropRect.y, |
496 | 0 | cropRect.width, |
497 | 0 | cropRect.height); |
498 | 0 | } |
499 | 0 | } |
500 | 0 | if (avifImageGetExifOrientationFromIrotImir(avif) != 1) { |
501 | | // TODO: https://github.com/AOMediaCodec/libavif/issues/2427 - Rotate the samples. |
502 | 0 | fprintf(stderr, |
503 | 0 | "Warning: Orientation %u was ignored, the output image was NOT rotated or mirrored\n", |
504 | 0 | avifImageGetExifOrientationFromIrotImir(avif)); |
505 | 0 | } |
506 | |
|
507 | 0 | switch (avif->depth) { |
508 | 0 | case 8: |
509 | 0 | switch (avif->yuvFormat) { |
510 | 0 | case AVIF_PIXEL_FORMAT_YUV444: |
511 | 0 | if (hasAlpha) { |
512 | 0 | y4mHeaderFormat = "C444alpha XYSCSS=444"; |
513 | 0 | writeAlpha = AVIF_TRUE; |
514 | 0 | } else { |
515 | 0 | y4mHeaderFormat = "C444 XYSCSS=444"; |
516 | 0 | } |
517 | 0 | break; |
518 | 0 | case AVIF_PIXEL_FORMAT_YUV422: |
519 | 0 | y4mHeaderFormat = "C422 XYSCSS=422"; |
520 | 0 | break; |
521 | 0 | case AVIF_PIXEL_FORMAT_YUV420: |
522 | 0 | y4mHeaderFormat = "C420jpeg XYSCSS=420JPEG"; |
523 | 0 | break; |
524 | 0 | case AVIF_PIXEL_FORMAT_YUV400: |
525 | 0 | y4mHeaderFormat = "Cmono XYSCSS=400"; |
526 | 0 | break; |
527 | 0 | case AVIF_PIXEL_FORMAT_NONE: |
528 | 0 | case AVIF_PIXEL_FORMAT_COUNT: |
529 | | // will error later; these cases are here for warning's sake |
530 | 0 | break; |
531 | 0 | } |
532 | 0 | break; |
533 | 0 | case 10: |
534 | 0 | switch (avif->yuvFormat) { |
535 | 0 | case AVIF_PIXEL_FORMAT_YUV444: |
536 | 0 | y4mHeaderFormat = "C444p10 XYSCSS=444P10"; |
537 | 0 | break; |
538 | 0 | case AVIF_PIXEL_FORMAT_YUV422: |
539 | 0 | y4mHeaderFormat = "C422p10 XYSCSS=422P10"; |
540 | 0 | break; |
541 | 0 | case AVIF_PIXEL_FORMAT_YUV420: |
542 | 0 | y4mHeaderFormat = "C420p10 XYSCSS=420P10"; |
543 | 0 | break; |
544 | 0 | case AVIF_PIXEL_FORMAT_YUV400: |
545 | 0 | y4mHeaderFormat = "Cmono10 XYSCSS=400"; |
546 | 0 | break; |
547 | 0 | case AVIF_PIXEL_FORMAT_NONE: |
548 | 0 | case AVIF_PIXEL_FORMAT_COUNT: |
549 | | // will error later; these cases are here for warning's sake |
550 | 0 | break; |
551 | 0 | } |
552 | 0 | break; |
553 | 0 | case 12: |
554 | 0 | switch (avif->yuvFormat) { |
555 | 0 | case AVIF_PIXEL_FORMAT_YUV444: |
556 | 0 | y4mHeaderFormat = "C444p12 XYSCSS=444P12"; |
557 | 0 | break; |
558 | 0 | case AVIF_PIXEL_FORMAT_YUV422: |
559 | 0 | y4mHeaderFormat = "C422p12 XYSCSS=422P12"; |
560 | 0 | break; |
561 | 0 | case AVIF_PIXEL_FORMAT_YUV420: |
562 | 0 | y4mHeaderFormat = "C420p12 XYSCSS=420P12"; |
563 | 0 | break; |
564 | 0 | case AVIF_PIXEL_FORMAT_YUV400: |
565 | 0 | y4mHeaderFormat = "Cmono12 XYSCSS=400"; |
566 | 0 | break; |
567 | 0 | case AVIF_PIXEL_FORMAT_NONE: |
568 | 0 | case AVIF_PIXEL_FORMAT_COUNT: |
569 | | // will error later; these cases are here for warning's sake |
570 | 0 | break; |
571 | 0 | } |
572 | 0 | break; |
573 | 0 | default: |
574 | 0 | fprintf(stderr, "ERROR: y4mWrite unsupported depth: %d\n", avif->depth); |
575 | 0 | return AVIF_FALSE; |
576 | 0 | } |
577 | | |
578 | 0 | if (y4mHeaderFormat == NULL) { |
579 | 0 | fprintf(stderr, "ERROR: unsupported format\n"); |
580 | 0 | return AVIF_FALSE; |
581 | 0 | } |
582 | | |
583 | 0 | const char * rangeString = "XCOLORRANGE=FULL"; |
584 | 0 | if (avif->yuvRange == AVIF_RANGE_LIMITED) { |
585 | 0 | rangeString = "XCOLORRANGE=LIMITED"; |
586 | 0 | } |
587 | |
|
588 | 0 | FILE * f = fopen(outputFilename, "wb"); |
589 | 0 | if (!f) { |
590 | 0 | fprintf(stderr, "Cannot open file for write: %s\n", outputFilename); |
591 | 0 | return AVIF_FALSE; |
592 | 0 | } |
593 | | |
594 | 0 | avifBool success = AVIF_TRUE; |
595 | 0 | if (fprintf(f, "YUV4MPEG2 W%d H%d F25:1 Ip A0:0 %s %s\nFRAME\n", avif->width, avif->height, y4mHeaderFormat, rangeString) < 0) { |
596 | 0 | fprintf(stderr, "Cannot write to file: %s\n", outputFilename); |
597 | 0 | success = AVIF_FALSE; |
598 | 0 | goto cleanup; |
599 | 0 | } |
600 | | |
601 | 0 | const int lastPlane = writeAlpha ? AVIF_CHAN_A : AVIF_CHAN_V; |
602 | 0 | for (int plane = AVIF_CHAN_Y; plane <= lastPlane; ++plane) { |
603 | 0 | uint32_t planeHeight = avifImagePlaneHeight(avif, plane); // 0 for UV if 4:0:0. |
604 | 0 | uint32_t planeWidthBytes = avifImagePlaneWidth(avif, plane) << (avif->depth > 8); |
605 | 0 | const uint8_t * row = avifImagePlane(avif, plane); |
606 | 0 | uint32_t rowBytes = avifImagePlaneRowBytes(avif, plane); |
607 | 0 | for (uint32_t y = 0; y < planeHeight; ++y) { |
608 | 0 | if (fwrite(row, 1, planeWidthBytes, f) != planeWidthBytes) { |
609 | 0 | fprintf(stderr, "Failed to write %" PRIu32 " bytes: %s\n", planeWidthBytes, outputFilename); |
610 | 0 | success = AVIF_FALSE; |
611 | 0 | goto cleanup; |
612 | 0 | } |
613 | 0 | row += rowBytes; |
614 | 0 | } |
615 | 0 | } |
616 | | |
617 | 0 | cleanup: |
618 | 0 | fclose(f); |
619 | 0 | if (success) { |
620 | 0 | printf("Wrote Y4M: %s\n", outputFilename); |
621 | 0 | } |
622 | 0 | return success; |
623 | 0 | } |