/src/libavif/tests/gtest/aviftest_helpers.cc
Line | Count | Source |
1 | | // Copyright 2022 Google LLC |
2 | | // SPDX-License-Identifier: BSD-2-Clause |
3 | | |
4 | | #include "aviftest_helpers.h" |
5 | | |
6 | | #include <algorithm> |
7 | | #include <cassert> |
8 | | #include <cmath> |
9 | | #include <cstdint> |
10 | | #include <cstdlib> |
11 | | #include <cstring> |
12 | | #include <fstream> |
13 | | #include <limits> |
14 | | #include <string> |
15 | | #include <vector> |
16 | | |
17 | | #include "avif/avif.h" |
18 | | #include "avif/avif_cxx.h" |
19 | | #include "avif/internal.h" |
20 | | #include "avifpng.h" |
21 | | #include "avifutil.h" |
22 | | |
23 | | namespace avif { |
24 | | namespace testutil { |
25 | | |
26 | | //------------------------------------------------------------------------------ |
27 | | // CopyImageSamples is a copy of avifImageCopySamples |
28 | | |
29 | | namespace { |
30 | | void CopyImageSamples(avifImage* dstImage, const avifImage* srcImage, |
31 | 0 | avifPlanesFlags planes) { |
32 | 0 | assert(srcImage->depth == dstImage->depth); |
33 | 0 | if (planes & AVIF_PLANES_YUV) { |
34 | 0 | assert(srcImage->yuvFormat == dstImage->yuvFormat); |
35 | | // Note that there may be a mismatch between srcImage->yuvRange and |
36 | | // dstImage->yuvRange because libavif allows for 'colr' and AV1 OBU video |
37 | | // range values to differ. |
38 | 0 | } |
39 | 0 | const size_t bytesPerPixel = avifImageUsesU16(srcImage) ? 2 : 1; |
40 | |
41 | 0 | const avifBool skipColor = !(planes & AVIF_PLANES_YUV); |
42 | 0 | const avifBool skipAlpha = !(planes & AVIF_PLANES_A); |
43 | 0 | for (int c = AVIF_CHAN_Y; c <= AVIF_CHAN_A; ++c) { |
44 | 0 | const avifBool alpha = c == AVIF_CHAN_A; |
45 | 0 | if ((skipColor && !alpha) || (skipAlpha && alpha)) { |
46 | 0 | continue; |
47 | 0 | } |
48 | | |
49 | 0 | const uint32_t planeWidth = avifImagePlaneWidth(srcImage, c); |
50 | 0 | const uint32_t planeHeight = avifImagePlaneHeight(srcImage, c); |
51 | 0 | const uint8_t* srcRow = avifImagePlane(srcImage, c); |
52 | 0 | uint8_t* dstRow = avifImagePlane(dstImage, c); |
53 | 0 | const uint32_t srcRowBytes = avifImagePlaneRowBytes(srcImage, c); |
54 | 0 | const uint32_t dstRowBytes = avifImagePlaneRowBytes(dstImage, c); |
55 | 0 | assert(!srcRow == !dstRow); |
56 | 0 | if (!srcRow) { |
57 | 0 | continue; |
58 | 0 | } |
59 | 0 | assert(planeWidth == avifImagePlaneWidth(dstImage, c)); |
60 | 0 | assert(planeHeight == avifImagePlaneHeight(dstImage, c)); |
61 | |
62 | 0 | const size_t planeWidthBytes = planeWidth * bytesPerPixel; |
63 | 0 | for (uint32_t y = 0; y < planeHeight; ++y) { |
64 | 0 | memcpy(dstRow, srcRow, planeWidthBytes); |
65 | 0 | srcRow += srcRowBytes; |
66 | 0 | dstRow += dstRowBytes; |
67 | 0 | } |
68 | 0 | } |
69 | 0 | } |
70 | | } // namespace |
71 | | |
72 | | //------------------------------------------------------------------------------ |
73 | | |
74 | | AvifRgbImage::AvifRgbImage(const avifImage* yuv, int rgbDepth, |
75 | 4.93k | avifRGBFormat rgbFormat) { |
76 | 4.93k | avifRGBImageSetDefaults(this, yuv); |
77 | 4.93k | depth = rgbDepth; |
78 | 4.93k | format = rgbFormat; |
79 | 4.93k | if (avifRGBImageAllocatePixels(this) != AVIF_RESULT_OK) { |
80 | 0 | std::abort(); |
81 | 0 | } |
82 | 4.93k | } |
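
AvifRgbImage allocates its own pixel buffer in the constructor, so it can be handed straight to the YUV-to-RGB conversion API. A minimal usage sketch, assuming a decoded ImagePtr named yuv is in scope:

    // Convert a decoded YUV image to interleaved 8-bit RGBA.
    testutil::AvifRgbImage rgb(yuv.get(), /*rgbDepth=*/8, AVIF_RGB_FORMAT_RGBA);
    ASSERT_EQ(avifImageYUVToRGB(yuv.get(), &rgb), AVIF_RESULT_OK);
    // rgb.pixels / rgb.rowBytes now hold the converted samples; the buffer is
    // freed via avifRGBImageFreePixels() in the helper's destructor (per the
    // class definition in aviftest_helpers.h).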
83 | | |
84 | 0 | AvifRwData::AvifRwData(AvifRwData&& other) : avifRWData{other} { |
85 | 0 | other.data = nullptr; |
86 | 0 | other.size = 0; |
87 | 0 | } |
88 | | |
89 | | //------------------------------------------------------------------------------ |
90 | | |
91 | 0 | RgbChannelOffsets GetRgbChannelOffsets(avifRGBFormat format) { |
92 | 0 | switch (format) { |
93 | 0 | case AVIF_RGB_FORMAT_RGB: |
94 | 0 | return {/*r=*/0, /*g=*/1, /*b=*/2, /*a=*/0}; |
95 | 0 | case AVIF_RGB_FORMAT_RGBA: |
96 | 0 | return {/*r=*/0, /*g=*/1, /*b=*/2, /*a=*/3}; |
97 | 0 | case AVIF_RGB_FORMAT_ARGB: |
98 | 0 | return {/*r=*/1, /*g=*/2, /*b=*/3, /*a=*/0}; |
99 | 0 | case AVIF_RGB_FORMAT_BGR: |
100 | 0 | return {/*r=*/2, /*g=*/1, /*b=*/0, /*a=*/0}; |
101 | 0 | case AVIF_RGB_FORMAT_BGRA: |
102 | 0 | return {/*r=*/2, /*g=*/1, /*b=*/0, /*a=*/3}; |
103 | 0 | case AVIF_RGB_FORMAT_ABGR: |
104 | 0 | return {/*r=*/3, /*g=*/2, /*b=*/1, /*a=*/0}; |
105 | 0 | case AVIF_RGB_FORMAT_RGB_565: |
106 | 0 | case AVIF_RGB_FORMAT_COUNT: |
107 | 0 | default: |
108 | 0 | return {/*r=*/0, /*g=*/0, /*b=*/0, /*a=*/0}; |
109 | 0 | } |
110 | 0 | } |
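
The returned offsets index into an interleaved pixel regardless of the channel order of the format. A short sketch, with hypothetical rgb, x and y variables assumed to be in scope:

    // Write pure red into pixel (x, y) of an 8-bit interleaved RGB buffer.
    const testutil::RgbChannelOffsets offsets =
        testutil::GetRgbChannelOffsets(rgb.format);
    const uint32_t channels = avifRGBFormatChannelCount(rgb.format);
    uint8_t* pixel = rgb.pixels + y * rgb.rowBytes + x * channels;
    pixel[offsets.r] = 255;
    pixel[offsets.g] = 0;
    pixel[offsets.b] = 0;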
111 | | |
112 | | //------------------------------------------------------------------------------ |
113 | | |
114 | | ImagePtr CreateImage(int width, int height, int depth, |
115 | | avifPixelFormat yuv_format, avifPlanesFlags planes, |
116 | 0 | avifRange yuv_range) { |
117 | 0 | ImagePtr image(avifImageCreate(width, height, depth, yuv_format)); |
118 | 0 | if (!image) { |
119 | 0 | return nullptr; |
120 | 0 | } |
121 | 0 | image->yuvRange = yuv_range; |
122 | 0 | if (avifImageAllocatePlanes(image.get(), planes) != AVIF_RESULT_OK) { |
123 | 0 | return nullptr; |
124 | 0 | } |
125 | 0 | return image; |
126 | 0 | } |
127 | | |
128 | 0 | void FillImagePlain(avifImage* image, const uint32_t yuva[4]) { |
129 | 0 | for (avifChannelIndex c : |
130 | 0 | {AVIF_CHAN_Y, AVIF_CHAN_U, AVIF_CHAN_V, AVIF_CHAN_A}) { |
131 | 0 | const uint32_t plane_width = avifImagePlaneWidth(image, c); |
132 | | // 0 for A if no alpha and 0 for UV if 4:0:0. |
133 | 0 | const uint32_t plane_height = avifImagePlaneHeight(image, c); |
134 | 0 | uint8_t* row = avifImagePlane(image, c); |
135 | 0 | const uint32_t row_bytes = avifImagePlaneRowBytes(image, c); |
136 | 0 | for (uint32_t y = 0; y < plane_height; ++y) { |
137 | 0 | if (avifImageUsesU16(image)) { |
138 | 0 | std::fill(reinterpret_cast<uint16_t*>(row), |
139 | 0 | reinterpret_cast<uint16_t*>(row) + plane_width, |
140 | 0 | static_cast<uint16_t>(yuva[c])); |
141 | 0 | } else { |
142 | 0 | std::fill(row, row + plane_width, static_cast<uint8_t>(yuva[c])); |
143 | 0 | } |
144 | 0 | row += row_bytes; |
145 | 0 | } |
146 | 0 | } |
147 | 0 | } |
148 | | |
149 | 0 | void FillImageGradient(avifImage* image, int offset) { |
150 | 0 | for (avifChannelIndex c : |
151 | 0 | {AVIF_CHAN_Y, AVIF_CHAN_U, AVIF_CHAN_V, AVIF_CHAN_A}) { |
152 | 0 | const uint32_t limitedRangeMin = |
153 | 0 | c == AVIF_CHAN_Y ? 16 << (image->depth - 8) : 0; |
154 | 0 | const uint32_t limitedRangeMax = (c == AVIF_CHAN_Y ? 219 : 224) |
155 | 0 | << (image->depth - 8); |
156 | |
157 | 0 | const uint32_t plane_width = avifImagePlaneWidth(image, c); |
158 | | // 0 for A if no alpha and 0 for UV if 4:0:0. |
159 | 0 | const uint32_t plane_height = avifImagePlaneHeight(image, c); |
160 | 0 | uint8_t* row = avifImagePlane(image, c); |
161 | 0 | const uint32_t row_bytes = avifImagePlaneRowBytes(image, c); |
162 | 0 | const uint32_t max_xy_sum = plane_width + plane_height - 2; |
163 | 0 | for (uint32_t y = 0; y < plane_height; ++y) { |
164 | 0 | for (uint32_t x = 0; x < plane_width; ++x) { |
165 | 0 | uint32_t value = (x + y + offset) % (max_xy_sum + 1); |
166 | 0 | if (image->yuvRange == AVIF_RANGE_FULL || c == AVIF_CHAN_A) { |
167 | 0 | value = |
168 | 0 | value * ((1u << image->depth) - 1u) / std::max(1u, max_xy_sum); |
169 | 0 | } else { |
170 | 0 | value = limitedRangeMin + value * |
171 | 0 | (limitedRangeMax - limitedRangeMin) / |
172 | 0 | std::max(1u, max_xy_sum); |
173 | 0 | } |
174 | 0 | if (avifImageUsesU16(image)) { |
175 | 0 | reinterpret_cast<uint16_t*>(row)[x] = static_cast<uint16_t>(value); |
176 | 0 | } else { |
177 | 0 | row[x] = static_cast<uint8_t>(value); |
178 | 0 | } |
179 | 0 | } |
180 | 0 | row += row_bytes; |
181 | 0 | } |
182 | 0 | } |
183 | 0 | } |
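
CreateImage() and the fill helpers above are how the tests synthesize deterministic input images. A minimal sketch with hypothetical dimensions:

    // Build a 64x48, 8-bit, 4:2:0 image with color and alpha planes, then fill
    // every plane with a gradient that respects the image's YUV range.
    ImagePtr image = testutil::CreateImage(64, 48, 8, AVIF_PIXEL_FORMAT_YUV420,
                                           AVIF_PLANES_ALL, AVIF_RANGE_FULL);
    ASSERT_NE(image, nullptr);  // CreateImage() returns nullptr on failure.
    testutil::FillImageGradient(image.get(), /*offset=*/0);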
184 | | |
185 | | namespace { |
186 | | template <typename PixelType> |
187 | | void FillImageChannel(avifRGBImage* image, uint32_t channel_offset, |
188 | 0 | uint32_t value) { |
189 | 0 | const uint32_t channel_count = avifRGBFormatChannelCount(image->format); |
190 | 0 | assert(channel_offset < channel_count); |
191 | 0 | for (uint32_t y = 0; y < image->height; ++y) { |
192 | 0 | PixelType* pixel = |
193 | 0 | reinterpret_cast<PixelType*>(image->pixels + image->rowBytes * y); |
194 | 0 | for (uint32_t x = 0; x < image->width; ++x) { |
195 | 0 | pixel[channel_offset] = static_cast<PixelType>(value); |
196 | 0 | pixel += channel_count; |
197 | 0 | } |
198 | 0 | } |
199 | 0 | }
Unexecuted instantiation: aviftest_helpers.cc:void avif::testutil::(anonymous namespace)::FillImageChannel<unsigned char>(avifRGBImage*, unsigned int, unsigned int)
Unexecuted instantiation: aviftest_helpers.cc:void avif::testutil::(anonymous namespace)::FillImageChannel<unsigned short>(avifRGBImage*, unsigned int, unsigned int)
200 | | } // namespace |
201 | | |
202 | | void FillImageChannel(avifRGBImage* image, uint32_t channel_offset, |
203 | 0 | uint32_t value) { |
204 | 0 | (image->depth <= 8) |
205 | 0 | ? FillImageChannel<uint8_t>(image, channel_offset, value) |
206 | 0 | : FillImageChannel<uint16_t>(image, channel_offset, value); |
207 | 0 | } |
208 | | |
209 | | //------------------------------------------------------------------------------ |
210 | | |
211 | | bool AreByteSequencesEqual(const uint8_t data1[], size_t data1_length, |
212 | 0 | const uint8_t data2[], size_t data2_length) { |
213 | 0 | if (data1_length != data2_length) return false; |
214 | 0 | return data1_length == 0 || std::equal(data1, data1 + data1_length, data2); |
215 | 0 | } |
216 | | |
217 | 0 | bool AreByteSequencesEqual(const avifRWData& data1, const avifRWData& data2) { |
218 | 0 | return AreByteSequencesEqual(data1.data, data1.size, data2.data, data2.size); |
219 | 0 | } |
220 | | |
221 | | namespace { |
222 | | // Returns true if all properties of image1 are present in image2 and equal. |
223 | | bool MatchEachPropertyOfFirstImageInSecondImage(const avifImage& image1, |
224 | 0 | const avifImage& image2) { |
225 | 0 | for (size_t i = 0; i < image1.numProperties; ++i) { |
226 | 0 | const avifImageItemProperty& property1 = image1.properties[i]; |
227 | | |
228 | | // libavif may write a 'ccst' box in Sample Entries. |
229 | | // libavif does not read or expose those except through avifImage::properties. |
230 | | // Ignore these boxes because they are an easy source of valid differences |
231 | | // between an original avifImage and a decoded avifImage. |
232 | 0 | if (AreByteSequencesEqual(property1.boxtype, sizeof(property1.boxtype), |
233 | 0 | reinterpret_cast<const uint8_t*>("ccst"), 4)) { |
234 | 0 | continue; |
235 | 0 | } |
236 | | |
237 | 0 | bool found = false; |
238 | 0 | for (size_t j = 0; j < image2.numProperties; ++j) { |
239 | 0 | const avifImageItemProperty& property2 = image2.properties[j]; |
240 | 0 | if (!AreByteSequencesEqual(property1.boxtype, sizeof(property1.boxtype), |
241 | 0 | property2.boxtype, |
242 | 0 | sizeof(property2.boxtype))) { |
243 | 0 | continue; |
244 | 0 | } |
245 | 0 | if (AreByteSequencesEqual(property1.boxtype, sizeof(property1.boxtype), |
246 | 0 | reinterpret_cast<const uint8_t*>("uuid"), 4) && |
247 | 0 | !AreByteSequencesEqual(property1.usertype, sizeof(property1.usertype), |
248 | 0 | property2.usertype, |
249 | 0 | sizeof(property2.usertype))) { |
250 | 0 | continue; |
251 | 0 | } |
252 | 0 | if (found) return false; // Consider duplicates as invalid. |
253 | 0 | found = true; |
254 | 0 | if (!AreByteSequencesEqual(property1.boxPayload, property2.boxPayload)) { |
255 | 0 | return false; |
256 | 0 | } |
257 | 0 | } |
258 | 0 | if (!found) return false; |
259 | 0 | } |
260 | 0 | return true; |
261 | 0 | } |
262 | | |
263 | | // Returns true if image1 and image2 are identical, pixel values excepted. |
264 | | bool AreImageFeaturesEqual(const avifImage& image1, const avifImage& image2, |
265 | 0 | bool ignore_alpha) { |
266 | 0 | if (image1.width != image2.width || image1.height != image2.height || |
267 | 0 | image1.depth != image2.depth || image1.yuvFormat != image2.yuvFormat || |
268 | 0 | image1.yuvRange != image2.yuvRange) { |
269 | 0 | return false; |
270 | 0 | } |
271 | 0 | assert(image1.width * image1.height > 0); |
272 | |
273 | 0 | for (avifChannelIndex c : |
274 | 0 | {AVIF_CHAN_Y, AVIF_CHAN_U, AVIF_CHAN_V, AVIF_CHAN_A}) { |
275 | 0 | if (ignore_alpha && c == AVIF_CHAN_A) continue; |
276 | 0 | const uint8_t* row1 = avifImagePlane(&image1, c); |
277 | 0 | const uint8_t* row2 = avifImagePlane(&image2, c); |
278 | 0 | if (!row1 != !row2) { |
279 | | // Maybe one image contains an opaque alpha channel while the other has no |
280 | | // alpha channel, but the features should still be considered equal. |
281 | 0 | if (c == AVIF_CHAN_A && avifImageIsOpaque(&image1) && |
282 | 0 | avifImageIsOpaque(&image2)) { |
283 | 0 | continue; |
284 | 0 | } |
285 | 0 | return false; |
286 | 0 | } |
287 | 0 | if (c == AVIF_CHAN_A && row1 != nullptr && |
288 | 0 | image1.alphaPremultiplied != image2.alphaPremultiplied && |
289 | 0 | !avifImageIsOpaque(&image1)) { |
290 | | // Alpha premultiplication is ignored if alpha is opaque. |
291 | 0 | return false; |
292 | 0 | } |
293 | 0 | } |
294 | | |
295 | 0 | if (!AreByteSequencesEqual(image1.icc, image2.icc)) return false; |
296 | | |
297 | 0 | if (image1.colorPrimaries != image2.colorPrimaries || |
298 | 0 | image1.transferCharacteristics != image2.transferCharacteristics || |
299 | 0 | image1.matrixCoefficients != image2.matrixCoefficients) { |
300 | 0 | return false; |
301 | 0 | } |
302 | | |
303 | 0 | if (image1.clli.maxCLL != image2.clli.maxCLL || |
304 | 0 | image1.clli.maxPALL != image2.clli.maxPALL) { |
305 | 0 | return false; |
306 | 0 | } |
307 | 0 | if (image1.transformFlags != image2.transformFlags || |
308 | 0 | ((image1.transformFlags & AVIF_TRANSFORM_PASP) && |
309 | 0 | std::memcmp(&image1.pasp, &image2.pasp, sizeof(image1.pasp))) || |
310 | 0 | ((image1.transformFlags & AVIF_TRANSFORM_CLAP) && |
311 | 0 | std::memcmp(&image1.clap, &image2.clap, sizeof(image1.clap))) || |
312 | 0 | ((image1.transformFlags & AVIF_TRANSFORM_IROT) && |
313 | 0 | std::memcmp(&image1.irot, &image2.irot, sizeof(image1.irot))) || |
314 | 0 | ((image1.transformFlags & AVIF_TRANSFORM_IMIR) && |
315 | 0 | std::memcmp(&image1.imir, &image2.imir, sizeof(image1.imir)))) { |
316 | 0 | return false; |
317 | 0 | } |
318 | | |
319 | 0 | if (!AreByteSequencesEqual(image1.exif, image2.exif)) return false; |
320 | 0 | if (!AreByteSequencesEqual(image1.xmp, image2.xmp)) return false; |
321 | | |
322 | 0 | if (!MatchEachPropertyOfFirstImageInSecondImage(image1, image2) || |
323 | 0 | !MatchEachPropertyOfFirstImageInSecondImage(image2, image1)) { |
324 | 0 | return false; |
325 | 0 | } |
326 | | |
327 | 0 | if (!image1.gainMap != !image2.gainMap) return false; |
328 | 0 | if (image1.gainMap != nullptr) { |
329 | 0 | if (!avifSameGainMapMetadata(image1.gainMap, image2.gainMap) || |
330 | 0 | !avifSameGainMapAltMetadata(image1.gainMap, image2.gainMap)) { |
331 | 0 | return false; |
332 | 0 | } |
333 | | |
334 | 0 | if (!image1.gainMap->image != !image2.gainMap->image) return false; |
335 | 0 | } |
336 | 0 | return true; |
337 | 0 | } |
338 | | } // namespace |
339 | | |
340 | | // Returns true if image1 and image2 are identical. |
341 | | bool AreImagesEqual(const avifImage& image1, const avifImage& image2, |
342 | 0 | bool ignore_alpha) { |
343 | 0 | if (!AreImageFeaturesEqual(image1, image2, ignore_alpha)) { |
344 | 0 | return false; |
345 | 0 | } |
346 | | |
347 | 0 | for (avifChannelIndex c : |
348 | 0 | {AVIF_CHAN_Y, AVIF_CHAN_U, AVIF_CHAN_V, AVIF_CHAN_A}) { |
349 | 0 | if (ignore_alpha && c == AVIF_CHAN_A) continue; |
350 | 0 | const uint8_t* row1 = avifImagePlane(&image1, c); |
351 | 0 | const uint8_t* row2 = avifImagePlane(&image2, c); |
352 | 0 | if (row1 == nullptr || row2 == nullptr) { |
353 | 0 | continue; // Verified in AreImageFeaturesEqual(). |
354 | 0 | } |
355 | 0 | const uint32_t row_bytes1 = avifImagePlaneRowBytes(&image1, c); |
356 | 0 | const uint32_t row_bytes2 = avifImagePlaneRowBytes(&image2, c); |
357 | 0 | const uint32_t plane_width = avifImagePlaneWidth(&image1, c); |
358 | | // 0 for A if no alpha and 0 for UV if 4:0:0. |
359 | 0 | const uint32_t plane_height = avifImagePlaneHeight(&image1, c); |
360 | 0 | for (uint32_t y = 0; y < plane_height; ++y) { |
361 | 0 | if (avifImageUsesU16(&image1)) { |
362 | 0 | if (!std::equal(reinterpret_cast<const uint16_t*>(row1), |
363 | 0 | reinterpret_cast<const uint16_t*>(row1) + plane_width, |
364 | 0 | reinterpret_cast<const uint16_t*>(row2))) { |
365 | 0 | return false; |
366 | 0 | } |
367 | 0 | } else { |
368 | 0 | if (!std::equal(row1, row1 + plane_width, row2)) { |
369 | 0 | return false; |
370 | 0 | } |
371 | 0 | } |
372 | 0 | row1 += row_bytes1; |
373 | 0 | row2 += row_bytes2; |
374 | 0 | } |
375 | 0 | } |
376 | | |
377 | 0 | if (image1.gainMap != nullptr && image1.gainMap->image != nullptr && |
378 | 0 | image2.gainMap != nullptr && image2.gainMap->image != nullptr && |
379 | 0 | !AreImagesEqual(*image1.gainMap->image, *image2.gainMap->image)) { |
380 | 0 | return false; |
381 | 0 | } |
382 | 0 | return true; |
383 | 0 | } |
384 | | |
385 | | bool AreImagesSimilar(const avifImage& image1, const avifImage& image2, |
386 | 0 | double min_psnr, bool ignore_alpha) { |
387 | 0 | if (!AreImageFeaturesEqual(image1, image2, ignore_alpha)) { |
388 | 0 | return false; |
389 | 0 | } |
390 | | |
391 | 0 | if (GetPsnr(image1, image2, ignore_alpha) < min_psnr) { |
392 | 0 | return true; |
393 | 0 | } |
394 | | |
395 | 0 | if (image1.gainMap != nullptr && image1.gainMap->image != nullptr && |
396 | 0 | image2.gainMap != nullptr && image2.gainMap->image != nullptr && |
397 | 0 | GetPsnr(*image1.gainMap->image, *image2.gainMap->image) < min_psnr) { |
398 | 0 | return false; |
399 | 0 | } |
400 | 0 | return true; |
401 | 0 | } |
402 | | |
403 | | namespace { |
404 | | |
405 | | template <typename Sample> |
406 | | uint64_t SquaredDiffSum(const Sample* samples1, const Sample* samples2, |
407 | 0 | uint32_t num_samples) { |
408 | 0 | uint64_t sum = 0; |
409 | 0 | for (uint32_t i = 0; i < num_samples; ++i) { |
410 | 0 | const int32_t diff = static_cast<int32_t>(samples1[i]) - samples2[i]; |
411 | 0 | sum += diff * diff; |
412 | 0 | } |
413 | 0 | return sum; |
414 | 0 | }
Unexecuted instantiation: aviftest_helpers.cc:unsigned long avif::testutil::(anonymous namespace)::SquaredDiffSum<unsigned short>(unsigned short const*, unsigned short const*, unsigned int)
Unexecuted instantiation: aviftest_helpers.cc:unsigned long avif::testutil::(anonymous namespace)::SquaredDiffSum<unsigned char>(unsigned char const*, unsigned char const*, unsigned int)
415 | | |
416 | | } // namespace |
417 | | |
418 | | double GetPsnr(const avifImage& image1, const avifImage& image2, |
419 | 0 | bool ignore_alpha) { |
420 | 0 | if (image1.width != image2.width || image1.height != image2.height || |
421 | 0 | image1.depth != image2.depth || image1.yuvFormat != image2.yuvFormat || |
422 | 0 | image1.yuvRange != image2.yuvRange) { |
423 | 0 | return -1; |
424 | 0 | } |
425 | 0 | assert(image1.width * image1.height > 0); |
426 | |
427 | 0 | if (image1.colorPrimaries != image2.colorPrimaries || |
428 | 0 | image1.transferCharacteristics != image2.transferCharacteristics || |
429 | 0 | image1.matrixCoefficients != image2.matrixCoefficients || |
430 | 0 | image1.yuvRange != image2.yuvRange) { |
431 | 0 | fprintf(stderr, |
432 | 0 | "WARNING: computing PSNR of images with different CICP: %d/%d/%d%s " |
433 | 0 | "vs %d/%d/%d%s\n", |
434 | 0 | image1.colorPrimaries, image1.transferCharacteristics, |
435 | 0 | image1.matrixCoefficients, |
436 | 0 | (image1.yuvRange == AVIF_RANGE_FULL) ? "f" : "l", |
437 | 0 | image2.colorPrimaries, image2.transferCharacteristics, |
438 | 0 | image2.matrixCoefficients, |
439 | 0 | (image2.yuvRange == AVIF_RANGE_FULL) ? "f" : "l"); |
440 | 0 | } |
441 | |
442 | 0 | uint64_t squared_diff_sum = 0; |
443 | 0 | uint32_t num_samples = 0; |
444 | 0 | const uint32_t max_sample_value = (1 << image1.depth) - 1; |
445 | 0 | for (avifChannelIndex c : |
446 | 0 | {AVIF_CHAN_Y, AVIF_CHAN_U, AVIF_CHAN_V, AVIF_CHAN_A}) { |
447 | 0 | if (ignore_alpha && c == AVIF_CHAN_A) continue; |
448 | | |
449 | 0 | const uint32_t plane_width = std::max(avifImagePlaneWidth(&image1, c), |
450 | 0 | avifImagePlaneWidth(&image2, c)); |
451 | 0 | const uint32_t plane_height = std::max(avifImagePlaneHeight(&image1, c), |
452 | 0 | avifImagePlaneHeight(&image2, c)); |
453 | 0 | if (plane_width == 0 || plane_height == 0) continue; |
454 | | |
455 | 0 | const uint8_t* row1 = avifImagePlane(&image1, c); |
456 | 0 | const uint8_t* row2 = avifImagePlane(&image2, c); |
457 | 0 | if (!row1 != !row2 && c != AVIF_CHAN_A) { |
458 | 0 | return -1; |
459 | 0 | } |
460 | 0 | uint32_t row_bytes1 = avifImagePlaneRowBytes(&image1, c); |
461 | 0 | uint32_t row_bytes2 = avifImagePlaneRowBytes(&image2, c); |
462 | | |
463 | | // Consider missing alpha planes as samples set to the maximum value. |
464 | 0 | std::vector<uint8_t> opaque_alpha_samples; |
465 | 0 | if (!row1 != !row2) { |
466 | 0 | opaque_alpha_samples.resize(std::max(row_bytes1, row_bytes2)); |
467 | 0 | if (avifImageUsesU16(&image1)) { |
468 | 0 | uint16_t* opaque_alpha_samples_16b = |
469 | 0 | reinterpret_cast<uint16_t*>(opaque_alpha_samples.data()); |
470 | 0 | std::fill(opaque_alpha_samples_16b, |
471 | 0 | opaque_alpha_samples_16b + plane_width, |
472 | 0 | static_cast<uint16_t>(max_sample_value)); |
473 | 0 | } else { |
474 | 0 | std::fill(opaque_alpha_samples.begin(), opaque_alpha_samples.end(), |
475 | 0 | uint8_t{255}); |
476 | 0 | } |
477 | 0 | if (!row1) { |
478 | 0 | row1 = opaque_alpha_samples.data(); |
479 | 0 | row_bytes1 = 0; |
480 | 0 | } else { |
481 | 0 | row2 = opaque_alpha_samples.data(); |
482 | 0 | row_bytes2 = 0; |
483 | 0 | } |
484 | 0 | } |
485 | |
486 | 0 | for (uint32_t y = 0; y < plane_height; ++y) { |
487 | 0 | if (avifImageUsesU16(&image1)) { |
488 | 0 | squared_diff_sum += SquaredDiffSum( |
489 | 0 | reinterpret_cast<const uint16_t*>(row1), |
490 | 0 | reinterpret_cast<const uint16_t*>(row2), plane_width); |
491 | 0 | } else { |
492 | 0 | squared_diff_sum += SquaredDiffSum(row1, row2, plane_width); |
493 | 0 | } |
494 | 0 | row1 += row_bytes1; |
495 | 0 | row2 += row_bytes2; |
496 | 0 | num_samples += plane_width; |
497 | 0 | } |
498 | 0 | } |
499 | | |
500 | 0 | if (squared_diff_sum == 0) { |
501 | 0 | return 99.0; |
502 | 0 | } |
503 | 0 | const double normalized_error = |
504 | 0 | squared_diff_sum / |
505 | 0 | (static_cast<double>(num_samples) * max_sample_value * max_sample_value); |
506 | 0 | if (normalized_error <= std::numeric_limits<double>::epsilon()) { |
507 | 0 | return 98.99; // Very small distortion but not lossless. |
508 | 0 | } |
509 | 0 | return std::min(-10 * std::log10(normalized_error), 98.99); |
510 | 0 | } |
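
GetPsnr() is the distortion metric used after an encode/decode round trip; it returns 99.0 for bit-exact pixels and caps lossy results at 98.99. A sketch, assuming an ImagePtr named image in scope and hypothetical speed, quality and threshold values:

    // Lossy round trip, then check the distortion against a threshold.
    const testutil::AvifRwData encoded =
        testutil::Encode(image.get(), AVIF_SPEED_FASTEST, /*quality=*/90);
    ASSERT_NE(encoded.size, 0u);
    const ImagePtr decoded = testutil::Decode(encoded.data, encoded.size);
    ASSERT_NE(decoded, nullptr);
    EXPECT_GT(testutil::GetPsnr(*image, *decoded, /*ignore_alpha=*/false), 40.0);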
511 | | |
512 | 0 | bool AreImagesEqual(const avifRGBImage& image1, const avifRGBImage& image2) { |
513 | 0 | if (image1.width != image2.width || image1.height != image2.height || |
514 | 0 | image1.depth != image2.depth || image1.format != image2.format || |
515 | 0 | image1.alphaPremultiplied != image2.alphaPremultiplied || |
516 | 0 | image1.isFloat != image2.isFloat) { |
517 | 0 | return false; |
518 | 0 | } |
519 | 0 | const uint8_t* row1 = image1.pixels; |
520 | 0 | const uint8_t* row2 = image2.pixels; |
521 | 0 | const unsigned int row_width = image1.width * avifRGBImagePixelSize(&image1); |
522 | 0 | for (unsigned int y = 0; y < image1.height; ++y) { |
523 | 0 | if (!std::equal(row1, row1 + row_width, row2)) { |
524 | 0 | return false; |
525 | 0 | } |
526 | 0 | row1 += image1.rowBytes; |
527 | 0 | row2 += image2.rowBytes; |
528 | 0 | } |
529 | 0 | return true; |
530 | 0 | } |
531 | | |
532 | | avifResult MergeGrid(int grid_cols, int grid_rows, |
533 | 0 | const std::vector<ImagePtr>& cells, avifImage* merged) { |
534 | 0 | std::vector<const avifImage*> ptrs(cells.size()); |
535 | 0 | for (size_t i = 0; i < cells.size(); ++i) { |
536 | 0 | ptrs[i] = cells[i].get(); |
537 | 0 | } |
538 | 0 | return MergeGrid(grid_cols, grid_rows, ptrs, merged); |
539 | 0 | } |
540 | | |
541 | | avifResult MergeGrid(int grid_cols, int grid_rows, |
542 | | const std::vector<const avifImage*>& cells, |
543 | 0 | avifImage* merged) { |
544 | 0 | const uint32_t tile_width = cells[0]->width; |
545 | 0 | const uint32_t tile_height = cells[0]->height; |
546 | 0 | const uint32_t grid_width = |
547 | 0 | (grid_cols - 1) * tile_width + cells.back()->width; |
548 | 0 | const uint32_t grid_height = |
549 | 0 | (grid_rows - 1) * tile_height + cells.back()->height; |
550 | |
551 | 0 | ImagePtr view(avifImageCreateEmpty()); |
552 | 0 | AVIF_CHECKERR(view, AVIF_RESULT_OUT_OF_MEMORY); |
553 | | |
554 | 0 | avifCropRect rect = {}; |
555 | 0 | for (int j = 0; j < grid_rows; ++j) { |
556 | 0 | rect.x = 0; |
557 | 0 | for (int i = 0; i < grid_cols; ++i) { |
558 | 0 | const avifImage* image = cells[j * grid_cols + i]; |
559 | 0 | rect.width = image->width; |
560 | 0 | rect.height = image->height; |
561 | 0 | AVIF_CHECKRES(avifImageSetViewRect(view.get(), merged, &rect)); |
562 | 0 | CopyImageSamples(/*dstImage=*/view.get(), image, AVIF_PLANES_ALL); |
563 | 0 | assert(!view->imageOwnsYUVPlanes); |
564 | 0 | rect.x += rect.width; |
565 | 0 | } |
566 | 0 | rect.y += rect.height; |
567 | 0 | } |
568 | | |
569 | 0 | if ((rect.x != grid_width) || (rect.y != grid_height)) { |
570 | 0 | return AVIF_RESULT_UNKNOWN_ERROR; |
571 | 0 | } |
572 | | |
573 | 0 | return AVIF_RESULT_OK; |
574 | 0 | } |
575 | | |
576 | | //------------------------------------------------------------------------------ |
577 | | |
578 | 0 | testutil::AvifRwData ReadFile(const std::string& file_path) { |
579 | 0 | std::ifstream file(file_path, std::ios::binary | std::ios::ate); |
580 | 0 | testutil::AvifRwData bytes; |
581 | 0 | if (avifRWDataRealloc(&bytes, file.good() ? static_cast<size_t>(file.tellg()) |
582 | 0 | : 0) != AVIF_RESULT_OK) { |
583 | 0 | return {}; |
584 | 0 | } |
585 | 0 | file.seekg(0, std::ios::beg); |
586 | 0 | file.read(reinterpret_cast<char*>(bytes.data), |
587 | 0 | static_cast<std::streamsize>(bytes.size)); |
588 | 0 | return bytes; |
589 | 0 | } |
590 | | |
591 | | //------------------------------------------------------------------------------ |
592 | | |
593 | | ImagePtr ReadImage(const char* folder_path, const char* file_name, |
594 | | avifPixelFormat requested_format, int requested_depth, |
595 | | avifChromaDownsampling chroma_downsampling, |
596 | | avifBool ignore_icc, avifBool ignore_exif, |
597 | | avifBool ignore_xmp, avifBool allow_changing_cicp, |
598 | 0 | avifBool ignore_gain_map) { |
599 | 0 | ImagePtr image(avifImageCreateEmpty()); |
600 | 0 | if (!image || |
601 | 0 | avifReadImage((std::string(folder_path) + file_name).c_str(), |
602 | 0 | AVIF_APP_FILE_FORMAT_UNKNOWN /* guess format */, |
603 | 0 | requested_format, requested_depth, chroma_downsampling, |
604 | 0 | ignore_icc, ignore_exif, ignore_xmp, allow_changing_cicp, |
605 | 0 | ignore_gain_map, AVIF_DEFAULT_IMAGE_SIZE_LIMIT, image.get(), |
606 | 0 | /*outDepth=*/nullptr, /*sourceTiming=*/nullptr, |
607 | 0 | /*frameIter=*/nullptr) == AVIF_APP_FILE_FORMAT_UNKNOWN) { |
608 | 0 | return nullptr; |
609 | 0 | } |
610 | 0 | return image; |
611 | 0 | } |
612 | | |
613 | 0 | bool WriteImage(const avifImage* image, const char* file_path) { |
614 | 0 | if (!image || !file_path) return false; |
615 | 0 | const size_t str_len = std::strlen(file_path); |
616 | 0 | if (str_len >= 4 && !std::strncmp(file_path + str_len - 4, ".png", 4)) { |
617 | 0 | return avifPNGWrite(file_path, image, /*requestedDepth=*/0, |
618 | 0 | AVIF_CHROMA_UPSAMPLING_BEST_QUALITY, |
619 | 0 | /*compressionLevel=*/0); |
620 | 0 | } |
621 | | // Other formats are not supported. |
622 | 0 | return false; |
623 | 0 | } |
624 | | |
625 | 0 | AvifRwData Encode(const avifImage* image, int speed, int quality) { |
626 | 0 | EncoderPtr encoder(avifEncoderCreate()); |
627 | 0 | if (!encoder) return {}; |
628 | 0 | encoder->speed = speed; |
629 | 0 | encoder->quality = quality; |
630 | 0 | encoder->qualityAlpha = quality; |
631 | 0 | encoder->qualityGainMap = quality; |
632 | 0 | testutil::AvifRwData bytes; |
633 | 0 | if (avifEncoderWrite(encoder.get(), image, &bytes) != AVIF_RESULT_OK) { |
634 | 0 | return {}; |
635 | 0 | } |
636 | 0 | return bytes; |
637 | 0 | } |
638 | | |
639 | 0 | ImagePtr Decode(const uint8_t* bytes, size_t num_bytes) { |
640 | 0 | ImagePtr decoded(avifImageCreateEmpty()); |
641 | 0 | DecoderPtr decoder(avifDecoderCreate()); |
642 | 0 | if (!decoded || !decoder || |
643 | 0 | (avifDecoderReadMemory(decoder.get(), decoded.get(), bytes, num_bytes) != |
644 | 0 | AVIF_RESULT_OK)) { |
645 | 0 | return nullptr; |
646 | 0 | } |
647 | 0 | return decoded; |
648 | 0 | } |
649 | | |
650 | 0 | ImagePtr DecodeFile(const std::string& path) { |
651 | 0 | ImagePtr decoded(avifImageCreateEmpty()); |
652 | 0 | DecoderPtr decoder(avifDecoderCreate()); |
653 | 0 | if (!decoded || !decoder || |
654 | 0 | (avifDecoderReadFile(decoder.get(), decoded.get(), path.c_str()) != |
655 | 0 | AVIF_RESULT_OK)) { |
656 | 0 | return nullptr; |
657 | 0 | } |
658 | 0 | return decoded; |
659 | 0 | } |
660 | | |
661 | 0 | bool Av1EncoderAvailable() { |
662 | 0 | const char* encoding_codec = |
663 | 0 | avifCodecName(AVIF_CODEC_CHOICE_AUTO, AVIF_CODEC_FLAG_CAN_ENCODE); |
664 | 0 | return encoding_codec != nullptr && std::string(encoding_codec) != "avm"; |
665 | 0 | } |
666 | | |
667 | 0 | bool Av1DecoderAvailable() { |
668 | 0 | const char* decoding_codec = |
669 | 0 | avifCodecName(AVIF_CODEC_CHOICE_AUTO, AVIF_CODEC_FLAG_CAN_DECODE); |
670 | 0 | return decoding_codec != nullptr && std::string(decoding_codec) != "avm"; |
671 | 0 | } |
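
These availability checks matter because the test binaries may be built with the experimental AVM codec instead of an AV1 codec. A typical guard at the top of an AV1-specific test (sketch):

    // Skip rather than fail when no AV1 encoder or decoder is compiled in.
    if (!testutil::Av1EncoderAvailable() || !testutil::Av1DecoderAvailable()) {
      GTEST_SKIP() << "AV1 codec unavailable, skipping test";
    }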
672 | | |
673 | | //------------------------------------------------------------------------------ |
674 | | |
675 | | static avifResult avifIOLimitedReaderRead(avifIO* io, uint32_t readFlags, |
676 | | uint64_t offset, size_t size, |
677 | 0 | avifROData* out) { |
678 | 0 | auto reader = reinterpret_cast<AvifIOLimitedReader*>(io); |
679 | |
680 | 0 | if (offset > UINT64_MAX - size) { |
681 | 0 | return AVIF_RESULT_IO_ERROR; |
682 | 0 | } |
683 | 0 | if (offset + size > reader->clamp) { |
684 | 0 | return AVIF_RESULT_WAITING_ON_IO; |
685 | 0 | } |
686 | | |
687 | 0 | return reader->underlyingIO->read(reader->underlyingIO, readFlags, offset, |
688 | 0 | size, out); |
689 | 0 | } |
690 | | |
691 | 0 | static void avifIOLimitedReaderDestroy(avifIO* io) { |
692 | 0 | auto reader = reinterpret_cast<AvifIOLimitedReader*>(io); |
693 | 0 | reader->underlyingIO->destroy(reader->underlyingIO); |
694 | 0 | delete reader; |
695 | 0 | } |
696 | | |
697 | 0 | avifIO* AvifIOCreateLimitedReader(avifIO* underlyingIO, uint64_t clamp) { |
698 | 0 | return reinterpret_cast<avifIO*>( |
699 | 0 | new AvifIOLimitedReader{{ |
700 | 0 | avifIOLimitedReaderDestroy, |
701 | 0 | avifIOLimitedReaderRead, |
702 | 0 | nullptr, |
703 | 0 | underlyingIO->sizeHint, |
704 | 0 | underlyingIO->persistent, |
705 | 0 | nullptr, |
706 | 0 | }, |
707 | 0 | underlyingIO, |
708 | 0 | clamp}); |
709 | 0 | } |
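
The limited reader forwards reads to the underlying avifIO but reports AVIF_RESULT_WAITING_ON_IO for any read extending past the clamp, which lets tests simulate a partially downloaded file. A sketch, assuming an encoded payload in an AvifRwData named encoded:

    // Expose only the first 1024 bytes of an in-memory AVIF payload.
    avifIO* memory_io = avifIOCreateMemoryReader(encoded.data, encoded.size);
    ASSERT_NE(memory_io, nullptr);
    DecoderPtr decoder(avifDecoderCreate());
    ASSERT_NE(decoder, nullptr);
    // avifDecoderSetIO() transfers ownership; the decoder will call the limited
    // reader's destroy(), which also destroys the underlying memory reader.
    avifDecoderSetIO(
        decoder.get(),
        testutil::AvifIOCreateLimitedReader(memory_io, /*clamp=*/1024));
    // avifDecoderParse()/avifDecoderNextImage() now return
    // AVIF_RESULT_WAITING_ON_IO whenever they need bytes past the clamp.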
710 | | |
711 | | //------------------------------------------------------------------------------ |
712 | | |
713 | | std::vector<ImagePtr> ImageToGrid(const avifImage* image, uint32_t grid_cols, |
714 | 9.86k | uint32_t grid_rows) { |
715 | 9.86k | if (image->width < grid_cols || image->height < grid_rows) return {}; |
716 | | |
717 | | // Round up, to make sure all samples are used by exactly one cell. |
718 | 9.80k | uint32_t cell_width = (image->width + grid_cols - 1) / grid_cols; |
719 | 9.80k | uint32_t cell_height = (image->height + grid_rows - 1) / grid_rows; |
720 | | |
721 | 9.80k | if ((grid_cols - 1) * cell_width >= image->width) { |
722 | | // Some cells are completely outside the image. Fall back to a grid entirely |
723 | | // contained within the image boundaries. Some samples will be discarded, but |
724 | | // at least the test can go on. |
725 | 37 | cell_width = image->width / grid_cols; |
726 | 37 | } |
727 | 9.80k | if ((grid_rows - 1) * cell_height >= image->height) { |
728 | 39 | cell_height = image->height / grid_rows; |
729 | 39 | } |
730 | | |
731 | 9.80k | std::vector<ImagePtr> cells; |
732 | 22.2k | for (uint32_t row = 0; row < grid_rows; ++row) { |
733 | 38.7k | for (uint32_t col = 0; col < grid_cols; ++col) { |
734 | 26.3k | avifCropRect rect{col * cell_width, row * cell_height, cell_width, |
735 | 26.3k | cell_height}; |
736 | 26.3k | assert(rect.x < image->width); |
737 | 26.3k | assert(rect.y < image->height); |
738 | | // The right-most and bottom-most cells may be smaller than others. |
739 | | // The encoder will pad them. |
740 | 26.3k | if (rect.x + rect.width > image->width) { |
741 | 1.41k | rect.width = image->width - rect.x; |
742 | 1.41k | } |
743 | 26.3k | if (rect.y + rect.height > image->height) { |
744 | 1.39k | rect.height = image->height - rect.y; |
745 | 1.39k | } |
746 | 26.3k | cells.emplace_back(avifImageCreateEmpty()); |
747 | 26.3k | if (avifImageSetViewRect(cells.back().get(), image, &rect) != |
748 | 26.3k | AVIF_RESULT_OK) { |
749 | 7 | return {}; |
750 | 7 | } |
751 | 26.3k | } |
752 | 12.4k | } |
753 | 9.79k | return cells; |
754 | 9.80k | } |
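
ImageToGrid() and MergeGrid() (above) are designed as inverses: the cells are lightweight views into the original image, so reassembling them should reproduce it. A sketch of the round trip, assuming an ImagePtr named image in scope with both color and alpha planes:

    // Split into 2x2 cell views, then merge into a freshly allocated image.
    const std::vector<ImagePtr> cells = testutil::ImageToGrid(image.get(), 2, 2);
    ASSERT_FALSE(cells.empty());
    ImagePtr merged = testutil::CreateImage(
        static_cast<int>(image->width), static_cast<int>(image->height),
        static_cast<int>(image->depth), image->yuvFormat, AVIF_PLANES_ALL,
        image->yuvRange);
    ASSERT_NE(merged, nullptr);
    ASSERT_EQ(testutil::MergeGrid(2, 2, cells, merged.get()), AVIF_RESULT_OK);
    EXPECT_TRUE(
        testutil::AreImagesEqual(*image, *merged, /*ignore_alpha=*/false));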
755 | | |
756 | | std::vector<const avifImage*> UniquePtrToRawPtr( |
757 | 8.77k | const std::vector<ImagePtr>& unique_ptrs) { |
758 | 8.77k | std::vector<const avifImage*> rawPtrs; |
759 | 8.77k | rawPtrs.reserve(unique_ptrs.size()); |
760 | 22.8k | for (const ImagePtr& unique_ptr : unique_ptrs) { |
761 | 22.8k | rawPtrs.emplace_back(unique_ptr.get()); |
762 | 22.8k | } |
763 | 8.77k | return rawPtrs; |
764 | 8.77k | } |
765 | | |
766 | | //------------------------------------------------------------------------------ |
767 | | |
768 | | } // namespace testutil |
769 | | } // namespace avif |