/src/mozilla-central/media/libyuv/libyuv/source/convert_jpeg.cc
/*
 * Copyright 2011 The LibYuv Project Authors. All rights reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#include "libyuv/convert.h"
#include "libyuv/convert_argb.h"

#ifdef HAVE_JPEG
#include "libyuv/mjpeg_decoder.h"
#endif

#ifdef __cplusplus
namespace libyuv {
extern "C" {
#endif

#ifdef HAVE_JPEG
struct I420Buffers {
  uint8_t* y;
  int y_stride;
  uint8_t* u;
  int u_stride;
  uint8_t* v;
  int v_stride;
  int w;
  int h;
};
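
// Each Jpeg*ToI420 callback below is invoked by MJpegDecoder::DecodeToCallback
// once per group of decoded rows: it converts (or copies) the given number of
// rows into the destination planes, then advances the plane pointers so the
// next invocation writes directly below. The chroma pointers advance by
// ((rows + 1) >> 1) rows because I420 chroma is subsampled 2x vertically.
// JpegI400ToI420 has no source chroma; I400ToI420 fills U and V with neutral
// gray (0x80).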

static void JpegCopyI420(void* opaque,
                         const uint8_t* const* data,
                         const int* strides,
                         int rows) {
  I420Buffers* dest = (I420Buffers*)(opaque);
  I420Copy(data[0], strides[0], data[1], strides[1], data[2], strides[2],
           dest->y, dest->y_stride, dest->u, dest->u_stride, dest->v,
           dest->v_stride, dest->w, rows);
  dest->y += rows * dest->y_stride;
  dest->u += ((rows + 1) >> 1) * dest->u_stride;
  dest->v += ((rows + 1) >> 1) * dest->v_stride;
  dest->h -= rows;
}

static void JpegI422ToI420(void* opaque,
                           const uint8_t* const* data,
                           const int* strides,
                           int rows) {
  I420Buffers* dest = (I420Buffers*)(opaque);
  I422ToI420(data[0], strides[0], data[1], strides[1], data[2], strides[2],
             dest->y, dest->y_stride, dest->u, dest->u_stride, dest->v,
             dest->v_stride, dest->w, rows);
  dest->y += rows * dest->y_stride;
  dest->u += ((rows + 1) >> 1) * dest->u_stride;
  dest->v += ((rows + 1) >> 1) * dest->v_stride;
  dest->h -= rows;
}

static void JpegI444ToI420(void* opaque,
                           const uint8_t* const* data,
                           const int* strides,
                           int rows) {
  I420Buffers* dest = (I420Buffers*)(opaque);
  I444ToI420(data[0], strides[0], data[1], strides[1], data[2], strides[2],
             dest->y, dest->y_stride, dest->u, dest->u_stride, dest->v,
             dest->v_stride, dest->w, rows);
  dest->y += rows * dest->y_stride;
  dest->u += ((rows + 1) >> 1) * dest->u_stride;
  dest->v += ((rows + 1) >> 1) * dest->v_stride;
  dest->h -= rows;
}

static void JpegI400ToI420(void* opaque,
                           const uint8_t* const* data,
                           const int* strides,
                           int rows) {
  I420Buffers* dest = (I420Buffers*)(opaque);
  I400ToI420(data[0], strides[0], dest->y, dest->y_stride, dest->u,
             dest->u_stride, dest->v, dest->v_stride, dest->w, rows);
  dest->y += rows * dest->y_stride;
  dest->u += ((rows + 1) >> 1) * dest->u_stride;
  dest->v += ((rows + 1) >> 1) * dest->v_stride;
  dest->h -= rows;
}

// Query size of MJPG in pixels.
LIBYUV_API
int MJPGSize(const uint8_t* sample,
             size_t sample_size,
             int* width,
             int* height) {
  MJpegDecoder mjpeg_decoder;
  LIBYUV_BOOL ret = mjpeg_decoder.LoadFrame(sample, sample_size);
  if (ret) {
    *width = mjpeg_decoder.GetWidth();
    *height = mjpeg_decoder.GetHeight();
  }
  mjpeg_decoder.UnloadFrame();
  return ret ? 0 : -1;  // -1 for runtime failure.
}
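
// Example (illustrative sketch, not part of the library): probe a frame's
// dimensions before allocating destination planes. `jpeg_data` and `jpeg_size`
// are hypothetical names for a buffer holding one complete JPEG bitstream.
//
//   int width = 0;
//   int height = 0;
//   if (libyuv::MJPGSize(jpeg_data, jpeg_size, &width, &height) != 0) {
//     // Not a decodable JPEG frame.
//   }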

// MJPG (Motion JPEG) to I420.
// TODO(fbarchard): review src_width and src_height requirement. dst_width and
// dst_height may be enough.
LIBYUV_API
int MJPGToI420(const uint8_t* sample,
               size_t sample_size,
               uint8_t* dst_y,
               int dst_stride_y,
               uint8_t* dst_u,
               int dst_stride_u,
               uint8_t* dst_v,
               int dst_stride_v,
               int src_width,
               int src_height,
               int dst_width,
               int dst_height) {
  if (sample_size == kUnknownDataSize) {
    // ERROR: MJPEG frame size unknown
    return -1;
  }

  // TODO(fbarchard): Port MJpeg to C.
  MJpegDecoder mjpeg_decoder;
  LIBYUV_BOOL ret = mjpeg_decoder.LoadFrame(sample, sample_size);
  if (ret && (mjpeg_decoder.GetWidth() != src_width ||
              mjpeg_decoder.GetHeight() != src_height)) {
    // ERROR: MJPEG frame has unexpected dimensions
    mjpeg_decoder.UnloadFrame();
    return 1;  // runtime failure
  }
  if (ret) {
    I420Buffers bufs = {dst_y, dst_stride_y, dst_u, dst_stride_u,
                        dst_v, dst_stride_v, dst_width, dst_height};
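    // JPEG encodes chroma subsampling as per-component sampling factors:
    // Y at 2x2 with U and V at 1x1 is 4:2:0, Y at 2x1 (horizontal x
    // vertical) is 4:2:2, and all components at 1x1 is 4:4:4. The chain
    // below dispatches on those factors; MJPGToARGB performs the same checks.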
    // YUV420
    if (mjpeg_decoder.GetColorSpace() == MJpegDecoder::kColorSpaceYCbCr &&
        mjpeg_decoder.GetNumComponents() == 3 &&
        mjpeg_decoder.GetVertSampFactor(0) == 2 &&
        mjpeg_decoder.GetHorizSampFactor(0) == 2 &&
        mjpeg_decoder.GetVertSampFactor(1) == 1 &&
        mjpeg_decoder.GetHorizSampFactor(1) == 1 &&
        mjpeg_decoder.GetVertSampFactor(2) == 1 &&
        mjpeg_decoder.GetHorizSampFactor(2) == 1) {
      ret = mjpeg_decoder.DecodeToCallback(&JpegCopyI420, &bufs, dst_width,
                                           dst_height);
      // YUV422
    } else if (mjpeg_decoder.GetColorSpace() ==
                   MJpegDecoder::kColorSpaceYCbCr &&
               mjpeg_decoder.GetNumComponents() == 3 &&
               mjpeg_decoder.GetVertSampFactor(0) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(0) == 2 &&
               mjpeg_decoder.GetVertSampFactor(1) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(1) == 1 &&
               mjpeg_decoder.GetVertSampFactor(2) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(2) == 1) {
      ret = mjpeg_decoder.DecodeToCallback(&JpegI422ToI420, &bufs, dst_width,
                                           dst_height);
      // YUV444
    } else if (mjpeg_decoder.GetColorSpace() ==
                   MJpegDecoder::kColorSpaceYCbCr &&
               mjpeg_decoder.GetNumComponents() == 3 &&
               mjpeg_decoder.GetVertSampFactor(0) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(0) == 1 &&
               mjpeg_decoder.GetVertSampFactor(1) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(1) == 1 &&
               mjpeg_decoder.GetVertSampFactor(2) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(2) == 1) {
      ret = mjpeg_decoder.DecodeToCallback(&JpegI444ToI420, &bufs, dst_width,
                                           dst_height);
      // YUV400
    } else if (mjpeg_decoder.GetColorSpace() ==
                   MJpegDecoder::kColorSpaceGrayscale &&
               mjpeg_decoder.GetNumComponents() == 1 &&
               mjpeg_decoder.GetVertSampFactor(0) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(0) == 1) {
      ret = mjpeg_decoder.DecodeToCallback(&JpegI400ToI420, &bufs, dst_width,
                                           dst_height);
    } else {
      // TODO(fbarchard): Implement conversion for any other colorspace/sample
      // factors that occur in practice.
      // ERROR: Unable to convert MJPEG frame because format is not supported
      mjpeg_decoder.UnloadFrame();
      return 1;
    }
  }
  return ret ? 0 : 1;
}
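
// Example (illustrative sketch, not part of the library): decode one MJPEG
// frame into caller-owned I420 planes at the source size. `jpeg_data`,
// `jpeg_size`, `width` and `height` are hypothetical names; obtain the
// dimensions from MJPGSize() above. Requires <vector>.
//
//   int y_stride = width;
//   int uv_stride = (width + 1) / 2;
//   int uv_height = (height + 1) / 2;
//   std::vector<uint8_t> y_plane(y_stride * height);
//   std::vector<uint8_t> u_plane(uv_stride * uv_height);
//   std::vector<uint8_t> v_plane(uv_stride * uv_height);
//   int rv = libyuv::MJPGToI420(jpeg_data, jpeg_size,
//                               y_plane.data(), y_stride,
//                               u_plane.data(), uv_stride,
//                               v_plane.data(), uv_stride,
//                               width, height,   // expected source size
//                               width, height);  // destination size
//   // rv == 0 on success, nonzero on failure.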

#ifdef HAVE_JPEG
struct ARGBBuffers {
  uint8_t* argb;
  int argb_stride;
  int w;
  int h;
};

static void JpegI420ToARGB(void* opaque,
                           const uint8_t* const* data,
                           const int* strides,
                           int rows) {
  ARGBBuffers* dest = (ARGBBuffers*)(opaque);
  I420ToARGB(data[0], strides[0], data[1], strides[1], data[2], strides[2],
             dest->argb, dest->argb_stride, dest->w, rows);
  dest->argb += rows * dest->argb_stride;
  dest->h -= rows;
}

static void JpegI422ToARGB(void* opaque,
                           const uint8_t* const* data,
                           const int* strides,
                           int rows) {
  ARGBBuffers* dest = (ARGBBuffers*)(opaque);
  I422ToARGB(data[0], strides[0], data[1], strides[1], data[2], strides[2],
             dest->argb, dest->argb_stride, dest->w, rows);
  dest->argb += rows * dest->argb_stride;
  dest->h -= rows;
}

static void JpegI444ToARGB(void* opaque,
                           const uint8_t* const* data,
                           const int* strides,
                           int rows) {
  ARGBBuffers* dest = (ARGBBuffers*)(opaque);
  I444ToARGB(data[0], strides[0], data[1], strides[1], data[2], strides[2],
             dest->argb, dest->argb_stride, dest->w, rows);
  dest->argb += rows * dest->argb_stride;
  dest->h -= rows;
}

static void JpegI400ToARGB(void* opaque,
                           const uint8_t* const* data,
                           const int* strides,
                           int rows) {
  ARGBBuffers* dest = (ARGBBuffers*)(opaque);
  I400ToARGB(data[0], strides[0], dest->argb, dest->argb_stride, dest->w, rows);
  dest->argb += rows * dest->argb_stride;
  dest->h -= rows;
}

// MJPG (Motion JPEG) to ARGB.
// TODO(fbarchard): review src_width and src_height requirement. dst_width and
// dst_height may be enough.
LIBYUV_API
int MJPGToARGB(const uint8_t* sample,
               size_t sample_size,
               uint8_t* dst_argb,
               int dst_stride_argb,
               int src_width,
               int src_height,
               int dst_width,
               int dst_height) {
  if (sample_size == kUnknownDataSize) {
    // ERROR: MJPEG frame size unknown
    return -1;
  }

  // TODO(fbarchard): Port MJpeg to C.
  MJpegDecoder mjpeg_decoder;
  LIBYUV_BOOL ret = mjpeg_decoder.LoadFrame(sample, sample_size);
  if (ret && (mjpeg_decoder.GetWidth() != src_width ||
              mjpeg_decoder.GetHeight() != src_height)) {
    // ERROR: MJPEG frame has unexpected dimensions
    mjpeg_decoder.UnloadFrame();
    return 1;  // runtime failure
  }
  if (ret) {
    ARGBBuffers bufs = {dst_argb, dst_stride_argb, dst_width, dst_height};
    // YUV420
    if (mjpeg_decoder.GetColorSpace() == MJpegDecoder::kColorSpaceYCbCr &&
        mjpeg_decoder.GetNumComponents() == 3 &&
        mjpeg_decoder.GetVertSampFactor(0) == 2 &&
        mjpeg_decoder.GetHorizSampFactor(0) == 2 &&
        mjpeg_decoder.GetVertSampFactor(1) == 1 &&
        mjpeg_decoder.GetHorizSampFactor(1) == 1 &&
        mjpeg_decoder.GetVertSampFactor(2) == 1 &&
        mjpeg_decoder.GetHorizSampFactor(2) == 1) {
      ret = mjpeg_decoder.DecodeToCallback(&JpegI420ToARGB, &bufs, dst_width,
                                           dst_height);
      // YUV422
    } else if (mjpeg_decoder.GetColorSpace() ==
                   MJpegDecoder::kColorSpaceYCbCr &&
               mjpeg_decoder.GetNumComponents() == 3 &&
               mjpeg_decoder.GetVertSampFactor(0) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(0) == 2 &&
               mjpeg_decoder.GetVertSampFactor(1) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(1) == 1 &&
               mjpeg_decoder.GetVertSampFactor(2) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(2) == 1) {
      ret = mjpeg_decoder.DecodeToCallback(&JpegI422ToARGB, &bufs, dst_width,
                                           dst_height);
      // YUV444
    } else if (mjpeg_decoder.GetColorSpace() ==
                   MJpegDecoder::kColorSpaceYCbCr &&
               mjpeg_decoder.GetNumComponents() == 3 &&
               mjpeg_decoder.GetVertSampFactor(0) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(0) == 1 &&
               mjpeg_decoder.GetVertSampFactor(1) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(1) == 1 &&
               mjpeg_decoder.GetVertSampFactor(2) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(2) == 1) {
      ret = mjpeg_decoder.DecodeToCallback(&JpegI444ToARGB, &bufs, dst_width,
                                           dst_height);
      // YUV400
    } else if (mjpeg_decoder.GetColorSpace() ==
                   MJpegDecoder::kColorSpaceGrayscale &&
               mjpeg_decoder.GetNumComponents() == 1 &&
               mjpeg_decoder.GetVertSampFactor(0) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(0) == 1) {
      ret = mjpeg_decoder.DecodeToCallback(&JpegI400ToARGB, &bufs, dst_width,
                                           dst_height);
    } else {
      // TODO(fbarchard): Implement conversion for any other colorspace/sample
      // factors that occur in practice.
      // ERROR: Unable to convert MJPEG frame because format is not supported
      mjpeg_decoder.UnloadFrame();
      return 1;
    }
  }
  return ret ? 0 : 1;
}
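
// Example (illustrative sketch): decode one MJPEG frame straight to a packed
// ARGB buffer, 4 bytes per pixel. Names are hypothetical, as in the sketch
// after MJPGToI420.
//
//   int argb_stride = width * 4;
//   std::vector<uint8_t> argb(argb_stride * height);
//   int rv = libyuv::MJPGToARGB(jpeg_data, jpeg_size, argb.data(),
//                               argb_stride, width, height, width, height);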
#endif  // HAVE_JPEG

#endif  // HAVE_JPEG

#ifdef __cplusplus
}  // extern "C"
}  // namespace libyuv
#endif