/src/ffmpeg/libavcodec/v4l2_context.c
Line | Count | Source |
1 | | /* |
2 | | * V4L2 context helper functions. |
3 | | * |
4 | | * Copyright (C) 2017 Alexis Ballier <aballier@gentoo.org> |
5 | | * Copyright (C) 2017 Jorge Ramirez <jorge.ramirez-ortiz@linaro.org> |
6 | | * |
7 | | * This file is part of FFmpeg. |
8 | | * |
9 | | * FFmpeg is free software; you can redistribute it and/or |
10 | | * modify it under the terms of the GNU Lesser General Public |
11 | | * License as published by the Free Software Foundation; either |
12 | | * version 2.1 of the License, or (at your option) any later version. |
13 | | * |
14 | | * FFmpeg is distributed in the hope that it will be useful, |
15 | | * but WITHOUT ANY WARRANTY; without even the implied warranty of |
16 | | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU |
17 | | * Lesser General Public License for more details. |
18 | | * |
19 | | * You should have received a copy of the GNU Lesser General Public |
20 | | * License along with FFmpeg; if not, write to the Free Software |
21 | | * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA |
22 | | */ |
23 | | |
24 | | #include <linux/videodev2.h> |
25 | | #include <sys/ioctl.h> |
26 | | #include <sys/mman.h> |
27 | | #include <unistd.h> |
28 | | #include <fcntl.h> |
29 | | #include <poll.h> |
30 | | #include "libavutil/mem.h" |
31 | | #include "libavcodec/avcodec.h" |
32 | | #include "decode.h" |
33 | | #include "v4l2_buffers.h" |
34 | | #include "v4l2_fmt.h" |
35 | | #include "v4l2_m2m.h" |
36 | | |
37 | | struct v4l2_format_update { |
38 | | uint32_t v4l2_fmt; |
39 | | int update_v4l2; |
40 | | |
41 | | enum AVPixelFormat av_fmt; |
42 | | int update_avfmt; |
43 | | }; |
44 | | |
45 | | static inline V4L2m2mContext *ctx_to_m2mctx(V4L2Context *ctx) |
46 | 0 | { |
47 | 0 | return V4L2_TYPE_IS_OUTPUT(ctx->type) ? |
48 | 0 | container_of(ctx, V4L2m2mContext, output) : |
49 | 0 | container_of(ctx, V4L2m2mContext, capture); |
50 | 0 | } |
51 | | |
52 | | static inline AVCodecContext *logger(V4L2Context *ctx) |
53 | 0 | { |
54 | 0 | return ctx_to_m2mctx(ctx)->avctx; |
55 | 0 | } |
56 | | |
57 | | static inline unsigned int v4l2_get_width(struct v4l2_format *fmt) |
58 | 0 | { |
59 | 0 | return V4L2_TYPE_IS_MULTIPLANAR(fmt->type) ? fmt->fmt.pix_mp.width : fmt->fmt.pix.width; |
60 | 0 | } |
61 | | |
62 | | static inline unsigned int v4l2_get_height(struct v4l2_format *fmt) |
63 | 0 | { |
64 | 0 | return V4L2_TYPE_IS_MULTIPLANAR(fmt->type) ? fmt->fmt.pix_mp.height : fmt->fmt.pix.height; |
65 | 0 | } |
66 | | |
67 | | static AVRational v4l2_get_sar(V4L2Context *ctx) |
68 | 0 | { |
69 | 0 | struct AVRational sar = { 0, 1 }; |
70 | 0 | struct v4l2_cropcap cropcap; |
71 | 0 | int ret; |
72 | |
73 | 0 | memset(&cropcap, 0, sizeof(cropcap)); |
74 | 0 | cropcap.type = ctx->type; |
75 | |
76 | 0 | ret = ioctl(ctx_to_m2mctx(ctx)->fd, VIDIOC_CROPCAP, &cropcap); |
77 | 0 | if (ret) |
78 | 0 | return sar; |
79 | | |
80 | 0 | sar.num = cropcap.pixelaspect.numerator; |
81 | 0 | sar.den = cropcap.pixelaspect.denominator; |
82 | 0 | return sar; |
83 | 0 | } |
84 | | |
85 | | static inline unsigned int v4l2_resolution_changed(V4L2Context *ctx, struct v4l2_format *fmt2) |
86 | 0 | { |
87 | 0 | struct v4l2_format *fmt1 = &ctx->format; |
88 | 0 | int ret = V4L2_TYPE_IS_MULTIPLANAR(ctx->type) ? |
89 | 0 | fmt1->fmt.pix_mp.width != fmt2->fmt.pix_mp.width || |
90 | 0 | fmt1->fmt.pix_mp.height != fmt2->fmt.pix_mp.height |
91 | 0 | : |
92 | 0 | fmt1->fmt.pix.width != fmt2->fmt.pix.width || |
93 | 0 | fmt1->fmt.pix.height != fmt2->fmt.pix.height; |
94 | |
95 | 0 | if (ret) |
96 | 0 | av_log(logger(ctx), AV_LOG_DEBUG, "%s changed (%dx%d) -> (%dx%d)\n", |
97 | 0 | ctx->name, |
98 | 0 | v4l2_get_width(fmt1), v4l2_get_height(fmt1), |
99 | 0 | v4l2_get_width(fmt2), v4l2_get_height(fmt2)); |
100 | |
101 | 0 | return ret; |
102 | 0 | } |
103 | | |
104 | | static inline int v4l2_type_supported(V4L2Context *ctx) |
105 | 0 | { |
106 | 0 | return ctx->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE || |
107 | 0 | ctx->type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE || |
108 | 0 | ctx->type == V4L2_BUF_TYPE_VIDEO_CAPTURE || |
109 | 0 | ctx->type == V4L2_BUF_TYPE_VIDEO_OUTPUT; |
110 | 0 | } |
111 | | |
112 | | static inline int v4l2_get_framesize_compressed(V4L2Context* ctx, int width, int height) |
113 | 0 | { |
114 | 0 | V4L2m2mContext *s = ctx_to_m2mctx(ctx); |
115 | 0 | const int SZ_4K = 0x1000; |
116 | 0 | int size; |
117 | |
118 | 0 | if (s->avctx && av_codec_is_decoder(s->avctx->codec)) |
119 | 0 | return ((width * height * 3 / 2) / 2) + 128; |
120 | | |
121 | | /* encoder */ |
122 | 0 | size = FFALIGN(height, 32) * FFALIGN(width, 32) * 3 / 2 / 2; |
123 | 0 | return FFALIGN(size, SZ_4K); |
124 | 0 | } |
125 | | |
126 | | static inline void v4l2_save_to_context(V4L2Context* ctx, struct v4l2_format_update *fmt) |
127 | 0 | { |
128 | 0 | ctx->format.type = ctx->type; |
129 | |
130 | 0 | if (fmt->update_avfmt) |
131 | 0 | ctx->av_pix_fmt = fmt->av_fmt; |
132 | |
133 | 0 | if (V4L2_TYPE_IS_MULTIPLANAR(ctx->type)) { |
134 | | /* update the sizes to handle the reconfiguration of the capture stream at runtime */ |
135 | 0 | ctx->format.fmt.pix_mp.height = ctx->height; |
136 | 0 | ctx->format.fmt.pix_mp.width = ctx->width; |
137 | 0 | if (fmt->update_v4l2) { |
138 | 0 | ctx->format.fmt.pix_mp.pixelformat = fmt->v4l2_fmt; |
139 | | |
140 | | /* s5p-mfc requires the user to specify a buffer size */ |
141 | 0 | ctx->format.fmt.pix_mp.plane_fmt[0].sizeimage = |
142 | 0 | v4l2_get_framesize_compressed(ctx, ctx->width, ctx->height); |
143 | 0 | } |
144 | 0 | } else { |
145 | 0 | ctx->format.fmt.pix.height = ctx->height; |
146 | 0 | ctx->format.fmt.pix.width = ctx->width; |
147 | 0 | if (fmt->update_v4l2) { |
148 | 0 | ctx->format.fmt.pix.pixelformat = fmt->v4l2_fmt; |
149 | | |
150 | | /* s5p-mfc requires the user to specify a buffer size */ |
151 | 0 | ctx->format.fmt.pix.sizeimage = |
152 | 0 | v4l2_get_framesize_compressed(ctx, ctx->width, ctx->height); |
153 | 0 | } |
154 | 0 | } |
155 | 0 | } |
156 | | |
157 | | static int v4l2_start_decode(V4L2Context *ctx) |
158 | 0 | { |
159 | 0 | struct v4l2_decoder_cmd cmd = { |
160 | 0 | .cmd = V4L2_DEC_CMD_START, |
161 | 0 | .flags = 0, |
162 | 0 | }; |
163 | 0 | int ret; |
164 | |
165 | 0 | ret = ioctl(ctx_to_m2mctx(ctx)->fd, VIDIOC_DECODER_CMD, &cmd); |
166 | 0 | if (ret) |
167 | 0 | return AVERROR(errno); |
168 | | |
169 | 0 | return 0; |
170 | 0 | } |
171 | | |
172 | | /** |
173 | | * handle resolution change event and end of stream event |
174 | | * returns 1 if reinit was successful, negative if it failed |
175 | | * returns 0 if reinit was not executed |
176 | | */ |
177 | | static int v4l2_handle_event(V4L2Context *ctx) |
178 | 0 | { |
179 | 0 | V4L2m2mContext *s = ctx_to_m2mctx(ctx); |
180 | 0 | struct v4l2_format cap_fmt = s->capture.format; |
181 | 0 | struct v4l2_event evt = { 0 }; |
182 | 0 | int ret; |
183 | |
184 | 0 | ret = ioctl(s->fd, VIDIOC_DQEVENT, &evt); |
185 | 0 | if (ret < 0) { |
186 | 0 | av_log(logger(ctx), AV_LOG_ERROR, "%s VIDIOC_DQEVENT\n", ctx->name); |
187 | 0 | return 0; |
188 | 0 | } |
189 | | |
190 | 0 | if (evt.type == V4L2_EVENT_EOS) { |
191 | 0 | ctx->done = 1; |
192 | 0 | return 0; |
193 | 0 | } |
194 | | |
195 | 0 | if (evt.type != V4L2_EVENT_SOURCE_CHANGE) |
196 | 0 | return 0; |
197 | | |
198 | 0 | ret = ioctl(s->fd, VIDIOC_G_FMT, &cap_fmt); |
199 | 0 | if (ret) { |
200 | 0 | av_log(logger(ctx), AV_LOG_ERROR, "%s VIDIOC_G_FMT\n", s->capture.name); |
201 | 0 | return 0; |
202 | 0 | } |
203 | | |
204 | 0 | if (v4l2_resolution_changed(&s->capture, &cap_fmt)) { |
205 | 0 | s->capture.height = v4l2_get_height(&cap_fmt); |
206 | 0 | s->capture.width = v4l2_get_width(&cap_fmt); |
207 | 0 | s->capture.sample_aspect_ratio = v4l2_get_sar(&s->capture); |
208 | 0 | } else { |
209 | 0 | v4l2_start_decode(ctx); |
210 | 0 | return 0; |
211 | 0 | } |
212 | | |
213 | 0 | s->reinit = 1; |
214 | |
215 | 0 | if (s->avctx) |
216 | 0 | ret = ff_set_dimensions(s->avctx, s->capture.width, s->capture.height); |
217 | 0 | if (ret < 0) |
218 | 0 | av_log(logger(ctx), AV_LOG_WARNING, "update avcodec height and width\n"); |
219 | |
220 | 0 | ret = ff_v4l2_m2m_codec_reinit(s); |
221 | 0 | if (ret) { |
222 | 0 | av_log(logger(ctx), AV_LOG_ERROR, "v4l2_m2m_codec_reinit\n"); |
223 | 0 | return AVERROR(EINVAL); |
224 | 0 | } |
225 | | |
226 | | /* reinit executed */ |
227 | 0 | return 1; |
228 | 0 | } |
229 | | |
230 | | static int v4l2_stop_decode(V4L2Context *ctx) |
231 | 0 | { |
232 | 0 | struct v4l2_decoder_cmd cmd = { |
233 | 0 | .cmd = V4L2_DEC_CMD_STOP, |
234 | 0 | .flags = 0, |
235 | 0 | }; |
236 | 0 | int ret; |
237 | |
238 | 0 | ret = ioctl(ctx_to_m2mctx(ctx)->fd, VIDIOC_DECODER_CMD, &cmd); |
239 | 0 | if (ret) { |
240 | | /* DECODER_CMD is optional */ |
241 | 0 | if (errno == ENOTTY) |
242 | 0 | return ff_v4l2_context_set_status(ctx, VIDIOC_STREAMOFF); |
243 | 0 | else |
244 | 0 | return AVERROR(errno); |
245 | 0 | } |
246 | | |
247 | 0 | return 0; |
248 | 0 | } |
249 | | |
250 | | static int v4l2_stop_encode(V4L2Context *ctx) |
251 | 0 | { |
252 | 0 | struct v4l2_encoder_cmd cmd = { |
253 | 0 | .cmd = V4L2_ENC_CMD_STOP, |
254 | 0 | .flags = 0, |
255 | 0 | }; |
256 | 0 | int ret; |
257 | |
258 | 0 | ret = ioctl(ctx_to_m2mctx(ctx)->fd, VIDIOC_ENCODER_CMD, &cmd); |
259 | 0 | if (ret) { |
260 | | /* ENCODER_CMD is optional */ |
261 | 0 | if (errno == ENOTTY) |
262 | 0 | return ff_v4l2_context_set_status(ctx, VIDIOC_STREAMOFF); |
263 | 0 | else |
264 | 0 | return AVERROR(errno); |
265 | 0 | } |
266 | | |
267 | 0 | return 0; |
268 | 0 | } |
269 | | |
270 | | static V4L2Buffer* v4l2_dequeue_v4l2buf(V4L2Context *ctx, int timeout) |
271 | 0 | { |
272 | 0 | struct v4l2_plane planes[VIDEO_MAX_PLANES]; |
273 | 0 | struct v4l2_buffer buf = { 0 }; |
274 | 0 | V4L2Buffer *avbuf; |
275 | 0 | struct pollfd pfd = { |
276 | 0 | .events = POLLIN | POLLRDNORM | POLLPRI | POLLOUT | POLLWRNORM, /* default blocking capture */ |
277 | 0 | .fd = ctx_to_m2mctx(ctx)->fd, |
278 | 0 | }; |
279 | 0 | int i, ret; |
280 | |
281 | 0 | if (!V4L2_TYPE_IS_OUTPUT(ctx->type) && ctx->buffers) { |
282 | 0 | for (i = 0; i < ctx->num_buffers; i++) { |
283 | 0 | if (ctx->buffers[i].status == V4L2BUF_IN_DRIVER) |
284 | 0 | break; |
285 | 0 | } |
286 | 0 | if (i == ctx->num_buffers) |
287 | 0 | av_log(logger(ctx), AV_LOG_WARNING, "All capture buffers returned to " |
288 | 0 | "userspace. Increase num_capture_buffers " |
289 | 0 | "to prevent device deadlock or dropped " |
290 | 0 | "packets/frames.\n"); |
291 | 0 | } |
292 | | |
293 | | /* if we are draining and there are no more capture buffers queued in the driver we are done */ |
294 | 0 | if (!V4L2_TYPE_IS_OUTPUT(ctx->type) && ctx_to_m2mctx(ctx)->draining) { |
295 | 0 | for (i = 0; i < ctx->num_buffers; i++) { |
296 | | /* capture buffer initialization happens during decode hence |
297 | | * detection happens at runtime |
298 | | */ |
299 | 0 | if (!ctx->buffers) |
300 | 0 | break; |
301 | | |
302 | 0 | if (ctx->buffers[i].status == V4L2BUF_IN_DRIVER) |
303 | 0 | goto start; |
304 | 0 | } |
305 | 0 | ctx->done = 1; |
306 | 0 | return NULL; |
307 | 0 | } |
308 | | |
309 | 0 | start: |
310 | 0 | if (V4L2_TYPE_IS_OUTPUT(ctx->type)) |
311 | 0 | pfd.events = POLLOUT | POLLWRNORM; |
312 | 0 | else { |
313 | | /* no need to listen to requests for more input while draining */ |
314 | 0 | if (ctx_to_m2mctx(ctx)->draining) |
315 | 0 | pfd.events = POLLIN | POLLRDNORM | POLLPRI; |
316 | 0 | } |
317 | |
318 | 0 | for (;;) { |
319 | 0 | ret = poll(&pfd, 1, timeout); |
320 | 0 | if (ret > 0) |
321 | 0 | break; |
322 | 0 | if (errno == EINTR) |
323 | 0 | continue; |
324 | 0 | return NULL; |
325 | 0 | } |
326 | | |
327 | | /* 0. handle errors */ |
328 | 0 | if (pfd.revents & POLLERR) { |
329 | | /* if we are trying to get free buffers but none have been queued yet, |
330 | | * or if no buffers have been allocated yet, no need to raise a warning |
331 | | */ |
332 | 0 | if (timeout == 0) { |
333 | 0 | if (!ctx->buffers) |
334 | 0 | return NULL; |
335 | | |
336 | 0 | for (i = 0; i < ctx->num_buffers; i++) { |
337 | 0 | if (ctx->buffers[i].status != V4L2BUF_AVAILABLE) |
338 | 0 | av_log(logger(ctx), AV_LOG_WARNING, "%s POLLERR\n", ctx->name); |
339 | 0 | } |
340 | 0 | } |
341 | 0 | else |
342 | 0 | av_log(logger(ctx), AV_LOG_WARNING, "%s POLLERR\n", ctx->name); |
343 | | |
344 | 0 | return NULL; |
345 | 0 | } |
346 | | |
347 | | /* 1. handle resolution changes */ |
348 | 0 | if (pfd.revents & POLLPRI) { |
349 | 0 | ret = v4l2_handle_event(ctx); |
350 | 0 | if (ret < 0) { |
351 | | /* if re-init failed, abort */ |
352 | 0 | ctx->done = 1; |
353 | 0 | return NULL; |
354 | 0 | } |
355 | 0 | if (ret) { |
356 | | /* if re-init was successful drop the buffer (if there was one) |
357 | | * since we had to reconfigure capture (unmap all buffers) |
358 | | */ |
359 | 0 | return NULL; |
360 | 0 | } |
361 | 0 | } |
362 | | |
363 | | /* 2. dequeue the buffer */ |
364 | 0 | if (pfd.revents & (POLLIN | POLLRDNORM | POLLOUT | POLLWRNORM)) { |
365 | |
366 | 0 | if (!V4L2_TYPE_IS_OUTPUT(ctx->type)) { |
367 | | /* there is a capture buffer ready */ |
368 | 0 | if (pfd.revents & (POLLIN | POLLRDNORM)) |
369 | 0 | goto dequeue; |
370 | | |
371 | | /* the driver is ready to accept more input; instead of waiting for the capture |
372 | | * buffer to complete we return NULL so input can proceed (we are single threaded) |
373 | | */ |
374 | 0 | if (pfd.revents & (POLLOUT | POLLWRNORM)) |
375 | 0 | return NULL; |
376 | 0 | } |
377 | | |
378 | 0 | dequeue: |
379 | 0 | memset(&buf, 0, sizeof(buf)); |
380 | 0 | buf.memory = V4L2_MEMORY_MMAP; |
381 | 0 | buf.type = ctx->type; |
382 | 0 | if (V4L2_TYPE_IS_MULTIPLANAR(ctx->type)) { |
383 | 0 | memset(planes, 0, sizeof(planes)); |
384 | 0 | buf.length = VIDEO_MAX_PLANES; |
385 | 0 | buf.m.planes = planes; |
386 | 0 | } |
387 | |
388 | 0 | ret = ioctl(ctx_to_m2mctx(ctx)->fd, VIDIOC_DQBUF, &buf); |
389 | 0 | if (ret) { |
390 | 0 | if (errno != EAGAIN) { |
391 | 0 | ctx->done = 1; |
392 | 0 | if (errno != EPIPE) |
393 | 0 | av_log(logger(ctx), AV_LOG_DEBUG, "%s VIDIOC_DQBUF, errno (%s)\n", |
394 | 0 | ctx->name, av_err2str(AVERROR(errno))); |
395 | 0 | } |
396 | 0 | return NULL; |
397 | 0 | } |
398 | | |
399 | 0 | if (ctx_to_m2mctx(ctx)->draining && !V4L2_TYPE_IS_OUTPUT(ctx->type)) { |
400 | 0 | int bytesused = V4L2_TYPE_IS_MULTIPLANAR(buf.type) ? |
401 | 0 | buf.m.planes[0].bytesused : buf.bytesused; |
402 | 0 | if (bytesused == 0) { |
403 | 0 | ctx->done = 1; |
404 | 0 | return NULL; |
405 | 0 | } |
406 | 0 | #ifdef V4L2_BUF_FLAG_LAST |
407 | 0 | if (buf.flags & V4L2_BUF_FLAG_LAST) |
408 | 0 | ctx->done = 1; |
409 | 0 | #endif |
410 | 0 | } |
411 | | |
412 | 0 | avbuf = &ctx->buffers[buf.index]; |
413 | 0 | avbuf->status = V4L2BUF_AVAILABLE; |
414 | 0 | avbuf->buf = buf; |
415 | 0 | if (V4L2_TYPE_IS_MULTIPLANAR(ctx->type)) { |
416 | 0 | memcpy(avbuf->planes, planes, sizeof(planes)); |
417 | 0 | avbuf->buf.m.planes = avbuf->planes; |
418 | 0 | } |
419 | 0 | return avbuf; |
420 | 0 | } |
421 | | |
422 | 0 | return NULL; |
423 | 0 | } |
424 | | |
425 | | static V4L2Buffer* v4l2_getfree_v4l2buf(V4L2Context *ctx) |
426 | 0 | { |
427 | 0 | int timeout = 0; /* return when no more buffers to dequeue */ |
428 | 0 | int i; |
429 | | |
430 | | /* get back as many output buffers as possible */ |
431 | 0 | if (V4L2_TYPE_IS_OUTPUT(ctx->type)) { |
432 | 0 | do { |
433 | 0 | } while (v4l2_dequeue_v4l2buf(ctx, timeout)); |
434 | 0 | } |
435 | |
436 | 0 | for (i = 0; i < ctx->num_buffers; i++) { |
437 | 0 | if (ctx->buffers[i].status == V4L2BUF_AVAILABLE) |
438 | 0 | return &ctx->buffers[i]; |
439 | 0 | } |
440 | | |
441 | 0 | return NULL; |
442 | 0 | } |
443 | | |
444 | | static int v4l2_release_buffers(V4L2Context* ctx) |
445 | 0 | { |
446 | 0 | struct v4l2_requestbuffers req = { |
447 | 0 | .memory = V4L2_MEMORY_MMAP, |
448 | 0 | .type = ctx->type, |
449 | 0 | .count = 0, /* 0 -> unmaps buffers from the driver */ |
450 | 0 | }; |
451 | 0 | int i, j; |
452 | |
453 | 0 | for (i = 0; i < ctx->num_buffers; i++) { |
454 | 0 | V4L2Buffer *buffer = &ctx->buffers[i]; |
455 | |
456 | 0 | for (j = 0; j < buffer->num_planes; j++) { |
457 | 0 | struct V4L2Plane_info *p = &buffer->plane_info[j]; |
458 | 0 | if (p->mm_addr && p->length) |
459 | 0 | if (munmap(p->mm_addr, p->length) < 0) |
460 | 0 | av_log(logger(ctx), AV_LOG_ERROR, "%s unmap plane (%s))\n", ctx->name, av_err2str(AVERROR(errno))); |
461 | 0 | } |
462 | 0 | } |
463 | |
464 | 0 | return ioctl(ctx_to_m2mctx(ctx)->fd, VIDIOC_REQBUFS, &req); |
465 | 0 | } |
466 | | |
467 | | static inline int v4l2_try_raw_format(V4L2Context* ctx, enum AVPixelFormat pixfmt) |
468 | 0 | { |
469 | 0 | struct v4l2_format *fmt = &ctx->format; |
470 | 0 | uint32_t v4l2_fmt; |
471 | 0 | int ret; |
472 | |
473 | 0 | v4l2_fmt = ff_v4l2_format_avfmt_to_v4l2(pixfmt); |
474 | 0 | if (!v4l2_fmt) |
475 | 0 | return AVERROR(EINVAL); |
476 | | |
477 | 0 | if (V4L2_TYPE_IS_MULTIPLANAR(ctx->type)) |
478 | 0 | fmt->fmt.pix_mp.pixelformat = v4l2_fmt; |
479 | 0 | else |
480 | 0 | fmt->fmt.pix.pixelformat = v4l2_fmt; |
481 | |
482 | 0 | fmt->type = ctx->type; |
483 | |
484 | 0 | ret = ioctl(ctx_to_m2mctx(ctx)->fd, VIDIOC_TRY_FMT, fmt); |
485 | 0 | if (ret) |
486 | 0 | return AVERROR(EINVAL); |
487 | | |
488 | 0 | return 0; |
489 | 0 | } |
490 | | |
491 | | static int v4l2_get_raw_format(V4L2Context* ctx, enum AVPixelFormat *p) |
492 | 0 | { |
493 | 0 | enum AVPixelFormat pixfmt = ctx->av_pix_fmt; |
494 | 0 | struct v4l2_fmtdesc fdesc; |
495 | 0 | int ret; |
496 | |
497 | 0 | memset(&fdesc, 0, sizeof(fdesc)); |
498 | 0 | fdesc.type = ctx->type; |
499 | |
500 | 0 | if (pixfmt != AV_PIX_FMT_NONE) { |
501 | 0 | ret = v4l2_try_raw_format(ctx, pixfmt); |
502 | 0 | if (!ret) |
503 | 0 | return 0; |
504 | 0 | } |
505 | | |
506 | 0 | for (;;) { |
507 | 0 | ret = ioctl(ctx_to_m2mctx(ctx)->fd, VIDIOC_ENUM_FMT, &fdesc); |
508 | 0 | if (ret) |
509 | 0 | return AVERROR(EINVAL); |
510 | | |
511 | 0 | pixfmt = ff_v4l2_format_v4l2_to_avfmt(fdesc.pixelformat, AV_CODEC_ID_RAWVIDEO); |
512 | 0 | ret = v4l2_try_raw_format(ctx, pixfmt); |
513 | 0 | if (ret){ |
514 | 0 | fdesc.index++; |
515 | 0 | continue; |
516 | 0 | } |
517 | | |
518 | 0 | *p = pixfmt; |
519 | |
520 | 0 | return 0; |
521 | 0 | } |
522 | | |
523 | 0 | return AVERROR(EINVAL); |
524 | 0 | } |
525 | | |
526 | | static int v4l2_get_coded_format(V4L2Context* ctx, uint32_t *p) |
527 | 0 | { |
528 | 0 | struct v4l2_fmtdesc fdesc; |
529 | 0 | uint32_t v4l2_fmt; |
530 | 0 | int ret; |
531 | | |
532 | | /* translate to a valid v4l2 format */ |
533 | 0 | v4l2_fmt = ff_v4l2_format_avcodec_to_v4l2(ctx->av_codec_id); |
534 | 0 | if (!v4l2_fmt) |
535 | 0 | return AVERROR(EINVAL); |
536 | | |
537 | | /* check if the driver supports this format */ |
538 | 0 | memset(&fdesc, 0, sizeof(fdesc)); |
539 | 0 | fdesc.type = ctx->type; |
540 | |
541 | 0 | for (;;) { |
542 | 0 | ret = ioctl(ctx_to_m2mctx(ctx)->fd, VIDIOC_ENUM_FMT, &fdesc); |
543 | 0 | if (ret) |
544 | 0 | return AVERROR(EINVAL); |
545 | | |
546 | 0 | if (fdesc.pixelformat == v4l2_fmt) |
547 | 0 | break; |
548 | | |
549 | 0 | fdesc.index++; |
550 | 0 | } |
551 | | |
552 | 0 | *p = v4l2_fmt; |
553 | |
554 | 0 | return 0; |
555 | 0 | } |
556 | | |
557 | | /***************************************************************************** |
558 | | * |
559 | | * V4L2 Context Interface |
560 | | * |
561 | | *****************************************************************************/ |
562 | | |
563 | | int ff_v4l2_context_set_status(V4L2Context* ctx, uint32_t cmd) |
564 | 0 | { |
565 | 0 | int type = ctx->type; |
566 | 0 | int ret; |
567 | |
568 | 0 | ret = ioctl(ctx_to_m2mctx(ctx)->fd, cmd, &type); |
569 | 0 | if (ret < 0) |
570 | 0 | return AVERROR(errno); |
571 | | |
572 | 0 | ctx->streamon = (cmd == VIDIOC_STREAMON); |
573 | |
574 | 0 | return 0; |
575 | 0 | } |
576 | | |
577 | | int ff_v4l2_context_enqueue_frame(V4L2Context* ctx, const AVFrame* frame) |
578 | 0 | { |
579 | 0 | V4L2m2mContext *s = ctx_to_m2mctx(ctx); |
580 | 0 | V4L2Buffer* avbuf; |
581 | 0 | int ret; |
582 | |
583 | 0 | if (!frame) { |
584 | 0 | ret = v4l2_stop_encode(ctx); |
585 | 0 | if (ret) |
586 | 0 | av_log(logger(ctx), AV_LOG_ERROR, "%s stop_encode\n", ctx->name); |
587 | 0 | s->draining= 1; |
588 | 0 | return 0; |
589 | 0 | } |
590 | | |
591 | 0 | avbuf = v4l2_getfree_v4l2buf(ctx); |
592 | 0 | if (!avbuf) |
593 | 0 | return AVERROR(EAGAIN); |
594 | | |
595 | 0 | ret = ff_v4l2_buffer_avframe_to_buf(frame, avbuf); |
596 | 0 | if (ret) |
597 | 0 | return ret; |
598 | | |
599 | 0 | return ff_v4l2_buffer_enqueue(avbuf); |
600 | 0 | } |
601 | | |
602 | | int ff_v4l2_context_enqueue_packet(V4L2Context* ctx, const AVPacket* pkt) |
603 | 0 | { |
604 | 0 | V4L2m2mContext *s = ctx_to_m2mctx(ctx); |
605 | 0 | V4L2Buffer* avbuf; |
606 | 0 | int ret; |
607 | |
608 | 0 | if (!pkt->size) { |
609 | 0 | ret = v4l2_stop_decode(ctx); |
610 | 0 | if (ret) |
611 | 0 | av_log(logger(ctx), AV_LOG_ERROR, "%s stop_decode\n", ctx->name); |
612 | 0 | s->draining = 1; |
613 | 0 | return 0; |
614 | 0 | } |
615 | | |
616 | 0 | avbuf = v4l2_getfree_v4l2buf(ctx); |
617 | 0 | if (!avbuf) |
618 | 0 | return AVERROR(EAGAIN); |
619 | | |
620 | 0 | ret = ff_v4l2_buffer_avpkt_to_buf(pkt, avbuf); |
621 | 0 | if (ret) |
622 | 0 | return ret; |
623 | | |
624 | 0 | return ff_v4l2_buffer_enqueue(avbuf); |
625 | 0 | } |
626 | | |
627 | | int ff_v4l2_context_dequeue_frame(V4L2Context* ctx, AVFrame* frame, int timeout) |
628 | 0 | { |
629 | 0 | V4L2Buffer *avbuf; |
630 | | |
631 | | /* |
632 | | * timeout=-1 blocks until: |
633 | | * 1. decoded frame available |
634 | | * 2. an input buffer is ready to be dequeued |
635 | | */ |
636 | 0 | avbuf = v4l2_dequeue_v4l2buf(ctx, timeout); |
637 | 0 | if (!avbuf) { |
638 | 0 | if (ctx->done) |
639 | 0 | return AVERROR_EOF; |
640 | | |
641 | 0 | return AVERROR(EAGAIN); |
642 | 0 | } |
643 | | |
644 | 0 | return ff_v4l2_buffer_buf_to_avframe(frame, avbuf); |
645 | 0 | } |
646 | | |
647 | | int ff_v4l2_context_dequeue_packet(V4L2Context* ctx, AVPacket* pkt) |
648 | 0 | { |
649 | 0 | V4L2Buffer *avbuf; |
650 | | |
651 | | /* |
652 | | * blocks until: |
653 | | * 1. encoded packet available |
654 | | * 2. an input buffer ready to be dequeued |
655 | | */ |
656 | 0 | avbuf = v4l2_dequeue_v4l2buf(ctx, -1); |
657 | 0 | if (!avbuf) { |
658 | 0 | if (ctx->done) |
659 | 0 | return AVERROR_EOF; |
660 | | |
661 | 0 | return AVERROR(EAGAIN); |
662 | 0 | } |
663 | | |
664 | 0 | return ff_v4l2_buffer_buf_to_avpkt(pkt, avbuf); |
665 | 0 | } |
666 | | |
667 | | int ff_v4l2_context_get_format(V4L2Context* ctx, int probe) |
668 | 0 | { |
669 | 0 | struct v4l2_format_update fmt = { 0 }; |
670 | 0 | int ret; |
671 | |
672 | 0 | if (ctx->av_codec_id == AV_CODEC_ID_RAWVIDEO) { |
673 | 0 | ret = v4l2_get_raw_format(ctx, &fmt.av_fmt); |
674 | 0 | if (ret) |
675 | 0 | return ret; |
676 | | |
677 | 0 | fmt.update_avfmt = !probe; |
678 | 0 | v4l2_save_to_context(ctx, &fmt); |
679 | | |
680 | | /* format has been tried already */ |
681 | 0 | return ret; |
682 | 0 | } |
683 | | |
684 | 0 | ret = v4l2_get_coded_format(ctx, &fmt.v4l2_fmt); |
685 | 0 | if (ret) |
686 | 0 | return ret; |
687 | | |
688 | 0 | fmt.update_v4l2 = 1; |
689 | 0 | v4l2_save_to_context(ctx, &fmt); |
690 | |
691 | 0 | return ioctl(ctx_to_m2mctx(ctx)->fd, VIDIOC_TRY_FMT, &ctx->format); |
692 | 0 | } |
693 | | |
694 | | int ff_v4l2_context_set_format(V4L2Context* ctx) |
695 | 0 | { |
696 | 0 | return ioctl(ctx_to_m2mctx(ctx)->fd, VIDIOC_S_FMT, &ctx->format); |
697 | 0 | } |
698 | | |
699 | | void ff_v4l2_context_release(V4L2Context* ctx) |
700 | 4.70k | { |
701 | 4.70k | int ret; |
702 | | |
703 | 4.70k | if (!ctx->buffers) |
704 | 4.70k | return; |
705 | | |
706 | 0 | ret = v4l2_release_buffers(ctx); |
707 | 0 | if (ret) |
708 | 0 | av_log(logger(ctx), AV_LOG_WARNING, "V4L2 failed to unmap the %s buffers\n", ctx->name); |
709 | |
710 | 0 | av_freep(&ctx->buffers); |
711 | 0 | } |
712 | | |
713 | | int ff_v4l2_context_init(V4L2Context* ctx) |
714 | 0 | { |
715 | 0 | V4L2m2mContext *s = ctx_to_m2mctx(ctx); |
716 | 0 | struct v4l2_requestbuffers req; |
717 | 0 | int ret, i; |
718 | |
719 | 0 | if (!v4l2_type_supported(ctx)) { |
720 | 0 | av_log(logger(ctx), AV_LOG_ERROR, "type %i not supported\n", ctx->type); |
721 | 0 | return AVERROR_PATCHWELCOME; |
722 | 0 | } |
723 | | |
724 | 0 | ret = ioctl(s->fd, VIDIOC_G_FMT, &ctx->format); |
725 | 0 | if (ret) |
726 | 0 | av_log(logger(ctx), AV_LOG_ERROR, "%s VIDIOC_G_FMT failed\n", ctx->name); |
727 | |
728 | 0 | memset(&req, 0, sizeof(req)); |
729 | 0 | req.count = ctx->num_buffers; |
730 | 0 | req.memory = V4L2_MEMORY_MMAP; |
731 | 0 | req.type = ctx->type; |
732 | 0 | ret = ioctl(s->fd, VIDIOC_REQBUFS, &req); |
733 | 0 | if (ret < 0) { |
734 | 0 | av_log(logger(ctx), AV_LOG_ERROR, "%s VIDIOC_REQBUFS failed: %s\n", ctx->name, strerror(errno)); |
735 | 0 | return AVERROR(errno); |
736 | 0 | } |
737 | | |
738 | 0 | ctx->num_buffers = req.count; |
739 | 0 | ctx->buffers = av_mallocz(ctx->num_buffers * sizeof(V4L2Buffer)); |
740 | 0 | if (!ctx->buffers) { |
741 | 0 | av_log(logger(ctx), AV_LOG_ERROR, "%s malloc enomem\n", ctx->name); |
742 | 0 | return AVERROR(ENOMEM); |
743 | 0 | } |
744 | | |
745 | 0 | for (i = 0; i < req.count; i++) { |
746 | 0 | ctx->buffers[i].context = ctx; |
747 | 0 | ret = ff_v4l2_buffer_initialize(&ctx->buffers[i], i); |
748 | 0 | if (ret < 0) { |
749 | 0 | av_log(logger(ctx), AV_LOG_ERROR, "%s buffer[%d] initialization (%s)\n", ctx->name, i, av_err2str(ret)); |
750 | 0 | goto error; |
751 | 0 | } |
752 | 0 | } |
753 | | |
754 | 0 | av_log(logger(ctx), AV_LOG_DEBUG, "%s: %s %02d buffers initialized: %04ux%04u, sizeimage %08u, bytesperline %08u\n", ctx->name, |
755 | 0 | V4L2_TYPE_IS_MULTIPLANAR(ctx->type) ? av_fourcc2str(ctx->format.fmt.pix_mp.pixelformat) : av_fourcc2str(ctx->format.fmt.pix.pixelformat), |
756 | 0 | req.count, |
757 | 0 | v4l2_get_width(&ctx->format), |
758 | 0 | v4l2_get_height(&ctx->format), |
759 | 0 | V4L2_TYPE_IS_MULTIPLANAR(ctx->type) ? ctx->format.fmt.pix_mp.plane_fmt[0].sizeimage : ctx->format.fmt.pix.sizeimage, |
760 | 0 | V4L2_TYPE_IS_MULTIPLANAR(ctx->type) ? ctx->format.fmt.pix_mp.plane_fmt[0].bytesperline : ctx->format.fmt.pix.bytesperline); |
761 | |
762 | 0 | return 0; |
763 | | |
764 | 0 | error: |
765 | 0 | v4l2_release_buffers(ctx); |
766 | |
767 | 0 | av_freep(&ctx->buffers); |
768 | |
769 | 0 | return ret; |
770 | 0 | } |
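
Editor's note: the ff_v4l2_context_* helpers listed above are normally driven by FFmpeg's V4L2 mem2mem wrappers rather than called directly. The sketch below is illustrative only and not part of v4l2_context.c; it shows roughly how a decoder-side caller might combine the enqueue/dequeue and stream-status helpers, assuming an already opened and probed V4L2m2mContext *s (formats negotiated, both contexts set up with ff_v4l2_context_init). The function name, the include choices, and the trimmed error handling are assumptions made for the example.

#include "v4l2_context.h"
#include "v4l2_m2m.h"

/* Sketch (not upstream code): feed one compressed packet to the output
 * (input) queue, make sure both queues are streaming, then block for one
 * decoded frame from the capture queue. The real decoder additionally
 * handles draining, EAGAIN bookkeeping and reinitialization. */
static int sketch_decode_step(V4L2m2mContext *s, const AVPacket *pkt, AVFrame *frame)
{
    int ret;

    /* queue the packet on the output (compressed input) context;
     * AVERROR(EAGAIN) means no free buffer was available yet */
    ret = ff_v4l2_context_enqueue_packet(&s->output, pkt);
    if (ret < 0 && ret != AVERROR(EAGAIN))
        return ret;

    /* both queues must be streaming before frames can be dequeued */
    if (!s->output.streamon) {
        ret = ff_v4l2_context_set_status(&s->output, VIDIOC_STREAMON);
        if (ret < 0)
            return ret;
    }
    if (!s->capture.streamon) {
        ret = ff_v4l2_context_set_status(&s->capture, VIDIOC_STREAMON);
        if (ret < 0)
            return ret;
    }

    /* timeout = -1 blocks until a decoded frame is ready or the driver
     * asks for more input (see ff_v4l2_context_dequeue_frame above) */
    return ff_v4l2_context_dequeue_frame(&s->capture, frame, -1);
}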