/* libavutil/hwcontext.c */
1 | | /* |
2 | | * This file is part of FFmpeg. |
3 | | * |
4 | | * FFmpeg is free software; you can redistribute it and/or |
5 | | * modify it under the terms of the GNU Lesser General Public |
6 | | * License as published by the Free Software Foundation; either |
7 | | * version 2.1 of the License, or (at your option) any later version. |
8 | | * |
9 | | * FFmpeg is distributed in the hope that it will be useful, |
10 | | * but WITHOUT ANY WARRANTY; without even the implied warranty of |
11 | | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU |
12 | | * Lesser General Public License for more details. |
13 | | * |
14 | | * You should have received a copy of the GNU Lesser General Public |
15 | | * License along with FFmpeg; if not, write to the Free Software |
16 | | * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA |
17 | | */ |
18 | | |
19 | | #include "config.h" |
20 | | |
21 | | #include "avassert.h" |
22 | | #include "buffer.h" |
23 | | #include "common.h" |
24 | | #include "hwcontext.h" |
25 | | #include "hwcontext_internal.h" |
26 | | #include "imgutils.h" |
27 | | #include "log.h" |
28 | | #include "mem.h" |
29 | | #include "pixdesc.h" |
30 | | #include "pixfmt.h" |
31 | | |
/*
 * Registry of hardware context backends compiled into this build.
 * NULL-terminated; each entry is guarded by its configure-time flag,
 * so only enabled backends are present at runtime.
 */
static const HWContextType * const hw_table[] = {
#if CONFIG_CUDA
    &ff_hwcontext_type_cuda,
#endif
#if CONFIG_D3D11VA
    &ff_hwcontext_type_d3d11va,
#endif
#if CONFIG_D3D12VA
    &ff_hwcontext_type_d3d12va,
#endif
#if CONFIG_LIBDRM
    &ff_hwcontext_type_drm,
#endif
#if CONFIG_DXVA2
    &ff_hwcontext_type_dxva2,
#endif
#if CONFIG_OPENCL
    &ff_hwcontext_type_opencl,
#endif
#if CONFIG_QSV
    &ff_hwcontext_type_qsv,
#endif
#if CONFIG_VAAPI
    &ff_hwcontext_type_vaapi,
#endif
#if CONFIG_VDPAU
    &ff_hwcontext_type_vdpau,
#endif
#if CONFIG_VIDEOTOOLBOX
    &ff_hwcontext_type_videotoolbox,
#endif
#if CONFIG_MEDIACODEC
    &ff_hwcontext_type_mediacodec,
#endif
#if CONFIG_VULKAN
    &ff_hwcontext_type_vulkan,
#endif
#if CONFIG_AMF
    &ff_hwcontext_type_amf,
#endif
#if CONFIG_OHCODEC
    &ff_hwcontext_type_oh,
#endif
    NULL,
};
77 | | |
/* Canonical string name for each device type, indexed by the
 * AVHWDeviceType enum value.  Used by the name<->type lookup
 * functions av_hwdevice_find_type_by_name() / av_hwdevice_get_type_name(). */
static const char *const hw_type_names[] = {
    [AV_HWDEVICE_TYPE_CUDA] = "cuda",
    [AV_HWDEVICE_TYPE_DRM] = "drm",
    [AV_HWDEVICE_TYPE_DXVA2] = "dxva2",
    [AV_HWDEVICE_TYPE_D3D11VA] = "d3d11va",
    [AV_HWDEVICE_TYPE_D3D12VA] = "d3d12va",
    [AV_HWDEVICE_TYPE_OPENCL] = "opencl",
    [AV_HWDEVICE_TYPE_QSV] = "qsv",
    [AV_HWDEVICE_TYPE_VAAPI] = "vaapi",
    [AV_HWDEVICE_TYPE_VDPAU] = "vdpau",
    [AV_HWDEVICE_TYPE_VIDEOTOOLBOX] = "videotoolbox",
    [AV_HWDEVICE_TYPE_MEDIACODEC] = "mediacodec",
    [AV_HWDEVICE_TYPE_VULKAN] = "vulkan",
    [AV_HWDEVICE_TYPE_AMF] = "amf",
    [AV_HWDEVICE_TYPE_OHCODEC] = "ohcodec",
};
94 | | |
/**
 * Internal (private) wrapper around the public AVHWDeviceContext.
 */
typedef struct FFHWDeviceContext {
    /**
     * The public AVHWDeviceContext. See hwcontext.h for it.
     */
    AVHWDeviceContext p;

    /* Backend implementation for this device type (entry from hw_table,
     * set by av_hwdevice_ctx_alloc()). */
    const HWContextType *hw_type;

    /**
     * For a derived device, a reference to the original device
     * context it was derived from.
     */
    AVBufferRef *source_device;
} FFHWDeviceContext;
109 | | |
110 | | enum AVHWDeviceType av_hwdevice_find_type_by_name(const char *name) |
111 | 0 | { |
112 | 0 | int type; |
113 | 0 | for (type = 0; type < FF_ARRAY_ELEMS(hw_type_names); type++) { |
114 | 0 | if (hw_type_names[type] && !strcmp(hw_type_names[type], name)) |
115 | 0 | return type; |
116 | 0 | } |
117 | 0 | return AV_HWDEVICE_TYPE_NONE; |
118 | 0 | } |
119 | | |
120 | | const char *av_hwdevice_get_type_name(enum AVHWDeviceType type) |
121 | 0 | { |
122 | 0 | if (type > AV_HWDEVICE_TYPE_NONE && |
123 | 0 | type < FF_ARRAY_ELEMS(hw_type_names)) |
124 | 0 | return hw_type_names[type]; |
125 | 0 | else |
126 | 0 | return NULL; |
127 | 0 | } |
128 | | |
129 | | enum AVHWDeviceType av_hwdevice_iterate_types(enum AVHWDeviceType prev) |
130 | 0 | { |
131 | 0 | enum AVHWDeviceType next; |
132 | 0 | int i, set = 0; |
133 | 0 | for (i = 0; hw_table[i]; i++) { |
134 | 0 | if (prev != AV_HWDEVICE_TYPE_NONE && hw_table[i]->type <= prev) |
135 | 0 | continue; |
136 | 0 | if (!set || hw_table[i]->type < next) { |
137 | 0 | next = hw_table[i]->type; |
138 | 0 | set = 1; |
139 | 0 | } |
140 | 0 | } |
141 | 0 | return set ? next : AV_HWDEVICE_TYPE_NONE; |
142 | 0 | } |
143 | | |
/* AVClass item_name callback: reports the backend name of the device. */
static const char *hwdevice_ctx_get_name(void *ptr)
{
    FFHWDeviceContext *ctx = ptr;
    return ctx->hw_type->name;
}
149 | | |
/* AVClass used for logging from AVHWDeviceContext instances. */
static const AVClass hwdevice_ctx_class = {
    .class_name = "AVHWDeviceContext",
    .item_name = hwdevice_ctx_get_name,
    .category = AV_CLASS_CATEGORY_HWDEVICE,
    .version = LIBAVUTIL_VERSION_INT,
};
156 | | |
/* AVBuffer free callback for a device context: uninits the backend,
 * runs the user's free() callback, drops the derived-from reference,
 * then frees the context memory.  The order below is deliberate. */
static void hwdevice_ctx_free(void *opaque, uint8_t *data)
{
    FFHWDeviceContext *ctxi = (FFHWDeviceContext*)data;
    AVHWDeviceContext *ctx = &ctxi->p;

    /* uninit might still want access the hw context and the user
     * free() callback might destroy it, so uninit has to be called first */
    if (ctxi->hw_type->device_uninit)
        ctxi->hw_type->device_uninit(ctx);

    if (ctx->free)
        ctx->free(ctx);

    av_buffer_unref(&ctxi->source_device);

    /* ctx is the first member of ctxi, so this frees ctxi as well */
    av_freep(&ctx->hwctx);
    av_freep(&ctx);
}
175 | | |
/**
 * Allocate an AVHWDeviceContext for the given device type.
 *
 * Looks the type up in hw_table, so it only succeeds for backends that
 * were compiled in.  The returned buffer owns both the context struct
 * and the backend-specific hwctx; both are released via
 * hwdevice_ctx_free() when the last reference goes away.
 *
 * @return a new reference, or NULL on error (unknown type or OOM).
 */
AVBufferRef *av_hwdevice_ctx_alloc(enum AVHWDeviceType type)
{
    FFHWDeviceContext *ctxi;
    AVHWDeviceContext *ctx;
    AVBufferRef *buf;
    const HWContextType *hw_type = NULL;
    int i;

    /* find the backend implementation for this device type */
    for (i = 0; hw_table[i]; i++) {
        if (hw_table[i]->type == type) {
            hw_type = hw_table[i];
            break;
        }
    }
    if (!hw_type)
        return NULL;

    ctxi = av_mallocz(sizeof(*ctxi));
    if (!ctxi)
        return NULL;
    ctx = &ctxi->p;

    /* allocate the backend-specific public hwctx, if the backend has one */
    if (hw_type->device_hwctx_size) {
        ctx->hwctx = av_mallocz(hw_type->device_hwctx_size);
        if (!ctx->hwctx)
            goto fail;
    }

    buf = av_buffer_create((uint8_t*)ctx, sizeof(*ctx),
                           hwdevice_ctx_free, NULL,
                           AV_BUFFER_FLAG_READONLY);
    if (!buf)
        goto fail;

    ctx->type = type;
    ctx->av_class = &hwdevice_ctx_class;

    ctxi->hw_type = hw_type;

    return buf;

fail:
    av_freep(&ctx->hwctx);
    av_freep(&ctx);
    return NULL;
}
222 | | |
223 | | int av_hwdevice_ctx_init(AVBufferRef *ref) |
224 | 0 | { |
225 | 0 | FFHWDeviceContext *ctxi = (FFHWDeviceContext*)ref->data; |
226 | 0 | AVHWDeviceContext *ctx = &ctxi->p; |
227 | 0 | int ret = 0; |
228 | |
|
229 | 0 | if (ctxi->hw_type->device_init) |
230 | 0 | ret = ctxi->hw_type->device_init(ctx); |
231 | |
|
232 | 0 | return ret; |
233 | 0 | } |
234 | | |
/* AVClass used for logging from AVHWFramesContext instances. */
static const AVClass hwframe_ctx_class = {
    .class_name = "AVHWFramesContext",
    .item_name = av_default_item_name,
    .version = LIBAVUTIL_VERSION_INT,
};
240 | | |
/* AVBuffer free callback for a frames context: releases the internal
 * pool, uninits the backend, runs the user's free() callback, then
 * drops the source-frames and device references before freeing the
 * context itself. */
static void hwframe_ctx_free(void *opaque, uint8_t *data)
{
    FFHWFramesContext *ctxi = (FFHWFramesContext*)data;
    AVHWFramesContext *ctx = &ctxi->p;

    if (ctxi->pool_internal)
        av_buffer_pool_uninit(&ctxi->pool_internal);

    if (ctxi->hw_type->frames_uninit)
        ctxi->hw_type->frames_uninit(ctx);

    if (ctx->free)
        ctx->free(ctx);

    av_buffer_unref(&ctxi->source_frames);

    av_buffer_unref(&ctx->device_ref);

    /* ctx is the first member of ctxi, so this frees ctxi as well */
    av_freep(&ctx->hwctx);
    av_freep(&ctx);
}
262 | | |
/**
 * Allocate an AVHWFramesContext tied to the given device.
 *
 * The returned context holds its own reference to the device; it is not
 * usable until configured and passed to av_hwframe_ctx_init().  Freed
 * via hwframe_ctx_free() when the last reference goes away.
 *
 * @return a new reference, or NULL on error.
 */
AVBufferRef *av_hwframe_ctx_alloc(AVBufferRef *device_ref_in)
{
    FFHWDeviceContext *device_ctx = (FFHWDeviceContext*)device_ref_in->data;
    const HWContextType *hw_type = device_ctx->hw_type;
    FFHWFramesContext *ctxi;
    AVHWFramesContext *ctx;
    AVBufferRef *buf, *device_ref = NULL;

    ctxi = av_mallocz(sizeof(*ctxi));
    if (!ctxi)
        return NULL;
    ctx = &ctxi->p;

    /* allocate the backend-specific public hwctx, if the backend has one */
    if (hw_type->frames_hwctx_size) {
        ctx->hwctx = av_mallocz(hw_type->frames_hwctx_size);
        if (!ctx->hwctx)
            goto fail;
    }

    device_ref = av_buffer_ref(device_ref_in);
    if (!device_ref)
        goto fail;

    buf = av_buffer_create((uint8_t*)ctx, sizeof(*ctx),
                           hwframe_ctx_free, NULL,
                           AV_BUFFER_FLAG_READONLY);
    if (!buf)
        goto fail;

    ctx->av_class = &hwframe_ctx_class;
    ctx->device_ref = device_ref;
    ctx->device_ctx = &device_ctx->p;
    ctx->format = AV_PIX_FMT_NONE;
    ctx->sw_format = AV_PIX_FMT_NONE;

    ctxi->hw_type = hw_type;

    return buf;

fail:
    av_buffer_unref(&device_ref);
    av_freep(&ctx->hwctx);
    av_freep(&ctx);
    return NULL;
}
308 | | |
309 | | static int hwframe_pool_prealloc(AVBufferRef *ref) |
310 | 0 | { |
311 | 0 | AVHWFramesContext *ctx = (AVHWFramesContext*)ref->data; |
312 | 0 | AVFrame **frames; |
313 | 0 | int i, ret = 0; |
314 | |
|
315 | 0 | frames = av_calloc(ctx->initial_pool_size, sizeof(*frames)); |
316 | 0 | if (!frames) |
317 | 0 | return AVERROR(ENOMEM); |
318 | | |
319 | 0 | for (i = 0; i < ctx->initial_pool_size; i++) { |
320 | 0 | frames[i] = av_frame_alloc(); |
321 | 0 | if (!frames[i]) |
322 | 0 | goto fail; |
323 | | |
324 | 0 | ret = av_hwframe_get_buffer(ref, frames[i], 0); |
325 | 0 | if (ret < 0) |
326 | 0 | goto fail; |
327 | 0 | } |
328 | | |
329 | 0 | fail: |
330 | 0 | for (i = 0; i < ctx->initial_pool_size; i++) |
331 | 0 | av_frame_free(&frames[i]); |
332 | 0 | av_freep(&frames); |
333 | |
|
334 | 0 | return ret; |
335 | 0 | } |
336 | | |
/**
 * Finalize a frames context allocated with av_hwframe_ctx_alloc().
 *
 * Validates the configured format and dimensions, runs the backend's
 * frames_init() hook, installs the backend's internal buffer pool if
 * the caller did not supply one, and optionally preallocates the pool.
 *
 * @return 0 on success, a negative AVERROR code on failure.
 */
int av_hwframe_ctx_init(AVBufferRef *ref)
{
    FFHWFramesContext *ctxi = (FFHWFramesContext*)ref->data;
    AVHWFramesContext *ctx = &ctxi->p;
    const enum AVPixelFormat *pix_fmt;
    int ret;

    if (ctxi->source_frames) {
        /* A derived frame context is already initialised. */
        return 0;
    }

    /* validate the pixel format */
    for (pix_fmt = ctxi->hw_type->pix_fmts; *pix_fmt != AV_PIX_FMT_NONE; pix_fmt++) {
        if (*pix_fmt == ctx->format)
            break;
    }
    if (*pix_fmt == AV_PIX_FMT_NONE) {
        av_log(ctx, AV_LOG_ERROR,
               "The hardware pixel format '%s' is not supported by the device type '%s'\n",
               av_get_pix_fmt_name(ctx->format), ctxi->hw_type->name);
        return AVERROR(ENOSYS);
    }

    /* validate the dimensions */
    ret = av_image_check_size(ctx->width, ctx->height, 0, ctx);
    if (ret < 0)
        return ret;

    /* format-specific init */
    if (ctxi->hw_type->frames_init) {
        ret = ctxi->hw_type->frames_init(ctx);
        if (ret < 0)
            return ret;
    }

    /* fall back to the backend's pool when the user did not provide one */
    if (ctxi->pool_internal && !ctx->pool)
        ctx->pool = ctxi->pool_internal;

    /* preallocate the frames in the pool, if requested */
    if (ctx->initial_pool_size > 0) {
        ret = hwframe_pool_prealloc(ref);
        if (ret < 0)
            return ret;
    }

    return 0;
}
385 | | |
386 | | int av_hwframe_transfer_get_formats(AVBufferRef *hwframe_ref, |
387 | | enum AVHWFrameTransferDirection dir, |
388 | | enum AVPixelFormat **formats, int flags) |
389 | 0 | { |
390 | 0 | FFHWFramesContext *ctxi = (FFHWFramesContext*)hwframe_ref->data; |
391 | |
|
392 | 0 | if (!ctxi->hw_type->transfer_get_formats) |
393 | 0 | return AVERROR(ENOSYS); |
394 | | |
395 | 0 | return ctxi->hw_type->transfer_get_formats(&ctxi->p, dir, formats); |
396 | 0 | } |
397 | | |
/* Helper for av_hwframe_transfer_data(): allocate a software frame of a
 * suitable format and size for dst, download src into it, then move the
 * result into dst. */
static int transfer_data_alloc(AVFrame *dst, const AVFrame *src, int flags)
{
    AVHWFramesContext *ctx;
    AVFrame *frame_tmp;
    int ret = 0;

    if (!src->hw_frames_ctx)
        return AVERROR(EINVAL);
    ctx = (AVHWFramesContext*)src->hw_frames_ctx->data;

    frame_tmp = av_frame_alloc();
    if (!frame_tmp)
        return AVERROR(ENOMEM);

    /* if the format is set, use that
     * otherwise pick the first supported one */
    if (dst->format >= 0) {
        frame_tmp->format = dst->format;
    } else {
        enum AVPixelFormat *formats;

        ret = av_hwframe_transfer_get_formats(src->hw_frames_ctx,
                                              AV_HWFRAME_TRANSFER_DIRECTION_FROM,
                                              &formats, 0);
        if (ret < 0)
            goto fail;
        frame_tmp->format = formats[0];
        av_freep(&formats);
    }
    /* allocate at the frames-context dimensions (presumably >= the
     * visible frame size, e.g. due to alignment), cropped back below */
    frame_tmp->width = ctx->width;
    frame_tmp->height = ctx->height;

    ret = av_frame_get_buffer(frame_tmp, 0);
    if (ret < 0)
        goto fail;

    ret = av_hwframe_transfer_data(frame_tmp, src, flags);
    if (ret < 0)
        goto fail;

    /* report the source's visible dimensions on the output frame */
    frame_tmp->width = src->width;
    frame_tmp->height = src->height;

    av_frame_move_ref(dst, frame_tmp);

fail:
    av_frame_free(&frame_tmp);
    return ret;
}
447 | | |
448 | | int av_hwframe_transfer_data(AVFrame *dst, const AVFrame *src, int flags) |
449 | 0 | { |
450 | 0 | int ret; |
451 | |
|
452 | 0 | if (!dst->buf[0]) |
453 | 0 | return transfer_data_alloc(dst, src, flags); |
454 | | |
455 | | /* |
456 | | * Hardware -> Hardware Transfer. |
457 | | * Unlike Software -> Hardware or Hardware -> Software, the transfer |
458 | | * function could be provided by either the src or dst, depending on |
459 | | * the specific combination of hardware. |
460 | | */ |
461 | 0 | if (src->hw_frames_ctx && dst->hw_frames_ctx) { |
462 | 0 | FFHWFramesContext *src_ctx = |
463 | 0 | (FFHWFramesContext*)src->hw_frames_ctx->data; |
464 | 0 | FFHWFramesContext *dst_ctx = |
465 | 0 | (FFHWFramesContext*)dst->hw_frames_ctx->data; |
466 | |
|
467 | 0 | if (src_ctx->source_frames) { |
468 | 0 | av_log(src_ctx, AV_LOG_ERROR, |
469 | 0 | "A device with a derived frame context cannot be used as " |
470 | 0 | "the source of a HW -> HW transfer."); |
471 | 0 | return AVERROR(ENOSYS); |
472 | 0 | } |
473 | | |
474 | 0 | if (dst_ctx->source_frames) { |
475 | 0 | av_log(src_ctx, AV_LOG_ERROR, |
476 | 0 | "A device with a derived frame context cannot be used as " |
477 | 0 | "the destination of a HW -> HW transfer."); |
478 | 0 | return AVERROR(ENOSYS); |
479 | 0 | } |
480 | | |
481 | 0 | ret = src_ctx->hw_type->transfer_data_from(&src_ctx->p, dst, src); |
482 | 0 | if (ret == AVERROR(ENOSYS)) |
483 | 0 | ret = dst_ctx->hw_type->transfer_data_to(&dst_ctx->p, dst, src); |
484 | 0 | if (ret < 0) |
485 | 0 | return ret; |
486 | 0 | } else { |
487 | 0 | if (src->hw_frames_ctx) { |
488 | 0 | FFHWFramesContext *ctx = (FFHWFramesContext*)src->hw_frames_ctx->data; |
489 | |
|
490 | 0 | ret = ctx->hw_type->transfer_data_from(&ctx->p, dst, src); |
491 | 0 | if (ret < 0) |
492 | 0 | return ret; |
493 | 0 | } else if (dst->hw_frames_ctx) { |
494 | 0 | FFHWFramesContext *ctx = (FFHWFramesContext*)dst->hw_frames_ctx->data; |
495 | |
|
496 | 0 | ret = ctx->hw_type->transfer_data_to(&ctx->p, dst, src); |
497 | 0 | if (ret < 0) |
498 | 0 | return ret; |
499 | 0 | } else { |
500 | 0 | return AVERROR(ENOSYS); |
501 | 0 | } |
502 | 0 | } |
503 | 0 | return 0; |
504 | 0 | } |
505 | | |
/**
 * Allocate a new frame attached to the given hardware frames context.
 *
 * For a derived frames context the frame is allocated in the source
 * context and immediately mapped into this one; otherwise the backend's
 * frames_get_buffer() allocates from the context's pool.
 *
 * @return 0 on success, a negative AVERROR code on failure.
 */
int av_hwframe_get_buffer(AVBufferRef *hwframe_ref, AVFrame *frame, int flags)
{
    FFHWFramesContext *ctxi = (FFHWFramesContext*)hwframe_ref->data;
    AVHWFramesContext *ctx = &ctxi->p;
    int ret;

    if (ctxi->source_frames) {
        // This is a derived frame context, so we allocate in the source
        // and map the frame immediately.
        AVFrame *src_frame;

        frame->format = ctx->format;
        frame->hw_frames_ctx = av_buffer_ref(hwframe_ref);
        if (!frame->hw_frames_ctx)
            return AVERROR(ENOMEM);

        src_frame = av_frame_alloc();
        if (!src_frame)
            return AVERROR(ENOMEM);

        ret = av_hwframe_get_buffer(ctxi->source_frames,
                                    src_frame, 0);
        if (ret < 0) {
            av_frame_free(&src_frame);
            return ret;
        }

        /* map with the flags recorded at derivation time */
        ret = av_hwframe_map(frame, src_frame,
                             ctxi->source_allocation_map_flags);
        if (ret) {
            av_log(ctx, AV_LOG_ERROR, "Failed to map frame into derived "
                   "frame context: %d.\n", ret);
            av_frame_free(&src_frame);
            return ret;
        }

        // Free the source frame immediately - the mapped frame still
        // contains a reference to it.
        av_frame_free(&src_frame);

        return 0;
    }

    if (!ctxi->hw_type->frames_get_buffer)
        return AVERROR(ENOSYS);

    if (!ctx->pool)
        return AVERROR(EINVAL);

    frame->hw_frames_ctx = av_buffer_ref(hwframe_ref);
    if (!frame->hw_frames_ctx)
        return AVERROR(ENOMEM);

    ret = ctxi->hw_type->frames_get_buffer(ctx, frame);
    if (ret < 0) {
        av_buffer_unref(&frame->hw_frames_ctx);
        return ret;
    }

    frame->extended_data = frame->data;

    return 0;
}
569 | | |
570 | | void *av_hwdevice_hwconfig_alloc(AVBufferRef *ref) |
571 | 0 | { |
572 | 0 | FFHWDeviceContext *ctx = (FFHWDeviceContext*)ref->data; |
573 | 0 | const HWContextType *hw_type = ctx->hw_type; |
574 | |
|
575 | 0 | if (hw_type->device_hwconfig_size == 0) |
576 | 0 | return NULL; |
577 | | |
578 | 0 | return av_mallocz(hw_type->device_hwconfig_size); |
579 | 0 | } |
580 | | |
/**
 * Query the frame constraints (valid formats and dimensions) of a device.
 *
 * @param hwconfig backend-specific configuration
 *                 (see av_hwdevice_hwconfig_alloc()), may be NULL
 * @return newly allocated constraints on success (free with
 *         av_hwframe_constraints_free()), NULL on failure or when the
 *         backend cannot report constraints
 */
AVHWFramesConstraints *av_hwdevice_get_hwframe_constraints(AVBufferRef *ref,
                                                           const void *hwconfig)
{
    FFHWDeviceContext *ctx = (FFHWDeviceContext*)ref->data;
    const HWContextType *hw_type = ctx->hw_type;
    AVHWFramesConstraints *constraints;

    if (!hw_type->frames_get_constraints)
        return NULL;

    constraints = av_mallocz(sizeof(*constraints));
    if (!constraints)
        return NULL;

    /* defaults: no minimum, unbounded maximum; the backend narrows these */
    constraints->min_width = constraints->min_height = 0;
    constraints->max_width = constraints->max_height = INT_MAX;

    if (hw_type->frames_get_constraints(&ctx->p, hwconfig, constraints) >= 0) {
        return constraints;
    } else {
        av_hwframe_constraints_free(&constraints);
        return NULL;
    }
}
605 | | |
606 | | void av_hwframe_constraints_free(AVHWFramesConstraints **constraints) |
607 | 0 | { |
608 | 0 | if (*constraints) { |
609 | 0 | av_freep(&(*constraints)->valid_hw_formats); |
610 | 0 | av_freep(&(*constraints)->valid_sw_formats); |
611 | 0 | } |
612 | 0 | av_freep(constraints); |
613 | 0 | } |
614 | | |
/**
 * Open a device of the given type and return a fully initialised device
 * context for it.  On failure *pdevice_ref is set to NULL.
 *
 * @param device backend-specific device selector string, may be NULL
 * @param opts   backend-specific options, may be NULL
 * @return 0 on success, a negative AVERROR code on failure.
 */
int av_hwdevice_ctx_create(AVBufferRef **pdevice_ref, enum AVHWDeviceType type,
                           const char *device, AVDictionary *opts, int flags)
{
    AVBufferRef *device_ref = NULL;
    FFHWDeviceContext *device_ctx;
    int ret = 0;

    device_ref = av_hwdevice_ctx_alloc(type);
    if (!device_ref) {
        ret = AVERROR(ENOMEM);
        goto fail;
    }
    device_ctx = (FFHWDeviceContext*)device_ref->data;

    /* the backend must support creating devices from scratch */
    if (!device_ctx->hw_type->device_create) {
        ret = AVERROR(ENOSYS);
        goto fail;
    }

    ret = device_ctx->hw_type->device_create(&device_ctx->p, device,
                                             opts, flags);
    if (ret < 0)
        goto fail;

    ret = av_hwdevice_ctx_init(device_ref);
    if (ret < 0)
        goto fail;

    *pdevice_ref = device_ref;
    return 0;
fail:
    av_buffer_unref(&device_ref);
    *pdevice_ref = NULL;
    return ret;
}
650 | | |
/**
 * Derive a device context of the given type from an existing device.
 *
 * If a device of the requested type already exists in src_ref's chain
 * of source devices, a new reference to that device is returned instead
 * of creating another one.  On failure *dst_ref_ptr is set to NULL.
 *
 * @return 0 on success; AVERROR(ENOSYS) when no device in the chain can
 *         derive to the requested type; another negative AVERROR on error.
 */
int av_hwdevice_ctx_create_derived_opts(AVBufferRef **dst_ref_ptr,
                                        enum AVHWDeviceType type,
                                        AVBufferRef *src_ref,
                                        AVDictionary *options, int flags)
{
    AVBufferRef *dst_ref = NULL, *tmp_ref;
    FFHWDeviceContext *dst_ctx;
    int ret = 0;

    /* first pass: reuse an existing device of the requested type if one
     * is already present in the source-device chain */
    tmp_ref = src_ref;
    while (tmp_ref) {
        FFHWDeviceContext *tmp_ctx = (FFHWDeviceContext*)tmp_ref->data;
        if (tmp_ctx->p.type == type) {
            dst_ref = av_buffer_ref(tmp_ref);
            if (!dst_ref) {
                ret = AVERROR(ENOMEM);
                goto fail;
            }
            goto done;
        }
        tmp_ref = tmp_ctx->source_device;
    }

    dst_ref = av_hwdevice_ctx_alloc(type);
    if (!dst_ref) {
        ret = AVERROR(ENOMEM);
        goto fail;
    }
    dst_ctx = (FFHWDeviceContext*)dst_ref->data;

    /* second pass: try to derive from each device in the chain,
     * starting with the nearest */
    tmp_ref = src_ref;
    while (tmp_ref) {
        FFHWDeviceContext *tmp_ctx = (FFHWDeviceContext*)tmp_ref->data;
        if (dst_ctx->hw_type->device_derive) {
            ret = dst_ctx->hw_type->device_derive(&dst_ctx->p,
                                                  &tmp_ctx->p,
                                                  options, flags);
            if (ret == 0) {
                dst_ctx->source_device = av_buffer_ref(src_ref);
                if (!dst_ctx->source_device) {
                    ret = AVERROR(ENOMEM);
                    goto fail;
                }
                ret = av_hwdevice_ctx_init(dst_ref);
                if (ret < 0)
                    goto fail;
                goto done;
            }
            /* ENOSYS means "cannot derive from this particular device";
             * keep walking up the chain.  Any other error is fatal. */
            if (ret != AVERROR(ENOSYS))
                goto fail;
        }
        tmp_ref = tmp_ctx->source_device;
    }

    ret = AVERROR(ENOSYS);
    goto fail;

done:
    *dst_ref_ptr = dst_ref;
    return 0;

fail:
    av_buffer_unref(&dst_ref);
    *dst_ref_ptr = NULL;
    return ret;
}
717 | | |
/* Thin wrapper around av_hwdevice_ctx_create_derived_opts() for callers
 * that pass no options dictionary. */
int av_hwdevice_ctx_create_derived(AVBufferRef **dst_ref_ptr,
                                   enum AVHWDeviceType type,
                                   AVBufferRef *src_ref, int flags)
{
    return av_hwdevice_ctx_create_derived_opts(dst_ref_ptr, type, src_ref,
                                               NULL, flags);
}
725 | | |
/* AVBuffer free callback for a mapped frame: invokes the backend's
 * unmap callback, then drops the references held by the map descriptor
 * and frees the descriptor itself.  'opaque' is the frames context
 * passed to av_buffer_create() in ff_hwframe_map_create(). */
static void ff_hwframe_unmap(void *opaque, uint8_t *data)
{
    HWMapDescriptor *hwmap = (HWMapDescriptor*)data;
    AVHWFramesContext *ctx = opaque;

    if (hwmap->unmap)
        hwmap->unmap(ctx, hwmap);

    av_frame_free(&hwmap->source);

    av_buffer_unref(&hwmap->hw_frames_ctx);

    av_free(hwmap);
}
740 | | |
/**
 * Set up dst as a mapping of src within the given frames context.
 *
 * Allocates a HWMapDescriptor holding a reference to src, a reference
 * to the frames context and the backend unmap callback, and installs it
 * as dst->buf[0] so the mapping is undone (via ff_hwframe_unmap()) when
 * the last reference to dst disappears.
 *
 * @param unmap backend callback invoked on unmap, may be NULL
 * @param priv  backend-private data stored in the descriptor
 * @return 0 on success, a negative AVERROR code on failure.
 */
int ff_hwframe_map_create(AVBufferRef *hwframe_ref,
                          AVFrame *dst, const AVFrame *src,
                          void (*unmap)(AVHWFramesContext *ctx,
                                        HWMapDescriptor *hwmap),
                          void *priv)
{
    AVHWFramesContext *ctx = (AVHWFramesContext*)hwframe_ref->data;
    HWMapDescriptor *hwmap;
    int ret;

    hwmap = av_mallocz(sizeof(*hwmap));
    if (!hwmap) {
        ret = AVERROR(ENOMEM);
        goto fail;
    }

    hwmap->source = av_frame_alloc();
    if (!hwmap->source) {
        ret = AVERROR(ENOMEM);
        goto fail;
    }
    ret = av_frame_ref(hwmap->source, src);
    if (ret < 0)
        goto fail;

    hwmap->hw_frames_ctx = av_buffer_ref(hwframe_ref);
    if (!hwmap->hw_frames_ctx) {
        ret = AVERROR(ENOMEM);
        goto fail;
    }

    hwmap->unmap = unmap;
    hwmap->priv = priv;

    /* tie the descriptor's lifetime to dst via this buffer */
    dst->buf[0] = av_buffer_create((uint8_t*)hwmap, sizeof(*hwmap),
                                   &ff_hwframe_unmap, ctx, 0);
    if (!dst->buf[0]) {
        ret = AVERROR(ENOMEM);
        goto fail;
    }

    return 0;

fail:
    if (hwmap) {
        av_buffer_unref(&hwmap->hw_frames_ctx);
        av_frame_free(&hwmap->source);
    }
    av_free(hwmap);
    return ret;
}
792 | | |
/**
 * Map src into dst, or unmap a previously mapped frame.
 *
 * An unmap is detected when src and dst share a frames context with
 * matching sw/hw formats, or when src's frames context was derived from
 * dst's; in that case dst is simply filled from the original source
 * frame recorded in the map descriptor.  Otherwise the source backend's
 * map_from() is tried first, then the destination backend's map_to().
 *
 * @return 0 on success, a negative AVERROR code on failure
 *         (AVERROR(ENOSYS) when neither backend can perform the map).
 */
int av_hwframe_map(AVFrame *dst, const AVFrame *src, int flags)
{
    /* saved so the fail path can restore caller-provided dst fields */
    AVBufferRef *orig_dst_frames = dst->hw_frames_ctx;
    enum AVPixelFormat orig_dst_fmt = dst->format;
    HWMapDescriptor *hwmap;
    int ret;

    if (src->hw_frames_ctx && dst->hw_frames_ctx) {
        FFHWFramesContext *src_frames = (FFHWFramesContext*)src->hw_frames_ctx->data;
        FFHWFramesContext *dst_frames = (FFHWFramesContext*)dst->hw_frames_ctx->data;

        if ((src_frames == dst_frames &&
             src->format == dst_frames->p.sw_format &&
             dst->format == dst_frames->p.format) ||
            (src_frames->source_frames &&
             src_frames->source_frames->data ==
             (uint8_t*)dst_frames)) {
            // This is an unmap operation. We don't need to directly
            // do anything here other than fill in the original frame,
            // because the real unmap will be invoked when the last
            // reference to the mapped frame disappears.
            if (!src->buf[0]) {
                av_log(src_frames, AV_LOG_ERROR, "Invalid mapping "
                       "found when attempting unmap.\n");
                return AVERROR(EINVAL);
            }
            hwmap = (HWMapDescriptor*)src->buf[0]->data;
            return av_frame_replace(dst, hwmap->source);
        }
    }

    /* try the source backend first... */
    if (src->hw_frames_ctx) {
        FFHWFramesContext *src_frames = (FFHWFramesContext*)src->hw_frames_ctx->data;

        if (src_frames->p.format == src->format &&
            src_frames->hw_type->map_from) {
            ret = src_frames->hw_type->map_from(&src_frames->p,
                                                dst, src, flags);
            if (ret >= 0)
                return ret;
            else if (ret != AVERROR(ENOSYS))
                goto fail;
        }
    }

    /* ...then the destination backend */
    if (dst->hw_frames_ctx) {
        FFHWFramesContext *dst_frames = (FFHWFramesContext*)dst->hw_frames_ctx->data;

        if (dst_frames->p.format == dst->format &&
            dst_frames->hw_type->map_to) {
            ret = dst_frames->hw_type->map_to(&dst_frames->p,
                                              dst, src, flags);
            if (ret >= 0)
                return ret;
            else if (ret != AVERROR(ENOSYS))
                goto fail;
        }
    }

    return AVERROR(ENOSYS);

fail:
    // if the caller provided dst frames context, it should be preserved
    // by this function
    av_assert0(orig_dst_frames == NULL ||
               orig_dst_frames == dst->hw_frames_ctx);

    // preserve user-provided dst frame fields, but clean
    // anything we might have set
    dst->hw_frames_ctx = NULL;
    av_frame_unref(dst);

    dst->hw_frames_ctx = orig_dst_frames;
    dst->format = orig_dst_fmt;

    return ret;
}
870 | | |
/**
 * Create a frames context of the given format on derived_device_ctx
 * whose frames are mapped from source_frame_ctx.
 *
 * If source_frame_ctx is itself derived from a context on the target
 * device, this is really an unmap and a reference to that original
 * context is returned instead.
 *
 * @param flags AV_HWFRAME_MAP_* flags used later when frames are
 *              allocated from the derived context
 * @return 0 on success, a negative AVERROR code on failure.
 */
int av_hwframe_ctx_create_derived(AVBufferRef **derived_frame_ctx,
                                  enum AVPixelFormat format,
                                  AVBufferRef *derived_device_ctx,
                                  AVBufferRef *source_frame_ctx,
                                  int flags)
{
    AVBufferRef *dst_ref = NULL;
    FFHWFramesContext *dsti = NULL;
    FFHWFramesContext *srci = (FFHWFramesContext*)source_frame_ctx->data;
    AVHWFramesContext *dst, *src = &srci->p;
    int ret;

    if (srci->source_frames) {
        AVHWFramesContext *src_src =
            (AVHWFramesContext*)srci->source_frames->data;
        AVHWDeviceContext *dst_dev =
            (AVHWDeviceContext*)derived_device_ctx->data;

        if (src_src->device_ctx == dst_dev) {
            // This is actually an unmapping, so we just return a
            // reference to the source frame context.
            *derived_frame_ctx = av_buffer_ref(srci->source_frames);
            if (!*derived_frame_ctx) {
                ret = AVERROR(ENOMEM);
                goto fail;
            }
            return 0;
        }
    }

    dst_ref = av_hwframe_ctx_alloc(derived_device_ctx);
    if (!dst_ref) {
        ret = AVERROR(ENOMEM);
        goto fail;
    }

    dsti = (FFHWFramesContext*)dst_ref->data;
    dst = &dsti->p;

    /* the derived context mirrors the source's geometry and sw format */
    dst->format = format;
    dst->sw_format = src->sw_format;
    dst->width = src->width;
    dst->height = src->height;

    dsti->source_frames = av_buffer_ref(source_frame_ctx);
    if (!dsti->source_frames) {
        ret = AVERROR(ENOMEM);
        goto fail;
    }

    /* remember which map flags to use when frames are allocated from
     * the derived context (consumed in av_hwframe_get_buffer()) */
    dsti->source_allocation_map_flags =
        flags & (AV_HWFRAME_MAP_READ |
                 AV_HWFRAME_MAP_WRITE |
                 AV_HWFRAME_MAP_OVERWRITE |
                 AV_HWFRAME_MAP_DIRECT);

    /* give both backends a chance to set up the derivation; ENOSYS from
     * both simply means no extra setup is required */
    ret = AVERROR(ENOSYS);
    if (srci->hw_type->frames_derive_from)
        ret = srci->hw_type->frames_derive_from(dst, src, flags);
    if (ret == AVERROR(ENOSYS) &&
        dsti->hw_type->frames_derive_to)
        ret = dsti->hw_type->frames_derive_to(dst, src, flags);
    if (ret == AVERROR(ENOSYS))
        ret = 0;
    if (ret)
        goto fail;

    *derived_frame_ctx = dst_ref;
    return 0;

fail:
    if (dsti)
        av_buffer_unref(&dsti->source_frames);
    av_buffer_unref(&dst_ref);
    return ret;
}
947 | | |
/* Replace the source frame of an existing mapping.  dst->buf[0] must
 * hold a HWMapDescriptor created by ff_hwframe_map_create(). */
int ff_hwframe_map_replace(AVFrame *dst, const AVFrame *src)
{
    HWMapDescriptor *hwmap = (HWMapDescriptor*)dst->buf[0]->data;
    return av_frame_replace(hwmap->source, src);
}