FFmpeg: libavutil/hwcontext.c Source File
/*
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */
The file implements the generic AVHWDeviceContext/AVHWFramesContext layer and dispatches all API-specific work to per-backend HWContextType implementations. After #include "config.h" and the libavutil headers it needs, it defines two static tables. hw_table[] is a NULL-terminated array of pointers to the backends that were compiled in; each entry (&ff_hwcontext_type_cuda, &ff_hwcontext_type_d3d11va, &ff_hwcontext_type_drm, &ff_hwcontext_type_dxva2, &ff_hwcontext_type_opencl, &ff_hwcontext_type_qsv, &ff_hwcontext_type_vaapi, &ff_hwcontext_type_vdpau, &ff_hwcontext_type_videotoolbox, &ff_hwcontext_type_mediacodec, &ff_hwcontext_type_vulkan) is guarded by the matching CONFIG_CUDA, CONFIG_D3D11VA, CONFIG_LIBDRM, CONFIG_DXVA2, CONFIG_OPENCL, CONFIG_QSV, CONFIG_VAAPI, CONFIG_VDPAU, CONFIG_VIDEOTOOLBOX, CONFIG_MEDIACODEC or CONFIG_VULKAN macro. hw_type_names[] maps each AVHWDeviceType value to its canonical string name ("cuda", "vaapi", "vulkan" and so on), independently of what is compiled in.
av_hwdevice_find_type_by_name() scans hw_type_names[] for a matching name and returns the corresponding AVHWDeviceType, or AV_HWDEVICE_TYPE_NONE if the name is unknown. av_hwdevice_get_type_name() is the reverse lookup, returning NULL for out-of-range values. av_hwdevice_iterate_types() returns the next device type after prev for which an entry exists in hw_table[], so it only reports backends that are actually compiled into the build; a usage sketch follows below.
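These three lookups are what callers use to list and select hardware types by name. A minimal sketch of how they might be driven through the public API (the program itself is illustrative; only the av_hwdevice_* calls come from this file):

#include <stdio.h>
#include <libavutil/hwcontext.h>

int main(void)
{
    enum AVHWDeviceType type = AV_HWDEVICE_TYPE_NONE;

    /* av_hwdevice_iterate_types() walks hw_table[], so only backends
     * compiled into this libavutil build are reported. */
    while ((type = av_hwdevice_iterate_types(type)) != AV_HWDEVICE_TYPE_NONE)
        printf("%s\n", av_hwdevice_get_type_name(type));

    /* The name lookup checks hw_type_names[] and returns
     * AV_HWDEVICE_TYPE_NONE only for names it does not know at all. */
    if (av_hwdevice_find_type_by_name("not-a-device") == AV_HWDEVICE_TYPE_NONE)
        printf("unknown device type name\n");
    return 0;
}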
The static AVClass hwdevice_ctx_class supplies the class name, item-name callback and version used for logging on AVHWDeviceContext. hwdevice_ctx_free() is the AVBuffer free callback for device contexts: uninit might still want to access the hw context and the user free() callback might destroy it, so uninit has to be called first. The backend's device_uninit() therefore runs before the user-supplied ctx->free() callback; afterwards the source_device reference (set for derived devices) is unreffed and the hwctx, the internal private data and the context itself are freed.
av_hwdevice_ctx_alloc() looks the requested type up in hw_table[] and returns NULL if no matching backend was compiled in. Otherwise it allocates the AVHWDeviceContext, its internal structure, the backend-private data (device_priv_size) and the public API-specific hwctx (device_hwctx_size), wraps the context in an AVBufferRef created with av_buffer_create() and hwdevice_ctx_free as its free callback, sets the type and av_class fields, and records the selected HWContextType in ctx->internal->hw_type; on any allocation failure everything allocated so far is torn down. av_hwdevice_ctx_init() then runs the backend's device_init() callback, if there is one, and on failure calls device_uninit() before returning the error.
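Most callers reach these two functions indirectly through av_hwdevice_ctx_create() (further down), but the split alloc/init API is useful when the API-specific hwctx must be filled in by hand, for example to adopt a native device handle that already exists. A hedged sketch, assuming a VAAPI-enabled build and a VADisplay obtained elsewhere (the wrapper function itself is hypothetical):

#include <libavutil/hwcontext.h>
#include <libavutil/hwcontext_vaapi.h>

/* Wrap an already-open VADisplay in an AVHWDeviceContext. */
static AVBufferRef *wrap_vaapi_display(VADisplay display)
{
    AVBufferRef *ref = av_hwdevice_ctx_alloc(AV_HWDEVICE_TYPE_VAAPI);
    AVHWDeviceContext *ctx;
    AVVAAPIDeviceContext *hwctx;

    if (!ref)
        return NULL;

    ctx   = (AVHWDeviceContext*)ref->data;
    hwctx = ctx->hwctx;            /* public, API-specific part */
    hwctx->display = display;      /* hand over the native handle */

    /* av_hwdevice_ctx_init() runs the backend's device_init() callback;
     * on failure the context is dropped. */
    if (av_hwdevice_ctx_init(ref) < 0)
        av_buffer_unref(&ref);
    return ref;
}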
hwframe_ctx_class plays the same role for AVHWFramesContext. hwframe_ctx_free(), the frames-context free callback, uninits the internal buffer pool with av_buffer_pool_uninit(), calls the backend's frames_uninit() hook, runs the user free() callback, unrefs the source_frames reference (set for derived contexts) and the device_ref, and finally frees the hwctx, the internal data and the context.
av_hwframe_ctx_alloc() creates an uninitialised frames context tied to a device. It allocates the AVHWFramesContext, its internal data and, when the backend declares them, the frames-private data (frames_priv_size) and the public hwctx (frames_hwctx_size); it then takes a new reference to the device with av_buffer_ref(), stores it in ctx->device_ref together with the parent device_ctx pointer, records the backend in ctx->internal->hw_type and wraps everything in an AVBufferRef freed by hwframe_ctx_free(). If taking the device reference or any allocation fails, the partial context is torn down and NULL is returned.
The static helper hwframe_pool_prealloc() fills the pool up front: it allocates an array of ctx->initial_pool_size temporary AVFrames, calls av_hwframe_get_buffer() on each of them, and then frees all of the temporary frames again, leaving the underlying buffers available in the pool.
av_hwframe_ctx_init() finalises a frames context before use. A derived frame context is already initialised, so if ctx->internal->source_frames is set the function returns 0 immediately. Otherwise it validates the pixel format, checking ctx->format against the backend's supported hardware formats and logging "The hardware pixel format '%s' is not supported by the device type '%s'" if it is not among them; validates the dimensions with av_image_check_size(); runs the format-specific frames_init() callback if the backend provides one; installs the backend-created internal pool as ctx->pool when the user did not supply a pool of their own; and, if initial_pool_size > 0 was requested, preallocates the frames in the pool through hwframe_pool_prealloc(). If anything after frames_init() fails, frames_uninit() is called before the error is returned.
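Taken together, av_hwframe_ctx_alloc() and av_hwframe_ctx_init() bracket the user's configuration of the public fields. A sketch, assuming device_ref is an already-initialised device and hw_format matches its type (the sizes and pool depth are arbitrary example values):

#include <libavutil/hwcontext.h>

/* Create a pool of 1920x1080 NV12-backed surfaces on the given device.
 * 'hw_format' would be e.g. AV_PIX_FMT_VAAPI or AV_PIX_FMT_CUDA. */
static AVBufferRef *make_frames_ctx(AVBufferRef *device_ref,
                                    enum AVPixelFormat hw_format)
{
    AVBufferRef *frames_ref = av_hwframe_ctx_alloc(device_ref);
    AVHWFramesContext *fc;

    if (!frames_ref)
        return NULL;

    fc = (AVHWFramesContext*)frames_ref->data;
    fc->format            = hw_format;        /* hardware surface format */
    fc->sw_format         = AV_PIX_FMT_NV12;  /* underlying data layout  */
    fc->width             = 1920;
    fc->height            = 1080;
    fc->initial_pool_size = 4;                /* preallocated by init()  */

    if (av_hwframe_ctx_init(frames_ref) < 0)
        av_buffer_unref(&frames_ref);
    return frames_ref;
}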
av_hwframe_transfer_get_formats() is a thin wrapper: it fails if the backend provides no transfer_get_formats() callback and otherwise forwards the call, returning the list of pixel formats usable as the source or target of a transfer in the requested direction.
The static helper transfer_data_alloc() services av_hwframe_transfer_data() calls whose destination frame has no buffers yet. It allocates a temporary frame; if the destination format is set it uses that, otherwise it picks the first supported one reported by av_hwframe_transfer_get_formats(). The temporary frame is sized to the frames context's width and height, given buffers with av_frame_get_buffer(), filled by a nested transfer call, trimmed back to the source frame's width and height, and finally moved into dst with av_frame_move_ref().
av_hwframe_transfer_data() copies data to or from a hardware surface. If dst has no data buffers the call is redirected through transfer_data_alloc(). When both frames are hardware frames this is a Hardware -> Hardware transfer: unlike Software -> Hardware or Hardware -> Software, the transfer function could be provided by either the src or dst side, depending on the specific combination of hardware, so the destination context's transfer_data_to() is tried first and then the source context's transfer_data_from(). Neither end may be a derived frames context; "A device with a derived frame context cannot be used as the source (or destination) of a HW -> HW transfer." is logged and the call fails in that case. In the mixed cases, a hardware src is downloaded through its context's transfer_data_from() and a hardware dst is uploaded through its context's transfer_data_to(); if the needed callback is missing, or neither frame is a hardware frame, an error is returned.
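A common use of av_hwframe_transfer_data() is downloading a decoded hardware frame into system memory, optionally consulting av_hwframe_transfer_get_formats() first. A sketch, assuming hw_frame came from a hwaccel decoder and therefore carries hw_frames_ctx:

#include <libavutil/frame.h>
#include <libavutil/hwcontext.h>
#include <libavutil/mem.h>

/* Copy the contents of a GPU-side frame into a new software frame. */
static AVFrame *download_frame(const AVFrame *hw_frame)
{
    AVFrame *sw_frame = av_frame_alloc();
    enum AVPixelFormat *formats = NULL;

    if (!sw_frame)
        return NULL;

    /* Ask which software formats this frames context can transfer to.
     * Leaving sw_frame->format unset would make transfer_data_alloc()
     * above pick the first entry itself. */
    if (av_hwframe_transfer_get_formats(hw_frame->hw_frames_ctx,
                                        AV_HWFRAME_TRANSFER_DIRECTION_FROM,
                                        &formats, 0) >= 0) {
        sw_frame->format = formats[0];
        av_freep(&formats);
    }

    if (av_hwframe_transfer_data(sw_frame, hw_frame, 0) < 0)
        av_frame_free(&sw_frame);
    return sw_frame;
}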
av_hwframe_get_buffer() allocates a new frame attached to the given frames context. If this is a derived frame context, the frame is allocated in the source context and mapped immediately: a source frame is obtained recursively from internal->source_frames, mapped into the caller's frame with the flags saved in internal->source_allocation_map_flags (an error is logged if mapping into the derived frame context fails), and the source frame is then freed immediately, since the mapped frame still contains a reference to it. For an ordinary context the backend must provide frames_get_buffer(); the function stores a new reference to the frames context in frame->hw_frames_ctx and lets the backend allocate the surface and fill in the frame.
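Allocating a surface from an initialised frames context is then a single call; the backend fills in the frame's data and buffer pointers, and this function sets frame->hw_frames_ctx. A brief sketch:

#include <libavutil/frame.h>
#include <libavutil/hwcontext.h>

/* Grab one hardware surface from the pool behind 'frames_ref'. */
static AVFrame *get_hw_surface(AVBufferRef *frames_ref)
{
    AVFrame *frame = av_frame_alloc();

    if (!frame)
        return NULL;
    /* The flags argument of av_hwframe_get_buffer() is currently unused
     * and should be zero. */
    if (av_hwframe_get_buffer(frames_ref, frame, 0) < 0)
        av_frame_free(&frame);
    return frame;
}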
av_hwdevice_hwconfig_alloc() returns a zeroed allocation of the backend's device_hwconfig_size, or NULL when the backend defines no hwconfig structure.
av_hwdevice_get_hwframe_constraints() allocates an AVHWFramesConstraints, initialises min_width and min_height to 0 and max_width and max_height to INT_MAX, and asks the backend's frames_get_constraints() callback to fill in the valid hardware and software formats and the real size limits for the given hwconfig. If the backend has no such callback, or the callback fails, the constraints are freed and NULL is returned.
av_hwframe_constraints_free() releases the valid_hw_formats and valid_sw_formats arrays and then frees the structure itself, clearing the caller's pointer.
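The hwconfig/constraints functions above are how a caller can discover what a device is able to allocate before committing to a frames context. A sketch that passes no API-specific hwconfig (NULL is accepted and yields the device's generic capabilities):

#include <stdio.h>
#include <libavutil/hwcontext.h>
#include <libavutil/pixdesc.h>

static void print_constraints(AVBufferRef *device_ref)
{
    AVHWFramesConstraints *cst =
        av_hwdevice_get_hwframe_constraints(device_ref, NULL);

    if (!cst) {
        printf("backend reports no frame constraints\n");
        return;
    }
    printf("surface size: %dx%d .. %dx%d\n",
           cst->min_width, cst->min_height, cst->max_width, cst->max_height);
    if (cst->valid_sw_formats) {
        for (enum AVPixelFormat *p = cst->valid_sw_formats;
             *p != AV_PIX_FMT_NONE; p++)
            printf("  sw format: %s\n", av_get_pix_fmt_name(*p));
    }
    av_hwframe_constraints_free(&cst);
}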
av_hwdevice_ctx_create() is the usual one-call constructor: it allocates a context of the requested type with av_hwdevice_ctx_alloc(), runs the backend's device_create() callback with the device string and options dictionary (failing if the backend cannot create devices), finalises the result with av_hwdevice_ctx_init(), and returns the new reference through *pdevice_ref, clearing it and unreffing the context on failure.
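In practice most code goes straight through av_hwdevice_ctx_create(). A sketch that opens a VAAPI device; the render-node path is only an example, and passing NULL instead lets the backend pick a default device:

#include <libavutil/hwcontext.h>

static AVBufferRef *open_vaapi_device(void)
{
    AVBufferRef *device_ref = NULL;
    int ret = av_hwdevice_ctx_create(&device_ref, AV_HWDEVICE_TYPE_VAAPI,
                                     "/dev/dri/renderD128", NULL, 0);
    return ret < 0 ? NULL : device_ref;
}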
av_hwdevice_ctx_create_derived_opts() derives a device of another type from an existing one. It first walks the chain of internal->source_device references starting at src_ref; if a device of the requested type is already somewhere in that chain, a new reference to it is returned directly. Otherwise a device of the target type is allocated and the chain is walked again, offering each device in turn to the new backend's device_derive() callback; the first one that succeeds becomes the source, is recorded in internal->source_device, and the derived device is finalised with av_hwdevice_ctx_init() before being returned through *dst_ref_ptr. If no device in the chain can be derived from, the call fails. av_hwdevice_ctx_create_derived() is a thin wrapper that passes no options.
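Derivation is what lets, for instance, an OpenCL or Vulkan device share memory with an existing VAAPI device so that frames can later be mapped instead of copied. A sketch; whether this particular combination works depends entirely on which backends are compiled in and on their device_derive() support:

#include <libavutil/hwcontext.h>

/* Try to derive a Vulkan device from an existing device reference. */
static AVBufferRef *derive_vulkan(AVBufferRef *src_device_ref)
{
    AVBufferRef *vk_ref = NULL;
    int ret = av_hwdevice_ctx_create_derived(&vk_ref, AV_HWDEVICE_TYPE_VULKAN,
                                             src_device_ref, 0);
    return ret < 0 ? NULL : vk_ref;
}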
ff_hwframe_unmap() is the free callback attached to mapped frames: it calls the HWMapDescriptor's unmap() function on the owning frames context, frees the saved source frame and unrefs the descriptor's hw_frames_ctx reference. ff_hwframe_map_create() builds such a descriptor: it allocates a HWMapDescriptor, keeps a reference to the source frame (hwmap->source) and to the frames context (hwmap->hw_frames_ctx), stores the unmap callback and the hardware-specific priv pointer, and attaches the descriptor to dst->buf[0] via av_buffer_create() with ff_hwframe_unmap as the free function, so the unmap runs automatically when the last reference to the mapped frame is released. On failure the partially built descriptor is freed.
av_hwframe_map() maps src into dst. If both frames carry a hw_frames_ctx and they refer to the same context (or dst belongs to the context that src's context was derived from), this is an unmap operation: nothing needs to be done beyond filling in the original frame, because the real unmap will be invoked when the last reference to the mapped frame disappears, so the HWMapDescriptor stored in src->buf[0] is looked up and a reference to its source frame is returned in dst ("Invalid mapping found when attempting unmap." is logged if src carries no such descriptor). Otherwise, if src is a hardware frame its context's map_from() callback is tried, and if dst is a hardware frame its context's map_to() callback is tried; if neither backend supports the requested combination the call fails.
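av_hwframe_map() is the zero-copy counterpart of av_hwframe_transfer_data(): where a backend supports it, the CPU (or another device) gets direct access to the surface. A sketch that maps a hardware frame read-only; requesting the context's sw_format is an assumption that keeps the example simple, and backends may accept other formats as well:

#include <libavutil/frame.h>
#include <libavutil/hwcontext.h>

/* Map 'hw_frame' for CPU reading.  Unreferencing the returned frame
 * later triggers the deferred unmap described above. */
static AVFrame *map_for_reading(const AVFrame *hw_frame)
{
    AVHWFramesContext *fc = (AVHWFramesContext*)hw_frame->hw_frames_ctx->data;
    AVFrame *mapped = av_frame_alloc();

    if (!mapped)
        return NULL;
    mapped->format = fc->sw_format;   /* desired software layout */
    if (av_hwframe_map(mapped, hw_frame, AV_HWFRAME_MAP_READ) < 0)
        av_frame_free(&mapped);
    return mapped;
}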
av_hwframe_ctx_create_derived() creates a frames context on one device that presents the frames of an existing context on another device. If src is itself a derived context whose original source already lives on the target device, this is actually an unmapping, so a reference to that source frame context is simply returned. Otherwise a new frames context is allocated on derived_device_ctx with av_hwframe_ctx_alloc(); its format is set to the requested pixel format while sw_format, width and height are copied from the source; the source context is referenced in internal->source_frames and the READ/WRITE/OVERWRITE/DIRECT mapping flags are saved in internal->source_allocation_map_flags for later allocations; the backends may then adjust the new context through their frames_derive_to() and frames_derive_from() hooks; and the result is returned through *derived_frame_ctx, with everything unreffed on failure.
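Combined with a derived device, this is what allows frames decoded into, say, VAAPI surfaces to be seen by OpenCL or Vulkan filters without a trip through system memory. A sketch with assumed device and format choices (AV_PIX_FMT_VULKAN and the READ/WRITE flags are illustrative, not required):

#include <libavutil/hwcontext.h>

/* Expose the frames of 'vaapi_frames_ref' on the derived device
 * 'vk_device_ref' as Vulkan frames. */
static AVBufferRef *derive_frames(AVBufferRef *vk_device_ref,
                                  AVBufferRef *vaapi_frames_ref)
{
    AVBufferRef *derived = NULL;
    int ret = av_hwframe_ctx_create_derived(&derived, AV_PIX_FMT_VULKAN,
                                            vk_device_ref, vaapi_frames_ref,
                                            AV_HWFRAME_MAP_READ |
                                            AV_HWFRAME_MAP_WRITE);
    return ret < 0 ? NULL : derived;
}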
Finally, ff_hwframe_map_replace() swaps the source frame stored in dst's HWMapDescriptor for src, unreferencing the old one; it is used for indirect mappings such as VAAPI -> (DRM) -> OpenCL/Vulkan, where the intermediate frame should not be kept alive by the final mapping.