/*
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include "config.h"
#include "pixdesc.h"
#include "avstring.h"
#include "imgutils.h"
#include "hwcontext.h"
#include "hwcontext_internal.h"
#include "hwcontext_vulkan.h"

#if CONFIG_LIBDRM
#include <unistd.h>
#include <xf86drm.h>
#include <drm_fourcc.h>
#include "hwcontext_drm.h"
#if CONFIG_VAAPI
#include <va/va_drmcommon.h>
#include "hwcontext_vaapi.h"
#endif
#endif

#if CONFIG_CUDA
#include "hwcontext_cuda_internal.h"
#include "cuda_check.h"
#define CHECK_CU(x) FF_CUDA_CHECK_DL(cuda_cu, cu, x)
#endif

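/* Per-queue submission state: the fence tracks the most recent submission on
 * the queue, and buf_deps holds references to the buffers a still-executing
 * command buffer depends on, keeping them alive until the queue is next
 * waited upon. */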
typedef struct VulkanQueueCtx {
    VkFence fence;
    VkQueue queue;
    int was_synchronous;

    /* Buffer dependencies */
    AVBufferRef **buf_deps;
    int nb_buf_deps;
    int buf_deps_alloc_size;
} VulkanQueueCtx;

typedef struct VulkanExecCtx {
    VkCommandPool pool;
    VkCommandBuffer *bufs;
    VulkanQueueCtx *queues;
    int nb_queues;
    int cur_queue_idx;
} VulkanExecCtx;

typedef struct VulkanDevicePriv {
    /* Properties */
    VkPhysicalDeviceProperties props;
    VkPhysicalDeviceMemoryProperties mprops;

    /* Queues */
    uint32_t qfs[3];
    int num_qfs;

    /* Debug callback */
    VkDebugUtilsMessengerEXT debug_ctx;

    /* Image transfers */
    VulkanExecCtx upload_ctx;
    VulkanExecCtx download_ctx;

    /* Extensions */
    uint64_t extensions;

    /* Settings */
    int use_linear_images;

    /* Nvidia */
    int dev_is_nvidia;
} VulkanDevicePriv;

typedef struct VulkanFramesPriv {
    VulkanExecCtx cmd;
} VulkanFramesPriv;

typedef struct AVVkFrameInternal {
#if CONFIG_CUDA
    /* Importing external memory into CUDA is really expensive, so we keep the
     * memory imported all the time */
    AVBufferRef *cuda_fc_ref; /* Need to keep it around for uninit */
    CUexternalMemory ext_mem[AV_NUM_DATA_POINTERS];
    CUmipmappedArray cu_mma[AV_NUM_DATA_POINTERS];
    CUarray cu_array[AV_NUM_DATA_POINTERS];
    CUexternalSemaphore cu_sem[AV_NUM_DATA_POINTERS];
#endif
} AVVkFrameInternal;

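/* Resolves how many queues to use for a given purpose, falling back along the
 * chain transfer -> compute -> graphics when no dedicated family exists; e.g.
 * GET_QUEUE_COUNT(hwctx, 0, 0, 1) yields nb_tx_queues if nonzero, otherwise
 * nb_comp_queues, otherwise nb_graphics_queues. */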
#define GET_QUEUE_COUNT(hwctx, graph, comp, tx) (                   \
    graph ?  hwctx->nb_graphics_queues :                            \
    comp  ? (hwctx->nb_comp_queues ?                                \
             hwctx->nb_comp_queues : hwctx->nb_graphics_queues) :   \
    tx    ? (hwctx->nb_tx_queues ? hwctx->nb_tx_queues :            \
             (hwctx->nb_comp_queues ?                               \
              hwctx->nb_comp_queues : hwctx->nb_graphics_queues)) : \
    0                                                               \
)

#define VK_LOAD_PFN(inst, name) PFN_##name pfn_##name = (PFN_##name)           \
                                              vkGetInstanceProcAddr(inst, #name)

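/* Baseline usage flags every frame image is created with; pixfmt_is_supported()
 * below checks each plane's format features against this mask for the chosen
 * tiling mode. */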
#define DEFAULT_USAGE_FLAGS (VK_IMAGE_USAGE_SAMPLED_BIT      |                 \
                             VK_IMAGE_USAGE_STORAGE_BIT      |                 \
                             VK_IMAGE_USAGE_TRANSFER_SRC_BIT |                 \
                             VK_IMAGE_USAGE_TRANSFER_DST_BIT)

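/* Appends a duplicated string to a growing, heap-allocated string list; on
 * allocation failure it sets err and jumps to the enclosing function's fail
 * label. The caller owns the list and every entry, and must free them
 * individually, as the fail paths in this file do. */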
#define ADD_VAL_TO_LIST(list, count, val)                                      \
    do {                                                                       \
        list = av_realloc_array(list, sizeof(*list), ++count);                 \
        if (!list) {                                                           \
            err = AVERROR(ENOMEM);                                             \
            goto fail;                                                         \
        }                                                                      \
        list[count - 1] = av_strdup(val);                                      \
        if (!list[count - 1]) {                                                \
            err = AVERROR(ENOMEM);                                             \
            goto fail;                                                         \
        }                                                                      \
    } while(0)

static const struct {
    enum AVPixelFormat pixfmt;
    const VkFormat vkfmts[3];
} vk_pixfmt_map[] = {
    { AV_PIX_FMT_GRAY8,   { VK_FORMAT_R8_UNORM } },
    { AV_PIX_FMT_GRAY16,  { VK_FORMAT_R16_UNORM } },
    { AV_PIX_FMT_GRAYF32, { VK_FORMAT_R32_SFLOAT } },

    { AV_PIX_FMT_NV12, { VK_FORMAT_R8_UNORM, VK_FORMAT_R8G8_UNORM } },
    { AV_PIX_FMT_P010, { VK_FORMAT_R16_UNORM, VK_FORMAT_R16G16_UNORM } },
    { AV_PIX_FMT_P016, { VK_FORMAT_R16_UNORM, VK_FORMAT_R16G16_UNORM } },

    { AV_PIX_FMT_YUV420P, { VK_FORMAT_R8_UNORM, VK_FORMAT_R8_UNORM, VK_FORMAT_R8_UNORM } },
    { AV_PIX_FMT_YUV422P, { VK_FORMAT_R8_UNORM, VK_FORMAT_R8_UNORM, VK_FORMAT_R8_UNORM } },
    { AV_PIX_FMT_YUV444P, { VK_FORMAT_R8_UNORM, VK_FORMAT_R8_UNORM, VK_FORMAT_R8_UNORM } },

    { AV_PIX_FMT_YUV420P16, { VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM } },
    { AV_PIX_FMT_YUV422P16, { VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM } },
    { AV_PIX_FMT_YUV444P16, { VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM } },

    { AV_PIX_FMT_ABGR,   { VK_FORMAT_A8B8G8R8_UNORM_PACK32 } },
    { AV_PIX_FMT_BGRA,   { VK_FORMAT_B8G8R8A8_UNORM } },
    { AV_PIX_FMT_RGBA,   { VK_FORMAT_R8G8B8A8_UNORM } },
    { AV_PIX_FMT_RGB24,  { VK_FORMAT_R8G8B8_UNORM } },
    { AV_PIX_FMT_BGR24,  { VK_FORMAT_B8G8R8_UNORM } },
    { AV_PIX_FMT_RGB48,  { VK_FORMAT_R16G16B16_UNORM } },
    { AV_PIX_FMT_RGBA64, { VK_FORMAT_R16G16B16A16_UNORM } },
    { AV_PIX_FMT_RGB565, { VK_FORMAT_R5G6B5_UNORM_PACK16 } },
    { AV_PIX_FMT_BGR565, { VK_FORMAT_B5G6R5_UNORM_PACK16 } },
    { AV_PIX_FMT_BGR0,   { VK_FORMAT_B8G8R8A8_UNORM } },
    { AV_PIX_FMT_0BGR,   { VK_FORMAT_A8B8G8R8_UNORM_PACK32 } },
    { AV_PIX_FMT_RGB0,   { VK_FORMAT_R8G8B8A8_UNORM } },

    { AV_PIX_FMT_GBRPF32, { VK_FORMAT_R32_SFLOAT, VK_FORMAT_R32_SFLOAT, VK_FORMAT_R32_SFLOAT } },
};

const VkFormat *av_vkfmt_from_pixfmt(enum AVPixelFormat p)
{
    for (enum AVPixelFormat i = 0; i < FF_ARRAY_ELEMS(vk_pixfmt_map); i++)
        if (vk_pixfmt_map[i].pixfmt == p)
            return vk_pixfmt_map[i].vkfmts;
    return NULL;
}
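
/* Illustrative usage (not part of the original file): for a multiplanar
 * format such as NV12, the returned array holds one Vulkan format per plane,
 * matching the table above:
 *
 *     const VkFormat *fmts = av_vkfmt_from_pixfmt(AV_PIX_FMT_NV12);
 *     if (fmts) {
 *         VkFormat luma   = fmts[0]; // VK_FORMAT_R8_UNORM
 *         VkFormat chroma = fmts[1]; // VK_FORMAT_R8G8_UNORM
 *     }
 */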

static int pixfmt_is_supported(AVVulkanDeviceContext *hwctx, enum AVPixelFormat p,
                               int linear)
{
    const VkFormat *fmt = av_vkfmt_from_pixfmt(p);
    int planes = av_pix_fmt_count_planes(p);

    if (!fmt)
        return 0;

    for (int i = 0; i < planes; i++) {
        VkFormatFeatureFlags flags;
        VkFormatProperties2 prop = {
            .sType = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2,
        };
        vkGetPhysicalDeviceFormatProperties2(hwctx->phys_dev, fmt[i], &prop);
        flags = linear ? prop.formatProperties.linearTilingFeatures :
                         prop.formatProperties.optimalTilingFeatures;
        if (!(flags & DEFAULT_USAGE_FLAGS))
            return 0;
    }

    return 1;
}

enum VulkanExtensions {
    EXT_EXTERNAL_DMABUF_MEMORY = 1ULL <<  0, /* VK_EXT_external_memory_dma_buf */
    EXT_DRM_MODIFIER_FLAGS     = 1ULL <<  1, /* VK_EXT_image_drm_format_modifier */
    EXT_EXTERNAL_FD_MEMORY     = 1ULL <<  2, /* VK_KHR_external_memory_fd */
    EXT_EXTERNAL_FD_SEM        = 1ULL <<  3, /* VK_KHR_external_semaphore_fd */

    EXT_NO_FLAG                = 1ULL << 63,
};

typedef struct VulkanOptExtension {
    const char *name;
    uint64_t flag;
} VulkanOptExtension;

static const VulkanOptExtension optional_instance_exts[] = {
    /* For future use */
};

static const VulkanOptExtension optional_device_exts[] = {
    { VK_KHR_EXTERNAL_MEMORY_FD_EXTENSION_NAME,               EXT_EXTERNAL_FD_MEMORY,     },
    { VK_EXT_EXTERNAL_MEMORY_DMA_BUF_EXTENSION_NAME,          EXT_EXTERNAL_DMABUF_MEMORY, },
    { VK_EXT_IMAGE_DRM_FORMAT_MODIFIER_EXTENSION_NAME,        EXT_DRM_MODIFIER_FLAGS,     },
    { VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME,            EXT_EXTERNAL_FD_SEM,        },
};

/* Converts return values to strings */
static const char *vk_ret2str(VkResult res)
{
#define CASE(VAL) case VAL: return #VAL
    switch (res) {
    CASE(VK_SUCCESS);
    CASE(VK_NOT_READY);
    CASE(VK_TIMEOUT);
    CASE(VK_EVENT_SET);
    CASE(VK_EVENT_RESET);
    CASE(VK_INCOMPLETE);
    CASE(VK_ERROR_OUT_OF_HOST_MEMORY);
    CASE(VK_ERROR_OUT_OF_DEVICE_MEMORY);
    CASE(VK_ERROR_INITIALIZATION_FAILED);
    CASE(VK_ERROR_DEVICE_LOST);
    CASE(VK_ERROR_MEMORY_MAP_FAILED);
    CASE(VK_ERROR_LAYER_NOT_PRESENT);
    CASE(VK_ERROR_EXTENSION_NOT_PRESENT);
    CASE(VK_ERROR_FEATURE_NOT_PRESENT);
    CASE(VK_ERROR_INCOMPATIBLE_DRIVER);
    CASE(VK_ERROR_TOO_MANY_OBJECTS);
    CASE(VK_ERROR_FORMAT_NOT_SUPPORTED);
    CASE(VK_ERROR_FRAGMENTED_POOL);
    CASE(VK_ERROR_SURFACE_LOST_KHR);
    CASE(VK_ERROR_NATIVE_WINDOW_IN_USE_KHR);
    CASE(VK_SUBOPTIMAL_KHR);
    CASE(VK_ERROR_OUT_OF_DATE_KHR);
    CASE(VK_ERROR_INCOMPATIBLE_DISPLAY_KHR);
    CASE(VK_ERROR_VALIDATION_FAILED_EXT);
    CASE(VK_ERROR_INVALID_SHADER_NV);
    CASE(VK_ERROR_OUT_OF_POOL_MEMORY);
    CASE(VK_ERROR_INVALID_EXTERNAL_HANDLE);
    CASE(VK_ERROR_NOT_PERMITTED_EXT);
    CASE(VK_ERROR_INVALID_DRM_FORMAT_MODIFIER_PLANE_LAYOUT_EXT);
    CASE(VK_ERROR_INVALID_DEVICE_ADDRESS_EXT);
    CASE(VK_ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT);
    default: return "Unknown error";
    }
#undef CASE
}

static VkBool32 vk_dbg_callback(VkDebugUtilsMessageSeverityFlagBitsEXT severity,
                                VkDebugUtilsMessageTypeFlagsEXT messageType,
                                const VkDebugUtilsMessengerCallbackDataEXT *data,
                                void *priv)
{
    int l;
    AVHWDeviceContext *ctx = priv;

    switch (severity) {
    case VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT: l = AV_LOG_VERBOSE; break;
    case VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT:    l = AV_LOG_INFO;    break;
    case VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT: l = AV_LOG_WARNING; break;
    case VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT:   l = AV_LOG_ERROR;   break;
    default:                                              l = AV_LOG_DEBUG;   break;
    }

    av_log(ctx, l, "%s\n", data->pMessage);
    for (int i = 0; i < data->cmdBufLabelCount; i++)
        av_log(ctx, l, "\t%i: %s\n", i, data->pCmdBufLabels[i].pLabelName);

    return 0;
}

static int check_extensions(AVHWDeviceContext *ctx, int dev, AVDictionary *opts,
                            const char * const **dst, uint32_t *num, int debug)
{
    const char *tstr;
    const char **extension_names = NULL;
    VulkanDevicePriv *p = ctx->internal->priv;
    AVVulkanDeviceContext *hwctx = ctx->hwctx;
    int err = 0, found, extensions_found = 0;

    const char *mod;
    int optional_exts_num;
    uint32_t sup_ext_count;
    char *user_exts_str = NULL;
    AVDictionaryEntry *user_exts;
    VkExtensionProperties *sup_ext = NULL; /* NULL so the fail path can free it safely */
    const VulkanOptExtension *optional_exts;

    if (!dev) {
        mod = "instance";
        optional_exts = optional_instance_exts;
        optional_exts_num = FF_ARRAY_ELEMS(optional_instance_exts);
        user_exts = av_dict_get(opts, "instance_extensions", NULL, 0);
        if (user_exts) {
            user_exts_str = av_strdup(user_exts->value);
            if (!user_exts_str) {
                err = AVERROR(ENOMEM);
                goto fail;
            }
        }
        vkEnumerateInstanceExtensionProperties(NULL, &sup_ext_count, NULL);
        sup_ext = av_malloc_array(sup_ext_count, sizeof(VkExtensionProperties));
        if (!sup_ext) {
            err = AVERROR(ENOMEM);
            goto fail;
        }
        vkEnumerateInstanceExtensionProperties(NULL, &sup_ext_count, sup_ext);
    } else {
        mod = "device";
        optional_exts = optional_device_exts;
        optional_exts_num = FF_ARRAY_ELEMS(optional_device_exts);
        user_exts = av_dict_get(opts, "device_extensions", NULL, 0);
        if (user_exts) {
            user_exts_str = av_strdup(user_exts->value);
            if (!user_exts_str) {
                err = AVERROR(ENOMEM);
                goto fail;
            }
        }
        vkEnumerateDeviceExtensionProperties(hwctx->phys_dev, NULL,
                                             &sup_ext_count, NULL);
        sup_ext = av_malloc_array(sup_ext_count, sizeof(VkExtensionProperties));
        if (!sup_ext) {
            err = AVERROR(ENOMEM);
            goto fail;
        }
        vkEnumerateDeviceExtensionProperties(hwctx->phys_dev, NULL,
                                             &sup_ext_count, sup_ext);
    }

    for (int i = 0; i < optional_exts_num; i++) {
        tstr = optional_exts[i].name;
        found = 0;
        for (int j = 0; j < sup_ext_count; j++) {
            if (!strcmp(tstr, sup_ext[j].extensionName)) {
                found = 1;
                break;
            }
        }
        if (!found)
            continue;

        av_log(ctx, AV_LOG_VERBOSE, "Using %s extension \"%s\"\n", mod, tstr);
        p->extensions |= optional_exts[i].flag;
        ADD_VAL_TO_LIST(extension_names, extensions_found, tstr);
    }

    if (debug && !dev) {
        tstr = VK_EXT_DEBUG_UTILS_EXTENSION_NAME;
        found = 0;
        for (int j = 0; j < sup_ext_count; j++) {
            if (!strcmp(tstr, sup_ext[j].extensionName)) {
                found = 1;
                break;
            }
        }
        if (found) {
            av_log(ctx, AV_LOG_VERBOSE, "Using %s extension \"%s\"\n", mod, tstr);
            ADD_VAL_TO_LIST(extension_names, extensions_found, tstr);
        } else {
            av_log(ctx, AV_LOG_ERROR, "Debug extension \"%s\" not found!\n",
                   tstr);
            err = AVERROR(EINVAL);
            goto fail;
        }
    }

    if (user_exts_str) {
        char *save, *token = av_strtok(user_exts_str, "+", &save);
        while (token) {
            found = 0;
            for (int j = 0; j < sup_ext_count; j++) {
                if (!strcmp(token, sup_ext[j].extensionName)) {
                    found = 1;
                    break;
                }
            }
            if (found) {
                av_log(ctx, AV_LOG_VERBOSE, "Using %s extension \"%s\"\n", mod, token);
                ADD_VAL_TO_LIST(extension_names, extensions_found, token);
            } else {
                av_log(ctx, AV_LOG_WARNING, "%s extension \"%s\" not found, excluding.\n",
                       mod, token);
            }
            token = av_strtok(NULL, "+", &save);
        }
    }

    *dst = extension_names;
    *num = extensions_found;

    av_free(user_exts_str);
    av_free(sup_ext);
    return 0;

fail:
    if (extension_names)
        for (int i = 0; i < extensions_found; i++)
            av_free((void *)extension_names[i]);
    av_free(extension_names);
    av_free(user_exts_str);
    av_free(sup_ext);
    return err;
}

/* Creates a VkInstance */
static int create_instance(AVHWDeviceContext *ctx, AVDictionary *opts)
{
    int err = 0;
    VkResult ret;
    VulkanDevicePriv *p = ctx->internal->priv;
    AVVulkanDeviceContext *hwctx = ctx->hwctx;
    AVDictionaryEntry *debug_opt = av_dict_get(opts, "debug", NULL, 0);
    const int debug_mode = debug_opt && strtol(debug_opt->value, NULL, 10);
    VkApplicationInfo application_info = {
        .sType              = VK_STRUCTURE_TYPE_APPLICATION_INFO,
        .pEngineName        = "libavutil",
        .apiVersion         = VK_API_VERSION_1_1,
        .engineVersion      = VK_MAKE_VERSION(LIBAVUTIL_VERSION_MAJOR,
                                              LIBAVUTIL_VERSION_MINOR,
                                              LIBAVUTIL_VERSION_MICRO),
    };
    VkInstanceCreateInfo inst_props = {
        .sType            = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO,
        .pApplicationInfo = &application_info,
    };

    /* Check for present/missing extensions */
    err = check_extensions(ctx, 0, opts, &inst_props.ppEnabledExtensionNames,
                           &inst_props.enabledExtensionCount, debug_mode);
    if (err < 0)
        return err;

    if (debug_mode) {
        static const char *layers[] = { "VK_LAYER_KHRONOS_validation" };
        inst_props.ppEnabledLayerNames = layers;
        inst_props.enabledLayerCount = FF_ARRAY_ELEMS(layers);
    }

    /* Try to create the instance */
    ret = vkCreateInstance(&inst_props, hwctx->alloc, &hwctx->inst);

    /* Check for errors */
    if (ret != VK_SUCCESS) {
        av_log(ctx, AV_LOG_ERROR, "Instance creation failure: %s\n",
               vk_ret2str(ret));
        for (int i = 0; i < inst_props.enabledExtensionCount; i++)
            av_free((void *)inst_props.ppEnabledExtensionNames[i]);
        av_free((void *)inst_props.ppEnabledExtensionNames);
        return AVERROR_EXTERNAL;
    }

    if (debug_mode) {
        VkDebugUtilsMessengerCreateInfoEXT dbg = {
            .sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT,
            .messageSeverity = VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT |
                               VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT    |
                               VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT |
                               VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT,
            .messageType = VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT    |
                           VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT |
                           VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT,
            .pfnUserCallback = vk_dbg_callback,
            .pUserData = ctx,
        };
        VK_LOAD_PFN(hwctx->inst, vkCreateDebugUtilsMessengerEXT);

        pfn_vkCreateDebugUtilsMessengerEXT(hwctx->inst, &dbg,
                                           hwctx->alloc, &p->debug_ctx);
    }

    hwctx->enabled_inst_extensions = inst_props.ppEnabledExtensionNames;
    hwctx->nb_enabled_inst_extensions = inst_props.enabledExtensionCount;

    return 0;
}

typedef struct VulkanDeviceSelection {
    uint8_t uuid[VK_UUID_SIZE]; /* Will use this first unless !has_uuid */
    int has_uuid;
    const char *name; /* Will use this second unless NULL */
    uint32_t pci_device; /* Will use this third unless 0x0 */
    uint32_t vendor_id; /* Last resort to find something deterministic */
    int index; /* Finally fall back to index */
} VulkanDeviceSelection;

static const char *vk_dev_type(enum VkPhysicalDeviceType type)
{
    switch (type) {
    case VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU: return "integrated";
    case VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU:   return "discrete";
    case VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU:    return "virtual";
    case VK_PHYSICAL_DEVICE_TYPE_CPU:            return "software";
    default:                                     return "unknown";
    }
}

/* Finds a device */
static int find_device(AVHWDeviceContext *ctx, VulkanDeviceSelection *select)
{
    int err = 0, choice = -1;
    uint32_t num;
    VkResult ret;
    VkPhysicalDevice *devices = NULL;
    VkPhysicalDeviceIDProperties *idp = NULL;
    VkPhysicalDeviceProperties2 *prop = NULL;
    VulkanDevicePriv *p = ctx->internal->priv;
    AVVulkanDeviceContext *hwctx = ctx->hwctx;

    ret = vkEnumeratePhysicalDevices(hwctx->inst, &num, NULL);
    if (ret != VK_SUCCESS || !num) {
        av_log(ctx, AV_LOG_ERROR, "No devices found: %s!\n", vk_ret2str(ret));
        return AVERROR(ENODEV);
    }

    devices = av_malloc_array(num, sizeof(VkPhysicalDevice));
    if (!devices)
        return AVERROR(ENOMEM);

    ret = vkEnumeratePhysicalDevices(hwctx->inst, &num, devices);
    if (ret != VK_SUCCESS) {
        av_log(ctx, AV_LOG_ERROR, "Failed enumerating devices: %s\n",
               vk_ret2str(ret));
        err = AVERROR(ENODEV);
        goto end;
    }

    prop = av_mallocz_array(num, sizeof(*prop));
    if (!prop) {
        err = AVERROR(ENOMEM);
        goto end;
    }

    idp = av_mallocz_array(num, sizeof(*idp));
    if (!idp) {
        err = AVERROR(ENOMEM);
        goto end;
    }

    av_log(ctx, AV_LOG_VERBOSE, "GPU listing:\n");
    for (int i = 0; i < num; i++) {
        idp[i].sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES;
        prop[i].sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2;
        prop[i].pNext = &idp[i];

        vkGetPhysicalDeviceProperties2(devices[i], &prop[i]);
        av_log(ctx, AV_LOG_VERBOSE, "    %d: %s (%s) (0x%x)\n", i,
               prop[i].properties.deviceName,
               vk_dev_type(prop[i].properties.deviceType),
               prop[i].properties.deviceID);
    }

    if (select->has_uuid) {
        for (int i = 0; i < num; i++) {
            if (!strncmp(idp[i].deviceUUID, select->uuid, VK_UUID_SIZE)) {
                choice = i;
                goto end;
            }
        }
        av_log(ctx, AV_LOG_ERROR, "Unable to find device by given UUID!\n");
        err = AVERROR(ENODEV);
        goto end;
    } else if (select->name) {
        av_log(ctx, AV_LOG_VERBOSE, "Requested device: %s\n", select->name);
        for (int i = 0; i < num; i++) {
            if (strstr(prop[i].properties.deviceName, select->name)) {
                choice = i;
                goto end;
            }
        }
        av_log(ctx, AV_LOG_ERROR, "Unable to find device \"%s\"!\n",
               select->name);
        err = AVERROR(ENODEV);
        goto end;
    } else if (select->pci_device) {
        av_log(ctx, AV_LOG_VERBOSE, "Requested device: 0x%x\n", select->pci_device);
        for (int i = 0; i < num; i++) {
            if (select->pci_device == prop[i].properties.deviceID) {
                choice = i;
                goto end;
            }
        }
        av_log(ctx, AV_LOG_ERROR, "Unable to find device with PCI ID 0x%x!\n",
               select->pci_device);
        err = AVERROR(EINVAL);
        goto end;
    } else if (select->vendor_id) {
        av_log(ctx, AV_LOG_VERBOSE, "Requested vendor: 0x%x\n", select->vendor_id);
        for (int i = 0; i < num; i++) {
            if (select->vendor_id == prop[i].properties.vendorID) {
                choice = i;
                goto end;
            }
        }
        av_log(ctx, AV_LOG_ERROR, "Unable to find device with Vendor ID 0x%x!\n",
               select->vendor_id);
        err = AVERROR(ENODEV);
        goto end;
    } else {
        if (select->index < num) {
            choice = select->index;
            goto end;
        }
        av_log(ctx, AV_LOG_ERROR, "Unable to find device with index %i!\n",
               select->index);
        err = AVERROR(ENODEV);
        goto end;
    }

end:
    if (choice > -1) {
        p->dev_is_nvidia = (prop[choice].properties.vendorID == 0x10de);
        hwctx->phys_dev = devices[choice];
    }
    av_free(devices);
    av_free(prop);
    av_free(idp);

    return err;
}

static int search_queue_families(AVHWDeviceContext *ctx, VkDeviceCreateInfo *cd)
{
    uint32_t num;
    float *weights;
    VkQueueFamilyProperties *qs = NULL;
    AVVulkanDeviceContext *hwctx = ctx->hwctx;
    int graph_index = -1, comp_index = -1, tx_index = -1;
    VkDeviceQueueCreateInfo *pc = (VkDeviceQueueCreateInfo *)cd->pQueueCreateInfos;

    /* First get the number of queue families */
    vkGetPhysicalDeviceQueueFamilyProperties(hwctx->phys_dev, &num, NULL);
    if (!num) {
        av_log(ctx, AV_LOG_ERROR, "Failed to get queues!\n");
        return AVERROR_EXTERNAL;
    }

    /* Then allocate memory */
    qs = av_malloc_array(num, sizeof(VkQueueFamilyProperties));
    if (!qs)
        return AVERROR(ENOMEM);

    /* Finally retrieve the queue families */
    vkGetPhysicalDeviceQueueFamilyProperties(hwctx->phys_dev, &num, qs);

#define SEARCH_FLAGS(expr, out)                                                \
    for (int i = 0; i < num; i++) {                                            \
        const VkQueueFlagBits flags = qs[i].queueFlags;                        \
        if (expr) {                                                            \
            out = i;                                                           \
            break;                                                             \
        }                                                                      \
    }

    SEARCH_FLAGS(flags & VK_QUEUE_GRAPHICS_BIT, graph_index)

    SEARCH_FLAGS((flags & VK_QUEUE_COMPUTE_BIT) && (i != graph_index),
                 comp_index)

    SEARCH_FLAGS((flags & VK_QUEUE_TRANSFER_BIT) && (i != graph_index) &&
                 (i != comp_index), tx_index)

#undef SEARCH_FLAGS
#define ADD_QUEUE(fidx, graph, comp, tx)                                                 \
    av_log(ctx, AV_LOG_VERBOSE, "Using queue family %i (total queues: %i) for %s%s%s\n", \
           fidx, qs[fidx].queueCount, graph ? "graphics " : "",                          \
           comp ? "compute " : "", tx ? "transfers " : "");                              \
    av_log(ctx, AV_LOG_VERBOSE, "    QF %i flags: %s%s%s%s\n", fidx,                     \
           ((qs[fidx].queueFlags) & VK_QUEUE_GRAPHICS_BIT) ? "(graphics) " : "",         \
           ((qs[fidx].queueFlags) & VK_QUEUE_COMPUTE_BIT) ? "(compute) " : "",           \
           ((qs[fidx].queueFlags) & VK_QUEUE_TRANSFER_BIT) ? "(transfers) " : "",        \
           ((qs[fidx].queueFlags) & VK_QUEUE_SPARSE_BINDING_BIT) ? "(sparse) " : "");    \
    pc[cd->queueCreateInfoCount].queueFamilyIndex = fidx;                                \
    pc[cd->queueCreateInfoCount].queueCount = qs[fidx].queueCount;                       \
    weights = av_malloc(qs[fidx].queueCount * sizeof(float));                            \
    pc[cd->queueCreateInfoCount].pQueuePriorities = weights;                             \
    if (!weights)                                                                        \
        goto fail;                                                                       \
    for (int i = 0; i < qs[fidx].queueCount; i++)                                        \
        weights[i] = 1.0f;                                                               \
    cd->queueCreateInfoCount++;

    ADD_QUEUE(graph_index, 1, comp_index < 0, tx_index < 0 && comp_index < 0)
    hwctx->queue_family_index      = graph_index;
    hwctx->queue_family_comp_index = graph_index;
    hwctx->queue_family_tx_index   = graph_index;
    hwctx->nb_graphics_queues      = qs[graph_index].queueCount;

    if (comp_index != -1) {
        ADD_QUEUE(comp_index, 0, 1, tx_index < 0)
        hwctx->queue_family_tx_index   = comp_index;
        hwctx->queue_family_comp_index = comp_index;
        hwctx->nb_comp_queues          = qs[comp_index].queueCount;
    }

    if (tx_index != -1) {
        ADD_QUEUE(tx_index, 0, 0, 1)
        hwctx->queue_family_tx_index = tx_index;
        hwctx->nb_tx_queues          = qs[tx_index].queueCount;
    }

#undef ADD_QUEUE
    av_free(qs);

    return 0;

fail:
    av_freep(&pc[0].pQueuePriorities);
    av_freep(&pc[1].pQueuePriorities);
    av_freep(&pc[2].pQueuePriorities);
    av_free(qs);

    return AVERROR(ENOMEM);
}

static int create_exec_ctx(AVHWDeviceContext *ctx, VulkanExecCtx *cmd,
                           int queue_family_index, int num_queues)
{
    VkResult ret;
    AVVulkanDeviceContext *hwctx = ctx->hwctx;

    VkCommandPoolCreateInfo cqueue_create = {
        .sType              = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,
        .flags              = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT,
        .queueFamilyIndex   = queue_family_index,
    };
    VkCommandBufferAllocateInfo cbuf_create = {
        .sType              = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,
        .level              = VK_COMMAND_BUFFER_LEVEL_PRIMARY,
        .commandBufferCount = num_queues,
    };

    cmd->nb_queues = num_queues;

    cmd->queues = av_mallocz(num_queues * sizeof(*cmd->queues));
    if (!cmd->queues)
        return AVERROR(ENOMEM);

    cmd->bufs = av_mallocz(num_queues * sizeof(*cmd->bufs));
    if (!cmd->bufs)
        return AVERROR(ENOMEM);

    /* Create command pool */
    ret = vkCreateCommandPool(hwctx->act_dev, &cqueue_create,
                              hwctx->alloc, &cmd->pool);
    if (ret != VK_SUCCESS) {
        av_log(ctx, AV_LOG_ERROR, "Command pool creation failure: %s\n",
               vk_ret2str(ret));
        return AVERROR_EXTERNAL;
    }

    cbuf_create.commandPool = cmd->pool;

    /* Allocate command buffer */
    ret = vkAllocateCommandBuffers(hwctx->act_dev, &cbuf_create, cmd->bufs);
    if (ret != VK_SUCCESS) {
        av_log(ctx, AV_LOG_ERROR, "Command buffer alloc failure: %s\n",
               vk_ret2str(ret));
        return AVERROR_EXTERNAL;
    }

    for (int i = 0; i < num_queues; i++) {
        VulkanQueueCtx *q = &cmd->queues[i];
        vkGetDeviceQueue(hwctx->act_dev, queue_family_index, i, &q->queue);
        q->was_synchronous = 1;
    }

    return 0;
}

static void free_exec_ctx(AVHWDeviceContext *ctx, VulkanExecCtx *cmd)
{
    AVVulkanDeviceContext *hwctx = ctx->hwctx;

    /* Make sure all queues have finished executing */
    for (int i = 0; i < cmd->nb_queues; i++) {
        VulkanQueueCtx *q = &cmd->queues[i];

        if (q->fence && !q->was_synchronous) {
            vkWaitForFences(hwctx->act_dev, 1, &q->fence, VK_TRUE, UINT64_MAX);
            vkResetFences(hwctx->act_dev, 1, &q->fence);
        }

        /* Free the fence */
        if (q->fence)
            vkDestroyFence(hwctx->act_dev, q->fence, hwctx->alloc);

        /* Free buffer dependencies */
        for (int j = 0; j < q->nb_buf_deps; j++)
            av_buffer_unref(&q->buf_deps[j]);
        av_free(q->buf_deps);
    }

    if (cmd->bufs)
        vkFreeCommandBuffers(hwctx->act_dev, cmd->pool, cmd->nb_queues, cmd->bufs);
    if (cmd->pool)
        vkDestroyCommandPool(hwctx->act_dev, cmd->pool, hwctx->alloc);

    av_freep(&cmd->bufs);
    av_freep(&cmd->queues);
}

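/* Execution context helpers: wait_start_exec_ctx() waits on the current
 * queue's fence if the previous submission was asynchronous, resets it and
 * begins the command buffer; submit_exec_ctx() ends and submits the buffer,
 * then either blocks on the fence (synchronous) or rotates to the next queue,
 * while add_buf_dep_exec_ctx() registers the buffers that must stay alive
 * until that queue is next waited upon. */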
static VkCommandBuffer get_buf_exec_ctx(AVHWDeviceContext *ctx, VulkanExecCtx *cmd)
{
    return cmd->bufs[cmd->cur_queue_idx];
}

static void unref_exec_ctx_deps(AVHWDeviceContext *ctx, VulkanExecCtx *cmd)
{
    VulkanQueueCtx *q = &cmd->queues[cmd->cur_queue_idx];

    for (int j = 0; j < q->nb_buf_deps; j++)
        av_buffer_unref(&q->buf_deps[j]);
    q->nb_buf_deps = 0;
}

static int wait_start_exec_ctx(AVHWDeviceContext *ctx, VulkanExecCtx *cmd)
{
    VkResult ret;
    AVVulkanDeviceContext *hwctx = ctx->hwctx;
    VulkanQueueCtx *q = &cmd->queues[cmd->cur_queue_idx];

    VkCommandBufferBeginInfo cmd_start = {
        .sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
        .flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT,
    };

    /* Create the fence and don't wait for it initially */
    if (!q->fence) {
        VkFenceCreateInfo fence_spawn = {
            .sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO,
        };
        ret = vkCreateFence(hwctx->act_dev, &fence_spawn, hwctx->alloc,
                            &q->fence);
        if (ret != VK_SUCCESS) {
            av_log(ctx, AV_LOG_ERROR, "Failed to create frame fence: %s\n",
                   vk_ret2str(ret));
            return AVERROR_EXTERNAL;
        }
    } else if (!q->was_synchronous) {
        vkWaitForFences(hwctx->act_dev, 1, &q->fence, VK_TRUE, UINT64_MAX);
        vkResetFences(hwctx->act_dev, 1, &q->fence);
    }

    /* Discard queue dependencies */
    unref_exec_ctx_deps(ctx, cmd);

    ret = vkBeginCommandBuffer(cmd->bufs[cmd->cur_queue_idx], &cmd_start);
    if (ret != VK_SUCCESS) {
        av_log(ctx, AV_LOG_ERROR, "Unable to init command buffer: %s\n",
               vk_ret2str(ret));
        return AVERROR_EXTERNAL;
    }

    return 0;
}

static int add_buf_dep_exec_ctx(AVHWDeviceContext *ctx, VulkanExecCtx *cmd,
                                AVBufferRef * const *deps, int nb_deps)
{
    AVBufferRef **dst;
    VulkanQueueCtx *q = &cmd->queues[cmd->cur_queue_idx];

    if (!deps || !nb_deps)
        return 0;

    dst = av_fast_realloc(q->buf_deps, &q->buf_deps_alloc_size,
                          (q->nb_buf_deps + nb_deps) * sizeof(*dst));
    if (!dst)
        goto err;

    q->buf_deps = dst;

    for (int i = 0; i < nb_deps; i++) {
        q->buf_deps[q->nb_buf_deps] = av_buffer_ref(deps[i]);
        if (!q->buf_deps[q->nb_buf_deps])
            goto err;
        q->nb_buf_deps++;
    }

    return 0;

err:
    unref_exec_ctx_deps(ctx, cmd);
    return AVERROR(ENOMEM);
}

static int submit_exec_ctx(AVHWDeviceContext *ctx, VulkanExecCtx *cmd,
                           VkSubmitInfo *s_info, int synchronous)
{
    VkResult ret;
    VulkanQueueCtx *q = &cmd->queues[cmd->cur_queue_idx];

    ret = vkEndCommandBuffer(cmd->bufs[cmd->cur_queue_idx]);
    if (ret != VK_SUCCESS) {
        av_log(ctx, AV_LOG_ERROR, "Unable to finish command buffer: %s\n",
               vk_ret2str(ret));
        unref_exec_ctx_deps(ctx, cmd);
        return AVERROR_EXTERNAL;
    }

    s_info->pCommandBuffers = &cmd->bufs[cmd->cur_queue_idx];
    s_info->commandBufferCount = 1;

    ret = vkQueueSubmit(q->queue, 1, s_info, q->fence);
    if (ret != VK_SUCCESS) {
        unref_exec_ctx_deps(ctx, cmd);
        return AVERROR_EXTERNAL;
    }

    q->was_synchronous = synchronous;

    if (synchronous) {
        AVVulkanDeviceContext *hwctx = ctx->hwctx;
        vkWaitForFences(hwctx->act_dev, 1, &q->fence, VK_TRUE, UINT64_MAX);
        vkResetFences(hwctx->act_dev, 1, &q->fence);
        unref_exec_ctx_deps(ctx, cmd);
    } else { /* Rotate queues */
        cmd->cur_queue_idx = (cmd->cur_queue_idx + 1) % cmd->nb_queues;
    }

    return 0;
}

static void vulkan_device_free(AVHWDeviceContext *ctx)
{
    VulkanDevicePriv *p = ctx->internal->priv;
    AVVulkanDeviceContext *hwctx = ctx->hwctx;

    free_exec_ctx(ctx, &p->upload_ctx);
    free_exec_ctx(ctx, &p->download_ctx);

    vkDestroyDevice(hwctx->act_dev, hwctx->alloc);

    if (p->debug_ctx) {
        VK_LOAD_PFN(hwctx->inst, vkDestroyDebugUtilsMessengerEXT);
        pfn_vkDestroyDebugUtilsMessengerEXT(hwctx->inst, p->debug_ctx,
                                            hwctx->alloc);
    }

    vkDestroyInstance(hwctx->inst, hwctx->alloc);

    for (int i = 0; i < hwctx->nb_enabled_inst_extensions; i++)
        av_free((void *)hwctx->enabled_inst_extensions[i]);
    av_free((void *)hwctx->enabled_inst_extensions);

    for (int i = 0; i < hwctx->nb_enabled_dev_extensions; i++)
        av_free((void *)hwctx->enabled_dev_extensions[i]);
    av_free((void *)hwctx->enabled_dev_extensions);
}

static int vulkan_device_create_internal(AVHWDeviceContext *ctx,
                                         VulkanDeviceSelection *dev_select,
                                         AVDictionary *opts, int flags)
{
    int err = 0;
    VkResult ret;
    AVDictionaryEntry *opt_d;
    VulkanDevicePriv *p = ctx->internal->priv;
    AVVulkanDeviceContext *hwctx = ctx->hwctx;
    VkPhysicalDeviceFeatures dev_features = { 0 };
    VkDeviceQueueCreateInfo queue_create_info[3] = {
        { .sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO, },
        { .sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO, },
        { .sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO, },
    };

    VkDeviceCreateInfo dev_info = {
        .sType                = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO,
        .pNext                = &hwctx->device_features,
        .pQueueCreateInfos    = queue_create_info,
        .queueCreateInfoCount = 0,
    };

    hwctx->device_features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
    ctx->free = vulkan_device_free;

    /* Create an instance if not given one */
    if ((err = create_instance(ctx, opts)))
        goto end;

    /* Find a device (if not given one) */
    if ((err = find_device(ctx, dev_select)))
        goto end;

    vkGetPhysicalDeviceProperties(hwctx->phys_dev, &p->props);
    av_log(ctx, AV_LOG_VERBOSE, "Using device: %s\n", p->props.deviceName);
    av_log(ctx, AV_LOG_VERBOSE, "Alignments:\n");
    av_log(ctx, AV_LOG_VERBOSE, "    optimalBufferCopyOffsetAlignment:   %li\n",
           p->props.limits.optimalBufferCopyOffsetAlignment);
    av_log(ctx, AV_LOG_VERBOSE, "    optimalBufferCopyRowPitchAlignment: %li\n",
           p->props.limits.optimalBufferCopyRowPitchAlignment);
    av_log(ctx, AV_LOG_VERBOSE, "    minMemoryMapAlignment:              %li\n",
           p->props.limits.minMemoryMapAlignment);

    vkGetPhysicalDeviceFeatures(hwctx->phys_dev, &dev_features);
#define COPY_FEATURE(DST, NAME) (DST).features.NAME = dev_features.NAME;
    COPY_FEATURE(hwctx->device_features, shaderImageGatherExtended)
    COPY_FEATURE(hwctx->device_features, fragmentStoresAndAtomics)
    COPY_FEATURE(hwctx->device_features, vertexPipelineStoresAndAtomics)
    COPY_FEATURE(hwctx->device_features, shaderInt64)
#undef COPY_FEATURE

    /* Search queue family */
    if ((err = search_queue_families(ctx, &dev_info)))
        goto end;

    if ((err = check_extensions(ctx, 1, opts, &dev_info.ppEnabledExtensionNames,
                                &dev_info.enabledExtensionCount, 0))) {
        av_free((void *)queue_create_info[0].pQueuePriorities);
        av_free((void *)queue_create_info[1].pQueuePriorities);
        av_free((void *)queue_create_info[2].pQueuePriorities);
        goto end;
    }

    ret = vkCreateDevice(hwctx->phys_dev, &dev_info, hwctx->alloc,
                         &hwctx->act_dev);

    av_free((void *)queue_create_info[0].pQueuePriorities);
    av_free((void *)queue_create_info[1].pQueuePriorities);
    av_free((void *)queue_create_info[2].pQueuePriorities);

    if (ret != VK_SUCCESS) {
        av_log(ctx, AV_LOG_ERROR, "Device creation failure: %s\n",
               vk_ret2str(ret));
        for (int i = 0; i < dev_info.enabledExtensionCount; i++)
            av_free((void *)dev_info.ppEnabledExtensionNames[i]);
        av_free((void *)dev_info.ppEnabledExtensionNames);
        err = AVERROR_EXTERNAL;
        goto end;
    }

    /* Tiled (optimal-layout) images are used by default; the "linear_images"
     * option switches allocation to linearly-tiled images */
    opt_d = av_dict_get(opts, "linear_images", NULL, 0);
    if (opt_d)
        p->use_linear_images = strtol(opt_d->value, NULL, 10);

    hwctx->enabled_dev_extensions = dev_info.ppEnabledExtensionNames;
    hwctx->nb_enabled_dev_extensions = dev_info.enabledExtensionCount;

end:
    return err;
}

static int vulkan_device_init(AVHWDeviceContext *ctx)
{
    int err;
    uint32_t queue_num;
    AVVulkanDeviceContext *hwctx = ctx->hwctx;
    VulkanDevicePriv *p = ctx->internal->priv;

    /* Set device extension flags */
    for (int i = 0; i < hwctx->nb_enabled_dev_extensions; i++) {
        for (int j = 0; j < FF_ARRAY_ELEMS(optional_device_exts); j++) {
            if (!strcmp(hwctx->enabled_dev_extensions[i],
                        optional_device_exts[j].name)) {
                p->extensions |= optional_device_exts[j].flag;
                break;
            }
        }
    }

    vkGetPhysicalDeviceQueueFamilyProperties(hwctx->phys_dev, &queue_num, NULL);
    if (!queue_num) {
        av_log(ctx, AV_LOG_ERROR, "Failed to get queues!\n");
        return AVERROR_EXTERNAL;
    }

#define CHECK_QUEUE(type, n)                                                   \
if (n >= queue_num) {                                                          \
    av_log(ctx, AV_LOG_ERROR,                                                  \
           "Invalid %s queue index %i (device has %i queue families)!\n",      \
           type, n, queue_num);                                                \
    return AVERROR(EINVAL);                                                    \
}

    CHECK_QUEUE("graphics", hwctx->queue_family_index)
    CHECK_QUEUE("upload",   hwctx->queue_family_tx_index)
    CHECK_QUEUE("compute",  hwctx->queue_family_comp_index)

#undef CHECK_QUEUE

    p->qfs[p->num_qfs++] = hwctx->queue_family_index;
    if ((hwctx->queue_family_tx_index != hwctx->queue_family_index) &&
        (hwctx->queue_family_tx_index != hwctx->queue_family_comp_index))
        p->qfs[p->num_qfs++] = hwctx->queue_family_tx_index;
    if ((hwctx->queue_family_comp_index != hwctx->queue_family_index) &&
        (hwctx->queue_family_comp_index != hwctx->queue_family_tx_index))
        p->qfs[p->num_qfs++] = hwctx->queue_family_comp_index;

    /* Create exec contexts - if there's something invalid this will error out */
    err = create_exec_ctx(ctx, &p->upload_ctx, hwctx->queue_family_tx_index,
                          GET_QUEUE_COUNT(hwctx, 0, 0, 1));
    if (err)
        return err;

    err = create_exec_ctx(ctx, &p->download_ctx, hwctx->queue_family_tx_index,
                          GET_QUEUE_COUNT(hwctx, 0, 0, 1));
    if (err)
        return err;

    /* Get device capabilities */
    vkGetPhysicalDeviceMemoryProperties(hwctx->phys_dev, &p->mprops);

    return 0;
}

static int vulkan_device_create(AVHWDeviceContext *ctx, const char *device,
                                AVDictionary *opts, int flags)
{
    VulkanDeviceSelection dev_select = { 0 };
    if (device && device[0]) {
        char *end = NULL;
        dev_select.index = strtol(device, &end, 10);
        if (end == device) {
            dev_select.index = 0;
            dev_select.name  = device;
        }
    }

    return vulkan_device_create_internal(ctx, &dev_select, opts, flags);
}

static int vulkan_device_derive(AVHWDeviceContext *ctx,
                                AVHWDeviceContext *src_ctx,
                                AVDictionary *opts, int flags)
{
    av_unused VulkanDeviceSelection dev_select = { 0 };

    /* If there's only one device on the system, then even if it's not covered
     * by the following checks (e.g. non-PCIe ARM GPU), having an empty
     * dev_select will mean it'll get picked. */
    switch (src_ctx->type) {
#if CONFIG_LIBDRM
#if CONFIG_VAAPI
    case AV_HWDEVICE_TYPE_VAAPI: {
        AVVAAPIDeviceContext *src_hwctx = src_ctx->hwctx;

        const char *vendor = vaQueryVendorString(src_hwctx->display);
        if (!vendor) {
            av_log(ctx, AV_LOG_ERROR, "Unable to get device info from VAAPI!\n");
            return AVERROR_EXTERNAL;
        }

        if (strstr(vendor, "Intel"))
            dev_select.vendor_id = 0x8086;
        if (strstr(vendor, "AMD"))
            dev_select.vendor_id = 0x1002;

        return vulkan_device_create_internal(ctx, &dev_select, opts, flags);
    }
#endif
    case AV_HWDEVICE_TYPE_DRM: {
        AVDRMDeviceContext *src_hwctx = src_ctx->hwctx;

        drmDevice *drm_dev_info;
        int err = drmGetDevice(src_hwctx->fd, &drm_dev_info);
        if (err) {
            av_log(ctx, AV_LOG_ERROR, "Unable to get device info from DRM fd!\n");
            return AVERROR_EXTERNAL;
        }

        if (drm_dev_info->bustype == DRM_BUS_PCI)
            dev_select.pci_device = drm_dev_info->deviceinfo.pci->device_id;

        drmFreeDevice(&drm_dev_info);

        return vulkan_device_create_internal(ctx, &dev_select, opts, flags);
    }
#endif
#if CONFIG_CUDA
    case AV_HWDEVICE_TYPE_CUDA: {
        AVHWDeviceContext *cuda_cu = src_ctx;
        AVCUDADeviceContext *src_hwctx = src_ctx->hwctx;
        AVCUDADeviceContextInternal *cu_internal = src_hwctx->internal;
        CudaFunctions *cu = cu_internal->cuda_dl;

        int ret = CHECK_CU(cu->cuDeviceGetUuid((CUuuid *)&dev_select.uuid,
                                               cu_internal->cuda_device));
        if (ret < 0) {
            av_log(ctx, AV_LOG_ERROR, "Unable to get UUID from CUDA!\n");
            return AVERROR_EXTERNAL;
        }

        dev_select.has_uuid = 1;

        return vulkan_device_create_internal(ctx, &dev_select, opts, flags);
    }
#endif
    default:
        return AVERROR(ENOSYS);
    }
}

static int vulkan_frames_get_constraints(AVHWDeviceContext *ctx,
                                         const void *hwconfig,
                                         AVHWFramesConstraints *constraints)
{
    int count = 0;
    AVVulkanDeviceContext *hwctx = ctx->hwctx;
    VulkanDevicePriv *p = ctx->internal->priv;

    for (enum AVPixelFormat i = 0; i < AV_PIX_FMT_NB; i++)
        count += pixfmt_is_supported(hwctx, i, p->use_linear_images);

#if CONFIG_CUDA
    if (p->dev_is_nvidia)
        count++;
#endif

    constraints->valid_sw_formats = av_malloc_array(count + 1,
                                                    sizeof(enum AVPixelFormat));
    if (!constraints->valid_sw_formats)
        return AVERROR(ENOMEM);

    count = 0;
    for (enum AVPixelFormat i = 0; i < AV_PIX_FMT_NB; i++)
        if (pixfmt_is_supported(hwctx, i, p->use_linear_images))
            constraints->valid_sw_formats[count++] = i;

#if CONFIG_CUDA
    if (p->dev_is_nvidia)
        constraints->valid_sw_formats[count++] = AV_PIX_FMT_CUDA;
#endif
    constraints->valid_sw_formats[count++] = AV_PIX_FMT_NONE;

    constraints->min_width  = 0;
    constraints->min_height = 0;
    constraints->max_width  = p->props.limits.maxImageDimension2D;
    constraints->max_height = p->props.limits.maxImageDimension2D;

    constraints->valid_hw_formats = av_malloc_array(2, sizeof(enum AVPixelFormat));
    if (!constraints->valid_hw_formats)
        return AVERROR(ENOMEM);

    constraints->valid_hw_formats[0] = AV_PIX_FMT_VULKAN;
    constraints->valid_hw_formats[1] = AV_PIX_FMT_NONE;

    return 0;
}

static int alloc_mem(AVHWDeviceContext *ctx, VkMemoryRequirements *req,
                     VkMemoryPropertyFlagBits req_flags, void *alloc_extension,
                     VkMemoryPropertyFlagBits *mem_flags, VkDeviceMemory *mem)
{
    VkResult ret;
    int index = -1;
    VulkanDevicePriv *p = ctx->internal->priv;
    AVVulkanDeviceContext *dev_hwctx = ctx->hwctx;
    VkMemoryAllocateInfo alloc_info = {
        .sType           = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
        .pNext           = alloc_extension,
    };

    /* Align if we need to */
    if (req_flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT)
        req->size = FFALIGN(req->size, p->props.limits.minMemoryMapAlignment);

    alloc_info.allocationSize = req->size;

    /* The Vulkan spec requires memory types to be sorted in the "optimal"
     * order, so the first matching type we find will be the best/fastest one */
1274     for (int i = 0; i < p->mprops.memoryTypeCount; i++) {
1275         /* The memory type must be supported by the requirements (bitfield) */
1276         if (!(req->memoryTypeBits & (1 << i)))
1277             continue;
1278
1279         /* The memory type flags must include our properties */
1280         if ((p->mprops.memoryTypes[i].propertyFlags & req_flags) != req_flags)
1281             continue;
1282
1283         /* Found a suitable memory type */
1284         index = i;
1285         break;
1286     }
1287
1288     if (index < 0) {
1289         av_log(ctx, AV_LOG_ERROR, "No memory type found for flags 0x%x\n",
1290                req_flags);
1291         return AVERROR(EINVAL);
1292     }
1293
1294     alloc_info.memoryTypeIndex = index;
1295
1296     ret = vkAllocateMemory(dev_hwctx->act_dev, &alloc_info,
1297                            dev_hwctx->alloc, mem);
1298     if (ret != VK_SUCCESS) {
1299         av_log(ctx, AV_LOG_ERROR, "Failed to allocate memory: %s\n",
1300                vk_ret2str(ret));
1301         return AVERROR(ENOMEM);
1302     }
1303
1304     *mem_flags |= p->mprops.memoryTypes[index].propertyFlags;
1305
1306     return 0;
1307 }
1308
1309 static void vulkan_free_internal(AVVkFrameInternal *internal)
1310 {
1311     if (!internal)
1312         return;
1313
1314 #if CONFIG_CUDA
1315     if (internal->cuda_fc_ref) {
1316         AVHWFramesContext *cuda_fc = (AVHWFramesContext *)internal->cuda_fc_ref->data;
1317         int planes = av_pix_fmt_count_planes(cuda_fc->sw_format);
1318         AVHWDeviceContext *cuda_cu = cuda_fc->device_ctx;
1319         AVCUDADeviceContext *cuda_dev = cuda_cu->hwctx;
1320         AVCUDADeviceContextInternal *cu_internal = cuda_dev->internal;
1321         CudaFunctions *cu = cu_internal->cuda_dl;
1322
1323         for (int i = 0; i < planes; i++) {
1324             if (internal->cu_sem[i])
1325                 CHECK_CU(cu->cuDestroyExternalSemaphore(internal->cu_sem[i]));
1326             if (internal->cu_mma[i])
1327                 CHECK_CU(cu->cuMipmappedArrayDestroy(internal->cu_mma[i]));
1328             if (internal->ext_mem[i])
1329                 CHECK_CU(cu->cuDestroyExternalMemory(internal->ext_mem[i]));
1330         }
1331
1332         av_buffer_unref(&internal->cuda_fc_ref);
1333     }
1334 #endif
1335
1336     av_free(internal);
1337 }
1338
1339 static void vulkan_frame_free(void *opaque, uint8_t *data)
1340 {
1341     AVVkFrame *f = (AVVkFrame *)data;
1342     AVHWFramesContext *hwfc = opaque;
1343     AVVulkanDeviceContext *hwctx = hwfc->device_ctx->hwctx;
1344     int planes = av_pix_fmt_count_planes(hwfc->sw_format);
1345
1346     vulkan_free_internal(f->internal);
1347
1348     for (int i = 0; i < planes; i++) {
1349         vkDestroyImage(hwctx->act_dev, f->img[i], hwctx->alloc);
1350         vkFreeMemory(hwctx->act_dev, f->mem[i], hwctx->alloc);
1351         vkDestroySemaphore(hwctx->act_dev, f->sem[i], hwctx->alloc);
1352     }
1353
1354     av_free(f);
1355 }
1356
1357 static int alloc_bind_mem(AVHWFramesContext *hwfc, AVVkFrame *f,
1358                           void *alloc_pnext, size_t alloc_pnext_stride)
1359 {
1360     int err;
1361     VkResult ret;
1362     AVHWDeviceContext *ctx = hwfc->device_ctx;
1363     const int planes = av_pix_fmt_count_planes(hwfc->sw_format);
1364     VkBindImageMemoryInfo bind_info[AV_NUM_DATA_POINTERS] = { { 0 } };
1365
1366     AVVulkanDeviceContext *hwctx = ctx->hwctx;
1367
1368     for (int i = 0; i < planes; i++) {
1369         int use_ded_mem;
1370         VkImageMemoryRequirementsInfo2 req_desc = {
1371             .sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2,
1372             .image = f->img[i],
1373         };
1374         VkMemoryDedicatedAllocateInfo ded_alloc = {
1375             .sType = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO,
1376             .pNext = (void *)(((uint8_t *)alloc_pnext) + i*alloc_pnext_stride),
1377         };
1378         VkMemoryDedicatedRequirements ded_req = {
1379             .sType = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS,
1380         };
1381         VkMemoryRequirements2 req = {
1382             .sType = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2,
1383             .pNext = &ded_req,
1384         };
1385
1386         vkGetImageMemoryRequirements2(hwctx->act_dev, &req_desc, &req);
1387
1388         /* In case the implementation prefers/requires dedicated allocation */
1389         use_ded_mem = ded_req.prefersDedicatedAllocation |
1390                       ded_req.requiresDedicatedAllocation;
1391         if (use_ded_mem)
1392             ded_alloc.image = f->img[i];
1393
1394         /* Allocate memory */
1395         if ((err = alloc_mem(ctx, &req.memoryRequirements,
1396                              f->tiling == VK_IMAGE_TILING_LINEAR ?
1397                              VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT :
1398                              VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT,
1399                              use_ded_mem ? &ded_alloc : (void *)ded_alloc.pNext,
1400                              &f->flags, &f->mem[i])))
1401             return err;
1402
1403         f->size[i] = req.memoryRequirements.size;
1404         bind_info[i].sType  = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO;
1405         bind_info[i].image  = f->img[i];
1406         bind_info[i].memory = f->mem[i];
1407     }
1408
1409     /* Bind the allocated memory to the images */
1410     ret = vkBindImageMemory2(hwctx->act_dev, planes, bind_info);
1411     if (ret != VK_SUCCESS) {
1412         av_log(ctx, AV_LOG_ERROR, "Failed to bind memory: %s\n",
1413                vk_ret2str(ret));
1414         return AVERROR_EXTERNAL;
1415     }
1416
1417     return 0;
1418 }
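
     /* alloc_pnext is read as an array of per-plane pNext chains, stepped with
      * the caller-supplied stride; plane i's chain starts at
      * (uint8_t *)alloc_pnext + i * alloc_pnext_stride. A caller exporting every
      * plane's memory does (a minimal sketch, mirroring vulkan_pool_alloc()
      * further down):
      *
      *     VkExportMemoryAllocateInfo eminfo[AV_NUM_DATA_POINTERS];
      *     for (int i = 0; i < planes; i++) {
      *         eminfo[i].sType       = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO;
      *         eminfo[i].pNext       = NULL;
      *         eminfo[i].handleTypes = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT;
      *     }
      *     err = alloc_bind_mem(hwfc, f, eminfo, sizeof(*eminfo));
      */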
1419
1420 enum PrepMode {
1421     PREP_MODE_WRITE,
1422     PREP_MODE_RO_SHADER,
1423     PREP_MODE_EXTERNAL_EXPORT,
1424 };
1425
1426 static int prepare_frame(AVHWFramesContext *hwfc, VulkanExecCtx *ectx,
1427                          AVVkFrame *frame, enum PrepMode pmode)
1428 {
1429     int err;
1430     uint32_t dst_qf;
1431     VkImageLayout new_layout;
1432     VkAccessFlags new_access;
1433     AVHWDeviceContext *ctx = hwfc->device_ctx;
1434     const int planes = av_pix_fmt_count_planes(hwfc->sw_format);
1435
1436     VkImageMemoryBarrier img_bar[AV_NUM_DATA_POINTERS] = { 0 };
1437
1438     VkSubmitInfo s_info = {
1439         .sType                = VK_STRUCTURE_TYPE_SUBMIT_INFO,
1440         .pSignalSemaphores    = frame->sem,
1441         .signalSemaphoreCount = planes,
1442     };
1443
1444     VkPipelineStageFlagBits wait_st[AV_NUM_DATA_POINTERS];
1445     for (int i = 0; i < planes; i++)
1446         wait_st[i] = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
1447
1448     switch (pmode) {
1449     case PREP_MODE_WRITE:
1450         new_layout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
1451         new_access = VK_ACCESS_TRANSFER_WRITE_BIT;
1452         dst_qf     = VK_QUEUE_FAMILY_IGNORED;
1453         break;
1454     case PREP_MODE_RO_SHADER:
1455         new_layout = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL;
1456         new_access = VK_ACCESS_TRANSFER_READ_BIT;
1457         dst_qf     = VK_QUEUE_FAMILY_IGNORED;
1458         break;
1459     case PREP_MODE_EXTERNAL_EXPORT:
1460         new_layout = VK_IMAGE_LAYOUT_GENERAL;
1461         new_access = VK_ACCESS_MEMORY_READ_BIT | VK_ACCESS_MEMORY_WRITE_BIT;
1462         dst_qf     = VK_QUEUE_FAMILY_EXTERNAL_KHR;
1463         s_info.pWaitSemaphores = frame->sem;
1464         s_info.pWaitDstStageMask = wait_st;
1465         s_info.waitSemaphoreCount = planes;
1466         break;
1467     }
1468
1469     if ((err = wait_start_exec_ctx(ctx, ectx)))
1470         return err;
1471
1472     /* Change the image layout to something more optimal for writes.
1473      * This also signals the newly created semaphore, making it usable
1474      * for synchronization */
1475     for (int i = 0; i < planes; i++) {
1476         img_bar[i].sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
1477         img_bar[i].srcAccessMask = 0x0;
1478         img_bar[i].dstAccessMask = new_access;
1479         img_bar[i].oldLayout = frame->layout[i];
1480         img_bar[i].newLayout = new_layout;
1481         img_bar[i].srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
1482         img_bar[i].dstQueueFamilyIndex = dst_qf;
1483         img_bar[i].image = frame->img[i];
1484         img_bar[i].subresourceRange.levelCount = 1;
1485         img_bar[i].subresourceRange.layerCount = 1;
1486         img_bar[i].subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
1487
1488         frame->layout[i] = img_bar[i].newLayout;
1489         frame->access[i] = img_bar[i].dstAccessMask;
1490     }
1491
1492     vkCmdPipelineBarrier(get_buf_exec_ctx(ctx, ectx),
1493                          VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
1494                          VK_PIPELINE_STAGE_TRANSFER_BIT,
1495                          0, 0, NULL, 0, NULL, planes, img_bar);
1496
1497     return submit_exec_ctx(ctx, ectx, &s_info, 0);
1498 }
1499
1500 static int create_frame(AVHWFramesContext *hwfc, AVVkFrame **frame,
1501                         VkImageTiling tiling, VkImageUsageFlagBits usage,
1502                         void *create_pnext)
1503 {
1504     int err;
1505     VkResult ret;
1506     AVHWDeviceContext *ctx = hwfc->device_ctx;
1507     VulkanDevicePriv *p = ctx->internal->priv;
1508     AVVulkanDeviceContext *hwctx = ctx->hwctx;
1509     enum AVPixelFormat format = hwfc->sw_format;
1510     const VkFormat *img_fmts = av_vkfmt_from_pixfmt(format);
1511     const int planes = av_pix_fmt_count_planes(format);
1512
1513     VkExportSemaphoreCreateInfo ext_sem_info = {
1514         .sType = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO,
1515         .handleTypes = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT,
1516     };
1517
1518     VkSemaphoreCreateInfo sem_spawn = {
1519         .sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO,
1520         .pNext = p->extensions & EXT_EXTERNAL_FD_SEM ? &ext_sem_info : NULL,
1521     };
1522
1523     AVVkFrame *f = av_vk_frame_alloc();
1524     if (!f) {
1525         av_log(ctx, AV_LOG_ERROR, "Unable to allocate memory for AVVkFrame!\n");
1526         return AVERROR(ENOMEM);
1527     }
1528
1529     /* Create the images */
1530     for (int i = 0; i < planes; i++) {
1531         const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(format);
1532         int w = hwfc->width;
1533         int h = hwfc->height;
1534         const int p_w = i > 0 ? AV_CEIL_RSHIFT(w, desc->log2_chroma_w) : w;
1535         const int p_h = i > 0 ? AV_CEIL_RSHIFT(h, desc->log2_chroma_h) : h;
1536
1537         VkImageCreateInfo image_create_info = {
1538             .sType                 = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
1539             .pNext                 = create_pnext,
1540             .imageType             = VK_IMAGE_TYPE_2D,
1541             .format                = img_fmts[i],
1542             .extent.width          = p_w,
1543             .extent.height         = p_h,
1544             .extent.depth          = 1,
1545             .mipLevels             = 1,
1546             .arrayLayers           = 1,
1547             .flags                 = VK_IMAGE_CREATE_ALIAS_BIT,
1548             .tiling                = tiling,
1549             .initialLayout         = VK_IMAGE_LAYOUT_UNDEFINED,
1550             .usage                 = usage,
1551             .samples               = VK_SAMPLE_COUNT_1_BIT,
1552             .pQueueFamilyIndices   = p->qfs,
1553             .queueFamilyIndexCount = p->num_qfs,
1554             .sharingMode           = p->num_qfs > 1 ? VK_SHARING_MODE_CONCURRENT :
1555                                                       VK_SHARING_MODE_EXCLUSIVE,
1556         };
1557
1558         ret = vkCreateImage(hwctx->act_dev, &image_create_info,
1559                             hwctx->alloc, &f->img[i]);
1560         if (ret != VK_SUCCESS) {
1561             av_log(ctx, AV_LOG_ERROR, "Image creation failure: %s\n",
1562                    vk_ret2str(ret));
1563             err = AVERROR(EINVAL);
1564             goto fail;
1565         }
1566
1567         /* Create semaphore */
1568         ret = vkCreateSemaphore(hwctx->act_dev, &sem_spawn,
1569                                 hwctx->alloc, &f->sem[i]);
1570         if (ret != VK_SUCCESS) {
1571             av_log(ctx, AV_LOG_ERROR, "Failed to create semaphore: %s\n",
1572                    vk_ret2str(ret));
1573             err = AVERROR_EXTERNAL;
                 goto fail;
1574         }
1575
1576         f->layout[i] = image_create_info.initialLayout;
1577         f->access[i] = 0x0;
1578     }
1579
1580     f->flags     = 0x0;
1581     f->tiling    = tiling;
1582
1583     *frame = f;
1584     return 0;
1585
1586 fail:
1587     vulkan_frame_free(hwfc, (uint8_t *)f);
1588     return err;
1589 }
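
     /* Each plane of a multi-planar sw_format becomes its own VkImage, sized by
      * the chroma shifts above. Worked example: a 1920x1080 AV_PIX_FMT_NV12
      * frame (log2_chroma_w/h both 1) creates two images,
      *
      *     plane 0: VK_FORMAT_R8_UNORM,   1920x1080
      *     plane 1: VK_FORMAT_R8G8_UNORM,  960x540
      *
      * both sharing the frame's tiling, usage and sharing mode. */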
1590
1591 /* Checks whether the given export handle type is supported; if so, ORs it
      * into *iexp and its compatible handle types into *comp_handle_types */
1592 static void try_export_flags(AVHWFramesContext *hwfc,
1593                              VkExternalMemoryHandleTypeFlags *comp_handle_types,
1594                              VkExternalMemoryHandleTypeFlagBits *iexp,
1595                              VkExternalMemoryHandleTypeFlagBits exp)
1596 {
1597     VkResult ret;
1598     AVVulkanFramesContext *hwctx = hwfc->hwctx;
1599     AVVulkanDeviceContext *dev_hwctx = hwfc->device_ctx->hwctx;
1600     VkExternalImageFormatProperties eprops = {
1601         .sType = VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES_KHR,
1602     };
1603     VkImageFormatProperties2 props = {
1604         .sType = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2,
1605         .pNext = &eprops,
1606     };
1607     VkPhysicalDeviceExternalImageFormatInfo enext = {
1608         .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO,
1609         .handleType = exp,
1610     };
1611     VkPhysicalDeviceImageFormatInfo2 pinfo = {
1612         .sType  = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2,
1613         .pNext  = !exp ? NULL : &enext,
1614         .format = av_vkfmt_from_pixfmt(hwfc->sw_format)[0],
1615         .type   = VK_IMAGE_TYPE_2D,
1616         .tiling = hwctx->tiling,
1617         .usage  = hwctx->usage,
1618         .flags  = VK_IMAGE_CREATE_ALIAS_BIT,
1619     };
1620
1621     ret = vkGetPhysicalDeviceImageFormatProperties2(dev_hwctx->phys_dev,
1622                                                     &pinfo, &props);
1623     if (ret == VK_SUCCESS) {
1624         *iexp |= exp;
1625         *comp_handle_types |= eprops.externalMemoryProperties.compatibleHandleTypes;
1626     }
1627 }
1628
1629 static AVBufferRef *vulkan_pool_alloc(void *opaque, int size)
1630 {
1631     int err;
1632     AVVkFrame *f;
1633     AVBufferRef *avbuf = NULL;
1634     AVHWFramesContext *hwfc = opaque;
1635     AVVulkanFramesContext *hwctx = hwfc->hwctx;
1636     VulkanDevicePriv *p = hwfc->device_ctx->internal->priv;
         VulkanFramesPriv *fp = hwfc->internal->priv;
1637     VkExportMemoryAllocateInfo eminfo[AV_NUM_DATA_POINTERS];
1638     VkExternalMemoryHandleTypeFlags e = 0x0;
1639
1640     VkExternalMemoryImageCreateInfo eiinfo = {
1641         .sType       = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO,
1642         .pNext       = hwctx->create_pnext,
1643     };
1644
1645     if (p->extensions & EXT_EXTERNAL_FD_MEMORY)
1646         try_export_flags(hwfc, &eiinfo.handleTypes, &e,
1647                          VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT);
1648
1649     if (p->extensions & EXT_EXTERNAL_DMABUF_MEMORY)
1650         try_export_flags(hwfc, &eiinfo.handleTypes, &e,
1651                          VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT);
1652
1653     for (int i = 0; i < av_pix_fmt_count_planes(hwfc->sw_format); i++) {
1654         eminfo[i].sType       = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO;
1655         eminfo[i].pNext       = hwctx->alloc_pnext[i];
1656         eminfo[i].handleTypes = e;
1657     }
1658
1659     err = create_frame(hwfc, &f, hwctx->tiling, hwctx->usage,
1660                        eiinfo.handleTypes ? &eiinfo : NULL);
1661     if (err)
1662         return NULL;
1663
1664     err = alloc_bind_mem(hwfc, f, eminfo, sizeof(*eminfo));
1665     if (err)
1666         goto fail;
1667
1668     err = prepare_frame(hwfc, &fp->cmd, f, PREP_MODE_WRITE);
1669     if (err)
1670         goto fail;
1671
1672     avbuf = av_buffer_create((uint8_t *)f, sizeof(AVVkFrame),
1673                              vulkan_frame_free, hwfc, 0);
1674     if (!avbuf)
1675         goto fail;
1676
1677     return avbuf;
1678
1679 fail:
1680     vulkan_frame_free(hwfc, (uint8_t *)f);
1681     return NULL;
1682 }
1683
1684 static void vulkan_frames_uninit(AVHWFramesContext *hwfc)
1685 {
1686     VulkanFramesPriv *fp = hwfc->internal->priv;
1687
1688     free_exec_ctx(hwfc->device_ctx, &fp->cmd);
1689 }
1690
1691 static int vulkan_frames_init(AVHWFramesContext *hwfc)
1692 {
1693     int err;
1694     AVVkFrame *f;
1695     AVVulkanFramesContext *hwctx = hwfc->hwctx;
1696     VulkanFramesPriv *fp = hwfc->internal->priv;
1697     AVVulkanDeviceContext *dev_hwctx = hwfc->device_ctx->hwctx;
1698     VulkanDevicePriv *p = hwfc->device_ctx->internal->priv;
1699
1700     /* Default pool flags */
1701     hwctx->tiling = hwctx->tiling ? hwctx->tiling : p->use_linear_images ?
1702                     VK_IMAGE_TILING_LINEAR : VK_IMAGE_TILING_OPTIMAL;
1703
1704     hwctx->usage |= DEFAULT_USAGE_FLAGS;
1705
1706     err = create_exec_ctx(hwfc->device_ctx, &fp->cmd,
1707                           dev_hwctx->queue_family_tx_index,
1708                           GET_QUEUE_COUNT(dev_hwctx, 0, 0, 1));
1709     if (err)
1710         return err;
1711
1712     /* Test to see if allocation will fail */
1713     err = create_frame(hwfc, &f, hwctx->tiling, hwctx->usage,
1714                        hwctx->create_pnext);
1715     if (err) {
1716         free_exec_ctx(hwfc->device_ctx, &fp->cmd);
1717         return err;
1718     }
1719
1720     vulkan_frame_free(hwfc, (uint8_t *)f);
1721
1722     /* If the user did not specify a pool, hwfc->pool will be set to the
1723      * internal one in hwcontext.c just after this function returns */
1724     if (!hwfc->pool) {
1725         hwfc->internal->pool_internal = av_buffer_pool_init2(sizeof(AVVkFrame),
1726                                                              hwfc, vulkan_pool_alloc,
1727                                                              NULL);
1728         if (!hwfc->internal->pool_internal) {
1729             free_exec_ctx(hwfc->device_ctx, &fp->cmd);
1730             return AVERROR(ENOMEM);
1731         }
1732     }
1733
1734     return 0;
1735 }
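
     /* The check above is what makes user-provided pools work: pool_internal is
      * only created when hwfc->pool is unset. A minimal sketch of the user side
      * (names like my_pool are hypothetical; the pool's buffers must wrap fully
      * initialized AVVkFrames, sized sizeof(AVVkFrame)):
      *
      *     AVBufferRef *fc_ref = av_hwframe_ctx_alloc(vk_device_ref);
      *     AVHWFramesContext *fc = (AVHWFramesContext *)fc_ref->data;
      *     fc->format    = AV_PIX_FMT_VULKAN;
      *     fc->sw_format = AV_PIX_FMT_NV12;
      *     fc->width     = 1920;
      *     fc->height    = 1080;
      *     fc->pool      = my_pool; // user-owned, freed by the user after use
      *     int err = av_hwframe_ctx_init(fc_ref);
      */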
1736
1737 static int vulkan_get_buffer(AVHWFramesContext *hwfc, AVFrame *frame)
1738 {
1739     frame->buf[0] = av_buffer_pool_get(hwfc->pool);
1740     if (!frame->buf[0])
1741         return AVERROR(ENOMEM);
1742
1743     frame->data[0] = frame->buf[0]->data;
1744     frame->format  = AV_PIX_FMT_VULKAN;
1745     frame->width   = hwfc->width;
1746     frame->height  = hwfc->height;
1747
1748     return 0;
1749 }
1750
1751 static int vulkan_transfer_get_formats(AVHWFramesContext *hwfc,
1752                                        enum AVHWFrameTransferDirection dir,
1753                                        enum AVPixelFormat **formats)
1754 {
1755     enum AVPixelFormat *fmts = av_malloc_array(2, sizeof(*fmts));
1756     if (!fmts)
1757         return AVERROR(ENOMEM);
1758
1759     fmts[0] = hwfc->sw_format;
1760     fmts[1] = AV_PIX_FMT_NONE;
1761
1762     *formats = fmts;
1763     return 0;
1764 }
1765
1766 typedef struct VulkanMapping {
1767     AVVkFrame *frame;
1768     int flags;
1769 } VulkanMapping;
1770
1771 static void vulkan_unmap_frame(AVHWFramesContext *hwfc, HWMapDescriptor *hwmap)
1772 {
1773     VulkanMapping *map = hwmap->priv;
1774     AVVulkanDeviceContext *hwctx = hwfc->device_ctx->hwctx;
1775     const int planes = av_pix_fmt_count_planes(hwfc->sw_format);
1776
1777     /* Check if buffer needs flushing */
1778     if ((map->flags & AV_HWFRAME_MAP_WRITE) &&
1779         !(map->frame->flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)) {
1780         VkResult ret;
1781         VkMappedMemoryRange flush_ranges[AV_NUM_DATA_POINTERS] = { { 0 } };
1782
1783         for (int i = 0; i < planes; i++) {
1784             flush_ranges[i].sType  = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
1785             flush_ranges[i].memory = map->frame->mem[i];
1786             flush_ranges[i].size   = VK_WHOLE_SIZE;
1787         }
1788
1789         ret = vkFlushMappedMemoryRanges(hwctx->act_dev, planes,
1790                                         flush_ranges);
1791         if (ret != VK_SUCCESS) {
1792             av_log(hwfc, AV_LOG_ERROR, "Failed to flush memory: %s\n",
1793                    vk_ret2str(ret));
1794         }
1795     }
1796
1797     for (int i = 0; i < planes; i++)
1798         vkUnmapMemory(hwctx->act_dev, map->frame->mem[i]);
1799
1800     av_free(map);
1801 }
1802
1803 static int vulkan_map_frame_to_mem(AVHWFramesContext *hwfc, AVFrame *dst,
1804                                    const AVFrame *src, int flags)
1805 {
1806     VkResult ret;
1807     int err, mapped_mem_count = 0;
1808     AVVkFrame *f = (AVVkFrame *)src->data[0];
1809     AVVulkanDeviceContext *hwctx = hwfc->device_ctx->hwctx;
1810     const int planes = av_pix_fmt_count_planes(hwfc->sw_format);
1811
1812     VulkanMapping *map = av_mallocz(sizeof(VulkanMapping));
1813     if (!map)
1814         return AVERROR(ENOMEM);
1815
1816     if (src->format != AV_PIX_FMT_VULKAN) {
1817         av_log(hwfc, AV_LOG_ERROR, "Cannot map from pixel format %s!\n",
1818                av_get_pix_fmt_name(src->format));
1819         err = AVERROR(EINVAL);
1820         goto fail;
1821     }
1822
1823     if (!(f->flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) ||
1824         !(f->tiling == VK_IMAGE_TILING_LINEAR)) {
1825         av_log(hwfc, AV_LOG_ERROR, "Unable to map frame, not host visible "
1826                "and linear!\n");
1827         err = AVERROR(EINVAL);
1828         goto fail;
1829     }
1830
1831     dst->width  = src->width;
1832     dst->height = src->height;
1833
1834     for (int i = 0; i < planes; i++) {
1835         ret = vkMapMemory(hwctx->act_dev, f->mem[i], 0,
1836                           VK_WHOLE_SIZE, 0, (void **)&dst->data[i]);
1837         if (ret != VK_SUCCESS) {
1838             av_log(hwfc, AV_LOG_ERROR, "Failed to map image memory: %s\n",
1839                 vk_ret2str(ret));
1840             err = AVERROR_EXTERNAL;
1841             goto fail;
1842         }
1843         mapped_mem_count++;
1844     }
1845
1846     /* Check if the memory contents matter */
1847     if (((flags & AV_HWFRAME_MAP_READ) || !(flags & AV_HWFRAME_MAP_OVERWRITE)) &&
1848         !(f->flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)) {
1849         VkMappedMemoryRange map_mem_ranges[AV_NUM_DATA_POINTERS] = { { 0 } };
1850         for (int i = 0; i < planes; i++) {
1851             map_mem_ranges[i].sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
1852             map_mem_ranges[i].size = VK_WHOLE_SIZE;
1853             map_mem_ranges[i].memory = f->mem[i];
1854         }
1855
1856         ret = vkInvalidateMappedMemoryRanges(hwctx->act_dev, planes,
1857                                              map_mem_ranges);
1858         if (ret != VK_SUCCESS) {
1859             av_log(hwfc, AV_LOG_ERROR, "Failed to invalidate memory: %s\n",
1860                    vk_ret2str(ret));
1861             err = AVERROR_EXTERNAL;
1862             goto fail;
1863         }
1864     }
1865
1866     for (int i = 0; i < planes; i++) {
1867         VkImageSubresource sub = {
1868             .aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
1869         };
1870         VkSubresourceLayout layout;
1871         vkGetImageSubresourceLayout(hwctx->act_dev, f->img[i], &sub, &layout);
1872         dst->linesize[i] = layout.rowPitch;
1873     }
1874
1875     map->frame = f;
1876     map->flags = flags;
1877
1878     err = ff_hwframe_map_create(src->hw_frames_ctx, dst, src,
1879                                 &vulkan_unmap_frame, map);
1880     if (err < 0)
1881         goto fail;
1882
1883     return 0;
1884
1885 fail:
1886     for (int i = 0; i < mapped_mem_count; i++)
1887         vkUnmapMemory(hwctx->act_dev, f->mem[i]);
1888
1889     av_free(map);
1890     return err;
1891 }
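
     /* Outside view of this path, assuming the frames were allocated linear and
      * host-visible; AV_HWFRAME_MAP_READ makes the invalidate above run for
      * non-coherent memory (a sketch, error handling omitted, sw_format assumed
      * to be NV12):
      *
      *     AVFrame *cpu = av_frame_alloc();
      *     cpu->format = AV_PIX_FMT_NV12; // must match the pool's sw_format
      *     int err = av_hwframe_map(cpu, vk_frame, AV_HWFRAME_MAP_READ);
      *     // cpu->data[]/cpu->linesize[] now point into mapped VkDeviceMemory;
      *     // av_frame_unref(cpu) unmaps via vulkan_unmap_frame()
      */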
1892
1893 #if CONFIG_LIBDRM
1894 static void vulkan_unmap_from(AVHWFramesContext *hwfc, HWMapDescriptor *hwmap)
1895 {
1896     VulkanMapping *map = hwmap->priv;
1897     AVVulkanDeviceContext *hwctx = hwfc->device_ctx->hwctx;
1898     const int planes = av_pix_fmt_count_planes(hwfc->sw_format);
1899
1900     for (int i = 0; i < planes; i++) {
1901         vkDestroyImage(hwctx->act_dev, map->frame->img[i], hwctx->alloc);
1902         vkFreeMemory(hwctx->act_dev, map->frame->mem[i], hwctx->alloc);
1903         vkDestroySemaphore(hwctx->act_dev, map->frame->sem[i], hwctx->alloc);
1904     }
1905
1906     av_freep(&map->frame);
         av_free(map);
1907 }
1908
1909 static const struct {
1910     uint32_t drm_fourcc;
1911     VkFormat vk_format;
1912 } vulkan_drm_format_map[] = {
1913     { DRM_FORMAT_R8,       VK_FORMAT_R8_UNORM       },
1914     { DRM_FORMAT_R16,      VK_FORMAT_R16_UNORM      },
1915     { DRM_FORMAT_GR88,     VK_FORMAT_R8G8_UNORM     },
1916     { DRM_FORMAT_RG88,     VK_FORMAT_R8G8_UNORM     },
1917     { DRM_FORMAT_GR1616,   VK_FORMAT_R16G16_UNORM   },
1918     { DRM_FORMAT_RG1616,   VK_FORMAT_R16G16_UNORM   },
1919     { DRM_FORMAT_ARGB8888, VK_FORMAT_B8G8R8A8_UNORM },
1920     { DRM_FORMAT_XRGB8888, VK_FORMAT_B8G8R8A8_UNORM },
1921     { DRM_FORMAT_ABGR8888, VK_FORMAT_R8G8B8A8_UNORM },
1922     { DRM_FORMAT_XBGR8888, VK_FORMAT_R8G8B8A8_UNORM },
1923 };
1924
1925 static inline VkFormat drm_to_vulkan_fmt(uint32_t drm_fourcc)
1926 {
1927     for (int i = 0; i < FF_ARRAY_ELEMS(vulkan_drm_format_map); i++)
1928         if (vulkan_drm_format_map[i].drm_fourcc == drm_fourcc)
1929             return vulkan_drm_format_map[i].vk_format;
1930     return VK_FORMAT_UNDEFINED;
1931 }
1932
1933 static int vulkan_map_from_drm_frame_desc(AVHWFramesContext *hwfc, AVVkFrame **frame,
1934                                           AVDRMFrameDescriptor *desc)
1935 {
1936     int err = 0;
1937     VkResult ret;
1938     AVVkFrame *f;
1939     int bind_counts = 0;
1940     AVHWDeviceContext *ctx = hwfc->device_ctx;
1941     AVVulkanDeviceContext *hwctx = ctx->hwctx;
1942     VulkanDevicePriv *p = ctx->internal->priv;
         VulkanFramesPriv *fp = hwfc->internal->priv;
1943     const AVPixFmtDescriptor *fmt_desc = av_pix_fmt_desc_get(hwfc->sw_format);
1944     const int has_modifiers = p->extensions & EXT_DRM_MODIFIER_FLAGS;
1945     VkSubresourceLayout plane_data[AV_NUM_DATA_POINTERS] = { 0 };
1946     VkBindImageMemoryInfo bind_info[AV_NUM_DATA_POINTERS] = { 0 };
1947     VkBindImagePlaneMemoryInfo plane_info[AV_NUM_DATA_POINTERS] = { 0 };
1948     VkExternalMemoryHandleTypeFlagBits htype = VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT;
1949
1950     VK_LOAD_PFN(hwctx->inst, vkGetMemoryFdPropertiesKHR);
1951
1952     for (int i = 0; i < desc->nb_layers; i++) {
1953         if (drm_to_vulkan_fmt(desc->layers[i].format) == VK_FORMAT_UNDEFINED) {
1954             av_log(ctx, AV_LOG_ERROR, "Unsupported DMABUF layer format %#08x!\n",
1955                    desc->layers[i].format);
1956             return AVERROR(EINVAL);
1957         }
1958     }
1959
1960     if (!(f = av_vk_frame_alloc())) {
1961         av_log(ctx, AV_LOG_ERROR, "Unable to allocate memory for AVVkFrame!\n");
1962         return AVERROR(ENOMEM); /* the fail path below dereferences f */
1964     }
1965
1966     for (int i = 0; i < desc->nb_objects; i++) {
1967         VkMemoryFdPropertiesKHR fdmp = {
1968             .sType = VK_STRUCTURE_TYPE_MEMORY_FD_PROPERTIES_KHR,
1969         };
1970         VkMemoryRequirements req = {
1971             .size = desc->objects[i].size,
1972         };
1973         VkImportMemoryFdInfoKHR idesc = {
1974             .sType      = VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR,
1975             .handleType = htype,
1976             .fd         = dup(desc->objects[i].fd),
1977         };
1978
1979         ret = pfn_vkGetMemoryFdPropertiesKHR(hwctx->act_dev, htype,
1980                                              idesc.fd, &fdmp);
1981         if (ret != VK_SUCCESS) {
1982             av_log(hwfc, AV_LOG_ERROR, "Failed to get FD properties: %s\n",
1983                    vk_ret2str(ret));
1984             err = AVERROR_EXTERNAL;
1985             close(idesc.fd);
1986             goto fail;
1987         }
1988
1989         req.memoryTypeBits = fdmp.memoryTypeBits;
1990
1991         err = alloc_mem(ctx, &req, VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT,
1992                         &idesc, &f->flags, &f->mem[i]);
1993         if (err) {
1994             close(idesc.fd);
1995             goto fail;
1996         }
1997
1998         f->size[i] = desc->objects[i].size;
1999     }
2000
2001     f->tiling = has_modifiers ? VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT :
2002                 desc->objects[0].format_modifier == DRM_FORMAT_MOD_LINEAR ?
2003                 VK_IMAGE_TILING_LINEAR : VK_IMAGE_TILING_OPTIMAL;
2004
2005     for (int i = 0; i < desc->nb_layers; i++) {
2006         const int planes = desc->layers[i].nb_planes;
2007         const int signal_p = has_modifiers && (planes > 1);
2008
2009         VkImageDrmFormatModifierExplicitCreateInfoEXT drm_info = {
2010             .sType = VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_EXPLICIT_CREATE_INFO_EXT,
2011             .drmFormatModifier = desc->objects[0].format_modifier,
2012             .drmFormatModifierPlaneCount = planes,
2013             .pPlaneLayouts = (const VkSubresourceLayout *)&plane_data,
2014         };
2015
2016         VkExternalMemoryImageCreateInfo einfo = {
2017             .sType       = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO,
2018             .pNext       = has_modifiers ? &drm_info : NULL,
2019             .handleTypes = htype,
2020         };
2021
2022         VkSemaphoreCreateInfo sem_spawn = {
2023             .sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO,
2024         };
2025
2026         const int p_w = i > 0 ? AV_CEIL_RSHIFT(hwfc->width, fmt_desc->log2_chroma_w) : hwfc->width;
2027         const int p_h = i > 0 ? AV_CEIL_RSHIFT(hwfc->height, fmt_desc->log2_chroma_h) : hwfc->height;
2028
2029         VkImageCreateInfo image_create_info = {
2030             .sType                 = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
2031             .pNext                 = &einfo,
2032             .imageType             = VK_IMAGE_TYPE_2D,
2033             .format                = drm_to_vulkan_fmt(desc->layers[i].format),
2034             .extent.width          = p_w,
2035             .extent.height         = p_h,
2036             .extent.depth          = 1,
2037             .mipLevels             = 1,
2038             .arrayLayers           = 1,
2039             .flags                 = VK_IMAGE_CREATE_ALIAS_BIT,
2040             .tiling                = f->tiling,
2041             .initialLayout         = VK_IMAGE_LAYOUT_UNDEFINED, /* specs say so */
2042             .usage                 = DEFAULT_USAGE_FLAGS,
2043             .samples               = VK_SAMPLE_COUNT_1_BIT,
2044             .pQueueFamilyIndices   = p->qfs,
2045             .queueFamilyIndexCount = p->num_qfs,
2046             .sharingMode           = p->num_qfs > 1 ? VK_SHARING_MODE_CONCURRENT :
2047                                                       VK_SHARING_MODE_EXCLUSIVE,
2048         };
2049
2050         for (int j = 0; j < planes; j++) {
2051             plane_data[j].offset     = desc->layers[i].planes[j].offset;
2052             plane_data[j].rowPitch   = desc->layers[i].planes[j].pitch;
2053             plane_data[j].size       = 0; /* The specs say so for all 3 */
2054             plane_data[j].arrayPitch = 0;
2055             plane_data[j].depthPitch = 0;
2056         }
2057
2058         /* Create image */
2059         ret = vkCreateImage(hwctx->act_dev, &image_create_info,
2060                             hwctx->alloc, &f->img[i]);
2061         if (ret != VK_SUCCESS) {
2062             av_log(ctx, AV_LOG_ERROR, "Image creation failure: %s\n",
2063                    vk_ret2str(ret));
2064             err = AVERROR(EINVAL);
2065             goto fail;
2066         }
2067
2068         ret = vkCreateSemaphore(hwctx->act_dev, &sem_spawn,
2069                                 hwctx->alloc, &f->sem[i]);
2070         if (ret != VK_SUCCESS) {
2071             av_log(ctx, AV_LOG_ERROR, "Failed to create semaphore: %s\n",
2072                    vk_ret2str(ret));
2073             err = AVERROR_EXTERNAL;
                 goto fail;
2074         }
2075
2076         /* We would import an external semaphore into the one we just
2077          * created via vkImportSemaphoreFdKHR, but unfortunately neither DRM
2078          * nor VAAPI offers anything we could import and synchronize with,
2079          * so instead we just signal the semaphore ourselves. */
2080
2081         f->layout[i] = image_create_info.initialLayout;
2082         f->access[i] = 0x0;
2083
2084         for (int j = 0; j < planes; j++) {
2085             VkImageAspectFlagBits aspect = j == 0 ? VK_IMAGE_ASPECT_MEMORY_PLANE_0_BIT_EXT :
2086                                            j == 1 ? VK_IMAGE_ASPECT_MEMORY_PLANE_1_BIT_EXT :
2087                                                     VK_IMAGE_ASPECT_MEMORY_PLANE_2_BIT_EXT;
2088
2089             plane_info[bind_counts].sType = VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO;
2090             plane_info[bind_counts].planeAspect = aspect;
2091
2092             bind_info[bind_counts].sType  = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO;
2093             bind_info[bind_counts].pNext  = signal_p ? &plane_info[bind_counts] : NULL;
2094             bind_info[bind_counts].image  = f->img[i];
2095             bind_info[bind_counts].memory = f->mem[desc->layers[i].planes[j].object_index];
2096             bind_info[bind_counts].memoryOffset = desc->layers[i].planes[j].offset;
2097             bind_counts++;
2098         }
2099     }
2100
2101     /* Bind the allocated memory to the images */
2102     ret = vkBindImageMemory2(hwctx->act_dev, bind_counts, bind_info);
2103     if (ret != VK_SUCCESS) {
2104         av_log(ctx, AV_LOG_ERROR, "Failed to bind memory: %s\n",
2105                vk_ret2str(ret));
2106         err = AVERROR_EXTERNAL;
             goto fail;
2107     }
2108
2109     /* NOTE: This will be completely unnecessary once we can import
2110      * semaphores from DRM. Until then we have to activate the semaphores
2111      * ourselves. We're reusing the exec context that's also used for
         * uploads/downloads. */
2112     err = prepare_frame(hwfc, &fp->cmd, f, PREP_MODE_RO_SHADER);
2113     if (err)
2114         goto fail;
2115
2116     *frame = f;
2117
2118     return 0;
2119
2120 fail:
2121     for (int i = 0; i < desc->nb_layers; i++) {
2122         vkDestroyImage(hwctx->act_dev, f->img[i], hwctx->alloc);
2123         vkDestroySemaphore(hwctx->act_dev, f->sem[i], hwctx->alloc);
2124     }
2125     for (int i = 0; i < desc->nb_objects; i++)
2126         vkFreeMemory(hwctx->act_dev, f->mem[i], hwctx->alloc);
2127
2128     av_free(f);
2129
2130     return err;
2131 }
2132
2133 static int vulkan_map_from_drm(AVHWFramesContext *hwfc, AVFrame *dst,
2134                                const AVFrame *src, int flags)
2135 {
2136     int err = 0;
2137     AVVkFrame *f;
2138     VulkanMapping *map = NULL;
2139
2140     err = vulkan_map_from_drm_frame_desc(hwfc, &f,
2141                                          (AVDRMFrameDescriptor *)src->data[0]);
2142     if (err)
2143         return err;
2144
2145     /* The unmapping function will free this */
2146     dst->data[0] = (uint8_t *)f;
2147     dst->width   = src->width;
2148     dst->height  = src->height;
2149
2150     map = av_mallocz(sizeof(VulkanMapping));
2151     if (!map) {
2152         err = AVERROR(ENOMEM);
             goto fail;
         }
2153
2154     map->frame = f;
2155     map->flags = flags;
2156
2157     err = ff_hwframe_map_create(dst->hw_frames_ctx, dst, src,
2158                                 &vulkan_unmap_from, map);
2159     if (err < 0)
2160         goto fail;
2161
2162     av_log(hwfc, AV_LOG_DEBUG, "Mapped DRM object to Vulkan!\n");
2163
2164     return 0;
2165
2166 fail:
2167     vulkan_frame_free(hwfc, (uint8_t *)f);
2168     av_free(map);
2169     return err;
2170 }
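
     /* This import is reached via av_hwframe_map() with a DRM PRIME source and
      * a destination pointing at a Vulkan frames context (sketch; vk_frames_ref
      * is assumed to be a compatible, initialized AVHWFramesContext ref):
      *
      *     AVFrame *vk = av_frame_alloc();
      *     vk->format        = AV_PIX_FMT_VULKAN;
      *     vk->hw_frames_ctx = av_buffer_ref(vk_frames_ref);
      *     int err = av_hwframe_map(vk, drm_frame, AV_HWFRAME_MAP_READ);
      */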
2171
2172 #if CONFIG_VAAPI
2173 static int vulkan_map_from_vaapi(AVHWFramesContext *dst_fc,
2174                                  AVFrame *dst, const AVFrame *src,
2175                                  int flags)
2176 {
2177     int err;
2178     AVFrame *tmp = av_frame_alloc();
2179     AVHWFramesContext *vaapi_fc = (AVHWFramesContext*)src->hw_frames_ctx->data;
2180     AVVAAPIDeviceContext *vaapi_ctx = vaapi_fc->device_ctx->hwctx;
2181     VASurfaceID surface_id = (VASurfaceID)(uintptr_t)src->data[3];
2182
2183     if (!tmp)
2184         return AVERROR(ENOMEM);
2185
2186     /* We have to sync since like the previous comment said, no semaphores */
2187     vaSyncSurface(vaapi_ctx->display, surface_id);
2188
2189     tmp->format = AV_PIX_FMT_DRM_PRIME;
2190
2191     err = av_hwframe_map(tmp, src, flags);
2192     if (err < 0)
2193         goto fail;
2194
2195     err = vulkan_map_from_drm(dst_fc, dst, tmp, flags);
2196     if (err < 0)
2197         goto fail;
2198
2199     err = ff_hwframe_map_replace(dst, src);
2200
2201 fail:
2202     av_frame_free(&tmp);
2203     return err;
2204 }
2205 #endif
2206 #endif
2207
2208 #if CONFIG_CUDA
2209 static int vulkan_export_to_cuda(AVHWFramesContext *hwfc,
2210                                  AVBufferRef *cuda_hwfc,
2211                                  const AVFrame *frame)
2212 {
2213     int err;
2214     VkResult ret;
2215     AVVkFrame *dst_f;
2216     AVVkFrameInternal *dst_int;
2217     AVHWDeviceContext *ctx = hwfc->device_ctx;
2218     AVVulkanDeviceContext *hwctx = ctx->hwctx;
2219     const int planes = av_pix_fmt_count_planes(hwfc->sw_format);
2220     const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(hwfc->sw_format);
2221     VK_LOAD_PFN(hwctx->inst, vkGetMemoryFdKHR);
2222     VK_LOAD_PFN(hwctx->inst, vkGetSemaphoreFdKHR);
2223
2224     AVHWFramesContext *cuda_fc = (AVHWFramesContext*)cuda_hwfc->data;
2225     AVHWDeviceContext *cuda_cu = cuda_fc->device_ctx;
2226     AVCUDADeviceContext *cuda_dev = cuda_cu->hwctx;
2227     AVCUDADeviceContextInternal *cu_internal = cuda_dev->internal;
2228     CudaFunctions *cu = cu_internal->cuda_dl;
2229     CUarray_format cufmt = desc->comp[0].depth > 8 ? CU_AD_FORMAT_UNSIGNED_INT16 :
2230                                                      CU_AD_FORMAT_UNSIGNED_INT8;
2231
2232     dst_f = (AVVkFrame *)frame->data[0];
2233
2234     dst_int = dst_f->internal;
2235     if (!dst_int || !dst_int->cuda_fc_ref) {
2236         if (!dst_f->internal)
2237             dst_f->internal = dst_int = av_mallocz(sizeof(*dst_f->internal));
2238
2239         if (!dst_int) {
2240             err = AVERROR(ENOMEM);
2241             goto fail;
2242         }
2243
2244         dst_int->cuda_fc_ref = av_buffer_ref(cuda_hwfc);
2245         if (!dst_int->cuda_fc_ref) {
2246             err = AVERROR(ENOMEM);
2247             goto fail;
2248         }
2249
2250         for (int i = 0; i < planes; i++) {
2251             CUDA_EXTERNAL_MEMORY_MIPMAPPED_ARRAY_DESC tex_desc = {
2252                 .offset = 0,
2253                 .arrayDesc = {
2254                     .Width  = i > 0 ? AV_CEIL_RSHIFT(hwfc->width, desc->log2_chroma_w)
2255                                     : hwfc->width,
2256                     .Height = i > 0 ? AV_CEIL_RSHIFT(hwfc->height, desc->log2_chroma_h)
2257                                     : hwfc->height,
2258                     .Depth = 0,
2259                     .Format = cufmt,
2260                     .NumChannels = 1 + ((planes == 2) && i),
2261                     .Flags = 0,
2262                 },
2263                 .numLevels = 1,
2264             };
2265             CUDA_EXTERNAL_MEMORY_HANDLE_DESC ext_desc = {
2266                 .type = CU_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD,
2267                 .size = dst_f->size[i],
2268             };
2269             VkMemoryGetFdInfoKHR export_info = {
2270                 .sType      = VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR,
2271                 .memory     = dst_f->mem[i],
2272                 .handleType = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT_KHR,
2273             };
2274             VkSemaphoreGetFdInfoKHR sem_export = {
2275                 .sType = VK_STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR,
2276                 .semaphore = dst_f->sem[i],
2277                 .handleType = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT,
2278             };
2279             CUDA_EXTERNAL_SEMAPHORE_HANDLE_DESC ext_sem_desc = {
2280                 .type = CU_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD,
2281             };
2282
2283             ret = pfn_vkGetMemoryFdKHR(hwctx->act_dev, &export_info,
2284                                        &ext_desc.handle.fd);
2285             if (ret != VK_SUCCESS) {
2286                 av_log(hwfc, AV_LOG_ERROR, "Unable to export the image as a FD!\n");
2287                 err = AVERROR_EXTERNAL;
2288                 goto fail;
2289             }
2290
2291             ret = CHECK_CU(cu->cuImportExternalMemory(&dst_int->ext_mem[i], &ext_desc));
2292             if (ret < 0) {
2293                 err = AVERROR_EXTERNAL;
2294                 goto fail;
2295             }
2296
2297             ret = CHECK_CU(cu->cuExternalMemoryGetMappedMipmappedArray(&dst_int->cu_mma[i],
2298                                                                        dst_int->ext_mem[i],
2299                                                                        &tex_desc));
2300             if (ret < 0) {
2301                 err = AVERROR_EXTERNAL;
2302                 goto fail;
2303             }
2304
2305             ret = CHECK_CU(cu->cuMipmappedArrayGetLevel(&dst_int->cu_array[i],
2306                                                         dst_int->cu_mma[i], 0));
2307             if (ret < 0) {
2308                 err = AVERROR_EXTERNAL;
2309                 goto fail;
2310             }
2311
2312             ret = pfn_vkGetSemaphoreFdKHR(hwctx->act_dev, &sem_export,
2313                                           &ext_sem_desc.handle.fd);
2314             if (ret != VK_SUCCESS) {
2315                 av_log(ctx, AV_LOG_ERROR, "Failed to export semaphore: %s\n",
2316                        vk_ret2str(ret));
2317                 err = AVERROR_EXTERNAL;
2318                 goto fail;
2319             }
2320
2321             ret = CHECK_CU(cu->cuImportExternalSemaphore(&dst_int->cu_sem[i],
2322                                                          &ext_sem_desc));
2323             if (ret < 0) {
2324                 err = AVERROR_EXTERNAL;
2325                 goto fail;
2326             }
2327         }
2328     }
2329
2330     return 0;
2331
2332 fail:
2333     return err;
2334 }
2335
2336 static int vulkan_transfer_data_from_cuda(AVHWFramesContext *hwfc,
2337                                           AVFrame *dst, const AVFrame *src)
2338 {
2339     int err;
2340     VkResult ret;
2341     CUcontext dummy;
2342     AVVkFrame *dst_f;
2343     AVVkFrameInternal *dst_int;
2344     const int planes = av_pix_fmt_count_planes(hwfc->sw_format);
2345     const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(hwfc->sw_format);
2346
2347     AVHWFramesContext *cuda_fc = (AVHWFramesContext*)src->hw_frames_ctx->data;
2348     AVHWDeviceContext *cuda_cu = cuda_fc->device_ctx;
2349     AVCUDADeviceContext *cuda_dev = cuda_cu->hwctx;
2350     AVCUDADeviceContextInternal *cu_internal = cuda_dev->internal;
2351     CudaFunctions *cu = cu_internal->cuda_dl;
2352     CUDA_EXTERNAL_SEMAPHORE_WAIT_PARAMS s_w_par[AV_NUM_DATA_POINTERS] = { 0 };
2353     CUDA_EXTERNAL_SEMAPHORE_SIGNAL_PARAMS s_s_par[AV_NUM_DATA_POINTERS] = { 0 };
2354
2355     ret = CHECK_CU(cu->cuCtxPushCurrent(cuda_dev->cuda_ctx));
2356     if (ret < 0)
2357         return AVERROR_EXTERNAL; /* nothing mapped yet, and the fail path
                                       * needs dst_f to be valid */
2360
2361     dst_f = (AVVkFrame *)dst->data[0];
2362
2363     err = vulkan_export_to_cuda(hwfc, src->hw_frames_ctx, dst);
2364     if (err < 0)
2365         goto fail;
2367     dst_int = dst_f->internal;
2368
2369     ret = CHECK_CU(cu->cuWaitExternalSemaphoresAsync(dst_int->cu_sem, s_w_par,
2370                                                      planes, cuda_dev->stream));
2371     if (ret < 0) {
2372         err = AVERROR_EXTERNAL;
2373         goto fail;
2374     }
2375
2376     for (int i = 0; i < planes; i++) {
2377         CUDA_MEMCPY2D cpy = {
2378             .srcMemoryType = CU_MEMORYTYPE_DEVICE,
2379             .srcDevice     = (CUdeviceptr)src->data[i],
2380             .srcPitch      = src->linesize[i],
2381             .srcY          = 0,
2382
2383             .dstMemoryType = CU_MEMORYTYPE_ARRAY,
2384             .dstArray      = dst_int->cu_array[i],
2385             .WidthInBytes  = (i > 0 ? AV_CEIL_RSHIFT(hwfc->width, desc->log2_chroma_w)
2386                                     : hwfc->width) * desc->comp[i].step,
2387             .Height        = i > 0 ? AV_CEIL_RSHIFT(hwfc->height, desc->log2_chroma_h)
2388                                    : hwfc->height,
2389         };
2390
2391         ret = CHECK_CU(cu->cuMemcpy2DAsync(&cpy, cuda_dev->stream));
2392         if (ret < 0) {
2393             err = AVERROR_EXTERNAL;
2394             goto fail;
2395         }
2396     }
2397
2398     ret = CHECK_CU(cu->cuSignalExternalSemaphoresAsync(dst_int->cu_sem, s_s_par,
2399                                                        planes, cuda_dev->stream));
2400     if (ret < 0) {
2401         err = AVERROR_EXTERNAL;
2402         goto fail;
2403     }
2404
2405     CHECK_CU(cu->cuCtxPopCurrent(&dummy));
2406
2407     av_log(hwfc, AV_LOG_VERBOSE, "Transfered CUDA image to Vulkan!\n");
2408
2409     return 0;
2410
2411 fail:
2412     CHECK_CU(cu->cuCtxPopCurrent(&dummy));
2413     vulkan_free_internal(dst_f->internal);
2414     dst_f->internal = NULL;
2415     av_buffer_unref(&dst->buf[0]);
2416     return err;
2417 }
2418 #endif
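
     /* The CUDA upload is driven through av_hwframe_transfer_data() with a CUDA
      * source and a Vulkan destination (sketch; both device contexts are
      * assumed to refer to the same physical GPU, otherwise the exported FDs
      * will not import):
      *
      *     AVFrame *vk = av_frame_alloc();
      *     int err = av_hwframe_get_buffer(vk_frames_ref, vk, 0);
      *     if (err >= 0)
      *         err = av_hwframe_transfer_data(vk, cuda_frame, 0);
      */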
2419
2420 static int vulkan_map_to(AVHWFramesContext *hwfc, AVFrame *dst,
2421                          const AVFrame *src, int flags)
2422 {
2423     av_unused VulkanDevicePriv *p = hwfc->device_ctx->internal->priv;
2424
2425     switch (src->format) {
2426 #if CONFIG_LIBDRM
2427 #if CONFIG_VAAPI
2428     case AV_PIX_FMT_VAAPI:
2429         if (p->extensions & EXT_EXTERNAL_DMABUF_MEMORY)
2430             return vulkan_map_from_vaapi(hwfc, dst, src, flags);
2431 #endif
2432     case AV_PIX_FMT_DRM_PRIME:
2433         if (p->extensions & EXT_EXTERNAL_DMABUF_MEMORY)
2434             return vulkan_map_from_drm(hwfc, dst, src, flags);
2435 #endif
2436     default:
2437         return AVERROR(ENOSYS);
2438     }
2439 }
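
     /* For VAAPI sources the dispatch above is a two-step map: VAAPI surface ->
      * DRM PRIME (via av_hwframe_map() inside vulkan_map_from_vaapi()) and then
      * the DRM import path. From the caller it still looks like one call
      * (sketch; vk_frames_ref is an assumption, set up elsewhere):
      *
      *     AVFrame *vk = av_frame_alloc();
      *     vk->format        = AV_PIX_FMT_VULKAN;
      *     vk->hw_frames_ctx = av_buffer_ref(vk_frames_ref);
      *     int err = av_hwframe_map(vk, vaapi_frame, AV_HWFRAME_MAP_READ);
      */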
2440
2441 #if CONFIG_LIBDRM
2442 typedef struct VulkanDRMMapping {
2443     AVDRMFrameDescriptor drm_desc;
2444     AVVkFrame *source;
2445 } VulkanDRMMapping;
2446
2447 static void vulkan_unmap_to_drm(AVHWFramesContext *hwfc, HWMapDescriptor *hwmap)
2448 {
2449     AVDRMFrameDescriptor *drm_desc = hwmap->priv;
2450
2451     for (int i = 0; i < drm_desc->nb_objects; i++)
2452         close(drm_desc->objects[i].fd);
2453
2454     av_free(drm_desc);
2455 }
2456
2457 static inline uint32_t vulkan_fmt_to_drm(VkFormat vkfmt)
2458 {
2459     for (int i = 0; i < FF_ARRAY_ELEMS(vulkan_drm_format_map); i++)
2460         if (vulkan_drm_format_map[i].vk_format == vkfmt)
2461             return vulkan_drm_format_map[i].drm_fourcc;
2462     return DRM_FORMAT_INVALID;
2463 }
2464
2465 static int vulkan_map_to_drm(AVHWFramesContext *hwfc, AVFrame *dst,
2466                              const AVFrame *src, int flags)
2467 {
2468     int err = 0;
2469     VkResult ret;
2470     AVVkFrame *f = (AVVkFrame *)src->data[0];
2471     VulkanDevicePriv *p = hwfc->device_ctx->internal->priv;
         VulkanFramesPriv *fp = hwfc->internal->priv;
2472     AVVulkanDeviceContext *hwctx = hwfc->device_ctx->hwctx;
2473     const int planes = av_pix_fmt_count_planes(hwfc->sw_format);
2474     VK_LOAD_PFN(hwctx->inst, vkGetMemoryFdKHR);
2475     VkImageDrmFormatModifierPropertiesEXT drm_mod = {
2476         .sType = VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT,
2477     };
2478
2479     AVDRMFrameDescriptor *drm_desc = av_mallocz(sizeof(*drm_desc));
2480     if (!drm_desc)
2481         return AVERROR(ENOMEM);
2482
2483     err = prepare_frame(hwfc, &fp->cmd, f, PREP_MODE_EXTERNAL_EXPORT);
2484     if (err < 0)
2485         goto end;
2486
2487     err = ff_hwframe_map_create(src->hw_frames_ctx, dst, src, &vulkan_unmap_to_drm, drm_desc);
2488     if (err < 0)
2489         goto end;
2490
2491     if (p->extensions & EXT_DRM_MODIFIER_FLAGS) {
2492         VK_LOAD_PFN(hwctx->inst, vkGetImageDrmFormatModifierPropertiesEXT);
2493         ret = pfn_vkGetImageDrmFormatModifierPropertiesEXT(hwctx->act_dev, f->img[0],
2494                                                            &drm_mod);
2495         if (ret != VK_SUCCESS) {
2496             av_log(hwfc, AV_LOG_ERROR, "Failed to retrieve DRM format modifier!\n");
2497             err = AVERROR_EXTERNAL;
2498             goto end;
2499         }
2500     }
2501
2502     for (int i = 0; (i < planes) && (f->mem[i]); i++) {
2503         VkMemoryGetFdInfoKHR export_info = {
2504             .sType      = VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR,
2505             .memory     = f->mem[i],
2506             .handleType = VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT,
2507         };
2508
2509         ret = pfn_vkGetMemoryFdKHR(hwctx->act_dev, &export_info,
2510                                    &drm_desc->objects[i].fd);
2511         if (ret != VK_SUCCESS) {
2512             av_log(hwfc, AV_LOG_ERROR, "Unable to export the image as a FD!\n");
2513             err = AVERROR_EXTERNAL;
2514             goto end;
2515         }
2516
2517         drm_desc->nb_objects++;
2518         drm_desc->objects[i].size = f->size[i];
2519         drm_desc->objects[i].format_modifier = drm_mod.drmFormatModifier;
2520     }
2521
2522     drm_desc->nb_layers = planes;
2523     for (int i = 0; i < drm_desc->nb_layers; i++) {
2524         VkSubresourceLayout layout;
2525         VkImageSubresource sub = {
2526             .aspectMask = p->extensions & EXT_DRM_MODIFIER_FLAGS ?
2527                           VK_IMAGE_ASPECT_MEMORY_PLANE_0_BIT_EXT :
2528                           VK_IMAGE_ASPECT_COLOR_BIT,
2529         };
2530         VkFormat plane_vkfmt = av_vkfmt_from_pixfmt(hwfc->sw_format)[i];
2531
2532         drm_desc->layers[i].format    = vulkan_fmt_to_drm(plane_vkfmt);
2533         drm_desc->layers[i].nb_planes = 1;
2534
2535         if (drm_desc->layers[i].format == DRM_FORMAT_INVALID) {
2536             av_log(hwfc, AV_LOG_ERROR, "Cannot map to DRM layer, unsupported!\n");
2537             err = AVERROR_PATCHWELCOME;
2538             goto end;
2539         }
2540
2541         drm_desc->layers[i].planes[0].object_index = FFMIN(i, drm_desc->nb_objects - 1);
2542
2543         if (f->tiling == VK_IMAGE_TILING_OPTIMAL)
2544             continue;
2545
2546         vkGetImageSubresourceLayout(hwctx->act_dev, f->img[i], &sub, &layout);
2547         drm_desc->layers[i].planes[0].offset       = layout.offset;
2548         drm_desc->layers[i].planes[0].pitch        = layout.rowPitch;
2549     }
2550
2551     dst->width   = src->width;
2552     dst->height  = src->height;
2553     dst->data[0] = (uint8_t *)drm_desc;
2554
2555     av_log(hwfc, AV_LOG_VERBOSE, "Mapped AVVkFrame to a DRM object!\n");
2556
2557     return 0;
2558
2559 end:
2560     av_free(drm_desc);
2561     return err;
2562 }
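
     /* The export mirrors the DRM import (sketch): a frame mapped this way
      * carries an AVDRMFrameDescriptor in data[0], and its FDs stay open until
      * the mapping is unreferenced, which calls vulkan_unmap_to_drm():
      *
      *     AVFrame *drm = av_frame_alloc();
      *     drm->format = AV_PIX_FMT_DRM_PRIME;
      *     int err = av_hwframe_map(drm, vk_frame, 0);
      *     const AVDRMFrameDescriptor *d = (AVDRMFrameDescriptor *)drm->data[0];
      */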
2563
2564 #if CONFIG_VAAPI
2565 static int vulkan_map_to_vaapi(AVHWFramesContext *hwfc, AVFrame *dst,
2566                                const AVFrame *src, int flags)
2567 {
2568     int err;
2569     AVFrame *tmp = av_frame_alloc();
2570     if (!tmp)
2571         return AVERROR(ENOMEM);
2572
2573     tmp->format = AV_PIX_FMT_DRM_PRIME;
2574
2575     err = vulkan_map_to_drm(hwfc, tmp, src, flags);
2576     if (err < 0)
2577         goto fail;
2578
2579     err = av_hwframe_map(dst, tmp, flags);
2580     if (err < 0)
2581         goto fail;
2582
2583     err = ff_hwframe_map_replace(dst, src);
2584
2585 fail:
2586     av_frame_free(&tmp);
2587     return err;
2588 }
2589 #endif
2590 #endif
2591
2592 static int vulkan_map_from(AVHWFramesContext *hwfc, AVFrame *dst,
2593                            const AVFrame *src, int flags)
2594 {
2595     av_unused VulkanDevicePriv *p = hwfc->device_ctx->internal->priv;
2596
2597     switch (dst->format) {
2598 #if CONFIG_LIBDRM
2599     case AV_PIX_FMT_DRM_PRIME:
2600         if (p->extensions & EXT_EXTERNAL_DMABUF_MEMORY)
2601             return vulkan_map_to_drm(hwfc, dst, src, flags);
2602 #if CONFIG_VAAPI
2603     case AV_PIX_FMT_VAAPI:
2604         if (p->extensions & EXT_EXTERNAL_DMABUF_MEMORY)
2605             return vulkan_map_to_vaapi(hwfc, dst, src, flags);
2606 #endif
2607 #endif
2608     default:
2609         return vulkan_map_frame_to_mem(hwfc, dst, src, flags);
2610     }
2611 }
2612
2613 typedef struct ImageBuffer {
2614     VkBuffer buf;
2615     VkDeviceMemory mem;
2616     VkMemoryPropertyFlagBits flags;
2617 } ImageBuffer;
2618
2619 static void free_buf(void *opaque, uint8_t *data)
2620 {
2621     AVHWDeviceContext *ctx = opaque;
2622     AVVulkanDeviceContext *hwctx = ctx->hwctx;
2623     ImageBuffer *vkbuf = (ImageBuffer *)data;
2624
2625     if (vkbuf->buf)
2626         vkDestroyBuffer(hwctx->act_dev, vkbuf->buf, hwctx->alloc);
2627     if (vkbuf->mem)
2628         vkFreeMemory(hwctx->act_dev, vkbuf->mem, hwctx->alloc);
2629
2630     av_free(data);
2631 }
2632
2633 static int create_buf(AVHWDeviceContext *ctx, AVBufferRef **buf,
2634                       int height, int *stride, VkBufferUsageFlags usage,
2635                       VkMemoryPropertyFlagBits flags, void *create_pnext,
2636                       void *alloc_pnext)
2637 {
2638     int err;
2639     VkResult ret;
2640     VkMemoryRequirements req;
2641     AVVulkanDeviceContext *hwctx = ctx->hwctx;
2642     VulkanDevicePriv *p = ctx->internal->priv;
2643
2644     VkBufferCreateInfo buf_spawn = {
2645         .sType       = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,
2646         .pNext       = create_pnext,
2647         .usage       = usage,
2648         .sharingMode = VK_SHARING_MODE_EXCLUSIVE,
2649     };
2650
2651     ImageBuffer *vkbuf = av_mallocz(sizeof(*vkbuf));
2652     if (!vkbuf)
2653         return AVERROR(ENOMEM);
2654
2655     *stride = FFALIGN(*stride, p->props.limits.optimalBufferCopyRowPitchAlignment);
2656     buf_spawn.size = height*(*stride);
2657
2658     ret = vkCreateBuffer(hwctx->act_dev, &buf_spawn, hwctx->alloc, &vkbuf->buf);
2659     if (ret != VK_SUCCESS) {
2660         av_log(ctx, AV_LOG_ERROR, "Failed to create buffer: %s\n",
2661                vk_ret2str(ret));
2662         av_free(vkbuf);
             return AVERROR_EXTERNAL;
2663     }
2664
2665     vkGetBufferMemoryRequirements(hwctx->act_dev, vkbuf->buf, &req);
2666
2667     err = alloc_mem(ctx, &req, flags, alloc_pnext, &vkbuf->flags, &vkbuf->mem);
2668     if (err) {
2669         free_buf(ctx, (uint8_t *)vkbuf);
             return err;
         }
2670
2671     ret = vkBindBufferMemory(hwctx->act_dev, vkbuf->buf, vkbuf->mem, 0);
2672     if (ret != VK_SUCCESS) {
2673         av_log(ctx, AV_LOG_ERROR, "Failed to bind memory to buffer: %s\n",
2674                vk_ret2str(ret));
2675         free_buf(ctx, (uint8_t *)vkbuf);
2676         return AVERROR_EXTERNAL;
2677     }
2678
2679     *buf = av_buffer_create((uint8_t *)vkbuf, sizeof(*vkbuf), free_buf, ctx, 0);
2680     if (!(*buf)) {
2681         free_buf(ctx, (uint8_t *)vkbuf);
2682         return AVERROR(ENOMEM);
2683     }
2684
2685     return 0;
2686 }
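
     /* Note that the caller's stride is aligned up to the device's
      * optimalBufferCopyRowPitchAlignment, so the staging buffer can be wider
      * than the plane it stages. Worked example, assuming an alignment of 64
      * (a common value, but it is implementation-dependent):
      *
      *     int stride = 3836;                 // 1918-wide, 16-bit plane
      *     stride = FFALIGN(stride, 64);      // -> 3840 bytes per row
      *     // buffer size = height * 3840; transfer_image_buf() below
      *     // compensates for the padding through bufferRowLength
      */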
2687
2688 static int map_buffers(AVHWDeviceContext *ctx, AVBufferRef **bufs, uint8_t *mem[],
2689                        int nb_buffers, int invalidate)
2690 {
2691     VkResult ret;
2692     AVVulkanDeviceContext *hwctx = ctx->hwctx;
2693     VkMappedMemoryRange invalidate_ctx[AV_NUM_DATA_POINTERS];
2694     int invalidate_count = 0;
2695
2696     for (int i = 0; i < nb_buffers; i++) {
2697         ImageBuffer *vkbuf = (ImageBuffer *)bufs[i]->data;
2698         ret = vkMapMemory(hwctx->act_dev, vkbuf->mem, 0,
2699                           VK_WHOLE_SIZE, 0, (void **)&mem[i]);
2700         if (ret != VK_SUCCESS) {
2701             av_log(ctx, AV_LOG_ERROR, "Failed to map buffer memory: %s\n",
2702                    vk_ret2str(ret));
2703             return AVERROR_EXTERNAL;
2704         }
2705     }
2706
2707     if (!invalidate)
2708         return 0;
2709
2710     for (int i = 0; i < nb_buffers; i++) {
2711         ImageBuffer *vkbuf = (ImageBuffer *)bufs[i]->data;
2712         const VkMappedMemoryRange ival_buf = {
2713             .sType  = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE,
2714             .memory = vkbuf->mem,
2715             .size   = VK_WHOLE_SIZE,
2716         };
2717         if (vkbuf->flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)
2718             continue;
2719         invalidate_ctx[invalidate_count++] = ival_buf;
2720     }
2721
2722     if (invalidate_count) {
2723         ret = vkInvalidateMappedMemoryRanges(hwctx->act_dev, invalidate_count,
2724                                              invalidate_ctx);
2725         if (ret != VK_SUCCESS)
2726             av_log(ctx, AV_LOG_WARNING, "Failed to invalidate memory: %s\n",
2727                    vk_ret2str(ret));
2728     }
2729
2730     return 0;
2731 }
2732
2733 static int unmap_buffers(AVHWDeviceContext *ctx, AVBufferRef **bufs,
2734                          int nb_buffers, int flush)
2735 {
2736     int err = 0;
2737     VkResult ret;
2738     AVVulkanDeviceContext *hwctx = ctx->hwctx;
2739     VkMappedMemoryRange flush_ctx[AV_NUM_DATA_POINTERS];
2740     int flush_count = 0;
2741
2742     if (flush) {
2743         for (int i = 0; i < nb_buffers; i++) {
2744             ImageBuffer *vkbuf = (ImageBuffer *)bufs[i]->data;
2745             const VkMappedMemoryRange flush_buf = {
2746                 .sType  = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE,
2747                 .memory = vkbuf->mem,
2748                 .size   = VK_WHOLE_SIZE,
2749             };
2750             if (vkbuf->flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)
2751                 continue;
2752             flush_ctx[flush_count++] = flush_buf;
2753         }
2754     }
2755
2756     if (flush_count) {
2757         ret = vkFlushMappedMemoryRanges(hwctx->act_dev, flush_count, flush_ctx);
2758         if (ret != VK_SUCCESS) {
2759             av_log(ctx, AV_LOG_ERROR, "Failed to flush memory: %s\n",
2760                     vk_ret2str(ret));
2761             err = AVERROR_EXTERNAL; /* We still want to try to unmap them */
2762         }
2763     }
2764
2765     for (int i = 0; i < nb_buffers; i++) {
2766         ImageBuffer *vkbuf = (ImageBuffer *)bufs[i]->data;
2767         vkUnmapMemory(hwctx->act_dev, vkbuf->mem);
2768     }
2769
2770     return err;
2771 }
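
     /* map_buffers() and unmap_buffers() pair up as invalidate-on-read and
      * flush-on-write; both skip memory types with
      * VK_MEMORY_PROPERTY_HOST_COHERENT_BIT, which need neither. A download
      * therefore looks like this (sketch), and an upload inverts the flags:
      *
      *     uint8_t *mem[AV_NUM_DATA_POINTERS];
      *     err = map_buffers(ctx, bufs, mem, planes, 1);   // invalidate first
      *     // ... read the downloaded planes through mem[] ...
      *     err = unmap_buffers(ctx, bufs, planes, 0);      // nothing to flush
      */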
2772
2773 static int transfer_image_buf(AVHWDeviceContext *ctx, const AVFrame *f,
2774                               AVBufferRef **bufs, const int *buf_stride, int w,
2775                               int h, enum AVPixelFormat pix_fmt, int to_buf)
2776 {
2777     int err;
2778     AVVkFrame *frame = (AVVkFrame *)f->data[0];
2779     /* The exec context lives in the frames context that owns the frame */
         VulkanFramesPriv *fp = ((AVHWFramesContext *)f->hw_frames_ctx->data)->internal->priv;
2780
2781     int bar_num = 0;
2782     VkPipelineStageFlagBits sem_wait_dst[AV_NUM_DATA_POINTERS];
2783
2784     const int planes = av_pix_fmt_count_planes(pix_fmt);
2785     const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(pix_fmt);
2786
2787     VkImageMemoryBarrier img_bar[AV_NUM_DATA_POINTERS] = { 0 };
2788     VkCommandBuffer cmd_buf = get_buf_exec_ctx(ctx, &fp->cmd);
2789
2790     VkSubmitInfo s_info = {
2791         .sType                = VK_STRUCTURE_TYPE_SUBMIT_INFO,
2792         .pSignalSemaphores    = frame->sem,
2793         .pWaitSemaphores      = frame->sem,
2794         .pWaitDstStageMask    = sem_wait_dst,
2795         .signalSemaphoreCount = planes,
2796         .waitSemaphoreCount   = planes,
2797     };
2798
2799     if ((err = wait_start_exec_ctx(ctx, &fp->cmd)))
2800         return err;
2801
2802     /* Change the image layout to something more optimal for transfers */
2803     for (int i = 0; i < planes; i++) {
2804         VkImageLayout new_layout = to_buf ? VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL :
2805                                             VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
2806         VkAccessFlags new_access = to_buf ? VK_ACCESS_TRANSFER_READ_BIT :
2807                                             VK_ACCESS_TRANSFER_WRITE_BIT;
2808
2809         sem_wait_dst[i] = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
2810
2811         /* If the layout matches and we already have the required access, skip the barrier */
2812         if ((frame->layout[i] == new_layout) && (frame->access[i] & new_access))
2813             continue;
2814
2815         img_bar[bar_num].sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
2816         img_bar[bar_num].srcAccessMask = 0x0;
2817         img_bar[bar_num].dstAccessMask = new_access;
2818         img_bar[bar_num].oldLayout = frame->layout[i];
2819         img_bar[bar_num].newLayout = new_layout;
2820         img_bar[bar_num].srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
2821         img_bar[bar_num].dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
2822         img_bar[bar_num].image = frame->img[i];
2823         img_bar[bar_num].subresourceRange.levelCount = 1;
2824         img_bar[bar_num].subresourceRange.layerCount = 1;
2825         img_bar[bar_num].subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
2826
2827         frame->layout[i] = img_bar[bar_num].newLayout;
2828         frame->access[i] = img_bar[bar_num].dstAccessMask;
2829
2830         bar_num++;
2831     }
2832
2833     if (bar_num)
2834         vkCmdPipelineBarrier(cmd_buf, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
2835                              VK_PIPELINE_STAGE_TRANSFER_BIT, 0,
2836                              0, NULL, 0, NULL, bar_num, img_bar);
2837
2838     /* Schedule a copy for each plane */
2839     for (int i = 0; i < planes; i++) {
2840         ImageBuffer *vkbuf = (ImageBuffer *)bufs[i]->data;
2841         const int p_w = i > 0 ? AV_CEIL_RSHIFT(w, desc->log2_chroma_w) : w;
2842         const int p_h = i > 0 ? AV_CEIL_RSHIFT(h, desc->log2_chroma_h) : h;
2843         VkBufferImageCopy buf_reg = {
2844             .bufferOffset = 0,
2845             /* Buffer stride isn't in bytes, it's in samples; the implementation
2846              * uses the image's VkFormat to know how many bytes per sample the
2847              * buffer has, so we have to convert by dividing.
2848              * Won't work with YUVA or other planar formats with alpha. */
2849             .bufferRowLength = buf_stride[i] / desc->comp[i].step,
2850             .bufferImageHeight = p_h,
2851             .imageSubresource.layerCount = 1,
2852             .imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
2853             .imageOffset = { 0, 0, 0, },
2854             .imageExtent = { p_w, p_h, 1, },
2855         };
2856
2857         if (to_buf)
2858             vkCmdCopyImageToBuffer(cmd_buf, frame->img[i], frame->layout[i],
2859                                    vkbuf->buf, 1, &buf_reg);
2860         else
2861             vkCmdCopyBufferToImage(cmd_buf, vkbuf->buf, frame->img[i],
2862                                    frame->layout[i], 1, &buf_reg);
2863     }
2864
2865     /* When uploading, do this asynchronously if the source is refcounted by
2866      * keeping the buffers as a submission dependency.
2867      * The hwcontext is guaranteed not to be freed until all frames are freed
2868      * in the frames_uninit function.
2869      * When downloading to buffer, do this synchronously and wait for the
2870      * queue submission to finish executing */
2871     if (!to_buf) {
2872         int ref;
2873         for (ref = 0; ref < AV_NUM_DATA_POINTERS; ref++) {
2874             if (!f->buf[ref])
2875                 break;
2876             if ((err = add_buf_dep_exec_ctx(ctx, &s->cmd, &f->buf[ref], 1)))
2877                 return err;
2878         }
2879         if (ref && (err = add_buf_dep_exec_ctx(ctx, &s->cmd, bufs, planes)))
2880             return err;
2881         return submit_exec_ctx(ctx, &s->cmd, &s_info, !ref);
2882     } else {
2883         return submit_exec_ctx(ctx, &s->cmd, &s_info,    1);
2884     }
2885 }
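
/* Caller-side sketch (kept under #if 0, not compiled into this file) of the
 * upload path implemented by transfer_image_buf(): when the source frame is
 * refcounted, the staging buffers become submission dependencies and the
 * upload returns without blocking. The helper name is hypothetical. */
#if 0
static int upload_frame_example(AVBufferRef *vk_frames_ref, const AVFrame *sw_src)
{
    int err;
    AVFrame *dst = av_frame_alloc();
    if (!dst)
        return AVERROR(ENOMEM);

    /* Pulls an AVVkFrame out of the pool owned by vk_frames_ref */
    err = av_hwframe_get_buffer(vk_frames_ref, dst, 0);
    if (err < 0)
        goto end;

    /* Dispatches to vulkan_transfer_data_from_mem() below */
    err = av_hwframe_transfer_data(dst, sw_src, 0);

end:
    av_frame_free(&dst);
    return err;
}
#endif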
2886
2887 /* Technically we can use VK_EXT_external_memory_host to upload and download,
2888  * however, the alignment requirements make this infeasible, as both the pointer
2889  * and the size of each plane need to be aligned to the minimum alignment
2890  * requirement, which on all current implementations (anv, radv) is 4096.
2891  * If the requirement gets relaxed (unlikely) this can easily be implemented. */
2892 static int vulkan_transfer_data_from_mem(AVHWFramesContext *hwfc, AVFrame *dst,
2893                                          const AVFrame *src)
2894 {
2895     int err = 0;
2896     AVFrame tmp;
2897     AVVkFrame *f = (AVVkFrame *)dst->data[0];
2898     AVHWDeviceContext *dev_ctx = hwfc->device_ctx;
2899     AVBufferRef *bufs[AV_NUM_DATA_POINTERS] = { 0 };
2900     const int planes = av_pix_fmt_count_planes(src->format);
2901     int log2_chroma;
2902
2903     if (src->format == AV_PIX_FMT_NONE || !av_vkfmt_from_pixfmt(src->format)) {
2904         av_log(hwfc, AV_LOG_ERROR, "Unsupported source pixel format!\n");
2905         return AVERROR(EINVAL);
2906     }
2907     log2_chroma = av_pix_fmt_desc_get(src->format)->log2_chroma_h;
2907
2908     if (src->width > hwfc->width || src->height > hwfc->height)
2909         return AVERROR(EINVAL);
2910
2911     /* For linear, host-visible images */
2912     if (f->tiling == VK_IMAGE_TILING_LINEAR &&
2913         f->flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) {
2914         AVFrame *map = av_frame_alloc();
2915         if (!map)
2916             return AVERROR(ENOMEM);
2917         map->format = src->format;
2918
2919         err = vulkan_map_frame_to_mem(hwfc, map, dst, AV_HWFRAME_MAP_WRITE);
2920         if (!err)
2921             err = av_frame_copy(map, src);
2922
2923         /* Freeing the mapped frame also unmaps the image */
2924         av_frame_free(&map);
2925         return err;
2926     }
2927
2928     /* Create buffers */
2929     for (int i = 0; i < planes; i++) {
2930         int h = src->height;
2931         int p_height = i > 0 ? AV_CEIL_RSHIFT(h, log2_chroma) : h;
2932
2933         tmp.linesize[i] = FFABS(src->linesize[i]);
2934         err = create_buf(dev_ctx, &bufs[i], p_height,
2935                          &tmp.linesize[i], VK_BUFFER_USAGE_TRANSFER_SRC_BIT,
2936                          VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT, NULL, NULL);
2937         if (err)
2938             goto end;
2939     }
2940
2941     /* Map, copy image to buffer, unmap */
2942     if ((err = map_buffers(dev_ctx, bufs, tmp.data, planes, 0)))
2943         goto end;
2944
2945     av_image_copy(tmp.data, tmp.linesize, (const uint8_t **)src->data,
2946                   src->linesize, src->format, src->width, src->height);
2947
2948     if ((err = unmap_buffers(dev_ctx, bufs, planes, 1)))
2949         goto end;
2950
2951     /* Copy buffers to image */
2952     err = transfer_image_buf(dev_ctx, dst, bufs, tmp.linesize,
2953                              src->width, src->height, src->format, 0);
2954
2955 end:
2956     for (int i = 0; i < planes; i++)
2957         av_buffer_unref(&bufs[i]);
2958
2959     return err;
2960 }
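
/* Sketch (hypothetical helper, under #if 0) of the check that the comment
 * above vulkan_transfer_data_from_mem() alludes to: importing host memory
 * with VK_EXT_external_memory_host requires both the pointer and the size of
 * each plane to be multiples of minImportedHostPointerAlignment, which is
 * 4096 on current implementations and almost never true for frame data. */
#if 0
static int plane_host_importable(const VkPhysicalDeviceExternalMemoryHostPropertiesEXT *props,
                                 const uint8_t *data, size_t size)
{
    const VkDeviceSize align = props->minImportedHostPointerAlignment;
    return !((uintptr_t)data % align) && !(size % align);
}
#endif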
2961
2962 static int vulkan_transfer_data_to(AVHWFramesContext *hwfc, AVFrame *dst,
2963                                    const AVFrame *src)
2964 {
2965     av_unused VulkanDevicePriv *p = hwfc->device_ctx->internal->priv;
2966
2967     switch (src->format) {
2968 #if CONFIG_CUDA
2969     case AV_PIX_FMT_CUDA:
2970         if ((p->extensions & EXT_EXTERNAL_FD_MEMORY) &&
2971             (p->extensions & EXT_EXTERNAL_FD_SEM))
2972             return vulkan_transfer_data_from_cuda(hwfc, dst, src);
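        /* Fall through: without both extensions the CUDA frame cannot be
         * imported; the default case below rejects it with ENOSYS */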
2973 #endif
2974     default:
2975         if (src->hw_frames_ctx)
2976             return AVERROR(ENOSYS);
2977         else
2978             return vulkan_transfer_data_from_mem(hwfc, dst, src);
2979     }
2980 }
2981
2982 #if CONFIG_CUDA
2983 static int vulkan_transfer_data_to_cuda(AVHWFramesContext *hwfc, AVFrame *dst,
2984                                         const AVFrame *src)
2985 {
2986     int err;
2987     VkResult ret;
2988     CUcontext dummy;
2989     AVVkFrame *dst_f;
2990     AVVkFrameInternal *dst_int;
2991     const int planes = av_pix_fmt_count_planes(hwfc->sw_format);
2992     const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(hwfc->sw_format);
2993
2994     AVHWFramesContext *cuda_fc = (AVHWFramesContext*)dst->hw_frames_ctx->data;
2995     AVHWDeviceContext *cuda_cu = cuda_fc->device_ctx;
2996     AVCUDADeviceContext *cuda_dev = cuda_cu->hwctx;
2997     AVCUDADeviceContextInternal *cu_internal = cuda_dev->internal;
2998     CudaFunctions *cu = cu_internal->cuda_dl;
2999
3000     dst_f = (AVVkFrame *)src->data[0];
3001
3002     /* Nothing to clean up yet, so on failure return directly instead of
3003      * taking the fail path, which would touch uninitialized state */
3004     ret = CHECK_CU(cu->cuCtxPushCurrent(cuda_dev->cuda_ctx));
3005     if (ret < 0)
3006         return AVERROR_EXTERNAL;
3007
3008     err = vulkan_export_to_cuda(hwfc, dst->hw_frames_ctx, src);
3009     if (err < 0)
3010         goto fail;
3011
3012     dst_int = dst_f->internal;
3014
3015     for (int i = 0; i < planes; i++) {
3016         CUDA_MEMCPY2D cpy = {
3017             .dstMemoryType = CU_MEMORYTYPE_DEVICE,
3018             .dstDevice     = (CUdeviceptr)dst->data[i],
3019             .dstPitch      = dst->linesize[i],
3020             .dstY          = 0,
3021
3022             .srcMemoryType = CU_MEMORYTYPE_ARRAY,
3023             .srcArray      = dst_int->cu_array[i],
3024             .WidthInBytes  = (i > 0 ? AV_CEIL_RSHIFT(hwfc->width, desc->log2_chroma_w)
3025                                     : hwfc->width) * desc->comp[i].step,
3026             .Height        = i > 0 ? AV_CEIL_RSHIFT(hwfc->height, desc->log2_chroma_h)
3027                                    : hwfc->height,
3028         };
3029
3030         ret = CHECK_CU(cu->cuMemcpy2DAsync(&cpy, cuda_dev->stream));
3031         if (ret < 0) {
3032             err = AVERROR_EXTERNAL;
3033             goto fail;
3034         }
3035     }
3036
3037     CHECK_CU(cu->cuCtxPopCurrent(&dummy));
3038
3039     av_log(hwfc, AV_LOG_VERBOSE, "Transferred Vulkan image to CUDA!\n");
3040
3041     return 0;
3042
3043 fail:
3044     CHECK_CU(cu->cuCtxPopCurrent(&dummy));
3045     vulkan_free_internal(dst_f->internal);
3046     dst_f->internal = NULL;
3047     av_buffer_unref(&dst->buf[0]);
3048     return err;
3049 }
3050 #endif
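
/* Worked example of the WidthInBytes computation in the copy loop above: for
 * an NV12 sw_format with even width, plane 1 holds interleaved UV at half
 * horizontal resolution with comp[1].step == 2, so WidthInBytes becomes
 * (width / 2) * 2 == width bytes, exactly the byte width of that plane. */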
3051
3052 static int vulkan_transfer_data_to_mem(AVHWFramesContext *hwfc, AVFrame *dst,
3053                                        const AVFrame *src)
3054 {
3055     int err = 0;
3056     AVFrame tmp;
3057     AVVkFrame *f = (AVVkFrame *)src->data[0];
3058     AVHWDeviceContext *dev_ctx = hwfc->device_ctx;
3059     AVBufferRef *bufs[AV_NUM_DATA_POINTERS] = { 0 };
3060     const int planes = av_pix_fmt_count_planes(dst->format);
3061     int log2_chroma = av_pix_fmt_desc_get(dst->format)->log2_chroma_h;
3062
3063     if (dst->width > hwfc->width || dst->height > hwfc->height)
3064         return AVERROR(EINVAL);
3065
3066     /* For linear, host-visible images */
3067     if (f->tiling == VK_IMAGE_TILING_LINEAR &&
3068         f->flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) {
3069         AVFrame *map = av_frame_alloc();
3070         if (!map)
3071             return AVERROR(ENOMEM);
3072         map->format = dst->format;
3073
3074         err = vulkan_map_frame_to_mem(hwfc, map, src, AV_HWFRAME_MAP_READ);
3075         if (!err)
3076             err = av_frame_copy(dst, map);
3077
3078         /* Freeing the mapped frame also unmaps the image */
3079         av_frame_free(&map);
3080         return err;
3081     }
3082
3083     /* Create buffers */
3084     for (int i = 0; i < planes; i++) {
3085         int h = dst->height;
3086         int p_height = i > 0 ? AV_CEIL_RSHIFT(h, log2_chroma) : h;
3087
3088         tmp.linesize[i] = FFABS(dst->linesize[i]);
3089         err = create_buf(dev_ctx, &bufs[i], p_height,
3090                          &tmp.linesize[i], VK_BUFFER_USAGE_TRANSFER_DST_BIT,
3091                          VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT, NULL, NULL);
3092         if (err)
3093             goto end;
3094     }
3095
3096     /* Copy image to buffer */
3097     if ((err = transfer_image_buf(dev_ctx, src, bufs, tmp.linesize,
3098                                   dst->width, dst->height, dst->format, 1)))
3099         goto end;
3100
3101     /* Map, copy buffer to frame, unmap */
3102     if ((err = map_buffers(dev_ctx, bufs, tmp.data, planes, 1)))
3103         goto end;
3104
3105     av_image_copy(dst->data, dst->linesize, (const uint8_t **)tmp.data,
3106                   tmp.linesize, dst->format, dst->width, dst->height);
3107
3108     err = unmap_buffers(dev_ctx, bufs, planes, 0);
3109
3110 end:
3111     for (int i = 0; i < planes; i++)
3112         av_buffer_unref(&bufs[i]);
3113
3114     return err;
3115 }
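
/* Mirror sketch of the download direction (hypothetical helper, under #if 0):
 * this lands in vulkan_transfer_data_to_mem() above, which submits the copy
 * and waits for it, since raw memory has no way to signal completion. */
#if 0
static int download_frame_example(AVFrame *dst_sw, const AVFrame *vk_src)
{
    enum AVPixelFormat *fmts;
    int err = av_hwframe_transfer_get_formats(vk_src->hw_frames_ctx,
                                              AV_HWFRAME_TRANSFER_DIRECTION_FROM,
                                              &fmts, 0);
    if (err < 0)
        return err;

    dst_sw->format = fmts[0]; /* First entry; the list is NONE-terminated */
    av_free(fmts);

    return av_hwframe_transfer_data(dst_sw, vk_src, 0);
}
#endif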
3116
3117 static int vulkan_transfer_data_from(AVHWFramesContext *hwfc, AVFrame *dst,
3118                                      const AVFrame *src)
3119 {
3120     av_unused VulkanDevicePriv *p = hwfc->device_ctx->internal->priv;
3121
3122     switch (dst->format) {
3123 #if CONFIG_CUDA
3124     case AV_PIX_FMT_CUDA:
3125         if ((p->extensions & EXT_EXTERNAL_FD_MEMORY) &&
3126             (p->extensions & EXT_EXTERNAL_FD_SEM))
3127             return vulkan_transfer_data_to_cuda(hwfc, dst, src);
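        /* Fall through: without both extensions the frame cannot be exported
         * to CUDA; the default case below rejects it with ENOSYS */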
3128 #endif
3129     default:
3130         if (dst->hw_frames_ctx)
3131             return AVERROR(ENOSYS);
3132         else
3133             return vulkan_transfer_data_to_mem(hwfc, dst, src);
3134     }
3135 }
3136
3137 AVVkFrame *av_vk_frame_alloc(void)
3138 {
3139     return av_mallocz(sizeof(AVVkFrame));
3140 }
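
/* av_vk_frame_alloc() is what user-provided pools should use to allocate
 * frames, so their AVVkFrames always match the size libavutil expects.
 * A hypothetical pool callback might look like this (under #if 0): */
#if 0
static AVBufferRef *user_pool_alloc_example(void *opaque, int size)
{
    AVVkFrame *f = av_vk_frame_alloc();
    if (!f)
        return NULL;

    /* ...fill in f->img, f->mem, f->sem etc. from externally created
     * resources before handing the frame to libavutil... */
    return av_buffer_create((uint8_t *)f, sizeof(*f),
                            user_pool_free_example /* hypothetical */,
                            opaque, 0);
}
#endif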
3141
3142 const HWContextType ff_hwcontext_type_vulkan = {
3143     .type                   = AV_HWDEVICE_TYPE_VULKAN,
3144     .name                   = "Vulkan",
3145
3146     .device_hwctx_size      = sizeof(AVVulkanDeviceContext),
3147     .device_priv_size       = sizeof(VulkanDevicePriv),
3148     .frames_hwctx_size      = sizeof(AVVulkanFramesContext),
3149     .frames_priv_size       = sizeof(VulkanFramesPriv),
3150
3151     .device_init            = &vulkan_device_init,
3152     .device_create          = &vulkan_device_create,
3153     .device_derive          = &vulkan_device_derive,
3154
3155     .frames_get_constraints = &vulkan_frames_get_constraints,
3156     .frames_init            = vulkan_frames_init,
3157     .frames_get_buffer      = vulkan_get_buffer,
3158     .frames_uninit          = vulkan_frames_uninit,
3159
3160     .transfer_get_formats   = vulkan_transfer_get_formats,
3161     .transfer_data_to       = vulkan_transfer_data_to,
3162     .transfer_data_from     = vulkan_transfer_data_from,
3163
3164     .map_to                 = vulkan_map_to,
3165     .map_from               = vulkan_map_from,
3166
3167     .pix_fmts = (const enum AVPixelFormat []) {
3168         AV_PIX_FMT_VULKAN,
3169         AV_PIX_FMT_NONE
3170     },
3171 };