/*
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */
#include "hwcontext.h"
#include "hwcontext_internal.h"
#include "hwcontext_vulkan.h"

#include <drm_fourcc.h>
#include "hwcontext_drm.h"

#include <va/va_drmcommon.h>
#include "hwcontext_vaapi.h"

#include "hwcontext_cuda_internal.h"
#include "cuda_check.h"
#define CHECK_CU(x) FF_CUDA_CHECK_DL(cuda_cu, cu, x)
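
/* CHECK_CU() wraps a CUDA driver API call: it logs failures and evaluates to
 * 0 on success or a negative AVERROR on error. It expects a CudaFunctions *cu
 * table (and a logging context named cuda_cu) in scope at the call site, as
 * in the CUDA interop paths further down. */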
typedef struct VulkanQueueCtx {
    VkFence fence;
    VkQueue queue;
    int was_synchronous;

    /* Buffer dependencies */
    AVBufferRef **buf_deps;
    int nb_buf_deps;
    int buf_deps_alloc_size;
} VulkanQueueCtx;

typedef struct VulkanExecCtx {
    VkCommandPool pool;
    VkCommandBuffer *bufs;
    VulkanQueueCtx *queues;
    int nb_queues;
    int cur_queue_idx;
} VulkanExecCtx;
typedef struct VulkanDevicePriv {
    VkPhysicalDeviceProperties props;
    VkPhysicalDeviceMemoryProperties mprops;
    VkDebugUtilsMessengerEXT debug_ctx;
    uint64_t extensions; /* Bitfield of enabled VulkanExtensions */
    int use_linear_images;
    uint32_t qfs[3]; /* Queue families used */
    int num_qfs;
    int dev_is_nvidia;
} VulkanDevicePriv;
typedef struct VulkanFramesPriv {
    /* Image conversions */
    VulkanExecCtx conv_ctx;

    /* Image transfers */
    VulkanExecCtx upload_ctx;
    VulkanExecCtx download_ctx;
} VulkanFramesPriv;
typedef struct AVVkFrameInternal {
    /* Importing external memory into CUDA is expensive, so the memory is
     * kept imported for the frame's entire lifetime */
    AVBufferRef *cuda_fc_ref; /* Need to keep it around for uninit */
    CUexternalMemory ext_mem[AV_NUM_DATA_POINTERS];
    CUmipmappedArray cu_mma[AV_NUM_DATA_POINTERS];
    CUarray cu_array[AV_NUM_DATA_POINTERS];
    CUexternalSemaphore cu_sem[AV_NUM_DATA_POINTERS];
} AVVkFrameInternal;
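
/* Per plane, the CUDA side holds an imported memory object (ext_mem), a
 * mipmapped array mapped from it (cu_mma), a CUarray view of it (cu_array,
 * owned by the mipmapped array, so it needs no explicit teardown), and an
 * imported semaphore (cu_sem); the rest are destroyed together in
 * vulkan_free_internal() below. */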
#define GET_QUEUE_COUNT(hwctx, graph, comp, tx) (                   \
    graph ?  hwctx->nb_graphics_queues :                            \
     comp ? (hwctx->nb_comp_queues ?                                \
             hwctx->nb_comp_queues : hwctx->nb_graphics_queues) :   \
       tx ? (hwctx->nb_tx_queues ? hwctx->nb_tx_queues :            \
             (hwctx->nb_comp_queues ?                               \
              hwctx->nb_comp_queues : hwctx->nb_graphics_queues)) : \
    0                                                               \
)
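
/* e.g. GET_QUEUE_COUNT(hwctx, 0, 1, 0) evaluates to nb_comp_queues if the
 * device exposes a compute family, else to nb_graphics_queues; the transfer
 * count falls back through compute to graphics the same way. */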
#define VK_LOAD_PFN(inst, name) PFN_##name pfn_##name = (PFN_##name)           \
                                    vkGetInstanceProcAddr(inst, #name)

#define DEFAULT_USAGE_FLAGS (VK_IMAGE_USAGE_SAMPLED_BIT      |                 \
                             VK_IMAGE_USAGE_STORAGE_BIT      |                 \
                             VK_IMAGE_USAGE_TRANSFER_SRC_BIT |                 \
                             VK_IMAGE_USAGE_TRANSFER_DST_BIT)
#define ADD_VAL_TO_LIST(list, count, val)                                      \
    do {                                                                       \
        list = av_realloc_array(list, sizeof(*list), ++count);                 \
        if (!list) {                                                           \
            err = AVERROR(ENOMEM);                                             \
            goto fail;                                                         \
        }                                                                      \
        list[count - 1] = av_strdup(val);                                      \
        if (!list[count - 1]) {                                                \
            err = AVERROR(ENOMEM);                                             \
            goto fail;                                                         \
        }                                                                      \
    } while (0)
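
/* Note: ADD_VAL_TO_LIST() relies on a local int err and a fail: cleanup label
 * in the calling function, e.g.
 *     ADD_VAL_TO_LIST(extension_names, extensions_found, tstr);
 * as used in check_extensions() below. */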
static const struct {
    enum AVPixelFormat pixfmt;
    const VkFormat vkfmts[3];
} vk_pixfmt_map[] = {
    { AV_PIX_FMT_GRAY8,   { VK_FORMAT_R8_UNORM } },
    { AV_PIX_FMT_GRAY16,  { VK_FORMAT_R16_UNORM } },
    { AV_PIX_FMT_GRAYF32, { VK_FORMAT_R32_SFLOAT } },

    { AV_PIX_FMT_NV12, { VK_FORMAT_R8_UNORM,  VK_FORMAT_R8G8_UNORM } },
    { AV_PIX_FMT_P010, { VK_FORMAT_R16_UNORM, VK_FORMAT_R16G16_UNORM } },
    { AV_PIX_FMT_P016, { VK_FORMAT_R16_UNORM, VK_FORMAT_R16G16_UNORM } },

    { AV_PIX_FMT_YUV420P, { VK_FORMAT_R8_UNORM, VK_FORMAT_R8_UNORM, VK_FORMAT_R8_UNORM } },
    { AV_PIX_FMT_YUV422P, { VK_FORMAT_R8_UNORM, VK_FORMAT_R8_UNORM, VK_FORMAT_R8_UNORM } },
    { AV_PIX_FMT_YUV444P, { VK_FORMAT_R8_UNORM, VK_FORMAT_R8_UNORM, VK_FORMAT_R8_UNORM } },

    { AV_PIX_FMT_YUV420P16, { VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM } },
    { AV_PIX_FMT_YUV422P16, { VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM } },
    { AV_PIX_FMT_YUV444P16, { VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM } },

    { AV_PIX_FMT_ABGR,   { VK_FORMAT_A8B8G8R8_UNORM_PACK32 } },
    { AV_PIX_FMT_BGRA,   { VK_FORMAT_B8G8R8A8_UNORM } },
    { AV_PIX_FMT_RGBA,   { VK_FORMAT_R8G8B8A8_UNORM } },
    { AV_PIX_FMT_RGB24,  { VK_FORMAT_R8G8B8_UNORM } },
    { AV_PIX_FMT_BGR24,  { VK_FORMAT_B8G8R8_UNORM } },
    { AV_PIX_FMT_RGB48,  { VK_FORMAT_R16G16B16_UNORM } },
    { AV_PIX_FMT_RGBA64, { VK_FORMAT_R16G16B16A16_UNORM } },
    { AV_PIX_FMT_RGB565, { VK_FORMAT_R5G6B5_UNORM_PACK16 } },
    { AV_PIX_FMT_BGR565, { VK_FORMAT_B5G6R5_UNORM_PACK16 } },
    { AV_PIX_FMT_BGR0,   { VK_FORMAT_B8G8R8A8_UNORM } },
    { AV_PIX_FMT_0BGR,   { VK_FORMAT_A8B8G8R8_UNORM_PACK32 } },
    { AV_PIX_FMT_RGB0,   { VK_FORMAT_R8G8B8A8_UNORM } },

    { AV_PIX_FMT_GBRPF32, { VK_FORMAT_R32_SFLOAT, VK_FORMAT_R32_SFLOAT, VK_FORMAT_R32_SFLOAT } },
};
const VkFormat *av_vkfmt_from_pixfmt(enum AVPixelFormat p)
{
    for (enum AVPixelFormat i = 0; i < FF_ARRAY_ELEMS(vk_pixfmt_map); i++)
        if (vk_pixfmt_map[i].pixfmt == p)
            return vk_pixfmt_map[i].vkfmts;
    return NULL;
}
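
/* e.g. av_vkfmt_from_pixfmt(AV_PIX_FMT_NV12) yields { VK_FORMAT_R8_UNORM,
 * VK_FORMAT_R8G8_UNORM }, one VkFormat per plane; unmapped pixel formats
 * yield NULL. */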
static int pixfmt_is_supported(AVVulkanDeviceContext *hwctx, enum AVPixelFormat p,
                               int linear)
{
    const VkFormat *fmt = av_vkfmt_from_pixfmt(p);
    int planes = av_pix_fmt_count_planes(p);

    if (!fmt)
        return 0;

    for (int i = 0; i < planes; i++) {
        VkFormatFeatureFlags flags;
        VkFormatProperties2 prop = {
            .sType = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2,
        };
        vkGetPhysicalDeviceFormatProperties2(hwctx->phys_dev, fmt[i], &prop);
        flags = linear ? prop.formatProperties.linearTilingFeatures :
                         prop.formatProperties.optimalTilingFeatures;
        if (!(flags & DEFAULT_USAGE_FLAGS))
            return 0;
    }

    return 1;
}
enum VulkanExtensions {
    EXT_EXTERNAL_DMABUF_MEMORY = 1ULL <<  0, /* VK_EXT_external_memory_dma_buf */
    EXT_DRM_MODIFIER_FLAGS     = 1ULL <<  1, /* VK_EXT_image_drm_format_modifier */
    EXT_EXTERNAL_FD_MEMORY     = 1ULL <<  2, /* VK_KHR_external_memory_fd */
    EXT_EXTERNAL_FD_SEM        = 1ULL <<  3, /* VK_KHR_external_semaphore_fd */

    EXT_NO_FLAG                = 1ULL << 63,
};

typedef struct VulkanOptExtension {
    const char *name;
    uint64_t flag;
} VulkanOptExtension;
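
/* Each enabled optional extension sets its bit in VulkanDevicePriv.extensions,
 * so capability checks are plain bit tests, e.g.:
 *     if (p->extensions & EXT_DRM_MODIFIER_FLAGS)
 *         tiling = VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT;
 * (illustrative; see the DMA-BUF import path below for the real use) */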
static const VulkanOptExtension optional_instance_exts[] = {
    /* None so far */
};

static const VulkanOptExtension optional_device_exts[] = {
    { VK_KHR_EXTERNAL_MEMORY_FD_EXTENSION_NAME,        EXT_EXTERNAL_FD_MEMORY,     },
    { VK_EXT_EXTERNAL_MEMORY_DMA_BUF_EXTENSION_NAME,   EXT_EXTERNAL_DMABUF_MEMORY, },
    { VK_EXT_IMAGE_DRM_FORMAT_MODIFIER_EXTENSION_NAME, EXT_DRM_MODIFIER_FLAGS,     },
    { VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME,     EXT_EXTERNAL_FD_SEM,        },
};
/* Converts return values to strings */
static const char *vk_ret2str(VkResult res)
{
#define CASE(VAL) case VAL: return #VAL
    switch (res) {
    CASE(VK_SUCCESS);
    CASE(VK_EVENT_RESET);
    CASE(VK_ERROR_OUT_OF_HOST_MEMORY);
    CASE(VK_ERROR_OUT_OF_DEVICE_MEMORY);
    CASE(VK_ERROR_INITIALIZATION_FAILED);
    CASE(VK_ERROR_DEVICE_LOST);
    CASE(VK_ERROR_MEMORY_MAP_FAILED);
    CASE(VK_ERROR_LAYER_NOT_PRESENT);
    CASE(VK_ERROR_EXTENSION_NOT_PRESENT);
    CASE(VK_ERROR_FEATURE_NOT_PRESENT);
    CASE(VK_ERROR_INCOMPATIBLE_DRIVER);
    CASE(VK_ERROR_TOO_MANY_OBJECTS);
    CASE(VK_ERROR_FORMAT_NOT_SUPPORTED);
    CASE(VK_ERROR_FRAGMENTED_POOL);
    CASE(VK_ERROR_SURFACE_LOST_KHR);
    CASE(VK_ERROR_NATIVE_WINDOW_IN_USE_KHR);
    CASE(VK_SUBOPTIMAL_KHR);
    CASE(VK_ERROR_OUT_OF_DATE_KHR);
    CASE(VK_ERROR_INCOMPATIBLE_DISPLAY_KHR);
    CASE(VK_ERROR_VALIDATION_FAILED_EXT);
    CASE(VK_ERROR_INVALID_SHADER_NV);
    CASE(VK_ERROR_OUT_OF_POOL_MEMORY);
    CASE(VK_ERROR_INVALID_EXTERNAL_HANDLE);
    CASE(VK_ERROR_NOT_PERMITTED_EXT);
    CASE(VK_ERROR_INVALID_DRM_FORMAT_MODIFIER_PLANE_LAYOUT_EXT);
    CASE(VK_ERROR_INVALID_DEVICE_ADDRESS_EXT);
    CASE(VK_ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT);
    default: return "Unknown error";
    }
#undef CASE
}
static VkBool32 vk_dbg_callback(VkDebugUtilsMessageSeverityFlagBitsEXT severity,
                                VkDebugUtilsMessageTypeFlagsEXT messageType,
                                const VkDebugUtilsMessengerCallbackDataEXT *data,
                                void *priv)
{
    int l;
    AVHWDeviceContext *ctx = priv;

    switch (severity) {
    case VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT: l = AV_LOG_VERBOSE; break;
    case VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT:    l = AV_LOG_INFO;    break;
    case VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT: l = AV_LOG_WARNING; break;
    case VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT:   l = AV_LOG_ERROR;   break;
    default:                                              l = AV_LOG_DEBUG;   break;
    }

    av_log(ctx, l, "%s\n", data->pMessage);
    for (int i = 0; i < data->cmdBufLabelCount; i++)
        av_log(ctx, l, "\t%i: %s\n", i, data->pCmdBufLabels[i].pLabelName);

    return 0;
}
static int check_extensions(AVHWDeviceContext *ctx, int dev, AVDictionary *opts,
                            const char * const **dst, uint32_t *num, int debug)
{
    const char *tstr;
    const char **extension_names = NULL;
    VulkanDevicePriv *p = ctx->internal->priv;
    AVVulkanDeviceContext *hwctx = ctx->hwctx;
    int err = 0, found, extensions_found = 0;

    const char *mod;
    int optional_exts_num;
    uint32_t sup_ext_count;
    char *user_exts_str = NULL;
    AVDictionaryEntry *user_exts;
    VkExtensionProperties *sup_ext;
    const VulkanOptExtension *optional_exts;

    if (!dev) {
        mod = "instance";
        optional_exts = optional_instance_exts;
        optional_exts_num = FF_ARRAY_ELEMS(optional_instance_exts);
        user_exts = av_dict_get(opts, "instance_extensions", NULL, 0);
        if (user_exts) {
            user_exts_str = av_strdup(user_exts->value);
            if (!user_exts_str) {
                err = AVERROR(ENOMEM);
                goto fail;
            }
        }
        vkEnumerateInstanceExtensionProperties(NULL, &sup_ext_count, NULL);
        sup_ext = av_malloc_array(sup_ext_count, sizeof(VkExtensionProperties));
        if (!sup_ext)
            return AVERROR(ENOMEM);
        vkEnumerateInstanceExtensionProperties(NULL, &sup_ext_count, sup_ext);
    } else {
        mod = "device";
        optional_exts = optional_device_exts;
        optional_exts_num = FF_ARRAY_ELEMS(optional_device_exts);
        user_exts = av_dict_get(opts, "device_extensions", NULL, 0);
        if (user_exts) {
            user_exts_str = av_strdup(user_exts->value);
            if (!user_exts_str) {
                err = AVERROR(ENOMEM);
                goto fail;
            }
        }
        vkEnumerateDeviceExtensionProperties(hwctx->phys_dev, NULL,
                                             &sup_ext_count, NULL);
        sup_ext = av_malloc_array(sup_ext_count, sizeof(VkExtensionProperties));
        if (!sup_ext)
            return AVERROR(ENOMEM);
        vkEnumerateDeviceExtensionProperties(hwctx->phys_dev, NULL,
                                             &sup_ext_count, sup_ext);
    }

    for (int i = 0; i < optional_exts_num; i++) {
        tstr = optional_exts[i].name;
        found = 0;
        for (int j = 0; j < sup_ext_count; j++) {
            if (!strcmp(tstr, sup_ext[j].extensionName)) {
                found = 1;
                break;
            }
        }
        if (!found)
            continue;

        av_log(ctx, AV_LOG_VERBOSE, "Using %s extension \"%s\"\n", mod, tstr);
        p->extensions |= optional_exts[i].flag;
        ADD_VAL_TO_LIST(extension_names, extensions_found, tstr);
    }

    if (debug) {
        tstr = VK_EXT_DEBUG_UTILS_EXTENSION_NAME;
        found = 0;
        for (int j = 0; j < sup_ext_count; j++) {
            if (!strcmp(tstr, sup_ext[j].extensionName)) {
                found = 1;
                break;
            }
        }
        if (found) {
            av_log(ctx, AV_LOG_VERBOSE, "Using %s extension \"%s\"\n", mod, tstr);
            ADD_VAL_TO_LIST(extension_names, extensions_found, tstr);
        } else {
            av_log(ctx, AV_LOG_ERROR, "Debug extension \"%s\" not found!\n",
                   tstr);
            err = AVERROR(EINVAL);
            goto fail;
        }
    }

    if (user_exts_str) {
        char *save, *token = av_strtok(user_exts_str, "+", &save);
        while (token) {
            found = 0;
            for (int j = 0; j < sup_ext_count; j++) {
                if (!strcmp(token, sup_ext[j].extensionName)) {
                    found = 1;
                    break;
                }
            }
            if (found) {
                av_log(ctx, AV_LOG_VERBOSE, "Using %s extension \"%s\"\n", mod, token);
                ADD_VAL_TO_LIST(extension_names, extensions_found, token);
            } else {
                av_log(ctx, AV_LOG_WARNING, "%s extension \"%s\" not found, excluding.\n",
                       mod, token);
            }
            token = av_strtok(NULL, "+", &save);
        }
    }

    *dst = extension_names;
    *num = extensions_found;

    av_free(user_exts_str);
    av_free(sup_ext);
    return 0;

fail:
    for (int i = 0; i < extensions_found; i++)
        av_free((void *)extension_names[i]);
    av_free(extension_names);
    av_free(user_exts_str);
    av_free(sup_ext);
    return err;
}
/* Creates a VkInstance */
static int create_instance(AVHWDeviceContext *ctx, AVDictionary *opts)
{
    int err = 0;
    VkResult ret;
    VulkanDevicePriv *p = ctx->internal->priv;
    AVVulkanDeviceContext *hwctx = ctx->hwctx;
    AVDictionaryEntry *debug_opt = av_dict_get(opts, "debug", NULL, 0);
    const int debug_mode = debug_opt && strtol(debug_opt->value, NULL, 10);
    VkApplicationInfo application_info = {
        .sType         = VK_STRUCTURE_TYPE_APPLICATION_INFO,
        .pEngineName   = "libavutil",
        .apiVersion    = VK_API_VERSION_1_1,
        .engineVersion = VK_MAKE_VERSION(LIBAVUTIL_VERSION_MAJOR,
                                         LIBAVUTIL_VERSION_MINOR,
                                         LIBAVUTIL_VERSION_MICRO),
    };
    VkInstanceCreateInfo inst_props = {
        .sType            = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO,
        .pApplicationInfo = &application_info,
    };

    /* Check for present/missing extensions */
    err = check_extensions(ctx, 0, opts, &inst_props.ppEnabledExtensionNames,
                           &inst_props.enabledExtensionCount, debug_mode);
    if (err < 0)
        return err;

    if (debug_mode) {
        static const char *layers[] = { "VK_LAYER_KHRONOS_validation" };
        inst_props.ppEnabledLayerNames = layers;
        inst_props.enabledLayerCount = FF_ARRAY_ELEMS(layers);
    }

    /* Try to create the instance */
    ret = vkCreateInstance(&inst_props, hwctx->alloc, &hwctx->inst);

    /* Check for errors */
    if (ret != VK_SUCCESS) {
        av_log(ctx, AV_LOG_ERROR, "Instance creation failure: %s\n",
               vk_ret2str(ret));
        for (int i = 0; i < inst_props.enabledExtensionCount; i++)
            av_free((void *)inst_props.ppEnabledExtensionNames[i]);
        av_free((void *)inst_props.ppEnabledExtensionNames);
        return AVERROR_EXTERNAL;
    }

    if (debug_mode) {
        VkDebugUtilsMessengerCreateInfoEXT dbg = {
            .sType           = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT,
            .messageSeverity = VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT |
                               VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT    |
                               VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT |
                               VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT,
            .messageType     = VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT     |
                               VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT  |
                               VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT,
            .pfnUserCallback = vk_dbg_callback,
            .pUserData       = ctx,
        };
        VK_LOAD_PFN(hwctx->inst, vkCreateDebugUtilsMessengerEXT);
        pfn_vkCreateDebugUtilsMessengerEXT(hwctx->inst, &dbg,
                                           hwctx->alloc, &p->debug_ctx);
    }

    hwctx->enabled_inst_extensions = inst_props.ppEnabledExtensionNames;
    hwctx->nb_enabled_inst_extensions = inst_props.enabledExtensionCount;

    return 0;
}
typedef struct VulkanDeviceSelection {
    uint8_t uuid[VK_UUID_SIZE]; /* Will use this first unless !has_uuid */
    int has_uuid;
    const char *name;           /* Will use this second unless NULL */
    uint32_t pci_device;        /* Will use this third unless 0x0 */
    uint32_t vendor_id;         /* Last resort to find something deterministic */
    int index;                  /* Finally fall back to index */
} VulkanDeviceSelection;
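
/* e.g. vulkan_device_create() fills only index/name from the user's device
 * string, while the derivation paths below fill uuid (CUDA), pci_device (DRM)
 * or vendor_id (VAAPI) and leave the rest zeroed. */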
static const char *vk_dev_type(enum VkPhysicalDeviceType type)
{
    switch (type) {
    case VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU: return "integrated";
    case VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU:   return "discrete";
    case VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU:    return "virtual";
    case VK_PHYSICAL_DEVICE_TYPE_CPU:            return "software";
    default:                                     return "unknown";
    }
}
static int find_device(AVHWDeviceContext *ctx, VulkanDeviceSelection *select)
{
    int err = 0, choice = -1;
    uint32_t num;
    VkResult ret;
    VkPhysicalDevice *devices = NULL;
    VkPhysicalDeviceIDProperties *idp = NULL;
    VkPhysicalDeviceProperties2 *prop = NULL;
    AVVulkanDeviceContext *hwctx = ctx->hwctx;

    ret = vkEnumeratePhysicalDevices(hwctx->inst, &num, NULL);
    if (ret != VK_SUCCESS || !num) {
        av_log(ctx, AV_LOG_ERROR, "No devices found: %s!\n", vk_ret2str(ret));
        return AVERROR(ENODEV);
    }

    devices = av_malloc_array(num, sizeof(VkPhysicalDevice));
    if (!devices)
        return AVERROR(ENOMEM);

    ret = vkEnumeratePhysicalDevices(hwctx->inst, &num, devices);
    if (ret != VK_SUCCESS) {
        av_log(ctx, AV_LOG_ERROR, "Failed enumerating devices: %s\n",
               vk_ret2str(ret));
        err = AVERROR(ENODEV);
        goto end;
    }

    prop = av_mallocz_array(num, sizeof(*prop));
    if (!prop) {
        err = AVERROR(ENOMEM);
        goto end;
    }

    idp = av_mallocz_array(num, sizeof(*idp));
    if (!idp) {
        err = AVERROR(ENOMEM);
        goto end;
    }

    av_log(ctx, AV_LOG_VERBOSE, "GPU listing:\n");
    for (int i = 0; i < num; i++) {
        idp[i].sType  = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES;
        prop[i].sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2;
        prop[i].pNext = &idp[i];

        vkGetPhysicalDeviceProperties2(devices[i], &prop[i]);
        av_log(ctx, AV_LOG_VERBOSE, "    %d: %s (%s) (0x%x)\n", i,
               prop[i].properties.deviceName,
               vk_dev_type(prop[i].properties.deviceType),
               prop[i].properties.deviceID);
    }

    if (select->has_uuid) {
        for (int i = 0; i < num; i++) {
            /* deviceUUID is raw bytes, not a string, so memcmp, not strncmp */
            if (!memcmp(idp[i].deviceUUID, select->uuid, VK_UUID_SIZE)) {
                choice = i;
                goto end;
            }
        }
        av_log(ctx, AV_LOG_ERROR, "Unable to find device by given UUID!\n");
        err = AVERROR(ENODEV);
        goto end;
    } else if (select->name) {
        av_log(ctx, AV_LOG_VERBOSE, "Requested device: %s\n", select->name);
        for (int i = 0; i < num; i++) {
            if (strstr(prop[i].properties.deviceName, select->name)) {
                choice = i;
                goto end;
            }
        }
        av_log(ctx, AV_LOG_ERROR, "Unable to find device \"%s\"!\n",
               select->name);
        err = AVERROR(ENODEV);
        goto end;
    } else if (select->pci_device) {
        av_log(ctx, AV_LOG_VERBOSE, "Requested device: 0x%x\n", select->pci_device);
        for (int i = 0; i < num; i++) {
            if (select->pci_device == prop[i].properties.deviceID) {
                choice = i;
                goto end;
            }
        }
        av_log(ctx, AV_LOG_ERROR, "Unable to find device with PCI ID 0x%x!\n",
               select->pci_device);
        err = AVERROR(EINVAL);
        goto end;
    } else if (select->vendor_id) {
        av_log(ctx, AV_LOG_VERBOSE, "Requested vendor: 0x%x\n", select->vendor_id);
        for (int i = 0; i < num; i++) {
            if (select->vendor_id == prop[i].properties.vendorID) {
                choice = i;
                goto end;
            }
        }
        av_log(ctx, AV_LOG_ERROR, "Unable to find device with Vendor ID 0x%x!\n",
               select->vendor_id);
        err = AVERROR(ENODEV);
        goto end;
    } else {
        if (select->index < num) {
            choice = select->index;
            goto end;
        }
        av_log(ctx, AV_LOG_ERROR, "Unable to find device with index %i!\n",
               select->index);
        err = AVERROR(ENODEV);
        goto end;
    }

end:
    if (choice > -1)
        hwctx->phys_dev = devices[choice];

    av_free(devices);
    av_free(prop);
    av_free(idp);

    return err;
}
static int search_queue_families(AVHWDeviceContext *ctx, VkDeviceCreateInfo *cd)
{
    uint32_t num;
    float *weights;
    VkQueueFamilyProperties *qs = NULL;
    AVVulkanDeviceContext *hwctx = ctx->hwctx;
    int graph_index = -1, comp_index = -1, tx_index = -1;
    VkDeviceQueueCreateInfo *pc = (VkDeviceQueueCreateInfo *)cd->pQueueCreateInfos;

    /* First get the number of queue families */
    vkGetPhysicalDeviceQueueFamilyProperties(hwctx->phys_dev, &num, NULL);
    if (!num) {
        av_log(ctx, AV_LOG_ERROR, "Failed to get queues!\n");
        return AVERROR_EXTERNAL;
    }

    /* Then allocate memory */
    qs = av_malloc_array(num, sizeof(VkQueueFamilyProperties));
    if (!qs)
        return AVERROR(ENOMEM);

    /* Finally retrieve the queue families */
    vkGetPhysicalDeviceQueueFamilyProperties(hwctx->phys_dev, &num, qs);

#define SEARCH_FLAGS(expr, out)                                                \
    for (int i = 0; i < num; i++) {                                            \
        const VkQueueFlagBits flags = qs[i].queueFlags;                        \
        if (expr) {                                                            \
            out = i;                                                           \
            break;                                                             \
        }                                                                      \
    }

    /* Prefer dedicated queue families where available */
    SEARCH_FLAGS(flags & VK_QUEUE_GRAPHICS_BIT, graph_index)

    SEARCH_FLAGS((flags & VK_QUEUE_COMPUTE_BIT) && (i != graph_index),
                 comp_index)

    SEARCH_FLAGS((flags & VK_QUEUE_TRANSFER_BIT) && (i != graph_index) &&
                 (i != comp_index), tx_index)
#define ADD_QUEUE(fidx, graph, comp, tx)                                                 \
    av_log(ctx, AV_LOG_VERBOSE, "Using queue family %i (total queues: %i) for %s%s%s\n", \
           fidx, qs[fidx].queueCount, graph ? "graphics " : "",                          \
           comp ? "compute " : "", tx ? "transfers " : "");                              \
    av_log(ctx, AV_LOG_VERBOSE, "    QF %i flags: %s%s%s%s\n", fidx,                     \
           ((qs[fidx].queueFlags) & VK_QUEUE_GRAPHICS_BIT) ? "(graphics) " : "",         \
           ((qs[fidx].queueFlags) & VK_QUEUE_COMPUTE_BIT) ? "(compute) " : "",           \
           ((qs[fidx].queueFlags) & VK_QUEUE_TRANSFER_BIT) ? "(transfers) " : "",        \
           ((qs[fidx].queueFlags) & VK_QUEUE_SPARSE_BINDING_BIT) ? "(sparse) " : "");    \
    pc[cd->queueCreateInfoCount].queueFamilyIndex = fidx;                                \
    pc[cd->queueCreateInfoCount].queueCount = qs[fidx].queueCount;                       \
    weights = av_malloc(qs[fidx].queueCount * sizeof(float));                            \
    pc[cd->queueCreateInfoCount].pQueuePriorities = weights;                             \
    if (!weights)                                                                        \
        goto fail;                                                                       \
    for (int i = 0; i < qs[fidx].queueCount; i++)                                        \
        weights[i] = 1.0f;                                                               \
    cd->queueCreateInfoCount++;
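
    /* e.g. a device exposing families { graphics|compute|transfer, compute,
     * transfer } yields graph_index = 0, comp_index = 1, tx_index = 2 above,
     * and ADD_QUEUE() then emits one VkDeviceQueueCreateInfo per family,
     * requesting every queue the family offers. */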
    ADD_QUEUE(graph_index, 1, comp_index < 0, tx_index < 0 && comp_index < 0)
    hwctx->queue_family_index      = graph_index;
    hwctx->queue_family_comp_index = graph_index;
    hwctx->queue_family_tx_index   = graph_index;
    hwctx->nb_graphics_queues      = qs[graph_index].queueCount;

    if (comp_index != -1) {
        ADD_QUEUE(comp_index, 0, 1, tx_index < 0)
        hwctx->queue_family_tx_index   = comp_index;
        hwctx->queue_family_comp_index = comp_index;
        hwctx->nb_comp_queues          = qs[comp_index].queueCount;
    }

    if (tx_index != -1) {
        ADD_QUEUE(tx_index, 0, 0, 1)
        hwctx->queue_family_tx_index = tx_index;
        hwctx->nb_tx_queues          = qs[tx_index].queueCount;
    }

    av_free(qs);

    return 0;

fail:
    av_freep(&pc[0].pQueuePriorities);
    av_freep(&pc[1].pQueuePriorities);
    av_freep(&pc[2].pQueuePriorities);
    av_free(qs);

    return AVERROR(ENOMEM);
}
static int create_exec_ctx(AVHWFramesContext *hwfc, VulkanExecCtx *cmd,
                           int queue_family_index, int num_queues)
{
    VkResult ret;
    AVVulkanDeviceContext *hwctx = hwfc->device_ctx->hwctx;

    VkCommandPoolCreateInfo cqueue_create = {
        .sType            = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,
        .flags            = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT,
        .queueFamilyIndex = queue_family_index,
    };
    VkCommandBufferAllocateInfo cbuf_create = {
        .sType              = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,
        .level              = VK_COMMAND_BUFFER_LEVEL_PRIMARY,
        .commandBufferCount = num_queues,
    };

    cmd->nb_queues = num_queues;

    cmd->queues = av_mallocz(num_queues * sizeof(*cmd->queues));
    if (!cmd->queues)
        return AVERROR(ENOMEM);

    cmd->bufs = av_mallocz(num_queues * sizeof(*cmd->bufs));
    if (!cmd->bufs)
        return AVERROR(ENOMEM);

    /* Create command pool */
    ret = vkCreateCommandPool(hwctx->act_dev, &cqueue_create,
                              hwctx->alloc, &cmd->pool);
    if (ret != VK_SUCCESS) {
        av_log(hwfc, AV_LOG_ERROR, "Command pool creation failure: %s\n",
               vk_ret2str(ret));
        return AVERROR_EXTERNAL;
    }

    cbuf_create.commandPool = cmd->pool;

    /* Allocate command buffer */
    ret = vkAllocateCommandBuffers(hwctx->act_dev, &cbuf_create, cmd->bufs);
    if (ret != VK_SUCCESS) {
        av_log(hwfc, AV_LOG_ERROR, "Command buffer alloc failure: %s\n",
               vk_ret2str(ret));
        return AVERROR_EXTERNAL;
    }

    for (int i = 0; i < num_queues; i++) {
        VulkanQueueCtx *q = &cmd->queues[i];
        vkGetDeviceQueue(hwctx->act_dev, queue_family_index, i, &q->queue);
        q->was_synchronous = 1;
    }

    return 0;
}
static void free_exec_ctx(AVHWFramesContext *hwfc, VulkanExecCtx *cmd)
{
    AVVulkanDeviceContext *hwctx = hwfc->device_ctx->hwctx;

    /* Make sure all queues have finished executing */
    for (int i = 0; i < cmd->nb_queues; i++) {
        VulkanQueueCtx *q = &cmd->queues[i];

        if (q->fence && !q->was_synchronous) {
            vkWaitForFences(hwctx->act_dev, 1, &q->fence, VK_TRUE, UINT64_MAX);
            vkResetFences(hwctx->act_dev, 1, &q->fence);
        }

        if (q->fence)
            vkDestroyFence(hwctx->act_dev, q->fence, hwctx->alloc);

        /* Free buffer dependencies */
        for (int j = 0; j < q->nb_buf_deps; j++)
            av_buffer_unref(&q->buf_deps[j]);
        av_free(q->buf_deps);
    }

    if (cmd->bufs)
        vkFreeCommandBuffers(hwctx->act_dev, cmd->pool, cmd->nb_queues, cmd->bufs);
    if (cmd->pool)
        vkDestroyCommandPool(hwctx->act_dev, cmd->pool, hwctx->alloc);

    av_freep(&cmd->bufs);
    av_freep(&cmd->queues);
}
static VkCommandBuffer get_buf_exec_ctx(AVHWFramesContext *hwfc, VulkanExecCtx *cmd)
{
    return cmd->bufs[cmd->cur_queue_idx];
}

static void unref_exec_ctx_deps(AVHWFramesContext *hwfc, VulkanExecCtx *cmd)
{
    VulkanQueueCtx *q = &cmd->queues[cmd->cur_queue_idx];

    for (int j = 0; j < q->nb_buf_deps; j++)
        av_buffer_unref(&q->buf_deps[j]);
    q->nb_buf_deps = 0;
}
static int wait_start_exec_ctx(AVHWFramesContext *hwfc, VulkanExecCtx *cmd)
{
    VkResult ret;
    AVVulkanDeviceContext *hwctx = hwfc->device_ctx->hwctx;
    VulkanQueueCtx *q = &cmd->queues[cmd->cur_queue_idx];

    VkCommandBufferBeginInfo cmd_start = {
        .sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
        .flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT,
    };

    /* Create the fence and don't wait for it initially */
    if (!q->fence) {
        VkFenceCreateInfo fence_spawn = {
            .sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO,
        };
        ret = vkCreateFence(hwctx->act_dev, &fence_spawn, hwctx->alloc,
                            &q->fence);
        if (ret != VK_SUCCESS) {
            av_log(hwfc, AV_LOG_ERROR, "Failed to create frame fence: %s\n",
                   vk_ret2str(ret));
            return AVERROR_EXTERNAL;
        }
    } else if (!q->was_synchronous) {
        vkWaitForFences(hwctx->act_dev, 1, &q->fence, VK_TRUE, UINT64_MAX);
        vkResetFences(hwctx->act_dev, 1, &q->fence);
    }

    /* Discard queue dependencies */
    unref_exec_ctx_deps(hwfc, cmd);

    ret = vkBeginCommandBuffer(cmd->bufs[cmd->cur_queue_idx], &cmd_start);
    if (ret != VK_SUCCESS) {
        av_log(hwfc, AV_LOG_ERROR, "Unable to init command buffer: %s\n",
               vk_ret2str(ret));
        return AVERROR_EXTERNAL;
    }

    return 0;
}
static int add_buf_dep_exec_ctx(AVHWFramesContext *hwfc, VulkanExecCtx *cmd,
                                AVBufferRef * const *deps, int nb_deps)
{
    AVBufferRef **dst;
    VulkanQueueCtx *q = &cmd->queues[cmd->cur_queue_idx];

    if (!deps || !nb_deps)
        return 0;

    dst = av_fast_realloc(q->buf_deps, &q->buf_deps_alloc_size,
                          (q->nb_buf_deps + nb_deps) * sizeof(*dst));
    if (!dst)
        goto err;

    q->buf_deps = dst;

    for (int i = 0; i < nb_deps; i++) {
        q->buf_deps[q->nb_buf_deps] = av_buffer_ref(deps[i]);
        if (!q->buf_deps[q->nb_buf_deps])
            goto err;
        q->nb_buf_deps++;
    }

    return 0;

err:
    unref_exec_ctx_deps(hwfc, cmd);
    return AVERROR(ENOMEM);
}
static int submit_exec_ctx(AVHWFramesContext *hwfc, VulkanExecCtx *cmd,
                           VkSubmitInfo *s_info, int synchronous)
{
    VkResult ret;
    VulkanQueueCtx *q = &cmd->queues[cmd->cur_queue_idx];

    ret = vkEndCommandBuffer(cmd->bufs[cmd->cur_queue_idx]);
    if (ret != VK_SUCCESS) {
        av_log(hwfc, AV_LOG_ERROR, "Unable to finish command buffer: %s\n",
               vk_ret2str(ret));
        unref_exec_ctx_deps(hwfc, cmd);
        return AVERROR_EXTERNAL;
    }

    s_info->pCommandBuffers = &cmd->bufs[cmd->cur_queue_idx];
    s_info->commandBufferCount = 1;

    ret = vkQueueSubmit(q->queue, 1, s_info, q->fence);
    if (ret != VK_SUCCESS) {
        unref_exec_ctx_deps(hwfc, cmd);
        return AVERROR_EXTERNAL;
    }

    q->was_synchronous = synchronous;

    if (synchronous) {
        AVVulkanDeviceContext *hwctx = hwfc->device_ctx->hwctx;
        vkWaitForFences(hwctx->act_dev, 1, &q->fence, VK_TRUE, UINT64_MAX);
        vkResetFences(hwctx->act_dev, 1, &q->fence);
        unref_exec_ctx_deps(hwfc, cmd);
    } else { /* Rotate queues */
        cmd->cur_queue_idx = (cmd->cur_queue_idx + 1) % cmd->nb_queues;
    }

    return 0;
}
static void vulkan_device_free(AVHWDeviceContext *ctx)
{
    VulkanDevicePriv *p = ctx->internal->priv;
    AVVulkanDeviceContext *hwctx = ctx->hwctx;

    vkDestroyDevice(hwctx->act_dev, hwctx->alloc);

    if (p->debug_ctx) {
        VK_LOAD_PFN(hwctx->inst, vkDestroyDebugUtilsMessengerEXT);
        pfn_vkDestroyDebugUtilsMessengerEXT(hwctx->inst, p->debug_ctx,
                                            hwctx->alloc);
    }

    vkDestroyInstance(hwctx->inst, hwctx->alloc);

    for (int i = 0; i < hwctx->nb_enabled_inst_extensions; i++)
        av_free((void *)hwctx->enabled_inst_extensions[i]);
    av_free((void *)hwctx->enabled_inst_extensions);

    for (int i = 0; i < hwctx->nb_enabled_dev_extensions; i++)
        av_free((void *)hwctx->enabled_dev_extensions[i]);
    av_free((void *)hwctx->enabled_dev_extensions);
}
static int vulkan_device_create_internal(AVHWDeviceContext *ctx,
                                         VulkanDeviceSelection *dev_select,
                                         AVDictionary *opts, int flags)
{
    int err = 0;
    VkResult ret;
    AVDictionaryEntry *opt_d;
    VulkanDevicePriv *p = ctx->internal->priv;
    AVVulkanDeviceContext *hwctx = ctx->hwctx;
    VkPhysicalDeviceFeatures dev_features = { 0 };
    VkDeviceQueueCreateInfo queue_create_info[3] = {
        { .sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO, },
        { .sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO, },
        { .sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO, },
    };

    VkDeviceCreateInfo dev_info = {
        .sType                = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO,
        .pNext                = &hwctx->device_features,
        .pQueueCreateInfos    = queue_create_info,
        .queueCreateInfoCount = 0,
    };

    hwctx->device_features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
    ctx->free = vulkan_device_free;

    /* Create an instance if not given one */
    if ((err = create_instance(ctx, opts)))
        goto end;

    /* Find a device (if not given one) */
    if ((err = find_device(ctx, dev_select)))
        goto end;

    vkGetPhysicalDeviceFeatures(hwctx->phys_dev, &dev_features);
#define COPY_FEATURE(DST, NAME) (DST).features.NAME = dev_features.NAME;
    COPY_FEATURE(hwctx->device_features, shaderImageGatherExtended)
    COPY_FEATURE(hwctx->device_features, fragmentStoresAndAtomics)
    COPY_FEATURE(hwctx->device_features, vertexPipelineStoresAndAtomics)
    COPY_FEATURE(hwctx->device_features, shaderInt64)
#undef COPY_FEATURE

    /* Search queue family */
    if ((err = search_queue_families(ctx, &dev_info)))
        goto end;

    if ((err = check_extensions(ctx, 1, opts, &dev_info.ppEnabledExtensionNames,
                                &dev_info.enabledExtensionCount, 0))) {
        av_free((void *)queue_create_info[0].pQueuePriorities);
        av_free((void *)queue_create_info[1].pQueuePriorities);
        av_free((void *)queue_create_info[2].pQueuePriorities);
        goto end;
    }

    ret = vkCreateDevice(hwctx->phys_dev, &dev_info, hwctx->alloc,
                         &hwctx->act_dev);

    av_free((void *)queue_create_info[0].pQueuePriorities);
    av_free((void *)queue_create_info[1].pQueuePriorities);
    av_free((void *)queue_create_info[2].pQueuePriorities);

    if (ret != VK_SUCCESS) {
        av_log(ctx, AV_LOG_ERROR, "Device creation failure: %s\n",
               vk_ret2str(ret));
        for (int i = 0; i < dev_info.enabledExtensionCount; i++)
            av_free((void *)dev_info.ppEnabledExtensionNames[i]);
        av_free((void *)dev_info.ppEnabledExtensionNames);
        err = AVERROR_EXTERNAL;
        goto end;
    }

    /* Tiled images setting, use them by default */
    opt_d = av_dict_get(opts, "linear_images", NULL, 0);
    if (opt_d)
        p->use_linear_images = strtol(opt_d->value, NULL, 10);

    hwctx->enabled_dev_extensions = dev_info.ppEnabledExtensionNames;
    hwctx->nb_enabled_dev_extensions = dev_info.enabledExtensionCount;

end:
    return err;
}
static int vulkan_device_init(AVHWDeviceContext *ctx)
{
    uint32_t queue_num;
    AVVulkanDeviceContext *hwctx = ctx->hwctx;
    VulkanDevicePriv *p = ctx->internal->priv;

    vkGetPhysicalDeviceProperties(hwctx->phys_dev, &p->props);
    av_log(ctx, AV_LOG_VERBOSE, "Using device: %s\n", p->props.deviceName);
    av_log(ctx, AV_LOG_VERBOSE, "Alignments:\n");
    av_log(ctx, AV_LOG_VERBOSE, "    optimalBufferCopyOffsetAlignment:   %"PRIu64"\n",
           p->props.limits.optimalBufferCopyOffsetAlignment);
    av_log(ctx, AV_LOG_VERBOSE, "    optimalBufferCopyRowPitchAlignment: %"PRIu64"\n",
           p->props.limits.optimalBufferCopyRowPitchAlignment);
    av_log(ctx, AV_LOG_VERBOSE, "    minMemoryMapAlignment:              %zu\n",
           p->props.limits.minMemoryMapAlignment);

    /* Set device extension flags */
    for (int i = 0; i < hwctx->nb_enabled_dev_extensions; i++) {
        for (int j = 0; j < FF_ARRAY_ELEMS(optional_device_exts); j++) {
            if (!strcmp(hwctx->enabled_dev_extensions[i],
                        optional_device_exts[j].name)) {
                av_log(ctx, AV_LOG_VERBOSE, "Using device extension %s\n",
                       hwctx->enabled_dev_extensions[i]);
                p->extensions |= optional_device_exts[j].flag;
                break;
            }
        }
    }

    p->dev_is_nvidia = (p->props.vendorID == 0x10de);

    vkGetPhysicalDeviceQueueFamilyProperties(hwctx->phys_dev, &queue_num, NULL);
    if (!queue_num) {
        av_log(ctx, AV_LOG_ERROR, "Failed to get queues!\n");
        return AVERROR_EXTERNAL;
    }

#define CHECK_QUEUE(type, n)                                                             \
    if (n >= queue_num) {                                                                \
        av_log(ctx, AV_LOG_ERROR, "Invalid %s queue index %i (device has %i queues)!\n", \
               type, n, queue_num);                                                      \
        return AVERROR(EINVAL);                                                          \
    }

    CHECK_QUEUE("graphics", hwctx->queue_family_index)
    CHECK_QUEUE("upload",   hwctx->queue_family_tx_index)
    CHECK_QUEUE("compute",  hwctx->queue_family_comp_index)

    p->qfs[p->num_qfs++] = hwctx->queue_family_index;
    if ((hwctx->queue_family_tx_index != hwctx->queue_family_index) &&
        (hwctx->queue_family_tx_index != hwctx->queue_family_comp_index))
        p->qfs[p->num_qfs++] = hwctx->queue_family_tx_index;
    if ((hwctx->queue_family_comp_index != hwctx->queue_family_index) &&
        (hwctx->queue_family_comp_index != hwctx->queue_family_tx_index))
        p->qfs[p->num_qfs++] = hwctx->queue_family_comp_index;

    /* Get device capabilities */
    vkGetPhysicalDeviceMemoryProperties(hwctx->phys_dev, &p->mprops);

    return 0;
}
static int vulkan_device_create(AVHWDeviceContext *ctx, const char *device,
                                AVDictionary *opts, int flags)
{
    VulkanDeviceSelection dev_select = { 0 };
    if (device && device[0]) {
        char *end = NULL;
        dev_select.index = strtol(device, &end, 10);
        if (end == device) { /* Not an index, so use it as a name filter */
            dev_select.index = 0;
            dev_select.name  = device;
        }
    }

    return vulkan_device_create_internal(ctx, &dev_select, opts, flags);
}
static int vulkan_device_derive(AVHWDeviceContext *ctx,
                                AVHWDeviceContext *src_ctx,
                                AVDictionary *opts, int flags)
{
    av_unused VulkanDeviceSelection dev_select = { 0 };

    /* If there's only one device on the system, then even if it's not covered
     * by the following checks (e.g. non-PCIe ARM GPU), having an empty
     * dev_select will mean it'll get picked. */
    switch (src_ctx->type) {
    case AV_HWDEVICE_TYPE_VAAPI: {
        AVVAAPIDeviceContext *src_hwctx = src_ctx->hwctx;

        const char *vendor = vaQueryVendorString(src_hwctx->display);
        if (!vendor) {
            av_log(ctx, AV_LOG_ERROR, "Unable to get device info from VAAPI!\n");
            return AVERROR_EXTERNAL;
        }

        if (strstr(vendor, "Intel"))
            dev_select.vendor_id = 0x8086;
        if (strstr(vendor, "AMD"))
            dev_select.vendor_id = 0x1002;

        return vulkan_device_create_internal(ctx, &dev_select, opts, flags);
    }
    case AV_HWDEVICE_TYPE_DRM: {
        AVDRMDeviceContext *src_hwctx = src_ctx->hwctx;

        drmDevice *drm_dev_info;
        int err = drmGetDevice(src_hwctx->fd, &drm_dev_info);
        if (err) {
            av_log(ctx, AV_LOG_ERROR, "Unable to get device info from DRM fd!\n");
            return AVERROR_EXTERNAL;
        }

        if (drm_dev_info->bustype == DRM_BUS_PCI)
            dev_select.pci_device = drm_dev_info->deviceinfo.pci->device_id;

        drmFreeDevice(&drm_dev_info);

        return vulkan_device_create_internal(ctx, &dev_select, opts, flags);
    }
    case AV_HWDEVICE_TYPE_CUDA: {
        AVHWDeviceContext *cuda_cu = src_ctx;
        AVCUDADeviceContext *src_hwctx = src_ctx->hwctx;
        AVCUDADeviceContextInternal *cu_internal = src_hwctx->internal;
        CudaFunctions *cu = cu_internal->cuda_dl;

        int ret = CHECK_CU(cu->cuDeviceGetUuid((CUuuid *)&dev_select.uuid,
                                               cu_internal->cuda_device));
        if (ret < 0) {
            av_log(ctx, AV_LOG_ERROR, "Unable to get UUID from CUDA!\n");
            return AVERROR_EXTERNAL;
        }

        dev_select.has_uuid = 1;

        return vulkan_device_create_internal(ctx, &dev_select, opts, flags);
    }
    default:
        return AVERROR(ENOSYS);
    }
}
static int vulkan_frames_get_constraints(AVHWDeviceContext *ctx,
                                         const void *hwconfig,
                                         AVHWFramesConstraints *constraints)
{
    int count = 0;
    AVVulkanDeviceContext *hwctx = ctx->hwctx;
    VulkanDevicePriv *p = ctx->internal->priv;

    for (enum AVPixelFormat i = 0; i < AV_PIX_FMT_NB; i++)
        count += pixfmt_is_supported(hwctx, i, p->use_linear_images);

    if (p->dev_is_nvidia)
        count++;

    constraints->valid_sw_formats = av_malloc_array(count + 1,
                                                    sizeof(enum AVPixelFormat));
    if (!constraints->valid_sw_formats)
        return AVERROR(ENOMEM);

    count = 0;
    for (enum AVPixelFormat i = 0; i < AV_PIX_FMT_NB; i++)
        if (pixfmt_is_supported(hwctx, i, p->use_linear_images))
            constraints->valid_sw_formats[count++] = i;

    if (p->dev_is_nvidia)
        constraints->valid_sw_formats[count++] = AV_PIX_FMT_CUDA;

    constraints->valid_sw_formats[count++] = AV_PIX_FMT_NONE;

    constraints->min_width  = 0;
    constraints->min_height = 0;
    constraints->max_width  = p->props.limits.maxImageDimension2D;
    constraints->max_height = p->props.limits.maxImageDimension2D;

    constraints->valid_hw_formats = av_malloc_array(2, sizeof(enum AVPixelFormat));
    if (!constraints->valid_hw_formats)
        return AVERROR(ENOMEM);

    constraints->valid_hw_formats[0] = AV_PIX_FMT_VULKAN;
    constraints->valid_hw_formats[1] = AV_PIX_FMT_NONE;

    return 0;
}
static int alloc_mem(AVHWDeviceContext *ctx, VkMemoryRequirements *req,
                     VkMemoryPropertyFlagBits req_flags, void *alloc_extension,
                     VkMemoryPropertyFlagBits *mem_flags, VkDeviceMemory *mem)
{
    VkResult ret;
    int index = -1;
    VulkanDevicePriv *p = ctx->internal->priv;
    AVVulkanDeviceContext *dev_hwctx = ctx->hwctx;
    VkMemoryAllocateInfo alloc_info = {
        .sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
        .pNext = alloc_extension,
    };

    /* Align if we need to */
    if (req_flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT)
        req->size = FFALIGN(req->size, p->props.limits.minMemoryMapAlignment);

    alloc_info.allocationSize = req->size;

    /* The Vulkan spec requires memory types to be sorted in the "optimal"
     * order, so the first matching type we find will be the best/fastest one */
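    /* e.g. with req_flags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT, the loop
     * below picks the first type index allowed by req->memoryTypeBits whose
     * propertyFlags contain the host-visible bit. */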
    for (int i = 0; i < p->mprops.memoryTypeCount; i++) {
        /* The memory type must be supported by the requirements (bitfield) */
        if (!(req->memoryTypeBits & (1 << i)))
            continue;

        /* The memory type flags must include our properties */
        if ((p->mprops.memoryTypes[i].propertyFlags & req_flags) != req_flags)
            continue;

        /* Found a suitable memory type */
        index = i;
        break;
    }

    if (index < 0) {
        av_log(ctx, AV_LOG_ERROR, "No memory type found for flags 0x%x\n",
               req_flags);
        return AVERROR(EINVAL);
    }

    alloc_info.memoryTypeIndex = index;

    ret = vkAllocateMemory(dev_hwctx->act_dev, &alloc_info,
                           dev_hwctx->alloc, mem);
    if (ret != VK_SUCCESS) {
        av_log(ctx, AV_LOG_ERROR, "Failed to allocate memory: %s\n",
               vk_ret2str(ret));
        return AVERROR(ENOMEM);
    }

    *mem_flags |= p->mprops.memoryTypes[index].propertyFlags;

    return 0;
}
static void vulkan_free_internal(AVVkFrameInternal *internal)
{
    if (!internal)
        return;

    if (internal->cuda_fc_ref) {
        AVHWFramesContext *cuda_fc = (AVHWFramesContext *)internal->cuda_fc_ref->data;
        int planes = av_pix_fmt_count_planes(cuda_fc->sw_format);
        AVHWDeviceContext *cuda_cu = cuda_fc->device_ctx;
        AVCUDADeviceContext *cuda_dev = cuda_cu->hwctx;
        AVCUDADeviceContextInternal *cu_internal = cuda_dev->internal;
        CudaFunctions *cu = cu_internal->cuda_dl;

        for (int i = 0; i < planes; i++) {
            if (internal->cu_sem[i])
                CHECK_CU(cu->cuDestroyExternalSemaphore(internal->cu_sem[i]));
            if (internal->cu_mma[i])
                CHECK_CU(cu->cuMipmappedArrayDestroy(internal->cu_mma[i]));
            if (internal->ext_mem[i])
                CHECK_CU(cu->cuDestroyExternalMemory(internal->ext_mem[i]));
        }

        av_buffer_unref(&internal->cuda_fc_ref);
    }

    av_free(internal);
}
static void vulkan_frame_free(void *opaque, uint8_t *data)
{
    AVVkFrame *f = (AVVkFrame *)data;
    AVHWFramesContext *hwfc = opaque;
    AVVulkanDeviceContext *hwctx = hwfc->device_ctx->hwctx;
    int planes = av_pix_fmt_count_planes(hwfc->sw_format);

    vulkan_free_internal(f->internal);

    for (int i = 0; i < planes; i++) {
        vkDestroyImage(hwctx->act_dev, f->img[i], hwctx->alloc);
        vkFreeMemory(hwctx->act_dev, f->mem[i], hwctx->alloc);
        vkDestroySemaphore(hwctx->act_dev, f->sem[i], hwctx->alloc);
    }

    av_free(f);
}
static int alloc_bind_mem(AVHWFramesContext *hwfc, AVVkFrame *f,
                          void *alloc_pnext, size_t alloc_pnext_stride)
{
    int err;
    VkResult ret;
    AVHWDeviceContext *ctx = hwfc->device_ctx;
    const int planes = av_pix_fmt_count_planes(hwfc->sw_format);
    VkBindImageMemoryInfo bind_info[AV_NUM_DATA_POINTERS] = { { 0 } };

    AVVulkanDeviceContext *hwctx = ctx->hwctx;

    for (int i = 0; i < planes; i++) {
        int use_ded_mem;
        VkImageMemoryRequirementsInfo2 req_desc = {
            .sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2,
            .image = f->img[i],
        };
        VkMemoryDedicatedAllocateInfo ded_alloc = {
            .sType = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO,
            .pNext = (void *)(((uint8_t *)alloc_pnext) + i*alloc_pnext_stride),
        };
        VkMemoryDedicatedRequirements ded_req = {
            .sType = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS,
        };
        VkMemoryRequirements2 req = {
            .sType = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2,
            .pNext = &ded_req,
        };

        vkGetImageMemoryRequirements2(hwctx->act_dev, &req_desc, &req);

        /* In case the implementation prefers/requires dedicated allocation */
        use_ded_mem = ded_req.prefersDedicatedAllocation |
                      ded_req.requiresDedicatedAllocation;
        if (use_ded_mem)
            ded_alloc.image = f->img[i];

        /* Allocate memory */
        if ((err = alloc_mem(ctx, &req.memoryRequirements,
                             f->tiling == VK_IMAGE_TILING_LINEAR ?
                             VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT :
                             VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT,
                             use_ded_mem ? &ded_alloc : (void *)ded_alloc.pNext,
                             &f->flags, &f->mem[i])))
            return err;

        f->size[i] = req.memoryRequirements.size;
        bind_info[i].sType  = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO;
        bind_info[i].image  = f->img[i];
        bind_info[i].memory = f->mem[i];
    }

    /* Bind the allocated memory to the images */
    ret = vkBindImageMemory2(hwctx->act_dev, planes, bind_info);
    if (ret != VK_SUCCESS) {
        av_log(ctx, AV_LOG_ERROR, "Failed to bind memory: %s\n",
               vk_ret2str(ret));
        return AVERROR_EXTERNAL;
    }

    return 0;
}
enum PrepMode {
    PREP_MODE_WRITE,
    PREP_MODE_RO_SHADER,
    PREP_MODE_EXTERNAL_EXPORT,
};
static int prepare_frame(AVHWFramesContext *hwfc, VulkanExecCtx *ectx,
                         AVVkFrame *frame, enum PrepMode pmode)
{
    int err;
    uint32_t dst_qf;
    VkImageLayout new_layout;
    VkAccessFlags new_access;
    const int planes = av_pix_fmt_count_planes(hwfc->sw_format);

    VkImageMemoryBarrier img_bar[AV_NUM_DATA_POINTERS] = { 0 };

    VkSubmitInfo s_info = {
        .sType                = VK_STRUCTURE_TYPE_SUBMIT_INFO,
        .pSignalSemaphores    = frame->sem,
        .signalSemaphoreCount = planes,
    };

    VkPipelineStageFlagBits wait_st[AV_NUM_DATA_POINTERS];
    for (int i = 0; i < planes; i++)
        wait_st[i] = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;

    switch (pmode) {
    case PREP_MODE_WRITE:
        new_layout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
        new_access = VK_ACCESS_TRANSFER_WRITE_BIT;
        dst_qf     = VK_QUEUE_FAMILY_IGNORED;
        break;
    case PREP_MODE_RO_SHADER:
        new_layout = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL;
        new_access = VK_ACCESS_TRANSFER_READ_BIT;
        dst_qf     = VK_QUEUE_FAMILY_IGNORED;
        break;
    case PREP_MODE_EXTERNAL_EXPORT:
        new_layout = VK_IMAGE_LAYOUT_GENERAL;
        new_access = VK_ACCESS_MEMORY_READ_BIT | VK_ACCESS_MEMORY_WRITE_BIT;
        dst_qf     = VK_QUEUE_FAMILY_EXTERNAL_KHR;
        s_info.pWaitSemaphores    = frame->sem;
        s_info.pWaitDstStageMask  = wait_st;
        s_info.waitSemaphoreCount = planes;
        break;
    }

    if ((err = wait_start_exec_ctx(hwfc, ectx)))
        return err;

    /* Change the image layout to something more optimal for writes.
     * This also signals the newly created semaphore, making it usable
     * for synchronization */
    for (int i = 0; i < planes; i++) {
        img_bar[i].sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
        img_bar[i].srcAccessMask = 0x0;
        img_bar[i].dstAccessMask = new_access;
        img_bar[i].oldLayout = frame->layout[i];
        img_bar[i].newLayout = new_layout;
        img_bar[i].srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
        img_bar[i].dstQueueFamilyIndex = dst_qf;
        img_bar[i].image = frame->img[i];
        img_bar[i].subresourceRange.levelCount = 1;
        img_bar[i].subresourceRange.layerCount = 1;
        img_bar[i].subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;

        frame->layout[i] = img_bar[i].newLayout;
        frame->access[i] = img_bar[i].dstAccessMask;
    }

    vkCmdPipelineBarrier(get_buf_exec_ctx(hwfc, ectx),
                         VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
                         VK_PIPELINE_STAGE_TRANSFER_BIT,
                         0, 0, NULL, 0, NULL, planes, img_bar);

    return submit_exec_ctx(hwfc, ectx, &s_info, 0);
}
static int create_frame(AVHWFramesContext *hwfc, AVVkFrame **frame,
                        VkImageTiling tiling, VkImageUsageFlagBits usage,
                        void *create_pnext)
{
    int err;
    VkResult ret;
    AVHWDeviceContext *ctx = hwfc->device_ctx;
    VulkanDevicePriv *p = ctx->internal->priv;
    AVVulkanDeviceContext *hwctx = ctx->hwctx;
    enum AVPixelFormat format = hwfc->sw_format;
    const VkFormat *img_fmts = av_vkfmt_from_pixfmt(format);
    const int planes = av_pix_fmt_count_planes(format);

    VkExportSemaphoreCreateInfo ext_sem_info = {
        .sType = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO,
        .handleTypes = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT,
    };

    VkSemaphoreCreateInfo sem_spawn = {
        .sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO,
        .pNext = p->extensions & EXT_EXTERNAL_FD_SEM ? &ext_sem_info : NULL,
    };

    AVVkFrame *f = av_vk_frame_alloc();
    if (!f) {
        av_log(ctx, AV_LOG_ERROR, "Unable to allocate memory for AVVkFrame!\n");
        return AVERROR(ENOMEM);
    }

    /* Create the images */
    for (int i = 0; i < planes; i++) {
        const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(format);
        int w = hwfc->width;
        int h = hwfc->height;
        const int p_w = i > 0 ? AV_CEIL_RSHIFT(w, desc->log2_chroma_w) : w;
        const int p_h = i > 0 ? AV_CEIL_RSHIFT(h, desc->log2_chroma_h) : h;

        VkImageCreateInfo image_create_info = {
            .sType         = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
            .pNext         = create_pnext,
            .imageType     = VK_IMAGE_TYPE_2D,
            .format        = img_fmts[i],
            .extent.width  = p_w,
            .extent.height = p_h,
            .extent.depth  = 1,
            .mipLevels     = 1,
            .arrayLayers   = 1,
            .flags         = VK_IMAGE_CREATE_ALIAS_BIT,
            .tiling        = tiling,
            .initialLayout = VK_IMAGE_LAYOUT_UNDEFINED,
            .usage         = usage,
            .samples       = VK_SAMPLE_COUNT_1_BIT,
            .pQueueFamilyIndices   = p->qfs,
            .queueFamilyIndexCount = p->num_qfs,
            .sharingMode   = p->num_qfs > 1 ? VK_SHARING_MODE_CONCURRENT :
                                              VK_SHARING_MODE_EXCLUSIVE,
        };

        ret = vkCreateImage(hwctx->act_dev, &image_create_info,
                            hwctx->alloc, &f->img[i]);
        if (ret != VK_SUCCESS) {
            av_log(ctx, AV_LOG_ERROR, "Image creation failure: %s\n",
                   vk_ret2str(ret));
            err = AVERROR(EINVAL);
            goto fail;
        }

        /* Create semaphore */
        ret = vkCreateSemaphore(hwctx->act_dev, &sem_spawn,
                                hwctx->alloc, &f->sem[i]);
        if (ret != VK_SUCCESS) {
            av_log(hwctx, AV_LOG_ERROR, "Failed to create semaphore: %s\n",
                   vk_ret2str(ret));
            return AVERROR_EXTERNAL;
        }

        f->layout[i] = image_create_info.initialLayout;
        f->access[i] = 0x0;
    }

    f->flags  = 0x0;
    f->tiling = tiling;

    *frame = f;
    return 0;

fail:
    vulkan_frame_free(hwfc, (uint8_t *)f);
    return err;
}
/* Checks if an export flag is supported, and if it is, ORs it into *iexp */
static void try_export_flags(AVHWFramesContext *hwfc,
                             VkExternalMemoryHandleTypeFlags *comp_handle_types,
                             VkExternalMemoryHandleTypeFlagBits *iexp,
                             VkExternalMemoryHandleTypeFlagBits exp)
{
    VkResult ret;
    AVVulkanFramesContext *hwctx = hwfc->hwctx;
    AVVulkanDeviceContext *dev_hwctx = hwfc->device_ctx->hwctx;
    VkExternalImageFormatProperties eprops = {
        .sType = VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES_KHR,
    };
    VkImageFormatProperties2 props = {
        .sType = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2,
        .pNext = &eprops,
    };
    VkPhysicalDeviceExternalImageFormatInfo enext = {
        .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO,
        .handleType = exp,
    };
    VkPhysicalDeviceImageFormatInfo2 pinfo = {
        .sType  = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2,
        .pNext  = !exp ? NULL : &enext,
        .format = av_vkfmt_from_pixfmt(hwfc->sw_format)[0],
        .type   = VK_IMAGE_TYPE_2D,
        .tiling = hwctx->tiling,
        .usage  = hwctx->usage,
        .flags  = VK_IMAGE_CREATE_ALIAS_BIT,
    };

    ret = vkGetPhysicalDeviceImageFormatProperties2(dev_hwctx->phys_dev,
                                                    &pinfo, &props);
    if (ret == VK_SUCCESS) {
        *iexp |= exp;
        *comp_handle_types |= eprops.externalMemoryProperties.compatibleHandleTypes;
    }
}
static AVBufferRef *vulkan_pool_alloc(void *opaque, int size)
{
    int err;
    AVVkFrame *f;
    AVBufferRef *avbuf = NULL;
    AVHWFramesContext *hwfc = opaque;
    AVVulkanFramesContext *hwctx = hwfc->hwctx;
    VulkanDevicePriv *p = hwfc->device_ctx->internal->priv;
    VulkanFramesPriv *fp = hwfc->internal->priv;
    VkExportMemoryAllocateInfo eminfo[AV_NUM_DATA_POINTERS];
    VkExternalMemoryHandleTypeFlags e = 0x0;

    VkExternalMemoryImageCreateInfo eiinfo = {
        .sType = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO,
        .pNext = hwctx->create_pnext,
    };

    if (p->extensions & EXT_EXTERNAL_FD_MEMORY)
        try_export_flags(hwfc, &eiinfo.handleTypes, &e,
                         VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT);

    if (p->extensions & EXT_EXTERNAL_DMABUF_MEMORY)
        try_export_flags(hwfc, &eiinfo.handleTypes, &e,
                         VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT);

    for (int i = 0; i < av_pix_fmt_count_planes(hwfc->sw_format); i++) {
        eminfo[i].sType       = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO;
        eminfo[i].pNext       = hwctx->alloc_pnext[i];
        eminfo[i].handleTypes = e;
    }

    err = create_frame(hwfc, &f, hwctx->tiling, hwctx->usage,
                       eiinfo.handleTypes ? &eiinfo : NULL);
    if (err)
        return NULL;

    err = alloc_bind_mem(hwfc, f, eminfo, sizeof(*eminfo));
    if (err)
        goto fail;

    err = prepare_frame(hwfc, &fp->conv_ctx, f, PREP_MODE_WRITE);
    if (err)
        goto fail;

    avbuf = av_buffer_create((uint8_t *)f, sizeof(AVVkFrame),
                             vulkan_frame_free, hwfc, 0);
    if (!avbuf)
        goto fail;

    return avbuf;

fail:
    vulkan_frame_free(hwfc, (uint8_t *)f);
    return NULL;
}
static void vulkan_frames_uninit(AVHWFramesContext *hwfc)
{
    VulkanFramesPriv *fp = hwfc->internal->priv;

    free_exec_ctx(hwfc, &fp->conv_ctx);
    free_exec_ctx(hwfc, &fp->upload_ctx);
    free_exec_ctx(hwfc, &fp->download_ctx);
}
static int vulkan_frames_init(AVHWFramesContext *hwfc)
{
    int err;
    AVVkFrame *f;
    AVVulkanFramesContext *hwctx = hwfc->hwctx;
    VulkanFramesPriv *fp = hwfc->internal->priv;
    AVVulkanDeviceContext *dev_hwctx = hwfc->device_ctx->hwctx;
    VulkanDevicePriv *p = hwfc->device_ctx->internal->priv;

    /* Default pool flags */
    hwctx->tiling = hwctx->tiling ? hwctx->tiling : p->use_linear_images ?
                    VK_IMAGE_TILING_LINEAR : VK_IMAGE_TILING_OPTIMAL;

    if (!hwctx->usage)
        hwctx->usage = DEFAULT_USAGE_FLAGS;

    err = create_exec_ctx(hwfc, &fp->conv_ctx,
                          dev_hwctx->queue_family_comp_index,
                          GET_QUEUE_COUNT(dev_hwctx, 0, 1, 0));
    if (err)
        return err;

    err = create_exec_ctx(hwfc, &fp->upload_ctx,
                          dev_hwctx->queue_family_tx_index,
                          GET_QUEUE_COUNT(dev_hwctx, 0, 0, 1));
    if (err)
        goto fail;

    err = create_exec_ctx(hwfc, &fp->download_ctx,
                          dev_hwctx->queue_family_tx_index, 1);
    if (err)
        goto fail;

    /* Test to see if allocation will fail */
    err = create_frame(hwfc, &f, hwctx->tiling, hwctx->usage,
                       hwctx->create_pnext);
    if (err)
        goto fail;

    vulkan_frame_free(hwfc, (uint8_t *)f);

    /* If the user did not specify a pool, hwfc->pool will be set to the
     * internal one in hwcontext.c just after this gets called */
    if (!hwfc->pool) {
        hwfc->internal->pool_internal = av_buffer_pool_init2(sizeof(AVVkFrame),
                                                             hwfc, vulkan_pool_alloc,
                                                             NULL);
        if (!hwfc->internal->pool_internal) {
            err = AVERROR(ENOMEM);
            goto fail;
        }
    }

    return 0;

fail:
    free_exec_ctx(hwfc, &fp->conv_ctx);
    free_exec_ctx(hwfc, &fp->upload_ctx);
    free_exec_ctx(hwfc, &fp->download_ctx);

    return err;
}
static int vulkan_get_buffer(AVHWFramesContext *hwfc, AVFrame *frame)
{
    frame->buf[0] = av_buffer_pool_get(hwfc->pool);
    if (!frame->buf[0])
        return AVERROR(ENOMEM);

    frame->data[0] = frame->buf[0]->data;
    frame->format  = AV_PIX_FMT_VULKAN;
    frame->width   = hwfc->width;
    frame->height  = hwfc->height;

    return 0;
}
static int vulkan_transfer_get_formats(AVHWFramesContext *hwfc,
                                       enum AVHWFrameTransferDirection dir,
                                       enum AVPixelFormat **formats)
{
    enum AVPixelFormat *fmts = av_malloc_array(2, sizeof(*fmts));
    if (!fmts)
        return AVERROR(ENOMEM);

    fmts[0] = hwfc->sw_format;
    fmts[1] = AV_PIX_FMT_NONE;

    *formats = fmts;
    return 0;
}
typedef struct VulkanMapping {
    AVVkFrame *frame;
    int flags;
} VulkanMapping;
static void vulkan_unmap_frame(AVHWFramesContext *hwfc, HWMapDescriptor *hwmap)
{
    VulkanMapping *map = hwmap->priv;
    AVVulkanDeviceContext *hwctx = hwfc->device_ctx->hwctx;
    const int planes = av_pix_fmt_count_planes(hwfc->sw_format);

    /* Check if buffer needs flushing */
    if ((map->flags & AV_HWFRAME_MAP_WRITE) &&
        !(map->frame->flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)) {
        VkResult ret;
        VkMappedMemoryRange flush_ranges[AV_NUM_DATA_POINTERS] = { { 0 } };

        for (int i = 0; i < planes; i++) {
            flush_ranges[i].sType  = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
            flush_ranges[i].memory = map->frame->mem[i];
            flush_ranges[i].size   = VK_WHOLE_SIZE;
        }

        ret = vkFlushMappedMemoryRanges(hwctx->act_dev, planes,
                                        flush_ranges);
        if (ret != VK_SUCCESS) {
            av_log(hwfc, AV_LOG_ERROR, "Failed to flush memory: %s\n",
                   vk_ret2str(ret));
        }
    }

    for (int i = 0; i < planes; i++)
        vkUnmapMemory(hwctx->act_dev, map->frame->mem[i]);

    av_free(map);
}
static int vulkan_map_frame_to_mem(AVHWFramesContext *hwfc, AVFrame *dst,
                                   const AVFrame *src, int flags)
{
    VkResult ret;
    int err, mapped_mem_count = 0;
    AVVkFrame *f = (AVVkFrame *)src->data[0];
    AVVulkanDeviceContext *hwctx = hwfc->device_ctx->hwctx;
    const int planes = av_pix_fmt_count_planes(hwfc->sw_format);

    VulkanMapping *map = av_mallocz(sizeof(VulkanMapping));
    if (!map)
        return AVERROR(ENOMEM);

    if (src->format != AV_PIX_FMT_VULKAN) {
        av_log(hwfc, AV_LOG_ERROR, "Cannot map from pixel format %s!\n",
               av_get_pix_fmt_name(src->format));
        err = AVERROR(EINVAL);
        goto fail;
    }

    if (!(f->flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) ||
        !(f->tiling == VK_IMAGE_TILING_LINEAR)) {
        av_log(hwfc, AV_LOG_ERROR, "Unable to map frame, not host visible "
               "and linear!\n");
        err = AVERROR(EINVAL);
        goto fail;
    }

    dst->width  = src->width;
    dst->height = src->height;

    for (int i = 0; i < planes; i++) {
        ret = vkMapMemory(hwctx->act_dev, f->mem[i], 0,
                          VK_WHOLE_SIZE, 0, (void **)&dst->data[i]);
        if (ret != VK_SUCCESS) {
            av_log(hwfc, AV_LOG_ERROR, "Failed to map image memory: %s\n",
                   vk_ret2str(ret));
            err = AVERROR_EXTERNAL;
            goto fail;
        }
        mapped_mem_count++;
    }

    /* Check if the memory contents matter */
    if (((flags & AV_HWFRAME_MAP_READ) || !(flags & AV_HWFRAME_MAP_OVERWRITE)) &&
        !(f->flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)) {
        VkMappedMemoryRange map_mem_ranges[AV_NUM_DATA_POINTERS] = { { 0 } };
        for (int i = 0; i < planes; i++) {
            map_mem_ranges[i].sType  = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
            map_mem_ranges[i].size   = VK_WHOLE_SIZE;
            map_mem_ranges[i].memory = f->mem[i];
        }

        ret = vkInvalidateMappedMemoryRanges(hwctx->act_dev, planes,
                                             map_mem_ranges);
        if (ret != VK_SUCCESS) {
            av_log(hwfc, AV_LOG_ERROR, "Failed to invalidate memory: %s\n",
                   vk_ret2str(ret));
            err = AVERROR_EXTERNAL;
            goto fail;
        }
    }

    for (int i = 0; i < planes; i++) {
        VkImageSubresource sub = {
            .aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
        };
        VkSubresourceLayout layout;
        vkGetImageSubresourceLayout(hwctx->act_dev, f->img[i], &sub, &layout);
        dst->linesize[i] = layout.rowPitch;
        dst->data[i]    += layout.offset;
    }

    map->frame = f;
    map->flags = flags;

    err = ff_hwframe_map_create(src->hw_frames_ctx, dst, src,
                                &vulkan_unmap_frame, map);
    if (err < 0)
        goto fail;

    return 0;

fail:
    for (int i = 0; i < mapped_mem_count; i++)
        vkUnmapMemory(hwctx->act_dev, f->mem[i]);

    av_free(map);
    return err;
}
static void vulkan_unmap_from(AVHWFramesContext *hwfc, HWMapDescriptor *hwmap)
{
    VulkanMapping *map = hwmap->priv;
    AVVulkanDeviceContext *hwctx = hwfc->device_ctx->hwctx;
    const int planes = av_pix_fmt_count_planes(hwfc->sw_format);

    for (int i = 0; i < planes; i++) {
        vkDestroyImage(hwctx->act_dev, map->frame->img[i], hwctx->alloc);
        vkFreeMemory(hwctx->act_dev, map->frame->mem[i], hwctx->alloc);
        vkDestroySemaphore(hwctx->act_dev, map->frame->sem[i], hwctx->alloc);
    }

    av_freep(&map->frame);
}
static const struct {
    uint32_t drm_fourcc;
    VkFormat vk_format;
} vulkan_drm_format_map[] = {
    { DRM_FORMAT_R8,       VK_FORMAT_R8_UNORM       },
    { DRM_FORMAT_R16,      VK_FORMAT_R16_UNORM      },
    { DRM_FORMAT_GR88,     VK_FORMAT_R8G8_UNORM     },
    { DRM_FORMAT_RG88,     VK_FORMAT_R8G8_UNORM     },
    { DRM_FORMAT_GR1616,   VK_FORMAT_R16G16_UNORM   },
    { DRM_FORMAT_RG1616,   VK_FORMAT_R16G16_UNORM   },
    { DRM_FORMAT_ARGB8888, VK_FORMAT_B8G8R8A8_UNORM },
    { DRM_FORMAT_XRGB8888, VK_FORMAT_B8G8R8A8_UNORM },
    { DRM_FORMAT_ABGR8888, VK_FORMAT_R8G8B8A8_UNORM },
    { DRM_FORMAT_XBGR8888, VK_FORMAT_R8G8B8A8_UNORM },
};
static inline VkFormat drm_to_vulkan_fmt(uint32_t drm_fourcc)
{
    for (int i = 0; i < FF_ARRAY_ELEMS(vulkan_drm_format_map); i++)
        if (vulkan_drm_format_map[i].drm_fourcc == drm_fourcc)
            return vulkan_drm_format_map[i].vk_format;
    return VK_FORMAT_UNDEFINED;
}
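
/* e.g. drm_to_vulkan_fmt(DRM_FORMAT_GR88) yields VK_FORMAT_R8G8_UNORM, so an
 * NV12 DMA-BUF (an R8 layer plus a GR88 layer) maps to the same per-plane
 * formats that vk_pixfmt_map uses for AV_PIX_FMT_NV12. */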
1946 static int vulkan_map_from_drm_frame_desc(AVHWFramesContext *hwfc, AVVkFrame **frame,
1947 AVDRMFrameDescriptor *desc)
1952 int bind_counts = 0;
1953 AVHWDeviceContext *ctx = hwfc->device_ctx;
1954 AVVulkanDeviceContext *hwctx = ctx->hwctx;
1955 VulkanDevicePriv *p = ctx->internal->priv;
1956 VulkanFramesPriv *fp = hwfc->internal->priv;
1957 AVVulkanFramesContext *frames_hwctx = hwfc->hwctx;
1958 const AVPixFmtDescriptor *fmt_desc = av_pix_fmt_desc_get(hwfc->sw_format);
1959 const int has_modifiers = p->extensions & EXT_DRM_MODIFIER_FLAGS;
1960 VkSubresourceLayout plane_data[AV_NUM_DATA_POINTERS] = { 0 };
1961 VkBindImageMemoryInfo bind_info[AV_NUM_DATA_POINTERS] = { 0 };
1962 VkBindImagePlaneMemoryInfo plane_info[AV_NUM_DATA_POINTERS] = { 0 };
1963 VkExternalMemoryHandleTypeFlagBits htype = VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT;
1965 VK_LOAD_PFN(hwctx->inst, vkGetMemoryFdPropertiesKHR);
1967 for (int i = 0; i < desc->nb_layers; i++) {
1968 if (drm_to_vulkan_fmt(desc->layers[i].format) == VK_FORMAT_UNDEFINED) {
1969 av_log(ctx, AV_LOG_ERROR, "Unsupported DMABUF layer format %#08x!\n",
1970 desc->layers[i].format);
1971 return AVERROR(EINVAL);
1975 if (!(f = av_vk_frame_alloc())) {
1976 av_log(ctx, AV_LOG_ERROR, "Unable to allocate memory for AVVkFrame!\n");
1977 err = AVERROR(ENOMEM);
1981 for (int i = 0; i < desc->nb_objects; i++) {
1982 VkMemoryFdPropertiesKHR fdmp = {
1983 .sType = VK_STRUCTURE_TYPE_MEMORY_FD_PROPERTIES_KHR,
1985 VkMemoryRequirements req = {
1986 .size = desc->objects[i].size,
1988 VkImportMemoryFdInfoKHR idesc = {
1989 .sType = VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR,
1990 .handleType = htype,
1991 .fd = dup(desc->objects[i].fd),
1994 ret = pfn_vkGetMemoryFdPropertiesKHR(hwctx->act_dev, htype,
1996 if (ret != VK_SUCCESS) {
1997 av_log(hwfc, AV_LOG_ERROR, "Failed to get FD properties: %s\n",
1999 err = AVERROR_EXTERNAL;
2004 req.memoryTypeBits = fdmp.memoryTypeBits;
2006 err = alloc_mem(ctx, &req, VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT,
2007 &idesc, &f->flags, &f->mem[i]);
2013 f->size[i] = desc->objects[i].size;
2016 f->tiling = has_modifiers ? VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT :
2017 desc->objects[0].format_modifier == DRM_FORMAT_MOD_LINEAR ?
2018 VK_IMAGE_TILING_LINEAR : VK_IMAGE_TILING_OPTIMAL;

    for (int i = 0; i < desc->nb_layers; i++) {
        const int planes = desc->layers[i].nb_planes;
        const int signal_p = has_modifiers && (planes > 1);

        VkImageDrmFormatModifierExplicitCreateInfoEXT drm_info = {
            .sType = VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_EXPLICIT_CREATE_INFO_EXT,
            .drmFormatModifier = desc->objects[0].format_modifier,
            .drmFormatModifierPlaneCount = planes,
            .pPlaneLayouts = (const VkSubresourceLayout *)&plane_data,
        };

        VkExternalMemoryImageCreateInfo einfo = {
            .sType       = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO,
            .pNext       = has_modifiers ? &drm_info : NULL,
            .handleTypes = htype,
        };

        VkSemaphoreCreateInfo sem_spawn = {
            .sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO,
        };

        const int p_w = i > 0 ? AV_CEIL_RSHIFT(hwfc->width, fmt_desc->log2_chroma_w) : hwfc->width;
        const int p_h = i > 0 ? AV_CEIL_RSHIFT(hwfc->height, fmt_desc->log2_chroma_h) : hwfc->height;

        VkImageCreateInfo image_create_info = {
            .sType                 = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
            .pNext                 = &einfo,
            .imageType             = VK_IMAGE_TYPE_2D,
            .format                = drm_to_vulkan_fmt(desc->layers[i].format),
            .extent.width          = p_w,
            .extent.height         = p_h,
            .extent.depth          = 1,
            .mipLevels             = 1,
            .arrayLayers           = 1,
            .flags                 = VK_IMAGE_CREATE_ALIAS_BIT,
            .tiling                = f->tiling,
            .initialLayout         = VK_IMAGE_LAYOUT_UNDEFINED, /* the spec requires it */
            .usage                 = frames_hwctx->usage,
            .samples               = VK_SAMPLE_COUNT_1_BIT,
            .pQueueFamilyIndices   = p->qfs,
            .queueFamilyIndexCount = p->num_qfs,
            .sharingMode           = p->num_qfs > 1 ? VK_SHARING_MODE_CONCURRENT :
                                                      VK_SHARING_MODE_EXCLUSIVE,
        };

        for (int j = 0; j < planes; j++) {
            plane_data[j].offset     = desc->layers[i].planes[j].offset;
            plane_data[j].rowPitch   = desc->layers[i].planes[j].pitch;
            plane_data[j].size       = 0; /* the spec requires this and the two below to be 0 */
            plane_data[j].arrayPitch = 0;
            plane_data[j].depthPitch = 0;
        }

        ret = vkCreateImage(hwctx->act_dev, &image_create_info,
                            hwctx->alloc, &f->img[i]);
        if (ret != VK_SUCCESS) {
            av_log(ctx, AV_LOG_ERROR, "Image creation failure: %s\n",
                   vk_ret2str(ret));
            err = AVERROR(EINVAL);
            goto fail;
        }

        ret = vkCreateSemaphore(hwctx->act_dev, &sem_spawn,
                                hwctx->alloc, &f->sem[i]);
        if (ret != VK_SUCCESS) {
            av_log(hwctx, AV_LOG_ERROR, "Failed to create semaphore: %s\n",
                   vk_ret2str(ret));
            return AVERROR_EXTERNAL;
        }

        /* Ideally we'd import a semaphore onto the one we just created via
         * vkImportSemaphoreFdKHR, but unfortunately neither DRM nor VAAPI
         * gives us anything we could import and synchronize with, so instead
         * we just signal the semaphore we created ourselves. */

        f->layout[i] = image_create_info.initialLayout;
        f->access[i] = 0x0;

        for (int j = 0; j < planes; j++) {
            VkImageAspectFlagBits aspect = j == 0 ? VK_IMAGE_ASPECT_MEMORY_PLANE_0_BIT_EXT :
                                           j == 1 ? VK_IMAGE_ASPECT_MEMORY_PLANE_1_BIT_EXT :
                                                    VK_IMAGE_ASPECT_MEMORY_PLANE_2_BIT_EXT;

            plane_info[bind_counts].sType = VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO;
            plane_info[bind_counts].planeAspect = aspect;

            bind_info[bind_counts].sType  = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO;
            bind_info[bind_counts].pNext  = signal_p ? &plane_info[bind_counts] : NULL;
            bind_info[bind_counts].image  = f->img[i];
            bind_info[bind_counts].memory = f->mem[desc->layers[i].planes[j].object_index];
            bind_info[bind_counts].memoryOffset = desc->layers[i].planes[j].offset;
            bind_counts++;
        }
    }

    /* Bind the allocated memory to the images */
    ret = vkBindImageMemory2(hwctx->act_dev, bind_counts, bind_info);
    if (ret != VK_SUCCESS) {
        av_log(ctx, AV_LOG_ERROR, "Failed to bind memory: %s\n",
               vk_ret2str(ret));
        return AVERROR_EXTERNAL;
    }

    /* NOTE: this step becomes unnecessary once we can import semaphores from
     * DRM; until then we have to activate (signal) the semaphores ourselves.
     * We reuse the exec context that's also used for uploads/downloads. */
    err = prepare_frame(hwfc, &fp->conv_ctx, f, PREP_MODE_RO_SHADER);
    if (err)
        goto fail;

    *frame = f;

    return 0;

fail:
    for (int i = 0; i < desc->nb_layers; i++) {
        vkDestroyImage(hwctx->act_dev, f->img[i], hwctx->alloc);
        vkDestroySemaphore(hwctx->act_dev, f->sem[i], hwctx->alloc);
    }
    for (int i = 0; i < desc->nb_objects; i++)
        vkFreeMemory(hwctx->act_dev, f->mem[i], hwctx->alloc);

    av_free(f);

    return err;
}
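
/* Wraps an imported DRM frame descriptor in an AVFrame and registers the
 * unmap callback. From user code this path is reached via av_hwframe_map();
 * a rough sketch, assuming vk_frames_ref is a valid Vulkan frames context
 * reference and drm_frame carries an AVDRMFrameDescriptor (error handling
 * elided):
 *
 *     AVFrame *dst = av_frame_alloc();
 *     dst->format        = AV_PIX_FMT_VULKAN;
 *     dst->hw_frames_ctx = av_buffer_ref(vk_frames_ref);
 *     err = av_hwframe_map(dst, drm_frame, AV_HWFRAME_MAP_READ);
 */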
static int vulkan_map_from_drm(AVHWFramesContext *hwfc, AVFrame *dst,
                               const AVFrame *src, int flags)
{
    int err = 0;
    AVVkFrame *f;
    VulkanMapping *map = NULL;

    err = vulkan_map_from_drm_frame_desc(hwfc, &f,
                                         (AVDRMFrameDescriptor *)src->data[0]);
    if (err)
        return err;

    /* The unmapping function will free this */
    dst->data[0] = (uint8_t *)f;
    dst->width   = src->width;
    dst->height  = src->height;

    map = av_mallocz(sizeof(VulkanMapping));
    if (!map)
        goto fail;

    map->frame = f;
    map->flags = flags;

    err = ff_hwframe_map_create(dst->hw_frames_ctx, dst, src,
                                &vulkan_unmap_from, map);
    if (err < 0)
        goto fail;

    av_log(hwfc, AV_LOG_DEBUG, "Mapped DRM object to Vulkan!\n");

    return 0;

fail:
    vulkan_frame_free(hwfc->device_ctx->hwctx, (uint8_t *)f);
    av_free(map);
    return err;
}
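
/* VAAPI surfaces are mapped to Vulkan by first exporting them as DRM PRIME
 * descriptors and then importing those through the DRM path above. */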
static int vulkan_map_from_vaapi(AVHWFramesContext *dst_fc,
                                 AVFrame *dst, const AVFrame *src,
                                 int flags)
{
    int err;
    AVFrame *tmp = av_frame_alloc();
    AVHWFramesContext *vaapi_fc = (AVHWFramesContext*)src->hw_frames_ctx->data;
    AVVAAPIDeviceContext *vaapi_ctx = vaapi_fc->device_ctx->hwctx;
    VASurfaceID surface_id = (VASurfaceID)(uintptr_t)src->data[3];

    if (!tmp)
        return AVERROR(ENOMEM);

    /* We have to sync manually here since, as noted above, there are no
     * semaphores we could import and wait on instead. */
    vaSyncSurface(vaapi_ctx->display, surface_id);

    tmp->format = AV_PIX_FMT_DRM_PRIME;

    err = av_hwframe_map(tmp, src, flags);
    if (err < 0)
        goto fail;

    err = vulkan_map_from_drm(dst_fc, dst, tmp, flags);
    if (err < 0)
        goto fail;

    err = ff_hwframe_map_replace(dst, src);

fail:
    av_frame_free(&tmp);
    return err;
}
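
/* Exports an AVVkFrame's memory and semaphores as opaque FDs and imports
 * them into CUDA as external memory and semaphore objects. The result is
 * cached on the frame's internal struct, since re-importing on every
 * transfer would be prohibitively expensive. */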
static int vulkan_export_to_cuda(AVHWFramesContext *hwfc,
                                 AVBufferRef *cuda_hwfc,
                                 const AVFrame *frame)
{
    int err, ret;
    AVVkFrame *dst_f;
    AVVkFrameInternal *dst_int;
    AVHWDeviceContext *ctx = hwfc->device_ctx;
    AVVulkanDeviceContext *hwctx = ctx->hwctx;
    const int planes = av_pix_fmt_count_planes(hwfc->sw_format);
    const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(hwfc->sw_format);
    VK_LOAD_PFN(hwctx->inst, vkGetMemoryFdKHR);
    VK_LOAD_PFN(hwctx->inst, vkGetSemaphoreFdKHR);

    AVHWFramesContext *cuda_fc = (AVHWFramesContext*)cuda_hwfc->data;
    AVHWDeviceContext *cuda_cu = cuda_fc->device_ctx;
    AVCUDADeviceContext *cuda_dev = cuda_cu->hwctx;
    AVCUDADeviceContextInternal *cu_internal = cuda_dev->internal;
    CudaFunctions *cu = cu_internal->cuda_dl;
    CUarray_format cufmt = desc->comp[0].depth > 8 ? CU_AD_FORMAT_UNSIGNED_INT16 :
                                                     CU_AD_FORMAT_UNSIGNED_INT8;

    dst_f = (AVVkFrame *)frame->data[0];

    dst_int = dst_f->internal;
    if (!dst_int || !dst_int->cuda_fc_ref) {
        if (!dst_f->internal)
            dst_f->internal = dst_int = av_mallocz(sizeof(*dst_f->internal));

        if (!dst_int) {
            err = AVERROR(ENOMEM);
            goto fail;
        }

        dst_int->cuda_fc_ref = av_buffer_ref(cuda_hwfc);
        if (!dst_int->cuda_fc_ref) {
            err = AVERROR(ENOMEM);
            goto fail;
        }

        for (int i = 0; i < planes; i++) {
            CUDA_EXTERNAL_MEMORY_MIPMAPPED_ARRAY_DESC tex_desc = {
                .offset = 0,
                .arrayDesc = {
                    .Width  = i > 0 ? AV_CEIL_RSHIFT(hwfc->width, desc->log2_chroma_w)
                                    : hwfc->width,
                    .Height = i > 0 ? AV_CEIL_RSHIFT(hwfc->height, desc->log2_chroma_h)
                                    : hwfc->height,
                    .Depth = 0,
                    .Format = cufmt,
                    .NumChannels = 1 + ((planes == 2) && i),
                    .Flags = 0,
                },
                .numLevels = 1,
            };
            CUDA_EXTERNAL_MEMORY_HANDLE_DESC ext_desc = {
                .type = CU_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD,
                .size = dst_f->size[i],
            };
            VkMemoryGetFdInfoKHR export_info = {
                .sType      = VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR,
                .memory     = dst_f->mem[i],
                .handleType = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT_KHR,
            };
            VkSemaphoreGetFdInfoKHR sem_export = {
                .sType      = VK_STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR,
                .semaphore  = dst_f->sem[i],
                .handleType = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT,
            };
            CUDA_EXTERNAL_SEMAPHORE_HANDLE_DESC ext_sem_desc = {
                .type = CU_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD,
            };

            ret = pfn_vkGetMemoryFdKHR(hwctx->act_dev, &export_info,
                                       &ext_desc.handle.fd);
            if (ret != VK_SUCCESS) {
                av_log(hwfc, AV_LOG_ERROR, "Unable to export the image as an FD!\n");
                err = AVERROR_EXTERNAL;
                goto fail;
            }

            ret = CHECK_CU(cu->cuImportExternalMemory(&dst_int->ext_mem[i], &ext_desc));
            if (ret < 0) {
                err = AVERROR_EXTERNAL;
                goto fail;
            }

            ret = CHECK_CU(cu->cuExternalMemoryGetMappedMipmappedArray(&dst_int->cu_mma[i],
                                                                       dst_int->ext_mem[i],
                                                                       &tex_desc));
            if (ret < 0) {
                err = AVERROR_EXTERNAL;
                goto fail;
            }

            ret = CHECK_CU(cu->cuMipmappedArrayGetLevel(&dst_int->cu_array[i],
                                                        dst_int->cu_mma[i], 0));
            if (ret < 0) {
                err = AVERROR_EXTERNAL;
                goto fail;
            }

            ret = pfn_vkGetSemaphoreFdKHR(hwctx->act_dev, &sem_export,
                                          &ext_sem_desc.handle.fd);
            if (ret != VK_SUCCESS) {
                av_log(ctx, AV_LOG_ERROR, "Failed to export semaphore: %s\n",
                       vk_ret2str(ret));
                err = AVERROR_EXTERNAL;
                goto fail;
            }

            ret = CHECK_CU(cu->cuImportExternalSemaphore(&dst_int->cu_sem[i],
                                                         &ext_sem_desc));
            if (ret < 0) {
                err = AVERROR_EXTERNAL;
                goto fail;
            }
        }
    }

    return 0;

fail:
    return err;
}
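
/* Copies a CUDA frame into a Vulkan frame: waits on the frame's exported
 * semaphores, runs one cuMemcpy2DAsync per plane into the mapped CUDA
 * arrays, then signals the semaphores again so Vulkan can safely use the
 * images. */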
static int vulkan_transfer_data_from_cuda(AVHWFramesContext *hwfc,
                                          AVFrame *dst, const AVFrame *src)
{
    int err, ret;
    CUcontext dummy;
    AVVkFrame *dst_f;
    AVVkFrameInternal *dst_int;
    const int planes = av_pix_fmt_count_planes(hwfc->sw_format);
    const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(hwfc->sw_format);

    AVHWFramesContext *cuda_fc = (AVHWFramesContext*)src->hw_frames_ctx->data;
    AVHWDeviceContext *cuda_cu = cuda_fc->device_ctx;
    AVCUDADeviceContext *cuda_dev = cuda_cu->hwctx;
    AVCUDADeviceContextInternal *cu_internal = cuda_dev->internal;
    CudaFunctions *cu = cu_internal->cuda_dl;
    CUDA_EXTERNAL_SEMAPHORE_WAIT_PARAMS s_w_par[AV_NUM_DATA_POINTERS] = { 0 };
    CUDA_EXTERNAL_SEMAPHORE_SIGNAL_PARAMS s_s_par[AV_NUM_DATA_POINTERS] = { 0 };

    ret = CHECK_CU(cu->cuCtxPushCurrent(cuda_dev->cuda_ctx));
    if (ret < 0) {
        err = AVERROR_EXTERNAL;
        goto fail;
    }

    dst_f = (AVVkFrame *)dst->data[0];

    ret = vulkan_export_to_cuda(hwfc, src->hw_frames_ctx, dst);
    if (ret < 0) {
        err = ret;
        goto fail;
    }

    dst_int = dst_f->internal;

    ret = CHECK_CU(cu->cuWaitExternalSemaphoresAsync(dst_int->cu_sem, s_w_par,
                                                     planes, cuda_dev->stream));
    if (ret < 0) {
        err = AVERROR_EXTERNAL;
        goto fail;
    }

    for (int i = 0; i < planes; i++) {
        CUDA_MEMCPY2D cpy = {
            .srcMemoryType = CU_MEMORYTYPE_DEVICE,
            .srcDevice     = (CUdeviceptr)src->data[i],
            .srcPitch      = src->linesize[i],
            .srcY          = 0,

            .dstMemoryType = CU_MEMORYTYPE_ARRAY,
            .dstArray      = dst_int->cu_array[i],
            .WidthInBytes  = (i > 0 ? AV_CEIL_RSHIFT(hwfc->width, desc->log2_chroma_w)
                                    : hwfc->width) * desc->comp[i].step,
            .Height        = i > 0 ? AV_CEIL_RSHIFT(hwfc->height, desc->log2_chroma_h)
                                   : hwfc->height,
        };

        ret = CHECK_CU(cu->cuMemcpy2DAsync(&cpy, cuda_dev->stream));
        if (ret < 0) {
            err = AVERROR_EXTERNAL;
            goto fail;
        }
    }

    ret = CHECK_CU(cu->cuSignalExternalSemaphoresAsync(dst_int->cu_sem, s_s_par,
                                                       planes, cuda_dev->stream));
    if (ret < 0) {
        err = AVERROR_EXTERNAL;
        goto fail;
    }

    CHECK_CU(cu->cuCtxPopCurrent(&dummy));

    av_log(hwfc, AV_LOG_VERBOSE, "Transferred CUDA image to Vulkan!\n");

    return 0;

fail:
    CHECK_CU(cu->cuCtxPopCurrent(&dummy));
    vulkan_free_internal(dst_int);
    dst_f->internal = NULL;
    av_buffer_unref(&dst->buf[0]);
    return err;
}
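
/* Generic map-to entry point: dispatches on the source format to the VAAPI
 * or DRM import paths, provided the device was created with the external
 * DMA-BUF memory extensions. */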
static int vulkan_map_to(AVHWFramesContext *hwfc, AVFrame *dst,
                         const AVFrame *src, int flags)
{
    av_unused VulkanDevicePriv *p = hwfc->device_ctx->internal->priv;

    switch (src->format) {
#if CONFIG_LIBDRM
#if CONFIG_VAAPI
    case AV_PIX_FMT_VAAPI:
        if (p->extensions & EXT_EXTERNAL_DMABUF_MEMORY)
            return vulkan_map_from_vaapi(hwfc, dst, src, flags);
#endif
    case AV_PIX_FMT_DRM_PRIME:
        if (p->extensions & EXT_EXTERNAL_DMABUF_MEMORY)
            return vulkan_map_from_drm(hwfc, dst, src, flags);
#endif
    default:
        return AVERROR(ENOSYS);
    }
}

typedef struct VulkanDRMMapping {
    AVDRMFrameDescriptor drm_desc;
    AVVkFrame *source;
} VulkanDRMMapping;

static void vulkan_unmap_to_drm(AVHWFramesContext *hwfc, HWMapDescriptor *hwmap)
{
    AVDRMFrameDescriptor *drm_desc = hwmap->priv;

    for (int i = 0; i < drm_desc->nb_objects; i++)
        close(drm_desc->objects[i].fd);

    av_free(drm_desc);
}

static inline uint32_t vulkan_fmt_to_drm(VkFormat vkfmt)
{
    for (int i = 0; i < FF_ARRAY_ELEMS(vulkan_drm_format_map); i++)
        if (vulkan_drm_format_map[i].vk_format == vkfmt)
            return vulkan_drm_format_map[i].drm_fourcc;
    return DRM_FORMAT_INVALID;
}
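
/* The export path: transitions the frame for external use, queries the DRM
 * format modifier when available, exports each memory object as a DMA-BUF
 * FD and fills an AVDRMFrameDescriptor with the per-plane layouts. */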
static int vulkan_map_to_drm(AVHWFramesContext *hwfc, AVFrame *dst,
                             const AVFrame *src, int flags)
{
    int err = 0;
    VkResult ret;
    AVVkFrame *f = (AVVkFrame *)src->data[0];
    VulkanDevicePriv *p = hwfc->device_ctx->internal->priv;
    VulkanFramesPriv *fp = hwfc->internal->priv;
    AVVulkanDeviceContext *hwctx = hwfc->device_ctx->hwctx;
    const int planes = av_pix_fmt_count_planes(hwfc->sw_format);
    VK_LOAD_PFN(hwctx->inst, vkGetMemoryFdKHR);
    VkImageDrmFormatModifierPropertiesEXT drm_mod = {
        .sType = VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT,
    };

    AVDRMFrameDescriptor *drm_desc = av_mallocz(sizeof(*drm_desc));
    if (!drm_desc)
        return AVERROR(ENOMEM);

    err = prepare_frame(hwfc, &fp->conv_ctx, f, PREP_MODE_EXTERNAL_EXPORT);
    if (err < 0)
        goto end;

    err = ff_hwframe_map_create(src->hw_frames_ctx, dst, src, &vulkan_unmap_to_drm, drm_desc);
    if (err < 0)
        goto end;

    if (p->extensions & EXT_DRM_MODIFIER_FLAGS) {
        VK_LOAD_PFN(hwctx->inst, vkGetImageDrmFormatModifierPropertiesEXT);
        ret = pfn_vkGetImageDrmFormatModifierPropertiesEXT(hwctx->act_dev, f->img[0],
                                                           &drm_mod);
        if (ret != VK_SUCCESS) {
            av_log(hwfc, AV_LOG_ERROR, "Failed to retrieve DRM format modifier!\n");
            err = AVERROR_EXTERNAL;
            goto end;
        }
    }

    for (int i = 0; (i < planes) && (f->mem[i]); i++) {
        VkMemoryGetFdInfoKHR export_info = {
            .sType      = VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR,
            .memory     = f->mem[i],
            .handleType = VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT,
        };

        ret = pfn_vkGetMemoryFdKHR(hwctx->act_dev, &export_info,
                                   &drm_desc->objects[i].fd);
        if (ret != VK_SUCCESS) {
            av_log(hwfc, AV_LOG_ERROR, "Unable to export the image as an FD!\n");
            err = AVERROR_EXTERNAL;
            goto end;
        }

        drm_desc->nb_objects++;
        drm_desc->objects[i].size = f->size[i];
        drm_desc->objects[i].format_modifier = drm_mod.drmFormatModifier;
    }

    drm_desc->nb_layers = planes;
    for (int i = 0; i < drm_desc->nb_layers; i++) {
        VkSubresourceLayout layout;
        VkImageSubresource sub = {
            .aspectMask = p->extensions & EXT_DRM_MODIFIER_FLAGS ?
                          VK_IMAGE_ASPECT_MEMORY_PLANE_0_BIT_EXT :
                          VK_IMAGE_ASPECT_COLOR_BIT,
        };
        VkFormat plane_vkfmt = av_vkfmt_from_pixfmt(hwfc->sw_format)[i];

        drm_desc->layers[i].format    = vulkan_fmt_to_drm(plane_vkfmt);
        drm_desc->layers[i].nb_planes = 1;

        if (drm_desc->layers[i].format == DRM_FORMAT_INVALID) {
            av_log(hwfc, AV_LOG_ERROR, "Cannot map to DRM layer, unsupported!\n");
            err = AVERROR_PATCHWELCOME;
            goto end;
        }

        drm_desc->layers[i].planes[0].object_index = FFMIN(i, drm_desc->nb_objects - 1);

        if (f->tiling == VK_IMAGE_TILING_OPTIMAL)
            continue;

        vkGetImageSubresourceLayout(hwctx->act_dev, f->img[i], &sub, &layout);
        drm_desc->layers[i].planes[0].offset = layout.offset;
        drm_desc->layers[i].planes[0].pitch  = layout.rowPitch;
    }

    dst->width   = src->width;
    dst->height  = src->height;
    dst->data[0] = (uint8_t *)drm_desc;

    av_log(hwfc, AV_LOG_VERBOSE, "Mapped AVVkFrame to a DRM object!\n");

    return 0;

end:
    av_free(drm_desc);
    return err;
}
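
/* Like the import direction, mapping to VAAPI goes through an intermediate
 * DRM PRIME frame. */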
static int vulkan_map_to_vaapi(AVHWFramesContext *hwfc, AVFrame *dst,
                               const AVFrame *src, int flags)
{
    int err;
    AVFrame *tmp = av_frame_alloc();
    if (!tmp)
        return AVERROR(ENOMEM);

    tmp->format = AV_PIX_FMT_DRM_PRIME;

    err = vulkan_map_to_drm(hwfc, tmp, src, flags);
    if (err < 0)
        goto fail;

    err = av_hwframe_map(dst, tmp, flags);
    if (err < 0)
        goto fail;

    err = ff_hwframe_map_replace(dst, src);

fail:
    av_frame_free(&tmp);
    return err;
}
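
/* Generic map-from entry point: exports to DRM PRIME or VAAPI when possible,
 * otherwise falls back on mapping the image to host memory. */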
static int vulkan_map_from(AVHWFramesContext *hwfc, AVFrame *dst,
                           const AVFrame *src, int flags)
{
    av_unused VulkanDevicePriv *p = hwfc->device_ctx->internal->priv;

    switch (dst->format) {
#if CONFIG_LIBDRM
    case AV_PIX_FMT_DRM_PRIME:
        if (p->extensions & EXT_EXTERNAL_DMABUF_MEMORY)
            return vulkan_map_to_drm(hwfc, dst, src, flags);
#if CONFIG_VAAPI
    case AV_PIX_FMT_VAAPI:
        if (p->extensions & EXT_EXTERNAL_DMABUF_MEMORY)
            return vulkan_map_to_vaapi(hwfc, dst, src, flags);
#endif
#endif
    default:
        return vulkan_map_frame_to_mem(hwfc, dst, src, flags);
    }
}
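
/* Staging buffer machinery used by the upload and download paths for images
 * that can't simply be mapped. */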
typedef struct ImageBuffer {
    VkBuffer buf;
    VkDeviceMemory mem;
    VkMemoryPropertyFlagBits flags;
} ImageBuffer;

static void free_buf(void *opaque, uint8_t *data)
{
    AVHWDeviceContext *ctx = opaque;
    AVVulkanDeviceContext *hwctx = ctx->hwctx;
    ImageBuffer *vkbuf = (ImageBuffer *)data;

    if (vkbuf->buf)
        vkDestroyBuffer(hwctx->act_dev, vkbuf->buf, hwctx->alloc);
    if (vkbuf->mem)
        vkFreeMemory(hwctx->act_dev, vkbuf->mem, hwctx->alloc);

    av_free(data);
}
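
/* Creates a height * stride staging buffer, with the stride aligned up to
 * the device's optimalBufferCopyRowPitchAlignment, allocates memory with the
 * requested properties, binds the two and wraps them in a refcounted
 * AVBufferRef that cleans everything up via free_buf(). */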
static int create_buf(AVHWDeviceContext *ctx, AVBufferRef **buf,
                      int height, int *stride, VkBufferUsageFlags usage,
                      VkMemoryPropertyFlagBits flags, void *create_pnext,
                      void *alloc_pnext)
{
    int err;
    VkResult ret;
    VkMemoryRequirements req;
    AVVulkanDeviceContext *hwctx = ctx->hwctx;
    VulkanDevicePriv *p = ctx->internal->priv;

    VkBufferCreateInfo buf_spawn = {
        .sType       = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,
        .pNext       = create_pnext,
        .usage       = usage,
        .sharingMode = VK_SHARING_MODE_EXCLUSIVE,
    };

    ImageBuffer *vkbuf = av_mallocz(sizeof(*vkbuf));
    if (!vkbuf)
        return AVERROR(ENOMEM);

    *stride = FFALIGN(*stride, p->props.limits.optimalBufferCopyRowPitchAlignment);
    buf_spawn.size = height*(*stride);

    ret = vkCreateBuffer(hwctx->act_dev, &buf_spawn, NULL, &vkbuf->buf);
    if (ret != VK_SUCCESS) {
        av_log(ctx, AV_LOG_ERROR, "Failed to create buffer: %s\n",
               vk_ret2str(ret));
        return AVERROR_EXTERNAL;
    }

    vkGetBufferMemoryRequirements(hwctx->act_dev, vkbuf->buf, &req);

    err = alloc_mem(ctx, &req, flags, alloc_pnext, &vkbuf->flags, &vkbuf->mem);
    if (err)
        return err;

    ret = vkBindBufferMemory(hwctx->act_dev, vkbuf->buf, vkbuf->mem, 0);
    if (ret != VK_SUCCESS) {
        av_log(ctx, AV_LOG_ERROR, "Failed to bind memory to buffer: %s\n",
               vk_ret2str(ret));
        free_buf(ctx, (uint8_t *)vkbuf);
        return AVERROR_EXTERNAL;
    }

    *buf = av_buffer_create((uint8_t *)vkbuf, sizeof(*vkbuf), free_buf, ctx, 0);
    if (!(*buf)) {
        free_buf(ctx, (uint8_t *)vkbuf);
        return AVERROR(ENOMEM);
    }

    return 0;
}
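
/* Maps the staging buffers into host memory. For non-coherent memory an
 * invalidate is required before the host reads anything the GPU has written,
 * so downloads pass invalidate = 1. Typical use, as in the transfer
 * functions further down:
 *
 *     uint8_t *mem[AV_NUM_DATA_POINTERS];
 *     if ((err = map_buffers(dev_ctx, bufs, mem, planes, 1)))
 *         goto end;
 *     // ...read or write through mem[i]...
 *     if ((err = unmap_buffers(dev_ctx, bufs, planes, 0)))
 *         goto end;
 */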
static int map_buffers(AVHWDeviceContext *ctx, AVBufferRef **bufs, uint8_t *mem[],
                       int nb_buffers, int invalidate)
{
    VkResult ret;
    AVVulkanDeviceContext *hwctx = ctx->hwctx;
    VkMappedMemoryRange invalidate_ctx[AV_NUM_DATA_POINTERS];
    int invalidate_count = 0;

    for (int i = 0; i < nb_buffers; i++) {
        ImageBuffer *vkbuf = (ImageBuffer *)bufs[i]->data;
        ret = vkMapMemory(hwctx->act_dev, vkbuf->mem, 0,
                          VK_WHOLE_SIZE, 0, (void **)&mem[i]);
        if (ret != VK_SUCCESS) {
            av_log(ctx, AV_LOG_ERROR, "Failed to map buffer memory: %s\n",
                   vk_ret2str(ret));
            return AVERROR_EXTERNAL;
        }
    }

    if (!invalidate)
        return 0;

    for (int i = 0; i < nb_buffers; i++) {
        ImageBuffer *vkbuf = (ImageBuffer *)bufs[i]->data;
        const VkMappedMemoryRange ival_buf = {
            .sType  = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE,
            .memory = vkbuf->mem,
            .size   = VK_WHOLE_SIZE,
        };
        if (vkbuf->flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)
            continue;
        invalidate_ctx[invalidate_count++] = ival_buf;
    }

    if (invalidate_count) {
        ret = vkInvalidateMappedMemoryRanges(hwctx->act_dev, invalidate_count,
                                             invalidate_ctx);
        if (ret != VK_SUCCESS)
            av_log(ctx, AV_LOG_WARNING, "Failed to invalidate memory: %s\n",
                   vk_ret2str(ret));
    }

    return 0;
}

static int unmap_buffers(AVHWDeviceContext *ctx, AVBufferRef **bufs,
                         int nb_buffers, int flush)
{
    int err = 0;
    VkResult ret;
    AVVulkanDeviceContext *hwctx = ctx->hwctx;
    VkMappedMemoryRange flush_ctx[AV_NUM_DATA_POINTERS];
    int flush_count = 0;

    if (flush) {
        for (int i = 0; i < nb_buffers; i++) {
            ImageBuffer *vkbuf = (ImageBuffer *)bufs[i]->data;
            const VkMappedMemoryRange flush_buf = {
                .sType  = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE,
                .memory = vkbuf->mem,
                .size   = VK_WHOLE_SIZE,
            };
            if (vkbuf->flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)
                continue;
            flush_ctx[flush_count++] = flush_buf;
        }
    }

    if (flush_count) {
        ret = vkFlushMappedMemoryRanges(hwctx->act_dev, flush_count, flush_ctx);
        if (ret != VK_SUCCESS) {
            av_log(ctx, AV_LOG_ERROR, "Failed to flush memory: %s\n",
                   vk_ret2str(ret));
            err = AVERROR_EXTERNAL; /* We still want to try to unmap them */
        }
    }

    for (int i = 0; i < nb_buffers; i++) {
        ImageBuffer *vkbuf = (ImageBuffer *)bufs[i]->data;
        vkUnmapMemory(hwctx->act_dev, vkbuf->mem);
    }

    return err;
}
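
/* Records and submits the actual image <-> buffer copies: waits on the exec
 * context, transitions each plane to a transfer-friendly layout with a
 * pipeline barrier, records one vkCmdCopy{ImageToBuffer,BufferToImage} per
 * plane and submits, waiting on and re-signalling the frame's semaphores. */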
static int transfer_image_buf(AVHWFramesContext *hwfc, const AVFrame *f,
                              AVBufferRef **bufs, const int *buf_stride, int w,
                              int h, enum AVPixelFormat pix_fmt, int to_buf)
{
    int err;
    AVVkFrame *frame = (AVVkFrame *)f->data[0];
    VulkanFramesPriv *fp = hwfc->internal->priv;

    int bar_num = 0;
    VkPipelineStageFlagBits sem_wait_dst[AV_NUM_DATA_POINTERS];

    const int planes = av_pix_fmt_count_planes(pix_fmt);
    const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(pix_fmt);

    VkImageMemoryBarrier img_bar[AV_NUM_DATA_POINTERS] = { 0 };
    VulkanExecCtx *ectx = to_buf ? &fp->download_ctx : &fp->upload_ctx;
    VkCommandBuffer cmd_buf = get_buf_exec_ctx(hwfc, ectx);

    VkSubmitInfo s_info = {
        .sType                = VK_STRUCTURE_TYPE_SUBMIT_INFO,
        .pSignalSemaphores    = frame->sem,
        .pWaitSemaphores      = frame->sem,
        .pWaitDstStageMask    = sem_wait_dst,
        .signalSemaphoreCount = planes,
        .waitSemaphoreCount   = planes,
    };

    if ((err = wait_start_exec_ctx(hwfc, ectx)))
        return err;

    /* Change the image layout to something more optimal for transfers */
    for (int i = 0; i < planes; i++) {
        VkImageLayout new_layout = to_buf ? VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL :
                                            VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
        VkAccessFlags new_access = to_buf ? VK_ACCESS_TRANSFER_READ_BIT :
                                            VK_ACCESS_TRANSFER_WRITE_BIT;

        sem_wait_dst[i] = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;

        /* If the layout matches and we have read access, skip the barrier */
        if ((frame->layout[i] == new_layout) && (frame->access[i] & new_access))
            continue;

        img_bar[bar_num].sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
        img_bar[bar_num].srcAccessMask = 0x0;
        img_bar[bar_num].dstAccessMask = new_access;
        img_bar[bar_num].oldLayout = frame->layout[i];
        img_bar[bar_num].newLayout = new_layout;
        img_bar[bar_num].srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
        img_bar[bar_num].dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
        img_bar[bar_num].image = frame->img[i];
        img_bar[bar_num].subresourceRange.levelCount = 1;
        img_bar[bar_num].subresourceRange.layerCount = 1;
        img_bar[bar_num].subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;

        frame->layout[i] = img_bar[bar_num].newLayout;
        frame->access[i] = img_bar[bar_num].dstAccessMask;

        bar_num++;
    }

    if (bar_num)
        vkCmdPipelineBarrier(cmd_buf, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
                             VK_PIPELINE_STAGE_TRANSFER_BIT, 0,
                             0, NULL, 0, NULL, bar_num, img_bar);

    /* Schedule a copy for each plane */
    for (int i = 0; i < planes; i++) {
        ImageBuffer *vkbuf = (ImageBuffer *)bufs[i]->data;
        const int p_w = i > 0 ? AV_CEIL_RSHIFT(w, desc->log2_chroma_w) : w;
        const int p_h = i > 0 ? AV_CEIL_RSHIFT(h, desc->log2_chroma_h) : h;
        VkBufferImageCopy buf_reg = {
            .bufferOffset = 0,
            /* Buffer stride isn't in bytes but in samples; the implementation
             * uses the image's VkFormat to work out how many bytes each
             * sample takes, so we have to convert by dividing. This won't
             * work for YUVA or other planar formats with alpha. */
            .bufferRowLength = buf_stride[i] / desc->comp[i].step,
            .bufferImageHeight = p_h,
            .imageSubresource.layerCount = 1,
            .imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
            .imageOffset = { 0, 0, 0, },
            .imageExtent = { p_w, p_h, 1, },
        };

        if (to_buf)
            vkCmdCopyImageToBuffer(cmd_buf, frame->img[i], frame->layout[i],
                                   vkbuf->buf, 1, &buf_reg);
        else
            vkCmdCopyBufferToImage(cmd_buf, vkbuf->buf, frame->img[i],
                                   frame->layout[i], 1, &buf_reg);
    }

    /* When uploading, do this asynchronously if the source is refcounted by
     * keeping the buffers as a submission dependency. The hwcontext is
     * guaranteed not to be freed until all frames have been freed in the
     * frames_uninit function. When downloading to a buffer, do this
     * synchronously and wait for the queue submission to finish executing. */
    if (!to_buf) {
        int ref;
        for (ref = 0; ref < AV_NUM_DATA_POINTERS; ref++) {
            if (!f->buf[ref])
                break;
            if ((err = add_buf_dep_exec_ctx(hwfc, ectx, &f->buf[ref], 1)))
                return err;
        }
        if (ref && (err = add_buf_dep_exec_ctx(hwfc, ectx, bufs, planes)))
            return err;
        return submit_exec_ctx(hwfc, ectx, &s_info, !ref);
    } else {
        return submit_exec_ctx(hwfc, ectx, &s_info, 1);
    }
}

/* Technically we could use VK_EXT_external_memory_host to upload and
 * download, but the alignment requirements make this infeasible: both the
 * pointer and the size of each plane need to be aligned to the minimum
 * alignment requirement, which on all current implementations (anv, radv)
 * is 4096. If the requirement gets relaxed (unlikely) this can easily be
 * implemented. */
static int vulkan_transfer_data_from_mem(AVHWFramesContext *hwfc, AVFrame *dst,
                                         const AVFrame *src)
{
    int err = 0;
    AVFrame tmp;
    AVVkFrame *f = (AVVkFrame *)dst->data[0];
    AVHWDeviceContext *dev_ctx = hwfc->device_ctx;
    AVBufferRef *bufs[AV_NUM_DATA_POINTERS] = { 0 };
    const int planes = av_pix_fmt_count_planes(src->format);
    int log2_chroma = av_pix_fmt_desc_get(src->format)->log2_chroma_h;

    if (src->format != AV_PIX_FMT_NONE && !av_vkfmt_from_pixfmt(src->format)) {
        av_log(hwfc, AV_LOG_ERROR, "Unsupported source pixel format!\n");
        return AVERROR(EINVAL);
    }

    if (src->width > hwfc->width || src->height > hwfc->height)
        return AVERROR(EINVAL);

    /* For linear, host-visible images */
    if (f->tiling == VK_IMAGE_TILING_LINEAR &&
        f->flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) {
        AVFrame *map = av_frame_alloc();
        if (!map)
            return AVERROR(ENOMEM);
        map->format = src->format;

        err = vulkan_map_frame_to_mem(hwfc, map, dst, AV_HWFRAME_MAP_WRITE);
        if (err)
            return err;

        err = av_frame_copy(map, src);
        av_frame_free(&map);
        return err;
    }

    /* Create buffers */
    for (int i = 0; i < planes; i++) {
        int h = src->height;
        int p_height = i > 0 ? AV_CEIL_RSHIFT(h, log2_chroma) : h;

        tmp.linesize[i] = FFABS(src->linesize[i]);
        err = create_buf(dev_ctx, &bufs[i], p_height,
                         &tmp.linesize[i], VK_BUFFER_USAGE_TRANSFER_SRC_BIT,
                         VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT, NULL, NULL);
        if (err)
            goto end;
    }

    /* Map, copy image to buffer, unmap */
    if ((err = map_buffers(dev_ctx, bufs, tmp.data, planes, 0)))
        goto end;

    av_image_copy(tmp.data, tmp.linesize, (const uint8_t **)src->data,
                  src->linesize, src->format, src->width, src->height);

    if ((err = unmap_buffers(dev_ctx, bufs, planes, 1)))
        goto end;

    /* Copy buffers to image */
    err = transfer_image_buf(hwfc, dst, bufs, tmp.linesize,
                             src->width, src->height, src->format, 0);

end:
    for (int i = 0; i < planes; i++)
        av_buffer_unref(&bufs[i]);

    return err;
}
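
/* Upload dispatch: CUDA frames take the dedicated interop path, anything
 * else is treated as host memory. */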
static int vulkan_transfer_data_to(AVHWFramesContext *hwfc, AVFrame *dst,
                                   const AVFrame *src)
{
    av_unused VulkanDevicePriv *p = hwfc->device_ctx->internal->priv;

    switch (src->format) {
#if CONFIG_CUDA
    case AV_PIX_FMT_CUDA:
        if ((p->extensions & EXT_EXTERNAL_FD_MEMORY) &&
            (p->extensions & EXT_EXTERNAL_FD_SEM))
            return vulkan_transfer_data_from_cuda(hwfc, dst, src);
#endif
    default:
        if (src->hw_frames_ctx)
            return AVERROR(ENOSYS);
        else
            return vulkan_transfer_data_from_mem(hwfc, dst, src);
    }
}
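
/* The reverse of vulkan_transfer_data_from_cuda: exports the Vulkan frame to
 * CUDA and copies each plane out of the mapped CUDA arrays into the
 * destination CUDA frame. */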
static int vulkan_transfer_data_to_cuda(AVHWFramesContext *hwfc, AVFrame *dst,
                                        const AVFrame *src)
{
    int err, ret;
    CUcontext dummy;
    AVVkFrame *dst_f;
    AVVkFrameInternal *dst_int;
    const int planes = av_pix_fmt_count_planes(hwfc->sw_format);
    const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(hwfc->sw_format);

    AVHWFramesContext *cuda_fc = (AVHWFramesContext*)dst->hw_frames_ctx->data;
    AVHWDeviceContext *cuda_cu = cuda_fc->device_ctx;
    AVCUDADeviceContext *cuda_dev = cuda_cu->hwctx;
    AVCUDADeviceContextInternal *cu_internal = cuda_dev->internal;
    CudaFunctions *cu = cu_internal->cuda_dl;

    ret = CHECK_CU(cu->cuCtxPushCurrent(cuda_dev->cuda_ctx));
    if (ret < 0) {
        err = AVERROR_EXTERNAL;
        goto fail;
    }

    dst_f = (AVVkFrame *)src->data[0];

    err = vulkan_export_to_cuda(hwfc, dst->hw_frames_ctx, src);
    if (err < 0)
        goto fail;

    dst_int = dst_f->internal;

    for (int i = 0; i < planes; i++) {
        CUDA_MEMCPY2D cpy = {
            .dstMemoryType = CU_MEMORYTYPE_DEVICE,
            .dstDevice     = (CUdeviceptr)dst->data[i],
            .dstPitch      = dst->linesize[i],
            .dstY          = 0,

            .srcMemoryType = CU_MEMORYTYPE_ARRAY,
            .srcArray      = dst_int->cu_array[i],
            .WidthInBytes  = (i > 0 ? AV_CEIL_RSHIFT(hwfc->width, desc->log2_chroma_w)
                                    : hwfc->width) * desc->comp[i].step,
            .Height        = i > 0 ? AV_CEIL_RSHIFT(hwfc->height, desc->log2_chroma_h)
                                   : hwfc->height,
        };

        ret = CHECK_CU(cu->cuMemcpy2DAsync(&cpy, cuda_dev->stream));
        if (ret < 0) {
            err = AVERROR_EXTERNAL;
            goto fail;
        }
    }

    CHECK_CU(cu->cuCtxPopCurrent(&dummy));

    av_log(hwfc, AV_LOG_VERBOSE, "Transferred Vulkan image to CUDA!\n");

    return 0;

fail:
    CHECK_CU(cu->cuCtxPopCurrent(&dummy));
    vulkan_free_internal(dst_int);
    dst_f->internal = NULL;
    av_buffer_unref(&dst->buf[0]);
    return err;
}
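
/* Download to host memory: either maps the image directly (linear,
 * host-visible images) or goes through the staging buffers. */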
static int vulkan_transfer_data_to_mem(AVHWFramesContext *hwfc, AVFrame *dst,
                                       const AVFrame *src)
{
    int err = 0;
    AVFrame tmp;
    AVVkFrame *f = (AVVkFrame *)src->data[0];
    AVHWDeviceContext *dev_ctx = hwfc->device_ctx;
    AVBufferRef *bufs[AV_NUM_DATA_POINTERS] = { 0 };
    const int planes = av_pix_fmt_count_planes(dst->format);
    int log2_chroma = av_pix_fmt_desc_get(dst->format)->log2_chroma_h;

    if (dst->width > hwfc->width || dst->height > hwfc->height)
        return AVERROR(EINVAL);

    /* For linear, host-visible images */
    if (f->tiling == VK_IMAGE_TILING_LINEAR &&
        f->flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) {
        AVFrame *map = av_frame_alloc();
        if (!map)
            return AVERROR(ENOMEM);
        map->format = dst->format;

        err = vulkan_map_frame_to_mem(hwfc, map, src, AV_HWFRAME_MAP_READ);
        if (err)
            return err;

        err = av_frame_copy(dst, map);
        av_frame_free(&map);
        return err;
    }

    /* Create buffers */
    for (int i = 0; i < planes; i++) {
        int h = dst->height;
        int p_height = i > 0 ? AV_CEIL_RSHIFT(h, log2_chroma) : h;

        tmp.linesize[i] = FFABS(dst->linesize[i]);
        err = create_buf(dev_ctx, &bufs[i], p_height,
                         &tmp.linesize[i], VK_BUFFER_USAGE_TRANSFER_DST_BIT,
                         VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT, NULL, NULL);
        if (err)
            goto end;
    }

    /* Copy image to buffer */
    if ((err = transfer_image_buf(hwfc, src, bufs, tmp.linesize,
                                  dst->width, dst->height, dst->format, 1)))
        goto end;

    /* Map, copy buffer to frame, unmap */
    if ((err = map_buffers(dev_ctx, bufs, tmp.data, planes, 1)))
        goto end;

    av_image_copy(dst->data, dst->linesize, (const uint8_t **)tmp.data,
                  tmp.linesize, dst->format, dst->width, dst->height);

    err = unmap_buffers(dev_ctx, bufs, planes, 0);

end:
    for (int i = 0; i < planes; i++)
        av_buffer_unref(&bufs[i]);

    return err;
}
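
/* Download dispatch, mirroring vulkan_transfer_data_to for the opposite
 * direction. */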
static int vulkan_transfer_data_from(AVHWFramesContext *hwfc, AVFrame *dst,
                                     const AVFrame *src)
{
    av_unused VulkanDevicePriv *p = hwfc->device_ctx->internal->priv;

    switch (dst->format) {
#if CONFIG_CUDA
    case AV_PIX_FMT_CUDA:
        if ((p->extensions & EXT_EXTERNAL_FD_MEMORY) &&
            (p->extensions & EXT_EXTERNAL_FD_SEM))
            return vulkan_transfer_data_to_cuda(hwfc, dst, src);
#endif
    default:
        if (dst->hw_frames_ctx)
            return AVERROR(ENOSYS);
        else
            return vulkan_transfer_data_to_mem(hwfc, dst, src);
    }
}

AVVkFrame *av_vk_frame_alloc(void)
{
    return av_mallocz(sizeof(AVVkFrame));
}
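
/* Entry table for the Vulkan hwcontext. A minimal usage sketch through the
 * public API, assuming at least one Vulkan-capable device is present ("0"
 * selects the first enumerated device; error handling elided):
 *
 *     AVBufferRef *dev_ref = NULL;
 *     int err = av_hwdevice_ctx_create(&dev_ref, AV_HWDEVICE_TYPE_VULKAN,
 *                                      "0", NULL, 0);
 *     if (err < 0)
 *         return err;
 *     // ...create frames contexts, map or transfer frames...
 *     av_buffer_unref(&dev_ref);
 */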
const HWContextType ff_hwcontext_type_vulkan = {
    .type                   = AV_HWDEVICE_TYPE_VULKAN,
    .name                   = "Vulkan",

    .device_hwctx_size      = sizeof(AVVulkanDeviceContext),
    .device_priv_size       = sizeof(VulkanDevicePriv),
    .frames_hwctx_size      = sizeof(AVVulkanFramesContext),
    .frames_priv_size       = sizeof(VulkanFramesPriv),

    .device_init            = &vulkan_device_init,
    .device_create          = &vulkan_device_create,
    .device_derive          = &vulkan_device_derive,

    .frames_get_constraints = &vulkan_frames_get_constraints,
    .frames_init            = vulkan_frames_init,
    .frames_get_buffer      = vulkan_get_buffer,
    .frames_uninit          = vulkan_frames_uninit,

    .transfer_get_formats   = vulkan_transfer_get_formats,
    .transfer_data_to       = vulkan_transfer_data_to,
    .transfer_data_from     = vulkan_transfer_data_from,

    .map_to                 = vulkan_map_to,
    .map_from               = vulkan_map_from,

    .pix_fmts = (const enum AVPixelFormat []) {
        AV_PIX_FMT_VULKAN,
        AV_PIX_FMT_NONE
    },
};