/*
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */
#include "hwcontext.h"
#include "hwcontext_internal.h"
#include "hwcontext_vulkan.h"

#include <drm_fourcc.h>
#include "hwcontext_drm.h"

#include <va/va_drmcommon.h>
#include "hwcontext_vaapi.h"

#include "hwcontext_cuda_internal.h"
#include "cuda_check.h"
#define CHECK_CU(x) FF_CUDA_CHECK_DL(cuda_cu, cu, x)
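/* CHECK_CU() runs a dynamically loaded CUDA call and logs failures through
 * the helpers in cuda_check.h; call sites are expected to have a logging
 * context named cuda_cu and a CudaFunctions table named cu in scope. */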
typedef struct VulkanQueueCtx {
    VkFence fence;
    VkQueue queue;
    int was_synchronous;

    /* Buffer dependencies */
    AVBufferRef **buf_deps;
    int nb_buf_deps;
    int buf_deps_alloc_size;
} VulkanQueueCtx;

typedef struct VulkanExecCtx {
    VkCommandPool pool;
    VkCommandBuffer *bufs;
    VulkanQueueCtx *queues;
    int nb_queues;
    int cur_queue_idx;
} VulkanExecCtx;
typedef struct VulkanDevicePriv {
    /* Properties */
    VkPhysicalDeviceProperties2 props;
    VkPhysicalDeviceMemoryProperties mprops;
    VkPhysicalDeviceExternalMemoryHostPropertiesEXT hprops;

    /* Queue families used, for sharing images between queues */
    uint32_t qfs[3];
    int num_qfs;

    /* Debug callback */
    VkDebugUtilsMessengerEXT debug_ctx;

    /* Enabled optional extensions, a bitmask of enum VulkanExtensions */
    uint64_t extensions;

    /* Settings */
    int use_linear_images;

    /* Set when the physical device is an Nvidia GPU */
    int dev_is_nvidia;
} VulkanDevicePriv;
typedef struct VulkanFramesPriv {
    /* Image conversions */
    VulkanExecCtx conv_ctx;

    /* Image transfers */
    VulkanExecCtx upload_ctx;
    VulkanExecCtx download_ctx;
} VulkanFramesPriv;

typedef struct AVVkFrameInternal {
    /* Importing external memory into CUDA is really expensive, so we keep the
     * memory imported all the time */
    AVBufferRef *cuda_fc_ref; /* Need to keep it around for uninit */
    CUexternalMemory ext_mem[AV_NUM_DATA_POINTERS];
    CUmipmappedArray cu_mma[AV_NUM_DATA_POINTERS];
    CUarray cu_array[AV_NUM_DATA_POINTERS];
    CUexternalSemaphore cu_sem[AV_NUM_DATA_POINTERS];
} AVVkFrameInternal;
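/* Resolves how many queues to use for a given task: graphics queues are used
 * as-is, compute falls back to the graphics family when no dedicated compute
 * family exists, and transfers fall back to compute, then graphics. */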
#define GET_QUEUE_COUNT(hwctx, graph, comp, tx) (                   \
    graph ?  hwctx->nb_graphics_queues :                            \
    comp  ? (hwctx->nb_comp_queues ?                                \
             hwctx->nb_comp_queues : hwctx->nb_graphics_queues) :   \
    tx    ? (hwctx->nb_tx_queues ? hwctx->nb_tx_queues :            \
            (hwctx->nb_comp_queues ?                                \
             hwctx->nb_comp_queues : hwctx->nb_graphics_queues)) :  \
    0                                                               \
)
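/* Declares and loads a function pointer for an instance-level extension
 * function; e.g. VK_LOAD_PFN(inst, vkGetMemoryFdPropertiesKHR) defines a
 * local pfn_vkGetMemoryFdPropertiesKHR. */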
#define VK_LOAD_PFN(inst, name) PFN_##name pfn_##name = (PFN_##name)           \
                                              vkGetInstanceProcAddr(inst, #name)
#define DEFAULT_USAGE_FLAGS (VK_IMAGE_USAGE_SAMPLED_BIT      |                 \
                             VK_IMAGE_USAGE_STORAGE_BIT      |                 \
                             VK_IMAGE_USAGE_TRANSFER_SRC_BIT |                 \
                             VK_IMAGE_USAGE_TRANSFER_DST_BIT)
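/* Appends a strdup'd value to a growing string list; on allocation failure
 * it sets err and jumps to the enclosing function's fail label. */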
#define ADD_VAL_TO_LIST(list, count, val)                                      \
    do {                                                                       \
        list = av_realloc_array(list, sizeof(*list), ++count);                 \
        if (!list) {                                                           \
            err = AVERROR(ENOMEM);                                             \
            goto fail;                                                         \
        }                                                                      \
        list[count - 1] = av_strdup(val);                                      \
        if (!list[count - 1]) {                                                \
            err = AVERROR(ENOMEM);                                             \
            goto fail;                                                         \
        }                                                                      \
    } while(0)
static const struct {
    enum AVPixelFormat pixfmt;
    const VkFormat vkfmts[4];
} vk_pixfmt_map[] = {
    { AV_PIX_FMT_GRAY8,   { VK_FORMAT_R8_UNORM } },
    { AV_PIX_FMT_GRAY16,  { VK_FORMAT_R16_UNORM } },
    { AV_PIX_FMT_GRAYF32, { VK_FORMAT_R32_SFLOAT } },

    { AV_PIX_FMT_NV12, { VK_FORMAT_R8_UNORM,  VK_FORMAT_R8G8_UNORM } },
    { AV_PIX_FMT_NV21, { VK_FORMAT_R8_UNORM,  VK_FORMAT_R8G8_UNORM } },
    { AV_PIX_FMT_P010, { VK_FORMAT_R16_UNORM, VK_FORMAT_R16G16_UNORM } },
    { AV_PIX_FMT_P016, { VK_FORMAT_R16_UNORM, VK_FORMAT_R16G16_UNORM } },

    { AV_PIX_FMT_NV16, { VK_FORMAT_R8_UNORM, VK_FORMAT_R8G8_UNORM } },

    { AV_PIX_FMT_NV24, { VK_FORMAT_R8_UNORM, VK_FORMAT_R8G8_UNORM } },
    { AV_PIX_FMT_NV42, { VK_FORMAT_R8_UNORM, VK_FORMAT_R8G8_UNORM } },

    { AV_PIX_FMT_YUV420P,   { VK_FORMAT_R8_UNORM,  VK_FORMAT_R8_UNORM,  VK_FORMAT_R8_UNORM } },
    { AV_PIX_FMT_YUV420P10, { VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM } },
    { AV_PIX_FMT_YUV420P12, { VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM } },
    { AV_PIX_FMT_YUV420P16, { VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM } },

    { AV_PIX_FMT_YUV422P,   { VK_FORMAT_R8_UNORM,  VK_FORMAT_R8_UNORM,  VK_FORMAT_R8_UNORM } },
    { AV_PIX_FMT_YUV422P10, { VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM } },
    { AV_PIX_FMT_YUV422P12, { VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM } },
    { AV_PIX_FMT_YUV422P16, { VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM } },

    { AV_PIX_FMT_YUV444P,   { VK_FORMAT_R8_UNORM,  VK_FORMAT_R8_UNORM,  VK_FORMAT_R8_UNORM } },
    { AV_PIX_FMT_YUV444P10, { VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM } },
    { AV_PIX_FMT_YUV444P12, { VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM } },
    { AV_PIX_FMT_YUV444P16, { VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM } },

    { AV_PIX_FMT_YUVA420P,   { VK_FORMAT_R8_UNORM,  VK_FORMAT_R8_UNORM,  VK_FORMAT_R8_UNORM,  VK_FORMAT_R8_UNORM } },
    { AV_PIX_FMT_YUVA420P10, { VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM } },
    /* There is no AV_PIX_FMT_YUVA420P12 */
    { AV_PIX_FMT_YUVA420P16, { VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM } },

    { AV_PIX_FMT_YUVA422P,   { VK_FORMAT_R8_UNORM,  VK_FORMAT_R8_UNORM,  VK_FORMAT_R8_UNORM,  VK_FORMAT_R8_UNORM } },
    { AV_PIX_FMT_YUVA422P10, { VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM } },
    { AV_PIX_FMT_YUVA422P12, { VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM } },
    { AV_PIX_FMT_YUVA422P16, { VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM } },

    { AV_PIX_FMT_YUVA444P,   { VK_FORMAT_R8_UNORM,  VK_FORMAT_R8_UNORM,  VK_FORMAT_R8_UNORM,  VK_FORMAT_R8_UNORM } },
    { AV_PIX_FMT_YUVA444P10, { VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM } },
    { AV_PIX_FMT_YUVA444P12, { VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM } },
    { AV_PIX_FMT_YUVA444P16, { VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM } },

    { AV_PIX_FMT_BGRA,   { VK_FORMAT_B8G8R8A8_UNORM } },
    { AV_PIX_FMT_RGBA,   { VK_FORMAT_R8G8B8A8_UNORM } },
    { AV_PIX_FMT_RGB24,  { VK_FORMAT_R8G8B8_UNORM } },
    { AV_PIX_FMT_BGR24,  { VK_FORMAT_B8G8R8_UNORM } },
    { AV_PIX_FMT_RGB48,  { VK_FORMAT_R16G16B16_UNORM } },
    { AV_PIX_FMT_RGBA64, { VK_FORMAT_R16G16B16A16_UNORM } },
    { AV_PIX_FMT_RGB565, { VK_FORMAT_R5G6B5_UNORM_PACK16 } },
    { AV_PIX_FMT_BGR565, { VK_FORMAT_B5G6R5_UNORM_PACK16 } },
    { AV_PIX_FMT_BGR0,   { VK_FORMAT_B8G8R8A8_UNORM } },
    { AV_PIX_FMT_RGB0,   { VK_FORMAT_R8G8B8A8_UNORM } },

    /* Lower priority as there's an endianness-dependent overlap between these
     * and rgba/bgr0, and PACK32 formats are more limited */
    { AV_PIX_FMT_BGR32,  { VK_FORMAT_A8B8G8R8_UNORM_PACK32 } },
    { AV_PIX_FMT_0BGR32, { VK_FORMAT_A8B8G8R8_UNORM_PACK32 } },

    { AV_PIX_FMT_X2RGB10, { VK_FORMAT_A2R10G10B10_UNORM_PACK32 } },

    { AV_PIX_FMT_GBRAP,    { VK_FORMAT_R8_UNORM,   VK_FORMAT_R8_UNORM,   VK_FORMAT_R8_UNORM,   VK_FORMAT_R8_UNORM } },
    { AV_PIX_FMT_GBRAP16,  { VK_FORMAT_R16_UNORM,  VK_FORMAT_R16_UNORM,  VK_FORMAT_R16_UNORM,  VK_FORMAT_R16_UNORM } },
    { AV_PIX_FMT_GBRPF32,  { VK_FORMAT_R32_SFLOAT, VK_FORMAT_R32_SFLOAT, VK_FORMAT_R32_SFLOAT } },
    { AV_PIX_FMT_GBRAPF32, { VK_FORMAT_R32_SFLOAT, VK_FORMAT_R32_SFLOAT, VK_FORMAT_R32_SFLOAT, VK_FORMAT_R32_SFLOAT } },
};
const VkFormat *av_vkfmt_from_pixfmt(enum AVPixelFormat p)
{
    for (enum AVPixelFormat i = 0; i < FF_ARRAY_ELEMS(vk_pixfmt_map); i++)
        if (vk_pixfmt_map[i].pixfmt == p)
            return vk_pixfmt_map[i].vkfmts;
    return NULL;
}
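/* Example: av_vkfmt_from_pixfmt(AV_PIX_FMT_NV12) returns a list whose first
 * two entries are VK_FORMAT_R8_UNORM (luma plane) and VK_FORMAT_R8G8_UNORM
 * (interleaved chroma plane), per the table above. */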
static int pixfmt_is_supported(AVVulkanDeviceContext *hwctx, enum AVPixelFormat p,
                               int linear)
{
    const VkFormat *fmt = av_vkfmt_from_pixfmt(p);
    int planes = av_pix_fmt_count_planes(p);

    if (!fmt)
        return 0;

    for (int i = 0; i < planes; i++) {
        VkFormatFeatureFlags flags;
        VkFormatProperties2 prop = {
            .sType = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2,
        };
        vkGetPhysicalDeviceFormatProperties2(hwctx->phys_dev, fmt[i], &prop);
        flags = linear ? prop.formatProperties.linearTilingFeatures :
                         prop.formatProperties.optimalTilingFeatures;
        if (!(flags & DEFAULT_USAGE_FLAGS))
            return 0;
    }

    return 1;
}
enum VulkanExtensions {
    EXT_EXTERNAL_DMABUF_MEMORY = 1ULL <<  0, /* VK_EXT_external_memory_dma_buf */
    EXT_DRM_MODIFIER_FLAGS     = 1ULL <<  1, /* VK_EXT_image_drm_format_modifier */
    EXT_EXTERNAL_FD_MEMORY     = 1ULL <<  2, /* VK_KHR_external_memory_fd */
    EXT_EXTERNAL_FD_SEM        = 1ULL <<  3, /* VK_KHR_external_semaphore_fd */
    EXT_EXTERNAL_HOST_MEMORY   = 1ULL <<  4, /* VK_EXT_external_memory_host */
    EXT_PUSH_DESCRIPTORS       = 1ULL <<  5, /* VK_KHR_push_descriptor */
    EXT_HOST_QUERY_RESET       = 1ULL <<  6, /* VK_EXT_host_query_reset */

    EXT_NO_FLAG                = 1ULL << 63,
};
typedef struct VulkanOptExtension {
    const char *name;
    uint64_t flag;
} VulkanOptExtension;
static const VulkanOptExtension optional_instance_exts[] = {
    /* no optional instance extensions at the moment */
};
static const VulkanOptExtension optional_device_exts[] = {
    { VK_KHR_EXTERNAL_MEMORY_FD_EXTENSION_NAME,        EXT_EXTERNAL_FD_MEMORY,     },
    { VK_EXT_EXTERNAL_MEMORY_DMA_BUF_EXTENSION_NAME,   EXT_EXTERNAL_DMABUF_MEMORY, },
    { VK_EXT_IMAGE_DRM_FORMAT_MODIFIER_EXTENSION_NAME, EXT_DRM_MODIFIER_FLAGS,     },
    { VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME,     EXT_EXTERNAL_FD_SEM,        },
    { VK_EXT_EXTERNAL_MEMORY_HOST_EXTENSION_NAME,      EXT_EXTERNAL_HOST_MEMORY,   },
    { VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME,           EXT_PUSH_DESCRIPTORS,       },
    { VK_EXT_HOST_QUERY_RESET_EXTENSION_NAME,          EXT_HOST_QUERY_RESET,       },
};
/* Converts return values to strings */
static const char *vk_ret2str(VkResult res)
{
#define CASE(VAL) case VAL: return #VAL
    switch (res) {
    CASE(VK_SUCCESS);
    CASE(VK_NOT_READY);
    CASE(VK_TIMEOUT);
    CASE(VK_EVENT_SET);
    CASE(VK_EVENT_RESET);
    CASE(VK_INCOMPLETE);
    CASE(VK_ERROR_OUT_OF_HOST_MEMORY);
    CASE(VK_ERROR_OUT_OF_DEVICE_MEMORY);
    CASE(VK_ERROR_INITIALIZATION_FAILED);
    CASE(VK_ERROR_DEVICE_LOST);
    CASE(VK_ERROR_MEMORY_MAP_FAILED);
    CASE(VK_ERROR_LAYER_NOT_PRESENT);
    CASE(VK_ERROR_EXTENSION_NOT_PRESENT);
    CASE(VK_ERROR_FEATURE_NOT_PRESENT);
    CASE(VK_ERROR_INCOMPATIBLE_DRIVER);
    CASE(VK_ERROR_TOO_MANY_OBJECTS);
    CASE(VK_ERROR_FORMAT_NOT_SUPPORTED);
    CASE(VK_ERROR_FRAGMENTED_POOL);
    CASE(VK_ERROR_SURFACE_LOST_KHR);
    CASE(VK_ERROR_NATIVE_WINDOW_IN_USE_KHR);
    CASE(VK_SUBOPTIMAL_KHR);
    CASE(VK_ERROR_OUT_OF_DATE_KHR);
    CASE(VK_ERROR_INCOMPATIBLE_DISPLAY_KHR);
    CASE(VK_ERROR_VALIDATION_FAILED_EXT);
    CASE(VK_ERROR_INVALID_SHADER_NV);
    CASE(VK_ERROR_OUT_OF_POOL_MEMORY);
    CASE(VK_ERROR_INVALID_EXTERNAL_HANDLE);
    CASE(VK_ERROR_NOT_PERMITTED_EXT);
    CASE(VK_ERROR_INVALID_DRM_FORMAT_MODIFIER_PLANE_LAYOUT_EXT);
    CASE(VK_ERROR_INVALID_DEVICE_ADDRESS_EXT);
    CASE(VK_ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT);
    default: return "Unknown error";
    }
#undef CASE
}
static VkBool32 vk_dbg_callback(VkDebugUtilsMessageSeverityFlagBitsEXT severity,
                                VkDebugUtilsMessageTypeFlagsEXT messageType,
                                const VkDebugUtilsMessengerCallbackDataEXT *data,
                                void *priv)
{
    int l;
    AVHWDeviceContext *ctx = priv;

    switch (severity) {
    case VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT: l = AV_LOG_VERBOSE; break;
    case VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT:    l = AV_LOG_INFO;    break;
    case VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT: l = AV_LOG_WARNING; break;
    case VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT:   l = AV_LOG_ERROR;   break;
    default:                                              l = AV_LOG_DEBUG;   break;
    }

    av_log(ctx, l, "%s\n", data->pMessage);
    for (int i = 0; i < data->cmdBufLabelCount; i++)
        av_log(ctx, l, "\t%i: %s\n", i, data->pCmdBufLabels[i].pLabelName);

    return 0;
}
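/* Builds the list of extensions to enable: all supported optional extensions,
 * the debug-utils extension when debugging was requested, and any extensions
 * the user passed via the "instance_extensions"/"device_extensions" options,
 * which are parsed as '+'-separated names. */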
static int check_extensions(AVHWDeviceContext *ctx, int dev, AVDictionary *opts,
                            const char * const **dst, uint32_t *num, int debug)
{
    const char *tstr;
    const char *mod;
    const char **extension_names = NULL;
    VulkanDevicePriv *p = ctx->internal->priv;
    AVVulkanDeviceContext *hwctx = ctx->hwctx;
    int err = 0, found, extensions_found = 0;

    int optional_exts_num;
    uint32_t sup_ext_count;
    char *user_exts_str = NULL;
    AVDictionaryEntry *user_exts;
    VkExtensionProperties *sup_ext;
    const VulkanOptExtension *optional_exts;

    if (!dev) {
        mod = "instance";
        optional_exts = optional_instance_exts;
        optional_exts_num = FF_ARRAY_ELEMS(optional_instance_exts);
        user_exts = av_dict_get(opts, "instance_extensions", NULL, 0);
        if (user_exts) {
            user_exts_str = av_strdup(user_exts->value);
            if (!user_exts_str) {
                err = AVERROR(ENOMEM);
                goto fail;
            }
        }
        vkEnumerateInstanceExtensionProperties(NULL, &sup_ext_count, NULL);
        sup_ext = av_malloc_array(sup_ext_count, sizeof(VkExtensionProperties));
        if (!sup_ext)
            return AVERROR(ENOMEM);
        vkEnumerateInstanceExtensionProperties(NULL, &sup_ext_count, sup_ext);
    } else {
        mod = "device";
        optional_exts = optional_device_exts;
        optional_exts_num = FF_ARRAY_ELEMS(optional_device_exts);
        user_exts = av_dict_get(opts, "device_extensions", NULL, 0);
        if (user_exts) {
            user_exts_str = av_strdup(user_exts->value);
            if (!user_exts_str) {
                err = AVERROR(ENOMEM);
                goto fail;
            }
        }
        vkEnumerateDeviceExtensionProperties(hwctx->phys_dev, NULL,
                                             &sup_ext_count, NULL);
        sup_ext = av_malloc_array(sup_ext_count, sizeof(VkExtensionProperties));
        if (!sup_ext)
            return AVERROR(ENOMEM);
        vkEnumerateDeviceExtensionProperties(hwctx->phys_dev, NULL,
                                             &sup_ext_count, sup_ext);
    }

    for (int i = 0; i < optional_exts_num; i++) {
        tstr = optional_exts[i].name;
        found = 0;
        for (int j = 0; j < sup_ext_count; j++) {
            if (!strcmp(tstr, sup_ext[j].extensionName)) {
                found = 1;
                break;
            }
        }
        if (!found)
            continue;

        av_log(ctx, AV_LOG_VERBOSE, "Using %s extension \"%s\"\n", mod, tstr);
        p->extensions |= optional_exts[i].flag;
        ADD_VAL_TO_LIST(extension_names, extensions_found, tstr);
    }

    if (debug && !dev) {
        tstr = VK_EXT_DEBUG_UTILS_EXTENSION_NAME;
        found = 0;
        for (int j = 0; j < sup_ext_count; j++) {
            if (!strcmp(tstr, sup_ext[j].extensionName)) {
                found = 1;
                break;
            }
        }
        if (found) {
            av_log(ctx, AV_LOG_VERBOSE, "Using %s extension \"%s\"\n", mod, tstr);
            ADD_VAL_TO_LIST(extension_names, extensions_found, tstr);
        } else {
            av_log(ctx, AV_LOG_ERROR, "Debug extension \"%s\" not found!\n",
                   tstr);
            err = AVERROR(EINVAL);
            goto fail;
        }
    }

    if (user_exts_str) {
        char *save, *token = av_strtok(user_exts_str, "+", &save);
        while (token) {
            found = 0;
            for (int j = 0; j < sup_ext_count; j++) {
                if (!strcmp(token, sup_ext[j].extensionName)) {
                    found = 1;
                    break;
                }
            }
            if (found) {
                av_log(ctx, AV_LOG_VERBOSE, "Using %s extension \"%s\"\n", mod, token);
                ADD_VAL_TO_LIST(extension_names, extensions_found, token);
            } else {
                av_log(ctx, AV_LOG_WARNING, "%s extension \"%s\" not found, excluding.\n",
                       mod, token);
            }
            token = av_strtok(NULL, "+", &save);
        }
    }

    *dst = extension_names;
    *num = extensions_found;

    av_free(user_exts_str);
    av_free(sup_ext);
    return 0;

fail:
    if (extension_names)
        for (int i = 0; i < extensions_found; i++)
            av_free((void *)extension_names[i]);
    av_free(extension_names);
    av_free(user_exts_str);
    av_free(sup_ext);
    return err;
}
/* Creates a VkInstance */
static int create_instance(AVHWDeviceContext *ctx, AVDictionary *opts)
{
    int err = 0;
    VkResult ret;
    VulkanDevicePriv *p = ctx->internal->priv;
    AVVulkanDeviceContext *hwctx = ctx->hwctx;
    AVDictionaryEntry *debug_opt = av_dict_get(opts, "debug", NULL, 0);
    const int debug_mode = debug_opt && strtol(debug_opt->value, NULL, 10);
    VkApplicationInfo application_info = {
        .sType         = VK_STRUCTURE_TYPE_APPLICATION_INFO,
        .pEngineName   = "libavutil",
        .apiVersion    = VK_API_VERSION_1_1,
        .engineVersion = VK_MAKE_VERSION(LIBAVUTIL_VERSION_MAJOR,
                                         LIBAVUTIL_VERSION_MINOR,
                                         LIBAVUTIL_VERSION_MICRO),
    };
    VkInstanceCreateInfo inst_props = {
        .sType            = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO,
        .pApplicationInfo = &application_info,
    };

    /* Check for present/missing extensions */
    err = check_extensions(ctx, 0, opts, &inst_props.ppEnabledExtensionNames,
                           &inst_props.enabledExtensionCount, debug_mode);
    if (err < 0)
        return err;

    if (debug_mode) {
        static const char *layers[] = { "VK_LAYER_KHRONOS_validation" };
        inst_props.ppEnabledLayerNames = layers;
        inst_props.enabledLayerCount = FF_ARRAY_ELEMS(layers);
    }

    /* Try to create the instance */
    ret = vkCreateInstance(&inst_props, hwctx->alloc, &hwctx->inst);

    /* Check for errors */
    if (ret != VK_SUCCESS) {
        av_log(ctx, AV_LOG_ERROR, "Instance creation failure: %s\n",
               vk_ret2str(ret));
        for (int i = 0; i < inst_props.enabledExtensionCount; i++)
            av_free((void *)inst_props.ppEnabledExtensionNames[i]);
        av_free((void *)inst_props.ppEnabledExtensionNames);
        return AVERROR_EXTERNAL;
    }

    if (debug_mode) {
        VkDebugUtilsMessengerCreateInfoEXT dbg = {
            .sType           = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT,
            .messageSeverity = VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT |
                               VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT    |
                               VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT |
                               VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT,
            .messageType     = VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT    |
                               VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT |
                               VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT,
            .pfnUserCallback = vk_dbg_callback,
            .pUserData       = ctx,
        };
        VK_LOAD_PFN(hwctx->inst, vkCreateDebugUtilsMessengerEXT);
        pfn_vkCreateDebugUtilsMessengerEXT(hwctx->inst, &dbg,
                                           hwctx->alloc, &p->debug_ctx);
    }

    hwctx->enabled_inst_extensions = inst_props.ppEnabledExtensionNames;
    hwctx->nb_enabled_inst_extensions = inst_props.enabledExtensionCount;

    return 0;
}
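/* Usage sketch (illustrative, not part of this file): creating a Vulkan
 * device with validation enabled from user code:
 *
 *     AVBufferRef *dev_ref = NULL;
 *     AVDictionary *opts = NULL;
 *     av_dict_set(&opts, "debug", "1", 0);
 *     int err = av_hwdevice_ctx_create(&dev_ref, AV_HWDEVICE_TYPE_VULKAN,
 *                                      NULL, opts, 0);
 *     av_dict_free(&opts);
 */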
typedef struct VulkanDeviceSelection {
    uint8_t uuid[VK_UUID_SIZE]; /* Will use this first unless !has_uuid */
    int has_uuid;
    const char *name;           /* Will use this second unless NULL */
    uint32_t pci_device;        /* Will use this third unless 0x0 */
    uint32_t vendor_id;         /* Last resort to find something deterministic */
    int index;                  /* Finally fall back to index */
} VulkanDeviceSelection;
static const char *vk_dev_type(enum VkPhysicalDeviceType type)
{
    switch (type) {
    case VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU: return "integrated";
    case VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU:   return "discrete";
    case VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU:    return "virtual";
    case VK_PHYSICAL_DEVICE_TYPE_CPU:            return "software";
    default:                                     return "unknown";
    }
}
/* Finds a device */
static int find_device(AVHWDeviceContext *ctx, VulkanDeviceSelection *select)
{
    int err = 0, choice = -1;
    uint32_t num;
    VkResult ret;
    VkPhysicalDevice *devices = NULL;
    VkPhysicalDeviceIDProperties *idp = NULL;
    VkPhysicalDeviceProperties2 *prop = NULL;
    AVVulkanDeviceContext *hwctx = ctx->hwctx;

    ret = vkEnumeratePhysicalDevices(hwctx->inst, &num, NULL);
    if (ret != VK_SUCCESS || !num) {
        av_log(ctx, AV_LOG_ERROR, "No devices found: %s!\n", vk_ret2str(ret));
        return AVERROR(ENODEV);
    }

    devices = av_malloc_array(num, sizeof(VkPhysicalDevice));
    if (!devices)
        return AVERROR(ENOMEM);

    ret = vkEnumeratePhysicalDevices(hwctx->inst, &num, devices);
    if (ret != VK_SUCCESS) {
        av_log(ctx, AV_LOG_ERROR, "Failed enumerating devices: %s\n",
               vk_ret2str(ret));
        err = AVERROR(ENODEV);
        goto end;
    }

    prop = av_mallocz_array(num, sizeof(*prop));
    if (!prop) {
        err = AVERROR(ENOMEM);
        goto end;
    }

    idp = av_mallocz_array(num, sizeof(*idp));
    if (!idp) {
        err = AVERROR(ENOMEM);
        goto end;
    }

    av_log(ctx, AV_LOG_VERBOSE, "GPU listing:\n");
    for (int i = 0; i < num; i++) {
        idp[i].sType  = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES;
        prop[i].sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2;
        prop[i].pNext = &idp[i];

        vkGetPhysicalDeviceProperties2(devices[i], &prop[i]);
        av_log(ctx, AV_LOG_VERBOSE, "    %d: %s (%s) (0x%x)\n", i,
               prop[i].properties.deviceName,
               vk_dev_type(prop[i].properties.deviceType),
               prop[i].properties.deviceID);
    }

    if (select->has_uuid) {
        for (int i = 0; i < num; i++) {
            /* deviceUUID is raw bytes, not a C string, so compare with memcmp */
            if (!memcmp(idp[i].deviceUUID, select->uuid, VK_UUID_SIZE)) {
                choice = i;
                goto end;
            }
        }
        av_log(ctx, AV_LOG_ERROR, "Unable to find device by given UUID!\n");
        err = AVERROR(ENODEV);
        goto end;
    } else if (select->name) {
        av_log(ctx, AV_LOG_VERBOSE, "Requested device: %s\n", select->name);
        for (int i = 0; i < num; i++) {
            if (strstr(prop[i].properties.deviceName, select->name)) {
                choice = i;
                goto end;
            }
        }
        av_log(ctx, AV_LOG_ERROR, "Unable to find device \"%s\"!\n",
               select->name);
        err = AVERROR(ENODEV);
        goto end;
    } else if (select->pci_device) {
        av_log(ctx, AV_LOG_VERBOSE, "Requested device: 0x%x\n", select->pci_device);
        for (int i = 0; i < num; i++) {
            if (select->pci_device == prop[i].properties.deviceID) {
                choice = i;
                goto end;
            }
        }
        av_log(ctx, AV_LOG_ERROR, "Unable to find device with PCI ID 0x%x!\n",
               select->pci_device);
        err = AVERROR(EINVAL);
        goto end;
    } else if (select->vendor_id) {
        av_log(ctx, AV_LOG_VERBOSE, "Requested vendor: 0x%x\n", select->vendor_id);
        for (int i = 0; i < num; i++) {
            if (select->vendor_id == prop[i].properties.vendorID) {
                choice = i;
                goto end;
            }
        }
        av_log(ctx, AV_LOG_ERROR, "Unable to find device with Vendor ID 0x%x!\n",
               select->vendor_id);
        err = AVERROR(ENODEV);
        goto end;
    } else {
        if (select->index < num) {
            choice = select->index;
            goto end;
        }
        av_log(ctx, AV_LOG_ERROR, "Unable to find device with index %i!\n",
               select->index);
        err = AVERROR(ENODEV);
        goto end;
    }

end:
    if (choice > -1)
        hwctx->phys_dev = devices[choice];

    av_free(devices);
    av_free(prop);
    av_free(idp);

    return err;
}
static int search_queue_families(AVHWDeviceContext *ctx, VkDeviceCreateInfo *cd)
{
    uint32_t num;
    float *weights;
    VkQueueFamilyProperties *qs = NULL;
    AVVulkanDeviceContext *hwctx = ctx->hwctx;
    int graph_index = -1, comp_index = -1, tx_index = -1;
    VkDeviceQueueCreateInfo *pc = (VkDeviceQueueCreateInfo *)cd->pQueueCreateInfos;

    /* First get the number of queue families */
    vkGetPhysicalDeviceQueueFamilyProperties(hwctx->phys_dev, &num, NULL);
    if (!num) {
        av_log(ctx, AV_LOG_ERROR, "Failed to get queues!\n");
        return AVERROR_EXTERNAL;
    }

    /* Then allocate memory */
    qs = av_malloc_array(num, sizeof(VkQueueFamilyProperties));
    if (!qs)
        return AVERROR(ENOMEM);

    /* Finally retrieve the queue families */
    vkGetPhysicalDeviceQueueFamilyProperties(hwctx->phys_dev, &num, qs);

#define SEARCH_FLAGS(expr, out)                                                \
    for (int i = 0; i < num; i++) {                                            \
        const VkQueueFlagBits flags = qs[i].queueFlags;                        \
        if (expr) {                                                            \
            out = i;                                                           \
            break;                                                             \
        }                                                                      \
    }

    SEARCH_FLAGS(flags & VK_QUEUE_GRAPHICS_BIT, graph_index)

    SEARCH_FLAGS((flags & VK_QUEUE_COMPUTE_BIT) && (i != graph_index),
                 comp_index)

    SEARCH_FLAGS((flags & VK_QUEUE_TRANSFER_BIT) && (i != graph_index) &&
                 (i != comp_index), tx_index)

#undef SEARCH_FLAGS
#define ADD_QUEUE(fidx, graph, comp, tx)                                       \
    av_log(ctx, AV_LOG_VERBOSE, "Using queue family %i (total queues: %i) for %s%s%s\n", \
           fidx, qs[fidx].queueCount, graph ? "graphics " : "",                \
           comp ? "compute " : "", tx ? "transfers " : "");                    \
    av_log(ctx, AV_LOG_VERBOSE, "    QF %i flags: %s%s%s%s\n", fidx,           \
           ((qs[fidx].queueFlags) & VK_QUEUE_GRAPHICS_BIT) ? "(graphics) " : "", \
           ((qs[fidx].queueFlags) & VK_QUEUE_COMPUTE_BIT) ? "(compute) " : "", \
           ((qs[fidx].queueFlags) & VK_QUEUE_TRANSFER_BIT) ? "(transfers) " : "", \
           ((qs[fidx].queueFlags) & VK_QUEUE_SPARSE_BINDING_BIT) ? "(sparse) " : ""); \
    pc[cd->queueCreateInfoCount].queueFamilyIndex = fidx;                      \
    pc[cd->queueCreateInfoCount].queueCount = qs[fidx].queueCount;             \
    weights = av_malloc(qs[fidx].queueCount * sizeof(float));                  \
    pc[cd->queueCreateInfoCount].pQueuePriorities = weights;                   \
    if (!weights)                                                              \
        goto fail;                                                             \
    for (int i = 0; i < qs[fidx].queueCount; i++)                              \
        weights[i] = 1.0f;                                                     \
    cd->queueCreateInfoCount++;

    ADD_QUEUE(graph_index, 1, comp_index < 0, tx_index < 0 && comp_index < 0)
    hwctx->queue_family_index      = graph_index;
    hwctx->queue_family_comp_index = graph_index;
    hwctx->queue_family_tx_index   = graph_index;
    hwctx->nb_graphics_queues      = qs[graph_index].queueCount;

    if (comp_index != -1) {
        ADD_QUEUE(comp_index, 0, 1, tx_index < 0)
        hwctx->queue_family_tx_index   = comp_index;
        hwctx->queue_family_comp_index = comp_index;
        hwctx->nb_comp_queues          = qs[comp_index].queueCount;
    }

    if (tx_index != -1) {
        ADD_QUEUE(tx_index, 0, 0, 1)
        hwctx->queue_family_tx_index = tx_index;
        hwctx->nb_tx_queues          = qs[tx_index].queueCount;
    }

#undef ADD_QUEUE
    av_free(qs);

    return 0;

fail:
    av_freep(&pc[0].pQueuePriorities);
    av_freep(&pc[1].pQueuePriorities);
    av_freep(&pc[2].pQueuePriorities);
    av_free(qs);
    return AVERROR(ENOMEM);
}
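/* Creates a command pool for the given queue family and allocates one
 * command buffer and one queue context per queue; submissions later rotate
 * through these so asynchronous work can overlap. */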
static int create_exec_ctx(AVHWFramesContext *hwfc, VulkanExecCtx *cmd,
                           int queue_family_index, int num_queues)
{
    VkResult ret;
    AVVulkanDeviceContext *hwctx = hwfc->device_ctx->hwctx;

    VkCommandPoolCreateInfo cqueue_create = {
        .sType            = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,
        .flags            = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT,
        .queueFamilyIndex = queue_family_index,
    };
    VkCommandBufferAllocateInfo cbuf_create = {
        .sType              = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,
        .level              = VK_COMMAND_BUFFER_LEVEL_PRIMARY,
        .commandBufferCount = num_queues,
    };

    cmd->nb_queues = num_queues;

    /* Create command pool */
    ret = vkCreateCommandPool(hwctx->act_dev, &cqueue_create,
                              hwctx->alloc, &cmd->pool);
    if (ret != VK_SUCCESS) {
        av_log(hwfc, AV_LOG_ERROR, "Command pool creation failure: %s\n",
               vk_ret2str(ret));
        return AVERROR_EXTERNAL;
    }

    cmd->bufs = av_mallocz(num_queues * sizeof(*cmd->bufs));
    if (!cmd->bufs)
        return AVERROR(ENOMEM);

    cbuf_create.commandPool = cmd->pool;

    /* Allocate command buffer */
    ret = vkAllocateCommandBuffers(hwctx->act_dev, &cbuf_create, cmd->bufs);
    if (ret != VK_SUCCESS) {
        av_log(hwfc, AV_LOG_ERROR, "Command buffer alloc failure: %s\n",
               vk_ret2str(ret));
        av_freep(&cmd->bufs);
        return AVERROR_EXTERNAL;
    }

    cmd->queues = av_mallocz(num_queues * sizeof(*cmd->queues));
    if (!cmd->queues)
        return AVERROR(ENOMEM);

    for (int i = 0; i < num_queues; i++) {
        VulkanQueueCtx *q = &cmd->queues[i];
        vkGetDeviceQueue(hwctx->act_dev, queue_family_index, i, &q->queue);
        q->was_synchronous = 1;
    }

    return 0;
}
static void free_exec_ctx(AVHWFramesContext *hwfc, VulkanExecCtx *cmd)
{
    AVVulkanDeviceContext *hwctx = hwfc->device_ctx->hwctx;

    if (cmd->queues) {
        for (int i = 0; i < cmd->nb_queues; i++) {
            VulkanQueueCtx *q = &cmd->queues[i];

            /* Make sure all queues have finished executing */
            if (q->fence && !q->was_synchronous) {
                vkWaitForFences(hwctx->act_dev, 1, &q->fence, VK_TRUE, UINT64_MAX);
                vkResetFences(hwctx->act_dev, 1, &q->fence);
            }

            /* Free the fence */
            if (q->fence)
                vkDestroyFence(hwctx->act_dev, q->fence, hwctx->alloc);

            /* Free buffer dependencies */
            for (int j = 0; j < q->nb_buf_deps; j++)
                av_buffer_unref(&q->buf_deps[j]);
            av_free(q->buf_deps);
        }
    }

    if (cmd->bufs)
        vkFreeCommandBuffers(hwctx->act_dev, cmd->pool, cmd->nb_queues, cmd->bufs);
    if (cmd->pool)
        vkDestroyCommandPool(hwctx->act_dev, cmd->pool, hwctx->alloc);

    av_freep(&cmd->queues);
    av_freep(&cmd->bufs);
    cmd->pool = NULL;
}
static VkCommandBuffer get_buf_exec_ctx(AVHWFramesContext *hwfc, VulkanExecCtx *cmd)
{
    return cmd->bufs[cmd->cur_queue_idx];
}

static void unref_exec_ctx_deps(AVHWFramesContext *hwfc, VulkanExecCtx *cmd)
{
    VulkanQueueCtx *q = &cmd->queues[cmd->cur_queue_idx];

    for (int j = 0; j < q->nb_buf_deps; j++)
        av_buffer_unref(&q->buf_deps[j]);
    q->nb_buf_deps = 0;
}
static int wait_start_exec_ctx(AVHWFramesContext *hwfc, VulkanExecCtx *cmd)
{
    VkResult ret;
    AVVulkanDeviceContext *hwctx = hwfc->device_ctx->hwctx;
    VulkanQueueCtx *q = &cmd->queues[cmd->cur_queue_idx];

    VkCommandBufferBeginInfo cmd_start = {
        .sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
        .flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT,
    };

    /* Create the fence and don't wait for it initially */
    if (!q->fence) {
        VkFenceCreateInfo fence_spawn = {
            .sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO,
        };
        ret = vkCreateFence(hwctx->act_dev, &fence_spawn, hwctx->alloc,
                            &q->fence);
        if (ret != VK_SUCCESS) {
            av_log(hwfc, AV_LOG_ERROR, "Failed to queue frame fence: %s\n",
                   vk_ret2str(ret));
            return AVERROR_EXTERNAL;
        }
    } else if (!q->was_synchronous) {
        vkWaitForFences(hwctx->act_dev, 1, &q->fence, VK_TRUE, UINT64_MAX);
        vkResetFences(hwctx->act_dev, 1, &q->fence);
    }

    /* Discard queue dependencies */
    unref_exec_ctx_deps(hwfc, cmd);

    ret = vkBeginCommandBuffer(cmd->bufs[cmd->cur_queue_idx], &cmd_start);
    if (ret != VK_SUCCESS) {
        av_log(hwfc, AV_LOG_ERROR, "Unable to init command buffer: %s\n",
               vk_ret2str(ret));
        return AVERROR_EXTERNAL;
    }

    return 0;
}
static int add_buf_dep_exec_ctx(AVHWFramesContext *hwfc, VulkanExecCtx *cmd,
                                AVBufferRef * const *deps, int nb_deps)
{
    AVBufferRef **dst;
    VulkanQueueCtx *q = &cmd->queues[cmd->cur_queue_idx];

    if (!deps || !nb_deps)
        return 0;

    dst = av_fast_realloc(q->buf_deps, &q->buf_deps_alloc_size,
                          (q->nb_buf_deps + nb_deps) * sizeof(*dst));
    if (!dst)
        goto err;

    q->buf_deps = dst;

    for (int i = 0; i < nb_deps; i++) {
        q->buf_deps[q->nb_buf_deps] = av_buffer_ref(deps[i]);
        if (!q->buf_deps[q->nb_buf_deps])
            goto err;
        q->nb_buf_deps++;
    }

    return 0;

err:
    unref_exec_ctx_deps(hwfc, cmd);
    return AVERROR(ENOMEM);
}
static int submit_exec_ctx(AVHWFramesContext *hwfc, VulkanExecCtx *cmd,
                           VkSubmitInfo *s_info, int synchronous)
{
    VkResult ret;
    VulkanQueueCtx *q = &cmd->queues[cmd->cur_queue_idx];

    ret = vkEndCommandBuffer(cmd->bufs[cmd->cur_queue_idx]);
    if (ret != VK_SUCCESS) {
        av_log(hwfc, AV_LOG_ERROR, "Unable to finish command buffer: %s\n",
               vk_ret2str(ret));
        unref_exec_ctx_deps(hwfc, cmd);
        return AVERROR_EXTERNAL;
    }

    s_info->pCommandBuffers = &cmd->bufs[cmd->cur_queue_idx];
    s_info->commandBufferCount = 1;

    ret = vkQueueSubmit(q->queue, 1, s_info, q->fence);
    if (ret != VK_SUCCESS) {
        unref_exec_ctx_deps(hwfc, cmd);
        return AVERROR_EXTERNAL;
    }

    q->was_synchronous = synchronous;

    if (synchronous) {
        AVVulkanDeviceContext *hwctx = hwfc->device_ctx->hwctx;
        vkWaitForFences(hwctx->act_dev, 1, &q->fence, VK_TRUE, UINT64_MAX);
        vkResetFences(hwctx->act_dev, 1, &q->fence);
        unref_exec_ctx_deps(hwfc, cmd);
    } else { /* Rotate queues */
        cmd->cur_queue_idx = (cmd->cur_queue_idx + 1) % cmd->nb_queues;
    }

    return 0;
}
static void vulkan_device_free(AVHWDeviceContext *ctx)
{
    VulkanDevicePriv *p = ctx->internal->priv;
    AVVulkanDeviceContext *hwctx = ctx->hwctx;

    vkDestroyDevice(hwctx->act_dev, hwctx->alloc);

    if (p->debug_ctx) {
        VK_LOAD_PFN(hwctx->inst, vkDestroyDebugUtilsMessengerEXT);
        pfn_vkDestroyDebugUtilsMessengerEXT(hwctx->inst, p->debug_ctx,
                                            hwctx->alloc);
    }

    vkDestroyInstance(hwctx->inst, hwctx->alloc);

    for (int i = 0; i < hwctx->nb_enabled_inst_extensions; i++)
        av_free((void *)hwctx->enabled_inst_extensions[i]);
    av_free((void *)hwctx->enabled_inst_extensions);

    for (int i = 0; i < hwctx->nb_enabled_dev_extensions; i++)
        av_free((void *)hwctx->enabled_dev_extensions[i]);
    av_free((void *)hwctx->enabled_dev_extensions);
}
static int vulkan_device_create_internal(AVHWDeviceContext *ctx,
                                         VulkanDeviceSelection *dev_select,
                                         AVDictionary *opts, int flags)
{
    int err = 0;
    VkResult ret;
    AVDictionaryEntry *opt_d;
    VulkanDevicePriv *p = ctx->internal->priv;
    AVVulkanDeviceContext *hwctx = ctx->hwctx;
    VkPhysicalDeviceFeatures dev_features = { 0 };
    VkDeviceQueueCreateInfo queue_create_info[3] = {
        { .sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO, },
        { .sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO, },
        { .sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO, },
    };

    VkDeviceCreateInfo dev_info = {
        .sType                = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO,
        .pNext                = &hwctx->device_features,
        .pQueueCreateInfos    = queue_create_info,
        .queueCreateInfoCount = 0,
    };

    hwctx->device_features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
    ctx->free = vulkan_device_free;

    /* Create an instance if not given one */
    if ((err = create_instance(ctx, opts)))
        goto end;

    /* Find a device (if not given one) */
    if ((err = find_device(ctx, dev_select)))
        goto end;

    vkGetPhysicalDeviceFeatures(hwctx->phys_dev, &dev_features);
#define COPY_FEATURE(DST, NAME) (DST).features.NAME = dev_features.NAME;
    COPY_FEATURE(hwctx->device_features, shaderImageGatherExtended)
    COPY_FEATURE(hwctx->device_features, shaderStorageImageReadWithoutFormat)
    COPY_FEATURE(hwctx->device_features, shaderStorageImageWriteWithoutFormat)
    COPY_FEATURE(hwctx->device_features, fragmentStoresAndAtomics)
    COPY_FEATURE(hwctx->device_features, vertexPipelineStoresAndAtomics)
    COPY_FEATURE(hwctx->device_features, shaderInt64)
#undef COPY_FEATURE

    /* Search queue family */
    if ((err = search_queue_families(ctx, &dev_info)))
        goto end;

    if ((err = check_extensions(ctx, 1, opts, &dev_info.ppEnabledExtensionNames,
                                &dev_info.enabledExtensionCount, 0))) {
        av_free((void *)queue_create_info[0].pQueuePriorities);
        av_free((void *)queue_create_info[1].pQueuePriorities);
        av_free((void *)queue_create_info[2].pQueuePriorities);
        goto end;
    }

    ret = vkCreateDevice(hwctx->phys_dev, &dev_info, hwctx->alloc,
                         &hwctx->act_dev);

    av_free((void *)queue_create_info[0].pQueuePriorities);
    av_free((void *)queue_create_info[1].pQueuePriorities);
    av_free((void *)queue_create_info[2].pQueuePriorities);

    if (ret != VK_SUCCESS) {
        av_log(ctx, AV_LOG_ERROR, "Device creation failure: %s\n",
               vk_ret2str(ret));
        for (int i = 0; i < dev_info.enabledExtensionCount; i++)
            av_free((void *)dev_info.ppEnabledExtensionNames[i]);
        av_free((void *)dev_info.ppEnabledExtensionNames);
        err = AVERROR_EXTERNAL;
        goto end;
    }

    /* Tiled images setting, use them by default */
    opt_d = av_dict_get(opts, "linear_images", NULL, 0);
    if (opt_d)
        p->use_linear_images = strtol(opt_d->value, NULL, 10);

    hwctx->enabled_dev_extensions = dev_info.ppEnabledExtensionNames;
    hwctx->nb_enabled_dev_extensions = dev_info.enabledExtensionCount;

end:
    return err;
}
static int vulkan_device_init(AVHWDeviceContext *ctx)
{
    uint32_t queue_num;
    AVVulkanDeviceContext *hwctx = ctx->hwctx;
    VulkanDevicePriv *p = ctx->internal->priv;

    /* Set device extension flags */
    for (int i = 0; i < hwctx->nb_enabled_dev_extensions; i++) {
        for (int j = 0; j < FF_ARRAY_ELEMS(optional_device_exts); j++) {
            if (!strcmp(hwctx->enabled_dev_extensions[i],
                        optional_device_exts[j].name)) {
                av_log(ctx, AV_LOG_VERBOSE, "Using device extension %s\n",
                       hwctx->enabled_dev_extensions[i]);
                p->extensions |= optional_device_exts[j].flag;
                break;
            }
        }
    }

    p->props.sType  = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2;
    p->props.pNext  = &p->hprops;
    p->hprops.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT;

    vkGetPhysicalDeviceProperties2(hwctx->phys_dev, &p->props);
    av_log(ctx, AV_LOG_VERBOSE, "Using device: %s\n",
           p->props.properties.deviceName);
    av_log(ctx, AV_LOG_VERBOSE, "Alignments:\n");
    av_log(ctx, AV_LOG_VERBOSE, "    optimalBufferCopyRowPitchAlignment: %li\n",
           p->props.properties.limits.optimalBufferCopyRowPitchAlignment);
    av_log(ctx, AV_LOG_VERBOSE, "    minMemoryMapAlignment:              %li\n",
           p->props.properties.limits.minMemoryMapAlignment);
    if (p->extensions & EXT_EXTERNAL_HOST_MEMORY)
        av_log(ctx, AV_LOG_VERBOSE, "    minImportedHostPointerAlignment:    %li\n",
               p->hprops.minImportedHostPointerAlignment);

    p->dev_is_nvidia = (p->props.properties.vendorID == 0x10de);

    vkGetPhysicalDeviceQueueFamilyProperties(hwctx->phys_dev, &queue_num, NULL);
    if (!queue_num) {
        av_log(ctx, AV_LOG_ERROR, "Failed to get queues!\n");
        return AVERROR_EXTERNAL;
    }

#define CHECK_QUEUE(type, n)                                                   \
    if (n >= queue_num) {                                                      \
        av_log(ctx, AV_LOG_ERROR, "Invalid %s queue index %i (device has %i queues)!\n", \
               type, n, queue_num);                                            \
        return AVERROR(EINVAL);                                                \
    }

    CHECK_QUEUE("graphics", hwctx->queue_family_index)
    CHECK_QUEUE("upload",   hwctx->queue_family_tx_index)
    CHECK_QUEUE("compute",  hwctx->queue_family_comp_index)
#undef CHECK_QUEUE

    p->qfs[p->num_qfs++] = hwctx->queue_family_index;
    if ((hwctx->queue_family_tx_index != hwctx->queue_family_index) &&
        (hwctx->queue_family_tx_index != hwctx->queue_family_comp_index))
        p->qfs[p->num_qfs++] = hwctx->queue_family_tx_index;
    if ((hwctx->queue_family_comp_index != hwctx->queue_family_index) &&
        (hwctx->queue_family_comp_index != hwctx->queue_family_tx_index))
        p->qfs[p->num_qfs++] = hwctx->queue_family_comp_index;

    /* Get device capabilities */
    vkGetPhysicalDeviceMemoryProperties(hwctx->phys_dev, &p->mprops);

    return 0;
}
static int vulkan_device_create(AVHWDeviceContext *ctx, const char *device,
                                AVDictionary *opts, int flags)
{
    VulkanDeviceSelection dev_select = { 0 };
    if (device && device[0]) {
        char *end = NULL;
        dev_select.index = strtol(device, &end, 10);
        if (end == device) {
            dev_select.index = 0;
            dev_select.name  = device;
        }
    }

    return vulkan_device_create_internal(ctx, &dev_select, opts, flags);
}
static int vulkan_device_derive(AVHWDeviceContext *ctx,
                                AVHWDeviceContext *src_ctx,
                                AVDictionary *opts, int flags)
{
    av_unused VulkanDeviceSelection dev_select = { 0 };

    /* If there's only one device on the system, then even if it's not covered
     * by the following checks (e.g. non-PCIe ARM GPU), having an empty
     * dev_select will mean it'll get picked. */
    switch (src_ctx->type) {
    case AV_HWDEVICE_TYPE_VAAPI: {
        AVVAAPIDeviceContext *src_hwctx = src_ctx->hwctx;

        const char *vendor = vaQueryVendorString(src_hwctx->display);
        if (!vendor) {
            av_log(ctx, AV_LOG_ERROR, "Unable to get device info from VAAPI!\n");
            return AVERROR_EXTERNAL;
        }

        if (strstr(vendor, "Intel"))
            dev_select.vendor_id = 0x8086;
        if (strstr(vendor, "AMD"))
            dev_select.vendor_id = 0x1002;

        return vulkan_device_create_internal(ctx, &dev_select, opts, flags);
    }
    case AV_HWDEVICE_TYPE_DRM: {
        AVDRMDeviceContext *src_hwctx = src_ctx->hwctx;

        drmDevice *drm_dev_info;
        int err = drmGetDevice(src_hwctx->fd, &drm_dev_info);
        if (err) {
            av_log(ctx, AV_LOG_ERROR, "Unable to get device info from DRM fd!\n");
            return AVERROR_EXTERNAL;
        }

        if (drm_dev_info->bustype == DRM_BUS_PCI)
            dev_select.pci_device = drm_dev_info->deviceinfo.pci->device_id;

        drmFreeDevice(&drm_dev_info);

        return vulkan_device_create_internal(ctx, &dev_select, opts, flags);
    }
    case AV_HWDEVICE_TYPE_CUDA: {
        AVHWDeviceContext *cuda_cu = src_ctx;
        AVCUDADeviceContext *src_hwctx = src_ctx->hwctx;
        AVCUDADeviceContextInternal *cu_internal = src_hwctx->internal;
        CudaFunctions *cu = cu_internal->cuda_dl;

        int ret = CHECK_CU(cu->cuDeviceGetUuid((CUuuid *)&dev_select.uuid,
                                               cu_internal->cuda_device));
        if (ret < 0) {
            av_log(ctx, AV_LOG_ERROR, "Unable to get UUID from CUDA!\n");
            return AVERROR_EXTERNAL;
        }

        dev_select.has_uuid = 1;

        return vulkan_device_create_internal(ctx, &dev_select, opts, flags);
    }
    default:
        return AVERROR(ENOSYS);
    }
}
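/* Usage sketch (illustrative): deriving a Vulkan device from an existing
 * VAAPI device reference, so both end up on the same physical GPU:
 *
 *     AVBufferRef *vk_ref = NULL;
 *     int err = av_hwdevice_ctx_create_derived(&vk_ref,
 *                                              AV_HWDEVICE_TYPE_VULKAN,
 *                                              vaapi_ref, 0);
 */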
static int vulkan_frames_get_constraints(AVHWDeviceContext *ctx,
                                         const void *hwconfig,
                                         AVHWFramesConstraints *constraints)
{
    int count = 0;
    AVVulkanDeviceContext *hwctx = ctx->hwctx;
    VulkanDevicePriv *p = ctx->internal->priv;

    for (enum AVPixelFormat i = 0; i < AV_PIX_FMT_NB; i++)
        count += pixfmt_is_supported(hwctx, i, p->use_linear_images);

    if (p->dev_is_nvidia)
        count++;

    constraints->valid_sw_formats = av_malloc_array(count + 1,
                                                    sizeof(enum AVPixelFormat));
    if (!constraints->valid_sw_formats)
        return AVERROR(ENOMEM);

    count = 0;
    for (enum AVPixelFormat i = 0; i < AV_PIX_FMT_NB; i++)
        if (pixfmt_is_supported(hwctx, i, p->use_linear_images))
            constraints->valid_sw_formats[count++] = i;

    if (p->dev_is_nvidia)
        constraints->valid_sw_formats[count++] = AV_PIX_FMT_CUDA;

    constraints->valid_sw_formats[count++] = AV_PIX_FMT_NONE;

    constraints->min_width  = 0;
    constraints->min_height = 0;
    constraints->max_width  = p->props.properties.limits.maxImageDimension2D;
    constraints->max_height = p->props.properties.limits.maxImageDimension2D;

    constraints->valid_hw_formats = av_malloc_array(2, sizeof(enum AVPixelFormat));
    if (!constraints->valid_hw_formats)
        return AVERROR(ENOMEM);

    constraints->valid_hw_formats[0] = AV_PIX_FMT_VULKAN;
    constraints->valid_hw_formats[1] = AV_PIX_FMT_NONE;

    return 0;
}
static int alloc_mem(AVHWDeviceContext *ctx, VkMemoryRequirements *req,
                     VkMemoryPropertyFlagBits req_flags, const void *alloc_extension,
                     VkMemoryPropertyFlagBits *mem_flags, VkDeviceMemory *mem)
{
    VkResult ret;
    int index = -1;
    VulkanDevicePriv *p = ctx->internal->priv;
    AVVulkanDeviceContext *dev_hwctx = ctx->hwctx;
    VkMemoryAllocateInfo alloc_info = {
        .sType          = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
        .pNext          = alloc_extension,
        .allocationSize = req->size,
    };

    /* The vulkan spec requires memory types to be sorted in the "optimal"
     * order, so the first matching type we find will be the best/fastest one */
    for (int i = 0; i < p->mprops.memoryTypeCount; i++) {
        const VkMemoryType *type = &p->mprops.memoryTypes[i];

        /* The memory type must be supported by the requirements (bitfield) */
        if (!(req->memoryTypeBits & (1 << i)))
            continue;

        /* The memory type flags must include our properties */
        if ((type->propertyFlags & req_flags) != req_flags)
            continue;

        /* The memory type must be large enough */
        if (req->size > p->mprops.memoryHeaps[type->heapIndex].size)
            continue;

        /* Found a suitable memory type */
        index = i;
        break;
    }

    if (index < 0) {
        av_log(ctx, AV_LOG_ERROR, "No memory type found for flags 0x%x\n",
               req_flags);
        return AVERROR(EINVAL);
    }

    alloc_info.memoryTypeIndex = index;

    ret = vkAllocateMemory(dev_hwctx->act_dev, &alloc_info,
                           dev_hwctx->alloc, mem);
    if (ret != VK_SUCCESS) {
        av_log(ctx, AV_LOG_ERROR, "Failed to allocate memory: %s\n",
               vk_ret2str(ret));
        return AVERROR(ENOMEM);
    }

    *mem_flags |= p->mprops.memoryTypes[index].propertyFlags;

    return 0;
}
static void vulkan_free_internal(AVVkFrameInternal *internal)
{
    if (!internal)
        return;

    if (internal->cuda_fc_ref) {
        AVHWFramesContext *cuda_fc = (AVHWFramesContext *)internal->cuda_fc_ref->data;
        int planes = av_pix_fmt_count_planes(cuda_fc->sw_format);
        AVHWDeviceContext *cuda_cu = cuda_fc->device_ctx;
        AVCUDADeviceContext *cuda_dev = cuda_cu->hwctx;
        AVCUDADeviceContextInternal *cu_internal = cuda_dev->internal;
        CudaFunctions *cu = cu_internal->cuda_dl;

        for (int i = 0; i < planes; i++) {
            if (internal->cu_sem[i])
                CHECK_CU(cu->cuDestroyExternalSemaphore(internal->cu_sem[i]));
            if (internal->cu_mma[i])
                CHECK_CU(cu->cuMipmappedArrayDestroy(internal->cu_mma[i]));
            if (internal->ext_mem[i])
                CHECK_CU(cu->cuDestroyExternalMemory(internal->ext_mem[i]));
        }

        av_buffer_unref(&internal->cuda_fc_ref);
    }

    av_free(internal);
}
static void vulkan_frame_free(void *opaque, uint8_t *data)
{
    AVVkFrame *f = (AVVkFrame *)data;
    AVHWFramesContext *hwfc = opaque;
    AVVulkanDeviceContext *hwctx = hwfc->device_ctx->hwctx;
    int planes = av_pix_fmt_count_planes(hwfc->sw_format);

    vulkan_free_internal(f->internal);

    for (int i = 0; i < planes; i++) {
        vkDestroyImage(hwctx->act_dev, f->img[i], hwctx->alloc);
        vkFreeMemory(hwctx->act_dev, f->mem[i], hwctx->alloc);
        vkDestroySemaphore(hwctx->act_dev, f->sem[i], hwctx->alloc);
    }

    av_free(f);
}
static int alloc_bind_mem(AVHWFramesContext *hwfc, AVVkFrame *f,
                          void *alloc_pnext, size_t alloc_pnext_stride)
{
    int err;
    VkResult ret;
    AVHWDeviceContext *ctx = hwfc->device_ctx;
    VulkanDevicePriv *p = ctx->internal->priv;
    const int planes = av_pix_fmt_count_planes(hwfc->sw_format);
    VkBindImageMemoryInfo bind_info[AV_NUM_DATA_POINTERS] = { { 0 } };

    AVVulkanDeviceContext *hwctx = ctx->hwctx;

    for (int i = 0; i < planes; i++) {
        int use_ded_mem;
        VkImageMemoryRequirementsInfo2 req_desc = {
            .sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2,
            .image = f->img[i],
        };
        VkMemoryDedicatedAllocateInfo ded_alloc = {
            .sType = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO,
            .pNext = (void *)(((uint8_t *)alloc_pnext) + i*alloc_pnext_stride),
        };
        VkMemoryDedicatedRequirements ded_req = {
            .sType = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS,
        };
        VkMemoryRequirements2 req = {
            .sType = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2,
            .pNext = &ded_req,
        };

        vkGetImageMemoryRequirements2(hwctx->act_dev, &req_desc, &req);

        if (f->tiling == VK_IMAGE_TILING_LINEAR)
            req.memoryRequirements.size = FFALIGN(req.memoryRequirements.size,
                                                  p->props.properties.limits.minMemoryMapAlignment);

        /* In case the implementation prefers/requires dedicated allocation */
        use_ded_mem = ded_req.prefersDedicatedAllocation |
                      ded_req.requiresDedicatedAllocation;

        if (use_ded_mem)
            ded_alloc.image = f->img[i];

        /* Allocate memory */
        if ((err = alloc_mem(ctx, &req.memoryRequirements,
                             f->tiling == VK_IMAGE_TILING_LINEAR ?
                             VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT :
                             VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT,
                             use_ded_mem ? &ded_alloc : (void *)ded_alloc.pNext,
                             &f->flags, &f->mem[i])))
            return err;

        f->size[i] = req.memoryRequirements.size;
        bind_info[i].sType  = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO;
        bind_info[i].image  = f->img[i];
        bind_info[i].memory = f->mem[i];
    }

    /* Bind the allocated memory to the images */
    ret = vkBindImageMemory2(hwctx->act_dev, planes, bind_info);
    if (ret != VK_SUCCESS) {
        av_log(ctx, AV_LOG_ERROR, "Failed to bind memory: %s\n",
               vk_ret2str(ret));
        return AVERROR_EXTERNAL;
    }

    return 0;
}
enum PrepMode {
    PREP_MODE_WRITE,
    PREP_MODE_RO_SHADER,
    PREP_MODE_EXTERNAL_EXPORT,
};
static int prepare_frame(AVHWFramesContext *hwfc, VulkanExecCtx *ectx,
                         AVVkFrame *frame, enum PrepMode pmode)
{
    int err;
    uint32_t dst_qf;
    VkImageLayout new_layout;
    VkAccessFlags new_access;
    const int planes = av_pix_fmt_count_planes(hwfc->sw_format);

    VkImageMemoryBarrier img_bar[AV_NUM_DATA_POINTERS] = { 0 };

    VkSubmitInfo s_info = {
        .sType                = VK_STRUCTURE_TYPE_SUBMIT_INFO,
        .pSignalSemaphores    = frame->sem,
        .signalSemaphoreCount = planes,
    };

    VkPipelineStageFlagBits wait_st[AV_NUM_DATA_POINTERS];
    for (int i = 0; i < planes; i++)
        wait_st[i] = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;

    switch (pmode) {
    case PREP_MODE_WRITE:
        new_layout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
        new_access = VK_ACCESS_TRANSFER_WRITE_BIT;
        dst_qf     = VK_QUEUE_FAMILY_IGNORED;
        break;
    case PREP_MODE_RO_SHADER:
        new_layout = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL;
        new_access = VK_ACCESS_TRANSFER_READ_BIT;
        dst_qf     = VK_QUEUE_FAMILY_IGNORED;
        break;
    case PREP_MODE_EXTERNAL_EXPORT:
        new_layout = VK_IMAGE_LAYOUT_GENERAL;
        new_access = VK_ACCESS_MEMORY_READ_BIT | VK_ACCESS_MEMORY_WRITE_BIT;
        dst_qf     = VK_QUEUE_FAMILY_EXTERNAL_KHR;
        s_info.pWaitSemaphores    = frame->sem;
        s_info.pWaitDstStageMask  = wait_st;
        s_info.waitSemaphoreCount = planes;
        break;
    }

    if ((err = wait_start_exec_ctx(hwfc, ectx)))
        return err;

    /* Change the image layout to something more optimal for writes.
     * This also signals the newly created semaphore, making it usable
     * for synchronization */
    for (int i = 0; i < planes; i++) {
        img_bar[i].sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
        img_bar[i].srcAccessMask = 0x0;
        img_bar[i].dstAccessMask = new_access;
        img_bar[i].oldLayout = frame->layout[i];
        img_bar[i].newLayout = new_layout;
        img_bar[i].srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
        img_bar[i].dstQueueFamilyIndex = dst_qf;
        img_bar[i].image = frame->img[i];
        img_bar[i].subresourceRange.levelCount = 1;
        img_bar[i].subresourceRange.layerCount = 1;
        img_bar[i].subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;

        frame->layout[i] = img_bar[i].newLayout;
        frame->access[i] = img_bar[i].dstAccessMask;
    }

    vkCmdPipelineBarrier(get_buf_exec_ctx(hwfc, ectx),
                         VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
                         VK_PIPELINE_STAGE_TRANSFER_BIT,
                         0, 0, NULL, 0, NULL, planes, img_bar);

    return submit_exec_ctx(hwfc, ectx, &s_info, 0);
}
static inline void get_plane_wh(int *w, int *h, enum AVPixelFormat format,
                                int frame_w, int frame_h, int plane)
{
    const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(format);

    /* Currently always true unless gray + alpha support is added */
    if (!plane || (plane == 3) || desc->flags & AV_PIX_FMT_FLAG_RGB ||
        !(desc->flags & AV_PIX_FMT_FLAG_PLANAR)) {
        *w = frame_w;
        *h = frame_h;
        return;
    }

    *w = AV_CEIL_RSHIFT(frame_w, desc->log2_chroma_w);
    *h = AV_CEIL_RSHIFT(frame_h, desc->log2_chroma_h);
}
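/* Example: for a 1920x1080 YUV420P frame, plane 0 stays 1920x1080 while
 * planes 1 and 2 become AV_CEIL_RSHIFT(1920, 1) x AV_CEIL_RSHIFT(1080, 1),
 * i.e. 960x540. */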
static int create_frame(AVHWFramesContext *hwfc, AVVkFrame **frame,
                        VkImageTiling tiling, VkImageUsageFlagBits usage,
                        void *create_pnext)
{
    int err;
    VkResult ret;
    AVHWDeviceContext *ctx = hwfc->device_ctx;
    VulkanDevicePriv *p = ctx->internal->priv;
    AVVulkanDeviceContext *hwctx = ctx->hwctx;
    enum AVPixelFormat format = hwfc->sw_format;
    const VkFormat *img_fmts = av_vkfmt_from_pixfmt(format);
    const int planes = av_pix_fmt_count_planes(format);

    VkExportSemaphoreCreateInfo ext_sem_info = {
        .sType = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO,
        .handleTypes = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT,
    };

    VkSemaphoreCreateInfo sem_spawn = {
        .sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO,
        .pNext = p->extensions & EXT_EXTERNAL_FD_SEM ? &ext_sem_info : NULL,
    };

    AVVkFrame *f = av_vk_frame_alloc();
    if (!f) {
        av_log(ctx, AV_LOG_ERROR, "Unable to allocate memory for AVVkFrame!\n");
        return AVERROR(ENOMEM);
    }

    /* Create the images */
    for (int i = 0; i < planes; i++) {
        VkImageCreateInfo create_info = {
            .sType                 = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
            .pNext                 = create_pnext,
            .imageType             = VK_IMAGE_TYPE_2D,
            .format                = img_fmts[i],
            .extent.depth          = 1,
            .mipLevels             = 1,
            .arrayLayers           = 1,
            .flags                 = VK_IMAGE_CREATE_ALIAS_BIT,
            .tiling                = tiling,
            .initialLayout         = VK_IMAGE_LAYOUT_UNDEFINED,
            .usage                 = usage,
            .samples               = VK_SAMPLE_COUNT_1_BIT,
            .pQueueFamilyIndices   = p->qfs,
            .queueFamilyIndexCount = p->num_qfs,
            .sharingMode           = p->num_qfs > 1 ? VK_SHARING_MODE_CONCURRENT :
                                                      VK_SHARING_MODE_EXCLUSIVE,
        };

        get_plane_wh(&create_info.extent.width, &create_info.extent.height,
                     format, hwfc->width, hwfc->height, i);

        ret = vkCreateImage(hwctx->act_dev, &create_info,
                            hwctx->alloc, &f->img[i]);
        if (ret != VK_SUCCESS) {
            av_log(ctx, AV_LOG_ERROR, "Image creation failure: %s\n",
                   vk_ret2str(ret));
            err = AVERROR(EINVAL);
            goto fail;
        }

        /* Create semaphore */
        ret = vkCreateSemaphore(hwctx->act_dev, &sem_spawn,
                                hwctx->alloc, &f->sem[i]);
        if (ret != VK_SUCCESS) {
            av_log(hwctx, AV_LOG_ERROR, "Failed to create semaphore: %s\n",
                   vk_ret2str(ret));
            return AVERROR_EXTERNAL;
        }

        f->layout[i] = create_info.initialLayout;
        f->access[i] = 0x0;
    }

    f->flags  = 0x0;
    f->tiling = tiling;

    *frame = f;
    return 0;

fail:
    vulkan_frame_free(hwfc, (uint8_t *)f);
    return err;
}
/* Checks if an export flag is enabled, and if it is ORs it with *iexp */
static void try_export_flags(AVHWFramesContext *hwfc,
                             VkExternalMemoryHandleTypeFlags *comp_handle_types,
                             VkExternalMemoryHandleTypeFlagBits *iexp,
                             VkExternalMemoryHandleTypeFlagBits exp)
{
    VkResult ret;
    AVVulkanFramesContext *hwctx = hwfc->hwctx;
    AVVulkanDeviceContext *dev_hwctx = hwfc->device_ctx->hwctx;
    VkExternalImageFormatProperties eprops = {
        .sType = VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES_KHR,
    };
    VkImageFormatProperties2 props = {
        .sType = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2,
        .pNext = &eprops,
    };
    VkPhysicalDeviceExternalImageFormatInfo enext = {
        .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO,
        .handleType = exp,
    };
    VkPhysicalDeviceImageFormatInfo2 pinfo = {
        .sType  = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2,
        .pNext  = !exp ? NULL : &enext,
        .format = av_vkfmt_from_pixfmt(hwfc->sw_format)[0],
        .type   = VK_IMAGE_TYPE_2D,
        .tiling = hwctx->tiling,
        .usage  = hwctx->usage,
        .flags  = VK_IMAGE_CREATE_ALIAS_BIT,
    };

    ret = vkGetPhysicalDeviceImageFormatProperties2(dev_hwctx->phys_dev,
                                                    &pinfo, &props);
    if (ret == VK_SUCCESS) {
        *iexp |= exp;
        *comp_handle_types |= eprops.externalMemoryProperties.compatibleHandleTypes;
    }
}
static AVBufferRef *vulkan_pool_alloc(void *opaque, int size)
{
    int err;
    AVVkFrame *f = NULL;
    AVBufferRef *avbuf = NULL;
    AVHWFramesContext *hwfc = opaque;
    AVVulkanFramesContext *hwctx = hwfc->hwctx;
    VulkanDevicePriv *p = hwfc->device_ctx->internal->priv;
    VulkanFramesPriv *fp = hwfc->internal->priv;
    VkExportMemoryAllocateInfo eminfo[AV_NUM_DATA_POINTERS];
    VkExternalMemoryHandleTypeFlags e = 0x0;

    VkExternalMemoryImageCreateInfo eiinfo = {
        .sType = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO,
        .pNext = hwctx->create_pnext,
    };

    if (p->extensions & EXT_EXTERNAL_FD_MEMORY)
        try_export_flags(hwfc, &eiinfo.handleTypes, &e,
                         VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT);

    if (p->extensions & EXT_EXTERNAL_DMABUF_MEMORY)
        try_export_flags(hwfc, &eiinfo.handleTypes, &e,
                         VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT);

    for (int i = 0; i < av_pix_fmt_count_planes(hwfc->sw_format); i++) {
        eminfo[i].sType       = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO;
        eminfo[i].pNext       = hwctx->alloc_pnext[i];
        eminfo[i].handleTypes = e;
    }

    err = create_frame(hwfc, &f, hwctx->tiling, hwctx->usage,
                       eiinfo.handleTypes ? &eiinfo : NULL);
    if (err)
        return NULL;

    err = alloc_bind_mem(hwfc, f, eminfo, sizeof(*eminfo));
    if (err)
        goto fail;

    err = prepare_frame(hwfc, &fp->conv_ctx, f, PREP_MODE_WRITE);
    if (err)
        goto fail;

    avbuf = av_buffer_create((uint8_t *)f, sizeof(AVVkFrame),
                             vulkan_frame_free, hwfc, 0);
    if (!avbuf)
        goto fail;

    return avbuf;

fail:
    vulkan_frame_free(hwfc, (uint8_t *)f);
    return NULL;
}
static void vulkan_frames_uninit(AVHWFramesContext *hwfc)
{
    VulkanFramesPriv *fp = hwfc->internal->priv;

    free_exec_ctx(hwfc, &fp->conv_ctx);
    free_exec_ctx(hwfc, &fp->upload_ctx);
    free_exec_ctx(hwfc, &fp->download_ctx);
}
static int vulkan_frames_init(AVHWFramesContext *hwfc)
{
    int err;
    AVVkFrame *f;
    AVVulkanFramesContext *hwctx = hwfc->hwctx;
    VulkanFramesPriv *fp = hwfc->internal->priv;
    AVVulkanDeviceContext *dev_hwctx = hwfc->device_ctx->hwctx;
    VulkanDevicePriv *p = hwfc->device_ctx->internal->priv;

    /* Default pool flags */
    hwctx->tiling = hwctx->tiling ? hwctx->tiling : p->use_linear_images ?
                    VK_IMAGE_TILING_LINEAR : VK_IMAGE_TILING_OPTIMAL;

    if (!hwctx->usage)
        hwctx->usage = DEFAULT_USAGE_FLAGS;

    err = create_exec_ctx(hwfc, &fp->conv_ctx,
                          dev_hwctx->queue_family_comp_index,
                          GET_QUEUE_COUNT(dev_hwctx, 0, 1, 0));
    if (err)
        return err;

    err = create_exec_ctx(hwfc, &fp->upload_ctx,
                          dev_hwctx->queue_family_tx_index,
                          GET_QUEUE_COUNT(dev_hwctx, 0, 0, 1));
    if (err)
        return err;

    err = create_exec_ctx(hwfc, &fp->download_ctx,
                          dev_hwctx->queue_family_tx_index, 1);
    if (err)
        return err;

    /* Test to see if allocation will fail */
    err = create_frame(hwfc, &f, hwctx->tiling, hwctx->usage,
                       hwctx->create_pnext);
    if (err)
        return err;

    vulkan_frame_free(hwfc, (uint8_t *)f);

    /* If user did not specify a pool, hwfc->pool will be set to the internal one
     * in hwcontext.c just after this gets called */
    if (!hwfc->pool) {
        hwfc->internal->pool_internal = av_buffer_pool_init2(sizeof(AVVkFrame),
                                                             hwfc, vulkan_pool_alloc,
                                                             NULL);
        if (!hwfc->internal->pool_internal)
            return AVERROR(ENOMEM);
    }

    return 0;
}
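/* Usage sketch (illustrative): allocating a Vulkan frames context on an
 * already-initialized device reference dev_ref:
 *
 *     AVBufferRef *fc_ref = av_hwframe_ctx_alloc(dev_ref);
 *     AVHWFramesContext *fc = (AVHWFramesContext *)fc_ref->data;
 *     fc->format    = AV_PIX_FMT_VULKAN;
 *     fc->sw_format = AV_PIX_FMT_NV12;
 *     fc->width     = 1920;
 *     fc->height    = 1080;
 *     int err = av_hwframe_ctx_init(fc_ref);
 */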
static int vulkan_get_buffer(AVHWFramesContext *hwfc, AVFrame *frame)
{
    frame->buf[0] = av_buffer_pool_get(hwfc->pool);
    if (!frame->buf[0])
        return AVERROR(ENOMEM);

    frame->data[0] = frame->buf[0]->data;
    frame->format  = AV_PIX_FMT_VULKAN;
    frame->width   = hwfc->width;
    frame->height  = hwfc->height;

    return 0;
}
static int vulkan_transfer_get_formats(AVHWFramesContext *hwfc,
                                       enum AVHWFrameTransferDirection dir,
                                       enum AVPixelFormat **formats)
{
    enum AVPixelFormat *fmts = av_malloc_array(2, sizeof(*fmts));
    if (!fmts)
        return AVERROR(ENOMEM);

    fmts[0] = hwfc->sw_format;
    fmts[1] = AV_PIX_FMT_NONE;

    *formats = fmts;
    return 0;
}
typedef struct VulkanMapping {
    AVVkFrame *frame;
    int flags;
} VulkanMapping;
static void vulkan_unmap_frame(AVHWFramesContext *hwfc, HWMapDescriptor *hwmap)
{
    VulkanMapping *map = hwmap->priv;
    AVVulkanDeviceContext *hwctx = hwfc->device_ctx->hwctx;
    const int planes = av_pix_fmt_count_planes(hwfc->sw_format);

    /* Check if buffer needs flushing */
    if ((map->flags & AV_HWFRAME_MAP_WRITE) &&
        !(map->frame->flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)) {
        VkResult ret;
        VkMappedMemoryRange flush_ranges[AV_NUM_DATA_POINTERS] = { { 0 } };

        for (int i = 0; i < planes; i++) {
            flush_ranges[i].sType  = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
            flush_ranges[i].memory = map->frame->mem[i];
            flush_ranges[i].size   = VK_WHOLE_SIZE;
        }

        ret = vkFlushMappedMemoryRanges(hwctx->act_dev, planes,
                                        flush_ranges);
        if (ret != VK_SUCCESS) {
            av_log(hwfc, AV_LOG_ERROR, "Failed to flush memory: %s\n",
                   vk_ret2str(ret));
        }
    }

    for (int i = 0; i < planes; i++)
        vkUnmapMemory(hwctx->act_dev, map->frame->mem[i]);

    av_free(map);
}
1881 static int vulkan_map_frame_to_mem(AVHWFramesContext *hwfc, AVFrame *dst,
1882 const AVFrame *src, int flags)
1885 int err, mapped_mem_count = 0;
1886 AVVkFrame *f = (AVVkFrame *)src->data[0];
1887 AVVulkanDeviceContext *hwctx = hwfc->device_ctx->hwctx;
1888 const int planes = av_pix_fmt_count_planes(hwfc->sw_format);
1890 VulkanMapping *map = av_mallocz(sizeof(VulkanMapping));
1892 return AVERROR(EINVAL);
1894 if (src->format != AV_PIX_FMT_VULKAN) {
1895 av_log(hwfc, AV_LOG_ERROR, "Cannot map from pixel format %s!\n",
1896 av_get_pix_fmt_name(src->format));
1897 err = AVERROR(EINVAL);
1901 if (!(f->flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) ||
1902 !(f->tiling == VK_IMAGE_TILING_LINEAR)) {
1903 av_log(hwfc, AV_LOG_ERROR, "Unable to map frame, not host visible "
1905 err = AVERROR(EINVAL);
1909 dst->width = src->width;
1910 dst->height = src->height;
1912 for (int i = 0; i < planes; i++) {
1913 ret = vkMapMemory(hwctx->act_dev, f->mem[i], 0,
1914 VK_WHOLE_SIZE, 0, (void **)&dst->data[i]);
1915 if (ret != VK_SUCCESS) {
1916 av_log(hwfc, AV_LOG_ERROR, "Failed to map image memory: %s\n",
1918 err = AVERROR_EXTERNAL;
1924 /* Check if the memory contents matter */
1925 if (((flags & AV_HWFRAME_MAP_READ) || !(flags & AV_HWFRAME_MAP_OVERWRITE)) &&
1926 !(f->flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)) {
1927 VkMappedMemoryRange map_mem_ranges[AV_NUM_DATA_POINTERS] = { { 0 } };
1928 for (int i = 0; i < planes; i++) {
1929 map_mem_ranges[i].sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
1930 map_mem_ranges[i].size = VK_WHOLE_SIZE;
1931 map_mem_ranges[i].memory = f->mem[i];
1934 ret = vkInvalidateMappedMemoryRanges(hwctx->act_dev, planes,
1936 if (ret != VK_SUCCESS) {
1937 av_log(hwfc, AV_LOG_ERROR, "Failed to invalidate memory: %s\n",
1939 err = AVERROR_EXTERNAL;
1944 for (int i = 0; i < planes; i++) {
1945 VkImageSubresource sub = {
1946 .aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
1948 VkSubresourceLayout layout;
1949 vkGetImageSubresourceLayout(hwctx->act_dev, f->img[i], &sub, &layout);
1950 dst->linesize[i] = layout.rowPitch;
1956 err = ff_hwframe_map_create(src->hw_frames_ctx, dst, src,
1957 &vulkan_unmap_frame, map);
1964 for (int i = 0; i < mapped_mem_count; i++)
1965 vkUnmapMemory(hwctx->act_dev, f->mem[i]);
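/* Example (not part of this file): the function above implements CPU mapping
 * of linear, host-visible Vulkan images. A minimal user-side sketch, assuming
 * `vk_frame` is an AVFrame whose data[0] holds an AVVkFrame:
 *
 *     AVFrame *cpu = av_frame_alloc();
 *     if (!cpu)
 *         return AVERROR(ENOMEM);
 *     if (av_hwframe_map(cpu, vk_frame, AV_HWFRAME_MAP_READ) == 0) {
 *         // cpu->data[]/cpu->linesize[] now alias the Vulkan image memory
 *         av_frame_unref(cpu); // triggers vulkan_unmap_frame()
 *     }
 *     av_frame_free(&cpu);
 */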
1972 static void vulkan_unmap_from(AVHWFramesContext *hwfc, HWMapDescriptor *hwmap)
1974 VulkanMapping *map = hwmap->priv;
1975 AVVulkanDeviceContext *hwctx = hwfc->device_ctx->hwctx;
1976 const int planes = av_pix_fmt_count_planes(hwfc->sw_format);
1978 for (int i = 0; i < planes; i++) {
1979 vkDestroyImage(hwctx->act_dev, map->frame->img[i], hwctx->alloc);
1980 vkFreeMemory(hwctx->act_dev, map->frame->mem[i], hwctx->alloc);
1981 vkDestroySemaphore(hwctx->act_dev, map->frame->sem[i], hwctx->alloc);
1984 av_freep(&map->frame);
1987 static const struct {
1988 uint32_t drm_fourcc;
1989 VkFormat vk_format;
1990 } vulkan_drm_format_map[] = {
1991 { DRM_FORMAT_R8, VK_FORMAT_R8_UNORM },
1992 { DRM_FORMAT_R16, VK_FORMAT_R16_UNORM },
1993 { DRM_FORMAT_GR88, VK_FORMAT_R8G8_UNORM },
1994 { DRM_FORMAT_RG88, VK_FORMAT_R8G8_UNORM },
1995 { DRM_FORMAT_GR1616, VK_FORMAT_R16G16_UNORM },
1996 { DRM_FORMAT_RG1616, VK_FORMAT_R16G16_UNORM },
1997 { DRM_FORMAT_ARGB8888, VK_FORMAT_B8G8R8A8_UNORM },
1998 { DRM_FORMAT_XRGB8888, VK_FORMAT_B8G8R8A8_UNORM },
1999 { DRM_FORMAT_ABGR8888, VK_FORMAT_R8G8B8A8_UNORM },
2000 { DRM_FORMAT_XBGR8888, VK_FORMAT_R8G8B8A8_UNORM },
2003 static inline VkFormat drm_to_vulkan_fmt(uint32_t drm_fourcc)
2005 for (int i = 0; i < FF_ARRAY_ELEMS(vulkan_drm_format_map); i++)
2006 if (vulkan_drm_format_map[i].drm_fourcc == drm_fourcc)
2007 return vulkan_drm_format_map[i].vk_format;
2008 return VK_FORMAT_UNDEFINED;
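/* Example (not part of this file): the helper above lets import code reject
 * unsupported DMABUF layers up front — a sketch over a hypothetical
 * AVDRMFrameDescriptor `d`:
 *
 *     for (int i = 0; i < d->nb_layers; i++)
 *         if (drm_to_vulkan_fmt(d->layers[i].format) == VK_FORMAT_UNDEFINED)
 *             return AVERROR(EINVAL);
 */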
2011 static int vulkan_map_from_drm_frame_desc(AVHWFramesContext *hwfc, AVVkFrame **frame,
2012 AVDRMFrameDescriptor *desc)
2017 int bind_counts = 0;
2018 AVHWDeviceContext *ctx = hwfc->device_ctx;
2019 AVVulkanDeviceContext *hwctx = ctx->hwctx;
2020 VulkanDevicePriv *p = ctx->internal->priv;
2021 VulkanFramesPriv *fp = hwfc->internal->priv;
2022 AVVulkanFramesContext *frames_hwctx = hwfc->hwctx;
2023 const int has_modifiers = !!(p->extensions & EXT_DRM_MODIFIER_FLAGS);
2024 VkSubresourceLayout plane_data[AV_NUM_DATA_POINTERS] = { 0 };
2025 VkBindImageMemoryInfo bind_info[AV_NUM_DATA_POINTERS] = { 0 };
2026 VkBindImagePlaneMemoryInfo plane_info[AV_NUM_DATA_POINTERS] = { 0 };
2027 VkExternalMemoryHandleTypeFlagBits htype = VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT;
2029 VK_LOAD_PFN(hwctx->inst, vkGetMemoryFdPropertiesKHR);
2031 for (int i = 0; i < desc->nb_layers; i++) {
2032 if (drm_to_vulkan_fmt(desc->layers[i].format) == VK_FORMAT_UNDEFINED) {
2033 av_log(ctx, AV_LOG_ERROR, "Unsupported DMABUF layer format %#08x!\n",
2034 desc->layers[i].format);
2035 return AVERROR(EINVAL);
2039 if (!(f = av_vk_frame_alloc())) {
2040 av_log(ctx, AV_LOG_ERROR, "Unable to allocate memory for AVVkFrame!\n");
2041 err = AVERROR(ENOMEM);
2045 f->tiling = has_modifiers ? VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT :
2046 desc->objects[0].format_modifier == DRM_FORMAT_MOD_LINEAR ?
2047 VK_IMAGE_TILING_LINEAR : VK_IMAGE_TILING_OPTIMAL;
2049 for (int i = 0; i < desc->nb_layers; i++) {
2050 const int planes = desc->layers[i].nb_planes;
2051 VkImageDrmFormatModifierExplicitCreateInfoEXT drm_info = {
2052 .sType = VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_EXPLICIT_CREATE_INFO_EXT,
2053 .drmFormatModifier = desc->objects[0].format_modifier,
2054 .drmFormatModifierPlaneCount = planes,
2055 .pPlaneLayouts = (const VkSubresourceLayout *)&plane_data,
2058 VkExternalMemoryImageCreateInfo einfo = {
2059 .sType = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO,
2060 .pNext = has_modifiers ? &drm_info : NULL,
2061 .handleTypes = htype,
2064 VkSemaphoreCreateInfo sem_spawn = {
2065 .sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO,
2068 VkImageCreateInfo create_info = {
2069 .sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
2071 .imageType = VK_IMAGE_TYPE_2D,
2072 .format = drm_to_vulkan_fmt(desc->layers[i].format),
2076 .flags = VK_IMAGE_CREATE_ALIAS_BIT,
2077 .tiling = f->tiling,
2078 .initialLayout = VK_IMAGE_LAYOUT_UNDEFINED, /* the spec requires this for external memory */
2079 .usage = frames_hwctx->usage,
2080 .samples = VK_SAMPLE_COUNT_1_BIT,
2081 .pQueueFamilyIndices = p->qfs,
2082 .queueFamilyIndexCount = p->num_qfs,
2083 .sharingMode = p->num_qfs > 1 ? VK_SHARING_MODE_CONCURRENT :
2084 VK_SHARING_MODE_EXCLUSIVE,
2087 get_plane_wh(&create_info.extent.width, &create_info.extent.height,
2088 hwfc->sw_format, hwfc->width, hwfc->height, i);
2090 for (int j = 0; j < planes; j++) {
2091 plane_data[j].offset = desc->layers[i].planes[j].offset;
2092 plane_data[j].rowPitch = desc->layers[i].planes[j].pitch;
2093 plane_data[j].size = 0; /* the spec requires size, arrayPitch and depthPitch to be 0 */
2094 plane_data[j].arrayPitch = 0;
2095 plane_data[j].depthPitch = 0;
2099 ret = vkCreateImage(hwctx->act_dev, &create_info,
2100 hwctx->alloc, &f->img[i]);
2101 if (ret != VK_SUCCESS) {
2102 av_log(ctx, AV_LOG_ERROR, "Image creation failure: %s\n",
2104 err = AVERROR(EINVAL);
2108 ret = vkCreateSemaphore(hwctx->act_dev, &sem_spawn,
2109 hwctx->alloc, &f->sem[i]);
2110 if (ret != VK_SUCCESS) {
2111 av_log(hwctx, AV_LOG_ERROR, "Failed to create semaphore: %s\n",
2113 return AVERROR_EXTERNAL;
2116 /* We'd import a semaphore onto the one we created using
2117 * vkImportSemaphoreFdKHR, but unfortunately neither DRM nor VAAPI
2118 * offers us anything we could import and sync with, so instead
2119 * we just signal the semaphore we created. */
2121 f->layout[i] = create_info.initialLayout;
2125 for (int i = 0; i < desc->nb_objects; i++) {
2126 int use_ded_mem = 0;
2127 VkMemoryFdPropertiesKHR fdmp = {
2128 .sType = VK_STRUCTURE_TYPE_MEMORY_FD_PROPERTIES_KHR,
2130 VkMemoryRequirements req = {
2131 .size = desc->objects[i].size,
2133 VkImportMemoryFdInfoKHR idesc = {
2134 .sType = VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR,
2135 .handleType = htype,
2136 .fd = dup(desc->objects[i].fd),
2138 VkMemoryDedicatedAllocateInfo ded_alloc = {
2139 .sType = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO,
2143 ret = pfn_vkGetMemoryFdPropertiesKHR(hwctx->act_dev, htype,
2145 if (ret != VK_SUCCESS) {
2146 av_log(hwfc, AV_LOG_ERROR, "Failed to get FD properties: %s\n",
2148 err = AVERROR_EXTERNAL;
2153 req.memoryTypeBits = fdmp.memoryTypeBits;
2155 /* Dedicated allocation only makes sense if there's a one-to-one mapping
2156 * between images and the memory backing them, so only check in this
2157 * case. */
2158 if (desc->nb_layers == desc->nb_objects) {
2159 VkImageMemoryRequirementsInfo2 req_desc = {
2160 .sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2,
2163 VkMemoryDedicatedRequirements ded_req = {
2164 .sType = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS,
2166 VkMemoryRequirements2 req2 = {
2167 .sType = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2,
2171 vkGetImageMemoryRequirements2(hwctx->act_dev, &req_desc, &req2);
2173 use_ded_mem = ded_req.prefersDedicatedAllocation |
2174 ded_req.requiresDedicatedAllocation;
2176 ded_alloc.image = f->img[i];
2179 err = alloc_mem(ctx, &req, VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT,
2180 use_ded_mem ? &ded_alloc : ded_alloc.pNext,
2181 &f->flags, &f->mem[i]);
2187 f->size[i] = desc->objects[i].size;
2190 for (int i = 0; i < desc->nb_layers; i++) {
2191 const int planes = desc->layers[i].nb_planes;
2192 const int signal_p = has_modifiers && (planes > 1);
2193 for (int j = 0; j < planes; j++) {
2194 VkImageAspectFlagBits aspect = j == 0 ? VK_IMAGE_ASPECT_MEMORY_PLANE_0_BIT_EXT :
2195 j == 1 ? VK_IMAGE_ASPECT_MEMORY_PLANE_1_BIT_EXT :
2196 VK_IMAGE_ASPECT_MEMORY_PLANE_2_BIT_EXT;
2198 plane_info[bind_counts].sType = VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO;
2199 plane_info[bind_counts].planeAspect = aspect;
2201 bind_info[bind_counts].sType = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO;
2202 bind_info[bind_counts].pNext = signal_p ? &plane_info[bind_counts] : NULL;
2203 bind_info[bind_counts].image = f->img[i];
2204 bind_info[bind_counts].memory = f->mem[desc->layers[i].planes[j].object_index];
2205 bind_info[bind_counts].memoryOffset = desc->layers[i].planes[j].offset;
2210 /* Bind the allocated memory to the images */
2211 ret = vkBindImageMemory2(hwctx->act_dev, bind_counts, bind_info);
2212 if (ret != VK_SUCCESS) {
2213 av_log(ctx, AV_LOG_ERROR, "Failed to bind memory: %s\n",
2215 return AVERROR_EXTERNAL;
2218 /* NOTE: This is completely unnecessary once we can import semaphores
2219 * from DRM. Until then, we have to signal the semaphores ourselves.
2220 * We're reusing the exec context that's also used for uploads/downloads. */
2221 err = prepare_frame(hwfc, &fp->conv_ctx, f, PREP_MODE_RO_SHADER);
2230 for (int i = 0; i < desc->nb_layers; i++) {
2231 vkDestroyImage(hwctx->act_dev, f->img[i], hwctx->alloc);
2232 vkDestroySemaphore(hwctx->act_dev, f->sem[i], hwctx->alloc);
2234 for (int i = 0; i < desc->nb_objects; i++)
2235 vkFreeMemory(hwctx->act_dev, f->mem[i], hwctx->alloc);
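/* Example (not part of this file): mapping a DRM PRIME frame into Vulkan via
 * the importer above — a minimal sketch, assuming `drm_frame` comes from an
 * AV_PIX_FMT_DRM_PRIME frames context and `vk_frames_ref` references a
 * compatible Vulkan frames context:
 *
 *     AVFrame *vk = av_frame_alloc();
 *     vk->format        = AV_PIX_FMT_VULKAN;
 *     vk->hw_frames_ctx = av_buffer_ref(vk_frames_ref);
 *     err = av_hwframe_map(vk, drm_frame, AV_HWFRAME_MAP_READ);
 */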
2242 static int vulkan_map_from_drm(AVHWFramesContext *hwfc, AVFrame *dst,
2243 const AVFrame *src, int flags)
2247 VulkanMapping *map = NULL;
2249 err = vulkan_map_from_drm_frame_desc(hwfc, &f,
2250 (AVDRMFrameDescriptor *)src->data[0]);
2254 /* The unmapping function will free this */
2255 dst->data[0] = (uint8_t *)f;
2256 dst->width = src->width;
2257 dst->height = src->height;
2259 map = av_mallocz(sizeof(VulkanMapping));
2266 err = ff_hwframe_map_create(dst->hw_frames_ctx, dst, src,
2267 &vulkan_unmap_from, map);
2271 av_log(hwfc, AV_LOG_DEBUG, "Mapped DRM object to Vulkan!\n");
2276 vulkan_frame_free(hwfc->device_ctx->hwctx, (uint8_t *)f);
2282 static int vulkan_map_from_vaapi(AVHWFramesContext *dst_fc,
2283 AVFrame *dst, const AVFrame *src,
2284 int flags)
2287 AVFrame *tmp = av_frame_alloc();
2288 AVHWFramesContext *vaapi_fc = (AVHWFramesContext*)src->hw_frames_ctx->data;
2289 AVVAAPIDeviceContext *vaapi_ctx = vaapi_fc->device_ctx->hwctx;
2290 VASurfaceID surface_id = (VASurfaceID)(uintptr_t)src->data[3];
2292 if (!tmp)
2293 return AVERROR(ENOMEM);
2295 /* We have to sync manually since, as noted above, there are no semaphores we can import and wait on */
2296 vaSyncSurface(vaapi_ctx->display, surface_id);
2298 tmp->format = AV_PIX_FMT_DRM_PRIME;
2300 err = av_hwframe_map(tmp, src, flags);
2304 err = vulkan_map_from_drm(dst_fc, dst, tmp, flags);
2308 err = ff_hwframe_map_replace(dst, src);
2311 av_frame_free(&tmp);
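/* Example (not part of this file): the VAAPI path above goes through DRM
 * PRIME. A minimal sketch of deriving a Vulkan device from an existing VAAPI
 * device so such mappings become possible, assuming `vaapi_dev_ref` is valid:
 *
 *     AVBufferRef *vk_dev = NULL;
 *     int err = av_hwdevice_ctx_create_derived(&vk_dev,
 *                                              AV_HWDEVICE_TYPE_VULKAN,
 *                                              vaapi_dev_ref, 0);
 */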
2318 static int vulkan_export_to_cuda(AVHWFramesContext *hwfc,
2319 AVBufferRef *cuda_hwfc,
2320 const AVFrame *frame)
2325 AVVkFrameInternal *dst_int;
2326 AVHWDeviceContext *ctx = hwfc->device_ctx;
2327 AVVulkanDeviceContext *hwctx = ctx->hwctx;
2328 const int planes = av_pix_fmt_count_planes(hwfc->sw_format);
2329 const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(hwfc->sw_format);
2330 VK_LOAD_PFN(hwctx->inst, vkGetMemoryFdKHR);
2331 VK_LOAD_PFN(hwctx->inst, vkGetSemaphoreFdKHR);
2333 AVHWFramesContext *cuda_fc = (AVHWFramesContext*)cuda_hwfc->data;
2334 AVHWDeviceContext *cuda_cu = cuda_fc->device_ctx;
2335 AVCUDADeviceContext *cuda_dev = cuda_cu->hwctx;
2336 AVCUDADeviceContextInternal *cu_internal = cuda_dev->internal;
2337 CudaFunctions *cu = cu_internal->cuda_dl;
2338 CUarray_format cufmt = desc->comp[0].depth > 8 ? CU_AD_FORMAT_UNSIGNED_INT16 :
2339 CU_AD_FORMAT_UNSIGNED_INT8;
2341 dst_f = (AVVkFrame *)frame->data[0];
2343 dst_int = dst_f->internal;
2344 if (!dst_int || !dst_int->cuda_fc_ref) {
2345 if (!dst_f->internal)
2346 dst_f->internal = dst_int = av_mallocz(sizeof(*dst_f->internal));
2348 if (!dst_int) {
2349 err = AVERROR(ENOMEM);
2353 dst_int->cuda_fc_ref = av_buffer_ref(cuda_hwfc);
2354 if (!dst_int->cuda_fc_ref) {
2355 err = AVERROR(ENOMEM);
2359 for (int i = 0; i < planes; i++) {
2360 CUDA_EXTERNAL_MEMORY_MIPMAPPED_ARRAY_DESC tex_desc = {
2365 .NumChannels = 1 + ((planes == 2) && i),
2370 CUDA_EXTERNAL_MEMORY_HANDLE_DESC ext_desc = {
2371 .type = CU_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD,
2372 .size = dst_f->size[i],
2374 VkMemoryGetFdInfoKHR export_info = {
2375 .sType = VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR,
2376 .memory = dst_f->mem[i],
2377 .handleType = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT_KHR,
2379 VkSemaphoreGetFdInfoKHR sem_export = {
2380 .sType = VK_STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR,
2381 .semaphore = dst_f->sem[i],
2382 .handleType = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT,
2384 CUDA_EXTERNAL_SEMAPHORE_HANDLE_DESC ext_sem_desc = {
2385 .type = CU_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD,
2389 get_plane_wh(&p_w, &p_h, hwfc->sw_format, hwfc->width, hwfc->height, i);
2391 tex_desc.arrayDesc.Width = p_w;
2392 tex_desc.arrayDesc.Height = p_h;
2394 ret = pfn_vkGetMemoryFdKHR(hwctx->act_dev, &export_info,
2395 &ext_desc.handle.fd);
2396 if (ret != VK_SUCCESS) {
2397 av_log(hwfc, AV_LOG_ERROR, "Unable to export the image as a FD!\n");
2398 err = AVERROR_EXTERNAL;
2402 ret = CHECK_CU(cu->cuImportExternalMemory(&dst_int->ext_mem[i], &ext_desc));
2404 err = AVERROR_EXTERNAL;
2408 ret = CHECK_CU(cu->cuExternalMemoryGetMappedMipmappedArray(&dst_int->cu_mma[i],
2409 dst_int->ext_mem[i],
2412 err = AVERROR_EXTERNAL;
2416 ret = CHECK_CU(cu->cuMipmappedArrayGetLevel(&dst_int->cu_array[i],
2417 dst_int->cu_mma[i], 0));
2419 err = AVERROR_EXTERNAL;
2423 ret = pfn_vkGetSemaphoreFdKHR(hwctx->act_dev, &sem_export,
2424 &ext_sem_desc.handle.fd);
2425 if (ret != VK_SUCCESS) {
2426 av_log(ctx, AV_LOG_ERROR, "Failed to export semaphore: %s\n",
2428 err = AVERROR_EXTERNAL;
2432 ret = CHECK_CU(cu->cuImportExternalSemaphore(&dst_int->cu_sem[i],
2435 err = AVERROR_EXTERNAL;
2447 static int vulkan_transfer_data_from_cuda(AVHWFramesContext *hwfc,
2448 AVFrame *dst, const AVFrame *src)
2454 AVVkFrameInternal *dst_int;
2455 const int planes = av_pix_fmt_count_planes(hwfc->sw_format);
2456 const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(hwfc->sw_format);
2458 AVHWFramesContext *cuda_fc = (AVHWFramesContext*)src->hw_frames_ctx->data;
2459 AVHWDeviceContext *cuda_cu = cuda_fc->device_ctx;
2460 AVCUDADeviceContext *cuda_dev = cuda_cu->hwctx;
2461 AVCUDADeviceContextInternal *cu_internal = cuda_dev->internal;
2462 CudaFunctions *cu = cu_internal->cuda_dl;
2463 CUDA_EXTERNAL_SEMAPHORE_WAIT_PARAMS s_w_par[AV_NUM_DATA_POINTERS] = { 0 };
2464 CUDA_EXTERNAL_SEMAPHORE_SIGNAL_PARAMS s_s_par[AV_NUM_DATA_POINTERS] = { 0 };
2466 ret = CHECK_CU(cu->cuCtxPushCurrent(cuda_dev->cuda_ctx));
2468 return AVERROR_EXTERNAL;
2470 dst_f = (AVVkFrame *)dst->data[0];
2472 ret = vulkan_export_to_cuda(hwfc, src->hw_frames_ctx, dst);
2474 CHECK_CU(cu->cuCtxPopCurrent(&dummy));
2478 dst_int = dst_f->internal;
2480 ret = CHECK_CU(cu->cuWaitExternalSemaphoresAsync(dst_int->cu_sem, s_w_par,
2481 planes, cuda_dev->stream));
2483 err = AVERROR_EXTERNAL;
2487 for (int i = 0; i < planes; i++) {
2488 CUDA_MEMCPY2D cpy = {
2489 .srcMemoryType = CU_MEMORYTYPE_DEVICE,
2490 .srcDevice = (CUdeviceptr)src->data[i],
2491 .srcPitch = src->linesize[i],
2494 .dstMemoryType = CU_MEMORYTYPE_ARRAY,
2495 .dstArray = dst_int->cu_array[i],
2499 get_plane_wh(&p_w, &p_h, hwfc->sw_format, hwfc->width, hwfc->height, i);
2501 cpy.WidthInBytes = p_w * desc->comp[i].step;
2504 ret = CHECK_CU(cu->cuMemcpy2DAsync(&cpy, cuda_dev->stream));
2506 err = AVERROR_EXTERNAL;
2511 ret = CHECK_CU(cu->cuSignalExternalSemaphoresAsync(dst_int->cu_sem, s_s_par,
2512 planes, cuda_dev->stream));
2514 err = AVERROR_EXTERNAL;
2518 CHECK_CU(cu->cuCtxPopCurrent(&dummy));
2520 av_log(hwfc, AV_LOG_VERBOSE, "Transferred CUDA image to Vulkan!\n");
2525 CHECK_CU(cu->cuCtxPopCurrent(&dummy));
2526 vulkan_free_internal(dst_int);
2527 dst_f->internal = NULL;
2528 av_buffer_unref(&dst->buf[0]);
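/* Example (not part of this file): uploading a CUDA frame into a Vulkan frame
 * through the path above — a sketch, assuming `cuda_frame` and `vk_frame`
 * were allocated from their respective frames contexts with matching
 * sw_format and dimensions:
 *
 *     int err = av_hwframe_transfer_data(vk_frame, cuda_frame, 0);
 */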
2533 static int vulkan_map_to(AVHWFramesContext *hwfc, AVFrame *dst,
2534 const AVFrame *src, int flags)
2536 av_unused VulkanDevicePriv *p = hwfc->device_ctx->internal->priv;
2538 switch (src->format) {
2541 case AV_PIX_FMT_VAAPI:
2542 if (p->extensions & EXT_EXTERNAL_DMABUF_MEMORY)
2543 return vulkan_map_from_vaapi(hwfc, dst, src, flags);
2545 case AV_PIX_FMT_DRM_PRIME:
2546 if (p->extensions & EXT_EXTERNAL_DMABUF_MEMORY)
2547 return vulkan_map_from_drm(hwfc, dst, src, flags);
2550 return AVERROR(ENOSYS);
2555 typedef struct VulkanDRMMapping {
2556 AVDRMFrameDescriptor drm_desc;
2557 AVVkFrame *source;
2558 } VulkanDRMMapping;
2560 static void vulkan_unmap_to_drm(AVHWFramesContext *hwfc, HWMapDescriptor *hwmap)
2562 AVDRMFrameDescriptor *drm_desc = hwmap->priv;
2564 for (int i = 0; i < drm_desc->nb_objects; i++)
2565 close(drm_desc->objects[i].fd);
2570 static inline uint32_t vulkan_fmt_to_drm(VkFormat vkfmt)
2572 for (int i = 0; i < FF_ARRAY_ELEMS(vulkan_drm_format_map); i++)
2573 if (vulkan_drm_format_map[i].vk_format == vkfmt)
2574 return vulkan_drm_format_map[i].drm_fourcc;
2575 return DRM_FORMAT_INVALID;
2578 static int vulkan_map_to_drm(AVHWFramesContext *hwfc, AVFrame *dst,
2579 const AVFrame *src, int flags)
2583 AVVkFrame *f = (AVVkFrame *)src->data[0];
2584 VulkanDevicePriv *p = hwfc->device_ctx->internal->priv;
2585 VulkanFramesPriv *fp = hwfc->internal->priv;
2586 AVVulkanDeviceContext *hwctx = hwfc->device_ctx->hwctx;
2587 const int planes = av_pix_fmt_count_planes(hwfc->sw_format);
2588 VK_LOAD_PFN(hwctx->inst, vkGetMemoryFdKHR);
2589 VkImageDrmFormatModifierPropertiesEXT drm_mod = {
2590 .sType = VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT,
2593 AVDRMFrameDescriptor *drm_desc = av_mallocz(sizeof(*drm_desc));
2594 if (!drm_desc)
2595 return AVERROR(ENOMEM);
2597 err = prepare_frame(hwfc, &fp->conv_ctx, f, PREP_MODE_EXTERNAL_EXPORT);
2601 err = ff_hwframe_map_create(src->hw_frames_ctx, dst, src, &vulkan_unmap_to_drm, drm_desc);
2605 if (p->extensions & EXT_DRM_MODIFIER_FLAGS) {
2606 VK_LOAD_PFN(hwctx->inst, vkGetImageDrmFormatModifierPropertiesEXT);
2607 ret = pfn_vkGetImageDrmFormatModifierPropertiesEXT(hwctx->act_dev, f->img[0],
2609 if (ret != VK_SUCCESS) {
2610 av_log(hwfc, AV_LOG_ERROR, "Failed to retrieve DRM format modifier!\n");
2611 err = AVERROR_EXTERNAL;
2616 for (int i = 0; (i < planes) && (f->mem[i]); i++) {
2617 VkMemoryGetFdInfoKHR export_info = {
2618 .sType = VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR,
2619 .memory = f->mem[i],
2620 .handleType = VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT,
2623 ret = pfn_vkGetMemoryFdKHR(hwctx->act_dev, &export_info,
2624 &drm_desc->objects[i].fd);
2625 if (ret != VK_SUCCESS) {
2626 av_log(hwfc, AV_LOG_ERROR, "Unable to export the image as a FD!\n");
2627 err = AVERROR_EXTERNAL;
2631 drm_desc->nb_objects++;
2632 drm_desc->objects[i].size = f->size[i];
2633 drm_desc->objects[i].format_modifier = drm_mod.drmFormatModifier;
2636 drm_desc->nb_layers = planes;
2637 for (int i = 0; i < drm_desc->nb_layers; i++) {
2638 VkSubresourceLayout layout;
2639 VkImageSubresource sub = {
2640 .aspectMask = p->extensions & EXT_DRM_MODIFIER_FLAGS ?
2641 VK_IMAGE_ASPECT_MEMORY_PLANE_0_BIT_EXT :
2642 VK_IMAGE_ASPECT_COLOR_BIT,
2644 VkFormat plane_vkfmt = av_vkfmt_from_pixfmt(hwfc->sw_format)[i];
2646 drm_desc->layers[i].format = vulkan_fmt_to_drm(plane_vkfmt);
2647 drm_desc->layers[i].nb_planes = 1;
2649 if (drm_desc->layers[i].format == DRM_FORMAT_INVALID) {
2650 av_log(hwfc, AV_LOG_ERROR, "Cannot map to DRM layer, unsupported!\n");
2651 err = AVERROR_PATCHWELCOME;
2655 drm_desc->layers[i].planes[0].object_index = FFMIN(i, drm_desc->nb_objects - 1);
2657 if (f->tiling == VK_IMAGE_TILING_OPTIMAL)
2658 continue;
2660 vkGetImageSubresourceLayout(hwctx->act_dev, f->img[i], &sub, &layout);
2661 drm_desc->layers[i].planes[0].offset = layout.offset;
2662 drm_desc->layers[i].planes[0].pitch = layout.rowPitch;
2665 dst->width = src->width;
2666 dst->height = src->height;
2667 dst->data[0] = (uint8_t *)drm_desc;
2669 av_log(hwfc, AV_LOG_VERBOSE, "Mapped AVVkFrame to a DRM object!\n");
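/* Example (not part of this file): exporting a Vulkan frame as DRM PRIME with
 * the function above — a sketch, assuming `vk_frame` holds an AVVkFrame and
 * the device was created with the DMABUF/modifier extensions enabled:
 *
 *     AVFrame *drm = av_frame_alloc();
 *     drm->format = AV_PIX_FMT_DRM_PRIME;
 *     if (av_hwframe_map(drm, vk_frame, AV_HWFRAME_MAP_READ) == 0) {
 *         // drm->data[0] is now an AVDRMFrameDescriptor
 *     }
 */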
2679 static int vulkan_map_to_vaapi(AVHWFramesContext *hwfc, AVFrame *dst,
2680 const AVFrame *src, int flags)
2683 AVFrame *tmp = av_frame_alloc();
2684 if (!tmp)
2685 return AVERROR(ENOMEM);
2687 tmp->format = AV_PIX_FMT_DRM_PRIME;
2689 err = vulkan_map_to_drm(hwfc, tmp, src, flags);
2693 err = av_hwframe_map(dst, tmp, flags);
2697 err = ff_hwframe_map_replace(dst, src);
2700 av_frame_free(&tmp);
2706 static int vulkan_map_from(AVHWFramesContext *hwfc, AVFrame *dst,
2707 const AVFrame *src, int flags)
2709 av_unused VulkanDevicePriv *p = hwfc->device_ctx->internal->priv;
2711 switch (dst->format) {
2713 case AV_PIX_FMT_DRM_PRIME:
2714 if (p->extensions & EXT_EXTERNAL_DMABUF_MEMORY)
2715 return vulkan_map_to_drm(hwfc, dst, src, flags);
2717 case AV_PIX_FMT_VAAPI:
2718 if (p->extensions & EXT_EXTERNAL_DMABUF_MEMORY)
2719 return vulkan_map_to_vaapi(hwfc, dst, src, flags);
2723 return vulkan_map_frame_to_mem(hwfc, dst, src, flags);
2727 typedef struct ImageBuffer {
2728 VkBuffer buf;
2729 VkDeviceMemory mem;
2730 VkMemoryPropertyFlagBits flags;
2731 int mapped_mem;
2732 } ImageBuffer;
2734 static void free_buf(void *opaque, uint8_t *data)
2736 AVHWDeviceContext *ctx = opaque;
2737 AVVulkanDeviceContext *hwctx = ctx->hwctx;
2738 ImageBuffer *vkbuf = (ImageBuffer *)data;
2741 vkDestroyBuffer(hwctx->act_dev, vkbuf->buf, hwctx->alloc);
2743 vkFreeMemory(hwctx->act_dev, vkbuf->mem, hwctx->alloc);
2748 static size_t get_req_buffer_size(VulkanDevicePriv *p, int *stride, int height)
2750 size_t size;
2751 *stride = FFALIGN(*stride, p->props.properties.limits.optimalBufferCopyRowPitchAlignment);
2752 size = height * (*stride);
2753 size = FFALIGN(size, p->props.properties.limits.minMemoryMapAlignment);
2754 return size;
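/* Worked example (not part of this file): with
 * optimalBufferCopyRowPitchAlignment = 64, a 1920-wide luma plane keeps its
 * stride (1920 is already a multiple of 64), while a 1000-wide plane is
 * padded to FFALIGN(1000, 64) = 1024; the byte size height*stride is then
 * rounded up to minMemoryMapAlignment the same way. */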
2757 static int create_buf(AVHWDeviceContext *ctx, AVBufferRef **buf,
2758 VkBufferUsageFlags usage, VkMemoryPropertyFlagBits flags,
2759 size_t size, uint32_t req_memory_bits, int host_mapped,
2760 void *create_pnext, void *alloc_pnext)
2765 AVVulkanDeviceContext *hwctx = ctx->hwctx;
2767 VkBufferCreateInfo buf_spawn = {
2768 .sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,
2769 .pNext = create_pnext,
2772 .sharingMode = VK_SHARING_MODE_EXCLUSIVE,
2775 VkBufferMemoryRequirementsInfo2 req_desc = {
2776 .sType = VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2,
2778 VkMemoryDedicatedAllocateInfo ded_alloc = {
2779 .sType = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO,
2780 .pNext = alloc_pnext,
2782 VkMemoryDedicatedRequirements ded_req = {
2783 .sType = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS,
2785 VkMemoryRequirements2 req = {
2786 .sType = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2,
2790 ImageBuffer *vkbuf = av_mallocz(sizeof(*vkbuf));
2791 if (!vkbuf)
2792 return AVERROR(ENOMEM);
2794 vkbuf->mapped_mem = host_mapped;
2796 ret = vkCreateBuffer(hwctx->act_dev, &buf_spawn, NULL, &vkbuf->buf);
2797 if (ret != VK_SUCCESS) {
2798 av_log(ctx, AV_LOG_ERROR, "Failed to create buffer: %s\n",
2800 err = AVERROR_EXTERNAL;
2804 req_desc.buffer = vkbuf->buf;
2806 vkGetBufferMemoryRequirements2(hwctx->act_dev, &req_desc, &req);
2808 /* In case the implementation prefers/requires dedicated allocation */
2809 use_ded_mem = ded_req.prefersDedicatedAllocation |
2810 ded_req.requiresDedicatedAllocation;
2812 ded_alloc.buffer = vkbuf->buf;
2814 /* Additional requirements imposed on us */
2815 if (req_memory_bits)
2816 req.memoryRequirements.memoryTypeBits &= req_memory_bits;
2818 err = alloc_mem(ctx, &req.memoryRequirements, flags,
2819 use_ded_mem ? &ded_alloc : (void *)ded_alloc.pNext,
2820 &vkbuf->flags, &vkbuf->mem);
2824 ret = vkBindBufferMemory(hwctx->act_dev, vkbuf->buf, vkbuf->mem, 0);
2825 if (ret != VK_SUCCESS) {
2826 av_log(ctx, AV_LOG_ERROR, "Failed to bind memory to buffer: %s\n",
2828 err = AVERROR_EXTERNAL;
2832 *buf = av_buffer_create((uint8_t *)vkbuf, sizeof(*vkbuf), free_buf, ctx, 0);
2834 err = AVERROR(ENOMEM);
2841 free_buf(ctx, (uint8_t *)vkbuf);
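/* Example (not part of this file): a hypothetical call to create_buf() for a
 * plain host-visible staging buffer, with no external-memory pNext chains
 * (`p` and `dev_ctx` assumed to be the usual priv/device pointers):
 *
 *     AVBufferRef *buf = NULL;
 *     int stride = 1920;
 *     size_t size = get_req_buffer_size(p, &stride, 1080);
 *     int err = create_buf(dev_ctx, &buf, VK_BUFFER_USAGE_TRANSFER_SRC_BIT,
 *                          VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT,
 *                          size, 0x0, 0, NULL, NULL);
 */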
2845 /* Skips mapping of host-mapped buffers but still invalidates them */
2846 static int map_buffers(AVHWDeviceContext *ctx, AVBufferRef **bufs, uint8_t *mem[],
2847 int nb_buffers, int invalidate)
2850 AVVulkanDeviceContext *hwctx = ctx->hwctx;
2851 VkMappedMemoryRange invalidate_ctx[AV_NUM_DATA_POINTERS];
2852 int invalidate_count = 0;
2854 for (int i = 0; i < nb_buffers; i++) {
2855 ImageBuffer *vkbuf = (ImageBuffer *)bufs[i]->data;
2856 if (vkbuf->mapped_mem)
2857 continue;
2859 ret = vkMapMemory(hwctx->act_dev, vkbuf->mem, 0,
2860 VK_WHOLE_SIZE, 0, (void **)&mem[i]);
2861 if (ret != VK_SUCCESS) {
2862 av_log(ctx, AV_LOG_ERROR, "Failed to map buffer memory: %s\n",
2864 return AVERROR_EXTERNAL;
2871 for (int i = 0; i < nb_buffers; i++) {
2872 ImageBuffer *vkbuf = (ImageBuffer *)bufs[i]->data;
2873 const VkMappedMemoryRange ival_buf = {
2874 .sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE,
2875 .memory = vkbuf->mem,
2876 .size = VK_WHOLE_SIZE,
2879 /* For host-imported memory, Vulkan says to use platform-defined
2880 * sync methods, but doesn't really say not to call flush or invalidate
2881 * on the original host pointers. It does explicitly allow doing that on
2882 * host-mapped pointers which are then mapped again using vkMapMemory,
2883 * but known implementations return the original pointers when mapped
2884 * again. */
2885 if (vkbuf->flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)
2886 continue;
2888 invalidate_ctx[invalidate_count++] = ival_buf;
2891 if (invalidate_count) {
2892 ret = vkInvalidateMappedMemoryRanges(hwctx->act_dev, invalidate_count,
2894 if (ret != VK_SUCCESS)
2895 av_log(ctx, AV_LOG_WARNING, "Failed to invalidate memory: %s\n",
2902 static int unmap_buffers(AVHWDeviceContext *ctx, AVBufferRef **bufs,
2903 int nb_buffers, int flush)
2907 AVVulkanDeviceContext *hwctx = ctx->hwctx;
2908 VkMappedMemoryRange flush_ctx[AV_NUM_DATA_POINTERS];
2909 int flush_count = 0;
2912 for (int i = 0; i < nb_buffers; i++) {
2913 ImageBuffer *vkbuf = (ImageBuffer *)bufs[i]->data;
2914 const VkMappedMemoryRange flush_buf = {
2915 .sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE,
2916 .memory = vkbuf->mem,
2917 .size = VK_WHOLE_SIZE,
2920 if (vkbuf->flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)
2921 continue;
2923 flush_ctx[flush_count++] = flush_buf;
2928 ret = vkFlushMappedMemoryRanges(hwctx->act_dev, flush_count, flush_ctx);
2929 if (ret != VK_SUCCESS) {
2930 av_log(ctx, AV_LOG_ERROR, "Failed to flush memory: %s\n",
2932 err = AVERROR_EXTERNAL; /* We still want to try to unmap them */
2936 for (int i = 0; i < nb_buffers; i++) {
2937 ImageBuffer *vkbuf = (ImageBuffer *)bufs[i]->data;
2938 if (vkbuf->mapped_mem)
2939 continue;
2941 vkUnmapMemory(hwctx->act_dev, vkbuf->mem);
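/* Example (not part of this file): the map/copy/unmap pattern these helpers
 * implement, sketched for a single staging buffer `buf` and hypothetical
 * `src_plane`/`plane_size`:
 *
 *     uint8_t *mem[1];
 *     int err = map_buffers(dev_ctx, &buf, mem, 1, 0); // no invalidate
 *     if (err)
 *         return err;
 *     memcpy(mem[0], src_plane, plane_size);
 *     err = unmap_buffers(dev_ctx, &buf, 1, 1);        // flush on unmap
 */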
2947 static int transfer_image_buf(AVHWFramesContext *hwfc, const AVFrame *f,
2948 AVBufferRef **bufs, size_t *buf_offsets,
2949 const int *buf_stride, int w,
2950 int h, enum AVPixelFormat pix_fmt, int to_buf)
2953 AVVkFrame *frame = (AVVkFrame *)f->data[0];
2954 VulkanFramesPriv *fp = hwfc->internal->priv;
2957 VkPipelineStageFlagBits sem_wait_dst[AV_NUM_DATA_POINTERS];
2959 const int planes = av_pix_fmt_count_planes(pix_fmt);
2960 const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(pix_fmt);
2962 VkImageMemoryBarrier img_bar[AV_NUM_DATA_POINTERS] = { 0 };
2963 VulkanExecCtx *ectx = to_buf ? &fp->download_ctx : &fp->upload_ctx;
2964 VkCommandBuffer cmd_buf = get_buf_exec_ctx(hwfc, ectx);
2966 VkSubmitInfo s_info = {
2967 .sType = VK_STRUCTURE_TYPE_SUBMIT_INFO,
2968 .pSignalSemaphores = frame->sem,
2969 .pWaitSemaphores = frame->sem,
2970 .pWaitDstStageMask = sem_wait_dst,
2971 .signalSemaphoreCount = planes,
2972 .waitSemaphoreCount = planes,
2975 if ((err = wait_start_exec_ctx(hwfc, ectx)))
2978 /* Change the image layout to something more optimal for transfers */
2979 for (int i = 0; i < planes; i++) {
2980 VkImageLayout new_layout = to_buf ? VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL :
2981 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
2982 VkAccessFlags new_access = to_buf ? VK_ACCESS_TRANSFER_READ_BIT :
2983 VK_ACCESS_TRANSFER_WRITE_BIT;
2985 sem_wait_dst[i] = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
2987 /* If the layout matches and we have read access, skip the barrier */
2988 if ((frame->layout[i] == new_layout) && (frame->access[i] & new_access))
2989 continue;
2991 img_bar[bar_num].sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
2992 img_bar[bar_num].srcAccessMask = 0x0;
2993 img_bar[bar_num].dstAccessMask = new_access;
2994 img_bar[bar_num].oldLayout = frame->layout[i];
2995 img_bar[bar_num].newLayout = new_layout;
2996 img_bar[bar_num].srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
2997 img_bar[bar_num].dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
2998 img_bar[bar_num].image = frame->img[i];
2999 img_bar[bar_num].subresourceRange.levelCount = 1;
3000 img_bar[bar_num].subresourceRange.layerCount = 1;
3001 img_bar[bar_num].subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
3003 frame->layout[i] = img_bar[bar_num].newLayout;
3004 frame->access[i] = img_bar[bar_num].dstAccessMask;
3010 vkCmdPipelineBarrier(cmd_buf, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
3011 VK_PIPELINE_STAGE_TRANSFER_BIT, 0,
3012 0, NULL, 0, NULL, bar_num, img_bar);
3014 /* Schedule a copy for each plane */
3015 for (int i = 0; i < planes; i++) {
3016 ImageBuffer *vkbuf = (ImageBuffer *)bufs[i]->data;
3017 VkBufferImageCopy buf_reg = {
3018 .bufferOffset = buf_offsets[i],
3019 .bufferRowLength = buf_stride[i] / desc->comp[i].step,
3020 .imageSubresource.layerCount = 1,
3021 .imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
3022 .imageOffset = { 0, 0, 0, },
3026 get_plane_wh(&p_w, &p_h, pix_fmt, w, h, i);
3028 buf_reg.bufferImageHeight = p_h;
3029 buf_reg.imageExtent = (VkExtent3D){ p_w, p_h, 1, };
3032 vkCmdCopyImageToBuffer(cmd_buf, frame->img[i], frame->layout[i],
3033 vkbuf->buf, 1, &buf_reg);
3035 vkCmdCopyBufferToImage(cmd_buf, vkbuf->buf, frame->img[i],
3036 frame->layout[i], 1, &buf_reg);
3039 /* When uploading, do this asynchronously if the source is refcounted by
3040 * keeping the buffers as a submission dependency.
3041 * The hwcontext is guaranteed to not be freed until all frames are freed
3042 * in the frames_uninit function.
3043 * When downloading to a buffer, do this synchronously and wait for the
3044 * queue submission to finish executing. */
3047 for (ref = 0; ref < AV_NUM_DATA_POINTERS; ref++) {
3050 if ((err = add_buf_dep_exec_ctx(hwfc, ectx, &f->buf[ref], 1)))
3053 if (ref && (err = add_buf_dep_exec_ctx(hwfc, ectx, bufs, planes)))
3055 return submit_exec_ctx(hwfc, ectx, &s_info, !ref);
3057 return submit_exec_ctx(hwfc, ectx, &s_info, 1);
3061 static int vulkan_transfer_data(AVHWFramesContext *hwfc, const AVFrame *vkf,
3062 const AVFrame *swf, int from)
3066 AVVkFrame *f = (AVVkFrame *)vkf->data[0];
3067 AVHWDeviceContext *dev_ctx = hwfc->device_ctx;
3068 AVVulkanDeviceContext *hwctx = dev_ctx->hwctx;
3069 VulkanDevicePriv *p = hwfc->device_ctx->internal->priv;
3072 AVBufferRef *bufs[AV_NUM_DATA_POINTERS] = { 0 };
3073 size_t buf_offsets[AV_NUM_DATA_POINTERS] = { 0 };
3076 const int planes = av_pix_fmt_count_planes(swf->format);
3078 int host_mapped[AV_NUM_DATA_POINTERS] = { 0 };
3079 const int map_host = !!(p->extensions & EXT_EXTERNAL_HOST_MEMORY);
3081 VK_LOAD_PFN(hwctx->inst, vkGetMemoryHostPointerPropertiesEXT);
3083 if (swf->format != AV_PIX_FMT_NONE && !av_vkfmt_from_pixfmt(swf->format)) {
3084 av_log(hwfc, AV_LOG_ERROR, "Unsupported software frame pixel format!\n");
3085 return AVERROR(EINVAL);
3088 if (swf->width > hwfc->width || swf->height > hwfc->height)
3089 return AVERROR(EINVAL);
3091 /* For linear, host-visible images */
3092 if (f->tiling == VK_IMAGE_TILING_LINEAR &&
3093 f->flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) {
3094 AVFrame *map = av_frame_alloc();
3095 if (!map)
3096 return AVERROR(ENOMEM);
3097 map->format = swf->format;
3099 err = vulkan_map_frame_to_mem(hwfc, map, vkf, AV_HWFRAME_MAP_WRITE);
3103 err = av_frame_copy((AVFrame *)(from ? swf : map), from ? map : swf);
3104 av_frame_free(&map);
3108 /* Create buffers */
3109 for (int i = 0; i < planes; i++) {
3112 VkExternalMemoryBufferCreateInfo create_desc = {
3113 .sType = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO,
3114 .handleTypes = VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT,
3117 VkImportMemoryHostPointerInfoEXT import_desc = {
3118 .sType = VK_STRUCTURE_TYPE_IMPORT_MEMORY_HOST_POINTER_INFO_EXT,
3119 .handleType = VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT,
3122 VkMemoryHostPointerPropertiesEXT p_props = {
3123 .sType = VK_STRUCTURE_TYPE_MEMORY_HOST_POINTER_PROPERTIES_EXT,
3126 get_plane_wh(&p_w, &p_h, swf->format, swf->width, swf->height, i);
3128 tmp.linesize[i] = FFABS(swf->linesize[i]);
3130 /* Do not map images with a negative stride */
3131 if (map_host && swf->linesize[i] > 0) {
3133 offs = (uintptr_t)swf->data[i] % p->hprops.minImportedHostPointerAlignment;
3134 import_desc.pHostPointer = swf->data[i] - offs;
3136 /* We have to compensate for the few extra bytes of padding we
3137 * completely ignore at the start */
3138 req_size = FFALIGN(offs + tmp.linesize[i] * p_h,
3139 p->hprops.minImportedHostPointerAlignment);
3141 ret = pfn_vkGetMemoryHostPointerPropertiesEXT(hwctx->act_dev,
3142 import_desc.handleType,
3143 import_desc.pHostPointer,
3146 if (ret == VK_SUCCESS) {
3148 buf_offsets[i] = offs;
3152 if (!host_mapped[i])
3153 req_size = get_req_buffer_size(p, &tmp.linesize[i], p_h);
3155 err = create_buf(dev_ctx, &bufs[i],
3156 from ? VK_BUFFER_USAGE_TRANSFER_DST_BIT :
3157 VK_BUFFER_USAGE_TRANSFER_SRC_BIT,
3158 VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT,
3159 req_size, p_props.memoryTypeBits, host_mapped[i],
3160 host_mapped[i] ? &create_desc : NULL,
3161 host_mapped[i] ? &import_desc : NULL);
3167 /* Map, copy image to buffer, unmap */
3168 if ((err = map_buffers(dev_ctx, bufs, tmp.data, planes, 0)))
3171 for (int i = 0; i < planes; i++) {
3175 get_plane_wh(&p_w, &p_h, swf->format, swf->width, swf->height, i);
3177 av_image_copy_plane(tmp.data[i], tmp.linesize[i],
3178 (const uint8_t *)swf->data[i], swf->linesize[i],
3179 FFMIN(tmp.linesize[i], FFABS(swf->linesize[i])),
3183 if ((err = unmap_buffers(dev_ctx, bufs, planes, 1)))
3187 /* Copy buffers into/from image */
3188 err = transfer_image_buf(hwfc, vkf, bufs, buf_offsets, tmp.linesize,
3189 swf->width, swf->height, swf->format, from);
3192 /* Map, copy image to buffer, unmap */
3193 if ((err = map_buffers(dev_ctx, bufs, tmp.data, planes, 0)))
3196 for (int i = 0; i < planes; i++) {
3200 get_plane_wh(&p_w, &p_h, swf->format, swf->width, swf->height, i);
3202 av_image_copy_plane(swf->data[i], swf->linesize[i],
3203 (const uint8_t *)tmp.data[i], tmp.linesize[i],
3204 FFMIN(tmp.linesize[i], FFABS(swf->linesize[i])),
3208 if ((err = unmap_buffers(dev_ctx, bufs, planes, 1)))
3213 for (int i = 0; i < planes; i++)
3214 av_buffer_unref(&bufs[i]);
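/* Example (not part of this file): uploading a software frame through the
 * staging-buffer path above — a minimal sketch, assuming `sw` is an
 * AV_PIX_FMT_NV12 frame and `hw_frames_ref` a matching Vulkan frames context:
 *
 *     AVFrame *hw = av_frame_alloc();
 *     int err = av_hwframe_get_buffer(hw_frames_ref, hw, 0);
 *     if (err >= 0)
 *         err = av_hwframe_transfer_data(hw, sw, 0);
 */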
3219 static int vulkan_transfer_data_to(AVHWFramesContext *hwfc, AVFrame *dst,
3220 const AVFrame *src)
3222 av_unused VulkanDevicePriv *p = hwfc->device_ctx->internal->priv;
3224 switch (src->format) {
3226 case AV_PIX_FMT_CUDA:
3227 if ((p->extensions & EXT_EXTERNAL_FD_MEMORY) &&
3228 (p->extensions & EXT_EXTERNAL_FD_SEM))
3229 return vulkan_transfer_data_from_cuda(hwfc, dst, src);
3232 if (src->hw_frames_ctx)
3233 return AVERROR(ENOSYS);
3235 return vulkan_transfer_data(hwfc, dst, src, 0);
3240 static int vulkan_transfer_data_to_cuda(AVHWFramesContext *hwfc, AVFrame *dst,
3241 const AVFrame *src)
3247 AVVkFrameInternal *dst_int;
3248 const int planes = av_pix_fmt_count_planes(hwfc->sw_format);
3249 const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(hwfc->sw_format);
3251 AVHWFramesContext *cuda_fc = (AVHWFramesContext*)dst->hw_frames_ctx->data;
3252 AVHWDeviceContext *cuda_cu = cuda_fc->device_ctx;
3253 AVCUDADeviceContext *cuda_dev = cuda_cu->hwctx;
3254 AVCUDADeviceContextInternal *cu_internal = cuda_dev->internal;
3255 CudaFunctions *cu = cu_internal->cuda_dl;
3257 ret = CHECK_CU(cu->cuCtxPushCurrent(cuda_dev->cuda_ctx));
3259 return AVERROR_EXTERNAL;
3261 dst_f = (AVVkFrame *)src->data[0];
3263 err = vulkan_export_to_cuda(hwfc, dst->hw_frames_ctx, src);
3265 CHECK_CU(cu->cuCtxPopCurrent(&dummy));
3269 dst_int = dst_f->internal;
3271 for (int i = 0; i < planes; i++) {
3272 CUDA_MEMCPY2D cpy = {
3273 .dstMemoryType = CU_MEMORYTYPE_DEVICE,
3274 .dstDevice = (CUdeviceptr)dst->data[i],
3275 .dstPitch = dst->linesize[i],
3278 .srcMemoryType = CU_MEMORYTYPE_ARRAY,
3279 .srcArray = dst_int->cu_array[i],
3283 get_plane_wh(&w, &h, hwfc->sw_format, hwfc->width, hwfc->height, i);
3285 cpy.WidthInBytes = w * desc->comp[i].step;
3288 ret = CHECK_CU(cu->cuMemcpy2DAsync(&cpy, cuda_dev->stream));
3290 err = AVERROR_EXTERNAL;
3295 CHECK_CU(cu->cuCtxPopCurrent(&dummy));
3297 av_log(hwfc, AV_LOG_VERBOSE, "Transferred Vulkan image to CUDA!\n");
3302 CHECK_CU(cu->cuCtxPopCurrent(&dummy));
3303 vulkan_free_internal(dst_int);
3304 dst_f->internal = NULL;
3305 av_buffer_unref(&dst->buf[0]);
3310 static int vulkan_transfer_data_from(AVHWFramesContext *hwfc, AVFrame *dst,
3311 const AVFrame *src)
3313 av_unused VulkanDevicePriv *p = hwfc->device_ctx->internal->priv;
3315 switch (dst->format) {
3317 case AV_PIX_FMT_CUDA:
3318 if ((p->extensions & EXT_EXTERNAL_FD_MEMORY) &&
3319 (p->extensions & EXT_EXTERNAL_FD_SEM))
3320 return vulkan_transfer_data_to_cuda(hwfc, dst, src);
3323 if (dst->hw_frames_ctx)
3324 return AVERROR(ENOSYS);
3326 return vulkan_transfer_data(hwfc, src, dst, 1);
3330 static int vulkan_frames_derive_to(AVHWFramesContext *dst_fc,
3331 AVHWFramesContext *src_fc, int flags)
3333 return vulkan_frames_init(dst_fc);
3336 AVVkFrame *av_vk_frame_alloc(void)
3338 return av_mallocz(sizeof(AVVkFrame));
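/* Example (not part of this file): creating a Vulkan device and frames
 * context through the entry points registered below — a minimal sketch:
 *
 *     AVBufferRef *dev = NULL, *frames = NULL;
 *     AVHWFramesContext *fc;
 *     if (av_hwdevice_ctx_create(&dev, AV_HWDEVICE_TYPE_VULKAN,
 *                                NULL, NULL, 0) < 0)
 *         return -1;
 *     frames = av_hwframe_ctx_alloc(dev);
 *     fc = (AVHWFramesContext *)frames->data;
 *     fc->format    = AV_PIX_FMT_VULKAN;
 *     fc->sw_format = AV_PIX_FMT_NV12;
 *     fc->width     = 1920;
 *     fc->height    = 1080;
 *     if (av_hwframe_ctx_init(frames) < 0)
 *         return -1;
 */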
3341 const HWContextType ff_hwcontext_type_vulkan = {
3342 .type = AV_HWDEVICE_TYPE_VULKAN,
3345 .device_hwctx_size = sizeof(AVVulkanDeviceContext),
3346 .device_priv_size = sizeof(VulkanDevicePriv),
3347 .frames_hwctx_size = sizeof(AVVulkanFramesContext),
3348 .frames_priv_size = sizeof(VulkanFramesPriv),
3350 .device_init = &vulkan_device_init,
3351 .device_create = &vulkan_device_create,
3352 .device_derive = &vulkan_device_derive,
3354 .frames_get_constraints = &vulkan_frames_get_constraints,
3355 .frames_init = vulkan_frames_init,
3356 .frames_get_buffer = vulkan_get_buffer,
3357 .frames_uninit = vulkan_frames_uninit,
3359 .transfer_get_formats = vulkan_transfer_get_formats,
3360 .transfer_data_to = vulkan_transfer_data_to,
3361 .transfer_data_from = vulkan_transfer_data_from,
3363 .map_to = vulkan_map_to,
3364 .map_from = vulkan_map_from,
3365 .frames_derive_to = &vulkan_frames_derive_to,
3367 .pix_fmts = (const enum AVPixelFormat []) {