diff --git a/libavutil/hwcontext_vulkan.c b/libavutil/hwcontext_vulkan.c
index 47feff43dc1443b3e840ae50b7b56aadbedb48cf..a98ea916e7710716f9af902dc23251be7f35cca5 100644
--- a/libavutil/hwcontext_vulkan.c
+++ b/libavutil/hwcontext_vulkan.c
@@ -62,8 +62,9 @@ typedef struct VulkanExecCtx {
 
 typedef struct VulkanDevicePriv {
     /* Properties */
-    VkPhysicalDeviceProperties props;
+    VkPhysicalDeviceProperties2 props;
     VkPhysicalDeviceMemoryProperties mprops;
+    VkPhysicalDeviceExternalMemoryHostPropertiesEXT hprops;
 
     /* Queues */
     uint32_t qfs[3];
@@ -72,10 +73,6 @@ typedef struct VulkanDevicePriv {
     /* Debug callback */
     VkDebugUtilsMessengerEXT debug_ctx;
 
-    /* Image transfers */
-    VulkanExecCtx upload_ctx;
-    VulkanExecCtx download_ctx;
-
     /* Extensions */
     uint64_t extensions;
 
@@ -89,6 +86,10 @@ typedef struct VulkanDevicePriv {
 typedef struct VulkanFramesPriv {
     /* Image conversions */
     VulkanExecCtx conv_ctx;
+
+    /* Image transfers */
+    VulkanExecCtx upload_ctx;
+    VulkanExecCtx download_ctx;
 } VulkanFramesPriv;
 
 typedef struct AVVkFrameInternal {
@@ -208,6 +209,7 @@ enum VulkanExtensions {
     EXT_DRM_MODIFIER_FLAGS     = 1ULL <<  1, /* VK_EXT_image_drm_format_modifier */
     EXT_EXTERNAL_FD_MEMORY     = 1ULL <<  2, /* VK_KHR_external_memory_fd */
     EXT_EXTERNAL_FD_SEM        = 1ULL <<  3, /* VK_KHR_external_semaphore_fd */
+    EXT_EXTERNAL_HOST_MEMORY   = 1ULL <<  4, /* VK_EXT_external_memory_host */
 
     EXT_NO_FLAG                = 1ULL << 63,
 };
@@ -226,6 +228,7 @@ static const VulkanOptExtension optional_device_exts[] = {
     { VK_EXT_EXTERNAL_MEMORY_DMA_BUF_EXTENSION_NAME,          EXT_EXTERNAL_DMABUF_MEMORY, },
     { VK_EXT_IMAGE_DRM_FORMAT_MODIFIER_EXTENSION_NAME,        EXT_DRM_MODIFIER_FLAGS,     },
     { VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME,            EXT_EXTERNAL_FD_SEM,        },
+    { VK_EXT_EXTERNAL_MEMORY_HOST_EXTENSION_NAME,             EXT_EXTERNAL_HOST_MEMORY,   },
 };
 
 /* Converts return values to strings */
@@ -523,7 +526,6 @@ static int find_device(AVHWDeviceContext *ctx, VulkanDeviceSelection *select)
     VkPhysicalDevice *devices = NULL;
     VkPhysicalDeviceIDProperties *idp = NULL;
     VkPhysicalDeviceProperties2 *prop = NULL;
-    VulkanDevicePriv *p = ctx->internal->priv;
     AVVulkanDeviceContext *hwctx = ctx->hwctx;
 
     ret = vkEnumeratePhysicalDevices(hwctx->inst, &num, NULL);
@@ -627,10 +629,9 @@ static int find_device(AVHWDeviceContext *ctx, VulkanDeviceSelection *select)
     }
 
 end:
-    if (choice > -1) {
-        p->dev_is_nvidia = (prop[choice].properties.vendorID == 0x10de);
+    if (choice > -1)
         hwctx->phys_dev = devices[choice];
-    }
+
     av_free(devices);
     av_free(prop);
     av_free(idp);
@@ -732,11 +733,11 @@ fail:
     return AVERROR(ENOMEM);
 }
 
-static int create_exec_ctx(AVHWDeviceContext *ctx, VulkanExecCtx *cmd,
+static int create_exec_ctx(AVHWFramesContext *hwfc, VulkanExecCtx *cmd,
                            int queue_family_index, int num_queues)
 {
     VkResult ret;
-    AVVulkanDeviceContext *hwctx = ctx->hwctx;
+    AVVulkanDeviceContext *hwctx = hwfc->device_ctx->hwctx;
 
     VkCommandPoolCreateInfo cqueue_create = {
         .sType              = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,
@@ -763,7 +764,7 @@ static int create_exec_ctx(AVHWDeviceContext *ctx, VulkanExecCtx *cmd,
     ret = vkCreateCommandPool(hwctx->act_dev, &cqueue_create,
                               hwctx->alloc, &cmd->pool);
     if (ret != VK_SUCCESS) {
-        av_log(ctx, AV_LOG_ERROR, "Command pool creation failure: %s\n",
+        av_log(hwfc, AV_LOG_ERROR, "Command pool creation failure: %s\n",
                vk_ret2str(ret));
         return AVERROR_EXTERNAL;
     }
@@ -773,7 +774,7 @@ static int create_exec_ctx(AVHWDeviceContext *ctx, VulkanExecCtx *cmd,
     /* Allocate command buffer */
     ret = vkAllocateCommandBuffers(hwctx->act_dev, &cbuf_create, cmd->bufs);
     if (ret != VK_SUCCESS) {
-        av_log(ctx, AV_LOG_ERROR, "Command buffer alloc failure: %s\n",
+        av_log(hwfc, AV_LOG_ERROR, "Command buffer alloc failure: %s\n",
                vk_ret2str(ret));
         return AVERROR_EXTERNAL;
     }
@@ -787,9 +788,9 @@ static int create_exec_ctx(AVHWDeviceContext *ctx, VulkanExecCtx *cmd,
     return 0;
 }
 
-static void free_exec_ctx(AVHWDeviceContext *ctx, VulkanExecCtx *cmd)
+static void free_exec_ctx(AVHWFramesContext *hwfc, VulkanExecCtx *cmd)
 {
-    AVVulkanDeviceContext *hwctx = ctx->hwctx;
+    AVVulkanDeviceContext *hwctx = hwfc->device_ctx->hwctx;
 
     /* Make sure all queues have finished executing */
     for (int i = 0; i < cmd->nb_queues; i++) {
@@ -819,12 +820,12 @@ static void free_exec_ctx(AVHWDeviceContext *ctx, VulkanExecCtx *cmd)
     av_freep(&cmd->queues);
 }
 
-static VkCommandBuffer get_buf_exec_ctx(AVHWDeviceContext *ctx, VulkanExecCtx *cmd)
+static VkCommandBuffer get_buf_exec_ctx(AVHWFramesContext *hwfc, VulkanExecCtx *cmd)
 {
     return cmd->bufs[cmd->cur_queue_idx];
 }
 
-static void unref_exec_ctx_deps(AVHWDeviceContext *ctx, VulkanExecCtx *cmd)
+static void unref_exec_ctx_deps(AVHWFramesContext *hwfc, VulkanExecCtx *cmd)
 {
     VulkanQueueCtx *q = &cmd->queues[cmd->cur_queue_idx];
 
@@ -833,10 +834,10 @@ static void unref_exec_ctx_deps(AVHWDeviceContext *ctx, VulkanExecCtx *cmd)
     q->nb_buf_deps = 0;
 }
 
-static int wait_start_exec_ctx(AVHWDeviceContext *ctx, VulkanExecCtx *cmd)
+static int wait_start_exec_ctx(AVHWFramesContext *hwfc, VulkanExecCtx *cmd)
 {
     VkResult ret;
-    AVVulkanDeviceContext *hwctx = ctx->hwctx;
+    AVVulkanDeviceContext *hwctx = hwfc->device_ctx->hwctx;
     VulkanQueueCtx *q = &cmd->queues[cmd->cur_queue_idx];
 
     VkCommandBufferBeginInfo cmd_start = {
@@ -852,7 +853,7 @@ static int wait_start_exec_ctx(AVHWDeviceContext *ctx, VulkanExecCtx *cmd)
         ret = vkCreateFence(hwctx->act_dev, &fence_spawn, hwctx->alloc,
                             &q->fence);
         if (ret != VK_SUCCESS) {
-            av_log(ctx, AV_LOG_ERROR, "Failed to queue frame fence: %s\n",
+            av_log(hwfc, AV_LOG_ERROR, "Failed to queue frame fence: %s\n",
                    vk_ret2str(ret));
             return AVERROR_EXTERNAL;
         }
@@ -862,11 +863,11 @@ static int wait_start_exec_ctx(AVHWDeviceContext *ctx, VulkanExecCtx *cmd)
     }
 
     /* Discard queue dependencies */
-    unref_exec_ctx_deps(ctx, cmd);
+    unref_exec_ctx_deps(hwfc, cmd);
 
     ret = vkBeginCommandBuffer(cmd->bufs[cmd->cur_queue_idx], &cmd_start);
     if (ret != VK_SUCCESS) {
-        av_log(ctx, AV_LOG_ERROR, "Unable to init command buffer: %s\n",
+        av_log(hwfc, AV_LOG_ERROR, "Unable to init command buffer: %s\n",
                vk_ret2str(ret));
         return AVERROR_EXTERNAL;
     }
@@ -874,7 +875,7 @@ static int wait_start_exec_ctx(AVHWDeviceContext *ctx, VulkanExecCtx *cmd)
     return 0;
 }
 
-static int add_buf_dep_exec_ctx(AVHWDeviceContext *ctx, VulkanExecCtx *cmd,
+static int add_buf_dep_exec_ctx(AVHWFramesContext *hwfc, VulkanExecCtx *cmd,
                                 AVBufferRef * const *deps, int nb_deps)
 {
     AVBufferRef **dst;
@@ -900,11 +901,11 @@ static int add_buf_dep_exec_ctx(AVHWDeviceContext *ctx, VulkanExecCtx *cmd,
     return 0;
 
 err:
-    unref_exec_ctx_deps(ctx, cmd);
+    unref_exec_ctx_deps(hwfc, cmd);
     return AVERROR(ENOMEM);
 }
 
-static int submit_exec_ctx(AVHWDeviceContext *ctx, VulkanExecCtx *cmd,
+static int submit_exec_ctx(AVHWFramesContext *hwfc, VulkanExecCtx *cmd,
                            VkSubmitInfo *s_info, int synchronous)
 {
     VkResult ret;
@@ -912,9 +913,9 @@ static int submit_exec_ctx(AVHWDeviceContext *ctx, VulkanExecCtx *cmd,
 
     ret = vkEndCommandBuffer(cmd->bufs[cmd->cur_queue_idx]);
     if (ret != VK_SUCCESS) {
-        av_log(ctx, AV_LOG_ERROR, "Unable to finish command buffer: %s\n",
+        av_log(hwfc, AV_LOG_ERROR, "Unable to finish command buffer: %s\n",
                vk_ret2str(ret));
-        unref_exec_ctx_deps(ctx, cmd);
+        unref_exec_ctx_deps(hwfc, cmd);
         return AVERROR_EXTERNAL;
     }
 
@@ -923,17 +924,17 @@ static int submit_exec_ctx(AVHWDeviceContext *ctx, VulkanExecCtx *cmd,
 
     ret = vkQueueSubmit(q->queue, 1, s_info, q->fence);
     if (ret != VK_SUCCESS) {
-        unref_exec_ctx_deps(ctx, cmd);
+        unref_exec_ctx_deps(hwfc, cmd);
         return AVERROR_EXTERNAL;
     }
 
     q->was_synchronous = synchronous;
 
     if (synchronous) {
-        AVVulkanDeviceContext *hwctx = ctx->hwctx;
+        AVVulkanDeviceContext *hwctx = hwfc->device_ctx->hwctx;
         vkWaitForFences(hwctx->act_dev, 1, &q->fence, VK_TRUE, UINT64_MAX);
         vkResetFences(hwctx->act_dev, 1, &q->fence);
-        unref_exec_ctx_deps(ctx, cmd);
+        unref_exec_ctx_deps(hwfc, cmd);
     } else { /* Rotate queues */
         cmd->cur_queue_idx = (cmd->cur_queue_idx + 1) % cmd->nb_queues;
     }
@@ -946,8 +947,6 @@ static void vulkan_device_free(AVHWDeviceContext *ctx)
     VulkanDevicePriv *p = ctx->internal->priv;
     AVVulkanDeviceContext *hwctx = ctx->hwctx;
 
-    free_exec_ctx(ctx, &p->cmd);
-
     vkDestroyDevice(hwctx->act_dev, hwctx->alloc);
 
     if (p->debug_ctx) {
@@ -1001,16 +1000,6 @@ static int vulkan_device_create_internal(AVHWDeviceContext *ctx,
     if ((err = find_device(ctx, dev_select)))
         goto end;
 
-    vkGetPhysicalDeviceProperties(hwctx->phys_dev, &p->props);
-    av_log(ctx, AV_LOG_VERBOSE, "Using device: %s\n", p->props.deviceName);
-    av_log(ctx, AV_LOG_VERBOSE, "Alignments:\n");
-    av_log(ctx, AV_LOG_VERBOSE, "    optimalBufferCopyOffsetAlignment:   %li\n",
-           p->props.limits.optimalBufferCopyOffsetAlignment);
-    av_log(ctx, AV_LOG_VERBOSE, "    optimalBufferCopyRowPitchAlignment: %li\n",
-           p->props.limits.optimalBufferCopyRowPitchAlignment);
-    av_log(ctx, AV_LOG_VERBOSE, "    minMemoryMapAlignment:              %li\n",
-           p->props.limits.minMemoryMapAlignment);
-
     vkGetPhysicalDeviceFeatures(hwctx->phys_dev, &dev_features);
 #define COPY_FEATURE(DST, NAME) (DST).features.NAME = dev_features.NAME;
     COPY_FEATURE(hwctx->device_features, shaderImageGatherExtended)
@@ -1062,7 +1051,6 @@ end:
 
 static int vulkan_device_init(AVHWDeviceContext *ctx)
 {
-    int err;
     uint32_t queue_num;
     AVVulkanDeviceContext *hwctx = ctx->hwctx;
     VulkanDevicePriv *p = ctx->internal->priv;
@@ -1072,12 +1060,32 @@ static int vulkan_device_init(AVHWDeviceContext *ctx)
         for (int j = 0; j < FF_ARRAY_ELEMS(optional_device_exts); j++) {
             if (!strcmp(hwctx->enabled_dev_extensions[i],
                         optional_device_exts[j].name)) {
+                av_log(ctx, AV_LOG_VERBOSE, "Using device extension %s\n",
+                       hwctx->enabled_dev_extensions[i]);
                 p->extensions |= optional_device_exts[j].flag;
                 break;
             }
         }
     }
 
+    p->props.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2;
+    p->props.pNext = &p->hprops;
+    p->hprops.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT;
+
+    vkGetPhysicalDeviceProperties2(hwctx->phys_dev, &p->props);
+    av_log(ctx, AV_LOG_VERBOSE, "Using device: %s\n",
+           p->props.properties.deviceName);
+    av_log(ctx, AV_LOG_VERBOSE, "Alignments:\n");
+    av_log(ctx, AV_LOG_VERBOSE, "    optimalBufferCopyRowPitchAlignment: %li\n",
+           p->props.properties.limits.optimalBufferCopyRowPitchAlignment);
+    av_log(ctx, AV_LOG_VERBOSE, "    minMemoryMapAlignment:              %li\n",
+           p->props.properties.limits.minMemoryMapAlignment);
+    if (p->extensions & EXT_EXTERNAL_HOST_MEMORY)
+        av_log(ctx, AV_LOG_VERBOSE, "    minImportedHostPointerAlignment:    %li\n",
+               p->hprops.minImportedHostPointerAlignment);
+
+    p->dev_is_nvidia = (p->props.properties.vendorID == 0x10de);
+
     vkGetPhysicalDeviceQueueFamilyProperties(hwctx->phys_dev, &queue_num, NULL);
     if (!queue_num) {
         av_log(ctx, AV_LOG_ERROR, "Failed to get queues!\n");
@@ -1105,12 +1113,6 @@ if (n >= queue_num) {
         (hwctx->queue_family_comp_index != hwctx->queue_family_tx_index))
         p->qfs[p->num_qfs++] = hwctx->queue_family_comp_index;
 
-    /* Create exec context - if there's something invalid this will error out */
-    err = create_exec_ctx(ctx, &p->cmd, hwctx->queue_family_tx_index,
-                          GET_QUEUE_COUNT(hwctx, 0, 0, 1));
-    if (err)
-        return err;
-
     /* Get device capabilities */
     vkGetPhysicalDeviceMemoryProperties(hwctx->phys_dev, &p->mprops);
 
@@ -1238,8 +1240,8 @@ static int vulkan_frames_get_constraints(AVHWDeviceContext *ctx,
 
     constraints->min_width  = 0;
     constraints->min_height = 0;
-    constraints->max_width  = p->props.limits.maxImageDimension2D;
-    constraints->max_height = p->props.limits.maxImageDimension2D;
+    constraints->max_width  = p->props.properties.limits.maxImageDimension2D;
+    constraints->max_height = p->props.properties.limits.maxImageDimension2D;
 
     constraints->valid_hw_formats = av_malloc_array(2, sizeof(enum AVPixelFormat));
     if (!constraints->valid_hw_formats)
@@ -1252,7 +1254,7 @@ static int vulkan_frames_get_constraints(AVHWDeviceContext *ctx,
 }
 
 static int alloc_mem(AVHWDeviceContext *ctx, VkMemoryRequirements *req,
-                     VkMemoryPropertyFlagBits req_flags, void *alloc_extension,
+                     VkMemoryPropertyFlagBits req_flags, const void *alloc_extension,
                      VkMemoryPropertyFlagBits *mem_flags, VkDeviceMemory *mem)
 {
     VkResult ret;
@@ -1260,16 +1262,11 @@ static int alloc_mem(AVHWDeviceContext *ctx, VkMemoryRequirements *req,
     VulkanDevicePriv *p = ctx->internal->priv;
     AVVulkanDeviceContext *dev_hwctx = ctx->hwctx;
     VkMemoryAllocateInfo alloc_info = {
-        .sType           = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
-        .pNext           = alloc_extension,
+        .sType          = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
+        .pNext          = alloc_extension,
+        .allocationSize = req->size,
     };
 
-    /* Align if we need to */
-    if (req_flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT)
-        req->size = FFALIGN(req->size, p->props.limits.minMemoryMapAlignment);
-
-    alloc_info.allocationSize = req->size;
-
     /* The vulkan spec requires memory types to be sorted in the "optimal"
      * order, so the first matching type we find will be the best/fastest one */
     for (int i = 0; i < p->mprops.memoryTypeCount; i++) {
@@ -1361,6 +1358,7 @@ static int alloc_bind_mem(AVHWFramesContext *hwfc, AVVkFrame *f,
     int err;
     VkResult ret;
     AVHWDeviceContext *ctx = hwfc->device_ctx;
+    VulkanDevicePriv *p = ctx->internal->priv;
     const int planes = av_pix_fmt_count_planes(hwfc->sw_format);
     VkBindImageMemoryInfo bind_info[AV_NUM_DATA_POINTERS] = { { 0 } };
 
@@ -1386,6 +1384,10 @@ static int alloc_bind_mem(AVHWFramesContext *hwfc, AVVkFrame *f,
 
         vkGetImageMemoryRequirements2(hwctx->act_dev, &req_desc, &req);
 
+        if (f->tiling == VK_IMAGE_TILING_LINEAR)
+            req.memoryRequirements.size = FFALIGN(req.memoryRequirements.size,
+                                                  p->props.properties.limits.minMemoryMapAlignment);
+
         /* In case the implementation prefers/requires dedicated allocation */
         use_ded_mem = ded_req.prefersDedicatedAllocation |
                       ded_req.requiresDedicatedAllocation;
@@ -1431,7 +1433,6 @@ static int prepare_frame(AVHWFramesContext *hwfc, VulkanExecCtx *ectx,
     uint32_t dst_qf;
     VkImageLayout new_layout;
     VkAccessFlags new_access;
-    AVHWDeviceContext *ctx = hwfc->device_ctx;
     const int planes = av_pix_fmt_count_planes(hwfc->sw_format);
 
     VkImageMemoryBarrier img_bar[AV_NUM_DATA_POINTERS] = { 0 };
@@ -1467,7 +1468,7 @@ static int prepare_frame(AVHWFramesContext *hwfc, VulkanExecCtx *ectx,
         break;
     }
 
-    if ((err = wait_start_exec_ctx(ctx, ectx)))
+    if ((err = wait_start_exec_ctx(hwfc, ectx)))
         return err;
 
     /* Change the image layout to something more optimal for writes.
@@ -1490,12 +1491,12 @@ static int prepare_frame(AVHWFramesContext *hwfc, VulkanExecCtx *ectx,
         frame->access[i] = img_bar[i].dstAccessMask;
     }
 
-    vkCmdPipelineBarrier(get_buf_exec_ctx(ctx, ectx),
+    vkCmdPipelineBarrier(get_buf_exec_ctx(hwfc, ectx),
                          VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
                          VK_PIPELINE_STAGE_TRANSFER_BIT,
                          0, 0, NULL, 0, NULL, planes, img_bar);
 
-    return submit_exec_ctx(ctx, ectx, &s_info, 0);
+    return submit_exec_ctx(hwfc, ectx, &s_info, 0);
 }
 
 static int create_frame(AVHWFramesContext *hwfc, AVVkFrame **frame,
@@ -1687,7 +1688,9 @@ static void vulkan_frames_uninit(AVHWFramesContext *hwfc)
 {
     VulkanFramesPriv *fp = hwfc->internal->priv;
 
-    free_exec_ctx(hwfc->device_ctx, &fp->conv_ctx);
+    free_exec_ctx(hwfc, &fp->conv_ctx);
+    free_exec_ctx(hwfc, &fp->upload_ctx);
+    free_exec_ctx(hwfc, &fp->download_ctx);
 }
 
 static int vulkan_frames_init(AVHWFramesContext *hwfc)
@@ -1703,21 +1706,31 @@ static int vulkan_frames_init(AVHWFramesContext *hwfc)
     hwctx->tiling = hwctx->tiling ? hwctx->tiling : p->use_linear_images ?
                     VK_IMAGE_TILING_LINEAR : VK_IMAGE_TILING_OPTIMAL;
 
-    hwctx->usage |= DEFAULT_USAGE_FLAGS;
+    if (!hwctx->usage)
+        hwctx->usage = DEFAULT_USAGE_FLAGS;
 
-    err = create_exec_ctx(hwfc->device_ctx, &fp->conv_ctx,
+    err = create_exec_ctx(hwfc, &fp->conv_ctx,
                           dev_hwctx->queue_family_comp_index,
                           GET_QUEUE_COUNT(dev_hwctx, 0, 1, 0));
     if (err)
-        return err;
+        goto fail;
+
+    err = create_exec_ctx(hwfc, &fp->upload_ctx,
+                          dev_hwctx->queue_family_tx_index,
+                          GET_QUEUE_COUNT(dev_hwctx, 0, 0, 1));
+    if (err)
+        goto fail;
+
+    err = create_exec_ctx(hwfc, &fp->download_ctx,
+                          dev_hwctx->queue_family_tx_index, 1);
+    if (err)
+        goto fail;
 
     /* Test to see if allocation will fail */
     err = create_frame(hwfc, &f, hwctx->tiling, hwctx->usage,
                        hwctx->create_pnext);
-    if (err) {
-        free_exec_ctx(hwfc->device_ctx, &fp->conv_ctx);
-        return err;
-    }
+    if (err)
+        goto fail;
 
     vulkan_frame_free(hwfc, (uint8_t *)f);
 
@@ -1728,12 +1741,19 @@ static int vulkan_frames_init(AVHWFramesContext *hwfc)
                                                              hwfc, vulkan_pool_alloc,
                                                              NULL);
         if (!hwfc->internal->pool_internal) {
-            free_exec_ctx(hwfc->device_ctx, &fp->conv_ctx);
-            return AVERROR(ENOMEM);
+            err = AVERROR(ENOMEM);
+            goto fail;
         }
     }
 
     return 0;
+
+fail:
+    free_exec_ctx(hwfc, &fp->conv_ctx);
+    free_exec_ctx(hwfc, &fp->upload_ctx);
+    free_exec_ctx(hwfc, &fp->download_ctx);
+
+    return err;
 }
 
 static int vulkan_get_buffer(AVHWFramesContext *hwfc, AVFrame *frame)
@@ -1943,6 +1963,7 @@ static int vulkan_map_from_drm_frame_desc(AVHWFramesContext *hwfc, AVVkFrame **f
     AVVulkanDeviceContext *hwctx = ctx->hwctx;
     VulkanDevicePriv *p = ctx->internal->priv;
     VulkanFramesPriv *fp = hwfc->internal->priv;
+    AVVulkanFramesContext *frames_hwctx = hwfc->hwctx;
     const AVPixFmtDescriptor *fmt_desc = av_pix_fmt_desc_get(hwfc->sw_format);
     const int has_modifiers = p->extensions & EXT_DRM_MODIFIER_FLAGS;
     VkSubresourceLayout plane_data[AV_NUM_DATA_POINTERS] = { 0 };
@@ -1966,49 +1987,12 @@ static int vulkan_map_from_drm_frame_desc(AVHWFramesContext *hwfc, AVVkFrame **f
         goto fail;
     }
 
-    for (int i = 0; i < desc->nb_objects; i++) {
-        VkMemoryFdPropertiesKHR fdmp = {
-            .sType = VK_STRUCTURE_TYPE_MEMORY_FD_PROPERTIES_KHR,
-        };
-        VkMemoryRequirements req = {
-            .size = desc->objects[i].size,
-        };
-        VkImportMemoryFdInfoKHR idesc = {
-            .sType      = VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR,
-            .handleType = htype,
-            .fd         = dup(desc->objects[i].fd),
-        };
-
-        ret = pfn_vkGetMemoryFdPropertiesKHR(hwctx->act_dev, htype,
-                                             idesc.fd, &fdmp);
-        if (ret != VK_SUCCESS) {
-            av_log(hwfc, AV_LOG_ERROR, "Failed to get FD properties: %s\n",
-                   vk_ret2str(ret));
-            err = AVERROR_EXTERNAL;
-            close(idesc.fd);
-            goto fail;
-        }
-
-        req.memoryTypeBits = fdmp.memoryTypeBits;
-
-        err = alloc_mem(ctx, &req, VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT,
-                        &idesc, &f->flags, &f->mem[i]);
-        if (err) {
-            close(idesc.fd);
-            return err;
-        }
-
-        f->size[i] = desc->objects[i].size;
-    }
-
     f->tiling = has_modifiers ? VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT :
                 desc->objects[0].format_modifier == DRM_FORMAT_MOD_LINEAR ?
                 VK_IMAGE_TILING_LINEAR : VK_IMAGE_TILING_OPTIMAL;
 
     for (int i = 0; i < desc->nb_layers; i++) {
         const int planes = desc->layers[i].nb_planes;
-        const int signal_p = has_modifiers && (planes > 1);
-
         VkImageDrmFormatModifierExplicitCreateInfoEXT drm_info = {
             .sType = VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_EXPLICIT_CREATE_INFO_EXT,
             .drmFormatModifier = desc->objects[0].format_modifier,
@@ -2042,7 +2026,7 @@ static int vulkan_map_from_drm_frame_desc(AVHWFramesContext *hwfc, AVVkFrame **f
             .flags                 = VK_IMAGE_CREATE_ALIAS_BIT,
             .tiling                = f->tiling,
             .initialLayout         = VK_IMAGE_LAYOUT_UNDEFINED, /* specs say so */
-            .usage                 = DEFAULT_USAGE_FLAGS,
+            .usage                 = frames_hwctx->usage,
             .samples               = VK_SAMPLE_COUNT_1_BIT,
             .pQueueFamilyIndices   = p->qfs,
             .queueFamilyIndexCount = p->num_qfs,
@@ -2083,7 +2067,76 @@ static int vulkan_map_from_drm_frame_desc(AVHWFramesContext *hwfc, AVVkFrame **f
 
         f->layout[i] = image_create_info.initialLayout;
         f->access[i] = 0x0;
+    }
+
+    for (int i = 0; i < desc->nb_objects; i++) {
+        int use_ded_mem = 0;
+        VkMemoryFdPropertiesKHR fdmp = {
+            .sType = VK_STRUCTURE_TYPE_MEMORY_FD_PROPERTIES_KHR,
+        };
+        VkMemoryRequirements req = {
+            .size = desc->objects[i].size,
+        };
+        VkImportMemoryFdInfoKHR idesc = {
+            .sType      = VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR,
+            .handleType = htype,
+            .fd         = dup(desc->objects[i].fd),
+        };
+        VkMemoryDedicatedAllocateInfo ded_alloc = {
+            .sType = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO,
+            .pNext = &idesc,
+        };
+
+        ret = pfn_vkGetMemoryFdPropertiesKHR(hwctx->act_dev, htype,
+                                             idesc.fd, &fdmp);
+        if (ret != VK_SUCCESS) {
+            av_log(hwfc, AV_LOG_ERROR, "Failed to get FD properties: %s\n",
+                   vk_ret2str(ret));
+            err = AVERROR_EXTERNAL;
+            close(idesc.fd);
+            goto fail;
+        }
+
+        req.memoryTypeBits = fdmp.memoryTypeBits;
+
+        /* Dedicated allocation only makes sense if there's a one to one mapping
+         * between images and the memory backing them, so only check in this
+         * case. */
+        if (desc->nb_layers == desc->nb_objects) {
+            VkImageMemoryRequirementsInfo2 req_desc = {
+                .sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2,
+                .image = f->img[i],
+            };
+            VkMemoryDedicatedRequirements ded_req = {
+                .sType = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS,
+            };
+            VkMemoryRequirements2 req2 = {
+                .sType = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2,
+                .pNext = &ded_req,
+            };
+
+            vkGetImageMemoryRequirements2(hwctx->act_dev, &req_desc, &req2);
+
+            use_ded_mem = ded_req.prefersDedicatedAllocation |
+                          ded_req.requiresDedicatedAllocation;
+            if (use_ded_mem)
+                ded_alloc.image = f->img[i];
+        }
+
+        err = alloc_mem(ctx, &req, VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT,
+                        use_ded_mem ? &ded_alloc : ded_alloc.pNext,
+                        &f->flags, &f->mem[i]);
+        if (err) {
+            close(idesc.fd);
+            return err;
+        }
+
+        f->size[i] = desc->objects[i].size;
+    }
 
+    for (int i = 0; i < desc->nb_layers; i++) {
+        const int planes = desc->layers[i].nb_planes;
+        const int signal_p = has_modifiers && (planes > 1);
         for (int j = 0; j < planes; j++) {
             VkImageAspectFlagBits aspect = j == 0 ? VK_IMAGE_ASPECT_MEMORY_PLANE_0_BIT_EXT :
                                            j == 1 ? VK_IMAGE_ASPECT_MEMORY_PLANE_1_BIT_EXT :
@@ -2618,6 +2671,7 @@ typedef struct ImageBuffer {
     VkBuffer buf;
     VkDeviceMemory mem;
     VkMemoryPropertyFlagBits flags;
+    int mapped_mem;
 } ImageBuffer;
 
 static void free_buf(void *opaque, uint8_t *data)
@@ -2634,14 +2688,14 @@ static void free_buf(void *opaque, uint8_t *data)
     av_free(data);
 }
 
-static int create_buf(AVHWDeviceContext *ctx, AVBufferRef **buf,
+static int create_buf(AVHWDeviceContext *ctx, AVBufferRef **buf, size_t imp_size,
                       int height, int *stride, VkBufferUsageFlags usage,
                       VkMemoryPropertyFlagBits flags, void *create_pnext,
                       void *alloc_pnext)
 {
     int err;
     VkResult ret;
-    VkMemoryRequirements req;
+    int use_ded_mem;
     AVVulkanDeviceContext *hwctx = ctx->hwctx;
     VulkanDevicePriv *p = ctx->internal->priv;
 
@@ -2652,12 +2706,34 @@ static int create_buf(AVHWDeviceContext *ctx, AVBufferRef **buf,
         .sharingMode = VK_SHARING_MODE_EXCLUSIVE,
     };
 
+    VkBufferMemoryRequirementsInfo2 req_desc = {
+        .sType = VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2,
+    };
+    VkMemoryDedicatedAllocateInfo ded_alloc = {
+        .sType = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO,
+        .pNext = alloc_pnext,
+    };
+    VkMemoryDedicatedRequirements ded_req = {
+        .sType = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS,
+    };
+    VkMemoryRequirements2 req = {
+        .sType = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2,
+        .pNext = &ded_req,
+    };
+
     ImageBuffer *vkbuf = av_mallocz(sizeof(*vkbuf));
     if (!vkbuf)
         return AVERROR(ENOMEM);
 
-    *stride = FFALIGN(*stride, p->props.limits.optimalBufferCopyRowPitchAlignment);
-    buf_spawn.size = height*(*stride);
+    vkbuf->mapped_mem = !!imp_size;
+
+    if (!vkbuf->mapped_mem) {
+        *stride = FFALIGN(*stride, p->props.properties.limits.optimalBufferCopyRowPitchAlignment);
+        buf_spawn.size = height*(*stride);
+        buf_spawn.size = FFALIGN(buf_spawn.size, p->props.properties.limits.minMemoryMapAlignment);
+    } else {
+        buf_spawn.size = imp_size;
+    }
 
     ret = vkCreateBuffer(hwctx->act_dev, &buf_spawn, NULL, &vkbuf->buf);
     if (ret != VK_SUCCESS) {
@@ -2666,9 +2742,19 @@ static int create_buf(AVHWDeviceContext *ctx, AVBufferRef **buf,
         return AVERROR_EXTERNAL;
     }
 
-    vkGetBufferMemoryRequirements(hwctx->act_dev, vkbuf->buf, &req);
+    req_desc.buffer = vkbuf->buf;
+
+    vkGetBufferMemoryRequirements2(hwctx->act_dev, &req_desc, &req);
+
+    /* In case the implementation prefers/requires dedicated allocation */
+    use_ded_mem = ded_req.prefersDedicatedAllocation |
+                  ded_req.requiresDedicatedAllocation;
+    if (use_ded_mem)
+        ded_alloc.buffer = vkbuf->buf;
 
-    err = alloc_mem(ctx, &req, flags, alloc_pnext, &vkbuf->flags, &vkbuf->mem);
+    err = alloc_mem(ctx, &req.memoryRequirements, flags,
+                    use_ded_mem ? &ded_alloc : (void *)ded_alloc.pNext,
+                    &vkbuf->flags, &vkbuf->mem);
     if (err)
         return err;
 
@@ -2689,6 +2775,7 @@ static int create_buf(AVHWDeviceContext *ctx, AVBufferRef **buf,
     return 0;
 }
 
+/* Skips mapping of host mapped buffers but still invalidates them */
 static int map_buffers(AVHWDeviceContext *ctx, AVBufferRef **bufs, uint8_t *mem[],
                        int nb_buffers, int invalidate)
 {
@@ -2699,6 +2786,9 @@ static int map_buffers(AVHWDeviceContext *ctx, AVBufferRef **bufs, uint8_t *mem[
 
     for (int i = 0; i < nb_buffers; i++) {
         ImageBuffer *vkbuf = (ImageBuffer *)bufs[i]->data;
+        if (vkbuf->mapped_mem)
+            continue;
+
         ret = vkMapMemory(hwctx->act_dev, vkbuf->mem, 0,
                           VK_WHOLE_SIZE, 0, (void **)&mem[i]);
         if (ret != VK_SUCCESS) {
@@ -2768,19 +2858,22 @@ static int unmap_buffers(AVHWDeviceContext *ctx, AVBufferRef **bufs,
 
     for (int i = 0; i < nb_buffers; i++) {
         ImageBuffer *vkbuf = (ImageBuffer *)bufs[i]->data;
+        if (vkbuf->mapped_mem)
+            continue;
+
         vkUnmapMemory(hwctx->act_dev, vkbuf->mem);
     }
 
     return err;
 }
 
-static int transfer_image_buf(AVHWDeviceContext *ctx, const AVFrame *f,
+static int transfer_image_buf(AVHWFramesContext *hwfc, const AVFrame *f,
                               AVBufferRef **bufs, const int *buf_stride, int w,
                               int h, enum AVPixelFormat pix_fmt, int to_buf)
 {
     int err;
     AVVkFrame *frame = (AVVkFrame *)f->data[0];
-    VulkanDevicePriv *s = ctx->internal->priv;
+    VulkanFramesPriv *fp = hwfc->internal->priv;
 
     int bar_num = 0;
     VkPipelineStageFlagBits sem_wait_dst[AV_NUM_DATA_POINTERS];
@@ -2789,7 +2882,8 @@ static int transfer_image_buf(AVHWDeviceContext *ctx, const AVFrame *f,
     const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(pix_fmt);
 
     VkImageMemoryBarrier img_bar[AV_NUM_DATA_POINTERS] = { 0 };
-    VkCommandBuffer cmd_buf = get_buf_exec_ctx(ctx, &s->cmd);
+    VulkanExecCtx *ectx = to_buf ? &fp->download_ctx : &fp->upload_ctx;
+    VkCommandBuffer cmd_buf = get_buf_exec_ctx(hwfc, ectx);
 
     VkSubmitInfo s_info = {
         .sType                = VK_STRUCTURE_TYPE_SUBMIT_INFO,
@@ -2800,7 +2894,7 @@ static int transfer_image_buf(AVHWDeviceContext *ctx, const AVFrame *f,
         .waitSemaphoreCount   = planes,
     };
 
-    if ((err = wait_start_exec_ctx(ctx, &s->cmd)))
+    if ((err = wait_start_exec_ctx(hwfc, ectx)))
         return err;
 
     /* Change the image layout to something more optimal for transfers */
@@ -2877,22 +2971,17 @@ static int transfer_image_buf(AVHWDeviceContext *ctx, const AVFrame *f,
         for (ref = 0; ref < AV_NUM_DATA_POINTERS; ref++) {
             if (!f->buf[ref])
                 break;
-            if ((err = add_buf_dep_exec_ctx(hwfc, &s->cmd, &f->buf[ref], 1)))
+            if ((err = add_buf_dep_exec_ctx(hwfc, ectx, &f->buf[ref], 1)))
                 return err;
         }
-        if (ref && (err = add_buf_dep_exec_ctx(hwfc, &s->cmd, bufs, planes)))
+        if (ref && (err = add_buf_dep_exec_ctx(hwfc, ectx, bufs, planes)))
             return err;
-        return submit_exec_ctx(hwfc, &s->cmd, &s_info, !ref);
+        return submit_exec_ctx(hwfc, ectx, &s_info, !ref);
     } else {
-        return submit_exec_ctx(hwfc, &s->cmd, &s_info,    1);
+        return submit_exec_ctx(hwfc, ectx, &s_info,    1);
     }
 }
 
-/* Technically we can use VK_EXT_external_memory_host to upload and download,
- * however the alignment requirements make this unfeasible as both the pointer
- * and the size of each plane need to be aligned to the minimum alignment
- * requirement, which on all current implementations (anv, radv) is 4096.
- * If the requirement gets relaxed (unlikely) this can easily be implemented. */
 static int vulkan_transfer_data_from_mem(AVHWFramesContext *hwfc, AVFrame *dst,
                                          const AVFrame *src)
 {
@@ -2903,6 +2992,9 @@ static int vulkan_transfer_data_from_mem(AVHWFramesContext *hwfc, AVFrame *dst,
     AVBufferRef *bufs[AV_NUM_DATA_POINTERS] = { 0 };
     const int planes = av_pix_fmt_count_planes(src->format);
     int log2_chroma = av_pix_fmt_desc_get(src->format)->log2_chroma_h;
+    VulkanDevicePriv *p = hwfc->device_ctx->internal->priv;
+    int host_mapped[AV_NUM_DATA_POINTERS] = { 0 };
+    int map_host = p->extensions & EXT_EXTERNAL_HOST_MEMORY;
 
     if ((src->format != AV_PIX_FMT_NONE && !av_vkfmt_from_pixfmt(src->format))) {
         av_log(hwfc, AV_LOG_ERROR, "Unsupported source pixel format!\n");
@@ -2933,11 +3025,27 @@ static int vulkan_transfer_data_from_mem(AVHWFramesContext *hwfc, AVFrame *dst,
     for (int i = 0; i < planes; i++) {
         int h = src->height;
         int p_height = i > 0 ? AV_CEIL_RSHIFT(h, log2_chroma) : h;
+        size_t p_size = FFALIGN(FFABS(src->linesize[i]) * p_height,
+                                p->hprops.minImportedHostPointerAlignment);
+
+        VkImportMemoryHostPointerInfoEXT import_desc = {
+            .sType = VK_STRUCTURE_TYPE_IMPORT_MEMORY_HOST_POINTER_INFO_EXT,
+            .handleType = VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT,
+            .pHostPointer = src->data[i],
+        };
+
+        /* We can only map images with positive stride and alignment appropriate
+         * for the device. */
+        host_mapped[i] = map_host && src->linesize[i] > 0 &&
+                         !(((uintptr_t)import_desc.pHostPointer) %
+                           p->hprops.minImportedHostPointerAlignment);
+        p_size = host_mapped[i] ? p_size : 0;
 
         tmp.linesize[i] = FFABS(src->linesize[i]);
-        err = create_buf(dev_ctx, &bufs[i], p_height,
-                         &tmp.linesize[i], VK_BUFFER_USAGE_TRANSFER_SRC_BIT,
-                         VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT, NULL, NULL);
+        err = create_buf(dev_ctx, &bufs[i], p_size, p_height, &tmp.linesize[i],
+                         VK_BUFFER_USAGE_TRANSFER_SRC_BIT,
+                         VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT, NULL,
+                         host_mapped[i] ? &import_desc : NULL);
         if (err)
             goto end;
     }
@@ -2946,14 +3054,24 @@ static int vulkan_transfer_data_from_mem(AVHWFramesContext *hwfc, AVFrame *dst,
     if ((err = map_buffers(dev_ctx, bufs, tmp.data, planes, 0)))
         goto end;
 
-    av_image_copy(tmp.data, tmp.linesize, (const uint8_t **)src->data,
-                  src->linesize, src->format, src->width, src->height);
+    for (int i = 0; i < planes; i++) {
+        int h = src->height;
+        int p_height = i > 0 ? AV_CEIL_RSHIFT(h, log2_chroma) : h;
+
+        if (host_mapped[i])
+            continue;
+
+        av_image_copy_plane(tmp.data[i], tmp.linesize[i],
+                            (const uint8_t *)src->data[i], src->linesize[i],
+                            FFMIN(tmp.linesize[i], FFABS(src->linesize[i])),
+                            p_height);
+    }
 
     if ((err = unmap_buffers(dev_ctx, bufs, planes, 1)))
         goto end;
 
     /* Copy buffers to image */
-    err = transfer_image_buf(dev_ctx, dst, bufs, tmp.linesize,
+    err = transfer_image_buf(hwfc, dst, bufs, tmp.linesize,
                              src->width, src->height, src->format, 0);
 
 end:
@@ -3063,6 +3181,9 @@ static int vulkan_transfer_data_to_mem(AVHWFramesContext *hwfc, AVFrame *dst,
     AVBufferRef *bufs[AV_NUM_DATA_POINTERS] = { 0 };
     const int planes = av_pix_fmt_count_planes(dst->format);
     int log2_chroma = av_pix_fmt_desc_get(dst->format)->log2_chroma_h;
+    VulkanDevicePriv *p = hwfc->device_ctx->internal->priv;
+    int host_mapped[AV_NUM_DATA_POINTERS] = { 0 };
+    int map_host = p->extensions & EXT_EXTERNAL_HOST_MEMORY;
 
     if (dst->width > hwfc->width || dst->height > hwfc->height)
         return AVERROR(EINVAL);
@@ -3088,17 +3209,33 @@ static int vulkan_transfer_data_to_mem(AVHWFramesContext *hwfc, AVFrame *dst,
     for (int i = 0; i < planes; i++) {
         int h = dst->height;
         int p_height = i > 0 ? AV_CEIL_RSHIFT(h, log2_chroma) : h;
+        size_t p_size = FFALIGN(FFABS(dst->linesize[i]) * p_height,
+                                p->hprops.minImportedHostPointerAlignment);
+
+        VkImportMemoryHostPointerInfoEXT import_desc = {
+            .sType = VK_STRUCTURE_TYPE_IMPORT_MEMORY_HOST_POINTER_INFO_EXT,
+            .handleType = VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT,
+            .pHostPointer = dst->data[i],
+        };
+
+        /* We can only map images with positive stride and alignment appropriate
+         * for the device. */
+        host_mapped[i] = map_host && dst->linesize[i] > 0 &&
+                         !(((uintptr_t)import_desc.pHostPointer) %
+                           p->hprops.minImportedHostPointerAlignment);
+        p_size = host_mapped[i] ? p_size : 0;
 
         tmp.linesize[i] = FFABS(dst->linesize[i]);
-        err = create_buf(dev_ctx, &bufs[i], p_height,
+        err = create_buf(dev_ctx, &bufs[i], p_size, p_height,
                          &tmp.linesize[i], VK_BUFFER_USAGE_TRANSFER_DST_BIT,
-                         VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT, NULL, NULL);
+                         VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT, NULL,
+                         host_mapped[i] ? &import_desc : NULL);
         if (err)
             goto end;
     }
 
     /* Copy image to buffer */
-    if ((err = transfer_image_buf(dev_ctx, src, bufs, tmp.linesize,
+    if ((err = transfer_image_buf(hwfc, src, bufs, tmp.linesize,
                                   dst->width, dst->height, dst->format, 1)))
         goto end;
 
@@ -3106,8 +3243,18 @@ static int vulkan_transfer_data_to_mem(AVHWFramesContext *hwfc, AVFrame *dst,
     if ((err = map_buffers(dev_ctx, bufs, tmp.data, planes, 1)))
         goto end;
 
-    av_image_copy(dst->data, dst->linesize, (const uint8_t **)tmp.data,
-                  tmp.linesize, dst->format, dst->width, dst->height);
+    for (int i = 0; i < planes; i++) {
+        int h = dst->height;
+        int p_height = i > 0 ? AV_CEIL_RSHIFT(h, log2_chroma) : h;
+
+        if (host_mapped[i])
+            continue;
+
+        av_image_copy_plane(dst->data[i], dst->linesize[i],
+                            (const uint8_t *)tmp.data[i], tmp.linesize[i],
+                            FFMIN(tmp.linesize[i], FFABS(dst->linesize[i])),
+                            p_height);
+    }
 
     err = unmap_buffers(dev_ctx, bufs, planes, 0);
 
@@ -3138,6 +3285,12 @@ static int vulkan_transfer_data_from(AVHWFramesContext *hwfc, AVFrame *dst,
     }
 }
 
+static int vulkan_frames_derive_to(AVHWFramesContext *dst_fc,
+                                   AVHWFramesContext *src_fc, int flags)
+{
+    return vulkan_frames_init(dst_fc);
+}
+
 AVVkFrame *av_vk_frame_alloc(void)
 {
     return av_mallocz(sizeof(AVVkFrame));
@@ -3167,6 +3320,7 @@ const HWContextType ff_hwcontext_type_vulkan = {
 
     .map_to                 = vulkan_map_to,
     .map_from               = vulkan_map_from,
+    .frames_derive_to       = &vulkan_frames_derive_to,
 
     .pix_fmts = (const enum AVPixelFormat []) {
         AV_PIX_FMT_VULKAN,