// layers_num,layer_type,layer_parameters,layer_type,layer_parameters...
// For CONV layer: activation_function, input_num, output_num, kernel_size, kernel, biases
// For DEPTH_TO_SPACE layer: block_size
-DNNModel *ff_dnn_load_model_native(const char *model_filename)
+DNNModel *ff_dnn_load_model_native(const char *model_filename, const char *options)
{
DNNModel *model = NULL;
char header_expected[] = "FFMPEGDNNNATIVE";
model->set_input_output = &set_input_output_native;
model->get_input = &get_input_native;
+ model->options = options;
return model;
uint32_t nb_output;
} ConvolutionalNetwork;
-DNNModel *ff_dnn_load_model_native(const char *model_filename);
+DNNModel *ff_dnn_load_model_native(const char *model_filename, const char *options);
DNNReturnType ff_dnn_execute_model_native(const DNNModel *model, DNNData *outputs, uint32_t nb_output);
return DNN_ERROR;
}
-DNNModel *ff_dnn_load_model_ov(const char *model_filename)
+DNNModel *ff_dnn_load_model_ov(const char *model_filename, const char *options)
{
DNNModel *model = NULL;
OVModel *ov_model = NULL;
model->model = (void *)ov_model;
model->set_input_output = &set_input_output_ov;
model->get_input = &get_input_ov;
+ model->options = options;
return model;
#include "../dnn_interface.h"
-DNNModel *ff_dnn_load_model_ov(const char *model_filename);
+DNNModel *ff_dnn_load_model_ov(const char *model_filename, const char *options);
DNNReturnType ff_dnn_execute_model_ov(const DNNModel *model, DNNData *outputs, uint32_t nb_output);
return DNN_SUCCESS;
}
-DNNModel *ff_dnn_load_model_tf(const char *model_filename)
+DNNModel *ff_dnn_load_model_tf(const char *model_filename, const char *options)
{
DNNModel *model = NULL;
TFModel *tf_model = NULL;
model->model = (void *)tf_model;
model->set_input_output = &set_input_output_tf;
model->get_input = &get_input_tf;
+ model->options = options;
return model;
}
#include "../dnn_interface.h"
-DNNModel *ff_dnn_load_model_tf(const char *model_filename);
+DNNModel *ff_dnn_load_model_tf(const char *model_filename, const char *options);
DNNReturnType ff_dnn_execute_model_tf(const DNNModel *model, DNNData *outputs, uint32_t nb_output);
typedef struct DNNModel{
// Stores model that can be different for different backends.
void *model;
+ // Stores options when the model is executed by the backend
+ const char *options;
// Gets model input information
// Just reuse struct DNNData here, actually the DNNData.data field is not needed.
DNNReturnType (*get_input)(void *model, DNNData *input, const char *input_name);
// Stores pointers to functions for loading, executing, freeing DNN models for one of the backends.
typedef struct DNNModule{
// Loads model and parameters from given file. Returns NULL if it is not possible.
- DNNModel *(*load_model)(const char *model_filename);
+ DNNModel *(*load_model)(const char *model_filename, const char *options);
// Executes model with specified input and output. Returns DNN_SUCCESS on success, DNN_ERROR otherwise.
DNNReturnType (*execute_model)(const DNNModel *model, DNNData *outputs, uint32_t nb_output);
// Frees memory allocated for model.
return AVERROR(EINVAL);
}
- dr_context->model = (dr_context->dnn_module->load_model)(dr_context->model_filename);
+ dr_context->model = (dr_context->dnn_module->load_model)(dr_context->model_filename, NULL);
if (!dr_context->model) {
av_log(ctx, AV_LOG_ERROR, "could not load DNN model\n");
return AVERROR(EINVAL);
DNNBackendType backend_type;
char *model_inputname;
char *model_outputname;
+ char *backend_options;
DNNModule *dnn_module;
DNNModel *model;
{ "model", "path to model file", OFFSET(model_filename), AV_OPT_TYPE_STRING, { .str = NULL }, 0, 0, FLAGS },
{ "input", "input name of the model", OFFSET(model_inputname), AV_OPT_TYPE_STRING, { .str = NULL }, 0, 0, FLAGS },
{ "output", "output name of the model", OFFSET(model_outputname), AV_OPT_TYPE_STRING, { .str = NULL }, 0, 0, FLAGS },
+ { "options", "backend options", OFFSET(backend_options), AV_OPT_TYPE_STRING, { .str = NULL }, 0, 0, FLAGS },
{ NULL }
};
return AVERROR(EINVAL);
}
- ctx->model = (ctx->dnn_module->load_model)(ctx->model_filename);
+ ctx->model = (ctx->dnn_module->load_model)(ctx->model_filename, ctx->backend_options);
if (!ctx->model) {
av_log(ctx, AV_LOG_ERROR, "could not load DNN model\n");
return AVERROR(EINVAL);
av_log(context, AV_LOG_ERROR, "load_model for network was not specified\n");
return AVERROR(EIO);
}
- sr_context->model = (sr_context->dnn_module->load_model)(sr_context->model_filename);
+ sr_context->model = (sr_context->dnn_module->load_model)(sr_context->model_filename, NULL);
if (!sr_context->model){
av_log(context, AV_LOG_ERROR, "could not load DNN model\n");
return AVERROR(EIO);