git.sesse.net Git - ffmpeg/commitdiff
dnn/native: add log error message
authorTing Fu <ting.fu@intel.com>
Tue, 25 Aug 2020 03:47:50 +0000 (11:47 +0800)
committerGuo, Yejun <yejun.guo@intel.com>
Tue, 25 Aug 2020 05:03:46 +0000 (13:03 +0800)
Signed-off-by: Ting Fu <ting.fu@intel.com>
24 files changed:
libavfilter/dnn/dnn_backend_native.c
libavfilter/dnn/dnn_backend_native.h
libavfilter/dnn/dnn_backend_native_layer_avgpool.c
libavfilter/dnn/dnn_backend_native_layer_avgpool.h
libavfilter/dnn/dnn_backend_native_layer_conv2d.c
libavfilter/dnn/dnn_backend_native_layer_conv2d.h
libavfilter/dnn/dnn_backend_native_layer_depth2space.c
libavfilter/dnn/dnn_backend_native_layer_depth2space.h
libavfilter/dnn/dnn_backend_native_layer_mathbinary.c
libavfilter/dnn/dnn_backend_native_layer_mathbinary.h
libavfilter/dnn/dnn_backend_native_layer_mathunary.c
libavfilter/dnn/dnn_backend_native_layer_mathunary.h
libavfilter/dnn/dnn_backend_native_layer_maximum.c
libavfilter/dnn/dnn_backend_native_layer_maximum.h
libavfilter/dnn/dnn_backend_native_layer_pad.c
libavfilter/dnn/dnn_backend_native_layer_pad.h
libavfilter/dnn/dnn_backend_native_layers.h
tests/dnn/dnn-layer-avgpool-test.c
tests/dnn/dnn-layer-conv2d-test.c
tests/dnn/dnn-layer-depth2space-test.c
tests/dnn/dnn-layer-mathbinary-test.c
tests/dnn/dnn-layer-mathunary-test.c
tests/dnn/dnn-layer-maximum-test.c
tests/dnn/dnn-layer-pad-test.c

index 436ce938daa054277a37070f3bf036c4498440da..a8fe6b94eb3345efba644d66d3a2e84d298e1001 100644 (file)
 #include "dnn_backend_native_layer_conv2d.h"
 #include "dnn_backend_native_layers.h"
 
+static const AVClass dnn_native_class = {
+    .class_name = "dnn_native",
+    .item_name  = av_default_item_name,
+    .option     = NULL,
+    .version    = LIBAVUTIL_VERSION_INT,
+    .category   = AV_CLASS_CATEGORY_FILTER,
+};
+
 static DNNReturnType get_input_native(void *model, DNNData *input, const char *input_name)
 {
     NativeModel *native_model = (NativeModel *)model;
+    NativeContext *ctx = &native_model->ctx;
 
     for (int i = 0; i < native_model->operands_num; ++i) {
         DnnOperand *oprd = &native_model->operands[i];
         if (strcmp(oprd->name, input_name) == 0) {
-            if (oprd->type != DOT_INPUT)
+            if (oprd->type != DOT_INPUT) {
+                av_log(ctx, AV_LOG_ERROR, "Found \"%s\" in model, but it is not input node\n", input_name);
                 return DNN_ERROR;
+            }
             input->dt = oprd->data_type;
             av_assert0(oprd->dims[0] == 1);
             input->height = oprd->dims[1];
@@ -47,30 +58,37 @@ static DNNReturnType get_input_native(void *model, DNNData *input, const char *i
     }
 
     // do not find the input operand
+    av_log(ctx, AV_LOG_ERROR, "Could not find \"%s\" in model\n", input_name);
     return DNN_ERROR;
 }
 
 static DNNReturnType set_input_native(void *model, DNNData *input, const char *input_name)
 {
     NativeModel *native_model = (NativeModel *)model;
+    NativeContext *ctx = &native_model->ctx;
     DnnOperand *oprd = NULL;
 
-    if (native_model->layers_num <= 0 || native_model->operands_num <= 0)
+    if (native_model->layers_num <= 0 || native_model->operands_num <= 0) {
+        av_log(ctx, AV_LOG_ERROR, "No operands or layers in model\n");
         return DNN_ERROR;
+    }
 
     /* inputs */
     for (int i = 0; i < native_model->operands_num; ++i) {
         oprd = &native_model->operands[i];
         if (strcmp(oprd->name, input_name) == 0) {
-            if (oprd->type != DOT_INPUT)
+            if (oprd->type != DOT_INPUT) {
+                av_log(ctx, AV_LOG_ERROR, "Found \"%s\" in model, but it is not input node\n", input_name);
                 return DNN_ERROR;
+            }
             break;
         }
         oprd = NULL;
     }
-
-    if (!oprd)
+    if (!oprd) {
+        av_log(ctx, AV_LOG_ERROR, "Could not find \"%s\" in model\n", input_name);
         return DNN_ERROR;
+    }
 
     oprd->dims[0] = 1;
     oprd->dims[1] = input->height;
@@ -79,11 +97,15 @@ static DNNReturnType set_input_native(void *model, DNNData *input, const char *i
 
     av_freep(&oprd->data);
     oprd->length = calculate_operand_data_length(oprd);
-    if (oprd->length <= 0)
+    if (oprd->length <= 0) {
+        av_log(ctx, AV_LOG_ERROR, "The input data length overflow\n");
         return DNN_ERROR;
+    }
     oprd->data = av_malloc(oprd->length);
-    if (!oprd->data)
+    if (!oprd->data) {
+        av_log(ctx, AV_LOG_ERROR, "Failed to malloc memory for input data\n");
         return DNN_ERROR;
+    }
 
     input->data = oprd->data;
 
@@ -150,6 +172,8 @@ DNNModel *ff_dnn_load_model_native(const char *model_filename, const char *optio
     if (!native_model){
         goto fail;
     }
+
+    native_model->ctx.class = &dnn_native_class;
     model->model = (void *)native_model;
 
     avio_seek(model_file_context, file_size - 8, SEEK_SET);
@@ -237,19 +261,26 @@ fail:
 DNNReturnType ff_dnn_execute_model_native(const DNNModel *model, DNNData *outputs, const char **output_names, uint32_t nb_output)
 {
     NativeModel *native_model = (NativeModel *)model->model;
+    NativeContext *ctx = &native_model->ctx;
     int32_t layer;
 
-    if (native_model->layers_num <= 0 || native_model->operands_num <= 0)
+    if (native_model->layers_num <= 0 || native_model->operands_num <= 0) {
+        av_log(ctx, AV_LOG_ERROR, "No operands or layers in model\n");
         return DNN_ERROR;
-    if (!native_model->operands[0].data)
+    }
+    if (!native_model->operands[0].data) {
+        av_log(ctx, AV_LOG_ERROR, "Empty model input data\n");
         return DNN_ERROR;
+    }
 
     for (layer = 0; layer < native_model->layers_num; ++layer){
         DNNLayerType layer_type = native_model->layers[layer].type;
         if (layer_funcs[layer_type].pf_exec(native_model->operands,
                                             native_model->layers[layer].input_operand_indexes,
                                             native_model->layers[layer].output_operand_index,
-                                            native_model->layers[layer].params) == DNN_ERROR) {
+                                            native_model->layers[layer].params,
+                                            &native_model->ctx) == DNN_ERROR) {
+            av_log(ctx, AV_LOG_ERROR, "Failed to execute model\n");
             return DNN_ERROR;
         }
     }
@@ -264,8 +295,10 @@ DNNReturnType ff_dnn_execute_model_native(const DNNModel *model, DNNData *output
             }
         }
 
-        if (oprd == NULL)
+        if (oprd == NULL) {
+            av_log(ctx, AV_LOG_ERROR, "Could not find output in model\n");
             return DNN_ERROR;
+        }
 
         outputs[i].data = oprd->data;
         outputs[i].height = oprd->dims[1];
index 554098fe76d065a4aa3af915baa75b3ad4d6415a..197f557deeabb7d90224f02452b5a6a58c2e9ba0 100644 (file)
@@ -106,8 +106,13 @@ typedef struct InputParams{
     int height, width, channels;
 } InputParams;
 
+typedef struct NativeContext {
+    const AVClass *class;
+} NativeContext;
+
 // Represents simple feed-forward convolutional network.
 typedef struct NativeModel{
+    NativeContext ctx;
     Layer *layers;
     int32_t layers_num;
     DnnOperand *operands;
index bd7bdb4c97a6ca4b566cacda33193d54cc636ff6..989006d797bbe7b4d079d9fd51dacc4c9fde3250 100644 (file)
@@ -56,7 +56,7 @@ int dnn_load_layer_avg_pool(Layer *layer, AVIOContext *model_file_context, int f
 }
 
 int dnn_execute_layer_avg_pool(DnnOperand *operands, const int32_t *input_operand_indexes,
-                             int32_t output_operand_index, const void *parameters)
+                             int32_t output_operand_index, const void *parameters, NativeContext *ctx)
 {
     float *output;
     int height_end, width_end, height_radius, width_radius, output_height, output_width, kernel_area;
@@ -107,9 +107,15 @@ int dnn_execute_layer_avg_pool(DnnOperand *operands, const int32_t *input_operan
     output_operand->dims[3] = channel;
     output_operand->data_type = operands[input_operand_index].data_type;
     output_operand->length = calculate_operand_data_length(output_operand);
+    if (output_operand->length <= 0) {
+        av_log(ctx, AV_LOG_ERROR, "The output data length overflow\n");
+        return DNN_ERROR;
+    }
     output_operand->data = av_realloc(output_operand->data, output_operand->length);
-    if (!output_operand->data)
+    if (!output_operand->data) {
+        av_log(ctx, AV_LOG_ERROR, "Failed to reallocate memory for output\n");
         return DNN_ERROR;
+    }
     output = output_operand->data;
 
     for (int y = 0; y < height_end; y += kernel_strides) {
index 8e31ddb7c893703d3d8c0026c2f37f99307e1458..543370ff3bb737d92426bc4f6a7f87a72b52449c 100644 (file)
@@ -35,6 +35,6 @@ typedef struct AvgPoolParams{
 
 int dnn_load_layer_avg_pool(Layer *layer, AVIOContext *model_file_context, int file_size, int operands_num);
 int dnn_execute_layer_avg_pool(DnnOperand *operands, const int32_t *input_operand_indexes,
-                             int32_t output_operand_index, const void *parameters);
+                             int32_t output_operand_index, const void *parameters, NativeContext *ctx);
 
 #endif
index 25356901c28932ebcf65390204c2df096867e762..d079795bf8435fdf445862d355bce901b865875b 100644 (file)
@@ -89,7 +89,7 @@ int dnn_load_layer_conv2d(Layer *layer, AVIOContext *model_file_context, int fil
 }
 
 int dnn_execute_layer_conv2d(DnnOperand *operands, const int32_t *input_operand_indexes,
-                             int32_t output_operand_index, const void *parameters)
+                             int32_t output_operand_index, const void *parameters, NativeContext *ctx)
 {
     float *output;
     int32_t input_operand_index = input_operand_indexes[0];
@@ -113,11 +113,15 @@ int dnn_execute_layer_conv2d(DnnOperand *operands, const int32_t *input_operand_
     output_operand->dims[3] = conv_params->output_num;
     output_operand->data_type = operands[input_operand_index].data_type;
     output_operand->length = calculate_operand_data_length(output_operand);
-    if (output_operand->length <= 0)
+    if (output_operand->length <= 0) {
+        av_log(ctx, AV_LOG_ERROR, "The output data length overflow\n");
         return DNN_ERROR;
+    }
     output_operand->data = av_realloc(output_operand->data, output_operand->length);
-    if (!output_operand->data)
+    if (!output_operand->data) {
+        av_log(ctx, AV_LOG_ERROR, "Failed to reallocate memory for output\n");
         return DNN_ERROR;
+    }
     output = output_operand->data;
 
     av_assert0(channel == conv_params->input_num);
index b240b7ef6bbff4929d10352cffc87cda5e8beadf..72319f2ebe8850ac621d71d89a4ba977365fb225 100644 (file)
@@ -37,5 +37,5 @@ typedef struct ConvolutionalParams{
 
 int dnn_load_layer_conv2d(Layer *layer, AVIOContext *model_file_context, int file_size, int operands_num);
 int dnn_execute_layer_conv2d(DnnOperand *operands, const int32_t *input_operand_indexes,
-                             int32_t output_operand_index, const void *parameters);
+                             int32_t output_operand_index, const void *parameters, NativeContext *ctx);
 #endif
index 5a61025f7ae13c3113217641254b39f27cd95523..4107ee6caeca3f5b7787de93e956fa4755f3b4c7 100644 (file)
@@ -50,7 +50,7 @@ int dnn_load_layer_depth2space(Layer *layer, AVIOContext *model_file_context, in
 }
 
 int dnn_execute_layer_depth2space(DnnOperand *operands, const int32_t *input_operand_indexes,
-                                  int32_t output_operand_index, const void *parameters)
+                                  int32_t output_operand_index, const void *parameters, NativeContext *ctx)
 {
     float *output;
     const DepthToSpaceParams *params = (const DepthToSpaceParams *)parameters;
@@ -75,11 +75,15 @@ int dnn_execute_layer_depth2space(DnnOperand *operands, const int32_t *input_ope
     output_operand->dims[3] = new_channels;
     output_operand->data_type = operands[input_operand_index].data_type;
     output_operand->length = calculate_operand_data_length(output_operand);
-    if (output_operand->length <= 0)
+    if (output_operand->length <= 0) {
+        av_log(ctx, AV_LOG_ERROR, "The output data length overflow\n");
         return DNN_ERROR;
+    }
     output_operand->data = av_realloc(output_operand->data, output_operand->length);
-    if (!output_operand->data)
+    if (!output_operand->data) {
+        av_log(ctx, AV_LOG_ERROR, "Failed to reallocate memory for output\n");
         return DNN_ERROR;
+    }
     output = output_operand->data;
 
     for (y = 0; y < height; ++y){
index b2901e0141ad0ab5070f3ee29469a547aababa0c..648a927f2dffeda0d96ee27a3ba03155e507cea2 100644 (file)
@@ -36,6 +36,6 @@ typedef struct DepthToSpaceParams{
 
 int dnn_load_layer_depth2space(Layer *layer, AVIOContext *model_file_context, int file_size, int operands_num);
 int dnn_execute_layer_depth2space(DnnOperand *operands, const int32_t *input_operand_indexes,
-                                  int32_t output_operand_index, const void *parameters);
+                                  int32_t output_operand_index, const void *parameters, NativeContext *ctx);
 
 #endif
index 6ec1f08e9f1ea771b6733f8db3898094489bfde3..998a75245cbe05ef80def3f64654c9013895bf4a 100644 (file)
@@ -149,7 +149,7 @@ int dnn_load_layer_math_binary(Layer *layer, AVIOContext *model_file_context, in
 }
 
 int dnn_execute_layer_math_binary(DnnOperand *operands, const int32_t *input_operand_indexes,
-                                 int32_t output_operand_index, const void *parameters)
+                                 int32_t output_operand_index, const void *parameters, NativeContext *ctx)
 {
     const DnnOperand *input = &operands[input_operand_indexes[0]];
     DnnOperand *output = &operands[output_operand_index];
@@ -160,11 +160,15 @@ int dnn_execute_layer_math_binary(DnnOperand *operands, const int32_t *input_ope
 
     output->data_type = input->data_type;
     output->length = calculate_operand_data_length(output);
-    if (output->length <= 0)
+    if (output->length <= 0) {
+        av_log(ctx, AV_LOG_ERROR, "The output data length overflow\n");
         return DNN_ERROR;
+    }
     output->data = av_realloc(output->data, output->length);
-    if (!output->data)
+    if (!output->data) {
+        av_log(ctx, AV_LOG_ERROR, "Failed to reallocate memory for output\n");
         return DNN_ERROR;
+    }
 
     switch (params->bin_op) {
     case DMBO_SUB:
@@ -186,6 +190,7 @@ int dnn_execute_layer_math_binary(DnnOperand *operands, const int32_t *input_ope
         math_binary_not_commutative(floormod, params, input, output, operands, input_operand_indexes);
         return 0;
     default:
+        av_log(ctx, AV_LOG_ERROR, "Unmatch math binary operator\n");
         return DNN_ERROR;
     }
 }
index 9525685afa46814df589f5a0fd6b6c074d426f0c..bb97ba2dcae9609b00f537b710b305a9b012e214 100644 (file)
@@ -49,6 +49,6 @@ typedef struct DnnLayerMathBinaryParams{
 
 int dnn_load_layer_math_binary(Layer *layer, AVIOContext *model_file_context, int file_size, int operands_num);
 int dnn_execute_layer_math_binary(DnnOperand *operands, const int32_t *input_operand_indexes,
-                                 int32_t output_operand_index, const void *parameters);
+                                 int32_t output_operand_index, const void *parameters, NativeContext *ctx);
 
 #endif
index 57bbd9d3e8df2c4f849b9a659b73ea3ff66cefd8..ae5d4daae90bfa6985cf7b3ca51a3a178f80ac5d 100644 (file)
@@ -53,7 +53,7 @@ int dnn_load_layer_math_unary(Layer *layer, AVIOContext *model_file_context, int
 }
 
 int dnn_execute_layer_math_unary(DnnOperand *operands, const int32_t *input_operand_indexes,
-                                int32_t output_operand_index, const void *parameters)
+                                int32_t output_operand_index, const void *parameters, NativeContext *ctx)
 {
     const DnnOperand *input = &operands[input_operand_indexes[0]];
     DnnOperand *output = &operands[output_operand_index];
@@ -67,11 +67,15 @@ int dnn_execute_layer_math_unary(DnnOperand *operands, const int32_t *input_oper
 
     output->data_type = input->data_type;
     output->length = calculate_operand_data_length(output);
-    if (output->length <= 0)
+    if (output->length <= 0) {
+        av_log(ctx, AV_LOG_ERROR, "The output data length overflow\n");
         return DNN_ERROR;
+    }
     output->data = av_realloc(output->data, output->length);
-    if (!output->data)
+    if (!output->data) {
+        av_log(ctx, AV_LOG_ERROR, "Failed to reallocate memory for output\n");
         return DNN_ERROR;
+    }
 
     dims_count = calculate_operand_dims_count(output);
     src = input->data;
@@ -143,6 +147,7 @@ int dnn_execute_layer_math_unary(DnnOperand *operands, const int32_t *input_oper
             dst[i] = round(src[i]);
         return 0;
     default:
+        av_log(ctx, AV_LOG_ERROR, "Unmatch math unary operator\n");
         return DNN_ERROR;
     }
 }
index d6a61effd5568f082edab3c8a0e2737d36960cbd..301d02e5fb450b370a4830339dbf1e13af8156a4 100644 (file)
@@ -55,6 +55,6 @@ typedef struct DnnLayerMathUnaryParams{
 
 int dnn_load_layer_math_unary(Layer *layer, AVIOContext *model_file_context, int file_size, int operands_num);
 int dnn_execute_layer_math_unary(DnnOperand *operands, const int32_t *input_operand_indexes,
-                                int32_t output_operand_index, const void *parameters);
+                                int32_t output_operand_index, const void *parameters, NativeContext *ctx);
 
 #endif
index cdddfdd87b20f4680782275c053f50d44164ec1b..7ad5a22969898d8a9241d8b2311ad7fbf90dffe2 100644 (file)
@@ -50,7 +50,7 @@ int dnn_load_layer_maximum(Layer *layer, AVIOContext *model_file_context, int fi
 }
 
 int dnn_execute_layer_maximum(DnnOperand *operands, const int32_t *input_operand_indexes,
-                              int32_t output_operand_index, const void *parameters)
+                              int32_t output_operand_index, const void *parameters, NativeContext *ctx)
 {
     const DnnOperand *input = &operands[input_operand_indexes[0]];
     DnnOperand *output = &operands[output_operand_index];
@@ -64,11 +64,15 @@ int dnn_execute_layer_maximum(DnnOperand *operands, const int32_t *input_operand
 
     output->data_type = input->data_type;
     output->length = calculate_operand_data_length(output);
-    if (output->length <= 0)
+    if (output->length <= 0) {
+        av_log(ctx, AV_LOG_ERROR, "The output data length overflow\n");
         return DNN_ERROR;
+    }
     output->data = av_realloc(output->data, output->length);
-    if (!output->data)
+    if (!output->data) {
+        av_log(ctx, AV_LOG_ERROR, "Failed to reallocate memory for output\n");
         return DNN_ERROR;
+    }
 
     dims_count = calculate_operand_dims_count(output);
     src = input->data;
index c049c63fd888f17da13e19c36f06685e663a4c4b..be63a3ab5b0256d4e640964150ddd9b3391c2fbc 100644 (file)
@@ -39,6 +39,6 @@ typedef struct DnnLayerMaximumParams{
 
 int dnn_load_layer_maximum(Layer *layer, AVIOContext *model_file_context, int file_size, int operands_num);
 int dnn_execute_layer_maximum(DnnOperand *operands, const int32_t *input_operand_indexes,
-                              int32_t output_operand_index, const void *parameters);
+                              int32_t output_operand_index, const void *parameters, NativeContext *ctx);
 
 #endif
index 5452d22878f3467d268aba6252e06369ff753cb8..05892d43f4f012ceaaff7100848cf80af50abdba 100644 (file)
@@ -76,7 +76,7 @@ static int after_get_buddy(int given, int border, LayerPadModeParam mode)
 }
 
 int dnn_execute_layer_pad(DnnOperand *operands, const int32_t *input_operand_indexes,
-                          int32_t output_operand_index, const void *parameters)
+                          int32_t output_operand_index, const void *parameters, NativeContext *ctx)
 {
     int32_t before_paddings;
     int32_t after_paddings;
@@ -111,11 +111,15 @@ int dnn_execute_layer_pad(DnnOperand *operands, const int32_t *input_operand_ind
     output_operand->dims[3] = new_channel;
     output_operand->data_type = operands[input_operand_index].data_type;
     output_operand->length = calculate_operand_data_length(output_operand);
-    if (output_operand->length <= 0)
+    if (output_operand->length <= 0) {
+        av_log(ctx, AV_LOG_ERROR, "The output data length overflow\n");
         return DNN_ERROR;
+    }
     output_operand->data = av_realloc(output_operand->data, output_operand->length);
-    if (!output_operand->data)
+    if (!output_operand->data) {
+        av_log(ctx, AV_LOG_ERROR, "Failed to reallocate memory for output\n");
         return DNN_ERROR;
+    }
     output = output_operand->data;
 
     // copy the original data
index 18e05bdd5c1bd53d3ff8d3069d80c49d18e41f90..6c69211824fcc96d90f34c48244fe89e8ba06792 100644 (file)
@@ -38,6 +38,6 @@ typedef struct LayerPadParams{
 
 int dnn_load_layer_pad(Layer *layer, AVIOContext *model_file_context, int file_size, int operands_num);
 int dnn_execute_layer_pad(DnnOperand *operands, const int32_t *input_operand_indexes,
-                          int32_t output_operand_index, const void *parameters);
+                          int32_t output_operand_index, const void *parameters, NativeContext *ctx);
 
 #endif
index b696e9c6fa7ec1578585c69aca4f052c88e2d374..dc76ace65a2b8e9f11323ac636187c41258cb7ec 100644 (file)
@@ -25,7 +25,7 @@
 #include "dnn_backend_native.h"
 
 typedef int (*LAYER_EXEC_FUNC)(DnnOperand *operands, const int32_t *input_operand_indexes,
-                               int32_t output_operand_index, const void *parameters);
+                               int32_t output_operand_index, const void *parameters, NativeContext *ctx);
 typedef int (*LAYER_LOAD_FUNC)(Layer *layer, AVIOContext *model_file_context, int file_size, int operands_num);
 
 typedef struct LayerFunc {
index d7c33a0e8834c9fcff677a4eead1cf1f77888cd4..0e6be8ba579e8efab8a7492b76ff5b9f191ef9a3 100644 (file)
@@ -91,7 +91,7 @@ static int test_with_same(void)
     operands[1].data = NULL;
 
     input_indexes[0] = 0;
-    dnn_execute_layer_avg_pool(operands, input_indexes, 1, &params);
+    dnn_execute_layer_avg_pool(operands, input_indexes, 1, &params, NULL);
 
     output = operands[1].data;
     for (int i = 0; i < sizeof(expected_output) / sizeof(float); ++i) {
@@ -171,7 +171,7 @@ static int test_with_valid(void)
     operands[1].data = NULL;
 
     input_indexes[0] = 0;
-    dnn_execute_layer_avg_pool(operands, input_indexes, 1, &params);
+    dnn_execute_layer_avg_pool(operands, input_indexes, 1, &params, NULL);
 
     output = operands[1].data;
     for (int i = 0; i < sizeof(expected_output) / sizeof(float); ++i) {
index 2da01e5372a9824a8c316462123fa72663d60888..836839cc64e8ab43c960c56240ff04846cd94b88 100644 (file)
@@ -114,7 +114,7 @@ static int test_with_same_dilate(void)
     operands[1].data = NULL;
 
     input_indexes[0] = 0;
-    dnn_execute_layer_conv2d(operands, input_indexes, 1, &params);
+    dnn_execute_layer_conv2d(operands, input_indexes, 1, &params, NULL);
 
     output = operands[1].data;
     for (int i = 0; i < sizeof(expected_output) / sizeof(float); i++) {
@@ -214,7 +214,7 @@ static int test_with_valid(void)
     operands[1].data = NULL;
 
     input_indexes[0] = 0;
-    dnn_execute_layer_conv2d(operands, input_indexes, 1, &params);
+    dnn_execute_layer_conv2d(operands, input_indexes, 1, &params, NULL);
 
     output = operands[1].data;
     for (int i = 0; i < sizeof(expected_output) / sizeof(float); i++) {
index 5225ec7b7ac153932465b8a2084affae479e1ede..2c641884c1bb5a0df030ad09893795495ffd8860 100644 (file)
@@ -81,7 +81,7 @@ static int test(void)
 
     input_indexes[0] = 0;
     params.block_size = 2;
-    dnn_execute_layer_depth2space(operands, input_indexes, 1, &params);
+    dnn_execute_layer_depth2space(operands, input_indexes, 1, &params, NULL);
 
     output = operands[1].data;
     for (int i = 0; i < sizeof(expected_output) / sizeof(float); i++) {
index 5422b2a207d72731a69ac7fc00a4de5fd1a384bc..c4da3f6a86dfeeb51520e64f2a1acf7f104c81c9 100644 (file)
@@ -71,7 +71,7 @@ static int test_broadcast_input0(DNNMathBinaryOperation op)
     operands[1].data = NULL;
 
     input_indexes[0] = 0;
-    dnn_execute_layer_math_binary(operands, input_indexes, 1, &params);
+    dnn_execute_layer_math_binary(operands, input_indexes, 1, &params, NULL);
 
     output = operands[1].data;
     for (int i = 0; i < sizeof(input) / sizeof(float); i++) {
@@ -111,7 +111,7 @@ static int test_broadcast_input1(DNNMathBinaryOperation op)
     operands[1].data = NULL;
 
     input_indexes[0] = 0;
-    dnn_execute_layer_math_binary(operands, input_indexes, 1, &params);
+    dnn_execute_layer_math_binary(operands, input_indexes, 1, &params, NULL);
 
     output = operands[1].data;
     for (int i = 0; i < sizeof(input) / sizeof(float); i++) {
@@ -159,7 +159,7 @@ static int test_no_broadcast(DNNMathBinaryOperation op)
 
     input_indexes[0] = 0;
     input_indexes[1] = 1;
-    dnn_execute_layer_math_binary(operands, input_indexes, 2, &params);
+    dnn_execute_layer_math_binary(operands, input_indexes, 2, &params, NULL);
 
     output = operands[2].data;
     for (int i = 0; i < sizeof(input0) / sizeof(float); i++) {
index e9235120f3495183f12d56cbdc872628ac74e7f0..ce14c41311be039a13c4cfb463a1efc2d13f03ab 100644 (file)
@@ -87,7 +87,7 @@ static int test(DNNMathUnaryOperation op)
     operands[1].data = NULL;
 
     input_indexes[0] = 0;
-    dnn_execute_layer_math_unary(operands, input_indexes, 1, &params);
+    dnn_execute_layer_math_unary(operands, input_indexes, 1, &params, NULL);
 
     output = operands[1].data;
     for (int i = 0; i < sizeof(input) / sizeof(float); ++i) {
index 06daf64481af2aeeb0e2e8c1ede39e1e7dc719a6..c9826705916d6b811b3620fb7602e180900a6110 100644 (file)
@@ -45,7 +45,7 @@ static int test(void)
     operands[1].data = NULL;
 
     input_indexes[0] = 0;
-    dnn_execute_layer_maximum(operands, input_indexes, 1, &params);
+    dnn_execute_layer_maximum(operands, input_indexes, 1, &params, NULL);
 
     output = operands[1].data;
     for (int i = 0; i < sizeof(input) / sizeof(float); i++) {
index ea8c824d1e9c006cf3bbd700264a282924517e1a..6a72adb3aef20200d3839011d26b6c98dad8e3b9 100644 (file)
@@ -79,7 +79,7 @@ static int test_with_mode_symmetric(void)
     operands[1].data = NULL;
 
     input_indexes[0] = 0;
-    dnn_execute_layer_pad(operands, input_indexes, 1, &params);
+    dnn_execute_layer_pad(operands, input_indexes, 1, &params, NULL);
 
     output = operands[1].data;
     for (int i = 0; i < sizeof(expected_output) / sizeof(float); i++) {
@@ -144,7 +144,7 @@ static int test_with_mode_reflect(void)
     operands[1].data = NULL;
 
     input_indexes[0] = 0;
-    dnn_execute_layer_pad(operands, input_indexes, 1, &params);
+    dnn_execute_layer_pad(operands, input_indexes, 1, &params, NULL);
 
     output = operands[1].data;
     for (int i = 0; i < sizeof(expected_output) / sizeof(float); i++) {
@@ -210,7 +210,7 @@ static int test_with_mode_constant(void)
     operands[1].data = NULL;
 
     input_indexes[0] = 0;
-    dnn_execute_layer_pad(operands, input_indexes, 1, &params);
+    dnn_execute_layer_pad(operands, input_indexes, 1, &params, NULL);
 
     output = operands[1].data;
     for (int i = 0; i < sizeof(expected_output) / sizeof(float); i++) {