author     Ting Fu <ting.fu@intel.com>        2020-08-25 11:47:50 +0800
committer  Guo, Yejun <yejun.guo@intel.com>   2020-08-25 13:03:46 +0800
commit     c8ba0daf8dab2f5cbcdded37cd6383649933fbf3 (patch)
tree       07fc4d82be5a16a925f5710ec370141e1163d778 /tests
parent     230cf9d1854b171727d1b10c2a2fbe0df9d1d489 (diff)
download   ffmpeg-c8ba0daf8dab2f5cbcdded37cd6383649933fbf3.tar.gz
dnn/native: add log error message
Signed-off-by: Ting Fu <ting.fu@intel.com>
Diffstat (limited to 'tests')
-rw-r--r--  tests/dnn/dnn-layer-avgpool-test.c      | 4
-rw-r--r--  tests/dnn/dnn-layer-conv2d-test.c       | 4
-rw-r--r--  tests/dnn/dnn-layer-depth2space-test.c  | 2
-rw-r--r--  tests/dnn/dnn-layer-mathbinary-test.c   | 6
-rw-r--r--  tests/dnn/dnn-layer-mathunary-test.c    | 2
-rw-r--r--  tests/dnn/dnn-layer-maximum-test.c      | 2
-rw-r--r--  tests/dnn/dnn-layer-pad-test.c          | 6
7 files changed, 13 insertions, 13 deletions
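
Every call site changed by this patch gains a trailing NULL argument, which implies the native layer execution functions now take an extra context parameter used for the new error logging (per the subject line "dnn/native: add log error message"); the tests simply pass NULL. Below is a minimal, hedged sketch of the prototype change this implies, assuming the new parameter is an opaque context pointer; the parameter name ctx and its void * type are assumptions, not taken from this diff.

#include <stdint.h>

struct DnnOperand;

/* Sketch of the prototype implied by the updated call sites, e.g.
 *     dnn_execute_layer_avg_pool(operands, input_indexes, 1, &params, NULL);
 * The trailing ctx parameter is the new addition (name and type assumed). */
int dnn_execute_layer_avg_pool(struct DnnOperand *operands,
                               const int32_t *input_operand_indexes,
                               int32_t output_operand_index,
                               const void *parameters,
                               void *ctx);

The other layer entry points touched here (conv2d, depth2space, math_binary, math_unary, maximum, pad) are updated the same way, so passing NULL must remain a valid "no logging context" case for all of them.
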
diff --git a/tests/dnn/dnn-layer-avgpool-test.c b/tests/dnn/dnn-layer-avgpool-test.c
index d7c33a0e88..0e6be8ba57 100644
--- a/tests/dnn/dnn-layer-avgpool-test.c
+++ b/tests/dnn/dnn-layer-avgpool-test.c
@@ -91,7 +91,7 @@ static int test_with_same(void)
operands[1].data = NULL;
input_indexes[0] = 0;
- dnn_execute_layer_avg_pool(operands, input_indexes, 1, &params);
+ dnn_execute_layer_avg_pool(operands, input_indexes, 1, &params, NULL);
output = operands[1].data;
for (int i = 0; i < sizeof(expected_output) / sizeof(float); ++i) {
@@ -171,7 +171,7 @@ static int test_with_valid(void)
operands[1].data = NULL;
input_indexes[0] = 0;
- dnn_execute_layer_avg_pool(operands, input_indexes, 1, &params);
+ dnn_execute_layer_avg_pool(operands, input_indexes, 1, &params, NULL);
output = operands[1].data;
for (int i = 0; i < sizeof(expected_output) / sizeof(float); ++i) {
diff --git a/tests/dnn/dnn-layer-conv2d-test.c b/tests/dnn/dnn-layer-conv2d-test.c
index 2da01e5372..836839cc64 100644
--- a/tests/dnn/dnn-layer-conv2d-test.c
+++ b/tests/dnn/dnn-layer-conv2d-test.c
@@ -114,7 +114,7 @@ static int test_with_same_dilate(void)
operands[1].data = NULL;
input_indexes[0] = 0;
- dnn_execute_layer_conv2d(operands, input_indexes, 1, &params);
+ dnn_execute_layer_conv2d(operands, input_indexes, 1, &params, NULL);
output = operands[1].data;
for (int i = 0; i < sizeof(expected_output) / sizeof(float); i++) {
@@ -214,7 +214,7 @@ static int test_with_valid(void)
operands[1].data = NULL;
input_indexes[0] = 0;
- dnn_execute_layer_conv2d(operands, input_indexes, 1, &params);
+ dnn_execute_layer_conv2d(operands, input_indexes, 1, &params, NULL);
output = operands[1].data;
for (int i = 0; i < sizeof(expected_output) / sizeof(float); i++) {
diff --git a/tests/dnn/dnn-layer-depth2space-test.c b/tests/dnn/dnn-layer-depth2space-test.c
index 5225ec7b7a..2c641884c1 100644
--- a/tests/dnn/dnn-layer-depth2space-test.c
+++ b/tests/dnn/dnn-layer-depth2space-test.c
@@ -81,7 +81,7 @@ static int test(void)
input_indexes[0] = 0;
params.block_size = 2;
- dnn_execute_layer_depth2space(operands, input_indexes, 1, &params);
+ dnn_execute_layer_depth2space(operands, input_indexes, 1, &params, NULL);
output = operands[1].data;
for (int i = 0; i < sizeof(expected_output) / sizeof(float); i++) {
diff --git a/tests/dnn/dnn-layer-mathbinary-test.c b/tests/dnn/dnn-layer-mathbinary-test.c
index 5422b2a207..c4da3f6a86 100644
--- a/tests/dnn/dnn-layer-mathbinary-test.c
+++ b/tests/dnn/dnn-layer-mathbinary-test.c
@@ -71,7 +71,7 @@ static int test_broadcast_input0(DNNMathBinaryOperation op)
operands[1].data = NULL;
input_indexes[0] = 0;
- dnn_execute_layer_math_binary(operands, input_indexes, 1, &params);
+ dnn_execute_layer_math_binary(operands, input_indexes, 1, &params, NULL);
output = operands[1].data;
for (int i = 0; i < sizeof(input) / sizeof(float); i++) {
@@ -111,7 +111,7 @@ static int test_broadcast_input1(DNNMathBinaryOperation op)
operands[1].data = NULL;
input_indexes[0] = 0;
- dnn_execute_layer_math_binary(operands, input_indexes, 1, &params);
+ dnn_execute_layer_math_binary(operands, input_indexes, 1, &params, NULL);
output = operands[1].data;
for (int i = 0; i < sizeof(input) / sizeof(float); i++) {
@@ -159,7 +159,7 @@ static int test_no_broadcast(DNNMathBinaryOperation op)
input_indexes[0] = 0;
input_indexes[1] = 1;
- dnn_execute_layer_math_binary(operands, input_indexes, 2, &params);
+ dnn_execute_layer_math_binary(operands, input_indexes, 2, &params, NULL);
output = operands[2].data;
for (int i = 0; i < sizeof(input0) / sizeof(float); i++) {
diff --git a/tests/dnn/dnn-layer-mathunary-test.c b/tests/dnn/dnn-layer-mathunary-test.c
index e9235120f3..ce14c41311 100644
--- a/tests/dnn/dnn-layer-mathunary-test.c
+++ b/tests/dnn/dnn-layer-mathunary-test.c
@@ -87,7 +87,7 @@ static int test(DNNMathUnaryOperation op)
operands[1].data = NULL;
input_indexes[0] = 0;
- dnn_execute_layer_math_unary(operands, input_indexes, 1, &params);
+ dnn_execute_layer_math_unary(operands, input_indexes, 1, &params, NULL);
output = operands[1].data;
for (int i = 0; i < sizeof(input) / sizeof(float); ++i) {
diff --git a/tests/dnn/dnn-layer-maximum-test.c b/tests/dnn/dnn-layer-maximum-test.c
index 06daf64481..c982670591 100644
--- a/tests/dnn/dnn-layer-maximum-test.c
+++ b/tests/dnn/dnn-layer-maximum-test.c
@@ -45,7 +45,7 @@ static int test(void)
operands[1].data = NULL;
input_indexes[0] = 0;
- dnn_execute_layer_maximum(operands, input_indexes, 1, &params);
+ dnn_execute_layer_maximum(operands, input_indexes, 1, &params, NULL);
output = operands[1].data;
for (int i = 0; i < sizeof(input) / sizeof(float); i++) {
diff --git a/tests/dnn/dnn-layer-pad-test.c b/tests/dnn/dnn-layer-pad-test.c
index ea8c824d1e..6a72adb3ae 100644
--- a/tests/dnn/dnn-layer-pad-test.c
+++ b/tests/dnn/dnn-layer-pad-test.c
@@ -79,7 +79,7 @@ static int test_with_mode_symmetric(void)
operands[1].data = NULL;
input_indexes[0] = 0;
- dnn_execute_layer_pad(operands, input_indexes, 1, &params);
+ dnn_execute_layer_pad(operands, input_indexes, 1, &params, NULL);
output = operands[1].data;
for (int i = 0; i < sizeof(expected_output) / sizeof(float); i++) {
@@ -144,7 +144,7 @@ static int test_with_mode_reflect(void)
operands[1].data = NULL;
input_indexes[0] = 0;
- dnn_execute_layer_pad(operands, input_indexes, 1, &params);
+ dnn_execute_layer_pad(operands, input_indexes, 1, &params, NULL);
output = operands[1].data;
for (int i = 0; i < sizeof(expected_output) / sizeof(float); i++) {
@@ -210,7 +210,7 @@ static int test_with_mode_constant(void)
operands[1].data = NULL;
input_indexes[0] = 0;
- dnn_execute_layer_pad(operands, input_indexes, 1, &params);
+ dnn_execute_layer_pad(operands, input_indexes, 1, &params, NULL);
output = operands[1].data;
for (int i = 0; i < sizeof(expected_output) / sizeof(float); i++) {
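
Beyond the mechanical call-site updates above, the commit subject suggests the new context argument is what lets the layers emit error messages instead of failing silently. The following is a self-contained sketch of that pattern, assuming the context may legitimately be NULL (as in these tests); the types, function, and message text are hypothetical stand-ins, not code from the patch.

#include <stdio.h>
#include <stdlib.h>

/* Stand-in operand and context types, for illustration only. */
typedef struct Operand { float *data; size_t size; } Operand;
typedef struct LayerCtx { const char *layer_name; } LayerCtx;

/* ctx may be NULL, as in the unit tests, so only log when it is present.
 * Real layer code would route this through av_log() and return DNN_ERROR. */
static int example_layer(Operand *out, size_t needed, LayerCtx *ctx)
{
    float *p = realloc(out->data, needed * sizeof(*p));
    if (!p) {
        if (ctx)
            fprintf(stderr, "[%s] failed to allocate output buffer\n", ctx->layer_name);
        return -1;
    }
    out->data = p;
    out->size = needed;
    return 0;
}

int main(void)
{
    Operand out = { NULL, 0 };
    LayerCtx ctx = { "avg_pool" };
    int ret = example_layer(&out, 16, &ctx); /* a unit test would pass NULL for ctx */
    printf("layer returned %d\n", ret);
    free(out.data);
    return 0;
}
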