Book a Demo!
CoCalc Logo Icon
StoreFeaturesDocsShareSupportNewsAboutPoliciesSign UpSign In
Tetragramm
GitHub Repository: Tetragramm/opencv
Path: blob/master/modules/dnn/test/test_onnx_importer.cpp
16354 views
1
// This file is part of OpenCV project.
2
// It is subject to the license terms in the LICENSE file found in the top-level directory
3
// of this distribution and at http://opencv.org/license.html.
4
5
// Copyright (C) 2018, Intel Corporation, all rights reserved.
6
// Third party copyrights are property of their respective owners.
7
8
9
#include "test_precomp.hpp"
10
#include "npy_blob.hpp"
11
#include <opencv2/dnn/shape_utils.hpp>
12
13
namespace opencv_test { namespace {
14
15
template<typename TString>
16
static std::string _tf(TString filename)
17
{
18
String rootFolder = "dnn/onnx/";
19
return findDataFile(rootFolder + filename, false);
20
}
21
22
class Test_ONNX_layers : public DNNTestLayer
23
{
24
public:
25
enum Extension
26
{
27
npy,
28
pb
29
};
30
31
void testONNXModels(const String& basename, const Extension ext = npy, const double l1 = 0, const float lInf = 0)
32
{
33
String onnxmodel = _tf("models/" + basename + ".onnx");
34
Mat inp, ref;
35
if (ext == npy) {
36
inp = blobFromNPY(_tf("data/input_" + basename + ".npy"));
37
ref = blobFromNPY(_tf("data/output_" + basename + ".npy"));
38
}
39
else if (ext == pb) {
40
inp = readTensorFromONNX(_tf("data/input_" + basename + ".pb"));
41
ref = readTensorFromONNX(_tf("data/output_" + basename + ".pb"));
42
}
43
else
44
CV_Error(Error::StsUnsupportedFormat, "Unsupported extension");
45
46
checkBackend(&inp, &ref);
47
Net net = readNetFromONNX(onnxmodel);
48
ASSERT_FALSE(net.empty());
49
50
net.setPreferableBackend(backend);
51
net.setPreferableTarget(target);
52
53
net.setInput(inp);
54
Mat out = net.forward();
55
normAssert(ref, out, "", l1 ? l1 : default_l1, lInf ? lInf : default_lInf);
56
}
57
};
58
59
TEST_P(Test_ONNX_layers, MaxPooling)
{
    // Single and stacked max-pooling layers.
    testONNXModels("maxpooling");
    testONNXModels("two_maxpooling");
}
64
65
TEST_P(Test_ONNX_layers, Convolution)
{
    // Single and stacked convolution layers.
    testONNXModels("convolution");
    testONNXModels("two_convolution");
}
70
71
TEST_P(Test_ONNX_layers, Dropout)
{
    testONNXModels("dropout");
}
75
76
TEST_P(Test_ONNX_layers, Linear)
{
    const bool unsupported = (backend == DNN_BACKEND_OPENCV && target == DNN_TARGET_OPENCL_FP16);
    if (unsupported)
        throw SkipTestException("");
    testONNXModels("linear");
}
82
83
TEST_P(Test_ONNX_layers, ReLU)
{
    testONNXModels("ReLU");
}
87
88
TEST_P(Test_ONNX_layers, MaxPooling_Sigmoid)
{
    // Max-pooling followed by a sigmoid activation.
    testONNXModels("maxpooling_sigmoid");
}
92
93
TEST_P(Test_ONNX_layers, Concatenation)
{
    if (backend == DNN_BACKEND_INFERENCE_ENGINE)
    {
        // Not supported on OpenCL / OpenCL-FP16 / Myriad IE targets.
        if (target == DNN_TARGET_OPENCL || target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_MYRIAD)
            throw SkipTestException("");
    }
    testONNXModels("concatenation");
}
100
101
TEST_P(Test_ONNX_layers, AveragePooling)
{
    testONNXModels("average_pooling");
}
105
106
TEST_P(Test_ONNX_layers, BatchNormalization)
{
    testONNXModels("batch_norm");
}
110
111
TEST_P(Test_ONNX_layers, Transpose)
{
    if (backend == DNN_BACKEND_INFERENCE_ENGINE)
    {
        // Not supported on OpenCL / OpenCL-FP16 / Myriad IE targets.
        if (target == DNN_TARGET_OPENCL || target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_MYRIAD)
            throw SkipTestException("");
    }
    testONNXModels("transpose");
}
118
119
TEST_P(Test_ONNX_layers, Multiplication)
{
    // Parenthesized explicitly: '&&' binds tighter than '||', and the bare
    // mix of the two triggers -Wparentheses and is easy to misread.
    if ((backend == DNN_BACKEND_OPENCV && target == DNN_TARGET_OPENCL_FP16) ||
        (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD))
        throw SkipTestException("");
    testONNXModels("mul");
}
126
127
TEST_P(Test_ONNX_layers, Constant)
{
    testONNXModels("constant");
}
131
132
TEST_P(Test_ONNX_layers, MultyInputs)
{
    // A model with two named inputs ("0" and "1").
    Net net = readNetFromONNX(_tf("models/multy_inputs.onnx"));
    ASSERT_FALSE(net.empty());

    net.setPreferableBackend(backend);
    net.setPreferableTarget(target);

    Mat firstInput = blobFromNPY(_tf("data/input_multy_inputs_0.npy"));
    Mat secondInput = blobFromNPY(_tf("data/input_multy_inputs_1.npy"));
    Mat ref = blobFromNPY(_tf("data/output_multy_inputs.npy"));
    checkBackend(&firstInput, &ref);

    net.setInput(firstInput, "0");
    net.setInput(secondInput, "1");
    Mat out = net.forward();

    normAssert(ref, out, "", default_l1, default_lInf);
}
153
154
155
// Instantiate every layer test across all available DNN backend/target pairs.
INSTANTIATE_TEST_CASE_P(/*nothing*/, Test_ONNX_layers, dnnBackendsAndTargets());
156
157
// Whole-network regression tests; reuses the helper machinery of Test_ONNX_layers.
class Test_ONNX_nets : public Test_ONNX_layers {};
158
TEST_P(Test_ONNX_nets, Alexnet)
{
    const String model = _tf("models/alexnet.onnx");

    Net net = readNetFromONNX(model);
    ASSERT_FALSE(net.empty());

    net.setPreferableBackend(backend);
    net.setPreferableTarget(target);

    Mat inp = imread(_tf("../grace_hopper_227.png"));
    Mat ref = blobFromNPY(_tf("../caffe_alexnet_prob.npy"));
    checkBackend(&inp, &ref);

    net.setInput(blobFromImage(inp, 1.0f, Size(227, 227), Scalar(), false));
    Mat out = net.forward();

    // Reference first, then the actual output — consistent with every other
    // normAssert() call in this file. (A redundant second net.empty() check
    // before forward() was removed.)
    normAssert(ref, out, "", default_l1, default_lInf);
}
178
179
TEST_P(Test_ONNX_nets, Squeezenet)
{
    testONNXModels("squeezenet", pb);
}
183
184
TEST_P(Test_ONNX_nets, Googlenet)
{
    if (backend == DNN_BACKEND_INFERENCE_ENGINE)
        throw SkipTestException("");

    Net net = readNetFromONNX(_tf("models/googlenet.onnx"));
    ASSERT_FALSE(net.empty());

    net.setPreferableBackend(backend);
    net.setPreferableTarget(target);

    // Forward a batch of two images together.
    std::vector<Mat> batch;
    batch.push_back(imread(_tf("../googlenet_0.png")));
    batch.push_back(imread(_tf("../googlenet_1.png")));
    Mat inp = blobFromImages(batch, 1.0f, Size(), Scalar(), false);
    Mat ref = blobFromNPY(_tf("../googlenet_prob.npy"));
    checkBackend(&inp, &ref);

    net.setInput(inp);
    ASSERT_FALSE(net.empty());
    Mat out = net.forward();

    normAssert(ref, out, "", default_l1, default_lInf);
}
210
211
TEST_P(Test_ONNX_nets, CaffeNet)
{
    testONNXModels("caffenet", pb);
}
215
216
TEST_P(Test_ONNX_nets, RCNN_ILSVRC13)
{
    testONNXModels("rcnn_ilsvrc13", pb);
}
220
221
#ifdef OPENCV_32BIT_CONFIGURATION
222
TEST_P(Test_ONNX_nets, DISABLED_VGG16) // memory usage >2Gb
223
#else
224
TEST_P(Test_ONNX_nets, VGG16)
225
#endif
226
{
227
double l1 = default_l1;
228
double lInf = default_lInf;
229
// output range: [-69; 72]
230
if (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_MYRIAD) {
231
l1 = 0.087;
232
lInf = 0.585;
233
}
234
else if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_OPENCL) {
235
lInf = 1.2e-4;
236
}
237
testONNXModels("vgg16", pb, l1, lInf);
238
}
239
240
#ifdef OPENCV_32BIT_CONFIGURATION
241
TEST_P(Test_ONNX_nets, DISABLED_VGG16_bn) // memory usage >2Gb
242
#else
243
TEST_P(Test_ONNX_nets, VGG16_bn)
244
#endif
245
{
246
double l1 = default_l1;
247
double lInf = default_lInf;
248
// output range: [-16; 27]
249
if (backend == DNN_BACKEND_OPENCV && target == DNN_TARGET_OPENCL_FP16) {
250
l1 = 0.0086;
251
lInf = 0.037;
252
}
253
else if (backend == DNN_BACKEND_INFERENCE_ENGINE &&
254
(target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_MYRIAD)) {
255
l1 = 0.031;
256
lInf = 0.2;
257
}
258
testONNXModels("vgg16-bn", pb, l1, lInf);
259
}
260
261
TEST_P(Test_ONNX_nets, ZFNet)
{
    testONNXModels("zfnet512", pb);
}
265
266
TEST_P(Test_ONNX_nets, ResNet18v1)
{
    // output range: [-16; 22]
    double l1 = default_l1;
    double lInf = default_lInf;
    if (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_MYRIAD)
    {
        l1 = 0.022;
        lInf = 0.12;
    }
    testONNXModels("resnet18v1", pb, l1, lInf);
}
273
274
TEST_P(Test_ONNX_nets, ResNet50v1)
{
    // output range: [-67; 75]
    double l1 = 1.25e-5;
    double lInf = 1.2e-4;
    if (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_MYRIAD)
    {
        l1 = 0.6;
        lInf = 0.51;
    }
    testONNXModels("resnet50v1", pb, l1, lInf);
}
281
282
TEST_P(Test_ONNX_nets, ResNet101_DUC_HDC)
{
    // Skipped on every OpenCL / Myriad target.
    const bool skip = target == DNN_TARGET_OPENCL_FP16 ||
                      target == DNN_TARGET_OPENCL ||
                      target == DNN_TARGET_MYRIAD;
    if (skip)
        throw SkipTestException("");
    testONNXModels("resnet101_duc_hdc", pb);
}
290
291
TEST_P(Test_ONNX_nets, TinyYolov2)
{
    // Parenthesized explicitly: '&&' binds tighter than '||', and the bare
    // mix of the two triggers -Wparentheses and is easy to misread.
    if (cvtest::skipUnstableTests ||
        (backend == DNN_BACKEND_INFERENCE_ENGINE &&
         (target == DNN_TARGET_OPENCL || target == DNN_TARGET_OPENCL_FP16)))
    {
        throw SkipTestException("");
    }
    // output range: [-11; 8]
    const double l1 = (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_MYRIAD) ? 0.017 : default_l1;
    const double lInf = (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_MYRIAD) ? 0.14 : default_lInf;
    testONNXModels("tiny_yolo2", pb, l1, lInf);
}
302
303
TEST_P(Test_ONNX_nets, CNN_MNIST)
{
    // output range: [-1952; 6574]
    double l1 = 4.4e-4;
    double lInf = 2e-3;
    if (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_MYRIAD)
    {
        l1 = 3.82;
        lInf = 13.5;
    }

    testONNXModels("cnn_mnist", pb, l1, lInf);
}
311
312
TEST_P(Test_ONNX_nets, MobileNet_v2)
{
    // output range: [-166; 317]
    double l1 = 7e-5;
    double lInf = 5e-4;
    if (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_MYRIAD)
    {
        l1 = 0.38;
        lInf = 2.87;
    }
    testONNXModels("mobilenetv2", pb, l1, lInf);
}
319
320
TEST_P(Test_ONNX_nets, LResNet100E_IR)
{
    if (backend == DNN_BACKEND_INFERENCE_ENGINE)
    {
        // Not supported on OpenCL / OpenCL-FP16 / Myriad IE targets.
        if (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_OPENCL || target == DNN_TARGET_MYRIAD)
            throw SkipTestException("");
    }

    // output range: [-3; 3]
    double l1 = default_l1;
    double lInf = default_lInf;
    if (backend == DNN_BACKEND_OPENCV && target == DNN_TARGET_OPENCL_FP16)
    {
        l1 = 0.009;
        lInf = 0.035;
    }
    testONNXModels("LResNet100E_IR", pb, l1, lInf);
}
335
336
TEST_P(Test_ONNX_nets, Emotion_ferplus)
{
    testONNXModels("emotion_ferplus", pb);
}
340
341
TEST_P(Test_ONNX_nets, Inception_v2)
{
    if (backend == DNN_BACKEND_INFERENCE_ENGINE)
        throw SkipTestException("");

    testONNXModels("inception_v2", pb);
}
348
349
TEST_P(Test_ONNX_nets, DenseNet121)
{
    // output range: [-87; 138]
    double l1 = 2.2e-5;
    double lInf = 1.23e-4;
    if (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_MYRIAD)
    {
        l1 = 0.12;
        lInf = 0.74;
    }
    testONNXModels("densenet121", pb, l1, lInf);
}
356
357
TEST_P(Test_ONNX_nets, Inception_v1)
{
    testONNXModels("inception_v1", pb);
}
361
362
TEST_P(Test_ONNX_nets, Shufflenet)
{
    if (backend == DNN_BACKEND_INFERENCE_ENGINE)
    {
        // Not supported on OpenCL / OpenCL-FP16 / Myriad IE targets.
        if (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_OPENCL || target == DNN_TARGET_MYRIAD)
            throw SkipTestException("");
    }
    testONNXModels("shufflenet", pb);
}
369
370
// Instantiate every network test across all available DNN backend/target pairs.
INSTANTIATE_TEST_CASE_P(/**/, Test_ONNX_nets, dnnBackendsAndTargets());
371
372
}} // namespace
373
374