Book a Demo!
CoCalc Logo Icon
StoreFeaturesDocsShareSupportNewsAboutPoliciesSign UpSign In
Tetragramm
GitHub Repository: Tetragramm/opencv
Path: blob/master/modules/dnn/test/test_misc.cpp
16354 views
1
// This file is part of OpenCV project.
2
// It is subject to the license terms in the LICENSE file found in the top-level directory
3
// of this distribution and at http://opencv.org/license.html.
4
//
5
// Copyright (C) 2017, Intel Corporation, all rights reserved.
6
// Third party copyrights are property of their respective owners.
7
8
#include "test_precomp.hpp"
9
#include <opencv2/core/ocl.hpp>
10
#include <opencv2/core/opencl/ocl_defs.hpp>
11
#include <opencv2/dnn/layer.details.hpp> // CV_DNN_REGISTER_LAYER_CLASS
12
13
namespace opencv_test { namespace {
14
15
TEST(blobFromImage_4ch, Regression)
{
    // Build a 10x10 4-channel image where channel c is filled with the value c.
    Mat channels[4];
    for (int c = 0; c < 4; ++c)
        channels[c] = Mat::ones(10, 10, CV_8U) * c;

    Mat img;
    merge(channels, 4, img);

    // Convert to an NCHW blob without RB swap and without cropping.
    Mat blob = dnn::blobFromImage(img, 1., Size(), Scalar(), false, false);

    // Every plane of the blob must still be a constant equal to its channel index.
    for (int c = 0; c < 4; ++c)
    {
        Mat plane(img.rows, img.cols, CV_32F, blob.ptr(0, c));
        ASSERT_DOUBLE_EQ(cvtest::norm(plane, cv::NORM_INF), c);
    }
}
31
32
TEST(blobFromImage, allocated)
{
    // NCHW destination shape: one image, 3 channels, 4x5 pixels.
    int shape[] = {1, 3, 4, 5};
    Mat img(shape[2], shape[3], CV_32FC(shape[1]));
    Mat blob(4, shape, CV_32F);

    // Remember the buffer address so we can detect a reallocation.
    void* dataBefore = blob.data;
    dnn::blobFromImage(img, blob, 1.0 / 255, Size(), Scalar(), false, false);

    // A correctly pre-allocated destination must be reused, not reallocated.
    ASSERT_EQ(dataBefore, blob.data);
}
41
42
// blobFromImages followed by imagesFromBlob must round-trip the images exactly.
TEST(imagesFromBlob, Regression)
{
    const int nbOfImages = 8;

    std::vector<cv::Mat> inputImgs(nbOfImages);
    for (int i = 0; i < nbOfImages; i++)
    {
        inputImgs[i] = cv::Mat::ones(100, 100, CV_32FC3);
        cv::randu(inputImgs[i], cv::Scalar::all(0), cv::Scalar::all(1));
    }

    cv::Mat blob = cv::dnn::blobFromImages(inputImgs, 1., cv::Size(), cv::Scalar(), false, false);
    std::vector<cv::Mat> outputImgs;
    cv::dnn::imagesFromBlob(blob, outputImgs);

    for (int i = 0; i < nbOfImages; i++)
    {
        // Compare with an INF-norm instead of cv::countNonZero(a != b):
        // countNonZero requires a single-channel matrix, while comparing two
        // CV_32FC3 images produces a 3-channel mask, so the old expression
        // relied on unsupported usage.
        ASSERT_DOUBLE_EQ(cvtest::norm(inputImgs[i], outputImgs[i], cv::NORM_INF), 0.);
    }
}
62
63
// readNet() must auto-detect the framework from file extensions/content
// for every supported importer.
TEST(readNet, Regression)
{
    // Caffe: prototxt + caffemodel.
    Net net = readNet(findDataFile("dnn/squeezenet_v1.1.prototxt", false),
                      findDataFile("dnn/squeezenet_v1.1.caffemodel", false));
    EXPECT_FALSE(net.empty());

    // Caffe again, but with the weights passed first: loading must not
    // depend on the argument order.
    net = readNet(findDataFile("dnn/opencv_face_detector.caffemodel", false),
                  findDataFile("dnn/opencv_face_detector.prototxt", false));
    EXPECT_FALSE(net.empty());

    // Torch: a single .t7 file.
    net = readNet(findDataFile("dnn/openface_nn4.small2.v1.t7", false));
    EXPECT_FALSE(net.empty());

    // Darknet: cfg + weights.
    net = readNet(findDataFile("dnn/tiny-yolo-voc.cfg", false),
                  findDataFile("dnn/tiny-yolo-voc.weights", false));
    EXPECT_FALSE(net.empty());

    // TensorFlow: pbtxt + pb.
    net = readNet(findDataFile("dnn/ssd_mobilenet_v1_coco.pbtxt", false),
                  findDataFile("dnn/ssd_mobilenet_v1_coco.pb", false));
    EXPECT_FALSE(net.empty());
}
80
81
// Trivial custom layer that ignores its input and fills the output with 1.
// Used by the LayerFactory test to check (re)registration of a layer type.
class FirstCustomLayer CV_FINAL : public Layer
{
public:
    FirstCustomLayer(const LayerParams &params) : Layer(params) {}

    static Ptr<Layer> create(LayerParams& params)
    {
        return makePtr<FirstCustomLayer>(params);
    }

    void forward(InputArrayOfArrays, OutputArrayOfArrays outputs_arr, OutputArrayOfArrays) CV_OVERRIDE
    {
        CV_TRACE_FUNCTION();
        CV_TRACE_ARG_VALUE(name, "name", name.c_str());

        std::vector<Mat> outs;
        outputs_arr.getMatVector(outs);
        outs[0].setTo(1);  // marker value identifying this implementation
    }
};
101
102
// Counterpart of FirstCustomLayer that writes 2 instead of 1, so tests can
// tell which registered implementation actually ran.
class SecondCustomLayer CV_FINAL : public Layer
{
public:
    SecondCustomLayer(const LayerParams &params) : Layer(params) {}

    static Ptr<Layer> create(LayerParams& params)
    {
        return makePtr<SecondCustomLayer>(params);
    }

    void forward(InputArrayOfArrays, OutputArrayOfArrays outputs_arr, OutputArrayOfArrays) CV_OVERRIDE
    {
        CV_TRACE_FUNCTION();
        CV_TRACE_ARG_VALUE(name, "name", name.c_str());

        std::vector<Mat> outs;
        outputs_arr.getMatVector(outs);
        outs[0].setTo(2);  // marker value identifying this implementation
    }
};
122
123
// Registering the same type twice stacks implementations; a single
// unregisterLayer() call must re-expose the previously registered one.
TEST(LayerFactory, custom_layers)
{
    LayerParams lp;
    lp.name = "name";
    lp.type = "CustomType";

    Mat inp(1, 1, CV_32FC1);
    for (int iter = 0; iter < 3; ++iter)
    {
        switch (iter)
        {
        case 0:
            { CV_DNN_REGISTER_LAYER_CLASS(CustomType, FirstCustomLayer); }
            break;
        case 1:
            // Shadows FirstCustomLayer without removing it.
            { CV_DNN_REGISTER_LAYER_CLASS(CustomType, SecondCustomLayer); }
            break;
        default:
            // Pops SecondCustomLayer; FirstCustomLayer becomes active again.
            { LayerFactory::unregisterLayer("CustomType"); }
            break;
        }

        Net net;
        net.addLayerToPrev(lp.name, lp.type, lp);

        net.setInput(inp);
        net.setPreferableBackend(DNN_BACKEND_OPENCV);
        Mat output = net.forward();

        if (iter == 0) EXPECT_EQ(output.at<float>(0), 1);
        else if (iter == 1) EXPECT_EQ(output.at<float>(0), 2);
        else if (iter == 2) EXPECT_EQ(output.at<float>(0), 1);
    }
    LayerFactory::unregisterLayer("CustomType");
}
149
150
typedef testing::TestWithParam<tuple<float, Vec3f, int, tuple<Backend, Target> > > setInput;
151
TEST_P(setInput, normalization)
152
{
153
const float kScale = get<0>(GetParam());
154
const Scalar kMean = get<1>(GetParam());
155
const int dtype = get<2>(GetParam());
156
const int backend = get<0>(get<3>(GetParam()));
157
const int target = get<1>(get<3>(GetParam()));
158
const bool kSwapRB = true;
159
160
if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD && !checkMyriadTarget())
161
throw SkipTestException("Myriad is not available/disabled in OpenCV");
162
if (backend == DNN_BACKEND_OPENCV && target == DNN_TARGET_OPENCL_FP16 && dtype != CV_32F)
163
throw SkipTestException("");
164
if (backend == DNN_BACKEND_VKCOM && dtype != CV_32F)
165
throw SkipTestException("");
166
167
Mat inp(5, 5, CV_8UC3);
168
randu(inp, 0, 255);
169
Mat ref = blobFromImage(inp, kScale, Size(), kMean, kSwapRB, /*crop*/false);
170
171
LayerParams lp;
172
Net net;
173
net.addLayerToPrev("testLayer", "Identity", lp);
174
net.setPreferableBackend(backend);
175
net.setPreferableTarget(target);
176
177
Mat blob = blobFromImage(inp, 1.0, Size(), Scalar(), kSwapRB, /*crop*/false, dtype);
178
ASSERT_EQ(blob.type(), dtype);
179
net.setInput(blob, "", kScale, kMean);
180
Mat out = net.forward();
181
ASSERT_EQ(out.type(), CV_32F);
182
normAssert(ref, out, "", 4e-4, 1e-3);
183
}
184
185
INSTANTIATE_TEST_CASE_P(/**/, setInput, Combine(
186
Values(1.0f, 1.0 / 127.5),
187
Values(Vec3f(), Vec3f(50, 50, 50), Vec3f(10, 50, 140)),
188
Values(CV_32F, CV_8U),
189
dnnBackendsAndTargets()
190
));
191
192
// Custom layer that only overrides the deprecated Mat*-based forward();
// the base Layer is expected to route new-style calls into it.
class CustomLayerWithDeprecatedForward CV_FINAL : public Layer
{
public:
    CustomLayerWithDeprecatedForward(const LayerParams &params) : Layer(params) {}

    static Ptr<Layer> create(LayerParams& params)
    {
        return makePtr<CustomLayerWithDeprecatedForward>(params);
    }

    // Deprecated signature: writes input + 0.5 into the output.
    virtual void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals) CV_OVERRIDE
    {
        CV_Assert_N(inputs[0]->depth() == CV_32F, outputs[0].depth() == CV_32F);
        cv::add(*inputs[0], 0.5f, outputs[0]);
    }
};
208
209
// Custom layer providing both a new-style forward() with an OpenCL branch and
// the deprecated Mat*-based forward() used as the CPU fallback path.
class CustomLayerWithDeprecatedForwardAndFallback CV_FINAL : public Layer
{
public:
    CustomLayerWithDeprecatedForwardAndFallback(const LayerParams &params) : Layer(params) {}

    static Ptr<Layer> create(LayerParams& params)
    {
        return makePtr<CustomLayerWithDeprecatedForwardAndFallback>(params);
    }

    void forward(InputArrayOfArrays inputs, OutputArrayOfArrays outputs, OutputArrayOfArrays internals) CV_OVERRIDE
    {
        CV_TRACE_FUNCTION();
        CV_TRACE_ARG_VALUE(name, "name", name.c_str());

        // Try the OpenCL implementation first when an OpenCL target is
        // selected; on failure (or CPU targets) fall through below.
        CV_OCL_RUN(preferableTarget == DNN_TARGET_OPENCL || preferableTarget == DNN_TARGET_OPENCL_FP16,
                   forward_ocl(inputs, outputs, internals));

        // Routes into the deprecated Mat*-based overload.
        Layer::forward_fallback(inputs, outputs, internals);
    }

    // Deprecated signature: writes input + 0.5 into the output.
    virtual void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals) CV_OVERRIDE
    {
        CV_Assert_N(inputs[0]->depth() == CV_32F, outputs[0].depth() == CV_32F);
        cv::add(*inputs[0], 0.5f, outputs[0]);
    }

#ifdef HAVE_OPENCL
    bool forward_ocl(InputArrayOfArrays inputs_arr, OutputArrayOfArrays outputs_arr, OutputArrayOfArrays internals_arr)
    {
        if (inputs_arr.depth() != CV_32F)
            return false;  // let the generic fallback handle non-FP32 data

        std::vector<UMat> inps;
        std::vector<UMat> outs;
        inputs_arr.getUMatVector(inps);
        outputs_arr.getUMatVector(outs);
        cv::add(inps[0], 0.5f, outs[0]);
        return true;
    }
#endif
};
251
252
typedef testing::TestWithParam<tuple<Backend, Target> > DeprecatedForward;
253
TEST_P(DeprecatedForward, CustomLayer)
254
{
255
const int backend = get<0>(GetParam());
256
const int target = get<1>(GetParam());
257
258
Mat inp(5, 5, CV_32FC1);
259
randu(inp, -1.0f, 1.0f);
260
inp = blobFromImage(inp);
261
262
CV_DNN_REGISTER_LAYER_CLASS(CustomType, CustomLayerWithDeprecatedForward);
263
try
264
{
265
LayerParams lp;
266
Net net;
267
net.addLayerToPrev("testLayer", "CustomType", lp);
268
net.setPreferableBackend(backend);
269
net.setPreferableTarget(target);
270
net.setInput(inp);
271
Mat out = net.forward();
272
normAssert(out, inp + 0.5f, "", 2e-4, 7e-4);
273
}
274
catch (...)
275
{
276
LayerFactory::unregisterLayer("CustomType");
277
throw;
278
}
279
LayerFactory::unregisterLayer("CustomType");
280
}
281
282
// Same check as above, but for a layer that also provides a new-style
// forward() with an OpenCL branch falling back to the deprecated one.
TEST_P(DeprecatedForward, CustomLayerWithFallback)
{
    const int backend = get<0>(GetParam());
    const int target = get<1>(GetParam());

    Mat input(5, 5, CV_32FC1);
    randu(input, -1.0f, 1.0f);
    input = blobFromImage(input);

    CV_DNN_REGISTER_LAYER_CLASS(CustomType, CustomLayerWithDeprecatedForwardAndFallback);
    try
    {
        LayerParams params;
        Net net;
        net.addLayerToPrev("testLayer", "CustomType", params);
        net.setPreferableBackend(backend);
        net.setPreferableTarget(target);
        net.setInput(input);
        Mat result = net.forward();
        // The layer adds 0.5 to its input.
        normAssert(result, input + 0.5f, "", 2e-4, 7e-4);
    }
    catch (...)
    {
        // Never leave the custom type registered for later tests.
        LayerFactory::unregisterLayer("CustomType");
        throw;
    }
    LayerFactory::unregisterLayer("CustomType");
}

INSTANTIATE_TEST_CASE_P(/**/, DeprecatedForward, dnnBackendsAndTargets());
312
313
}} // namespace
314
315