Book a Demo!
CoCalc Logo Icon
StoreFeaturesDocsShareSupportNewsAboutPoliciesSign UpSign In
Tetragramm
GitHub Repository: Tetragramm/opencv
Path: blob/master/modules/dnn/test/test_halide_layers.cpp
16354 views
1
// This file is part of OpenCV project.
2
// It is subject to the license terms in the LICENSE file found in the top-level directory
3
// of this distribution and at http://opencv.org/license.html.
4
//
5
// Copyright (C) 2017, Intel Corporation, all rights reserved.
6
// Third party copyrights are property of their respective owners.
7
8
// These tests don't require any external data. They just compare outputs of
9
// layers using different computation backends. Input and parameters are random.
10
11
#include "test_precomp.hpp"
12
13
namespace opencv_test { namespace {
14
15
using namespace cv;
16
using namespace cv::dnn;
17
using namespace testing;
18
19
// Runs the network twice on the same random input — once on the reference
// OpenCV/CPU backend and once on the requested (backend, target) pair — and
// asserts the outputs match within default per-backend thresholds.
// 'skipCheck' disables the final comparison while still exercising both runs.
static void test(Mat& input, Net& net, Backend backendId, Target targetId, bool skipCheck = false)
{
    DNNTestLayer::checkBackend(backendId, targetId);
    randu(input, -1.0f, 1.0f);

    // Reference pass: default OpenCV backend.
    net.setInput(input);
    net.setPreferableBackend(DNN_BACKEND_OPENCV);
    const Mat refOutput = net.forward().clone();

    // Pass under test: the requested backend/target.
    net.setPreferableBackend(backendId);
    net.setPreferableTarget(targetId);
    const Mat testOutput = net.forward().clone();

    if (!skipCheck)
    {
        double l1 = 0.0, lInf = 0.0;
        DNNTestLayer::getDefaultThresholds(backendId, targetId, &l1, &lInf);
        normAssert(refOutput, testOutput, "", l1, lInf);
    }
}
39
40
// Convenience overload: wraps a single layer described by 'params' into a
// one-layer network and delegates to the backend-comparison check above.
static void test(LayerParams& params, Mat& input, Backend backendId, Target targetId, bool skipCheck = false)
{
    Net singleLayerNet;
    singleLayerNet.addLayerToPrev(params.name, params.type, params);
    test(input, singleLayerNet, backendId, targetId, skipCheck);
}
46
47
// Generates the (backend, target) pairs these tests are instantiated over.
// NOTE(review): the three flags presumably enable Halide and Inference Engine
// pairs while excluding plain OpenCV/CPU — confirm against the declaration of
// dnnBackendsAndTargets().
static inline testing::internal::ParamGenerator<tuple<Backend, Target> > dnnBackendsAndTargetsWithHalide()
{
    return dnnBackendsAndTargets(true, true, false); // OpenCV/CPU is used as reference
}
51
52
// Shared fixture parameterized over (backend, target); instantiated at the bottom of the file.
class Test_Halide_layers : public DNNTestLayer {};
53
54
////////////////////////////////////////////////////////////////////////////////
55
// Padding
56
////////////////////////////////////////////////////////////////////////////////
57
// Padding layer: kNumRuns random padding configurations on random 4D shapes.
TEST_P(Test_Halide_layers, Padding)
{
    static const int kNumRuns = 10;
    std::vector<int> paddings(8);
    cv::RNG& rng = cv::theRNG();
    for (int t = 0; t < kNumRuns; ++t)
    {
        // Random padding in [0, 4] for each of the 8 entries.
        // size_t index avoids a signed/unsigned comparison with vector::size().
        for (size_t i = 0; i < paddings.size(); ++i)
            paddings[i] = rng(5);

        LayerParams lp;
        lp.set("paddings", DictValue::arrayInt<int*>(&paddings[0], paddings.size()));
        lp.type = "Padding";
        lp.name = "testLayer";

        // Random 4D input with every dimension in [1, 10].
        int sz[] = {1 + (int)rng(10), 1 + (int)rng(10), 1 + (int)rng(10), 1 + (int)rng(10)};
        Mat input(4, &sz[0], CV_32F);
        test(lp, input, backend, target);
    }
}
77
78
////////////////////////////////////////////////////////////////////////////////
79
// Convolution
80
////////////////////////////////////////////////////////////////////////////////
81
typedef TestWithParam<tuple<Vec3i, Size, Size, Size, Size, Size, bool, tuple<Backend, Target> > > Convolution;
82
TEST_P(Convolution, Accuracy)
83
{
84
int inChannels = get<0>(GetParam())[0];
85
int outChannels = get<0>(GetParam())[1];
86
int group = get<0>(GetParam())[2];
87
Size inSize = get<1>(GetParam());
88
Size kernel = get<2>(GetParam());
89
Size stride = get<3>(GetParam());
90
Size pad = get<4>(GetParam());
91
Size dilation = get<5>(GetParam());
92
bool hasBias = get<6>(GetParam());
93
Backend backendId = get<0>(get<7>(GetParam()));
94
Target targetId = get<1>(get<7>(GetParam()));
95
96
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_RELEASE < 2018030000
97
if (backendId == DNN_BACKEND_INFERENCE_ENGINE && targetId == DNN_TARGET_MYRIAD)
98
throw SkipTestException("Test is enabled starts from OpenVINO 2018R3");
99
#endif
100
101
bool skipCheck = false;
102
103
int sz[] = {outChannels, inChannels / group, kernel.height, kernel.width};
104
Mat weights(4, &sz[0], CV_32F);
105
randu(weights, -1.0f, 1.0f);
106
107
LayerParams lp;
108
lp.set("kernel_w", kernel.width);
109
lp.set("kernel_h", kernel.height);
110
lp.set("pad_w", pad.width);
111
lp.set("pad_h", pad.height);
112
lp.set("stride_w", stride.width);
113
lp.set("stride_h", stride.height);
114
lp.set("dilation_w", dilation.width);
115
lp.set("dilation_h", dilation.height);
116
lp.set("num_output", outChannels);
117
lp.set("group", group);
118
lp.set("bias_term", hasBias);
119
lp.type = "Convolution";
120
lp.name = "testLayer";
121
lp.blobs.push_back(weights);
122
if (hasBias)
123
{
124
Mat bias(1, outChannels, CV_32F);
125
randu(bias, -1.0f, 1.0f);
126
lp.blobs.push_back(bias);
127
}
128
int inpSz[] = {1, inChannels, inSize.height, inSize.width};
129
Mat input(4, &inpSz[0], CV_32F);
130
test(lp, input, backendId, targetId, skipCheck);
131
if (skipCheck)
132
throw SkipTestException("Skip checks in unstable test");
133
}
134
135
// Full Cartesian product of convolution hyper-parameters per backend/target.
INSTANTIATE_TEST_CASE_P(Layer_Test_Halide, Convolution, Combine(
/*in channels, out channels, group*/
             Values(Vec3i(6, 4, 1), Vec3i(6, 9, 1),
                    Vec3i(6, 4, 2), Vec3i(6, 9, 3)),
/*in size*/  Values(Size(5, 6)),
/*kernel*/   Values(Size(3, 1), Size(1, 3)),
/*stride*/   Values(Size(1, 1), Size(2, 2)),
/*pad*/      Values(Size(1, 0), Size(0, 1)),
/*dilation*/ Values(Size(1, 1), Size(2, 2)),
/*has bias*/ Bool(),
             dnnBackendsAndTargetsWithHalide()
));
147
148
////////////////////////////////////////////////////////////////////////////////
149
// Deconvolution
150
////////////////////////////////////////////////////////////////////////////////
151
// Parameters: (inCh, outCh, group), input size, kernel, pad, dilation,
// (stride_w, stride_h, adj_w, adj_h) packed in a Vec4i, bias flag,
// (backend, target).
typedef TestWithParam<tuple<Vec3i, Size, Size, Size, Size, Vec4i, bool, tuple<Backend, Target> > > Deconvolution;
TEST_P(Deconvolution, Accuracy)
{
    int inChannels = get<0>(GetParam())[0];
    int outChannels = get<0>(GetParam())[1];
    int group = get<0>(GetParam())[2];
    Size inSize = get<1>(GetParam());
    Size kernel = get<2>(GetParam());
    Size pad = get<3>(GetParam());
    Size dilation = get<4>(GetParam());
    Size stride = Size(get<5>(GetParam())[0], get<5>(GetParam())[1]);
    Size adjPad = Size(get<5>(GetParam())[2], get<5>(GetParam())[3]);
    bool hasBias = get<6>(GetParam());
    Backend backendId = get<0>(get<7>(GetParam()));
    Target targetId = get<1>(get<7>(GetParam()));
    // Dilated deconvolution is skipped on IE/CPU (no reason recorded).
    if (backendId == DNN_BACKEND_INFERENCE_ENGINE && targetId == DNN_TARGET_CPU &&
        dilation.width == 2 && dilation.height == 2)
        throw SkipTestException("");

    // Deconvolution weights layout: {inCh, outCh / group, kH, kW}
    // (in/out channel dimensions are swapped w.r.t. Convolution).
    int sz[] = {inChannels, outChannels / group, kernel.height, kernel.width};
    Mat weights(4, &sz[0], CV_32F);
    randu(weights, -1.0f, 1.0f);

    LayerParams lp;
    lp.set("kernel_w", kernel.width);
    lp.set("kernel_h", kernel.height);
    lp.set("pad_w", pad.width);
    lp.set("pad_h", pad.height);
    lp.set("stride_w", stride.width);
    lp.set("stride_h", stride.height);
    lp.set("dilation_w", dilation.width);
    lp.set("dilation_h", dilation.height);
    // Output-adjustment padding, specific to transposed convolution.
    lp.set("adj_w", adjPad.width);
    lp.set("adj_h", adjPad.height);
    lp.set("num_output", outChannels);
    lp.set("group", group);
    lp.set("bias_term", hasBias);
    lp.type = "Deconvolution";
    lp.name = "testLayer";
    lp.blobs.push_back(weights);
    if (hasBias)
    {
        Mat bias(1, outChannels, CV_32F);
        randu(bias, -1.0f, 1.0f);
        lp.blobs.push_back(bias);
    }
    int inpSz[] = {1, inChannels, inSize.height, inSize.width};
    Mat input(4, &inpSz[0], CV_32F);
    test(lp, input, backendId, targetId);
}
201
202
// Deconvolution hyper-parameter grid; stride and adjustment padding come as one Vec4i.
INSTANTIATE_TEST_CASE_P(Layer_Test_Halide, Deconvolution, Combine(
/*in channels, out channels, group*/
             Values(Vec3i(6, 4, 1), Vec3i(6, 9, 3)),
/*in size*/  Values(Size(5, 6)),
/*kernel*/   Values(Size(3, 1), Size(1, 3)),
/*pad*/      Values(Size(1, 0), Size(0, 1)),
/*dilation*/ Values(Size(1, 1), Size(2, 2)),
/*stride, adj. pad*/ Values(Vec4i(1,1, 0,0), Vec4i(2,2, 1,0), Vec4i(1,2, 0,1)),
/*has bias*/ Bool(),
             dnnBackendsAndTargetsWithHalide()
));
213
214
////////////////////////////////////////////////////////////////////////////////
215
// LRN
216
////////////////////////////////////////////////////////////////////////////////
217
// Parameters: (channels, width, height), local size, (alpha, beta, bias),
// norm-by-size flag, normalization region string, (backend, target).
typedef TestWithParam<tuple<Vec3i, int, Vec3f, bool, std::string, tuple<Backend, Target> > > LRN;
TEST_P(LRN, Accuracy)
{
    int inChannels = get<0>(GetParam())[0];
    Size inSize = Size(get<0>(GetParam())[1], get<0>(GetParam())[2]);
    int localSize = get<1>(GetParam());
    float alpha = get<2>(GetParam())[0];
    float beta = get<2>(GetParam())[1];
    float bias = get<2>(GetParam())[2];
    bool normBySize = get<3>(GetParam());
    std::string nrmType = get<4>(GetParam());
    Backend backendId = get<0>(get<5>(GetParam()));
    Target targetId = get<1>(get<5>(GetParam()));
    // LRN is not checked against the Inference Engine backend at all.
    if (backendId == DNN_BACKEND_INFERENCE_ENGINE)
        throw SkipTestException("");

    LayerParams lp;
    lp.set("norm_region", nrmType);
    lp.set("local_size", localSize);
    lp.set("alpha", alpha);
    lp.set("beta", beta);
    lp.set("bias", bias);
    lp.set("norm_by_size", normBySize);
    lp.type = "LRN";
    lp.name = "testLayer";

    int sz[] = {1, inChannels, inSize.height, inSize.width};
    Mat input(4, &sz[0], CV_32F);
    test(lp, input, backendId, targetId);
}
247
248
// LRN parameter grid: input shapes, window sizes, (alpha, beta, bias)
// permutations, both normalization modes and both region types.
INSTANTIATE_TEST_CASE_P(Layer_Test_Halide, LRN, Combine(
/*input ch,w,h*/ Values(Vec3i(6, 5, 8), Vec3i(7, 11, 6)),
/*local size*/   Values(3, 5),
                  Values(Vec3f(0.9f, 1.0f, 1.1f), Vec3f(0.9f, 1.1f, 1.0f),
/*alpha, beta,*/        Vec3f(1.0f, 0.9f, 1.1f), Vec3f(1.0f, 1.1f, 0.9f),
/*bias */               Vec3f(1.1f, 0.9f, 1.0f), Vec3f(1.1f, 1.0f, 0.9f)),
/*norm_by_size*/ Bool(),
/*norm_type*/    Values("ACROSS_CHANNELS", "WITHIN_CHANNEL"),
                 dnnBackendsAndTargetsWithHalide()
));
258
259
////////////////////////////////////////////////////////////////////////////////
260
// Average pooling
261
////////////////////////////////////////////////////////////////////////////////
262
typedef TestWithParam<tuple<int, Size, Size, Size, tuple<Backend, Target> > > AvePooling;
263
TEST_P(AvePooling, Accuracy)
264
{
265
int inChannels = get<0>(GetParam());
266
Size outSize = get<1>(GetParam());; // Input size will be computed from parameters.
267
Size kernel = get<2>(GetParam());
268
Size stride = get<3>(GetParam());
269
Backend backendId = get<0>(get<4>(GetParam()));
270
Target targetId = get<1>(get<4>(GetParam()));
271
if (backendId == DNN_BACKEND_INFERENCE_ENGINE && targetId == DNN_TARGET_MYRIAD &&
272
stride == Size(3, 2) && kernel == Size(3, 3) && outSize != Size(1, 1))
273
throw SkipTestException("");
274
275
const int inWidth = (outSize.width - 1) * stride.width + kernel.width;
276
const int inHeight = (outSize.height - 1) * stride.height + kernel.height;
277
278
LayerParams lp;
279
lp.set("pool", "ave");
280
lp.set("kernel_w", kernel.width);
281
lp.set("kernel_h", kernel.height);
282
lp.set("stride_w", stride.width);
283
lp.set("stride_h", stride.height);
284
lp.type = "Pooling";
285
lp.name = "testLayer";
286
287
int sz[] = {1, inChannels, inHeight, inWidth};
288
Mat input(4, &sz[0], CV_32F);
289
test(lp, input, backendId, targetId);
290
}
291
292
// Average-pooling grid; note the second axis is the desired OUTPUT size.
INSTANTIATE_TEST_CASE_P(Layer_Test_Halide, AvePooling, Combine(
/*in channels*/ Values(3, 4),
/*out size*/    Values(Size(1, 1), Size(2, 2), Size(3, 2), Size(4, 7)),
/*kernel*/      Values(Size(1, 1), Size(2, 2), Size(3, 3), Size(3, 2)),
/*stride*/      Values(Size(1, 1), Size(2, 2), Size(3, 2)),
                dnnBackendsAndTargetsWithHalide()
));
299
300
////////////////////////////////////////////////////////////////////////////////
301
// Maximum pooling
302
////////////////////////////////////////////////////////////////////////////////
303
typedef TestWithParam<tuple<int, Size, Size, Size, Size, tuple<Backend, Target> > > MaxPooling;
304
TEST_P(MaxPooling, Accuracy)
305
{
306
int inChannels = get<0>(GetParam());
307
Size inSize = get<1>(GetParam());
308
Size kernel = get<2>(GetParam());
309
Size stride = get<3>(GetParam());
310
Size pad = get<4>(GetParam());
311
Backend backendId = get<0>(get<5>(GetParam()));
312
Target targetId = get<1>(get<5>(GetParam()));
313
314
LayerParams lp;
315
lp.set("pool", "max");
316
lp.set("kernel_w", kernel.width);
317
lp.set("kernel_h", kernel.height);
318
lp.set("stride_w", stride.width);
319
lp.set("stride_h", stride.height);
320
lp.set("pad_w", pad.width);
321
lp.set("pad_h", pad.height);
322
lp.type = "Pooling";
323
lp.name = "testLayer";
324
325
int sz[] = {1, inChannels, inSize.height, inSize.width};
326
Mat input(4, &sz[0], CV_32F);
327
test(lp, input, backendId, targetId);
328
}
329
330
// Max-pooling hyper-parameter grid per backend/target.
INSTANTIATE_TEST_CASE_P(Layer_Test_Halide, MaxPooling, Combine(
/*in channels*/ Values(3, 4),
/*in size*/     Values(Size(5, 5), Size(7, 6)),
/*kernel*/      Values(Size(2, 2), Size(3, 3), Size(3, 2)),
/*stride*/      Values(Size(1, 1), Size(2, 2), Size(3, 2)),
/*pad*/         Values(Size(0, 0), Size(1, 1), Size(0, 1)),
                dnnBackendsAndTargetsWithHalide()
));
338
339
////////////////////////////////////////////////////////////////////////////////
340
// Fully-connected
341
////////////////////////////////////////////////////////////////////////////////
342
// Parameters: channel count, input size, output neurons, bias flag,
// (backend, target).
typedef TestWithParam<tuple<int, Size, int, bool, tuple<Backend, Target> > > FullyConnected;
TEST_P(FullyConnected, Accuracy)
{
    int inChannels = get<0>(GetParam());
    Size inSize = get<1>(GetParam());
    int outChannels = get<2>(GetParam());
    bool hasBias = get<3>(GetParam());
    Backend backendId = get<0>(get<4>(GetParam()));
    Target targetId = get<1>(get<4>(GetParam()));
    // Not compared against the Inference Engine backend.
    if (backendId == DNN_BACKEND_INFERENCE_ENGINE)
        throw SkipTestException("");

    // Weight matrix: one row per output neuron over the flattened input.
    Mat weights(outChannels, inChannels * inSize.height * inSize.width, CV_32F);
    randu(weights, -1.0f, 1.0f);

    Mat bias(1, outChannels, CV_32F);
    randu(bias, -1.0f, 1.0f);

    LayerParams lp;
    lp.set("num_output", outChannels);
    lp.set("bias_term", hasBias);
    // The bias blob is pushed unconditionally; "bias_term" controls whether
    // the layer actually applies it.
    lp.blobs.push_back(weights);
    lp.blobs.push_back(bias);
    lp.type = "InnerProduct";
    lp.name = "testLayer";

    int sz[] = {1, inChannels, inSize.height, inSize.width};
    Mat input(4, &sz[0], CV_32F);
    test(lp, input, backendId, targetId);
}
372
373
// Fully-connected layer grid, including the degenerate 1x1 spatial case.
INSTANTIATE_TEST_CASE_P(Layer_Test_Halide, FullyConnected, Combine(
/*in channels*/  Values(3, 4),
/*in size*/      Values(Size(5, 4), Size(4, 5), Size(1, 1)),
/*out channels*/ Values(3, 4),
/*has bias*/     Bool(),
                 dnnBackendsAndTargetsWithHalide()
));
380
381
////////////////////////////////////////////////////////////////////////////////
382
// SoftMax
383
////////////////////////////////////////////////////////////////////////////////
384
typedef TestWithParam<tuple<int, tuple<Backend, Target> > > SoftMax;
385
TEST_P(SoftMax, Accuracy)
386
{
387
int inChannels = get<0>(GetParam());
388
Backend backendId = get<0>(get<1>(GetParam()));
389
Target targetId = get<1>(get<1>(GetParam()));
390
LayerParams lp;
391
lp.type = "SoftMax";
392
lp.name = "testLayer";
393
394
int sz[] = {1, inChannels, 1, 1};
395
Mat input(4, &sz[0], CV_32F);
396
test(lp, input, backendId, targetId);
397
}
398
399
// Channel counts include a large one (1024) to stress the reduction.
INSTANTIATE_TEST_CASE_P(Layer_Test_Halide, SoftMax, Combine(
    Values(3, 4, 5, 1024),
    dnnBackendsAndTargetsWithHalide()
));
403
404
//////////////////////////////////////////////////////////////////////////////
405
// Max pooling - unpooling
406
//////////////////////////////////////////////////////////////////////////////
407
// Max pooling followed by max unpooling wired through both of the pooling
// layer's outputs.
TEST_P(Test_Halide_layers, MaxPoolUnpool)
{
    if (backend == DNN_BACKEND_INFERENCE_ENGINE)
        throw SkipTestException("");

    LayerParams pool;
    pool.set("pool", "max");
    pool.set("kernel_w", 2);
    pool.set("kernel_h", 2);
    pool.set("stride_w", 2);
    pool.set("stride_h", 2);
    pool.set("pad_w", 0);
    pool.set("pad_h", 0);
    pool.type = "Pooling";
    pool.name = "testPool";

    // Unpooling mirrors the pooling geometry via the pool_* parameters.
    LayerParams unpool;
    unpool.set("pool_k_w", 2);
    unpool.set("pool_k_h", 2);
    unpool.set("pool_stride_w", 2);
    unpool.set("pool_stride_h", 2);
    unpool.set("pool_pad_w", 0);
    unpool.set("pool_pad_h", 0);
    unpool.type = "MaxUnpool";
    unpool.name = "testUnpool";

    Net net;
    int poolId = net.addLayer(pool.name, pool.type, pool);
    net.connect(0, 0, poolId, 0);

    int unpoolId = net.addLayer(unpool.name, unpool.type, unpool);
    // Output 0: pooled values; output 1: second pooling output
    // (NOTE(review): presumably the max-element indices — confirm).
    net.connect(poolId, 0, unpoolId, 0);
    net.connect(poolId, 1, unpoolId, 1);

    int sz[] = {1, 1, 4, 4};
    Mat input(4, &sz[0], CV_32F);
    test(input, net, backend, target);
}
445
446
////////////////////////////////////////////////////////////////////////////////
447
// AvePooling + in-place layers
448
////////////////////////////////////////////////////////////////////////////////
449
// Channel count shared by the in-place activation tests below.
static const int kNumChannels = 3;
450
451
// Checks an in-place layer described by 'lp' by attaching it after a 2x2
// average pooling layer and comparing backends on the two-layer network.
void testInPlaceActivation(LayerParams& lp, Backend backendId, Target targetId)
{
    EXPECT_FALSE(lp.name.empty());

    LayerParams poolParams;
    poolParams.set("pool", "ave");
    poolParams.set("kernel_w", 2);
    poolParams.set("kernel_h", 2);
    poolParams.set("stride_w", 2);
    poolParams.set("stride_h", 2);
    poolParams.type = "Pooling";

    Net net;
    const int poolLayerId = net.addLayer(poolParams.name, poolParams.type, poolParams);
    net.connect(0, 0, poolLayerId, 0);
    net.addLayerToPrev(lp.name, lp.type, lp);

    int shape[] = {1, kNumChannels, 10, 10};
    Mat input(4, &shape[0], CV_32F);
    test(input, net, backendId, targetId);
}
472
473
typedef TestWithParam<tuple<bool, bool, float, tuple<Backend, Target> > > BatchNorm;
474
TEST_P(BatchNorm, Accuracy)
475
{
476
bool hasWeights = get<0>(GetParam());
477
bool hasBias = get<1>(GetParam());
478
float epsilon = get<2>(GetParam());
479
Backend backendId = get<0>(get<3>(GetParam()));
480
Target targetId = get<1>(get<3>(GetParam()));
481
482
LayerParams lp;
483
lp.set("has_weight", hasWeights);
484
lp.set("has_bias", hasBias);
485
lp.set("eps", epsilon);
486
lp.type = "BatchNorm";
487
lp.name = "testLayer";
488
489
lp.blobs.reserve(4);
490
for (int i = 0; i < 3; ++i)
491
lp.blobs.push_back(Mat(1, kNumChannels, CV_32F));
492
if (hasBias || hasWeights)
493
lp.blobs.push_back(Mat(1, kNumChannels, CV_32F));
494
495
for (int i = 0; i < lp.blobs.size(); ++i)
496
randu(lp.blobs[i], 0.0f, 1.0f);
497
498
testInPlaceActivation(lp, backendId, targetId);
499
}
500
501
// BatchNorm grid: all weight/bias combinations at two epsilon values.
INSTANTIATE_TEST_CASE_P(Layer_Test_Halide, BatchNorm, Combine(
/*has weights*/ Bool(),
/*has bias*/    Bool(),
/*epsilon*/     Values(1e-3f, 1e-5f),
                dnnBackendsAndTargetsWithHalide()
));
507
508
typedef TestWithParam<tuple<float, tuple<Backend, Target> > > ReLU;
509
TEST_P(ReLU, Accuracy)
510
{
511
float negativeSlope = get<0>(GetParam());
512
Backend backendId = get<0>(get<1>(GetParam()));
513
Target targetId = get<1>(get<1>(GetParam()));
514
515
LayerParams lp;
516
lp.set("negative_slope", negativeSlope);
517
lp.type = "ReLU";
518
lp.name = "testLayer";
519
testInPlaceActivation(lp, backendId, targetId);
520
}
521
522
// Slopes cover >1, fractional, negative, and plain-ReLU (0) cases.
INSTANTIATE_TEST_CASE_P(Layer_Test_Halide, ReLU, Combine(
/*negative slope*/ Values(2.0f, 0.3f, -0.1f, 0.0f),
                   dnnBackendsAndTargetsWithHalide()
));
526
527
typedef TestWithParam<tuple<std::string, tuple<Backend, Target> > > NoParamActivation;
528
TEST_P(NoParamActivation, Accuracy)
529
{
530
Backend backendId = get<0>(get<1>(GetParam()));
531
Target targetId = get<1>(get<1>(GetParam()));
532
533
LayerParams lp;
534
lp.type = get<0>(GetParam());
535
lp.name = "testLayer";
536
testInPlaceActivation(lp, backendId, targetId);
537
}
538
// Parameter-free activations exercised by type name only.
INSTANTIATE_TEST_CASE_P(Layer_Test_Halide, NoParamActivation, Combine(
/*type*/ Values("TanH", "Sigmoid", "AbsVal", "BNLL"),
         dnnBackendsAndTargetsWithHalide()
));
542
543
typedef TestWithParam<tuple<Vec3f, tuple<Backend, Target> > > Power;
544
TEST_P(Power, Accuracy)
545
{
546
float power = get<0>(GetParam())[0];
547
float scale = get<0>(GetParam())[1];
548
float shift = get<0>(GetParam())[2];
549
Backend backendId = get<0>(get<1>(GetParam()));
550
Target targetId = get<1>(get<1>(GetParam()));
551
552
LayerParams lp;
553
lp.set("power", power);
554
lp.set("scale", scale);
555
lp.set("shift", shift);
556
lp.type = "Power";
557
lp.name = "testLayer";
558
testInPlaceActivation(lp, backendId, targetId);
559
}
560
561
// All permutations of {0.9, 1.0, 1.1} over power, scale, shift.
INSTANTIATE_TEST_CASE_P(Layer_Test_Halide, Power, Combine(
/*power, scale, shift*/ Values(Vec3f(0.9f, 1.0f, 1.1f), Vec3f(0.9f, 1.1f, 1.0f),
                               Vec3f(1.0f, 0.9f, 1.1f), Vec3f(1.0f, 1.1f, 0.9f),
                               Vec3f(1.1f, 0.9f, 1.0f), Vec3f(1.1f, 1.0f, 0.9f)),
                        dnnBackendsAndTargetsWithHalide()
));
567
568
// ChannelsPReLU: one learnable slope per channel, stored as the single blob.
TEST_P(Test_Halide_layers, ChannelsPReLU)
{
    LayerParams preluParams;
    preluParams.type = "ChannelsPReLU";
    preluParams.name = "testLayer";
    preluParams.blobs.push_back(Mat(1, kNumChannels, CV_32F));
    randu(preluParams.blobs[0], -1.0f, 1.0f);

    testInPlaceActivation(preluParams, backend, target);
}
578
579
typedef TestWithParam<tuple<bool, tuple<Backend, Target> > > Scale;
580
TEST_P(Scale, Accuracy)
581
{
582
bool hasBias = get<0>(GetParam());
583
Backend backendId = get<0>(get<1>(GetParam()));
584
Target targetId = get<1>(get<1>(GetParam()));
585
586
LayerParams lp;
587
lp.set("bias_term", hasBias);
588
lp.type = "Scale";
589
lp.name = "testLayer";
590
lp.blobs.push_back(Mat(1, kNumChannels, CV_32F));
591
randu(lp.blobs[0], -1.0f, 1.0f);
592
if (hasBias)
593
{
594
lp.blobs.push_back(Mat(1, kNumChannels, CV_32F));
595
randu(lp.blobs[1], -1.0f, 1.0f);
596
}
597
testInPlaceActivation(lp, backendId, targetId);
598
}
599
600
// Scale layer with and without the bias term.
INSTANTIATE_TEST_CASE_P(Layer_Test_Halide, Scale, Combine(
    Bool(),
    dnnBackendsAndTargetsWithHalide()
));
604
605
////////////////////////////////////////////////////////////////////////////////
606
// Concat layer
607
////////////////////////////////////////////////////////////////////////////////
608
//
609
// input --- conv --- concat --- output
610
// `--- conv ----^ ^ ^
611
// `---- ... ------' '
612
// `-----------------'
613
typedef TestWithParam<tuple<Vec3i, Vec3i, tuple<Backend, Target> > > Concat;
614
TEST_P(Concat, Accuracy)
615
{
616
Vec3i inSize = get<0>(GetParam());
617
Vec3i numChannels = get<1>(GetParam());
618
Backend backendId = get<0>(get<2>(GetParam()));
619
Target targetId = get<1>(get<2>(GetParam()));
620
621
Net net;
622
623
std::vector<int> convLayerIds;
624
convLayerIds.reserve(numChannels.channels);
625
for (int i = 0, n = numChannels.channels; i < n; ++i)
626
{
627
if (!numChannels[i])
628
break;
629
630
int sz[] = {numChannels[i], inSize[0], 1, 1};
631
Mat weights(4, &sz[0], CV_32F);
632
randu(weights, -1.0f, 1.0f);
633
634
LayerParams convParam;
635
convParam.set("kernel_w", 1);
636
convParam.set("kernel_h", 1);
637
convParam.set("num_output", numChannels[i]);
638
convParam.set("bias_term", false);
639
convParam.type = "Convolution";
640
std::ostringstream ss;
641
ss << "convLayer" << i;
642
convParam.name = ss.str();
643
convParam.blobs.push_back(weights);
644
645
int layerId = net.addLayer(convParam.name, convParam.type, convParam);
646
convLayerIds.push_back(layerId);
647
net.connect(0, 0, layerId, 0);
648
}
649
650
LayerParams concatParam;
651
concatParam.type = "Concat";
652
concatParam.name = "testLayer";
653
int concatId = net.addLayer(concatParam.name, concatParam.type, concatParam);
654
net.connect(0, 0, concatId, 0);
655
for (int i = 0; i < convLayerIds.size(); ++i)
656
{
657
net.connect(convLayerIds[i], 0, concatId, i + 1);
658
}
659
660
int sz[] = {1, inSize[0], inSize[1], inSize[2]};
661
Mat input(4, &sz[0], CV_32F);
662
test(input, net, backendId, targetId);
663
}
664
665
// Concat grid; zero entries in the channels triple mean "no more branches".
INSTANTIATE_TEST_CASE_P(Layer_Test_Halide, Concat, Combine(
/*input size*/ Values(Vec3i(1, 4, 5), Vec3i(2, 8, 6)),
/*channels*/   Values(Vec3i(2, 0, 0), Vec3i(3, 4, 0), Vec3i(1, 6, 2)),
               dnnBackendsAndTargetsWithHalide()
));
670
671
////////////////////////////////////////////////////////////////////////////////
672
// Element-wise layers
673
////////////////////////////////////////////////////////////////////////////////
674
//
675
// input --- conv --- eltwise --- output
676
// `--- conv ----^ ^ ^
677
// `---- ... ------' '
678
// `-----------------'
679
typedef TestWithParam<tuple<Vec3i, std::string, int, bool, tuple<Backend, Target> > > Eltwise;
680
TEST_P(Eltwise, Accuracy)
681
{
682
Vec3i inSize = get<0>(GetParam());
683
std::string op = get<1>(GetParam());
684
int numConv = get<2>(GetParam());
685
bool weighted = get<3>(GetParam());
686
Backend backendId = get<0>(get<4>(GetParam()));
687
Target targetId = get<1>(get<4>(GetParam()));
688
689
Net net;
690
691
std::vector<int> convLayerIds(numConv);
692
for (int i = 0; i < numConv; ++i)
693
{
694
int sz[] = {inSize[0], inSize[0], 1, 1};
695
Mat weights(4, &sz[0], CV_32F);
696
randu(weights, -1.0f, 1.0f);
697
698
LayerParams convParam;
699
convParam.set("kernel_w", 1);
700
convParam.set("kernel_h", 1);
701
convParam.set("num_output", inSize[0]);
702
convParam.set("bias_term", false);
703
convParam.type = "Convolution";
704
std::ostringstream ss;
705
ss << "convLayer" << i;
706
convParam.name = ss.str();
707
convParam.blobs.push_back(weights);
708
709
convLayerIds[i] = net.addLayer(convParam.name, convParam.type, convParam);
710
net.connect(0, 0, convLayerIds[i], 0);
711
}
712
713
LayerParams eltwiseParam;
714
eltwiseParam.set("operation", op);
715
if (op == "sum" && weighted)
716
{
717
RNG& rng = cv::theRNG();
718
std::vector<float> coeff(1 + numConv);
719
for (int i = 0; i < coeff.size(); ++i)
720
{
721
coeff[i] = rng.uniform(-2.0f, 2.0f);
722
}
723
eltwiseParam.set("coeff", DictValue::arrayReal<float*>(&coeff[0], coeff.size()));
724
}
725
eltwiseParam.type = "Eltwise";
726
eltwiseParam.name = "testLayer";
727
int eltwiseId = net.addLayer(eltwiseParam.name, eltwiseParam.type, eltwiseParam);
728
net.connect(0, 0, eltwiseId, 0);
729
for (int i = 0; i < numConv; ++i)
730
{
731
net.connect(convLayerIds[i], 0, eltwiseId, i + 1);
732
}
733
734
int sz[] = {1, inSize[0], inSize[1], inSize[2]};
735
Mat input(4, &sz[0], CV_32F);
736
test(input, net, backendId, targetId);
737
}
738
739
// Eltwise grid: the weighted flag only affects the "sum" operation.
INSTANTIATE_TEST_CASE_P(Layer_Test_Halide, Eltwise, Combine(
/*input size*/ Values(Vec3i(1, 4, 5), Vec3i(2, 8, 6)),
/*operation*/  Values("prod", "sum", "max"),
/*num convs*/  Values(1, 2, 3),
/*weighted(for sum only)*/ Bool(),
               dnnBackendsAndTargetsWithHalide()
));
746
747
////////////////////////////////////////////////////////////////////////////
748
// Mixed backends
749
////////////////////////////////////////////////////////////////////////////
750
#ifdef HAVE_HALIDE
751
TEST(MixedBackends_Halide_Default_Halide, Accuracy)
752
{
753
// Just a layer that supports Halide backend.
754
LayerParams lrn;
755
lrn.type = "LRN";
756
lrn.name = "testLRN";
757
758
// Some of layers that doesn't supports Halide backend yet.
759
LayerParams mvn;
760
mvn.type = "MVN";
761
mvn.name = "testMVN";
762
763
// Halide layer again.
764
LayerParams lrn2;
765
lrn2.type = "LRN";
766
lrn2.name = "testLRN2";
767
768
Net net;
769
int lrnId = net.addLayer(lrn.name, lrn.type, lrn);
770
net.connect(0, 0, lrnId, 0);
771
net.addLayerToPrev(mvn.name, mvn.type, mvn);
772
net.addLayerToPrev(lrn2.name, lrn2.type, lrn2);
773
774
int sz[] = {4, 3, 5, 6};
775
Mat input(4, &sz[0], CV_32F);
776
randu(input, -1.0f, 1.0f);
777
net.setInput(input);
778
net.setPreferableBackend(DNN_BACKEND_OPENCV);
779
Mat outputDefault = net.forward().clone();
780
781
net.setPreferableBackend(DNN_BACKEND_HALIDE);
782
net.setInput(input);
783
Mat outputHalide = net.forward().clone();
784
normAssert(outputDefault, outputHalide);
785
786
net.setPreferableTarget(DNN_TARGET_OPENCL);
787
net.setInput(input);
788
outputHalide = net.forward().clone();
789
normAssert(outputDefault, outputHalide);
790
}
791
#endif // HAVE_HALIDE
792
793
// Instantiate the shared fixture over every backend/target pair (incl. Halide).
INSTANTIATE_TEST_CASE_P(/*nothing*/, Test_Halide_layers, dnnBackendsAndTargetsWithHalide());
794
795
}} // namespace
796
797