// Path: blob/master/modules/dnn/test/test_halide_layers.cpp
// (scrape metadata: 16354 views)
// This file is part of OpenCV project.1// It is subject to the license terms in the LICENSE file found in the top-level directory2// of this distribution and at http://opencv.org/license.html.3//4// Copyright (C) 2017, Intel Corporation, all rights reserved.5// Third party copyrights are property of their respective owners.67// This tests doesn't require any external data. They just compare outputs of8// layers using different computation backends. Input and parameters are random.910#include "test_precomp.hpp"1112namespace opencv_test { namespace {1314using namespace cv;15using namespace cv::dnn;16using namespace testing;1718static void test(Mat& input, Net& net, Backend backendId, Target targetId, bool skipCheck = false)19{20DNNTestLayer::checkBackend(backendId, targetId);21randu(input, -1.0f, 1.0f);2223net.setInput(input);24net.setPreferableBackend(DNN_BACKEND_OPENCV);25Mat outputDefault = net.forward().clone();2627net.setPreferableBackend(backendId);28net.setPreferableTarget(targetId);29Mat outputHalide = net.forward().clone();3031if (skipCheck)32return;3334double l1, lInf;35DNNTestLayer::getDefaultThresholds(backendId, targetId, &l1, &lInf);36normAssert(outputDefault, outputHalide, "", l1, lInf);37}3839static void test(LayerParams& params, Mat& input, Backend backendId, Target targetId, bool skipCheck = false)40{41Net net;42net.addLayerToPrev(params.name, params.type, params);43test(input, net, backendId, targetId, skipCheck);44}4546static inline testing::internal::ParamGenerator<tuple<Backend, Target> > dnnBackendsAndTargetsWithHalide()47{48return dnnBackendsAndTargets(true, true, false); // OpenCV/CPU is used as reference49}5051class Test_Halide_layers : public DNNTestLayer {};5253////////////////////////////////////////////////////////////////////////////////54// Padding55////////////////////////////////////////////////////////////////////////////////56TEST_P(Test_Halide_layers, Padding)57{58static const int kNumRuns = 10;59std::vector<int> 
paddings(8);60cv::RNG& rng = cv::theRNG();61for (int t = 0; t < kNumRuns; ++t)62{63for (int i = 0; i < paddings.size(); ++i)64paddings[i] = rng(5);6566LayerParams lp;67lp.set("paddings", DictValue::arrayInt<int*>(&paddings[0], paddings.size()));68lp.type = "Padding";69lp.name = "testLayer";7071int sz[] = {1 + (int)rng(10), 1 + (int)rng(10), 1 + (int)rng(10), 1 + (int)rng(10)};72Mat input(4, &sz[0], CV_32F);73test(lp, input, backend, target);74}75}7677////////////////////////////////////////////////////////////////////////////////78// Convolution79////////////////////////////////////////////////////////////////////////////////80typedef TestWithParam<tuple<Vec3i, Size, Size, Size, Size, Size, bool, tuple<Backend, Target> > > Convolution;81TEST_P(Convolution, Accuracy)82{83int inChannels = get<0>(GetParam())[0];84int outChannels = get<0>(GetParam())[1];85int group = get<0>(GetParam())[2];86Size inSize = get<1>(GetParam());87Size kernel = get<2>(GetParam());88Size stride = get<3>(GetParam());89Size pad = get<4>(GetParam());90Size dilation = get<5>(GetParam());91bool hasBias = get<6>(GetParam());92Backend backendId = get<0>(get<7>(GetParam()));93Target targetId = get<1>(get<7>(GetParam()));9495#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_RELEASE < 201803000096if (backendId == DNN_BACKEND_INFERENCE_ENGINE && targetId == DNN_TARGET_MYRIAD)97throw SkipTestException("Test is enabled starts from OpenVINO 2018R3");98#endif99100bool skipCheck = false;101102int sz[] = {outChannels, inChannels / group, kernel.height, kernel.width};103Mat weights(4, &sz[0], CV_32F);104randu(weights, -1.0f, 1.0f);105106LayerParams lp;107lp.set("kernel_w", kernel.width);108lp.set("kernel_h", kernel.height);109lp.set("pad_w", pad.width);110lp.set("pad_h", pad.height);111lp.set("stride_w", stride.width);112lp.set("stride_h", stride.height);113lp.set("dilation_w", dilation.width);114lp.set("dilation_h", dilation.height);115lp.set("num_output", outChannels);116lp.set("group", 
group);117lp.set("bias_term", hasBias);118lp.type = "Convolution";119lp.name = "testLayer";120lp.blobs.push_back(weights);121if (hasBias)122{123Mat bias(1, outChannels, CV_32F);124randu(bias, -1.0f, 1.0f);125lp.blobs.push_back(bias);126}127int inpSz[] = {1, inChannels, inSize.height, inSize.width};128Mat input(4, &inpSz[0], CV_32F);129test(lp, input, backendId, targetId, skipCheck);130if (skipCheck)131throw SkipTestException("Skip checks in unstable test");132}133134INSTANTIATE_TEST_CASE_P(Layer_Test_Halide, Convolution, Combine(135/*in channels, out channels, group*/136Values(Vec3i(6, 4, 1), Vec3i(6, 9, 1),137Vec3i(6, 4, 2), Vec3i(6, 9, 3)),138/*in size*/ Values(Size(5, 6)),139/*kernel*/ Values(Size(3, 1), Size(1, 3)),140/*stride*/ Values(Size(1, 1), Size(2, 2)),141/*pad*/ Values(Size(1, 0), Size(0, 1)),142/*dilation*/ Values(Size(1, 1), Size(2, 2)),143/*has bias*/ Bool(),144dnnBackendsAndTargetsWithHalide()145));146147////////////////////////////////////////////////////////////////////////////////148// Deconvolution149////////////////////////////////////////////////////////////////////////////////150typedef TestWithParam<tuple<Vec3i, Size, Size, Size, Size, Vec4i, bool, tuple<Backend, Target> > > Deconvolution;151TEST_P(Deconvolution, Accuracy)152{153int inChannels = get<0>(GetParam())[0];154int outChannels = get<0>(GetParam())[1];155int group = get<0>(GetParam())[2];156Size inSize = get<1>(GetParam());157Size kernel = get<2>(GetParam());158Size pad = get<3>(GetParam());159Size dilation = get<4>(GetParam());160Size stride = Size(get<5>(GetParam())[0], get<5>(GetParam())[1]);161Size adjPad = Size(get<5>(GetParam())[2], get<5>(GetParam())[3]);162bool hasBias = get<6>(GetParam());163Backend backendId = get<0>(get<7>(GetParam()));164Target targetId = get<1>(get<7>(GetParam()));165if (backendId == DNN_BACKEND_INFERENCE_ENGINE && targetId == DNN_TARGET_CPU &&166dilation.width == 2 && dilation.height == 2)167throw SkipTestException("");168169int sz[] = {inChannels, 
outChannels / group, kernel.height, kernel.width};170Mat weights(4, &sz[0], CV_32F);171randu(weights, -1.0f, 1.0f);172173LayerParams lp;174lp.set("kernel_w", kernel.width);175lp.set("kernel_h", kernel.height);176lp.set("pad_w", pad.width);177lp.set("pad_h", pad.height);178lp.set("stride_w", stride.width);179lp.set("stride_h", stride.height);180lp.set("dilation_w", dilation.width);181lp.set("dilation_h", dilation.height);182lp.set("adj_w", adjPad.width);183lp.set("adj_h", adjPad.height);184lp.set("num_output", outChannels);185lp.set("group", group);186lp.set("bias_term", hasBias);187lp.type = "Deconvolution";188lp.name = "testLayer";189lp.blobs.push_back(weights);190if (hasBias)191{192Mat bias(1, outChannels, CV_32F);193randu(bias, -1.0f, 1.0f);194lp.blobs.push_back(bias);195}196int inpSz[] = {1, inChannels, inSize.height, inSize.width};197Mat input(4, &inpSz[0], CV_32F);198test(lp, input, backendId, targetId);199}200201INSTANTIATE_TEST_CASE_P(Layer_Test_Halide, Deconvolution, Combine(202/*in channels, out channels, group*/203Values(Vec3i(6, 4, 1), Vec3i(6, 9, 3)),204/*in size*/ Values(Size(5, 6)),205/*kernel*/ Values(Size(3, 1), Size(1, 3)),206/*pad*/ Values(Size(1, 0), Size(0, 1)),207/*dilation*/ Values(Size(1, 1), Size(2, 2)),208/*stride, adj. 
pad*/ Values(Vec4i(1,1, 0,0), Vec4i(2,2, 1,0), Vec4i(1,2, 0,1)),209/*has bias*/ Bool(),210dnnBackendsAndTargetsWithHalide()211));212213////////////////////////////////////////////////////////////////////////////////214// LRN215////////////////////////////////////////////////////////////////////////////////216typedef TestWithParam<tuple<Vec3i, int, Vec3f, bool, std::string, tuple<Backend, Target> > > LRN;217TEST_P(LRN, Accuracy)218{219int inChannels = get<0>(GetParam())[0];220Size inSize = Size(get<0>(GetParam())[1], get<0>(GetParam())[2]);221int localSize = get<1>(GetParam());222float alpha = get<2>(GetParam())[0];223float beta = get<2>(GetParam())[1];224float bias = get<2>(GetParam())[2];225bool normBySize = get<3>(GetParam());226std::string nrmType = get<4>(GetParam());227Backend backendId = get<0>(get<5>(GetParam()));228Target targetId = get<1>(get<5>(GetParam()));229if (backendId == DNN_BACKEND_INFERENCE_ENGINE)230throw SkipTestException("");231232LayerParams lp;233lp.set("norm_region", nrmType);234lp.set("local_size", localSize);235lp.set("alpha", alpha);236lp.set("beta", beta);237lp.set("bias", bias);238lp.set("norm_by_size", normBySize);239lp.type = "LRN";240lp.name = "testLayer";241242int sz[] = {1, inChannels, inSize.height, inSize.width};243Mat input(4, &sz[0], CV_32F);244test(lp, input, backendId, targetId);245}246247INSTANTIATE_TEST_CASE_P(Layer_Test_Halide, LRN, Combine(248/*input ch,w,h*/ Values(Vec3i(6, 5, 8), Vec3i(7, 11, 6)),249/*local size*/ Values(3, 5),250Values(Vec3f(0.9f, 1.0f, 1.1f), Vec3f(0.9f, 1.1f, 1.0f),251/*alpha, beta,*/ Vec3f(1.0f, 0.9f, 1.1f), Vec3f(1.0f, 1.1f, 0.9f),252/*bias */ Vec3f(1.1f, 0.9f, 1.0f), Vec3f(1.1f, 1.0f, 0.9f)),253/*norm_by_size*/ Bool(),254/*norm_type*/ Values("ACROSS_CHANNELS", "WITHIN_CHANNEL"),255dnnBackendsAndTargetsWithHalide()256));257258////////////////////////////////////////////////////////////////////////////////259// Average 
pooling260////////////////////////////////////////////////////////////////////////////////261typedef TestWithParam<tuple<int, Size, Size, Size, tuple<Backend, Target> > > AvePooling;262TEST_P(AvePooling, Accuracy)263{264int inChannels = get<0>(GetParam());265Size outSize = get<1>(GetParam());; // Input size will be computed from parameters.266Size kernel = get<2>(GetParam());267Size stride = get<3>(GetParam());268Backend backendId = get<0>(get<4>(GetParam()));269Target targetId = get<1>(get<4>(GetParam()));270if (backendId == DNN_BACKEND_INFERENCE_ENGINE && targetId == DNN_TARGET_MYRIAD &&271stride == Size(3, 2) && kernel == Size(3, 3) && outSize != Size(1, 1))272throw SkipTestException("");273274const int inWidth = (outSize.width - 1) * stride.width + kernel.width;275const int inHeight = (outSize.height - 1) * stride.height + kernel.height;276277LayerParams lp;278lp.set("pool", "ave");279lp.set("kernel_w", kernel.width);280lp.set("kernel_h", kernel.height);281lp.set("stride_w", stride.width);282lp.set("stride_h", stride.height);283lp.type = "Pooling";284lp.name = "testLayer";285286int sz[] = {1, inChannels, inHeight, inWidth};287Mat input(4, &sz[0], CV_32F);288test(lp, input, backendId, targetId);289}290291INSTANTIATE_TEST_CASE_P(Layer_Test_Halide, AvePooling, Combine(292/*in channels*/ Values(3, 4),293/*out size*/ Values(Size(1, 1), Size(2, 2), Size(3, 2), Size(4, 7)),294/*kernel*/ Values(Size(1, 1), Size(2, 2), Size(3, 3), Size(3, 2)),295/*stride*/ Values(Size(1, 1), Size(2, 2), Size(3, 2)),296dnnBackendsAndTargetsWithHalide()297));298299////////////////////////////////////////////////////////////////////////////////300// Maximum pooling301////////////////////////////////////////////////////////////////////////////////302typedef TestWithParam<tuple<int, Size, Size, Size, Size, tuple<Backend, Target> > > MaxPooling;303TEST_P(MaxPooling, Accuracy)304{305int inChannels = get<0>(GetParam());306Size inSize = get<1>(GetParam());307Size kernel = 
get<2>(GetParam());308Size stride = get<3>(GetParam());309Size pad = get<4>(GetParam());310Backend backendId = get<0>(get<5>(GetParam()));311Target targetId = get<1>(get<5>(GetParam()));312313LayerParams lp;314lp.set("pool", "max");315lp.set("kernel_w", kernel.width);316lp.set("kernel_h", kernel.height);317lp.set("stride_w", stride.width);318lp.set("stride_h", stride.height);319lp.set("pad_w", pad.width);320lp.set("pad_h", pad.height);321lp.type = "Pooling";322lp.name = "testLayer";323324int sz[] = {1, inChannels, inSize.height, inSize.width};325Mat input(4, &sz[0], CV_32F);326test(lp, input, backendId, targetId);327}328329INSTANTIATE_TEST_CASE_P(Layer_Test_Halide, MaxPooling, Combine(330/*in channels*/ Values(3, 4),331/*in size*/ Values(Size(5, 5), Size(7, 6)),332/*kernel*/ Values(Size(2, 2), Size(3, 3), Size(3, 2)),333/*stride*/ Values(Size(1, 1), Size(2, 2), Size(3, 2)),334/*pad*/ Values(Size(0, 0), Size(1, 1), Size(0, 1)),335dnnBackendsAndTargetsWithHalide()336));337338////////////////////////////////////////////////////////////////////////////////339// Fully-connected340////////////////////////////////////////////////////////////////////////////////341typedef TestWithParam<tuple<int, Size, int, bool, tuple<Backend, Target> > > FullyConnected;342TEST_P(FullyConnected, Accuracy)343{344int inChannels = get<0>(GetParam());345Size inSize = get<1>(GetParam());346int outChannels = get<2>(GetParam());347bool hasBias = get<3>(GetParam());348Backend backendId = get<0>(get<4>(GetParam()));349Target targetId = get<1>(get<4>(GetParam()));350if (backendId == DNN_BACKEND_INFERENCE_ENGINE)351throw SkipTestException("");352353Mat weights(outChannels, inChannels * inSize.height * inSize.width, CV_32F);354randu(weights, -1.0f, 1.0f);355356Mat bias(1, outChannels, CV_32F);357randu(bias, -1.0f, 1.0f);358359LayerParams lp;360lp.set("num_output", outChannels);361lp.set("bias_term", hasBias);362lp.blobs.push_back(weights);363lp.blobs.push_back(bias);364lp.type = 
"InnerProduct";365lp.name = "testLayer";366367int sz[] = {1, inChannels, inSize.height, inSize.width};368Mat input(4, &sz[0], CV_32F);369test(lp, input, backendId, targetId);370}371372INSTANTIATE_TEST_CASE_P(Layer_Test_Halide, FullyConnected, Combine(373/*in channels*/ Values(3, 4),374/*in size*/ Values(Size(5, 4), Size(4, 5), Size(1, 1)),375/*out channels*/ Values(3, 4),376/*has bias*/ Bool(),377dnnBackendsAndTargetsWithHalide()378));379380////////////////////////////////////////////////////////////////////////////////381// SoftMax382////////////////////////////////////////////////////////////////////////////////383typedef TestWithParam<tuple<int, tuple<Backend, Target> > > SoftMax;384TEST_P(SoftMax, Accuracy)385{386int inChannels = get<0>(GetParam());387Backend backendId = get<0>(get<1>(GetParam()));388Target targetId = get<1>(get<1>(GetParam()));389LayerParams lp;390lp.type = "SoftMax";391lp.name = "testLayer";392393int sz[] = {1, inChannels, 1, 1};394Mat input(4, &sz[0], CV_32F);395test(lp, input, backendId, targetId);396}397398INSTANTIATE_TEST_CASE_P(Layer_Test_Halide, SoftMax, Combine(399Values(3, 4, 5, 1024),400dnnBackendsAndTargetsWithHalide()401));402403//////////////////////////////////////////////////////////////////////////////404// Max pooling - unpooling405//////////////////////////////////////////////////////////////////////////////406TEST_P(Test_Halide_layers, MaxPoolUnpool)407{408if (backend == DNN_BACKEND_INFERENCE_ENGINE)409throw SkipTestException("");410411LayerParams pool;412pool.set("pool", "max");413pool.set("kernel_w", 2);414pool.set("kernel_h", 2);415pool.set("stride_w", 2);416pool.set("stride_h", 2);417pool.set("pad_w", 0);418pool.set("pad_h", 0);419pool.type = "Pooling";420pool.name = "testPool";421422LayerParams unpool;423unpool.set("pool_k_w", 2);424unpool.set("pool_k_h", 2);425unpool.set("pool_stride_w", 2);426unpool.set("pool_stride_h", 2);427unpool.set("pool_pad_w", 0);428unpool.set("pool_pad_h", 0);429unpool.type = 
"MaxUnpool";430unpool.name = "testUnpool";431432Net net;433int poolId = net.addLayer(pool.name, pool.type, pool);434net.connect(0, 0, poolId, 0);435436int unpoolId = net.addLayer(unpool.name, unpool.type, unpool);437net.connect(poolId, 0, unpoolId, 0);438net.connect(poolId, 1, unpoolId, 1);439440int sz[] = {1, 1, 4, 4};441Mat input(4, &sz[0], CV_32F);442test(input, net, backend, target);443}444445////////////////////////////////////////////////////////////////////////////////446// AvePooling + in-place layers447////////////////////////////////////////////////////////////////////////////////448static const int kNumChannels = 3;449450void testInPlaceActivation(LayerParams& lp, Backend backendId, Target targetId)451{452EXPECT_FALSE(lp.name.empty());453454LayerParams pool;455pool.set("pool", "ave");456pool.set("kernel_w", 2);457pool.set("kernel_h", 2);458pool.set("stride_w", 2);459pool.set("stride_h", 2);460pool.type = "Pooling";461462Net net;463int poolId = net.addLayer(pool.name, pool.type, pool);464net.connect(0, 0, poolId, 0);465net.addLayerToPrev(lp.name, lp.type, lp);466467int sz[] = {1, kNumChannels, 10, 10};468Mat input(4, &sz[0], CV_32F);469test(input, net, backendId, targetId);470}471472typedef TestWithParam<tuple<bool, bool, float, tuple<Backend, Target> > > BatchNorm;473TEST_P(BatchNorm, Accuracy)474{475bool hasWeights = get<0>(GetParam());476bool hasBias = get<1>(GetParam());477float epsilon = get<2>(GetParam());478Backend backendId = get<0>(get<3>(GetParam()));479Target targetId = get<1>(get<3>(GetParam()));480481LayerParams lp;482lp.set("has_weight", hasWeights);483lp.set("has_bias", hasBias);484lp.set("eps", epsilon);485lp.type = "BatchNorm";486lp.name = "testLayer";487488lp.blobs.reserve(4);489for (int i = 0; i < 3; ++i)490lp.blobs.push_back(Mat(1, kNumChannels, CV_32F));491if (hasBias || hasWeights)492lp.blobs.push_back(Mat(1, kNumChannels, CV_32F));493494for (int i = 0; i < lp.blobs.size(); ++i)495randu(lp.blobs[i], 0.0f, 
1.0f);496497testInPlaceActivation(lp, backendId, targetId);498}499500INSTANTIATE_TEST_CASE_P(Layer_Test_Halide, BatchNorm, Combine(501/*has weights*/ Bool(),502/*has bias*/ Bool(),503/*epsilon*/ Values(1e-3f, 1e-5f),504dnnBackendsAndTargetsWithHalide()505));506507typedef TestWithParam<tuple<float, tuple<Backend, Target> > > ReLU;508TEST_P(ReLU, Accuracy)509{510float negativeSlope = get<0>(GetParam());511Backend backendId = get<0>(get<1>(GetParam()));512Target targetId = get<1>(get<1>(GetParam()));513514LayerParams lp;515lp.set("negative_slope", negativeSlope);516lp.type = "ReLU";517lp.name = "testLayer";518testInPlaceActivation(lp, backendId, targetId);519}520521INSTANTIATE_TEST_CASE_P(Layer_Test_Halide, ReLU, Combine(522/*negative slope*/ Values(2.0f, 0.3f, -0.1f, 0.0f),523dnnBackendsAndTargetsWithHalide()524));525526typedef TestWithParam<tuple<std::string, tuple<Backend, Target> > > NoParamActivation;527TEST_P(NoParamActivation, Accuracy)528{529Backend backendId = get<0>(get<1>(GetParam()));530Target targetId = get<1>(get<1>(GetParam()));531532LayerParams lp;533lp.type = get<0>(GetParam());534lp.name = "testLayer";535testInPlaceActivation(lp, backendId, targetId);536}537INSTANTIATE_TEST_CASE_P(Layer_Test_Halide, NoParamActivation, Combine(538/*type*/ Values("TanH", "Sigmoid", "AbsVal", "BNLL"),539dnnBackendsAndTargetsWithHalide()540));541542typedef TestWithParam<tuple<Vec3f, tuple<Backend, Target> > > Power;543TEST_P(Power, Accuracy)544{545float power = get<0>(GetParam())[0];546float scale = get<0>(GetParam())[1];547float shift = get<0>(GetParam())[2];548Backend backendId = get<0>(get<1>(GetParam()));549Target targetId = get<1>(get<1>(GetParam()));550551LayerParams lp;552lp.set("power", power);553lp.set("scale", scale);554lp.set("shift", shift);555lp.type = "Power";556lp.name = "testLayer";557testInPlaceActivation(lp, backendId, targetId);558}559560INSTANTIATE_TEST_CASE_P(Layer_Test_Halide, Power, Combine(561/*power, scale, shift*/ Values(Vec3f(0.9f, 1.0f, 1.1f), 
Vec3f(0.9f, 1.1f, 1.0f),562Vec3f(1.0f, 0.9f, 1.1f), Vec3f(1.0f, 1.1f, 0.9f),563Vec3f(1.1f, 0.9f, 1.0f), Vec3f(1.1f, 1.0f, 0.9f)),564dnnBackendsAndTargetsWithHalide()565));566567TEST_P(Test_Halide_layers, ChannelsPReLU)568{569LayerParams lp;570lp.type = "ChannelsPReLU";571lp.name = "testLayer";572lp.blobs.push_back(Mat(1, kNumChannels, CV_32F));573randu(lp.blobs[0], -1.0f, 1.0f);574575testInPlaceActivation(lp, backend, target);576}577578typedef TestWithParam<tuple<bool, tuple<Backend, Target> > > Scale;579TEST_P(Scale, Accuracy)580{581bool hasBias = get<0>(GetParam());582Backend backendId = get<0>(get<1>(GetParam()));583Target targetId = get<1>(get<1>(GetParam()));584585LayerParams lp;586lp.set("bias_term", hasBias);587lp.type = "Scale";588lp.name = "testLayer";589lp.blobs.push_back(Mat(1, kNumChannels, CV_32F));590randu(lp.blobs[0], -1.0f, 1.0f);591if (hasBias)592{593lp.blobs.push_back(Mat(1, kNumChannels, CV_32F));594randu(lp.blobs[1], -1.0f, 1.0f);595}596testInPlaceActivation(lp, backendId, targetId);597}598599INSTANTIATE_TEST_CASE_P(Layer_Test_Halide, Scale, Combine(600Bool(),601dnnBackendsAndTargetsWithHalide()602));603604////////////////////////////////////////////////////////////////////////////////605// Concat layer606////////////////////////////////////////////////////////////////////////////////607//608// input --- conv --- concat --- output609// `--- conv ----^ ^ ^610// `---- ... 
------' '611// `-----------------'612typedef TestWithParam<tuple<Vec3i, Vec3i, tuple<Backend, Target> > > Concat;613TEST_P(Concat, Accuracy)614{615Vec3i inSize = get<0>(GetParam());616Vec3i numChannels = get<1>(GetParam());617Backend backendId = get<0>(get<2>(GetParam()));618Target targetId = get<1>(get<2>(GetParam()));619620Net net;621622std::vector<int> convLayerIds;623convLayerIds.reserve(numChannels.channels);624for (int i = 0, n = numChannels.channels; i < n; ++i)625{626if (!numChannels[i])627break;628629int sz[] = {numChannels[i], inSize[0], 1, 1};630Mat weights(4, &sz[0], CV_32F);631randu(weights, -1.0f, 1.0f);632633LayerParams convParam;634convParam.set("kernel_w", 1);635convParam.set("kernel_h", 1);636convParam.set("num_output", numChannels[i]);637convParam.set("bias_term", false);638convParam.type = "Convolution";639std::ostringstream ss;640ss << "convLayer" << i;641convParam.name = ss.str();642convParam.blobs.push_back(weights);643644int layerId = net.addLayer(convParam.name, convParam.type, convParam);645convLayerIds.push_back(layerId);646net.connect(0, 0, layerId, 0);647}648649LayerParams concatParam;650concatParam.type = "Concat";651concatParam.name = "testLayer";652int concatId = net.addLayer(concatParam.name, concatParam.type, concatParam);653net.connect(0, 0, concatId, 0);654for (int i = 0; i < convLayerIds.size(); ++i)655{656net.connect(convLayerIds[i], 0, concatId, i + 1);657}658659int sz[] = {1, inSize[0], inSize[1], inSize[2]};660Mat input(4, &sz[0], CV_32F);661test(input, net, backendId, targetId);662}663664INSTANTIATE_TEST_CASE_P(Layer_Test_Halide, Concat, Combine(665/*input size*/ Values(Vec3i(1, 4, 5), Vec3i(2, 8, 6)),666/*channels*/ Values(Vec3i(2, 0, 0), Vec3i(3, 4, 0), Vec3i(1, 6, 2)),667dnnBackendsAndTargetsWithHalide()668));669670////////////////////////////////////////////////////////////////////////////////671// Element-wise layers672////////////////////////////////////////////////////////////////////////////////673//674// input --- 
conv --- eltwise --- output675// `--- conv ----^ ^ ^676// `---- ... ------' '677// `-----------------'678typedef TestWithParam<tuple<Vec3i, std::string, int, bool, tuple<Backend, Target> > > Eltwise;679TEST_P(Eltwise, Accuracy)680{681Vec3i inSize = get<0>(GetParam());682std::string op = get<1>(GetParam());683int numConv = get<2>(GetParam());684bool weighted = get<3>(GetParam());685Backend backendId = get<0>(get<4>(GetParam()));686Target targetId = get<1>(get<4>(GetParam()));687688Net net;689690std::vector<int> convLayerIds(numConv);691for (int i = 0; i < numConv; ++i)692{693int sz[] = {inSize[0], inSize[0], 1, 1};694Mat weights(4, &sz[0], CV_32F);695randu(weights, -1.0f, 1.0f);696697LayerParams convParam;698convParam.set("kernel_w", 1);699convParam.set("kernel_h", 1);700convParam.set("num_output", inSize[0]);701convParam.set("bias_term", false);702convParam.type = "Convolution";703std::ostringstream ss;704ss << "convLayer" << i;705convParam.name = ss.str();706convParam.blobs.push_back(weights);707708convLayerIds[i] = net.addLayer(convParam.name, convParam.type, convParam);709net.connect(0, 0, convLayerIds[i], 0);710}711712LayerParams eltwiseParam;713eltwiseParam.set("operation", op);714if (op == "sum" && weighted)715{716RNG& rng = cv::theRNG();717std::vector<float> coeff(1 + numConv);718for (int i = 0; i < coeff.size(); ++i)719{720coeff[i] = rng.uniform(-2.0f, 2.0f);721}722eltwiseParam.set("coeff", DictValue::arrayReal<float*>(&coeff[0], coeff.size()));723}724eltwiseParam.type = "Eltwise";725eltwiseParam.name = "testLayer";726int eltwiseId = net.addLayer(eltwiseParam.name, eltwiseParam.type, eltwiseParam);727net.connect(0, 0, eltwiseId, 0);728for (int i = 0; i < numConv; ++i)729{730net.connect(convLayerIds[i], 0, eltwiseId, i + 1);731}732733int sz[] = {1, inSize[0], inSize[1], inSize[2]};734Mat input(4, &sz[0], CV_32F);735test(input, net, backendId, targetId);736}737738INSTANTIATE_TEST_CASE_P(Layer_Test_Halide, Eltwise, Combine(739/*input size*/ Values(Vec3i(1, 4, 
5), Vec3i(2, 8, 6)),740/*operation*/ Values("prod", "sum", "max"),741/*num convs*/ Values(1, 2, 3),742/*weighted(for sum only)*/ Bool(),743dnnBackendsAndTargetsWithHalide()744));745746////////////////////////////////////////////////////////////////////////////747// Mixed backends748////////////////////////////////////////////////////////////////////////////749#ifdef HAVE_HALIDE750TEST(MixedBackends_Halide_Default_Halide, Accuracy)751{752// Just a layer that supports Halide backend.753LayerParams lrn;754lrn.type = "LRN";755lrn.name = "testLRN";756757// Some of layers that doesn't supports Halide backend yet.758LayerParams mvn;759mvn.type = "MVN";760mvn.name = "testMVN";761762// Halide layer again.763LayerParams lrn2;764lrn2.type = "LRN";765lrn2.name = "testLRN2";766767Net net;768int lrnId = net.addLayer(lrn.name, lrn.type, lrn);769net.connect(0, 0, lrnId, 0);770net.addLayerToPrev(mvn.name, mvn.type, mvn);771net.addLayerToPrev(lrn2.name, lrn2.type, lrn2);772773int sz[] = {4, 3, 5, 6};774Mat input(4, &sz[0], CV_32F);775randu(input, -1.0f, 1.0f);776net.setInput(input);777net.setPreferableBackend(DNN_BACKEND_OPENCV);778Mat outputDefault = net.forward().clone();779780net.setPreferableBackend(DNN_BACKEND_HALIDE);781net.setInput(input);782Mat outputHalide = net.forward().clone();783normAssert(outputDefault, outputHalide);784785net.setPreferableTarget(DNN_TARGET_OPENCL);786net.setInput(input);787outputHalide = net.forward().clone();788normAssert(outputDefault, outputHalide);789}790#endif // HAVE_HALIDE791792INSTANTIATE_TEST_CASE_P(/*nothing*/, Test_Halide_layers, dnnBackendsAndTargetsWithHalide());793794}} // namespace795796797