// Path: blob/master/modules/dnn/src/layers/blank_layer.cpp
// 16337 views
/*M///////////////////////////////////////////////////////////////////////////////////////1//2// IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.3//4// By downloading, copying, installing or using the software you agree to this license.5// If you do not agree to this license, do not download, install,6// copy or use the software.7//8//9// License Agreement10// For Open Source Computer Vision Library11//12// Copyright (C) 2013, OpenCV Foundation, all rights reserved.13// Copyright (C) 2017, Intel Corporation, all rights reserved.14// Third party copyrights are property of their respective owners.15//16// Redistribution and use in source and binary forms, with or without modification,17// are permitted provided that the following conditions are met:18//19// * Redistribution's of source code must retain the above copyright notice,20// this list of conditions and the following disclaimer.21//22// * Redistribution's in binary form must reproduce the above copyright notice,23// this list of conditions and the following disclaimer in the documentation24// and/or other materials provided with the distribution.25//26// * The name of the copyright holders may not be used to endorse or promote products27// derived from this software without specific prior written permission.28//29// This software is provided by the copyright holders and contributors "as is" and30// any express or implied warranties, including, but not limited to, the implied31// warranties of merchantability and fitness for a particular purpose are disclaimed.32// In no event shall the Intel Corporation or contributors be liable for any direct,33// indirect, incidental, special, exemplary, or consequential damages34// (including, but not limited to, procurement of substitute goods or services;35// loss of use, data, or profits; or business interruption) however caused36// and on any theory of liability, whether in contract, strict liability,37// or tort (including negligence or otherwise) 
arising in any way out of38// the use of this software, even if advised of the possibility of such damage.39//40//M*/41#include "../precomp.hpp"42#include "../op_inf_engine.hpp"4344namespace cv45{46namespace dnn47{48class BlankLayerImpl CV_FINAL : public BlankLayer49{50public:51BlankLayerImpl(const LayerParams& params)52{53setParamsFrom(params);54}5556virtual bool supportBackend(int backendId) CV_OVERRIDE57{58return backendId == DNN_BACKEND_OPENCV ||59backendId == DNN_BACKEND_INFERENCE_ENGINE && haveInfEngine();60}6162bool getMemoryShapes(const std::vector<MatShape> &inputs,63const int requiredOutputs,64std::vector<MatShape> &outputs,65std::vector<MatShape> &internals) const CV_OVERRIDE66{67Layer::getMemoryShapes(inputs, requiredOutputs, outputs, internals);68return true;69}7071#ifdef HAVE_OPENCL72bool forward_ocl(InputArrayOfArrays inputs_, OutputArrayOfArrays outputs_, OutputArrayOfArrays internals_)73{74std::vector<UMat> inputs;75std::vector<UMat> outputs;7677inputs_.getUMatVector(inputs);78outputs_.getUMatVector(outputs);7980for (int i = 0, n = outputs.size(); i < n; ++i)81{82void *src_handle = inputs[i].handle(ACCESS_READ);83void *dst_handle = outputs[i].handle(ACCESS_WRITE);84if (src_handle != dst_handle)85inputs[i].copyTo(outputs[i]);86}8788return true;89}90#endif9192void forward(InputArrayOfArrays inputs_arr, OutputArrayOfArrays outputs_arr, OutputArrayOfArrays internals_arr) CV_OVERRIDE93{94CV_TRACE_FUNCTION();95CV_TRACE_ARG_VALUE(name, "name", name.c_str());9697CV_OCL_RUN(IS_DNN_OPENCL_TARGET(preferableTarget),98forward_ocl(inputs_arr, outputs_arr, internals_arr))99100std::vector<Mat> inputs, outputs;101inputs_arr.getMatVector(inputs);102outputs_arr.getMatVector(outputs);103104for (int i = 0, n = outputs.size(); i < n; ++i)105if (outputs[i].data != inputs[i].data)106inputs[i].copyTo(outputs[i]);107}108109virtual Ptr<BackendNode> initInfEngine(const std::vector<Ptr<BackendWrapper> >&) CV_OVERRIDE110{111#ifdef HAVE_INF_ENGINE112InferenceEngine::LayerParams 
lp;113lp.name = name;114lp.type = "Split";115lp.precision = InferenceEngine::Precision::FP32;116std::shared_ptr<InferenceEngine::SplitLayer> ieLayer(new InferenceEngine::SplitLayer(lp));117return Ptr<BackendNode>(new InfEngineBackendNode(ieLayer));118#endif // HAVE_INF_ENGINE119return Ptr<BackendNode>();120}121};122123Ptr<Layer> BlankLayer::create(const LayerParams& params)124{125// In case of Caffe's Dropout layer from Faster-RCNN framework,126// https://github.com/rbgirshick/caffe-fast-rcnn/tree/faster-rcnn127// return Power layer.128if (!params.get<bool>("scale_train", true))129{130float scale = 1 - params.get<float>("dropout_ratio", 0.5f);131CV_Assert(scale > 0);132133LayerParams powerParams;134powerParams.name = params.name;135powerParams.type = "Power";136powerParams.set("scale", scale);137138return PowerLayer::create(powerParams);139}140else141return Ptr<BlankLayer>(new BlankLayerImpl(params));142}143144}145}146147148