// Path: blob/master/modules/dnn/test/test_ie_models.cpp (16354 views)
// This file is part of OpenCV project.1// It is subject to the license terms in the LICENSE file found in the top-level directory2// of this distribution and at http://opencv.org/license.html.3//4// Copyright (C) 2018, Intel Corporation, all rights reserved.5// Third party copyrights are property of their respective owners.6#include "test_precomp.hpp"78#ifdef HAVE_INF_ENGINE9#include <opencv2/core/utils/filesystem.hpp>1011#include <inference_engine.hpp>12#include <ie_icnn_network.hpp>13#include <ie_extension.h>1415namespace opencv_test { namespace {1617static void initDLDTDataPath()18{19#ifndef WINRT20static bool initialized = false;21if (!initialized)22{23const char* dldtTestDataPath = getenv("INTEL_CVSDK_DIR");24if (dldtTestDataPath)25cvtest::addDataSearchPath(cv::utils::fs::join(dldtTestDataPath, "deployment_tools"));26initialized = true;27}28#endif29}3031using namespace cv;32using namespace cv::dnn;33using namespace InferenceEngine;3435static inline void genData(const std::vector<size_t>& dims, Mat& m, Blob::Ptr& dataPtr)36{37std::vector<int> reversedDims(dims.begin(), dims.end());38std::reverse(reversedDims.begin(), reversedDims.end());3940m.create(reversedDims, CV_32F);41randu(m, -1, 1);4243dataPtr = make_shared_blob<float>(Precision::FP32, dims, (float*)m.data);44}4546void runIE(Target target, const std::string& xmlPath, const std::string& binPath,47std::map<std::string, cv::Mat>& inputsMap, std::map<std::string, cv::Mat>& outputsMap)48{49CNNNetReader reader;50reader.ReadNetwork(xmlPath);51reader.ReadWeights(binPath);5253CNNNetwork net = reader.getNetwork();5455InferenceEnginePluginPtr enginePtr;56InferencePlugin plugin;57ExecutableNetwork netExec;58InferRequest infRequest;59TargetDevice targetDevice;60switch (target)61{62case DNN_TARGET_CPU:63targetDevice = TargetDevice::eCPU;64break;65case DNN_TARGET_OPENCL:66case DNN_TARGET_OPENCL_FP16:67targetDevice = TargetDevice::eGPU;68break;69case DNN_TARGET_MYRIAD:70targetDevice = 
TargetDevice::eMYRIAD;71break;72default:73CV_Error(Error::StsNotImplemented, "Unknown target");74};7576try77{78enginePtr = PluginDispatcher({""}).getSuitablePlugin(targetDevice);7980if (targetDevice == TargetDevice::eCPU)81{82std::string suffixes[] = {"_avx2", "_sse4", ""};83bool haveFeature[] = {84checkHardwareSupport(CPU_AVX2),85checkHardwareSupport(CPU_SSE4_2),86true87};88for (int i = 0; i < 3; ++i)89{90if (!haveFeature[i])91continue;92#ifdef _WIN3293std::string libName = "cpu_extension" + suffixes[i] + ".dll";94#else95std::string libName = "libcpu_extension" + suffixes[i] + ".so";96#endif // _WIN3297try98{99IExtensionPtr extension = make_so_pointer<IExtension>(libName);100enginePtr->AddExtension(extension, 0);101break;102}103catch(...) {}104}105// Some of networks can work without a library of extra layers.106}107plugin = InferencePlugin(enginePtr);108109netExec = plugin.LoadNetwork(net, {});110infRequest = netExec.CreateInferRequest();111}112catch (const std::exception& ex)113{114CV_Error(Error::StsAssert, format("Failed to initialize Inference Engine backend: %s", ex.what()));115}116117// Fill input blobs.118inputsMap.clear();119BlobMap inputBlobs;120for (auto& it : net.getInputsInfo())121{122genData(it.second->getDims(), inputsMap[it.first], inputBlobs[it.first]);123}124infRequest.SetInput(inputBlobs);125126// Fill output blobs.127outputsMap.clear();128BlobMap outputBlobs;129for (auto& it : net.getOutputsInfo())130{131genData(it.second->dims, outputsMap[it.first], outputBlobs[it.first]);132}133infRequest.SetOutput(outputBlobs);134135infRequest.Infer();136}137138std::vector<String> getOutputsNames(const Net& net)139{140std::vector<String> names;141if (names.empty())142{143std::vector<int> outLayers = net.getUnconnectedOutLayers();144std::vector<String> layersNames = net.getLayerNames();145names.resize(outLayers.size());146for (size_t i = 0; i < outLayers.size(); ++i)147names[i] = layersNames[outLayers[i] - 1];148}149return names;150}151152void runCV(Target 
target, const std::string& xmlPath, const std::string& binPath,153const std::map<std::string, cv::Mat>& inputsMap,154std::map<std::string, cv::Mat>& outputsMap)155{156Net net = readNet(xmlPath, binPath);157for (auto& it : inputsMap)158net.setInput(it.second, it.first);159net.setPreferableTarget(target);160161std::vector<String> outNames = getOutputsNames(net);162std::vector<Mat> outs;163net.forward(outs, outNames);164165outputsMap.clear();166EXPECT_EQ(outs.size(), outNames.size());167for (int i = 0; i < outs.size(); ++i)168{169EXPECT_TRUE(outputsMap.insert({outNames[i], outs[i]}).second);170}171}172173typedef TestWithParam<tuple<Target, String> > DNNTestOpenVINO;174TEST_P(DNNTestOpenVINO, models)175{176Target target = (dnn::Target)(int)get<0>(GetParam());177std::string modelName = get<1>(GetParam());178179if (target == DNN_TARGET_MYRIAD && (modelName == "landmarks-regression-retail-0001" ||180modelName == "semantic-segmentation-adas-0001" ||181modelName == "face-reidentification-retail-0001"))182throw SkipTestException("");183184std::string precision = (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_MYRIAD) ? 
"FP16" : "FP32";185std::string prefix = utils::fs::join("intel_models",186utils::fs::join(modelName,187utils::fs::join(precision, modelName)));188std::string xmlPath = findDataFile(prefix + ".xml");189std::string binPath = findDataFile(prefix + ".bin");190191std::map<std::string, cv::Mat> inputsMap;192std::map<std::string, cv::Mat> ieOutputsMap, cvOutputsMap;193// Single Myriad device cannot be shared across multiple processes.194resetMyriadDevice();195runIE(target, xmlPath, binPath, inputsMap, ieOutputsMap);196runCV(target, xmlPath, binPath, inputsMap, cvOutputsMap);197198EXPECT_EQ(ieOutputsMap.size(), cvOutputsMap.size());199for (auto& srcIt : ieOutputsMap)200{201auto dstIt = cvOutputsMap.find(srcIt.first);202CV_Assert(dstIt != cvOutputsMap.end());203double normInf = cvtest::norm(srcIt.second, dstIt->second, cv::NORM_INF);204EXPECT_EQ(normInf, 0);205}206}207208static testing::internal::ParamGenerator<String> intelModels()209{210initDLDTDataPath();211std::vector<String> modelsNames;212213std::string path;214try215{216path = findDataDirectory("intel_models", false);217}218catch (...)219{220std::cerr << "ERROR: Can't find OpenVINO models. 
Check INTEL_CVSDK_DIR environment variable (run setup.sh)" << std::endl;221return ValuesIn(modelsNames); // empty list222}223224cv::utils::fs::glob_relative(path, "", modelsNames, false, true);225226modelsNames.erase(227std::remove_if(modelsNames.begin(), modelsNames.end(),228[&](const String& dir){ return !utils::fs::isDirectory(utils::fs::join(path, dir)); }),229modelsNames.end()230);231CV_Assert(!modelsNames.empty());232233return ValuesIn(modelsNames);234}235236static testing::internal::ParamGenerator<Target> dnnDLIETargets()237{238std::vector<Target> targets;239targets.push_back(DNN_TARGET_CPU);240#ifdef HAVE_OPENCL241if (cv::ocl::useOpenCL() && ocl::Device::getDefault().isIntel())242{243targets.push_back(DNN_TARGET_OPENCL);244targets.push_back(DNN_TARGET_OPENCL_FP16);245}246#endif247if (checkMyriadTarget())248targets.push_back(DNN_TARGET_MYRIAD);249return testing::ValuesIn(targets);250}251252INSTANTIATE_TEST_CASE_P(/**/, DNNTestOpenVINO, Combine(253dnnDLIETargets(), intelModels()254));255256}}257#endif // HAVE_INF_ENGINE258259260