// Path: modules/stitching/perf/perf_estimators.cpp
// Performance test for the affine bundle adjusters of the stitching module:
// measures how long BundleAdjusterAffinePartial / BundleAdjusterAffine take to
// refine an initial affine estimate between two overlapping sample images.
#include "perf_precomp.hpp"
#include "opencv2/imgcodecs.hpp"
#include "opencv2/opencv_modules.hpp"

namespace opencv_test
{
using namespace perf;

// Parameters: (feature detector name, bundle-adjustment variant name).
typedef TestBaseWithParam<tuple<string, string> > bundleAdjuster;

// SURF lives in the xfeatures2d contrib module; only test it when available.
#ifdef HAVE_OPENCV_XFEATURES2D
#define TEST_DETECTORS testing::Values("surf", "orb")
#else
#define TEST_DETECTORS testing::Values<string>("orb")
#endif
// Input images are downscaled so their area is at most 0.6 megapixels.
#define WORK_MEGAPIX 0.6
#define AFFINE_FUNCTIONS testing::Values("affinePartial", "affine")

PERF_TEST_P(bundleAdjuster, affine, testing::Combine(TEST_DETECTORS, AFFINE_FUNCTIONS))
{
    Mat img1, img1_full = imread(getDataPath("stitching/s1.jpg"));
    Mat img2, img2_full = imread(getDataPath("stitching/s2.jpg"));
    // min(1.0, ...) ensures images are only ever shrunk, never upscaled.
    float scale1 = (float)std::min(1.0, sqrt(WORK_MEGAPIX * 1e6 / img1_full.total()));
    float scale2 = (float)std::min(1.0, sqrt(WORK_MEGAPIX * 1e6 / img2_full.total()));
    resize(img1_full, img1, Size(), scale1, scale1, INTER_LINEAR_EXACT);
    resize(img2_full, img2, Size(), scale2, scale2, INTER_LINEAR_EXACT);

    string detector = get<0>(GetParam());
    string affine_fun = get<1>(GetParam());

    // Build the stitching pipeline pieces selected by the test parameters.
    Ptr<detail::FeaturesFinder> finder;
    Ptr<detail::FeaturesMatcher> matcher;
    Ptr<detail::BundleAdjusterBase> bundle_adjuster;
    if (detector == "surf")
        finder = makePtr<detail::SurfFeaturesFinder>();
    else if (detector == "orb")
        finder = makePtr<detail::OrbFeaturesFinder>();
    if (affine_fun == "affinePartial")
    {
        // false/true selects the matcher's full_affine mode to match the adjuster.
        matcher = makePtr<detail::AffineBestOf2NearestMatcher>(false);
        bundle_adjuster = makePtr<detail::BundleAdjusterAffinePartial>();
    }
    else if (affine_fun == "affine")
    {
        matcher = makePtr<detail::AffineBestOf2NearestMatcher>(true);
        bundle_adjuster = makePtr<detail::BundleAdjusterAffine>();
    }
    Ptr<detail::Estimator> estimator = makePtr<detail::AffineBasedEstimator>();

    std::vector<Mat> images;
    images.push_back(img1), images.push_back(img2);
    std::vector<detail::ImageFeatures> features;
    std::vector<detail::MatchesInfo> pairwise_matches;
    std::vector<detail::CameraParams> cameras;
    std::vector<detail::CameraParams> cameras2;

    // Untimed setup: detect features, match them, and produce the initial
    // camera estimates that the bundle adjuster will refine.
    (*finder)(images, features);
    (*matcher)(features, pairwise_matches);
    if (!(*estimator)(features, pairwise_matches, cameras))
        FAIL() << "estimation failed. this should never happen.";
    // this is currently required: convert rotations to CV_32F before bundle
    // adjustment (the estimator's output type is not accepted as-is).
    for (size_t i = 0; i < cameras.size(); ++i)
    {
        Mat R;
        cameras[i].R.convertTo(R, CV_32F);
        cameras[i].R = R;
    }

    // Keep a pristine copy of the initial guess so every timed iteration
    // starts from identical input.
    cameras2 = cameras;
    bool success = true;
    while(next())
    {
        cameras = cameras2; // revert cameras back to original initial guess
        startTimer();
        success = (*bundle_adjuster)(features, pairwise_matches, cameras);
        stopTimer();
    }

    EXPECT_TRUE(success);
    EXPECT_TRUE(cameras.size() == 2);

    // first camera should be just identity
    Mat &first = cameras[0].R;
    SANITY_CHECK(first, 1e-3, ERROR_ABSOLUTE);
    // second camera should be the estimated transform between images
    // separate rotation and translation in transform matrix
    Mat T_second (cameras[1].R, Range(0, 2), Range(2, 3));
    Mat R_second (cameras[1].R, Range(0, 2), Range(0, 2));
    Mat h (cameras[1].R, Range(2, 3), Range::all());
    SANITY_CHECK(T_second, 5, ERROR_ABSOLUTE); // allow 5 pixels diff in translations
    SANITY_CHECK(R_second, .01, ERROR_ABSOLUTE); // rotations must be more precise
    // last row should be precisely (0, 0, 1) as it is just added for representation in homogeneous
    // coordinates
    EXPECT_TRUE(h.type() == CV_32F);
    EXPECT_FLOAT_EQ(h.at<float>(0), 0.f);
    EXPECT_FLOAT_EQ(h.at<float>(1), 0.f);
    EXPECT_FLOAT_EQ(h.at<float>(2), 1.f);
}

} // namespace