// Source file: modules/videostab/src/global_motion.cpp
/*M///////////////////////////////////////////////////////////////////////////////////////1//2// IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.3//4// By downloading, copying, installing or using the software you agree to this license.5// If you do not agree to this license, do not download, install,6// copy or use the software.7//8//9// License Agreement10// For Open Source Computer Vision Library11//12// Copyright (C) 2000-2008, Intel Corporation, all rights reserved.13// Copyright (C) 2009-2011, Willow Garage Inc., all rights reserved.14// Third party copyrights are property of their respective owners.15//16// Redistribution and use in source and binary forms, with or without modification,17// are permitted provided that the following conditions are met:18//19// * Redistribution's of source code must retain the above copyright notice,20// this list of conditions and the following disclaimer.21//22// * Redistribution's in binary form must reproduce the above copyright notice,23// this list of conditions and the following disclaimer in the documentation24// and/or other materials provided with the distribution.25//26// * The name of the copyright holders may not be used to endorse or promote products27// derived from this software without specific prior written permission.28//29// This software is provided by the copyright holders and contributors "as is" and30// any express or implied warranties, including, but not limited to, the implied31// warranties of merchantability and fitness for a particular purpose are disclaimed.32// In no event shall the Intel Corporation or contributors be liable for any direct,33// indirect, incidental, special, exemplary, or consequential damages34// (including, but not limited to, procurement of substitute goods or services;35// loss of use, data, or profits; or business interruption) however caused36// and on any theory of liability, whether in contract, strict liability,37// or tort (including negligence or 
otherwise) arising in any way out of38// the use of this software, even if advised of the possibility of such damage.39//40//M*/4142#include "precomp.hpp"43#include "opencv2/videostab/global_motion.hpp"44#include "opencv2/videostab/ring_buffer.hpp"45#include "opencv2/videostab/outlier_rejection.hpp"46#include "opencv2/opencv_modules.hpp"47#include "clp.hpp"4849#include "opencv2/core/private.cuda.hpp"5051#if defined(HAVE_OPENCV_CUDAIMGPROC) && defined(HAVE_OPENCV_CUDAOPTFLOW)52#if !defined HAVE_CUDA || defined(CUDA_DISABLER)53namespace cv { namespace cuda {54static void compactPoints(GpuMat&, GpuMat&, const GpuMat&) { throw_no_cuda(); }55}}56#else57namespace cv { namespace cuda { namespace device { namespace globmotion {58int compactPoints(int N, float *points0, float *points1, const uchar *mask);59}}}}60namespace cv { namespace cuda {61static void compactPoints(GpuMat &points0, GpuMat &points1, const GpuMat &mask)62{63CV_Assert(points0.rows == 1 && points1.rows == 1 && mask.rows == 1);64CV_Assert(points0.type() == CV_32FC2 && points1.type() == CV_32FC2 && mask.type() == CV_8U);65CV_Assert(points0.cols == mask.cols && points1.cols == mask.cols);6667int npoints = points0.cols;68int remaining = cv::cuda::device::globmotion::compactPoints(69npoints, (float*)points0.data, (float*)points1.data, mask.data);7071points0 = points0.colRange(0, remaining);72points1 = points1.colRange(0, remaining);73}74}}75#endif76#endif7778namespace cv79{80namespace videostab81{8283// does isotropic normalization84static Mat normalizePoints(int npoints, Point2f *points)85{86float cx = 0.f, cy = 0.f;87for (int i = 0; i < npoints; ++i)88{89cx += points[i].x;90cy += points[i].y;91}92cx /= npoints;93cy /= npoints;9495float d = 0.f;96for (int i = 0; i < npoints; ++i)97{98points[i].x -= cx;99points[i].y -= cy;100d += std::sqrt(sqr(points[i].x) + sqr(points[i].y));101}102d /= npoints;103104float s = std::sqrt(2.f) / d;105for (int i = 0; i < npoints; ++i)106{107points[i].x *= s;108points[i].y *= 
s;109}110111Mat_<float> T = Mat::eye(3, 3, CV_32F);112T(0,0) = T(1,1) = s;113T(0,2) = -cx*s;114T(1,2) = -cy*s;115return T;116}117118119static Mat estimateGlobMotionLeastSquaresTranslation(120int npoints, Point2f *points0, Point2f *points1, float *rmse)121{122Mat_<float> M = Mat::eye(3, 3, CV_32F);123for (int i = 0; i < npoints; ++i)124{125M(0,2) += points1[i].x - points0[i].x;126M(1,2) += points1[i].y - points0[i].y;127}128M(0,2) /= npoints;129M(1,2) /= npoints;130131if (rmse)132{133*rmse = 0;134for (int i = 0; i < npoints; ++i)135*rmse += sqr(points1[i].x - points0[i].x - M(0,2)) +136sqr(points1[i].y - points0[i].y - M(1,2));137*rmse = std::sqrt(*rmse / npoints);138}139140return M;141}142143144static Mat estimateGlobMotionLeastSquaresTranslationAndScale(145int npoints, Point2f *points0, Point2f *points1, float *rmse)146{147Mat_<float> T0 = normalizePoints(npoints, points0);148Mat_<float> T1 = normalizePoints(npoints, points1);149150Mat_<float> A(2*npoints, 3), b(2*npoints, 1);151float *a0, *a1;152Point2f p0, p1;153154for (int i = 0; i < npoints; ++i)155{156a0 = A[2*i];157a1 = A[2*i+1];158p0 = points0[i];159p1 = points1[i];160a0[0] = p0.x; a0[1] = 1; a0[2] = 0;161a1[0] = p0.y; a1[1] = 0; a1[2] = 1;162b(2*i,0) = p1.x;163b(2*i+1,0) = p1.y;164}165166Mat_<float> sol;167solve(A, b, sol, DECOMP_NORMAL | DECOMP_LU);168169if (rmse)170*rmse = static_cast<float>(norm(A*sol, b, NORM_L2) / std::sqrt(static_cast<double>(npoints)));171172Mat_<float> M = Mat::eye(3, 3, CV_32F);173M(0,0) = M(1,1) = sol(0,0);174M(0,2) = sol(1,0);175M(1,2) = sol(2,0);176177return T1.inv() * M * T0;178}179180static Mat estimateGlobMotionLeastSquaresRotation(181int npoints, Point2f *points0, Point2f *points1, float *rmse)182{183Point2f p0, p1;184float A(0), B(0);185for(int i=0; i<npoints; ++i)186{187p0 = points0[i];188p1 = points1[i];189190A += p0.x*p1.x + p0.y*p1.y;191B += p0.x*p1.y - p1.x*p0.y;192}193194// A*sin(alpha) + B*cos(alpha) = 0195float C = std::sqrt(A*A + B*B);196Mat_<float> M = 
Mat::eye(3, 3, CV_32F);197if ( C != 0 )198{199float sinAlpha = - B / C;200float cosAlpha = A / C;201202M(0,0) = cosAlpha;203M(1,1) = M(0,0);204M(0,1) = sinAlpha;205M(1,0) = - M(0,1);206}207208if (rmse)209{210*rmse = 0;211for (int i = 0; i < npoints; ++i)212{213p0 = points0[i];214p1 = points1[i];215*rmse += sqr(p1.x - M(0,0)*p0.x - M(0,1)*p0.y) +216sqr(p1.y - M(1,0)*p0.x - M(1,1)*p0.y);217}218*rmse = std::sqrt(*rmse / npoints);219}220221return M;222}223224static Mat estimateGlobMotionLeastSquaresRigid(225int npoints, Point2f *points0, Point2f *points1, float *rmse)226{227Point2f mean0(0.f, 0.f);228Point2f mean1(0.f, 0.f);229230for (int i = 0; i < npoints; ++i)231{232mean0 += points0[i];233mean1 += points1[i];234}235236mean0 *= 1.f / npoints;237mean1 *= 1.f / npoints;238239Mat_<float> A = Mat::zeros(2, 2, CV_32F);240Point2f pt0, pt1;241242for (int i = 0; i < npoints; ++i)243{244pt0 = points0[i] - mean0;245pt1 = points1[i] - mean1;246A(0,0) += pt1.x * pt0.x;247A(0,1) += pt1.x * pt0.y;248A(1,0) += pt1.y * pt0.x;249A(1,1) += pt1.y * pt0.y;250}251252Mat_<float> M = Mat::eye(3, 3, CV_32F);253254SVD svd(A);255Mat_<float> R = svd.u * svd.vt;256Mat tmp(M(Rect(0,0,2,2)));257R.copyTo(tmp);258259M(0,2) = mean1.x - R(0,0)*mean0.x - R(0,1)*mean0.y;260M(1,2) = mean1.y - R(1,0)*mean0.x - R(1,1)*mean0.y;261262if (rmse)263{264*rmse = 0;265for (int i = 0; i < npoints; ++i)266{267pt0 = points0[i];268pt1 = points1[i];269*rmse += sqr(pt1.x - M(0,0)*pt0.x - M(0,1)*pt0.y - M(0,2)) +270sqr(pt1.y - M(1,0)*pt0.x - M(1,1)*pt0.y - M(1,2));271}272*rmse = std::sqrt(*rmse / npoints);273}274275return M;276}277278279static Mat estimateGlobMotionLeastSquaresSimilarity(280int npoints, Point2f *points0, Point2f *points1, float *rmse)281{282Mat_<float> T0 = normalizePoints(npoints, points0);283Mat_<float> T1 = normalizePoints(npoints, points1);284285Mat_<float> A(2*npoints, 4), b(2*npoints, 1);286float *a0, *a1;287Point2f p0, p1;288289for (int i = 0; i < npoints; ++i)290{291a0 = A[2*i];292a1 = 
A[2*i+1];293p0 = points0[i];294p1 = points1[i];295a0[0] = p0.x; a0[1] = p0.y; a0[2] = 1; a0[3] = 0;296a1[0] = p0.y; a1[1] = -p0.x; a1[2] = 0; a1[3] = 1;297b(2*i,0) = p1.x;298b(2*i+1,0) = p1.y;299}300301Mat_<float> sol;302solve(A, b, sol, DECOMP_NORMAL | DECOMP_LU);303304if (rmse)305*rmse = static_cast<float>(norm(A*sol, b, NORM_L2) / std::sqrt(static_cast<double>(npoints)));306307Mat_<float> M = Mat::eye(3, 3, CV_32F);308M(0,0) = M(1,1) = sol(0,0);309M(0,1) = sol(1,0);310M(1,0) = -sol(1,0);311M(0,2) = sol(2,0);312M(1,2) = sol(3,0);313314return T1.inv() * M * T0;315}316317318static Mat estimateGlobMotionLeastSquaresAffine(319int npoints, Point2f *points0, Point2f *points1, float *rmse)320{321Mat_<float> T0 = normalizePoints(npoints, points0);322Mat_<float> T1 = normalizePoints(npoints, points1);323324Mat_<float> A(2*npoints, 6), b(2*npoints, 1);325float *a0, *a1;326Point2f p0, p1;327328for (int i = 0; i < npoints; ++i)329{330a0 = A[2*i];331a1 = A[2*i+1];332p0 = points0[i];333p1 = points1[i];334a0[0] = p0.x; a0[1] = p0.y; a0[2] = 1; a0[3] = a0[4] = a0[5] = 0;335a1[0] = a1[1] = a1[2] = 0; a1[3] = p0.x; a1[4] = p0.y; a1[5] = 1;336b(2*i,0) = p1.x;337b(2*i+1,0) = p1.y;338}339340Mat_<float> sol;341solve(A, b, sol, DECOMP_NORMAL | DECOMP_LU);342343if (rmse)344*rmse = static_cast<float>(norm(A*sol, b, NORM_L2) / std::sqrt(static_cast<double>(npoints)));345346Mat_<float> M = Mat::eye(3, 3, CV_32F);347for (int i = 0, k = 0; i < 2; ++i)348for (int j = 0; j < 3; ++j, ++k)349M(i,j) = sol(k,0);350351return T1.inv() * M * T0;352}353354355Mat estimateGlobalMotionLeastSquares(356InputOutputArray points0, InputOutputArray points1, int model, float *rmse)357{358CV_INSTRUMENT_REGION();359360CV_Assert(model <= MM_AFFINE);361CV_Assert(points0.type() == points1.type());362const int npoints = points0.getMat().checkVector(2);363CV_Assert(points1.getMat().checkVector(2) == npoints);364365typedef Mat (*Impl)(int, Point2f*, Point2f*, float*);366static Impl impls[] = { 
estimateGlobMotionLeastSquaresTranslation,367estimateGlobMotionLeastSquaresTranslationAndScale,368estimateGlobMotionLeastSquaresRotation,369estimateGlobMotionLeastSquaresRigid,370estimateGlobMotionLeastSquaresSimilarity,371estimateGlobMotionLeastSquaresAffine };372373Point2f *points0_ = points0.getMat().ptr<Point2f>();374Point2f *points1_ = points1.getMat().ptr<Point2f>();375376return impls[model](npoints, points0_, points1_, rmse);377}378379380Mat estimateGlobalMotionRansac(381InputArray points0, InputArray points1, int model, const RansacParams ¶ms,382float *rmse, int *ninliers)383{384CV_INSTRUMENT_REGION();385386CV_Assert(model <= MM_AFFINE);387CV_Assert(points0.type() == points1.type());388const int npoints = points0.getMat().checkVector(2);389CV_Assert(points1.getMat().checkVector(2) == npoints);390391if (npoints < params.size)392return Mat::eye(3, 3, CV_32F);393394const Point2f *points0_ = points0.getMat().ptr<Point2f>();395const Point2f *points1_ = points1.getMat().ptr<Point2f>();396const int niters = params.niters();397398// current hypothesis399std::vector<int> indices(params.size);400std::vector<Point2f> subset0(params.size);401std::vector<Point2f> subset1(params.size);402403// best hypothesis404std::vector<int> bestIndices(params.size);405406Mat_<float> bestM;407int ninliersMax = -1;408409RNG rng(0);410Point2f p0, p1;411float x, y;412413for (int iter = 0; iter < niters; ++iter)414{415for (int i = 0; i < params.size; ++i)416{417bool ok = false;418while (!ok)419{420ok = true;421indices[i] = static_cast<unsigned>(rng) % npoints;422for (int j = 0; j < i; ++j)423if (indices[i] == indices[j])424{ ok = false; break; }425}426}427for (int i = 0; i < params.size; ++i)428{429subset0[i] = points0_[indices[i]];430subset1[i] = points1_[indices[i]];431}432433Mat_<float> M = estimateGlobalMotionLeastSquares(subset0, subset1, model, 0);434435int numinliers = 0;436for (int i = 0; i < npoints; ++i)437{438p0 = points0_[i];439p1 = points1_[i];440x = M(0,0)*p0.x + M(0,1)*p0.y 
+ M(0,2);441y = M(1,0)*p0.x + M(1,1)*p0.y + M(1,2);442if (sqr(x - p1.x) + sqr(y - p1.y) < params.thresh * params.thresh)443numinliers++;444}445if (numinliers >= ninliersMax)446{447bestM = M;448ninliersMax = numinliers;449bestIndices.swap(indices);450}451}452453if (ninliersMax < params.size)454{455// compute RMSE456for (int i = 0; i < params.size; ++i)457{458subset0[i] = points0_[bestIndices[i]];459subset1[i] = points1_[bestIndices[i]];460}461bestM = estimateGlobalMotionLeastSquares(subset0, subset1, model, rmse);462}463else464{465subset0.resize(ninliersMax);466subset1.resize(ninliersMax);467for (int i = 0, j = 0; i < npoints && j < ninliersMax ; ++i)468{469p0 = points0_[i];470p1 = points1_[i];471x = bestM(0,0)*p0.x + bestM(0,1)*p0.y + bestM(0,2);472y = bestM(1,0)*p0.x + bestM(1,1)*p0.y + bestM(1,2);473if (sqr(x - p1.x) + sqr(y - p1.y) < params.thresh * params.thresh)474{475subset0[j] = p0;476subset1[j] = p1;477j++;478}479}480bestM = estimateGlobalMotionLeastSquares(subset0, subset1, model, rmse);481}482483if (ninliers)484*ninliers = ninliersMax;485486return bestM;487}488489490MotionEstimatorRansacL2::MotionEstimatorRansacL2(MotionModel model)491: MotionEstimatorBase(model)492{493setRansacParams(RansacParams::default2dMotion(model));494setMinInlierRatio(0.1f);495}496497498Mat MotionEstimatorRansacL2::estimate(InputArray points0, InputArray points1, bool *ok)499{500CV_Assert(points0.type() == points1.type());501const int npoints = points0.getMat().checkVector(2);502CV_Assert(points1.getMat().checkVector(2) == npoints);503504// find motion505506int ninliers = 0;507Mat_<float> M;508509if (motionModel() != MM_HOMOGRAPHY)510M = estimateGlobalMotionRansac(511points0, points1, motionModel(), ransacParams_, 0, &ninliers);512else513{514std::vector<uchar> mask;515M = findHomography(points0, points1, mask, LMEDS);516for (int i = 0; i < npoints; ++i)517if (mask[i]) ninliers++;518}519520// check if we're confident enough in estimated motion521522if (ok) *ok = true;523if 
(static_cast<float>(ninliers) / npoints < minInlierRatio_)524{525M = Mat::eye(3, 3, CV_32F);526if (ok) *ok = false;527}528529return M;530}531532533MotionEstimatorL1::MotionEstimatorL1(MotionModel model)534: MotionEstimatorBase(model)535{536}537538539// TODO will estimation of all motions as one LP problem be faster?540Mat MotionEstimatorL1::estimate(InputArray points0, InputArray points1, bool *ok)541{542CV_Assert(points0.type() == points1.type());543const int npoints = points0.getMat().checkVector(2);544CV_Assert(points1.getMat().checkVector(2) == npoints);545546#ifndef HAVE_CLP547548CV_UNUSED(ok);549CV_Error(Error::StsError, "The library is built without Clp support");550551#else552553CV_Assert(motionModel() <= MM_AFFINE && motionModel() != MM_RIGID);554555if(npoints <= 0)556return Mat::eye(3, 3, CV_32F);557558// prepare LP problem559560const Point2f *points0_ = points0.getMat().ptr<Point2f>();561const Point2f *points1_ = points1.getMat().ptr<Point2f>();562563int ncols = 6 + 2*npoints;564int nrows = 4*npoints;565566if (motionModel() == MM_SIMILARITY)567nrows += 2;568else if (motionModel() == MM_TRANSLATION_AND_SCALE)569nrows += 3;570else if (motionModel() == MM_TRANSLATION)571nrows += 4;572573rows_.clear();574cols_.clear();575elems_.clear();576obj_.assign(ncols, 0);577collb_.assign(ncols, -INF);578colub_.assign(ncols, INF);579580int c = 6;581582for (int i = 0; i < npoints; ++i, c += 2)583{584obj_[c] = 1;585collb_[c] = 0;586587obj_[c+1] = 1;588collb_[c+1] = 0;589}590591elems_.clear();592rowlb_.assign(nrows, -INF);593rowub_.assign(nrows, INF);594595int r = 0;596Point2f p0, p1;597598for (int i = 0; i < npoints; ++i, r += 4)599{600p0 = points0_[i];601p1 = points1_[i];602603set(r, 0, p0.x); set(r, 1, p0.y); set(r, 2, 1); set(r, 6+2*i, -1);604rowub_[r] = p1.x;605606set(r+1, 3, p0.x); set(r+1, 4, p0.y); set(r+1, 5, 1); set(r+1, 6+2*i+1, -1);607rowub_[r+1] = p1.y;608609set(r+2, 0, p0.x); set(r+2, 1, p0.y); set(r+2, 2, 1); set(r+2, 6+2*i, 1);610rowlb_[r+2] = 
p1.x;611612set(r+3, 3, p0.x); set(r+3, 4, p0.y); set(r+3, 5, 1); set(r+3, 6+2*i+1, 1);613rowlb_[r+3] = p1.y;614}615616if (motionModel() == MM_SIMILARITY)617{618set(r, 0, 1); set(r, 4, -1); rowlb_[r] = rowub_[r] = 0;619set(r+1, 1, 1); set(r+1, 3, 1); rowlb_[r+1] = rowub_[r+1] = 0;620}621else if (motionModel() == MM_TRANSLATION_AND_SCALE)622{623set(r, 0, 1); set(r, 4, -1); rowlb_[r] = rowub_[r] = 0;624set(r+1, 1, 1); rowlb_[r+1] = rowub_[r+1] = 0;625set(r+2, 3, 1); rowlb_[r+2] = rowub_[r+2] = 0;626}627else if (motionModel() == MM_TRANSLATION)628{629set(r, 0, 1); rowlb_[r] = rowub_[r] = 1;630set(r+1, 1, 1); rowlb_[r+1] = rowub_[r+1] = 0;631set(r+2, 3, 1); rowlb_[r+2] = rowub_[r+2] = 0;632set(r+3, 4, 1); rowlb_[r+3] = rowub_[r+3] = 1;633}634635// solve636637CoinPackedMatrix A(true, &rows_[0], &cols_[0], &elems_[0], elems_.size());638A.setDimensions(nrows, ncols);639640ClpSimplex model(false);641model.loadProblem(A, &collb_[0], &colub_[0], &obj_[0], &rowlb_[0], &rowub_[0]);642643ClpDualRowSteepest dualSteep(1);644model.setDualRowPivotAlgorithm(dualSteep);645model.scaling(1);646647model.dual();648649// extract motion650651const double *sol = model.getColSolution();652653Mat_<float> M = Mat::eye(3, 3, CV_32F);654M(0,0) = sol[0];655M(0,1) = sol[1];656M(0,2) = sol[2];657M(1,0) = sol[3];658M(1,1) = sol[4];659M(1,2) = sol[5];660661if (ok) *ok = true;662return M;663#endif664}665666667FromFileMotionReader::FromFileMotionReader(const String &path)668: ImageMotionEstimatorBase(MM_UNKNOWN)669{670file_.open(path.c_str());671CV_Assert(file_.is_open());672}673674675Mat FromFileMotionReader::estimate(const Mat &/*frame0*/, const Mat &/*frame1*/, bool *ok)676{677Mat_<float> M(3, 3);678bool ok_;679file_ >> M(0,0) >> M(0,1) >> M(0,2)680>> M(1,0) >> M(1,1) >> M(1,2)681>> M(2,0) >> M(2,1) >> M(2,2) >> ok_;682if (ok) *ok = ok_;683return M;684}685686687ToFileMotionWriter::ToFileMotionWriter(const String &path, Ptr<ImageMotionEstimatorBase> estimator)688: 
ImageMotionEstimatorBase(estimator->motionModel()), motionEstimator_(estimator)689{690file_.open(path.c_str());691CV_Assert(file_.is_open());692}693694695Mat ToFileMotionWriter::estimate(const Mat &frame0, const Mat &frame1, bool *ok)696{697bool ok_;698Mat_<float> M = motionEstimator_->estimate(frame0, frame1, &ok_);699file_ << M(0,0) << " " << M(0,1) << " " << M(0,2) << " "700<< M(1,0) << " " << M(1,1) << " " << M(1,2) << " "701<< M(2,0) << " " << M(2,1) << " " << M(2,2) << " " << ok_ << std::endl;702if (ok) *ok = ok_;703return M;704}705706707KeypointBasedMotionEstimator::KeypointBasedMotionEstimator(Ptr<MotionEstimatorBase> estimator)708: ImageMotionEstimatorBase(estimator->motionModel()), motionEstimator_(estimator)709{710setDetector(GFTTDetector::create());711setOpticalFlowEstimator(makePtr<SparsePyrLkOptFlowEstimator>());712setOutlierRejector(makePtr<NullOutlierRejector>());713}714715716Mat KeypointBasedMotionEstimator::estimate(const Mat &frame0, const Mat &frame1, bool *ok)717{718InputArray image0 = frame0;719InputArray image1 = frame1;720721return estimate(image0, image1, ok);722}723724Mat KeypointBasedMotionEstimator::estimate(InputArray frame0, InputArray frame1, bool *ok)725{726// find keypoints727detector_->detect(frame0, keypointsPrev_);728if (keypointsPrev_.empty())729return Mat::eye(3, 3, CV_32F);730731// extract points from keypoints732pointsPrev_.resize(keypointsPrev_.size());733for (size_t i = 0; i < keypointsPrev_.size(); ++i)734pointsPrev_[i] = keypointsPrev_[i].pt;735736// find correspondences737optFlowEstimator_->run(frame0, frame1, pointsPrev_, points_, status_, noArray());738739// leave good correspondences only740741pointsPrevGood_.clear(); pointsPrevGood_.reserve(points_.size());742pointsGood_.clear(); pointsGood_.reserve(points_.size());743744for (size_t i = 0; i < points_.size(); ++i)745{746if (status_[i])747{748pointsPrevGood_.push_back(pointsPrev_[i]);749pointsGood_.push_back(points_[i]);750}751}752753// perform outlier 
rejection754755IOutlierRejector *outlRejector = outlierRejector_.get();756if (!dynamic_cast<NullOutlierRejector*>(outlRejector))757{758pointsPrev_.swap(pointsPrevGood_);759points_.swap(pointsGood_);760761outlierRejector_->process(frame0.size(), pointsPrev_, points_, status_);762763pointsPrevGood_.clear();764pointsPrevGood_.reserve(points_.size());765766pointsGood_.clear();767pointsGood_.reserve(points_.size());768769for (size_t i = 0; i < points_.size(); ++i)770{771if (status_[i])772{773pointsPrevGood_.push_back(pointsPrev_[i]);774pointsGood_.push_back(points_[i]);775}776}777}778779// estimate motion780return motionEstimator_->estimate(pointsPrevGood_, pointsGood_, ok);781}782783#if defined(HAVE_OPENCV_CUDAIMGPROC) && defined(HAVE_OPENCV_CUDAOPTFLOW)784785KeypointBasedMotionEstimatorGpu::KeypointBasedMotionEstimatorGpu(Ptr<MotionEstimatorBase> estimator)786: ImageMotionEstimatorBase(estimator->motionModel()), motionEstimator_(estimator)787{788detector_ = cuda::createGoodFeaturesToTrackDetector(CV_8UC1);789790CV_Assert(cuda::getCudaEnabledDeviceCount() > 0);791setOutlierRejector(makePtr<NullOutlierRejector>());792}793794795Mat KeypointBasedMotionEstimatorGpu::estimate(const Mat &frame0, const Mat &frame1, bool *ok)796{797frame0_.upload(frame0);798frame1_.upload(frame1);799return estimate(frame0_, frame1_, ok);800}801802803Mat KeypointBasedMotionEstimatorGpu::estimate(const cuda::GpuMat &frame0, const cuda::GpuMat &frame1, bool *ok)804{805// convert frame to gray if it's color806807cuda::GpuMat grayFrame0;808if (frame0.channels() == 1)809grayFrame0 = frame0;810else811{812cuda::cvtColor(frame0, grayFrame0_, COLOR_BGR2GRAY);813grayFrame0 = grayFrame0_;814}815816// find keypoints817detector_->detect(grayFrame0, pointsPrev_);818819// find correspondences820optFlowEstimator_.run(frame0, frame1, pointsPrev_, points_, status_);821822// leave good correspondences only823cuda::compactPoints(pointsPrev_, points_, 
status_);824825pointsPrev_.download(hostPointsPrev_);826points_.download(hostPoints_);827828// perform outlier rejection829830IOutlierRejector *rejector = outlierRejector_.get();831if (!dynamic_cast<NullOutlierRejector*>(rejector))832{833outlierRejector_->process(frame0.size(), hostPointsPrev_, hostPoints_, rejectionStatus_);834835hostPointsPrevTmp_.clear();836hostPointsPrevTmp_.reserve(hostPoints_.cols);837838hostPointsTmp_.clear();839hostPointsTmp_.reserve(hostPoints_.cols);840841for (int i = 0; i < hostPoints_.cols; ++i)842{843if (rejectionStatus_[i])844{845hostPointsPrevTmp_.push_back(hostPointsPrev_.at<Point2f>(0,i));846hostPointsTmp_.push_back(hostPoints_.at<Point2f>(0,i));847}848}849850hostPointsPrev_ = Mat(1, (int)hostPointsPrevTmp_.size(), CV_32FC2, &hostPointsPrevTmp_[0]);851hostPoints_ = Mat(1, (int)hostPointsTmp_.size(), CV_32FC2, &hostPointsTmp_[0]);852}853854// estimate motion855return motionEstimator_->estimate(hostPointsPrev_, hostPoints_, ok);856}857858#endif // defined(HAVE_OPENCV_CUDAIMGPROC) && defined(HAVE_OPENCV_CUDAOPTFLOW)859860861Mat getMotion(int from, int to, const std::vector<Mat> &motions)862{863CV_INSTRUMENT_REGION();864865Mat M = Mat::eye(3, 3, CV_32F);866if (to > from)867{868for (int i = from; i < to; ++i)869M = at(i, motions) * M;870}871else if (from > to)872{873for (int i = to; i < from; ++i)874M = at(i, motions) * M;875M = M.inv();876}877return M;878}879880} // namespace videostab881} // namespace cv882883884