// Path: blob/master/modules/core/src/conjugate_gradient.cpp
/*M///////////////////////////////////////////////////////////////////////////////////////
//
//  IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
//
//  By downloading, copying, installing or using the software you agree to this license.
//  If you do not agree to this license, do not download, install,
//  copy or use the software.
//
//
//                           License Agreement
//                For Open Source Computer Vision Library
//
// Copyright (C) 2013, OpenCV Foundation, all rights reserved.
// Third party copyrights are property of their respective owners.
//
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
//
//   * Redistribution's of source code must retain the above copyright notice,
//     this list of conditions and the following disclaimer.
//
//   * Redistribution's in binary form must reproduce the above copyright notice,
//     this list of conditions and the following disclaimer in the documentation
//     and/or other materials provided with the distribution.
//
//   * The name of the copyright holders may not be used to endorse or promote products
//     derived from this software without specific prior written permission.
//
// This software is provided by the copyright holders and contributors "as is" and
// any express or implied warranties, including, but not limited to, the implied
// warranties of merchantability and fitness for a particular purpose are disclaimed.
// In no event shall the OpenCV Foundation or contributors be liable for any direct,
// indirect, incidental, special, exemplary, or consequential damages
// (including, but not limited to, procurement of substitute goods or services;
// loss of use, data, or profits; or business interruption) however caused
// and on any theory of liability, whether in contract, strict liability,
// or tort (including negligence or otherwise) arising in any way out of
// the use of this software, even if
advised of the possibility of such damage.38//39//M*/4041#include "precomp.hpp"4243#define dprintf(x)44#define print_matrix(x)4546namespace cv47{48double MinProblemSolver::Function::getGradientEps() const { return 1e-3; }49void MinProblemSolver::Function::getGradient(const double* x, double* grad)50{51double eps = getGradientEps();52int i, n = getDims();53AutoBuffer<double> x_buf(n);54double* x_ = x_buf.data();55for( i = 0; i < n; i++ )56x_[i] = x[i];57for( i = 0; i < n; i++ )58{59x_[i] = x[i] + eps;60double y1 = calc(x_);61x_[i] = x[i] - eps;62double y0 = calc(x_);63grad[i] = (y1 - y0)/(2*eps);64x_[i] = x[i];65}66}6768#define SEC_METHOD_ITERATIONS 469#define INITIAL_SEC_METHOD_SIGMA 0.170class ConjGradSolverImpl CV_FINAL : public ConjGradSolver71{72public:73Ptr<Function> getFunction() const CV_OVERRIDE;74void setFunction(const Ptr<Function>& f) CV_OVERRIDE;75TermCriteria getTermCriteria() const CV_OVERRIDE;76ConjGradSolverImpl();77void setTermCriteria(const TermCriteria& termcrit) CV_OVERRIDE;78double minimize(InputOutputArray x) CV_OVERRIDE;79protected:80Ptr<MinProblemSolver::Function> _Function;81TermCriteria _termcrit;82Mat_<double> d,r,buf_x,r_old;83Mat_<double> minimizeOnTheLine_buf1,minimizeOnTheLine_buf2;84private:85static void minimizeOnTheLine(Ptr<MinProblemSolver::Function> _f,Mat_<double>& x,const Mat_<double>& d,Mat_<double>& buf1,Mat_<double>& buf2);86};8788void ConjGradSolverImpl::minimizeOnTheLine(Ptr<MinProblemSolver::Function> _f,Mat_<double>& x,const Mat_<double>& d,Mat_<double>& buf1,89Mat_<double>& buf2){90double sigma=INITIAL_SEC_METHOD_SIGMA;91buf1=0.0;92buf2=0.0;9394dprintf(("before minimizeOnTheLine\n"));95dprintf(("x:\n"));96print_matrix(x);97dprintf(("d:\n"));98print_matrix(d);99100for(int 
i=0;i<SEC_METHOD_ITERATIONS;i++){101_f->getGradient((double*)x.data,(double*)buf1.data);102dprintf(("buf1:\n"));103print_matrix(buf1);104x=x+sigma*d;105_f->getGradient((double*)x.data,(double*)buf2.data);106dprintf(("buf2:\n"));107print_matrix(buf2);108double d1=buf1.dot(d), d2=buf2.dot(d);109if((d1-d2)==0){110break;111}112double alpha=-sigma*d1/(d2-d1);113dprintf(("(buf2.dot(d)-buf1.dot(d))=%f\nalpha=%f\n",(buf2.dot(d)-buf1.dot(d)),alpha));114x=x+(alpha-sigma)*d;115sigma=-alpha;116}117118dprintf(("after minimizeOnTheLine\n"));119print_matrix(x);120}121122double ConjGradSolverImpl::minimize(InputOutputArray x){123CV_Assert(_Function.empty()==false);124dprintf(("termcrit:\n\ttype: %d\n\tmaxCount: %d\n\tEPS: %g\n",_termcrit.type,_termcrit.maxCount,_termcrit.epsilon));125126Mat x_mat=x.getMat();127CV_Assert(MIN(x_mat.rows,x_mat.cols)==1);128int ndim=MAX(x_mat.rows,x_mat.cols);129CV_Assert(x_mat.type()==CV_64FC1);130131if(d.cols!=ndim){132d.create(1,ndim);133r.create(1,ndim);134r_old.create(1,ndim);135minimizeOnTheLine_buf1.create(1,ndim);136minimizeOnTheLine_buf2.create(1,ndim);137}138139Mat_<double> proxy_x;140if(x_mat.rows>1){141buf_x.create(1,ndim);142Mat_<double> proxy(ndim,1,buf_x.ptr<double>());143x_mat.copyTo(proxy);144proxy_x=buf_x;145}else{146proxy_x=x_mat;147}148_Function->getGradient(proxy_x.ptr<double>(),d.ptr<double>());149d*=-1.0;150d.copyTo(r);151152//here everything goes. 
check that everything is set properly153dprintf(("proxy_x\n"));print_matrix(proxy_x);154dprintf(("d first time\n"));print_matrix(d);155dprintf(("r\n"));print_matrix(r);156157for(int count=0;count<_termcrit.maxCount;count++){158minimizeOnTheLine(_Function,proxy_x,d,minimizeOnTheLine_buf1,minimizeOnTheLine_buf2);159r.copyTo(r_old);160_Function->getGradient(proxy_x.ptr<double>(),r.ptr<double>());161r*=-1.0;162double r_norm_sq=norm(r);163if(_termcrit.type==(TermCriteria::MAX_ITER+TermCriteria::EPS) && r_norm_sq<_termcrit.epsilon){164break;165}166r_norm_sq=r_norm_sq*r_norm_sq;167double beta=MAX(0.0,(r_norm_sq-r.dot(r_old))/r_norm_sq);168d=r+beta*d;169}170171172173if(x_mat.rows>1){174Mat(ndim, 1, CV_64F, proxy_x.ptr<double>()).copyTo(x);175}176return _Function->calc(proxy_x.ptr<double>());177}178179ConjGradSolverImpl::ConjGradSolverImpl(){180_Function=Ptr<Function>();181}182Ptr<MinProblemSolver::Function> ConjGradSolverImpl::getFunction()const{183return _Function;184}185void ConjGradSolverImpl::setFunction(const Ptr<Function>& f){186_Function=f;187}188TermCriteria ConjGradSolverImpl::getTermCriteria()const{189return _termcrit;190}191void ConjGradSolverImpl::setTermCriteria(const TermCriteria& termcrit){192CV_Assert((termcrit.type==(TermCriteria::MAX_ITER+TermCriteria::EPS) && termcrit.epsilon>0 && termcrit.maxCount>0) ||193((termcrit.type==TermCriteria::MAX_ITER) && termcrit.maxCount>0));194_termcrit=termcrit;195}196// both minRange & minError are specified by termcrit.epsilon; In addition, user may specify the number of iterations that the algorithm does.197Ptr<ConjGradSolver> ConjGradSolver::create(const Ptr<MinProblemSolver::Function>& f, TermCriteria termcrit){198Ptr<ConjGradSolver> CG = makePtr<ConjGradSolverImpl>();199CG->setFunction(f);200CG->setTermCriteria(termcrit);201return CG;202}203}204205206