// Source: blob/devel/ElmerGUI/netgen/libsrc/opti/linsearch.cpp
/***************************************************************************/1/* */2/* Problem: Liniensuche */3/* */4/* Programmautor: Joachim Sch�berl */5/* Matrikelnummer: 9155284 */6/* */7/* Algorithmus nach: */8/* */9/* Optimierung I, Gfrerer, WS94/95 */10/* Algorithmus 2.1: Liniensuche Problem (ii) */11/* */12/***************************************************************************/13141516#include <mystdlib.h>1718#include <myadt.hpp> // min, max, sqr1920#include <linalg.hpp>21#include "opti.hpp"222324namespace netgen25{26const double eps0 = 1E-15;2728// Liniensuche293031double MinFunction :: Func (const Vector & /* x */) const32{33cerr << "Func of MinFunction called" << endl;34return 0;35}3637void MinFunction :: Grad (const Vector & /* x */, Vector & /* g */) const38{39cerr << "Grad of MinFunction called" << endl;40}4142double MinFunction :: FuncGrad (const Vector & x, Vector & g) const43{44cerr << "Grad of MinFunction called" << endl;45return 0;46/*47int n = x.Size();4849static Vector xr;50static Vector xl;51xr.SetSize(n);52xl.SetSize(n);5354double eps = 1e-6;55double fl, fr;5657for (int i = 1; i <= n; i++)58{59xr.Set (1, x);60xl.Set (1, x);6162xr.Elem(i) += eps;63fr = Func (xr);6465xl.Elem(i) -= eps;66fl = Func (xl);6768g.Elem(i) = (fr - fl) / (2 * eps);69}7071double f = Func(x);72// (*testout) << "f = " << f << " grad = " << g << endl;73return f;74*/75}767778double MinFunction :: FuncDeriv (const Vector & x, const Vector & dir, double & deriv) const79{80Vector g(x.Size());81double f = FuncGrad (x, g);82deriv = (g * dir);8384// (*testout) << "g = " << g << ", dir = " << dir << ", deriv = " << deriv << endl;85return f;86}8788void MinFunction :: ApproximateHesse (const Vector & x,89DenseMatrix & hesse) const90{91int n = x.Size();92int i, j;9394static Vector hx;95hx.SetSize(n);9697double eps = 1e-6;98double f, f11, f12, f21, f22;99100for (i = 1; i <= n; i++)101{102for (j = 1; j < i; j++)103{104hx = x;105hx.Elem(i) = x.Get(i) + eps;106hx.Elem(j) = x.Get(j) + 
eps;107f11 = Func(hx);108hx.Elem(i) = x.Get(i) + eps;109hx.Elem(j) = x.Get(j) - eps;110f12 = Func(hx);111hx.Elem(i) = x.Get(i) - eps;112hx.Elem(j) = x.Get(j) + eps;113f21 = Func(hx);114hx.Elem(i) = x.Get(i) - eps;115hx.Elem(j) = x.Get(j) - eps;116f22 = Func(hx);117118hesse.Elem(i, j) = hesse.Elem(j, i) =119(f11 + f22 - f12 - f21) / (2 * eps * eps);120}121122hx = x;123f = Func(x);124hx.Elem(i) = x.Get(i) + eps;125f11 = Func(hx);126hx.Elem(i) = x.Get(i) - eps;127f22 = Func(hx);128129hesse.Elem(i, i) = (f11 + f22 - 2 * f) / (eps * eps);130}131// (*testout) << "hesse = " << hesse << endl;132}133134135136137138139140/// Line search, modified Mangasarien conditions141void lines (Vector & x, // i: initial point of line-search142Vector & xneu, // o: solution, if successful143Vector & p, // i: search direction144double & f, // i: function-value at x145// o: function-value at xneu, iff ifail = 0146Vector & g, // i: gradient at x147// o: gradient at xneu, iff ifail = 0148const MinFunction & fun, // function to minimize149const OptiParameters & par,150double & alphahat, // i: initial value for alpha_hat151// o: solution alpha iff ifail = 0152double fmin, // i: lower bound for f153double mu1, // i: Parameter mu_1 of Alg.2.1154double sigma, // i: Parameter sigma of Alg.2.1155double xi1, // i: Parameter xi_1 of Alg.2.1156double xi2, // i: Parameter xi_1 of Alg.2.1157double tau, // i: Parameter tau of Alg.2.1158double tau1, // i: Parameter tau_1 of Alg.2.1159double tau2, // i: Parameter tau_2 of Alg.2.1160int & ifail) // o: 0 on success161// -1 bei termination because lower limit fmin162// 1 bei illegal termination due to different reasons163164{165double phi0, phi0prime, phi1, phi1prime, phihatprime;166double alpha1, alpha2, alphaincr, c;167char flag = 1;168long it;169170alpha1 = 0;171alpha2 = 1e50;172phi0 = phi1 = f;173174phi0prime = g * p;175176if (phi0prime > 0)177{178ifail = 1;179return;180}181182ifail = 1; // Markus183184phi1prime = phi0prime;185186// (*testout) << 
"phi0prime = " << phi0prime << endl;187188// it = 100000l;189it = 0;190191while (it++ <= par.maxit_linsearch)192{193194xneu.Set2 (1, x, alphahat, p);195196197// f = fun.FuncGrad (xneu, g);198// f = fun.Func (xneu);199f = fun.FuncDeriv (xneu, p, phihatprime);200201// (*testout) << "lines, f = " << f << " phip = " << phihatprime << endl;202203if (f < fmin)204{205ifail = -1;206break;207}208209210if (alpha2 - alpha1 < eps0 * alpha2)211{212ifail = 0;213break;214}215216// (*testout) << "i = " << it << " al = " << alphahat << " f = " << f << " fprime " << phihatprime << endl;;217218if (f - phi0 > mu1 * alphahat * phi1prime + eps0 * fabs (phi0))219220{221222flag = 0;223alpha2 = alphahat;224225c =226(f - phi1 - phi1prime * (alphahat-alpha1)) /227sqr (alphahat-alpha1);228229alphahat = alpha1 - 0.5 * phi1prime / c;230231if (alphahat > alpha2)232alphahat = alpha1 + 1/(4*c) *233( (sigma+mu1) * phi0prime - 2*phi1prime234+ sqrt (sqr(phi1prime - mu1 * phi0prime) -2354 * (phi1 - phi0 - mu1 * alpha1 * phi0prime) * c));236237alphahat = max2 (alphahat, alpha1 + tau * (alpha2 - alpha1));238alphahat = min2 (alphahat, alpha2 - tau * (alpha2 - alpha1));239240// (*testout) << " if-branch" << endl;241242}243244else245246{247/*248f = fun.FuncGrad (xneu, g);249phihatprime = g * p;250*/251f = fun.FuncDeriv (xneu, p, phihatprime);252253if (phihatprime < sigma * phi0prime * (1 + eps0))254255{256if (phi1prime < phihatprime)257// Approximationsfunktion ist konvex258259alphaincr = (alphahat - alpha1) * phihatprime /260(phi1prime - phihatprime);261262else263alphaincr = 1e99; // MAXDOUBLE;264265if (flag)266{267alphaincr = max2 (alphaincr, xi1 * (alphahat-alpha1));268alphaincr = min2 (alphaincr, xi2 * (alphahat-alpha1));269}270else271{272alphaincr = max2 (alphaincr, tau1 * (alpha2 - alphahat));273alphaincr = min2 (alphaincr, tau2 * (alpha2 - alphahat));274}275276alpha1 = alphahat;277alphahat += alphaincr;278phi1 = f;279phi1prime = phihatprime;280}281282else283284{285ifail = 0; // Erfolg 
!!286break;287}288289// (*testout) << " else, " << endl;290291}292293}294295// (*testout) << "linsearch: it = " << it << " ifail = " << ifail << endl;296297fun.FuncGrad (xneu, g);298299300if (it < 0)301ifail = 1;302303// (*testout) << "fail = " << ifail << endl;304}305306307308309310311312313314315316317318319320321322323324void SteepestDescent (Vector & x, const MinFunction & fun,325const OptiParameters & par)326{327int it, n = x.Size();328Vector xnew(n), p(n), g(n), g2(n);329double val, alphahat;330int fail;331332val = fun.FuncGrad(x, g);333334alphahat = 1;335// testout << "f = ";336for (it = 0; it < 10; it++)337{338// testout << val << " ";339340// p = -g;341p.Set (-1, g);342343lines (x, xnew, p, val, g, fun, par, alphahat, -1e5,3440.1, 0.1, 1, 10, 0.1, 0.1, 0.6, fail);345346x = xnew;347}348// testout << endl;349}350}351352353