OpenMD 3.1
Molecular Dynamics in the Open
BFGS.cpp
/* -*- mode: c++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- */

/*
Copyright (C) 2009 Frédéric Degraeve

This file is part of QuantLib, a free-software/open-source library
for financial quantitative analysts and developers - http://quantlib.org/

QuantLib is free software: you can redistribute it and/or modify it
under the terms of the QuantLib license. You should have received a
copy of the license along with this program; if not, please email
<quantlib-dev@lists.sf.net>. The license is also available online at
<http://quantlib.org/license.shtml>.

This program is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the license for more details.
*/
#include "optimization/BFGS.hpp"

#include <cmath>  // for std::pow and std::sqrt below

namespace QuantLib {

  DynamicVector<RealType> BFGS::getUpdatedDirection(
      const Problem& P, RealType, const DynamicVector<RealType>& oldGradient) {
    if (inverseHessian_.getNRow() == 0) {
      // first call: initialize the inverse Hessian approximation to the identity
      inverseHessian_ = DynamicRectMatrix<RealType>(
          P.currentValue().size(), P.currentValue().size(), 0.);
      for (size_t i = 0; i < P.currentValue().size(); ++i)
        inverseHessian_(i, i) = 1.;
    }

    // y = g_{k+1} - g_k (gradient difference) and H*y
    DynamicVector<RealType> diffGradient;
    DynamicVector<RealType> diffGradientWithHessianApplied(
        P.currentValue().size(), 0.);

    diffGradient = lineSearch_->lastGradient() - oldGradient;
    for (size_t i = 0; i < P.currentValue().size(); ++i)
      for (size_t j = 0; j < P.currentValue().size(); ++j)
        diffGradientWithHessianApplied[i] +=
            inverseHessian_(i, j) * diffGradient[j];

    // fac = y.s, fae = y.(H y); sumdg = |y|^2 and sumxi = |s|^2 are used
    // only in the positivity test below
    double fac, fae, fad;
    double sumdg, sumxi;

    fac = fae = sumdg = sumxi = 0.;
    for (size_t i = 0; i < P.currentValue().size(); ++i) {
      fac += diffGradient[i] * lineSearch_->searchDirection()[i];
      fae += diffGradient[i] * diffGradientWithHessianApplied[i];
      sumdg += std::pow(diffGradient[i], 2.);
      sumxi += std::pow(lineSearch_->searchDirection()[i], 2.);
    }

    // skip the update unless fac = y.s is sufficiently positive
    if (fac > std::sqrt(1e-8 * sumdg * sumxi)) {
      fac = 1.0 / fac;
      fad = 1.0 / fae;

      // reuse diffGradient to hold u = s/(y.s) - (H y)/(y.H y)
      for (size_t i = 0; i < P.currentValue().size(); ++i)
        diffGradient[i] = fac * lineSearch_->searchDirection()[i] -
                          fad * diffGradientWithHessianApplied[i];

      // BFGS update: H += s s^T/(y.s) - (H y)(H y)^T/(y.H y) + (y.H y) u u^T
      for (size_t i = 0; i < P.currentValue().size(); ++i)
        for (size_t j = 0; j < P.currentValue().size(); ++j) {
          inverseHessian_(i, j) += fac * lineSearch_->searchDirection()[i] *
                                   lineSearch_->searchDirection()[j];
          inverseHessian_(i, j) -= fad * diffGradientWithHessianApplied[i] *
                                   diffGradientWithHessianApplied[j];
          inverseHessian_(i, j) += fae * diffGradient[i] * diffGradient[j];
        }
    }
    // else
    //   throw "BFGS: FAC not sufficiently positive";

    // new search direction: d = -H * g_{k+1}
    DynamicVector<RealType> direction(P.currentValue().size());
    for (size_t i = 0; i < P.currentValue().size(); ++i) {
      direction[i] = 0.0;
      for (size_t j = 0; j < P.currentValue().size(); ++j)
        direction[i] -= inverseHessian_(i, j) * lineSearch_->lastGradient()[j];
    }

    return direction;
  }

}  // namespace QuantLib
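
For reference, the guarded double loop above implements the standard BFGS inverse-Hessian update. In the notation below (introduced here, not taken from the source), s is the previous search direction returned by lineSearch_->searchDirection(), y is the gradient difference held in diffGradient before it is overwritten, and H is inverseHessian_:

\[
H \leftarrow H
  + \frac{s\,s^{\top}}{s^{\top}y}
  - \frac{(H y)(H y)^{\top}}{y^{\top} H y}
  + \left(y^{\top} H y\right) u\,u^{\top},
\qquad
u = \frac{s}{s^{\top}y} - \frac{H y}{y^{\top} H y}.
\]

In the code, fac and fad hold 1/(s^T y) and 1/(y^T H y) after the reciprocals are taken, fae holds y^T H y, and diffGradient is reused to store u. Skipping the update when s^T y is not sufficiently positive (the curvature condition) helps keep H positive definite.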
Referenced declarations:
  BFGS.hpp: Broyden-Fletcher-Goldfarb-Shanno optimization method.
  LineSearch: line search abstract class.
  Problem: abstract optimization problem class.
  DynamicRectMatrix: rectangular matrix class.
  DynamicVector: dynamically-sized vector class.
  const DynamicVector<RealType>& LineSearch::searchDirection() const: current value of the search direction.
  const DynamicVector<RealType>& LineSearch::lastGradient(): returns the last gradient.
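
As a self-contained sketch of the same numerics, the function below applies the update to a plain std::vector matrix. bfgsUpdate, Vec, and Mat are illustrative names introduced here; they are not part of OpenMD or QuantLib.

#include <cmath>
#include <cstddef>
#include <vector>

using Vec = std::vector<double>;
using Mat = std::vector<Vec>;  // row-major square matrix

// Apply H += s s^T/(s.y) - (H y)(H y)^T/(y.H y) + (y.H y) u u^T,
// skipping the update when s.y is not sufficiently positive.
void bfgsUpdate(Mat& H, const Vec& s /*step*/, const Vec& y /*gradient diff*/) {
  const std::size_t n = s.size();

  Vec Hy(n, 0.0);  // H * y
  for (std::size_t i = 0; i < n; ++i)
    for (std::size_t j = 0; j < n; ++j)
      Hy[i] += H[i][j] * y[j];

  double sy = 0.0, yHy = 0.0, sumdg = 0.0, sumxi = 0.0;
  for (std::size_t i = 0; i < n; ++i) {
    sy    += y[i] * s[i];
    yHy   += y[i] * Hy[i];
    sumdg += y[i] * y[i];
    sumxi += s[i] * s[i];
  }
  if (sy <= std::sqrt(1e-8 * sumdg * sumxi)) return;  // curvature test failed

  const double fac = 1.0 / sy;
  const double fad = 1.0 / yHy;

  Vec u(n);  // u = s/(s.y) - (H y)/(y.H y)
  for (std::size_t i = 0; i < n; ++i) u[i] = fac * s[i] - fad * Hy[i];

  for (std::size_t i = 0; i < n; ++i)
    for (std::size_t j = 0; j < n; ++j)
      H[i][j] += fac * s[i] * s[j] - fad * Hy[i] * Hy[j] + yHy * u[i] * u[j];
}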