29 if (inverseHessian_.getNRow() == 0) {
32 P.currentValue().size(), P.currentValue().size(), 0.);
33 for (
size_t i = 0; i < P.currentValue().size(); ++i)
34 inverseHessian_(i, i) = 1.;
39 P.currentValue().size(), 0.);
42 for (
size_t i = 0; i < P.currentValue().size(); ++i)
43 for (
size_t j = 0; j < P.currentValue().size(); ++j)
44 diffGradientWithHessianApplied[i] +=
45 inverseHessian_(i, j) * diffGradient[j];
50 fac = fae = sumdg = sumxi = 0.;
51 for (
size_t i = 0; i < P.currentValue().size(); ++i) {
53 fae += diffGradient[i] * diffGradientWithHessianApplied[i];
54 sumdg += std::pow(diffGradient[i], 2.);
58 if (fac > std::sqrt(1e-8 * sumdg *
64 for (
size_t i = 0; i < P.currentValue().size(); ++i)
66 fad * diffGradientWithHessianApplied[i];
68 for (
size_t i = 0; i < P.currentValue().size(); ++i)
69 for (
size_t j = 0; j < P.currentValue().size(); ++j) {
72 inverseHessian_(i, j) -= fad * diffGradientWithHessianApplied[i] *
73 diffGradientWithHessianApplied[j];
74 inverseHessian_(i, j) += fae * diffGradient[i] * diffGradient[j];
81 for (
size_t i = 0; i < P.currentValue().size(); ++i) {
83 for (
size_t j = 0; j < P.currentValue().size(); ++j)
The Broyden–Fletcher–Goldfarb–Shanno (BFGS) quasi-Newton optimization method.
Line search abstract class.
Abstract optimization problem class.
Dynamically-sized vector class.
LineSearch * lineSearch_
The line-search strategy used by the optimizer.
const DynamicVector< RealType > & searchDirection() const
Returns the current value of the search direction.
const DynamicVector< RealType > & lastGradient()
Returns the last gradient.