bpp-core3  3.0.0
ConjugateGradientMultiDimensions.cpp
// SPDX-FileCopyrightText: The Bio++ Development Group
//
// SPDX-License-Identifier: CECILL-2.1

#include "../VectorTools.h"
#include "ConjugateGradientMultiDimensions.h"
#include "OneDimensionOptimizationTools.h"

using namespace bpp;
/******************************************************************************/

ConjugateGradientMultiDimensions::ConjugateGradientMultiDimensions(std::shared_ptr<FirstOrderDerivable> function) :
  AbstractOptimizer(function), optimizer_(function),
  xi_(), h_(), g_(), f1dim_(new DirectionFunction(function))
{
  setDefaultStopCondition_(make_shared<FunctionStopCondition>(this));
  setStopCondition(getDefaultStopCondition());
}
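A note on the default stop condition, stated here as my reading of the FunctionStopCondition class rather than as something documented in this file: it monitors the change of the function value between successive iterations, so the optimizer is considered converged once

|f(x_k) - f(x_{k-1})| < \epsilon

where \epsilon is the tolerance returned by getStopCondition()->getTolerance().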
/******************************************************************************/

void ConjugateGradientMultiDimensions::doInit(const ParameterList& params)
{
  size_t nbParams = params.size();
  g_.resize(nbParams);
  h_.resize(nbParams);
  xi_.resize(nbParams);
  firstOrderDerivableFunction().enableFirstOrderDerivatives(true);
  function().setParameters(params);
  getGradient(xi_);

  for (size_t i = 0; i < nbParams; ++i)
  {
    g_[i] = -xi_[i];
    xi_[i] = h_[i] = g_[i];
  }
}
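A short summary of the bookkeeping above (my reading of the code, not taken from the Bio++ documentation): after doInit, xi_ holds the current gradient and search direction, g_ the negated gradient, and h_ the conjugate direction. The very first direction is therefore plain steepest descent,

g_0 = h_0 = -\nabla f(x_0).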
/******************************************************************************/

double ConjugateGradientMultiDimensions::doStep()
{
  double gg, gam, f, dgg;
  size_t n = getParameters().size();
  // Loop over iterations.
  firstOrderDerivableFunction().enableFirstOrderDerivatives(false);
  nbEval_ += OneDimensionOptimizationTools::lineMinimization(f1dim_,
      getParameters_(), xi_, getStopCondition()->getTolerance(),
      0, 0, getVerbose() > 0 ? getVerbose() - 1 : 0);

  firstOrderDerivableFunction().enableFirstOrderDerivatives(true);
  f = getFunction()->f(getParameters());

  if (tolIsReached_)
  {
    return f;
  }
  getGradient(xi_);

  dgg = gg = 0.0;
  for (unsigned j = 0; j < n; j++)
  {
    gg += g_[j] * g_[j];
    /* dgg += xi[j] * xi[j]; */ // This statement for Fletcher-Reeves.
    dgg += (xi_[j] + g_[j]) * xi_[j]; // This statement for Polak-Ribiere.
  }

  if (gg == 0.0)
  {
    // Unlikely. If gradient is exactly zero then we are already done.
    return f;
  }
  gam = dgg / gg;

  if (!(std::isnan(gam) || std::isinf(gam)))
  {
    for (unsigned int j = 0; j < n; j++)
    {
      g_[j] = -xi_[j];
      xi_[j] = h_[j] = g_[j] + gam * h_[j];
    }
  }

  return f;
}
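The coefficient gam computed above is the Polak-Ribiere choice (the commented-out statement would give Fletcher-Reeves instead). In the notation of the code, g_ holds -\nabla f(x_k) from the previous step and xi_ holds the freshly computed \nabla f(x_{k+1}), so the two variants correspond to

\gamma_{PR} = \frac{\nabla f(x_{k+1})^{\top}\bigl(\nabla f(x_{k+1}) - \nabla f(x_k)\bigr)}{\nabla f(x_k)^{\top}\nabla f(x_k)},
\qquad
\gamma_{FR} = \frac{\nabla f(x_{k+1})^{\top}\nabla f(x_{k+1})}{\nabla f(x_k)^{\top}\nabla f(x_k)},

and the final loop updates the search direction as h_{k+1} = -\nabla f(x_{k+1}) + \gamma h_k. This is my reading of the code, not a statement from the Bio++ documentation.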
/******************************************************************************/

void ConjugateGradientMultiDimensions::getGradient(std::vector<double>& gradient) const
{
  for (size_t i = 0; i < gradient.size(); ++i)
  {
    gradient[i] = firstOrderDerivableFunction().getFirstOrderDerivative(getParameters()[i].getName());
  }
}

/******************************************************************************/
Referenced symbols (cross-references for the listing above):
- virtual void enableFirstOrderDerivatives(bool yn) = 0: Tell if derivatives must be computed.
- std::shared_ptr<OptimizationStopCondition> getStopCondition() override: Get the stop condition of the optimization algorithm.
- size_t size() const (ParameterList.h:56)
- void setDefaultStopCondition_(std::shared_ptr<OptimizationStopCondition> osc)
- ParameterList& getParameters_()
- const ParameterList& getParameters() const override
- double doStep(): This function is called by the step() method and contains all calculations.
- ParameterList: The parameter list object (ParameterList.h:27).
- bool tolIsReached_: Tell if the tolerance level has been reached.
- virtual double getFirstOrderDerivative(const std::string& variable) const = 0: Get the derivative of the function at the current point.
- unsigned int getVerbose() const override: Get the verbose level.
- ConjugateGradientMultiDimensions(std::shared_ptr<FirstOrderDerivable> function)
- void doInit(const ParameterList& params): This function is called by the init() method and contains all calculations.
- unsigned int nbEval_: The current number of function evaluations achieved.
- AbstractOptimizer: Partial implementation of the Optimizer interface.
- static unsigned int lineMinimization(std::shared_ptr<DirectionFunction> f1dim, ParameterList& parameters, std::vector<double>& xi, double tolerance, std::shared_ptr<OutputStream> profiler = nullptr, std::shared_ptr<OutputStream> messenger = nullptr, unsigned int verbose = 2)
- const FirstOrderDerivable& firstOrderDerivableFunction() const
- std::shared_ptr<DirectionFunction> f1dim_
- std::shared_ptr<const FunctionInterface> getFunction() const override: Get the current function being optimized.
- std::shared_ptr<OptimizationStopCondition> getDefaultStopCondition() override: Get the default stop condition of the optimization algorithm.
- void getGradient(std::vector<double>& gradient) const
- void setStopCondition(std::shared_ptr<OptimizationStopCondition> stopCondition) override: Set the stop condition of the optimization algorithm.