Shark machine learning library
About Shark
News!
Contribute
Credits and copyright
Downloads
Getting Started
Installation
Using the docs
Documentation
Tutorials
Quick references
Class list
Global functions
FAQ
Showroom
include
shark
Algorithms
GradientDescent
CG.h
Go to the documentation of this file.
1
//===========================================================================
2
/*!
3
*
4
*
5
* \brief CG
6
*
7
* Conjugate-gradient method for unconstrained optimization.
8
*
9
*
10
*
11
* \author O. Krause
12
* \date 2010
13
*
14
*
15
* \par Copyright 1995-2017 Shark Development Team
16
*
17
* <BR><HR>
18
* This file is part of Shark.
19
* <http://shark-ml.org/>
20
*
21
* Shark is free software: you can redistribute it and/or modify
22
* it under the terms of the GNU Lesser General Public License as published
23
* by the Free Software Foundation, either version 3 of the License, or
24
* (at your option) any later version.
25
*
26
* Shark is distributed in the hope that it will be useful,
27
* but WITHOUT ANY WARRANTY; without even the implied warranty of
28
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
29
* GNU Lesser General Public License for more details.
30
*
31
* You should have received a copy of the GNU Lesser General Public License
32
* along with Shark. If not, see <http://www.gnu.org/licenses/>.
33
*
34
*/
35
//===========================================================================
36
37
#ifndef SHARK_ML_OPTIMIZER_CG_H
38
#define SHARK_ML_OPTIMIZER_CG_H
39
40
#include <
shark/Core/DLLSupport.h
>
41
#include <
shark/Algorithms/GradientDescent/AbstractLineSearchOptimizer.h
>
42
43
namespace
shark
{
44
/// \brief Conjugate-gradient method for unconstrained optimization
///
/// The next CG search direction p_{k+1} is computed using the current gradient g_k by
/// \f$ p_{k+1} = \beta p_k - g_k \f$
/// where beta can be computed using different formulas
/// well known is the Fletcher - Reeves method:
/// \f$ \beta = ||g_k||^2/ ||g_{k-1}||^2 \f$
/// we use
/// \f$ \beta = ||g_k||^2 /<p_k,g_k-g_{k-1}> \f$
/// which is formula 5.49 in Nocedal, Wright - Numerical Optimization.
/// This formula has better numerical properties than Fletcher-Reeves for non-quadratic functions
/// while ensuring a descent direction.
///
/// We implement restarting to ensure quadratic convergence near the optimum as well as numerical stability
class CG : public AbstractLineSearchOptimizer{
protected:
	/// \brief Initializes the internal model state before a new optimization run.
	SHARK_EXPORT_SYMBOL void initModel();

	/// \brief Computes the next conjugate search direction from the current gradient.
	///
	/// \param objectiveFunction the objective function being optimized (read-only)
	SHARK_EXPORT_SYMBOL void computeSearchDirection(ObjectiveFunctionType const& objectiveFunction);
public:
	/// \brief Returns the name of this optimizer, "CG".
	std::string name() const
	{ return "CG"; }

	//from ISerializable
	/// \brief Restores the optimizer state from an archive.
	SHARK_EXPORT_SYMBOL void read( InArchive & archive );
	/// \brief Writes the optimizer state to an archive.
	SHARK_EXPORT_SYMBOL void write( OutArchive & archive ) const;
protected:
	// Iteration counter — presumably tracks steps since the last CG restart
	// (see class note on restarting); confirm against the .cpp implementation.
	unsigned m_count;
};
72
73
}
74
75
#endif