TrainingError.h
include/shark/Algorithms/StoppingCriteria/TrainingError.h
/*!
 *
 * \brief Stopping criterion which stops when the training error seems to converge
 *
 * \author O. Krause
 * \date 2010
 *
 * \par Copyright 1995-2017 Shark Development Team
 *
 * <BR><HR>
 * This file is part of Shark.
 * <http://shark-ml.org/>
 *
 * Shark is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Lesser General Public License as published
 * by the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * Shark is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with Shark. If not, see <http://www.gnu.org/licenses/>.
 *
 */
#ifndef SHARK_TRAINERS_STOPPINGCRITERA_TRAININGERROR_H
#define SHARK_TRAINERS_STOPPINGCRITERA_TRAININGERROR_H
#include "
AbstractStoppingCriterion.h
"
37
#include <
shark/Core/ResultSets.h
>
38
#include <queue>
39
#include <numeric>
40
#include <
shark/LinAlg/Base.h
>
41
namespace
shark
{
42
43
/// \brief This stopping criterion tracks the improvement of the training error over an interval of iterations.
///
/// If at some point the difference between the error values at the beginning and the end of the interval
/// is smaller than a given threshold, this stopping criterion assumes convergence and stops.
/// Of course, this may be misleading when the algorithm temporarily gets stuck at a saddle point of the error surface.
/// The criterion assumes that the algorithm is minimizing. For details, see:
///
/// Lutz Prechelt. Early Stopping - but when? In Genevieve B. Orr and
/// Klaus-Robert Müller: Neural Networks: Tricks of the Trade, volume
/// 1524 of LNCS, Springer, 1997.
///
template<class PointType = RealVector>
class TrainingError: public AbstractStoppingCriterion< SingleObjectiveResultSet<PointType> >{
public:
	/// constructs the TrainingError stopping criterion
	/// @param intervalSize size of the interval over which the progress is monitored
	/// @param minDifference minimum difference between start and end of the interval allowed before training stops
	TrainingError(size_t intervalSize, double minDifference){
		m_minDifference = minDifference;
		m_intervalSize = intervalSize;
		reset();
	}
	/// returns true if training should stop
	bool stop(const SingleObjectiveResultSet<PointType>& set){
		m_interval.pop();
		m_interval.push(set.value);
		return (m_interval.front() - set.value) >= 0
		    && (m_interval.front() - set.value) < m_minDifference;
	}
	/// resets the internal state
	void reset(){
		m_interval = std::queue<double>();
		for(size_t i = 0; i != m_intervalSize; ++i){
			m_interval.push(std::numeric_limits<double>::max());
		}
	}
protected:
	/// monitored training interval
	std::queue<double> m_interval;
	/// minimum difference allowed
	double m_minDifference;
	/// size of the interval
	size_t m_intervalSize;
};
}
#endif
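
As a usage illustration (not part of the header above), the sketch below feeds a hand-made sequence of training-error values into TrainingError<> and prints the iteration at which the criterion fires. The error values are invented, and building a SingleObjectiveResultSet by hand (default construction plus setting its public value member from shark/Core/ResultSets.h) is assumed to work only for this demonstration; in real training code the result set would typically be the optimizer's current solution after each step.

// Minimal usage sketch for TrainingError (illustrative values only).
#include <shark/Algorithms/StoppingCriteria/TrainingError.h>
#include <cstddef>
#include <iostream>
#include <vector>

int main(){
	using namespace shark;

	// Stop once the error improves by less than 1e-4 over the last 5 iterations.
	TrainingError<> criterion(5, 1e-4);

	// Invented training errors: rapid progress at first, then a plateau.
	std::vector<double> errors = {
		1.0, 0.5, 0.25, 0.2, 0.19, 0.18, 0.17999, 0.179985, 0.179982, 0.17998
	};

	for(std::size_t t = 0; t != errors.size(); ++t){
		// Only the objective value matters to this criterion; the point stays default.
		SingleObjectiveResultSet<RealVector> result; // assumes default constructibility
		result.value = errors[t];
		if(criterion.stop(result)){
			std::cout << "stopping criterion fired at iteration " << t << std::endl;
			break;
		}
	}
}

In an actual training run the same check would sit inside the optimization loop, calling criterion.stop(optimizer.solution()) after every optimizer step, which matches the init/step/solution interface of Shark's single-objective optimizers.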