NoisyErrorFunction.h
Go to the documentation of this file.
1 /*!
2  *
3  *
4  * \brief implements an error function which only uses a random portion of the data for training
5  *
6  *
7  *
8  * \author T.Voss, T. Glasmachers, O.Krause
9  * \date 2010-2011
10  *
11  *
12  * \par Copyright 1995-2017 Shark Development Team
13  *
14  * <BR><HR>
15  * This file is part of Shark.
16  * <http://shark-ml.org/>
17  *
18  * Shark is free software: you can redistribute it and/or modify
19  * it under the terms of the GNU Lesser General Public License as published
20  * by the Free Software Foundation, either version 3 of the License, or
21  * (at your option) any later version.
22  *
23  * Shark is distributed in the hope that it will be useful,
24  * but WITHOUT ANY WARRANTY; without even the implied warranty of
25  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
26  * GNU Lesser General Public License for more details.
27  *
28  * You should have received a copy of the GNU Lesser General Public License
29  * along with Shark. If not, see <http://www.gnu.org/licenses/>.
30  *
31  */
32 #ifndef SHARK_OBJECTIVEFUNCTIONS_NOISYERRORFUNCTION_H
33 #define SHARK_OBJECTIVEFUNCTIONS_NOISYERRORFUNCTION_H
34 
38 #include <shark/Core/Random.h>
39 #include "Impl/FunctionWrapperBase.h"
40 
41 #include <boost/scoped_ptr.hpp>
42 
43 namespace shark{
44 
45 namespace detail{
46 ///\brief Baseclass for the Typewrapper of the Noisy Error Function.
47 class NoisyErrorFunctionWrapperBase:public FunctionWrapperBase{
48 protected:
49  std::size_t m_batchSize;
50 public:
51  void setBatchSize(std::size_t batchSize){
52  m_batchSize = batchSize;
53  }
54  std::size_t batchSize() const{
55  return m_batchSize;
56  }
57 };
58 }
59 
60 ///\brief Error Function which only uses a random fraction of data.
61 ///
62 ///Conceptually, this is the same as the normal ErrorFunction, with the only difference,
63 ///that only a fraction of the training examples is chosen randomly out of the set and
64 ///thus noise is introduced. This can be used to perform stochastic gradient
65 ///descent or to introduce some noise to a problem.
66 ///
67 /// Setting the batch size to 0 is equivalent to performing minibatch learning
68 /// where one random batch is picked from the dataset instead of sampling
69 /// points from it
71 {
72 public:
73  template<class InputType, class LabelType, class OutputType>
75  LabeledData<InputType,LabelType> const& dataset,
78  std::size_t batchSize=1
79  );
81  NoisyErrorFunction& operator = (NoisyErrorFunction const& op1);
82 
83  /// \brief From INameable: return the class name.
84  std::string name() const
85  { return "NoisyErrorFunction"; }
86 
87  void setBatchSize(std::size_t batchSize);
88  std::size_t batchSize() const;
89 
91  return mp_wrapper -> proposeStartingPoint();
92  }
93  std::size_t numberOfVariables()const{
94  return mp_wrapper -> numberOfVariables();
95  }
96 
97  void init(){
98  mp_wrapper->setRng(this->mep_rng);
99  mp_wrapper-> init();
100  }
101 
102  void setRegularizer(double factor, SingleObjectiveFunction* regularizer){
103  m_regularizer = regularizer;
104  m_regularizationStrength = factor;
105  }
106 
107  double eval(RealVector const& input)const;
108  ResultType evalDerivative( SearchPointType const& input, FirstOrderDerivative & derivative )const;
109 
110  friend void swap(NoisyErrorFunction& op1, NoisyErrorFunction& op2);
111 private:
112  boost::scoped_ptr<detail::NoisyErrorFunctionWrapperBase> mp_wrapper;
113 
114  SingleObjectiveFunction* m_regularizer;
115  double m_regularizationStrength;
116 
117 };
118 }
119 #endif
120 #include "Impl/NoisyErrorFunction.inl"