// NOTE(review): this chunk is an extraction of Shark's BipolarLayer.h; the
// original file's line numbers are fused into the text and many lines were
// dropped. Comments below hedge anything the visible code does not establish.
// Include guard for the header, followed by the start of the bias() accessor.
28 #ifndef SHARK_UNSUPERVISED_RBM_NEURONLAYERS_BIPOLARLAYER_H 29 #define SHARK_UNSUPERVISED_RBM_NEURONLAYERS_BIPOLARLAYER_H 59 const RealVector&
// Read-only access to the layer's bias vector (body dropped by extraction;
// presumably `return m_bias;` -- verify against upstream).
bias()
const{
// Fragment of a resize method: adjusts the bias vector to the new layer size.
72 m_bias.resize(newSize);
// Fragment of sufficientStatistics(): the function declaration line itself was
// dropped by the extraction (original lines 87-89); presumably it takes the
// batch of inputs, the statistics batch to fill, and a per-sample inverse
// temperature vector beta -- verify against upstream.
86 template<
class Input,
class BetaVector>
90 SIZE_CHECK(input.size1() == statistics.size1());
// Per row (sample) i: statistics_i = sigmoid(2*(input_i + bias)*beta_i).
// The factor 2 is consistent with the +/-1 (bipolar) state coding used below,
// giving the probability that each unit takes the value +1.
92 for(std::size_t i = 0; i != input.size1(); ++i){
93 noalias(row(statistics,i)) =
sigmoid(2*(row(input,i)+m_bias)*
beta(i));
// Samples a batch of +/-1 states from the conditional distribution given by
// `statistics` (per-unit probabilities of the value +1).
// NOTE(review): several body lines were dropped by the extraction -- the
// actual random draws (original lines ~118 and ~140) and the if/else lines
// separating the two sampling modes and the four `prob` formulas (128, 130,
// 132-134, 136, 138-139). The comments below reconstruct the apparent intent;
// verify against upstream.
108 template<
class Matrix,
class Rng>
109 void sample(StatisticsBatch
const& statistics, Matrix& state,
double alpha, Rng& rng)
const{
111 SIZE_CHECK(statistics.size1() == state.size1());
112 SIZE_CHECK(statistics.size2() == state.size2());
// First loop: plain Gibbs sampling (presumably the alpha == 0 case). A draw
// in {0,1} is remapped to {-1,+1} by turning 0 into -1.
116 for(std::size_t s = 0; s != state.size1();++s){
117 for(std::size_t i = 0; i != state.size2();++i){
119 if(state(s,i)==0) state(s,i)=-1.;
// Second loop: alpha-dependent sampling -- the four formulas below mix the
// plain probability with a state-dependent term, which looks like a
// "flip-the-state" proposal interpolated by alpha (TODO confirm: the branch
// conditions selecting between the two formulas per case were dropped).
124 for(
size_t s = 0; s != state.size1(); ++s){
125 for (
size_t i = 0; i != state.size2(); i++) {
126 double prob = statistics(s,i);
// Current state is -1: two variants of the adjusted flip probability.
127 if (state(s,i) == -1) {
129 prob = (1. - alpha) * prob + alpha * prob / (1. - prob);
131 prob = (1. - alpha) * prob + alpha;
// Current state is +1 (dropped else-branch): mirrored adjustments.
135 prob = (1. - alpha) * prob + alpha * (1. - (1. - prob) / prob);
137 prob = (1. - alpha) * prob;
// Remap the {0,1} draw to the bipolar coding {-1,+1}.
141 if(state(s,i)==0) state(s,i)=-1.;
154 template<
class Matrix>
155 RealVector
logProbability(StatisticsBatch
const& statistics, Matrix
const& state)
const{
157 SIZE_CHECK(statistics.size1() == state.size1());
158 SIZE_CHECK(statistics.size2() == state.size2());
160 RealVector logProbabilities(state.size1(),1.0);
161 for(std::size_t s = 0; s != state.size1();++s){
162 for(std::size_t i = 0; i != state.size2();++i){
163 logProbabilities(s) += (state(s,i) > 0.0)? std::log(statistics(s,i)) : std::log(1-statistics(s,i));
166 return logProbabilities;
// NOTE(review): the extraction fused two members here. The visible signature
// is phi(), which by its return type yields a Matrix const& -- its own return
// statement (original lines 177-186, likely `return state;`) was dropped. The
// statement numbered 187 below (`return 2*statistics - 1;`) belongs to a later
// member (it references `statistics`, which is not a parameter of phi, and
// matches the mean() body further down). Verify both against upstream.
175 template<
class Matrix>
176 Matrix
const&
phi(Matrix
const& state)
const{
187 return 2*statistics - 1;
193 RealMatrix
mean(StatisticsBatch
const& statistics)
const{
196 return 2*statistics - 1;
// Fragment of the layer's energy-term computation: the declaration (original
// lines 206-208) and the return were dropped by the extraction. The visible
// line computes, per sample s, beta_s * <state_s, bias> -- the bias
// contribution of this layer to the RBM energy. Verify signature upstream.
205 template<
class Matrix,
class BetaVector>
209 RealVector energies = beta*prod(state,m_bias);
// Fragment of the log-partition (factorization) contribution of this layer;
// the declaration (original lines 225-226) was dropped -- presumably it takes
// the input vector and a scalar beta. Verify against upstream.
224 template<
class Input>
227 long double logFactorization = 0;
228 for(std::size_t i = 0; i != inputs.size(); ++i){
// arg = |(input_i + bias_i) * beta|; then softPlus(-2*arg) + arg
// = log(1 + e^{-2|x|}) + |x| = log(e^{x} + e^{-x}), the numerically stable
// form of the bipolar unit's per-unit partition term (no overflow for large x).
229 long double arg = std::abs((inputs(i)+m_bias(i))*beta);
230 logFactorization +=
softPlus(-2*arg)+arg;
232 return logFactorization;
// Fragment of the expected parameter (bias) derivative over a sample batch;
// the declaration (original lines 243-244) was dropped. The two visible lines
// compute sum_i (2*p_i - 1): sumRows accumulates 2*statistics column-wise and
// subtracting the batch size completes the -1 per sample -- i.e. the summed
// mean states under the bipolar coding. Verify signature upstream.
242 template<
class Vector,
class SampleBatch>
245 sumRows(2*samples.statistics,derivative);
246 derivative -= samples.size();
// Fragment of the weighted expected parameter derivative; the declaration
// (original lines 257-258) was dropped. The visible line accumulates
// sum_i w_i * (2*p_i - 1) = 2*W*P - sum(w), the weight-averaged mean states.
// Verify signature upstream.
256 template<
class Vector,
class SampleBatch,
class WeightVector>
259 noalias(derivative) += 2*prod(weights,samples.statistics) - sum(weights);
// Fragment of the (unweighted) parameter derivative from sampled states; the
// declaration (original lines 269-270) was dropped. Accumulates the
// column-wise sum of the +/-1 states over the batch. Verify signature upstream.
268 template<
class Vector,
class SampleBatch>
271 sumRows(samples.state,derivative);
// Weighted parameter (bias) derivative: accumulates the weight-vector product
// with the batch of sampled states, derivative += W * S. Kept byte-identical:
// the body is a single lazily-evaluated blas expression; noalias() asserts
// that `derivative` does not appear on the right-hand side.
280 template<
class Vector,
class SampleBatch,
class WeightVector>
281 void parameterDerivative(Vector& derivative, SampleBatch
const& samples, WeightVector
const& weights)
const{
283 noalias(derivative) += prod(weights,samples.state);
// Fragment of the parameter setter: the bias vector is (apparently) the
// layer's entire parameter vector, so setting parameters assigns it directly.
293 m_bias = newParameters;