KernelLogisticRegression.cpp
#include <shark/LinAlg/Base.h>
#include <shark/Core/Random.h>
#include <shark/Data/Dataset.h>
#include <shark/Data/DataDistribution.h>
#include <shark/Models/Kernels/GaussianRbfKernel.h>
#include <shark/Models/Kernels/KernelExpansion.h>
#include <shark/Algorithms/Trainers/KernelSGDTrainer.h>
#include <shark/ObjectiveFunctions/Loss/CrossEntropy.h>
#include <shark/ObjectiveFunctions/Loss/ZeroOneLoss.h>
#include <iostream>

using namespace shark;


// data generating distribution for our toy
// multi-category classification problem
/// @cond EXAMPLE_SYMBOLS
class Problem : public LabeledDataDistribution<RealVector, unsigned int>
{
public:
    void draw(RealVector& input, unsigned int& label) const
    {
        label = random::discrete(random::globalRng, 0, 4);
        input.resize(1);
        input(0) = random::gauss(random::globalRng) + 3.0 * label;
    }
};
/// @endcond
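// The labels are drawn uniformly from {0, ..., 4} and each input is a single
// real value, so the five classes form unit-variance Gaussian clusters
// centered at 0, 3, 6, 9, and 12, with only slight overlap between neighbors.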

int main()
{
    std::cout << "kernel logistic regression example program" << std::endl;

    // experiment settings
    unsigned int ell = 1000;
    unsigned int tests = 1000;
    double C = 10.0;
    double gamma = 0.5;
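    // (C is the regularization constant passed to the KernelSGDTrainer below;
    // gamma parameterizes the Gaussian RBF kernel, k(x, y) = exp(-gamma * ||x - y||^2).)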

    // generate a very simple dataset with a little noise
    Problem problem;
    ClassificationDataset training = problem.generateDataset(ell);
    ClassificationDataset test = problem.generateDataset(tests);

    // kernel function
    GaussianRbfKernel<> kernel(gamma);

    // classifier model
    KernelClassifier<RealVector> classifier;

    // loss measuring classification errors
    ZeroOneLoss<unsigned int> loss;

    // loss measuring training errors
    CrossEntropy crossentropy;

    // machine training
    KernelSGDTrainer<RealVector> trainer(&kernel, &crossentropy, C, false);
    trainer.train(classifier, training);

    // evaluation
    Data<unsigned int> output = classifier(training.inputs());
    double train_error = loss.eval(training.labels(), output);
    std::cout << "training error: " << train_error << std::endl;
    output = classifier(test.inputs());
    double test_error = loss.eval(test.labels(), output);
    std::cout << "    test error: " << test_error << std::endl;
}
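As a possible follow-up (not part of the original example), the trained model can also be applied to hand-picked inputs through the same Data-based interface used for evaluation above. A minimal sketch, assuming Shark's createDataFromRange helper and element-wise access via Data::element; it would be appended at the end of main():

    // Classify a few hand-picked inputs with the trained model. Under the toy
    // distribution the clusters are centered at 0, 3, 6, 9, and 12, so an input
    // near 6.0 should usually be assigned to class 2.
    std::vector<RealVector> points(3, RealVector(1));   // may additionally need #include <vector>
    points[0](0) = 0.0;
    points[1](0) = 6.0;
    points[2](0) = 12.0;
    Data<RealVector> queries = createDataFromRange(points);
    Data<unsigned int> predictions = classifier(queries);
    for (std::size_t i = 0; i != points.size(); ++i)
        std::cout << "x = " << points[i](0) << " -> class " << predictions.element(i) << std::endl;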