GPLIB++
learntest.cpp
#include "NeuralNetwork.h"
#include <iostream>
#include <vector>
#include <fstream>
#include <string>
#include <cmath> // for exp() used below
#include "UniformRNG.h"

using namespace std;
using namespace gplib;

int main()
  {
    // first some global constants that can be changed to play with
    const int seglength = 2;           // the length of the segment we want to work on
    const double maxinit = 0.01;       // the maximum value the neuron weights are initialized to
    const double maxbias = 0.01;       // the maximum bias for each neuron
    const int hiddenlayers = 1;        // the number of hidden layers
    const double mu = 1;               // the step size for weight adaptation for each neuron
    const int trainsamples = 20;       // the number of training samples
    const int testsamples = 20;        // the number of test samples
    const int gap = trainsamples / 10; // offset between the training and test sample indices in the log

    int currentindex = 0;   // (not used in this example)
    ttypeVector typeVector; // a vector that contains the type for each neuron in a layer
    ttypeArray typeArray;   // an array of type vectors that contains the neuron type for the whole network

    UniformRNG Random; // uniform random number generator for the input samples

    double currentinput, currentref; // the current input and reference values we're working on (not used in this example)

    string logfilename("learn.log");
    ofstream logfile(logfilename.c_str());

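    // Each line written to learn.log below has four columns:
    // sample index, desired value, network output, and the error (desired - output).
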
    typeVector.assign(4 * seglength, bipolar); // we want 4 * seglength bipolar neurons per hidden layer
    for (int i = 0; i < hiddenlayers; ++i) // initialize the type array for the hidden layers
      {
        typeArray.push_back(typeVector); // all hidden layers are the same, so we copy the same vector there
      }
    typeVector.assign(1, identity); // the output layer consists of a single identity (linear) neuron
    typeArray.push_back(typeVector); // and then we add it to the type array

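    // With the constants above this yields one hidden layer of 4 * seglength = 8
    // bipolar neurons followed by a single identity output neuron.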
    NeuralNetwork Network; // we declare a new variable for the network
    Network.Input.assign(seglength, 0); // we have to allocate memory for the network input and set it to zero
    Network.SetLayers(typeArray); // we use the type array we created before to set up the neurons in the network
    Network.InitWeights(maxinit, maxbias); // the weights are initialized with the two maximum values
    Network.mu = mu; // the mu value is copied to the network properties

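    // Training phase: for each random sample we perform one online weight update.
    // (Assumption, not verified against the library: AdaptWeights() carries out a
    // standard backpropagation step using Input and Desired, with mu as the learning
    // rate, and the Delta printed below is the per-neuron error term. GetNumber() is
    // presumably uniform on [0, 1), so 0.5 - GetNumber() is roughly centred around zero.)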
    for (int i = 0; i < trainsamples; ++i)
      {
        double input1 = (0.5 - Random.GetNumber());
        double input2 = (0.5 - Random.GetNumber());
        double desired = exp(input1 + input2); // the function we want the network to learn
        Network.Input.at(0) = input1;
        Network.Input.at(1) = input2;
        Network.Desired.front() = desired;
        Network.AdaptWeights();
        Network.CalcOutput();
        for (size_t j = 0; j < typeArray.size(); ++j)
          {
            cout << "Layer: " << j << endl;
            for (size_t k = 0; k < typeArray.at(j).size(); ++k)
              {
                cout << "Neuron: " << k << endl;
                cout << "Delta: " << Network.Layers.at(j).at(k)->Delta << endl;
                cout << "Net: " << Network.Layers.at(j).at(k)->Net << endl;
                cout << "Output: " << Network.Layers.at(j).at(k)->Output << endl;
              }
            cout << endl;
          }
        logfile << i << " " << desired << " " << Network.Output.at(0) << " "
                << desired - Network.Output.at(0) << endl;
      }

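    // Test phase: the weights are no longer adapted, the trained network is only
    // evaluated on fresh random inputs and the results are logged after a gap in
    // the sample index. Note that the reference value here is input1 + input2,
    // not exp(input1 + input2) as in the training phase above.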
    for (int i = trainsamples + gap; i < trainsamples + gap + testsamples; ++i)
      {
        double input1 = (0.5 - Random.GetNumber());
        double input2 = (0.5 - Random.GetNumber());
        double desired = input1 + input2;
        Network.Input.at(0) = input1;
        Network.Input.at(1) = input2;
        Network.CalcOutput();
        for (size_t j = 0; j < typeArray.size(); ++j)
          {
            cout << "Layer: " << j << endl;
            for (size_t k = 0; k < typeArray.at(j).size(); ++k)
              {
                cout << "Neuron: " << k << endl;
                cout << "Delta: " << Network.Layers.at(j).at(k)->Delta << endl;
                cout << "Net: " << Network.Layers.at(j).at(k)->Net << endl;
                cout << "Output: " << Network.Layers.at(j).at(k)->Output << endl;
              }
            cout << endl;
          }
        // Network.AdaptWeights();
        logfile << i << " " << desired << " " << Network.Output.at(0) << " "
                << desired - Network.Output.at(0) << endl;
      }

  }
Referenced members:

void NeuralNetwork::InitWeights(const double MaxWeight, const double MaxBias)
    Initialize the weights with random values up to the specified maxima.

void NeuralNetwork::SetLayers(ttypeArray typeArray, bool cachedoutput=false)
    Configure the layers of the network according to the types in typeArray.

UniformRNG (Definition: UniformRNG.h:19)
    Generate uniformly distributed random numbers; this is basically a wrapper for the boost random number generator.

float UniformRNG::GetNumber(const float low, const float high) (Definition: UniformRNG.cpp:21)
    Return a random float between low and high.