gtkIOStream  1.7.0
NeuralNetwork.H
/* Copyright 2000-2018 Matt Flax <flatmax@flatmax.org>
   This file is part of GTK+ IOStream class set

   GTK+ IOStream is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 2 of the License, or
   (at your option) any later version.

   GTK+ IOStream is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
   GNU General Public License for more details.

   You have received a copy of the GNU General Public License
   along with GTK+ IOStream
*/
#ifndef NEURALNETWORK_H_
#define NEURALNETWORK_H_

#include <Eigen/Dense>
#include <vector>
using namespace std;

/** Implements a single neural network layer: an affine transformation of the input
\tparam TYPE the precision of the data to use, e.g. float, double
*/
template<typename TYPE>
class NeuralLayer {
protected:
    Eigen::Matrix<TYPE, Eigen::Dynamic, Eigen::Dynamic> weights; ///< The neural weights for this layer
    Eigen::Matrix<TYPE, Eigen::Dynamic, 1> bias; ///< The biases for this layer
public:

    Eigen::Matrix<TYPE, Eigen::Dynamic, 1> output; ///< The output from this layer

    /** Generate a neural layer of a particular size
    \param inputSize The number of inputs
    \param outputSize The number of outputs
    */
    NeuralLayer(int inputSize, int outputSize) {
        weights.resize(outputSize, inputSize); // one row of weights per output
        bias.resize(outputSize);               // bias and output are column vectors
        output.resize(outputSize);
    }

    /** Generate a neural layer, providing the weights and biases
    \param weightsIn The weights to set (one row per output, one column per input)
    \param biasIn The biases to set (one per output)
    \tparam Derived is used by Eigen's Curiously recurring template pattern (CRTP)
    */
    template <typename Derived>
    NeuralLayer(const Eigen::MatrixBase<Derived> &weightsIn, const Eigen::MatrixBase<Derived> &biasIn) {
        weights=weightsIn;
        bias=biasIn;
        output.resize(bias.rows(),1);
    }

    /// Destructor
    virtual ~NeuralLayer(void) {}

    /** The linear layer
    Evaluate the layer, computing output = weights*input + bias
    \param input The input to this layer
    \return The result of the layer after processing the input
    */
    virtual Eigen::Matrix<TYPE, Eigen::Dynamic, 1> &activate(const Eigen::Matrix<TYPE, Eigen::Dynamic, 1> &input) {
        output=bias;                     // start from the biases
        output.noalias()+=weights*input; // add the weighted inputs
        return output;
    }

    /// Return the number of inputs this layer expects
    int inputSize(void){
        return weights.cols();
    }

    /// Return the number of outputs this layer produces
    int outputSize(void){
        return weights.rows();
    }
};
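
/* Example (an illustrative sketch added for documentation, not part of the original
   header): evaluate a single layer built from explicit weights and biases. The names
   W, b, x and y are hypothetical. Both constructor arguments are passed as the same
   Eigen type (MatrixXf) because the templated constructor deduces a single Derived
   type for the weights and the biases. The weight matrix is laid out with one row per
   output and one column per input, so activate() computes output = W*input + b.

    Eigen::MatrixXf W(2,3); // 2 outputs, 3 inputs
    W << 1.f, 0.f, -1.f,
         0.f, 2.f,  1.f;
    Eigen::MatrixXf b(2,1); // one bias per output, held as a 2x1 matrix
    b << 0.5f, -0.5f;
    NeuralLayer<float> layer(W, b);

    Eigen::Matrix<float, Eigen::Dynamic, 1> x(3); // a single input vector
    x << 1.f, 2.f, 3.f;
    Eigen::Matrix<float, Eigen::Dynamic, 1> y = layer.activate(x); // y = W*x + b
*/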

/** Implements a neural layer with a sigmoid (logistic) activation function
\tparam TYPE the precision of the data to use, e.g. float, double
*/
template<typename TYPE>
class SigmoidLayer : public NeuralLayer<TYPE> {
public:
    /** Generate a neural layer of a particular size
    \param inputSize The number of inputs
    \param outputSize The number of outputs
    */
    SigmoidLayer(int inputSize, int outputSize) : NeuralLayer<TYPE>(inputSize, outputSize) {
    }

    /** Generate a neural layer, providing the weights and biases
    \param weightsIn The weights to set
    \param biasIn The biases to set
    \tparam Derived is used by Eigen's Curiously recurring template pattern (CRTP)
    */
    template <typename Derived>
    SigmoidLayer(const Eigen::MatrixBase<Derived> &weightsIn, const Eigen::MatrixBase<Derived> &biasIn) : NeuralLayer<TYPE>(weightsIn, biasIn) {
    }

    /// Destructor
    virtual ~SigmoidLayer(void) {}

    /** The sigmoid layer
    Evaluate the neural network using the sigmoid as the activation function
    \param input The input to this layer
    \return The result of the layer after processing the input
    */
    virtual Eigen::Matrix<TYPE, Eigen::Dynamic, 1> &activate(const Eigen::Matrix<TYPE, Eigen::Dynamic, 1> &input) {
        NeuralLayer<TYPE>::activate(input); // first form the affine output weights*input + bias
        NeuralLayer<TYPE>::output=1./(1.+(-NeuralLayer<TYPE>::output).array().exp());
        return NeuralLayer<TYPE>::output;
    }
};
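
/* Note (added for clarity): the activation applied above is the logistic sigmoid,
   sigma(x) = 1/(1 + exp(-x)), evaluated element-wise on the affine output
   weights*input + bias, so every element of this layer's output lies in (0, 1). */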

/** Implements a neural layer with a tanh activation function
\tparam TYPE the precision of the data to use, e.g. float, double
*/
template<typename TYPE>
class TanhLayer : public NeuralLayer<TYPE> {
public:
    /** Generate a neural layer of a particular size
    \param inputSize The number of inputs
    \param outputSize The number of outputs
    */
    TanhLayer(int inputSize, int outputSize) : NeuralLayer<TYPE>(inputSize, outputSize) {
    }

    /** Generate a neural layer, providing the weights and biases
    \param weightsIn The weights to set
    \param biasIn The biases to set
    \tparam Derived is used by Eigen's Curiously recurring template pattern (CRTP)
    */
    template <typename Derived>
    TanhLayer(const Eigen::MatrixBase<Derived> &weightsIn, const Eigen::MatrixBase<Derived> &biasIn) : NeuralLayer<TYPE>(weightsIn, biasIn) {
    }

    /// Destructor
    virtual ~TanhLayer(void) {}

    /** The tanh layer
    Evaluate the neural network using tanh as the activation function
    \param input The input to this layer
    \return The result of the layer after processing the input
    */
    virtual Eigen::Matrix<TYPE, Eigen::Dynamic, 1> &activate(const Eigen::Matrix<TYPE, Eigen::Dynamic, 1> &input) {
        NeuralLayer<TYPE>::activate(input); // first form the affine output weights*input + bias
        NeuralLayer<TYPE>::output=2./(1.+(-2.*NeuralLayer<TYPE>::output).array().exp())-1.;
        return NeuralLayer<TYPE>::output;
    }
};
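
/* Note (added for clarity): 2/(1 + exp(-2*x)) - 1 is an algebraic rewriting of
   tanh(x), so this layer is a standard tanh layer with outputs in (-1, 1). Recent
   Eigen versions also provide an element-wise tanh() on array expressions, which
   would give the same result. */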

/* Implements a neural layer with a scaled and offset tanh activation function
\tparam TYPE the precision of the data to use, e.g. float, double
*/
//template<typename TYPE>
//class PosLayer : public NeuralLayer<TYPE> {
//public:
    /* Generate a neural layer of particular size
    \param inputSize The number of the inputs
    \param outputSize The number of outputs
    */
//    PosLayer(int inputSize, int outputSize) : NeuralLayer<TYPE>(inputSize, outputSize) {
//    }

    /* Generate a neural layer of particular size providing the weights
    \param weightsIn The weights to set
    \param biasIn The biases to set
    \tparam Derived is used by Eigen's Curiously recurring template pattern (CRTP)
    */
//    template <typename Derived>
//    PosLayer(const Eigen::MatrixBase<Derived> &weightsIn, const Eigen::MatrixBase<Derived> &biasIn) : NeuralLayer<TYPE>(weightsIn, biasIn) {
//    }

    // Destructor
//    virtual ~PosLayer(void) {}

    /* The positive only layer
    Evaluate the neural network using a positive only (rectifying) activation function
    \param input The input to this layer
    \return The result of the layer after processing the input
    */
//    virtual Eigen::Matrix<TYPE, Eigen::Dynamic, 1> &activate(const Eigen::Matrix<TYPE, Eigen::Dynamic, 1> &input) {
//        NeuralLayer<TYPE>::activate(input);
//        NeuralLayer<TYPE>::output=(NeuralLayer<TYPE>::output>0.).select(NeuralLayer<TYPE>::output,0.);
//    }
//};

/** Implements a feed forward neural network as a chain of NeuralLayer objects.
Each layer's output is fed as the input to the following layer.
\tparam TYPE the precision of the data to use, e.g. float, double
*/
template<typename TYPE>
class NeuralNetwork {
public:
    /// Constructor
    NeuralNetwork(void) {}

    /// Destructor
    virtual ~NeuralNetwork(void) {}

    /** Activate the network: propagate an input through each of the layers in turn.
    The overall result is left in the last layer's output member variable.
    \param layers The network's layers, in order of execution
    \param input The input to the first layer
    */
    void activate(vector<NeuralLayer<TYPE> *> &layers, Eigen::Matrix<TYPE, Eigen::Dynamic, 1> &input) {
        int layerCount=layers.size();
        if (layerCount>0) {
            // process the first layer with the supplied input
            layers[0]->activate(input);
            // each subsequent layer processes the previous layer's output
            for (int i=1; i<layerCount; i++) {
                layers[i]->activate(layers[i-1]->output);
            }
        }
    }
};
#endif // NEURALNETWORK_H_
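
The following sketch is not part of the header above; it illustrates how the classes are intended to be combined. Layers are constructed from weight and bias matrices, collected into a vector of NeuralLayer pointers, and driven by NeuralNetwork<TYPE>::activate(), which leaves the overall result in the last layer's output member. All names, sizes and values here are illustrative.

#include "NeuralNetwork.H"
#include <iostream>

int main() {
    // Weights and biases are created as one Eigen type (MatrixXd) so that the
    // templated layer constructors deduce a single Derived type for both arguments.
    Eigen::MatrixXd W1(4,3), b1(4,1); // hidden layer: 3 inputs -> 4 outputs
    Eigen::MatrixXd W2(2,4), b2(2,1); // output layer: 4 inputs -> 2 outputs
    W1.setRandom(); b1.setRandom();
    W2.setRandom(); b2.setRandom();

    TanhLayer<double> hidden(W1, b1);
    SigmoidLayer<double> out(W2, b2);
    std::vector<NeuralLayer<double> *> layers;
    layers.push_back(&hidden);
    layers.push_back(&out);

    Eigen::Matrix<double, Eigen::Dynamic, 1> input(3);
    input << 0.1, 0.2, 0.3;

    NeuralNetwork<double> net;
    net.activate(layers, input); // the network's result is in layers.back()->output
    std::cout << layers.back()->output.transpose() << std::endl;
    return 0;
}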