Added FeedForward network; moved the Recurrent neuron implementation into the common Neuron class

This commit is contained in:
2016-01-28 22:17:36 +01:00
parent 13b179dd57
commit 3e383e9add
12 changed files with 265 additions and 252 deletions

View File

@@ -0,0 +1,43 @@
#pragma once
#include <cstddef>
#include <vector>
#include "../Neuron.h"
namespace NeuralNetwork {
namespace FeedForward {
/**
* @author Tomas Cernik (Tom.Cernik@gmail.com)
* @brief Class for Layer of FeedForward network
*/
class Layer
{
public:
~Layer() {};
/**
* @brief This is a virtual function for selecting neuron
* @param neuron is position in layer
* @returns Specific neuron
*/
Neuron& operator[](const std::size_t& neuron) {
return neurons[neuron];
}
void solve(const std::vector<float> &input, std::vector<float> &output);
/**
* @returns Size of layer
*/
std::size_t size() {
return neurons.size();
}
protected:
std::vector<Neuron> neurons;
};
}
}

View File

@@ -0,0 +1,53 @@
#pragma once
#include "../Network.h"
#include "Layer.h"
#include <vector>
#include <sstream>
#include <iomanip>
#include <limits>
namespace NeuralNetwork {
namespace FeedForward {
/**
 * @author Tomas Cernik (Tom.Cernik@gmail.com)
 * @brief FeedForward model of Artificial neural network
 */
class Network: public NeuralNetwork::Network {
public:
	/**
	 * @brief Constructor for Network
	 * @param _inputSize is number of inputs to network
	 *
	 * NOTE(review): _inputSize is currently unused and no layers are created
	 * here — confirm whether layer construction is still pending. The old
	 * doc comment described _outputSize/hiddenUnits parameters that do not
	 * exist on this signature.
	 */
	Network(size_t _inputSize):NeuralNetwork::Network() {
		(void)_inputSize;	// explicitly unused for now
	}

	/**
	 * @brief Virtual destructor for Network
	 */
	virtual ~Network() {
	}

	/**
	 * @brief Computes one iteration of the network.
	 * @param input is input of network
	 * @returns output of network
	 */
	virtual std::vector<float> computeOutput(const std::vector<float>& input) override;

	using NeuralNetwork::Network::stringify;

	/**
	 * @brief Serializes the network to a stream.
	 *
	 * TODO: not implemented yet — writes nothing to out.
	 */
	void stringify(std::ostream& out) const override {
	}

protected:
	// NOTE: computeOutput iterates from index 1, so layers[0] is never
	// solved — presumably reserved for the input layer; confirm.
	std::vector<Layer> layers;
};
}
}

View File

@@ -1,33 +0,0 @@
#pragma once
#include <cstddef>
#include <vector>
#include "Neuron.h"
namespace NeuralNetwork
{
/**
* @author Tomas Cernik (Tom.Cernik@gmail.com)
* @brief Abstract class for all Layers of neurons
*
* (Removed by this commit: the abstract interface is replaced by the
* concrete FeedForward::Layer.)
*/
class Layer
{
public:
// Virtual destructor: layers were deleted through this base interface.
virtual ~Layer() {};
/**
* @brief This is a virtual function for selecting neuron
* @param neuron is position in layer
* @returns Specific neuron
*/
virtual Neuron& operator[](const size_t& neuron)=0;
/**
* @returns Size of layer
*/
virtual size_t size() const=0;
};
}

View File

@@ -3,13 +3,19 @@
#include <string>
#include <vector>
#include <sstream>
#include <limits>
#include <NeuralNetwork/ActivationFunction/Sigmoid.h>
#include <NeuralNetwork/BasisFunction/Linear.h>
namespace NeuralNetwork
{
/**
* @author Tomas Cernik (Tom.Cernik@gmail.com)
* @brief Abstract class of neuron. All Neuron classes should derive from this one
*/
class Neuron
class NeuronInterface
{
public:
@@ -21,7 +27,7 @@ namespace NeuralNetwork
/**
* @brief virtual destructor for Neuron
*/
virtual ~Neuron() {};
virtual ~NeuronInterface() {};
/**
* @brief This is a virtual function for storing network
@@ -33,14 +39,14 @@ namespace NeuralNetwork
* @brief Gets weight
* @param n is neuron
*/
virtual float getWeight(const Neuron &n) const =0;
virtual float getWeight(const NeuronInterface &n) const =0;
/**
* @brief Sets weight
* @param n is neuron
* @param w is new weight for input neuron n
*/
virtual void setWeight(const Neuron& n ,const float &w) =0;
virtual void setWeight(const NeuronInterface& n ,const float &w) =0;
/**
* @brief Returns output of neuron
@@ -73,21 +79,133 @@ namespace NeuralNetwork
/**
* @brief Function returns clone of object
*/
virtual Neuron* clone() const = 0;
virtual NeuronInterface* clone() const = 0;
protected:
};
class BiasNeuron: public Neuron {
/**
* @author Tomas Cernik (Tom.Cernik@gmail.com)
* @brief Class of FeedForward neuron.
*/
class Neuron: public NeuronInterface
{
public:
Neuron(unsigned long _id=0): NeuronInterface(), basis(new BasisFunction::Linear),
activation(new ActivationFunction::Sigmoid(-4.9)),
id_(_id),weights(_id+1),_output(0),_value(0) {
}
Neuron(const Neuron &r): NeuronInterface(), basis(r.basis->clone()), activation(r.activation->clone()),id_(r.id_),
weights(r.weights), _output(r._output), _value(r._value) {
}
virtual ~Neuron() {
delete basis;
delete activation;
};
virtual std::string stringify(const std::string &prefix="") const override;
Neuron& operator=(const Neuron&r) {
id_=r.id_;
weights=r.weights;
basis=r.basis->clone();
activation=r.activation->clone();
return *this;
}
virtual long unsigned int id() const override {
return id_;
};
/**
* @brief Gets weight
* @param n is neuron
*/
virtual float getWeight(const NeuronInterface &n) const override {
return weights[n.id()];
}
/**
* @brief Sets weight
* @param n is neuron
* @param w is new weight for input neuron n
*/
virtual void setWeight(const NeuronInterface& n ,const float &w) override {
if(weights.size()<n.id()+1) {
weights.resize(n.id()+1);
}
weights[n.id()]=w;
}
/**
* @brief Returns output of neuron
*/
virtual float output() const override {
return _output;
}
/**
* @brief Returns input of neuron
*/
virtual float value() const override {
return _value;
}
/**
* @brief Function sets bias for neuron
* @param _bias is new bias (initial value for neuron)
*/
virtual void setBias(const float &_bias) override {
weights[0]=_bias;
}
/**
* @brief Function returns bias for neuron
*/
virtual float getBias() const override {
return weights[0];
}
float operator()(const std::vector<float>& inputs) {
//compute value
_value=basis->operator()(weights,inputs);
//compute output
_output=activation->operator()(_value);
return _output;
}
virtual Neuron* clone() const override {
Neuron *n = new Neuron;
*n=*this;
return n;
}
protected:
BasisFunction::BasisFunction *basis;
ActivationFunction::ActivationFunction *activation;
unsigned long id_;
std::vector<float> weights;
float _output;
float _value;
};
class BiasNeuron: public NeuronInterface {
public:
virtual float getBias() const override { return 0; };
virtual float getWeight(const Neuron&) const override { return 0; }
virtual float getWeight(const NeuronInterface&) const override { return 0; }
virtual void setBias(const float&) override{ }
virtual float output() const override { return 1.0; };
virtual void setWeight(const Neuron&, const float&) override { }
virtual void setWeight(const NeuronInterface&, const float&) override { }
virtual std::string stringify(const std::string& prefix = "") const override { return prefix+"{ \"class\" : \"NeuralNetwork::BiasNeuron\" }"; }
@@ -97,10 +215,10 @@ namespace NeuralNetwork
virtual float operator()(const std::vector< float >&) override { return 1.0; }
virtual Neuron* clone() const { return new BiasNeuron(); }
virtual BiasNeuron* clone() const { return new BiasNeuron(); }
};
class InputNeuron: public Neuron {
class InputNeuron: public NeuronInterface {
public:
InputNeuron(long unsigned int _id): id_(_id) {
@@ -108,13 +226,13 @@ namespace NeuralNetwork
virtual float getBias() const override { return 0; };
virtual float getWeight(const Neuron&) const override { return 0; }
virtual float getWeight(const NeuronInterface&) const override { return 0; }
virtual void setBias(const float&) override{ }
virtual float output() const override { return 1.0; };
virtual void setWeight(const Neuron&, const float&) override { }
virtual void setWeight(const NeuronInterface&, const float&) override { }
virtual std::string stringify(const std::string& prefix = "") const override { return prefix+"{ \"class\" : \"NeuralNetwork::InputNeuron\", \"id\": "+std::to_string(id_)+" }"; }
@@ -124,7 +242,7 @@ namespace NeuralNetwork
virtual float operator()(const std::vector< float >&) override { return 1.0; }
virtual Neuron* clone() const { return new InputNeuron(id_); }
virtual InputNeuron* clone() const { return new InputNeuron(id_); }
protected:
long unsigned int id_;
};

View File

@@ -1,7 +1,6 @@
#pragma once
#include "../Network.h"
#include "Neuron.h"
#include <vector>
@@ -67,7 +66,7 @@ namespace Recurrent {
*/
std::vector<float> computeOutput(const std::vector<float>& input, unsigned int iterations);
std::vector<NeuralNetwork::Neuron*>& getNeurons () {
std::vector<NeuronInterface*>& getNeurons () {
return neurons;
}
@@ -75,10 +74,10 @@ namespace Recurrent {
void stringify(std::ostream& out) const override;
NeuralNetwork::Neuron& addNeuron() {
neurons.push_back(new Recurrent::Neuron(neurons.size()));
NeuralNetwork::Neuron *newNeuron=neurons.back();
for(size_t i=0;i<neurons.size();i++) {
NeuronInterface& addNeuron() {
neurons.push_back(new Neuron(neurons.size()));
NeuronInterface *newNeuron=neurons.back();
for(std::size_t i=0;i<neurons.size();i++) {
neurons[i]->setWeight(*newNeuron,0.0);
}
return *newNeuron;
@@ -95,7 +94,7 @@ namespace Recurrent {
size_t inputSize=0;
size_t outputSize=0;
std::vector<NeuralNetwork::Neuron*> neurons;
std::vector<NeuronInterface*> neurons;
};
}
}

View File

@@ -1,191 +0,0 @@
#pragma once
#include "../Neuron.h"
#include <NeuralNetwork/ActivationFunction/Sigmoid.h>
#include <NeuralNetwork/BasisFunction/Linear.h>
#include <vector>
#include <sstream>
#include <iomanip>
#include <limits>
namespace NeuralNetwork {
namespace Recurrent {
class Network;
/**
* @author Tomas Cernik (Tom.Cernik@gmail.com)
* @brief Class of recurrent neuron.
*
* (Removed by this commit: merged into the common NeuralNetwork::Neuron.)
*/
class Neuron : public NeuralNetwork::Neuron
{
public:
// weights is sized _id+1 so the bias slot (weights[0]) through weights[_id] exist.
Neuron(unsigned long _id=0): NeuralNetwork::Neuron(), basis(new BasisFunction::Linear),
activation(new ActivationFunction::Sigmoid(-4.9)),
id_(_id),weights(_id+1),_output(0),_value(0) {
}
// Deep-copies the owned basis/activation via clone().
Neuron(const Neuron &r): NeuralNetwork::Neuron(), basis(r.basis->clone()), activation(r.activation->clone()),id_(r.id_),
weights(r.weights), _output(r._output), _value(r._value) {
}
virtual ~Neuron() {
delete basis;
delete activation;
};
virtual std::string stringify(const std::string &prefix="") const override;
// NOTE(review): assigns fresh clones without deleting the previously owned
// basis/activation (memory leak), has no self-assignment guard, and does not
// copy _output/_value — inconsistent with the copy constructor.
Recurrent::Neuron& operator=(const NeuralNetwork::Recurrent::Neuron&r) {
id_=r.id_;
weights=r.weights;
basis=r.basis->clone();
activation=r.activation->clone();
return *this;
}
virtual long unsigned int id() const override {
return id_;
};
/**
* @brief Gets weight
* @param n is neuron
*/
virtual float getWeight(const NeuralNetwork::Neuron &n) const override {
return weights[n.id()];
}
/**
* @brief Sets weight
* @param n is neuron
* @param w is new weight for input neuron n
*/
virtual void setWeight(const NeuralNetwork::Neuron& n ,const float &w) override {
// Grows the weight vector on demand; new slots value-initialize to 0.
if(weights.size()<n.id()+1) {
weights.resize(n.id()+1);
}
weights[n.id()]=w;
}
/**
* @brief Returns output of neuron
*/
virtual float output() const override {
return _output;
}
/**
* @brief Returns input of neuron
*/
virtual float value() const override {
return _value;
}
/**
* @brief Function sets bias for neuron
* @param _bias is new bias (initial value for neuron)
*/
virtual void setBias(const float &_bias) override {
weights[0]=_bias;
}
/**
* @brief Function returns bias for neuron
*/
virtual float getBias() const override {
return weights[0];
}
// Computes and caches the activation for the given inputs.
float operator()(const std::vector<float>& inputs) {
//compute value
_value=basis->operator()(weights,inputs);
//compute output
_output=activation->operator()(_value);
return _output;
}
// NOTE(review): default-constructs then assigns, so the freshly allocated
// basis/activation pair is leaked by the assignment above.
virtual Neuron* clone() const override {
Neuron *n = new Recurrent::Neuron;
*n=*this;
return n;
}
protected:
BasisFunction::BasisFunction *basis;  // owned
ActivationFunction::ActivationFunction *activation;  // owned
unsigned long id_;
std::vector<float> weights;  // weights[0] is the bias
float _output;
float _value;
};
/**
* @author Tomas Cernik (Tom.Cernik@gmail.com)
* @brief Class of LSTM unit
*
* (Removed by this commit.)
*/
// input + input gate + forget gate + output gate
// https://en.wikipedia.org/wiki/Long_short-term_memory
class LSTMNeuron : public Neuron
{
public:
LSTMNeuron(unsigned long _id=0): Neuron(_id) {
}
LSTMNeuron(const Neuron &r): Neuron(r) {
}
virtual ~LSTMNeuron() {
};
virtual std::string stringify(const std::string &prefix="") const override;
// NOTE(review): delegates to Neuron::operator= only, so forgetWeights and
// outputWeights are never copied (clone() below relies on this operator).
LSTMNeuron& operator=(const LSTMNeuron&r) {
this->Neuron::operator=(r);
return *this;
}
/**
* @brief Returns output of neuron
*/
virtual float output() const override {
return _output;
}
/**
* @brief Returns input of neuron
*/
virtual float value() const override {
return _value;
}
// Identical to Neuron::operator() in the visible code — the gate logic the
// class comment promises is not implemented here.
float operator()(const std::vector<float>& inputs) override {
//compute value
_value=basis->operator()(weights,inputs);
//compute output
_output=activation->operator()(_value);
return _output;
}
virtual Recurrent::LSTMNeuron* clone() const override {
LSTMNeuron *n = new Recurrent::LSTMNeuron;
*n=*this;
return n;
}
protected:
// Gate weights — appear unused in the code visible here; never copied.
std::vector<float> forgetWeights;
std::vector<float> outputWeights;
};
}
}

View File

@@ -1,6 +1,6 @@
include ../Makefile.const
OBJFILES= ./sse_mathfun.o ./NeuralNetwork/Recurrent/Network.o ./NeuralNetwork/Recurrent/Neuron.o
OBJFILES= ./sse_mathfun.o ./NeuralNetwork/Recurrent/Network.o ./NeuralNetwork/Neuron.o ./NeuralNetwork/FeedForward/Network.o ./NeuralNetwork/FeedForward/Layer.o
#LayerNetwork.o\
# Learning/Learning.o Learning/BackPropagation.o ../sse_mathfun.o

View File

@@ -0,0 +1,10 @@
#include <NeuralNetwork/FeedForward/Layer.h>
void NeuralNetwork::FeedForward::Layer::solve(const std::vector<float> &input, std::vector<float> &output) {
output.resize(neurons.size());
for(auto &neuron:neurons) {
output[neuron.id()]=neuron(input);
}
}

View File

@@ -0,0 +1,16 @@
#include <NeuralNetwork/FeedForward/Network.h>
// Computes one forward pass through the network.
// Two buffers are ping-ponged between layers so each layer reads the
// previous layer's output without reallocating on every step.
std::vector<float> NeuralNetwork::FeedForward::Network::computeOutput(const std::vector<float>& input) {
	// this is here for simple swapping between input and output
	std::vector<float> partial1=input;
	std::vector<float> partial2;
	std::vector<float> *partialInputPtr = &partial1;
	std::vector<float> *partialOutputPtr = &partial2;

	// Starts at 1 — layers[0] is never solved; presumably it is the input
	// layer whose outputs are the raw inputs themselves (confirm).
	// std::size_t avoids the signed/unsigned comparison the old int loop had.
	for(std::size_t i=1;i<layers.size();i++) {
		layers[i].solve(*partialInputPtr,*partialOutputPtr);
		std::swap(partialInputPtr,partialOutputPtr);
	}

	// After the final swap the "input" pointer holds the last layer's output
	// (or the original input if there are fewer than two layers).
	return *partialInputPtr;
}

View File

@@ -1,11 +1,11 @@
#include <NeuralNetwork/Recurrent/Neuron.h>
#include <NeuralNetwork/Neuron.h>
std::string NeuralNetwork::Recurrent::Neuron::stringify(const std::string &prefix) const {
std::string NeuralNetwork::Neuron::stringify(const std::string &prefix) const {
std::ostringstream out;
out.precision(std::numeric_limits<float>::digits10+1);
out << prefix << "{\n";
out << prefix << "\t\"class\": \"NeuralNetwork::Recurrent::Neuron\",\n";
out << prefix << "\t\"class\": \"NeuralNetwork::Neuron\",\n";
out << prefix << "\t\"id\": " << id() << ",\n";
out << prefix << "\t\"bias\": " << getBias() << ",\n";
out << prefix << "\t\"output\": " << output() << ",\n";
@@ -14,7 +14,7 @@ std::string NeuralNetwork::Recurrent::Neuron::stringify(const std::string &prefi
out << prefix << "\t\"basisFunction\": " << basis->stringify() <<",\n";
out << prefix << "\t\"weights\": {";
bool first=true;
for(size_t j=0;j<weights.size();j++) {
for(std::size_t j=0;j<weights.size();j++) {
if(weights[j]!= 0.0) {
if(!first)
out << ", ";

View File

@@ -2,7 +2,7 @@ include ../Makefile.const
LIB_DIR = ../lib
ALL_TESTS=activation basis recurrent recurrent_join
ALL_TESTS=activation basis recurrent feedforward
#LIBS=$(LIB_DIR)/Genetics.a $(LIB_DIR)/NeuralNetwork.a
#LIBS=-lGenetics.so -lNeuronNetwork

View File

@@ -17,6 +17,4 @@ int main() {
float res= a.computeOutput({1,0.7})[0];
assert(res > solutions[i]*0.999 && res < solutions[i]*1.001);
}
std::cout << a;
}