Neuron change

This commit is contained in:
2016-02-01 23:43:13 +01:00
parent 3e383e9add
commit ea4ce22867
8 changed files with 147 additions and 13 deletions

View File

@@ -1,5 +1,5 @@
CXX=g++ -m64
CXXFLAGS+= -Wall -Wextra -pedantic -Weffc++ -Wshadow -Wstrict-aliasing -ansi -Woverloaded-virtual -Wdelete-non-virtual-dtor
CXXFLAGS+= -Wall -Wextra -pedantic -Weffc++ -Wshadow -Wstrict-aliasing -ansi -Woverloaded-virtual -Wdelete-non-virtual-dtor -Wno-unused-function
CXXFLAGS+= -std=c++14
#-fprefetch-loop-arrays
CXXFLAGS+= -pg -fPIC

View File

@@ -4,6 +4,7 @@
#include <vector>
#include "../Neuron.h"
#include "../Stringifiable.h"
namespace NeuralNetwork {
namespace FeedForward {
@@ -12,19 +13,46 @@ namespace FeedForward {
* @author Tomas Cernik (Tom.Cernik@gmail.com)
* @brief Class for Layer of FeedForward network
*/
class Layer
{
public:
class Layer : public Stringifiable {
~Layer() {};
public:
Layer(std::size_t size = 0):neurons() {
neurons.push_back(new BiasNeuron);
for(std::size_t i=0;i<size;i++) {
neurons.push_back(new Neuron(neurons.size()));
}
}
Layer(const Layer&r):neurons() {
*this=r;
}
Layer& operator=(const Layer &r) {
for(auto &neuron:neurons) {
delete neuron;
}
neurons.clear();
for(auto &neuron:r.neurons) {
neurons.push_back(neuron->clone());
}
return *this;
}
~Layer() {
for(auto &neuron:neurons) {
delete neuron;
}
};
/**
* @brief This is a virtual function for selecting neuron
* @param neuron is position in layer
* @returns Specific neuron
*/
Neuron& operator[](const std::size_t& neuron) {
return neurons[neuron];
NeuronInterface& operator[](const std::size_t& neuron) {
return *neurons[neuron];
}
void solve(const std::vector<float> &input, std::vector<float> &output);
@@ -36,8 +64,29 @@ namespace FeedForward {
return neurons.size();
}
void setInputSize(std::size_t size) {
for(auto& neuron:neurons) {
neuron->setInputSize(size);
}
}
using Stringifiable::stringify;
virtual void stringify(std::ostream& out) const override {
out << "{" << std::endl;
out << "\t \"class\": \"NeuralNetwork::FeedForward::Layer\"," << std::endl;
out << "\t \"neurons\": [" << std::endl;
bool first=true;
for(auto &neuron: neurons) {
if(!first)
out << ", ";
out << neuron->stringify();
first=false;
}
out << "]";
out << "}";
}
protected:
std::vector<Neuron> neurons;
std::vector<NeuronInterface*> neurons;
};
}
}

View File

@@ -25,9 +25,23 @@ namespace FeedForward {
* @param _outputSize is size of output from network
* @param hiddenUnits is number of hiddenUnits to be created
*/
inline Network(size_t _inputSize):NeuralNetwork::Network() {
inline Network(size_t _inputSize):NeuralNetwork::Network(),layers() {
appendLayer(_inputSize);
};
Layer& appendLayer(std::size_t size=1) {
layers.push_back(Layer(size));
if(layers.size() > 1)
layers.back().setInputSize(layers[layers.size()-2].size());
return layers.back();
}
Layer& operator[](const std::size_t &id) {
return layers[id];
}
/**
* @brief Virtual destructor for Network
*/
@@ -44,6 +58,19 @@ namespace FeedForward {
using NeuralNetwork::Network::stringify;
void stringify(std::ostream& out) const override {
out << "{" << std::endl;
out << "\t \"class\": \"NeuralNetwork::FeedForward::Network\"," << std::endl;
out << "\t \"layers\": [" << std::endl;
bool first=true;
for(auto &layer:layers) {
if(!first) {
out << ",";
}
out << layer;
first=false;
}
out << "]";
out << "}";
}
protected:

View File

@@ -76,6 +76,8 @@ namespace NeuralNetwork
virtual float operator()(const std::vector<float>& inputs) =0;
virtual void setInputSize(const std::size_t &size) = 0;
/**
* @brief Function returns clone of object
*/
@@ -138,6 +140,12 @@ namespace NeuralNetwork
weights[n.id()]=w;
}
virtual void setInputSize(const std::size_t &size) override {
if(weights.size()<size+1) {
weights.resize(size+1);
}
}
/**
* @brief Returns output of neuron
*/
@@ -215,6 +223,9 @@ namespace NeuralNetwork
virtual float operator()(const std::vector< float >&) override { return 1.0; }
virtual void setInputSize(const std::size_t&) override {
}
virtual BiasNeuron* clone() const { return new BiasNeuron(); }
};
@@ -242,6 +253,9 @@ namespace NeuralNetwork
virtual float operator()(const std::vector< float >&) override { return 1.0; }
virtual void setInputSize(const std::size_t&) override {
}
virtual InputNeuron* clone() const { return new InputNeuron(id_); }
protected:
long unsigned int id_;

View File

@@ -22,8 +22,9 @@ namespace NeuralNetwork {
};
static std::ostream& operator<<(std::ostream& o, const Stringifiable& n) {
inline static std::ostream& operator<<(std::ostream& o, const Stringifiable& n) {
n.stringify(o);
return o;
}
}

View File

@@ -4,7 +4,7 @@ void NeuralNetwork::FeedForward::Layer::solve(const std::vector<float> &input, s
output.resize(neurons.size());
for(auto &neuron:neurons) {
output[neuron.id()]=neuron(input);
output[neuron->id()]=neuron->operator()(input);
}
}

View File

@@ -2,15 +2,27 @@
std::vector<float> NeuralNetwork::FeedForward::Network::computeOutput(const std::vector<float>& input) {
// this is here for simple swapping between input and output
std::vector<float> partial1=input;
std::vector<float> partial1=std::vector<float>(input.size()+1);
std::vector<float> partial2;
std::vector<float> *partialInputPtr = &partial1;
std::vector<float> *partialOutputPtr = &partial2;
for(int i=1;i<layers.size();i++) {
// 0 is bias
partial1[0]=1.0;
for(int i=0;i<input.size();i++) {
partial1[i+1]=input[i];
}
for(std::size_t i=1;i<layers.size();i++) {
layers[i].solve(*partialInputPtr,*partialOutputPtr);
std::swap(partialInputPtr,partialOutputPtr);
}
for(int i=0;i<partialInputPtr->size()-1;i++) {
partialInputPtr->operator[](i)=partialInputPtr->operator[](i+1);
}
partialInputPtr->resize(partialInputPtr->size()-1);
return std::vector<float>(*partialInputPtr);
}

31
tests/feedforward.cpp Normal file
View File

@@ -0,0 +1,31 @@
#include <NeuralNetwork/FeedForward/Network.h>
#include <iostream>
/**
 * @brief Prints every element of a float vector to stdout, comma-separated,
 *        followed by a trailing newline.
 * @param v is the vector whose elements are printed
 */
void printVec(const std::vector<float> &v) {
	// Range-for avoids the signed/unsigned comparison (int i < v.size())
	// that -Wall -Wextra in this project's CXXFLAGS would warn about.
	for(const float value : v) {
		std::cout << value << ", ";
	}
	std::cout << "\n";
}
int main() {
NeuralNetwork::FeedForward::Network n(2);
NeuralNetwork::FeedForward::Layer &sec=n.appendLayer(4);
NeuralNetwork::FeedForward::Layer &in = n[0];
NeuralNetwork::FeedForward::Layer &t = n.appendLayer(4);
sec[1].setWeight(in[1],-1.0);
sec[1].setWeight(in[2],-1.0);
sec[2].setWeight(in[2],-1.0);
t[2].setWeight(sec[2],-1.0);
std::vector<float> ret =n.computeOutput({0.7,0.7});
printVec(ret);
std::cout << n;
}