refactoring recurrent
@@ -44,8 +44,6 @@ namespace NeuralNetwork
        inline virtual void setThreads(const unsigned& t) final {threads=t;}

        using Stringifiable::stringify;

    protected:
        /**
         * @brief Number of threads used by network
@@ -1,6 +1,7 @@
#pragma once

#include <string>
#include <vector>

namespace NeuralNetwork
{
@@ -66,6 +67,65 @@ namespace NeuralNetwork
         * @brief Returns the bias of the neuron
         */
        virtual float getBias() const=0;

        virtual float operator()(const std::vector<float>& inputs) =0;

        /**
         * @brief Returns a clone of the object
         */
        virtual Neuron* clone() const = 0;
    protected:
    };

    class BiasNeuron: public Neuron {
        public:
            virtual float getBias() const override { return 0; }

            virtual float getWeight(const Neuron&) const override { return 0; }

            virtual void setBias(const float&) override { }

            virtual float output() const override { return 1.0; }

            virtual void setWeight(const Neuron&, const float&) override { }

            virtual std::string stringify(const std::string& prefix = "") const override { return prefix+"{ \"class\" : \"NeuralNetwork::BiasNeuron\" }"; }

            virtual float value() const override { return 1.0; }

            virtual long unsigned int id() const override { return 0; }

            virtual float operator()(const std::vector<float>&) override { return 1.0; }

            virtual Neuron* clone() const override { return new BiasNeuron(); }
    };

    class InputNeuron: public Neuron {
        public:
            InputNeuron(long unsigned int _id): id_(_id) {

            }

            virtual float getBias() const override { return 0; }

            virtual float getWeight(const Neuron&) const override { return 0; }

            virtual void setBias(const float&) override { }

            virtual float output() const override { return 1.0; }

            virtual void setWeight(const Neuron&, const float&) override { }

            virtual std::string stringify(const std::string& prefix = "") const override { return prefix+"{ \"class\" : \"NeuralNetwork::InputNeuron\", \"id\": "+std::to_string(id_)+" }"; }

            virtual float value() const override { return 1.0; }

            virtual long unsigned int id() const override { return id_; }

            virtual float operator()(const std::vector<float>&) override { return 1.0; }

            virtual Neuron* clone() const override { return new InputNeuron(id_); }
        protected:
            long unsigned int id_;
    };
}
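Note: the new BiasNeuron and InputNeuron are constant placeholders. Both emit 1.0 and ignore their inputs, so the weight a downstream neuron assigns to the BiasNeuron plays the role of that neuron's bias. A minimal usage sketch under the interface shown in this hunk (the rest of the Neuron header is not part of the diff):

    // Sketch only: exercises the placeholder neurons added above.
    NeuralNetwork::Neuron *bias = new NeuralNetwork::BiasNeuron();
    NeuralNetwork::Neuron *in = new NeuralNetwork::InputNeuron(1);

    float b = (*bias)({});            // always 1.0, inputs are ignored
    float v = (*in)({});              // also 1.0; the id tags the input slot

    NeuralNetwork::Neuron *copy = in->clone();   // polymorphic copy via clone()

    delete copy;
    delete in;
    delete bias;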
@@ -26,7 +26,13 @@ namespace Recurrent {
             * @param hiddenUnits is the number of hidden units to be created
             */
            inline Network(size_t _inputSize, size_t _outputSize,size_t hiddenUnits=0):NeuralNetwork::Network(),inputSize(_inputSize),outputSize(_outputSize), neurons(0) {
                for(size_t i=0;i<_inputSize+_outputSize;i++) {
                neurons.push_back(new NeuralNetwork::BiasNeuron());

                for(size_t i=0;i<_inputSize;i++) {
                    neurons.push_back(new NeuralNetwork::InputNeuron(neurons.size()));
                }

                for(size_t i=0;i<_outputSize;i++) {
                    addNeuron();
                }
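Note: after this change the neuron vector is laid out as [bias, inputs..., outputs..., hidden...]. Slot 0 holds the BiasNeuron, slots 1.._inputSize the InputNeurons, and the remaining slots are created through addNeuron(). This is why the index arithmetic in computeOutput below is shifted by one. A hedged sketch of what the constructor produces (indices inferred from the test at the end of this commit):

    NeuralNetwork::Recurrent::Network net(2, 1, 1);  // 2 inputs, 1 output, 1 hidden unit
    auto &neurons = net.getNeurons();
    // neurons[0] -> BiasNeuron, neurons[1..2] -> InputNeurons,
    // neurons[3] -> output neuron, neurons[4] -> hidden neuron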
@@ -35,14 +41,14 @@ namespace Recurrent {
                }
            };

            // todo: implement
            inline Network(const std::string &json) {

            }
            /**
             * @brief Virtual destructor for Network
             */
            virtual ~Network() {};
            virtual ~Network() {
                for(auto& a:neurons) {
                    delete a;
                }
            };

            /**
             * @brief Computes one iteration of the network
@@ -61,7 +67,7 @@ namespace Recurrent {
             */
            std::vector<float> computeOutput(const std::vector<float>& input, unsigned int iterations);

            std::vector<Neuron>& getNeurons () {
            std::vector<NeuralNetwork::Neuron*>& getNeurons () {
                return neurons;
            }
@@ -69,20 +75,27 @@ namespace Recurrent {

            void stringify(std::ostream& out) const override;

            Neuron& addNeuron() {
                neurons.push_back(Recurrent::Neuron(neurons.size()));
                Neuron &newNeuron=neurons.back();
            NeuralNetwork::Neuron& addNeuron() {
                neurons.push_back(new Recurrent::Neuron(neurons.size()));
                NeuralNetwork::Neuron *newNeuron=neurons.back();
                for(size_t i=0;i<neurons.size();i++) {
                    neurons[i].setWeight(newNeuron,0.0);
                    neurons[i]->setWeight(*newNeuron,0.0);
                }
                return *newNeuron;
            }
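Note: addNeuron() now heap-allocates a Recurrent::Neuron, registers a zero-valued connection from every existing neuron to the new one, and returns a base-class reference. A hedged usage sketch (interface as shown in this hunk, net as in the earlier sketch):

    NeuralNetwork::Neuron &hidden = net.addNeuron();      // appended with all connections at 0.0
    hidden.setWeight(*net.getNeurons()[1], 0.5f);          // wire it to the first input neuron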
            /**
             * @brief Creates a new network by joining two networks
             * @param r is the network connected to the outputs of this network
             * @returns the network constructed from the two networks
             */
            NeuralNetwork::Recurrent::Network connectWith(const NeuralNetwork::Recurrent::Network &r) const;

        protected:
            size_t inputSize=0;
            size_t outputSize=0;

            std::vector<Recurrent::Neuron> neurons;
            std::vector<NeuralNetwork::Neuron*> neurons;
    };
}
}
@@ -22,14 +22,15 @@ namespace Recurrent {
        class Neuron : public NeuralNetwork::Neuron
        {
            public:
                Neuron(unsigned long _id=0,const float& _bias = 0): NeuralNetwork::Neuron(), basis(new BasisFunction::Linear),
                Neuron(unsigned long _id=0): NeuralNetwork::Neuron(), basis(new BasisFunction::Linear),
                                activation(new ActivationFunction::Sigmoid(-4.9)),
                                id_(_id),bias(_bias),weights(_id+1),_output(0),_value(0) {
                                id_(_id),weights(_id+1),_output(0),_value(0) {
                }

                Neuron(const Neuron &r): NeuralNetwork::Neuron(), basis(r.basis->clone()), activation(r.activation->clone()),id_(r.id_),
                                bias(r.bias), weights(r.weights), _output(r._output), _value(r._value) {
                                weights(r.weights), _output(r._output), _value(r._value) {
                }

                virtual ~Neuron() {
                    delete basis;
                    delete activation;
@@ -39,7 +40,6 @@ namespace Recurrent {

                Recurrent::Neuron& operator=(const NeuralNetwork::Recurrent::Neuron&r) {
                    id_=r.id_;
                    bias=r.bias;
                    weights=r.weights;
                    basis=r.basis->clone();
                    activation=r.activation->clone();
@@ -89,19 +89,19 @@ namespace Recurrent {
                 * @param _bias is the new bias (initial value) for the neuron
                 */
                virtual void setBias(const float &_bias) override {
                    bias=_bias;
                    weights[0]=_bias;
                }

                /**
                 * @brief Returns the bias of the neuron
                 */
                virtual float getBias() const override {
                    return bias;
                    return weights[0];
                }

                float operator()(const std::vector<float>& inputs) {
                    //compute value
                    _value=basis->operator()(weights,inputs)+bias;
                    _value=basis->operator()(weights,inputs);

                    //compute output
                    _output=activation->operator()(_value);
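Note: this hunk folds the separate bias member into weights[0], the weight toward the BiasNeuron in slot 0. Because that neuron always outputs 1.0, a dot-product style basis function reproduces the old weighted-sum-plus-bias without the explicit +bias term. A sketch of that identity, assuming BasisFunction::Linear is essentially a dot product (its definition is not part of this diff):

    // weights[0]*outputs[0] == weights[0]*1.0 supplies the bias term;
    // the remaining entries give the usual weighted sum of neuron outputs.
    float value = 0.0f;
    for (size_t i = 0; i < weights.size(); ++i)
        value += weights[i] * outputs[i];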
@@ -109,15 +109,83 @@ namespace Recurrent {
                    return _output;
                }

                virtual Neuron* clone() const override {
                    Neuron *n = new Recurrent::Neuron;
                    *n=*this;
                    return n;
                }
            protected:
                BasisFunction::BasisFunction *basis;
                ActivationFunction::ActivationFunction *activation;

                unsigned long id_;
                float bias;
                std::vector<float> weights;
                float _output;
                float _value;
        };
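Note: since the network now owns raw NeuralNetwork::Neuron pointers instead of Neuron values, any copy of the neuron list has to go through the virtual clone() above. A hedged sketch of such a deep copy (not part of this commit):

    std::vector<NeuralNetwork::Neuron*> copy;
    copy.reserve(neurons.size());
    for (const auto *n : neurons)
        copy.push_back(n->clone());   // polymorphic deep copy; the copies must be deleted later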
        /**
         * @author Tomas Cernik (Tom.Cernik@gmail.com)
         * @brief Class of LSTM unit
         */
        // input + input gate + forget gate + output gate
        // https://en.wikipedia.org/wiki/Long_short-term_memory

        class LSTMNeuron : public Neuron
        {
            public:
                LSTMNeuron(unsigned long _id=0): Neuron(_id) {
                }

                LSTMNeuron(const Neuron &r): Neuron(r) {
                }

                virtual ~LSTMNeuron() {

                };

                virtual std::string stringify(const std::string &prefix="") const override;

                LSTMNeuron& operator=(const LSTMNeuron&r) {
                    this->Neuron::operator=(r);

                    return *this;
                }

                /**
                 * @brief Returns the output of the neuron
                 */
                virtual float output() const override {
                    return _output;
                }

                /**
                 * @brief Returns the input of the neuron
                 */
                virtual float value() const override {
                    return _value;
                }

                float operator()(const std::vector<float>& inputs) override {

                    //compute value
                    _value=basis->operator()(weights,inputs);

                    //compute output
                    _output=activation->operator()(_value);

                    return _output;
                }

                virtual Recurrent::LSTMNeuron* clone() const override {
                    LSTMNeuron *n = new Recurrent::LSTMNeuron;
                    *n=*this;
                    return n;
                }
            protected:
                std::vector<float> forgetWeights;
                std::vector<float> outputWeights;

        };
    }
}
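Note: for now LSTMNeuron behaves exactly like its base class and only reserves forgetWeights and outputWeights; the gating from the linked article is not implemented yet. As a reference point, a hedged, scalar-simplified sketch of one LSTM step (names are illustrative, not part of this commit):

    #include <cmath>

    float sigmoid(float x) { return 1.0f / (1.0f + std::exp(-x)); }

    // One step of a single-input LSTM cell: gates scale what enters the cell,
    // what is kept from the previous cell state, and what is exposed as output.
    float lstmStep(float x, float &cell,
                   float wIn, float wInGate, float wForget, float wOut) {
        float i = sigmoid(wInGate * x);            // input gate
        float f = sigmoid(wForget * x);            // forget gate
        float o = sigmoid(wOut * x);               // output gate
        cell = f * cell + i * std::tanh(wIn * x);  // cell state update
        return o * std::tanh(cell);                // hidden output
    }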
@@ -29,4 +29,6 @@ $(LIBNAME).a: $(OBJFILES)
	nm --demangle $(LIBNAME).a > $(LIBNAME).nm

clean:
	@rm -f ./*.o ./*.so ./*.a ./*.nm ./*/*.o
	@rm -rf *.o ./*.so ./*.a ./*.nm ./*/*.o
	@rm -f $(OBJFILES)
@@ -5,24 +5,24 @@ std::vector<float> NeuralNetwork::Recurrent::Network::computeOutput(const std::v

    std::vector<float> outputs(neuronSize);
    for(size_t i=0;i<inputSize;i++) {
        outputs[i]=input[i];
        outputs[i+1]=input[i];
    }

    for(unsigned int iter=0;iter< iterations;iter++) {

        for(size_t i=inputSize;i<neuronSize;i++) {
            outputs[i]=neurons[i].output();
        for(size_t i=inputSize+1;i<neuronSize;i++) {
            outputs[i]=neurons[i]->output();
        }

        // update neurons
        for(size_t i=inputSize;i<neuronSize;i++) {
            neurons[i](outputs);
        for(size_t i=inputSize+1;i<neuronSize;i++) {
            neurons[i]->operator()(outputs);
        }
    }

    std::vector<float> ret;
    for(size_t i=0;i<outputSize;i++) {
        ret.push_back(neurons[i+inputSize].output());
        ret.push_back(neurons[i+inputSize+1]->output());
    }

    return ret;
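Note: because slot 0 now holds the bias neuron, the routine writes the i-th input into outputs[i+1] and reads the k-th network output from neurons[inputSize+1+k]. A hedged usage sketch (the default for the iterations argument is not visible in this diff, so it is passed explicitly):

    NeuralNetwork::Recurrent::Network net(2, 1, 1);
    std::vector<float> y = net.computeOutput({1.0f, 0.7f}, 1);  // one recurrence step
    // y.size() == outputSize == 1; y[0] is read from neurons[inputSize+1]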
@@ -36,15 +36,47 @@ void NeuralNetwork::Recurrent::Network::stringify(std::ostream& out) const {
    out << "\t\"inputs\":" << inputSize << ",\n";
    out << "\t\"outputs\":" << outputSize << ",\n";

    out << "\t\"neurons\":[";
    out << "\t\"neurons\":[\n";

    for(size_t i=0;i<neurons.size();i++) {
        if(i!=0)
            out << ",\n";

        out << neurons[i].stringify("\t\t");
        out << neurons[i]->stringify("\t\t");
    }
    out << "\n\t]\n";

    out <<"}";
}
}
NeuralNetwork::Recurrent::Network NeuralNetwork::Recurrent::Network::connectWith(const NeuralNetwork::Recurrent::Network &r) const {

}
/*
NeuralNetwork::Recurrent::Network NeuralNetwork::Recurrent::Network::connectWith(const NeuralNetwork::Recurrent::Network &r) const {
    if(outputSize!=r.inputSize) {
        //TODO: throw exception
    }

    NeuralNetwork::Recurrent::Network newNetwork(inputSize,r.outputSize,(neurons.size()-1-inputSize)+(r.neurons.size()-1-r.inputSize-r.outputSize));

    // update output neurons first
    for(size_t i=0;i<r.outputSize;i++) {
        size_t index=1+newNetwork.inputSize+i;

        delete newNetwork.neurons[index];
        newNetwork.neurons[index]= r.neurons[1+r.inputSize+i]->clone();

        Neuron* n= newNetwork.neurons[index];

        for(int i=0;i<newNetwork.inputSize;i++) {
            n->setWeight(newNetwork.+i,0.0);
        }

        for(int i=0;i<r.inputSize;i++) {
            n->setWeight(1+newNetwork.inputSize+newNetwork.outputSize,0.0);
        }
    }

    return newNetwork;
}
*/
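Note: the enabled definition of connectWith falls off the end of a non-void function, which is undefined behaviour as soon as it is called; the real join logic is still the commented-out draft above. Until it is implemented, a minimal placeholder consistent with the constructor shown earlier might be:

    NeuralNetwork::Recurrent::Network NeuralNetwork::Recurrent::Network::connectWith(const NeuralNetwork::Recurrent::Network &r) const {
        // placeholder: return an unconnected network of the joined shape
        return NeuralNetwork::Recurrent::Network(inputSize, r.outputSize);
    }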
@@ -3,7 +3,6 @@
std::string NeuralNetwork::Recurrent::Neuron::stringify(const std::string &prefix) const {
    std::ostringstream out;
    out.precision(std::numeric_limits<float>::digits10+1);
    out <<std::setprecision(std::numeric_limits<float>::digits10+1);

    out << prefix << "{\n";
    out << prefix << "\t\"class\": \"NeuralNetwork::Recurrent::Neuron\",\n";
@@ -13,13 +12,17 @@ std::string NeuralNetwork::Recurrent::Neuron::stringify(const std::string &prefi
    out << prefix << "\t\"value\": " << value() << ",\n";
    out << prefix << "\t\"activationFunction\": " << activation->stringify() <<",\n";
    out << prefix << "\t\"basisFunction\": " << basis->stringify() <<",\n";
    out << prefix << "\t\"weights\": [";
    out << prefix << "\t\"weights\": {";
    bool first=true;
    for(size_t j=0;j<weights.size();j++) {
        if(j!=0)
            out << ", ";
        out << weights[j];
        if(weights[j]!= 0.0) {
            if(!first)
                out << ", ";
            first=false;
            out << "\"" << j << "\": " << weights[j];
        }
    }
    out << "]\n";
    out << "}\n";
    out << prefix << "}";
    return out.str();
}
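Note: the weights field is now serialized as a sparse JSON object that keeps only non-zero entries keyed by their index, for example "weights": { "0": 0.5, "4": 0.1 } (values illustrative), instead of the old dense array such as [0.5, 0, 0, 0, 0.1]. This keeps the dump small for mostly-unconnected recurrent networks.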
@@ -2,7 +2,7 @@ include ../Makefile.const

LIB_DIR = ../lib

ALL_TESTS=activation basis recurrent
ALL_TESTS=activation basis recurrent recurrent_join

#LIBS=$(LIB_DIR)/Genetics.a $(LIB_DIR)/NeuralNetwork.a
#LIBS=-lGenetics.so -lNeuronNetwork
@@ -12,7 +12,7 @@ CXXFLAGS += -I$(LIB_DIR)
all:$(ALL_TESTS);

test: all
	@for i in $(ALL_TESTS); do echo -n ./$$i; echo -n " - "; ./$$i; echo ""; done
	@for i in $(ALL_TESTS); do echo -n ./$$i; ./$$i; echo ""; done

../src/NeuralNetwork.so: lib
@@ -1,30 +1,22 @@
#include <NeuralNetwork/Recurrent/Network.h>

#include <assert.h>
#include <iostream>

int main() {
    NeuralNetwork::Recurrent::Network a(2,1,1);
/*  a.getNeurons()[3].setWeight(a.getNeurons()[2],0.00000001565598595);
    a.getNeurons()[2].setWeight(a.getNeurons()[3],0.00000001565598595);
    a.getNeurons()[3].setWeight(a.getNeurons()[1],0.00000001565598595);
    a.getNeurons()[3].setWeight(a.getNeurons()[0],0.00000001565598595);

    a.computeOutput({0.5,0});

    std::cout << a;

    NeuralNetwork::Recurrent::Network b(a.stringify());
*/
    a.getNeurons()[4]->setWeight(*a.getNeurons()[1],0.05);
    a.getNeurons()[4]->setWeight(*a.getNeurons()[2],0.05);
    a.getNeurons()[4]->setWeight(*a.getNeurons()[3],0.7);
    a.getNeurons()[3]->setWeight(*a.getNeurons()[4],0.1);

    a.getNeurons()[3].setWeight(a.getNeurons()[0],0.05);
    a.getNeurons()[3].setWeight(a.getNeurons()[1],0.05);
    a.getNeurons()[3].setWeight(a.getNeurons()[2],0.7);
    a.getNeurons()[2].setWeight(a.getNeurons()[3],0.1);
    std::vector <float> solutions({0.5,0.5732923,0.6077882,0.6103067,0.6113217,0.6113918,0.61142,0.6114219,0.6114227,0.6114227});

    std::cout << a;

    for(int i=0;i<40;i++) {
        std::cout << a.computeOutput({1,0.7})[0] << "\n";
    for(size_t i=0;i<solutions.size();i++) {
        float res= a.computeOutput({1,0.7})[0];
        assert(res > solutions[i]*0.999 && res < solutions[i]*1.001);
    }
    std::cout << a;

    std::cout << a;
}
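Note: the rewritten test replaces the open-ended print loop with a regression check, asserting that each iteration's output lies within 0.1% of the recorded value in solutions. A hedged restatement of the tolerance condition, equivalent for the positive expected values used here (res and solutions[i] refer to the test's variables):

    // equivalent check, using std::fabs from <cmath>
    bool ok = std::fabs(res - solutions[i]) < 0.001f * solutions[i];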