Moving attributes to NeuronInterface for speed

2016-02-18 18:43:09 +01:00
parent ec82018ef9
commit ce814b89b0
7 changed files with 75 additions and 109 deletions
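The speed-up here is devirtualization: the id, weights and _output attributes move into the NeuronInterface base class, the weight/output/setInputSize accessors become final functions defined in the header, and call sites read neuron->id as a plain data member. The compiler can then inline these accesses instead of dispatching through the vtable on every weight lookup. A minimal sketch of the pattern, with illustrative names that are not from this repository:

#include <cstddef>
#include <vector>

// Before: every access to hot data pays a virtual call through the vtable.
struct SlowNeuron {
    virtual ~SlowNeuron() = default;
    virtual unsigned long id() const = 0;
    virtual float weight(std::size_t n) const = 0;
};

// After: the hot data lives in the base class; 'final' lets the compiler
// devirtualize and inline the accessor, and 'id' needs no call at all.
struct FastNeuron {
    explicit FastNeuron(unsigned long id_ = 0): id(id_) {}
    virtual ~FastNeuron() = default;
    virtual float weight(std::size_t n) const final { return weights[n]; }
    const unsigned long id;
protected:
    std::vector<float> weights = std::vector<float>(1);
};

float biasTerm(const FastNeuron &n) {
    // Both reads compile to direct loads; no vtable lookup is involved.
    return static_cast<float>(n.id) + n.weight(0);
}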

View File

@@ -18,11 +18,13 @@ namespace NeuralNetwork
 class NeuronInterface
 {
 public:
+NeuronInterface(const unsigned long &_id=0): id(_id) {
-/**
- * @brief returns unique id for neuron
- */
-virtual unsigned long id() const =0;
+}
+NeuronInterface(const NeuronInterface &r): id(r.id), weights(r.weights),_output(r._output) {
+    weights=weights;
+}
 /**
  * @brief virtual destructor for Neuron
@@ -35,35 +37,28 @@ namespace NeuralNetwork
  */
 virtual std::string stringify(const std::string &prefix="") const =0;
 /**
  * @brief Returns weight
  * @param n is neuron
  */
-virtual float weight(const NeuronInterface &n) const =0;
+inline virtual float weight(const NeuronInterface &n) const final {
+    return weights[n.id];
+}
 /**
  * @brief Returns weight
  * @param n is id of neuron
  */
-virtual float weight(const std::size_t &n) const =0;
+inline virtual float weight(const std::size_t &n) const final {
+    return weights[n];
+}
 /**
  * @brief Returns reference to weight
  * @param n is neuron
  */
-virtual float& weight(const NeuronInterface &n) =0;
+inline virtual float& weight(const NeuronInterface &n) final {
+    return weights[n.id];
+}
 /**
  * @brief Returns reference to weight
  * @param n is id of neuron
  */
-virtual float& weight(const std::size_t &n) =0;
+inline virtual float& weight(const std::size_t &n) final {
+    return weights[n];
+}
 /**
  * @brief Returns output of neuron
  */
-virtual float output() const =0;
+inline virtual float output() const final {
+    return _output;
+}
 /**
  * @brief Returns input of neuron
  */
@@ -82,7 +77,12 @@ namespace NeuralNetwork
 virtual float operator()(const std::vector<float>& inputs) =0;
-virtual void setInputSize(const std::size_t &size) = 0;
+inline virtual void setInputSize(const std::size_t &size) final {
+    if(weights.size()<size) {
+        weights.resize(size);
+    }
+}
 /**
  * @brief Function returns clone of object
@@ -98,6 +98,12 @@ namespace NeuralNetwork
  * @brief getter for activation function of neuron
  */
 virtual ActivationFunction::ActivationFunction& getActivationFunction() =0;
+const unsigned long id;
+protected:
+std::vector<float> weights= std::vector<float>(1);
+float _output=1.0;
 };
 /**
@@ -108,13 +114,14 @@ namespace NeuralNetwork
 {
 public:
 Neuron(unsigned long _id=0, const ActivationFunction::ActivationFunction &activationFunction=ActivationFunction::Sigmoid(-4.9)):
-    NeuronInterface(), id_(_id), basis(new BasisFunction::Linear),
+    NeuronInterface(_id), basis(new BasisFunction::Linear),
     activation(activationFunction.clone()),
-    weights(1),_output(0),_value(0) {
+    _value(0) {
+    _output=0.0;
 }
-Neuron(const Neuron &r): NeuronInterface(), id_(r.id_), basis(r.basis->clone()), activation(r.activation->clone()),
-    weights(r.weights), _output(r._output), _value(r._value) {
+Neuron(const Neuron &r): NeuronInterface(r), basis(r.basis->clone()), activation(r.activation->clone()),
+    _value(r._value) {
 }
 virtual ~Neuron() {
@@ -126,39 +133,6 @@ namespace NeuralNetwork
 virtual std::string stringify(const std::string &prefix="") const override;
-virtual long unsigned int id() const override {
-    return id_;
-};
-virtual float weight(const NeuronInterface &n) const override {
-    return weights[n.id()];
-}
-virtual float weight(const std::size_t &n) const override {
-    return weights[n];
-}
-virtual float& weight(const NeuronInterface &n) override {
-    return weights[n.id()];
-}
-virtual float& weight(const std::size_t &n) override {
-    return weights[n];
-}
-virtual void setInputSize(const std::size_t &size) override {
-    if(weights.size()<size) {
-        weights.resize(size);
-    }
-}
-/**
- * @brief Returns output of neuron
- */
-virtual float output() const override {
-    return _output;
-}
 /**
  * @brief Returns input of neuron
  */
@@ -203,16 +177,11 @@ namespace NeuralNetwork
 virtual ActivationFunction::ActivationFunction& getActivationFunction() override {
     return *activation;
 }
-const unsigned long id_;
 protected:
 BasisFunction::BasisFunction *basis;
 ActivationFunction::ActivationFunction *activation;
-std::vector<float> weights;
-float _output;
 float _value;
 };
@@ -233,27 +202,14 @@ namespace NeuralNetwork
 virtual float getBias() const override { return 0; };
-float a=0.0;
-virtual float& weight(const NeuronInterface &) override { return a; }
-virtual float& weight(const std::size_t &) override { return a; }
-virtual float weight(const NeuronInterface&) const override { return 0; }
-virtual float weight(const std::size_t&) const override { return 0; }
 virtual void setBias(const float&) override{ }
-virtual float output() const override { return 1.0; };
 virtual std::string stringify(const std::string& prefix = "") const override { return prefix+"{ \"class\" : \"NeuralNetwork::BiasNeuron\" }"; }
 virtual float value() const override { return 1.0; }
-virtual long unsigned int id() const override { return 0; }
 virtual float operator()(const std::vector< float >&) override { return 1.0; }
-virtual void setInputSize(const std::size_t&) override {
-}
 virtual BiasNeuron* clone() const { return new BiasNeuron(); }
 virtual BasisFunction::BasisFunction& getBasisFunction() override {
@@ -281,34 +237,21 @@ namespace NeuralNetwork
 std::string text;
 };
-InputNeuron(long unsigned int _id): id_(_id) {
+InputNeuron(long unsigned int _id): NeuronInterface(_id) {
 }
 virtual float getBias() const override { return 0; };
-float a=0.0;
-virtual float& weight(const NeuronInterface &) override { return a; }
-virtual float& weight(const std::size_t &) override { return a; }
-virtual float weight(const NeuronInterface&) const override { return 0; }
-virtual float weight(const std::size_t&) const override { return 0; }
 virtual void setBias(const float&) override{ }
-virtual float output() const override { return 1.0; };
-virtual std::string stringify(const std::string& prefix = "") const override { return prefix+"{ \"class\" : \"NeuralNetwork::InputNeuron\", \"id\": "+std::to_string(id_)+" }"; }
+virtual std::string stringify(const std::string& prefix = "") const override { return prefix+"{ \"class\" : \"NeuralNetwork::InputNeuron\", \"id\": "+std::to_string(id)+" }"; }
 virtual float value() const override { return 1.0; }
-virtual long unsigned int id() const override { return id_; }
 virtual float operator()(const std::vector< float >&) override { return 1.0; }
-virtual void setInputSize(const std::size_t&) override {
-}
-virtual InputNeuron* clone() const { return new InputNeuron(id_); }
+virtual InputNeuron* clone() const { return new InputNeuron(id); }
 virtual BasisFunction::BasisFunction& getBasisFunction() override {
     throw usageException("basis function");
@@ -317,7 +260,5 @@ namespace NeuralNetwork
 virtual ActivationFunction::ActivationFunction& getActivationFunction() override {
     throw usageException("activation function");
 }
-protected:
-long unsigned int id_;
 };
 }
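One consequence of the new public const unsigned long id member is worth noting as a caveat. A const data member leaves the generated copy constructor intact, which is why the derived copy constructors above can simply forward to NeuronInterface(r), but it implicitly deletes the copy assignment operator, so neurons can no longer be assigned element-wise. A small sketch of the behavior, with illustrative names rather than repository code:

// Sketch of the caveat, not repository code.
struct HasConstId {
    explicit HasConstId(unsigned long id_ = 0): id(id_) {}
    const unsigned long id;
};

int main() {
    HasConstId a(1), b(2);
    HasConstId c(a);    // OK: the copy constructor is still generated
    // b = a;           // error: operator= is implicitly deleted by the const member
    (void)b; (void)c;
    return 0;
}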

View File

@@ -78,7 +78,7 @@ namespace Recurrent {
 neurons.push_back(new Neuron(neurons.size()));
 NeuronInterface *newNeuron=neurons.back();
 for(std::size_t i=0;i<neurons.size();i++) {
-    neurons[i]->setInputSize(newNeuron->id()+1);
+    neurons[i]->setInputSize(newNeuron->id+1);
 }
 return *newNeuron;
 }

View File

@@ -4,7 +4,7 @@ void NeuralNetwork::FeedForward::Layer::solve(const std::vector<float> &input, s
 output.resize(neurons.size());
 for(auto&neuron: neurons) {
-    output[neuron->id()] = neuron->operator()(input);
+    output[neuron->id] = neuron->operator()(input);
 }
 }

View File

@@ -1,6 +1,7 @@
 #include <NeuralNetwork/Learning/BackPropagation.h>
 #include <cassert>
+#include <immintrin.h>
 void NeuralNetwork::Learning::BackPropagation::teach(FeedForward::Network &network,const std::vector<float> &input, const std::vector<float> &expectation) {
@@ -26,11 +27,12 @@ void NeuralNetwork::Learning::BackPropagation::teach(FeedForward::Network &netwo
 for(std::size_t j=1;j<layer.size();j++) {
     float deltasWeight = 0;
     for(std::size_t k=1;k<network[layerIndex+1].size();k++) {
         deltasWeight+=deltas[layerIndex+1][k]* network[layerIndex+1][k].weight(j);
     }
-    float newDelta=deltasWeight*layer[j].getActivationFunction().derivatedOutput(layer[j].value(),layer[j].output());
-    deltas[layerIndex][j]=newDelta;
+    deltas[layerIndex][j]=deltasWeight*layer[j].getActivationFunction().derivatedOutput(layer[j].value(),layer[j].output());
 }
 }
@@ -38,12 +40,33 @@ void NeuralNetwork::Learning::BackPropagation::teach(FeedForward::Network &netwo
 auto &layer=network[layerIndex];
 auto &prevLayer=network[layerIndex-1];
-std::size_t max=prevLayer.size();
+std::size_t prevLayerSize=prevLayer.size();
+std::size_t layerSize=layer.size();
-for(std::size_t j=1;j<layer.size();j++) {
+/*
+#ifdef USE_AVX
+std::size_t alignedPrev=layerSize-layerSize%8;
+__m256 learningCoefficientAVX=_mm256_set_ps(learningCoefficient, learningCoefficient, learningCoefficient, learningCoefficient,
+                                            learningCoefficient, learningCoefficient, learningCoefficient, learningCoefficient);
+float* tmp =deltas[layerIndex].data();
+for(std::size_t j=1;j<alignedPrev;j+=8) {
+    _mm256_storeu_ps(tmp+j,_mm256_mul_ps(learningCoefficientAVX,_mm256_loadu_ps(tmp+j)));
+}
+for(std::size_t j =alignedPrev; j < layerSize;j++) {
+    deltas[layerIndex][j]*=learningCoefficient;
+}
+#endif
+*/
+for(std::size_t j=1;j<layerSize;j++) {
+    deltas[layerIndex][j]*=learningCoefficient;
     layer[j].weight(0)+=deltas[layerIndex][j];
-    for(std::size_t k=1;k<max;k++) {
+    for(std::size_t k=1;k<prevLayerSize;k++) {
         if(layerIndex==1) {
             layer[j].weight(k)+=deltas[layerIndex][j]*input[k-1];
         } else {
@@ -51,5 +74,6 @@ void NeuralNetwork::Learning::BackPropagation::teach(FeedForward::Network &netwo
 }
 }
 }
 }
 }
 }
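The commented-out USE_AVX block is a draft worth a second look before it is ever enabled: _mm256_set_ps with eight identical arguments is a verbose spelling of the broadcast _mm256_set1_ps, and because its vector loop starts at index 1 while alignedPrev is computed from layerSize alone, the scalar tail either re-scales the last element or, when layerSize is a multiple of 8, writes one float past the buffer. A corrected sketch of the same scaling step, assuming a hypothetical standalone helper (compile with -mavx):

#include <immintrin.h>
#include <cstddef>
#include <vector>

// Scales deltas[1..] by learningCoefficient, 8 floats at a time.
// Index 0 is skipped, matching the scalar loop in the diff above.
void scaleDeltas(std::vector<float> &deltas, float learningCoefficient) {
    const std::size_t size = deltas.size();
    float *data = deltas.data();
    const __m256 coeff = _mm256_set1_ps(learningCoefficient);  // broadcast once
    std::size_t j = 1;
    for (; j + 8 <= size; j += 8) {   // bound derived from the start index,
        _mm256_storeu_ps(data + j,    // so the tail never overlaps or overruns
                         _mm256_mul_ps(coeff, _mm256_loadu_ps(data + j)));
    }
    for (; j < size; ++j) {           // scalar remainder
        data[j] *= learningCoefficient;
    }
}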

View File

@@ -6,7 +6,7 @@ std::string NeuralNetwork::Neuron::stringify(const std::string &prefix) const {
 out << prefix << "{\n";
 out << prefix << "\t\"class\": \"NeuralNetwork::Neuron\",\n";
-out << prefix << "\t\"id\": " << id() << ",\n";
+out << prefix << "\t\"id\": " << id << ",\n";
 out << prefix << "\t\"bias\": " << getBias() << ",\n";
 out << prefix << "\t\"output\": " << output() << ",\n";
 out << prefix << "\t\"value\": " << value() << ",\n";

View File

@@ -6,7 +6,7 @@ std::vector<float> NeuralNetwork::Recurrent::Network::computeOutput(const std::v
 if(outputs.size() != neurons.size()) {
     outputs.resize(neurons.size());
     for(auto &neuron:neurons) {
-        outputs[neuron->id()]=neuron->output();
+        outputs[neuron->id]=neuron->output();
     }
 }

View File

@@ -15,6 +15,7 @@ int main() {
 for(size_t i=0;i<solutions.size();i++) {
     float res= a.computeOutput({1,0.7})[0];
     float resA=solutions[i];
+    assert(res > solutions[i]*0.999 && res < solutions[i]*1.001);
 }
 }
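The new assertion accepts results within a ±0.1 % multiplicative band around each expected value. That formulation silently inverts when solutions[i] is negative and can never pass when it is exactly zero, so a combined absolute-plus-relative tolerance is the more robust check; a sketch with a hypothetical approxEqual helper:

#include <cassert>
#include <cmath>

// True when 'actual' is within relTol of 'expected', with absTol as a
// floor so expectations of exactly zero remain testable.
inline bool approxEqual(float actual, float expected,
                        float relTol = 0.001f, float absTol = 1e-6f) {
    return std::fabs(actual - expected) <= absTol + relTol * std::fabs(expected);
}

// usage: assert(approxEqual(res, solutions[i]));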