new weights interface and adaptation + small tweaks

This commit is contained in:
2016-02-18 16:36:16 +01:00
parent 19253e975f
commit ec82018ef9
10 changed files with 101 additions and 99 deletions

View File

@@ -59,6 +59,8 @@ set (LIBRARY_SOURCES
add_library(NeuralNetwork STATIC ${LIBRARY_SOURCES}) add_library(NeuralNetwork STATIC ${LIBRARY_SOURCES})
link_libraries(NeuralNetwork pthread)
IF(BUILD_SHARED_LIBS) IF(BUILD_SHARED_LIBS)
add_library(NeuralNetworkShared SHARED ${LIBRARY_SOURCES}) add_library(NeuralNetworkShared SHARED ${LIBRARY_SOURCES})
set_target_properties(NeuralNetworkShared PROPERTIES OUTPUT_NAME NeuralNetwork) set_target_properties(NeuralNetworkShared PROPERTIES OUTPUT_NAME NeuralNetwork)

View File

@@ -20,3 +20,4 @@ i5-5300U & 8GB ram
-------------------- | ---------------- | -------------- | -------------------- | -------------------- | ---------------- | -------------- | -------------------- |
| 2016/02/07 initial | 8.27 sec | 7.15 sec | 6.00 sec | | 2016/02/07 initial | 8.27 sec | 7.15 sec | 6.00 sec |
| 2016/02/17 AVX | 5.53 sec | 4.68 sec | 4.63 sec | | 2016/02/17 AVX | 5.53 sec | 4.68 sec | 4.63 sec |
| 2016/02/17 weights | 5.53 sec | 4.68 sec | 3.02 sec |

View File

@@ -36,17 +36,28 @@ namespace NeuralNetwork
virtual std::string stringify(const std::string &prefix="") const =0; virtual std::string stringify(const std::string &prefix="") const =0;
/** /**
* @brief Gets weight * @brief Returns weight
* @param n is neuron * @param n is neuron
*/ */
virtual float getWeight(const NeuronInterface &n) const =0; virtual float weight(const NeuronInterface &n) const =0;
/** /**
* @brief Sets weight * @brief Returns weight
* @param n is neuron * @param n is id of neuron
* @param w is new weight for input neuron n
*/ */
virtual void setWeight(const NeuronInterface& n ,const float &w) =0; virtual float weight(const std::size_t &n) const =0;
/**
* @brief Returns reference to weight
* @param n is neuron
*/
virtual float& weight(const NeuronInterface &n) =0;
/**
* @brief Returns reference to weight
* @param n is id of neuron
*/
virtual float& weight(const std::size_t &n) =0;
/** /**
* @brief Returns output of neuron * @brief Returns output of neuron
@@ -58,11 +69,6 @@ namespace NeuralNetwork
*/ */
virtual float value() const=0; virtual float value() const=0;
/**
* @brief Returns value for derivation of activation function
*/
// virtual float derivatedOutput() const=0;
/** /**
* @brief Function sets bias for neuron * @brief Function sets bias for neuron
* @param bias is new bias (initial value for neuron) * @param bias is new bias (initial value for neuron)
@@ -102,12 +108,12 @@ namespace NeuralNetwork
{ {
public: public:
Neuron(unsigned long _id=0, const ActivationFunction::ActivationFunction &activationFunction=ActivationFunction::Sigmoid(-4.9)): Neuron(unsigned long _id=0, const ActivationFunction::ActivationFunction &activationFunction=ActivationFunction::Sigmoid(-4.9)):
NeuronInterface(), basis(new BasisFunction::Linear), NeuronInterface(), id_(_id), basis(new BasisFunction::Linear),
activation(activationFunction.clone()), activation(activationFunction.clone()),
id_(_id),weights(1),_output(0),_value(0) { weights(1),_output(0),_value(0) {
} }
Neuron(const Neuron &r): NeuronInterface(), basis(r.basis->clone()), activation(r.activation->clone()),id_(r.id_), Neuron(const Neuron &r): NeuronInterface(), id_(r.id_), basis(r.basis->clone()), activation(r.activation->clone()),
weights(r.weights), _output(r._output), _value(r._value) { weights(r.weights), _output(r._output), _value(r._value) {
} }
@@ -116,38 +122,28 @@ namespace NeuralNetwork
delete activation; delete activation;
}; };
virtual std::string stringify(const std::string &prefix="") const override; Neuron operator=(const Neuron&) = delete;
Neuron& operator=(const Neuron&r) { virtual std::string stringify(const std::string &prefix="") const override;
id_=r.id_;
weights=r.weights;
basis=r.basis->clone();
activation=r.activation->clone();
return *this;
}
virtual long unsigned int id() const override { virtual long unsigned int id() const override {
return id_; return id_;
}; };
/** virtual float weight(const NeuronInterface &n) const override {
* @brief Gets weight
* @param n is neuron
*/
virtual float getWeight(const NeuronInterface &n) const override {
return weights[n.id()]; return weights[n.id()];
} }
/** virtual float weight(const std::size_t &n) const override {
* @brief Sets weight return weights[n];
* @param n is neuron
* @param w is new weight for input neuron n
*/
virtual void setWeight(const NeuronInterface& n ,const float &w) override {
if(weights.size()<n.id()+1) {
weights.resize(n.id()+1);
} }
weights[n.id()]=w;
virtual float& weight(const NeuronInterface &n) override {
return weights[n.id()];
}
virtual float& weight(const std::size_t &n) override {
return weights[n];
} }
virtual void setInputSize(const std::size_t &size) override { virtual void setInputSize(const std::size_t &size) override {
@@ -196,8 +192,7 @@ namespace NeuralNetwork
} }
virtual Neuron* clone() const override { virtual Neuron* clone() const override {
Neuron *n = new Neuron; Neuron *n = new Neuron(*this);
*n=*this;
return n; return n;
} }
@@ -209,12 +204,12 @@ namespace NeuralNetwork
return *activation; return *activation;
} }
const unsigned long id_;
protected: protected:
BasisFunction::BasisFunction *basis; BasisFunction::BasisFunction *basis;
ActivationFunction::ActivationFunction *activation; ActivationFunction::ActivationFunction *activation;
unsigned long id_;
std::vector<float> weights; std::vector<float> weights;
float _output; float _output;
@@ -238,14 +233,16 @@ namespace NeuralNetwork
virtual float getBias() const override { return 0; }; virtual float getBias() const override { return 0; };
virtual float getWeight(const NeuronInterface&) const override { return 0; } float a=0.0;
virtual float& weight(const NeuronInterface &) override { return a; }
virtual float& weight(const std::size_t &) override { return a; }
virtual float weight(const NeuronInterface&) const override { return 0; }
virtual float weight(const std::size_t&) const override { return 0; }
virtual void setBias(const float&) override{ } virtual void setBias(const float&) override{ }
virtual float output() const override { return 1.0; }; virtual float output() const override { return 1.0; };
virtual void setWeight(const NeuronInterface&, const float&) override { }
virtual std::string stringify(const std::string& prefix = "") const override { return prefix+"{ \"class\" : \"NeuralNetwork::BiasNeuron\" }"; } virtual std::string stringify(const std::string& prefix = "") const override { return prefix+"{ \"class\" : \"NeuralNetwork::BiasNeuron\" }"; }
virtual float value() const override { return 1.0; } virtual float value() const override { return 1.0; }
@@ -290,14 +287,16 @@ namespace NeuralNetwork
virtual float getBias() const override { return 0; }; virtual float getBias() const override { return 0; };
virtual float getWeight(const NeuronInterface&) const override { return 0; } float a=0.0;
virtual float& weight(const NeuronInterface &) override { return a; }
virtual float& weight(const std::size_t &) override { return a; }
virtual float weight(const NeuronInterface&) const override { return 0; }
virtual float weight(const std::size_t&) const override { return 0; }
virtual void setBias(const float&) override{ } virtual void setBias(const float&) override{ }
virtual float output() const override { return 1.0; }; virtual float output() const override { return 1.0; };
virtual void setWeight(const NeuronInterface&, const float&) override { }
virtual std::string stringify(const std::string& prefix = "") const override { return prefix+"{ \"class\" : \"NeuralNetwork::InputNeuron\", \"id\": "+std::to_string(id_)+" }"; } virtual std::string stringify(const std::string& prefix = "") const override { return prefix+"{ \"class\" : \"NeuralNetwork::InputNeuron\", \"id\": "+std::to_string(id_)+" }"; }
virtual float value() const override { return 1.0; } virtual float value() const override { return 1.0; }

View File

@@ -24,7 +24,7 @@ namespace Recurrent {
* @param _outputSize is size of output from network * @param _outputSize is size of output from network
* @param hiddenUnits is number of hiddenUnits to be created * @param hiddenUnits is number of hiddenUnits to be created
*/ */
inline Network(size_t _inputSize, size_t _outputSize,size_t hiddenUnits=0):NeuralNetwork::Network(),inputSize(_inputSize),outputSize(_outputSize), neurons(0) { inline Network(size_t _inputSize, size_t _outputSize,size_t hiddenUnits=0):NeuralNetwork::Network(),inputSize(_inputSize),outputSize(_outputSize), neurons(0),outputs(0) {
neurons.push_back(new NeuralNetwork::BiasNeuron()); neurons.push_back(new NeuralNetwork::BiasNeuron());
for(size_t i=0;i<_inputSize;i++) { for(size_t i=0;i<_inputSize;i++) {
@@ -78,7 +78,7 @@ namespace Recurrent {
neurons.push_back(new Neuron(neurons.size())); neurons.push_back(new Neuron(neurons.size()));
NeuronInterface *newNeuron=neurons.back(); NeuronInterface *newNeuron=neurons.back();
for(std::size_t i=0;i<neurons.size();i++) { for(std::size_t i=0;i<neurons.size();i++) {
neurons[i]->setWeight(*newNeuron,0.0); neurons[i]->setInputSize(newNeuron->id()+1);
} }
return *newNeuron; return *newNeuron;
} }
@@ -95,6 +95,7 @@ namespace Recurrent {
size_t outputSize=0; size_t outputSize=0;
std::vector<NeuronInterface*> neurons; std::vector<NeuronInterface*> neurons;
std::vector<float> outputs;
}; };
} }
} }

View File

@@ -6,7 +6,6 @@ void NeuralNetwork::FeedForward::Layer::solve(const std::vector<float> &input, s
for(auto&neuron: neurons) { for(auto&neuron: neurons) {
output[neuron->id()] = neuron->operator()(input); output[neuron->id()] = neuron->operator()(input);
} }
} }
void NeuralNetwork::FeedForward::Layer::stringify(std::ostream &out) const { void NeuralNetwork::FeedForward::Layer::stringify(std::ostream &out) const {

View File

@@ -1,30 +1,21 @@
#include <NeuralNetwork/FeedForward/Network.h> #include <NeuralNetwork/FeedForward/Network.h>
std::vector<float> NeuralNetwork::FeedForward::Network::computeOutput(const std::vector<float>& input) { std::vector<float> NeuralNetwork::FeedForward::Network::computeOutput(const std::vector<float>& input) {
// this is here for simple swapping between input and output std::vector<float> partialInput(input.size()+1);
std::vector<float> partial1=std::vector<float>(input.size()+1); std::vector<float> partialOutput;
std::vector<float> partial2;
std::vector<float> *partialInputPtr = &partial1;
std::vector<float> *partialOutputPtr = &partial2;
// 0 is bias // 0 is bias
partial1[0]=1.0; partialInput[0]=1.0;
for(std::size_t i=0;i<input.size();i++) { for(std::size_t i=0;i<input.size();i++) {
partial1[i+1]=input[i]; partialInput[i+1]=input[i];
} }
for(std::size_t i=1;i<layers.size();i++) { for(std::size_t i=1;i<layers.size();i++) {
layers[i]->solve(*partialInputPtr,*partialOutputPtr); layers[i]->solve(partialInput,partialOutput);
std::swap(partialInputPtr,partialOutputPtr); partialInput.swap(partialOutput);
} }
for(std::size_t i=0;i<partialInputPtr->size()-1;i++) { return std::vector<float>(partialInput.begin()+1,partialInput.end());
partialInputPtr->operator[](i)=partialInputPtr->operator[](i+1);
}
partialInputPtr->resize(partialInputPtr->size()-1);
return std::vector<float>(*partialInputPtr);
} }
void NeuralNetwork::FeedForward::Network::randomizeWeights() { void NeuralNetwork::FeedForward::Network::randomizeWeights() {
@@ -34,7 +25,7 @@ void NeuralNetwork::FeedForward::Network::randomizeWeights() {
for(std::size_t neuron=1; neuron < layer->size(); neuron ++ ) { for(std::size_t neuron=1; neuron < layer->size(); neuron ++ ) {
for(std::size_t prevNeuron=0; prevNeuron < prevLayer->size(); prevNeuron++) { for(std::size_t prevNeuron=0; prevNeuron < prevLayer->size(); prevNeuron++) {
layer->operator[](neuron).setWeight(prevLayer->operator[](prevNeuron),1.0-static_cast<float>(rand()%2001)/1000.0); layer->operator[](neuron).weight(prevNeuron)=1.0-static_cast<float>(rand()%2001)/1000.0;
} }
} }
} }

View File

@@ -27,7 +27,7 @@ void NeuralNetwork::Learning::BackPropagation::teach(FeedForward::Network &netwo
for(std::size_t j=1;j<layer.size();j++) { for(std::size_t j=1;j<layer.size();j++) {
float deltasWeight = 0; float deltasWeight = 0;
for(std::size_t k=1;k<network[layerIndex+1].size();k++) { for(std::size_t k=1;k<network[layerIndex+1].size();k++) {
deltasWeight+=deltas[layerIndex+1][k]* network[layerIndex+1][k].getWeight(layer[j]); deltasWeight+=deltas[layerIndex+1][k]* network[layerIndex+1][k].weight(j);
} }
float newDelta=deltasWeight*layer[j].getActivationFunction().derivatedOutput(layer[j].value(),layer[j].output()); float newDelta=deltasWeight*layer[j].getActivationFunction().derivatedOutput(layer[j].value(),layer[j].output());
deltas[layerIndex][j]=newDelta; deltas[layerIndex][j]=newDelta;
@@ -41,12 +41,13 @@ void NeuralNetwork::Learning::BackPropagation::teach(FeedForward::Network &netwo
std::size_t max=prevLayer.size(); std::size_t max=prevLayer.size();
for(std::size_t j=1;j<layer.size();j++) { for(std::size_t j=1;j<layer.size();j++) {
layer[j].setWeight(prevLayer[0],layer[j].getWeight(prevLayer[0])+deltas[layerIndex][j]*learningCoefficient); deltas[layerIndex][j]*=learningCoefficient;
layer[j].weight(0)+=deltas[layerIndex][j];
for(std::size_t k=1;k<max;k++) { for(std::size_t k=1;k<max;k++) {
if(layerIndex==1) { if(layerIndex==1) {
layer[j].setWeight(prevLayer[k], layer[j].getWeight(prevLayer[k])+learningCoefficient*deltas[layerIndex][j]*input[k-1]); layer[j].weight(k)+=deltas[layerIndex][j]*input[k-1];
} else { } else {
layer[j].setWeight(prevLayer[k], layer[j].getWeight(prevLayer[k])+learningCoefficient*deltas[layerIndex][j]*prevLayer[k].output()); layer[j].weight(k)+=deltas[layerIndex][j]*prevLayer[k].output();
} }
} }
} }

View File

@@ -1,23 +1,31 @@
#include <NeuralNetwork/Recurrent/Network.h> #include <NeuralNetwork/Recurrent/Network.h>
std::vector<float> NeuralNetwork::Recurrent::Network::computeOutput(const std::vector<float>& input, unsigned int iterations) { std::vector<float> NeuralNetwork::Recurrent::Network::computeOutput(const std::vector<float>& input, unsigned int iterations) {
//TODO: check inputSize
size_t neuronSize=neurons.size();
std::vector<float> outputs(neuronSize); assert(input.size() == inputSize);
if(outputs.size() != neurons.size()) {
outputs.resize(neurons.size());
for(auto &neuron:neurons) {
outputs[neuron->id()]=neuron->output();
}
}
std::vector<float> newOutputs(neurons.size());
for(size_t i=0;i<inputSize;i++) { for(size_t i=0;i<inputSize;i++) {
outputs[i+1]=input[i]; outputs[i+1]=input[i];
newOutputs[i+1]=input[i];
} }
newOutputs[0]=neurons[0]->output();
std::size_t neuronsSize = neurons.size();
for(unsigned int iter=0;iter< iterations;iter++) { for(unsigned int iter=0;iter< iterations;iter++) {
for(size_t i=inputSize+1;i<neuronsSize;i++) {
for(size_t i=inputSize+1;i<neuronSize;i++) { newOutputs[i] = neurons[i]->operator()(outputs);
outputs[i]=neurons[i]->output();
}
// update neurons
for(size_t i=inputSize+1;i<neuronSize;i++) {
neurons[i]->operator()(outputs);
} }
outputs.swap(newOutputs);
} }
std::vector<float> ret; std::vector<float> ret;

View File

@@ -10,17 +10,17 @@ int main() {
NeuralNetwork::FeedForward::Layer &hidden=n.appendLayer(2,a); NeuralNetwork::FeedForward::Layer &hidden=n.appendLayer(2,a);
NeuralNetwork::FeedForward::Layer &out = n.appendLayer(1,a); NeuralNetwork::FeedForward::Layer &out = n.appendLayer(1,a);
hidden[1].setWeight(n[0][0],7); hidden[1].weight(n[0][0])=7;
hidden[1].setWeight(n[0][1],-4.7); hidden[1].weight(n[0][1])=-4.7;
hidden[1].setWeight(n[0][2],-4.7); hidden[1].weight(n[0][2])=-4.7;
hidden[2].setWeight(n[0][0],2.6); hidden[2].weight(n[0][0])=2.6;
hidden[2].setWeight(n[0][1],-6.4); hidden[2].weight(n[0][1])=-6.4;
hidden[2].setWeight(n[0][2],-6.4); hidden[2].weight(n[0][2])=-6.4;
out[1].setWeight(hidden[0],-4.5); out[1].weight(hidden[0])=-4.5;
out[1].setWeight(hidden[1],9.6); out[1].weight(hidden[1])=9.6;
out[1].setWeight(hidden[2],-6.8); out[1].weight(hidden[2])=-6.8;
{ {

View File

@@ -6,10 +6,10 @@
int main() { int main() {
NeuralNetwork::Recurrent::Network a(2,1,1); NeuralNetwork::Recurrent::Network a(2,1,1);
a.getNeurons()[4]->setWeight(*a.getNeurons()[1],0.05); a.getNeurons()[4]->weight(1)=0.05;
a.getNeurons()[4]->setWeight(*a.getNeurons()[2],0.05); a.getNeurons()[4]->weight(2)=0.05;
a.getNeurons()[4]->setWeight(*a.getNeurons()[3],0.7); a.getNeurons()[4]->weight(3)=0.7;
a.getNeurons()[3]->setWeight(*a.getNeurons()[4],0.1); a.getNeurons()[3]->weight(4)=0.1;
std::vector <float> solutions({0.5,0.5732923,0.6077882,0.6103067,0.6113217,0.6113918,0.61142,0.6114219,0.6114227,0.6114227}); std::vector <float> solutions({0.5,0.5732923,0.6077882,0.6103067,0.6113217,0.6113918,0.61142,0.6114219,0.6114227,0.6114227});