Backpropagation implementation

This commit is contained in:
2016-02-07 21:58:21 +01:00
parent 5bd520cfed
commit 5f43fb8cfb
3 changed files with 131 additions and 0 deletions

View File

@@ -0,0 +1,54 @@
#include <NeuralNetwork/Learning/BackPropagation.h>
#include <cassert>
/**
 * @brief Trains the network on a single (input, expectation) pair using
 *        classic backpropagation, updating the weights in place.
 *
 * Index 0 of every layer is skipped in all loops and its delta stays 0,
 * so expectation[j-1] pairs with output neuron j and input[k-1] with
 * input-layer slot k. NOTE(review): this bias-at-index-0 convention is
 * inferred from the loop bounds — confirm against FeedForward::Network.
 *
 * @param network     network to train; weights are modified in place
 * @param input       input-layer activations (size == network[0].size() - 1)
 * @param expectation target outputs (size == output layer size - 1)
 */
void NeuralNetwork::Learning::BackPropagation::teach(FeedForward::Network &network, const std::vector<float> &input, const std::vector<float> &expectation) {
    // Sizes must line up or input[k-1] / expectation[j-1] below read out of bounds.
    assert(input.size() + 1 == network[0].size());
    assert(expectation.size() + 1 == network[network.size() - 1].size());

    // Forward pass so value()/output() are fresh for every neuron.
    network.computeOutput(input);

    // deltas[layer][neuron]; resize() value-initializes floats to 0.0f,
    // which also zeroes the unused bias slot [layer][0].
    std::vector<std::vector<float>> deltas;
    deltas.resize(network.size());
    for (std::size_t i = 0; i < network.size(); i++) {
        deltas[i].resize(network[i].size());
    }

    // Output-layer deltas: delta_j = correction(target_j, out_j) * f'(net_j, out_j).
    auto &outputLayer = network[network.size() - 1];
    for (std::size_t j = 1; j < outputLayer.size(); j++) {
        auto &neuron = outputLayer[j];
        deltas[network.size() - 1][j] =
            correction(expectation[j - 1], neuron.output()) *
            neuron.getActivationFunction().derivatedOutput(neuron.value(), neuron.output());
    }

    // Hidden-layer deltas, back to front: delta_j = f'(net_j) * sum_k delta_k * w_kj.
    // Layer 0 (the input layer) is skipped — it needs no delta.
    for (int layerIndex = static_cast<int>(network.size() - 2); layerIndex > 0; layerIndex--) {
        auto &layer = network[layerIndex];
        auto &nextLayer = network[layerIndex + 1]; // loop-invariant: hoisted out of the k-loop
        for (std::size_t j = 1; j < layer.size(); j++) {
            float deltasWeight = 0;
            for (std::size_t k = 1; k < nextLayer.size(); k++) {
                deltasWeight += deltas[layerIndex + 1][k] * nextLayer[k].getWeight(layer[j]);
            }
            deltas[layerIndex][j] =
                deltasWeight * layer[j].getActivationFunction().derivatedOutput(layer[j].value(), layer[j].output());
        }
    }

    // Weight update, done only after ALL deltas are computed so the hidden-layer
    // pass above saw the original (pre-update) weights.
    for (std::size_t layerIndex = 1; layerIndex < network.size(); layerIndex++) {
        auto &layer = network[layerIndex];
        auto &prevLayer = network[layerIndex - 1];
        std::size_t max = prevLayer.size();
        for (std::size_t j = 1; j < layer.size(); j++) {
            // Bias weight (slot 0): its activation is implicitly 1, so no output factor.
            layer[j].setWeight(prevLayer[0], layer[j].getWeight(prevLayer[0]) + deltas[layerIndex][j] * learningCoefficient);
            for (std::size_t k = 1; k < max; k++) {
                if (layerIndex == 1) {
                    // The first hidden layer is fed by the raw input vector, not neuron outputs.
                    layer[j].setWeight(prevLayer[k], layer[j].getWeight(prevLayer[k]) + learningCoefficient * deltas[layerIndex][j] * input[k - 1]);
                } else {
                    layer[j].setWeight(prevLayer[k], layer[j].getWeight(prevLayer[k]) + learningCoefficient * deltas[layerIndex][j] * prevLayer[k].output());
                }
            }
        }
    }
}