QuickPropagation and tests

2016-02-24 20:23:16 +01:00
parent c45f12f53c
commit 3c924d01f3
9 changed files with 359 additions and 44 deletions

View File

@@ -30,6 +30,11 @@ void NeuralNetwork::Learning::BackPropagation::teach(const std::vector<float> &i
        }
    }
    updateWeights(input);
}

void NeuralNetwork::Learning::BackPropagation::updateWeights(const std::vector<float> &input) {
    for(std::size_t layerIndex=1;layerIndex<network.size();layerIndex++) {
        auto &layer=network[layerIndex];
        auto &prevLayer=network[layerIndex-1];
@@ -52,4 +57,5 @@ void NeuralNetwork::Learning::BackPropagation::teach(const std::vector<float> &i
                }
            }
        }
    }
}
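
For reference, the update this new BackPropagation::updateWeights step performs is ordinary gradient descent: each weight moves by the learning rate times the neuron's error term times the activation feeding it. A minimal self-contained sketch under that assumption (the flat weight layout, the updateLayerWeights name, and the learningRate parameter are illustrative, not the repository's API):

#include <vector>
#include <cstddef>

// Sketch of a plain backprop weight update for one layer, assuming a
// row-major weight matrix; names are illustrative, not this repo's API.
void updateLayerWeights(std::vector<float> &weights,
                        const std::vector<float> &deltas,  // error term per neuron
                        const std::vector<float> &inputs,  // incoming activations
                        float learningRate) {
    for(std::size_t j=0;j<deltas.size();j++)
        for(std::size_t k=0;k<inputs.size();k++)
            // Gradient-descent step: w_jk += eta * delta_j * x_k
            weights[j*inputs.size()+k]+=learningRate*deltas[j]*inputs[k];
}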

View File

@@ -0,0 +1,35 @@
#include <NeuralNetwork/Learning/QuickPropagation.h>
#include <cassert>
#include <immintrin.h>

void NeuralNetwork::Learning::QuickPropagation::updateWeights(const std::vector<float> &input) {
    for(std::size_t layerIndex=1;layerIndex<network.size();layerIndex++) {
        auto &layer=network[layerIndex];
        auto &prevLayer=network[layerIndex-1];
        std::size_t prevLayerSize=prevLayer.size();
        std::size_t layerSize=layer.size();
        for(std::size_t j=1;j<layerSize;j++) {
            // Quickprop step: ratio of the current slope to the change in
            // slope since the previous pass (see the note after this file).
            //TODO: is this correct??
            float delta=deltas[layerIndex][j]/(deltasPrev[layerIndex][j]-deltas[layerIndex][j]);
            deltas[layerIndex][j]=delta;
            // Bias weight first, then the weights of every incoming connection.
            layer[j].weight(0)+=delta;
            for(std::size_t k=1;k<prevLayerSize;k++) {
                if(layerIndex==1) {
                    // The first hidden layer reads directly from the input vector.
                    layer[j].weight(k)+=delta*input[k-1];
                } else {
                    layer[j].weight(k)+=delta*prevLayer[k].output();
                }
            }
        }
    }
    // Keep this pass's slopes around for the next update.
    deltas.swap(deltasPrev);
}
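
The TODO above is worth spelling out. Fahlman's quickprop (1988) models each weight's error curve as a parabola through the two most recent slopes and jumps toward its minimum: dw(t) = S(t) / (S(t-1) - S(t)) * dw(t-1). The code in this file computes the slope ratio but never multiplies by the previous weight change, and the denominator reaches zero when the slope stalls. A hedged single-weight sketch of the textbook rule (quickpropStep, learningRate, and the 1.75 growth cap are illustrative parameters, not this repository's API):

#include <cmath>

// One-weight quickprop step after Fahlman (1988): fit a parabola through
// the two most recent error slopes and jump toward its minimum.
// Names and defaults here are illustrative, not this repo's API.
float quickpropStep(float slope, float prevSlope, float prevStep,
                    float learningRate, float maxGrowth = 1.75f) {
    float step;
    if (prevStep == 0.0f || std::fabs(prevSlope - slope) < 1e-12f) {
        // First pass, or a flat slope change: fall back to gradient descent.
        step = -learningRate * slope;
    } else {
        // Parabola jump: dw(t) = S(t) / (S(t-1) - S(t)) * dw(t-1)
        step = slope / (prevSlope - slope) * prevStep;
        // Clamp so a single step never exceeds maxGrowth times the last one.
        float limit = maxGrowth * std::fabs(prevStep);
        if (std::fabs(step) > limit)
            step = std::copysign(limit, step);
    }
    return step;
}

The growth cap is the usual guard against the parabola jump exploding when two consecutive slopes are nearly equal, which is exactly the case the unprotected division in this commit would hit.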