bp: refactored

commit 006810a49c
parent f1ea858f32
2016-10-30 20:40:07 +01:00
2 changed files with 95 additions and 48 deletions


@@ -37,6 +37,7 @@ namespace Learning {
     void setMomentumWeight(const float& m) {
         momentumWeight=m;
+        resize();
     }
     float getWeightDecay() const {
@@ -47,6 +48,14 @@ namespace Learning {
         weightDecay=wd;
     }
+    std::size_t getBatchSize() const {
+        return batchSize;
+    }
+    void setBatchSize(std::size_t size) {
+        batchSize = size;
+    }
 protected:
     virtual inline void resize() {
@@ -58,25 +67,40 @@ namespace Learning {
         slopes[i].resize(network[i].size());
     }
-    if(lastDeltas.size()!=network.size())
-        lastDeltas.resize(network.size());
-    for(std::size_t i=0; i < network.size(); i++) {
-        if(lastDeltas[i].size()!=network[i].size()) {
-            lastDeltas[i].resize(network[i].size());
-            for(std::size_t j = 0; j < lastDeltas[i].size(); j++) {
-                lastDeltas[i][j] = 0.0;
-            }
-        }
-    }
-    deltas= lastDeltas;
+    if(deltas.size() != network.size())
+        deltas.resize(network.size());
+    bool resized = false;
+    for(std::size_t i = 0; i < network.size(); i++) {
+        if(deltas[i].size() != network[i].size()) {
+            deltas[i].resize(network[i].size());
+            resized = true;
+            if(i > 0) {
+                for(std::size_t j = 0; j < deltas[i].size(); j++) {
+                    deltas[i][j].resize(network[i - 1].size());
+                    std::fill(deltas[i][j].begin(),deltas[i][j].end(),0.0);
+                }
+            }
+        }
+    }
+    if(momentumWeight > 0.0 && (resized || lastDeltas.size() != deltas.size())) {
+        lastDeltas = deltas;
+    }
 }
-virtual void updateWeights(const std::vector<float> &input);
+virtual void computeDeltas(const std::vector<float> &input);
+void updateWeights();
 virtual void computeSlopes(const std::vector<float> &expectation);
+virtual void endBatch() {
+}
 FeedForward::Network &network;
 CorrectionFunction::CorrectionFunction *correctionFunction;
@@ -87,9 +111,13 @@ namespace Learning {
 float weightDecay = 0.0;
+std::size_t batchSize = 1;
+std::size_t currentBatchSize = 0;
 std::vector<std::vector<float>> slopes;
-std::vector<std::vector<float>> deltas;
-std::vector<std::vector<float>> lastDeltas;
+std::vector<std::vector<std::vector<float>>> deltas = {};
+std::vector<std::vector<std::vector<float>>> lastDeltas = {};
 };
 }
 }
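For readers skimming the diff: the header above adds a batch dimension to training. A minimal usage sketch follows, assuming a constructed FeedForward::Network and a BackPropagation constructor taking it; the constructor, Sample, and trainingSet are assumptions for illustration, since only setBatchSize(), setMomentumWeight(), and teach() appear in this commit:

    // Hypothetical driver; Sample, trainingSet and the constructor are
    // assumptions, not part of this commit.
    NeuralNetwork::Learning::BackPropagation trainer(network);
    trainer.setBatchSize(32);         // weights now change every 32nd teach()
    trainer.setMomentumWeight(0.9f);  // note: this now also calls resize()
    for(const Sample &s : trainingSet) {
        // teach() computes the output, slopes and per-weight deltas;
        // updateWeights() and endBatch() run only when the counter wraps
        trainer.teach(s.input, s.expectation);
    }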


@@ -4,47 +4,15 @@
 #include <immintrin.h>
 void NeuralNetwork::Learning::BackPropagation::teach(const std::vector<float> &input, const std::vector<float> &expectation) {
     network.computeOutput(input);
     resize();
     computeSlopes(expectation);
-    updateWeights(input);
-    std::swap(deltas,lastDeltas);
-}
-void NeuralNetwork::Learning::BackPropagation::updateWeights(const std::vector<float> &input) {
-    for(std::size_t layerIndex=1;layerIndex<network.size();layerIndex++) {
-        auto &layer=network[layerIndex];
-        auto &prevLayer=network[layerIndex-1];
-        std::size_t prevLayerSize=prevLayer.size();
-        std::size_t layerSize=layer.size();
-        for(std::size_t j=1;j<layerSize;j++) {
-            float delta =slopes[layerIndex][j]*learningCoefficient;
-            //momentum
-            delta += momentumWeight * lastDeltas[layerIndex][j];
-            deltas[layerIndex][j]=delta;
-            layer[j].weight(0)+=delta - weightDecay *layer[j].weight(0);
-            for(std::size_t k=1;k<prevLayerSize;k++) {
-                if(layerIndex==1) {
-                    layer[j].weight(k)+=delta*input[k-1] - weightDecay * layer[j].weight(k);
-                } else {
-                    layer[j].weight(k)+=delta*prevLayer[k].output() - weightDecay * layer[j].weight(k);
-                }
-            }
-        }
-    }
-}
+    computeDeltas(input);
+    if(++currentBatchSize >= batchSize) {
+        updateWeights();
+        endBatch();
+        currentBatchSize=0;
+    }
 }
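One detail worth flagging before the new functions below: loops start at j = 1, and the first hidden layer reads input[k-1]. That strongly suggests slot 0 of every layer is a constant bias unit. A self-contained sketch of that convention, under that assumption; gradientInput() is a hypothetical name, not from this commit:

    #include <cstddef>
    #include <vector>
    // Assumption: element 0 of each layer is a bias unit, so sample values
    // start at slot 1. This mirrors the branch inside computeDeltas() below.
    float gradientInput(std::size_t layerIndex, std::size_t k,
                        const std::vector<float> &sample,        // raw input
                        const std::vector<float> &prevOutputs) { // layer-1 outputs
        if(layerIndex == 1 && k != 0)
            return sample[k - 1];  // first hidden layer reads the sample,
                                   // shifted by one past the bias slot
        return prevOutputs[k];     // deeper layers use neuron outputs; k == 0
                                   // picks up the bias unit's output
    }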
@@ -71,4 +39,55 @@ void NeuralNetwork::Learning::BackPropagation::computeSlopes(const std::vector<f
     }
 }
+void NeuralNetwork::Learning::BackPropagation::computeDeltas(const std::vector<float> &input) {
+    for(std::size_t layerIndex=1;layerIndex<network.size();layerIndex++) {
+        auto &layer=network[layerIndex];
+        auto &prevLayer=network[layerIndex-1];
+        std::size_t prevLayerSize=prevLayer.size();
+        std::size_t layerSize=layer.size();
+        for(std::size_t j=1;j<layerSize;j++) {
+            float update = slopes[layerIndex][j];
+            for(std::size_t k=0;k<prevLayerSize;k++) {
+                float inputValue = 0.0;
+                if(layerIndex==1 && k!=0) {
+                    inputValue = input[k-1];
+                } else {
+                    inputValue= prevLayer[k].output();
+                }
+                if(currentBatchSize == 0) {
+                    deltas[layerIndex][j][k] = update * inputValue;
+                } else {
+                    deltas[layerIndex][j][k] += update * inputValue;
+                }
+            }
+        }
+    }
+}
+void NeuralNetwork::Learning::BackPropagation::updateWeights() {
+    bool enableMoments = momentumWeight > 0.0;
+    for(std::size_t layerIndex=1;layerIndex<network.size();layerIndex++) {
+        auto &layer = network[layerIndex];
+        auto &prevLayer = network[layerIndex - 1];
+        std::size_t prevLayerSize = prevLayer.size();
+        std::size_t layerSize = layer.size();
+        for(std::size_t j = 1; j < layerSize; j++) {
+            for(std::size_t k = 0; k < prevLayerSize; k++) {
+                float delta = deltas[layerIndex][j][k]*learningCoefficient - weightDecay * layer[j].weight(k);
+                if(enableMoments) {
+                    delta += momentumWeight * lastDeltas[layerIndex][j][k];
+                    lastDeltas[layerIndex][j][k]=delta;
+                }
+                layer[j].weight(k)+= delta;
+            }
+        }
+    }
+}
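Net effect: the commit replaces update-per-sample with accumulate-then-apply. computeDeltas() sums slope times input into deltas[layer][j][k] across the batch, and updateWeights() turns each sum into a weight step with decay and momentum folded in. A single-weight sketch of that rule, with names that mirror the diff; this is an illustration, not code from the commit:

    // One weight's view of the batched update; acc plays the role of
    // deltas[layer][j][k], lastDelta of lastDeltas[layer][j][k].
    struct BatchedWeight {
        float weight = 0.5f, acc = 0.0f, lastDelta = 0.0f;
        void accumulate(float slope, float inputValue) { // computeDeltas()
            acc += slope * inputValue;
        }
        void apply(float learningCoefficient, float weightDecay,
                   float momentumWeight) {               // updateWeights()
            float delta = acc * learningCoefficient - weightDecay * weight;
            if(momentumWeight > 0.0f) {
                delta += momentumWeight * lastDelta;
                lastDelta = delta;                       // decay term is folded
            }                                            // into the stored delta
            weight += delta;
            acc = 0.0f;  // the diff instead overwrites on the first sample
                         // of the next batch (currentBatchSize == 0)
        }
    };

Worth noting: the accumulated gradient is applied as a sum, not an average, so the effective step grows with batchSize unless learningCoefficient is scaled down to match.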