#include <NeuralNetwork/Learning/RProp.h>

#include <algorithm> // std::min, std::max
#include <cstddef>   // std::size_t

void NeuralNetwork::Learning::RProp::updateWeightsAndEndBatch() {
    // Skip the input layer (index 0); its neurons have no incoming weights.
    for (std::size_t layerIndex = 1; layerIndex < _network.size(); layerIndex++) {
        auto &layer = _network[layerIndex];
        auto &prevLayer = _network[layerIndex - 1];

        std::size_t prevLayerSize = prevLayer.size();
        std::size_t layerSize = layer.size();

        // Neuron 0 is the bias unit, so start at j = 1.
        for (std::size_t j = 1; j < layerSize; j++) {
            for (std::size_t k = 0; k < prevLayerSize; k++) {
                float gradient = _gradients[layerIndex][j][k];
                float lastGradient = _lastGradients[layerIndex][j][k];

                _lastGradients[layerIndex][j][k] = gradient;

                float weightChangeDelta = _lastWeightChanges[layerIndex][j][k];

                if (gradient * lastGradient > 0) {
                    // Same sign as the last step: accelerate, capped at the maximum step size.
                    weightChangeDelta = std::min(weightChangeDelta * weightChangePlus, maxChangeOfWeights);
                } else if (gradient * lastGradient < 0) {
                    // Sign flip: the last step overshot a minimum, so shrink the step size.
                    weightChangeDelta = std::max(weightChangeDelta * weightChangeMinus, minChangeOfWeights);
                }
                // Otherwise (one of the gradients is zero) the previous step size is kept.

                _lastWeightChanges[layerIndex][j][k] = weightChangeDelta;

                // Move the weight according to the gradient's sign only; the magnitude
                // comes solely from weightChangeDelta, not from the gradient itself.
                if (gradient > 0) {
                    layer[j].weight(k) += weightChangeDelta;
                } else if (gradient < 0) {
                    layer[j].weight(k) -= weightChangeDelta;
                }
            }
        }
    }
}
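
// For reference, the per-weight rule above is the classic RProp scheme
// (Riedmiller & Braun): only the sign of the gradient is used, the step size
// grows by a factor while successive gradients agree, and shrinks after a
// sign flip. Below is a minimal, self-contained sketch of that rule for a
// single weight. All names and constants here are illustrative (the 1.2/0.5
// factors are the commonly cited defaults), not members of the
// NeuralNetwork::Learning::RProp class; the weight-update sign convention
// simply mirrors the method above.

#include <algorithm>
#include <cstdio>

int main() {
    const float etaPlus = 1.2f, etaMinus = 0.5f; // common RProp defaults (assumed)
    const float maxStep = 50.0f, minStep = 1e-6f;

    float weight = 0.0f;
    float step = 0.1f;         // initial per-weight step size
    float lastGradient = 0.0f;

    // Toy gradient sequence: three steps of agreement, then a sign flip.
    const float gradients[] = {1.0f, 1.0f, 1.0f, -1.0f, -1.0f};

    for (float gradient : gradients) {
        if (gradient * lastGradient > 0) {
            step = std::min(step * etaPlus, maxStep);  // accelerate
        } else if (gradient * lastGradient < 0) {
            step = std::max(step * etaMinus, minStep); // overshot: back off
        }
        if (gradient > 0) {
            weight += step;
        } else if (gradient < 0) {
            weight -= step;
        }
        lastGradient = gradient;

        std::printf("gradient=%+.1f step=%.4f weight=%.4f\n", gradient, step, weight);
    }
}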