RProp implementation

This commit is contained in:
2016-10-30 23:00:50 +01:00
parent 554ef1b46b
commit 8749b3eb03
5 changed files with 415 additions and 3 deletions

View File

@@ -0,0 +1,140 @@
#pragma once

#include <algorithm>
#include <cmath>
#include <cstddef>
#include <vector>

#include <NeuralNetwork/FeedForward/Network.h>
#include "CorrectionFunction/Linear.h"
namespace NeuralNetwork {
namespace Learning {
/** @class RProp
 * @brief Resilient backpropagation (RPROP) trainer for a feed-forward network.
 *
 * Adapts one step size per weight: the step grows (×weightChangePlus) while the
 * gradient keeps its sign and shrinks (×weightChangeMinus) when it flips,
 * clamped to [minChangeOfWeights, maxChangeOfWeights].
 */
class RProp {
  public:
    /** @param feedForwardNetwork network to train; must outlive this object.
     *  @param correction error-correction function; ownership is TAKEN and the
     *         object is destroyed with `delete` in the destructor, so it must
     *         be heap-allocated.
     */
    RProp(FeedForward::Network &feedForwardNetwork, CorrectionFunction::CorrectionFunction *correction = new CorrectionFunction::Linear()):
     network(feedForwardNetwork), correctionFunction(correction) {
        resize();
    }

    virtual ~RProp() {
        delete correctionFunction; // owns the correction function (see ctor)
    }

    // Non-copyable: owns a raw pointer and references an external network.
    RProp(const RProp&) = delete;
    RProp& operator=(const NeuralNetwork::Learning::RProp&) = delete;

    /** Run one teaching step for a single input/expected-output pair. */
    void teach(const std::vector<float> &input, const std::vector<float> &output);

    /** @return number of samples accumulated before weights are updated. */
    std::size_t getBatchSize() const {
        return batchSize;
    }

    void setBatchSize(std::size_t size) {
        batchSize = size;
    }

    /** Sets the initial per-weight step size. Only affects buffers that are
     *  (re)allocated afterwards — already-initialized step sizes keep their value. */
    void setInitialWeightChange(float init) {
        initialWeightChange = init;
    }

  protected:
    /** Grows the per-neuron and per-weight working buffers to match the
     *  network topology. Newly allocated per-weight entries are initialized;
     *  existing entries are left untouched. */
    virtual void resize() {
        // Per-neuron slope buffer (values are filled elsewhere, no init needed).
        if(slopes.size() != network.size())
            slopes.resize(network.size());
        for(std::size_t layer = 0; layer < network.size(); layer++) {
            if(slopes[layer].size() != network[layer].size())
                slopes[layer].resize(network[layer].size());
        }

        // Per-weight buffers: gradients drive the "did topology change" flag;
        // the remaining buffers are only (re)built when it did, matching the
        // gradients' new shape.
        const bool resized = resizeWeightBuffer(gradients, 0.0f);
        if(resized) {
            lastGradients = gradients; // same shape, all zeros
            resizeWeightBuffer(changesOfWeightChanges, initialWeightChange);
            resizeWeightBuffer(lastWeightChanges, 0.1f);
        }
    }

    virtual void computeSlopes(const std::vector<float> &expectation);
    virtual void computeDeltas(const std::vector<float> &input);
    void updateWeights();

    /** Hook called after a full batch has been processed. */
    virtual void endBatch() {
    }

    FeedForward::Network &network;
    CorrectionFunction::CorrectionFunction *correctionFunction; // owned (deleted in dtor)

    std::vector<std::vector<float>> slopes;
    std::vector<std::vector<std::vector<float>>> gradients = {};
    std::vector<std::vector<std::vector<float>>> lastGradients = {};
    std::vector<std::vector<std::vector<float>>> lastWeightChanges = {};
    std::vector<std::vector<std::vector<float>>> changesOfWeightChanges = {};

    std::size_t batchSize = 1;
    std::size_t currentBatchSize = 0;

    // Standard RPROP hyper-parameters (Riedmiller & Braun, 1993).
    float maxChangeOfWeights = 50;
    float minChangeOfWeights = 0.0001;
    float initialWeightChange = 0.02;
    float weightChangePlus = 1.2;
    float weightChangeMinus = 0.5;

  private:
    /** Resizes a [layer][neuron][input-weight] buffer to the network topology,
     *  filling newly created weight rows with @p fillValue.
     *  @return true if any layer's neuron list had to be resized. */
    bool resizeWeightBuffer(std::vector<std::vector<std::vector<float>>> &buffer, float fillValue) {
        if(buffer.size() != network.size())
            buffer.resize(network.size());
        bool changed = false;
        for(std::size_t layer = 0; layer < network.size(); layer++) {
            if(buffer[layer].size() != network[layer].size()) {
                buffer[layer].resize(network[layer].size());
                changed = true;
                // Layer 0 is the input layer: it has no incoming weights.
                if(layer > 0) {
                    for(std::size_t neuron = 0; neuron < buffer[layer].size(); neuron++) {
                        buffer[layer][neuron].resize(network[layer - 1].size());
                        std::fill(buffer[layer][neuron].begin(), buffer[layer][neuron].end(), fillValue);
                    }
                }
            }
        }
        return changed;
    }
};
}
}