#pragma once

#include <vector>
#include <cmath>

#include <NeuralNetwork/FeedForward/Network.h>
#include "BackPropagation.h"

namespace NeuralNetwork {
namespace Learning {

/** @class QuickPropagation
 * @brief Quickprop learning for a feed-forward network: extends BackPropagation,
 * sizing each weight step from the current and previous error slopes rather than
 * from a fixed learning rate alone.
 */
class QuickPropagation : public BackPropagation {

public:
    inline QuickPropagation(FeedForward::Network &feedForwardNetwork,
                            CorrectionFunction::CorrectionFunction *correction = new CorrectionFunction::Linear()):
        BackPropagation(feedForwardNetwork, correction), previousSlopes() {
        resize();
    }

    virtual ~QuickPropagation() {
    }

protected:
    float _maxChange = 1.75; ///< maximum growth factor: a step may grow at most this many times the previous step
    float _epsilon = 0.5;    ///< learning rate for the plain gradient part of the update

    virtual inline void resize() override {
        // Keep one previous-slope entry per layer and neuron, matching the network layout.
        if(previousSlopes.size() != network.size())
            previousSlopes.resize(network.size());

        for(std::size_t i = 0; i < network.size(); i++) {
            if(previousSlopes[i].size() != network[i].size())
                previousSlopes[i].resize(network[i].size());
        }

        if(deltas.size() != network.size())
            deltas.resize(network.size());

        for(std::size_t i = 0; i < network.size(); i++) {
            if(deltas[i].size() != network[i].size())
                deltas[i].resize(network[i].size());

            // (Re)initialise every delta to 1.0.
            for(std::size_t j = 0; j < deltas[i].size(); j++) {
                deltas[i][j] = 1.0;
            }
        }

        weightChange = deltas;
    }

    /// Applies the Quickprop update to the network weights for one presented input
    /// (defined out of line).
    virtual void updateWeights(const std::vector<float> &input) override;

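    // Illustration only, not the project's out-of-line definition of updateWeights:
    // a simplified sketch of the per-weight Quickprop step this class is built around,
    // expressed with the values it stores. `slope` is the current error derivative for
    // a weight, `previousSlope` and `previousDelta` the values remembered from the last
    // update; the name and the exact clamping here are assumptions.
    inline float quickpropStep(float slope, float previousSlope, float previousDelta) const {
        if(previousDelta == 0.0f)
            return -_epsilon * slope; // no previous step yet: plain gradient descent

        float denominator = previousSlope - slope;
        float step = (denominator != 0.0f)
                     ? slope / denominator * previousDelta // jump towards the parabola minimum
                     : _maxChange * previousDelta;         // equal slopes: take the capped step

        // Never let a step grow more than _maxChange times the previous one.
        if(std::fabs(step) > _maxChange * std::fabs(previousDelta))
            step = _maxChange * previousDelta;
        return step;
    }
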
    std::vector<std::vector<float>> previousSlopes = {}; ///< error slope per layer and neuron from the previous update
    std::vector<std::vector<float>> deltas = {};         ///< last step taken per layer and neuron
    std::vector<std::vector<float>> weightChange = {};   ///< working copy of the step sizes, initialised from deltas
};
} // namespace Learning
} // namespace NeuralNetwork
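
// Hypothetical usage sketch (how the network itself is built is not shown in this header,
// and the training calls come from BackPropagation, so both are assumptions here):
//
//     NeuralNetwork::FeedForward::Network &network = /* network built elsewhere */;
//     NeuralNetwork::Learning::QuickPropagation learning(network); // correction defaults to CorrectionFunction::Linear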