#pragma once

#include <vector>
#include <cstddef>
#include <cmath>

#include "BackPropagation.h"

namespace NeuralNetwork {
namespace Learning {

/**
 * @class QuickPropagation
 * @brief Quickprop learning: a second-order variant of back-propagation
 * that uses the current and previous error slopes to take a secant step
 * toward the minimum of a locally fitted parabola.
 */
class QuickPropagation : public BackPropagation {
public:
    inline QuickPropagation(FeedForward::Network &feedForwardNetwork,
                            CorrectionFunction::CorrectionFunction *correction = new CorrectionFunction::Linear())
        : BackPropagation(feedForwardNetwork, correction), previousSlopes() {
        resize();
    }

    virtual ~QuickPropagation() {
    }

protected:
    /// Upper bound on the growth of a weight step relative to the previous step.
    float _maxChange = 1.75;
    /// Learning rate for the plain gradient-descent component.
    float _epsilon = 0.5;

    virtual inline void resize() override {
        if (slopes.size() != network.size())
            slopes.resize(network.size());
        for (std::size_t i = 0; i < network.size(); i++) {
            if (slopes[i].size() != network[i].size())
                slopes[i].resize(network[i].size());
        }

        if (previousSlopes.size() != network.size())
            previousSlopes.resize(network.size());
        for (std::size_t i = 0; i < network.size(); i++) {
            if (previousSlopes[i].size() != network[i].size())
                previousSlopes[i].resize(network[i].size());
            for (std::size_t j = 0; j < previousSlopes[i].size(); j++) {
                previousSlopes[i][j] = 1.0;
            }
        }

        if (lastWeightChange.size() != network.size())
            lastWeightChange.resize(network.size());
        for (std::size_t i = 0; i < network.size(); i++) {
            if (lastWeightChange[i].size() != network[i].size())
                lastWeightChange[i].resize(network[i].size());
            for (std::size_t j = 0; j < lastWeightChange[i].size(); j++) {
                lastWeightChange[i][j] = 1.0;
            }
        }

        weightChange = lastWeightChange;
    }

    virtual void updateWeights(const std::vector<float> &input) override;

    std::vector<std::vector<float>> previousSlopes = {};
    std::vector<std::vector<float>> lastWeightChange = {};
    std::vector<std::vector<float>> weightChange = {};
};

}
}
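
// ---------------------------------------------------------------------------
// Sketch (not part of this header's API): updateWeights() is only declared
// above and defined elsewhere. The free function below illustrates the
// per-weight rule Quickprop is based on (Fahlman, 1988), assuming _epsilon
// and _maxChange play their usual roles as the gradient-descent learning
// rate and the step-growth limit. The function name, signature, and
// defaults are hypothetical, chosen for illustration only:
//
//   dw(t) = S(t) / (S(t-1) - S(t)) * dw(t-1)
//
// where S is the error slope, with |dw(t)| capped at maxChange * |dw(t-1)|
// and a plain gradient step of size epsilon when the quotient is unusable.
// ---------------------------------------------------------------------------
#include <algorithm> // std::clamp (C++17); <cmath> is included above

namespace QuickpropSketch {

inline float quickpropStep(float slope, float previousSlope,
                           float previousChange,
                           float epsilon = 0.5f, float maxChange = 1.75f) {
    const float denominator = previousSlope - slope;
    // No usable previous step (e.g. the first epoch) or a flat secant:
    // fall back to an ordinary gradient-descent step.
    if (previousChange == 0.0f || denominator == 0.0f)
        return -epsilon * slope;
    // Secant step toward the minimum of the parabola fitted through the
    // previous and current slopes.
    const float change = slope / denominator * previousChange;
    // Growth limit: never step more than maxChange times the last step.
    const float limit = maxChange * std::fabs(previousChange);
    return std::clamp(change, -limit, limit);
}

} // namespace QuickpropSketch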