#pragma once

#include <algorithm>
#include <cstddef>
#include <memory>
#include <vector>

#include "BatchPropagation.h"

namespace NeuralNetwork {
namespace Learning {

/**
 * @class RProp
 * @brief Resilient propagation (RProp) batch learning. Each weight keeps its
 *        own adaptive step size, grown or shrunk according to the sign of
 *        successive batch gradients; the gradient magnitude itself is ignored.
 */
class RProp : public BatchPropagation {
public:
    // Note: the template arguments of the correction functor were lost from the
    // original source; CorrectionFunction stands in for whatever element type
    // BatchPropagation actually expects here.
    RProp(FeedForward::Network &feedForwardNetwork,
          std::shared_ptr<CorrectionFunction> correction = std::make_shared<CorrectionFunction>())
        : BatchPropagation(feedForwardNetwork, correction) {
    }

    RProp(const RProp&) = delete;
    RProp& operator=(const RProp&) = delete;

    void setInitialWeightChange(float initVal) { initialWeightChange = initVal; }

    // RProp adapts a per-weight step size, so a global learning coefficient
    // has no effect; this setter is intentionally a no-op.
    void setLearningCoefficient(float) { }

protected:
    void resize() override {
        BatchPropagation::resize();

        // Mirror the layered [layer][neuron][weight] shape of _gradients.
        _lastGradients = _gradients;

        // Per-weight step sizes, initialized to the configured starting value.
        _changesOfWeightChanges = _lastGradients;
        for (std::size_t i = 1; i < _network.size(); i++) {
            for (std::size_t j = 0; j < _changesOfWeightChanges[i].size(); j++) {
                std::fill(_changesOfWeightChanges[i][j].begin(), _changesOfWeightChanges[i][j].end(), initialWeightChange);
            }
        }

        // Previous weight changes, consulted when a gradient sign flip
        // requires backtracking.
        _lastWeightChanges = _lastGradients;
        for (std::size_t i = 1; i < _network.size(); i++) {
            for (std::size_t j = 0; j < _lastWeightChanges[i].size(); j++) {
                std::fill(_lastWeightChanges[i][j].begin(), _lastWeightChanges[i][j].end(), 0.1f);
            }
        }
    }

    void updateWeightsAndEndBatch() override;

    std::vector<std::vector<std::vector<float>>> _lastGradients = {};
    std::vector<std::vector<std::vector<float>>> _lastWeightChanges = {};
    std::vector<std::vector<std::vector<float>>> _changesOfWeightChanges = {};

    float maxChangeOfWeights = 50;       // upper bound on a per-weight step size
    float minChangeOfWeights = 0.0001f;  // lower bound on a per-weight step size
    float initialWeightChange = 0.02f;   // starting per-weight step size
    float weightChangePlus = 1.2f;       // growth factor on a consistent gradient sign
    float weightChangeMinus = 0.5f;      // shrink factor on a gradient sign flip
};

}
}
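// ---------------------------------------------------------------------------
// Illustrative sketch, not part of the library: updateWeightsAndEndBatch() is
// defined elsewhere, but the state kept above (last gradients, last weight
// changes, bounded per-weight step sizes with growth/shrink factors) matches
// the classic RProp+ rule of Riedmiller & Braun (1993), including weight
// backtracking. The flat-array helper below shows that rule under those
// assumptions; every name in it is hypothetical, and the real method would
// iterate the nested layer/neuron/weight vectors instead.

namespace NeuralNetwork { namespace Learning { namespace Sketch {

inline void rpropPlusStep(std::vector<float> &weights,
                          std::vector<float> &gradients,      // dE/dw accumulated over the batch
                          std::vector<float> &lastGradients,  // gradients from the previous batch
                          std::vector<float> &stepSizes,      // per-weight step, starts at initialWeightChange
                          std::vector<float> &lastChanges,    // previous weight change, for backtracking
                          float etaPlus = 1.2f, float etaMinus = 0.5f,
                          float stepMax = 50.f, float stepMin = 0.0001f) {
    for (std::size_t w = 0; w < weights.size(); ++w) {
        const float g = gradients[w];
        const float product = g * lastGradients[w];
        if (product > 0.f) {
            // Same sign as last batch: accelerate and step against the gradient.
            stepSizes[w] = std::min(stepSizes[w] * etaPlus, stepMax);
            lastChanges[w] = (g > 0.f ? -stepSizes[w] : stepSizes[w]);
            weights[w] += lastChanges[w];
            lastGradients[w] = g;
        } else if (product < 0.f) {
            // Sign flip: the previous step jumped over a minimum. Shrink the
            // step size, revert the previous change, and zero the stored
            // gradient so the next batch takes the neutral branch.
            stepSizes[w] = std::max(stepSizes[w] * etaMinus, stepMin);
            weights[w] -= lastChanges[w];
            lastGradients[w] = 0.f;
        } else {
            // Neutral case (a gradient was zero): plain step of the current size.
            const float step = (g > 0.f) ? -stepSizes[w] : (g < 0.f ? stepSizes[w] : 0.f);
            lastChanges[w] = step;
            weights[w] += step;
            lastGradients[w] = g;
        }
    }
}

}}} // namespace NeuralNetwork::Learning::Sketch

// Design note: because only the gradient's sign is used, RProp is insensitive
// to the error function's scale, which is why setLearningCoefficient() above
// can safely ignore its argument.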