#pragma once

#include <vector>
#include <cstddef>
#include "FeedForward/Network.h"
#include "CorrectionFunction/Linear.h"

namespace NeuralNetwork {
namespace Learning {

/**
 * @class BackPropagation
 * @brief Backpropagation trainer for a FeedForward::Network, with optional
 *        momentum and weight decay. Takes ownership of the supplied
 *        correction function and deletes it on destruction.
 */
class BackPropagation {
public:
	inline BackPropagation(FeedForward::Network &feedForwardNetwork,
			CorrectionFunction::CorrectionFunction *correction = new CorrectionFunction::Linear())
		: network(feedForwardNetwork), correctionFunction(correction),
		  learningCoefficient(0.4f), slopes() {
		resize();
	}

	virtual ~BackPropagation() {
		delete correctionFunction;
	}

	BackPropagation(const BackPropagation&) = delete;
	BackPropagation& operator=(const BackPropagation&) = delete;

	/** Runs one training step for a single input/expected-output pair. */
	void teach(const std::vector<float> &input, const std::vector<float> &output);

	inline virtual void setLearningCoefficient(const float &coefficient) {
		learningCoefficient = coefficient;
	}

	float getMomentumWeight() const {
		return momentumWeight;
	}

	void setMomentumWeight(const float &m) {
		momentumWeight = m;
	}

	float getWeightDecay() const {
		return weightDecay;
	}

	void setWeightDecay(const float &wd) {
		weightDecay = wd;
	}

protected:
	/** Resizes the per-neuron buffers to match the current network topology. */
	virtual inline void resize() {
		if (slopes.size() != network.size())
			slopes.resize(network.size());
		for (std::size_t i = 0; i < network.size(); i++) {
			if (slopes[i].size() != network[i].size())
				slopes[i].resize(network[i].size());
		}

		if (lastDeltas.size() != network.size())
			lastDeltas.resize(network.size());
		for (std::size_t i = 0; i < network.size(); i++) {
			if (lastDeltas[i].size() != network[i].size()) {
				lastDeltas[i].resize(network[i].size());
				for (std::size_t j = 0; j < lastDeltas[i].size(); j++) {
					lastDeltas[i][j] = 0.0f;
				}
			}
		}

		deltas = lastDeltas;
	}

	virtual void updateWeights(const std::vector<float> &input);
	virtual void computeSlopes(const std::vector<float> &expectation);

	FeedForward::Network &network;
	CorrectionFunction::CorrectionFunction *correctionFunction;
	float learningCoefficient;
	float momentumWeight = 0.0f;
	float weightDecay = 0.0f;

	std::vector<std::vector<float>> slopes;     ///< Error slopes per neuron, from computeSlopes().
	std::vector<std::vector<float>> deltas;     ///< Weight updates of the current step.
	std::vector<std::vector<float>> lastDeltas; ///< Previous-step updates, kept for momentum.
};

}
}
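
/*
 * Usage sketch, not part of this header: it assumes a FeedForward::Network
 * is constructed elsewhere and that teach() performs one forward/backward
 * pass per call. The topology, hyperparameters, and training data below are
 * illustrative only.
 *
 *   NeuralNetwork::FeedForward::Network net = ...; // built elsewhere
 *   NeuralNetwork::Learning::BackPropagation trainer(net);
 *   trainer.setLearningCoefficient(0.1f);
 *   trainer.setMomentumWeight(0.9f);   // reuse a fraction of lastDeltas
 *   trainer.setWeightDecay(0.0001f);   // shrink weights each update
 *
 *   std::vector<float> input  = {0.0f, 1.0f};
 *   std::vector<float> target = {1.0f};
 *   for (int epoch = 0; epoch < 1000; ++epoch)
 *       trainer.teach(input, target);
 */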