#pragma once

// The bracketed include targets were lost in the original text; <vector> and
// <cstddef> are required by the code below, and the FeedForward::Network header
// path is an assumption modelled on the "CorrectionFunction/Linear.h" convention.
#include <vector>
#include <cstddef>
#include "FeedForward/Network.h"
#include "CorrectionFunction/Linear.h"

namespace NeuralNetwork {
namespace Learning {

    /**
     * @class BackPropagation
     * @brief Backpropagation learning for a FeedForward::Network: propagates the
     *        output error backwards and adjusts weights using the learning
     *        coefficient and the supplied correction function.
     */
    class BackPropagation {
        public:
            inline BackPropagation(FeedForward::Network &feedForwardNetwork,
                    CorrectionFunction::CorrectionFunction *correction = new CorrectionFunction::Linear()) :
                    network(feedForwardNetwork), correctionFunction(correction),
                    learningCoefficient(0.4), deltas() {
                resize();
            }

            virtual ~BackPropagation() {
                delete correctionFunction;
            }

            BackPropagation(const BackPropagation&) = delete;
            BackPropagation& operator=(const BackPropagation&) = delete;

            /// Performs one learning step for the given input / expected-output pair.
            /// (The vector element type was stripped in the original; float is assumed,
            /// matching learningCoefficient.)
            void teach(const std::vector<float> &input, const std::vector<float> &output);

            inline virtual void setLearningCoefficient(const float &coefficient) {
                learningCoefficient = coefficient;
            }

        protected:
            /// Resizes the per-neuron delta storage to match the current network topology.
            virtual inline void resize() {
                if(deltas.size() != network.size())
                    deltas.resize(network.size());

                for(std::size_t i = 0; i < network.size(); i++) {
                    if(deltas[i].size() != network[i].size())
                        deltas[i].resize(network[i].size());
                }
            }

            virtual void updateWeights(const std::vector<float> &input);

            FeedForward::Network &network;
            CorrectionFunction::CorrectionFunction *correctionFunction;
            float learningCoefficient;
            std::vector<std::vector<float>> deltas; // per-neuron error deltas
    };

}
}
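
// Usage sketch (assumptions: the surrounding project builds a FeedForward::Network
// elsewhere; its construction is not shown by this header, so only the calls
// declared above are used here):
//
//   FeedForward::Network net /* ... constructed elsewhere ... */;
//   NeuralNetwork::Learning::BackPropagation teacher(net);
//   teacher.setLearningCoefficient(0.1f);
//
//   std::vector<float> input  = {0.0f, 1.0f};
//   std::vector<float> target = {1.0f};
//   teacher.teach(input, target);   // one backpropagation step on this pair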