Cascade-Correlation ...

This commit is contained in:
2016-05-13 20:18:19 +02:00
parent a40b6fad88
commit 9e2ce222fb
2 changed files with 44 additions and 15 deletions

View File

@@ -18,7 +18,11 @@ namespace NeuralNetwork {
setWeightRange(_weightRange);
}
Cascade::Network construct(const std::vector<TrainingPattern> &patterns) {
// Virtual destructor (added in this commit): the class exposes virtual
// member functions (construct, trainOutputs, ...), so deleting a derived
// trainer through a CascadeCorrelation pointer requires this to be virtual.
virtual ~CascadeCorrelation() {
}
virtual Cascade::Network construct(const std::vector<TrainingPattern> &patterns) {
std::size_t inputs = patterns[0].first.size();
std::size_t outputs = patterns[0].second.size();
@@ -28,6 +32,7 @@ namespace NeuralNetwork {
_epoch = 0;
float error;
float lastError;
if(_maxRandomOutputWeights) {
error = trainOutputsRandom(0, network, patterns);
} else {
@@ -40,11 +45,18 @@ namespace NeuralNetwork {
addBestCandidate(network, candidate);
lastError=error;
if(_maxRandomOutputWeights) {
error = trainOutputsRandom(_epoch, network, patterns);
} else {
error = trainOutputs(network, patterns);
}
if(_prunningStatus && error*1.01 >= lastError) { // it is not getting bettter
network.removeLastHiddenNeuron();
error=lastError;
std::cout << "PRUNED\n";
}
}
return network;
@@ -79,7 +91,15 @@ namespace NeuralNetwork {
return _epoch;
}
void setErrorThreshold(std::size_t err) {
// Enable or disable pruning. When enabled, construct() removes a freshly
// added hidden neuron whose training error did not improve on the previous
// error (see the error*1.01 >= lastError check in the training loop).
void setPruningStatus(bool status) {
_prunningStatus=status;
}
// Returns whether pruning of non-improving hidden neurons is enabled.
bool getPruningStatus() const {
return _prunningStatus;
}
// Sets the target training error below which construction stops.
// Parameter type changed from std::size_t to float in this commit, since
// the network error it is compared against is a float.
void setErrorThreshold(float err) {
_errorTreshold = err;
}
@@ -128,24 +148,25 @@ namespace NeuralNetwork {
}
protected:
std::shared_ptr<ActivationFunction::ActivationFunction> _activFunction = std::make_shared<ActivationFunction::Sigmoid>(-0.8);
std::shared_ptr<ActivationFunction::ActivationFunction> _activFunction = std::make_shared<ActivationFunction::Sigmoid>(-1.0);
float _minimalErrorStep = 0.00005;
float _errorTreshold;
float _weightRange;
bool _prunningStatus = false;
std::size_t _epoch = 0;
std::size_t _maxHiddenUnits = 20;
std::size_t _maxRandomOutputWeights = 0;
std::size_t _numberOfCandidates;
std::size_t _maxOutputLearningIterations = 1000;
std::size_t _maxOutputLearningIterationsWithoutChange = 50;
std::size_t _maxOutputLearningIterationsWithoutChange = 5;
std::size_t _maxCandidateIterations = 4000;
std::size_t _maxCandidateIterationsWithoutChange = 50;
std::size_t _maxCandidateIterationsWithoutChange = 5;
std::mt19937 _generator;
std::uniform_real_distribution<> _distribution;
private:
std::vector<float> getInnerNeuronsOutput(Cascade::Network &network, const std::vector<float> &input) {
std::vector<float> output = network.computeOutput(input);
@@ -161,11 +182,11 @@ namespace NeuralNetwork {
return outputOfUnits;
}
float trainOutputs(Cascade::Network &network, const std::vector<TrainingPattern> &patterns);
virtual float trainOutputs(Cascade::Network &network, const std::vector<TrainingPattern> &patterns);
float trainOutputsRandom(std::size_t step, Cascade::Network &network, const std::vector<TrainingPattern> &patterns);
virtual float trainOutputsRandom(std::size_t step, Cascade::Network &network, const std::vector<TrainingPattern> &patterns);
std::pair<std::shared_ptr<Neuron>, std::vector<float>> trainCandidates(Cascade::Network &network, std::vector<std::shared_ptr<Neuron>> &candidates,
virtual std::pair<std::shared_ptr<Neuron>, std::vector<float>> trainCandidates(Cascade::Network &network, std::vector<std::shared_ptr<Neuron>> &candidates,
const std::vector<TrainingPattern> &patterns);
void addBestCandidate(Cascade::Network &network, const std::pair<std::shared_ptr<Neuron>, std::vector<float>> &candidate) {
@@ -180,9 +201,9 @@ namespace NeuralNetwork {
for(auto &weight: weights) {
weight *= 0.9;
}
outIndex++;
n->setWeights(weights);
n->weight(n->getWeights().size() - 1) = -candidate.second[outIndex] / weightPortion;
n->weight(n->getWeights().size() - 1) = candidate.second[outIndex] / weightPortion;
outIndex++;
}
}
@@ -195,7 +216,7 @@ namespace NeuralNetwork {
candidates.back()->setActivationFunction(*_activFunction.get());
for(std::size_t weightIndex = 0; weightIndex < id; weightIndex++) {
candidates.back()->weight(weightIndex) = _distribution(_generator) * 3.0;
candidates.back()->weight(weightIndex) = _distribution(_generator);// * 3.0;
}
}
return candidates;