CC: getters and setters
@@ -14,7 +14,7 @@ namespace NeuralNetwork {
     typedef std::pair<std::vector<float>, std::vector<float>> TrainingPattern;

     CascadeCorrelation(std::size_t numberOfCandidate = 18, float maxError = 0.7) :
-        _maxError(maxError), _weightRange(0.3), _numberOfCandidates(numberOfCandidate), _generator(rand()), _distribution() {
+        _errorTreshold(maxError), _weightRange(0.3), _numberOfCandidates(numberOfCandidate), _generator(rand()), _distribution() {
         setWeightRange(_weightRange);
     }

@@ -33,7 +33,7 @@ namespace NeuralNetwork {
         } else {
             error = trainOutputs(network, patterns);
         }
-        while(_epoch++ < _maxHiddenUnits && error > _maxError) {
+        while(_epoch++ < _maxHiddenUnits && error > _errorTreshold) {
             std::vector<std::shared_ptr<Neuron>> candidates = createCandidates(network.getNeuronSize() - outputs);

             std::pair<std::shared_ptr<Neuron>, std::vector<float>> candidate = trainCandidates(network, candidates, patterns);

@@ -50,14 +50,6 @@ namespace NeuralNetwork {
         return network;
     }

-    std::size_t getNumberOfCandidates() const {
-        return _numberOfCandidates;
-    }
-
-    void setNumberOfCandidates(std::size_t numberOfCandidates) {
-        _numberOfCandidates = numberOfCandidates;
-    }
-
     float getWeightRange() const {
         return _weightRange;
     }

@@ -87,20 +79,69 @@ namespace NeuralNetwork {
         return _epoch;
     }

+    void setErrorThreshold(float err) {
+        _errorTreshold = err;
+    }
+
+    float getErrorThreshold() const {
+        return _errorTreshold;
+    }
+
+    void setMaxCandidateIterationsWithoutChange(std::size_t iter) {
+        _maxCandidateIterationsWithoutChange = iter;
+    }
+
+    std::size_t getMaxCandidateIterationsWithoutChange() const {
+        return _maxCandidateIterationsWithoutChange;
+    }
+
+    void setMaxCandidateIterations(std::size_t iter) {
+        _maxCandidateIterations = iter;
+    }
+
+    std::size_t getMaxCandidateIterations() const {
+        return _maxCandidateIterations;
+    }
+
+    void setMaxOutputLearningIterationsWithoutChange(std::size_t iter) {
+        _maxOutputLearningIterationsWithoutChange = iter;
+    }
+
+    std::size_t getMaxOutputLearningIterationsWithoutChange() const {
+        return _maxOutputLearningIterationsWithoutChange;
+    }
+
+    void setMaxOutputLearningIterations(std::size_t iter) {
+        _maxOutputLearningIterations = iter;
+    }
+
+    std::size_t getMaxOutputLearningIterations() const {
+        return _maxOutputLearningIterations;
+    }
+
+    std::size_t getNumberOfCandidates() const {
+        return _numberOfCandidates;
+    }
+
+    void setNumberOfCandidates(std::size_t numberOfCandidates) {
+        _numberOfCandidates = numberOfCandidates;
+    }
+
 protected:
     std::shared_ptr<ActivationFunction::ActivationFunction> _activFunction = std::make_shared<ActivationFunction::Sigmoid>(-0.8);
     float _minimalErrorStep = 0.00005;
-    float _maxError;
+    float _errorTreshold;
     float _weightRange;

     std::size_t _epoch = 0;
     std::size_t _maxHiddenUnits = 20;
     std::size_t _maxRandomOutputWeights = 0;
     std::size_t _numberOfCandidates;
-    std::size_t _maxOutpuLearningIterations = 1000;
-    std::size_t _maxOutpuLearningIterationsWithoutChange = 100;
+    std::size_t _maxOutputLearningIterations = 1000;
+    std::size_t _maxOutputLearningIterationsWithoutChange = 50;
     std::size_t _maxCandidateIterations = 4000;
-    std::size_t _maxCandidateIterationsWithoutChange = 15;
+    std::size_t _maxCandidateIterationsWithoutChange = 50;

     std::mt19937 _generator;
     std::uniform_real_distribution<> _distribution;
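A minimal usage sketch, not part of the commit, showing how the accessors added above could be driven from caller code. It assumes the CascadeCorrelation header from this diff is included (the include path below is a hypothetical placeholder) and uses only names visible in the hunks:

    #include "CascadeCorrelation.hpp"  // hypothetical path to the header edited above

    int main() {
        // Constructor arguments follow the signature shown in this diff: numberOfCandidate, maxError.
        NeuralNetwork::CascadeCorrelation cc(18, 0.1f);

        // Tune the training limits through the newly added setters instead of editing the defaults.
        cc.setNumberOfCandidates(24);
        cc.setErrorThreshold(0.05f);
        cc.setMaxCandidateIterations(2000);
        cc.setMaxCandidateIterationsWithoutChange(25);
        cc.setMaxOutputLearningIterations(500);
        cc.setMaxOutputLearningIterationsWithoutChange(40);
        return 0;
    }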
@@ -44,7 +44,7 @@ float CascadeCorrelation::trainOutputs(Cascade::Network &network, const std::vec
             iterWithoutImporvement = 0;
         }
     }
-    while(iteration++ < _maxOutpuLearningIterations && iterWithoutImporvement < _maxOutpuLearningIterationsWithoutChange);
+    while(iteration++ < _maxOutputLearningIterations && iterWithoutImporvement < _maxOutputLearningIterationsWithoutChange);

     std::cout << "outputLearning: " << error << ", last: " << lastError << ", iters: " << iteration << "\n";
     for(std::size_t neuron = 0; neuron < outputs; neuron++) {

@@ -130,7 +130,7 @@ float CascadeCorrelation::trainOutputsRandom(std::size_t step, Cascade::Network
             iterWithoutImporvement = 0;
         }
     }
-    while(iteration++ < _maxOutpuLearningIterations && iterWithoutImporvement < _maxOutpuLearningIterationsWithoutChange);
+    while(iteration++ < _maxOutputLearningIterations && iterWithoutImporvement < _maxOutputLearningIterationsWithoutChange);
     if(error < bestScore) {
         bestScore = error;
         bestNetwork = index;
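For readers puzzled by the bare while(...); lines in the two hunks above: each closes a do/while loop that stops either after a fixed iteration budget or once the error has stopped improving for a number of passes. A minimal, self-contained sketch of that shape, with the actual output-training pass replaced by a placeholder; everything except the two limit parameters is a stand-in, not code from this repository:

    #include <cstddef>

    float outputTrainingSketch(std::size_t maxIterations, std::size_t maxIterationsWithoutChange) {
        std::size_t iteration = 0;
        std::size_t iterationsWithoutImprovement = 0;
        float error = 1.0f;
        float lastError = error;
        do {
            error *= 0.99f;                        // placeholder for one real training pass
            if(error < lastError) {
                lastError = error;
                iterationsWithoutImprovement = 0;  // progress made, reset the stall counter
            } else {
                ++iterationsWithoutImprovement;    // stalled pass
            }
        } while(iteration++ < maxIterations && iterationsWithoutImprovement < maxIterationsWithoutChange);
        return error;
    }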
@@ -148,7 +148,6 @@ float CascadeCorrelation::trainOutputsRandom(std::size_t step, Cascade::Network
     return bestScore;
 }

-
 std::pair<std::shared_ptr<NeuralNetwork::Neuron>, std::vector<float>> CascadeCorrelation::trainCandidates(Cascade::Network &network,
                                                                                                            std::vector<std::shared_ptr<Neuron>> &candidates,
                                                                                                            const std::vector<TrainingPattern> &patterns) {