cascade correlation: refactoring

2016-05-07 21:17:57 +02:00
parent 36ce3f6463
commit eaafc27211
2 changed files with 61 additions and 58 deletions


@@ -13,8 +13,8 @@ namespace NeuralNetwork {
     class CascadeCorrelation {
         typedef std::pair<std::vector<float>, std::vector<float>> TrainingPattern;
     public:
-        CascadeCorrelation(std::size_t numberOfCandidate = 2, float maxError = 0.7) :
-            _maxError(maxError), _weightRange(1), _numberOfCandidates(numberOfCandidate), _generator(rand()), _distribution() {
+        CascadeCorrelation(std::size_t numberOfCandidate = 20, float maxError = 0.7) :
+            _maxError(maxError), _weightRange(0.3), _numberOfCandidates(numberOfCandidate), _generator(rand()), _distribution() {
             setWeightRange(_weightRange);
         }
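
Note on the new defaults: the candidate pool grows from 2 to 20 neurons and the initial weight range shrinks from 1 to 0.3. A minimal usage sketch under those defaults follows; the header name and the train() call are assumptions, only the constructor signature comes from the diff above.

// Minimal usage sketch, not part of this commit. The header name and the
// train() entry point are assumptions; only the constructor defaults come
// from the diff above (20 candidates, maxError = 0.7, weight range 0.3).
#include "CascadeCorrelation.hh"   // assumed header name

int main() {
    NeuralNetwork::CascadeCorrelation trainer;           // 20 candidates, maxError = 0.7
    NeuralNetwork::CascadeCorrelation strict(50, 0.3f);  // larger pool, tighter error target
    // trainer.train(network, patterns);  // hypothetical entry point, see the loop below
    return 0;
}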
@@ -28,10 +28,10 @@ namespace NeuralNetwork {
             int step = 0;
             float error = trainOutputs(network, patterns);
-            while(step++ < 20 && error > _maxError) {
-                std::shared_ptr<Neuron> candidate = createCandidate(network.getNeuronSize() - outputs);
+            while(step++ < 15 && error > _maxError) {
+                std::vector<std::shared_ptr<Neuron>> candidates = createCandidates(network.getNeuronSize() - outputs);
-                trainCandidates(network, candidate, patterns);
+                std::shared_ptr<Neuron> candidate = trainCandidates(network, candidates, patterns);
                 addBestCandidate(network, candidate);
                 error = trainOutputs(network, patterns);
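
The training loop now builds a whole pool of candidates, trains them all, and installs only the single best one returned by trainCandidates. A rough, generic sketch of that selection step is below; pickBestCandidate and scoreOf are illustrative names rather than code from this repository, and in cascade correlation the score is typically the covariance between a candidate's output and the residual network error.

#include <cassert>
#include <vector>

// Illustrative selection helper, not part of this commit: it only shows the
// kind of choice the refactored trainCandidates() can now make, i.e. train
// every candidate in the pool and return the one with the best score.
template <typename NeuronPtr, typename ScoreFn>
NeuronPtr pickBestCandidate(const std::vector<NeuronPtr> &candidates, ScoreFn scoreOf) {
    assert(!candidates.empty());
    NeuronPtr best = candidates.front();
    auto bestScore = scoreOf(best);
    for (const auto &c : candidates) {
        auto score = scoreOf(c);
        if (score > bestScore) {
            bestScore = score;
            best = c;
        }
    }
    return best;
}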
@@ -60,7 +60,7 @@ namespace NeuralNetwork {
         }
     protected:
-        float _minimalErrorStep = 0.0005;
+        float _minimalErrorStep = 0.00005;
         float _maxError;
         float _weightRange;
         std::size_t _numberOfCandidates;
@@ -84,15 +84,11 @@ namespace NeuralNetwork {
         float trainOutputs(Cascade::Network &network, const std::vector<TrainingPattern> &patterns);
-        void trainCandidates(Cascade::Network &network, std::shared_ptr<Neuron> &candidates, const std::vector<TrainingPattern> &patterns);
+        std::shared_ptr<Neuron> trainCandidates(Cascade::Network &network, std::vector<std::shared_ptr<Neuron>> &candidates, const std::vector<TrainingPattern> &patterns);
         void addBestCandidate(Cascade::Network &network, const std::shared_ptr<Neuron> &candidate) {
             auto neuron = network.addNeuron();
-            //auto tmp = candidate->getWeights();
-            //std::fill(tmp.begin(),tmp.end(),0.2);
-            //neuron->setWeights(tmp);
             neuron->setWeights(candidate->getWeights());
             neuron->setActivationFunction(candidate->getActivationFunction());
             for(auto &n :network.getOutputNeurons()) {
@@ -102,12 +98,10 @@ namespace NeuralNetwork {
                 }
                 weights[weights.size()-1] = _distribution(_generator);
                 n->setWeights(weights);
-                //n->weight(n->getWeights().size() - 1) = _distribution(_generator);
-                //n->weight(n->getWeights().size() - 1) = 0.2;//.distribution(_generator);
             }
         }
-        std::shared_ptr<Neuron> createCandidate(std::size_t id) { //TODO
+        std::vector<std::shared_ptr<Neuron>> createCandidates(std::size_t id) {
             std::vector<std::shared_ptr<Neuron>> candidates;
             for(std::size_t i = 0; i < _numberOfCandidates; i++) {
@@ -117,10 +111,9 @@ namespace NeuralNetwork {
                 for(std::size_t weightIndex = 0; weightIndex < id; weightIndex++) {
                     candidates.back()->weight(weightIndex) = _distribution(_generator);
-                    // candidates.back()->weight(weightIndex) = 0.1;//_distribution(_generator);
                 }
             }
-            return candidates[0];
+            return candidates;
         }
     };
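
For completeness, a standalone sketch of the per-candidate random initialisation that createCandidates() performs. It assumes _distribution is a std::uniform_real_distribution<float> over [-_weightRange, _weightRange] configured by setWeightRange(); that detail is not visible in this diff.

#include <cstddef>
#include <random>
#include <vector>

// Standalone sketch, not part of this commit: every incoming weight of every
// candidate is drawn uniformly from [-range, range], mirroring the
// _distribution(_generator) calls in createCandidates() above.
std::vector<std::vector<float>> randomCandidateWeights(std::size_t numberOfCandidates,
                                                       std::size_t inputsPerCandidate,
                                                       float range,
                                                       std::mt19937 &generator) {
    std::uniform_real_distribution<float> distribution(-range, range);
    std::vector<std::vector<float>> weights(numberOfCandidates);
    for (auto &candidate : weights) {
        candidate.resize(inputsPerCandidate);
        for (auto &w : candidate)
            w = distribution(generator);
    }
    return weights;
}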