cascade: reading and modifying to fit the original implementation

2016-05-08 00:34:43 +02:00
parent 0221158a56
commit 173e07c25d


@@ -67,6 +67,8 @@ std::pair<std::shared_ptr<NeuralNetwork::Neuron>, std::vector<float>> CascadeCor
 	std::vector<std::vector<float>> errors(patterns.size());
 	std::vector<float> meanErrors(outputs);
+	float sumSquareError=0;
 	for(std::size_t patternNumber = 0; patternNumber < patterns.size(); patternNumber++) {
 		auto &pattern = patterns[patternNumber];
 		errors[patternNumber].resize(network.outputs());
@@ -76,6 +78,7 @@ std::pair<std::shared_ptr<NeuralNetwork::Neuron>, std::vector<float>> CascadeCor
 			float error = pow(pattern.second[outputIndex] - output[outputIndex], 2);
 			errors[patternNumber][outputIndex] = error;
 			meanErrors[outputIndex] += error;
+			sumSquareError+=error;
 		}
 	}
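
For context: the added lines accumulate the total sum of squared output errors over all patterns, which the later hunk uses as a normalization factor. A minimal standalone sketch of that accumulation, assuming plain vectors of targets and outputs (the function name computeSumSquareError and the parameter layout are illustrative, not the project's API):

#include <cmath>
#include <cstddef>
#include <vector>

// Sketch: accumulate per-pattern, per-output squared errors and their total.
// targets[p][o] and outputs[p][o] stand in for the pattern data used above.
float computeSumSquareError(const std::vector<std::vector<float>> &targets,
                            const std::vector<std::vector<float>> &outputs) {
	float sumSquareError = 0.0f;
	for(std::size_t p = 0; p < targets.size(); p++) {
		for(std::size_t o = 0; o < targets[p].size(); o++) {
			float error = std::pow(targets[p][o] - outputs[p][o], 2.0f);
			sumSquareError += error; // total SSE, later used to scale the correlation gradient
		}
	}
	return sumSquareError;
}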
@@ -120,8 +123,8 @@ std::pair<std::shared_ptr<NeuralNetwork::Neuron>, std::vector<float>> CascadeCor
 			float thetaO = 0.0;
 			for(std::size_t meanError = 0; meanError < meanErrors.size(); meanError++) {
 				(*candidate)(patternsForOutput[err].first);
-				float derivative = candidate->getActivationFunction().derivatedOutput(candidate->value(), candidate->output());
-				thetaO += correlationSigns[meanError] * (errors[err][meanError] - meanErrors[meanError]) * derivative * candidate->weight(input);
+				float derivative = candidate->getActivationFunction().derivatedOutput(candidate->value(), candidate->output())/sumSquareError;
+				thetaO += correlationSigns[meanError] * (errors[err][meanError] - meanErrors[meanError]) * derivative * patternsForOutput[err].first[input];
 			}
 			dcdw += thetaO;
 		}
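
The replaced lines bring the gradient closer to Fahlman's original cascade-correlation formulation: the activation derivative is divided by the total sum of squared errors, and each term is multiplied by the pattern's input value rather than the candidate's current weight. A rough self-contained sketch of that per-weight correlation gradient (all parameter names are stand-ins for the surrounding variables, not the actual signatures):

#include <cstddef>
#include <vector>

// Sketch of the per-weight correlation gradient the loop above computes:
//   dS/dw_i = sum_p sum_o  sign_o * (E[p][o] - meanE[o]) * f'(net_p) / SSE * input[p][i]
float correlationGradient(std::size_t inputIndex,
                          const std::vector<float> &correlationSigns,        // sign_o
                          const std::vector<std::vector<float>> &errors,     // E[p][o]
                          const std::vector<float> &meanErrors,              // meanE[o]
                          const std::vector<std::vector<float>> &inputs,     // input[p][i]
                          const std::vector<float> &derivatives,             // f'(net_p)
                          float sumSquareError) {
	float dcdw = 0.0f;
	for(std::size_t p = 0; p < errors.size(); p++) {
		float derivative = derivatives[p] / sumSquareError;   // normalized by the total SSE
		for(std::size_t o = 0; o < meanErrors.size(); o++) {
			dcdw += correlationSigns[o] * (errors[p][o] - meanErrors[o])
			        * derivative * inputs[p][inputIndex];     // input value, not the weight
		}
	}
	return dcdw;
}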
@@ -149,5 +152,4 @@ std::pair<std::shared_ptr<NeuralNetwork::Neuron>, std::vector<float>> CascadeCor
 	std::cout << "iter: " << iterations << ", correlation: " << bestCorrelation << ", " << lastCorrelation << "\n";
 	return {bestCandidate, bestCorrelations};
 }
 }
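
For reference, the correlation reported as bestCorrelation is, in Fahlman's formulation, the sum over outputs of the absolute covariance between the candidate's value and the residual output error, typically normalized by the sum of squared errors. A small hypothetical sketch of that score (names are illustrative only, not the project's API):

#include <cmath>
#include <cstddef>
#include <vector>

// Sketch: candidate correlation S = sum_o | sum_p (V_p - meanV) * (E[p][o] - meanE[o]) |
float candidateCorrelation(const std::vector<float> &values,                 // V_p
                           const std::vector<std::vector<float>> &errors,    // E[p][o]
                           const std::vector<float> &meanErrors,             // meanE[o]
                           float sumSquareError) {
	float meanValue = 0.0f;
	for(float v : values) {
		meanValue += v;
	}
	meanValue /= values.size();

	float score = 0.0f;
	for(std::size_t o = 0; o < meanErrors.size(); o++) {
		float covariance = 0.0f;
		for(std::size_t p = 0; p < values.size(); p++) {
			covariance += (values[p] - meanValue) * (errors[p][o] - meanErrors[o]);
		}
		score += std::fabs(covariance);
	}
	return score / sumSquareError;   // normalization as in the original implementation
}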