new weights interface and adaptation + small tweaks

This commit is contained in:
2016-02-18 16:36:16 +01:00
parent 19253e975f
commit ec82018ef9
10 changed files with 101 additions and 99 deletions

View File

@@ -3,10 +3,9 @@
void NeuralNetwork::FeedForward::Layer::solve(const std::vector<float> &input, std::vector<float> &output) {
output.resize(neurons.size());
for(auto &neuron:neurons) {
output[neuron->id()]=neuron->operator()(input);
for(auto&neuron: neurons) {
output[neuron->id()] = neuron->operator()(input);
}
}
void NeuralNetwork::FeedForward::Layer::stringify(std::ostream &out) const {

View File

@@ -1,30 +1,21 @@
#include <NeuralNetwork/FeedForward/Network.h>
std::vector<float> NeuralNetwork::FeedForward::Network::computeOutput(const std::vector<float>& input) {
// this is here for simple swapping between input and output
std::vector<float> partial1=std::vector<float>(input.size()+1);
std::vector<float> partial2;
std::vector<float> *partialInputPtr = &partial1;
std::vector<float> *partialOutputPtr = &partial2;
std::vector<float> partialInput(input.size()+1);
std::vector<float> partialOutput;
// 0 is bias
partial1[0]=1.0;
partialInput[0]=1.0;
for(std::size_t i=0;i<input.size();i++) {
partial1[i+1]=input[i];
partialInput[i+1]=input[i];
}
for(std::size_t i=1;i<layers.size();i++) {
layers[i]->solve(*partialInputPtr,*partialOutputPtr);
std::swap(partialInputPtr,partialOutputPtr);
layers[i]->solve(partialInput,partialOutput);
partialInput.swap(partialOutput);
}
for(std::size_t i=0;i<partialInputPtr->size()-1;i++) {
partialInputPtr->operator[](i)=partialInputPtr->operator[](i+1);
}
partialInputPtr->resize(partialInputPtr->size()-1);
return std::vector<float>(*partialInputPtr);
return std::vector<float>(partialInput.begin()+1,partialInput.end());
}
void NeuralNetwork::FeedForward::Network::randomizeWeights() {
@@ -34,7 +25,7 @@ void NeuralNetwork::FeedForward::Network::randomizeWeights() {
for(std::size_t neuron=1; neuron < layer->size(); neuron ++ ) {
for(std::size_t prevNeuron=0; prevNeuron < prevLayer->size(); prevNeuron++) {
layer->operator[](neuron).setWeight(prevLayer->operator[](prevNeuron),1.0-static_cast<float>(rand()%2001)/1000.0);
layer->operator[](neuron).weight(prevNeuron)=1.0-static_cast<float>(rand()%2001)/1000.0;
}
}
}

View File

@@ -27,7 +27,7 @@ void NeuralNetwork::Learning::BackPropagation::teach(FeedForward::Network &netwo
for(std::size_t j=1;j<layer.size();j++) {
float deltasWeight = 0;
for(std::size_t k=1;k<network[layerIndex+1].size();k++) {
deltasWeight+=deltas[layerIndex+1][k]* network[layerIndex+1][k].getWeight(layer[j]);
deltasWeight+=deltas[layerIndex+1][k]* network[layerIndex+1][k].weight(j);
}
float newDelta=deltasWeight*layer[j].getActivationFunction().derivatedOutput(layer[j].value(),layer[j].output());
deltas[layerIndex][j]=newDelta;
@@ -41,12 +41,13 @@ void NeuralNetwork::Learning::BackPropagation::teach(FeedForward::Network &netwo
std::size_t max=prevLayer.size();
for(std::size_t j=1;j<layer.size();j++) {
layer[j].setWeight(prevLayer[0],layer[j].getWeight(prevLayer[0])+deltas[layerIndex][j]*learningCoefficient);
deltas[layerIndex][j]*=learningCoefficient;
layer[j].weight(0)+=deltas[layerIndex][j];
for(std::size_t k=1;k<max;k++) {
if(layerIndex==1) {
layer[j].setWeight(prevLayer[k], layer[j].getWeight(prevLayer[k])+learningCoefficient*deltas[layerIndex][j]*input[k-1]);
layer[j].weight(k)+=deltas[layerIndex][j]*input[k-1];
} else {
layer[j].setWeight(prevLayer[k], layer[j].getWeight(prevLayer[k])+learningCoefficient*deltas[layerIndex][j]*prevLayer[k].output());
layer[j].weight(k)+=deltas[layerIndex][j]*prevLayer[k].output();
}
}
}

View File

@@ -1,23 +1,31 @@
#include <NeuralNetwork/Recurrent/Network.h>
std::vector<float> NeuralNetwork::Recurrent::Network::computeOutput(const std::vector<float>& input, unsigned int iterations) {
//TODO: check inputSize
size_t neuronSize=neurons.size();
std::vector<float> outputs(neuronSize);
for(size_t i=0;i<inputSize;i++) {
outputs[i+1]=input[i];
assert(input.size() == inputSize);
if(outputs.size() != neurons.size()) {
outputs.resize(neurons.size());
for(auto &neuron:neurons) {
outputs[neuron->id()]=neuron->output();
}
}
std::vector<float> newOutputs(neurons.size());
for(size_t i=0;i<inputSize;i++) {
outputs[i+1]=input[i];
newOutputs[i+1]=input[i];
}
newOutputs[0]=neurons[0]->output();
std::size_t neuronsSize = neurons.size();
for(unsigned int iter=0;iter< iterations;iter++) {
for(size_t i=inputSize+1;i<neuronSize;i++) {
outputs[i]=neurons[i]->output();
}
// update neurons
for(size_t i=inputSize+1;i<neuronSize;i++) {
neurons[i]->operator()(outputs);
for(size_t i=inputSize+1;i<neuronsSize;i++) {
newOutputs[i] = neurons[i]->operator()(outputs);
}
outputs.swap(newOutputs);
}
std::vector<float> ret;