refactoring recurrent

This commit is contained in:
2016-01-28 20:49:47 +01:00
parent 3c26c9641c
commit 13b179dd57
9 changed files with 229 additions and 61 deletions

View File

@@ -2,7 +2,7 @@ include ../Makefile.const
LIB_DIR = ../lib
ALL_TESTS=activation basis recurrent
ALL_TESTS=activation basis recurrent recurrent_join
#LIBS=$(LIB_DIR)/Genetics.a $(LIB_DIR)/NeuralNetwork.a
#LIBS=-lGenetics.so -lNeuronNetwork
@@ -12,7 +12,7 @@ CXXFLAGS += -I$(LIB_DIR)
all:$(ALL_TESTS);
test: all
@for i in $(ALL_TESTS); do echo -n ./$$i; echo -n " - "; ./$$i; echo ""; done
@for i in $(ALL_TESTS); do echo -n ./$$i; ./$$i; echo ""; done
../src/NeuralNetwork.so: lib

View File

@@ -1,30 +1,22 @@
#include <NeuralNetwork/Recurrent/Network.h>
#include <assert.h>
#include <iostream>
int main() {
NeuralNetwork::Recurrent::Network a(2,1,1);
/* a.getNeurons()[3].setWeight(a.getNeurons()[2],0.00000001565598595);
a.getNeurons()[2].setWeight(a.getNeurons()[3],0.00000001565598595);
a.getNeurons()[3].setWeight(a.getNeurons()[1],0.00000001565598595);
a.getNeurons()[3].setWeight(a.getNeurons()[0],0.00000001565598595);
a.computeOutput({0.5,0});
std::cout << a;
NeuralNetwork::Recurrent::Network b(a.stringify());
*/
a.getNeurons()[4]->setWeight(*a.getNeurons()[1],0.05);
a.getNeurons()[4]->setWeight(*a.getNeurons()[2],0.05);
a.getNeurons()[4]->setWeight(*a.getNeurons()[3],0.7);
a.getNeurons()[3]->setWeight(*a.getNeurons()[4],0.1);
a.getNeurons()[3].setWeight(a.getNeurons()[0],0.05);
a.getNeurons()[3].setWeight(a.getNeurons()[1],0.05);
a.getNeurons()[3].setWeight(a.getNeurons()[2],0.7);
a.getNeurons()[2].setWeight(a.getNeurons()[3],0.1);
std::vector <float> solutions({0.5,0.5732923,0.6077882,0.6103067,0.6113217,0.6113918,0.61142,0.6114219,0.6114227,0.6114227});
std::cout << a;
for(int i=0;i<40;i++) {
std::cout << a.computeOutput({1,0.7})[0] << "\n";
for(size_t i=0;i<solutions.size();i++) {
float res= a.computeOutput({1,0.7})[0];
assert(res > solutions[i]*0.999 && res < solutions[i]*1.001);
}
std::cout << a;
std::cout << a;
}