New implementation of the feed-forward (FF) network

2014-11-04 22:25:11 +01:00
parent 0238312a5b
commit 75ca9bc21f
23 changed files with 370 additions and 104 deletions
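As the test diffs below show, the new FeedForwardNetwork (and the added FeedForwardNetworkQuick) takes its layer sizes as an initializer list instead of separate constructor arguments, while layers and neurons are still reached through operator[]. The following is a minimal sketch of that construction and per-neuron access, using only calls that appear in the updated tests; the include paths are copied from the tests (which reference FeedForwardQuick both with and without a .h suffix), and the 2-4-1 topology and printed labels are illustrative only.

#include "../src/NeuronNetwork/Network"
#include "../src/NeuronNetwork/FeedForward"
#include "../src/NeuronNetwork/FeedForwardQuick"
#include <iostream>

int main()
{
	// Layer sizes are now given as an initializer list
	// (the old form was FeedForwardNetwork n(2, 4, 1)).
	Shin::NeuronNetwork::FeedForwardNetwork n({2, 4, 1});
	Shin::NeuronNetwork::FeedForwardNetworkQuick nq({2, 4, 1});

	// Layers are reached with operator[]; each layer reports its size.
	std::cout << "hidden layer size: " << n[1]->size() << "\n";

	// Individual neurons expose their potential and weights,
	// as exercised by the tests below.
	n[2]->operator[](0)->setPotential(25);
	nq[2]->operator[](0)->setWeight(0, 26.0);
	std::cout << "output potential: " << n[2]->operator[](0)->getPotential() << "\n";
	return 0;
}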

View File

@@ -1,9 +1,8 @@
include ../Makefile.const
LIB_DIR = ../lib
GEN_TESTS=g-01
#g-02
NN_TESTS=nn-01 nn-02 nn-03
GEN_TESTS=g-01 g-02
NN_TESTS=nn-01 nn-02 nn-03 nn-04
ALL_TESTS=$(NN_TESTS) $(GEN_TESTS)
LIBS=$(LIB_DIR)/Genetics.a $(LIB_DIR)/NeuronNetwork.a

View File

@@ -41,10 +41,11 @@ class S: public Shin::Genetics::Individual
return S(a,b);
}
double getFitness() const
double getFitness()
{
// return fit;
return abs(Sa-98545)+abs(Q+85);
return (double)1.0/(double)(Sa);
//double s=abs(Sa-98545)+abs(Q+85);
//return Sa*100-Q*5;
//return 985258-s;
//return s < 0?0:s;
@@ -61,6 +62,7 @@ class S: public Shin::Genetics::Individual
int main()
{
Shin::Genetics::Genetics<S> g;
g.getCreator().setMaxGenerationSize(30);
g.addIndividual(S(1,0));
g.addIndividual(S(1,50));
g.addIndividual(S(50,50));

View File

@@ -21,12 +21,12 @@ class X: public Shin::NeuronNetwork::Problem
class S: public Shin::Genetics::Individual
{
public:
S():n(2,4,1)
S():n({2,4,1})
{
}
void mutate()
{
for(int i=0;i<3;i++)
for(unsigned int i=0;i<n.size();i++)
{
for (int j=0;j<n[i]->size();j++)
{
@@ -39,12 +39,14 @@ class S: public Shin::Genetics::Individual
}
int k;
if(i==0)
k=0;
continue;
else if(i==1)
k=2;
k=1;
else
k=3;
for(;k>=0;--k)
{
std::cerr << "i: "<<i <<" " << k << std::endl;
if(rand()%20==0)
{
if(rand()%2)
@@ -52,15 +54,16 @@ class S: public Shin::Genetics::Individual
else
n[i]->operator[](j)->setWeight(k,n[i]->operator[](j)->getWeight(k)+1);
}
}
}
}
};
S* SQ(S *s)
}
S combine(S &s)
{
S * a= new S();
S a;
for(int i=0;i<3;i++)
{
for (int j=0;j<n[i]->size();j++)
for (int j=0;j<s.n[i]->size();j++)
{
Shin::NeuronNetwork::Neuron *q;
if(rand()%2==1)
@@ -68,9 +71,9 @@ class S: public Shin::Genetics::Individual
q=n[i]->operator[](j);
}else
{
q=s->n[i]->operator[](j);
q=s.n[i]->operator[](j);
}
a->n[i]->operator[](j)->setPotential(q->getPotential());
a.n[i]->operator[](j)->setPotential(q->getPotential());
int k;
if(i==0)
@@ -80,15 +83,11 @@ class S: public Shin::Genetics::Individual
else
k=3;
for(;k>=0;--k)
a->n[i]->operator[](j)->setWeight(k,q->getWeight(k));
a.n[i]->operator[](j)->setWeight(k,q->getWeight(k));
}
}
return a;
}
Individual* combine(Individual *s)
{
return SQ(dynamic_cast<S*>(s));
}
Shin::NeuronNetwork::FeedForwardNetwork n;
double getFitness()
@@ -113,6 +112,7 @@ class S: public Shin::Genetics::Individual
int main()
{
Shin::Genetics::Genetics<S> g;
S* s=(S*)g.getSolution(99999,999999);
s->dump();
g.addIndividual(S());
S &s=g.getSolution(99999,999999);
s.dump();
}
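In the genetics-backed test above, crossover moves from the pointer-based Individual* combine(Individual*) (with dynamic_cast and new) to a by-value S combine(S&), and Genetics<S>::getSolution now hands back a reference instead of a pointer that had to be cast. Below is a hedged sketch of that usage pattern with a trivial two-gene individual standing in for the network-backed S; the Genetics include path, the getSolution arguments, and the exact Individual contract are assumptions read off the test diffs above, not a documented API.

#include "../src/Genetics/Genetics"   // assumed path; only the NeuronNetwork includes are visible in the diffs
#include <cstdlib>

// Trivial two-gene individual standing in for the network-backed S above.
class S: public Shin::Genetics::Individual
{
public:
	S(int a = 0, int b = 0): Sa(a), Q(b) {}
	void mutate()
	{
		// Nudge one gene at random, analogous to the weight mutation above.
		if (rand() % 2) ++Sa; else --Q;
	}
	// New-style crossover: takes a reference and returns by value,
	// replacing Individual* combine(Individual*) with dynamic_cast/new.
	S combine(S &s) { return S(Sa, s.Q); }
	// getFitness() is no longer const in the updated genetics test.
	double getFitness() { return 1.0 / (double)(Sa == 0 ? 1 : Sa); }
	int Sa;
	int Q;
};

int main()
{
	Shin::Genetics::Genetics<S> g;
	g.getCreator().setMaxGenerationSize(30);   // as in the updated genetics test
	g.addIndividual(S(1, 0));
	g.addIndividual(S(50, 50));
	// getSolution now returns a reference rather than a pointer to cast.
	S &s = g.getSolution(99999, 999999);
	return s.getFitness() > 0 ? 0 : 1;
}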

View File

@@ -1,4 +1,5 @@
#include "../src/NeuronNetwork/Network"
#include "../src/NeuronNetwork/FeedForward"
#include "../src/NeuronNetwork/FeedForwardQuick"
#include <iostream>
class X: public Shin::NeuronNetwork::Problem
@@ -12,8 +13,10 @@ class X: public Shin::NeuronNetwork::Problem
int main()
{
Shin::NeuronNetwork::FeedForwardNetwork n(2,3,2);
Shin::NeuronNetwork::FeedForwardNetwork n({2,3,2});
Shin::NeuronNetwork::Solution s =n.solve(X());
Shin::NeuronNetwork::FeedForwardNetworkQuick q({2,3,2});
Shin::NeuronNetwork::Solution sq =q.solve(X());
if(s.size()!=2)
{
std::cout << "1";
@@ -29,5 +32,16 @@ int main()
std::cout << "3";
return 1;
}
if(s.size()!=sq.size())
{
std::cout << "3";
return 1;
}
for(int i=0;i<2;i++)
if(s[i]!=sq[i])
{
std::cout << "4 " << i;
return 1;
}
return 0;
}

View File

@@ -1,5 +1,6 @@
#include "../src/NeuronNetwork/Network"
#include "../src/NeuronNetwork/FeedForward"
#include "../src/NeuronNetwork/FeedForwardQuick.h"
#include <iostream>
@@ -14,20 +15,34 @@ class X: public Shin::NeuronNetwork::Problem
int main()
{
Shin::NeuronNetwork::FeedForwardNetwork n(2,4,2);
Shin::NeuronNetwork::FeedForwardNetwork n({2,4,2});
Shin::NeuronNetwork::FeedForwardNetworkQuick nq({2,4,2});
if(n[1]->size() != 4)
{
std::cout << "ACtual size:" << n[0]->size();
std::cout << "Actual size:" << n[0]->size();
return 1;
}
if(nq[1]->size() != 4)
{
std::cout << "QUICK Actual size:" << nq[0]->size();
return 1;
}
n[2]->operator[](0)->setPotential(25);
nq[2]->operator[](0)->setPotential(25);
std::cout << "Potential: " << n[2]->operator[](0)->getPotential() << "\n";
std::cout << "Potential: " << nq[2]->operator[](0)->getPotential() << "\n";
Shin::NeuronNetwork::Solution s =n.solve(X());
Shin::NeuronNetwork::Solution sq =nq.solve(X());
if(s.size()!=2)
{
std::cout << "1";
return 1;
}
if(s[0]!=0)
{
std::cout << "2";
@@ -39,8 +54,20 @@ int main()
return 1;
}
for(int i=0;i<2;i++)
{
if(s[i]!=sq[i])
{
std::cout << " 4 - " << i << " expected "<<s[i] << " was " <<sq[i];
return 1;
}
}
n[2]->operator[](0)->setWeight(0,26.0);
nq[2]->operator[](0)->setWeight(0,26.0);
s =n.solve(X());
sq =nq.solve(X());
if(s.size()!=2)
{
std::cout << "a1";
@@ -56,5 +83,15 @@ int main()
std::cout << "a3";
return 1;
}
for(int i=0;i<2;i++)
{
if(s[i]!=sq[i])
{
std::cout << " a4 - " << i << " expected "<<s[i] << " was " <<sq[i];
return 1;
}
}
return 0;
}

View File

@@ -14,7 +14,7 @@ int main()
{
srand(time(NULL));
int lm=5;
Shin::NeuronNetwork::FeedForwardNetwork net(2,lm,1);
Shin::NeuronNetwork::FeedForwardNetwork net({2,lm,1});
bool x=1;
int prev_err=0;
int err=0;