new implementation of FF Network

2014-11-04 22:25:11 +01:00
parent 0238312a5b
commit 75ca9bc21f
23 changed files with 370 additions and 104 deletions
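For orientation: after this change both network variants are built from an initializer list of layer sizes and share the same solve() interface, which the updated nn-01/nn-02 tests below exercise. A minimal sketch under that assumption; the Problem subclass X and its {1,0} input bits are illustrative stand-ins for the ones the tests define, and the include paths mirror the tests directory.

// Illustrative sketch, not part of the commit; X's input bits are assumed.
#include "../src/NeuronNetwork/FeedForward"
#include "../src/NeuronNetwork/FeedForwardQuick"
#include <iostream>
#include <vector>
class X : public Shin::NeuronNetwork::Problem
{
protected:
    // Hypothetical two-bit input; the real tests define their own.
    std::vector<bool> representation() const override { return {true, false}; }
};
int main()
{
    // Both networks are now constructed from an initializer list of layer sizes.
    Shin::NeuronNetwork::FeedForwardNetwork n({2, 3, 2});
    Shin::NeuronNetwork::FeedForwardNetworkQuick q({2, 3, 2});
    // Both expose the same solve() interface inherited from ACyclicNetwork.
    Shin::NeuronNetwork::Solution s  = n.solve(X());
    Shin::NeuronNetwork::Solution sq = q.solve(X());
    std::cout << "outputs: " << s.size() << " vs " << sq.size() << "\n";
    return s.size() == sq.size() ? 0 : 1;
}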

View File

@@ -1,7 +1,9 @@
CXX=g++ -m64
CXXFLAGS+= -Wall -std=c++14
CXXFLAGS+= -Wall -Wextra -pedantic -Weffc++ -Wshadow -Wstrict-aliasing -ansi
#CXXFLAGS+=-Werror
CXXFLAGS+= -g
CXXFLAGS+= -O2
CXXFLAGS+= -O3
CXXFLAGS+= -std=c++14
#CXXFLAGS+= -pg -fPIC
CXXFLAGS+= -fPIC -pthread

View File

@@ -47,7 +47,7 @@ namespace Genetics
double bestFitness()
{
double f=-DBL_MAX;
for(const _T &a:individual)
for(_T &a:individual)
if(f < a.getFitness())
f=a.getFitness();
return f;
@@ -56,7 +56,7 @@ namespace Genetics
double worstFitness()
{
double f=DBL_MAX;
for(const _T &a:individual)
for(_T &a:individual)
if(f > a.getFitness())
f=a.getFitness();
return f;
@@ -65,7 +65,7 @@ namespace Genetics
double averageFitness()
{
double f=0;
for(const _T &a:individual)
for(_T &a:individual)
f+=a.getFitness();
return f/individual.size();
}

View File

@@ -9,14 +9,15 @@ namespace Shin
namespace Genetics
{
template <class _T>
class GenerationCreater
class GenerationCreator
{
public:
Generation<_T> operator()(Generation<_T> &g) {return generate(g);}
virtual Generation<_T> generate(Generation<_T> &gen)=0;
virtual ~GenerationCreater() {}
virtual ~GenerationCreator() {}
void setMaxGenerationSize(const unsigned size) {maxGenerationSize=size;}
protected:
unsigned maxGenerationSize=150;
unsigned maxGenerationSize=100;
/* static void run(GenerationCreater* r,unsigned long from, unsigned long to,unsigned long fitness, Generation *gen, Generation *s)
{ r->runner(from,to,fitness,gen,s); }
virtual void runner(unsigned long from, unsigned long to,unsigned long fitness, Generation *gen, Generation *s)=0;
@@ -24,7 +25,7 @@ namespace Genetics
};
template <class _T>
class Roulete: public GenerationCreater<_T>
class Roulete: public GenerationCreator<_T>
{
public:
Generation<_T> generate(Generation<_T> &gen) override;
@@ -38,13 +39,13 @@ Shin::Genetics::Generation< _T > Shin::Genetics::Roulete<_T>::generate(Shin::Gen
{
Generation<_T> newGen;
long fitness=0;
for(const _T &a:gen)
for(_T &a:gen)
{
fitness+=a.fitness();
}
double avFitness=gen.averageFitness();
double avFitness=(gen.bestFitness()+gen.averageFitness())/2;
for(unsigned int i=0;i<gen.size() && i < this->maxGenerationSize*2/3;i++)
for(unsigned int i=0;i<gen.size() && i < this->maxGenerationSize;i++)
{
if(gen[i].getFitness() >= avFitness)
{

View File

@@ -29,11 +29,11 @@ namespace Genetics
class Genetics
{
public:
Genetics():c(*new _C()),generation(),deleteCreater(1) {}
Genetics(GenerationCreater<_T> *gc):c(gc),generation() {}
Genetics():c(*new _C()),generation(),deleteCreator(1) {}
Genetics(GenerationCreator<_T> *gc):c(gc),generation() {}
virtual ~Genetics()
{
if(deleteCreater)
if(deleteCreator)
delete &c;
}
void addIndividual (const _T &ind) { generation.add(ind); }
@@ -60,14 +60,15 @@ namespace Genetics
}
void makeRound(const int round)
{
if(round%500==1)
srand(time(NULL));
// if(round%500==1)
// srand(time(NULL));
generation=c(generation);
}
GenerationCreator<_T>& getCreator() {return c;};
protected:
GenerationCreater<_T> &c;
GenerationCreator<_T> &c;
Generation<_T> generation;
bool deleteCreater=0;
bool deleteCreator=0;
private:
};

View File

@@ -8,11 +8,13 @@ namespace Genetics
class Individual
{
public:
virtual ~Individual() {};
virtual void mutate()=0;
virtual double getFitness() const=0;
inline double fitness() const {return getFitness();}
virtual double getFitness()=0;
virtual double fitness() final {return getFitness();}
private:
};
}
}
#endif // INDIVIDUAL_H

View File

@@ -0,0 +1 @@
./FeedForward.h

View File

@@ -0,0 +1,32 @@
#include "FeedForward"
using namespace Shin::NeuronNetwork;
FeedForwardNetwork::~FeedForwardNetwork()
{
for(Layer *l:layers)
{
delete l;
}
}
Solution FeedForwardNetwork::solve(const Problem& p)
{
Solution s=Solution(p);
for (Layer *l:layers)
{
s=l->solve(s);
}
return s;
}
const Layer* FeedForwardNetwork::operator[](int layer)
{
return layers[layer];
}
void FeedForwardNetwork::addLayer(int neurons)
{
layers.push_back(new Layer(neurons));
}

View File

@@ -0,0 +1,50 @@
#ifndef _S_NN_FF_H_
#define _S_NN_FF_H_
#include "Problem"
#include "Solution"
#include "Neuron"
#include "Network"
#include <cstdarg>
#include <vector>
#include <initializer_list>
#include <iostream>
namespace Shin
{
namespace NeuronNetwork
{
// template <typename _NT>
class FeedForwardNetwork : public ACyclicNetwork
{
public:
FeedForwardNetwork(const FeedForwardNetwork &f):first(nullptr),last(nullptr),layers()
{
for(Layer *l:f.layers)
{
layers.push_back(new Layer(*l));
last=layers[layers.size()-1];
}
first=layers[0];
}
FeedForwardNetwork operator=(const FeedForwardNetwork &f)=delete;
template<typename... Args>inline FeedForwardNetwork(std::initializer_list<int> s):first(nullptr),last(nullptr),layers() { for(const int i:s) {addLayer(i);}}
//inline FeedForwardNetwork(std::vector<int> q);
~FeedForwardNetwork();
virtual Solution solve(const Problem& p)override;
unsigned size() {return layers.size();}
const Layer* operator[](int layer);
protected:
void addLayer(int neurons);
private:
Layer* first;
Layer* last ;
std::vector<Layer*> layers;
};
}
}
#endif

View File

@@ -0,0 +1 @@
./FeedForwardQuick.h

View File

@@ -0,0 +1,67 @@
#include "FeedForwardQuick"
using namespace Shin::NeuronNetwork;
FeedForwardNetworkQuick::~FeedForwardNetworkQuick()
{
if(weights != nullptr)
{
for(size_t i=0;i<layers;i++)
{
for (size_t j=0;j<layerSizes[i];j++)
{
delete[] weights[i][j];
}
delete[] weights[i];
delete[] potentials[i];
}
delete[] weights;
delete[] potentials;
delete[] layerSizes;
}
}
Solution FeedForwardNetworkQuick::solve(const Problem& p)
{
std::vector<bool> solution=p;
bool* sol=new bool[solution.size()];
for(size_t i=0;i<solution.size();i++)
{
sol[i]=solution[i];
}
size_t prevSize=1;
for(size_t i=0;i<layers;i++)
{
bool* newSolution= new bool[layerSizes[i]];
for(size_t j=0;j<layerSizes[i];j++)
{
double q;
if(i==0)
{
q=sol[j]*weights[i][j][0];
}else
{
q=0;
for(size_t k=0;k<prevSize;k++)
{
q+=sol[k]*weights[i][j][k];
}
}
newSolution[j]=q >= potentials[i][j]?1:0;
}
prevSize=layerSizes[i];
delete[] sol;
sol=newSolution;
}
std::vector<bool> ret;
for(size_t i=0;i<prevSize;i++)
{
ret.push_back(sol[i]);
}
delete[] sol;
return ret;
}
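In this quick variant the whole network lives in raw arrays, weights[layer][neuron][input] and potentials[layer][neuron]: a neuron outputs 1 when its weighted input sum reaches its potential, and layer 0 is special-cased so each input neuron weighs only its own bit. A minimal standalone sketch of that same convention, with 2-1 layer sizes and unit weights chosen purely for illustration:

// Editor's illustration of the indexing/threshold convention used above;
// the sizes and weight values are assumptions, not taken from the commit.
#include <iostream>
#include <vector>
int main()
{
    std::vector<bool> input{true, false};          // two input bits
    // Layer 0: one weight per input neuron (its own bit only), potential 1.
    double w0[2] = {1.0, 1.0}, p0[2] = {1.0, 1.0};
    // Layer 1: one output neuron fed by both layer-0 outputs, potential 1.
    double w1[2] = {1.0, 1.0}, p1 = 1.0;
    bool l0[2];
    for (int j = 0; j < 2; ++j)                    // layer 0: q = x_j * w0[j]
        l0[j] = input[j] * w0[j] >= p0[j];
    double q = 0;
    for (int k = 0; k < 2; ++k)                    // layer 1: weighted sum
        q += l0[k] * w1[k];
    bool out = q >= p1;
    std::cout << "output: " << out << "\n";        // prints 1 with these weights
    return 0;
}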

View File

@@ -0,0 +1,95 @@
#ifndef _S_NN_FF_QUICK_H_
#define _S_NN_FF_QUICK_H_
#include "Problem"
#include "Solution"
#include "Neuron"
#include "Network"
#include <cstdarg>
#include <vector>
#include <initializer_list>
#include <iostream>
namespace Shin
{
namespace NeuronNetwork
{
class FFNeuron : public Neuron
{
public:
FFNeuron() = delete;
FFNeuron(const FFNeuron&) = delete;
FFNeuron& operator=(const FFNeuron&) = delete;
FFNeuron(double *pot, double *w):potential(pot),weights(w) { }
double getPotential() {return *potential;}
void setPotential(double p) { std::cout <<"S"; *potential=p;}
double getWeight(unsigned int i ) { return weights[i];}
void setWeight(unsigned int i,double p) { weights[i]=p; }
protected:
double *potential;
double *weights;
private:
};
class FFLayer//: public Layer
{
public:
FFLayer(size_t s, double *p,double **w): layerSize(s),potentials(p),weights(w) {}
FFNeuron* operator[](int neuron) const { return new FFNeuron(&potentials[neuron],weights[neuron]); };
int size() const {return layerSize;};
protected:
size_t layerSize;
double *potentials;
double **weights;
};
class FeedForwardNetworkQuick:public ACyclicNetwork
{
public:
FeedForwardNetworkQuick(const FeedForwardNetworkQuick &f) = delete; //TODO
FeedForwardNetworkQuick operator=(const FeedForwardNetworkQuick &f)=delete;
template<typename... Args>inline FeedForwardNetworkQuick(std::initializer_list<int> s):weights(nullptr),potentials(nullptr),layerSizes(nullptr),layers(s.size())
{
weights= new double**[s.size()];
potentials= new double*[s.size()];
layerSizes= new size_t[s.size()];
int i=0;
int prev_size=1;
for(int layerSize:s)
{
if(i==0)
prev_size=layerSize;
layerSizes[i]=layerSize;
weights[i]= new double*[layerSize];
potentials[i]= new double[layerSize];
for (int j=0;j<layerSize;j++)
{
potentials[i][j]=1;
weights[i][j]= new double[prev_size];
for(int k=0;k<prev_size;k++)
{
weights[i][j][k]=1;
}
}
i++;
prev_size=layerSize;
}
}
~FeedForwardNetworkQuick();
virtual Solution solve(const Problem& p) override;
unsigned size() { return layers;}
FFLayer* operator[](int l) { return new FFLayer(layerSizes[l],potentials[l],weights[l]); }
protected:
private:
double ***weights;
double **potentials;
size_t *layerSizes;
size_t layers;
};
}
}
#endif

View File

@@ -1,4 +1,4 @@
OBJFILES=./Solution.o ./Problem.o ./Network.o ./Neuron.o
OBJFILES=./Solution.o ./Problem.o ./Network.o ./Neuron.o ./FeedForward.o ./FeedForwardQuick.o
LIBNAME=NeuronNetwork

View File

@@ -2,39 +2,6 @@
using namespace Shin::NeuronNetwork;
FeedForwardNetwork::~FeedForwardNetwork()
{
for(Layer *l:layers)
{
delete l;
}
}
Solution FeedForwardNetwork::solve(const Problem& p)
{
Solution s=Solution(p);
for (Layer *l:layers)
{
s=l->solve(s);
}
return s;
}
void FeedForwardNetwork::learn(const Problem & problem, const Solution &actual)
{
//S::Solution s= solve(p);
}
const Layer* FeedForwardNetwork::operator[](int layer)
{
return layers[layer];
}
void FeedForwardNetwork::addLayer(int neurons)
{
layers.push_back(new Layer(neurons));
}
Layer::Layer(int a):neurons()
{
while(a--)

View File

@@ -7,7 +7,9 @@
#include <cstdarg>
#include <vector>
#include <initializer_list>
#include <iostream>
namespace Shin
{
namespace NeuronNetwork
@@ -15,8 +17,8 @@ namespace NeuronNetwork
class Network
{
public:
virtual ~Network() {};
virtual Solution solve(const Problem&)=0;
virtual void learn(const Problem & p, const Solution &s)=0;
protected:
private:
};
@@ -32,6 +34,13 @@ namespace NeuronNetwork
{
public:
Layer(int a);
Layer(const Layer &l):neurons()
{
for(unsigned i=0;i<l.neurons.size();i++)
{
neurons.push_back(new Neuron(*l.neurons[i]));
}
}
~Layer();
Solution solve(const std::vector<bool> &input);
Neuron* operator[](int neuron) const;
@@ -40,25 +49,6 @@ namespace NeuronNetwork
std::vector<Neuron*> neurons;
};
// template <typename _NT>
class FeedForwardNetwork : public ACyclicNetwork
{
public:
template<typename... Args>inline FeedForwardNetwork(Args &&... args) {pass((addLayer(args),1)...);};
//inline FeedForwardNetwork(std::vector<int> q);
~FeedForwardNetwork();
virtual Solution solve(const Problem& p) override;
virtual void learn(const Problem & p, const Solution &s) override;
const Layer* operator[](int layer);
protected:
template<typename... Args> inline void pass(Args&&...) {};
void addLayer(int neurons);
private:
Layer* first;
Layer* last ;
std::vector<Layer*> layers;
};
}
}

View File

@@ -11,6 +11,10 @@ namespace NeuronNetwork
{
public:
Neuron();
Neuron(const Neuron &n):potential(n.potential),weights(n.weights)
{
}
double getPotential() const;
void setPotential(double p);
double getWeight(unsigned int) const;

View File

@@ -11,6 +11,7 @@ namespace NeuronNetwork
{
public:
Problem();
virtual ~Problem(){};
operator std::vector<bool>() const;
protected:
virtual std::vector<bool> representation() const =0;

View File

@@ -2,7 +2,7 @@
using namespace Shin::NeuronNetwork;
Solution::Solution(std::vector<bool>solution):solution(solution)
Solution::Solution(std::vector<bool>sol):solution(sol)
{
}

View File

@@ -1,9 +1,8 @@
include ../Makefile.const
LIB_DIR = ../lib
GEN_TESTS=g-01
#g-02
NN_TESTS=nn-01 nn-02 nn-03
GEN_TESTS=g-01 g-02
NN_TESTS=nn-01 nn-02 nn-03 nn-04
ALL_TESTS=$(NN_TESTS) $(GEN_TESTS)
LIBS=$(LIB_DIR)/Genetics.a $(LIB_DIR)/NeuronNetwork.a

View File

@@ -41,10 +41,11 @@ class S: public Shin::Genetics::Individual
return S(a,b);
}
double getFitness() const
double getFitness()
{
// return fit;
return abs(Sa-98545)+abs(Q+85);
return (double)1.0/(double)(Sa);
//double s=abs(Sa-98545)+abs(Q+85);
//return Sa*100-Q*5;
//return 985258-s;
//return s < 0?0:s;
@@ -61,6 +62,7 @@ class S: public Shin::Genetics::Individual
int main()
{
Shin::Genetics::Genetics<S> g;
g.getCreator().setMaxGenerationSize(30);
g.addIndividual(S(1,0));
g.addIndividual(S(1,50));
g.addIndividual(S(50,50));

View File

@@ -21,12 +21,12 @@ class X: public Shin::NeuronNetwork::Problem
class S: public Shin::Genetics::Individual
{
public:
S():n(2,4,1)
S():n({2,4,1})
{
}
void mutate()
{
for(int i=0;i<3;i++)
for(unsigned int i=0;i<n.size();i++)
{
for (int j=0;j<n[i]->size();j++)
{
@@ -39,12 +39,14 @@ class S: public Shin::Genetics::Individual
}
int k;
if(i==0)
k=0;
continue;
else if(i==1)
k=2;
k=1;
else
k=3;
for(;k>=0;--k)
{
std::cerr << "i: "<<i <<" " << k << std::endl;
if(rand()%20==0)
{
if(rand()%2)
@@ -52,15 +54,16 @@ class S: public Shin::Genetics::Individual
else
n[i]->operator[](j)->setWeight(k,n[i]->operator[](j)->getWeight(k)+1);
}
}
}
}
};
S* SQ(S *s)
}
S combine(S &s)
{
S * a= new S();
S a;
for(int i=0;i<3;i++)
{
for (int j=0;j<n[i]->size();j++)
for (int j=0;j<s.n[i]->size();j++)
{
Shin::NeuronNetwork::Neuron *q;
if(rand()%2==1)
@@ -68,9 +71,9 @@ class S: public Shin::Genetics::Individual
q=n[i]->operator[](j);
}else
{
q=s->n[i]->operator[](j);
q=s.n[i]->operator[](j);
}
a->n[i]->operator[](j)->setPotential(q->getPotential());
a.n[i]->operator[](j)->setPotential(q->getPotential());
int k;
if(i==0)
@@ -80,15 +83,11 @@ class S: public Shin::Genetics::Individual
else
k=3;
for(;k>=0;--k)
a->n[i]->operator[](j)->setWeight(k,q->getWeight(k));
a.n[i]->operator[](j)->setWeight(k,q->getWeight(k));
}
}
return a;
}
Individual* combine(Individual *s)
{
return SQ(dynamic_cast<S*>(s));
}
Shin::NeuronNetwork::FeedForwardNetwork n;
double getFitness()
@@ -113,6 +112,7 @@ class S: public Shin::Genetics::Individual
int main()
{
Shin::Genetics::Genetics<S> g;
S* s=(S*)g.getSolution(99999,999999);
s->dump();
g.addIndividual(S());
S &s=g.getSolution(99999,999999);
s.dump();
}

View File

@@ -1,4 +1,5 @@
#include "../src/NeuronNetwork/Network"
#include "../src/NeuronNetwork/FeedForward"
#include "../src/NeuronNetwork/FeedForwardQuick"
#include <iostream>
class X: public Shin::NeuronNetwork::Problem
@@ -12,8 +13,10 @@ class X: public Shin::NeuronNetwork::Problem
int main()
{
Shin::NeuronNetwork::FeedForwardNetwork n(2,3,2);
Shin::NeuronNetwork::FeedForwardNetwork n({2,3,2});
Shin::NeuronNetwork::Solution s =n.solve(X());
Shin::NeuronNetwork::FeedForwardNetworkQuick q({2,3,2});
Shin::NeuronNetwork::Solution sq =q.solve(X());
if(s.size()!=2)
{
std::cout << "1";
@@ -29,5 +32,16 @@ int main()
std::cout << "3";
return 1;
}
if(s.size()!=sq.size())
{
std::cout << "3";
return 1;
}
for(int i=0;i<2;i++)
if(s[i]!=sq[i])
{
std::cout << "4 " << i;
return 1;
}
return 0;
}

View File

@@ -1,5 +1,6 @@
#include "../src/NeuronNetwork/Network"
#include "../src/NeuronNetwork/FeedForward"
#include "../src/NeuronNetwork/FeedForwardQuick.h"
#include <iostream>
@@ -14,20 +15,34 @@ class X: public Shin::NeuronNetwork::Problem
int main()
{
Shin::NeuronNetwork::FeedForwardNetwork n(2,4,2);
Shin::NeuronNetwork::FeedForwardNetwork n({2,4,2});
Shin::NeuronNetwork::FeedForwardNetworkQuick nq({2,4,2});
if(n[1]->size() != 4)
{
std::cout << "ACtual size:" << n[0]->size();
std::cout << "Actual size:" << n[0]->size();
return 1;
}
if(nq[1]->size() != 4)
{
std::cout << "QUICK Actual size:" << nq[0]->size();
return 1;
}
n[2]->operator[](0)->setPotential(25);
nq[2]->operator[](0)->setPotential(25);
std::cout << "Potential: " << n[2]->operator[](0)->getPotential() << "\n";
std::cout << "Potential: " << nq[2]->operator[](0)->getPotential() << "\n";
Shin::NeuronNetwork::Solution s =n.solve(X());
Shin::NeuronNetwork::Solution sq =nq.solve(X());
if(s.size()!=2)
{
std::cout << "1";
return 1;
}
if(s[0]!=0)
{
std::cout << "2";
@@ -39,8 +54,20 @@ int main()
return 1;
}
for(int i=0;i<2;i++)
{
if(s[i]!=sq[i])
{
std::cout << " 4 - " << i << " expected "<<s[i] << " was " <<sq[i];
return 1;
}
}
n[2]->operator[](0)->setWeight(0,26.0);
nq[2]->operator[](0)->setWeight(0,26.0);
s =n.solve(X());
sq =nq.solve(X());
if(s.size()!=2)
{
std::cout << "a1";
@@ -56,5 +83,15 @@ int main()
std::cout << "a3";
return 1;
}
for(int i=0;i<2;i++)
{
if(s[i]!=sq[i])
{
std::cout << " a4 - " << i << " expected "<<s[i] << " was " <<sq[i];
return 1;
}
}
return 0;
}

View File

@@ -14,7 +14,7 @@ int main()
{
srand(time(NULL));
int lm=5;
Shin::NeuronNetwork::FeedForwardNetwork net(2,lm,1);
Shin::NeuronNetwork::FeedForwardNetwork net({2,lm,1});
bool x=1;
int prev_err=0;
int err=0;