Lots of fixes and SSE enhancements

2014-11-18 11:09:34 +01:00
parent 207e141cca
commit 0abc0d07dd
28 changed files with 246 additions and 280 deletions

View File

@@ -2,10 +2,13 @@ CXX=g++ -m64
CXXFLAGS+= -Wall -Wextra -pedantic -Weffc++ -Wshadow -Wstrict-aliasing -ansi -Woverloaded-virtual -Wdelete-non-virtual-dtor CXXFLAGS+= -Wall -Wextra -pedantic -Weffc++ -Wshadow -Wstrict-aliasing -ansi -Woverloaded-virtual -Wdelete-non-virtual-dtor
#CXXFLAGS+=-Werror #CXXFLAGS+=-Werror
CXXFLAGS+= -g CXXFLAGS+= -g
CXXFLAGS+= -O3 -msse4.2 -mfpmath=sse -march=native -mtune=native CXXFLAGS+= -msse4.2 -mmmx
#-fprefetch-loop-arrays
CXXFLAGS+= -std=c++14 CXXFLAGS+= -std=c++14
#CXXFLAGS+= -pg -fPIC #CXXFLAGS+= -pg -fPIC
CXXFLAGS+= -fPIC -pthread CXXFLAGS+= -fPIC -pthread
OPTIMALIZATION = -O3 -march=native -mtune=native
%.o : %.cpp %.h %.o : %.cpp %.h
$(CXX) $(CXXFLAGS) -c $< -o $@ $(CXX) $(CXXFLAGS) $(OPTIMALIZATION) -c $< -o $@

View File

@@ -40,14 +40,15 @@ FeedForwardNetworkQuick::~FeedForwardNetworkQuick()
} }
delete[] weights[i]; delete[] weights[i];
delete[] potentials[i]; delete[] potentials[i];
if(i!=layers-1)
delete[] sums[i]; delete[] sums[i];
delete[] inputs[i];
} }
delete[] sums[layers]; delete[] sums[layers];
delete[] weights; delete[] weights;
delete[] potentials; delete[] potentials;
delete[] layerSizes; delete[] layerSizes;
delete[] sums; delete[] sums;
delete[] inputs;
} }
if(ffLayers !=nullptr) if(ffLayers !=nullptr)
{ {
@@ -61,39 +62,40 @@ FeedForwardNetworkQuick::~FeedForwardNetworkQuick()
void FeedForwardNetworkQuick::solvePart(float *newSolution, register size_t begin, size_t end,size_t prevSize, float *sol,size_t layer) void FeedForwardNetworkQuick::solvePart(float *newSolution, register size_t begin, size_t end,size_t prevSize, float *sol,size_t layer)
{ {
if(prevSize >8) if(prevSize >4)
{ {
__m128 partialSolution; __m128 partialSolution;
__m128 partialSolution2;
__m128 w; __m128 w;
__m128 sols; __m128 sols;
__m128 w2;
__m128 sols2;
__m128 temporaryConst1=_mm_set1_ps(1.0); __m128 temporaryConst1=_mm_set1_ps(1.0);
__m128 temporaryConstLambda=_mm_set1_ps(-lambda); __m128 temporaryConstLambda=_mm_set1_ps(-lambda);
register size_t alignedPrev=prevSize>8?(prevSize-(prevSize%8)):0; register size_t alignedPrev=prevSize>16?(prevSize-(prevSize%16)):0;
float tmp;
for( size_t j=begin;j<end;j++) for( size_t j=begin;j<end;j++)
{ {
tmp=0; partialSolution= _mm_setzero_ps();
w=_mm_setzero_ps();
for(register size_t k=alignedPrev;k<prevSize;k++) for(register size_t k=alignedPrev;k<prevSize;k++)
{ {
tmp+=sol[k]*weights[layer][j][k]; w = _mm_load_ss(this->weights[layer][j]+k);
sols = _mm_load_ss(sol+k);
w=_mm_mul_ps(w,sols);
partialSolution=_mm_add_ps(partialSolution,w);
// w=_mm_shuffle_ps(w,w,_MM_SHUFFLE(2,1,0,3)); // note: '^' is XOR in C++, so the original 3*2^0+0*2^2+1*2^4+2*2^6 constant was wrong
// sols=_mm_shuffle_ps(sols,sols,_MM_SHUFFLE(2,1,0,3));
} }
partialSolution = _mm_setzero_ps(); for(register size_t k=0;k<alignedPrev;k+=4)
partialSolution2 = _mm_set_ss(tmp);
for(register size_t k=0;k<alignedPrev;k+=8)
{ {
w = _mm_load_ps(this->weights[layer][j]+k); w = _mm_load_ps(this->weights[layer][j]+k);
w2 = _mm_load_ps(this->weights[layer][j]+k+4); //_mm_prefetch((char*)this->weights[layer][j]+k+4,_MM_HINT_T0);
sols = _mm_load_ps(sol+k); sols = _mm_load_ps(sol+k);
sols2 = _mm_load_ps(sol+k+4);
w=_mm_mul_ps(w,sols); w=_mm_mul_ps(w,sols);
w2=_mm_mul_ps(w2,sols2);
partialSolution=_mm_add_ps(partialSolution,w); partialSolution=_mm_add_ps(partialSolution,w);
partialSolution2=_mm_add_ps(partialSolution2,w2);
} }
partialSolution = _mm_hadd_ps(partialSolution, partialSolution2); /* pre-SSE3 horizontal sum:
__m128 temp = _mm_add_ps(_mm_movehl_ps(partialSolution, partialSolution), partialSolution);
float x;
_mm_store_ss(&x, _mm_add_ss(temp, _mm_shuffle_ps(temp, temp, 1)));
*/
partialSolution = _mm_hadd_ps(partialSolution, partialSolution); partialSolution = _mm_hadd_ps(partialSolution, partialSolution);
partialSolution = _mm_hadd_ps(partialSolution, partialSolution); partialSolution = _mm_hadd_ps(partialSolution, partialSolution);
_mm_store_ss(inputs[layer]+j,partialSolution); _mm_store_ss(inputs[layer]+j,partialSolution);
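For reference, here is a minimal self-contained sketch of the accumulate-then-horizontal-add pattern the rewritten solvePart uses, assuming 16-byte-aligned buffers and SSE3 (for _mm_hadd_ps); the function name is illustrative, not from the project:

#include <pmmintrin.h> // SSE3: _mm_hadd_ps; pulls in xmmintrin/emmintrin
#include <cstddef>

// Dot product of w and x over n floats; both pointers assumed 16-byte aligned.
static float dotSSE(const float* w, const float* x, std::size_t n)
{
    std::size_t aligned = n > 4 ? n - (n % 4) : 0;
    __m128 acc = _mm_setzero_ps();
    // Tail elements first, one lane at a time (upper lanes stay zero).
    for (std::size_t k = aligned; k < n; k++)
        acc = _mm_add_ss(acc, _mm_mul_ss(_mm_load_ss(w + k), _mm_load_ss(x + k)));
    // Main loop: four multiply-adds per iteration.
    for (std::size_t k = 0; k < aligned; k += 4)
        acc = _mm_add_ps(acc, _mm_mul_ps(_mm_load_ps(w + k), _mm_load_ps(x + k)));
    // Two horizontal adds collapse the four lanes into every lane.
    acc = _mm_hadd_ps(acc, acc);
    acc = _mm_hadd_ps(acc, acc);
    float result;
    _mm_store_ss(&result, acc);
    return result;
}

Compile with -msse3 or higher; the Makefile above already passes -msse4.2.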
@@ -120,7 +122,7 @@ void FeedForwardNetworkQuick::solvePart(float *newSolution, register size_t begi
Solution FeedForwardNetworkQuick::solve(const Problem& p) Solution FeedForwardNetworkQuick::solve(const Problem& p)
{ {
std::vector<bool> solution(p); std::vector<float> solution(p);
register float* sol=sums[0];//new bool[solution.size()]; register float* sol=sums[0];//new bool[solution.size()];
for(size_t i=0;i<solution.size();i++) for(size_t i=0;i<solution.size();i++)
@@ -132,11 +134,10 @@ Solution FeedForwardNetworkQuick::solve(const Problem& p)
for(register size_t i=0;i<layers;i++) for(register size_t i=0;i<layers;i++)
{ {
float* newSolution= sums[i+1];//new bool[layerSizes[i]]; float* newSolution= sums[i+1];//new bool[layerSizes[i]];
if(threads > 1 && (layerSizes[i] > 700 ||prevSize > 700)) // 600 is an guess about actual size, when creating thread has some speedup if(threads > 1 && (layerSizes[i] > 700 ||prevSize > 700)) // 700 is a guess at the layer size where spawning threads starts to pay off
{ {
std::vector<std::thread> th; std::vector<std::thread> th;
size_t s=1; size_t s=1;
//TODO THIS IS NOT WORKING!!!
size_t step =layerSizes[i]/threads; size_t step =layerSizes[i]/threads;
for(size_t t=1;t<=threads;t++) for(size_t t=1;t<=threads;t++)
{ {
@@ -158,7 +159,7 @@ Solution FeedForwardNetworkQuick::solve(const Problem& p)
prevSize=layerSizes[i]; prevSize=layerSizes[i];
sol=newSolution; sol=newSolution;
} }
std::vector<double> ret; std::vector<float> ret;
for(size_t i=1;i<prevSize;i++) for(size_t i=1;i<prevSize;i++)
{ {
ret.push_back(sol[i]); ret.push_back(sol[i]);
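The 700-element threshold above gates a per-layer thread split. Below is a condensed sketch of that split under the same convention (index 0 is the bias entry), with an illustrative worker standing in for solvePart:

#include <thread>
#include <vector>
#include <cstddef>

// Illustrative stand-in for solvePart: fills out[begin, end).
static void solveRange(float* out, std::size_t begin, std::size_t end)
{
    for (std::size_t j = begin; j < end; j++)
        out[j] = 0.0f; // the real code computes the neuron inputs here
}

static void solveLayer(float* out, std::size_t layerSize, std::size_t threads)
{
    std::vector<std::thread> th;
    std::size_t step = layerSize / threads;
    std::size_t s = 1; // skip the bias entry at index 0
    for (std::size_t t = 1; t <= threads; t++)
    {
        // The last thread takes the remainder so no neuron is skipped.
        std::size_t e = (t == threads) ? layerSize : s + step;
        th.push_back(std::thread(solveRange, out, s, e));
        s = e;
    }
    for (std::thread& worker : th)
        worker.join();
}

Link with -pthread, as the Makefile already does.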

View File

@@ -17,6 +17,7 @@
#include <xmmintrin.h> #include <xmmintrin.h>
#include <emmintrin.h> #include <emmintrin.h>
#include <xmmintrin.h> #include <xmmintrin.h>
#include "../sse_mathfun.h" #include "../sse_mathfun.h"
#define LAMBDA 0.8 #define LAMBDA 0.8
@@ -34,7 +35,7 @@ namespace NeuronNetwork
FFNeuron(float &pot, float *w, float &s, float &i,float lam):potential(pot),weights(w),sum(s),inputs(i),lambda(lam) { } FFNeuron(float &pot, float *w, float &s, float &i,float lam):potential(pot),weights(w),sum(s),inputs(i),lambda(lam) { }
float getPotential() {return potential;} float getPotential() {return potential;}
void setPotential(double p) { potential=p;} void setPotential(float p) { potential=p;}
float getWeight(unsigned int i ) { return weights[i];} float getWeight(unsigned int i ) { return weights[i];}
void setWeight(unsigned int i,float p) { weights[i]=p; } void setWeight(unsigned int i,float p) { weights[i]=p; }
inline float output() const { return sum; } inline float output() const { return sum; }

View File

@@ -6,27 +6,40 @@ Shin::NeuronNetwork::Learning::BackPropagation::BackPropagation(FeedForwardNetwo
} }
Shin::NeuronNetwork::Learning::BackPropagation::~BackPropagation()
{
if(deltas!=nullptr)
{
for(size_t i=0;i<network.size();i++)
delete[] deltas[i];
}
delete[] deltas;
}
void Shin::NeuronNetwork::Learning::BackPropagation::propagate(const Shin::NeuronNetwork::Solution& expectation) void Shin::NeuronNetwork::Learning::BackPropagation::propagate(const Shin::NeuronNetwork::Solution& expectation)
{ {
float **deltas; if(deltas==nullptr)
{
deltas=new float*[network.size()]; deltas=new float*[network.size()];
for(int i=(int)network.size()-1;i>=0;i--) for(size_t i=0;i<network.size();i++)
{ {
deltas[i]=new float[network[i]->size()]; deltas[i]=new float[network[i]->size()];
deltas[i][0]=0.0; deltas[i][0]=0.0;
if(i==(int)network.size()-1)
{
for(size_t j=1;j<network[i]->size();j++)
{
deltas[i][j]= (expectation[j-1]-network[i]->operator[](j)->output())*network[i]->operator[](j)->derivatedOutput();
// std::cerr << "X "<< deltas[i][j] <" Z ";
} }
}else }
for(size_t j=1;j<network[network.size()-1]->size();j++)
{
deltas[network.size()-1][j]= correction(expectation[j-1],network[network.size()-1]->operator[](j)->output())
*network[network.size()-1]->operator[](j)->derivatedOutput();
}
for(int i=(int)network.size()-2;i>=0;i--)
{ {
if(allowThreads) if(allowThreads)
{ {
std::vector<std::thread> th; std::vector<std::thread> th;
int s=0; size_t s=0;
//TODO THIS IS NOT WORKING!!! //TODO THIS IS NOT WORKING!!!
#define THREADS 4 #define THREADS 4
int step =network[i]->size()/THREADS; int step =network[i]->size()/THREADS;
@@ -34,7 +47,7 @@ void Shin::NeuronNetwork::Learning::BackPropagation::propagate(const Shin::Neuro
{ {
if(s>=network[i]->size()) if(s>=network[i]->size())
break; break;
th.push_back(std::thread([&i,this,&deltas](size_t from, size_t to)->void{ th.push_back(std::thread([&i,this](size_t from, size_t to)->void{
for(size_t j=from;j<to;j++) for(size_t j=from;j<to;j++)
{ {
register float deltasWeight = 0; register float deltasWeight = 0;
@@ -62,7 +75,6 @@ void Shin::NeuronNetwork::Learning::BackPropagation::propagate(const Shin::Neuro
} }
} }
} }
}
for(size_t i=0;i<network.size();i++) for(size_t i=0;i<network.size();i++)
{ {
@@ -83,11 +95,6 @@ void Shin::NeuronNetwork::Learning::BackPropagation::propagate(const Shin::Neuro
} }
} }
} }
for(size_t i=0;i<network.size();i++)
{
delete[] deltas[i];
}
delete[] deltas;
} }
@@ -96,7 +103,7 @@ float Shin::NeuronNetwork::Learning::BackPropagation::teach(const Shin::NeuronNe
Shin::NeuronNetwork::Solution a=network.solve(p); Shin::NeuronNetwork::Solution a=network.solve(p);
double error=calculateError(solution,a); double error=calculateError(solution,a);
std::vector<double> s; std::vector<float> s;
if(entropy) if(entropy)
{ {
for(size_t i=0;i<solution.size();i++) for(size_t i=0;i<solution.size();i++)
@@ -120,3 +127,8 @@ void Shin::NeuronNetwork::Learning::BackPropagation::setLearningCoeficient(float
learningCoeficient=c; learningCoeficient=c;
} }
float Shin::NeuronNetwork::Learning::BackPropagation::correction(float expected, float computed)
{
return expected-computed;
}
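The deltas change above replaces a per-call allocation with a member buffer: created lazily on first use, released in the new destructor, and with copying deleted because of the raw pointers. A minimal sketch of that ownership pattern, with illustrative names:

#include <cstddef>

class DeltaBuffer
{
public:
    DeltaBuffer(std::size_t rows, std::size_t cols) : rows(rows), cols(cols) {}
    DeltaBuffer(const DeltaBuffer&) = delete;            // raw-pointer member:
    DeltaBuffer& operator=(const DeltaBuffer&) = delete; // forbid copies
    ~DeltaBuffer()
    {
        if (buffer != nullptr)
        {
            for (std::size_t i = 0; i < rows; i++)
                delete[] buffer[i];
            delete[] buffer;
        }
    }
    float* row(std::size_t i)
    {
        if (buffer == nullptr) // lazy first-use allocation, reused afterwards
        {
            buffer = new float*[rows];
            for (std::size_t r = 0; r < rows; r++)
                buffer[r] = new float[cols]();
        }
        return buffer[i];
    }
private:
    std::size_t rows, cols;
    float** buffer = nullptr;
};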

View File

@@ -30,6 +30,11 @@ namespace Learning
{ {
public: public:
BackPropagation(FeedForwardNetworkQuick &n); BackPropagation(FeedForwardNetworkQuick &n);
virtual ~BackPropagation();
BackPropagation(const Shin::NeuronNetwork::Learning::BackPropagation&) =delete;
BackPropagation operator=(const Shin::NeuronNetwork::Learning::BackPropagation&) =delete;
virtual void propagate(const Shin::NeuronNetwork::Solution& expectation); virtual void propagate(const Shin::NeuronNetwork::Solution& expectation);
float teach(const Shin::NeuronNetwork::Problem &p,const Solution &solution); float teach(const Shin::NeuronNetwork::Problem &p,const Solution &solution);
@@ -38,10 +43,12 @@ namespace Learning
void setEntropySize(int milipercents) { entropySize=milipercents; } void setEntropySize(int milipercents) { entropySize=milipercents; }
inline void allowThreading() {allowThreads=1; } inline void allowThreading() {allowThreads=1; }
protected: protected:
virtual float correction(float expected, float computed);
float learningCoeficient=0.4; float learningCoeficient=0.4;
bool entropy=0; bool entropy=0;
bool allowThreads=0; bool allowThreads=0;
int entropySize=500; int entropySize=500;
float **deltas=nullptr;
}; };
} }
} }

View File

@@ -5,61 +5,9 @@ Shin::NeuronNetwork::Learning::OpticalBackPropagation::OpticalBackPropagation(Fe
setEntropySize(100); setEntropySize(100);
} }
void Shin::NeuronNetwork::Learning::OpticalBackPropagation::propagate(const Shin::NeuronNetwork::Solution& expectation) float Shin::NeuronNetwork::Learning::OpticalBackPropagation::correction(float expected, float computed)
{ {
double **deltas; register float tmp=(expected-computed);
deltas=new double*[network.size()]; register float ret=1+exp(tmp*tmp);
for(int i=(int)network.size()-1;i>=0;i--) return tmp < 0? -ret:ret;
{
deltas[i]=new double[network[i]->size()];
deltas[i][0]=0.0;
if(i==(int)network.size()-1)
{
for(size_t j=1;j<network[i]->size();j++)
{
register double tmp=(expectation[j-1]-network[i]->operator[](j)->output());
deltas[i][j]= (1+exp(tmp*tmp))*network[i]->operator[](j)->derivatedOutput();
if(tmp <0)
{
deltas[i][j]=-deltas[i][j];
}
}
}else
{
for(size_t j=1;j<network[i]->size();j++)
{
register double deltasWeight = 0;
for(size_t k=1;k<network[i+1]->size();k++)
{
deltasWeight+=deltas[i+1][k]*network[i+1]->operator[](k)->getWeight(j);
}
deltas[i][j]=deltasWeight*network[i]->operator[](j)->derivatedOutput();
}
}
}
for(size_t i=0;i<network.size();i++)
{
size_t max;
if(i==0)
max=network[i]->size();
else
max=network[i-1]->size();
for(size_t j=1;j<network[i]->size();j++)
{
network[i]->operator[](j)->setWeight(0,network[i]->operator[](j)->getWeight(0)+deltas[i][j]*learningCoeficient);
for(size_t k=1;k<max;k++)
{
network[i]->operator[](j)->setWeight(k,
network[i]->operator[](j)->getWeight(k)+learningCoeficient* deltas[i][j]*
(i==0? network.sums[0][k]:(double)network[i-1]->operator[](k)->output()));
}
}
}
for(size_t i=0;i<network.size();i++)
{
delete[] deltas[i];
}
delete[] deltas;
} }
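Side by side, the two correction rules now differ only in how hard they push: plain back-propagation returns the raw error, while the optical variant amplifies it and keeps its sign. A standalone sketch with both formulas lifted from the code above; the sample values are illustrative:

#include <cmath>
#include <cstdio>

static float plainCorrection(float expected, float computed)   // BackPropagation
{
    return expected - computed;
}

static float opticalCorrection(float expected, float computed) // OpticalBackPropagation
{
    float tmp = expected - computed;
    float ret = 1 + std::exp(tmp * tmp); // always > 2; grows fast with |error|
    return tmp < 0 ? -ret : ret;         // keep the sign of the raw error
}

int main()
{
    // For a small error the optical rule still pushes hard (about 2.01 vs 0.1),
    // which speeds up learning on nearly-correct outputs.
    std::printf("%f %f\n", plainCorrection(1.0f, 0.9f), opticalCorrection(1.0f, 0.9f));
}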

View File

@@ -24,8 +24,8 @@ namespace Learning
{ {
public: public:
OpticalBackPropagation(FeedForwardNetworkQuick &n); OpticalBackPropagation(FeedForwardNetworkQuick &n);
virtual void propagate(const Shin::NeuronNetwork::Solution& expectation) override;
protected: protected:
virtual float correction(float expected, float computed) override;
}; };
} }
} }

View File

@@ -2,7 +2,7 @@
Shin::NeuronNetwork::Learning::Reinforcement::Reinforcement(Shin::NeuronNetwork::FeedForwardNetworkQuick& n): Unsupervised(n), p(new BackPropagation(n)) Shin::NeuronNetwork::Learning::Reinforcement::Reinforcement(Shin::NeuronNetwork::FeedForwardNetworkQuick& n): Unsupervised(n), p(new BackPropagation(n))
{ {
p->setLearningCoeficient(9); p->setLearningCoeficient(1);
} }
Shin::NeuronNetwork::Learning::Reinforcement::~Reinforcement() Shin::NeuronNetwork::Learning::Reinforcement::~Reinforcement()
@@ -10,39 +10,43 @@ Shin::NeuronNetwork::Learning::Reinforcement::~Reinforcement()
delete p; delete p;
} }
void Shin::NeuronNetwork::Learning::Reinforcement::setQualityFunction(std::function< double(const Problem&,const Solution&) > f) void Shin::NeuronNetwork::Learning::Reinforcement::setQualityFunction(std::function< float(const Problem&,const Solution&) > f)
{ {
qualityFunction=f; qualityFunction=f;
} }
double Shin::NeuronNetwork::Learning::Reinforcement::learn(const Shin::NeuronNetwork::Problem& problem) float Shin::NeuronNetwork::Learning::Reinforcement::learn(const Shin::NeuronNetwork::Problem& problem)
{ {
//network[2]->operator[](0)->setWeight(0,-5);
Solution s=network.solve(problem); Solution s=network.solve(problem);
double quality=qualityFunction(problem,s); float quality=qualityFunction(problem,s);
std::vector<double> q; std::vector<float> q;
//std::cerr << s[0] << "\n";
for(register size_t j=0;j<s.size();j++) for(register size_t j=0;j<s.size();j++)
{ {
q.push_back(s[j]*((double)(990+(rand()%21))/1000.0)); q.push_back(s[j]);//*((float)(990+(rand()%21))/1000.0));
} }
if(quality <= 0) if(quality <= 0)
{ {
for(register size_t j=0;j<s.size();j++) for(register size_t j=0;j<s.size();j++)
{ {
do{ q[j]=((float)(100-(rand()%101)))/100.0;
q[j]=((double)(10+rand()%80))/100.0;
}while(fabs(q[j]-s[j]) < 0.1);
} }
} }
for(register int i=abs((int)quality);i>=0;i--) register int i=abs((int)quality);
if(quality != 0.0 && i==0)
i+=1;
for(;i>=0;i--)
{ {
p->propagate(q); p->propagate(q);
network.solve(problem); // re-solve the problem ??? TODO: should it be here?
} }
return quality; return quality;
} }
double Shin::NeuronNetwork::Learning::Reinforcement::learnSet(const std::vector< Shin::NeuronNetwork::Problem* >& problems) float Shin::NeuronNetwork::Learning::Reinforcement::learnSet(const std::vector< Shin::NeuronNetwork::Problem* >& problems)
{ {
double err=0; float err=0;
for(Shin::NeuronNetwork::Problem *pr:problems) for(Shin::NeuronNetwork::Problem *pr:problems)
{ {
err+=learn(*pr); err+=learn(*pr);
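The reworked learn() above reinforces the network's own outputs when the quality score is positive and switches to random targets otherwise, repeating propagation roughly |quality| times with at least one extra pass for small nonzero scores. A condensed sketch of that control flow, with the propagation step abstracted behind std::function:

#include <cstdlib>
#include <cmath>
#include <vector>
#include <functional>

static float reinforceOnce(const std::vector<float>& output, float quality,
                           const std::function<void(const std::vector<float>&)>& propagate)
{
    std::vector<float> q(output); // reinforce the current outputs...
    if (quality <= 0)
    {
        // ...unless they scored badly: then explore random targets in [0,1].
        for (float& v : q)
            v = (float)(100 - (rand() % 101)) / 100.0f;
    }
    int i = std::abs((int)quality);
    if (quality != 0.0f && i == 0)
        i += 1; // run an extra pass when 0 < |quality| < 1, as in the code above
    for (; i >= 0; i--)
        propagate(q);
    return quality;
}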

View File

@@ -45,15 +45,15 @@ namespace Learning
Reinforcement(const Reinforcement&) =delete; Reinforcement(const Reinforcement&) =delete;
Reinforcement& operator=(const Reinforcement&) =delete; Reinforcement& operator=(const Reinforcement&) =delete;
void setQualityFunction(std::function<double(const Problem&,const Solution&)>); void setQualityFunction(std::function<float(const Problem&,const Solution&)>);
double learn(const Shin::NeuronNetwork::Problem &p); float learn(const Shin::NeuronNetwork::Problem &p);
double learnSet(const std::vector<Shin::NeuronNetwork::Problem*> &); float learnSet(const std::vector<Shin::NeuronNetwork::Problem*> &);
void setCoef(double q); void setCoef(double q);
inline BackPropagation& getPropagator() {return *p;}; inline BackPropagation& getPropagator() {return *p;};
void setPropagator(BackPropagation *p); void setPropagator(BackPropagation *p);
protected: protected:
double learningCoeficient=3; float learningCoeficient=3;
std::function<double(const Problem&,const Solution&)> qualityFunction=nullptr; std::function<float(const Problem&,const Solution&)> qualityFunction=nullptr;
BackPropagation *p; BackPropagation *p;
}; };
} }

View File

@@ -19,9 +19,9 @@ Layer::~Layer()
} }
Solution Layer::solve(const std::vector<double> &input) Solution Layer::solve(const std::vector<float> &input)
{ {
std::vector <double> ret; std::vector <float> ret;
for(Neuron *n:neurons) for(Neuron *n:neurons)
{ {
ret.push_back(n->output(input)); ret.push_back(n->output(input));

View File

@@ -42,7 +42,7 @@ namespace NeuronNetwork
} }
} }
~Layer(); ~Layer();
Solution solve(const std::vector<double> &input); Solution solve(const std::vector<float> &input);
Neuron* operator[](int neuron) const; Neuron* operator[](int neuron) const;
int size() const {return neurons.size();}; int size() const {return neurons.size();};
protected: protected:

View File

@@ -6,17 +6,17 @@ Neuron::Neuron(): potential(1),weights()
{ {
} }
double Neuron::getPotential() const float Neuron::getPotential() const
{ {
return potential; return potential;
} }
void Neuron::setPotential(double p) void Neuron::setPotential(float p)
{ {
potential=p; potential=p;
} }
double Neuron::getWeight(unsigned int i) const float Neuron::getWeight(unsigned int i) const
{ {
if(i >= weights.size()) if(i >= weights.size())
{ {
@@ -25,7 +25,7 @@ double Neuron::getWeight(unsigned int i) const
return weights[0]; return weights[0];
} }
void Neuron::setWeight(unsigned int i,double p) void Neuron::setWeight(unsigned int i,float p)
{ {
if(i >= weights.size()) if(i >= weights.size())
{ {
@@ -36,9 +36,9 @@ void Neuron::setWeight(unsigned int i,double p)
weights[i]=p; weights[i]=p;
} }
double Neuron::output(std::vector<double> input) float Neuron::output(std::vector<float> input)
{ {
register double sum=0; register float sum=0;
for(unsigned int i=0;i<input.size();i++) for(unsigned int i=0;i<input.size();i++)
{ {
// std::cerr << "W: " << getWeight(i) <<"\n"; // std::cerr << "W: " << getWeight(i) <<"\n";

View File

@@ -16,18 +16,18 @@ namespace NeuronNetwork
{ {
} }
double getPotential() const; float getPotential() const;
void setPotential(double p); void setPotential(float p);
double getWeight(unsigned int) const; float getWeight(unsigned int) const;
void setWeight(unsigned int i,double p); void setWeight(unsigned int i,float p);
double output(const std::vector<double>); float output(const std::vector<float>);
double output() { return lastOutput;} float output() { return lastOutput;}
protected: protected:
double potential; double potential;
private: private:
std::vector<double> weights; std::vector<float> weights;
double lastOutput=0.0; float lastOutput=0.0;
double lastInput=0.0; float lastInput=0.0;
}; };
class SimpleNeuron: public Neuron class SimpleNeuron: public Neuron
{ {

View File

@@ -7,7 +7,7 @@ Problem::Problem()
} }
Problem::operator std::vector<bool>() const Problem::operator std::vector<float>() const
{ {
return representation(); return representation();
} }

View File

@@ -13,8 +13,8 @@ namespace NeuronNetwork
public: public:
Problem(); Problem();
virtual ~Problem(){}; virtual ~Problem(){};
operator std::vector<bool>() const; operator std::vector<float>() const;
virtual std::vector<bool> representation() const =0; virtual std::vector<float> representation() const =0;
protected: protected:
private: private:
}; };

View File

@@ -2,21 +2,21 @@
using namespace Shin::NeuronNetwork; using namespace Shin::NeuronNetwork;
Solution::Solution(std::vector<double>sol):solution(sol) Solution::Solution(std::vector<float>sol):solution(sol)
{ {
} }
Solution::Solution(const Problem& p):solution() Solution::Solution(const Problem& p):solution()
{ {
std::vector<bool> q(p); std::vector<float> q(p);
for(bool s:q) for(float s:q)
{ {
solution.push_back(s); solution.push_back(s);
} }
} }
double Solution::operator[](size_t pos) const float Solution::operator[](size_t pos) const
{ {
return solution[pos]; return solution[pos];
} }
@@ -26,7 +26,7 @@ size_t Solution::size() const
return solution.size(); return solution.size();
} }
Solution::operator std::vector<double>() Solution::operator std::vector<float>()
{ {
return solution; return solution;
} }

View File

@@ -13,12 +13,12 @@ namespace NeuronNetwork
{ {
public: public:
Solution(const Problem& p); Solution(const Problem& p);
Solution(std::vector<double> solution); Solution(std::vector<float> solution);
size_t size() const; size_t size() const;
double operator[] (size_t pos) const; float operator[] (size_t pos) const;
operator std::vector<double>(); operator std::vector<float>();
protected: protected:
std::vector<double> solution; std::vector<float> solution;
}; };
} }
} }

View File

@@ -1,5 +1,6 @@
include ../Makefile.const include ../Makefile.const
OPTIMALIZATION=
LIB_DIR = ../lib LIB_DIR = ../lib
GEN_TESTS=g-01 g-02 GEN_TESTS=g-01 g-02
NN_TESTS= \ NN_TESTS= \
@@ -23,7 +24,7 @@ test: all
@for i in $(ALL_TESTS); do echo -n ./$$i; echo -n " - "; ./$$i; echo ""; done @for i in $(ALL_TESTS); do echo -n ./$$i; echo -n " - "; ./$$i; echo ""; done
g-%: g-%.cpp $(LIB_DIR)/Genetics.a g-%: g-%.cpp $(LIB_DIR)/Genetics.a
$(CXX) $(CXXFLAGS) -o $@ $< $ $(LIB_DIR)/Genetics.a $(LIB_DIR)/NeuronNetwork.a -lm $(CXX) $(CXXFLAGS) $(OPTIMALIZATION) -o $@ $< $ $(LIB_DIR)/Genetics.a $(LIB_DIR)/NeuronNetwork.a -lm
nn-%: nn-%.cpp $(LIB_DIR)/NeuronNetwork.a nn-%: nn-%.cpp $(LIB_DIR)/NeuronNetwork.a
$(CXX) $(CXXFLAGS) -o $@ $< $ $(LIB_DIR)/NeuronNetwork.a -lm $(CXX) $(CXXFLAGS) -o $@ $< $ $(LIB_DIR)/NeuronNetwork.a -lm

View File

@@ -9,59 +9,45 @@ class X: public Shin::NeuronNetwork::Problem
{ {
public: public:
X(const X& a) :q(a.q) {} X(const X& a) :q(a.q) {}
X(const std::vector<bool> &a):q(a) {} X(const std::vector<float> &a):q(a) {}
std::vector<bool> representation() const X(const std::vector<bool> &a):q() {for(bool s:a) q.push_back((float)s);}
std::vector<float> representation() const
{ {
return q; return q;
} }
protected: protected:
std::vector<bool> q; std::vector<float> q;
}; };
int main(int argc) int main(int argc,char**)
{ {
srand(time(NULL)); srand(time(NULL));
std::vector<Shin::NeuronNetwork::Solution> s; std::vector<Shin::NeuronNetwork::Solution> s;
std::vector<X> p; std::vector<X> p;
// //
s.push_back(Shin::NeuronNetwork::Solution(std::vector<double>({1}))); s.push_back(Shin::NeuronNetwork::Solution(std::vector<float>({1})));
p.push_back(X(std::vector<bool>({0}))); p.push_back(X(std::vector<bool>({0})));
s.push_back(Shin::NeuronNetwork::Solution(std::vector<double>({0}))); s.push_back(Shin::NeuronNetwork::Solution(std::vector<float>({0})));
p.push_back(X(std::vector<bool>({1}))); p.push_back(X(std::vector<bool>({1})));
Shin::NeuronNetwork::FeedForwardNetworkQuick q({1,5000,5000,5000}); Shin::NeuronNetwork::FeedForwardNetworkQuick q({1,5000,5000,15000,2});
Shin::NeuronNetwork::Learning::BackPropagation b(q); Shin::NeuronNetwork::Learning::BackPropagation b(q);
if(argc > 1) if(argc > 1)
{ {
std::cerr << "THREADING\n"; std::cerr << "THREADING\n";
q.setThreads(4); q.setThreads(2);
} }
#include <chrono>
auto t1 = std::chrono::high_resolution_clock::now();
for(int i=0;i<100;i++) for(int i=0;i<100;i++)
{ {
//b.teach(p[i%2],s[i%2]); //b.teach(p[i%2],s[i%2]);
q.solve(p[i%2])[0]; q.solve(p[i%2])[0];
//std::cerr << i%2 <<". FOR: [" << p[i%2].representation()[0] << "] res: " << q.solve(p[i%2])[0] << " should be " << s[i%2][0]<<"\n"; //std::cerr << i%2 <<". FOR: [" << p[i%2].representation()[0] << "] res: " << q.solve(p[i%2])[0] << " should be " << s[i%2][0]<<"\n";
} }
for(int i=0;i<2;i++) auto t2 = std::chrono::high_resolution_clock::now();
{ std::cout << "Time: " << std::chrono::duration_cast<std::chrono::milliseconds>(t2-t1).count() << std::endl;
// b.teach(p[i%2],s[i%2]);
// std::cerr << i%4 <<". FOR: [" << p[i%4].representation()[0] << "," <<p[i%4].representation()[0] << "] res: " << q.solve(p[i%4])[0] << " should be " <<
// s[i%4][0]<<"\n";
}
/*
for(int i=0;i<40;i++)
{
b.teach(p[i%4],s[i%4]);
}
b.debugOn();
std::cerr << "LEARNED\n";
for(int i=0;i<4;i++)
{
b.teach(p[i%4],s[i%4]);
std::cerr << i%4 <<". FOR: [" << p[i%4].representation()[0] << "," <<p[i%4].representation()[1] << "] res: " << q.solve(p[i%4])[0] << " should be " <<
s[i%4][0]<<"\n";
}
*/
} }

View File

@@ -7,9 +7,9 @@
class X: public Shin::NeuronNetwork::Problem class X: public Shin::NeuronNetwork::Problem
{ {
protected: protected:
std::vector<bool> representation() const std::vector<float> representation() const
{ {
return std::vector<bool>({1,1}); return std::vector<float>({1,1});
} }
}; };

View File

@@ -9,13 +9,13 @@ class X: public Shin::NeuronNetwork::Problem
{ {
public: public:
X(const X& a) :q(a.q) {} X(const X& a) :q(a.q) {}
X(const std::vector<bool> &a):q(a) {} X(const std::vector<float> &a):q(a) {}
std::vector<bool> representation() const std::vector<float> representation() const
{ {
return q; return q;
} }
protected: protected:
std::vector<bool> q; std::vector<float> q;
}; };
int main() int main()
@@ -24,14 +24,14 @@ int main()
std::vector<X> p; std::vector<X> p;
// //
s.push_back(Shin::NeuronNetwork::Solution(std::vector<double>({0}))); s.push_back(Shin::NeuronNetwork::Solution(std::vector<float>({0})));
p.push_back(X(std::vector<bool>({1,0}))); p.push_back(X(std::vector<float>({1,0})));
s.push_back(Shin::NeuronNetwork::Solution(std::vector<double>({0}))); s.push_back(Shin::NeuronNetwork::Solution(std::vector<float>({0})));
p.push_back(X(std::vector<bool>({0,1}))); p.push_back(X(std::vector<float>({0,1})));
s.push_back(Shin::NeuronNetwork::Solution(std::vector<double>({0}))); s.push_back(Shin::NeuronNetwork::Solution(std::vector<float>({0})));
p.push_back(X(std::vector<bool>({0,0}))); p.push_back(X(std::vector<float>({0,0})));
s.push_back(Shin::NeuronNetwork::Solution(std::vector<double>({1}))); s.push_back(Shin::NeuronNetwork::Solution(std::vector<float>({1})));
p.push_back(X(std::vector<bool>({1,1}))); p.push_back(X(std::vector<float>({1,1})));
Shin::NeuronNetwork::FeedForwardNetworkQuick q({2,4,1}); Shin::NeuronNetwork::FeedForwardNetworkQuick q({2,4,1});
Shin::NeuronNetwork::Learning::BackPropagation b(q); Shin::NeuronNetwork::Learning::BackPropagation b(q);
@@ -45,7 +45,7 @@ int main()
} }
b.debugOff(); b.debugOff();
for(int i=0;i<40;i++) for(int i=0;i<4000;i++)
{ {
b.teach(p[i%4],s[i%4]); b.teach(p[i%4],s[i%4]);
} }

View File

@@ -4,7 +4,7 @@
class X: public Shin::NeuronNetwork::Problem class X: public Shin::NeuronNetwork::Problem
{ {
public: X(bool x,bool y):x(x),y(y) {} public: X(bool x,bool y):x(x),y(y) {}
protected: std::vector<bool> representation() const { return std::vector<bool>({x,y}); } protected: std::vector<float> representation() const { return std::vector<float>({x,y}); }
private: private:
bool x; bool x;
bool y; bool y;

View File

@@ -9,27 +9,27 @@ class X: public Shin::NeuronNetwork::Problem
{ {
public: public:
X(const X& a) :q(a.q) {} X(const X& a) :q(a.q) {}
X(const std::vector<bool> &a):q(a) {} X(const std::vector<float> &a):q(a) {}
std::vector<bool> representation() const std::vector<float> representation() const
{ {
return q; return q;
} }
protected: protected:
std::vector<bool> q; std::vector<float> q;
}; };
int main(int argc, char*argv) int main(int argc, char**)
{ {
srand(time(NULL)); srand(time(NULL));
std::vector<Shin::NeuronNetwork::Solution> s; std::vector<Shin::NeuronNetwork::Solution> s;
std::vector<X> p; std::vector<X> p;
// //
s.push_back(Shin::NeuronNetwork::Solution(std::vector<double>({1}))); s.push_back(Shin::NeuronNetwork::Solution(std::vector<float>({1})));
p.push_back(X(std::vector<bool>({0}))); p.push_back(X(std::vector<float>({0})));
s.push_back(Shin::NeuronNetwork::Solution(std::vector<double>({0}))); s.push_back(Shin::NeuronNetwork::Solution(std::vector<float>({0})));
p.push_back(X(std::vector<bool>({1}))); p.push_back(X(std::vector<float>({1})));
Shin::NeuronNetwork::FeedForwardNetworkQuick q({1,5000,5000,5000,1}); Shin::NeuronNetwork::FeedForwardNetworkQuick q({1,5000,5000,5000,1});
Shin::NeuronNetwork::Learning::BackPropagation b(q); Shin::NeuronNetwork::Learning::BackPropagation b(q);

View File

@@ -8,13 +8,13 @@ class X: public Shin::NeuronNetwork::Problem
{ {
public: public:
X(const X& a) :q(a.q) {} X(const X& a) :q(a.q) {}
X(const std::vector<bool> &a):q(a) {} X(const std::vector<float> &a):q(a) {}
std::vector<bool> representation() const std::vector<float> representation() const
{ {
return q; return q;
} }
protected: protected:
std::vector<bool> q; std::vector<float> q;
}; };
int main() int main()
@@ -29,17 +29,17 @@ int main()
std::vector<Shin::NeuronNetwork::Solution*> s; std::vector<Shin::NeuronNetwork::Solution*> s;
std::vector<Shin::NeuronNetwork::Problem*> p; std::vector<Shin::NeuronNetwork::Problem*> p;
s.push_back(new Shin::NeuronNetwork::Solution(std::vector<double>({0}))); s.push_back(new Shin::NeuronNetwork::Solution(std::vector<float>({0})));
p.push_back(new X(std::vector<bool>({0,0}))); p.push_back(new X(std::vector<float>({0,0})));
s.push_back( new Shin::NeuronNetwork::Solution(std::vector<double>({1}))); s.push_back( new Shin::NeuronNetwork::Solution(std::vector<float>({1})));
p.push_back( new X(std::vector<bool>({1,0}))); p.push_back( new X(std::vector<float>({1,0})));
s.push_back(new Shin::NeuronNetwork::Solution(std::vector<double>({0}))); s.push_back(new Shin::NeuronNetwork::Solution(std::vector<float>({0})));
p.push_back(new X(std::vector<bool>({1,1}))); p.push_back(new X(std::vector<float>({1,1})));
s.push_back( new Shin::NeuronNetwork::Solution(std::vector<double>({1}))); s.push_back( new Shin::NeuronNetwork::Solution(std::vector<float>({1})));
p.push_back( new X(std::vector<bool>({0,1}))); p.push_back( new X(std::vector<float>({0,1})));
if(test) if(test)
{ {

View File

@@ -8,13 +8,13 @@ class X: public Shin::NeuronNetwork::Problem
{ {
public: public:
X(const X& a) :q(a.q) {} X(const X& a) :q(a.q) {}
X(const std::vector<bool> &a):q(a) {} X(const std::vector<float> &a):q(a) {}
std::vector<bool> representation() const std::vector<float> representation() const
{ {
return q; return q;
} }
protected: protected:
std::vector<bool> q; std::vector<float> q;
}; };
int main() int main()
@@ -29,17 +29,17 @@ int main()
std::vector<Shin::NeuronNetwork::Solution*> s; std::vector<Shin::NeuronNetwork::Solution*> s;
std::vector<Shin::NeuronNetwork::Problem*> p; std::vector<Shin::NeuronNetwork::Problem*> p;
s.push_back(new Shin::NeuronNetwork::Solution(std::vector<double>({0}))); s.push_back(new Shin::NeuronNetwork::Solution(std::vector<float>({0})));
p.push_back(new X(std::vector<bool>({0,0}))); p.push_back(new X(std::vector<float>({0,0})));
s.push_back( new Shin::NeuronNetwork::Solution(std::vector<double>({1}))); s.push_back( new Shin::NeuronNetwork::Solution(std::vector<float>({1})));
p.push_back( new X(std::vector<bool>({1,0}))); p.push_back( new X(std::vector<float>({1,0})));
s.push_back(new Shin::NeuronNetwork::Solution(std::vector<double>({0}))); s.push_back(new Shin::NeuronNetwork::Solution(std::vector<float>({0})));
p.push_back(new X(std::vector<bool>({1,1}))); p.push_back(new X(std::vector<float>({1,1})));
s.push_back( new Shin::NeuronNetwork::Solution(std::vector<double>({1}))); s.push_back( new Shin::NeuronNetwork::Solution(std::vector<float>({1})));
p.push_back( new X(std::vector<bool>({0,1}))); p.push_back( new X(std::vector<float>({0,1})));
b.debugOn(); b.debugOn();
if(test) if(test)

View File

@@ -9,13 +9,13 @@ class X: public Shin::NeuronNetwork::Problem
{ {
public: public:
X(const X& a) :q(a.q) {} X(const X& a) :q(a.q) {}
X(const std::vector<bool> &a):q(a) {} X(const std::vector<float> &a):q(a) {}
std::vector<bool> representation() const std::vector<float> representation() const
{ {
return q; return q;
} }
protected: protected:
std::vector<bool> q; std::vector<float> q;
}; };
int main() int main()
@@ -24,15 +24,16 @@ int main()
std::vector<X> p; std::vector<X> p;
p.push_back(X(std::vector<bool>({0,0}))); p.push_back(X(std::vector<float>({0,0})));
p.push_back(X(std::vector<bool>({1,1}))); p.push_back(X(std::vector<float>({1,1})));
Shin::NeuronNetwork::FeedForwardNetworkQuick q({2,6,2}); Shin::NeuronNetwork::FeedForwardNetworkQuick q({2,6,2});
Shin::NeuronNetwork::Learning::Reinforcement b(q); Shin::NeuronNetwork::Learning::Reinforcement b(q);
b.getPropagator().setLearningCoeficient(1);
int i=0; int i=0;
b.setQualityFunction( b.setQualityFunction(
[&i](const Shin::NeuronNetwork::Solution &s)->double [&i](const Shin::NeuronNetwork::Problem &,const Shin::NeuronNetwork::Solution &s)->float
{ {
if(i%2==0) if(i%2==0)
{ {

View File

@@ -9,13 +9,13 @@ class X: public Shin::NeuronNetwork::Problem
{ {
public: public:
X(const X& a) :q(a.q) {} X(const X& a) :q(a.q) {}
X(const std::vector<bool> &a):q(a) {} X(const std::vector<float> &a):q(a) {}
std::vector<bool> representation() const std::vector<float> representation() const
{ {
return q; return q;
} }
protected: protected:
std::vector<bool> q; std::vector<float> q;
}; };
int main() int main()
@@ -24,16 +24,16 @@ int main()
std::vector<Shin::NeuronNetwork::Problem*> p; std::vector<Shin::NeuronNetwork::Problem*> p;
p.push_back(new X(std::vector<bool>({0,0}))); p.push_back(new X(std::vector<float>({0,0})));
p.push_back(new X(std::vector<bool>({1,1}))); p.push_back(new X(std::vector<float>({1,1})));
Shin::NeuronNetwork::FeedForwardNetworkQuick q({1,1}); Shin::NeuronNetwork::FeedForwardNetworkQuick q({1,1});
Shin::NeuronNetwork::Learning::Reinforcement b(q); Shin::NeuronNetwork::Learning::Reinforcement b(q);
int i=0; int i=0;
double targetQuality=1.4; double targetQuality=1.4;
b.setQualityFunction( b.setQualityFunction(
[](const Shin::NeuronNetwork::Problem &pr,const Shin::NeuronNetwork::Solution &s)->double [](const Shin::NeuronNetwork::Problem &pr,const Shin::NeuronNetwork::Solution &s)->float
{ {
if(pr.representation()[0]==0) if(pr.representation()[0]==0)
{ {

View File

@@ -9,23 +9,26 @@ class X: public Shin::NeuronNetwork::Problem
{ {
public: public:
X(const X& a) :q(a.q) {} X(const X& a) :q(a.q) {}
X(const std::vector<bool> &a):q(a) {} X(const std::vector<float> &a):q(a) {}
std::vector<bool> representation() const std::vector<float> representation() const
{ {
return q; return q;
} }
protected: protected:
std::vector<bool> q; std::vector<float> q;
}; };
int main() int main()
{ {
srand(time(NULL));
for (int test=0;test<3;test++) for (int test=0;test<3;test++)
{ {
Shin::NeuronNetwork::FeedForwardNetworkQuick q({2,6,1}); Shin::NeuronNetwork::FeedForwardNetworkQuick q({2,6,1});
Shin::NeuronNetwork::Learning::Reinforcement b(q); Shin::NeuronNetwork::Learning::Reinforcement b(q);
double targetQuality =1.2; b.setPropagator(new Shin::NeuronNetwork::Learning::OpticalBackPropagation(q));
b.getPropagator().setLearningCoeficient(0.9);
b.getPropagator().allowEntropy();
double targetQuality =1.7;
if(test==2) if(test==2)
{ {
targetQuality =1.62; targetQuality =1.62;
@@ -35,10 +38,10 @@ int main()
b.getPropagator().setLearningCoeficient(3); b.getPropagator().setLearningCoeficient(3);
} }
b.setQualityFunction( b.setQualityFunction(
[](const Shin::NeuronNetwork::Problem &pr,const Shin::NeuronNetwork::Solution &s)->double [](const Shin::NeuronNetwork::Problem &pr,const Shin::NeuronNetwork::Solution &s)->float
{ {
std::vector <bool> p=pr; std::vector <float> p=pr;
double expect=0.0; float expect=0.0;
if(p[0] && p[1]) if(p[0] && p[1])
expect=0; expect=0;
else if(p[0] && !p[1]) else if(p[0] && !p[1])
@@ -60,17 +63,15 @@ int main()
// std::cerr << " returnning " << expect*5.0 << "\n"; // std::cerr << " returnning " << expect*5.0 << "\n";
return expect*5.0; return expect*9.0;
}); });
srand(time(NULL));
std::vector<Shin::NeuronNetwork::Problem*> p; std::vector<Shin::NeuronNetwork::Problem*> p;
p.push_back(new X(std::vector<bool>({0,0}))); p.push_back(new X(std::vector<float>({0,0})));
p.push_back( new X(std::vector<bool>({1,0}))); p.push_back( new X(std::vector<float>({1,0})));
p.push_back( new X(std::vector<bool>({0,1}))); p.push_back( new X(std::vector<float>({0,1})));
p.push_back(new X(std::vector<bool>({1,1}))); p.push_back(new X(std::vector<float>({1,1})));
if(test==1) if(test==1)
{ {
@@ -82,12 +83,13 @@ int main()
} }
for(int i=0;i < 500000000;i++) for(int i=0;i < 500000000;i++)
// for(int i=0;i < 5;i++)
{ {
double err=b.learnSet(p); double err=b.learnSet(p);
if(i%100000==0) if(i%100000==0)
srand(time(NULL)); srand(time(NULL));
if(i%40000==0 || err > targetQuality) if(i%200000==0 || err > targetQuality)
{ {
std::cerr << i << " ("<< err <<").\n"; std::cerr << i << " ("<< err <<").\n";
for(int j=0;j<4;j++) for(int j=0;j<4;j++)