2014-12-10 20:47:11 +01:00
parent 5d0fa9301b
commit ddef0e37a7
18 changed files with 50 additions and 179 deletions

View File

@@ -1,11 +1,9 @@
 CXX=g++ -m64
 CXXFLAGS+= -Wall -Wextra -pedantic -Weffc++ -Wshadow -Wstrict-aliasing -ansi -Woverloaded-virtual -Wdelete-non-virtual-dtor
-#CXXFLAGS+=-Werror
-CXXFLAGS+= -g
-CXXFLAGS+= -msse4.2 -mmmx
-#-fprefetch-loop-arrays
 CXXFLAGS+= -std=c++14
-#CXXFLAGS+= -pg -fPIC
+#-fprefetch-loop-arrays
+CXXFLAGS+= -pg -fPIC
+CXXFLAGS+= -g
 CXXFLAGS+= -fPIC -pthread
 OPTIMALIZATION = -O3 -march=native -mtune=native

View File

@@ -32,7 +32,7 @@ FeedForward::FeedForward(std::initializer_list< int > s, double lam): ACyclicNet
 	weights= new float**[s.size()];
 	potentials= new float*[s.size()];
 	layerSizes= new size_t[s.size()];
-	sums= new float*[s.size()+1];
+	sums= new float*[s.size()];
 	inputs= new float*[s.size()];
 	int i=0;
 	int prev_size=1;
@@ -42,16 +42,15 @@ FeedForward::FeedForward(std::initializer_list< int > s, double lam): ACyclicNet
 		if(i==0)
 		{
 			prev_size=layeSize;
-			sums[0]= new float[layeSize];
-			sums[0][0]=1.0;
 		}
 		layerSizes[i]=layeSize;
 		weights[i]= new float*[layeSize];
 		potentials[i]= new float[layeSize];
-		sums[i+1]= new float[layeSize];
+		sums[i]= new float[layeSize];
 		inputs[i]= new float[layeSize];
 		potentials[i][0]=1.0;
-		sums[i+1][0]=1.0;
+		sums[i][0]=1.0;
 		for (int j=1;j<layeSize;j++)
 		{
 			potentials[i][j]=1.0;
@@ -80,7 +79,6 @@ FeedForward::~FeedForward()
 		delete[] sums[i];
 		delete[] inputs[i];
 	}
-	delete[] sums[layers];
 	delete[] weights;
 	delete[] potentials;
 	delete[] layerSizes;
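
The hunks above are the heart of the commit: `sums` is re-indexed so that `sums[i]` holds layer i's running sums, with `sums[0]` doubling as the input layer, instead of keeping the input in a separate slot and the layers at `sums[i+1]`. The constructor therefore allocates exactly one row per layer and the destructor no longer frees a trailing `sums[layers]` row. A minimal sketch of the new layout, with hypothetical `nLayers`/`layerSizes` stand-ins (not the author's exact code):

    #include <cstddef>

    // Sketch: the post-commit sums layout. sums[0] is the input layer,
    // sums[i] the output of layer i; index 0 of each row is the bias unit.
    int main()
    {
        const std::size_t nLayers = 3;                     // hypothetical
        const std::size_t layerSizes[nLayers] = {3, 5, 2}; // hypothetical
        float **sums = new float*[nLayers];                // was nLayers+1 before
        for (std::size_t i = 0; i < nLayers; ++i)
        {
            sums[i] = new float[layerSizes[i]];
            sums[i][0] = 1.0f;                             // bias slot
        }
        for (std::size_t i = 0; i < nLayers; ++i)
            delete[] sums[i];                              // no trailing row to free
        delete[] sums;
        return 0;
    }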
@@ -117,13 +115,10 @@ void FeedForward::solvePart(float *newSolution, register size_t begin, size_t en
 			sols = _mm_load_ss(sol+k);
 			w=_mm_mul_ps(w,sols);
 			partialSolution=_mm_add_ps(partialSolution,w);
-			// w=_mm_shuffle_ps(w,w,3*2^0+0*2^2+1*2^4+2*2^6);
-			// sols=_mm_shuffle_ps(sols,sols,3*2^0+0*2^2+1*2^4+2*2^6);
 		}
 		for(register size_t k=0;k<alignedPrev;k+=4)
 		{
 			w = _mm_load_ps(this->weights[layer][j]+k);
-			//_mm_prefetch((char*)this->weights[layer][j]+k+4,_MM_HINT_T0);
 			sols = _mm_load_ps(sol+k);
 			w=_mm_mul_ps(w,sols);
 			partialSolution=_mm_add_ps(partialSolution,w);
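
After the dead commented-out shuffle and prefetch lines are dropped, the surviving loop is a plain SSE multiply-accumulate: four weights and four inputs per step via `_mm_load_ps`/`_mm_mul_ps`/`_mm_add_ps`, with `_mm_load_ss` covering the unaligned tail. The same kernel in isolation, as a self-contained sketch (16-byte-aligned inputs assumed; not the author's exact code):

    #include <cstddef>
    #include <cstdio>
    #include <xmmintrin.h>   // SSE: _mm_load_ps, _mm_mul_ps, _mm_add_ps, ...

    // Sketch of the kernel solvePart uses: packed multiply-accumulate over
    // the aligned prefix, scalar loop over the remainder.
    static float dot_sse(const float *a, const float *b, std::size_t n)
    {
        __m128 acc = _mm_setzero_ps();
        const std::size_t aligned = n & ~static_cast<std::size_t>(3);
        for (std::size_t k = 0; k < aligned; k += 4)
            acc = _mm_add_ps(acc, _mm_mul_ps(_mm_load_ps(a + k), _mm_load_ps(b + k)));
        float lanes[4];
        _mm_storeu_ps(lanes, acc);
        float sum = lanes[0] + lanes[1] + lanes[2] + lanes[3];
        for (std::size_t k = aligned; k < n; ++k)   // scalar tail
            sum += a[k] * b[k];
        return sum;
    }

    int main()
    {
        alignas(16) float w[6] = {1, 2, 3, 4, 5, 6};
        alignas(16) float x[6] = {6, 5, 4, 3, 2, 1};
        std::printf("%f\n", dot_sse(w, x, 6));   // 1*6 + 2*5 + ... + 6*1 = 56
        return 0;
    }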
@@ -159,20 +154,16 @@ void FeedForward::solvePart(float *newSolution, register size_t begin, size_t en
 Solution FeedForward::solve(const Problem& p)
 {
-	register float* sol=sums[1];
-	sums[0][0]=1;
+	register float* sol=sums[0];
 	sol[0]=1;
 	for(size_t i=0;i<p.size();i++)
-	{
-		sums[0][i+1]=p[i];
 		sol[i+1]=p[i];
-	}
 	register size_t prevSize=layerSizes[0];
 	for(register size_t i=1;i<layers;i++)
 	{
-		float* newSolution= sums[i+1];
+		float* newSolution= sums[i];
 		if(threads > 1 && (layerSizes[i] > 700 ||prevSize > 700)) // 700 is a guess at the layer size where spawning threads starts to pay off
 		{
 			std::vector<std::thread> th;
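
The `if(threads > 1 && ...)` branch keeps the old heuristic: threads are only worth spawning when a layer or its predecessor has more than roughly 700 neurons, since thread start-up otherwise costs more than it saves. A sketch of that split, with a hypothetical free-function stand-in for `FeedForward::solvePart` (names and signature are assumptions):

    #include <cstddef>
    #include <thread>
    #include <vector>

    // Hypothetical stand-in for FeedForward::solvePart from the diff above.
    static void solvePart(float *newSolution, std::size_t begin, std::size_t end)
    {
        for (std::size_t j = begin; j < end; ++j)
            newSolution[j] = 0.0f;   // the real code computes activations here
    }

    // Split one layer's rows across `threads` workers, as solve() does for
    // layers above the ~700-neuron threshold.
    static void solveLayerParallel(float *newSolution, std::size_t size, unsigned threads)
    {
        std::vector<std::thread> th;
        const std::size_t chunk = size / threads;
        for (unsigned t = 0; t < threads; ++t)
        {
            const std::size_t begin = t * chunk;
            const std::size_t end = (t + 1 == threads) ? size : begin + chunk;
            th.emplace_back(solvePart, newSolution, begin, end);
        }
        for (std::thread &w : th)
            w.join();   // all rows must be final before the next layer reads them
    }

    int main()
    {
        std::vector<float> layer(1000, 1.0f);
        solveLayerParallel(layer.data(), layer.size(), 4);
        return 0;
    }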
@@ -213,7 +204,7 @@ FFLayer& FeedForward::operator[](size_t l)
 		ffLayers=new FFLayer*[layers];
 		for(size_t i=0;i<layers;i++)
 		{
-			ffLayers[i]=new FFLayer(layerSizes[i],potentials[i],weights[i],sums[i+1],inputs[i],lambda);
+			ffLayers[i]=new FFLayer(layerSizes[i],potentials[i],weights[i],sums[i],inputs[i],lambda);
 		}
 	}
 	return *ffLayers[l];

View File

@@ -61,7 +61,6 @@ namespace NeuronNetwork
 			FFLayer(const FFLayer &) = delete;
 			FFLayer& operator=(const FFLayer &) = delete;
-			// inline virtual Neuron& operator[](size_t layer) override {return operator[](layer);};
 			virtual FFNeuron& operator[](size_t layer) override;
 			inline virtual size_t size() const override {return layerSize;};
 		protected:

View File

@@ -1,25 +1,5 @@
 #include "./IO"
-Shin::NeuronNetwork::IO::IO():data()
-{
-}
-Shin::NeuronNetwork::IO::IO(std::vector< float >& d):data(d)
-{
-}
-Shin::NeuronNetwork::IO::IO(const Shin::NeuronNetwork::IO& old): data(old.data)
-{
-}
-Shin::NeuronNetwork::IO::~IO()
-{
-}
 Shin::NeuronNetwork::IO Shin::NeuronNetwork::IO::operator+(const IO &r)
 {
 	Shin::NeuronNetwork::IO tmp;
@@ -33,25 +13,3 @@ Shin::NeuronNetwork::IO Shin::NeuronNetwork::IO::operator+(const IO &r)
 	}
 	return tmp;
 }
-Shin::NeuronNetwork::IO::operator std::vector<float>&()
-{
-	return data;
-}
-Shin::NeuronNetwork::IO::operator std::vector<float>()
-{
-	return data;
-}
-float Shin::NeuronNetwork::IO::operator[](size_t pos) const
-{
-	return data[pos];
-}
-size_t Shin::NeuronNetwork::IO::size() const
-{
-	return data.size();
-}

View File

@@ -11,17 +11,19 @@ namespace NeuronNetwork
 	class IO
 	{
 	public:
-		IO();
-		IO(std::vector<float> &d);
-		IO(const IO &old);
-		virtual ~IO();
+		IO() {};
+		IO(std::vector<float> &d) : data(d) {}
+		IO(const IO &old) : data(old.data) {}
+		IO(const std::initializer_list<float> &a):data(a) { }
+		virtual ~IO() {};
 		IO operator+(const IO &r);
-		virtual operator std::vector<float>&() final; // TOO CONST
-		virtual operator std::vector<float>() final; // TOO CONST
-		float operator[] (size_t pos) const;
-		size_t size() const;
+		inline virtual operator std::vector<float>&() final {return data;}
+		inline virtual operator std::vector<float>() final {return data;}
+		virtual float& operator[] (size_t pos) final { return data[pos];}
+		virtual float operator[] (size_t pos) const final { return data[pos];}
+		inline virtual size_t size() const final {return data.size();}
 	protected:
-		std::vector<float> data;
+		std::vector<float> data = {};
 	private:
 	};
 }
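
With the definitions from IO.cpp inlined and the new `initializer_list` constructor, an `IO` can now be brace-initialized and used directly as a `std::vector<float>`. A short usage sketch (the extensionless include path is taken from IO.cpp above; everything else is an assumption):

    #include <vector>
    #include "./IO"   // assumed path, as used by IO.cpp in this commit

    int main()
    {
        Shin::NeuronNetwork::IO io{0.0f, 1.0f, 0.5f};   // new initializer_list ctor
        io[1] = 0.25f;                                  // new non-const operator[]
        std::vector<float> &raw = io;                   // inline conversion operator
        return raw.size() == io.size() ? 0 : 1;
    }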

View File

@@ -4,14 +4,11 @@
 #include <math.h>
 #include <cstddef>
-#include "../Solution.h"
 #include "../FeedForward.h"
 #include "BackPropagation"
 /*
  * http://proceedings.informingscience.org/InSITE2005/P106Otai.pdf
- *
- *
 */
 namespace Shin

View File

@@ -1,7 +1,7 @@
 #ifndef _Q_FUNCTION_H_
 #define _Q_FUNCTION_H_
-#include "../../Solution"
+#include "../../Solution.h"
 #include "../../FeedForward"
 #include "../BackPropagation.h"
 #include "../OpticalBackPropagation.h"

View File

@@ -15,22 +15,12 @@ float Shin::NeuronNetwork::Learning::Supervised::calculateError(const Shin::Neur
 	return a;
 }
-float Shin::NeuronNetwork::Learning::Supervised::teachSet(std::vector< Shin::NeuronNetwork::Problem* >& p, std::vector< Shin::NeuronNetwork::Solution* >& solution)
+float Shin::NeuronNetwork::Learning::Supervised::teachSet(const std::vector<std::pair<Shin::NeuronNetwork::Problem,Shin::NeuronNetwork::Solution>> &set)
 {
 	double error=0;
-	for (register size_t i=0;i<p.size();i++)
+	for (register size_t i=0;i<set.size();i++)
 	{
-		error+=teach(*p[i],*solution[i]);
+		error+=teach(set[i].first,set[i].second);
 	}
 	return error;
 }
-void Shin::NeuronNetwork::Learning::Supervised::debugOn()
-{
-	debug=1;
-}
-void Shin::NeuronNetwork::Learning::Supervised::debugOff()
-{
-	debug=0;
-}
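
`teachSet` now takes a single `const` vector of (Problem, Solution) pairs instead of two parallel vectors of raw pointers, which also removes the caller's `new`/`delete` bookkeeping (compare the XOR test at the bottom of this commit). A minimal sketch of the new call; `b` is assumed to be a `Learning::BackPropagation` bound to a network, constructed as in those tests:

    // Sketch only, assuming this commit's headers and a learner `b`.
    std::vector<std::pair<Shin::NeuronNetwork::Problem, Shin::NeuronNetwork::Solution>> set;
    set.push_back({Shin::NeuronNetwork::Problem({0, 0}), Shin::NeuronNetwork::Solution({0})});
    set.push_back({Shin::NeuronNetwork::Problem({0, 1}), Shin::NeuronNetwork::Solution({1})});
    float err = b.teachSet(set);   // one pass over the whole set, summed error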

View File

@@ -1,7 +1,8 @@
 #ifndef _SUPERVISEDLEARNING_H_
 #define _SUPERVIESDLERANING_H_
-#include <math.h>
+#include <vector>
+#include <set>
 #include <cstddef>
 #include "../Solution.h"
@@ -19,14 +20,12 @@ namespace Learning
 			Supervised() =delete;
 			Supervised(FeedForward &n);
 			virtual ~Supervised() {};
 			float calculateError(const Solution &expectation,const Solution &solution);
 			virtual float teach(const Shin::NeuronNetwork::Problem &p,const Solution &solution)=0;
-			float teachSet(std::vector<Shin::NeuronNetwork::Problem*> &p,std::vector<Shin::NeuronNetwork::Solution*> &solution);
-			void debugOn();
-			void debugOff();
+			virtual float teachSet(const std::vector<std::pair<Problem,Solution>> &set) final;
 		protected:
 			FeedForward &network;
-			bool debug=0;
 		};
 	}
 }

View File

@@ -4,13 +4,3 @@ Shin::NeuronNetwork::Learning::Unsupervised::Unsupervised(Shin::NeuronNetwork::F
 {
 }
-void Shin::NeuronNetwork::Learning::Unsupervised::debugOn()
-{
-	debug=1;
-}
-void Shin::NeuronNetwork::Learning::Unsupervised::debugOff()
-{
-	debug=0;
-}

View File

@@ -19,11 +19,8 @@ namespace Learning
 			Unsupervised() =delete;
 			Unsupervised(FeedForward &n);
 			virtual ~Unsupervised() {};
-			void debugOn();
-			void debugOff();
 		protected:
 			FeedForward &network;
-			bool debug=0;
 		};
 	}
 }

View File

@@ -2,7 +2,7 @@ OBJFILES=\
 	FeedForward.o\
 	Learning/Supervised.o Learning/BackPropagation.o Learning/OpticalBackPropagation.o\
 	Learning/Unsupervised.o Learning/Reinforcement.o Learning/RL/QFunction.o Learning/QLearning.o\
-	Solution.o Problem.o ./IO.o
+	./IO.o
 LINKFILES= ../sse_mathfun.o
@@ -17,7 +17,7 @@ lib: $(LIBNAME).so $(LIBNAME).a
 $(LIBNAME).so: $(OBJFILES)
	$(CXX) -shared $(CXXFLAGS) $(OBJFILES) $(LINKFILES) -o $(LIBNAME).so
-$(LIBNAME).a: $(OBJFILES) ./Neuron.h ./Network.h
+$(LIBNAME).a: $(OBJFILES) ./Neuron.h ./Network.h ./Solution.h ./Problem.h
	rm -f $(LIBNAME).a # create new library
	ar rcv $(LIBNAME).a $(OBJFILES) $(LINKFILES)
	ranlib $(LIBNAME).a

View File

@@ -1,13 +0,0 @@
-#include "Problem"
-using namespace Shin::NeuronNetwork;
-Problem::Problem()
-{
-}
-Problem::Problem(std::vector< float >& p): IO(p)
-{
-}

View File

@@ -12,8 +12,9 @@
 	class Problem : public IO
 	{
 	public:
-		Problem();
-		Problem(std::vector<float> &p);
+		Problem(): IO() {};
+		Problem(std::vector<float> &p):IO(p) {};
+		Problem(const std::initializer_list<float> &a) : IO(a) {};
 	protected:
 	private:
 	};

View File

@@ -1,18 +0,0 @@
-#include "./Solution"
-using namespace Shin::NeuronNetwork;
-Solution::Solution(std::vector<float>&sol):IO(sol)
-{
-}
-Solution::Solution(std::vector< float > solution):IO(solution)
-{
-}
-void Solution::push_back(float d)
-{
-	data.push_back(d);
-}

View File

@@ -11,11 +11,12 @@ namespace NeuronNetwork
 	class Solution : public IO
 	{
 	public:
-		Solution(): IO() {};
-		Solution(const Problem& p) :IO(p) {};
-		Solution(std::vector<float> &solution);
-		Solution(std::vector<float> solution);
-		void push_back(float);
+		Solution(): IO() {}
+		Solution(const Problem& p) :IO(p) {}
+		Solution(std::vector<float> &solution):IO(solution) {}
+		Solution(std::vector<float> solution): IO(solution) {}
+		Solution(const std::initializer_list<float> &a) : IO(a) {};
+		inline void push_back(const float &a) {data.push_back(a);};
 	};
 }
}
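
With `Solution.cpp` deleted above, the class is now header-only; a brace-initialized solution plus the inlined `push_back` covers everything the old .cpp provided. A tiny sketch (the include path is assumed, matching the `"../Solution.h"` includes elsewhere in this commit):

    #include "Solution.h"   // assumed path

    int main()
    {
        Shin::NeuronNetwork::Solution s{0.25f};   // new initializer_list ctor
        s.push_back(0.75f);                       // inlined push_back
        return s[1] == 0.75f ? 0 : 1;             // IO's operator[]
    }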

View File

@@ -37,20 +37,17 @@ int main()
 	Shin::NeuronNetwork::Learning::BackPropagation b(q);
 	b.setLearningCoeficient(10);
-	b.debugOn();
 	for(int i=0;i<4;i++)
 	{
 		b.teach(p[i%4],s[i%4]);
 		std::cerr << i%4 <<". FOR: [" << p[i%4].representation()[0] << "," <<p[i%4].representation()[1] << "] res: " << q.solve(p[i%4])[0] << " should be " <<
 			s[i%4][0]<<"\n";
 	}
-	b.debugOff();
 	for(int i=0;i<40000;i++)
 	{
 		b.teach(p[i%4],s[i%4]);
 	}
-	b.debugOn();
 	std::cerr << "LEARNED\n";
 	for(int i=0;i<4;i++)
 	{
View File

@@ -20,21 +20,11 @@ int main()
 	Shin::NeuronNetwork::Learning::BackPropagation b(q);
 	srand(time(NULL));
-	std::vector<Shin::NeuronNetwork::Solution*> s;
-	std::vector<Shin::NeuronNetwork::Problem*> p;
-	s.push_back(new Shin::NeuronNetwork::Solution(std::vector<float>({0})));
-	p.push_back(new X(std::vector<float>({0,0})));
-	s.push_back( new Shin::NeuronNetwork::Solution(std::vector<float>({1})));
-	p.push_back( new X(std::vector<float>({1,0})));
-	s.push_back(new Shin::NeuronNetwork::Solution(std::vector<float>({0})));
-	p.push_back(new X(std::vector<float>({1,1})));
-	s.push_back( new Shin::NeuronNetwork::Solution(std::vector<float>({1})));
-	p.push_back( new X(std::vector<float>({0,1})));
+	std::vector<std::pair<Shin::NeuronNetwork::Problem, Shin::NeuronNetwork::Solution> > set;
+	set.push_back(std::pair<Shin::NeuronNetwork::Problem, Shin::NeuronNetwork::Solution>(Shin::NeuronNetwork::Problem({0,0}),Shin::NeuronNetwork::Solution({0})));
+	set.push_back(std::pair<Shin::NeuronNetwork::Problem, Shin::NeuronNetwork::Solution>(Shin::NeuronNetwork::Problem({1,0}),Shin::NeuronNetwork::Solution({1})));
+	set.push_back(std::pair<Shin::NeuronNetwork::Problem, Shin::NeuronNetwork::Solution>(Shin::NeuronNetwork::Problem({1,1}),Shin::NeuronNetwork::Solution({0})));
+	set.push_back(std::pair<Shin::NeuronNetwork::Problem, Shin::NeuronNetwork::Solution>(Shin::NeuronNetwork::Problem({0,1}),Shin::NeuronNetwork::Solution({1})));
 	if(test)
 	{
 		std::cerr << "Testing with entropy\n";
@@ -46,7 +36,7 @@ int main()
 		b.setLearningCoeficient(20);//8);
 		for(int j=0;;j++)
 		{
-			double err=b.teachSet(p,s);
+			double err=b.teachSet(set);
 			if(err <0.3)
 			{
 				// b.setLearningCoeficient(5);
@@ -60,20 +50,12 @@ int main()
 				std::cerr << j << "(" << err <<"):\n";
 				for(int i=0;i<4;i++)
 				{
-					std::cerr << "\t" << i%4 <<". FOR: [" << p[i%4]->operator[](0) << "," <<p[i%4]->operator[](1) << "] res: " <<
-						q.solve(*p[i%4])[0] << " should be " << s[i%4]->operator[](0)<<"\n";
+					std::cerr << "\t" << i%4 <<". FOR: [" << set[i%4].first[0] << "," <<set[i%4].first[1] << "] res: " <<
+						q.solve(set[i%4].first)[0] << " should be " << set[i%4].second[0]<<"\n";
 				}
 			}
 			if(err <0.001)
 				break;
 		}
-	for(auto a:p)
-	{
-		delete a;
-	}
-	for(auto a:s)
-	{
-		delete a;
-	}
 	}
 }