cleaning methods and refactoring abstract classes...

2014-12-11 00:37:58 +01:00
parent ddef0e37a7
commit 2736ede1be
17 changed files with 68 additions and 93 deletions

View File

@@ -24,7 +24,13 @@ FFNeuron& FFLayer::operator[](size_t neuron)
 			neurons[i]=new FFNeuron(potentials[i],weights[i],sums[i],inputs[i],lambda);
 		}
 	}
+	if(neuron>=layerSize)
+		throw std::out_of_range("Not so many neurons in layers.");
 	return *neurons[neuron];
 }
 FeedForward::FeedForward(std::initializer_list< int > s, double lam): ACyclicNetwork(lam),layers(s.size())
@@ -207,5 +213,9 @@ FFLayer& FeedForward::operator[](size_t l)
 			ffLayers[i]=new FFLayer(layerSizes[i],potentials[i],weights[i],sums[i],inputs[i],lambda);
 		}
 	}
+	if(l>=layers)
+		throw std::out_of_range("Not so many layers in network.");
 	return *ffLayers[l];
 }
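
The two new checks are the standard bounds-guarded operator[] idiom: validate the index against the stored size and throw std::out_of_range before dereferencing. A minimal self-contained sketch of the same pattern (DemoLayer is a hypothetical stand-in, not the repository's FFLayer):

    #include <cstddef>
    #include <stdexcept>

    // Bounds-guarded operator[]: reject a bad index before touching storage.
    // DemoLayer is illustrative only; the commit applies this inside FFLayer
    // and FeedForward.
    class DemoLayer
    {
        float *values;
        size_t count;
    public:
        DemoLayer(float *v, size_t n): values(v), count(n) {}
        float& operator[](size_t i)
        {
            if(i>=count)
                throw std::out_of_range("index exceeds layer size");
            return values[i];
        }
    };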

View File

@@ -55,7 +55,7 @@ namespace NeuronNetwork
 	class FFLayer: public Layer
 	{
 	public:
-		FFLayer(size_t s, float *p,float **w,float *su,float *in,float lam): neurons(nullptr),layerSize(s),potentials(p),weights(w),sums(su),inputs(in),lambda(lam) {}
+		FFLayer(size_t s, float *p,float **w,float *su,float *in,float lam): layerSize(s),potentials(p),weights(w),sums(su),inputs(in),lambda(lam) {}
 		~FFLayer();
 		FFLayer(const FFLayer &) = delete;
@@ -64,7 +64,7 @@ namespace NeuronNetwork
 		virtual FFNeuron& operator[](size_t layer) override;
 		inline virtual size_t size() const override {return layerSize;};
 	protected:
-		FFNeuron **neurons;
+		FFNeuron **neurons=nullptr;
 		size_t layerSize;
 		float *potentials;
 		float **weights;
@@ -77,26 +77,24 @@ namespace NeuronNetwork
 	{
 	public:
 		FeedForward(std::initializer_list<int> s, double lam=Shin::NeuronNetwork::lambda);
-		virtual ~FeedForward();
 		FeedForward(const FeedForward &f) = delete; //TODO
 		FeedForward operator=(const FeedForward &f)=delete;
+		virtual ~FeedForward();
 		virtual Solution solve(const Problem& p) override;
 		virtual size_t size() const override { return layers;};
 		virtual FFLayer& operator[](size_t l) override;
-		void setThreads(unsigned t) {threads=t;}
 	protected:
 		void solvePart(float *newSolution, size_t begin, size_t end,size_t prevSize, float* sol,size_t layer);
 	private:
 		FFLayer **ffLayers=nullptr;
 		float ***weights=nullptr;
 		float **potentials=nullptr;
-	public:
 		float **sums=nullptr;
 		float **inputs=nullptr;
-	private:
 		size_t *layerSizes=nullptr;
 		size_t layers;
-		unsigned threads=1;
 	};
 }
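
The header now leans on C++11 in-class member initializers (FFNeuron **neurons=nullptr;) instead of repeating every pointer in each constructor's init-list, so all constructors start from a known null state. A small sketch of the idiom, with hypothetical names:

    // In-class initializers: every constructor sees these defaults unless
    // its own init-list overrides them. Names are illustrative.
    struct Buffers
    {
        float **weights=nullptr; // null until allocation
        float *sums=nullptr;     // same default for every constructor
        Buffers() = default;     // no init-list needed for the null state
    };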

View File

@@ -99,11 +99,11 @@ float Shin::NeuronNetwork::Learning::BackPropagation::teach(const Shin::NeuronNe
 	double error=calculateError(solution,a);
 	Solution s;
-	if(entropy)
+	if(noise)
 	{
 		for(size_t i=0;i<solution.size();i++)
 		{
-			s.push_back(solution[i]*((double)((100000-entropySize)+(rand()%(entropySize*2+1)))/100000.0));
+			s.push_back(solution[i]*((double)((100000-noiseSize)+(rand()%(noiseSize*2+1)))/100000.0));
 		}
 		propagate(s);
 	}else
@@ -113,15 +113,3 @@ float Shin::NeuronNetwork::Learning::BackPropagation::teach(const Shin::NeuronNe
 	return error;
 }
-void Shin::NeuronNetwork::Learning::BackPropagation::setLearningCoeficient(float c)
-{
-	learningCoeficient=c;
-}
-float Shin::NeuronNetwork::Learning::BackPropagation::correction(float expected, float computed)
-{
-	return expected-computed;
-}
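
Aside from the entropy-to-noise rename, the jitter itself is unchanged: each target is scaled by a factor drawn uniformly from [1 - noiseSize/100000, 1 + noiseSize/100000], with noiseSize in milipercents (the default 500 means roughly ±0.5%). The same computation as a standalone sketch, using C rand() as the source does:

    #include <cstdlib>

    // Scale a target by a uniform factor in [1 - size/100000, 1 + size/100000].
    // `size` is in milipercents, matching noiseSize above (500 => +/-0.5%).
    double jitter(double value, unsigned size)
    {
        unsigned offset=(100000-size)+(rand()%(size*2+1));
        return value*((double)offset/100000.0);
    }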

View File

@@ -35,19 +35,11 @@ namespace Learning
 		BackPropagation(const Shin::NeuronNetwork::Learning::BackPropagation&) =delete;
 		BackPropagation operator=(const Shin::NeuronNetwork::Learning::BackPropagation&) =delete;
-		virtual void propagate(const Shin::NeuronNetwork::Solution& expectation);
 		float teach(const Shin::NeuronNetwork::Problem &p,const Solution &solution);
+		virtual void propagate(const Shin::NeuronNetwork::Solution& expectation);
-		void setLearningCoeficient (float);
-		void allowEntropy() {entropy=1;}
-		void setEntropySize(int milipercents) { entropySize=milipercents; }
-		inline void allowThreading() {allowThreads=1; }
 	protected:
-		virtual float correction(float expected, float computed);
-		float learningCoeficient=0.4;
-		bool entropy=0;
-		bool allowThreads=0;
-		int entropySize=500;
+		inline virtual float correction(const float& expected, const float& computed) { return expected - computed;};
 		float **deltas=nullptr;
 	};
 }

View File

@@ -1,11 +1,6 @@
#include "./OpticalBackPropagation" #include "./OpticalBackPropagation"
Shin::NeuronNetwork::Learning::OpticalBackPropagation::OpticalBackPropagation(FeedForward &n): BackPropagation(n) float Shin::NeuronNetwork::Learning::OpticalBackPropagation::correction(const float& expected, const float& computed)
{
setEntropySize(100);
}
float Shin::NeuronNetwork::Learning::OpticalBackPropagation::correction(float expected, float computed)
{ {
register float tmp=(expected-computed); register float tmp=(expected-computed);
register float ret=1+exp(tmp*tmp); register float ret=1+exp(tmp*tmp);
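
The hunk is cut off before the return statement, but the visible lines show the "optical" idea from the paper linked in the header: amplify the raw error by a gain that grows with the squared error, so large mistakes propagate more strongly. A hedged sketch of that shape; the final tmp*gain combination is an assumption, since the diff does not show the return:

    #include <cmath>

    // "Optical" correction sketch: raw error times a gain of 1+exp(error^2).
    // The returned combination is assumed -- the diff above is truncated
    // before the actual return statement.
    float opticalCorrection(float expected, float computed)
    {
        float tmp=expected-computed;
        float gain=1.0f+std::exp(tmp*tmp);
        return tmp*gain;
    }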

View File

@@ -1,11 +1,8 @@
 #ifndef _OPT_BACK_PROPAGATION_H_
 #define _OPT_BACK_PROPAGATION_H_
-#include <math.h>
-#include <cstddef>
 #include "../FeedForward.h"
-#include "BackPropagation"
+#include "BackPropagation.h"
 /*
  * http://proceedings.informingscience.org/InSITE2005/P106Otai.pdf
@@ -20,9 +17,9 @@ namespace Learning
 	class OpticalBackPropagation : public BackPropagation
 	{
 	public:
-		OpticalBackPropagation(FeedForward &n);
+		inline OpticalBackPropagation(FeedForward &n): BackPropagation(n) {}
 	protected:
-		virtual float correction(float expected, float computed) override;
+		virtual float correction(const float& expected, const float& computed) override;
 	};
 }
 }

View File

@@ -1,16 +1,15 @@
 #ifndef _QLEARNING_H_
 #define _QLEARNING_H_
-#include <math.h>
 #include <cstddef>
+#include <functional>
+#include "BackPropagation.h"
+#include "OpticalBackPropagation.h"
 #include "../Problem.h"
 #include "../FeedForward.h"
-#include "BackPropagation"
-#include "Unsupervised"
+#include "Unsupervised.h"
 #include "RL/QFunction.h"
-#include "OpticalBackPropagation.h"
-#include "functional"
 /*
  * http://www2.econ.iastate.edu/tesfatsi/RLUsersGuide.ICAC2005.pdf

View File

@@ -1,11 +1,13 @@
 #ifndef _Q_FUNCTION_H_
 #define _Q_FUNCTION_H_
+#include <map>
 #include "../../Solution.h"
-#include "../../FeedForward"
+#include "../../FeedForward.h"
 #include "../BackPropagation.h"
 #include "../OpticalBackPropagation.h"
-#include <map>
 namespace Shin
 {
 namespace NeuronNetwork

View File

@@ -1,10 +1,4 @@
#include "./Supervised" #include "./Supervised"
Shin::NeuronNetwork::Learning::Supervised::Supervised(Shin::NeuronNetwork::FeedForward& n) :network(n)
{
}
float Shin::NeuronNetwork::Learning::Supervised::calculateError(const Shin::NeuronNetwork::Solution& expectation, const Shin::NeuronNetwork::Solution& solution) float Shin::NeuronNetwork::Learning::Supervised::calculateError(const Shin::NeuronNetwork::Solution& expectation, const Shin::NeuronNetwork::Solution& solution)
{ {
register float a=0; register float a=0;

View File

@@ -14,18 +14,33 @@ namespace NeuronNetwork
 {
 namespace Learning
 {
+	const float LearningCoeficient=0.4;
 	class Supervised
 	{
 	public:
 		Supervised() =delete;
-		Supervised(FeedForward &n);
+		Supervised(FeedForward &n) : network(n) {};
 		virtual ~Supervised() {};
 		float calculateError(const Solution &expectation,const Solution &solution);
 		virtual float teach(const Shin::NeuronNetwork::Problem &p,const Solution &solution)=0;
 		virtual float teachSet(const std::vector<std::pair<Problem,Solution>> &set) final;
+		inline virtual void setLearningCoeficient (const float& coef) { learningCoeficient=coef; };
+		inline virtual void allowThreading() final {allowThreads=1;}
+		inline virtual void disableThreading() final {allowThreads=0;}
+		inline virtual void allowNoise() final {noise=1;}
+		inline virtual void disableNoise() final {noise=0;}
+		inline virtual void setNoiseSize(const unsigned& milipercents) final { noiseSize=milipercents; }
 	protected:
 		FeedForward &network;
+		float learningCoeficient=Shin::NeuronNetwork::Learning::LearningCoeficient;
+		bool allowThreads=0;
+		bool noise=0;
+		unsigned noiseSize=500;
 	};
 }
 }
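
With the knobs hoisted into Supervised, every trainer (BackPropagation, OpticalBackPropagation, ...) shares one configuration surface. A usage sketch against this interface; include paths and parameter values are illustrative:

    #include "../FeedForward.h"          // illustrative paths
    #include "BackPropagation.h"

    int main()
    {
        Shin::NeuronNetwork::FeedForward net({2,3,1});
        Shin::NeuronNetwork::Learning::BackPropagation trainer(net);
        trainer.setLearningCoeficient(0.2); // overrides the 0.4 default
        trainer.setNoiseSize(100);          // milipercents => +/-0.1%
        trainer.allowNoise();               // jitter targets during teach()
        trainer.allowThreading();           // opt in to threaded propagation
    }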

View File

@@ -1,6 +1 @@
#include "./Unsupervised" #include "./Unsupervised"
Shin::NeuronNetwork::Learning::Unsupervised::Unsupervised(Shin::NeuronNetwork::FeedForward& n) :network(n)
{
}

View File

@@ -16,9 +16,10 @@ namespace Learning
 	class Unsupervised
 	{
 	public:
-		Unsupervised() =delete;
-		Unsupervised(FeedForward &n);
+		Unsupervised(FeedForward &n): network(n) {};
 		virtual ~Unsupervised() {};
+		Unsupervised() =delete;
 	protected:
 		FeedForward &network;
 	};

View File

@@ -34,9 +34,10 @@ namespace NeuronNetwork
 		virtual Layer& operator[](size_t layer)=0;
 		inline float getLambda() const {return lambda;}
+		inline virtual void setThreads(const unsigned&t) final {threads=t;}
 	protected:
 		float lambda;
-	private:
+		unsigned threads=1;
 	};
 	class ACyclicNetwork : public Network
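
Thread count now lives on the Network base, so any network type exposes the same setThreads() knob, and the final specifier stops subclasses from redefining it (FeedForward's own copy is deleted above). An illustrative call, with the include path assumed:

    #include "FeedForward.h" // illustrative path

    int main()
    {
        Shin::NeuronNetwork::FeedForward net({2,3,1});
        net.setThreads(4); // inherited from Network after this commit
    }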

Binary file not shown.

View File

@@ -13,13 +13,13 @@ class X: public Shin::NeuronNetwork::Problem
 int main()
 {
+	srand(time(NULL));
 	for (int test=0;test<2;test++)
 	{
 		Shin::NeuronNetwork::FeedForward q({2,3,1});
 		Shin::NeuronNetwork::Learning::BackPropagation b(q);
-		srand(time(NULL));
 		std::vector<std::pair<Shin::NeuronNetwork::Problem, Shin::NeuronNetwork::Solution> > set;
 		set.push_back(std::pair<Shin::NeuronNetwork::Problem, Shin::NeuronNetwork::Solution>(Shin::NeuronNetwork::Problem({0,0}),Shin::NeuronNetwork::Solution({0})));
 		set.push_back(std::pair<Shin::NeuronNetwork::Problem, Shin::NeuronNetwork::Solution>(Shin::NeuronNetwork::Problem({1,0}),Shin::NeuronNetwork::Solution({1})));
@@ -28,7 +28,7 @@ int main()
 		if(test)
 		{
 			std::cerr << "Testing with entropy\n";
-			b.allowEntropy();
+			b.allowNoise();
 		}else
 		{
 			std::cerr << "Testing without entropy\n";

View File

@@ -13,41 +13,29 @@ class X: public Shin::NeuronNetwork::Problem
 int main()
 {
+	srand(time(NULL));
 	for (int test=0;test<2;test++)
 	{
 		Shin::NeuronNetwork::FeedForward q({2,40,1});
 		Shin::NeuronNetwork::Learning::OpticalBackPropagation b(q);
-		b.setLearningCoeficient(0.1);
-		srand(time(NULL));
-		std::vector<Shin::NeuronNetwork::Solution*> s;
-		std::vector<Shin::NeuronNetwork::Problem*> p;
-		s.push_back(new Shin::NeuronNetwork::Solution(std::vector<float>({0})));
-		p.push_back(new X(std::vector<float>({0,0})));
-		s.push_back( new Shin::NeuronNetwork::Solution(std::vector<float>({1})));
-		p.push_back( new X(std::vector<float>({1,0})));
-		s.push_back(new Shin::NeuronNetwork::Solution(std::vector<float>({0})));
-		p.push_back(new X(std::vector<float>({1,1})));
-		s.push_back( new Shin::NeuronNetwork::Solution(std::vector<float>({1})));
-		p.push_back( new X(std::vector<float>({0,1})));
-		b.debugOn();
+		std::vector<std::pair<Shin::NeuronNetwork::Problem, Shin::NeuronNetwork::Solution> > set;
+		set.push_back(std::pair<Shin::NeuronNetwork::Problem, Shin::NeuronNetwork::Solution>(Shin::NeuronNetwork::Problem({0,0}),Shin::NeuronNetwork::Solution({0})));
+		set.push_back(std::pair<Shin::NeuronNetwork::Problem, Shin::NeuronNetwork::Solution>(Shin::NeuronNetwork::Problem({1,0}),Shin::NeuronNetwork::Solution({1})));
+		set.push_back(std::pair<Shin::NeuronNetwork::Problem, Shin::NeuronNetwork::Solution>(Shin::NeuronNetwork::Problem({1,1}),Shin::NeuronNetwork::Solution({0})));
+		set.push_back(std::pair<Shin::NeuronNetwork::Problem, Shin::NeuronNetwork::Solution>(Shin::NeuronNetwork::Problem({0,1}),Shin::NeuronNetwork::Solution({1})));
 		if(test)
 		{
 			std::cerr << "Testing with entropy\n";
-			b.allowEntropy();
+			b.allowNoise();
 		}else
 		{
 			std::cerr << "Testing without entropy\n";
 		}
+		b.setLearningCoeficient(0.1);
 		for(int j=0;;j++)
 		{
-			double err=b.teachSet(p,s);
+			double err=b.teachSet(set);
 			if(err <0.3)
 			{
 				// b.setLearningCoeficient(5);
@@ -61,8 +49,8 @@ int main()
 			std::cerr << j << "(" << err <<"):\n";
 			for(int i=0;i<4;i++)
 			{
-				std::cerr << "\t" << i%4 <<". FOR: [" << p[i%4]->operator[](0) << "," <<p[i%4]->operator[](1) << "] res: " <<
-					q.solve(*p[i%4])[0] << " should be " << s[i%4]->operator[](0)<<"\n";
+				std::cerr << "\t" << i%4 <<". FOR: [" << set[i%4].first[0] << "," <<set[i%4].first[1] << "] res: " <<
+					q.solve(set[i%4].first)[0] << " should be " << set[i%4].second[0]<<"\n";
 			}
 		}
 		if(err <0.001)
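
The test's training data is now a single std::vector of Problem/Solution pairs instead of two parallel vectors of raw pointers, which keeps inputs and targets paired by construction and removes the leaked new allocations. The same idea reduced to a compact sketch, with std::vector<float> standing in for the repository's Problem and Solution types:

    #include <utility>
    #include <vector>

    int main()
    {
        // Stand-ins for Problem/Solution; the repository uses its own types.
        using Sample=std::pair<std::vector<float>, std::vector<float>>;
        std::vector<Sample> xorSet={
            {{0,0},{0}}, {{1,0},{1}}, {{1,1},{0}}, {{0,1},{1}},
        };
        // One container: no parallel indices to keep in sync, and no leaks.
        float firstTarget=xorSet[0].second[0];
        (void)firstTarget;
    }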

View File

@@ -71,7 +71,7 @@ int main()
 		if(test==1)
 		{
 			std::cerr << "Testing with entropy ...\n";
-			b.getPropagator().allowEntropy();
+			b.getPropagator().allowNoise();
 		}else
 		{
 			std::cerr << "Testing without entropy ...\n";