Clean up methods and refactor abstract classes

2014-12-11 00:37:58 +01:00
parent ddef0e37a7
commit 2736ede1be
17 changed files with 68 additions and 93 deletions

View File

@@ -24,7 +24,13 @@ FFNeuron& FFLayer::operator[](size_t neuron)
neurons[i]=new FFNeuron(potentials[i],weights[i],sums[i],inputs[i],lambda);
}
}
if(neuron>=layerSize)
throw std::out_of_range("Not so many neurons in layers.");
return *neurons[neuron];
}
FeedForward::FeedForward(std::initializer_list< int > s, double lam): ACyclicNetwork(lam),layers(s.size())
@@ -207,5 +213,9 @@ FFLayer& FeedForward::operator[](size_t l)
ffLayers[i]=new FFLayer(layerSizes[i],potentials[i],weights[i],sums[i],inputs[i],lambda);
}
}
if(l>=layers)
throw std::out_of_range("Not so many layers in network.");
return *ffLayers[l];
}
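
These hunks add explicit bounds checks, so indexing past the last neuron or layer now throws std::out_of_range instead of dereferencing memory that was never allocated. A minimal sketch of the new behaviour (the include path and network shape are illustrative):

#include <iostream>
#include <stdexcept>
#include "FeedForward.h"

int main()
{
    Shin::NeuronNetwork::FeedForward net({2,3,1}); // three layers
    try
    {
        net[5]; // beyond the last layer: now throws instead of reading junk
    }
    catch(const std::out_of_range &e)
    {
        std::cerr << e.what() << "\n"; // the message thrown by operator[]
    }
    return 0;
}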

View File

@@ -55,7 +55,7 @@ namespace NeuronNetwork
class FFLayer: public Layer
{
public:
FFLayer(size_t s, float *p,float **w,float *su,float *in,float lam): neurons(nullptr),layerSize(s),potentials(p),weights(w),sums(su),inputs(in),lambda(lam) {}
FFLayer(size_t s, float *p,float **w,float *su,float *in,float lam): layerSize(s),potentials(p),weights(w),sums(su),inputs(in),lambda(lam) {}
~FFLayer();
FFLayer(const FFLayer &) = delete;
@@ -64,7 +64,7 @@ namespace NeuronNetwork
virtual FFNeuron& operator[](size_t layer) override;
inline virtual size_t size() const override {return layerSize;};
protected:
FFNeuron **neurons;
FFNeuron **neurons=nullptr;
size_t layerSize;
float *potentials;
float **weights;
@@ -77,26 +77,24 @@ namespace NeuronNetwork
{
public:
FeedForward(std::initializer_list<int> s, double lam=Shin::NeuronNetwork::lambda);
virtual ~FeedForward();
FeedForward(const FeedForward &f) = delete; //TODO
FeedForward operator=(const FeedForward &f)=delete;
virtual ~FeedForward();
virtual Solution solve(const Problem& p) override;
virtual size_t size() const override { return layers;};
virtual FFLayer& operator[](size_t l) override;
void setThreads(unsigned t) {threads=t;}
protected:
void solvePart(float *newSolution, size_t begin, size_t end,size_t prevSize, float* sol,size_t layer);
private:
FFLayer **ffLayers=nullptr;
float ***weights=nullptr;
float **potentials=nullptr;
public:
float **sums=nullptr;
float **inputs=nullptr;
private:
size_t *layerSizes=nullptr;
size_t layers;
unsigned threads=1;
};
}
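
The header now uses a C++11 in-class default member initializer (FFNeuron **neurons=nullptr;) instead of repeating neurons(nullptr) in the constructor's initializer list, so every constructor picks up the null default automatically. A minimal sketch of the idiom with a hypothetical class:

// Hypothetical Widget, not from this repository
struct Widget
{
    Widget() {}                 // items is already nullptr here
    Widget(int n): count(n) {}  // no need to repeat items(nullptr)
    int *items=nullptr;         // in-class default applies to all constructors
    int count=0;
};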

View File

@@ -99,11 +99,11 @@ float Shin::NeuronNetwork::Learning::BackPropagation::teach(const Shin::NeuronNe
double error=calculateError(solution,a);
Solution s;
if(entropy)
if(noise)
{
for(size_t i=0;i<solution.size();i++)
{
s.push_back(solution[i]*((double)((100000-entropySize)+(rand()%(entropySize*2+1)))/100000.0));
s.push_back(solution[i]*((double)((100000-noiseSize)+(rand()%(noiseSize*2+1)))/100000.0));
}
propagate(s);
}else
@@ -112,16 +112,4 @@ float Shin::NeuronNetwork::Learning::BackPropagation::teach(const Shin::NeuronNe
}
return error;
}
void Shin::NeuronNetwork::Learning::BackPropagation::setLearningCoeficient(float c)
{
learningCoeficient=c;
}
float Shin::NeuronNetwork::Learning::BackPropagation::correction(float expected, float computed)
{
return expected-computed;
}
}
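
The entropy-to-noise rename also clarifies the unit: noiseSize is in milli-percent, so the default of 500 perturbs each expected output by up to plus or minus 0.5%. A standalone sketch of the same scaling, using <random> rather than rand() (my substitution, not the commit's code):

#include <random>
#include <vector>

// Multiply each target by a factor in [1 - noiseSize/100000, 1 + noiseSize/100000]
std::vector<float> addNoise(const std::vector<float> &solution, int noiseSize)
{
    static std::mt19937 gen{std::random_device{}()};
    std::uniform_int_distribution<int> dist(-noiseSize, noiseSize);
    std::vector<float> noisy;
    noisy.reserve(solution.size());
    for(float v : solution)
        noisy.push_back(v * (1.0f + dist(gen) / 100000.0f));
    return noisy;
}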

View File

@@ -35,19 +35,11 @@ namespace Learning
BackPropagation(const Shin::NeuronNetwork::Learning::BackPropagation&) =delete;
BackPropagation operator=(const Shin::NeuronNetwork::Learning::BackPropagation&) =delete;
virtual void propagate(const Shin::NeuronNetwork::Solution& expectation);
float teach(const Shin::NeuronNetwork::Problem &p,const Solution &solution);
void setLearningCoeficient (float);
void allowEntropy() {entropy=1;}
void setEntropySize(int milipercents) { entropySize=milipercents; }
inline void allowThreading() {allowThreads=1; }
virtual void propagate(const Shin::NeuronNetwork::Solution& expectation);
protected:
virtual float correction(float expected, float computed);
float learningCoeficient=0.4;
bool entropy=0;
bool allowThreads=0;
int entropySize=500;
inline virtual float correction(const float& expected, const float& computed) { return expected - computed;};
float **deltas=nullptr;
};
}
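
With correction() now an inline virtual taking const references and defaulting to expected - computed, subclasses only have to override the error rule itself. A sketch of a custom learner against the new signature (the subclass and its rule are hypothetical; include path as in this commit's headers):

#include "BackPropagation.h"

class CubicBackPropagation : public Shin::NeuronNetwork::Learning::BackPropagation
{
public:
    CubicBackPropagation(Shin::NeuronNetwork::FeedForward &n): BackPropagation(n) {}
protected:
    inline virtual float correction(const float& expected, const float& computed) override
    {
        float d = expected - computed;
        return d * d * d; // sign-preserving, emphasizes large errors (hypothetical rule)
    }
};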

View File

@@ -1,11 +1,6 @@
#include "./OpticalBackPropagation"
Shin::NeuronNetwork::Learning::OpticalBackPropagation::OpticalBackPropagation(FeedForward &n): BackPropagation(n)
{
setEntropySize(100);
}
float Shin::NeuronNetwork::Learning::OpticalBackPropagation::correction(float expected, float computed)
float Shin::NeuronNetwork::Learning::OpticalBackPropagation::correction(const float& expected, const float& computed)
{
register float tmp=(expected-computed);
register float ret=1+exp(tmp*tmp);
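
The hunk is truncated before the return statement, so only the shape of the optical correction is visible: the raw error tmp is paired with an amplifier ret = 1 + exp(tmp*tmp) that grows quickly with the error's magnitude, per the paper linked in the header. A sketch with the deprecated register keyword dropped (it was removed in C++17); the final line is an assumption, not the commit's code:

#include <cmath>

float opticalCorrection(float expected, float computed)
{
    float tmp = expected - computed;      // raw error
    float ret = 1 + std::exp(tmp * tmp);  // amplification factor
    return tmp * ret;                     // ASSUMED combination; not shown above
}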

View File

@@ -1,11 +1,8 @@
#ifndef _OPT_BACK_PROPAGATION_H_
#define _OPT_BACK_PROPAGATION_H_
#include <math.h>
#include <cstddef>
#include "../FeedForward.h"
#include "BackPropagation"
#include "BackPropagation.h"
/*
* http://proceedings.informingscience.org/InSITE2005/P106Otai.pdf
@@ -20,9 +17,9 @@ namespace Learning
class OpticalBackPropagation : public BackPropagation
{
public:
OpticalBackPropagation(FeedForward &n);
inline OpticalBackPropagation(FeedForward &n): BackPropagation(n) {}
protected:
virtual float correction(float expected, float computed) override;
virtual float correction(const float& expected, const float& computed) override;
};
}
}

View File

@@ -1,16 +1,15 @@
#ifndef _QLEARNING_H_
#define _QLEARNING_H_
#include <math.h>
#include <cstddef>
#include <functional>
#include "BackPropagation.h"
#include "OpticalBackPropagation.h"
#include "../Problem.h"
#include "../FeedForward.h"
#include "BackPropagation"
#include "Unsupervised"
#include "Unsupervised.h"
#include "RL/QFunction.h"
#include "OpticalBackPropagation.h"
#include "functional"
/*
* http://www2.econ.iastate.edu/tesfatsi/RLUsersGuide.ICAC2005.pdf
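
The include list is also tidied: extensionless project headers gain their .h suffix, and #include "functional" becomes the standard #include <functional>. The distinction matters, as sketched below: quoted includes search the including file's directory first, while angle brackets go straight to the system and standard paths.

#include <functional>     // standard header: angle brackets, system search path
#include "Unsupervised.h" // project header: quotes, current directory searched first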

View File

@@ -1,11 +1,13 @@
#ifndef _Q_FUNCTION_H_
#define _Q_FUNCTION_H_
#include <map>
#include "../../Solution.h"
#include "../../FeedForward"
#include "../../FeedForward.h"
#include "../BackPropagation.h"
#include "../OpticalBackPropagation.h"
#include <map>
namespace Shin
{
namespace NeuronNetwork

View File

@@ -1,10 +1,4 @@
#include "./Supervised"
Shin::NeuronNetwork::Learning::Supervised::Supervised(Shin::NeuronNetwork::FeedForward& n) :network(n)
{
}
float Shin::NeuronNetwork::Learning::Supervised::calculateError(const Shin::NeuronNetwork::Solution& expectation, const Shin::NeuronNetwork::Solution& solution)
{
register float a=0;

View File

@@ -14,18 +14,33 @@ namespace NeuronNetwork
{
namespace Learning
{
const float LearningCoeficient=0.4;
class Supervised
{
public:
Supervised() =delete;
Supervised(FeedForward &n);
Supervised(FeedForward &n) : network(n) {};
virtual ~Supervised() {};
float calculateError(const Solution &expectation,const Solution &solution);
virtual float teach(const Shin::NeuronNetwork::Problem &p,const Solution &solution)=0;
virtual float teachSet(const std::vector<std::pair<Problem,Solution>> &set) final;
inline virtual void setLearningCoeficient (const float& coef) { learningCoeficient=coef; };
inline virtual void allowThreading() final {allowThreads=1;}
inline virtual void disableThreading() final {allowThreads=0;}
inline virtual void allowNoise() final {noise=1;}
inline virtual void disableNoise() final {noise=0;}
inline virtual void setNoiseSize(const unsigned& milipercents) final { noiseSize=milipercents; }
protected:
FeedForward &network;
float learningCoeficient=Shin::NeuronNetwork::Learning::LearningCoeficient;
bool allowThreads=0;
bool noise=0;
unsigned noiseSize=500;
};
}
}
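
Supervised now centralizes the learning coefficient, threading, and noise switches, giving every supervised learner one consistent configuration surface. A usage sketch against the new interface (include paths and values are illustrative):

#include "FeedForward.h"
#include "Learning/BackPropagation.h"

int main()
{
    Shin::NeuronNetwork::FeedForward net({2,3,1});
    Shin::NeuronNetwork::Learning::BackPropagation teacher(net);
    teacher.setLearningCoeficient(0.1f); // spelling as in the codebase
    teacher.allowNoise();                // perturb expected outputs while teaching
    teacher.setNoiseSize(100);           // milli-percent: 100 -> +-0.1%
    teacher.allowThreading();
    return 0;
}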

View File

@@ -1,6 +1 @@
#include "./Unsupervised"
Shin::NeuronNetwork::Learning::Unsupervised::Unsupervised(Shin::NeuronNetwork::FeedForward& n) :network(n)
{
}

View File

@@ -16,9 +16,10 @@ namespace Learning
class Unsupervised
{
public:
Unsupervised() =delete;
Unsupervised(FeedForward &n);
Unsupervised(FeedForward &n): network(n) {};
virtual ~Unsupervised() {};
Unsupervised() =delete;
protected:
FeedForward &network;
};

View File

@@ -34,9 +34,10 @@ namespace NeuronNetwork
virtual Layer& operator[](size_t layer)=0;
inline float getLambda() const {return lambda;}
inline virtual void setThreads(const unsigned&t) final {threads=t;}
protected:
float lambda;
private:
unsigned threads=1;
};
class ACyclicNetwork : public Network

Binary file not shown.

View File

@@ -13,13 +13,13 @@ class X: public Shin::NeuronNetwork::Problem
int main()
{
srand(time(NULL));
for (int test=0;test<2;test++)
{
Shin::NeuronNetwork::FeedForward q({2,3,1});
Shin::NeuronNetwork::Learning::BackPropagation b(q);
srand(time(NULL));
std::vector<std::pair<Shin::NeuronNetwork::Problem, Shin::NeuronNetwork::Solution> > set;
set.push_back(std::pair<Shin::NeuronNetwork::Problem, Shin::NeuronNetwork::Solution>(Shin::NeuronNetwork::Problem({0,0}),Shin::NeuronNetwork::Solution({0})));
set.push_back(std::pair<Shin::NeuronNetwork::Problem, Shin::NeuronNetwork::Solution>(Shin::NeuronNetwork::Problem({1,0}),Shin::NeuronNetwork::Solution({1})));
@@ -28,7 +28,7 @@ int main()
if(test)
{
std::cerr << "Testing with entropy\n";
b.allowEntropy();
b.allowNoise();
}else
{
std::cerr << "Testing without entropy\n";

View File

@@ -13,41 +13,29 @@ class X: public Shin::NeuronNetwork::Problem
int main()
{
srand(time(NULL));
for (int test=0;test<2;test++)
{
Shin::NeuronNetwork::FeedForward q({2,40,1});
Shin::NeuronNetwork::Learning::OpticalBackPropagation b(q);
b.setLearningCoeficient(0.1);
srand(time(NULL));
std::vector<Shin::NeuronNetwork::Solution*> s;
std::vector<Shin::NeuronNetwork::Problem*> p;
s.push_back(new Shin::NeuronNetwork::Solution(std::vector<float>({0})));
p.push_back(new X(std::vector<float>({0,0})));
s.push_back( new Shin::NeuronNetwork::Solution(std::vector<float>({1})));
p.push_back( new X(std::vector<float>({1,0})));
s.push_back(new Shin::NeuronNetwork::Solution(std::vector<float>({0})));
p.push_back(new X(std::vector<float>({1,1})));
s.push_back( new Shin::NeuronNetwork::Solution(std::vector<float>({1})));
p.push_back( new X(std::vector<float>({0,1})));
b.debugOn();
std::vector<std::pair<Shin::NeuronNetwork::Problem, Shin::NeuronNetwork::Solution> > set;
set.push_back(std::pair<Shin::NeuronNetwork::Problem, Shin::NeuronNetwork::Solution>(Shin::NeuronNetwork::Problem({0,0}),Shin::NeuronNetwork::Solution({0})));
set.push_back(std::pair<Shin::NeuronNetwork::Problem, Shin::NeuronNetwork::Solution>(Shin::NeuronNetwork::Problem({1,0}),Shin::NeuronNetwork::Solution({1})));
set.push_back(std::pair<Shin::NeuronNetwork::Problem, Shin::NeuronNetwork::Solution>(Shin::NeuronNetwork::Problem({1,1}),Shin::NeuronNetwork::Solution({0})));
set.push_back(std::pair<Shin::NeuronNetwork::Problem, Shin::NeuronNetwork::Solution>(Shin::NeuronNetwork::Problem({0,1}),Shin::NeuronNetwork::Solution({1})));
if(test)
{
std::cerr << "Testing with entropy\n";
b.allowEntropy();
b.allowNoise();
}else
{
std::cerr << "Testing without entropy\n";
}
b.setLearningCoeficient(0.1);
for(int j=0;;j++)
{
double err=b.teachSet(p,s);
double err=b.teachSet(set);
if(err <0.3)
{
// b.setLearningCoeficient(5);
@@ -61,8 +49,8 @@ int main()
std::cerr << j << "(" << err <<"):\n";
for(int i=0;i<4;i++)
{
std::cerr << "\t" << i%4 <<". FOR: [" << p[i%4]->operator[](0) << "," <<p[i%4]->operator[](1) << "] res: " <<
q.solve(*p[i%4])[0] << " should be " << s[i%4]->operator[](0)<<"\n";
std::cerr << "\t" << i%4 <<". FOR: [" << set[i%4].first[0] << "," <<set[i%4].first[1] << "] res: " <<
q.solve(set[i%4].first)[0] << " should be " << set[i%4].second[0]<<"\n";
}
}
if(err <0.001)
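
Beyond switching to teachSet(set), the refactor fixes ownership: the old test filled two parallel vectors with new-ed Problem and Solution pointers that were never deleted, while the pair-based set holds values that clean themselves up. A condensed, self-contained version of the new pattern (include paths are illustrative):

#include <utility>
#include <vector>
#include "FeedForward.h"
#include "Learning/BackPropagation.h"

int main()
{
    using namespace Shin::NeuronNetwork;
    FeedForward q({2,3,1});
    Learning::BackPropagation b(q);
    std::vector<std::pair<Problem,Solution>> set;
    set.push_back({Problem({0,0}), Solution({0})});
    set.push_back({Problem({0,1}), Solution({1})});
    set.push_back({Problem({1,0}), Solution({1})});
    set.push_back({Problem({1,1}), Solution({0})});
    while(b.teachSet(set) > 0.001) {} // iterate until the XOR error is small
    return 0;
}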

View File

@@ -71,7 +71,7 @@ int main()
if(test==1)
{
std::cerr << "Testing with entropy ...\n";
b.getPropagator().allowEntropy();
b.getPropagator().allowNoise();
}else
{
std::cerr << "Testing without entropy ...\n";