IO class as parent of Solution and Problem

2014-12-03 18:58:28 +01:00
parent 41c92cc3c0
commit 993b4d3f04
14 changed files with 134 additions and 62 deletions
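In outline, the commit hoists the float-vector storage and accessors that Solution (and, per the title, Problem) each carried into a common IO base class. A minimal sketch of the resulting hierarchy; Problem's declaration is an assumption based on the commit title, since Problem.h is not part of this excerpt:

    #include <vector>
    #include <cstddef>

    // Condensed sketch of the hierarchy after this commit (full definitions
    // appear in the diffs below); Problem's shape is assumed from the title.
    class IO
    {
    public:
        IO() : data() {}
        IO(std::vector<float>& d) : data(d) {}
        float operator[](size_t pos) const { return data[pos]; }
        size_t size() const { return data.size(); }
    protected:
        std::vector<float> data; // shared storage formerly duplicated in each subclass
    };

    class Solution : public IO // per src/NeuronNetwork/Solution.h below
    {
    public:
        Solution(std::vector<float>& s) : IO(s) {}
    };

    class Problem : public IO // assumed: Problem.h is not shown in this excerpt
    {
    public:
        Problem(std::vector<float>& p) : IO(p) {}
    };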

@@ -123,17 +123,18 @@ void FeedForwardNetworkQuick::solvePart(float *newSolution, register size_t begi
 Solution FeedForwardNetworkQuick::solve(const Problem& p)
 {
     std::vector<float> solution(p);
-    register float* sol=sums[0];//new bool[solution.size()];
+    register float* sol=sums[1];
     for(size_t i=0;i<solution.size();i++)
     {
-        sums[0][i+1]=solution[i];
+        sol[i+1]=solution[i];
     }
     register size_t prevSize=layerSizes[0];
-    for(register size_t i=0;i<layers;i++)
+    for(register size_t i=1;i<layers;i++)
     {
-        float* newSolution= sums[i+1];//new bool[layerSizes[i]];
+        float* newSolution= sums[i+1];
         if(threads > 1 && (layerSizes[i] > 700 ||prevSize > 700)) // 700 is a guess at the size where spawning a thread starts to pay off
         {
             std::vector<std::thread> th;
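The `> 700` test above gates whether solve() fans a layer's work out to worker threads; the hunk header names solvePart as the per-range worker. A minimal sketch of that split pattern, with solvePart's body and the exact chunking treated as assumptions, since neither is shown here:

    #include <thread>
    #include <vector>
    #include <cstddef>

    // Hypothetical stand-in for FeedForwardNetworkQuick::solvePart: computes
    // outputs for neurons [begin, end) of one layer.
    static void solvePart(float* newSolution, size_t begin, size_t end)
    {
        for (size_t i = begin; i < end; ++i)
            newSolution[i] = 0.0f; // placeholder for the real per-neuron work
    }

    // Split one layer of `layerSize` neurons across `threads` workers,
    // mirroring the pattern the diff gates behind the `> 700` check.
    static void solveLayer(float* newSolution, size_t layerSize, unsigned threads)
    {
        std::vector<std::thread> th;
        size_t chunk = layerSize / threads;
        for (unsigned t = 0; t < threads; ++t)
        {
            size_t begin = t * chunk;
            size_t end = (t + 1 == threads) ? layerSize : begin + chunk;
            th.emplace_back(solvePart, newSolution, begin, end);
        }
        for (std::thread& t : th)
            t.join(); // wait for every chunk before using the layer's outputs
    }

    int main()
    {
        std::vector<float> out(1000);
        solveLayer(out.data(), out.size(), 4);
        return 0;
    }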

src/NeuronNetwork/IO Symbolic link

@@ -0,0 +1 @@
+./IO.h

src/NeuronNetwork/IO.cpp Normal file

@@ -0,0 +1,52 @@
+#include "./IO"
+Shin::NeuronNetwork::IO::IO():data()
+{
+}
+Shin::NeuronNetwork::IO::IO(std::vector< float >& d):data(d)
+{
+}
+Shin::NeuronNetwork::IO::IO(const Shin::NeuronNetwork::IO& old): data(old.data)
+{
+}
+Shin::NeuronNetwork::IO::~IO()
+{
+}
+Shin::NeuronNetwork::IO Shin::NeuronNetwork::IO::operator+(IO &r)
+{
+    Shin::NeuronNetwork::IO tmp;
+    for(float a:this->data)
+    {
+        tmp.data.push_back(a);
+    }
+    for(float a:r.data)
+    {
+        tmp.data.push_back(a);
+    }
+    return tmp;
+}
+Shin::NeuronNetwork::IO::operator std::vector<float>&()
+{
+    return data;
+}
+float Shin::NeuronNetwork::IO::operator[](size_t pos) const
+{
+    return data[pos];
+}
+size_t Shin::NeuronNetwork::IO::size() const
+{
+    return data.size();
+}
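Worth noting from the definitions above: IO::operator+ concatenates the two data vectors rather than adding them element-wise. A small usage sketch, assuming the repository's IO header is on the include path:

    #include "IO.h"
    #include <iostream>
    #include <vector>

    int main()
    {
        std::vector<float> a{1.0f, 2.0f};
        std::vector<float> b{3.0f};
        Shin::NeuronNetwork::IO x(a), y(b);
        Shin::NeuronNetwork::IO z = x + y;            // concatenation: {1, 2, 3}
        std::cout << z.size() << " " << z[2] << "\n"; // prints "3 3"
        return 0;
    }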

src/NeuronNetwork/IO.h Normal file

@@ -0,0 +1,28 @@
+#ifndef _NN_IO_H_
+#define _NN_IO_H_
+#include <vector>
+#include <cstddef>
+namespace Shin
+{
+namespace NeuronNetwork
+{
+    class IO
+    {
+    public:
+        IO();
+        IO(std::vector<float> &d);
+        IO(const IO &old);
+        virtual ~IO();
+        IO operator+(IO &r);
+        virtual operator std::vector<float>&() final; // TODO: const
+        float operator[] (size_t pos) const;
+        size_t size() const;
+    protected:
+        std::vector<float> data;
+    private:
+    };
+}
+}
+#endif
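The TODO on the conversion operator flags that it hands out a mutable reference even from contexts that only read. One way to resolve it, offered purely as a suggestion and not as part of this commit, is a const-qualified overload alongside the mutable one:

    #include <vector>

    // Sketch only: a const-correct pair of conversion operators for an IO-like class.
    class IOSketch
    {
    public:
        operator std::vector<float>&() { return data; }             // mutable access
        operator const std::vector<float>&() const { return data; } // read-only access
    protected:
        std::vector<float> data;
    };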

@@ -18,6 +18,7 @@ Shin::NeuronNetwork::Learning::BackPropagation::~BackPropagation()
 void Shin::NeuronNetwork::Learning::BackPropagation::propagate(const Shin::NeuronNetwork::Solution& expectation)
 {
     if(deltas==nullptr)
     {
         deltas=new float*[network.size()];
@@ -34,7 +35,7 @@ void Shin::NeuronNetwork::Learning::BackPropagation::propagate(const Shin::Neuro
             *network[network.size()-1]->operator[](j)->derivatedOutput();
     }
-    for(int i=(int)network.size()-2;i>=0;i--)
+    for(int i=(int)network.size()-2;i>0;i--)
     {
         if(allowThreads)
         {
@@ -75,23 +76,18 @@ void Shin::NeuronNetwork::Learning::BackPropagation::propagate(const Shin::Neuro
             }
         }
     }
-    for(size_t i=0;i<network.size();i++)
+    for(size_t i=1;i<network.size();i++)
     {
-        size_t max;
-        if(i==0)
-            max=network[i]->size();
-        else
-            max=network[i-1]->size();
+        size_t max=network[i-1]->size();
         for(size_t j=1;j<network[i]->size();j++)
         {
            network[i]->operator[](j)->setWeight(0,network[i]->operator[](j)->getWeight(0)+deltas[i][j]*learningCoeficient);
            for(size_t k=1;k<max;k++)
            {
                network[i]->operator[](j)->setWeight(k,
-                   network[i]->operator[](j)->getWeight(k)+learningCoeficient* deltas[i][j]*
-                   (i==0? network.sums[0][k]:network[i-1]->operator[](k)->output()));
+                   network[i]->operator[](j)->getWeight(k)+learningCoeficient* deltas[i][j]*network[i-1]->operator[](k)->output());
            }
         }
     }
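The net effect of this hunk: with the input vector preloaded into layer 0's sums during solve(), the update loop can start at layer 1 and read every input uniformly from layer i-1, so the old `i==0 ? network.sums[0][k] : ...` special case disappears. A paraphrase of the per-layer update under illustrative names (weight 0 is the bias, as in the code above):

    #include <cstddef>
    #include <vector>

    // Sketch of the simplified backpropagation weight update; `eta` is the
    // learning coefficient and index 0 is the bias weight.
    void updateLayer(std::vector<std::vector<float>>& w,  // w[j][k]: weight k of neuron j
                     const std::vector<float>& delta,     // deltas for this layer
                     const std::vector<float>& prevOut,   // outputs of layer i-1
                     float eta)
    {
        for (size_t j = 1; j < w.size(); j++)
        {
            w[j][0] += eta * delta[j];                    // bias term
            for (size_t k = 1; k < prevOut.size(); k++)
                w[j][k] += eta * delta[j] * prevOut[k];   // uniform read from layer i-1
        }
    }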

@@ -0,0 +1 @@
+./QFunction.h

@@ -0,0 +1,6 @@
+#include "./QFunction"
+Shin::NN::RL::QFunction::QFunction()
+{
+}

@@ -0,0 +1,18 @@
+namespace Shin
+{
+namespace NN
+{
+namespace RL
+{
+    class QFunction
+    {
+    public:
+        QFunction();
+    protected:
+    private:
+    };
+}
+}
+}

@@ -1,7 +1,7 @@
 OBJFILES= Neuron.o Network.o FeedForward.o FeedForwardQuick.o\
     Learning/Supervised.o Learning/BackPropagation.o Learning/OpticalBackPropagation.o\
     Learning/Unsupervised.o Learning/Reinforcement.o\
-    Solution.o Problem.o
+    Solution.o Problem.o ./IO.o
 LINKFILES= ../sse_mathfun.o

@@ -2,31 +2,7 @@
 using namespace Shin::NeuronNetwork;
-Solution::Solution(std::vector<float>sol):solution(sol)
+Solution::Solution(std::vector<float>&sol):IO(sol)
 {
 }
-Solution::Solution(const Problem& p):solution()
-{
-    std::vector<float> q(p);
-    for(float s:q)
-    {
-        solution.push_back(s);
-    }
-}
-float Solution::operator[](size_t pos) const
-{
-    return solution[pos];
-}
-size_t Solution::size() const
-{
-    return solution.size();
-}
-Solution::operator std::vector<float>()
-{
-    return solution;
-}

@@ -1,24 +1,18 @@
 #ifndef _SOL_H_
 #define _SOL_H_
-#include <cstddef>
-#include <vector>
 #include "Problem"
+#include "IO.h"
 namespace Shin
 {
 namespace NeuronNetwork
 {
-    class Solution
+    class Solution : public IO
     {
     public:
         Solution(const Problem& p);
-        Solution(std::vector<float> solution);
-        size_t size() const;
-        float operator[] (size_t pos) const;
-        operator std::vector<float>();
-    protected:
-        std::vector<float> solution;
+        Solution(std::vector<float> &solution);
     };
 }
 }
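After the refactor Solution keeps only its constructors; size(), operator[], and the vector conversion all come from IO. A quick usage sketch, assuming the repository's extensionless include symlinks as used elsewhere in this commit:

    #include "Solution"
    #include <iostream>
    #include <vector>

    int main()
    {
        std::vector<float> v{0.5f, 1.5f};
        Shin::NeuronNetwork::Solution s(v);
        // Both calls resolve to the inherited IO members.
        std::cout << s.size() << " " << s[1] << "\n"; // prints "2 1.5"
        return 0;
    }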

@@ -33,7 +33,7 @@ int main()
     s.push_back(Shin::NeuronNetwork::Solution(std::vector<float>({1})));
     p.push_back(X(std::vector<float>({1,1})));
-    Shin::NeuronNetwork::FeedForwardNetworkQuick q({2,4,1});
+    Shin::NeuronNetwork::FeedForwardNetworkQuick q({2,2,4,1});
     Shin::NeuronNetwork::Learning::BackPropagation b(q);
     b.debugOn();

@@ -22,7 +22,7 @@ int main()
     for (int test=0;test<2;test++)
     {
-        Shin::NeuronNetwork::FeedForwardNetworkQuick q({2,40,1});
+        Shin::NeuronNetwork::FeedForwardNetworkQuick q({2,3,1});
         Shin::NeuronNetwork::Learning::BackPropagation b(q);
         srand(time(NULL));
@@ -49,7 +49,7 @@ int main()
         {
             std::cerr << "Testing without entropy\n";
         }
-        b.setLearningCoeficient(0.1);//8);
+        b.setLearningCoeficient(20);//8);
         for(int j=0;;j++)
         {
             double err=b.teachSet(p,s);

@@ -23,19 +23,19 @@ int main()
     srand(time(NULL));
     for (int test=0;test<3;test++)
     {
-        Shin::NeuronNetwork::FeedForwardNetworkQuick q({2,6,1});
+        Shin::NeuronNetwork::FeedForwardNetworkQuick q({2,4,1});
         Shin::NeuronNetwork::Learning::Reinforcement b(q);
-        b.setPropagator(new Shin::NeuronNetwork::Learning::OpticalBackPropagation(q));
-        b.getPropagator().setLearningCoeficient(0.9);
-        b.getPropagator().allowEntropy();
-        double targetQuality =1.7;
+        //b.setPropagator(new Shin::NeuronNetwork::Learning::OpticalBackPropagation(q));
+        b.getPropagator().setLearningCoeficient(3);
+        //b.getPropagator().allowEntropy();
+        double targetQuality =1;
         if(test==2)
         {
             targetQuality =1.62;
             std::cerr << "Testing with OBP ...\n";
             b.setPropagator(new Shin::NeuronNetwork::Learning::OpticalBackPropagation(q));
-            b.getPropagator().setLearningCoeficient(3);
+            b.getPropagator().setLearningCoeficient(0.5);
         }
         b.setQualityFunction(
             [](const Shin::NeuronNetwork::Problem &pr,const Shin::NeuronNetwork::Solution &s)->float
@@ -55,15 +55,15 @@ int main()
             if(expect==0)
             {
-                expect=0.33-s[0];
+                expect=0.1-abs(s[0]);
             }else
             {
-                expect=s[0]-0.67;
+                expect=s[0]-0.9;
             }
             // std::cerr << " returning " << expect*5.0 << "\n";
-            return expect*9.0;
+            return expect*19.0;
         });
     std::vector<Shin::NeuronNetwork::Problem*> p;
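The reshaped quality lambda tightens the target band and steepens the payout: reading abs() as a floating-point absolute value, an expected output of 0 with s[0] = 0.05 now scores (0.1 − 0.05) × 19 = 0.95, where the old shaping gave (0.33 − 0.05) × 9 = 2.52, and anything outside the ±0.1 band goes sharply negative. A worked check of the new branch:

    #include <cmath>
    #include <iostream>

    // Worked check of the expect==0 branch of the new quality function:
    // quality = (0.1 - |s0|) * 19.
    int main()
    {
        for (float s0 : {0.05f, 0.3f})
            std::cout << s0 << " -> " << (0.1f - std::fabs(s0)) * 19.0f << "\n";
        // prints: 0.05 -> 0.95
        //         0.3 -> -3.8
        return 0;
    }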
@@ -86,7 +86,6 @@ int main()
     // for(int i=0;i < 5;i++)
     {
         double err=b.learnSet(p);
-        if(i%100000==0)
         srand(time(NULL));
         if(i%200000==0 || err > targetQuality)