BackPropagation works!
@@ -22,7 +22,7 @@ Solution FeedForwardNetwork::solve(const Problem& p)

const Layer* FeedForwardNetwork::operator[](int layer)
{
return layers[layer];
return layers[layer];
}

void FeedForwardNetwork::addLayer(int neurons)

@@ -1,5 +1,5 @@
#ifndef _S_NN_FF_H_
#define _S_NN_FFs_H_
#define _S_NN_FF_H_

#include "Problem"
#include "Solution"

@@ -34,7 +34,7 @@ namespace NeuronNetwork
//inline FeedForwardNetwork(std::vector<int> q);
~FeedForwardNetwork();

virtual Solution solve(const Problem& p)override;
virtual Solution solve(const Problem& p) override;
unsigned size() {return layers.size();}
const Layer* operator[](int layer);
protected:

@@ -2,6 +2,31 @@

using namespace Shin::NeuronNetwork;

FFLayer::~FFLayer()
{
if(neurons!=nullptr)
{
for(size_t i=0;i<layerSize;i++)
{
delete neurons[i];
}
delete[] neurons;
}
}

FFNeuron* FFLayer::operator[](int neuron)
{
if(neurons==nullptr)
{
neurons=new FFNeuron*[layerSize];
for(size_t i=0;i<layerSize;i++)
{
neurons[i]=new FFNeuron(&potentials[i],weights[i],&sums[i]);
}
}
return neurons[neuron];
}

FeedForwardNetworkQuick::~FeedForwardNetworkQuick()
{
if(weights != nullptr)

@@ -14,54 +39,73 @@ FeedForwardNetworkQuick::~FeedForwardNetworkQuick()
}
delete[] weights[i];
delete[] potentials[i];
delete[] sums[i];
}
delete[] sums[layers];
delete[] weights;
delete[] potentials;
delete[] layerSizes;
delete[] sums;
}
if(ffLayers !=nullptr)
{
for(size_t i=0;i<layers;i++)
{
delete ffLayers[i];
}
delete[] ffLayers;
}
}

Solution FeedForwardNetworkQuick::solve(const Problem& p)
{
std::vector<bool> solution=p;
bool* sol=new bool[solution.size()];
std::vector<bool> solution(p);
register double* sol=sums[0];//new bool[solution.size()];

for(size_t i=0;i<solution.size();i++)
{
sol[i]=solution[i];
sol[i+1]=solution[i];
}

size_t prevSize=1;
for(size_t i=0;i<layers;i++)
register size_t prevSize=layerSizes[0];
for(register size_t i=0;i<layers;i++)
{
bool* newSolution= new bool[layerSizes[i]];
for(size_t j=0;j<layerSizes[i];j++)
double* newSolution= sums[i+1];//new bool[layerSizes[i]];
for(register size_t j=1;j<layerSizes[i];j++)
{
double q;
if(i==0)
register double q=sol[0]*weights[i][j][0];
for(register size_t k=1;k<prevSize;k++)
{
q=sol[j]*weights[i][j][0];
}else
{
q=0;
for(size_t k=0;k<prevSize;k++)
if(i==0)
{
q+=sol[k]*weights[i][j][k];
}else
{
q+=(1.0/(1.0+exp(-0.5*sol[k])))*weights[i][j][k];
}
}
newSolution[j]=q >= potentials[i][j]?1:0;
newSolution[j]=q;
}

prevSize=layerSizes[i];
delete[] sol;
sol=newSolution;
}

std::vector<bool> ret;
for(size_t i=0;i<prevSize;i++)
std::vector<double> ret;
for(size_t i=1;i<prevSize;i++)
{
ret.push_back(sol[i]);
ret.push_back((1.0/(1.0+exp(-0.5*sol[i]))));
}
delete[] sol;
return ret;
}

FFLayer* FeedForwardNetworkQuick::operator[](int l)
{
if(ffLayers==nullptr)
{
ffLayers=new FFLayer*[layers];
for(size_t i=0;i<layers;i++)
{
ffLayers[i]=new FFLayer(layerSizes[i],potentials[i],weights[i],sums[i+1]);
}
}
return ffLayers[l];
}

@@ -1,16 +1,17 @@
#ifndef _S_NN_FF_H_
#define _S_NN_FFs_H_
#ifndef _S_NN_FFQ_H_
#define _S_NN_FFQ_H_

#include "Problem"
#include "Solution"
#include "Neuron"
#include "Network"
#include "FeedForward"

#include <cstdarg>
#include <vector>
#include <initializer_list>

#include <iostream>
#include <math.h>

namespace Shin
{
namespace NeuronNetwork

@@ -21,28 +22,43 @@ namespace NeuronNetwork
FFNeuron() = delete;
FFNeuron(const FFNeuron&) = delete;
FFNeuron& operator=(const FFNeuron&) = delete;
FFNeuron(double *pot, double *w):potential(pot),weights(w) { }
FFNeuron(double *pot, double *w, double*s):potential(pot),weights(w),sum(s) { }

double getPotential() {return *potential;}
void setPotential(double p) { std::cout <<"S"; *potential=p;}
void setPotential(double p) { *potential=p;}
double getWeight(unsigned int i ) { return weights[i];}
void setWeight(unsigned int i,double p) { weights[i]=p; }
inline double output()
{
return 1.0/(1.0+(exp(-0.5*input())));
return input();
// register double tmp=;
// return NAN==tmp?0:tmp;
/* > *potential? 1 :0;*/ }
inline double input() { return *sum; }
inline double derivatedOutput() { return output()*(1.0-output()); };
protected:
double *potential;
double *weights;
private:
double *sum;
private:
};

class FFLayer//: public Layer
{
public:
FFLayer(size_t s, double *p,double **w): layerSize(s),potentials(p),weights(w) {}
FFNeuron* operator[](int neuron) const { return new FFNeuron(&potentials[neuron],weights[neuron]); };
int size() const {return layerSize;};
FFLayer(const FFLayer &) =delete;
FFLayer operator=(const FFLayer &) = delete;
FFLayer(size_t s, double *p,double **w,double *su): neurons(nullptr),layerSize(s),potentials(p),weights(w),sums(su) {}
~FFLayer();
FFNeuron* operator[](int neuron);
size_t size() const {return layerSize;};
protected:
FFNeuron **neurons;
size_t layerSize;
double *potentials;
double **weights;
double *sums;
};

class FeedForwardNetworkQuick:public ACyclicNetwork

@@ -50,42 +66,54 @@ namespace NeuronNetwork
public:
FeedForwardNetworkQuick(const FeedForwardNetworkQuick &f) = delete; //TODO
FeedForwardNetworkQuick operator=(const FeedForwardNetworkQuick &f)=delete;
template<typename... Args>inline FeedForwardNetworkQuick(std::initializer_list<int> s):weights(nullptr),potentials(nullptr),layerSizes(nullptr),layers(s.size())
template<typename... Args>inline FeedForwardNetworkQuick(std::initializer_list<int> s):ffLayers(nullptr),weights(nullptr),potentials(nullptr),sums(nullptr),layerSizes(nullptr),layers(s.size())
{
weights= new double**[s.size()];
potentials= new double*[s.size()];
layerSizes= new size_t[s.size()];
sums= new double*[s.size()+1];
int i=0;
int prev_size=1;
for(int layeSize:s) // TODO rename
{
if(i==0)
layeSize+=1;
if(i==0)
{
prev_size=layeSize;

sums[0]= new double[layeSize];
sums[0][0]=1.0;
}
layerSizes[i]=layeSize;
weights[i]= new double*[layeSize];
potentials[i]= new double[layeSize];
for (int j=0;j<layeSize;j++)
sums[i+1]= new double[layeSize];
potentials[i][0]=1.0;
sums[i+1][0]=1.0;
for (int j=1;j<layeSize;j++)
{
potentials[i][j]=1;
potentials[i][j]=1.0;
weights[i][j]= new double[prev_size];
for(int k=0;k<prev_size;k++)
{
weights[i][j][k]=1;
weights[i][j][k]=0.5-((double)(rand()%1000))/1000.0;
}
}
i++;
prev_size=layeSize;
}
}
~FeedForwardNetworkQuick();
virtual ~FeedForwardNetworkQuick();
virtual Solution solve(const Problem& p) override;
unsigned size() { return layers;}
FFLayer* operator[](int l) { return new FFLayer(layerSizes[l],potentials[l],weights[l]); }
FFLayer* operator[](int l);

protected:
private:
FFLayer **ffLayers;
double ***weights;
double **potentials;
public:
double **sums;
size_t *layerSizes;
size_t layers;
};

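A note on the indexing convention the rewritten constructor and solve() appear to rely on (my reading of the diff, not text from the commit): slot 0 of every sums[] row is kept as a constant bias input of 1.0, so the per-neuron loops start at j=1 and weights[i][j][0] effectively acts as the bias weight. A minimal standalone sketch of that layout, with hypothetical names:

#include <cstdio>

int main()
{
    // Two rows of width 3; index 0 of each row is a fixed bias unit, mirroring sums[i][0] = 1.0.
    double sums[2][3]    = {{1.0, 0.2, 0.7}, {1.0, 0.0, 0.0}};
    // Row j holds the incoming weights of neuron j; column 0 multiplies the bias unit.
    double weights[3][3] = {{0.0, 0.0, 0.0}, {0.1, 0.4, -0.3}, {0.2, -0.1, 0.5}};
    for (int j = 1; j < 3; ++j)          // neuron 0 is the bias slot, so start at 1
    {
        double q = 0.0;
        for (int k = 0; k < 3; ++k)      // k == 0 picks up the bias because sums[0][0] == 1.0
            q += sums[0][k] * weights[j][k];
        sums[1][j] = q;                  // raw weighted sum, before any squashing
        std::printf("neuron %d: weighted sum = %f\n", j, q);
    }
    return 0;
}
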
1  src/NeuronNetwork/Learning/BackPropagation  Symbolic link
@@ -0,0 +1 @@
./BackPropagation.h

87  src/NeuronNetwork/Learning/BackPropagation.cpp  Normal file
@@ -0,0 +1,87 @@
#include "./BackPropagation"

Shin::NeuronNetwork::Learning::BackPropagation::BackPropagation(FeedForwardNetworkQuick &n): Supervised(n)
{

}

double Shin::NeuronNetwork::Learning::BackPropagation::calculateError(const Shin::NeuronNetwork::Solution& expectation, const Shin::NeuronNetwork::Solution& solution)
{
register double a=0;
for (size_t i=0;i<expectation.size();i++)
{
a+=pow(expectation[i]-solution[i],2)/2;
}
return a;
}

const double LAMBDA = 0.5;

void Shin::NeuronNetwork::Learning::BackPropagation::propagate(const Shin::NeuronNetwork::Solution& expectation)
{
double **deltas;
deltas=new double*[network.size()];
for(int i=(int)network.size()-1;i>=0;i--)
{
std::cerr << i << "XXXXXXXXXXXXXX\n";
deltas[i]=new double[network[i]->size()];
deltas[i][0]=0.0;
if(i==(int)network.size()-1)
{
for(size_t j=1;j<network[i]->size();j++)
{
deltas[i][j]= (expectation[j-1]-network[i]->operator[](j)->output())*network[i]->operator[](j)->derivatedOutput();
// std::cerr << "X "<< deltas[i][j] <" Z ";
}
}else
{
for(size_t j=1;j<network[i]->size();j++)
{
register double deltasWeight = 0;
for(size_t k=1;k<network[i+1]->size();k++)
{
deltasWeight+=deltas[i+1][k]*network[i+1]->operator[](k)->getWeight(j);
}
deltas[i][j]=deltasWeight*network[i]->operator[](j)->derivatedOutput();
}
}
}

for(size_t i=0;i<network.size();i++)
{
size_t max;
if(i==0)
max=network[i]->size();
else
max=network[i-1]->size();

for(size_t j=1;j<network[i]->size();j++)
{
network[i]->operator[](j)->setWeight(0,network[i]->operator[](j)->getWeight(0)+0.5*deltas[i][j]);
for(size_t k=1;k<max;k++)
{
network[i]->operator[](j)->setWeight(k,
network[i]->operator[](j)->getWeight(k)+learningCoeficient* deltas[i][j]*
(i==0? network.sums[0][k]:(double)network[i-1]->operator[](k)->output()));
}
}
}
for(size_t i=0;i<network.size();i++)
{
delete[] deltas[i];
}
delete[] deltas;
}

double Shin::NeuronNetwork::Learning::BackPropagation::teach(const Shin::NeuronNetwork::Problem& p, const Shin::NeuronNetwork::Solution& solution)
{
Shin::NeuronNetwork::Solution a=network.solve(p);
double error=calculateError(solution,a);

propagate(solution);

// std::cerr << "error: " << error << "\n";
return error;
}

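The update performed by propagate() has the usual shape of gradient descent for a logistic unit: the output-layer delta is (target - output) * output * (1 - output), hidden deltas are the weighted sum of downstream deltas times the same derivative, and each weight moves by learningCoeficient * delta * input activation. A minimal single-neuron sketch of that rule (hypothetical names, plain logistic; the 0.5 factor in the commit's exponent only rescales the effective learning rate):

#include <cmath>
#include <cstdio>

static double sigmoid(double x)         { return 1.0 / (1.0 + std::exp(-x)); }
static double derivFromOutput(double o) { return o * (1.0 - o); }   // same form as derivatedOutput()

int main()
{
    double w = 0.3, bias = 0.0;                           // one weight plus a bias weight
    const double input = 1.0, target = 1.0, eta = 0.8;    // eta plays the role of learningCoeficient
    for (int step = 0; step < 5; ++step)
    {
        double out   = sigmoid(w * input + bias);
        double delta = (target - out) * derivFromOutput(out); // output-layer delta
        w    += eta * delta * input;                           // w += eta * delta * activation
        bias += eta * delta;                                   // the bias input is the constant 1.0
        std::printf("step %d: out=%f w=%f bias=%f\n", step, out, w, bias);
    }
    return 0;
}
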
36  src/NeuronNetwork/Learning/BackPropagation.h  Normal file
@@ -0,0 +1,36 @@
#ifndef _BACK_PROPAGATION_H_
#define _BACK_PROPAGATION_H_

#include <math.h>
#include <cstddef>

#include "../Solution.h"
#include "../FeedForwardQuick.h"
#include "Supervised"

/*
*
*
*
*/

namespace Shin
{
namespace NeuronNetwork
{
namespace Learning
{
class BackPropagation : public Supervised
{
public:
BackPropagation(FeedForwardNetworkQuick &n);
double calculateError(const Solution &expectation,const Solution &solution);
void propagate(const Shin::NeuronNetwork::Solution& expectation);
double teach(const Shin::NeuronNetwork::Problem &p,const Solution &solution);
protected:
double learningCoeficient=0.8;
};
}
}
}
#endif

1  src/NeuronNetwork/Learning/Reinforcement  Symbolic link
@@ -0,0 +1 @@
./Reinforcement.h

1  src/NeuronNetwork/Learning/Reinforcement.cpp  Normal file
@@ -0,0 +1 @@
#include "./Reinforcement"

36  src/NeuronNetwork/Learning/Reinforcement.h  Normal file
@@ -0,0 +1,36 @@
#ifndef _REINFORCEMENT_H_
#define _REINFORCEMENT_H_

#include <math.h>
#include <cstddef>

#include "../Solution.h"
#include "../FeedForwardQuick.h"
#include "Unsupervised"

/*
*
*
*
*/

namespace Shin
{
namespace NeuronNetwork
{
namespace Learning
{
class Reinforcement : public Unsupervised
{
public:
Reinforcement(FeedForwardNetworkQuick &n);
double calculateError(const Solution &expectation,const Solution &solution);
void propagate(const Shin::NeuronNetwork::Solution& expectation);
double teach(const Shin::NeuronNetwork::Problem &p,const Solution &solution);
protected:
double learningCoeficient=0.8;
};
}
}
}
#endif

1  src/NeuronNetwork/Learning/Supervised  Symbolic link
@@ -0,0 +1 @@
./Supervised.h

15  src/NeuronNetwork/Learning/Supervised.cpp  Normal file
@@ -0,0 +1,15 @@
#include "./Supervised"
Shin::NeuronNetwork::Learning::Supervised::Supervised(Shin::NeuronNetwork::FeedForwardNetworkQuick& n) :network(n)
{

}

void Shin::NeuronNetwork::Learning::Supervised::debugOn()
{
debug=1;
}

void Shin::NeuronNetwork::Learning::Supervised::debugOff()
{
debug=0;
}

33  src/NeuronNetwork/Learning/Supervised.h  Normal file
@@ -0,0 +1,33 @@
#ifndef _SUPERVISEDLEARNING_H_
#define _SUPERVIESDLERANING_H_

#include <math.h>
#include <cstddef>

#include "../Solution.h"
#include "../FeedForwardQuick.h"

namespace Shin
{
namespace NeuronNetwork
{
namespace Learning
{
class Supervised
{
public:
Supervised() =delete;
Supervised(FeedForwardNetworkQuick &n);
virtual ~Supervised() {};
virtual double calculateError(const Solution &expectation,const Solution &solution)=0;
virtual double teach(const Shin::NeuronNetwork::Problem &p,const Solution &solution)=0;
void debugOn();
void debugOff();
protected:
FeedForwardNetworkQuick &network;
bool debug=0;
};
}
}
}
#endif

1  src/NeuronNetwork/Learning/Unsupervised  Symbolic link
@@ -0,0 +1 @@
./Unsupervised.h

16  src/NeuronNetwork/Learning/Unsupervised.cpp  Normal file
@@ -0,0 +1,16 @@
#include "./Unsupervised"

Shin::NeuronNetwork::Learning::Unsupervised::Unsupervised(Shin::NeuronNetwork::FeedForwardNetworkQuick& n) :network(n)
{

}

void Shin::NeuronNetwork::Learning::Unsupervised::debugOn()
{
debug=1;
}

void Shin::NeuronNetwork::Learning::Unsupervised::debugOff()
{
debug=0;
}

33  src/NeuronNetwork/Learning/Unsupervised.h  Normal file
@@ -0,0 +1,33 @@
#ifndef _UNSUPERVISEDLEARNING_H_
#define _UNSUPERVIESDLERANING_H_

#include <math.h>
#include <cstddef>

#include "../Solution.h"
#include "../FeedForwardQuick.h"

namespace Shin
{
namespace NeuronNetwork
{
namespace Learning
{
class Unsupervised
{
public:
Unsupervised() =delete;
Unsupervised(FeedForwardNetworkQuick &n);
virtual ~Unsupervised() {};
virtual double calculateError(const Solution &expectation,const Solution &solution)=0;
virtual double teach(const Shin::NeuronNetwork::Problem &p,const Solution &solution)=0;
void debugOn();
void debugOff();
protected:
FeedForwardNetworkQuick &network;
bool debug=0;
};
}
}
}
#endif

@@ -1,4 +1,6 @@
OBJFILES=./Solution.o ./Problem.o ./Network.o ./Neuron.o ./FeedForward.o ./FeedForwardQuick.o
OBJFILES= Neuron.o ./Network.o FeedForward.o FeedForwardQuick.o \
Learning/Supervised.o Learning/Unsupervised.o Learning/Reinforcement.o Learning/BackPropagation.o \
./Solution.o ./Problem.o

LIBNAME=NeuronNetwork

@@ -18,4 +20,4 @@ $(LIBNAME).a: $(OBJFILES)
nm --demangle $(LIBNAME).a > $(LIBNAME).nm

clean:
@rm -f ./*.o ./*.so ./*.a ./*.nm
@rm -f ./*.o ./*.so ./*.a ./*.nm ./*/*.o

@@ -19,12 +19,12 @@ Layer::~Layer()
}

Solution Layer::solve(const std::vector<bool> &input)
Solution Layer::solve(const std::vector<double> &input)
{
std::vector <bool> ret;
std::vector <double> ret;
for(Neuron *n:neurons)
{
ret.push_back(n->activates(input));
ret.push_back(n->output(input));
}
return ret;
}

@@ -42,7 +42,7 @@ namespace NeuronNetwork
}
}
~Layer();
Solution solve(const std::vector<bool> &input);
Solution solve(const std::vector<double> &input);
Neuron* operator[](int neuron) const;
int size() const {return neurons.size();};
protected:

@@ -36,15 +36,15 @@ void Neuron::setWeight(unsigned int i,double p)
weights[i]=p;
}

bool Neuron::activates(std::vector<bool> input)
double Neuron::output(std::vector<double> input)
{
double sum=0;
register double sum=0;
for(unsigned int i=0;i<input.size();i++)
{
// std::cerr << "W: " << getWeight(i) <<"\n";
sum+=getWeight(i)*input[i];
}
//std::cerr << "X: " << sum <<"\n";
return 1.0/(1.0+exp(-0.5*sum));
if(sum <= getPotential())
return 0;
return 1;

@@ -2,6 +2,7 @@
#define _S_NN_NEURON_H_

#include <vector>
#include <math.h>

namespace Shin
{

@@ -19,11 +20,14 @@ namespace NeuronNetwork
void setPotential(double p);
double getWeight(unsigned int) const;
void setWeight(unsigned int i,double p);
bool activates(const std::vector<bool>);
double output(const std::vector<double>);
double output() { return lastOutput;}
protected:
double potential;
private:
std::vector<double> weights;
double lastOutput=0.0;
double lastInput=0.0;
};
class SimpleNeuron: public Neuron
{

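The change above replaces the old hard threshold (fire when the weighted sum exceeds the potential) with a smooth logistic output, which is what makes the error differentiable for back-propagation. A small side-by-side sketch of the two behaviours (hypothetical names, logistic slope 0.5 as in the diff):

#include <cmath>
#include <cstdio>

// Old behaviour: binary step against a threshold ("potential").
static bool activates(double sum, double potential) { return sum > potential; }
// New behaviour: smooth logistic output in (0,1), as returned by Neuron::output().
static double output(double sum) { return 1.0 / (1.0 + std::exp(-0.5 * sum)); }

int main()
{
    for (double sum = -4.0; sum <= 4.0; sum += 2.0)
        std::printf("sum=%+.1f  step=%d  logistic=%.3f\n", sum, activates(sum, 0.0) ? 1 : 0, output(sum));
    return 0;
}
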
@@ -1 +1 @@
././Problem.h
./Problem.h

@@ -1,6 +1,7 @@
#ifndef _P_H_
#define _P_H_

#include <cstddef>
#include <vector>

namespace Shin

@@ -19,4 +20,5 @@ namespace NeuronNetwork
};
}
}
#endif
#endif

@@ -1 +1 @@
././Solution.h
./Solution.h

@@ -2,22 +2,31 @@

using namespace Shin::NeuronNetwork;

Solution::Solution(std::vector<bool>sol):solution(sol)
Solution::Solution(std::vector<double>sol):solution(sol)
{

}

bool Solution::operator[](int pos)
Solution::Solution(const Problem& p):solution()
{
std::vector<bool> q(p);
for(bool s:q)
{
solution.push_back(s);
}
}

double Solution::operator[](size_t pos) const
{
return solution[pos];
}

int Solution::size()
size_t Solution::size() const
{
return solution.size();
}

Solution::operator std::vector<bool>()
Solution::operator std::vector<double>()
{
return solution;
}

@@ -1,6 +1,7 @@
#ifndef _SOL_H_
#define _SOL_H_

#include <cstddef>
#include <vector>
#include "Problem"

@@ -11,14 +12,16 @@ namespace NeuronNetwork
class Solution
{
public:
Solution(std::vector<bool> solution);
int size();
bool operator[] (int pos);
operator std::vector<bool>();
Solution(const Problem& p);
Solution(std::vector<double> solution);
size_t size() const;
double operator[] (size_t pos) const;
operator std::vector<double>();
protected:
std::vector<bool> solution;
std::vector<double> solution;
};
}
}

#endif
#endif

@@ -1,47 +1,67 @@
#include "../src/NeuronNetwork/FeedForward"
#include "../src/NeuronNetwork/FeedForwardQuick"
#include "../src/NeuronNetwork/Learning/BackPropagation"

#include <iostream>
#include <vector>

class X: public Shin::NeuronNetwork::Problem
{
protected:
public:
X(const X& a) :q(a.q) {}
X(const std::vector<bool> &a):q(a) {}
std::vector<bool> representation() const
{
return std::vector<bool>({1,1});
return q;
}
protected:
std::vector<bool> q;
};

int main()
{
Shin::NeuronNetwork::FeedForwardNetwork n({2,3,2});
Shin::NeuronNetwork::Solution s =n.solve(X());
Shin::NeuronNetwork::FeedForwardNetworkQuick q({2,3,2});
Shin::NeuronNetwork::Solution sq =q.solve(X());
if(s.size()!=2)
srand(time(NULL));
std::vector<Shin::NeuronNetwork::Solution> s;
std::vector<X> p;

//
s.push_back(Shin::NeuronNetwork::Solution(std::vector<double>({1})));
p.push_back(X(std::vector<bool>({0})));

s.push_back(Shin::NeuronNetwork::Solution(std::vector<double>({0})));
p.push_back(X(std::vector<bool>({1})));

Shin::NeuronNetwork::FeedForwardNetworkQuick q({1,1});
Shin::NeuronNetwork::Learning::BackPropagation b(q);

int i=0;
std::cerr << i%4 <<". FOR: [" << p[i%2].representation()[0] << "] res: " << q.solve(p[i%2])[0] << " should be " << s[i%2][0]<<"\n";

for(int i=0;i<2000;i++)
{
std::cout << "1";
return 1;
}
if(s[0]!=1)
{
std::cout << "2";
return 1;
}
if(s[1]!=1)
{
std::cout << "3";
return 1;
}
if(s.size()!=sq.size())
{
std::cout << "3";
return 1;
b.teach(p[i%2],s[i%2]);
std::cerr << i%2 <<". FOR: [" << p[i%2].representation()[0] << "] res: " << q.solve(p[i%2])[0] << " should be " << s[i%2][0]<<"\n";
}
b.debugOn();
for(int i=0;i<2;i++)
if(s[i]!=sq[i])
{
std::cout << "4 " << i;
return 1;
}
return 0;
{
b.teach(p[i%2],s[i%2]);
std::cerr << i%4 <<". FOR: [" << p[i%4].representation()[0] << "," <<p[i%4].representation()[0] << "] res: " << q.solve(p[i%4])[0] << " should be " <<
s[i%4][0]<<"\n";
}
b.debugOff();
/*
for(int i=0;i<40;i++)
{
b.teach(p[i%4],s[i%4]);
}
b.debugOn();
std::cerr << "LEARNED\n";
for(int i=0;i<4;i++)
{
b.teach(p[i%4],s[i%4]);
std::cerr << i%4 <<". FOR: [" << p[i%4].representation()[0] << "," <<p[i%4].representation()[1] << "] res: " << q.solve(p[i%4])[0] << " should be " <<
s[i%4][0]<<"\n";
}
*/
}

@@ -43,17 +43,6 @@ int main()
return 1;
}

if(s[0]!=0)
{
std::cout << "2";
return 1;
}
if(s[1]!=1)
{
std::cout << "3";
return 1;
}

for(int i=0;i<2;i++)
{
if(s[i]!=sq[i])

113  tests/nn-03.cpp
@@ -1,74 +1,61 @@
#include "../src/NeuronNetwork/Network"
#include "../src/NeuronNetwork/FeedForward"
#include "../src/NeuronNetwork/FeedForwardQuick"
#include "../src/NeuronNetwork/Learning/BackPropagation"

#include <iostream>
#include <vector>

class X: public Shin::NeuronNetwork::Problem
{
public: X(bool x,bool y):x(x),y(y) {}
protected: std::vector<bool> representation() const { return std::vector<bool>({x,y}); }
private:
bool x;
bool y;
public:
X(const X& a) :q(a.q) {}
X(const std::vector<bool> &a):q(a) {}
std::vector<bool> representation() const
{
return q;
}
protected:
std::vector<bool> q;
};

int main()
{
srand(time(NULL));
int lm=5;
Shin::NeuronNetwork::FeedForwardNetwork net({2,lm,1});
bool x=1;
int prev_err=0;
int err=0;
int l;
int n;
int w;
int pot;
int wei;
int c=0;
std::cout << "\ntest 1 & 1 -" << net.solve(X(1,1))[0];
std::cout << "\ntest 1 & 0 -" << net.solve(X(1,0))[0];
std::cout << "\ntest 0 & 1 - " << net.solve(X(0,1))[0];
std::cout << "\ntest 0 & 0- " << net.solve(X(0,0))[0];
std::cout << "\n---------------------------------------";
do{
if(c%10000 ==1)
{
std::cout << "\nmixed";
srand(time(NULL));
}
err=0;
c++;
l=rand()%2+1;
n=rand()%lm;
w=rand()%2;
if(l==2)
n=0;
pot=net[l]->operator[](n)->getPotential();
net[l]->operator[](n)->setPotential(pot*(rand()%21+90)/100);
wei=net[l]->operator[](n)->getWeight(w);
net[l]->operator[](n)->setWeight(w,wei*(rand()%21+90)/100);
std::vector<Shin::NeuronNetwork::Solution> s;
std::vector<X> p;

for(int i=0;i<100;i++)
{
bool x= rand()%2;
bool y=rand()%2;
Shin::NeuronNetwork::Solution s =net.solve(X(x,y));
if(s[0]!= (x xor y))
err++;
}
//
s.push_back(Shin::NeuronNetwork::Solution(std::vector<double>({0})));
p.push_back(X(std::vector<bool>({1,0})));
s.push_back(Shin::NeuronNetwork::Solution(std::vector<double>({0})));
p.push_back(X(std::vector<bool>({0,1})));
s.push_back(Shin::NeuronNetwork::Solution(std::vector<double>({0})));
p.push_back(X(std::vector<bool>({0,0})));
s.push_back(Shin::NeuronNetwork::Solution(std::vector<double>({1})));
p.push_back(X(std::vector<bool>({1,1})));

Shin::NeuronNetwork::FeedForwardNetworkQuick q({2,4,1});
Shin::NeuronNetwork::Learning::BackPropagation b(q);

b.debugOn();
for(int i=0;i<4;i++)
{
b.teach(p[i%4],s[i%4]);
std::cerr << i%4 <<". FOR: [" << p[i%4].representation()[0] << "," <<p[i%4].representation()[1] << "] res: " << q.solve(p[i%4])[0] << " should be " <<
s[i%4][0]<<"\n";
}
b.debugOff();

for(int i=0;i<40;i++)
{
b.teach(p[i%4],s[i%4]);
}
b.debugOn();
std::cerr << "LEARNED\n";
for(int i=0;i<4;i++)
{
b.teach(p[i%4],s[i%4]);
std::cerr << i%4 <<". FOR: [" << p[i%4].representation()[0] << "," <<p[i%4].representation()[1] << "] res: " << q.solve(p[i%4])[0] << " should be " <<
s[i%4][0]<<"\n";
}

if(err > prev_err)
{
net[l]->operator[](n)->setPotential(pot);
net[l]->operator[](n)->setWeight(w,wei);
};
// std::cout << "C: " << c << " err: " << err << " prev: "<<prev_err << "\n";
prev_err=err;
if(err <1)
x=0;
}while(x);
std::cout << "\ntest 1 & 1 -" << net.solve(X(1,1))[0];
std::cout << "\ntest 1 & 0 -" << net.solve(X(1,0))[0];
std::cout << "\ntest 0 & 1 - " << net.solve(X(0,1))[0];
std::cout << "\ntest 0 & 0- " << net.solve(X(0,0))[0];
std::cout << "\nTotaly: " << c << "\n";
}