commit 451ba2971b
parent 6337ce98dd
Date:   2015-01-06 18:07:16 +01:00

3 changed files with 34 additions and 5 deletions

View File

@@ -33,7 +33,7 @@ FFNeuron& FFLayer::operator[](const size_t& neuron)
 }
-FeedForward::FeedForward(std::initializer_list<size_t> s, double lam): ACyclicNetwork(lam),layers(s.size())
+FeedForward::FeedForward(std::initializer_list<size_t> s, double lam, std::function<float(const size_t&layer, const size_t &neuron, const size_t &weight)> weightInit): ACyclicNetwork(lam),layers(s.size())
 {
     transfer = new TransferFunction::TransferFunction*[s.size()];
     weights= new float**[s.size()];
@@ -41,7 +41,7 @@ FeedForward::FeedForward(std::initializer_list<size_t> s, double lam): ACyclicNe
     layerSizes= new size_t[s.size()];
     outputs= new float*[s.size()];
     inputs= new float*[s.size()];
-    int i=0;
+    register int i=0;
     register int prev_size=1;
     for(int layeSize:s) // TODO rename
     {
@@ -65,7 +65,7 @@ FeedForward::FeedForward(std::initializer_list<size_t> s, double lam): ACyclicNe
         weights[i][j]= new float[prev_size];
         for(int k=0;k<prev_size;k++)
         {
-            weights[i][j][k]=1.0-((float)(rand()%2001))/1000.0;
+            weights[i][j][k]=weightInit(i,j,k);
         }
     }
     i++;
@@ -173,7 +173,7 @@ Shin::Solution FeedForward::solve(const Shin::Problem& p)
 {
     std::vector<std::thread> th;
     size_t s=1;
-    size_t step =layerSizes[i]/threads;
+    register size_t step =layerSizes[i]/threads;
     for(size_t t=1;t<threads;t++)
     {
         th.push_back(std::thread([i,this,newSolution,prevSize,sol](size_t from, size_t to)->void{
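Side note (not part of this commit): the .cpp change above replaces the hard-coded random initialization with a caller-supplied weightInit(layer, neuron, weight) callback. A minimal standalone sketch, assuming nothing beyond the standard library, that samples the old expression (still used as the default) and confirms it falls roughly within [-1.0, 1.0]:

// Sketch only: reports the observed range of the pre-commit initializer expression.
#include <cstdio>
#include <cstdlib>
#include <ctime>

int main()
{
    std::srand(static_cast<unsigned>(std::time(nullptr)));
    float lo = 2.0f, hi = -2.0f;
    for (int n = 0; n < 100000; ++n)
    {
        // Same expression as the old weights[i][j][k] assignment.
        float w = 1.0 - ((float)(rand() % 2001)) / 1000.0;
        if (w < lo) lo = w;
        if (w > hi) hi = w;
    }
    std::printf("observed range: [%f, %f]\n", lo, hi); // expected: close to [-1, 1]
    return 0;
}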

View File

@@ -81,7 +81,10 @@ namespace NeuralNetwork
     class FeedForward:public ACyclicNetwork
     {
         public:
-            FeedForward(std::initializer_list<size_t> s, double lam=Shin::NeuralNetwork::lambda);
+            FeedForward(std::initializer_list<size_t> s, double lam=Shin::NeuralNetwork::lambda,
+                std::function<float(const size_t&layer, const size_t &neuron, const size_t &weight)> weightInit=
+                [](const size_t&, const size_t &, const size_t &)->float{ return 1.0-((float)(rand()%2001))/1000.0;}
+            );
             virtual ~FeedForward();
             FeedForward(const FeedForward &f) = delete; //TODO
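Usage sketch (not part of this commit): constructing a FeedForward with the new defaulted weightInit parameter. The header name "FeedForward.h" and the layer sizes {2, 3, 1} are assumptions for illustration; the namespace, the lambda default, and the callback signature come from the declaration above.

// Sketch only: the header path below is assumed, not taken from the diff.
#include <cstddef>
#include "FeedForward.h"

int main()
{
    using Shin::NeuralNetwork::FeedForward;

    // Default behaviour: weights drawn uniformly from roughly [-1, 1],
    // exactly as before this commit.
    FeedForward netDefault({2, 3, 1});

    // Custom behaviour: the callback receives the (layer, neuron, weight)
    // indices and returns the initial weight; here it ignores them.
    FeedForward netConstant({2, 3, 1}, Shin::NeuralNetwork::lambda,
        [](const std::size_t&, const std::size_t&, const std::size_t&) -> float {
            return 0.1f;
        });

    return 0;
}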

View File

@@ -0,0 +1,26 @@
+#ifndef _S_NN_PERCEP_H_
+#define _S_NN_PERCEP_H_
+#include "./FeedForward"
+#include "TransferFunction/Heaviside.h"
+namespace Shin
+{
+    namespace NeuralNetwork
+    {
+        class Perceptron:public FeedForward
+        {
+            public:
+                Perceptron(const size_t &inputSize, const size_t &outputSize):FeedForward({inputSize,outputSize})
+                {
+                    for(int i=0;i<layers;i++)
+                    {
+                        delete transfer[i];
+                        transfer[i]= new TransferFunction::Heaviside(0.5);
+                    }
+                };
+        };
+    }
+}
+#endif
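Usage sketch (not part of this commit) for the new Perceptron header above: a two-layer FeedForward whose transfer functions are swapped for a Heaviside step with threshold 0.5, so outputs are binarised. The header name "Perceptron.h" is an assumption, and the sketch relies on FeedForward exposing transfer and layers to derived classes, as the constructor above does.

// Sketch only: header name assumed; construction mirrors the class added above.
#include "Perceptron.h"

int main()
{
    // 2 inputs, 1 binary output; weights use FeedForward's default random init,
    // outputs pass through TransferFunction::Heaviside(0.5).
    Shin::NeuralNetwork::Perceptron p(2, 1);

    // Training / evaluation would go through the FeedForward interface
    // (e.g. solve(), shown in the first file of this commit).
    return 0;
}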