yeah
@@ -33,7 +33,7 @@ FFNeuron& FFLayer::operator[](const size_t& neuron)
 
 }
 
-FeedForward::FeedForward(std::initializer_list<size_t> s, double lam): ACyclicNetwork(lam),layers(s.size())
+FeedForward::FeedForward(std::initializer_list<size_t> s, double lam, std::function<float(const size_t&layer, const size_t &neuron, const size_t &weight)> weightInit): ACyclicNetwork(lam),layers(s.size())
 {
     transfer = new TransferFunction::TransferFunction*[s.size()];
     weights= new float**[s.size()];
@@ -41,7 +41,7 @@ FeedForward::FeedForward(std::initializer_list<size_t> s, double lam): ACyclicNe
     layerSizes= new size_t[s.size()];
     outputs= new float*[s.size()];
     inputs= new float*[s.size()];
-    int i=0;
+    register int i=0;
     register int prev_size=1;
     for(int layeSize:s) // TODO rename
     {
@@ -65,7 +65,7 @@ FeedForward::FeedForward(std::initializer_list<size_t> s, double lam): ACyclicNe
             weights[i][j]= new float[prev_size];
             for(int k=0;k<prev_size;k++)
             {
-                weights[i][j][k]=1.0-((float)(rand()%2001))/1000.0;
+                weights[i][j][k]=weightInit(i,j,k);
             }
         }
         i++;
@@ -173,7 +173,7 @@ Shin::Solution FeedForward::solve(const Shin::Problem& p)
     {
         std::vector<std::thread> th;
         size_t s=1;
-        size_t step =layerSizes[i]/threads;
+        register size_t step =layerSizes[i]/threads;
        for(size_t t=1;t<threads;t++)
         {
             th.push_back(std::thread([i,this,newSolution,prevSize,sol](size_t from, size_t to)->void{
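For context on the solve() hunk above: step = layerSizes[i]/threads carves the neurons of layer i into contiguous chunks of roughly equal size, one per worker thread, with the calling thread presumably picking up the final chunk plus any remainder. A minimal standalone sketch of that partitioning scheme, using hypothetical names and a dummy per-neuron workload rather than the class internals:

    // Standalone sketch (not the repository's code) of splitting a layer's
    // neurons across worker threads as in the hunk above:
    // step = layerSize / threads, workers take chunks [(t-1)*step, t*step),
    // and the calling thread is assumed to handle the final chunk + remainder.
    #include <cstddef>
    #include <thread>
    #include <vector>

    int main()
    {
        const std::size_t layerSize = 10;          // stands in for layerSizes[i]
        const std::size_t threads   = 4;           // worker count, assumed > 0
        std::vector<int> visited(layerSize, 0);
        const std::size_t step = layerSize / threads;

        auto work = [&visited](std::size_t from, std::size_t to) {
            for (std::size_t n = from; n < to; ++n)
                visited[n] = 1;                    // dummy per-neuron workload
        };

        std::vector<std::thread> th;
        for (std::size_t t = 1; t < threads; ++t)
            th.push_back(std::thread(work, (t - 1) * step, t * step));

        work((threads - 1) * step, layerSize);     // remainder on the calling thread

        for (auto& worker : th)
            worker.join();
        return 0;
    }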
@@ -81,7 +81,10 @@ namespace NeuralNetwork
         class FeedForward:public ACyclicNetwork
         {
             public:
-                FeedForward(std::initializer_list<size_t> s, double lam=Shin::NeuralNetwork::lambda);
+                FeedForward(std::initializer_list<size_t> s, double lam=Shin::NeuralNetwork::lambda,
+                    std::function<float(const size_t&layer, const size_t &neuron, const size_t &weight)> weightInit=
+                    [](const size_t&, const size_t &, const size_t &)->float{ return 1.0-((float)(rand()%2001))/1000.0;}
+                );
                 virtual ~FeedForward();
 
                 FeedForward(const FeedForward &f) = delete; //TODO
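The header change above turns the fixed random initialization into a weightInit hook whose default lambda reproduces the old behaviour: rand()%2001 lies in [0, 2000], so 1.0-((float)(rand()%2001))/1000.0 falls in [-1, 1]. A hedged usage sketch of the new overload; the include path, the explicit lam argument, and the constant 0.1f are illustrative assumptions, not taken from the commit:

    // Hypothetical caller of the new FeedForward constructor overload.
    #include "FeedForward.h"   // assumed header name; the diff does not show it
    #include <cstddef>

    int main()
    {
        // A 3-4-2 network whose weights all start at 0.1f instead of the
        // default pseudo-random value in [-1, 1].
        Shin::NeuralNetwork::FeedForward net(
            {3, 4, 2},
            Shin::NeuralNetwork::lambda,   // pass the library default for lam explicitly
            [](const std::size_t&, const std::size_t&, const std::size_t&) -> float {
                return 0.1f;               // layer/neuron/weight indices ignored here
            });
        return 0;
    }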
src/NeuralNetwork/Perceptron.h (new file, 26 lines)
@@ -0,0 +1,26 @@
+#ifndef _S_NN_PERCEP_H_
+#define _S_NN_PERCEP_H_
+
+#include "./FeedForward"
+#include "TransferFunction/Heaviside.h"
+
+namespace Shin
+{
+    namespace NeuralNetwork
+    {
+        class Perceptron:public FeedForward
+        {
+            public:
+                Perceptron(const size_t &inputSize, const size_t &outputSize):FeedForward({inputSize,outputSize})
+                {
+                    for(int i=0;i<layers;i++)
+                    {
+                        delete transfer[i];
+                        transfer[i]= new TransferFunction::Heaviside(0.5);
+                    }
+                };
+        };
+
+    }
+}
+#endif
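A hedged construction sketch for the new Perceptron class; the include path is an assumption. As added above, it is a two-layer FeedForward whose transfer functions are replaced with Heaviside(0.5) in the constructor:

    #include "Perceptron.h"   // assumed include path for the new header

    int main()
    {
        // Hypothetical 4-input, 1-output perceptron: weights come from
        // FeedForward's default initializer, activations pass through Heaviside(0.5).
        Shin::NeuralNetwork::Perceptron p(4, 1);
        return 0;
    }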