renamed TransferFunction to ActivationFunction
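Note: after this rename the abstract base lives in NeuralNetwork::ActivationFunction::ActivationFunction and keeps the two pure virtuals shown in the first hunk below (operator() and derivatedOutput). As a rough illustration of what a user-defined activation would look like against the renamed interface — the Identity class and the include path are illustrative assumptions only, not part of this commit:

    // Sketch only: assumes the interface exactly as shown in the first hunk below.
    #include "ActivationFunction/ActivationFunction.h"

    namespace NeuralNetwork {
    namespace ActivationFunction {

    // Hypothetical example subclass, not added by this commit.
    class Identity : public ActivationFunction
    {
    public:
        virtual ~Identity() {}
        // output = x, so the derivative with respect to the input is 1
        virtual float operator()(const float &x) override { return x; }
        virtual float derivatedOutput(const float &input, const float &output) override
        {
            (void)input; (void)output;
            return 1.0f;
        }
    };

    } // namespace ActivationFunction
    } // namespace NeuralNetwork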
@@ -5,12 +5,12 @@
 
 namespace NeuralNetwork
 {
-namespace TransferFunction
+namespace ActivationFunction
 {
-class TransferFunction
+class ActivationFunction
 {
 public:
-virtual ~TransferFunction() {}
+virtual ~ActivationFunction() {}
 virtual float derivatedOutput(const float &input,const float &output)=0;
 virtual float operator()(const float &x)=0;
 };
@@ -1,13 +1,13 @@
 #ifndef __TRAN_HEAVISIDE_H_
 #define __TRAN_HEAVISIDE_H_
 
-#include "./TransferFunction.h"
+#include "./ActivationFunction.h"
 
 namespace NeuralNetwork
 {
-namespace TransferFunction
+namespace ActivationFunction
 {
-class Heaviside: public TransferFunction
+class Heaviside: public ActivationFunction
 {
 public:
 Sigmoid(const float &lambdaP): lambda(lambdaP) {}
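Note on the hunk above: the unchanged context line still declares the constructor of class Heaviside as Sigmoid(const float &lambdaP), which looks like a copy-paste leftover from Sigmoid.h and would not compile as a constructor. The presumable intent (a guess, not changed by this commit) would be:

    Heaviside(const float &lambdaP): lambda(lambdaP) {}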
@@ -1,13 +1,13 @@
 #ifndef __TRAN_HYPTAN_H_
 #define __TRAN_HYPTAN_H_
 
-#include "./TransferFunction.h"
+#include "./ActivationFunction.h"
 
 namespace NeuralNetwork
 {
-namespace TransferFunction
+namespace ActivationFunction
 {
-class HyperbolicTangent: public TransferFunction
+class HyperbolicTangent: public ActivationFunction
 {
 public:
 HyperbolicTangent(const float& lam=1):lambda(lam) {}
@@ -1,13 +1,13 @@
 #ifndef __TRAN_SIGMOID_H_
 #define __TRAN_SIGMOID_H_
 
-#include "./StreamingTransferFunction.h"
+#include "./StreamingActivationFunction.h"
 
 namespace NeuralNetwork
 {
-namespace TransferFunction
+namespace ActivationFunction
 {
-class Sigmoid: public StreamingTransferFunction
+class Sigmoid: public StreamingActivationFunction
 {
 public:
 Sigmoid(const float lambdaP = 0.8): lambda(lambdaP) {}
@@ -5,13 +5,13 @@
 
 #include "../../sse_mathfun.h"
 
-#include "./TransferFunction.h"
+#include "./ActivationFunction.h"
 
 namespace NeuralNetwork
 {
-namespace TransferFunction
+namespace ActivationFunction
 {
-class StreamingTransferFunction : public TransferFunction
+class StreamingActivationFunction : public ActivationFunction
 {
 public:
 virtual float derivatedOutput(const float &input,const float &output)=0;
@@ -34,7 +34,7 @@ FFNeuron& FFLayer::operator[](const size_t& neuron)
 
 FeedForward::FeedForward(std::initializer_list<size_t> s, double lam, FeedForwardInitializer weightInit): ACyclicNetwork(lam),layers(s.size())
 {
-transfer = new TransferFunction::TransferFunction*[s.size()];
+transfer = new ActivationFunction::ActivationFunction*[s.size()];
 weights= new float**[s.size()];
 potentials= new float*[s.size()];
 layerSizes= new size_t[s.size()];
@@ -44,7 +44,7 @@ FeedForward::FeedForward(std::initializer_list<size_t> s, double lam, FeedForwar
 register int prev_size=1;
 for(int layeSize:s) // TODO rename
 {
-transfer[i]= new TransferFunction::Sigmoid(lam);
+transfer[i]= new ActivationFunction::Sigmoid(lam);
 layeSize+=1;
 if(i==0)
 {
@@ -106,7 +106,7 @@ FeedForward::~FeedForward()
 
 void FeedForward::solvePart(float *newSolution, register size_t begin, size_t end,size_t prevSize, float *sol,size_t layer)
 {
-TransferFunction::StreamingTransferFunction *function=dynamic_cast<TransferFunction::StreamingTransferFunction*>(transfer[layer]);
+ActivationFunction::StreamingActivationFunction *function=dynamic_cast<ActivationFunction::StreamingActivationFunction*>(transfer[layer]);
 if(prevSize >=4 && function !=nullptr)
 {
 __m128 partialSolution;
@@ -5,8 +5,8 @@
 #include "../Solution"
 #include "Network"
 
-#include "TransferFunction/Sigmoid.h"
-#include "TransferFunction/TransferFunction.h"
+#include "ActivationFunction/Sigmoid.h"
+#include "ActivationFunction/ActivationFunction.h"
 
 #include <vector>
 #include <initializer_list>
@@ -29,7 +29,7 @@ namespace NeuralNetwork
 class FFNeuron : public Neuron
 {
 public:
-inline FFNeuron(float &pot, float *w, float &outputF, float &i,float lam,TransferFunction::TransferFunction &fun):function(fun),potential(pot),weights(w),out(outputF),inputs(i),lambda(lam) { }
+inline FFNeuron(float &pot, float *w, float &outputF, float &i,float lam,ActivationFunction::ActivationFunction &fun):function(fun),potential(pot),weights(w),out(outputF),inputs(i),lambda(lam) { }
 
 FFNeuron() = delete;
 FFNeuron(const FFNeuron&) = delete;
@@ -45,7 +45,7 @@ namespace NeuralNetwork
 inline virtual float input() const override { return inputs; }
 inline virtual float derivatedOutput() const override { return function.derivatedOutput(inputs,out); }
 protected:
-TransferFunction::TransferFunction &function;
+ActivationFunction::ActivationFunction &function;
 float &potential;
 float *weights;
 float &out;
@@ -57,7 +57,7 @@ namespace NeuralNetwork
 class FFLayer: public Layer
 {
 public:
-inline FFLayer(size_t s, float *p,float **w,float *out,float *in,float lam,TransferFunction::TransferFunction &fun): function(fun), layerSize(s),potentials(p),weights(w),outputs(out),inputs(in),lambda(lam) {}
+inline FFLayer(size_t s, float *p,float **w,float *out,float *in,float lam,ActivationFunction::ActivationFunction &fun): function(fun), layerSize(s),potentials(p),weights(w),outputs(out),inputs(in),lambda(lam) {}
 ~FFLayer();
 
 FFLayer(const FFLayer &) = delete;
@@ -66,7 +66,7 @@ namespace NeuralNetwork
 virtual FFNeuron& operator[](const size_t& layer) override;
 inline virtual size_t size() const override {return layerSize;};
 protected:
-TransferFunction::TransferFunction &function;
+ActivationFunction::ActivationFunction &function;
 FFNeuron **neurons=nullptr;
 size_t layerSize;
 float *potentials;
@@ -131,7 +131,7 @@ namespace NeuralNetwork
 float **potentials=nullptr;
 float **outputs=nullptr;
 float **inputs=nullptr;
-TransferFunction::TransferFunction **transfer=nullptr;
+ActivationFunction::ActivationFunction **transfer=nullptr;
 size_t *layerSizes=nullptr;
 size_t layers;/**< Number of layers */
 };
@@ -18,7 +18,7 @@ lib: $(LIBNAME).so $(LIBNAME).a
 $(LIBNAME).so: $(OBJFILES)
 	$(CXX) -shared $(CXXFLAGS) $(OBJFILES) $(LINKFILES) -o $(LIBNAME).so
 
-$(LIBNAME).a: $(OBJFILES) ./Neuron.h ./Network.h ../Solution.h ../Problem.h ./TransferFunction/TransferFunction.h ./TransferFunction/Sigmoid.h
+$(LIBNAME).a: $(OBJFILES) ./Neuron.h ./Network.h ../Solution.h ../Problem.h ./ActivationFunction/ActivationFunction.h ./ActivationFunction/Sigmoid.h
 	rm -f $(LIBNAME).a # create new library
 	ar rcv $(LIBNAME).a $(OBJFILES) $(LINKFILES)
 	ranlib $(LIBNAME).a