Refactored and recurrent implementation
42  include/NeuralNetwork/ActivationFunction/ActivationFunction.h  Normal file
@@ -0,0 +1,42 @@
#pragma once

#include <string>

namespace NeuralNetwork {
namespace ActivationFunction {

/**
 * @author Tomas Cernik (Tom.Cernik@gmail.com)
 * @brief Abstract base class for activation functions
 */
class ActivationFunction {
public:
    virtual ~ActivationFunction() {}

    /**
     * @brief Returns the derivative of the function
     * @param input is the input of the function
     * @param output is the output of the function
     */
    virtual float derivatedOutput(const float &input, const float &output) = 0;

    /**
     * @brief Returns the value of the function
     * @param x is the input of the function
     */
    virtual float operator()(const float &x) = 0;

    /**
     * @brief Returns a clone of the object
     */
    virtual ActivationFunction* clone() const = 0;

    /**
     * @brief Virtual function for storing the activation function
     * @returns JSON describing the function
     */
    virtual std::string stringify() const = 0;
};

}
}
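The contract is worth spelling out: derivatedOutput receives both the input and the already computed output, so implementations can express the derivative in terms of the cached output instead of re-evaluating the function. A minimal sketch of polymorphic use, assuming the include/ directory is on the include path and using the HyperbolicTangent class added later in this commit:

#include <NeuralNetwork/ActivationFunction/ActivationFunction.h>
#include <NeuralNetwork/ActivationFunction/HyperbolicTangent.h>

// Sketch of a backward-pass helper working purely against the abstract interface.
float backpropFactor(NeuralNetwork::ActivationFunction::ActivationFunction &f, float input) {
    float output = f(input);                    // forward pass
    return f.derivatedOutput(input, output);    // derivative reuses the cached output
}

int main() {
    NeuralNetwork::ActivationFunction::HyperbolicTangent tanhFn;
    float g = backpropFactor(tanhFn, 0.5f);     // lambda * (1 - tanh(0.5)^2)
    (void)g;
    return 0;
}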
26  include/NeuralNetwork/ActivationFunction/Heaviside.h  Normal file
@@ -0,0 +1,26 @@
#pragma once

#include "./ActivationFunction.h"

namespace NeuralNetwork {
namespace ActivationFunction {

class Heaviside: public ActivationFunction {
public:
    Heaviside(const float &lambdaP = 1.0): lambda(lambdaP) {}

    // The step function has no usable derivative; a constant pseudo-derivative of 1 is returned.
    inline virtual float derivatedOutput(const float &, const float &) override { return 1.0; }

    // Steps from 0 to 1 once the input exceeds the threshold lambda.
    inline virtual float operator()(const float &x) override { return x > lambda ? 1.0f : 0.0f; }

    virtual ActivationFunction* clone() const override {
        return new Heaviside(lambda);
    }

    virtual std::string stringify() const override {
        return "{ \"class\": \"NeuralNetwork::ActivationFunction::Heaviside\", \"lambda\" : " + std::to_string(lambda) + "}";
    }

protected:
    float lambda;
};

}
}
27  include/NeuralNetwork/ActivationFunction/HyperbolicTangent.h  Normal file
@@ -0,0 +1,27 @@
#pragma once

#include "./ActivationFunction.h"

#include <cmath>

namespace NeuralNetwork {
namespace ActivationFunction {

class HyperbolicTangent: public ActivationFunction {
public:
    HyperbolicTangent(const float &lam = 1): lambda(lam) {}

    // d/dx tanh(lambda*x) = lambda*(1 - tanh^2(lambda*x)) = lambda*(1 - output*output)
    inline virtual float derivatedOutput(const float &, const float &output) override { return lambda*(1 - output*output); }

    inline virtual float operator()(const float &x) override { return std::tanh(lambda*x); }

    virtual ActivationFunction* clone() const override {
        return new HyperbolicTangent(lambda);
    }

    virtual std::string stringify() const override {
        return "{ \"class\": \"NeuralNetwork::ActivationFunction::HyperbolicTangent\", \"lambda\" : " + std::to_string(lambda) + "}";
    }

protected:
    float lambda;
};

}
}
35  include/NeuralNetwork/ActivationFunction/Sigmoid.h  Normal file
@@ -0,0 +1,35 @@
#pragma once

#include <cmath>

#include "./StreamingActivationFunction.h"
#include "../../sse_mathfun.h"

namespace NeuralNetwork {
namespace ActivationFunction {

/**
 * @author Tomas Cernik (Tom.Cernik@gmail.com)
 * @brief Class for computing the sigmoid function
 */
class Sigmoid: public StreamingActivationFunction {
public:
    Sigmoid(const float lambdaP = -0.5): lambda(lambdaP) {}

    inline virtual float derivatedOutput(const float &, const float &output) override { return lambda*output*(1.0f - output); }

    inline virtual float operator()(const float &x) override { return 1.0f / (1.0f + std::exp(lambda*x)); }

    inline virtual __m128 operator()(const __m128 &x) override {
        // exp_ps is extremely slow!
        return _mm_div_ps(_mm_set1_ps(1.0f), _mm_add_ps(exp_ps(_mm_mul_ps(_mm_set1_ps(lambda), x)), _mm_set1_ps(1.0f)));
    }

    virtual ActivationFunction* clone() const override {
        return new Sigmoid(lambda);
    }

    virtual std::string stringify() const override {
        return "{ \"class\": \"NeuralNetwork::ActivationFunction::Sigmoid\", \"lambda\" : " + std::to_string(lambda) + "}";
    }

protected:
    float lambda;
};

}
}
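A minimal usage sketch of the sigmoid, assuming the include/ directory is on the include path. Note the sign convention: operator() computes 1/(1 + exp(lambda*x)), so the default lambda of -0.5 gives the usual rising sigmoid, and derivatedOutput works from the cached output rather than recomputing the exponential:

#include <iostream>
#include <NeuralNetwork/ActivationFunction/Sigmoid.h>

int main() {
    NeuralNetwork::ActivationFunction::Sigmoid sigmoid;   // default lambda = -0.5

    float x = 2.0f;
    float y = sigmoid(x);                      // 1 / (1 + exp(-0.5 * 2)) ~ 0.731
    float dy = sigmoid.derivatedOutput(x, y);  // lambda * y * (1 - y), computed from the cached output

    std::cout << y << " " << dy << "\n";
    std::cout << sigmoid.stringify() << "\n";  // JSON description of the function
    return 0;
}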
26  include/NeuralNetwork/ActivationFunction/StreamingActivationFunction.h  Normal file
@@ -0,0 +1,26 @@
#pragma once

#include <xmmintrin.h>

#include "./ActivationFunction.h"

namespace NeuralNetwork {
namespace ActivationFunction {

/**
 * @author Tomas Cernik (Tom.Cernik@gmail.com)
 * @brief Abstract class for activation functions with SSE support
 */
class StreamingActivationFunction : public ActivationFunction {
public:
    virtual float derivatedOutput(const float &input, const float &output) = 0;
    virtual float operator()(const float &x) = 0;

    /**
     * @brief Returns the values of four outputs at once
     * @param x packs four input values, one per SSE lane
     */
    virtual __m128 operator()(const __m128 &x) = 0;
};

}
}
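A minimal sketch of the SIMD overload, assuming an SSE-capable target; the __m128 call operator evaluates the activation on four packed inputs at once:

#include <xmmintrin.h>
#include <iostream>
#include <NeuralNetwork/ActivationFunction/Sigmoid.h>

int main() {
    NeuralNetwork::ActivationFunction::Sigmoid sigmoid;

    // Pack four inputs into one SSE register (_mm_set_ps stores its arguments in reverse lane order).
    __m128 x = _mm_set_ps(4.0f, 3.0f, 2.0f, 1.0f);
    __m128 y = sigmoid(x);

    // Unpack the four outputs for inspection.
    alignas(16) float out[4];
    _mm_store_ps(out, y);
    for (float v : out) std::cout << v << "\n";
    return 0;
}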
27  include/NeuralNetwork/BasisFunction/BasisFunction.h  Normal file
@@ -0,0 +1,27 @@
#pragma once

#include <math.h>
#include <vector>

#include <string>

namespace NeuralNetwork {
namespace BasisFunction {

class BasisFunction {
public:
    virtual ~BasisFunction() {}

    virtual float operator()(const std::vector<float>& weights, const std::vector<float>& input) = 0;

    /**
     * @brief Returns a clone of the object
     */
    virtual BasisFunction* clone() const = 0;

    /**
     * @brief Virtual function for storing the basis function
     * @returns JSON describing the function
     */
    virtual std::string stringify() const = 0;
};

}
}
68  include/NeuralNetwork/BasisFunction/Linear.h  Normal file
@@ -0,0 +1,68 @@
#pragma once

#include <mmintrin.h>
#include <xmmintrin.h>
#include <emmintrin.h>
#include <pmmintrin.h>

#include "./StreamingBasisFunction.h"

#include "../../sse_mathfun.h"

namespace NeuralNetwork {
namespace BasisFunction {

class Linear: public StreamingBasisFunction {
public:
    Linear() {}

    inline virtual float computeStreaming(const std::vector<float>& weights, const std::vector<float>& input) override {
        size_t inputSize = input.size();
        size_t alignedPrev = inputSize - inputSize%4;

        const float* weightsData = weights.data();
        const float* inputData = input.data();
        vec4f partialSolution;
        partialSolution.sse = _mm_setzero_ps();

        // TODO prefetch ??
        // Main loop: multiply-accumulate four elements per iteration.
        // Note: _mm_load_ps assumes the vector storage is 16-byte aligned.
        for(size_t k = 0; k < alignedPrev; k += 4) {
            partialSolution.sse = _mm_add_ps(partialSolution.sse, _mm_mul_ps(_mm_load_ps(weightsData+k), _mm_load_ps(inputData+k)));
        }

        // Tail loop: the remaining (at most three) elements are accumulated in lane 0 only.
        for(size_t k = alignedPrev; k < inputSize; k++) {
            partialSolution.sse = _mm_add_ps(partialSolution.sse, _mm_mul_ps(_mm_load_ss(weightsData+k), _mm_load_ss(inputData+k)));
        }

        // Horizontal sum of the four partial sums.
#ifdef USE_SSE2 // pre-SSE3 solution
        partialSolution.sse = _mm_add_ps(_mm_movehl_ps(partialSolution.sse, partialSolution.sse), partialSolution.sse);
        partialSolution.sse = _mm_add_ss(partialSolution.sse, _mm_shuffle_ps(partialSolution.sse, partialSolution.sse, 1));
#else
        partialSolution.sse = _mm_hadd_ps(partialSolution.sse, partialSolution.sse);
        partialSolution.sse = _mm_hadd_ps(partialSolution.sse, partialSolution.sse);
#endif

        return partialSolution.f[0];
    }

    inline virtual float compute(const std::vector<float>& weights, const std::vector<float>& input) override {
        float tmp = 0;
        size_t inputSize = input.size();
        for(size_t k = 0; k < inputSize; k++) {
            tmp += input[k]*weights[k];
        }
        return tmp;
    }

    virtual BasisFunction* clone() const override {
        return new Linear();
    }

    virtual std::string stringify() const override {
        return "{ \"class\": \"NeuralNetwork::BasisFunction::Linear\" }";
    }
};

}
}
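A minimal usage sketch of the linear basis function, assuming the include/ directory is on the include path; the scalar and SSE paths compute the same weighted sum (dot product) of weights and inputs:

#include <iostream>
#include <vector>
#include <NeuralNetwork/BasisFunction/Linear.h>

int main() {
    NeuralNetwork::BasisFunction::Linear linear;

    std::vector<float> weights = {0.5f, -1.0f, 2.0f, 0.25f, 1.0f};
    std::vector<float> input   = {1.0f,  2.0f, 3.0f, 4.0f,  5.0f};

    // Scalar and SSE paths should agree: 0.5 - 2 + 6 + 1 + 5 = 10.5
    std::cout << linear.compute(weights, input) << "\n";
    std::cout << linear.computeStreaming(weights, input) << "\n";
    std::cout << linear(weights, input) << "\n";   // operator() forwards to computeStreaming
    return 0;
}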
25  include/NeuralNetwork/BasisFunction/Radial.h  Normal file
@@ -0,0 +1,25 @@
#pragma once

#include <cmath>

#include "./BasisFunction.h"

namespace NeuralNetwork {
namespace BasisFunction {

class Radial: public BasisFunction {
public:
    Radial() {}

    // Minimal implementation so the class is concrete (clone() below would not compile otherwise):
    // the Euclidean distance between the input vector and the weight (centre) vector.
    virtual float operator()(const std::vector<float>& weights, const std::vector<float>& input) override {
        float sum = 0;
        for(size_t k = 0; k < input.size(); k++) {
            float d = input[k] - weights[k];
            sum += d*d;
        }
        return std::sqrt(sum);
    }

    virtual BasisFunction* clone() const override {
        return new Radial();
    }

    virtual std::string stringify() const override {
        return "{ \"class\": \"NeuralNetwork::BasisFunction::Radial\" }";
    }
};

}
}
23  include/NeuralNetwork/BasisFunction/StreamingBasisFunction.h  Normal file
@@ -0,0 +1,23 @@
#pragma once

#include <xmmintrin.h>

#include "./BasisFunction.h"

namespace NeuralNetwork {
namespace BasisFunction {

class StreamingBasisFunction : public BasisFunction {
public:
    // Helper union for reading individual lanes of an SSE register.
    union vec4f {
        __m128 sse;
        float f[4];
    };

    // The generic call operator forwards to the SSE implementation.
    virtual float operator()(const std::vector<float>& weights, const std::vector<float>& input) override {
        return computeStreaming(weights, input);
    }

    virtual float computeStreaming(const std::vector<float>& weights, const std::vector<float>& input) = 0;
    virtual float compute(const std::vector<float>& weights, const std::vector<float>& input) = 0;
};

}
}
33  include/NeuralNetwork/Layer.h  Normal file
@@ -0,0 +1,33 @@
#pragma once

#include <cstddef>
#include <vector>

#include "Neuron.h"

namespace NeuralNetwork {

/**
 * @author Tomas Cernik (Tom.Cernik@gmail.com)
 * @brief Abstract class for all layers of neurons
 */
class Layer {
public:
    virtual ~Layer() {}

    /**
     * @brief Virtual function for selecting a neuron
     * @param neuron is the position in the layer
     * @returns the selected neuron
     */
    virtual Neuron& operator[](const size_t& neuron) = 0;

    /**
     * @returns Size of the layer
     */
    virtual size_t size() const = 0;
};

}
55  include/NeuralNetwork/Network.h  Normal file
@@ -0,0 +1,55 @@
#pragma once

#include <cstddef>
#include <vector>

#include "Neuron.h"

#include "Stringifiable.h"

#include <ostream>
#include <sstream>

namespace NeuralNetwork {

/**
 * @author Tomas Cernik (Tom.Cernik@gmail.com)
 * @brief Abstract model of a simple network
 */
class Network : public Stringifiable {
public:
    /**
     * @brief Constructor for Network
     */
    inline Network() {}

    /**
     * @brief Virtual destructor for Network
     */
    virtual ~Network() {}

    /**
     * @brief Virtual function implemented by all networks
     * @param input is the input of the network
     * @returns output of the network
     */
    virtual std::vector<float> computeOutput(const std::vector<float>& input) = 0;

    /**
     * @brief Enables or disables threaded computation of the network
     * @param t is the number of threads; if set to 0 or 1, threading is disabled
     */
    inline virtual void setThreads(const unsigned& t) final { threads = t; }

    using Stringifiable::stringify;

protected:
    /**
     * @brief Number of threads used by the network
     */
    unsigned threads = 1;
};

}
71  include/NeuralNetwork/Neuron.h  Normal file
@@ -0,0 +1,71 @@
#pragma once

#include <string>

namespace NeuralNetwork {

/**
 * @author Tomas Cernik (Tom.Cernik@gmail.com)
 * @brief Abstract class of a neuron. All neuron classes should derive from this one.
 */
class Neuron {
public:
    /**
     * @brief Returns a unique id for the neuron
     */
    virtual unsigned long id() const = 0;

    /**
     * @brief Virtual destructor for Neuron
     */
    virtual ~Neuron() {}

    /**
     * @brief Virtual function for storing the neuron
     * @returns JSON describing the neuron and its state
     */
    virtual std::string stringify(const std::string &prefix = "") const = 0;

    /**
     * @brief Gets a weight
     * @param n is the input neuron
     */
    virtual float getWeight(const Neuron &n) const = 0;

    /**
     * @brief Sets a weight
     * @param n is the input neuron
     * @param w is the new weight for input neuron n
     */
    virtual void setWeight(const Neuron& n, const float &w) = 0;

    /**
     * @brief Returns the output of the neuron
     */
    virtual float output() const = 0;

    /**
     * @brief Returns the input (pre-activation value) of the neuron
     */
    virtual float value() const = 0;

    /**
     * @brief Returns the value of the derivative of the activation function
     */
    // virtual float derivatedOutput() const=0;

    /**
     * @brief Sets the bias for the neuron
     * @param bias is the new bias (initial value for the neuron)
     */
    virtual void setBias(const float &bias) = 0;

    /**
     * @brief Returns the bias of the neuron
     */
    virtual float getBias() const = 0;

protected:
};

}
88  include/NeuralNetwork/Recurrent/Network.h  Normal file
@@ -0,0 +1,88 @@
#pragma once

#include "../Network.h"
#include "Neuron.h"

#include <vector>

#include <sstream>
#include <iomanip>
#include <limits>

namespace NeuralNetwork {
namespace Recurrent {

/**
 * @author Tomas Cernik (Tom.Cernik@gmail.com)
 * @brief Recurrent model of an artificial neural network
 */
class Network: public NeuralNetwork::Network {
public:
    /**
     * @brief Constructor for Network
     * @param _inputSize is the number of inputs to the network
     * @param _outputSize is the size of the output of the network
     * @param hiddenUnits is the number of hidden units to be created
     */
    inline Network(size_t _inputSize, size_t _outputSize, size_t hiddenUnits = 0): NeuralNetwork::Network(), inputSize(_inputSize), outputSize(_outputSize), neurons(0) {
        for(size_t i = 0; i < _inputSize + _outputSize; i++) {
            addNeuron();
        }

        for(size_t i = 0; i < hiddenUnits; i++) {
            addNeuron();
        }
    }

    // todo: implement
    inline Network(const std::string &json) {
    }

    /**
     * @brief Virtual destructor for Network
     */
    virtual ~Network() {}

    /**
     * @brief Computes one iteration of the network
     * @param input is the input of the network
     * @returns output of the network
     */
    inline virtual std::vector<float> computeOutput(const std::vector<float>& input) override {
        return computeOutput(input, 1);
    }

    /**
     * @brief Computes several iterations of the network
     * @param input is the input of the network
     * @param iterations is the number of iterations
     * @returns output of the network
     */
    std::vector<float> computeOutput(const std::vector<float>& input, unsigned int iterations);

    std::vector<Neuron>& getNeurons() {
        return neurons;
    }

    using NeuralNetwork::Network::stringify;

    void stringify(std::ostream& out) const override;

    // Appends a new neuron and registers a zero-weight connection to it from every neuron
    // (including itself), so the connectivity stays fully populated.
    Neuron& addNeuron() {
        neurons.push_back(Recurrent::Neuron(neurons.size()));
        Neuron &newNeuron = neurons.back();
        for(size_t i = 0; i < neurons.size(); i++) {
            neurons[i].setWeight(newNeuron, 0.0);
        }
        return newNeuron;
    }

protected:
    size_t inputSize = 0;
    size_t outputSize = 0;

    std::vector<Recurrent::Neuron> neurons;
};

}
}
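A minimal usage sketch of the recurrent network, assuming the out-of-line definitions of computeOutput(input, iterations) and stringify(std::ostream&) (they live in a source file outside this diff) are linked in; the weight value, bias, and neuron indices below are purely illustrative:

#include <iostream>
#include <vector>
#include <NeuralNetwork/Recurrent/Network.h>

int main() {
    // 2 inputs, 1 output, 3 hidden units.
    NeuralNetwork::Recurrent::Network net(2, 1, 3);

    // Illustrative hand-wiring: connect neuron 0 to neuron 2 with weight 0.8.
    auto &neurons = net.getNeurons();
    neurons[2].setWeight(neurons[0], 0.8f);
    neurons[2].setBias(0.1f);

    // Run the recurrent dynamics for 5 iterations on a fixed input.
    std::vector<float> output = net.computeOutput({1.0f, 0.5f}, 5);
    for (float v : output) std::cout << v << "\n";

    std::cout << net.stringify() << "\n";   // JSON via Stringifiable::stringify()
    return 0;
}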
123  include/NeuralNetwork/Recurrent/Neuron.h  Normal file
@@ -0,0 +1,123 @@
#pragma once

#include "../Neuron.h"
#include <NeuralNetwork/ActivationFunction/Sigmoid.h>
#include <NeuralNetwork/BasisFunction/Linear.h>
#include <vector>

#include <sstream>
#include <iomanip>
#include <limits>

namespace NeuralNetwork {
namespace Recurrent {

class Network;

/**
 * @author Tomas Cernik (Tom.Cernik@gmail.com)
 * @brief Class of a recurrent neuron
 */
class Neuron : public NeuralNetwork::Neuron {
public:
    Neuron(unsigned long _id = 0, const float& _bias = 0): NeuralNetwork::Neuron(), basis(new BasisFunction::Linear),
                                                           activation(new ActivationFunction::Sigmoid(-4.9)),
                                                           id_(_id), bias(_bias), weights(_id+1), _output(0), _value(0) {
    }

    Neuron(const Neuron &r): NeuralNetwork::Neuron(), basis(r.basis->clone()), activation(r.activation->clone()), id_(r.id_),
                             bias(r.bias), weights(r.weights), _output(r._output), _value(r._value) {
    }

    virtual ~Neuron() {
        delete basis;
        delete activation;
    }

    virtual std::string stringify(const std::string &prefix = "") const override;

    Recurrent::Neuron& operator=(const NeuralNetwork::Recurrent::Neuron &r) {
        if(this != &r) {
            id_ = r.id_;
            bias = r.bias;
            weights = r.weights;
            // Release the previously owned functions before taking fresh clones.
            delete basis;
            delete activation;
            basis = r.basis->clone();
            activation = r.activation->clone();
            _output = r._output;
            _value = r._value;
        }
        return *this;
    }

    virtual long unsigned int id() const override {
        return id_;
    }

    /**
     * @brief Gets a weight
     * @param n is the input neuron
     */
    virtual float getWeight(const NeuralNetwork::Neuron &n) const override {
        return weights[n.id()];
    }

    /**
     * @brief Sets a weight
     * @param n is the input neuron
     * @param w is the new weight for input neuron n
     */
    virtual void setWeight(const NeuralNetwork::Neuron& n, const float &w) override {
        if(weights.size() < n.id()+1) {
            weights.resize(n.id()+1);
        }
        weights[n.id()] = w;
    }

    /**
     * @brief Returns the output of the neuron
     */
    virtual float output() const override {
        return _output;
    }

    /**
     * @brief Returns the input (pre-activation value) of the neuron
     */
    virtual float value() const override {
        return _value;
    }

    /**
     * @brief Sets the bias for the neuron
     * @param _bias is the new bias (initial value for the neuron)
     */
    virtual void setBias(const float &_bias) override {
        bias = _bias;
    }

    /**
     * @brief Returns the bias of the neuron
     */
    virtual float getBias() const override {
        return bias;
    }

    float operator()(const std::vector<float>& inputs) {
        // compute value: weighted sum of the inputs plus the bias
        _value = basis->operator()(weights, inputs) + bias;

        // compute output: activation of the value
        _output = activation->operator()(_value);

        return _output;
    }

protected:
    BasisFunction::BasisFunction *basis;
    ActivationFunction::ActivationFunction *activation;

    unsigned long id_;
    float bias;
    std::vector<float> weights;
    float _output;
    float _value;
};

}
}
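A minimal sketch of a single neuron update, assuming the headers above are on the include path; by default a neuron combines its inputs with the Linear basis (weighted sum plus bias) and passes the result through Sigmoid(-4.9). The weights and inputs below are purely illustrative:

#include <iostream>
#include <vector>
#include <NeuralNetwork/Recurrent/Neuron.h>

int main() {
    // Neuron with id 1; by default it uses a Linear basis and a Sigmoid(-4.9) activation.
    NeuralNetwork::Recurrent::Neuron n(1);
    n.setBias(0.5f);

    // weights[i] is the weight of the connection from neuron i; inputs are the other neurons' outputs.
    NeuralNetwork::Recurrent::Neuron other(0);
    n.setWeight(other, 0.3f);

    std::vector<float> inputs = {1.0f, 0.0f};   // outputs of neurons 0 and 1 from the previous step
    float out = n(inputs);                      // value = 0.3*1 + 0*0 + 0.5; output = sigmoid of value

    std::cout << n.value() << " " << out << "\n";
    return 0;
}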
29  include/NeuralNetwork/Stringifiable.h  Normal file
@@ -0,0 +1,29 @@
#pragma once

#include <sstream>

namespace NeuralNetwork {

class Stringifiable {
public:
    virtual ~Stringifiable() {
    }

    /**
     * @brief Virtual function that writes a description of the object to the stream
     */
    virtual void stringify(std::ostream& out) const = 0;

    // Convenience overload: renders the stream-based description into a string.
    virtual std::string stringify() final {
        std::ostringstream s;
        stringify(s);
        return s.str();
    }
};

static std::ostream& operator<<(std::ostream& o, const Stringifiable& n) {
    n.stringify(o);
    return o;
}

}
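A minimal sketch of the Stringifiable interface with a hypothetical implementer (Dummy is not part of this commit); both the stream operator and the string overload funnel into the same stringify(std::ostream&) override, and the using-declaration mirrors the one in Network.h that re-exposes the hidden string overload:

#include <iostream>
#include <NeuralNetwork/Stringifiable.h>

// Hypothetical implementer used only for illustration.
class Dummy : public NeuralNetwork::Stringifiable {
public:
    // Re-expose the string overload hidden by the override below
    // (the same reason Network.h has "using Stringifiable::stringify;").
    using NeuralNetwork::Stringifiable::stringify;

    void stringify(std::ostream& out) const override {
        out << "{ \"class\": \"Dummy\" }";
    }
};

int main() {
    Dummy d;
    std::cout << d << "\n";               // uses operator<< -> stringify(std::ostream&)
    std::cout << d.stringify() << "\n";   // string overload built on the same stream output
    return 0;
}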