Refactored and recurrent implementation

2016-01-22 13:21:34 +01:00
parent e61e616227
commit d424d87535
65 changed files with 12102 additions and 2361 deletions


@@ -0,0 +1,42 @@
#pragma once
#include <string>
namespace NeuralNetwork {
namespace ActivationFunction {
/**
* @author Tomas Cernik (Tom.Cernik@gmail.com)
* @brief Abstract base class for activation functions
*/
class ActivationFunction {
public:
virtual ~ActivationFunction() {}
/**
* @brief Returns the derivative of the function, given its input and its already-computed output
* @param input is the input of the function
* @param output is the corresponding output of the function
*/
virtual float derivatedOutput(const float &input,const float &output)=0;
/**
* @brief Evaluates the function
* @param x is the input of the function
*/
virtual float operator()(const float &x)=0;
/**
* @brief Returns a heap-allocated copy of this activation function; the caller takes ownership
*/
virtual ActivationFunction* clone() const = 0;
/**
* @brief Serializes the activation function so it can be stored
* @returns JSON string describing the function
*/
virtual std::string stringify() const =0;
};
}
}
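For orientation, below is a minimal sketch of a concrete implementation of this interface (illustrative only, not part of this commit; the Identity class is hypothetical). Passing both the input and the already-computed output to derivatedOutput lets subclasses express the derivative in terms of the output alone, avoiding a second evaluation of the transcendental during backpropagation.
#pragma once
#include <string>
#include "./ActivationFunction.h"
namespace NeuralNetwork {
namespace ActivationFunction {
/**
* @brief Hypothetical identity function f(x) = x, shown only to illustrate the interface
*/
class Identity: public ActivationFunction {
public:
// f'(x) = 1 for every input
virtual float derivatedOutput(const float &, const float &) override { return 1.0f; }
// f(x) = x
virtual float operator()(const float &x) override { return x; }
// polymorphic copy; the caller owns the returned pointer
virtual ActivationFunction* clone() const override { return new Identity(); }
// JSON description, mirroring the format used by the classes in this commit
virtual std::string stringify() const override {
return "{ \"class\": \"NeuralNetwork::ActivationFunction::Identity\"}";
}
};
}
}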


@@ -0,0 +1,26 @@
#pragma once
#include "./ActivationFunction.h"
namespace NeuralNetwork {
namespace ActivationFunction {
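/**
* @brief Heaviside (step) activation function; lambda acts as the firing threshold
*/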
class Heaviside: public ActivationFunction {
public:
Heaviside(const float &lambdaP=1.0): lambda(lambdaP) {}
// The true step-function derivative is 0 almost everywhere; returning a constant 1.0 simply passes the error through
inline virtual float derivatedOutput(const float &,const float &) override { return 1.0f; }
inline virtual float operator()(const float &x) override { return x > lambda ? 1.0f : 0.0f; }
virtual ActivationFunction* clone() const override {
return new Heaviside(lambda);
}
virtual std::string stringify() const override {
return "{ \"class\": \"NeuralNetwork::ActivationFunction::Heaviside\", \"lamba\" : "+std::to_string(lambda)+"}";
}
protected:
float lambda;
};
}
}
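Note that, unlike the other activation functions in this commit where lambda scales the input, Heaviside uses lambda as a threshold. A quick illustrative usage (not part of the commit; the include path is an assumption):
#include <cassert>
#include "Heaviside.h" // path assumed
int main() {
NeuralNetwork::ActivationFunction::Heaviside step(0.5f); // fires for inputs above 0.5
assert(step(0.4f) == 0.0f);
assert(step(0.6f) == 1.0f);
assert(step.derivatedOutput(0.6f, 1.0f) == 1.0f); // constant pseudo-derivative
return 0;
}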


@@ -0,0 +1,27 @@
#pragma once
#include "./ActivationFunction.h"
#include <cmath>
namespace NeuralNetwork {
namespace ActivationFunction {
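/**
* @brief Hyperbolic tangent activation function; lambda scales the input (steepness)
*/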
class HyperbolicTangent: public ActivationFunction {
public:
HyperbolicTangent(const float& lam=1):lambda(lam) {}
// d/dx tanh(lambda*x) = lambda*(1 - tanh^2(lambda*x)) = lambda*(1 - output*output)
inline virtual float derivatedOutput(const float&,const float &output) override { return lambda*(1-output*output); }
inline virtual float operator()(const float &x) override { return std::tanh(lambda*x); }
virtual ActivationFunction* clone() const override {
return new HyperbolicTangent(lambda);
}
virtual std::string stringify() const override {
return "{ \"class\": \"NeuralNetwork::ActivationFunction::HyperbolicTangent\", \"lamba\" : "+std::to_string(lambda)+"}";
}
protected:
float lambda;
};
}
}


@@ -0,0 +1,35 @@
#pragma once
#include <cmath>
#include "./StreamingActivationFunction.h"
#include "../../sse_mathfun.h"
namespace NeuralNetwork {
namespace ActivationFunction {
/**
* @author Tomas Cernik (Tom.Cernik@gmail.com)
* @brief Sigmoid activation function with both a scalar and an SSE evaluation path
*/
class Sigmoid: public StreamingActivationFunction {
public:
// The sigmoid's usual minus sign is folded into lambda, hence the negative default
Sigmoid(const float lambdaP = -0.5): lambda(lambdaP) {}
inline virtual float derivatedOutput(const float&,const float &output) override { return lambda*output*(1.0f-output); }
inline virtual float operator()(const float &x) override { return 1.0f / (1.0f + std::exp(lambda*x)); }
inline virtual __m128 operator()(const __m128 &x) override {
// exp_ps is extremely slow!
return _mm_div_ps(_mm_set1_ps(1.0),_mm_add_ps(exp_ps(_mm_mul_ps(_mm_set1_ps(lambda),x)),_mm_set1_ps(1.0)));
}
virtual ActivationFunction* clone() const override {
return new Sigmoid(lambda);
}
virtual std::string stringify() const override {
return "{ \"class\": \"NeuralNetwork::ActivationFunction::Sigmoid\", \"lamba\" : "+std::to_string(lambda)+"}";
}
protected:
float lambda;
};
}
}
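A short usage sketch of the two evaluation paths, scalar and SSE (illustrative only, not part of the commit; the include path is an assumption, and an x86 target with SSE and the bundled sse_mathfun.h is presumed):
#include <cstdio>
#include <xmmintrin.h>
#include "Sigmoid.h" // path assumed
int main() {
NeuralNetwork::ActivationFunction::Sigmoid sigmoid; // default lambda = -0.5
std::printf("f(0) = %f\n", sigmoid(0.0f)); // scalar path, prints 0.5
__m128 x = _mm_set_ps(3.0f, 2.0f, 1.0f, 0.0f); // packs {0, 1, 2, 3}, lowest lane last
float out[4];
_mm_storeu_ps(out, sigmoid(x)); // SSE path: four evaluations at once
for (int i = 0; i < 4; ++i) {
std::printf("f(%d) = %f\n", i, out[i]);
}
return 0;
}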


@@ -0,0 +1,26 @@
#pragma once
#include <xmmintrin.h>
#include "./ActivationFunction.h"
namespace NeuralNetwork {
namespace ActivationFunction {
/**
* @author Tomas Cernik (Tom.Cernik@gmail.com)
* @brief Abstract base class for activation functions with SSE support
*/
class StreamingActivationFunction : public ActivationFunction {
public:
// scalar interface re-declared from ActivationFunction (still pure virtual)
virtual float derivatedOutput(const float &input,const float &output)=0;
virtual float operator()(const float &x)=0;
/**
* @brief Evaluates the function for four inputs at once
* @param x is an SSE register holding four packed float values
*/
virtual __m128 operator()(const __m128 &x)=0;
};
}
}