refactored and recurrent implementation
35
include/NeuralNetwork/ActivationFunction/Sigmoid.h
Normal file
@@ -0,0 +1,35 @@
#pragma once

#include <cmath>

#include "./StreamingActivationFunction.h"
#include "../../sse_mathfun.h"

namespace NeuralNetwork {
namespace ActivationFunction {

	/**
	 * @author Tomas Cernik (Tom.Cernik@gmail.com)
	 * @brief Class for computing the sigmoid activation function
	 */
	class Sigmoid: public StreamingActivationFunction {
	public:
		Sigmoid(const float lambdaP = -0.5f): lambda(lambdaP) {}
		inline virtual float derivatedOutput(const float&, const float &output) override { return lambda*output*(1.0f-output); }
		inline virtual float operator()(const float &x) override { return 1.0f / (1.0f + std::exp(lambda*x)); }
		inline virtual __m128 operator()(const __m128 &x) override {
			// exp_ps is extremely slow!
			return _mm_div_ps(_mm_set1_ps(1.0f), _mm_add_ps(exp_ps(_mm_mul_ps(_mm_set1_ps(lambda), x)), _mm_set1_ps(1.0f)));
		}
		virtual ActivationFunction* clone() const override {
			return new Sigmoid(lambda);
		}

		virtual std::string stringify() const override {
			return "{ \"class\": \"NeuralNetwork::ActivationFunction::Sigmoid\", \"lambda\" : " + std::to_string(lambda) + "}";
		}
	protected:
		float lambda;
	};
}
}
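For readers trying out this header, here is a minimal usage sketch; it is not part of the commit. It assumes the repository's include/ directory is on the compiler's search path, that the StreamingActivationFunction and ActivationFunction base headers build as-is with Sigmoid overriding all of their pure-virtual members, and that the target supports SSE. The main() function and sample inputs are illustrative only.

#include <cstdio>
#include <xmmintrin.h>
#include "NeuralNetwork/ActivationFunction/Sigmoid.h"

int main() {
	// Default lambda is -0.5f, so operator() evaluates 1 / (1 + exp(-0.5 * x)).
	NeuralNetwork::ActivationFunction::Sigmoid sigmoid;

	// Scalar path.
	float y = sigmoid(1.0f);

	// SSE path: the __m128 overload evaluates four inputs at once.
	__m128 v = _mm_set_ps(2.0f, 1.0f, 0.0f, -1.0f);
	__m128 out = sigmoid(v);

	float r[4];
	_mm_storeu_ps(r, out);
	std::printf("scalar: %f, vector: %f %f %f %f\n", y, r[0], r[1], r[2], r[3]);
	return 0;
}

Note that the header's own comment flags exp_ps as very slow, so the vectorised path may not beat four scalar calls; it is worth benchmarking on the target CPU.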