#pragma once

#include "./ActivationFunction.h"

#include <cassert>
#include <memory> // std::unique_ptr in deserialize()

namespace NeuralNetwork {
namespace ActivationFunction {

// Leaky rectified linear unit scaled by lambda:
// f(x) = lambda*x for x > 0 and 0.01*lambda*x otherwise.
class LeakyRectifiedLinear: public ActivationFunction {
public:
LeakyRectifiedLinear(const float &lambdaP=0.04): lambda(lambdaP) {}

// Derivative of the activation: lambda on the positive side, 0.01*lambda on the leaky side.
inline virtual float derivatedOutput(const float &inp, const float &) const override {
return inp > 0.0f ? lambda : 0.01f*lambda;
}

// Kept consistent with derivatedOutput: slope lambda for x > 0, 0.01*lambda for the leaky side.
inline virtual float operator()(const float &x) const override {
return x > 0.0f ? lambda*x : 0.01f*lambda*x;
}

virtual ActivationFunction* clone() const override {
return new LeakyRectifiedLinear(lambda);
}

virtual SimpleJSON::Type::Object serialize() const override {
return {{"class", "NeuralNetwork::ActivationFunction::LeakyRectifiedLinear"}, {"lambda", lambda}};
}

static std::unique_ptr<LeakyRectifiedLinear> deserialize(const SimpleJSON::Type::Object &obj) {
return std::unique_ptr<LeakyRectifiedLinear>(new LeakyRectifiedLinear(obj["lambda"].as<double>()));
}

protected:
float lambda;

NEURAL_NETWORK_REGISTER_ACTIVATION_FUNCTION(NeuralNetwork::ActivationFunction::LeakyRectifiedLinear, LeakyRectifiedLinear::deserialize)
};

}
}
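
/*
 Usage sketch (illustrative comment only, assuming ActivationFunction.h provides the
 ActivationFunction base class, SimpleJSON types, and the registration macro used above):

	NeuralNetwork::ActivationFunction::LeakyRectifiedLinear act(0.04f);
	float y  = act(-2.0f);                    // leaky side: 0.01*lambda*x
	float dy = act.derivatedOutput(-2.0f, y); // leaky side slope: 0.01*lambda
	SimpleJSON::Type::Object json = act.serialize();
*/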