documentation progress

2015-01-26 19:08:16 +01:00
parent b6af3cec13
commit b59796b583
2 changed files with 43 additions and 9 deletions

View File

@@ -18,11 +18,10 @@ namespace NeuralNetwork
*/
const float lambda=0.8;
/**
* @author Tomas Cernik (Tom.Cernik@gmail.com)
* @brief Abstract class for all Layers of neurons
*/
class Layer
{
public:
@@ -45,7 +44,6 @@ namespace NeuralNetwork
/**
* @author Tomas Cernik (Tom.Cernik@gmail.com)
* @brief Abstract model of a simple Network
*/
class Network
{

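For orientation, the sketch below illustrates the composition these comments describe: a Layer groups Neurons and a Network groups Layers. The members of the library's Layer and Network classes are not visible in this diff, so every name and field below is a hypothetical stand-in for illustration, not the library's API.

// Hypothetical illustration only: DenseLayer and FeedForwardNetwork are
// stand-ins for concrete counterparts of the abstract Layer and Network;
// none of these members appear in the commit.
#include <cstddef>
#include <memory>
#include <utility>
#include <vector>
#include "Neuron.h"   // assumed header name for the Neuron class shown below

class DenseLayer
{
	public:
		// A layer groups a set of neurons ("Abstract class for all Layers of neurons")
		void addNeuron(std::unique_ptr<Shin::NeuralNetwork::Neuron> n)
		{
			neurons.push_back(std::move(n));
		}
		size_t size() const { return neurons.size(); }
	protected:
		std::vector<std::unique_ptr<Shin::NeuralNetwork::Neuron> > neurons;
};

class FeedForwardNetwork
{
	public:
		// A network is a sequence of layers ("Abstract model of a simple Network")
		void addLayer(DenseLayer &&layer) { layers.push_back(std::move(layer)); }
	protected:
		std::vector<DenseLayer> layers;
};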
View File

@@ -7,19 +7,55 @@ namespace Shin
{
namespace NeuralNetwork
{
/**
* @author Tomas Cernik (Tom.Cernik@gmail.com)
* @brief Abstract class of a neuron. All Neuron classes should derive from this one
*/
class Neuron
{
public:
/**
* @brief Default constructor for Neuron
*/
Neuron() {};
/**
* @brief virtual destructor for Neuron
*/
virtual ~Neuron() {};
/**
* @brief Returns potential of neuron
*/
virtual float getPotential() const =0;
/**
* @brief Sets potential of neuron
* @param p is the new potential
*/
virtual void setPotential(const float &p) =0;
/**
* @brief Returns the weight for the w-th input neuron
* @param w is the index of the input neuron
*/
virtual float getWeight(const size_t &w) const =0;
/**
* @brief Sets the weight of an input connection
* @param i is the index of the input neuron
* @param p is the new weight for input neuron i
*/
virtual void setWeight(const size_t &i, const float &p) =0;
/**
* @brief Returns output of neuron
*/
virtual float output() const =0;
/**
* @brief Returns input of neuron
*/
virtual float input() const=0;
/**
* @brief Returns the value of the derivative of the activation function
*/
virtual float derivatedOutput() const=0;
protected:
};
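To make the pure-virtual interface above concrete, here is a minimal sketch of a subclass with a sigmoid activation. It is not part of the commit: the class name SigmoidNeuron, the assumed header name Neuron.h, the 0-based weight indexing, and the reuse of 0.8 as the steepness constant (mirroring the lambda constant from the first file) are assumptions for illustration only.

// Hypothetical example, not part of the library: a concrete Neuron with a
// sigmoid activation, overriding every pure virtual member shown above.
#include <cmath>
#include <cstddef>
#include <vector>
#include "Neuron.h"   // assumed header name for the Neuron class above

class SigmoidNeuron : public Shin::NeuralNetwork::Neuron
{
	public:
		explicit SigmoidNeuron(size_t inputs) : potential(0.0f), weights(inputs, 0.0f) {}
		virtual ~SigmoidNeuron() {}

		virtual float getPotential() const { return potential; }
		virtual void setPotential(const float &p) { potential = p; }

		// Weight of the connection from the i-th input neuron (0-based here)
		virtual float getWeight(const size_t &i) const { return weights[i]; }
		virtual void setWeight(const size_t &i, const float &p) { weights[i] = p; }

		// The inner potential is treated as the neuron's input value
		virtual float input() const { return potential; }

		// Sigmoid activation: 1 / (1 + e^(-steepness * potential))
		virtual float output() const { return 1.0f / (1.0f + std::exp(-steepness * potential)); }

		// Derivative of the sigmoid expressed through its output,
		// the value a training algorithm such as backpropagation needs
		virtual float derivatedOutput() const
		{
			const float o = output();
			return steepness * o * (1.0f - o);
		}

	protected:
		static const float steepness;   // assumed to play the role of lambda (0.8)
		float potential;
		std::vector<float> weights;
};

const float SigmoidNeuron::steepness = 0.8f;

A network built from such neurons would then adjust each connection through setWeight during training; the training code itself is outside the scope of this commit.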