preparing documentation

2015-01-28 18:37:04 +01:00
parent b59796b583
commit 0f6efb8be6
8 changed files with 476 additions and 6 deletions


@@ -33,7 +33,7 @@ FFNeuron& FFLayer::operator[](const size_t& neuron)
}
-FeedForward::FeedForward(std::initializer_list<size_t> s, double lam, std::function<float(const size_t&layer, const size_t &neuron, const size_t &weight)> weightInit): ACyclicNetwork(lam),layers(s.size())
+FeedForward::FeedForward(std::initializer_list<size_t> s, double lam, FeedForwardInitializer weightInit): ACyclicNetwork(lam),layers(s.size())
{
transfer = new TransferFunction::TransferFunction*[s.size()];
weights= new float**[s.size()];


@@ -4,9 +4,9 @@
#include "../Problem"
#include "../Solution"
#include "Network"
#include "TransferFunction/Sigmoid.h"
#include "TransferFunction/TransferFunction.h"
#include "TransferFunction/HyperbolicTangent.h"
#include <vector>
#include <initializer_list>
@@ -78,16 +78,38 @@ namespace NeuralNetwork
float lambda;
};
/**
* @brief Typedef for the FeedForward network weight-initialization function
*/
typedef std::function<float(const size_t&layer, const size_t &neuron, const size_t &weight)> FeedForwardInitializer;
/**
* @author Tomas Cernik (Tom.Cernik@gmail.com)
* @brief Class representing FeedForward network
* @see ACyclicNetwork
*/
class FeedForward:public ACyclicNetwork
{
public:
/**
* @brief Constructor for FeedForward
* @param s is the initializer list of layer sizes
* @param lam is the parameter for the TransferFunction
* @param weightInit is the weight initializer function
*/
FeedForward(std::initializer_list<size_t> s, double lam=Shin::NeuralNetwork::lambda,
-std::function<float(const size_t&layer, const size_t &neuron, const size_t &weight)> weightInit=
+FeedForwardInitializer weightInit=
[](const size_t&, const size_t &, const size_t &)->float{ return 1.0-((float)(rand()%2001))/1000.0;}
);
virtual ~FeedForward();
/**
* @brief we don't want to allow the network to be copied
*/
FeedForward(const FeedForward &f) = delete; //TODO
/**
* @brief we don't want to allow the network to be copy-assigned
*/
FeedForward operator=(const FeedForward &f)=delete;
virtual Solution solve(const Problem& p) override;
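
A minimal usage sketch of the constructor documented above, assuming the header is reachable under the name shown below and that the class resolves as NeuralNetwork::FeedForward as the hunk header suggests (add a Shin:: prefix if the namespace is nested); the topology and include path are illustrative assumptions, not part of this commit:

#include <cstddef>
#include "FeedForward"   // assumed header name; the repository's local includes omit the .h extension

int main()
{
    // 2-3-1 topology with the defaults: lambda from Shin::NeuralNetwork and random weights in [-1, 1].
    NeuralNetwork::FeedForward net({2, 3, 1});

    // Same topology, but every weight starts at zero via a custom FeedForwardInitializer.
    NeuralNetwork::FeedForward zeroNet({2, 3, 1}, Shin::NeuralNetwork::lambda,
        [](const size_t &, const size_t &, const size_t &) -> float { return 0.0f; });

    // NeuralNetwork::FeedForward copy(net);   // would not compile: the copy constructor is deleted
    // zeroNet = net;                          // would not compile: copy assignment is deleted

    return 0;
}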
@@ -103,7 +125,7 @@ namespace NeuralNetwork
float **inputs=nullptr;
TransferFunction::TransferFunction **transfer=nullptr;
size_t *layerSizes=nullptr;
size_t layers;
size_t layers; /**< Number of layers */
};
}
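
For reference, the default initializer above evaluates 1.0 - ((float)(rand() % 2001)) / 1000.0; since rand() % 2001 lies in [0, 2000], the initial weights fall in [-1.0, 1.0] in steps of 0.001. A standalone sketch that checks this range, with the lambda copied from the header:

#include <algorithm>
#include <cstddef>
#include <cstdio>
#include <cstdlib>

int main()
{
    // Same expression as the default FeedForwardInitializer in the header above.
    auto defaultInit = [](const size_t &, const size_t &, const size_t &) -> float {
        return 1.0 - ((float)(rand() % 2001)) / 1000.0;
    };

    float lo = 1.0f, hi = -1.0f;
    for (int i = 0; i < 100000; ++i) {
        float w = defaultInit(0, 0, 0);
        lo = std::min(lo, w);
        hi = std::max(hi, w);
    }

    std::printf("observed weight range: [%f, %f]\n", lo, hi);   // stays within [-1, 1]
    return 0;
}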