modified last namespace errors

2015-08-28 18:06:39 +02:00
parent 9a76222841
commit b6934fb161
10 changed files with 24 additions and 36 deletions

View File

@@ -192,7 +192,7 @@ Shin::Solution FeedForward::solve(const Shin::Problem& p)
prevSize=layerSizes[i];
sol=newSolution;
}
-Solution ret;
+Shin::Solution ret;
for(size_t i=1;i<prevSize;i++)
{
ret.push_back(sol[i]);
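Side note on the recurring pattern in this commit: once the outer namespace Shin { ... } wrapper is dropped from the NeuralNetwork headers (see the header diffs below), unqualified names that live in Shin, such as Solution and Problem, no longer resolve from inside NeuralNetwork, so every use must be spelled Shin::Solution / Shin::Problem. A minimal sketch of the lookup issue (reduced to essentials, not code from this repository):

namespace Shin { struct Solution { }; }  // Solution stays in namespace Shin

namespace NeuralNetwork {                // no longer nested inside Shin
Shin::Solution solve()
{
    // Solution ret;     // error: 'Solution' was not declared in this scope
    Shin::Solution ret;  // full qualification is now required
    return ret;
}
}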

View File

@@ -102,7 +102,7 @@ namespace NeuralNetwork
* @param lam is a parameter for TransferFunction
* @param weightInit is the weight initializer function
*/
-FeedForward(std::initializer_list<size_t> s, double lam=Shin::NeuralNetwork::lambda,
+FeedForward(std::initializer_list<size_t> s, double lam=NeuralNetwork::lambda,
FeedForwardInitializer weightInit=
[](const size_t&, const size_t &, const size_t &)->float{ return 1.0-((float)(rand()%2001))/1000.0;}
);
@@ -120,7 +120,7 @@ namespace NeuralNetwork
/**
* @brief computes output Solution from input Problem
*/
-virtual Solution solve(const Problem& p) override;
+virtual Shin::Solution solve(const Shin::Problem& p) override;
virtual size_t size() const override { return layers;};
virtual FFLayer& operator[](const size_t& l) override;
protected:

View File

@@ -1,6 +1,6 @@
#include "./BackPropagation"
-Shin::NeuralNetwork::Learning::BackPropagation::~BackPropagation()
+NeuralNetwork::Learning::BackPropagation::~BackPropagation()
{
if(deltas!=nullptr)
{
@@ -10,7 +10,7 @@ Shin::NeuralNetwork::Learning::BackPropagation::~BackPropagation()
delete[] deltas;
}
-void Shin::NeuralNetwork::Learning::BackPropagation::propagate(const Shin::Solution& expectation)
+void NeuralNetwork::Learning::BackPropagation::propagate(const Shin::Solution& expectation)
{
if(deltas==nullptr)
@@ -87,12 +87,12 @@ void Shin::NeuralNetwork::Learning::BackPropagation::propagate(const Shin::Solut
}
-float Shin::NeuralNetwork::Learning::BackPropagation::teach(const Shin::Problem& p, const Shin::Solution& solution)
+float NeuralNetwork::Learning::BackPropagation::teach(const Shin::Problem& p, const Shin::Solution& solution)
{
Shin::Solution a=network.solve(p);
double error=calculateError(solution,a);
-Solution s;
+Shin::Solution s;
if(noise)
{
for(size_t i=0;i<solution.size();i++)

View File

@@ -21,8 +21,6 @@
*
*/
-namespace Shin
-{
namespace NeuralNetwork
{
namespace Learning
@@ -33,11 +31,11 @@ namespace Learning
BackPropagation(FeedForward &n): Learning(), network(n) {}
virtual ~BackPropagation();
-BackPropagation(const Shin::NeuralNetwork::Learning::BackPropagation&) =delete;
-BackPropagation operator=(const Shin::NeuralNetwork::Learning::BackPropagation&) =delete;
+BackPropagation(const NeuralNetwork::Learning::BackPropagation&) =delete;
+BackPropagation operator=(const NeuralNetwork::Learning::BackPropagation&) =delete;
-float teach(const Problem &p,const Solution &solution);
-virtual void propagate(const Solution& expectation);
+float teach(const Shin::Problem &p,const Shin::Solution &solution);
+virtual void propagate(const Shin::Solution& expectation);
protected:
FeedForward &network;
@@ -47,5 +45,4 @@ namespace Learning
};
}
}
-}
#endif
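The header change above is the core of the commit: the outer namespace Shin wrapper goes away (the two lines near the top plus one matching closing brace before #endif), leaving the declarations one nesting level shallower, while Problem and Solution stay in Shin and are referenced with explicit qualification instead. Schematically (class bodies elided):

// before this commit: three nested namespaces
namespace Shin { namespace NeuralNetwork { namespace Learning {
class BackPropagation { /* ... */ };
}}}

// after this commit: the Shin level is gone
namespace NeuralNetwork { namespace Learning {
class BackPropagation { /* ... */ };
}}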

View File

@@ -1,6 +1,6 @@
#include "Learning.h"
-float Shin::NeuralNetwork::Learning::Learning::calculateError(const Shin::Solution& expectation, const Shin::Solution& solution)
+float NeuralNetwork::Learning::Learning::calculateError(const Shin::Solution& expectation, const Shin::Solution& solution)
{
register float a=0;
for (size_t i=0;i<expectation.size();i++)
@@ -10,7 +10,7 @@ float Shin::NeuralNetwork::Learning::Learning::calculateError(const Shin::Soluti
return a;
}
-float Shin::NeuralNetwork::Learning::Learning::teachSet(const std::vector<std::pair<Shin::Problem,Shin::Solution>> &set)
+float NeuralNetwork::Learning::Learning::teachSet(const std::vector<std::pair<Shin::Problem,Shin::Solution>> &set)
{
double error=0;
for (register size_t i=0;i<set.size();i++)
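Aside: the register storage class seen in the context lines above has been deprecated since C++11 and was removed in C++17; compilers ignore it as an optimization hint. The body of the calculateError loop is cut off in this view, but the name and the accumulator suggest a plain squared-error sum. A hedged restatement under that assumption, treating Shin::Solution as a std::vector<float>-like container:

#include <cstddef>
#include <vector>

// Sketch only: assumes Shin::Solution behaves like std::vector<float> and
// that the elided loop body accumulates squared differences.
float calculateErrorSketch(const std::vector<float>& expectation,
                           const std::vector<float>& solution)
{
    float a = 0;  // 'register' dropped: deprecated in C++11, removed in C++17
    for (std::size_t i = 0; i < expectation.size(); ++i) {
        const float d = expectation[i] - solution[i];
        a += d * d;
    }
    return a;
}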

View File

@@ -6,8 +6,6 @@
#include "../../Solution.h"
#include "../FeedForward.h"
-namespace Shin
-{
namespace NeuralNetwork
{
namespace Learning
@@ -28,9 +26,9 @@ namespace Learning
inline virtual void disableNoise() final {noise=0;}
inline virtual void setNoiseSize(const unsigned& milipercents) final { noiseSize=milipercents; }
-float calculateError(const Solution &expectation,const Solution &solution);
-virtual float teach(const Problem &p,const Solution &solution)=0;
-virtual float teachSet(const std::vector<std::pair<Problem,Solution>> &set) final;
+float calculateError(const Shin::Solution &expectation,const Shin::Solution &solution);
+virtual float teach(const Shin::Problem &p,const Shin::Solution &solution)=0;
+virtual float teachSet(const std::vector<std::pair<Shin::Problem,Shin::Solution>> &set) final;
protected:
float learningCoeficient=LearningCoeficient;
@@ -40,5 +38,4 @@ namespace Learning
};
}
}
-}
#endif

View File

@@ -1,8 +0,0 @@
-#include "./OpticalBackPropagation"
-float Shin::NeuralNetwork::Learning::OpticalBackPropagation::correction(const float& expected, const float& computed)
-{
-register float tmp=(expected-computed);
-register float ret=1+exp(tmp*tmp);
-return tmp < 0? -ret:ret;
-}
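This translation unit is deleted outright (@@ -1,8 +0,0 @@): the correction definition moves into the header as an inline member (next diff), and the matching object file is dropped from the Makefile further down. The function itself maps the raw error expected - computed to a sign-preserving factor of magnitude 1 + exp(err^2). Restated standalone (a sketch, using std::exp from <cmath> instead of the unqualified exp above):

#include <cmath>

// Optical-backpropagation correction factor: magnitude 1 + exp(err^2),
// sign taken from err = expected - computed.
float opticalCorrection(float expected, float computed)
{
    const float err = expected - computed;
    const float magnitude = 1.0f + std::exp(err * err);
    return err < 0 ? -magnitude : magnitude;
}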

View File

@@ -8,8 +8,6 @@
* http://proceedings.informingscience.org/InSITE2005/P106Otai.pdf
*/
-namespace Shin
-{
namespace NeuralNetwork
{
namespace Learning
@@ -19,9 +17,13 @@ namespace Learning
public:
inline OpticalBackPropagation(FeedForward &n): BackPropagation(n) {}
protected:
-virtual float correction(const float& expected, const float& computed) override;
+virtual float correction(const float& expected, const float& computed) override
+{
+register float tmp=(expected-computed);
+register float ret=1+exp(tmp*tmp);
+return tmp < 0? -ret:ret;
+};
};
}
}
-}
#endif
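Defining correction inside the class body makes it implicitly inline, which is what lets the .cpp above disappear and its object file leave the Makefile (next diff) without one-definition-rule problems. A hypothetical usage sketch, with the constructor and teach signatures taken from the diffs in this commit and the layer sizes purely illustrative:

// assumes the FeedForward and OpticalBackPropagation headers are included
NeuralNetwork::FeedForward net{2, 3, 1};
NeuralNetwork::Learning::OpticalBackPropagation learner(net);

Shin::Problem p;        // input values, filled elsewhere
Shin::Solution target;  // expected output
float err = learner.teach(p, target);  // solve(), then backpropagate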

View File

@@ -1,6 +1,6 @@
OBJFILES=\
FeedForward.o\
-Learning/Learning.o Learning/BackPropagation.o Learning/OpticalBackPropagation.o ../sse_mathfun.o
+Learning/Learning.o Learning/BackPropagation.o ../sse_mathfun.o
LINKFILES=

View File

@@ -62,7 +62,7 @@ namespace NeuralNetwork
* @param p is a Problem to be solved
* @returns Solution of Network for Problem
*/
-virtual Solution solve(const Problem&p)=0;
+virtual Shin::Solution solve(const Shin::Problem&p)=0;
/**
* @brief Getter of layer