diff --git a/src/IO b/src/IO
deleted file mode 120000
index fe29c48..0000000
--- a/src/IO
+++ /dev/null
@@ -1 +0,0 @@
-./IO.h
\ No newline at end of file
diff --git a/src/IO.cpp b/src/IO.cpp
deleted file mode 100644
index c19f90e..0000000
--- a/src/IO.cpp
+++ /dev/null
@@ -1,15 +0,0 @@
-#include "./IO"
-
-Shin::IO Shin::IO::operator+(const IO &r)
-{
-	Shin::NeuronNetwork::IO tmp;
-	for(float a:this->data)
-	{
-		tmp.data.push_back(a);
-	}
-	for(float a:r.data)
-	{
-		tmp.data.push_back(a);
-	}
-	return tmp;
-}
\ No newline at end of file
diff --git a/src/IO.h b/src/IO.h
deleted file mode 100644
index 12acb00..0000000
--- a/src/IO.h
+++ /dev/null
@@ -1,28 +0,0 @@
-#ifndef _NN_IO_H_
-#define _NN_IO_H_
-
-#include <vector>
-#include <initializer_list>
-
-namespace Shin
-{
-class IO
-{
-	public:
-		IO() {};
-		IO(const std::vector<float> &d) : data(d) {}
-		IO(const IO &old) : data(old.data) {}
-		IO(const std::initializer_list<float> &a):data(a) { }
-		virtual ~IO() {};
-		IO operator+(const IO &r);
-		inline virtual operator std::vector<float>&() final {return data;}
-		inline virtual operator std::vector<float>() final {return data;}
-		virtual float& operator[] (size_t pos) final { return data[pos];}
-		virtual float operator[] (size_t pos) const final { return data[pos];}
-		inline virtual size_t size() const final {return data.size();}
-	protected:
-		std::vector<float> data = {};
-	private:
-};
-}
-#endif
\ No newline at end of file
diff --git a/src/NeuralNetwork/LayerNetwork.cpp b/src/NeuralNetwork/LayerNetwork.cpp
index 18fbd13..e3b04a9 100644
--- a/src/NeuralNetwork/LayerNetwork.cpp
+++ b/src/NeuralNetwork/LayerNetwork.cpp
@@ -136,7 +136,7 @@ void LayerNetwork::solvePart(float *newSolution, register size_t begin, size_t e
 	}
 }
 
-Shin::Solution LayerNetwork::solve(const Shin::Problem& p)
+std::vector<float> LayerNetwork::solve(const std::vector<float>& p)
 {
 	register float* sol=outputs[0];
 
@@ -175,7 +175,7 @@ Shin::Solution LayerNetwork::solve(const Shin::Problem& p)
 		prevSize=layerSizes[i];
 		sol=newSolution;
 	}
-	Shin::Solution ret;
+	std::vector<float> ret;
 	for(size_t i=1;i
diff --git a/src/NeuralNetwork/LayerNetwork.h b/src/NeuralNetwork/LayerNetwork.h
--- a/src/NeuralNetwork/LayerNetwork.h
+++ b/src/NeuralNetwork/LayerNetwork.h
@@ ... @@
-		virtual Shin::Solution solve(const Shin::Problem& p) override;
+		virtual std::vector<float> solve(const std::vector<float>& input) override;
 		virtual LayerNetworkLayer& operator[](const size_t& l) override;
 
 	protected:
diff --git a/src/NeuralNetwork/Learning/BackPropagation.cpp b/src/NeuralNetwork/Learning/BackPropagation.cpp
index c77b7e3..238bb24 100644
--- a/src/NeuralNetwork/Learning/BackPropagation.cpp
+++ b/src/NeuralNetwork/Learning/BackPropagation.cpp
@@ -10,7 +10,7 @@ NeuralNetwork::Learning::BackPropagation::~BackPropagation()
 	delete[] deltas;
 }
 
-void NeuralNetwork::Learning::BackPropagation::propagate(const Shin::Solution& expectation)
+void NeuralNetwork::Learning::BackPropagation::propagate(const std::vector<float>& expectation)
 {
 
 	if(deltas==nullptr)
@@ -87,12 +87,12 @@ void NeuralNetwork::Learning::BackPropagation::propagate(const Shin::Solution& e
 }
 
-float NeuralNetwork::Learning::BackPropagation::teach(const Shin::Problem& p, const Shin::Solution& solution)
+float NeuralNetwork::Learning::BackPropagation::teach(const std::vector<float>& p, const std::vector<float>& solution)
 {
-	Shin::Solution a=network.solve(p);
+	std::vector<float> a=network.solve(p);
 	double error=calculateError(solution,a);
-	Shin::Solution s;
+	std::vector<float> s;
 	if(noise)
 	{
 		for(size_t i=0;i
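Note: teach() above still gets its return value from calculateError(solution, a), where a = network.solve(p). The body of calculateError is cut off in this rendering of the patch; judging from the float accumulator and the expectation/solution parameters it is presumably a sum of squared per-output differences. The self-contained sketch below is an illustrative stand-in under that assumption, not the verbatim source, and the function name is arbitrary:

    #include <cstddef>
    #include <vector>

    // Assumed behaviour of Learning::calculateError: accumulate the squared
    // difference between expected and produced output for every output neuron.
    float calculateErrorSketch(const std::vector<float>& expectation,
                               const std::vector<float>& solution)
    {
        float a = 0.0f;
        for (std::size_t i = 0; i < expectation.size(); ++i)
        {
            const float d = expectation[i] - solution[i];
            a += d * d; // per-neuron squared error
        }
        return a;
    }

    int main()
    {
        // 0.5^2 + 0.5^2 = 0.5, so this exits with 0.
        return calculateErrorSketch({1.0f, 0.0f}, {0.5f, 0.5f}) > 0.0f ? 0 : 1;
    }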
diff --git a/src/NeuralNetwork/Learning/BackPropagation.h b/src/NeuralNetwork/Learning/BackPropagation.h
--- a/src/NeuralNetwork/Learning/BackPropagation.h
+++ b/src/NeuralNetwork/Learning/BackPropagation.h
@@ ... @@
 #include
-#include "../../Solution.h"
 #include "../LayerNetwork.h"
 #include "Learning.h"
 
@@ -34,8 +33,8 @@ namespace Learning
 		BackPropagation(const NeuralNetwork::Learning::BackPropagation&) =delete;
 		BackPropagation operator=(const NeuralNetwork::Learning::BackPropagation&) =delete;
 
-		float teach(const Shin::Problem &p,const Shin::Solution &solution);
-		virtual void propagate(const Shin::Solution& expectation);
+		float teach(const std::vector<float>&p,const std::vector<float>&solution);
+		virtual void propagate(const std::vector<float>& expectation);
 
 	protected:
 		LayerNetwork &network;
diff --git a/src/NeuralNetwork/Learning/Learning.cpp b/src/NeuralNetwork/Learning/Learning.cpp
index eaea863..a2041c7 100644
--- a/src/NeuralNetwork/Learning/Learning.cpp
+++ b/src/NeuralNetwork/Learning/Learning.cpp
@@ -1,6 +1,6 @@
 #include "Learning.h"
 
-float NeuralNetwork::Learning::Learning::calculateError(const Shin::Solution& expectation, const Shin::Solution& solution)
+float NeuralNetwork::Learning::Learning::calculateError(const std::vector<float>& expectation, const std::vector<float>& solution)
 {
 	register float a=0;
 	for (size_t i=0;i
@@ ... @@
-float NeuralNetwork::Learning::Learning::teachSet(const std::vector<std::pair<Shin::Problem,Shin::Solution>> &set)
+float NeuralNetwork::Learning::Learning::teachSet(const std::vector<std::pair<std::vector<float>,std::vector<float>>> &set)
 {
 	double error=0;
 	for (register size_t i=0;i
diff --git a/src/NeuralNetwork/Learning/Learning.h b/src/NeuralNetwork/Learning/Learning.h
--- a/src/NeuralNetwork/Learning/Learning.h
+++ b/src/NeuralNetwork/Learning/Learning.h
@@ ... @@
-#include "../../Solution.h"
 #include "../FeedForward.h"
 
 namespace NeuralNetwork
 {
 namespace Learning
 {
@@ ... @@
 		inline virtual void disableNoise() final {noise=0;}
 		inline virtual void setNoiseSize(const unsigned& milipercents) final { noiseSize=milipercents; }
 
-		float calculateError(const Shin::Solution &expectation,const Shin::Solution &solution);
-		virtual float teach(const Shin::Problem &p,const Shin::Solution &solution)=0;
-		virtual float teachSet(const std::vector<std::pair<Shin::Problem,Shin::Solution>> &set) final;
+		float calculateError(const std::vector<float> &expectation,const std::vector<float> &solution);
+		virtual float teach(const std::vector<float> &p,const std::vector<float> &solution)=0;
+		virtual float teachSet(const std::vector<std::pair<std::vector<float>,std::vector<float>>> &set) final;
 
 	protected:
 		float learningCoeficient=LearningCoeficient;
diff --git a/src/NeuralNetwork/Makefile b/src/NeuralNetwork/Makefile
index 3ba2569..629ea7f 100644
--- a/src/NeuralNetwork/Makefile
+++ b/src/NeuralNetwork/Makefile
@@ -18,7 +18,7 @@ lib: $(LIBNAME).so $(LIBNAME).a
 $(LIBNAME).so: $(OBJFILES)
 	$(CXX) -shared $(CXXFLAGS) $(OBJFILES) $(LINKFILES) -o $(LIBNAME).so
 
-$(LIBNAME).a: $(OBJFILES) ./Neuron.h ./Network.h ../Solution.h ../Problem.h ./ActivationFunction/ActivationFunction.h ./ActivationFunction/Sigmoid.h
+$(LIBNAME).a: $(OBJFILES) ./Neuron.h ./Network.h ./ActivationFunction/ActivationFunction.h ./ActivationFunction/Sigmoid.h
 	rm -f $(LIBNAME).a # create new library
 	ar rcv $(LIBNAME).a $(OBJFILES) $(LINKFILES)
 	ranlib $(LIBNAME).a
diff --git a/src/NeuralNetwork/Network.h b/src/NeuralNetwork/Network.h
index bdd4f9e..0477314 100644
--- a/src/NeuralNetwork/Network.h
+++ b/src/NeuralNetwork/Network.h
@@ -1,11 +1,9 @@
 #ifndef _S_NN_NN_H_
 #define _S_NN_NN_H_
 
-#include
+#include
 #include
 
-#include "../Problem.h"
-#include "../Solution.h"
 #include "Neuron.h"
 
 namespace NeuralNetwork
 {
@@ -61,7 +59,7 @@ namespace NeuralNetwork
 	 * @param p is a Problem to be solved
 	 * @returns Solution of Network for Problem
 	 */
-	virtual Shin::Solution solve(const Shin::Problem&p)=0;
+	virtual std::vector<float> solve(const std::vector<float>& input)=0;
 
 	/**
 	 * @brief Getter of layer
diff --git a/src/NeuralNetwork/Neuron.h b/src/NeuralNetwork/Neuron.h
index 27533c0..bccbf72 100644
--- a/src/NeuralNetwork/Neuron.h
+++ b/src/NeuralNetwork/Neuron.h
@@ -1,7 +1,7 @@
 #ifndef _S_NN_NEURON_H_
 #define _S_NN_NEURON_H_
 
-#include
+#include
 
 namespace NeuralNetwork
 {
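Note: after these signature changes, a training set for Learning::teachSet is a vector of {input, expected output} pairs of plain float vectors instead of {Shin::Problem, Shin::Solution} pairs. A minimal, self-contained sketch of building such a set follows; the XOR rows are example data only and the variable name is arbitrary. A concrete Learning implementation (for instance BackPropagation) would receive this via teachSet(trainingSet); constructing one is omitted here because its constructor is not part of this diff.

    #include <utility>
    #include <vector>

    int main()
    {
        // Each element pairs an input vector with the expected output vector.
        std::vector<std::pair<std::vector<float>, std::vector<float>>> trainingSet = {
            {{0.0f, 0.0f}, {0.0f}},
            {{0.0f, 1.0f}, {1.0f}},
            {{1.0f, 0.0f}, {1.0f}},
            {{1.0f, 1.0f}, {0.0f}},
        };
        return trainingSet.size() == 4 ? 0 : 1;
    }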
diff --git a/src/Problem b/src/Problem
deleted file mode 120000
index ebe36a1..0000000
--- a/src/Problem
+++ /dev/null
@@ -1 +0,0 @@
-./Problem.h
\ No newline at end of file
diff --git a/src/Problem.h b/src/Problem.h
deleted file mode 100644
index aeb4abd..0000000
--- a/src/Problem.h
+++ /dev/null
@@ -1,21 +0,0 @@
-#ifndef _P_H_
-#define _P_H_
-
-#include <vector>
-#include <initializer_list>
-#include "IO.h"
-
-namespace Shin
-{
-	class Problem : public IO
-	{
-		public:
-			Problem(): IO() {};
-			Problem(const std::vector<float> &p):IO(p) {};
-			Problem(const std::initializer_list<float> &a) : IO(a) {};
-		protected:
-		private:
-	};
-}
-#endif
-
diff --git a/src/Solution b/src/Solution
deleted file mode 120000
index 8cb0789..0000000
--- a/src/Solution
+++ /dev/null
@@ -1 +0,0 @@
-./Solution.h
\ No newline at end of file
diff --git a/src/Solution.h b/src/Solution.h
deleted file mode 100644
index 63c16b0..0000000
--- a/src/Solution.h
+++ /dev/null
@@ -1,22 +0,0 @@
-#ifndef _SOL_H_
-#define _SOL_H_
-
-#include "Problem"
-#include "IO.h"
-
-namespace Shin
-{
-	class Solution : public IO
-	{
-		public:
-			Solution(): IO() {}
-			Solution(const Problem& p) :IO(p) {}
-			Solution(std::vector<float> &solution):IO(solution) {}
-			Solution(std::vector<float> solution): IO(solution) {}
-			Solution(const std::initializer_list<float> &a) : IO(a) {};
-			inline void push_back(const float &a) {data.push_back(a);};
-	};
-}
-
-#endif
-
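Note: the deleted src/IO.cpp at the top of this patch implemented Shin::IO::operator+ as plain concatenation of the two operands' data vectors. Callers that relied on that operator can get the same behaviour with a small free function over std::vector<float>; the helper below is a sketch with an arbitrary name, not something added by this patch.

    #include <vector>

    // Plain-vector replacement for the removed Shin::IO::operator+ (concatenation).
    std::vector<float> concat(const std::vector<float>& l, const std::vector<float>& r)
    {
        std::vector<float> out;
        out.reserve(l.size() + r.size());
        out.insert(out.end(), l.begin(), l.end());
        out.insert(out.end(), r.begin(), r.end());
        return out;
    }

    int main()
    {
        const std::vector<float> joined = concat({0.0f, 1.0f}, {2.0f});
        return joined.size() == 3 ? 0 : 1;
    }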