new docs and optical backpropagation

2016-02-07 23:38:19 +01:00
parent 0cdedd38f7
commit e5dddc926a
10 changed files with 220 additions and 6 deletions

View File: CMakeLists.txt

@@ -79,4 +79,16 @@ set_property(TEST recurrent PROPERTY LABELS unit)
add_test(feedforward_perf tests/feedforward_perf)
set_property(TEST feedforward_perf PROPERTY LABELS perf)
+add_test(optical_backpropagation tests/optical_backpropagation)
+set_property(TEST optical_backpropagation PROPERTY LABELS unit)
+add_test(backpropagation_perf tests/backpropagation_perf)
+set_property(TEST backpropagation_perf PROPERTY LABELS perf)
+add_test(recurrent_perf tests/recurrent_perf)
+set_property(TEST recurrent_perf PROPERTY LABELS perf)
ENDIF(ENABLE_TESTS)
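With the unit and perf labels in place, the two suites can be run separately through CTest: ctest -L unit for the quick checks, ctest -L perf for the benchmarks that feed the performance table in the README.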

View File: README.md

@@ -2,6 +2,18 @@ Artificial Neural Network Library
=================================
how to build:
mkdir build
cd build
cmake ..
make
+
+Performance
+-----------
+Measured on an i5-5300U with 8 GB RAM:
+
+| date       | feedforward_perf | recurrent_perf | backpropagation_perf |
+| ---------- | ---------------- | -------------- | -------------------- |
+| 2016/02/07 | 8.27 sec         | 7.15 sec       | 6.00 sec             |
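The columns correspond to the *_perf binaries registered in CMake above. Timings of this kind depend on compiler flags and machine load, so treat them as indicative rather than exact when reproducing.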

View File: include/NeuralNetwork/Learning/BackPropagation.h

@@ -24,7 +24,7 @@ namespace Learning {
inline virtual void setLearningCoefficient (const float& coefficient) { learningCoefficient=coefficient; }
protected:
-inline virtual float correction(const float & expected, const float &computed) {
+inline virtual float correction(const float & expected, const float &computed) const {
return expected-computed;
};
float learningCoefficient;
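correction() is the hook through which the learning classes turn an output error into a training signal; marking it const here is what allows the const override in the new OpticalBackPropagation subclass below.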

View File: include/NeuralNetwork/Learning/OpticalBackPropagation.h

@@ -0,0 +1,28 @@
#pragma once

#include <cmath>

#include "./BackPropagation.h"

namespace NeuralNetwork {
namespace Learning {
/** @class OpticalBackPropagation
 * @brief Back-propagation variant with a non-linearly amplified error
 *        term, intended to converge faster than plain BackPropagation.
 */
class OpticalBackPropagation : public BackPropagation {
public:
	OpticalBackPropagation(): BackPropagation() {
	}

	virtual ~OpticalBackPropagation() {
	}

protected:
	// Amplified error term: sign(e) * (1 + exp(e^2)) instead of the
	// plain e = expected - computed used by BackPropagation.
	inline virtual float correction(const float &expected, const float &computed) const override {
		const float diff = expected - computed;
		const float amplified = 1 + std::exp(diff * diff);
		return diff < 0 ? -amplified : amplified;
	}
};
}
}
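For intuition, a small self-contained sketch (not part of the library; plainCorrection and opticalCorrection are illustrative names) contrasting the two error terms for a few sample errors:

#include <cmath>
#include <cstdio>

// Plain backpropagation error term: e = expected - computed.
static float plainCorrection(float expected, float computed) {
	return expected - computed;
}

// Optical variant from the header above: sign(e) * (1 + exp(e^2)).
static float opticalCorrection(float expected, float computed) {
	const float e = expected - computed;
	const float amplified = 1 + std::exp(e * e);
	return e < 0 ? -amplified : amplified;
}

int main() {
	for (float e : {0.1f, 0.5f, 0.9f}) {
		std::printf("error %.1f -> plain %.2f, optical %.2f\n",
		            e, plainCorrection(e, 0.0f), opticalCorrection(e, 0.0f));
	}
	return 0;
}

Because exp(e^2) >= 1, the optical correction never drops below 2 in magnitude: an error of 0.1 already yields roughly 2.01 where plain backpropagation would use 0.1. That aggressive amplification is presumably what lets the unit test below converge on XOR with only two hidden neurons.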

View File

@@ -5,5 +5,4 @@
@author Tomas Cernik (Tom.Cernik@gmail.com)
*/

View File: tests/CMakeLists.txt

@@ -10,11 +10,20 @@ target_link_libraries(basis NeuralNetwork)
add_executable(backpropagation backpropagation.cpp)
target_link_libraries(backpropagation NeuralNetwork)
+add_executable(backpropagation_perf backpropagation_perf.cpp)
+target_link_libraries(backpropagation_perf NeuralNetwork)
add_executable(feedforward feedforward.cpp)
target_link_libraries(feedforward NeuralNetwork)
add_executable(feedforward_perf feedforward_perf.cpp)
target_link_libraries(feedforward_perf NeuralNetwork)
+add_executable(optical_backpropagation optical_backpropagation.cpp)
+target_link_libraries(optical_backpropagation NeuralNetwork)
add_executable(recurrent recurrent.cpp)
target_link_libraries(recurrent NeuralNetwork)
+add_executable(recurrent_perf recurrent_perf.cpp)
+target_link_libraries(recurrent_perf NeuralNetwork)

View File: tests/backpropagation_perf.cpp

@@ -0,0 +1,26 @@
#include <NeuralNetwork/FeedForward/Network.h>
#include <NeuralNetwork/Learning/BackPropagation.h>

int main() {
	{ // XOR problem, scaled up: the oversized hidden layers exist
	  // purely to make backpropagation expensive enough to time.
		NeuralNetwork::FeedForward::Network n(2);
		NeuralNetwork::ActivationFunction::Sigmoid a(-1);
		n.appendLayer(200,a);
		n.appendLayer(500,a);
		n.appendLayer(900,a);
		n.appendLayer(1,a);
		n.randomizeWeights();

		NeuralNetwork::Learning::BackPropagation prop;
		for(int i=0;i<100;i++) {
			prop.teach(n,{1,0},{1});
			prop.teach(n,{1,1},{0});
			prop.teach(n,{0,0},{0});
			prop.teach(n,{0,1},{1});
		}
	}
}
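Like the other *_perf programs, this binary asserts nothing: it exists to be timed, and it supplies the backpropagation_perf column of the README table.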

View File: tests/feedforward_perf.cpp

@@ -7,8 +7,8 @@ int main() {
{ // XOR problem
NeuralNetwork::FeedForward::Network n(2);
NeuralNetwork::ActivationFunction::Sigmoid a(-1);
-n.appendLayer(2000,a);
-n.appendLayer(2000,a);
+n.appendLayer(5000,a);
+n.appendLayer(5000,a);
n.appendLayer(1,a);
for(int i=0;i<500;i++) {

View File: tests/optical_backpropagation.cpp

@@ -0,0 +1,116 @@
#include <NeuralNetwork/FeedForward/Network.h>
#include <NeuralNetwork/Learning/OpticalBackPropagation.h>

#include <cassert>
#include <vector>

// Unit test: a minimal 2-2-1 sigmoid network must learn XOR, AND and
// NAND when trained with OpticalBackPropagation.
int main() {
	{ // XOR problem
		NeuralNetwork::FeedForward::Network n(2);
		NeuralNetwork::ActivationFunction::Sigmoid a(-1);
		n.appendLayer(2,a);
		n.appendLayer(1,a);
		n.randomizeWeights();

		NeuralNetwork::Learning::OpticalBackPropagation prop;
		for(int i=0;i<10000;i++) {
			prop.teach(n,{1,0},{1});
			prop.teach(n,{1,1},{0});
			prop.teach(n,{0,0},{0});
			prop.teach(n,{0,1},{1});
		}

		{
			std::vector<float> ret = n.computeOutput({1,1});
			assert(ret[0] < 0.1);
		}
		{
			std::vector<float> ret = n.computeOutput({0,1});
			assert(ret[0] > 0.9);
		}
		{
			std::vector<float> ret = n.computeOutput({1,0});
			assert(ret[0] > 0.9);
		}
		{
			std::vector<float> ret = n.computeOutput({0,0});
			assert(ret[0] < 0.1);
		}
	}
	{ // AND problem
		NeuralNetwork::FeedForward::Network n(2);
		NeuralNetwork::ActivationFunction::Sigmoid a(-1);
		n.appendLayer(2,a);
		n.appendLayer(1,a);
		n.randomizeWeights();

		NeuralNetwork::Learning::OpticalBackPropagation prop;
		for(int i=0;i<10000;i++) {
			prop.teach(n,{1,1},{1});
			prop.teach(n,{0,0},{0});
			prop.teach(n,{0,1},{0});
			prop.teach(n,{1,0},{0});
		}

		{
			std::vector<float> ret = n.computeOutput({1,1});
			assert(ret[0] > 0.9);
		}
		{
			std::vector<float> ret = n.computeOutput({0,1});
			assert(ret[0] < 0.1);
		}
		{
			std::vector<float> ret = n.computeOutput({1,0});
			assert(ret[0] < 0.1);
		}
		{
			std::vector<float> ret = n.computeOutput({0,0});
			assert(ret[0] < 0.1);
		}
	}
	{ // NAND problem
		NeuralNetwork::FeedForward::Network n(2);
		NeuralNetwork::ActivationFunction::Sigmoid a(-1);
		n.appendLayer(2,a);
		n.appendLayer(1,a);
		n.randomizeWeights();

		NeuralNetwork::Learning::OpticalBackPropagation prop;
		for(int i=0;i<10000;i++) {
			prop.teach(n,{1,1},{0});
			prop.teach(n,{0,0},{1});
			prop.teach(n,{0,1},{1});
			prop.teach(n,{1,0},{1});
		}

		{
			std::vector<float> ret = n.computeOutput({1,1});
			assert(ret[0] < 0.1);
		}
		{
			std::vector<float> ret = n.computeOutput({0,1});
			assert(ret[0] > 0.9);
		}
		{
			std::vector<float> ret = n.computeOutput({1,0});
			assert(ret[0] > 0.9);
		}
		{
			std::vector<float> ret = n.computeOutput({0,0});
			assert(ret[0] > 0.9);
		}
	}
}
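The three truth tables exercise one non-linearly-separable function (XOR) and two linearly separable ones (AND, NAND); an output counts as correct once it has clearly saturated past 0.9 toward 1 or below 0.1 toward 0.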

View File: tests/recurrent_perf.cpp

@@ -0,0 +1,12 @@
#include <NeuralNetwork/Recurrent/Network.h>

#include <cstddef>

// Performance benchmark: 10000 forward passes through a recurrent
// network. Nothing is asserted; the binary exists to be timed.
int main() {
	NeuralNetwork::Recurrent::Network a(2,1,1000);
	for(std::size_t i=0;i<10000;i++) {
		a.computeOutput({1,0.7});
	}
}