Add documentation and an optical back-propagation learner (with tests)

This commit is contained in:
2016-02-07 23:38:19 +01:00
parent 0cdedd38f7
commit e5dddc926a
10 changed files with 220 additions and 6 deletions

View File

@@ -10,11 +10,20 @@ target_link_libraries(basis NeuralNetwork)
# Example/test executables; each links against the NeuralNetwork library.
add_executable(backpropagation backpropagation.cpp)
target_link_libraries(backpropagation NeuralNetwork)
add_executable(backpropagation_perf backpropagation_perf.cpp)
target_link_libraries(backpropagation_perf NeuralNetwork)
add_executable(feedforward feedforward.cpp)
target_link_libraries(feedforward NeuralNetwork)
add_executable(feedforward_perf feedforward_perf.cpp)
target_link_libraries(feedforward_perf NeuralNetwork)
add_executable(optical_backpropagation optical_backpropagation.cpp)
target_link_libraries(optical_backpropagation NeuralNetwork)
add_executable(recurrent recurrent.cpp)
# NOTE(review): the original repeated this target_link_libraries line twice;
# the duplicate was removed (linking the same library twice is redundant).
target_link_libraries(recurrent NeuralNetwork)
add_executable(recurrent_perf recurrent_perf.cpp)
target_link_libraries(recurrent_perf NeuralNetwork)

View File

@@ -0,0 +1,26 @@
#include <NeuralNetwork/FeedForward/Network.h>
#include <cassert>
#include <iostream>
#include "../include/NeuralNetwork/Learning/BackPropagation.h"
int main() {
{ // XOR problem
NeuralNetwork::FeedForward::Network n(2);
NeuralNetwork::ActivationFunction::Sigmoid a(-1);
n.appendLayer(200,a);
n.appendLayer(500,a);
n.appendLayer(900,a);
n.appendLayer(1,a);
n.randomizeWeights();
NeuralNetwork::Learning::BackPropagation prop;
for(int i=0;i<100;i++) {
prop.teach(n,{1,0},{1});
prop.teach(n,{1,1},{0});
prop.teach(n,{0,0},{0});
prop.teach(n,{0,1},{1});
}
}
}

View File

@@ -7,8 +7,8 @@ int main() {
{ // XOR problem
NeuralNetwork::FeedForward::Network n(2);
NeuralNetwork::ActivationFunction::Sigmoid a(-1);
n.appendLayer(2000,a);
n.appendLayer(2000,a);
n.appendLayer(5000,a);
n.appendLayer(5000,a);
n.appendLayer(1,a);
for(int i=0;i<500;i++) {

View File

@@ -0,0 +1,116 @@
#include <NeuralNetwork/FeedForward/Network.h>
#include <cassert>
#include <iostream>
#include "../include/NeuralNetwork/Learning/OpticalBackPropagation.h"
int main() {
{ // XOR problem
NeuralNetwork::FeedForward::Network n(2);
NeuralNetwork::ActivationFunction::Sigmoid a(-1);
n.appendLayer(2,a);
n.appendLayer(1,a);
n.randomizeWeights();
NeuralNetwork::Learning::OpticalBackPropagation prop;
for(int i=0;i<10000;i++) {
prop.teach(n,{1,0},{1});
prop.teach(n,{1,1},{0});
prop.teach(n,{0,0},{0});
prop.teach(n,{0,1},{1});
}
{
std::vector<float> ret =n.computeOutput({1,1});
assert(ret[0] < 0.1);
}
{
std::vector<float> ret =n.computeOutput({0,1});
assert(ret[0] > 0.9);
}
{
std::vector<float> ret =n.computeOutput({1,0});
assert(ret[0] > 0.9);
}
{
std::vector<float> ret =n.computeOutput({0,0});
assert(ret[0] < 0.1);
}
}
{ // AND problem
NeuralNetwork::FeedForward::Network n(2);
NeuralNetwork::ActivationFunction::Sigmoid a(-1);
n.appendLayer(2,a);
n.appendLayer(1,a);
n.randomizeWeights();
NeuralNetwork::Learning::OpticalBackPropagation prop;
for(int i=0;i<10000;i++) {
prop.teach(n,{1,1},{1});
prop.teach(n,{0,0},{0});
prop.teach(n,{0,1},{0});
prop.teach(n,{1,0},{0});
}
{
std::vector<float> ret =n.computeOutput({1,1});
assert(ret[0] > 0.9);
}
{
std::vector<float> ret =n.computeOutput({0,1});
assert(ret[0] < 0.1);
}
{
std::vector<float> ret =n.computeOutput({1,0});
assert(ret[0] < 0.1);
}
{
std::vector<float> ret =n.computeOutput({0,0});
assert(ret[0] < 0.1);
}
}
{ // NOT AND problem
NeuralNetwork::FeedForward::Network n(2);
NeuralNetwork::ActivationFunction::Sigmoid a(-1);
n.appendLayer(2,a);
n.appendLayer(1,a);
n.randomizeWeights();
NeuralNetwork::Learning::OpticalBackPropagation prop;
for(int i=0;i<10000;i++) {
prop.teach(n,{1,1},{0});
prop.teach(n,{0,0},{1});
prop.teach(n,{0,1},{1});
prop.teach(n,{1,0},{1});
}
{
std::vector<float> ret =n.computeOutput({1,1});
assert(ret[0] < 0.1);
}
{
std::vector<float> ret =n.computeOutput({0,1});
assert(ret[0] > 0.9);
}
{
std::vector<float> ret =n.computeOutput({1,0});
assert(ret[0] > 0.9);
}
{
std::vector<float> ret =n.computeOutput({0,0});
assert(ret[0] > 0.9);
}
}
}

12
tests/recurrent_perf.cpp Normal file
View File

@@ -0,0 +1,12 @@
#include <NeuralNetwork/Recurrent/Network.h>
#include <assert.h>
#include <iostream>
int main() {
NeuralNetwork::Recurrent::Network a(2,1,1000);
for(size_t i=0;i<10000;i++) {
a.computeOutput({1,0.7});
}
}