Add documentation, performance tests, and an optical back-propagation learning test
This commit is contained in:
@@ -10,11 +10,20 @@ target_link_libraries(basis NeuralNetwork)
|
||||
# Test/benchmark executables, each linked against the NeuralNetwork library.
# (Fixed: the recurrent target was linked twice — duplicate line removed.)
add_executable(backpropagation backpropagation.cpp)
target_link_libraries(backpropagation NeuralNetwork)

add_executable(backpropagation_perf backpropagation_perf.cpp)
target_link_libraries(backpropagation_perf NeuralNetwork)

add_executable(feedforward feedforward.cpp)
target_link_libraries(feedforward NeuralNetwork)

add_executable(feedforward_perf feedforward_perf.cpp)
target_link_libraries(feedforward_perf NeuralNetwork)

add_executable(optical_backpropagation optical_backpropagation.cpp)
target_link_libraries(optical_backpropagation NeuralNetwork)

add_executable(recurrent recurrent.cpp)
target_link_libraries(recurrent NeuralNetwork)

add_executable(recurrent_perf recurrent_perf.cpp)
target_link_libraries(recurrent_perf NeuralNetwork)
26
tests/backpropagation_perf.cpp
Normal file
26
tests/backpropagation_perf.cpp
Normal file
@@ -0,0 +1,26 @@
|
||||
#include <NeuralNetwork/FeedForward/Network.h>
|
||||
|
||||
#include <cassert>
|
||||
#include <iostream>
|
||||
#include "../include/NeuralNetwork/Learning/BackPropagation.h"
|
||||
|
||||
int main() {
|
||||
{ // XOR problem
|
||||
NeuralNetwork::FeedForward::Network n(2);
|
||||
NeuralNetwork::ActivationFunction::Sigmoid a(-1);
|
||||
n.appendLayer(200,a);
|
||||
n.appendLayer(500,a);
|
||||
n.appendLayer(900,a);
|
||||
n.appendLayer(1,a);
|
||||
|
||||
n.randomizeWeights();
|
||||
|
||||
NeuralNetwork::Learning::BackPropagation prop;
|
||||
for(int i=0;i<100;i++) {
|
||||
prop.teach(n,{1,0},{1});
|
||||
prop.teach(n,{1,1},{0});
|
||||
prop.teach(n,{0,0},{0});
|
||||
prop.teach(n,{0,1},{1});
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -7,8 +7,8 @@ int main() {
|
||||
{ // XOR problem
|
||||
NeuralNetwork::FeedForward::Network n(2);
|
||||
NeuralNetwork::ActivationFunction::Sigmoid a(-1);
|
||||
n.appendLayer(2000,a);
|
||||
n.appendLayer(2000,a);
|
||||
n.appendLayer(5000,a);
|
||||
n.appendLayer(5000,a);
|
||||
n.appendLayer(1,a);
|
||||
|
||||
for(int i=0;i<500;i++) {
|
||||
|
||||
116
tests/optical_backpropagation.cpp
Normal file
116
tests/optical_backpropagation.cpp
Normal file
@@ -0,0 +1,116 @@
|
||||
#include <NeuralNetwork/FeedForward/Network.h>
|
||||
|
||||
#include <cassert>
|
||||
#include <iostream>
|
||||
#include "../include/NeuralNetwork/Learning/OpticalBackPropagation.h"
|
||||
|
||||
int main() {
|
||||
{ // XOR problem
|
||||
NeuralNetwork::FeedForward::Network n(2);
|
||||
NeuralNetwork::ActivationFunction::Sigmoid a(-1);
|
||||
n.appendLayer(2,a);
|
||||
n.appendLayer(1,a);
|
||||
|
||||
n.randomizeWeights();
|
||||
|
||||
NeuralNetwork::Learning::OpticalBackPropagation prop;
|
||||
for(int i=0;i<10000;i++) {
|
||||
prop.teach(n,{1,0},{1});
|
||||
prop.teach(n,{1,1},{0});
|
||||
prop.teach(n,{0,0},{0});
|
||||
prop.teach(n,{0,1},{1});
|
||||
}
|
||||
|
||||
{
|
||||
std::vector<float> ret =n.computeOutput({1,1});
|
||||
assert(ret[0] < 0.1);
|
||||
}
|
||||
|
||||
{
|
||||
std::vector<float> ret =n.computeOutput({0,1});
|
||||
assert(ret[0] > 0.9);
|
||||
}
|
||||
|
||||
{
|
||||
std::vector<float> ret =n.computeOutput({1,0});
|
||||
assert(ret[0] > 0.9);
|
||||
}
|
||||
|
||||
{
|
||||
std::vector<float> ret =n.computeOutput({0,0});
|
||||
assert(ret[0] < 0.1);
|
||||
}
|
||||
}
|
||||
{ // AND problem
|
||||
NeuralNetwork::FeedForward::Network n(2);
|
||||
NeuralNetwork::ActivationFunction::Sigmoid a(-1);
|
||||
n.appendLayer(2,a);
|
||||
n.appendLayer(1,a);
|
||||
|
||||
n.randomizeWeights();
|
||||
|
||||
NeuralNetwork::Learning::OpticalBackPropagation prop;
|
||||
for(int i=0;i<10000;i++) {
|
||||
prop.teach(n,{1,1},{1});
|
||||
prop.teach(n,{0,0},{0});
|
||||
prop.teach(n,{0,1},{0});
|
||||
prop.teach(n,{1,0},{0});
|
||||
}
|
||||
|
||||
{
|
||||
std::vector<float> ret =n.computeOutput({1,1});
|
||||
assert(ret[0] > 0.9);
|
||||
}
|
||||
|
||||
{
|
||||
std::vector<float> ret =n.computeOutput({0,1});
|
||||
assert(ret[0] < 0.1);
|
||||
}
|
||||
|
||||
{
|
||||
std::vector<float> ret =n.computeOutput({1,0});
|
||||
assert(ret[0] < 0.1);
|
||||
}
|
||||
|
||||
{
|
||||
std::vector<float> ret =n.computeOutput({0,0});
|
||||
assert(ret[0] < 0.1);
|
||||
}
|
||||
}
|
||||
{ // NOT AND problem
|
||||
NeuralNetwork::FeedForward::Network n(2);
|
||||
NeuralNetwork::ActivationFunction::Sigmoid a(-1);
|
||||
n.appendLayer(2,a);
|
||||
n.appendLayer(1,a);
|
||||
|
||||
n.randomizeWeights();
|
||||
|
||||
NeuralNetwork::Learning::OpticalBackPropagation prop;
|
||||
for(int i=0;i<10000;i++) {
|
||||
prop.teach(n,{1,1},{0});
|
||||
prop.teach(n,{0,0},{1});
|
||||
prop.teach(n,{0,1},{1});
|
||||
prop.teach(n,{1,0},{1});
|
||||
}
|
||||
|
||||
{
|
||||
std::vector<float> ret =n.computeOutput({1,1});
|
||||
assert(ret[0] < 0.1);
|
||||
}
|
||||
|
||||
{
|
||||
std::vector<float> ret =n.computeOutput({0,1});
|
||||
assert(ret[0] > 0.9);
|
||||
}
|
||||
|
||||
{
|
||||
std::vector<float> ret =n.computeOutput({1,0});
|
||||
assert(ret[0] > 0.9);
|
||||
}
|
||||
|
||||
{
|
||||
std::vector<float> ret =n.computeOutput({0,0});
|
||||
assert(ret[0] > 0.9);
|
||||
}
|
||||
}
|
||||
}
|
||||
12
tests/recurrent_perf.cpp
Normal file
12
tests/recurrent_perf.cpp
Normal file
@@ -0,0 +1,12 @@
|
||||
#include <NeuralNetwork/Recurrent/Network.h>
|
||||
|
||||
#include <assert.h>
|
||||
#include <iostream>
|
||||
|
||||
int main() {
|
||||
NeuralNetwork::Recurrent::Network a(2,1,1000);
|
||||
|
||||
for(size_t i=0;i<10000;i++) {
|
||||
a.computeOutput({1,0.7});
|
||||
}
|
||||
}
|
||||
Reference in New Issue
Block a user