Merge branch 'tests'

2016-04-18 16:15:35 +02:00
14 changed files with 629 additions and 594 deletions

View File

@@ -1,14 +1,38 @@
 cmake_minimum_required(VERSION 3.2)
 project(NeuralNetworkTests CXX)
 set(CMAKE_CXX_FLAGS " --std=c++14")
 add_executable(activation activation.cpp)
-target_link_libraries(activation NeuralNetwork)
+target_link_libraries(activation NeuralNetwork gtest gtest_main)
 add_executable(basis basis.cpp)
-target_link_libraries(basis NeuralNetwork)
+target_link_libraries(basis NeuralNetwork gtest gtest_main)
 add_executable(backpropagation backpropagation.cpp)
-target_link_libraries(backpropagation NeuralNetwork)
+target_link_libraries(backpropagation NeuralNetwork gtest gtest_main)
+add_executable(feedforward feedforward.cpp)
+target_link_libraries(feedforward NeuralNetwork gtest gtest_main)
+add_executable(optical_backpropagation optical_backpropagation.cpp)
+target_link_libraries(optical_backpropagation NeuralNetwork gtest gtest_main)
+add_executable(perceptron perceptron.cpp)
+target_link_libraries(perceptron NeuralNetwork gtest gtest_main)
+add_executable(perceptron_learning perceptron_learning.cpp)
+target_link_libraries(perceptron_learning NeuralNetwork gtest gtest_main)
+add_executable(recurrent recurrent.cpp)
+target_link_libraries(recurrent NeuralNetwork gtest gtest_main)
+add_executable(quickpropagation quickpropagation.cpp)
+target_link_libraries(quickpropagation NeuralNetwork gtest gtest_main)
 # PERF
 add_executable(backpropagation_function_cmp backpropagation_function_cmp.cpp)
 target_link_libraries(backpropagation_function_cmp NeuralNetwork)
@@ -16,32 +40,14 @@ target_link_libraries(backpropagation_function_cmp NeuralNetwork)
 add_executable(backpropagation_perf backpropagation_perf.cpp)
 target_link_libraries(backpropagation_perf NeuralNetwork)
-add_executable(feedforward feedforward.cpp)
-target_link_libraries(feedforward NeuralNetwork)
 add_executable(feedforward_perf feedforward_perf.cpp)
 target_link_libraries(feedforward_perf NeuralNetwork)
-add_executable(optical_backpropagation optical_backpropagation.cpp)
-target_link_libraries(optical_backpropagation NeuralNetwork)
-add_executable(perceptron perceptron.cpp)
-target_link_libraries(perceptron NeuralNetwork)
-add_executable(perceptron_learning perceptron_learning.cpp)
-target_link_libraries(perceptron_learning NeuralNetwork)
-add_executable(recurrent recurrent.cpp)
-target_link_libraries(recurrent NeuralNetwork)
 add_executable(recurrent_perf recurrent_perf.cpp)
 target_link_libraries(recurrent_perf NeuralNetwork)
-add_executable(quickpropagation quickpropagation.cpp)
-target_link_libraries(quickpropagation NeuralNetwork)
 add_executable(quickpropagation_perf quickpropagation_perf.cpp)
 target_link_libraries(quickpropagation_perf NeuralNetwork)
 add_executable(propagation_cmp propagation_cmp.cpp)
 target_link_libraries(propagation_cmp NeuralNetwork)
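
A note on running these: nothing in this commit registers the new gtest binaries with CTest, so they are presumably run by hand. A minimal sketch of what registration could look like (hypothetical, not part of the change):

# Hypothetical addition, not in this commit: let `ctest` drive the gtest binaries.
enable_testing()
foreach(t activation basis backpropagation feedforward optical_backpropagation
          perceptron perceptron_learning recurrent quickpropagation)
    add_test(NAME ${t} COMMAND ${t})
endforeach()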

View File

@@ -1,103 +1,106 @@
+#include <NeuralNetwork/Network.h>
 #include <NeuralNetwork/ActivationFunction/Heaviside.h>
 #include <NeuralNetwork/ActivationFunction/Sigmoid.h>
 #include <NeuralNetwork/ActivationFunction/HyperbolicTangent.h>
 #include <NeuralNetwork/ActivationFunction/Linear.h>
-#include <NeuralNetwork/Network.h>
-#include <cassert>
+#include <gtest/gtest.h>
-union {
-    __m128 v;   // SSE 4 x float vector
-    float a[4]; // scalar array of 4 floats
-} U;
+union SSE {
+    __m128 sse;      // SSE 4 x float vector
+    float floats[4]; // scalar array of 4 floats
+};
 NEURAL_NETWORK_INIT();
-int main() {
-    {
-        NeuralNetwork::ActivationFunction::Heaviside h(1.0);
-        assert(h(0.2) == 0);
-        assert(h(1.2) == 1);
-    }
-    {
-        NeuralNetwork::ActivationFunction::Heaviside h(0.7);
-        assert(h(0.2) == 0);
-        assert(h(0.8) == 1);
-    }
-    {
-        NeuralNetwork::ActivationFunction::Sigmoid s(0.7);
-        assert(s(0.1) > 0.482407);
-        assert(s(0.1) < 0.482607);
-        assert(s(10) > 0.000901051);
-        assert(s(10) < 0.000921051);
-    }
-    {
-        NeuralNetwork::ActivationFunction::Sigmoid s(-5);
-        assert(s(0.1) > 0.622359);
-        assert(s(0.1) < 0.622559);
-        assert(s(0.7) > 0.970588);
-        assert(s(0.7) < 0.970788);
-    }
-    {
-        NeuralNetwork::ActivationFunction::Sigmoid s(-0.7);
-        U.a[0] = 0.1;
-        U.a[1] = 10;
-        U.v = s(U.v);
-        assert(U.a[0] > 0.517483);
-        assert(U.a[0] < 0.51750);
-        assert(U.a[1] > 0.998989);
-        assert(U.a[1] < 0.999189);
-    }
-    {
-        NeuralNetwork::ActivationFunction::Linear s(1.0);
-        assert(s(0.5) > 0.4999);
-        assert(s(0.5) < 0.5001);
-        assert(s(0.0) == 0.0);
-    }
-    {
-        NeuralNetwork::ActivationFunction::Linear s(0.7);
-        assert(s(0.0) == 0.0);
-        assert(s(1.0) > 0.6999);
-        assert(s(1.0) < 0.7001);
-    }
-    {
-        NeuralNetwork::ActivationFunction::Linear l(2.5);
-        const std::string tmp = l.serialize().serialize();
-        NeuralNetwork::ActivationFunction::ActivationFunction* deserialized = NeuralNetwork::ActivationFunction::Factory::deserialize(l.serialize()).release();
-        assert(tmp == deserialized->serialize().serialize());
-        delete deserialized;
-    }
-    {
-        NeuralNetwork::ActivationFunction::Heaviside l(2.5);
-        const std::string tmp = l.serialize().serialize();
-        NeuralNetwork::ActivationFunction::ActivationFunction* deserialized = NeuralNetwork::ActivationFunction::Factory::deserialize(l.serialize()).release();
-        assert(tmp == deserialized->serialize().serialize());
-        delete deserialized;
-    }
-    {
-        NeuralNetwork::ActivationFunction::HyperbolicTangent l(2.5);
-        const std::string tmp = l.serialize().serialize();
-        NeuralNetwork::ActivationFunction::ActivationFunction* deserialized = NeuralNetwork::ActivationFunction::Factory::deserialize(l.serialize()).release();
-        assert(tmp == deserialized->serialize().serialize());
-        delete deserialized;
-    }
-    {
-        NeuralNetwork::ActivationFunction::Sigmoid l(2.5);
-        const std::string tmp = l.serialize().serialize();
-        NeuralNetwork::ActivationFunction::ActivationFunction* deserialized = NeuralNetwork::ActivationFunction::Factory::deserialize(l.serialize()).release();
-        assert(tmp == deserialized->serialize().serialize());
-        delete deserialized;
-    }
-    return 0;
-}
+TEST(Heaviside, ParamOne) {
+    NeuralNetwork::ActivationFunction::Heaviside h(1.0);
+    ASSERT_EQ(h(0.2), 0);
+    ASSERT_EQ(h(1.2), 1);
+}
+TEST(Heaviside, ParamZeroPointSeven) {
+    NeuralNetwork::ActivationFunction::Heaviside h(0.7);
+    ASSERT_EQ(h(0.2), 0);
+    ASSERT_EQ(h(0.8), 1);
+}
+TEST(Sigmoid, ParamZeroPointSeven) {
+    NeuralNetwork::ActivationFunction::Sigmoid s(0.7);
+    ASSERT_GT(s(0.1), 0.482407);
+    ASSERT_LT(s(0.1), 0.482607);
+    ASSERT_GT(s(10), 0.000901051);
+    ASSERT_LT(s(10), 0.000921051);
+}
+TEST(Sigmoid, ParamMinusFive) {
+    NeuralNetwork::ActivationFunction::Sigmoid s(-5);
+    ASSERT_GT(s(0.1), 0.622359);
+    ASSERT_LT(s(0.1), 0.622559);
+    ASSERT_GT(s(0.7), 0.970588);
+    ASSERT_LT(s(0.7), 0.970788);
+}
+TEST(SigmoidSSE, ParamMinusZeroPointSeven) {
+    NeuralNetwork::ActivationFunction::Sigmoid s(-0.7);
+    SSE comp;
+    comp.floats[0] = 0.1;
+    comp.floats[1] = 10;
+    comp.sse = s(comp.sse);
+    ASSERT_GT(comp.floats[0], 0.517483);
+    ASSERT_LT(comp.floats[0], 0.51750);
+    ASSERT_GT(comp.floats[1], 0.998989);
+    ASSERT_LT(comp.floats[1], 0.999189);
+}
+TEST(Linear, ParamOne) {
+    NeuralNetwork::ActivationFunction::Linear s(1.0);
+    ASSERT_GT(s(0.5), 0.4999);
+    ASSERT_LT(s(0.5), 0.5001);
+    ASSERT_EQ(s(0.0), 0.0);
+}
+TEST(Linear, ParamZeroPointSeven) {
+    NeuralNetwork::ActivationFunction::Linear s(0.7);
+    ASSERT_GT(s(1.0), 0.6999);
+    ASSERT_LT(s(1.0), 0.7001);
+    ASSERT_EQ(s(0.0), 0.0);
+}
+TEST(Linear, Serialize) {
+    NeuralNetwork::ActivationFunction::Linear l(2.5);
+    const std::string tmp = l.serialize().serialize();
+    NeuralNetwork::ActivationFunction::ActivationFunction* deserialized = NeuralNetwork::ActivationFunction::Factory::deserialize(l.serialize()).release();
+    ASSERT_EQ(tmp, deserialized->serialize().serialize());
+    delete deserialized;
+}
+TEST(Heaviside, Serialize) {
+    NeuralNetwork::ActivationFunction::Heaviside l(2.5);
+    const std::string tmp = l.serialize().serialize();
+    NeuralNetwork::ActivationFunction::ActivationFunction* deserialized = NeuralNetwork::ActivationFunction::Factory::deserialize(l.serialize()).release();
+    ASSERT_EQ(tmp, deserialized->serialize().serialize());
+    delete deserialized;
+}
+TEST(HyperbolicTangent, Serialize) {
+    NeuralNetwork::ActivationFunction::HyperbolicTangent l(2.5);
+    const std::string tmp = l.serialize().serialize();
+    NeuralNetwork::ActivationFunction::ActivationFunction* deserialized = NeuralNetwork::ActivationFunction::Factory::deserialize(l.serialize()).release();
+    ASSERT_EQ(tmp, deserialized->serialize().serialize());
+    delete deserialized;
+}
+TEST(Sigmoid, Serialize) {
+    NeuralNetwork::ActivationFunction::Sigmoid l(2.5);
+    const std::string tmp = l.serialize().serialize();
+    NeuralNetwork::ActivationFunction::ActivationFunction* deserialized = NeuralNetwork::ActivationFunction::Factory::deserialize(l.serialize()).release();
+    ASSERT_EQ(tmp, deserialized->serialize().serialize());
+    delete deserialized;
+}
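
The SigmoidSSE test relies on type-punning a __m128 through a union to read individual float lanes; GCC and Clang define this behavior, though strict ISO C++ does not. A self-contained sketch of the same trick (<xmmintrin.h> is presumably pulled in transitively by the NeuralNetwork headers in the real test):

#include <xmmintrin.h> // __m128, _mm_set_ps
#include <cstdio>

union SSE {
    __m128 sse;      // SSE 4 x float vector
    float floats[4]; // scalar view of the same 128 bits
};

int main() {
    SSE u;
    u.sse = _mm_set_ps(4.0f, 3.0f, 2.0f, 1.0f); // note: highest lane first
    for (int i = 0; i < 4; ++i)
        std::printf("lane %d = %.1f\n", i, u.floats[i]); // prints 1.0 .. 4.0
    return 0;
}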

View File

@@ -1,116 +1,118 @@
 #include <NeuralNetwork/FeedForward/Network.h>
-#include <cassert>
 #include <iostream>
-#include "../include/NeuralNetwork/Learning/BackPropagation.h"
+#include <NeuralNetwork/Learning/BackPropagation.h>
+#include <gtest/gtest.h>
-int main() {
-    { // XOR problem
-        NeuralNetwork::FeedForward::Network n(2);
-        NeuralNetwork::ActivationFunction::Sigmoid a(-1);
-        n.appendLayer(2,a);
-        n.appendLayer(1,a);
-        n.randomizeWeights();
-        NeuralNetwork::Learning::BackPropagation prop(n);
-        for(int i=0;i<10000;i++) {
-            prop.teach({1,0},{1});
-            prop.teach({1,1},{0});
-            prop.teach({0,0},{0});
-            prop.teach({0,1},{1});
-        }
-        {
-            std::vector<float> ret = n.computeOutput({1,1});
-            assert(ret[0] < 0.1);
-        }
-        {
-            std::vector<float> ret = n.computeOutput({0,1});
-            assert(ret[0] > 0.9);
-        }
-        {
-            std::vector<float> ret = n.computeOutput({1,0});
-            assert(ret[0] > 0.9);
-        }
-        {
-            std::vector<float> ret = n.computeOutput({0,0});
-            assert(ret[0] < 0.1);
-        }
-    }
-    { // AND problem
-        NeuralNetwork::FeedForward::Network n(2);
-        NeuralNetwork::ActivationFunction::Sigmoid a(-1);
-        n.appendLayer(2,a);
-        n.appendLayer(1,a);
-        n.randomizeWeights();
-        NeuralNetwork::Learning::BackPropagation prop(n);
-        for(int i=0;i<10000;i++) {
-            prop.teach({1,1},{1});
-            prop.teach({0,0},{0});
-            prop.teach({0,1},{0});
-            prop.teach({1,0},{0});
-        }
-        {
-            std::vector<float> ret = n.computeOutput({1,1});
-            assert(ret[0] > 0.9);
-        }
-        {
-            std::vector<float> ret = n.computeOutput({0,1});
-            assert(ret[0] < 0.1);
-        }
-        {
-            std::vector<float> ret = n.computeOutput({1,0});
-            assert(ret[0] < 0.1);
-        }
-        {
-            std::vector<float> ret = n.computeOutput({0,0});
-            assert(ret[0] < 0.1);
-        }
-    }
-    { // NOT AND problem
-        NeuralNetwork::FeedForward::Network n(2);
-        NeuralNetwork::ActivationFunction::Sigmoid a(-1);
-        n.appendLayer(2,a);
-        n.appendLayer(1,a);
-        n.randomizeWeights();
-        NeuralNetwork::Learning::BackPropagation prop(n);
-        for(int i=0;i<10000;i++) {
-            prop.teach({1,1},{0});
-            prop.teach({0,0},{1});
-            prop.teach({0,1},{1});
-            prop.teach({1,0},{1});
-        }
-        {
-            std::vector<float> ret = n.computeOutput({1,1});
-            assert(ret[0] < 0.1);
-        }
-        {
-            std::vector<float> ret = n.computeOutput({0,1});
-            assert(ret[0] > 0.9);
-        }
-        {
-            std::vector<float> ret = n.computeOutput({1,0});
-            assert(ret[0] > 0.9);
-        }
-        {
-            std::vector<float> ret = n.computeOutput({0,0});
-            assert(ret[0] > 0.9);
-        }
-    }
-}
+TEST(BackProp,XOR) {
+    NeuralNetwork::FeedForward::Network n(2);
+    NeuralNetwork::ActivationFunction::Sigmoid a(-1);
+    n.appendLayer(2,a);
+    n.appendLayer(1,a);
+    n.randomizeWeights();
+    NeuralNetwork::Learning::BackPropagation prop(n);
+    for(int i=0;i<10000;i++) {
+        prop.teach({1,0},{1});
+        prop.teach({1,1},{0});
+        prop.teach({0,0},{0});
+        prop.teach({0,1},{1});
+    }
+    {
+        std::vector<float> ret = n.computeOutput({1,1});
+        ASSERT_LT(ret[0], 0.1);
+    }
+    {
+        std::vector<float> ret = n.computeOutput({0,1});
+        ASSERT_GT(ret[0], 0.9);
+    }
+    {
+        std::vector<float> ret = n.computeOutput({1,0});
+        ASSERT_GT(ret[0], 0.9);
+    }
+    {
+        std::vector<float> ret = n.computeOutput({0,0});
+        ASSERT_LT(ret[0], 0.1);
+    }
+}
+TEST(BackProp,AND) {
+    NeuralNetwork::FeedForward::Network n(2);
+    NeuralNetwork::ActivationFunction::Sigmoid a(-1);
+    n.appendLayer(2,a);
+    n.appendLayer(1,a);
+    n.randomizeWeights();
+    NeuralNetwork::Learning::BackPropagation prop(n);
+    for(int i=0;i<10000;i++) {
+        prop.teach({1,1},{1});
+        prop.teach({0,0},{0});
+        prop.teach({0,1},{0});
+        prop.teach({1,0},{0});
+    }
+    {
+        std::vector<float> ret = n.computeOutput({1,1});
+        ASSERT_GT(ret[0], 0.9);
+    }
+    {
+        std::vector<float> ret = n.computeOutput({0,1});
+        ASSERT_LT(ret[0], 0.1);
+    }
+    {
+        std::vector<float> ret = n.computeOutput({1,0});
+        ASSERT_LT(ret[0], 0.1);
+    }
+    {
+        std::vector<float> ret = n.computeOutput({0,0});
+        ASSERT_LT(ret[0], 0.1);
+    }
+}
+TEST(BackProp,NOTAND) {
+    NeuralNetwork::FeedForward::Network n(2);
+    NeuralNetwork::ActivationFunction::Sigmoid a(-1);
+    n.appendLayer(2,a);
+    n.appendLayer(1,a);
+    n.randomizeWeights();
+    NeuralNetwork::Learning::BackPropagation prop(n);
+    for(int i=0;i<10000;i++) {
+        prop.teach({1,1},{0});
+        prop.teach({0,0},{1});
+        prop.teach({0,1},{1});
+        prop.teach({1,0},{1});
+    }
+    {
+        std::vector<float> ret = n.computeOutput({1,1});
+        ASSERT_LT(ret[0], 0.1);
+    }
+    {
+        std::vector<float> ret = n.computeOutput({0,1});
+        ASSERT_GT(ret[0], 0.9);
+    }
+    {
+        std::vector<float> ret = n.computeOutput({1,0});
+        ASSERT_GT(ret[0], 0.9);
+    }
+    {
+        std::vector<float> ret = n.computeOutput({0,0});
+        ASSERT_GT(ret[0], 0.9);
+    }
+}
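
The three suites above repeat the same teach-then-verify pattern with different truth tables. A hypothetical helper (not part of the commit; Trainer stands in for any of the learning classes, Net for the network type) that would fold the repetition:

#include <gtest/gtest.h>
#include <array>
#include <vector>

// Hypothetical helper: teach a 2-input/1-output truth table, then assert that
// each row converged to within 0.1 of its target.
template <typename Trainer, typename Net>
void teachAndCheck(Trainer& prop, Net& n,
                   const std::array<std::array<float, 3>, 4>& table,
                   int epochs = 10000) {
    for (int e = 0; e < epochs; ++e)
        for (const auto& row : table)
            prop.teach({row[0], row[1]}, {row[2]});
    for (const auto& row : table) {
        float out = n.computeOutput({row[0], row[1]})[0];
        if (row[2] > 0.5f) ASSERT_GT(out, 0.9f);
        else               ASSERT_LT(out, 0.1f);
    }
}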

View File

@@ -2,9 +2,9 @@
 #include <cassert>
 #include <iostream>
-#include "../include/NeuralNetwork/Learning/BackPropagation.h"
-#include "../include/NeuralNetwork/Learning/CorrectionFunction/Optical.h"
-#include "../include/NeuralNetwork/Learning/CorrectionFunction/ArcTangent.h"
+#include <NeuralNetwork/Learning/BackPropagation.h>
+#include <NeuralNetwork/Learning/CorrectionFunction/Optical.h>
+#include <NeuralNetwork/Learning/CorrectionFunction/ArcTangent.h>
 #define LEARN(A,AR,B,BR,C,CR,D,DR,FUN,COEF) \
 ({\
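
The ({ ... }) body of the LEARN macro is a GCC/Clang statement-expression, not standard C++; the construct lets a block yield a value. A minimal illustration:

#include <cstdio>

int main() {
    // GNU extension: the parenthesized block evaluates to its last expression.
    int x = ({ int t = 2; t * 3; });
    std::printf("%d\n", x); // 6
    return 0;
}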

View File

@@ -4,67 +4,67 @@
 #include <NeuralNetwork/Network.h>
 #include <iostream>
-#include <cassert>
-#include <chrono>
+#include <gtest/gtest.h>
 NEURAL_NETWORK_INIT();
-int main() {
-    {
-        NeuralNetwork::BasisFunction::Linear l;
-        assert(39.0==l({1,2,3,5},{1,2,3,5}));
-    }
-    {
-        NeuralNetwork::BasisFunction::Linear l;
-        assert(88.0==l({1,2,3,5,7},{1,2,3,5,7}));
-    }
-    {
-        NeuralNetwork::BasisFunction::Linear l;
-        std::vector<float> w;
-        for(int in=0;in<100;in++) {
-            w.push_back(2);
-        }
-        assert(400.0==l(w,w));
-    }
-    {
-        NeuralNetwork::BasisFunction::Linear l;
-        std::vector<float> w;
-        for(int in=0;in<55;in++) {
-            w.push_back(2);
-        }
-        assert(220.0==l(w,w));
-    }
-    {
-        NeuralNetwork::BasisFunction::Product l;
-        std::vector<float> w({0,0.501,1});
-        std::vector<float> i({0,0.2,0.3});
-        assert(l(w,i) > 0.05999);
-        assert(l(w,i) < 0.06001);
-    }
-    {
-        NeuralNetwork::BasisFunction::Linear l;
-        std::string tmp = l.serialize().serialize();
-        NeuralNetwork::BasisFunction::BasisFunction *deserialized = NeuralNetwork::BasisFunction::Factory::deserialize(l.serialize()).release();
-        assert(tmp==deserialized->serialize().serialize());
-        delete deserialized;
-    }
-    {
-        NeuralNetwork::BasisFunction::Product l;
-        std::string tmp = l.serialize().serialize();
-        NeuralNetwork::BasisFunction::BasisFunction *deserialized = NeuralNetwork::BasisFunction::Factory::deserialize(l.serialize()).release();
-        assert(tmp==deserialized->serialize().serialize());
-        delete deserialized;
-    }
-    {
-        NeuralNetwork::BasisFunction::Radial l;
-        std::string tmp = l.serialize().serialize();
-        NeuralNetwork::BasisFunction::BasisFunction *deserialized = NeuralNetwork::BasisFunction::Factory::deserialize(l.serialize()).release();
-        assert(tmp==deserialized->serialize().serialize());
-        delete deserialized;
-    }
-}
+TEST(Linear,FourElements) {
+    NeuralNetwork::BasisFunction::Linear l;
+    ASSERT_EQ(39.0, l({1,2,3,5},{1,2,3,5}));
+}
+TEST(Linear,FiveElements) {
+    NeuralNetwork::BasisFunction::Linear l;
+    ASSERT_EQ(88.0, l({1,2,3,5,7},{1,2,3,5,7}));
+}
+TEST(Linear,HundredElements) {
+    NeuralNetwork::BasisFunction::Linear l;
+    std::vector<float> w;
+    for(int in = 0; in < 100; in++) {
+        w.push_back(2);
+    }
+    ASSERT_EQ(400.0, l(w,w));
+}
+TEST(Linear,FiftyFiveElements) {
+    NeuralNetwork::BasisFunction::Linear l;
+    std::vector<float> w;
+    for(int in = 0; in < 55; in++) {
+        w.push_back(2);
+    }
+    ASSERT_EQ(220.0, l(w, w));
+}
+TEST(Product,Product) {
+    NeuralNetwork::BasisFunction::Product p;
+    std::vector<float> w({0,0.501,1});
+    std::vector<float> i({0,0.2,0.3});
+    ASSERT_GT(p(w,i), 0.05999);
+    ASSERT_LT(p(w,i), 0.06001);
+}
+TEST(Linear, Serialize) {
+    NeuralNetwork::BasisFunction::Linear l;
+    std::string tmp = l.serialize().serialize();
+    NeuralNetwork::BasisFunction::BasisFunction *deserialized = NeuralNetwork::BasisFunction::Factory::deserialize(l.serialize()).release();
+    ASSERT_EQ(tmp, deserialized->serialize().serialize());
+    delete deserialized;
+}
+TEST(Product, Serialize) {
+    NeuralNetwork::BasisFunction::Product l;
+    std::string tmp = l.serialize().serialize();
+    NeuralNetwork::BasisFunction::BasisFunction *deserialized = NeuralNetwork::BasisFunction::Factory::deserialize(l.serialize()).release();
+    ASSERT_EQ(tmp, deserialized->serialize().serialize());
+    delete deserialized;
+}
+TEST(Radial, Serialize) {
+    NeuralNetwork::BasisFunction::Radial l;
+    std::string tmp = l.serialize().serialize();
+    NeuralNetwork::BasisFunction::BasisFunction *deserialized = NeuralNetwork::BasisFunction::Factory::deserialize(l.serialize()).release();
+    ASSERT_EQ(tmp, deserialized->serialize().serialize());
+    delete deserialized;
+}
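
The exact ASSERT_EQ comparisons are safe here because 39, 88, 220 and 400 are small integers that float represents exactly; for dot products of arbitrary values a tolerance is the safer pattern. A sketch:

#include <gtest/gtest.h>

TEST(LinearBasis, ToleranceSketch) {
    float dot = 0.1f * 0.2f + 0.3f * 0.4f; // 0.14 is not exactly representable
    EXPECT_NEAR(dot, 0.14f, 1e-6f);
}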

View File

@@ -1,73 +1,87 @@
 #include <NeuralNetwork/FeedForward/Network.h>
-#include <cassert>
 #include <iostream>
+#include <gtest/gtest.h>
-int main() {
-    std::string serialized;
-    { // XOR problem
-        NeuralNetwork::FeedForward::Network n(2);
-        NeuralNetwork::ActivationFunction::Sigmoid a(-1);
-        NeuralNetwork::FeedForward::Layer &hidden = n.appendLayer(2,a);
-        NeuralNetwork::FeedForward::Layer &out = n.appendLayer(1,a);
-        hidden[1].weight(n[0][0])=7;
-        hidden[1].weight(n[0][1])=-4.7;
-        hidden[1].weight(n[0][2])=-4.7;
-        hidden[2].weight(n[0][0])=2.6;
-        hidden[2].weight(n[0][1])=-6.4;
-        hidden[2].weight(n[0][2])=-6.4;
-        out[1].weight(hidden[0])=-4.5;
-        out[1].weight(hidden[1])=9.6;
-        out[1].weight(hidden[2])=-6.8;
-        {
-            std::vector<float> ret = n.computeOutput({1,1});
-            assert(ret[0] < 0.5);
-        }
-        {
-            std::vector<float> ret = n.computeOutput({0,1});
-            assert(ret[0] > 0.5);
-        }
-        {
-            std::vector<float> ret = n.computeOutput({1,0});
-            assert(ret[0] > 0.5);
-        }
-        {
-            std::vector<float> ret = n.computeOutput({0,0});
-            assert(ret[0] < 0.5);
-        }
-        serialized = n.serialize().serialize();
-    }
-    {
-        NeuralNetwork::FeedForward::Network *deserialized = NeuralNetwork::FeedForward::Network::Factory::deserialize(serialized).release();
-        {
-            std::vector<float> ret = deserialized->computeOutput({1,1});
-            assert(ret[0] < 0.5);
-        }
-        {
-            std::vector<float> ret = deserialized->computeOutput({0,1});
-            assert(ret[0] > 0.5);
-        }
-        {
-            std::vector<float> ret = deserialized->computeOutput({1,0});
-            assert(ret[0] > 0.5);
-        }
-        {
-            std::vector<float> ret = deserialized->computeOutput({0,0});
-            assert(ret[0] < 0.5);
-        }
-        delete deserialized;
-    }
-}
+TEST(FeedForward, XOR) {
+    NeuralNetwork::FeedForward::Network n(2);
+    NeuralNetwork::ActivationFunction::Sigmoid a(-1);
+    NeuralNetwork::FeedForward::Layer &hidden = n.appendLayer(2,a);
+    NeuralNetwork::FeedForward::Layer &out = n.appendLayer(1,a);
+    hidden[1].weight(n[0][0])=7;
+    hidden[1].weight(n[0][1])=-4.7;
+    hidden[1].weight(n[0][2])=-4.7;
+    hidden[2].weight(n[0][0])=2.6;
+    hidden[2].weight(n[0][1])=-6.4;
+    hidden[2].weight(n[0][2])=-6.4;
+    out[1].weight(hidden[0])=-4.5;
+    out[1].weight(hidden[1])=9.6;
+    out[1].weight(hidden[2])=-6.8;
+    {
+        std::vector<float> ret = n.computeOutput({1,1});
+        ASSERT_LT(ret[0], 0.5);
+    }
+    {
+        std::vector<float> ret = n.computeOutput({0,1});
+        ASSERT_GT(ret[0], 0.5);
+    }
+    {
+        std::vector<float> ret = n.computeOutput({1,0});
+        ASSERT_GT(ret[0], 0.5);
+    }
+    {
+        std::vector<float> ret = n.computeOutput({0,0});
+        ASSERT_LT(ret[0], 0.5);
+    }
+}
+TEST(FeedForward, Serialization) {
+    NeuralNetwork::FeedForward::Network n(2);
+    NeuralNetwork::ActivationFunction::Sigmoid a(-1);
+    NeuralNetwork::FeedForward::Layer &hidden = n.appendLayer(2,a);
+    NeuralNetwork::FeedForward::Layer &out = n.appendLayer(1,a);
+    hidden[1].weight(n[0][0])=7;
+    hidden[1].weight(n[0][1])=-4.7;
+    hidden[1].weight(n[0][2])=-4.7;
+    hidden[2].weight(n[0][0])=2.6;
+    hidden[2].weight(n[0][1])=-6.4;
+    hidden[2].weight(n[0][2])=-6.4;
+    out[1].weight(hidden[0])=-4.5;
+    out[1].weight(hidden[1])=9.6;
+    out[1].weight(hidden[2])=-6.8;
+    std::string serialized = n.serialize().serialize();
+    NeuralNetwork::FeedForward::Network *deserialized = NeuralNetwork::FeedForward::Network::Factory::deserialize(serialized).release();
+    {
+        std::vector<float> ret = deserialized->computeOutput({1,1});
+        ASSERT_LT(ret[0], 0.5);
+    }
+    {
+        std::vector<float> ret = deserialized->computeOutput({0,1});
+        ASSERT_GT(ret[0], 0.5);
+    }
+    {
+        std::vector<float> ret = deserialized->computeOutput({1,0});
+        ASSERT_GT(ret[0], 0.5);
+    }
+    {
+        std::vector<float> ret = deserialized->computeOutput({0,0});
+        ASSERT_LT(ret[0], 0.5);
+    }
+    delete deserialized;
+}
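
For reference, the hand-set weights do encode XOR. Assuming index 0 on each layer is the bias unit and Sigmoid(-1) is the standard logistic 1/(1+e^-x) (consistent with the values checked in the activation tests), the arithmetic works out as:

// Hand evaluation of the fixed 2-2-1 network (sigma = logistic function):
//   input (0,1): hidden1 = sigma(7 - 4.7)                    ~ 0.91
//                hidden2 = sigma(2.6 - 6.4)                  ~ 0.02
//                out     = sigma(-4.5 + 9.6*0.91 - 6.8*0.02) ~ 0.98  -> > 0.5
//   input (1,1): hidden1 = sigma(7 - 4.7 - 4.7)              ~ 0.08
//                hidden2 = sigma(2.6 - 6.4 - 6.4)            ~ 0.00
//                out     = sigma(-4.5 + 9.6*0.08 - 6.8*0.00) ~ 0.02  -> < 0.5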

View File

@@ -1,116 +1,120 @@
 #include <NeuralNetwork/FeedForward/Network.h>
-#include <cassert>
 #include <iostream>
-#include "../include/NeuralNetwork/Learning/OpticalBackPropagation.h"
+#include <NeuralNetwork/Learning/OpticalBackPropagation.h>
+#include <gtest/gtest.h>
-int main() {
-    { // XOR problem
-        NeuralNetwork::FeedForward::Network n(2);
-        NeuralNetwork::ActivationFunction::Sigmoid a(-1);
-        n.appendLayer(2,a);
-        n.appendLayer(1,a);
-        n.randomizeWeights();
-        NeuralNetwork::Learning::OpticalBackPropagation prop(n);
-        for(int i=0;i<10000;i++) {
-            prop.teach({1,0},{1});
-            prop.teach({1,1},{0});
-            prop.teach({0,0},{0});
-            prop.teach({0,1},{1});
-        }
-        {
-            std::vector<float> ret = n.computeOutput({1,1});
-            assert(ret[0] < 0.1);
-        }
-        {
-            std::vector<float> ret = n.computeOutput({0,1});
-            assert(ret[0] > 0.9);
-        }
-        {
-            std::vector<float> ret = n.computeOutput({1,0});
-            assert(ret[0] > 0.9);
-        }
-        {
-            std::vector<float> ret = n.computeOutput({0,0});
-            assert(ret[0] < 0.1);
-        }
-    }
-    { // AND problem
-        NeuralNetwork::FeedForward::Network n(2);
-        NeuralNetwork::ActivationFunction::Sigmoid a(-1);
-        n.appendLayer(2,a);
-        n.appendLayer(1,a);
-        n.randomizeWeights();
-        NeuralNetwork::Learning::OpticalBackPropagation prop(n);
-        for(int i=0;i<10000;i++) {
-            prop.teach({1,1},{1});
-            prop.teach({0,0},{0});
-            prop.teach({0,1},{0});
-            prop.teach({1,0},{0});
-        }
-        {
-            std::vector<float> ret = n.computeOutput({1,1});
-            assert(ret[0] > 0.9);
-        }
-        {
-            std::vector<float> ret = n.computeOutput({0,1});
-            assert(ret[0] < 0.1);
-        }
-        {
-            std::vector<float> ret = n.computeOutput({1,0});
-            assert(ret[0] < 0.1);
-        }
-        {
-            std::vector<float> ret = n.computeOutput({0,0});
-            assert(ret[0] < 0.1);
-        }
-    }
-    { // NOT AND problem
-        NeuralNetwork::FeedForward::Network n(2);
-        NeuralNetwork::ActivationFunction::Sigmoid a(-1);
-        n.appendLayer(2,a);
-        n.appendLayer(1,a);
-        n.randomizeWeights();
-        NeuralNetwork::Learning::OpticalBackPropagation prop(n);
-        for(int i=0;i<10000;i++) {
-            prop.teach({1,1},{0});
-            prop.teach({0,0},{1});
-            prop.teach({0,1},{1});
-            prop.teach({1,0},{1});
-        }
-        {
-            std::vector<float> ret = n.computeOutput({1,1});
-            assert(ret[0] < 0.1);
-        }
-        {
-            std::vector<float> ret = n.computeOutput({0,1});
-            assert(ret[0] > 0.9);
-        }
-        {
-            std::vector<float> ret = n.computeOutput({1,0});
-            assert(ret[0] > 0.9);
-        }
-        {
-            std::vector<float> ret = n.computeOutput({0,0});
-            assert(ret[0] > 0.9);
-        }
-    }
-}
+TEST(OpticalBackPropagation,XOR) {
+    NeuralNetwork::FeedForward::Network n(2);
+    NeuralNetwork::ActivationFunction::Sigmoid a(-1);
+    n.appendLayer(2,a);
+    n.appendLayer(1,a);
+    n.randomizeWeights();
+    NeuralNetwork::Learning::OpticalBackPropagation prop(n);
+    for(int i=0;i<10000;i++) {
+        prop.teach({1,0},{1});
+        prop.teach({1,1},{0});
+        prop.teach({0,0},{0});
+        prop.teach({0,1},{1});
+    }
+    {
+        std::vector<float> ret = n.computeOutput({1,1});
+        ASSERT_LT(ret[0], 0.1);
+    }
+    {
+        std::vector<float> ret = n.computeOutput({0,1});
+        ASSERT_GT(ret[0], 0.9);
+    }
+    {
+        std::vector<float> ret = n.computeOutput({1,0});
+        ASSERT_GT(ret[0], 0.9);
+    }
+    {
+        std::vector<float> ret = n.computeOutput({0,0});
+        ASSERT_LT(ret[0], 0.1);
+    }
+}
+TEST(OpticalBackPropagation,AND) {
+    NeuralNetwork::FeedForward::Network n(2);
+    NeuralNetwork::ActivationFunction::Sigmoid a(-1);
+    n.appendLayer(2,a);
+    n.appendLayer(1,a);
+    n.randomizeWeights();
+    NeuralNetwork::Learning::OpticalBackPropagation prop(n);
+    for(int i=0;i<10000;i++) {
+        prop.teach({1,1},{1});
+        prop.teach({0,0},{0});
+        prop.teach({0,1},{0});
+        prop.teach({1,0},{0});
+    }
+    {
+        std::vector<float> ret = n.computeOutput({1,1});
+        ASSERT_GT(ret[0], 0.9);
+    }
+    {
+        std::vector<float> ret = n.computeOutput({0,1});
+        ASSERT_LT(ret[0], 0.1);
+    }
+    {
+        std::vector<float> ret = n.computeOutput({1,0});
+        ASSERT_LT(ret[0], 0.1);
+    }
+    {
+        std::vector<float> ret = n.computeOutput({0,0});
+        ASSERT_LT(ret[0], 0.1);
+    }
+}
+TEST(OpticalBackPropagation,NOTAND) {
+    NeuralNetwork::FeedForward::Network n(2);
+    NeuralNetwork::ActivationFunction::Sigmoid a(-1);
+    n.appendLayer(2,a);
+    n.appendLayer(1,a);
+    n.randomizeWeights();
+    NeuralNetwork::Learning::OpticalBackPropagation prop(n);
+    for(int i=0;i<10000;i++) {
+        prop.teach({1,1},{0});
+        prop.teach({0,0},{1});
+        prop.teach({0,1},{1});
+        prop.teach({1,0},{1});
+    }
+    {
+        std::vector<float> ret = n.computeOutput({1,1});
+        ASSERT_LT(ret[0], 0.1);
+    }
+    {
+        std::vector<float> ret = n.computeOutput({0,1});
+        ASSERT_GT(ret[0], 0.9);
+    }
+    {
+        std::vector<float> ret = n.computeOutput({1,0});
+        ASSERT_GT(ret[0], 0.9);
+    }
+    {
+        std::vector<float> ret = n.computeOutput({0,0});
+        ASSERT_GT(ret[0], 0.9);
+    }
+}

View File

@@ -1,16 +1,17 @@
 #include <NeuralNetwork/FeedForward/Perceptron.h>
-#include <assert.h>
 #include <iostream>
+#include <gtest/gtest.h>
-int main() {
+TEST(Perceptron,Test) {
     NeuralNetwork::FeedForward::Perceptron p(2,1);
     p[1].weight(0)=-1.0;
     p[1].weight(1)=1.001;
-    assert(p.computeOutput({1,1})[0] == 1.0);
-    p[1].weight(1)=0.999;
-    assert(p.computeOutput({1,1})[0] == 0.0);
+    float ret = p.computeOutput({1,1})[0];
+    ASSERT_EQ(ret, 1.0);
+    p[1].weight(1)=0.999;
+    ret = p.computeOutput({1,1})[0];
+    ASSERT_EQ(ret, 0.0);
 }
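
The two checks straddle the firing threshold: with both inputs at 1 the weighted sum flips sign between the assignments, so the test pins the step function exactly at zero.

// net = (-1.0)*1 + (1.001)*1 = +0.001  -> step fires,  output 1.0
// net = (-1.0)*1 + (0.999)*1 = -0.001  -> below step,  output 0.0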

View File

@@ -1,41 +1,39 @@
 #include <NeuralNetwork/Learning/PerceptronLearning.h>
-#include <cassert>
 #include <iostream>
+#include <gtest/gtest.h>
-int main() {
-    { // XOR problem
-        NeuralNetwork::FeedForward::Perceptron n(2,1);
-        n.randomizeWeights();
-        NeuralNetwork::Learning::PerceptronLearning learn(n);
-        for(int i=0;i<10;i++) {
-            learn.teach({1,0},{1});
-            learn.teach({1,1},{1});
-            learn.teach({0,0},{0});
-            learn.teach({0,1},{1});
-        }
-        {
-            std::vector<float> ret = n.computeOutput({1,1});
-            assert(ret[0] > 0.9);
-        }
-        {
-            std::vector<float> ret = n.computeOutput({0,1});
-            assert(ret[0] > 0.9);
-        }
-        {
-            std::vector<float> ret = n.computeOutput({1,0});
-            assert(ret[0] > 0.9);
-        }
-        {
-            std::vector<float> ret = n.computeOutput({0,0});
-            assert(ret[0] < 0.1);
-        }
-    }
-}
+TEST(PerceptronLearning,OR) {
+    NeuralNetwork::FeedForward::Perceptron n(2,1);
+    NeuralNetwork::Learning::PerceptronLearning learn(n);
+    n.randomizeWeights();
+    for(int i=0;i<10;i++) {
+        learn.teach({1,0},{1});
+        learn.teach({1,1},{1});
+        learn.teach({0,0},{0});
+        learn.teach({0,1},{1});
+    }
+    {
+        std::vector<float> ret = n.computeOutput({1,1});
+        ASSERT_GT(ret[0], 0.9);
+    }
+    {
+        std::vector<float> ret = n.computeOutput({0,1});
+        ASSERT_GT(ret[0], 0.9);
+    }
+    {
+        std::vector<float> ret = n.computeOutput({1,0});
+        ASSERT_GT(ret[0], 0.9);
+    }
+    {
+        std::vector<float> ret = n.computeOutput({0,0});
+        ASSERT_LT(ret[0], 0.1);
+    }
+}
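
The suite is named OR rather than XOR (the old comment's label) because the truth table actually taught and checked here is logical OR; XOR is not linearly separable, so a single-layer perceptron could not learn it at all (the classic Minsky-Papert limitation).

// Taught table:  0 0 -> 0
//                0 1 -> 1
//                1 0 -> 1
//                1 1 -> 1   (OR; XOR would need 1 1 -> 0)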

View File

@@ -1,116 +1,118 @@
 #include <NeuralNetwork/FeedForward/Network.h>
-#include <cassert>
 #include <iostream>
-#include "../include/NeuralNetwork/Learning/QuickPropagation.h"
+#include <NeuralNetwork/Learning/QuickPropagation.h>
+#include <gtest/gtest.h>
-int main() {
-    { // XOR problem
-        NeuralNetwork::FeedForward::Network n(2);
-        NeuralNetwork::ActivationFunction::Sigmoid a(-1);
-        n.appendLayer(2,a);
-        n.appendLayer(1,a);
-        n.randomizeWeights();
-        NeuralNetwork::Learning::QuickPropagation prop(n);
-        for(int i=0;i<10000;i++) {
-            prop.teach({1,0},{1});
-            prop.teach({1,1},{0});
-            prop.teach({0,0},{0});
-            prop.teach({0,1},{1});
-        }
-        {
-            std::vector<float> ret = n.computeOutput({1,1});
-            assert(ret[0] < 0.1);
-        }
-        {
-            std::vector<float> ret = n.computeOutput({0,1});
-            assert(ret[0] > 0.9);
-        }
-        {
-            std::vector<float> ret = n.computeOutput({1,0});
-            assert(ret[0] > 0.9);
-        }
-        {
-            std::vector<float> ret = n.computeOutput({0,0});
-            assert(ret[0] < 0.1);
-        }
-    }
-    { // AND problem
-        NeuralNetwork::FeedForward::Network n(2);
-        NeuralNetwork::ActivationFunction::Sigmoid a(-1);
-        n.appendLayer(2,a);
-        n.appendLayer(1,a);
-        n.randomizeWeights();
-        NeuralNetwork::Learning::QuickPropagation prop(n);
-        for(int i=0;i<10000;i++) {
-            prop.teach({1,1},{1});
-            prop.teach({0,0},{0});
-            prop.teach({0,1},{0});
-            prop.teach({1,0},{0});
-        }
-        {
-            std::vector<float> ret = n.computeOutput({1,1});
-            assert(ret[0] > 0.9);
-        }
-        {
-            std::vector<float> ret = n.computeOutput({0,1});
-            assert(ret[0] < 0.1);
-        }
-        {
-            std::vector<float> ret = n.computeOutput({1,0});
-            assert(ret[0] < 0.1);
-        }
-        {
-            std::vector<float> ret = n.computeOutput({0,0});
-            assert(ret[0] < 0.1);
-        }
-    }
-    { // NOT AND problem
-        NeuralNetwork::FeedForward::Network n(2);
-        NeuralNetwork::ActivationFunction::Sigmoid a(-1);
-        n.appendLayer(2,a);
-        n.appendLayer(1,a);
-        n.randomizeWeights();
-        NeuralNetwork::Learning::QuickPropagation prop(n);
-        for(int i=0;i<10000;i++) {
-            prop.teach({1,1},{0});
-            prop.teach({0,0},{1});
-            prop.teach({0,1},{1});
-            prop.teach({1,0},{1});
-        }
-        {
-            std::vector<float> ret = n.computeOutput({1,1});
-            assert(ret[0] < 0.1);
-        }
-        {
-            std::vector<float> ret = n.computeOutput({0,1});
-            assert(ret[0] > 0.9);
-        }
-        {
-            std::vector<float> ret = n.computeOutput({1,0});
-            assert(ret[0] > 0.9);
-        }
-        {
-            std::vector<float> ret = n.computeOutput({0,0});
-            assert(ret[0] > 0.9);
-        }
-    }
-}
+TEST(QuickPropagation,XOR) {
+    NeuralNetwork::FeedForward::Network n(2);
+    NeuralNetwork::ActivationFunction::Sigmoid a(-1);
+    n.appendLayer(2,a);
+    n.appendLayer(1,a);
+    n.randomizeWeights();
+    NeuralNetwork::Learning::QuickPropagation prop(n);
+    for(int i=0;i<10000;i++) {
+        prop.teach({1,0},{1});
+        prop.teach({1,1},{0});
+        prop.teach({0,0},{0});
+        prop.teach({0,1},{1});
+    }
+    {
+        std::vector<float> ret = n.computeOutput({1,1});
+        ASSERT_LT(ret[0], 0.1);
+    }
+    {
+        std::vector<float> ret = n.computeOutput({0,1});
+        ASSERT_GT(ret[0], 0.9);
+    }
+    {
+        std::vector<float> ret = n.computeOutput({1,0});
+        ASSERT_GT(ret[0], 0.9);
+    }
+    {
+        std::vector<float> ret = n.computeOutput({0,0});
+        ASSERT_LT(ret[0], 0.1);
+    }
+}
+TEST(QuickPropagation,AND) {
+    NeuralNetwork::FeedForward::Network n(2);
+    NeuralNetwork::ActivationFunction::Sigmoid a(-1);
+    n.appendLayer(2,a);
+    n.appendLayer(1,a);
+    n.randomizeWeights();
+    NeuralNetwork::Learning::QuickPropagation prop(n);
+    for(int i=0;i<10000;i++) {
+        prop.teach({1,1},{1});
+        prop.teach({0,0},{0});
+        prop.teach({0,1},{0});
+        prop.teach({1,0},{0});
+    }
+    {
+        std::vector<float> ret = n.computeOutput({1,1});
+        ASSERT_GT(ret[0], 0.9);
+    }
+    {
+        std::vector<float> ret = n.computeOutput({0,1});
+        ASSERT_LT(ret[0], 0.1);
+    }
+    {
+        std::vector<float> ret = n.computeOutput({1,0});
+        ASSERT_LT(ret[0], 0.1);
+    }
+    {
+        std::vector<float> ret = n.computeOutput({0,0});
+        ASSERT_LT(ret[0], 0.1);
+    }
+}
+TEST(QuickPropagation,NOTAND) {
+    NeuralNetwork::FeedForward::Network n(2);
+    NeuralNetwork::ActivationFunction::Sigmoid a(-1);
+    n.appendLayer(2,a);
+    n.appendLayer(1,a);
+    n.randomizeWeights();
+    NeuralNetwork::Learning::QuickPropagation prop(n);
+    for(int i=0;i<10000;i++) {
+        prop.teach({1,1},{0});
+        prop.teach({0,0},{1});
+        prop.teach({0,1},{1});
+        prop.teach({1,0},{1});
+    }
+    {
+        std::vector<float> ret = n.computeOutput({1,1});
+        ASSERT_LT(ret[0], 0.1);
+    }
+    {
+        std::vector<float> ret = n.computeOutput({0,1});
+        ASSERT_GT(ret[0], 0.9);
+    }
+    {
+        std::vector<float> ret = n.computeOutput({1,0});
+        ASSERT_GT(ret[0], 0.9);
+    }
+    {
+        std::vector<float> ret = n.computeOutput({0,0});
+        ASSERT_GT(ret[0], 0.9);
+    }
+}
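
BackProp, OpticalBackPropagation and QuickPropagation now run identical XOR/AND/NOTAND bodies that differ only in the learner type; gtest typed tests could share a single body. A sketch, assuming each learner is constructible from a Network (note the macro is spelled TYPED_TEST_CASE in gtest releases of this era, TYPED_TEST_SUITE in current ones):

#include <NeuralNetwork/FeedForward/Network.h>
#include <NeuralNetwork/Learning/BackPropagation.h>
#include <NeuralNetwork/Learning/OpticalBackPropagation.h>
#include <NeuralNetwork/Learning/QuickPropagation.h>
#include <gtest/gtest.h>

template <typename Learner>
class PropagationXOR : public ::testing::Test {};

using Learners = ::testing::Types<NeuralNetwork::Learning::BackPropagation,
                                  NeuralNetwork::Learning::OpticalBackPropagation,
                                  NeuralNetwork::Learning::QuickPropagation>;
TYPED_TEST_SUITE(PropagationXOR, Learners);

TYPED_TEST(PropagationXOR, LearnsTruthTable) {
    NeuralNetwork::FeedForward::Network n(2);
    NeuralNetwork::ActivationFunction::Sigmoid a(-1);
    n.appendLayer(2, a);
    n.appendLayer(1, a);
    n.randomizeWeights();
    TypeParam prop(n); // the learner under test
    for (int i = 0; i < 10000; ++i) {
        prop.teach({1, 0}, {1});
        prop.teach({1, 1}, {0});
        prop.teach({0, 0}, {0});
        prop.teach({0, 1}, {1});
    }
    ASSERT_LT(n.computeOutput({1, 1})[0], 0.1);
    ASSERT_GT(n.computeOutput({0, 1})[0], 0.9);
    ASSERT_GT(n.computeOutput({1, 0})[0], 0.9);
    ASSERT_LT(n.computeOutput({0, 0})[0], 0.1);
}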

View File

@@ -1,9 +1,8 @@
 #include <NeuralNetwork/Recurrent/Network.h>
-#include <assert.h>
 #include <iostream>
+#include <gtest/gtest.h>
-int main() {
+TEST(Recurrent, Sample) {
     NeuralNetwork::Recurrent::Network a(2,1,1);
     a.getNeurons()[4]->weight(1)=0.05;
@@ -15,7 +14,7 @@ int main() {
     for(size_t i=0;i<solutions.size();i++) {
         float res = a.computeOutput({1,0.7})[0];
-        assert(res > solutions[i]*0.999 && res < solutions[i]*1.001);
+        ASSERT_FLOAT_EQ(res, solutions[i]);
     }
     std::string str = a.stringify();
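
One behavioral change worth flagging: ASSERT_FLOAT_EQ passes only when the two floats are within 4 ULPs of each other, which is far tighter than the old ±0.1% band. If the recurrent outputs are only approximately known, EXPECT_NEAR can reproduce the old tolerance; a sketch with hypothetical values:

#include <gtest/gtest.h>
#include <cmath>

TEST(Recurrent, ToleranceSketch) {
    float res = 0.99951f, expected = 1.0f;                    // hypothetical values
    EXPECT_NEAR(res, expected, std::abs(expected) * 0.001f);  // the old +/-0.1% band
}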