tests cleaning

This commit is contained in:
2016-05-08 12:08:35 +02:00
parent 44793b78b5
commit 4b42a8c310
8 changed files with 119 additions and 7 deletions

View File

@@ -4,8 +4,13 @@
#include <NeuralNetwork/ActivationFunction/HyperbolicTangent.h>
#include <NeuralNetwork/ActivationFunction/Linear.h>
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Weffc++"
#include <gtest/gtest.h>
#pragma GCC diagnostic pop
union SSE {
__m128 sse; // SSE 4 x float vector
float floats[4]; // scalar array of 4 floats

View File

@@ -1,8 +1,13 @@
#include <NeuralNetwork/FeedForward/Network.h>
#include <NeuralNetwork/Learning/BackPropagation.h>
#include <NeuralNetwork/ActivationFunction/HyperbolicTangent.h>
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Weffc++"
#include <gtest/gtest.h>
#pragma GCC diagnostic pop
TEST(BackProp,XOR) {
NeuralNetwork::FeedForward::Network n(2);
NeuralNetwork::ActivationFunction::Sigmoid a(-1);
@@ -41,10 +46,47 @@ TEST(BackProp,XOR) {
}
}
// Verifies that plain back-propagation can learn the XOR truth table when
// every neuron uses the hyperbolic-tangent activation function.
TEST(BackProp, XORHyperbolicTangent) {
    NeuralNetwork::FeedForward::Network network(2);
    NeuralNetwork::ActivationFunction::HyperbolicTangent activation(-1);
    network.appendLayer(2, activation);
    network.appendLayer(1, activation);
    network.randomizeWeights();

    NeuralNetwork::Learning::BackPropagation trainer(network);
    // Present the four XOR patterns repeatedly; the presentation order is
    // kept exactly as in the original test, since training is order-sensitive.
    for (int epoch = 0; epoch < 10000; ++epoch) {
        trainer.teach({1, 0}, {1});
        trainer.teach({1, 1}, {0});
        trainer.teach({0, 0}, {0});
        trainer.teach({0, 1}, {1});
    }

    // After training, outputs targeted at 0 must stay below 0.1 and
    // outputs targeted at 1 must exceed 0.9.
    const std::vector<float> out11 = network.computeOutput({1, 1});
    ASSERT_LT(out11[0], 0.1);
    const std::vector<float> out01 = network.computeOutput({0, 1});
    ASSERT_GT(out01[0], 0.9);
    const std::vector<float> out10 = network.computeOutput({1, 0});
    ASSERT_GT(out10[0], 0.9);
    const std::vector<float> out00 = network.computeOutput({0, 0});
    ASSERT_LT(out00[0], 0.1);
}
TEST(BackProp,AND) {
NeuralNetwork::FeedForward::Network n(2);
NeuralNetwork::ActivationFunction::Sigmoid a(-1);
n.appendLayer(2,a);
n.appendLayer(1,a);
n.randomizeWeights();

View File

@@ -1,7 +1,12 @@
#include <NeuralNetwork/FeedForward/Network.h>
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Weffc++"
#include <gtest/gtest.h>
#pragma GCC diagnostic pop
TEST(FeedForward, XOR) {
NeuralNetwork::FeedForward::Network n(2);
NeuralNetwork::ActivationFunction::Sigmoid a(-1);

View File

@@ -2,8 +2,12 @@
#include <NeuralNetwork/Learning/OpticalBackPropagation.h>
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Weffc++"
#include <gtest/gtest.h>
#pragma GCC diagnostic pop
TEST(OpticalBackPropagation,XOR) {
NeuralNetwork::FeedForward::Network n(2);

View File

@@ -1,7 +1,12 @@
#include <NeuralNetwork/FeedForward/Perceptron.h>
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Weffc++"
#include <gtest/gtest.h>
#pragma GCC diagnostic pop
TEST(Perceptron,Test) {
NeuralNetwork::FeedForward::Perceptron p(2,1);

View File

@@ -1,7 +1,11 @@
#include <NeuralNetwork/Learning/PerceptronLearning.h>
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Weffc++"
#include <gtest/gtest.h>
#pragma GCC diagnostic pop
TEST(PerceptronLearning,XOR) {
NeuralNetwork::FeedForward::Perceptron n(2,1);

View File

@@ -1,19 +1,25 @@
#include <NeuralNetwork/FeedForward/Network.h>
#include <NeuralNetwork/Learning/QuickPropagation.h>
#include <NeuralNetwork/ActivationFunction/HyperbolicTangent.h>
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Weffc++"
#include <gtest/gtest.h>
#pragma GCC diagnostic pop
TEST(QuickPropagation,XOR) {
NeuralNetwork::FeedForward::Network n(2);
NeuralNetwork::ActivationFunction::Sigmoid a(-1);
n.appendLayer(2,a);
n.appendLayer(10,a);
n.appendLayer(1,a);
n.randomizeWeights();
NeuralNetwork::Learning::QuickPropagation prop(n);
for(int i=0;i<10000;i++) {
for(int i=0;i<400;i++) {
prop.teach({1,0},{1});
prop.teach({1,1},{0});
prop.teach({0,0},{0});
@@ -43,7 +49,7 @@ TEST(QuickPropagation,XOR) {
TEST(QuickPropagation,AND) {
NeuralNetwork::FeedForward::Network n(2);
NeuralNetwork::ActivationFunction::Sigmoid a(-1);
NeuralNetwork::ActivationFunction::Sigmoid a(1.0);
n.appendLayer(2,a);
n.appendLayer(1,a);
@@ -51,11 +57,11 @@ TEST(QuickPropagation,AND) {
NeuralNetwork::Learning::QuickPropagation prop(n);
for(int i=0;i<10000;i++) {
for(int i=0;i<400;i++) {
prop.teach({1,1},{1});
prop.teach({1,0},{0});
prop.teach({0,0},{0});
prop.teach({0,1},{0});
prop.teach({1,0},{0});
}
{
@@ -115,4 +121,41 @@ TEST(QuickPropagation,NOTAND) {
std::vector<float> ret =n.computeOutput({0,0});
ASSERT_GT(ret[0], 0.9);
}
}
// Verifies that QuickPropagation learns NOT-AND (NAND) on a
// hyperbolic-tangent network using the bipolar {-1, 1} input encoding.
TEST(QuickPropagation, NOTANDHyperbolicTangent) {
    NeuralNetwork::FeedForward::Network n(2);
    NeuralNetwork::ActivationFunction::HyperbolicTangent a(1);
    n.appendLayer(2, a);
    n.appendLayer(1, a);
    n.randomizeWeights();

    NeuralNetwork::Learning::QuickPropagation prop(n);
    for (int i = 0; i < 10000; i++) {
        prop.teach({1, 1}, {-1});
        // Fixed: the original taught {-1, 0}, which is not a member of the
        // bipolar NAND truth table; the case asserted below is {-1, -1}.
        prop.teach({-1, -1}, {1});
        prop.teach({-1, 1}, {1});
        prop.teach({1, -1}, {1});
    }
    // NAND truth table in bipolar encoding: only (1, 1) maps to "false".
    {
        std::vector<float> ret = n.computeOutput({1, 1});
        ASSERT_LT(ret[0], 0.1);
    }
    {
        std::vector<float> ret = n.computeOutput({-1, 1});
        ASSERT_GT(ret[0], 0.9);
    }
    {
        std::vector<float> ret = n.computeOutput({1, -1});
        ASSERT_GT(ret[0], 0.9);
    }
    {
        std::vector<float> ret = n.computeOutput({-1, -1});
        ASSERT_GT(ret[0], 0.9);
    }
}

View File

@@ -1,7 +1,12 @@
#include <NeuralNetwork/Recurrent/Network.h>
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Weffc++"
#include <gtest/gtest.h>
#pragma GCC diagnostic pop
TEST(Recurrent, Sample) {
NeuralNetwork::Recurrent::Network a(2,1,1);
@@ -18,7 +23,6 @@ TEST(Recurrent, Sample) {
}
std::string str = a.stringify();
std::cout << str << std::endl;;
//deserialize and check it!
NeuralNetwork::Recurrent::Network *deserialized = (NeuralNetwork::Recurrent::Network::Factory::deserialize(str).release());