tests cleaning
@@ -4,8 +4,13 @@
 #include <NeuralNetwork/ActivationFunction/HyperbolicTangent.h>
 #include <NeuralNetwork/ActivationFunction/Linear.h>
 
+#pragma GCC diagnostic push
+#pragma GCC diagnostic ignored "-Weffc++"
+
 #include <gtest/gtest.h>
 
+#pragma GCC diagnostic pop
+
 union SSE {
     __m128 sse;       // SSE 4 x float vector
     float floats[4];  // scalar array of 4 floats
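
For reference, the SSE union above aliases the four packed lanes of an __m128 with a plain float array, presumably so the activation-function tests can read back individual lanes of a vectorized result. A minimal, self-contained sketch of that access pattern (standard SSE intrinsics only; how the test itself uses the union is not shown in this diff):

    #include <xmmintrin.h>   // __m128, _mm_set_ps, _mm_set1_ps, _mm_mul_ps

    union SSE {
        __m128 sse;       // SSE 4 x float vector
        float floats[4];  // scalar array of 4 floats
    };

    int main() {
        SSE v;
        // Lanes end up as {1, 2, 3, 4} * 0.5; _mm_set_ps takes its arguments high-to-low.
        v.sse = _mm_mul_ps(_mm_set_ps(4.f, 3.f, 2.f, 1.f), _mm_set1_ps(0.5f));
        // Reading floats[] after writing sse is type punning through a union;
        // GCC and Clang define this behaviour, which is what such tests typically rely on.
        return v.floats[0] == 0.5f ? 0 : 1;   // lane 0 holds 1 * 0.5
    }
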
@@ -1,8 +1,13 @@
 #include <NeuralNetwork/FeedForward/Network.h>
 #include <NeuralNetwork/Learning/BackPropagation.h>
+#include <NeuralNetwork/ActivationFunction/HyperbolicTangent.h>
 
+#pragma GCC diagnostic push
+#pragma GCC diagnostic ignored "-Weffc++"
+
 #include <gtest/gtest.h>
 
+#pragma GCC diagnostic pop
 TEST(BackProp,XOR) {
     NeuralNetwork::FeedForward::Network n(2);
     NeuralNetwork::ActivationFunction::Sigmoid a(-1);
@@ -41,10 +46,47 @@ TEST(BackProp,XOR) {
     }
 }
 
+TEST(BackProp,XORHyperbolicTangent) {
+    NeuralNetwork::FeedForward::Network n(2);
+    NeuralNetwork::ActivationFunction::HyperbolicTangent a(-1);
+    n.appendLayer(2,a);
+    n.appendLayer(1,a);
+
+    n.randomizeWeights();
+
+    NeuralNetwork::Learning::BackPropagation prop(n);
+
+    for(int i=0;i<10000;i++) {
+        prop.teach({1,0},{1});
+        prop.teach({1,1},{0});
+        prop.teach({0,0},{0});
+        prop.teach({0,1},{1});
+    }
+
+    {
+        std::vector<float> ret =n.computeOutput({1,1});
+        ASSERT_LT(ret[0], 0.1);
+    }
+
+    {
+        std::vector<float> ret =n.computeOutput({0,1});
+        ASSERT_GT(ret[0], 0.9);
+    }
+
+    {
+        std::vector<float> ret =n.computeOutput({1,0});
+        ASSERT_GT(ret[0], 0.9);
+    }
+
+    {
+        std::vector<float> ret =n.computeOutput({0,0});
+        ASSERT_LT(ret[0], 0.1);
+    }
+}
+
 TEST(BackProp,AND) {
     NeuralNetwork::FeedForward::Network n(2);
     NeuralNetwork::ActivationFunction::Sigmoid a(-1);
-    n.appendLayer(2,a);
     n.appendLayer(1,a);
 
     n.randomizeWeights();
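
The new XORHyperbolicTangent test trains the same 2-2-1 topology with a tanh activation instead of the sigmoid. The library's actual functor interface is not visible in this diff; as a rough sketch of the mathematics the class presumably wraps (the parameter name, method names, and the meaning of the constructor argument are assumptions):

    #include <cmath>

    // Hypothetical stand-in for NeuralNetwork::ActivationFunction::HyperbolicTangent;
    // the real class's interface is not shown in this diff.
    struct TanhSketch {
        explicit TanhSketch(float lambda) : lambda(lambda) {}
        // y = tanh(lambda * x), output range (-1, 1)
        float operator()(float x) const { return std::tanh(lambda * x); }
        // dy/dx = lambda * (1 - y^2), written in terms of the output y,
        // which is the form backpropagation usually consumes.
        float derivative(float y) const { return lambda * (1.0f - y * y); }
        float lambda;
    };

Since tanh ranges over (-1, 1), the test's targets of 0 and 1 are representable to within the 0.1/0.9 thresholds it asserts.
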
@@ -1,7 +1,12 @@
 #include <NeuralNetwork/FeedForward/Network.h>
 
+#pragma GCC diagnostic push
+#pragma GCC diagnostic ignored "-Weffc++"
+
 #include <gtest/gtest.h>
 
+#pragma GCC diagnostic pop
+
 TEST(FeedForward, XOR) {
     NeuralNetwork::FeedForward::Network n(2);
     NeuralNetwork::ActivationFunction::Sigmoid a(-1);
@@ -2,8 +2,12 @@
 
 #include <NeuralNetwork/Learning/OpticalBackPropagation.h>
 
+#pragma GCC diagnostic push
+#pragma GCC diagnostic ignored "-Weffc++"
+
 #include <gtest/gtest.h>
 
+#pragma GCC diagnostic pop
 
 TEST(OpticalBackPropagation,XOR) {
     NeuralNetwork::FeedForward::Network n(2);
@@ -1,7 +1,12 @@
 #include <NeuralNetwork/FeedForward/Perceptron.h>
 
+#pragma GCC diagnostic push
+#pragma GCC diagnostic ignored "-Weffc++"
+
 #include <gtest/gtest.h>
 
+#pragma GCC diagnostic pop
+
 TEST(Perceptron,Test) {
     NeuralNetwork::FeedForward::Perceptron p(2,1);
 
@@ -1,7 +1,11 @@
 #include <NeuralNetwork/Learning/PerceptronLearning.h>
 
+#pragma GCC diagnostic push
+#pragma GCC diagnostic ignored "-Weffc++"
+
 #include <gtest/gtest.h>
 
+#pragma GCC diagnostic pop
 
 TEST(PerceptronLearning,XOR) {
     NeuralNetwork::FeedForward::Perceptron n(2,1);
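
For context on the PerceptronLearning tests: the classic perceptron rule nudges each weight toward the target whenever a sample is misclassified. The library's own implementation is not shown here; a generic sketch of the rule (function name and the learning-rate default are illustrative):

    #include <cstddef>
    #include <vector>

    // Classic perceptron update, sketched for reference only (not the
    // NeuralNetwork::Learning::PerceptronLearning implementation):
    // w_i += eta * (target - output) * x_i, and likewise for the bias.
    void perceptronTeach(std::vector<float>& weights, float& bias,
                         const std::vector<float>& input,
                         float target, float output, float eta = 0.1f) {
        const float error = target - output;   // zero when the prediction is already correct
        for (std::size_t i = 0; i < weights.size() && i < input.size(); ++i)
            weights[i] += eta * error * input[i];
        bias += eta * error;
    }

Note that XOR is not linearly separable, so a single perceptron trained with this rule can fit at most three of the four XOR cases.
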
@@ -1,19 +1,25 @@
 #include <NeuralNetwork/FeedForward/Network.h>
 #include <NeuralNetwork/Learning/QuickPropagation.h>
+#include <NeuralNetwork/ActivationFunction/HyperbolicTangent.h>
 
+#pragma GCC diagnostic push
+#pragma GCC diagnostic ignored "-Weffc++"
+
 #include <gtest/gtest.h>
 
+#pragma GCC diagnostic pop
+
 TEST(QuickPropagation,XOR) {
     NeuralNetwork::FeedForward::Network n(2);
     NeuralNetwork::ActivationFunction::Sigmoid a(-1);
-    n.appendLayer(2,a);
+    n.appendLayer(10,a);
     n.appendLayer(1,a);
 
     n.randomizeWeights();
 
     NeuralNetwork::Learning::QuickPropagation prop(n);
 
-    for(int i=0;i<10000;i++) {
+    for(int i=0;i<400;i++) {
         prop.teach({1,0},{1});
         prop.teach({1,1},{0});
         prop.teach({0,0},{0});
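
The training loop shrinks from 10000 to 400 iterations here, consistent with quickprop converging in far fewer epochs than plain backpropagation on small problems. As a reference-only sketch of Fahlman's per-weight quickprop step (the textbook rule, not necessarily how NeuralNetwork::Learning::QuickPropagation implements it; names and the growth limit are illustrative):

    #include <cmath>

    // One quickprop step for a single weight (Fahlman, 1988), sketched for illustration.
    //   grad     - dE/dw measured this epoch
    //   prevGrad - dE/dw measured the previous epoch
    //   prevStep - weight change applied the previous epoch
    float quickpropStep(float grad, float prevGrad, float prevStep,
                        float learningRate, float maxGrowth = 1.75f) {
        if (prevStep == 0.0f)                      // first epoch: fall back to a gradient step
            return -learningRate * grad;
        const float denom = prevGrad - grad;
        float step = (denom != 0.0f) ? (grad / denom) * prevStep
                                     : -learningRate * grad;
        // Cap the step so it never grows more than maxGrowth times the previous one.
        const float limit = maxGrowth * std::fabs(prevStep);
        if (std::fabs(step) > limit)
            step = (step > 0.0f) ? limit : -limit;
        return step;
    }
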
@@ -43,7 +49,7 @@ TEST(QuickPropagation,XOR) {
 
 TEST(QuickPropagation,AND) {
     NeuralNetwork::FeedForward::Network n(2);
-    NeuralNetwork::ActivationFunction::Sigmoid a(-1);
+    NeuralNetwork::ActivationFunction::Sigmoid a(1.0);
     n.appendLayer(2,a);
     n.appendLayer(1,a);
 
@@ -51,11 +57,11 @@ TEST(QuickPropagation,AND) {
 
     NeuralNetwork::Learning::QuickPropagation prop(n);
 
-    for(int i=0;i<10000;i++) {
+    for(int i=0;i<400;i++) {
         prop.teach({1,1},{1});
+        prop.teach({1,0},{0});
         prop.teach({0,0},{0});
         prop.teach({0,1},{0});
-        prop.teach({1,0},{0});
     }
 
     {
@@ -115,4 +121,41 @@ TEST(QuickPropagation,NOTAND) {
         std::vector<float> ret =n.computeOutput({0,0});
         ASSERT_GT(ret[0], 0.9);
     }
+}
+TEST(QuickPropagation,NOTANDHyperbolicTangent) {
+    NeuralNetwork::FeedForward::Network n(2);
+    NeuralNetwork::ActivationFunction::HyperbolicTangent a(1);
+    n.appendLayer(2,a);
+    n.appendLayer(1,a);
+
+    n.randomizeWeights();
+
+    NeuralNetwork::Learning::QuickPropagation prop(n);
+
+    for(int i=0;i<10000;i++) {
+        prop.teach({1,1},{-1});
+        prop.teach({-1,0},{1});
+        prop.teach({-1,1},{1});
+        prop.teach({1,-1},{1});
+    }
+
+    {
+        std::vector<float> ret =n.computeOutput({1,1});
+        ASSERT_LT(ret[0], 0.1);
+    }
+
+    {
+        std::vector<float> ret =n.computeOutput({-1,1});
+        ASSERT_GT(ret[0], 0.9);
+    }
+
+    {
+        std::vector<float> ret =n.computeOutput({1,-1});
+        ASSERT_GT(ret[0], 0.9);
+    }
+
+    {
+        std::vector<float> ret =n.computeOutput({-1,-1});
+        ASSERT_GT(ret[0], 0.9);
+    }
 }
@@ -1,7 +1,12 @@
 #include <NeuralNetwork/Recurrent/Network.h>
 
+#pragma GCC diagnostic push
+#pragma GCC diagnostic ignored "-Weffc++"
+
 #include <gtest/gtest.h>
 
+#pragma GCC diagnostic pop
+
 TEST(Recurrent, Sample) {
     NeuralNetwork::Recurrent::Network a(2,1,1);
 
@@ -18,7 +23,6 @@ TEST(Recurrent, Sample) {
     }
 
     std::string str = a.stringify();
-    std::cout << str << std::endl;;
 
     //deserialize and check it!
     NeuralNetwork::Recurrent::Network *deserialized = (NeuralNetwork::Recurrent::Network::Factory::deserialize(str).release());
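
The stringify/deserialize round trip above releases the deserialized network into a raw pointer. A hedged sketch of a leak-free variant, assuming Factory::deserialize returns a std::unique_ptr<Network> (suggested by the release() call) and that stringify() output is stable across a round trip; the test name is hypothetical and not part of this commit:

    #include <NeuralNetwork/Recurrent/Network.h>

    #include <string>

    #pragma GCC diagnostic push
    #pragma GCC diagnostic ignored "-Weffc++"
    #include <gtest/gtest.h>
    #pragma GCC diagnostic pop

    // Hypothetical extra test: keep the deserialized network in the smart
    // pointer instead of calling release(), so it cannot leak.
    TEST(Recurrent, SerializeRoundTrip) {
        NeuralNetwork::Recurrent::Network a(2,1,1);

        std::string str = a.stringify();

        auto deserialized = NeuralNetwork::Recurrent::Network::Factory::deserialize(str);
        ASSERT_TRUE(deserialized != nullptr);
        // Assumes stringify() is deterministic, so a round trip reproduces the string.
        EXPECT_EQ(deserialized->stringify(), str);
    }
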