adapting more tests to gtest

2016-03-31 16:48:02 +02:00
parent 5c1d1efcc0
commit e6a5882e58
4 changed files with 169 additions and 174 deletions

View File

@@ -89,21 +89,20 @@ IF(ENABLE_TESTS)
add_test(activation tests/backpropagation)
set_property(TEST activation PROPERTY LABELS unit)
#[[
add_test(backpropagation tests/backpropagation)
set_property(TEST backpropagation PROPERTY LABELS unit)
add_test(backpropagation_function_cmp tests/backpropagation_function_cmp)
set_property(TEST backpropagation_function_cmp PROPERTY LABELS unit)
add_test(basis tests/basis)
set_property(TEST basis PROPERTY LABELS unit)
add_test(feedforward tests/feedforward)
set_property(TEST feedforward PROPERTY LABELS unit)
#[[
add_test(backpropagation_function_cmp tests/backpropagation_function_cmp)
set_property(TEST backpropagation_function_cmp PROPERTY LABELS unit)
add_test(recurrent tests/recurrent)
set_property(TEST recurrent PROPERTY LABELS unit)
@@ -131,5 +130,5 @@ IF(ENABLE_TESTS)
add_test(recurrent_perf tests/recurrent_perf)
set_property(TEST recurrent_perf PROPERTY LABELS perf)
]]
]]
ENDIF(ENABLE_TESTS)
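
For context, every test that has been ported follows the same registration pattern: add_test points CTest at the built test binary, and a LABELS property tags it as a unit (or perf) test so runs can be filtered with ctest -L; the #[[ ... ]] bracket comment keeps the not-yet-ported tests disabled. A minimal sketch of the pattern, using a hypothetical target name "mytest":

    # Register a built test binary with CTest and tag it, so that `ctest -L unit`
    # runs only the unit-labelled tests and `ctest -L perf` only the perf ones.
    add_test(mytest tests/mytest)
    set_property(TEST mytest PROPERTY LABELS unit)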

View File

@@ -5,17 +5,18 @@ project(NeuralNetworkTests CXX)
set(CMAKE_CXX_FLAGS " --std=c++14")
add_executable(activation activation.cpp)
target_link_libraries(activation NeuralNetwork)
target_link_libraries(activation gtest gtest_main)
target_link_libraries(activation NeuralNetwork gtest gtest_main)
add_executable(basis basis.cpp)
target_link_libraries(basis NeuralNetwork)
target_link_libraries(basis gtest gtest_main)
#[[
target_link_libraries(basis NeuralNetwork gtest gtest_main)
add_executable(backpropagation backpropagation.cpp)
target_link_libraries(backpropagation NeuralNetwork)
target_link_libraries(backpropagation NeuralNetwork gtest gtest_main)
add_executable(feedforward feedforward.cpp)
target_link_libraries(feedforward NeuralNetwork gtest gtest_main)
#[[
add_executable(backpropagation_function_cmp backpropagation_function_cmp.cpp)
target_link_libraries(backpropagation_function_cmp NeuralNetwork)
@@ -23,9 +24,6 @@ target_link_libraries(backpropagation_function_cmp NeuralNetwork)
add_executable(backpropagation_perf backpropagation_perf.cpp)
target_link_libraries(backpropagation_perf NeuralNetwork)
add_executable(feedforward feedforward.cpp)
target_link_libraries(feedforward NeuralNetwork)
add_executable(feedforward_perf feedforward_perf.cpp)
target_link_libraries(feedforward_perf NeuralNetwork)
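
With this change each ported test executable links against gtest and gtest_main in addition to NeuralNetwork, so the tests no longer define their own main(). The diff links the plain gtest and gtest_main targets, which assumes GoogleTest is already available to the build (for example built in-tree); a common alternative, shown here only as an illustration and not part of this commit, is to locate an installed GoogleTest via CMake's FindGTest module:

    # Alternative (not what this commit does): use an installed GoogleTest.
    # FindGTest provides the imported targets GTest::GTest and GTest::Main.
    find_package(GTest REQUIRED)
    add_executable(basis basis.cpp)
    target_link_libraries(basis NeuralNetwork GTest::GTest GTest::Main)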

View File

@@ -1,11 +1,9 @@
#include <NeuralNetwork/FeedForward/Network.h>
#include <NeuralNetwork/Learning/BackPropagation.h>
#include <cassert>
#include <iostream>
#include "../include/NeuralNetwork/Learning/BackPropagation.h"
#include "gtest/gtest.h"
int main() {
{ // XOR problem
TEST(BackProp,XOR) {
NeuralNetwork::FeedForward::Network n(2);
NeuralNetwork::ActivationFunction::Sigmoid a(-1);
n.appendLayer(2,a);
@@ -14,6 +12,7 @@ int main() {
n.randomizeWeights();
NeuralNetwork::Learning::BackPropagation prop(n);
for(int i=0;i<10000;i++) {
prop.teach({1,0},{1});
prop.teach({1,1},{0});
@@ -23,25 +22,26 @@ int main() {
{
std::vector<float> ret =n.computeOutput({1,1});
assert(ret[0] < 0.1);
ASSERT_LT(ret[0], 0.1);
}
{
std::vector<float> ret =n.computeOutput({0,1});
assert(ret[0] > 0.9);
ASSERT_GT(ret[0], 0.9);
}
{
std::vector<float> ret =n.computeOutput({1,0});
assert(ret[0] > 0.9);
ASSERT_GT(ret[0], 0.9);
}
{
std::vector<float> ret =n.computeOutput({0,0});
assert(ret[0] < 0.1);
ASSERT_LT(ret[0], 0.1);
}
}
{ // AND problem
}
TEST(BackProp,AND) {
NeuralNetwork::FeedForward::Network n(2);
NeuralNetwork::ActivationFunction::Sigmoid a(-1);
n.appendLayer(2,a);
@@ -50,6 +50,7 @@ int main() {
n.randomizeWeights();
NeuralNetwork::Learning::BackPropagation prop(n);
for(int i=0;i<10000;i++) {
prop.teach({1,1},{1});
prop.teach({0,0},{0});
@@ -59,25 +60,26 @@ int main() {
{
std::vector<float> ret =n.computeOutput({1,1});
assert(ret[0] > 0.9);
ASSERT_GT(ret[0], 0.9);
}
{
std::vector<float> ret =n.computeOutput({0,1});
assert(ret[0] < 0.1);
ASSERT_LT(ret[0], 0.1);
}
{
std::vector<float> ret =n.computeOutput({1,0});
assert(ret[0] < 0.1);
ASSERT_LT(ret[0], 0.1);
}
{
std::vector<float> ret =n.computeOutput({0,0});
assert(ret[0] < 0.1);
ASSERT_LT(ret[0], 0.1);
}
}
{ // NOT AND problem
}
TEST(BackProp,NOTAND) {
NeuralNetwork::FeedForward::Network n(2);
NeuralNetwork::ActivationFunction::Sigmoid a(-1);
n.appendLayer(2,a);
@@ -86,6 +88,7 @@ int main() {
n.randomizeWeights();
NeuralNetwork::Learning::BackPropagation prop(n);
for(int i=0;i<10000;i++) {
prop.teach({1,1},{0});
prop.teach({0,0},{1});
@@ -95,22 +98,21 @@ int main() {
{
std::vector<float> ret =n.computeOutput({1,1});
assert(ret[0] < 0.1);
ASSERT_LT(ret[0], 0.1);
}
{
std::vector<float> ret =n.computeOutput({0,1});
assert(ret[0] > 0.9);
ASSERT_GT(ret[0], 0.9);
}
{
std::vector<float> ret =n.computeOutput({1,0});
assert(ret[0] > 0.9);
ASSERT_GT(ret[0], 0.9);
}
{
std::vector<float> ret =n.computeOutput({0,0});
assert(ret[0] > 0.9);
}
ASSERT_GT(ret[0], 0.9);
}
}
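
Each block that used to live in main() is now its own TEST case, and the plain assert() calls become ASSERT_LT / ASSERT_GT, so a failing check reports the test name and the offending value instead of aborting the whole binary. For threshold checks like these, GoogleTest also offers EXPECT_NEAR with an explicit tolerance; purely as an illustration (the commit keeps the < 0.1 / > 0.9 comparisons), such a check inside one of the TEST bodies above could look like:

    // Tolerance-style variant of the same check: passes if ret[0] is within 0.1 of 0,
    // i.e. the network output for (1,1) is close to the expected XOR result of 0.
    std::vector<float> ret = n.computeOutput({1, 1});
    EXPECT_NEAR(ret[0], 0.0f, 0.1f);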

View File

@@ -1,10 +1,8 @@
#include <NeuralNetwork/FeedForward/Network.h>
#include <cassert>
#include <iostream>
#include "gtest/gtest.h"
int main() {
{ // XOR problem
TEST(FeedForward, XOR) {
NeuralNetwork::FeedForward::Network n(2);
NeuralNetwork::ActivationFunction::Sigmoid a(-1);
NeuralNetwork::FeedForward::Layer &hidden=n.appendLayer(2,a);
@@ -25,23 +23,21 @@ int main() {
{
std::vector<float> ret =n.computeOutput({1,1});
assert(ret[0] < 0.5);
ASSERT_LT(ret[0], 0.5);
}
{
std::vector<float> ret =n.computeOutput({0,1});
assert(ret[0] > 0.5);
ASSERT_GT(ret[0], 0.5);
}
{
std::vector<float> ret =n.computeOutput({1,0});
assert(ret[0] > 0.5);
ASSERT_GT(ret[0], 0.5);
}
{
std::vector<float> ret =n.computeOutput({0,0});
assert(ret[0] < 0.5);
}
ASSERT_LT(ret[0], 0.5);
}
}
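
Because both test binaries now link gtest_main, the removed int main() wrappers are supplied by GoogleTest itself: gtest_main's main() discovers and runs every TEST case and reports the results. If a test binary ever needed custom start-up instead of linking gtest_main, the equivalent explicit entry point (a standard GoogleTest idiom, not part of this commit) would be:

    #include "gtest/gtest.h"

    int main(int argc, char **argv) {
        ::testing::InitGoogleTest(&argc, argv);  // parses the --gtest_* command-line flags
        return RUN_ALL_TESTS();                  // runs every registered TEST case
    }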