test rewritten
@@ -98,37 +98,34 @@ IF(ENABLE_TESTS)
 add_test(feedforward tests/feedforward)
 set_property(TEST feedforward PROPERTY LABELS unit)
 
-#[[
-
-add_test(backpropagation_function_cmp tests/backpropagation_function_cmp)
-set_property(TEST backpropagation_function_cmp PROPERTY LABELS unit)
-
-add_test(recurrent tests/recurrent)
-set_property(TEST recurrent PROPERTY LABELS unit)
-
-add_test(optical_backpropagation tests/optical_backpropagation)
-set_property(TEST optical_backpropagation PROPERTY LABELS unit)
-
-add_test(quickpropagation tests/quickpropagation)
-set_property(TEST quickpropagation PROPERTY LABELS unit)
-
-add_test(perceptron tests/perceptron)
-set_property(TEST perceptron PROPERTY LABELS unit)
-
-add_test(perceptron_learning tests/perceptron_learning)
-set_property(TEST perceptron_learning PROPERTY LABELS unit)
-
-add_test(feedforward_perf tests/feedforward_perf)
-set_property(TEST feedforward_perf PROPERTY LABELS perf)
-
-add_test(quickpropagation_perf tests/quickpropagation_perf)
-set_property(TEST quickpropagation_perf PROPERTY LABELS perf)
-
-add_test(backpropagation_perf tests/backpropagation_perf)
-set_property(TEST backpropagation_perf PROPERTY LABELS perf)
-
-add_test(recurrent_perf tests/recurrent_perf)
-set_property(TEST recurrent_perf PROPERTY LABELS perf)
-
-]]
+add_test(optical_backpropagation tests/optical_backpropagation)
+set_property(TEST optical_backpropagation PROPERTY LABELS unit)
+
+add_test(perceptron tests/perceptron)
+set_property(TEST perceptron PROPERTY LABELS unit)
+
+add_test(perceptron_learning tests/perceptron_learning)
+set_property(TEST perceptron_learning PROPERTY LABELS unit)
+
+add_test(quickpropagation tests/quickpropagation)
+set_property(TEST quickpropagation PROPERTY LABELS unit)
+
+add_test(recurrent tests/recurrent)
+set_property(TEST recurrent PROPERTY LABELS unit)
+
+add_test(backpropagation_function_cmp tests/backpropagation_function_cmp)
+set_property(TEST backpropagation_function_cmp PROPERTY LABELS perf)
+
+add_test(feedforward_perf tests/feedforward_perf)
+set_property(TEST feedforward_perf PROPERTY LABELS perf)
+
+add_test(quickpropagation_perf tests/quickpropagation_perf)
+set_property(TEST quickpropagation_perf PROPERTY LABELS perf)
+
+add_test(backpropagation_perf tests/backpropagation_perf)
+set_property(TEST backpropagation_perf PROPERTY LABELS perf)
+
+add_test(recurrent_perf tests/recurrent_perf)
+set_property(TEST recurrent_perf PROPERTY LABELS perf)
+
 ENDIF(ENABLE_TESTS)
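With the block comment removed, every suite is registered with add_test and tagged through the LABELS property, so CTest can filter them: ctest -L unit runs only the functional tests, ctest -L perf selects only the benchmark binaries, and a plain ctest invocation still runs everything.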
@@ -16,7 +16,23 @@ target_link_libraries(backpropagation NeuralNetwork gtest gtest_main)
 add_executable(feedforward feedforward.cpp)
 target_link_libraries(feedforward NeuralNetwork gtest gtest_main)
 
-#[[
+add_executable(optical_backpropagation optical_backpropagation.cpp)
+target_link_libraries(optical_backpropagation NeuralNetwork gtest gtest_main)
+
+add_executable(perceptron perceptron.cpp)
+target_link_libraries(perceptron NeuralNetwork gtest gtest_main)
+
+add_executable(perceptron_learning perceptron_learning.cpp)
+target_link_libraries(perceptron_learning NeuralNetwork gtest gtest_main)
+
+add_executable(recurrent recurrent.cpp)
+target_link_libraries(recurrent NeuralNetwork gtest gtest_main)
+
+add_executable(quickpropagation quickpropagation.cpp)
+target_link_libraries(quickpropagation NeuralNetwork gtest gtest_main)
+
+
+# PERF
 
 add_executable(backpropagation_function_cmp backpropagation_function_cmp.cpp)
 target_link_libraries(backpropagation_function_cmp NeuralNetwork)
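The unit-test executables above link gtest together with gtest_main, which is why none of the rewritten test files defines its own main(): gtest_main supplies one that initializes GoogleTest and runs every registered TEST(). Roughly, linking it is equivalent to adding the following to each binary (shown only for orientation, the project does not contain this file):

    // What gtest_main supplies, approximately: parse GoogleTest flags, then run all tests.
    #include <gtest/gtest.h>

    int main(int argc, char **argv) {
        ::testing::InitGoogleTest(&argc, argv);
        return RUN_ALL_TESTS();
    }

The perf executables, by contrast, link only against NeuralNetwork, so they remain plain binaries outside the GoogleTest runner.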
@@ -27,26 +43,11 @@ target_link_libraries(backpropagation_perf NeuralNetwork)
 add_executable(feedforward_perf feedforward_perf.cpp)
 target_link_libraries(feedforward_perf NeuralNetwork)
 
-add_executable(optical_backpropagation optical_backpropagation.cpp)
-target_link_libraries(optical_backpropagation NeuralNetwork)
-
-add_executable(perceptron perceptron.cpp)
-target_link_libraries(perceptron NeuralNetwork)
-
-add_executable(perceptron_learning perceptron_learning.cpp)
-target_link_libraries(perceptron_learning NeuralNetwork)
-
-add_executable(recurrent recurrent.cpp)
-target_link_libraries(recurrent NeuralNetwork)
-
 add_executable(recurrent_perf recurrent_perf.cpp)
 target_link_libraries(recurrent_perf NeuralNetwork)
 
-add_executable(quickpropagation quickpropagation.cpp)
-target_link_libraries(quickpropagation NeuralNetwork)
-
 add_executable(quickpropagation_perf quickpropagation_perf.cpp)
 target_link_libraries(quickpropagation_perf NeuralNetwork)
 
 add_executable(propagation_cmp propagation_cmp.cpp)
-target_link_libraries(propagation_cmp NeuralNetwork)]]
+target_link_libraries(propagation_cmp NeuralNetwork)
@@ -3,7 +3,7 @@
 #include <NeuralNetwork/ActivationFunction/HyperbolicTangent.h>
 #include <NeuralNetwork/ActivationFunction/Linear.h>
 
-#include "gtest/gtest.h"
+#include <gtest/gtest.h>
 
 union SSE {
     __m128 sse; // SSE 4 x float vector
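For context on the union at the end of this hunk: __m128 is the 128-bit SSE vector type from the x86 intrinsics headers, and wrapping it in a union is a common way to read the four packed floats back out individually. A minimal sketch of that idea follows; the scalar member is an assumption, the diff only shows the vector field:

    #include <xmmintrin.h>  // defines __m128 (assumed to be included elsewhere in the file)

    union SSE {
        __m128 sse;   // SSE 4 x float vector
        float  f[4];  // assumed scalar view of the same 128 bits
    };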
@@ -1,7 +1,7 @@
 #include <NeuralNetwork/FeedForward/Network.h>
 #include <NeuralNetwork/Learning/BackPropagation.h>
 
-#include "gtest/gtest.h"
+#include <gtest/gtest.h>
 
 TEST(BackProp,XOR) {
     NeuralNetwork::FeedForward::Network n(2);
@@ -2,9 +2,9 @@
 
 #include <cassert>
 #include <iostream>
-#include "../include/NeuralNetwork/Learning/BackPropagation.h"
-#include "../include/NeuralNetwork/Learning/CorrectionFunction/Optical.h"
-#include "../include/NeuralNetwork/Learning/CorrectionFunction/ArcTangent.h"
+#include <NeuralNetwork/Learning/BackPropagation.h>
+#include <NeuralNetwork/Learning/CorrectionFunction/Optical.h>
+#include <NeuralNetwork/Learning/CorrectionFunction/ArcTangent.h>
 
 #define LEARN(A,AR,B,BR,C,CR,D,DR,FUN,COEF) \
 ({\
@@ -1,6 +1,6 @@
 #include <NeuralNetwork/FeedForward/Network.h>
 
-#include "gtest/gtest.h"
+#include <gtest/gtest.h>
 
 TEST(FeedForward, XOR) {
     NeuralNetwork::FeedForward::Network n(2);
@@ -1,116 +1,120 @@
 #include <NeuralNetwork/FeedForward/Network.h>
 
-#include <cassert>
-#include <iostream>
-#include "../include/NeuralNetwork/Learning/OpticalBackPropagation.h"
+#include <NeuralNetwork/Learning/OpticalBackPropagation.h>
 
-int main() {
-    { // XOR problem
-        NeuralNetwork::FeedForward::Network n(2);
-        NeuralNetwork::ActivationFunction::Sigmoid a(-1);
-        n.appendLayer(2,a);
-        n.appendLayer(1,a);
-
-        n.randomizeWeights();
-
-        NeuralNetwork::Learning::OpticalBackPropagation prop(n);
-        for(int i=0;i<10000;i++) {
-            prop.teach({1,0},{1});
-            prop.teach({1,1},{0});
-            prop.teach({0,0},{0});
-            prop.teach({0,1},{1});
-        }
-
-        {
-            std::vector<float> ret =n.computeOutput({1,1});
-            assert(ret[0] < 0.1);
-        }
-
-        {
-            std::vector<float> ret =n.computeOutput({0,1});
-            assert(ret[0] > 0.9);
-        }
-
-        {
-            std::vector<float> ret =n.computeOutput({1,0});
-            assert(ret[0] > 0.9);
-        }
-
-        {
-            std::vector<float> ret =n.computeOutput({0,0});
-            assert(ret[0] < 0.1);
-        }
-    }
-    { // AND problem
-        NeuralNetwork::FeedForward::Network n(2);
-        NeuralNetwork::ActivationFunction::Sigmoid a(-1);
-        n.appendLayer(2,a);
-        n.appendLayer(1,a);
-
-        n.randomizeWeights();
-
-        NeuralNetwork::Learning::OpticalBackPropagation prop(n);
-        for(int i=0;i<10000;i++) {
-            prop.teach({1,1},{1});
-            prop.teach({0,0},{0});
-            prop.teach({0,1},{0});
-            prop.teach({1,0},{0});
-        }
-
-        {
-            std::vector<float> ret =n.computeOutput({1,1});
-            assert(ret[0] > 0.9);
-        }
-
-        {
-            std::vector<float> ret =n.computeOutput({0,1});
-            assert(ret[0] < 0.1);
-        }
-
-        {
-            std::vector<float> ret =n.computeOutput({1,0});
-            assert(ret[0] < 0.1);
-        }
-
-        {
-            std::vector<float> ret =n.computeOutput({0,0});
-            assert(ret[0] < 0.1);
-        }
-    }
-    { // NOT AND problem
-        NeuralNetwork::FeedForward::Network n(2);
-        NeuralNetwork::ActivationFunction::Sigmoid a(-1);
-        n.appendLayer(2,a);
-        n.appendLayer(1,a);
-
-        n.randomizeWeights();
-
-        NeuralNetwork::Learning::OpticalBackPropagation prop(n);
-        for(int i=0;i<10000;i++) {
-            prop.teach({1,1},{0});
-            prop.teach({0,0},{1});
-            prop.teach({0,1},{1});
-            prop.teach({1,0},{1});
-        }
-
-        {
-            std::vector<float> ret =n.computeOutput({1,1});
-            assert(ret[0] < 0.1);
-        }
-
-        {
-            std::vector<float> ret =n.computeOutput({0,1});
-            assert(ret[0] > 0.9);
-        }
-
-        {
-            std::vector<float> ret =n.computeOutput({1,0});
-            assert(ret[0] > 0.9);
-        }
-
-        {
-            std::vector<float> ret =n.computeOutput({0,0});
-            assert(ret[0] > 0.9);
-        }
-    }
-}
+#include <gtest/gtest.h>
+
+
+TEST(OpticalBackPropagation,XOR) {
+    NeuralNetwork::FeedForward::Network n(2);
+    NeuralNetwork::ActivationFunction::Sigmoid a(-1);
+    n.appendLayer(2,a);
+    n.appendLayer(1,a);
+
+    n.randomizeWeights();
+
+    NeuralNetwork::Learning::OpticalBackPropagation prop(n);
+
+    for(int i=0;i<10000;i++) {
+        prop.teach({1,0},{1});
+        prop.teach({1,1},{0});
+        prop.teach({0,0},{0});
+        prop.teach({0,1},{1});
+    }
+
+    {
+        std::vector<float> ret =n.computeOutput({1,1});
+        ASSERT_LT(ret[0], 0.1);
+    }
+
+    {
+        std::vector<float> ret =n.computeOutput({0,1});
+        ASSERT_GT(ret[0], 0.9);
+    }
+
+    {
+        std::vector<float> ret =n.computeOutput({1,0});
+        ASSERT_GT(ret[0], 0.9);
+    }
+
+    {
+        std::vector<float> ret =n.computeOutput({0,0});
+        ASSERT_LT(ret[0], 0.1);
+    }
+}
+
+TEST(OpticalBackPropagation,AND) {
+    NeuralNetwork::FeedForward::Network n(2);
+    NeuralNetwork::ActivationFunction::Sigmoid a(-1);
+    n.appendLayer(2,a);
+    n.appendLayer(1,a);
+
+    n.randomizeWeights();
+
+    NeuralNetwork::Learning::OpticalBackPropagation prop(n);
+
+    for(int i=0;i<10000;i++) {
+        prop.teach({1,1},{1});
+        prop.teach({0,0},{0});
+        prop.teach({0,1},{0});
+        prop.teach({1,0},{0});
+    }
+
+    {
+        std::vector<float> ret =n.computeOutput({1,1});
+        ASSERT_GT(ret[0], 0.9);
+    }
+
+    {
+        std::vector<float> ret =n.computeOutput({0,1});
+        ASSERT_LT(ret[0], 0.1);
+    }
+
+    {
+        std::vector<float> ret =n.computeOutput({1,0});
+        ASSERT_LT(ret[0], 0.1);
+    }
+
+    {
+        std::vector<float> ret =n.computeOutput({0,0});
+        ASSERT_LT(ret[0], 0.1);
+    }
+}
+
+TEST(OpticalBackPropagation,NOTAND) {
+    NeuralNetwork::FeedForward::Network n(2);
+    NeuralNetwork::ActivationFunction::Sigmoid a(-1);
+    n.appendLayer(2,a);
+    n.appendLayer(1,a);
+
+    n.randomizeWeights();
+
+    NeuralNetwork::Learning::OpticalBackPropagation prop(n);
+
+    for(int i=0;i<10000;i++) {
+        prop.teach({1,1},{0});
+        prop.teach({0,0},{1});
+        prop.teach({0,1},{1});
+        prop.teach({1,0},{1});
+    }
+
+    {
+        std::vector<float> ret =n.computeOutput({1,1});
+        ASSERT_LT(ret[0], 0.1);
+    }
+
+    {
+        std::vector<float> ret =n.computeOutput({0,1});
+        ASSERT_GT(ret[0], 0.9);
+    }
+
+    {
+        std::vector<float> ret =n.computeOutput({1,0});
+        ASSERT_GT(ret[0], 0.9);
+    }
+
+    {
+        std::vector<float> ret =n.computeOutput({0,0});
+        ASSERT_GT(ret[0], 0.9);
+    }
+}
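Unlike the old assert() calls, ASSERT_LT and ASSERT_GT print both operands on failure and abort only the current TEST, so the AND and NOTAND cases still run if XOR misbehaves. The four nearly identical check blocks in each TEST could also be shared through a helper; a sketch under the interfaces visible in the diff (checkOutput is a made-up name, not part of the commit):

    // Hypothetical helper for this file: verify one input pattern against a high or low target.
    static void checkOutput(NeuralNetwork::FeedForward::Network &n,
                            const std::vector<float> &in, bool expectHigh) {
        std::vector<float> ret = n.computeOutput(in);
        if (expectHigh)
            ASSERT_GT(ret[0], 0.9);  // trained toward 1
        else
            ASSERT_LT(ret[0], 0.1);  // trained toward 0
    }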
@@ -1,16 +1,17 @@
 #include <NeuralNetwork/FeedForward/Perceptron.h>
 
-#include <assert.h>
-#include <iostream>
+#include <gtest/gtest.h>
 
-int main() {
+TEST(Perceptron,Test) {
     NeuralNetwork::FeedForward::Perceptron p(2,1);
 
     p[1].weight(0)=-1.0;
     p[1].weight(1)=1.001;
 
-    assert(p.computeOutput({1,1})[0] == 1.0);
-    p[1].weight(1)=0.999;
+    float ret =p.computeOutput({1,1})[0];
+    ASSERT_EQ(ret, 1.0);
 
-    assert(p.computeOutput({1,1})[0] == 0.0);
+    p[1].weight(1)=0.999;
+    ret =p.computeOutput({1,1})[0];
+    ASSERT_EQ(ret, 0.0);
 }
@@ -1,41 +1,39 @@
 #include <NeuralNetwork/Learning/PerceptronLearning.h>
 
-#include <cassert>
-#include <iostream>
+#include <gtest/gtest.h>
 
-int main() {
-    { // XOR problem
-        NeuralNetwork::FeedForward::Perceptron n(2,1);
-
-        n.randomizeWeights();
-
-        NeuralNetwork::Learning::PerceptronLearning learn(n);
-
-        for(int i=0;i<10;i++) {
-            learn.teach({1,0},{1});
-            learn.teach({1,1},{1});
-            learn.teach({0,0},{0});
-            learn.teach({0,1},{1});
-        }
-
-        {
-            std::vector<float> ret =n.computeOutput({1,1});
-            assert(ret[0] > 0.9);
-        }
-
-        {
-            std::vector<float> ret =n.computeOutput({0,1});
-            assert(ret[0] > 0.9);
-        }
-
-        {
-            std::vector<float> ret =n.computeOutput({1,0});
-            assert(ret[0] > 0.9);
-        }
-
-        {
-            std::vector<float> ret =n.computeOutput({0,0});
-            assert(ret[0] < 0.1);
-        }
-    }
-}
+
+TEST(PerceptronLearning,XOR) {
+    NeuralNetwork::FeedForward::Perceptron n(2,1);
+
+    n.randomizeWeights();
+
+    NeuralNetwork::Learning::PerceptronLearning learn(n);
+
+    for(int i=0;i<10;i++) {
+        learn.teach({1,0},{1});
+        learn.teach({1,1},{1});
+        learn.teach({0,0},{0});
+        learn.teach({0,1},{1});
+    }
+
+    {
+        std::vector<float> ret =n.computeOutput({1,1});
+        ASSERT_GT(ret[0], 0.9);
+    }
+
+    {
+        std::vector<float> ret =n.computeOutput({0,1});
+        ASSERT_GT(ret[0], 0.9);
+    }
+
+    {
+        std::vector<float> ret =n.computeOutput({1,0});
+        ASSERT_GT(ret[0], 0.9);
+    }
+
+    {
+        std::vector<float> ret =n.computeOutput({0,0});
+        ASSERT_LT(ret[0], 0.1);
+    }
+}
@@ -1,116 +1,118 @@
 #include <NeuralNetwork/FeedForward/Network.h>
+#include <NeuralNetwork/Learning/QuickPropagation.h>
 
-#include <cassert>
-#include <iostream>
-#include "../include/NeuralNetwork/Learning/QuickPropagation.h"
+#include <gtest/gtest.h>
 
-int main() {
-    { // XOR problem
-        NeuralNetwork::FeedForward::Network n(2);
-        NeuralNetwork::ActivationFunction::Sigmoid a(-1);
-        n.appendLayer(2,a);
-        n.appendLayer(1,a);
-
-        n.randomizeWeights();
-
-        NeuralNetwork::Learning::QuickPropagation prop(n);
-        for(int i=0;i<10000;i++) {
-            prop.teach({1,0},{1});
-            prop.teach({1,1},{0});
-            prop.teach({0,0},{0});
-            prop.teach({0,1},{1});
-        }
-
-        {
-            std::vector<float> ret =n.computeOutput({1,1});
-            assert(ret[0] < 0.1);
-        }
-
-        {
-            std::vector<float> ret =n.computeOutput({0,1});
-            assert(ret[0] > 0.9);
-        }
-
-        {
-            std::vector<float> ret =n.computeOutput({1,0});
-            assert(ret[0] > 0.9);
-        }
-
-        {
-            std::vector<float> ret =n.computeOutput({0,0});
-            assert(ret[0] < 0.1);
-        }
-    }
-    { // AND problem
-        NeuralNetwork::FeedForward::Network n(2);
-        NeuralNetwork::ActivationFunction::Sigmoid a(-1);
-        n.appendLayer(2,a);
-        n.appendLayer(1,a);
-
-        n.randomizeWeights();
-
-        NeuralNetwork::Learning::QuickPropagation prop(n);
-        for(int i=0;i<10000;i++) {
-            prop.teach({1,1},{1});
-            prop.teach({0,0},{0});
-            prop.teach({0,1},{0});
-            prop.teach({1,0},{0});
-        }
-
-        {
-            std::vector<float> ret =n.computeOutput({1,1});
-            assert(ret[0] > 0.9);
-        }
-
-        {
-            std::vector<float> ret =n.computeOutput({0,1});
-            assert(ret[0] < 0.1);
-        }
-
-        {
-            std::vector<float> ret =n.computeOutput({1,0});
-            assert(ret[0] < 0.1);
-        }
-
-        {
-            std::vector<float> ret =n.computeOutput({0,0});
-            assert(ret[0] < 0.1);
-        }
-    }
-    { // NOT AND problem
-        NeuralNetwork::FeedForward::Network n(2);
-        NeuralNetwork::ActivationFunction::Sigmoid a(-1);
-        n.appendLayer(2,a);
-        n.appendLayer(1,a);
-
-        n.randomizeWeights();
-
-        NeuralNetwork::Learning::QuickPropagation prop(n);
-        for(int i=0;i<10000;i++) {
-            prop.teach({1,1},{0});
-            prop.teach({0,0},{1});
-            prop.teach({0,1},{1});
-            prop.teach({1,0},{1});
-        }
-
-        {
-            std::vector<float> ret =n.computeOutput({1,1});
-            assert(ret[0] < 0.1);
-        }
-
-        {
-            std::vector<float> ret =n.computeOutput({0,1});
-            assert(ret[0] > 0.9);
-        }
-
-        {
-            std::vector<float> ret =n.computeOutput({1,0});
-            assert(ret[0] > 0.9);
-        }
-
-        {
-            std::vector<float> ret =n.computeOutput({0,0});
-            assert(ret[0] > 0.9);
-        }
-    }
-}
+TEST(QuickPropagation,XOR) {
+    NeuralNetwork::FeedForward::Network n(2);
+    NeuralNetwork::ActivationFunction::Sigmoid a(-1);
+    n.appendLayer(2,a);
+    n.appendLayer(1,a);
+
+    n.randomizeWeights();
+
+    NeuralNetwork::Learning::QuickPropagation prop(n);
+
+    for(int i=0;i<10000;i++) {
+        prop.teach({1,0},{1});
+        prop.teach({1,1},{0});
+        prop.teach({0,0},{0});
+        prop.teach({0,1},{1});
+    }
+
+    {
+        std::vector<float> ret =n.computeOutput({1,1});
+        ASSERT_LT(ret[0], 0.1);
+    }
+
+    {
+        std::vector<float> ret =n.computeOutput({0,1});
+        ASSERT_GT(ret[0], 0.9);
+    }
+
+    {
+        std::vector<float> ret =n.computeOutput({1,0});
+        ASSERT_GT(ret[0], 0.9);
+    }
+
+    {
+        std::vector<float> ret =n.computeOutput({0,0});
+        ASSERT_LT(ret[0], 0.1);
+    }
+}
+
+TEST(QuickPropagation,AND) {
+    NeuralNetwork::FeedForward::Network n(2);
+    NeuralNetwork::ActivationFunction::Sigmoid a(-1);
+    n.appendLayer(2,a);
+    n.appendLayer(1,a);
+
+    n.randomizeWeights();
+
+    NeuralNetwork::Learning::QuickPropagation prop(n);
+
+    for(int i=0;i<10000;i++) {
+        prop.teach({1,1},{1});
+        prop.teach({0,0},{0});
+        prop.teach({0,1},{0});
+        prop.teach({1,0},{0});
+    }
+
+    {
+        std::vector<float> ret =n.computeOutput({1,1});
+        ASSERT_GT(ret[0], 0.9);
+    }
+
+    {
+        std::vector<float> ret =n.computeOutput({0,1});
+        ASSERT_LT(ret[0], 0.1);
+    }
+
+    {
+        std::vector<float> ret =n.computeOutput({1,0});
+        ASSERT_LT(ret[0], 0.1);
+    }
+
+    {
+        std::vector<float> ret =n.computeOutput({0,0});
+        ASSERT_LT(ret[0], 0.1);
+    }
+}
+
+TEST(QuickPropagation,NOTAND) {
+    NeuralNetwork::FeedForward::Network n(2);
+    NeuralNetwork::ActivationFunction::Sigmoid a(-1);
+    n.appendLayer(2,a);
+    n.appendLayer(1,a);
+
+    n.randomizeWeights();
+
+    NeuralNetwork::Learning::QuickPropagation prop(n);
+
+    for(int i=0;i<10000;i++) {
+        prop.teach({1,1},{0});
+        prop.teach({0,0},{1});
+        prop.teach({0,1},{1});
+        prop.teach({1,0},{1});
+    }
+
+    {
+        std::vector<float> ret =n.computeOutput({1,1});
+        ASSERT_LT(ret[0], 0.1);
+    }
+
+    {
+        std::vector<float> ret =n.computeOutput({0,1});
+        ASSERT_GT(ret[0], 0.9);
+    }
+
+    {
+        std::vector<float> ret =n.computeOutput({1,0});
+        ASSERT_GT(ret[0], 0.9);
+    }
+
+    {
+        std::vector<float> ret =n.computeOutput({0,0});
+        ASSERT_GT(ret[0], 0.9);
+    }
+}
@@ -1,9 +1,8 @@
 #include <NeuralNetwork/Recurrent/Network.h>
 
-#include <assert.h>
-#include <iostream>
+#include <gtest/gtest.h>
 
-int main() {
+TEST(Recurrent, Sample) {
     NeuralNetwork::Recurrent::Network a(2,1,1);
 
     a.getNeurons()[4]->weight(1)=0.05;
@@ -15,6 +14,6 @@ int main() {
 
     for(size_t i=0;i<solutions.size();i++) {
         float res= a.computeOutput({1,0.7})[0];
-        assert(res > solutions[i]*0.999 && res < solutions[i]*1.001);
+        ASSERT_FLOAT_EQ(res, solutions[i]);
     }
 }
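ASSERT_FLOAT_EQ is tighter than the hand-rolled band it replaces: GoogleTest treats two floats as equal when they agree to within 4 ULPs, whereas the old assert accepted anything inside roughly ±0.1% of the expected value. If that looser tolerance is ever needed again, it can be written explicitly; a sketch for the same spot in the loop (the tolerance expression mirrors the old check and is not part of the commit):

    // Looser, explicit tolerance in GoogleTest terms; needs <cmath> for std::fabs.
    ASSERT_NEAR(res, solutions[i], std::fabs(solutions[i]) * 0.001);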