Modified FeedForward to allow setting the activation function for a whole Layer, and added an XOR test for the FF network

This commit is contained in:
2016-02-03 21:16:35 +01:00
parent ea4ce22867
commit 567fcd2373
5 changed files with 64 additions and 35 deletions

View File

@@ -1,5 +1,6 @@
#include <NeuralNetwork/FeedForward/Network.h>
#include <cassert>
#include <iostream>
void printVec(const std::vector<float> &v) {
@@ -10,22 +11,44 @@ void printVec(const std::vector<float> &v) {
}
// Smoke test for the FeedForward network: no training — weights for the
// classic XOR problem are hard-coded and the outputs are asserted.
// NOTE(review): this text is a rendered diff with the +/- markers stripped,
// so pre-change and post-change lines are interleaved below. Lines flagged
// as "diff residue" look like the removed (old) version of the test —
// verify against the real file before treating this as compilable code.
int main() {
NeuralNetwork::FeedForward::Network n(2); // NOTE(review): shadowed by the `n` declared inside the block below — presumably diff residue of the old test
{ // XOR problem
NeuralNetwork::FeedForward::Network n(2); // network with 2 inputs
NeuralNetwork::ActivationFunction::Sigmoid a(-1); // activation passed to appendLayer — per the commit message, now set for the whole layer
NeuralNetwork::FeedForward::Layer &hidden=n.appendLayer(2,a); // hidden layer: 2 neurons, sigmoid activation
NeuralNetwork::FeedForward::Layer &out = n.appendLayer(1,a); // output layer: 1 neuron, sigmoid activation
NeuralNetwork::FeedForward::Layer &sec=n.appendLayer(4); // NOTE(review): likely diff residue from the old test (no activation arg)
hidden[1].setWeight(n[0][0],7); // n[0] is the input layer; index 0 is presumably the bias unit — TODO confirm
hidden[1].setWeight(n[0][1],-4.7); // weight from input 1 to hidden neuron 1
hidden[1].setWeight(n[0][2],-4.7); // weight from input 2 to hidden neuron 1
NeuralNetwork::FeedForward::Layer &in = n[0]; // NOTE(review): used only by the `sec`/`t` residue lines below
hidden[2].setWeight(n[0][0],2.6); // bias weight for hidden neuron 2 — TODO confirm index-0-is-bias assumption
hidden[2].setWeight(n[0][1],-6.4); // weight from input 1 to hidden neuron 2
hidden[2].setWeight(n[0][2],-6.4); // weight from input 2 to hidden neuron 2
NeuralNetwork::FeedForward::Layer &t = n.appendLayer(4); // NOTE(review): likely diff residue from the old test
sec[1].setWeight(in[1],-1.0); // NOTE(review): diff residue — wires the old `sec` layer
out[1].setWeight(hidden[0],-4.5); // bias weight for the output neuron — TODO confirm hidden[0] is the bias unit
out[1].setWeight(hidden[1],9.6); // weight from hidden neuron 1 to output
out[1].setWeight(hidden[2],-6.8); // weight from hidden neuron 2 to output
sec[1].setWeight(in[2],-1.0); // NOTE(review): diff residue
sec[2].setWeight(in[2],-1.0); // NOTE(review): diff residue
t[2].setWeight(sec[2],-1.0); // NOTE(review): diff residue
{
std::vector<float> ret =n.computeOutput({1,1}); // XOR(1,1) = 0: expect output below the 0.5 decision threshold
assert(ret[0] < 0.5);
}
std::vector<float> ret =n.computeOutput({0.7,0.7}); // NOTE(review): likely diff residue — the old debug-print probe
printVec(ret);
std::cout << n; // dump the network; relies on an operator<< declared in the project headers
{
std::vector<float> ret =n.computeOutput({0,1}); // XOR(0,1) = 1: expect output above 0.5
assert(ret[0] > 0.5);
}
{
std::vector<float> ret =n.computeOutput({1,0}); // XOR(1,0) = 1: expect output above 0.5
assert(ret[0] > 0.5);
}
{
std::vector<float> ret =n.computeOutput({0,0}); // XOR(0,0) = 0: expect output below 0.5
assert(ret[0] < 0.5);
}
}
}