Modified FeedForward to allow setting an activation function for a whole Layer, and added an XOR test for FF
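
For orientation, a minimal sketch of how the new per-layer API is presumably meant to be used, built only from names that appear in the diff below (`appendLayer`'s new activation parameter, the `Sigmoid` activation, `computeOutput`); the weights are left at their initial values here, so the printed output is not yet meaningful:

```cpp
#include <NeuralNetwork/FeedForward/Network.h>
#include <iostream>

int main() {
    NeuralNetwork::FeedForward::Network n(2);            // network with 2 inputs
    NeuralNetwork::ActivationFunction::Sigmoid sig(-1);  // activation shared by a whole layer
    n.appendLayer(3, sig);                               // hidden layer of 3 sigmoid neurons
    n.appendLayer(1, sig);                               // output layer of 1 sigmoid neuron
    // Weights are untrained at this point, so the value itself is arbitrary.
    std::cout << n.computeOutput({0, 1})[0] << "\n";
    return 0;
}
```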
@@ -16,10 +16,10 @@ namespace FeedForward {
 class Layer : public Stringifiable {
 
 public:
-    Layer(std::size_t size = 0):neurons() {
+    Layer(std::size_t size, const ActivationFunction::ActivationFunction &activationFunction):neurons() {
         neurons.push_back(new BiasNeuron);
         for(std::size_t i=0;i<size;i++) {
-            neurons.push_back(new Neuron(neurons.size()));
+            neurons.push_back(new Neuron(neurons.size(),activationFunction));
         }
     }
 
@@ -9,6 +9,8 @@
 #include <iomanip>
 #include <limits>
 
+#include <iostream>
+
 namespace NeuralNetwork {
 namespace FeedForward {
 
@@ -29,24 +31,27 @@ namespace FeedForward {
         appendLayer(_inputSize);
     };
 
-    Layer& appendLayer(std::size_t size=1) {
-        layers.push_back(Layer(size));
-
-        if(layers.size() > 1)
-            layers.back().setInputSize(layers[layers.size()-2].size());
-
-        return layers.back();
-    }
-
-    Layer& operator[](const std::size_t &id) {
-        return layers[id];
-    }
-
     /**
      * @brief Virtual destructor for Network
      */
     virtual ~Network() {
-    };
+        for(auto &layer:layers) {
+            delete layer;
+        }
+    }
+
+    Layer& appendLayer(std::size_t size=1, const ActivationFunction::ActivationFunction &activationFunction=ActivationFunction::Sigmoid(-4.9)) {
+        layers.push_back(new Layer(size,activationFunction));
+
+        if(layers.size() > 1)
+            layers.back()->setInputSize(layers[layers.size()-2]->size());
+
+        return *layers[layers.size()-1];//.back();
+    }
+
+    Layer& operator[](const std::size_t &id) {
+        return *layers[id];
+    }
 
     /**
      * @brief This is a function to compute one iteration of the network
@@ -66,7 +71,7 @@ namespace FeedForward {
             if(!first) {
                 out << ",";
             }
-            out << layer;
+            out << *layer;
             first=false;
         }
         out << "]";
@@ -74,7 +79,7 @@ namespace FeedForward {
     }
 
 protected:
-    std::vector<Layer> layers;
+    std::vector<Layer*> layers;
 };
 }
 }
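
A side note on the switch from `std::vector<Layer>` to `std::vector<Layer*>` above: the layers are now heap-allocated and freed by hand in the destructor. The same ownership could be expressed with `std::unique_ptr` instead; a sketch of that alternative (not what this commit does), using a stand-in `Layer` type:

```cpp
#include <memory>
#include <vector>

struct Layer { /* stand-in for FeedForward::Layer */ };

class Network {
public:
    Layer& appendLayer() {
        layers.push_back(std::make_unique<Layer>());
        return *layers.back();
    }
    // No destructor loop needed: each unique_ptr frees its Layer automatically.
private:
    std::vector<std::unique_ptr<Layer>> layers;
};
```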
@@ -92,9 +92,10 @@ namespace NeuralNetwork
 class Neuron: public NeuronInterface
 {
 public:
-    Neuron(unsigned long _id=0): NeuronInterface(), basis(new BasisFunction::Linear),
-        activation(new ActivationFunction::Sigmoid(-4.9)),
-        id_(_id),weights(_id+1),_output(0),_value(0) {
+    Neuron(unsigned long _id=0, const ActivationFunction::ActivationFunction &activationFunction=ActivationFunction::Sigmoid(-4.9)):
+        NeuronInterface(), basis(new BasisFunction::Linear),
+        activation(activationFunction.clone()),
+        id_(_id),weights(_id+1),_output(0),_value(0) {
     }
 
     Neuron(const Neuron &r): NeuronInterface(), basis(r.basis->clone()), activation(r.activation->clone()),id_(r.id_),
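
The constructor above stores `activationFunction.clone()` rather than keeping the reference, so each Neuron owns an independent copy of a polymorphic activation function (the prototype pattern, also visible in the copy constructor's `r.activation->clone()`). A guess at the minimal interface this implies; the real `ActivationFunction::ActivationFunction` and the sign convention of `Sigmoid`'s steepness parameter may differ:

```cpp
#include <cmath>

namespace ActivationFunction {
    struct ActivationFunction {
        virtual float operator()(float x) const = 0;
        virtual ActivationFunction* clone() const = 0;  // virtual copy constructor
        virtual ~ActivationFunction() = default;
    };

    struct Sigmoid : ActivationFunction {
        explicit Sigmoid(float lambda) : lambda(lambda) {}
        // Assumed form: with lambda = -4.9 this gives the usual steep
        // logistic 1 / (1 + e^(-4.9 x)); the actual definition may differ.
        float operator()(float x) const override {
            return 1.0f / (1.0f + std::exp(lambda * x));
        }
        Sigmoid* clone() const override { return new Sigmoid(*this); }
        float lambda;
    };
}
```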
@@ -9,16 +9,16 @@ std::vector<float> NeuralNetwork::FeedForward::Network::computeOutput(const std:
 
     // 0 is bias
     partial1[0]=1.0;
-    for(int i=0;i<input.size();i++) {
+    for(std::size_t i=0;i<input.size();i++) {
         partial1[i+1]=input[i];
     }
 
     for(std::size_t i=1;i<layers.size();i++) {
-        layers[i].solve(*partialInputPtr,*partialOutputPtr);
+        layers[i]->solve(*partialInputPtr,*partialOutputPtr);
         std::swap(partialInputPtr,partialOutputPtr);
     }
 
-    for(int i=0;i<partialInputPtr->size()-1;i++) {
+    for(std::size_t i=0;i<partialInputPtr->size()-1;i++) {
         partialInputPtr->operator[](i)=partialInputPtr->operator[](i+1);
     }
 
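
`computeOutput` above feeds each layer from `*partialInputPtr` into `*partialOutputPtr` and then swaps the two pointers, so the same two scratch vectors are reused across all layers instead of allocating one per layer. The ping-pong idiom in isolation, with a stand-in stage in place of `layers[i]->solve`:

```cpp
#include <cstddef>
#include <utility>
#include <vector>

std::vector<float> runStages(std::vector<float> data, int stages) {
    std::vector<float> buf(data.size());
    std::vector<float> *in = &data, *out = &buf;
    for (int s = 0; s < stages; s++) {
        for (std::size_t i = 0; i < in->size(); i++)
            (*out)[i] = (*in)[i] * 0.5f;  // stand-in for layers[i]->solve(*in, *out)
        std::swap(in, out);               // this stage's output becomes the next input
    }
    return *in;                           // after the final swap, *in holds the result
}
```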
@@ -1,5 +1,6 @@
 #include <NeuralNetwork/FeedForward/Network.h>
 
+#include <cassert>
 #include <iostream>
 
 void printVec(const std::vector<float> &v) {
@@ -10,22 +11,44 @@ void printVec(const std::vector<float> &v) {
 }
 
 int main() {
-    NeuralNetwork::FeedForward::Network n(2);
+    { // XOR problem
+        NeuralNetwork::FeedForward::Network n(2);
+        NeuralNetwork::ActivationFunction::Sigmoid a(-1);
+        NeuralNetwork::FeedForward::Layer &hidden=n.appendLayer(2,a);
+        NeuralNetwork::FeedForward::Layer &out = n.appendLayer(1,a);
 
-    NeuralNetwork::FeedForward::Layer &sec=n.appendLayer(4);
+        hidden[1].setWeight(n[0][0],7);
+        hidden[1].setWeight(n[0][1],-4.7);
+        hidden[1].setWeight(n[0][2],-4.7);
 
-    NeuralNetwork::FeedForward::Layer &in = n[0];
+        hidden[2].setWeight(n[0][0],2.6);
+        hidden[2].setWeight(n[0][1],-6.4);
+        hidden[2].setWeight(n[0][2],-6.4);
 
-    NeuralNetwork::FeedForward::Layer &t = n.appendLayer(4);
-    sec[1].setWeight(in[1],-1.0);
+        out[1].setWeight(hidden[0],-4.5);
+        out[1].setWeight(hidden[1],9.6);
+        out[1].setWeight(hidden[2],-6.8);
 
-    sec[1].setWeight(in[2],-1.0);
-    sec[2].setWeight(in[2],-1.0);
 
-    t[2].setWeight(sec[2],-1.0);
+        {
+            std::vector<float> ret =n.computeOutput({1,1});
+            assert(ret[0] < 0.5);
+        }
 
-    std::vector<float> ret =n.computeOutput({0.7,0.7});
-    printVec(ret);
-    std::cout << n;
+        {
+            std::vector<float> ret =n.computeOutput({0,1});
+            assert(ret[0] > 0.5);
+        }
 
+        {
+            std::vector<float> ret =n.computeOutput({1,0});
+            assert(ret[0] > 0.5);
+        }
+
+        {
+            std::vector<float> ret =n.computeOutput({0,0});
+            assert(ret[0] < 0.5);
+        }
+    }
 }
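
The new test wires a 2-2-1 network by hand and checks the XOR truth table, reading each output as a boolean via a 0.5 threshold. Index 0 of every layer is the `BiasNeuron` that the `Layer` constructor pushes first, which is why the real neurons start at index 1 and why `n[0][0]` acts as the bias input to each hidden neuron. The four assertions could equally be written table-driven; a hypothetical helper, not part of the commit:

```cpp
#include <NeuralNetwork/FeedForward/Network.h>
#include <cassert>
#include <vector>

void checkXor(NeuralNetwork::FeedForward::Network &n) {
    const struct { float a, b; bool expected; } cases[] = {
        {0, 0, false}, {0, 1, true}, {1, 0, true}, {1, 1, false},
    };
    for (const auto &c : cases) {
        std::vector<float> ret = n.computeOutput({c.a, c.b});
        // The 0.5 threshold turns the sigmoid output into a boolean.
        assert((ret[0] > 0.5f) == c.expected);
    }
}
```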