Removing the unused `potential` member (and its get/set accessors) from Neuron, FFNeuron, FFLayer, and FeedForward

This commit is contained in:
2015-08-29 18:07:54 +02:00
parent b664988da6
commit 644211a4f8
4 changed files with 4 additions and 30 deletions

View File

@@ -21,7 +21,7 @@ FFNeuron& FFLayer::operator[](const size_t& neuron)
neurons=new FFNeuron*[layerSize]; neurons=new FFNeuron*[layerSize];
for(size_t i=0;i<layerSize;i++) for(size_t i=0;i<layerSize;i++)
{ {
neurons[i]=new FFNeuron(potentials[i],weights[i],outputs[i],inputs[i],lambda,function); neurons[i]=new FFNeuron(weights[i],outputs[i],inputs[i],lambda,function);
} }
} }
@@ -36,7 +36,6 @@ FeedForward::FeedForward(std::initializer_list<size_t> s, double lam, FeedForwar
{ {
transfer = new ActivationFunction::ActivationFunction*[s.size()]; transfer = new ActivationFunction::ActivationFunction*[s.size()];
weights= new float**[s.size()]; weights= new float**[s.size()];
potentials= new float*[s.size()];
layerSizes= new size_t[s.size()]; layerSizes= new size_t[s.size()];
outputs= new float*[s.size()]; outputs= new float*[s.size()];
inputs= new float*[s.size()]; inputs= new float*[s.size()];
@@ -52,7 +51,6 @@ FeedForward::FeedForward(std::initializer_list<size_t> s, double lam, FeedForwar
} }
layerSizes[i]=layeSize; layerSizes[i]=layeSize;
weights[i]= new float*[layeSize]; weights[i]= new float*[layeSize];
potentials[i]= new float[layeSize];
outputs[i]= new float[layeSize]; outputs[i]= new float[layeSize];
inputs[i]= new float[layeSize]; inputs[i]= new float[layeSize];
@@ -81,13 +79,11 @@ FeedForward::~FeedForward()
delete[] weights[i][j]; delete[] weights[i][j];
} }
delete[] weights[i]; delete[] weights[i];
delete[] potentials[i];
delete[] outputs[i]; delete[] outputs[i];
delete[] inputs[i]; delete[] inputs[i];
delete transfer[i]; delete transfer[i];
} }
delete[] weights; delete[] weights;
delete[] potentials;
delete[] layerSizes; delete[] layerSizes;
delete[] outputs; delete[] outputs;
delete[] inputs; delete[] inputs;
@@ -182,7 +178,7 @@ FFLayer& FeedForward::operator[](const size_t& l)
ffLayers=new FFLayer*[layers]; ffLayers=new FFLayer*[layers];
for(size_t i=0;i<layers;i++) for(size_t i=0;i<layers;i++)
{ {
ffLayers[i]=new FFLayer(layerSizes[i],potentials[i],weights[i],outputs[i],inputs[i],lambda,*transfer[i]); ffLayers[i]=new FFLayer(layerSizes[i],weights[i],outputs[i],inputs[i],lambda,*transfer[i]);
} }
} }

View File

@@ -31,15 +31,12 @@ namespace NeuralNetwork
class FFNeuron : public Neuron class FFNeuron : public Neuron
{ {
public: public:
inline FFNeuron(float &pot, float *w, float &outputF, float &i,float lam,ActivationFunction::ActivationFunction &fun):function(fun),potential(pot),weights(w),out(outputF),inputs(i),lambda(lam) { } inline FFNeuron(float *w, float &outputF, float &i,float lam,ActivationFunction::ActivationFunction &fun):function(fun),weights(w),out(outputF),inputs(i),lambda(lam) { }
FFNeuron() = delete; FFNeuron() = delete;
FFNeuron(const FFNeuron&) = delete; FFNeuron(const FFNeuron&) = delete;
FFNeuron& operator=(const FFNeuron&) = delete; FFNeuron& operator=(const FFNeuron&) = delete;
inline virtual float getPotential() const override {return potential;}
inline virtual void setPotential(const float& p) override { potential=p;}
inline virtual float getWeight(const size_t& i ) const override { return weights[i];} inline virtual float getWeight(const size_t& i ) const override { return weights[i];}
inline virtual void setWeight(const size_t& i,const float &p) override { weights[i]=p; } inline virtual void setWeight(const size_t& i,const float &p) override { weights[i]=p; }
@@ -48,7 +45,6 @@ namespace NeuralNetwork
inline virtual float derivatedOutput() const override { return function.derivatedOutput(inputs,out); } inline virtual float derivatedOutput() const override { return function.derivatedOutput(inputs,out); }
protected: protected:
ActivationFunction::ActivationFunction &function; ActivationFunction::ActivationFunction &function;
float &potential;
float *weights; float *weights;
float &out; float &out;
float &inputs; float &inputs;
@@ -59,7 +55,7 @@ namespace NeuralNetwork
class FFLayer: public Layer class FFLayer: public Layer
{ {
public: public:
inline FFLayer(size_t s, float *p,float **w,float *out,float *in,float lam,ActivationFunction::ActivationFunction &fun): function(fun), layerSize(s),potentials(p),weights(w),outputs(out),inputs(in),lambda(lam) {} inline FFLayer(size_t s, float **w,float *out,float *in,float lam,ActivationFunction::ActivationFunction &fun): function(fun), layerSize(s),weights(w),outputs(out),inputs(in),lambda(lam) {}
~FFLayer(); ~FFLayer();
FFLayer(const FFLayer &) = delete; FFLayer(const FFLayer &) = delete;
@@ -71,7 +67,6 @@ namespace NeuralNetwork
ActivationFunction::ActivationFunction &function; ActivationFunction::ActivationFunction &function;
FFNeuron **neurons=nullptr; FFNeuron **neurons=nullptr;
size_t layerSize; size_t layerSize;
float *potentials;
float **weights; float **weights;
float *outputs; float *outputs;
float *inputs; float *inputs;

View File

@@ -17,22 +17,6 @@ namespace NeuralNetwork
*/ */
virtual ~Neuron() {}; virtual ~Neuron() {};
/**
* @brief Returns potential of neuron
*/
virtual float getPotential() const =0;
/**
* @brief Sets potential of neuron
* @param p is new potential
*/
virtual void setPotential(const float &p) =0;
/**
* @brief Returns weight for w-th input neuron
* @param w is weight of neuron number w
*/
virtual float getWeight(const size_t &w) const =0; virtual float getWeight(const size_t &w) const =0;
/** /**

View File

@@ -10,7 +10,6 @@ int main()
srand(time(NULL)); srand(time(NULL));
NeuralNetwork::FeedForward ns({1,1}); NeuralNetwork::FeedForward ns({1,1});
ns[0][0].setPotential(0);
ns[1][1].setWeight(0,0); ns[1][1].setWeight(0,0);
ns[1][1].setWeight(1,1); ns[1][1].setWeight(1,1);