Just implemented output control through synaptic inertial momentum

blacklight 2009-08-09 02:15:19 +02:00
parent 006bf64c74
commit 49b5472480
7 changed files with 122 additions and 78 deletions

View File

@@ -1,21 +1,47 @@
-------------------------------------------------------------------------------
0.2.2:
Added initXML(), XMLFromSet() and closeXML() methods to auto-generate training
XML.
--- Release 0.3 ---
2009-08-09 BlackLight <blacklight@autistici.org>
* Makefile: Totally changed
* neural++.hpp: Changed header name, added BETA0 macro
* synapsis.cpp: Added momentum() method to compute the inertial momentum
of a synapsis
* everything: Data type changed from float to double throughout
--- Release 0.2.2 ---
2008-11-04 BlackLight <blacklight@autistici.org>
* all: Added initXML(), XMLFromSet() and closeXML() methods to auto-
generate training XML.
The train() method can now read its XML either from a file or from a plain string.
-------------------------------------------------------------------------------
0.2.1:
Added `train()` method to NeuralNet class, that allows you to train a neural
network using an XML containing pre-saved training sets (input values,
expected output). See examples/adder.xml for an example of such XML.
-------------------------------------------------------------------------------
0.2:
Added `save()` method to NeuralNet class, that allows you to save a trained
neural network to a binary file, to load anytime you need using
NeuralNet(const char *fname) constructor.
-------------------------------------------------------------------------------
0.01b:
First release ^^
-------------------------------------------------------------------------------
--- Release 0.2.1 ---
2008-10-22 BlackLight <blacklight@autistici.org>
* all: Added `train()` method to NeuralNet class, which allows you to
train a neural network using an XML file containing pre-saved training
sets (input values, expected output). See examples/adder.xml for an
example of such a file.
--- Release 0.2 ---
2008-10-12 BlackLight <blacklight@autistici.org>
* all: Added `save()` method to NeuralNet class, which allows you to
save a trained neural network to a binary file that can be loaded at
any time through the NeuralNet(const char *fname) constructor.
--- Release 0.01b ---
2008-04-03 BlackLight <blacklight@autistici.org>
* all: First beta release ^^
Copyright 2008, 2009, BlackLight
Copying and distribution of this file, with or without modification,
are permitted provided the copyright notice and this notice are
preserved.

View File

@@ -1 +1 @@
0.2.2
0.3

View File

@@ -7,3 +7,4 @@ clean:
rm learnAdd
rm doAdd
rm adderFromScratch
rm adder.net

View File

@@ -31,6 +31,9 @@ using namespace std;
namespace neuralpp {
//! Default rand value: |sin(rand)|, always >= 0 and <= 1
#define RAND ( fabs( sin((double) rand()) ) )
//! Initial value for the inertial momentum of the synapses
#define BETA0 0.7
class Synapsis;
class Neuron;
@@ -221,42 +224,43 @@ namespace neuralpp {
*/
class Synapsis {
double delta;
double prev_delta;
double weight;
Neuron *in;
Neuron *out;
NeuralNet *net;
double (*actv_f)(double);
double (*deriv)(double);
public:
Synapsis(Neuron* i, Neuron* o, NeuralNet* n, double w, double d) {
in=i; out=o;
weight=w; delta=d;
net=n;
}
/**
* @brief Constructor
* @param i Input neuron
* @param o Output neuron
* @param w Weight for the synapsis
* @param d Delta for the synapsis
*/
Synapsis(Neuron* i, Neuron* o, double w, double d);
/**
* @brief Constructor
* @param i Input neuron
* @param o Output neuron
* @param n Reference to the neural network
* @param a Activation function
* @param d Derivative of the activation function
*/
Synapsis (Neuron* i, Neuron* o, NeuralNet* n, double(*a)(double), double(*d)(double));
Synapsis (Neuron* i, Neuron* o, double(*a)(double), double(*d)(double));
/**
* @brief Constructor
* @param i Input neuron
* @param o Output neuron
* @param n Reference to the neural network
* @param w Weight for the synapsis (default: random)
* @param a Activation function
* @param d Derivative of the activation function
*/
Synapsis (Neuron* i, Neuron* o, NeuralNet* n,
Synapsis (Neuron* i, Neuron* o,
double w, double(*a)(double), double(*d)(double));
/**
@@ -289,6 +293,24 @@ namespace neuralpp {
* @return Delta of the synapsis
*/
double getDelta();
/**
* @brief Get the delta of the synapsis at the previous iteration
* @return The previous delta
*/
double getPrevDelta();
/**
* @brief Get the inertial momentum of the synapsis. The momentum is quite high
* at the beginning of the learning phase and decreases towards zero as the
* algorithm approaches its last iterations; it is needed to damp the strong
* oscillations in the output values that the randomly initialized synaptic
* weights would otherwise cause during the first learning steps
* @param N Total number of iterations the network takes to adjust its output values
* @param x Number of iterations already performed
* @return The inertial momentum of the synapsis
*/
double momentum (int N, int x);
};
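
To make the behaviour of that schedule concrete, here is a minimal standalone sketch (not part of this commit; the iteration count is an arbitrary example) that reproduces the same formula used by Synapsis::momentum() further down, with BETA0 = 0.7:

#include <cstdio>

// Standalone copy of the momentum schedule from Synapsis::momentum(),
// using BETA0 = 0.7 as defined in neural++.hpp.
static double momentum_schedule(int N, int x) {
    return (0.7 * N) / (20 * x + N);
}

int main() {
    const int N = 1000;  // hypothetical total number of learning iterations
    for (int x = 0; x <= N; x += 250)
        printf("iteration %4d -> momentum %.4f\n", x, momentum_schedule(N, x));
    return 0;  // prints 0.7000, 0.1167, 0.0636, 0.0438, 0.0333
}

The term starts at BETA0 and decays towards BETA0/21, so the previous delta dominates early on and fades out as the weights settle.
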
/**
@@ -386,7 +408,6 @@ namespace neuralpp {
*/
class Layer {
vector< Neuron > elements;
NeuralNet *net;
void (*update_weights)();
double (*actv_f)(double);
@@ -396,17 +417,16 @@
/**
* @brief Constructor
* @param sz Size of the layer
* @param n Reference to the neural network
* @param a Activation function
* @param d Its derivative
*/
Layer (size_t sz, NeuralNet* n, double (*a)(double), double(*d)(double));
Layer (size_t sz, double (*a)(double), double(*d)(double));
/**
* @brief Alternative constructor. It directly gets a vector of neurons to build
* the layer
*/
Layer (vector< Neuron >&, NeuralNet* net, double(*a)(double), double(*d)(double));
Layer (vector< Neuron >&, double(*a)(double), double(*d)(double));
/**
* @brief Redefinition for operator []. It gets the neuron at <i>i</i>

View File

@@ -21,7 +21,7 @@ using namespace neuralpp;
* @param a Activation function
* @param d Its derivative
*/
Layer::Layer (size_t sz, NeuralNet* n, double(*a)(double), double(*d)(double)) {
Layer::Layer (size_t sz, double(*a)(double), double(*d)(double)) {
for (size_t i=0; i<sz; i++) {
Neuron n(a,d);
elements.push_back(n);
@@ -29,18 +29,16 @@ Layer::Layer (size_t sz, NeuralNet* n, double(*a)(double), double(*d)(double))
actv_f=a;
deriv=d;
net=n;
}
/**
* @brief Alternative constructor. It directly gets a vector of neurons to build
* the layer
*/
Layer::Layer (vector< Neuron > &el, NeuralNet* n, double (*a)(double), double(*d)(double)) {
Layer::Layer (vector< Neuron > &el, double (*a)(double), double(*d)(double)) {
elements=el;
actv_f=a;
deriv=d;
net=n;
}
/**
@@ -65,7 +63,7 @@ void Layer::link (Layer& l) {
for (size_t j=0; j<size(); j++) {
Neuron *n2 = &(elements[j]);
Synapsis s(n1, n2, net, RAND, actv_f, deriv);
Synapsis s(n1, n2, RAND, actv_f, deriv);
n1->push_out(s);
n2->push_in(s);

View File

@@ -43,9 +43,9 @@ NeuralNet::NeuralNet (size_t in_size, size_t hidden_size, size_t out_size, doubl
actv_f=__actv;
deriv=__deriv;
input = new Layer(in_size, this, __actv, __deriv);
hidden = new Layer(hidden_size, this, __actv, __deriv);
output = new Layer(out_size, this, __actv, __deriv);
input = new Layer(in_size, __actv, __deriv);
hidden = new Layer(hidden_size, __actv, __deriv);
output = new Layer(out_size, __actv, __deriv);
link();
}
@@ -70,9 +70,9 @@ NeuralNet::NeuralNet (size_t in_size, size_t hidden_size, size_t out_size,
actv_f=a;
deriv=d;
input = new Layer(in_size, this, a, d);
hidden = new Layer(hidden_size, this, a, d);
output = new Layer(out_size, this, a, d);
input = new Layer(in_size, a, d);
hidden = new Layer(hidden_size, a, d);
output = new Layer(out_size, a, d);
link();
}
@@ -153,7 +153,13 @@ void NeuralNet::updateWeights() {
for (size_t j=0; j<n->nIn(); j++) {
Synapsis *s = &(n->synIn(j));
out_delta = s->getIn()->getActv() * error(ex) * (-l_rate);
if (ref_epochs - epochs > 0)
out_delta = s->getIn()->getActv() * error(ex) * (-l_rate) +
s->momentum(ref_epochs, ref_epochs-epochs) * s->getPrevDelta();
else
out_delta = s->getIn()->getActv() * error(ex) * (-l_rate);
s->setDelta(out_delta);
}
}
@@ -164,7 +170,12 @@ void NeuralNet::updateWeights() {
for (size_t j=0; j<n->nIn(); j++) {
Synapsis *s = &(n->synIn(j));
s->setDelta((-l_rate) * d * s->getIn()->getActv());
if (ref_epochs - epochs > 0)
s->setDelta((-l_rate) * d * s->getIn()->getActv() +
s->momentum(ref_epochs, ref_epochs-epochs) * s->getPrevDelta());
else
s->setDelta((-l_rate) * d * s->getIn()->getActv());
}
}
}
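
The change above swaps the plain delta rule for a momentum-augmented one: except on the very first iteration, when no previous delta exists yet, each new delta blends the current gradient step with the previous delta, weighted by the decaying momentum term. A hedged sketch of the rule for a single synapsis (the names below are illustrative, not the library's API):

// Illustrative form of the update computed above for one synapsis.
// in_actv = s->getIn()->getActv(), err = error(ex) or d,
// m = s->momentum(ref_epochs, ref_epochs - epochs),
// prev_delta = s->getPrevDelta(). On the first iteration the caller
// skips the momentum term, reducing this to the plain gradient step.
double momentum_delta(double in_actv, double err, double l_rate,
                      double m, double prev_delta)
{
    return in_actv * err * (-l_rate) + m * prev_delta;
}
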

View File

@@ -15,67 +15,55 @@
#include "neural++.hpp"
using namespace neuralpp;
Synapsis::Synapsis (Neuron* i, Neuron* o, NeuralNet* n, double(*a)(double), double(*d)(double)) {
Synapsis::Synapsis(Neuron* i, Neuron* o, double w, double d) {
in=i; out=o;
weight=w;
delta=d; prev_delta=0;
}
Synapsis::Synapsis (Neuron* i, Neuron* o, double(*a)(double), double(*d)(double)) {
srand((unsigned) time(NULL));
delta=0;
prev_delta=0;
weight=RAND;
in=i;
out=o;
actv_f=a;
deriv=d;
net=n;
}
/**
* @brief Constructor
* @param i Input neuron
* @param o Output neuron
* @param w Weight for the synapsis (default: random)
* @param a Activation function
* @param d Derivative of the activation function
*/
Synapsis::Synapsis (Neuron* i, Neuron* o, NeuralNet* n,
Synapsis::Synapsis (Neuron* i, Neuron* o,
double w, double(*a)(double), double(*d)(double)) {
delta=0;
prev_delta=0;
weight=w;
in=i;
out=o;
actv_f=a;
deriv=d;
net=n;
}
/**
* @return Reference to input neuron of the synapsis
*/
Neuron* Synapsis::getIn() { return in; }
/**
* @return Reference to output neuron of the synapsis
*/
Neuron* Synapsis::getOut() { return out; }
/**
* @return Weight of the synapsis
*/
double Synapsis::getWeight() { return weight; }
/**
* @return Delta of the synapsis
*/
double Synapsis::getDelta() { return delta; }
double Synapsis::getPrevDelta() { return prev_delta; }
/**
* @brief It sets the weight of the synapsis
*/
void Synapsis::setWeight(double w) { weight=w; }
/**
* @brief It sets the delta (how much to change the weight after an update)
* of the synapsis
*/
void Synapsis::setDelta(double d) { delta=d; }
void Synapsis::setDelta(double d) {
prev_delta=delta;
delta=d;
}
double Synapsis::momentum(int N, int x) {
return (BETA0*N)/(20*x + N);
}
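
Note that setDelta() now saves the outgoing delta into prev_delta before overwriting it; that ordering is what lets updateWeights() read the previous iteration's value through getPrevDelta(). A short usage sketch (assuming the header is reachable as neural++.hpp, and using an identity activation purely to satisfy the constructors):

#include "neural++.hpp"
#include <cstdio>
using namespace neuralpp;

// Identity activation and its derivative, only needed to build the neurons.
static double f(double x) { return x; }
static double df(double)  { return 1.0; }

int main() {
    Neuron n1(f, df), n2(f, df);
    Synapsis s(&n1, &n2, 0.5, 0.0);  // weight 0.5, initial delta 0.0

    s.setDelta(-0.20);  // prev_delta becomes  0.00
    s.setDelta(-0.15);  // prev_delta becomes -0.20

    // Early in a 1000-iteration run the momentum still weighs heavily:
    // momentum(1000, 1) = 0.7*1000 / (20*1 + 1000) ≈ 0.686
    printf("prev delta: %.2f, momentum: %.3f\n",
           s.getPrevDelta(), s.momentum(1000, 1));
    return 0;
}
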