A reference to the network is now passed to Layer and Synapsis objects

Author: blacklight
Date:   2009-08-08 19:11:06 +02:00
parent 1aa4ec7646
commit 006bf64c74
5 changed files with 29 additions and 24 deletions

Binary file not shown.
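For context on the change itself: the network object now constructs its Layers (and, through Layer::link(), the Synapsis objects) with a pointer to itself, so every layer and synapsis carries a back-reference to the NeuralNet that owns it. The sketch below is a minimal, self-contained illustration of that owner back-reference pattern using hypothetical stand-in types (Net, Child); it is not the actual neural++ API.

// Minimal sketch of the back-reference pattern applied in this commit.
// Net and Child are hypothetical stand-ins, not the neural++ classes.
#include <cstddef>
#include <vector>

class Net;                        // forward declaration: children only store a pointer

class Child {
        Net *net;                 // back-reference to the owning network
        double weight;
public:
        Child(Net *n, double w) : net(n), weight(w) {}
        Net *owner() const { return net; }
};

class Net {
        std::vector<Child> children;
public:
        explicit Net(std::size_t sz) {
                // The owner passes `this` to each child it builds,
                // just as NeuralNet now passes itself to Layer and Synapsis.
                for (std::size_t i = 0; i < sz; i++)
                        children.push_back(Child(this, 0.0));
        }
        std::size_t size() const { return children.size(); }
};

int main() {
        Net net(3);
        return net.size() == 3 ? 0 : 1;
}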


@@ -225,34 +225,39 @@ namespace neuralpp {
 		Neuron *in;
 		Neuron *out;
+		NeuralNet *net;
 
 		double (*actv_f)(double);
 		double (*deriv)(double);
 	public:
-		Synapsis(Neuron* i, Neuron* o, double w, double d) {
+		Synapsis(Neuron* i, Neuron* o, NeuralNet* n, double w, double d) {
 			in=i; out=o;
 			weight=w; delta=d;
+			net=n;
 		}
 
 		/**
 		 * @brief Constructor
 		 * @param i Input neuron
 		 * @param o Output neuron
+		 * @param n Reference to the neural network
 		 * @param a Activation function
 		 * @param d Derivate for activation function
 		 */
-		Synapsis (Neuron* i, Neuron* o, double(*)(double), double(*)(double));
+		Synapsis (Neuron* i, Neuron* o, NeuralNet* n, double(*a)(double), double(*d)(double));
 
 		/**
 		 * @brief Constructor
 		 * @param i Input neuron
 		 * @param o Output neuron
+		 * @param n Reference to the neural network
 		 * @param w Weight for the synapsis (default: random)
 		 * @param a Activation function
 		 * @param d Derivate for activation function
 		 */
-		Synapsis (Neuron* i, Neuron* o, double w, double(*)(double), double(*)(double));
+		Synapsis (Neuron* i, Neuron* o, NeuralNet* n,
+				double w, double(*a)(double), double(*d)(double));
 
 		/**
 		 * @return Reference to input neuron of the synapsis
@@ -381,8 +386,9 @@ namespace neuralpp {
 	 */
 	class Layer {
 		vector< Neuron > elements;
+		NeuralNet *net;
 		void (*update_weights)();
 
 		double (*actv_f)(double);
 		double (*deriv)(double);
@@ -390,16 +396,17 @@ namespace neuralpp {
 		/**
 		 * @brief Constructor
 		 * @param sz Size of the layer
+		 * @param n Reference to the neural network
 		 * @param a Activation function
 		 * @param d Its derivate
 		 */
-		Layer (size_t sz, double (*)(double), double(*)(double));
+		Layer (size_t sz, NeuralNet* n, double (*a)(double), double(*d)(double));
 
 		/**
 		 * @brief Alternative constructor. It directly gets a vector of neurons to build
 		 *  the layer
 		 */
-		Layer (vector< Neuron >&, double(*)(double), double(*)(double));
+		Layer (vector< Neuron >&, NeuralNet* net, double(*a)(double), double(*d)(double));
 
 		/**
 		 * @brief Redefinition for operator []. It gets the neuron at <i>i</i>


@@ -21,7 +21,7 @@ using namespace neuralpp;
  * @param a Activation function
  * @param d Its derivate
  */
-Layer::Layer (size_t sz, double(*a)(double), double(*d)(double)) {
+Layer::Layer (size_t sz, NeuralNet* n, double(*a)(double), double(*d)(double)) {
 	for (size_t i=0; i<sz; i++) {
 		Neuron n(a,d);
 		elements.push_back(n);
@@ -29,16 +29,18 @@ Layer::Layer (size_t sz, double(*a)(double), double(*d)(double)) {
 	actv_f=a;
 	deriv=d;
+	net=n;
 }
 
 /**
  * @brief Alternative constructor. It directly gets a vector of neurons to build
  *  the layer
  */
-Layer::Layer (vector< Neuron > &el, double (*a)(double), double(*d)(double)) {
+Layer::Layer (vector< Neuron > &el, NeuralNet* n, double(*a)(double), double(*d)(double)) {
 	elements=el;
 	actv_f=a;
 	deriv=d;
+	net=n;
 }
 
 /**
@@ -63,7 +65,7 @@ void Layer::link (Layer& l) {
 		for (size_t j=0; j<size(); j++) {
 			Neuron *n2 = &(elements[j]);
 
-			Synapsis s(n1,n2,RAND,actv_f,deriv);
+			Synapsis s(n1, n2, net, RAND, actv_f, deriv);
 			n1->push_out(s);
 			n2->push_in(s);


@@ -43,9 +43,9 @@ NeuralNet::NeuralNet (size_t in_size, size_t hidden_size, size_t out_size, doubl
 	actv_f=__actv;
 	deriv=__deriv;
 
-	input = new Layer(in_size, __actv, __deriv);
-	hidden = new Layer(hidden_size, __actv, __deriv);
-	output = new Layer(out_size, __actv, __deriv);
+	input = new Layer(in_size, this, __actv, __deriv);
+	hidden = new Layer(hidden_size, this, __actv, __deriv);
+	output = new Layer(out_size, this, __actv, __deriv);
 
 	link();
 }
@@ -70,9 +70,9 @@ NeuralNet::NeuralNet (size_t in_size, size_t hidden_size, size_t out_size,
 	actv_f=a;
 	deriv=d;
 
-	input = new Layer(in_size,a,d);
-	hidden = new Layer(hidden_size,a,d);
-	output = new Layer(out_size,a,d);
+	input = new Layer(in_size, this, a, d);
+	hidden = new Layer(hidden_size, this, a, d);
+	output = new Layer(out_size, this, a, d);
 
 	link();
 }


@@ -15,14 +15,7 @@
 #include "neural++.hpp"
 using namespace neuralpp;
 
-/**
- * @brief Constructor
- * @param i Input neuron
- * @param o Output neuron
- * @param a Activation function
- * @param d Derivate for activation function
- */
-Synapsis::Synapsis (Neuron* i, Neuron* o, double(*a)(double), double(*d)(double)) {
+Synapsis::Synapsis (Neuron* i, Neuron* o, NeuralNet* n, double(*a)(double), double(*d)(double)) {
 	srand((unsigned) time(NULL));
 
 	delta=0;
@@ -32,6 +25,7 @@ Synapsis::Synapsis (Neuron* i, Neuron* o, double(*a)(double), double(*d)(double)
 	actv_f=a;
 	deriv=d;
+	net=n;
 }
 
 /**
@@ -42,7 +36,8 @@ Synapsis::Synapsis (Neuron* i, Neuron* o, double(*a)(double), double(*d)(double)
  * @param a Activation function
  * @param d Derivate for activation function
  */
-Synapsis::Synapsis (Neuron* i, Neuron* o, double w, double(*a)(double), double(*d)(double)) {
+Synapsis::Synapsis (Neuron* i, Neuron* o, NeuralNet* n,
+		double w, double(*a)(double), double(*d)(double)) {
 	delta=0;
 	weight=w;
 	in=i;
@@ -50,6 +45,7 @@ Synapsis::Synapsis (Neuron* i, Neuron* o, double w, double(*a)(double), double(*
 	actv_f=a;
 	deriv=d;
+	net=n;
 }
 
 /**