mirror of https://github.com/BlackLight/neuralpp.git (synced 2024-12-28 04:05:12 +01:00)

Growing up...

parent b62dfe3967
commit 25996a5e70

10 changed files with 169 additions and 72 deletions

@@ -1,5 +1,9 @@
--- Release 0.4 ---

2009-08-16 BlackLight <blacklight@autistici.org>

* neuron.cpp: Fixing propagate() function

2009-08-15 BlackLight <blacklight@autistici.org>

* Makefile: Now you compile Neural++ with -Wall -pedantic

doc/html/namespacemembers.html (new file, 42 lines)
@@ -0,0 +1,42 @@
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">
<html><head><meta http-equiv="Content-Type" content="text/html;charset=UTF-8">
<title>Neural++: Class Members</title>
<link href="doxygen.css" rel="stylesheet" type="text/css">
<link href="tabs.css" rel="stylesheet" type="text/css">
</head><body>
<!-- Generated by Doxygen 1.5.6 -->
<div class="navigation" id="top">
<div class="tabs">
<ul>
<li><a href="index.html"><span>Main Page</span></a></li>
<li class="current"><a href="namespaces.html"><span>Namespaces</span></a></li>
<li><a href="annotated.html"><span>Classes</span></a></li>
<li><a href="files.html"><span>Files</span></a></li>
</ul>
</div>
<div class="tabs">
<ul>
<li><a href="namespaces.html"><span>Namespace List</span></a></li>
<li class="current"><a href="namespacemembers.html"><span>Namespace Members</span></a></li>
</ul>
</div>
<div class="tabs">
<ul>
<li class="current"><a href="namespacemembers.html"><span>All</span></a></li>
<li><a href="namespacemembers_func.html"><span>Functions</span></a></li>
</ul>
</div>
</div>
<div class="contents">
Here is a list of all namespace members with links to the namespace documentation for each member:
<p>
<ul>
<li>df()
: <a class="el" href="namespaceneuralpp.html#43c8197cc83f65fa9676386579671aec">neuralpp</a>
</ul>
</div>
<hr size="1"><address style="text-align: right;"><small>Generated on Sat Aug 15 02:56:02 2009 for Neural++ by
<a href="http://www.doxygen.org/index.html">
<img src="doxygen.png" alt="doxygen" align="middle" border="0"></a> 1.5.6 </small></address>
</body>
</html>

doc/html/namespacemembers_func.html (new file, 42 lines)
@@ -0,0 +1,42 @@
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">
<html><head><meta http-equiv="Content-Type" content="text/html;charset=UTF-8">
<title>Neural++: Class Members</title>
<link href="doxygen.css" rel="stylesheet" type="text/css">
<link href="tabs.css" rel="stylesheet" type="text/css">
</head><body>
<!-- Generated by Doxygen 1.5.6 -->
<div class="navigation" id="top">
<div class="tabs">
<ul>
<li><a href="index.html"><span>Main Page</span></a></li>
<li class="current"><a href="namespaces.html"><span>Namespaces</span></a></li>
<li><a href="annotated.html"><span>Classes</span></a></li>
<li><a href="files.html"><span>Files</span></a></li>
</ul>
</div>
<div class="tabs">
<ul>
<li><a href="namespaces.html"><span>Namespace List</span></a></li>
<li class="current"><a href="namespacemembers.html"><span>Namespace Members</span></a></li>
</ul>
</div>
<div class="tabs">
<ul>
<li><a href="namespacemembers.html"><span>All</span></a></li>
<li class="current"><a href="namespacemembers_func.html"><span>Functions</span></a></li>
</ul>
</div>
</div>
<div class="contents">

<p>
<ul>
<li>df()
: <a class="el" href="namespaceneuralpp.html#43c8197cc83f65fa9676386579671aec">neuralpp</a>
</ul>
</div>
<hr size="1"><address style="text-align: right;"><small>Generated on Sat Aug 15 02:56:02 2009 for Neural++ by
<a href="http://www.doxygen.org/index.html">
<img src="doxygen.png" alt="doxygen" align="middle" border="0"></a> 1.5.6 </small></address>
</body>
</html>

@@ -7,6 +7,8 @@

#include <iostream>
#include <neural++.hpp>

using namespace std;
using namespace neuralpp;

int main() {

@@ -7,6 +7,8 @@

#include <iostream>
#include <neural++.hpp>

using namespace std;
using namespace neuralpp;

#define NETFILE "adder.net"

@@ -8,10 +8,12 @@

#include <iostream>
#include <neural++.hpp>

using namespace std;
using namespace neuralpp;

int main() {
-NeuralNet net(2, 2, 1, 0.0005, 10000);
+NeuralNet net(2, 2, 1, 0.005, 1000);

cout << "Training in progress - This may take a while...\n";
net.train("adder.xml", NeuralNet::file);
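The hunk above raises the example's learning rate from 0.0005 to 0.005 and cuts the epoch count from 10000 to 1000. Judging from the NeuralNet members shown later in this commit (l_rate, epochs, ref_epochs), the last two constructor arguments are presumably the learning rate and the number of training epochs; that reading, and the evaluation calls below (setInput(), propagate(), getOutput(), whose visibility this diff does not show), are assumptions. A minimal sketch of how the revised example plausibly fits together:

#include <iostream>
#include <neural++.hpp>

using namespace std;
using namespace neuralpp;

int main() {
    // Assumed argument order: input neurons, hidden neurons, output neurons,
    // learning rate, training epochs (not spelled out in this diff).
    NeuralNet net(2, 2, 1, 0.005, 1000);

    cout << "Training in progress - This may take a while...\n";
    net.train("adder.xml", NeuralNet::file);

    // Query the trained network; these calls are assumed to be public.
    vector<double> v;
    v.push_back(2.0);
    v.push_back(3.0);
    net.setInput(v);
    net.propagate();
    cout << "2 + 3 = " << net.getOutput() << endl;
    return 0;
}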

@@ -1,5 +1,5 @@
/**************************************************************************************************
-* LibNeural++ v.0.2 - All-purpose library for managing neural networks *
+* LibNeural++ v.0.4 - All-purpose library for managing neural networks *
* Copyright (C) 2009, BlackLight *
* *
* This program is free software: you can redistribute it and/or modify it under the terms of the *

@@ -23,7 +23,6 @@
#include <cmath>

#include "neural++_exception.hpp"
-using namespace std;

//! Default rand value: |sin(rand)|, always >= 0 and <= 1
#define RAND (double) ( (rand() / (RAND_MAX/2)) - 1)

@@ -51,7 +50,8 @@ namespace neuralpp {
int epochs;
int ref_epochs;
double l_rate;
-double ex;
+//double ex;
+std::vector<double> ex;

/**
* @brief It updates the weights of the net's synapsis through back-propagation.

@@ -64,10 +64,10 @@ namespace neuralpp {
* In-class use only
* @param l Layer to commit the changes
*/
-void commitChanges (Layer *l);
+void commitChanges (Layer& l);

/**
-* @brief Get the error made on the expected result as |v-v'|/v
+* @brief Get the error made on the expected result as squared deviance
* @param ex Expected value
* @return Mean error
*/

@@ -111,7 +111,7 @@ namespace neuralpp {
* @param file Binary file containing a neural network previously saved by save() method
* @throw NetworkFileNotFoundException
*/
-NeuralNet (const string file) throw(NetworkFileNotFoundException);
+NeuralNet (const std::string file) throw(NetworkFileNotFoundException);


/**

@@ -139,21 +139,34 @@ namespace neuralpp {
* @brief It gets the output of the network in case the output layer contains more neurons
* @return A vector containing the output values of the network
*/
-vector<double> getOutputs();
+std::vector<double> getOutputs();

/**
-* @brief It gets the value expected. Of course you should specify this when you
+* @brief Get the expected value (in case you have an only neuron in output layer). Of course you should specify this when you
* build your network by using setExpected.
* @return The expected output value for a certain training phase
*/
double expected() const;

/**
-* @brief It sets the value you expect from your network
+* @brief Get the expected value (in case you have an only neuron in output layer). Of course you should specify this when you
* build your network by using setExpected.
* @return The expected output value for a certain training phase
*/
std::vector<double> getExpected() const;

/**
* @brief It sets the value you expect from your network (in case the network has an only neuron in its output layer)
* @param ex Expected output value
*/
void setExpected(double ex);

/**
* @brief Set the values you expect from your network
* @param ex Expected output values
*/
void setExpected(std::vector<double> ex);

/**
* @brief It updates through back-propagation the weights of the synapsis and
* computes again the output value for <i>epochs</i> times, calling back
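With ex now a std::vector<double>, the hunk above keeps the old single-value interface and adds a vector-based one for networks whose output layer has several neurons. A short hedged sketch of the two call styles (setExpected() and getExpected() are taken to be public, as their placement next to getOutputs() suggests):

#include <vector>
#include <neural++.hpp>

using namespace neuralpp;

void set_targets(NeuralNet& single_out, NeuralNet& multi_out) {
    // One output neuron: the scalar setter still works; expected()
    // now returns ex[0] (see the neuralnet.cpp hunk further down).
    single_out.setExpected(5.0);
    double t = single_out.expected();

    // Several output neurons: one expected value per neuron.
    std::vector<double> targets;
    targets.push_back(0.0);
    targets.push_back(1.0);
    multi_out.setExpected(targets);
    std::vector<double> back = multi_out.getExpected();

    (void) t; (void) back;   // silence unused-variable warnings in this sketch
}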

@@ -171,7 +184,7 @@ namespace neuralpp {
* @brief It sets the input for the network
* @param v Vector of doubles, containing the values to give to your network
*/
-void setInput (vector<double>& v);
+void setInput (std::vector<double> v);

/**
* @brief It links the layers of the network (input, hidden, output). Don't use unless

@@ -185,7 +198,7 @@ namespace neuralpp {
* @throws NetworkFileWriteException When you get an error writing the network's information to
* a file
*/
-void save(const char* fname) throw(NetworkFileWriteException);
+void save (const char* fname) throw(NetworkFileWriteException);

/**
* @brief Train a network using a training set loaded from an XML file. A sample XML file
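Since this commit touches both save() and the file-based constructor, a hedged round-trip sketch may help; the file name comes from the NETFILE define in the adder example, and the try/catch layout is only an assumption about how the declared exceptions are meant to be handled:

#include <iostream>
#include <neural++.hpp>

using namespace std;
using namespace neuralpp;

int main() {
    try {
        NeuralNet net(2, 2, 1, 0.005, 1000);
        net.train("adder.xml", NeuralNet::file);

        // Persist the trained network to a binary file...
        net.save("adder.net");

        // ...and restore it later through the file-based constructor.
        NeuralNet restored("adder.net");
    } catch (NetworkFileWriteException& e) {
        cerr << "Could not write the network file\n";
    } catch (NetworkFileNotFoundException& e) {
        cerr << "Could not load the network file\n";
    }
    return 0;
}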

@@ -194,13 +207,13 @@ namespace neuralpp {
* @param src Source type from which the XML will be loaded (from a file [default] or from a string)
* @throw InvalidXMLException
*/
-void train (string xml, source src) throw(InvalidXMLException);
+void train (std::string xml, source src) throw(InvalidXMLException);

/**
* @brief Initialize the training XML for the neural network
* @param xml String that will contain the XML
*/
-static void initXML (string& xml);
+static void initXML (std::string& xml);

/**
* @brief Splits a string into a vector of doubles, given a delimitator

@@ -208,7 +221,7 @@ namespace neuralpp {
* @param str String to be splitted
* @return Vector of doubles containing splitted values
*/
-static vector<double> split (char delim, string str);
+static std::vector<double> split (char delim, std::string str);

/**
* @brief Get a training set from a string and copies it to an XML
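split() is the helper the XML routines below build on. A quick hedged usage sketch, assuming the method is publicly accessible and that the delimiter is a plain character such as ',':

#include <iostream>
#include <vector>
#include <neural++.hpp>

int main() {
    // "1.5,2.5,4.0" -> {1.5, 2.5, 4.0}, per the doc comment above.
    std::vector<double> values = neuralpp::NeuralNet::split(',', "1.5,2.5,4.0");

    for (size_t i = 0; i < values.size(); i++)
        std::cout << values[i] << std::endl;
    return 0;
}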

@@ -222,13 +235,13 @@ namespace neuralpp {
* @param set String containing input values and expected outputs
* @return XML string
*/
-static string XMLFromSet (int id, string set);
+static std::string XMLFromSet (int id, std::string set);

/**
* @brief Closes an open XML document generated by "initXML" and "XMLFromSet"
* @param xml XML string to be closed
*/
-static void closeXML(string& xml);
+static void closeXML(std::string& xml);
};

/**
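The three static helpers above look designed to be chained to build a training document in memory: initXML() opens it, XMLFromSet() turns one training set into an XML fragment, closeXML() terminates it. The sketch below assumes the returned fragments are appended by the caller and that a set string is written as "inputs;expected output" with comma-separated values; neither detail is confirmed by this diff, only NeuralNet::file is.

#include <fstream>
#include <string>
#include <neural++.hpp>

using namespace std;
using namespace neuralpp;

int main() {
    string xml;

    NeuralNet::initXML(xml);                    // open the training document
    xml += NeuralNet::XMLFromSet(0, "2,3;5");   // set format is an assumption
    xml += NeuralNet::XMLFromSet(1, "4,1;5");
    NeuralNet::closeXML(xml);                   // close the document

    // Dump it to disk and train from the file source used by the adder example.
    ofstream out("adder.xml");
    out << xml;
    out.close();

    NeuralNet net(2, 2, 1, 0.005, 1000);
    net.train("adder.xml", NeuralNet::file);
    return 0;
}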

@@ -337,8 +350,8 @@ namespace neuralpp {
double actv_val;
double prop_val;

-vector< Synapsis > in;
-vector< Synapsis > out;
+std::vector< Synapsis > in;
+std::vector< Synapsis > out;

double (*actv_f)(double);


@@ -355,7 +368,7 @@ namespace neuralpp {
* @param out Output synapses
* @param a Activation function
*/
-Neuron (vector<Synapsis> in, vector<Synapsis> out,
+Neuron (std::vector<Synapsis> in, std::vector<Synapsis> out,
double (*a)(double));

/**

@@ -376,13 +389,13 @@ namespace neuralpp {
* @brief It pushes a new input synapsis
* @param s Synapsis to be pushed
*/
-void push_in (Synapsis& s);
+void push_in (Synapsis s);

/**
* @brief It pushes a new output synapsis
* @param s Synapsis to be pushed
*/
-void push_out (Synapsis& s);
+void push_out (Synapsis s);

/**
* @brief Change the activation value of the neuron

@@ -409,9 +422,9 @@ namespace neuralpp {
double getProp();

/**
-* @brief It propagates its activation value to the connected neurons
+* @brief Compute the propagation value of the neuron and set it
*/
-double propagate();
+void propagate();

/**
* @brief Get the number of input synapsis for the neuron

@@ -437,7 +450,7 @@ namespace neuralpp {
* you're doing, use NeuralNet instead
*/
class Layer {
-vector<Neuron> elements;
+std::vector<Neuron> elements;

void (*update_weights)();
double (*actv_f)(double);

@@ -456,7 +469,7 @@ namespace neuralpp {
* @param neurons Vector of neurons to be included in the layer
* @param a Activation function
*/
-Layer (vector<Neuron>& neurons, double(*a)(double));
+Layer (std::vector<Neuron>& neurons, double(*a)(double));

/**
* @brief Redefinition for operator []. It gets the neuron at <i>i</i>

@@ -471,17 +484,11 @@ namespace neuralpp {
*/
void link (Layer& l);

-/**
-* @brief It sets a vector of propagation values to all its neurons
-* @param v Vector of values to write as propagation values
-*/
-void setProp (vector<double>& v);
-
/**
-* @brief It sets a vector of activation values to all its neurons
-* @param v Vector of values to write as activation values
+* @brief Set the input values for the neurons of the layer (just use it for the input layer)
+* @param v Vector containing the input values
*/
-void setActv (vector<double>& v);
+void setInput (std::vector<double> v);

/**
* @brief It propagates its activation values to the output layers

@@ -14,6 +14,8 @@
#include <cstdlib>
#include "neural++.hpp"

using std::vector;

namespace neuralpp {
Layer::Layer(size_t sz, double (*a) (double)) {
for (size_t i = 0; i < sz; i++) {

@@ -24,7 +26,7 @@ namespace neuralpp {
actv_f = a;
}

-Layer::Layer(vector < Neuron > &el, double (*a) (double)) {
+Layer::Layer(vector<Neuron> &el, double (*a) (double)) {
elements = el;
actv_f = a;
}

@@ -40,7 +42,7 @@ namespace neuralpp {
return elements[i];
}

-void Layer::link(Layer & l) {
+void Layer::link(Layer& l) {
srand((unsigned) time(NULL));

for (size_t i = 0; i < l.size(); i++) {

@@ -56,23 +58,16 @@ namespace neuralpp {
}
}

-void Layer::setProp(vector < double >&v) {
-for (size_t i = 0; i < size(); i++)
+void Layer::setInput (vector<double> v) {
+for (size_t i = 0; i < size(); i++) {
elements[i].setProp(v[i]);
}

-void Layer::setActv(vector < double >&v) {
-for (size_t i = 0; i < size(); i++)
-elements[i].setActv(v[i]);
-}
}

void Layer::propagate() {
-for (size_t i = 0; i < size(); i++) {
-Neuron *n = &(elements[i]);

-n->setProp(n->propagate());
-n->setActv(actv_f(n->getProp()));
-}
+for (size_t i = 0; i < size(); i++)
+elements[i].propagate();
}
}

@@ -13,6 +13,7 @@

#include <fstream>
#include <sstream>
using namespace std;

#include "neural++.hpp"
#include "Markup.h"

@@ -78,9 +79,8 @@ namespace neuralpp {
output->propagate();
}

-void NeuralNet::setInput(vector <double>& v) {
-input->setProp(v);
-input->setActv(v);
+void NeuralNet::setInput(vector <double> v) {
+input->setInput(v);
}

void NeuralNet::link() {

@@ -89,11 +89,12 @@ namespace neuralpp {
}

void NeuralNet::setExpected(double e) {
-ex = e;
+ex.clear();
+ex.push_back(e);
}

double NeuralNet::expected() const {
-return ex;
+return ex[0];
}

void NeuralNet::updateWeights() {

@@ -101,24 +102,20 @@ namespace neuralpp {

for (size_t i = 0; i < output->size(); i++) {
Neuron *n = &(*output)[i];
-double prop = 0.0;
-
-for (size_t j = 0; j < n->nIn(); j++)
-prop += (n->synIn(j).getWeight() * n->synIn(j).getIn()->getActv());

for (size_t j = 0; j < n->nIn(); j++) {
Synapsis *s = &(n->synIn(j));

if (ref_epochs - epochs > 0)
out_delta =
(-l_rate) * (getOutput() - expected()) *
-df(actv_f, prop) * s->getIn()->getActv() +
+df(actv_f, n->getProp()) * s->getIn()->getActv() +
s->momentum(ref_epochs, ref_epochs - epochs) *
s->getPrevDelta();
else
out_delta =
(-l_rate) * (getOutput() - expected()) *
-df(actv_f, prop) * s->getIn()->getActv();
+df(actv_f, n->getProp()) * s->getIn()->getActv();

s->setDelta(out_delta);
}
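For reference, the delta stored on each input synapse of an output neuron in the hunk above is the standard delta rule with a momentum term. Writing y for getOutput(), t for expected(), p_j for the neuron's propagation value (getProp()), a_i for the activation of the synapse's input neuron, \eta for l_rate and m for the factor returned by s->momentum(), the code computes

$$ \Delta w_{ij} = -\eta\,(y - t)\,f'(p_j)\,a_i + m\,\Delta w_{ij}^{\mathrm{prev}} $$

with the momentum term only applied once at least one epoch has already run, exactly as the if branch distinguishes. Reading the propagation value back through n->getProp() instead of recomputing the local prop sum works because Neuron::propagate() now stores it (see the neuron.cpp hunk at the end of this commit).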

@@ -148,9 +145,9 @@ namespace neuralpp {
}
}

-void NeuralNet::commitChanges(Layer * l) {
-for (size_t i = 0; i < l->size(); i++) {
-Neuron *n = &(*l)[i];
+void NeuralNet::commitChanges(Layer& l) {
+for (size_t i = 0; i < l.size(); i++) {
+Neuron *n = &(l[i]);

for (size_t j = 0; j < n->nIn(); j++) {
Synapsis *s = &(n->synIn(j));

@@ -164,8 +161,8 @@ namespace neuralpp {
void NeuralNet::update() {
while ((epochs--) > 0) {
updateWeights();
-commitChanges(output);
-commitChanges(hidden);
+commitChanges(*output);
+commitChanges(*hidden);
propagate();
}
}

@@ -183,7 +180,7 @@ namespace neuralpp {

record.epochs = ref_epochs;
record.l_rate = l_rate;
-record.ex = ex;
+record.ex = ex[0];

if (out.write((char*) &record, sizeof(struct netrecord)) <= 0)
throw NetworkFileWriteException();

@@ -13,6 +13,8 @@

#include "neural++.hpp"

using std::vector;

namespace neuralpp {
Neuron::Neuron(double (*a) (double)) {
actv_f = a;

@@ -35,11 +37,11 @@ namespace neuralpp {
return out[i];
}

-void Neuron::push_in(Synapsis & s) {
+void Neuron::push_in(Synapsis s) {
in.push_back(s);
}

-void Neuron::push_out(Synapsis & s) {
+void Neuron::push_out(Synapsis s) {
out.push_back(s);
}


@@ -67,13 +69,15 @@ namespace neuralpp {
return actv_val;
}

-double Neuron::propagate() {
-double aux = 0;
+void Neuron::propagate() {
+double aux = 0.0;

for (size_t i = 0; i < nIn(); i++)
aux +=
(in[i].getWeight() * in[i].getIn()->actv_val);
-return aux;
+
+setProp(aux);
+setActv( actv_f(getProp()) );
}

void Neuron::synClear() {
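Together with the simplified Layer::propagate() earlier in this commit, the rewritten Neuron::propagate() now computes and stores the neuron's values instead of returning the raw sum. In the code's own terms, with w_i the weight of input synapse i, a_i the activation value (actv_val) of the neuron feeding it, and f the activation function actv_f, the method sets

$$ p = \sum_i w_i\,a_i, \qquad a = f(p) $$

which is why NeuralNet::updateWeights() can read the stored propagation value back through getProp() rather than recomputing the weighted sum, and why Layer::propagate() reduces to a plain loop over elements[i].propagate().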