Mirror of https://github.com/BlackLight/neuralpp.git (synced 2024-12-28 04:05:12 +01:00)

Commit 5cb0faef82 (parent adfa58800f): "Hey it's becoming damn cool..."

10 changed files with 116 additions and 88 deletions
@@ -2,6 +2,10 @@
 
 2009-08-16 BlackLight <blacklight@autistici.org>
 
+	* all: Now it *REALLY* supports multiple output values, i.e. multiple
+	  neurons, in the output layer.
+	  Oh, and I've also fixed that nasty bug that broke training from sets
+	  containing more than a single training set.
 	* neuron.cpp: Fixing propagate() function
 
 2009-08-15 BlackLight <blacklight@autistici.org>

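The "multiple output values" mentioned in this entry are read back through getOutputs(), which the header below documents as returning a vector with one value per output neuron. A minimal sketch of that path, assuming only the NeuralNet calls visible elsewhere in this diff (the constructor, setInput(), propagate() and getOutputs()); types and values are illustrative:

    #include <iostream>
    #include <vector>
    #include <neural++.hpp>

    using namespace std;
    using namespace neuralpp;

    int main() {
        // 2 input, 2 hidden and 2 output neurons, as in the example changed by this commit.
        NeuralNet net(2, 2, 2, 0.005, 100);

        vector<double> v;              // input vector; assumed to be a std::vector<double>
        v.push_back(3);
        v.push_back(2);

        net.setInput(v);
        net.propagate();

        vector<double> out = net.getOutputs();   // one value per output neuron
        for (size_t i = 0; i < out.size(); i++)
            cout << "output[" << i << "] = " << out[i] << endl;

        return 0;
    }
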
@@ -8,3 +8,4 @@ clean:
 	rm doAdd
 	rm adderFromScratch
 	rm adder.net
+	rm adder.xml

@@ -1,34 +0,0 @@
-<?xml version="1.0" encoding="iso-8859-1"?>
-<!DOCTYPE NETWORK SYSTEM "http://blacklight.gotdns.org/prog/neuralpp/trainer.dtd">
-
-<!--
-	Sample XML containing a training set that teach your network to do simple sums
-	between integer numbers
-
-	BlackLight, 2009
-	~ LibNeural++ project ~
--->
-
-<NETWORK NAME="adder">
-	<TRAINING ID="0">
-		<INPUT ID="1">3</INPUT>
-		<INPUT ID="2">2</INPUT>
-		<OUTPUT ID="3">5</OUTPUT>
-		<OUTPUT ID="4">1</OUTPUT>
-	</TRAINING>
-
-	<TRAINING ID="5">
-		<INPUT ID="6">5</INPUT>
-		<INPUT ID="7">3</INPUT>
-		<OUTPUT ID="8">8</OUTPUT>
-		<OUTPUT ID="9">2</OUTPUT>
-	</TRAINING>
-
-	<TRAINING ID="10">
-		<INPUT ID="11">6</INPUT>
-		<INPUT ID="12">3</INPUT>
-		<OUTPUT ID="13">9</OUTPUT>
-		<OUTPUT ID="14">3</OUTPUT>
-	</TRAINING>
-</NETWORK>
-

@@ -12,16 +12,17 @@ using namespace std;
 using namespace neuralpp;
 
 int main() {
-	NeuralNet net(2, 2, 2, 0.005, 1000);
+	NeuralNet net(2, 2, 2, 0.005, 100);
 	string xml;
 	double tmp;
+	int id = 0;
 
 	// XML initialization. Then, I say XML that 2+3=5, 3+3=6, 5+4=9
 	// Strings' format is "input1,input2,...,inputn;output1,output2,...,outputm
 	NeuralNet::initXML(xml);
-	xml += NeuralNet::XMLFromSet(0, "3,2;5,1");
-	xml += NeuralNet::XMLFromSet(1, "4,2;6,2");
-	xml += NeuralNet::XMLFromSet(2, "6,3;9,3");
+	xml += NeuralNet::XMLFromSet(id, "3,2;5,1");
+	xml += NeuralNet::XMLFromSet(id, "4,2;6,2");
+	xml += NeuralNet::XMLFromSet(id, "6,3;9,3");
 	NeuralNet::closeXML(xml);
 	cout << xml << endl;
 

@@ -38,7 +38,7 @@ int main() {
 
 	net.setInput(v);
 	net.propagate();
-	cout << "Neural net output: " << net.getOutputs()[0] << "; " << net.getOutputs()[1] << endl;
+	cout << "Neural net output: " << net.getOutput() << endl;
 
 	return 0;
 }

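Because XMLFromSet() now takes the training-set id by reference (see the header and implementation hunks further down), the caller above no longer numbers the sets by hand: the same id variable is passed to every call and advanced inside the library. A minimal sketch of what that produces, assuming only the initXML()/XMLFromSet()/closeXML() calls shown in this diff; the resulting ID numbering mirrors the deleted adder.xml, though exact whitespace and numeric formatting may differ:

    #include <iostream>
    #include <string>
    #include <neural++.hpp>

    using namespace std;
    using namespace neuralpp;

    int main() {
        string xml;
        int id = 0;

        NeuralNet::initXML(xml);
        // First call: <TRAINING ID="0"> with <INPUT ID="1">, <INPUT ID="2">,
        // <OUTPUT ID="3">, <OUTPUT ID="4">; id is left at 5.
        xml += NeuralNet::XMLFromSet(id, "3,2;5,1");
        // The second call therefore continues at <TRAINING ID="5">, just like
        // the hand-written adder.xml this commit deletes.
        xml += NeuralNet::XMLFromSet(id, "5,3;8,2");
        NeuralNet::closeXML(xml);

        cout << xml << endl;
        return 0;
    }
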
@@ -7,19 +7,46 @@
 */
 
 #include <iostream>
+#include <fstream>
+#include <ctime>
 #include <neural++.hpp>
 
 using namespace std;
 using namespace neuralpp;
 
-int main() {
-	NeuralNet net(2, 2, 2, 0.005, 1000);
+double f (double x) {
+	return (1.0/(1.0 + pow(M_E,x)));
+}
+
+int main() {
+	int id = 0;
+	string xml;
+	time_t t1, t2;
+	NeuralNet net(2, 2, 1, 0.005, 100);
+
+	NeuralNet::initXML(xml);
+	xml += NeuralNet::XMLFromSet(id, "2,3;5");
+	xml += NeuralNet::XMLFromSet(id, "3,2;5");
+	xml += NeuralNet::XMLFromSet(id, "6,2;8");
+	xml += NeuralNet::XMLFromSet(id, "2,2;4");
+	xml += NeuralNet::XMLFromSet(id, "1,2;3");
+	xml += NeuralNet::XMLFromSet(id, "-1,-2;-3");
+	xml += NeuralNet::XMLFromSet(id, "8,9;17");
+	xml += NeuralNet::XMLFromSet(id, "10,10;20");
+	NeuralNet::closeXML(xml);
+
+	ofstream out("adder.xml");
+	out << xml;
+	out.close();
+	cout << "Training file adder.xml has been written\n";
 
+	t1 = time(NULL);
 	cout << "Training in progress - This may take a while...\n";
 	net.train("adder.xml", NeuralNet::file);
+	t2 = time(NULL);
 
 	net.save("adder.net");
-	cout << "Network trained. You can use adder.net file now to load this network\n";
+	cout << "Network trained in " << (t2-t1) << " seconds. You can use adder.net file now to load this network\n";
 	return 0;
 }
 

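The f() helper added above is the logistic curve mirrored around zero: 1/(1 + e^x) equals the usual sigmoid evaluated at -x, so it runs from 1 down to 0 instead of 0 up to 1; in this hunk it is only defined, not yet passed to the network. A quick self-contained numeric check, illustrative only (M_E comes from <cmath> as in the example; on MSVC it needs _USE_MATH_DEFINES):

    #include <cmath>
    #include <cstdio>

    // Same expression as the f() added in this commit.
    static double f(double x) { return 1.0 / (1.0 + std::pow(M_E, x)); }

    int main() {
        // Decreasing, with the midpoint at x = 0:
        std::printf("f(-5) = %.4f\n", f(-5));   // ~0.9933
        std::printf("f(0)  = %.4f\n", f(0));    // 0.5000
        std::printf("f(5)  = %.4f\n", f(5));    // ~0.0067
        return 0;
    }
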
@@ -41,6 +41,7 @@ namespace neuralpp {
 	class NeuralNet;
 
 	double df (double (*f)(double), double x);
+	double __actv(double prop);
 
 	/**
 	 * @class NeuralNet

@@ -50,6 +51,7 @@ namespace neuralpp {
 		int epochs;
 		int ref_epochs;
 		double l_rate;
+		double threshold;
 		std::vector<double> expect;
 
 		/**

@@ -102,8 +104,11 @@ namespace neuralpp {
 		 * keep its value quite low to be more accurate)
 		 * @param e Epochs (cycles) to execute (the most you execute, the most the network
 		 * can be accurate for its purpose)
+		 * @param th Threshold, value in [0,1] that establishes how much a neuron must be
+		 * 'sensitive' on variations of the input values
+		 * @param a Activation function to use (default: f(x)=x)
 		 */
-		NeuralNet (size_t in_size, size_t hidden_size, size_t out_size, double l, int e);
+		NeuralNet (size_t in_size, size_t hidden_size, size_t out_size, double l, int e, double th = 0.0, double (*a)(double) = __actv);
 
 		/**
 		 * @brief Constructor

@@ -123,9 +128,11 @@ namespace neuralpp {
 		 * keep its value quite low to be more accurate)
 		 * @param e Epochs (cycles) to execute (the most you execute, the most the network
 		 * can be accurate for its purpose)
+		 * @param th Threshold, value in [0,1] that establishes how much a neuron must be
+		 * 'sensitive' on variations of the input values
 		 */
-		NeuralNet (size_t in_size, size_t hidden_size, size_t out_size,
-				double(*actv)(double), double l, int e);
+		//NeuralNet (size_t in_size, size_t hidden_size, size_t out_size,
+		//		double(*actv)(double), double l, int e, double th);
 
 		/**
 		 * @brief It gets the output of the network (note: the layer output should contain

@@ -134,6 +141,12 @@ namespace neuralpp {
 		 */
 		double getOutput() const;
 
+		/**
+		 * @brief Get the threshold of the neurons in the network
+		 * @return The threshold of the neurons
+		 */
+		double getThreshold() const;
+
 		/**
 		 * @brief It gets the output of the network in case the output layer contains more neurons
 		 * @return A vector containing the output values of the network

@@ -234,7 +247,7 @@ namespace neuralpp {
 		 * @param set String containing input values and expected outputs
 		 * @return XML string
 		 */
-		static std::string XMLFromSet (int id, std::string set);
+		static std::string XMLFromSet (int& id, std::string set);
 
 		/**
 		 * @brief Closes an open XML document generated by "initXML" and "XMLFromSet"

@@ -348,6 +361,7 @@ namespace neuralpp {
 	class Neuron {
 		double actv_val;
 		double prop_val;
+		double threshold;
 
 		std::vector< Synapsis > in;
 		std::vector< Synapsis > out;

@@ -358,17 +372,21 @@ namespace neuralpp {
 		/**
 		 * @brief Constructor
 		 * @param a Activation function
+		 * @param th Threshold, value in [0,1] that establishes how much a neuron must be
+		 * 'sensitive' on variations of the input values
 		 */
-		Neuron (double (*a)(double));
+		Neuron (double (*a)(double), double th = 0.0);
 
 		/**
 		 * @brief Alternative constructor, that gets also the synapsis linked to the neuron
 		 * @param in Input synapses
 		 * @param out Output synapses
 		 * @param a Activation function
+		 * @param th Threshold, value in [0,1] that establishes how much a neuron must be
+		 * 'sensitive' on variations of the input values
 		 */
 		Neuron (std::vector<Synapsis> in, std::vector<Synapsis> out,
-				double (*a)(double));
+				double (*a)(double), double th = 0.0);
 
 		/**
 		 * @brief Get the i-th synapsis connected on the input of the neuron

@@ -450,6 +468,7 @@ namespace neuralpp {
 	 */
 	class Layer {
 		std::vector<Neuron> elements;
+		double threshold;
 
 		void (*update_weights)();
 		double (*actv_f)(double);

@@ -459,16 +478,20 @@ namespace neuralpp {
 		 * @brief Constructor
 		 * @param sz Size of the layer
 		 * @param a Activation function
+		 * @param th Threshold, value in [0,1] that establishes how much a neuron must be
+		 * 'sensitive' on variations of the input values
 		 */
-		Layer (size_t sz, double (*a)(double));
+		Layer (size_t sz, double (*a)(double), double th = 0.0);
 
 		/**
 		 * @brief Alternative constructor. It directly gets a vector of neurons to build
 		 * the layer
 		 * @param neurons Vector of neurons to be included in the layer
 		 * @param a Activation function
+		 * @param th Threshold, value in [0,1] that establishes how much a neuron must be
+		 * 'sensitive' on variations of the input values
 		 */
-		Layer (std::vector<Neuron>& neurons, double(*a)(double));
+		Layer (std::vector<Neuron>& neurons, double(*a)(double), double th = 0.0);
 
 		/**
 		 * @brief Redefinition for operator []. It gets the neuron at <i>i</i>

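The net effect on the public API is that the two NeuralNet constructors collapse into one, with the threshold and the activation function as defaulted trailing parameters (the old activation-function overload is commented out). A minimal sketch of the possible call styles, assuming only the declaration above; note that the constructor hunk later in this diff still builds its layers with the default __actv, so a custom function mainly replaces the network-level actv_f:

    #include <cmath>
    #include <neural++.hpp>

    using namespace neuralpp;

    // Illustrative custom activation, not part of the library.
    static double myActv(double x) {
        return 1.0 / (1.0 + std::exp(-x));
    }

    int main() {
        NeuralNet plain(2, 2, 1, 0.005, 100);                 // th = 0.0, activation = __actv (identity)
        NeuralNet thresholded(2, 2, 1, 0.005, 100, 0.1);      // explicit threshold only
        NeuralNet custom(2, 2, 1, 0.005, 100, 0.1, myActv);   // threshold + custom activation
        return 0;
    }
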
@@ -17,18 +17,20 @@
 using std::vector;
 
 namespace neuralpp {
-	Layer::Layer(size_t sz, double (*a) (double)) {
+	Layer::Layer(size_t sz, double (*a) (double), double th) {
 		for (size_t i = 0; i < sz; i++) {
 			Neuron n(a);
 			elements.push_back(n);
 		}
 
+		threshold = th;
 		actv_f = a;
 	}
 
-	Layer::Layer(vector<Neuron> &el, double (*a) (double)) {
+	Layer::Layer(vector<Neuron> &el, double (*a) (double), double th) {
 		elements = el;
 		actv_f = a;
+		threshold = th;
 	}
 
 	size_t Layer::size() const {

@@ -11,6 +11,7 @@
 * this program. If not, see <http://www.gnu.org/licenses/>. *
 **************************************************************************************************/
 
+#include <iostream>
 #include <fstream>
 #include <sstream>
 

@@ -20,7 +21,7 @@ using namespace std;
 #include "Markup.h"
 
 namespace neuralpp {
 	double __actv(double prop) {
 		return prop;
 	}
 

@@ -30,35 +31,36 @@ namespace neuralpp {
 	}
 
 	NeuralNet::NeuralNet(size_t in_size, size_t hidden_size,
-			size_t out_size, double l, int e) {
+			size_t out_size, double l, int e, double th, double (*a)(double)) {
 
 		epochs = e;
 		ref_epochs = epochs;
 		l_rate = l;
-		actv_f = __actv;
-
-		input = new Layer(in_size, __actv);
-		hidden = new Layer(hidden_size, __actv);
-		output = new Layer(out_size, __actv);
-		link();
-	}
-
-	NeuralNet::NeuralNet(size_t in_size, size_t hidden_size,
-			size_t out_size, double (*a) (double),
-			double l, int e) {
-
-		epochs = e;
-		ref_epochs = epochs;
-		l_rate = l;
-
 		actv_f = a;
+		threshold = th;
 
-		input = new Layer(in_size, a);
-		hidden = new Layer(hidden_size, a);
-		output = new Layer(out_size, a);
+		input = new Layer(in_size, __actv, th);
+		hidden = new Layer(hidden_size, __actv, th);
+		output = new Layer(out_size, __actv, th);
 		link();
 	}
 
+	/*NeuralNet::NeuralNet(size_t in_size, size_t hidden_size,
+			size_t out_size, double (*a) (double),
+			double l, int e, double th) {
+
+		epochs = e;
+		ref_epochs = epochs;
+		l_rate = l;
+		actv_f = a;
+		threshold = th;
+
+		input = new Layer(in_size, a, th);
+		hidden = new Layer(hidden_size, a, th);
+		output = new Layer(out_size, a, th);
+		link();
+	}*/
+
 	double NeuralNet::getOutput() const {
 		return (*output)[0].getActv();
 	}

@@ -179,6 +181,8 @@ namespace neuralpp {
 	}
 
 	void NeuralNet::update() {
+		epochs = ref_epochs;
+
 		while ((epochs--) > 0) {
 			updateWeights();
 			commitChanges(*output);

@@ -452,7 +456,6 @@ namespace neuralpp {
 		while (xml.FindChildElem("TRAINING")) {
 			vector<double> input;
 			vector<double> output;
-
 			xml.IntoElem();
 
 			while (xml.FindChildElem("INPUT")) {

@@ -462,7 +465,7 @@ namespace neuralpp {
 
 				xml.OutOfElem();
 			}
 
 			while (xml.FindChildElem("OUTPUT")) {
 				xml.IntoElem();
 				output.push_back( atof(xml.GetData().c_str()) );

@@ -504,7 +507,7 @@ namespace neuralpp {
 		return v;
 	}
 
-	string NeuralNet::XMLFromSet(int id, string set) {
+	string NeuralNet::XMLFromSet (int& id, string set) {
 		string xml;
 		vector<double> in, out;
 		stringstream ss (stringstream::in | stringstream::out);

@@ -525,12 +528,12 @@ namespace neuralpp {
 		in = split(',', inStr);
 		out = split(',', outStr);
 
-		ss << id;
+		ss << (id++);
 		xml += "\t<TRAINING ID=\"" + ss.str() + "\">\n";
 
-		for (unsigned int i = 0; i < in.size(); i++) {
+		for (unsigned int i = 0; i < in.size(); i++, id++) {
 			ss.str(string());
-			ss << i;
+			ss << id;
 			xml += "\t\t<INPUT ID=\"" + ss.str() + "\">";
 
 			ss.str(string());

@@ -538,9 +541,9 @@ namespace neuralpp {
 			xml += ss.str() + "</INPUT>\n";
 		}
 
-		for (unsigned int i = 0; i < out.size(); i++) {
+		for (unsigned int i = 0; i < out.size(); i++, id++) {
 			ss.str(string());
-			ss << i;
+			ss << id;
 			xml += "\t\t<OUTPUT ID=\"" + ss.str() + "\">";
 
 			ss.str(string());

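The "epochs = ref_epochs" line added to update() is presumably what the ChangeLog entry means by fixing training with more than one training set: the epoch counter is consumed by the while loop, so without the reset any later pass would find it already at zero and train nothing. A small self-contained illustration of that counter behaviour (not library code; updateWeights()/commitChanges() are stood in for by a counter):

    #include <cstdio>

    int main() {
        const int ref_epochs = 3;
        int epochs = ref_epochs;

        for (int set = 0; set < 2; set++) {        // two training sets, as in a multi-set XML file
            int iterations = 0;

            epochs = ref_epochs;                   // the line this commit adds to NeuralNet::update();
                                                   // remove it and the second set runs 0 epochs
            while ((epochs--) > 0)
                iterations++;                      // stands in for updateWeights()/commitChanges()

            std::printf("set %d trained for %d epochs\n", set, iterations);
        }
        return 0;
    }
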
@@ -16,17 +16,18 @@
 using std::vector;
 
 namespace neuralpp {
-	Neuron::Neuron(double (*a) (double)) {
+	Neuron::Neuron(double (*a) (double), double th) {
 		actv_f = a;
+		threshold = th;
 	}
 
 	Neuron::Neuron(vector < Synapsis > i, vector < Synapsis > o,
-			double (*a) (double)) {
+			double (*a) (double), double th) {
 
 		in = i;
 		out = o;
-
 		actv_f = a;
+		threshold = th;
 	}
 
 	Synapsis & Neuron::synIn(size_t i) {

@@ -50,7 +51,8 @@ namespace neuralpp {
 	}
 
 	void Neuron::setActv(double val) {
-		actv_val = actv_f(val);
+		//actv_val = actv_f(val);
+		actv_val = val;
 	}
 
 	size_t Neuron::nIn() {

@@ -73,11 +75,10 @@ namespace neuralpp {
 		double aux = 0.0;
 
 		for (size_t i = 0; i < nIn(); i++)
-			aux +=
-			    (in[i].getWeight() * in[i].getIn()->actv_val);
+			aux += (in[i].getWeight() * in[i].getIn()->actv_val);
 
 		setProp(aux);
-		setActv( actv_f(getProp()) );
+		setActv( actv_f(aux) );
 	}
 
 	void Neuron::synClear() {
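Read together, the setActv() and propagate() changes remove a double application of the activation function: before this commit propagate() called setActv(actv_f(getProp())) while setActv() applied actv_f again to whatever it was given, so the activation ran twice per propagation. Now setActv() stores the value as-is and actv_f is applied exactly once, in propagate(). A tiny self-contained illustration of the difference (stand-in activation, not library code):

    #include <cstdio>

    static double actv(double x) { return 2.0 * x; }   // stand-in activation, easy to trace

    int main() {
        double prop = 3.0;                    // weighted sum computed by propagate()

        double before = actv(actv(prop));     // old behaviour: actv_f in propagate() AND in setActv()
        double after  = actv(prop);           // this commit: setActv() stores the value, actv_f runs once

        std::printf("before fix: %.1f, after fix: %.1f\n", before, after);   // 12.0 vs 6.0
        return 0;
    }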