diff --git a/ChangeLog b/ChangeLog index 3d3dd38..ba0df92 100644 --- a/ChangeLog +++ b/ChangeLog @@ -2,6 +2,10 @@ 2009-08-16 BlackLight + * all: Now it *REALLY* supports multiple output values, i.e. multiple + neurons, in the output layer. + Oh, and I've also fixed that nasty bug that made training fail from sets + containing more than a single training set. * neuron.cpp: Fixing propagate() function 2009-08-15 BlackLight diff --git a/examples/Makefile b/examples/Makefile index b995a37..59d55e9 100644 --- a/examples/Makefile +++ b/examples/Makefile @@ -8,3 +8,4 @@ clean: rm doAdd rm adderFromScratch rm adder.net + rm adder.xml diff --git a/examples/adder.xml b/examples/adder.xml deleted file mode 100644 index cca0c96..0000000 --- a/examples/adder.xml +++ /dev/null @@ -1,34 +0,0 @@ - - - - - - - - 3 - 2 - 5 - 1 - - - - 5 - 3 - 8 - 2 - - - - 6 - 3 - 9 - 3 - - - diff --git a/examples/adderFromScratch.cpp b/examples/adderFromScratch.cpp index 76eeb75..b800853 100644 --- a/examples/adderFromScratch.cpp +++ b/examples/adderFromScratch.cpp @@ -12,16 +12,17 @@ using namespace std; using namespace neuralpp; int main() { - NeuralNet net(2, 2, 2, 0.005, 1000); + NeuralNet net(2, 2, 2, 0.005, 100); string xml; double tmp; + int id = 0; // XML initialization. 
Then, I say XML that 2+3=5, 3+3=6, 5+4=9 // Strings' format is "input1,input2,...,inputn;output1,output2,...,outputm NeuralNet::initXML(xml); - xml += NeuralNet::XMLFromSet(0, "3,2;5,1"); - xml += NeuralNet::XMLFromSet(1, "4,2;6,2"); - xml += NeuralNet::XMLFromSet(2, "6,3;9,3"); + xml += NeuralNet::XMLFromSet(id, "3,2;5,1"); + xml += NeuralNet::XMLFromSet(id, "4,2;6,2"); + xml += NeuralNet::XMLFromSet(id, "6,3;9,3"); NeuralNet::closeXML(xml); cout << xml << endl; diff --git a/examples/doAdd.cpp b/examples/doAdd.cpp index de2c53c..f94bdf2 100644 --- a/examples/doAdd.cpp +++ b/examples/doAdd.cpp @@ -38,7 +38,7 @@ int main() { net.setInput(v); net.propagate(); - cout << "Neural net output: " << net.getOutputs()[0] << "; " << net.getOutputs()[1] << endl; + cout << "Neural net output: " << net.getOutput() << endl; return 0; } diff --git a/examples/learnAdd.cpp b/examples/learnAdd.cpp index e3e0dd1..8eb774a 100644 --- a/examples/learnAdd.cpp +++ b/examples/learnAdd.cpp @@ -7,19 +7,46 @@ */ #include +#include +#include #include using namespace std; using namespace neuralpp; -int main() { - NeuralNet net(2, 2, 2, 0.005, 1000); +double f (double x) { + return (1.0/(1.0 + pow(M_E,x))); +} +int main() { + int id = 0; + string xml; + time_t t1, t2; + NeuralNet net(2, 2, 1, 0.005, 100); + + NeuralNet::initXML(xml); + xml += NeuralNet::XMLFromSet(id, "2,3;5"); + xml += NeuralNet::XMLFromSet(id, "3,2;5"); + xml += NeuralNet::XMLFromSet(id, "6,2;8"); + xml += NeuralNet::XMLFromSet(id, "2,2;4"); + xml += NeuralNet::XMLFromSet(id, "1,2;3"); + xml += NeuralNet::XMLFromSet(id, "-1,-2;-3"); + xml += NeuralNet::XMLFromSet(id, "8,9;17"); + xml += NeuralNet::XMLFromSet(id, "10,10;20"); + NeuralNet::closeXML(xml); + + ofstream out("adder.xml"); + out << xml; + out.close(); + cout << "Training file adder.xml has been written\n"; + + t1 = time(NULL); cout << "Training in progress - This may take a while...\n"; net.train("adder.xml", NeuralNet::file); + t2 = time(NULL); net.save("adder.net"); 
- cout << "Network trained. You can use adder.net file now to load this network\n"; + cout << "Network trained in " << (t2-t1) << " seconds. You can use adder.net file now to load this network\n"; return 0; } diff --git a/include/neural++.hpp b/include/neural++.hpp index 38d4a8d..309caf5 100644 --- a/include/neural++.hpp +++ b/include/neural++.hpp @@ -41,6 +41,7 @@ namespace neuralpp { class NeuralNet; double df (double (*f)(double), double x); + double __actv(double prop); /** * @class NeuralNet @@ -50,6 +51,7 @@ namespace neuralpp { int epochs; int ref_epochs; double l_rate; + double threshold; std::vector expect; /** @@ -102,8 +104,11 @@ namespace neuralpp { * keep its value quite low to be more accurate) * @param e Epochs (cycles) to execute (the most you execute, the most the network * can be accurate for its purpose) + * @param th Threshold, value in [0,1] that establishes how much a neuron must be + * 'sensitive' on variations of the input values + * @param a Activation function to use (default: f(x)=x) */ - NeuralNet (size_t in_size, size_t hidden_size, size_t out_size, double l, int e); + NeuralNet (size_t in_size, size_t hidden_size, size_t out_size, double l, int e, double th = 0.0, double (*a)(double) = __actv); /** * @brief Constructor @@ -123,9 +128,11 @@ namespace neuralpp { * keep its value quite low to be more accurate) * @param e Epochs (cycles) to execute (the most you execute, the most the network * can be accurate for its purpose) + * @param th Threshold, value in [0,1] that establishes how much a neuron must be + * 'sensitive' on variations of the input values */ - NeuralNet (size_t in_size, size_t hidden_size, size_t out_size, - double(*actv)(double), double l, int e); + //NeuralNet (size_t in_size, size_t hidden_size, size_t out_size, + // double(*actv)(double), double l, int e, double th); /** * @brief It gets the output of the network (note: the layer output should contain @@ -134,6 +141,12 @@ namespace neuralpp { */ double getOutput() 
const; + /** + * @brief Get the threshold of the neurons in the network + * @return The threshold of the neurons + */ + double getThreshold() const; + /** * @brief It gets the output of the network in case the output layer contains more neurons * @return A vector containing the output values of the network @@ -234,7 +247,7 @@ namespace neuralpp { * @param set String containing input values and expected outputs * @return XML string */ - static std::string XMLFromSet (int id, std::string set); + static std::string XMLFromSet (int& id, std::string set); /** * @brief Closes an open XML document generated by "initXML" and "XMLFromSet" @@ -348,6 +361,7 @@ namespace neuralpp { class Neuron { double actv_val; double prop_val; + double threshold; std::vector< Synapsis > in; std::vector< Synapsis > out; @@ -358,17 +372,21 @@ namespace neuralpp { /** * @brief Constructor * @param a Activation function + * @param th Threshold, value in [0,1] that establishes how much a neuron must be + * 'sensitive' on variations of the input values */ - Neuron (double (*a)(double)); + Neuron (double (*a)(double), double th = 0.0); /** * @brief Alternative constructor, that gets also the synapsis linked to the neuron * @param in Input synapses * @param out Output synapses * @param a Activation function + * @param th Threshold, value in [0,1] that establishes how much a neuron must be + * 'sensitive' on variations of the input values */ Neuron (std::vector in, std::vector out, - double (*a)(double)); + double (*a)(double), double th = 0.0); /** * @brief Get the i-th synapsis connected on the input of the neuron @@ -450,6 +468,7 @@ namespace neuralpp { */ class Layer { std::vector elements; + double threshold; void (*update_weights)(); double (*actv_f)(double); @@ -459,16 +478,20 @@ namespace neuralpp { * @brief Constructor * @param sz Size of the layer * @param a Activation function + * @param th Threshold, value in [0,1] that establishes how much a neuron must be + * 'sensitive' on variations 
of the input values */ - Layer (size_t sz, double (*a)(double)); + Layer (size_t sz, double (*a)(double), double th = 0.0); /** * @brief Alternative constructor. It directly gets a vector of neurons to build * the layer * @param neurons Vector of neurons to be included in the layer * @param a Activation function + * @param th Threshold, value in [0,1] that establishes how much a neuron must be + * 'sensitive' on variations of the input values */ - Layer (std::vector& neurons, double(*a)(double)); + Layer (std::vector& neurons, double(*a)(double), double th = 0.0); /** * @brief Redefinition for operator []. It gets the neuron at i diff --git a/src/layer.cpp b/src/layer.cpp index e74fe76..1ae4ed8 100644 --- a/src/layer.cpp +++ b/src/layer.cpp @@ -17,18 +17,20 @@ using std::vector; namespace neuralpp { - Layer::Layer(size_t sz, double (*a) (double)) { + Layer::Layer(size_t sz, double (*a) (double), double th) { for (size_t i = 0; i < sz; i++) { Neuron n(a); elements.push_back(n); } + threshold = th; actv_f = a; } - Layer::Layer(vector &el, double (*a) (double)) { + Layer::Layer(vector &el, double (*a) (double), double th) { elements = el; actv_f = a; + threshold = th; } size_t Layer::size() const { diff --git a/src/neuralnet.cpp b/src/neuralnet.cpp index 96c40b9..faa72fe 100644 --- a/src/neuralnet.cpp +++ b/src/neuralnet.cpp @@ -11,6 +11,7 @@ * this program. If not, see . 
* **************************************************************************************************/ +#include #include #include @@ -20,7 +21,7 @@ using namespace std; #include "Markup.h" namespace neuralpp { - double __actv(double prop) { + double __actv(double prop) { return prop; } @@ -30,35 +31,36 @@ namespace neuralpp { } NeuralNet::NeuralNet(size_t in_size, size_t hidden_size, - size_t out_size, double l, int e) { + size_t out_size, double l, int e, double th, double (*a)(double)) { epochs = e; ref_epochs = epochs; l_rate = l; - actv_f = __actv; - - input = new Layer(in_size, __actv); - hidden = new Layer(hidden_size, __actv); - output = new Layer(out_size, __actv); - link(); - } - - NeuralNet::NeuralNet(size_t in_size, size_t hidden_size, - size_t out_size, double (*a) (double), - double l, int e) { - - epochs = e; - ref_epochs = epochs; - l_rate = l; - actv_f = a; + threshold = th; - input = new Layer(in_size, a); - hidden = new Layer(hidden_size, a); - output = new Layer(out_size, a); + input = new Layer(in_size, __actv, th); + hidden = new Layer(hidden_size, __actv, th); + output = new Layer(out_size, __actv, th); link(); } + /*NeuralNet::NeuralNet(size_t in_size, size_t hidden_size, + size_t out_size, double (*a) (double), + double l, int e, double th) { + + epochs = e; + ref_epochs = epochs; + l_rate = l; + actv_f = a; + threshold = th; + + input = new Layer(in_size, a, th); + hidden = new Layer(hidden_size, a, th); + output = new Layer(out_size, a, th); + link(); + }*/ + double NeuralNet::getOutput() const { return (*output)[0].getActv(); } @@ -179,6 +181,8 @@ namespace neuralpp { } void NeuralNet::update() { + epochs = ref_epochs; + while ((epochs--) > 0) { updateWeights(); commitChanges(*output); @@ -452,7 +456,6 @@ namespace neuralpp { while (xml.FindChildElem("TRAINING")) { vector input; vector output; - xml.IntoElem(); while (xml.FindChildElem("INPUT")) { @@ -462,7 +465,7 @@ namespace neuralpp { xml.OutOfElem(); } - + while 
(xml.FindChildElem("OUTPUT")) { xml.IntoElem(); output.push_back( atof(xml.GetData().c_str()) ); @@ -504,7 +507,7 @@ namespace neuralpp { return v; } - string NeuralNet::XMLFromSet(int id, string set) { + string NeuralNet::XMLFromSet (int& id, string set) { string xml; vector in, out; stringstream ss (stringstream::in | stringstream::out); @@ -525,12 +528,12 @@ namespace neuralpp { in = split(',', inStr); out = split(',', outStr); - ss << id; + ss << (id++); xml += "\t\n"; - for (unsigned int i = 0; i < in.size(); i++) { + for (unsigned int i = 0; i < in.size(); i++, id++) { ss.str(string()); - ss << i; + ss << id; xml += "\t\t"; ss.str(string()); @@ -538,9 +541,9 @@ namespace neuralpp { xml += ss.str() + "\n"; } - for (unsigned int i = 0; i < out.size(); i++) { + for (unsigned int i = 0; i < out.size(); i++, id++) { ss.str(string()); - ss << i; + ss << id; xml += "\t\t"; ss.str(string()); diff --git a/src/neuron.cpp b/src/neuron.cpp index fd648fe..ddd5d9c 100644 --- a/src/neuron.cpp +++ b/src/neuron.cpp @@ -16,17 +16,18 @@ using std::vector; namespace neuralpp { - Neuron::Neuron(double (*a) (double)) { + Neuron::Neuron(double (*a) (double), double th) { actv_f = a; + threshold = th; } Neuron::Neuron(vector < Synapsis > i, vector < Synapsis > o, - double (*a) (double)) { + double (*a) (double), double th) { in = i; out = o; - actv_f = a; + threshold = th; } Synapsis & Neuron::synIn(size_t i) { @@ -50,7 +51,8 @@ namespace neuralpp { } void Neuron::setActv(double val) { - actv_val = actv_f(val); + //actv_val = actv_f(val); + actv_val = val; } size_t Neuron::nIn() { @@ -73,11 +75,10 @@ namespace neuralpp { double aux = 0.0; for (size_t i = 0; i < nIn(); i++) - aux += - (in[i].getWeight() * in[i].getIn()->actv_val); + aux += (in[i].getWeight() * in[i].getIn()->actv_val); setProp(aux); - setActv( actv_f(getProp()) ); + setActv( actv_f(aux) ); } void Neuron::synClear() {