diff --git a/doc/html/Markup_8h-source.html b/doc/html/Markup_8h-source.html index 5e18d2a..da97ff2 100644 --- a/doc/html/Markup_8h-source.html +++ b/doc/html/Markup_8h-source.html @@ -661,7 +661,7 @@ 00644 00645 #endif // !defined(_MARKUP_H_INCLUDED_) -
Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
diff --git a/doc/html/Markup_8h.html b/doc/html/Markup_8h.html index a3fa01e..babac45 100644 --- a/doc/html/Markup_8h.html +++ b/doc/html/Markup_8h.html @@ -1032,7 +1032,7 @@

-


Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
diff --git a/doc/html/annotated.html b/doc/html/annotated.html index b388204..0f2f046 100644 --- a/doc/html/annotated.html +++ b/doc/html/annotated.html @@ -50,7 +50,7 @@ neuralpp::synrecord -
Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
diff --git a/doc/html/classCMarkup-members.html b/doc/html/classCMarkup-members.html index 0d4ea15..1b50652 100644 --- a/doc/html/classCMarkup-members.html +++ b/doc/html/classCMarkup-members.html @@ -213,7 +213,7 @@ x_WriteText(const MCD_STR &strDoc, FilePos &file)CMarkup [protected, static] ~CMarkup()CMarkup [inline] -
Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
diff --git a/doc/html/classCMarkup.html b/doc/html/classCMarkup.html index e7cb7b4..1ef741c 100644 --- a/doc/html/classCMarkup.html +++ b/doc/html/classCMarkup.html @@ -4479,7 +4479,7 @@
The documentation for this class was generated from the following file: -
Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
diff --git a/doc/html/classneuralpp_1_1InvalidSynapticalWeightException-members.html b/doc/html/classneuralpp_1_1InvalidSynapticalWeightException-members.html index 13391d5..20fcac0 100644 --- a/doc/html/classneuralpp_1_1InvalidSynapticalWeightException-members.html +++ b/doc/html/classneuralpp_1_1InvalidSynapticalWeightException-members.html @@ -27,7 +27,7 @@ InvalidSynapticalWeightException()neuralpp::InvalidSynapticalWeightException [inline] what() const neuralpp::InvalidSynapticalWeightException [inline] -
Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
diff --git a/doc/html/classneuralpp_1_1InvalidSynapticalWeightException.html b/doc/html/classneuralpp_1_1InvalidSynapticalWeightException.html index 403293b..e2a6960 100644 --- a/doc/html/classneuralpp_1_1InvalidSynapticalWeightException.html +++ b/doc/html/classneuralpp_1_1InvalidSynapticalWeightException.html @@ -84,7 +84,7 @@ Exception raised when, while trying the network or directly, the weight of a syn
The documentation for this class was generated from the following file: -
Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
diff --git a/doc/html/classneuralpp_1_1InvalidXMLException-members.html b/doc/html/classneuralpp_1_1InvalidXMLException-members.html index bc02a4f..e73f1fc 100644 --- a/doc/html/classneuralpp_1_1InvalidXMLException-members.html +++ b/doc/html/classneuralpp_1_1InvalidXMLException-members.html @@ -24,10 +24,11 @@

neuralpp::InvalidXMLException Member List

This is the complete list of members for neuralpp::InvalidXMLException, including all inherited members.

- + +
InvalidXMLException()neuralpp::InvalidXMLException [inline]
errorneuralpp::InvalidXMLException [private]
InvalidXMLException(const char *err=" ")neuralpp::InvalidXMLException [inline]
what() const neuralpp::InvalidXMLException [inline]

-
Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
diff --git a/doc/html/classneuralpp_1_1InvalidXMLException.html b/doc/html/classneuralpp_1_1InvalidXMLException.html index dd32eb0..a7fbec2 100644 --- a/doc/html/classneuralpp_1_1InvalidXMLException.html +++ b/doc/html/classneuralpp_1_1InvalidXMLException.html @@ -35,21 +35,25 @@ List of all members. - + + + +

Public Member Functions

 InvalidXMLException ()
 InvalidXMLException (const char *err=" ")
const char * what () const throw ()

Private Attributes

char * error

Detailed Description

Exception thrown when trying to parse an invalid XML document.

Constructor & Destructor Documentation

- +
- + + @@ -59,6 +63,8 @@ Exception thrown when trying parsing an invalid XML.

Constructor & D

+

References error.

+


Member Function Documentation

@@ -79,12 +85,32 @@ Exception thrown when trying parsing an invalid XML.

Constructor & D

+

References error.

+ + +

+


Member Data Documentation

+ +
+
+
neuralpp::InvalidXMLException::InvalidXMLException ( const char *  err = " "  )  [inline]
+ + + +
char* neuralpp::InvalidXMLException::error [private]
+
+
+ +

+ +

Referenced by InvalidXMLException(), and what().

+


The documentation for this class was generated from the following file: -
Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
diff --git a/doc/html/classneuralpp_1_1Layer-members.html b/doc/html/classneuralpp_1_1Layer-members.html index 8f7142b..a4abaa1 100644 --- a/doc/html/classneuralpp_1_1Layer-members.html +++ b/doc/html/classneuralpp_1_1Layer-members.html @@ -36,7 +36,7 @@ thresholdneuralpp::Layer [private] update_weightsneuralpp::Layer [private] -
Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
diff --git a/doc/html/classneuralpp_1_1Layer.html b/doc/html/classneuralpp_1_1Layer.html index 05e2f25..535c7b4 100644 --- a/doc/html/classneuralpp_1_1Layer.html +++ b/doc/html/classneuralpp_1_1Layer.html @@ -345,7 +345,7 @@ It propagates its activation values to the output layers.
The documentation for this class was generated from the following file: -
Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
diff --git a/doc/html/classneuralpp_1_1NetworkFileNotFoundException-members.html b/doc/html/classneuralpp_1_1NetworkFileNotFoundException-members.html index 263f3c3..accc4ee 100644 --- a/doc/html/classneuralpp_1_1NetworkFileNotFoundException-members.html +++ b/doc/html/classneuralpp_1_1NetworkFileNotFoundException-members.html @@ -27,7 +27,7 @@ NetworkFileNotFoundException()neuralpp::NetworkFileNotFoundException [inline] what() const neuralpp::NetworkFileNotFoundException [inline] -
Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
diff --git a/doc/html/classneuralpp_1_1NetworkFileNotFoundException.html b/doc/html/classneuralpp_1_1NetworkFileNotFoundException.html index e9598a5..ade7d49 100644 --- a/doc/html/classneuralpp_1_1NetworkFileNotFoundException.html +++ b/doc/html/classneuralpp_1_1NetworkFileNotFoundException.html @@ -88,7 +88,7 @@ Exception thrown when doing an attempt to load a network from an invalid file. <
The documentation for this class was generated from the following file: -
Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
diff --git a/doc/html/classneuralpp_1_1NetworkFileWriteException-members.html b/doc/html/classneuralpp_1_1NetworkFileWriteException-members.html index 5041be2..b3df10d 100644 --- a/doc/html/classneuralpp_1_1NetworkFileWriteException-members.html +++ b/doc/html/classneuralpp_1_1NetworkFileWriteException-members.html @@ -27,7 +27,7 @@ NetworkFileWriteException()neuralpp::NetworkFileWriteException [inline] what() const neuralpp::NetworkFileWriteException [inline] -
Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
diff --git a/doc/html/classneuralpp_1_1NetworkFileWriteException.html b/doc/html/classneuralpp_1_1NetworkFileWriteException.html index 83044d6..af3c95a 100644 --- a/doc/html/classneuralpp_1_1NetworkFileWriteException.html +++ b/doc/html/classneuralpp_1_1NetworkFileWriteException.html @@ -84,7 +84,7 @@ Exception thrown when trying to write the network's information to a file that c
The documentation for this class was generated from the following file: -
Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
diff --git a/doc/html/classneuralpp_1_1NetworkIndexOutOfBoundsException-members.html b/doc/html/classneuralpp_1_1NetworkIndexOutOfBoundsException-members.html index 9f56122..e4194be 100644 --- a/doc/html/classneuralpp_1_1NetworkIndexOutOfBoundsException-members.html +++ b/doc/html/classneuralpp_1_1NetworkIndexOutOfBoundsException-members.html @@ -27,7 +27,7 @@ NetworkIndexOutOfBoundsException()neuralpp::NetworkIndexOutOfBoundsException [inline] what() const neuralpp::NetworkIndexOutOfBoundsException [inline] -
Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
diff --git a/doc/html/classneuralpp_1_1NetworkIndexOutOfBoundsException.html b/doc/html/classneuralpp_1_1NetworkIndexOutOfBoundsException.html index 617e786..b41c14a 100644 --- a/doc/html/classneuralpp_1_1NetworkIndexOutOfBoundsException.html +++ b/doc/html/classneuralpp_1_1NetworkIndexOutOfBoundsException.html @@ -84,7 +84,7 @@ Exception raised when trying to access a neuron whose index is larger than the n
The documentation for this class was generated from the following file: -
Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
diff --git a/doc/html/classneuralpp_1_1NeuralNet-members.html b/doc/html/classneuralpp_1_1NeuralNet-members.html index 2c506ff..3cbdf2e 100644 --- a/doc/html/classneuralpp_1_1NeuralNet-members.html +++ b/doc/html/classneuralpp_1_1NeuralNet-members.html @@ -26,13 +26,12 @@

neuralpp::NeuralNet Member List

This is the complete list of members for neuralpp::NeuralNet, including all inherited members.

- - + - + @@ -40,7 +39,8 @@ - + + @@ -48,19 +48,19 @@ - - + + + - - +
actv_fneuralpp::NeuralNet [private]
closeXML(std::string &xml)neuralpp::NeuralNet [static]
commitChanges(Layer &l)neuralpp::NeuralNet [private]
epochsneuralpp::NeuralNet [private]
error(double ex)neuralpp::NeuralNet [private]
expectneuralpp::NeuralNet [private]
expected() const neuralpp::NeuralNet
expected() const neuralpp::NeuralNet [private]
file enum valueneuralpp::NeuralNet
getExpected() const neuralpp::NeuralNet
getExpected() const neuralpp::NeuralNet [private]
getOutput() const neuralpp::NeuralNet
getOutputs()neuralpp::NeuralNet
getThreshold() const neuralpp::NeuralNet
initXML(std::string &xml)neuralpp::NeuralNet [static]
inputneuralpp::NeuralNet
l_rateneuralpp::NeuralNet [private]
link()neuralpp::NeuralNet
link()neuralpp::NeuralNet [private]
loadFromBinary(const std::string fname)neuralpp::NeuralNet
NeuralNet()neuralpp::NeuralNet [inline]
NeuralNet(size_t in_size, size_t hidden_size, size_t out_size, double l, int e, double th=0.0, double(*a)(double)=__actv)neuralpp::NeuralNet
NeuralNet(const std::string file)neuralpp::NeuralNet
propagate()neuralpp::NeuralNet
ref_epochsneuralpp::NeuralNet [private]
save(const char *fname)neuralpp::NeuralNet
setExpected(double ex)neuralpp::NeuralNet
setExpected(std::vector< double > ex)neuralpp::NeuralNet
saveToBinary(const char *fname)neuralpp::NeuralNet
setExpected(double ex)neuralpp::NeuralNet [private]
setExpected(std::vector< double > ex)neuralpp::NeuralNet [private]
setInput(std::vector< double > v)neuralpp::NeuralNet
source enum nameneuralpp::NeuralNet
split(char delim, std::string str)neuralpp::NeuralNet [static]
str enum valueneuralpp::NeuralNet
thresholdneuralpp::NeuralNet [private]
train(std::string xml, source src)neuralpp::NeuralNet
update()neuralpp::NeuralNet
update()neuralpp::NeuralNet [private]
updateWeights()neuralpp::NeuralNet [private]
XMLFromSet(int &id, std::string set)neuralpp::NeuralNet [static]
-


Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
diff --git a/doc/html/classneuralpp_1_1NeuralNet.html b/doc/html/classneuralpp_1_1NeuralNet.html index b2b2f79..bf6588e 100644 --- a/doc/html/classneuralpp_1_1NeuralNet.html +++ b/doc/html/classneuralpp_1_1NeuralNet.html @@ -53,39 +53,27 @@ double getOutput () const  It gets the output of the network (note: the layer output should contain an only neuron).
-double getThreshold () const - - Get the threshold of the neurons in the network.
std::vector< double > getOutputs ()  It gets the output of the network in case the output layer contains more neurons.
-double expected () const +double getThreshold () const - Get the expected value (in case you have an only neuron in output layer).
-std::vector< double > getExpected () const - - Get the expected value (in case you have an only neuron in output layer).
-void setExpected (double ex) - - It sets the value you expect from your network (in case the network has an only neuron in its output layer).
-void setExpected (std::vector< double > ex) - - Set the values you expect from your network.
-void update () - - It updates through back-propagation the weights of the synapsis and computes again the output value for epochs times, calling back updateWeights and commitChanges functions.
+ Get the threshold of the neurons in the network.
void propagate ()  It propagates values through the network.
void setInput (std::vector< double > v)  It sets the input for the network.
-void link () - - It links the layers of the network (input, hidden, output).
void save (const char *fname) throw (NetworkFileWriteException)  Save a trained neural network to a binary file.
+void loadFromBinary (const std::string fname) throw (NetworkFileNotFoundException) + + DEPRECATED.
+void saveToBinary (const char *fname) throw (NetworkFileWriteException) + + DEPRECATED.
void train (std::string xml, source src) throw (InvalidXMLException)  Train a network using a training set loaded from an XML file.
@@ -93,9 +81,6 @@ static void initXML (std::string &xml)  Initialize the training XML for the neural network.
-static std::vector< double > split (char delim, std::string str) - - Splits a string into a vector of doubles, given a delimitator.
static std::string XMLFromSet (int &id, std::string set)  Get a training set from a string and copy it to an XML string. For example, these strings could be training sets for making sums: "2,3;5" - "5,6;11" - "2,2;4" - "4,5;9". Called on the first string, this method will return an XML fragment such as: '<training id="0"><input id="0">2</input><input id="1">3</input><output id="0">5</output></training>'.
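A minimal sketch of this training-XML workflow, mirroring the adderFromString.cpp example further below (the numeric training sets here are illustrative only):

#include <neural++.hpp>

using namespace neuralpp;

int main() {
        // 2 input neurons, 2 hidden neurons, 1 output neuron,
        // learning rate 0.002, 1000 training epochs
        NeuralNet net(2, 2, 1, 0.002, 1000);
        std::string xml;
        int id = 0;

        NeuralNet::initXML(xml);                      // open the training XML
        xml += NeuralNet::XMLFromSet(id, "2,3;5");    // inputs 2,3 -> expected output 5
        xml += NeuralNet::XMLFromSet(id, "5,6;11");
        NeuralNet::closeXML(xml);                     // close the training XML

        net.train(xml, NeuralNet::str);               // train from the in-memory string
        return 0;
}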
@@ -113,12 +98,27 @@ void updateWeights ()  It updates the weights of the net's synapsis through back-propagation.
-void commitChanges (Layer &l) - - It commits the changes made by updateWeights() to the layer l.
double error (double ex)  Get the error made on the expected result as squared deviance.
+double expected () const + + Get the expected value (in case you have an only neuron in output layer).
+std::vector< double > getExpected () const + + Get the expected value (in case you have an only neuron in output layer).
+void setExpected (double ex) + + It sets the value you expect from your network (in case the network has an only neuron in its output layer).
+void setExpected (std::vector< double > ex) + + Set the values you expect from your network.
+void update () + + It updates through back-propagation the weights of the synapsis and computes again the output value for epochs times, calling back updateWeights and commitChanges functions.
+void link () + + It links the layers of the network (input, hidden, output).

Private Attributes

int epochs @@ -140,7 +140,7 @@ Main project's class. Use *ONLY* this class, unless you know what you're doing
Examples:

-examples/adderFromScratch.cpp, examples/doAdd.cpp, and examples/learnAdd.cpp.


Member Enumeration Documentation

+examples/adderFromString.cpp, examples/doAdd.cpp, examples/learnAdd.cpp, and examples/networkForSumsAndSubtractions.cpp.

Member Enumeration Documentation

@@ -312,33 +312,6 @@ Constructor. It updates the weights of the net's synapsis through back-propagation.

In-class use only -

-

- -

-
- - - - - - - - - -
void neuralpp::NeuralNet::commitChanges (Layer l  )  [private]
-
-
- -

-It commits the changes made by updateWeights() to the layer l. -

-In-class use only

Parameters:
- - -
l Layer to commit the changes
-
-

@@ -367,74 +340,6 @@ Get the error made on the expected result as squared deviance.

Returns:
Mean error
- -

- -

-
- - - - - - - - -
double neuralpp::NeuralNet::getOutput (  )  const
-
-
- -

-It gets the output of the network (note: the layer output should contain an only neuron). -

-

Returns:
The output value of the network
-
Examples:
-examples/doAdd.cpp.
-
-

- -

-
- - - - - - - - -
double neuralpp::NeuralNet::getThreshold (  )  const
-
-
- -

-Get the threshold of the neurons in the network. -

-

Returns:
The threshold of the neurons
- -
-

- -

-
- - - - - - - - -
std::vector<double> neuralpp::NeuralNet::getOutputs (  ) 
-
-
- -

-It gets the output of the network in case the output layer contains more neurons. -

-

Returns:
A vector containing the output values of the network
-
Examples:
-examples/adderFromScratch.cpp.

@@ -446,7 +351,7 @@ It gets the output of the network in case the output layer contains more neurons (  )  - const + const [private] @@ -468,7 +373,7 @@ Of course you should specify this when you build your network by using setExpect (  )  - const + const [private] @@ -491,7 +396,7 @@ Of course you should specify this when you build your network by using setExpect double  ex  )  - + [private] @@ -518,7 +423,7 @@ It sets the value you expect from your network (in case the network has an only std::vector< double >  ex  )  - + [private] @@ -544,7 +449,7 @@ Set the values you expect from your network. (  )  - + [private] @@ -554,6 +459,95 @@ Set the values you expect from your network. It updates through back-propagation the weights of the synapsis and computes again the output value for epochs times, calling back updateWeights and commitChanges functions.

+ +

+ +

+
+ + + + + + + + +
void neuralpp::NeuralNet::link (  )  [private]
+
+
+ +

+It links the layers of the network (input, hidden, output). +

+ +

+

+ +

+
+ + + + + + + + +
double neuralpp::NeuralNet::getOutput (  )  const
+
+
+ +

+It gets the output of the network (note: the output layer should contain only one neuron).

+

Returns:
The output value of the network
+
Examples:
+examples/adderFromString.cpp, and examples/doAdd.cpp.
+
+

+ +

+
+ + + + + + + + +
std::vector<double> neuralpp::NeuralNet::getOutputs (  ) 
+
+
+ +

+It gets the output of the network in case the output layer contains more neurons. +

+

Returns:
A vector containing the output values of the network
+
Examples:
+examples/networkForSumsAndSubtractions.cpp.
+
+

+ +

+
+ + + + + + + + +
double neuralpp::NeuralNet::getThreshold (  )  const
+
+
+ +

+Get the threshold of the neurons in the network. +

+

Returns:
The threshold of the neurons
+

@@ -575,7 +569,7 @@ It updates through back-propagation the weights of the synapsis and computes aga It propagates values through the network.

Use this when you want to give an already trained network some new values and get the corresponding output

Examples:
-examples/adderFromScratch.cpp, and examples/doAdd.cpp.
+examples/adderFromString.cpp, examples/doAdd.cpp, and examples/networkForSumsAndSubtractions.cpp.

@@ -603,28 +597,7 @@ It sets the input for the network.

Examples:
-examples/adderFromScratch.cpp, and examples/doAdd.cpp.
- -

- -

-
- - - - - - - - -
void neuralpp::NeuralNet::link (  ) 
-
-
- -

-It links the layers of the network (input, hidden, output). -

-Don't use unless you exactly know what you're doing, it is already called by the constructor +examples/adderFromString.cpp, examples/doAdd.cpp, and examples/networkForSumsAndSubtractions.cpp.

@@ -658,6 +631,70 @@ Save a trained neural network to a binary file.

Examples:
examples/learnAdd.cpp.
+ +

+ +

+
+ + + + + + + + + +
void neuralpp::NeuralNet::loadFromBinary (const std::string  fname  )  throw (NetworkFileNotFoundException)
+
+
+ +

+DEPRECATED. +

+Load a trained neural network from a binary file. This function is deprecated and kept only for backward compatibility. Use the XML format instead to load and save neural networks, i.e. the NeuralNet(const std::string) constructor and the save(const char*) method, respectively.

Parameters:
+ + +
fname Name of the file to be loaded
+
+
Exceptions:
+ + +
NetworkFileNotFoundException When you're trying to load an invalid network file
+
+ +
+

+ +

+
+ + + + + + + + + +
void neuralpp::NeuralNet::saveToBinary (const char *  fname  )  throw (NetworkFileWriteException)
+
+
+ +

+DEPRECATED. +

+Save a trained neural network to a binary file. This function is deprecated and kept only for backward compatibility. Use the XML format instead to load and save neural networks, i.e. the NeuralNet(const std::string) constructor and the save(const char*) method, respectively.

Parameters:
+ + +
fname Name of the file to be saved with the network information
+
+
Exceptions:
+ + +
NetworkFileWriteException When you try to write the network information to an invalid file
+
+
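Since both binary helpers above are deprecated, here is a brief sketch of the recommended save/reload cycle using the save() method and the NeuralNet(const std::string) constructor documented on this page (the file name is illustrative; the catch blocks match the documented exceptions):

#include <neural++.hpp>

using namespace neuralpp;

int main() {
        NeuralNet net(2, 2, 1, 0.002, 1000);
        // ... train the network here ...

        try {
                net.save("network.xml");           // XML-based save, replaces saveToBinary()
                NeuralNet reloaded("network.xml"); // XML-based load, replaces loadFromBinary()
        } catch (NetworkFileWriteException& e) {
                // the network file could not be written
        } catch (NetworkFileNotFoundException& e) {
                // the network file could not be read back
        }
        return 0;
}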

@@ -700,7 +737,7 @@ A sample XML file is available in examples/adder.xml

Paramete
Examples:
-examples/adderFromScratch.cpp, and examples/learnAdd.cpp.
+examples/adderFromString.cpp, examples/learnAdd.cpp, and examples/networkForSumsAndSubtractions.cpp.
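A minimal sketch of the file-based training variant, assuming (as in the learnAdd.cpp example) that the first argument names the XML file when the source is NeuralNet::file, and that a training file such as examples/adder.xml is available in the working directory:

#include <neural++.hpp>

using namespace neuralpp;

int main() {
        NeuralNet net(2, 2, 1, 0.002, 1000);

        try {
                net.train("adder.xml", NeuralNet::file);  // load the training set from an XML file
        } catch (InvalidXMLException& e) {
                // e.what() describes why the XML could not be parsed
        }
        return 0;
}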

@@ -728,44 +765,6 @@ Initialize the training XML for the neural network. - -

- -

-
- - - - - - - - - - - - - - - - - - -
static std::vector<double> neuralpp::NeuralNet::split (char  delim,
std::string  str 
) [static]
-
-
- -

-Splits a string into a vector of doubles, given a delimitator. -

-

Parameters:
- - - -
delim Delimitator
str String to be splitted
-
-
Returns:
Vector of doubles containing splitted values
-

@@ -974,7 +973,7 @@ Private pointer to function, containing the function to be used as activation fu


The documentation for this class was generated from the following file: -
Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
diff --git a/doc/html/classneuralpp_1_1Neuron-members.html b/doc/html/classneuralpp_1_1Neuron-members.html index 87dfe46..3978af9 100644 --- a/doc/html/classneuralpp_1_1Neuron-members.html +++ b/doc/html/classneuralpp_1_1Neuron-members.html @@ -40,12 +40,14 @@ push_out(Synapsis s)neuralpp::Neuron setActv(double a)neuralpp::Neuron setProp(double p)neuralpp::Neuron + setSynIn(size_t n)neuralpp::Neuron + setSynOut(size_t n)neuralpp::Neuron synClear()neuralpp::Neuron synIn(size_t i)neuralpp::Neuron synOut(size_t i)neuralpp::Neuron thresholdneuralpp::Neuron [private] -
Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
diff --git a/doc/html/classneuralpp_1_1Neuron.html b/doc/html/classneuralpp_1_1Neuron.html index f7d733f..0bd9b93 100644 --- a/doc/html/classneuralpp_1_1Neuron.html +++ b/doc/html/classneuralpp_1_1Neuron.html @@ -59,6 +59,10 @@ void setProp (double p)  Change the propagation value of the neuron.
+void setSynIn (size_t n) + +void setSynOut (size_t n) + double getActv ()  Get the activation value of the neuron.
@@ -346,6 +350,46 @@ Change the propagation value of the neuron. + +

+ +

+
+ + + + + + + + + +
void neuralpp::Neuron::setSynIn (size_t  n  ) 
+
+
+ +

+ +

+

+ +

+
+ + + + + + + + + +
void neuralpp::Neuron::setSynOut (size_t  n  ) 
+
+
+ +

+

@@ -572,7 +616,7 @@ Remove input and output synapsis from a neuron.


The documentation for this class was generated from the following file: -
Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
diff --git a/doc/html/classneuralpp_1_1Synapsis-members.html b/doc/html/classneuralpp_1_1Synapsis-members.html index 95d5289..28ced0f 100644 --- a/doc/html/classneuralpp_1_1Synapsis-members.html +++ b/doc/html/classneuralpp_1_1Synapsis-members.html @@ -37,12 +37,12 @@ prev_deltaneuralpp::Synapsis [private] setDelta(double d)neuralpp::Synapsis setWeight(double w)neuralpp::Synapsis - Synapsis(Neuron *i, Neuron *o, double w, double d)neuralpp::Synapsis + Synapsis()neuralpp::Synapsis [inline] Synapsis(Neuron *i, Neuron *o, double(*a)(double))neuralpp::Synapsis Synapsis(Neuron *i, Neuron *o, double w, double(*a)(double))neuralpp::Synapsis weightneuralpp::Synapsis [private] -
Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
diff --git a/doc/html/classneuralpp_1_1Synapsis.html b/doc/html/classneuralpp_1_1Synapsis.html index 98cb71e..672353c 100644 --- a/doc/html/classneuralpp_1_1Synapsis.html +++ b/doc/html/classneuralpp_1_1Synapsis.html @@ -35,9 +35,9 @@ List of all members. - + - + @@ -84,54 +84,24 @@ Class for managing synapsis.

Don't use this class directly unless you know what you're doing, use NeuralNet instead


Constructor & Destructor Documentation

- +

Public Member Functions

 Synapsis (Neuron *i, Neuron *o, double w, double d)
 Synapsis ()
 Constructor.
 Empty constructor (it does nothing).
 Synapsis (Neuron *i, Neuron *o, double(*a)(double))
 Constructor.
- - - - - - - - - - - - - - - - - - - - - - - - - + + +
neuralpp::Synapsis::Synapsis (Neuron i,
Neuron o,
double  w,
double  d 
)  )  [inline]

-Constructor. +Empty constructor (it does nothing).

-

Parameters:
- - - - - -
i Input neuron
o Output neuron
w Weight for the synapsis
d Delta for the synapsis
-

@@ -523,7 +493,7 @@ This value is inversely proportional to the number of steps done in the learning


The documentation for this class was generated from the following file: -
Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
diff --git a/doc/html/examples.html b/doc/html/examples.html index f8793be..d7e3b91 100644 --- a/doc/html/examples.html +++ b/doc/html/examples.html @@ -18,12 +18,13 @@ -
Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
diff --git a/doc/html/examples_2adderFromScratch_8cpp-example.html b/doc/html/examples_2adderFromScratch_8cpp-example.html index 50ee45a..3ab5250 100644 --- a/doc/html/examples_2adderFromScratch_8cpp-example.html +++ b/doc/html/examples_2adderFromScratch_8cpp-example.html @@ -59,7 +59,7 @@ } -
Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
Generated on Thu Sep 3 17:01:39 2009 for Neural++ by  doxygen 1.5.6
diff --git a/doc/html/examples_2adderFromString_8cpp-example.html b/doc/html/examples_2adderFromString_8cpp-example.html new file mode 100644 index 0000000..3db3b8d --- /dev/null +++ b/doc/html/examples_2adderFromString_8cpp-example.html @@ -0,0 +1,67 @@ + + +Neural++: examples/adderFromString.cpp + + + + + +
+

examples/adderFromString.cpp

Similar to learnAdd.cpp, but this time the training XML is generated as a string rather than saved to a file, and is parsed by the program itself to train the network. The program then asks for two real numbers and prints their sum as computed by the trained network. (For a network with more than one output neuron, computing both the sum and the difference of its inputs, see networkForSumsAndSubtractions.cpp; note that using more than one neuron in the output layer is generally discouraged, as the network usually won't tune the synaptical weights well enough to give accurate answers for all of the operations.)

+

+#include <iostream>
+#include <neural++.hpp>
+
+using namespace std;
+using namespace neuralpp;
+
+int main()  {
+        NeuralNet net(2, 2, 1, 0.002, 1000);
+        string xml;
+        double tmp;
+        int id = 0;
+
+        // XML initialization. Then we tell the XML that 3+2=5, 6+3=9, 2+3=5 and 4+4=8
+        // The strings' format is "input1,input2,...,inputn;output1,output2,...,outputm"
+        NeuralNet::initXML(xml);
+        xml += NeuralNet::XMLFromSet(id, "3,2;5");
+        xml += NeuralNet::XMLFromSet(id, "6,3;9");
+        xml += NeuralNet::XMLFromSet(id, "2,3;5");
+        xml += NeuralNet::XMLFromSet(id, "4,4;8");
+        NeuralNet::closeXML(xml);
+        cout << xml << endl;
+        
+        net.train(xml, NeuralNet::str);
+        vector<double> v;
+        cout << "Network status: trained\n\n";
+
+        cout << "First number to add: ";
+        cin >> tmp;
+        v.push_back(tmp);
+
+        cout << "Second number to add: ";
+        cin >> tmp;
+        v.push_back(tmp);
+
+        net.setInput(v);
+        net.propagate();
+        cout << "Output: " << net.getOutput() << endl;
+        return 0;
+}
+
+
+
Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  + +doxygen 1.5.6
+ + diff --git a/doc/html/examples_2doAdd_8cpp-example.html b/doc/html/examples_2doAdd_8cpp-example.html index 4bd7c74..fe76f55 100644 --- a/doc/html/examples_2doAdd_8cpp-example.html +++ b/doc/html/examples_2doAdd_8cpp-example.html @@ -25,7 +25,7 @@ using namespace std; using namespace neuralpp; -#define NETFILE "adder.net" +#define NETFILE "network.xml" int main() { double a,b; @@ -61,7 +61,7 @@ } -
Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
diff --git a/doc/html/examples_2learnAdd_8cpp-example.html b/doc/html/examples_2learnAdd_8cpp-example.html index 42a06c9..a4126f1 100644 --- a/doc/html/examples_2learnAdd_8cpp-example.html +++ b/doc/html/examples_2learnAdd_8cpp-example.html @@ -36,10 +36,10 @@ // => 2 neurons for the input layer // => 2 neurons for the hidden layer // => 1 neuron for the output layer - // => a learning rate == 0.005 (just get it doing some tests until satisfied) + // => a learning rate == 0.002 (just get it doing some tests until satisfied, but remember to keep its value quite low and ~ 0 to keep the network stable) // => 1000 learning steps (i.e. the network will be ready after 1000 training steps to adjust the synaptical weights // => 0.1 as neural threshold (the threshold above which a neuron activates) - NeuralNet net(2, 2, 1, 0.005, 1000, 0.1); + NeuralNet net(2, 2, 1, 0.002, 2000); // Initialize a training XML as a string in 'xml' NeuralNet::initXML(xml); @@ -56,6 +56,12 @@ xml += NeuralNet::XMLFromSet(id, "-1,-2;-3"); xml += NeuralNet::XMLFromSet(id, "8,9;17"); xml += NeuralNet::XMLFromSet(id, "10,10;20"); + xml += NeuralNet::XMLFromSet(id, "4,1;5"); + xml += NeuralNet::XMLFromSet(id, "2,6;8"); + xml += NeuralNet::XMLFromSet(id, "2,7;9"); + xml += NeuralNet::XMLFromSet(id, "8,9;17"); + xml += NeuralNet::XMLFromSet(id, "4,7;11"); + xml += NeuralNet::XMLFromSet(id, "5,2;7"); NeuralNet::closeXML(xml); // Save the XML string just created to a file @@ -72,13 +78,13 @@ // Save the trained network to a binary file, that can be reloaded from any // application that is going to use that network - net.save("adder.net"); + net.save("network.xml"); cout << "Network trained in " << (t2-t1) << " seconds. You can use adder.net file now to load this network\n"; return 0; } -
Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
diff --git a/doc/html/examples_2networkForSumsAndSubtractions_8cpp-example.html b/doc/html/examples_2networkForSumsAndSubtractions_8cpp-example.html new file mode 100644 index 0000000..b6db3a9 --- /dev/null +++ b/doc/html/examples_2networkForSumsAndSubtractions_8cpp-example.html @@ -0,0 +1,66 @@ + + +Neural++: examples/networkForSumsAndSubtractions.cpp + + + + + +
+

examples/networkForSumsAndSubtractions.cpp

This program creates a neural network from scratch. Its purpose is to take two numbers and learn to compute both their sum and their difference (so the network provides two output values). The training set is auto-generated as an XML string, and then the network is trained on it.

+

+#include <iostream>
+#include <neural++.hpp>
+
+using namespace std;
+using namespace neuralpp;
+
+int main()  {
+        NeuralNet net(2, 2, 2, 0.002, 1000);
+        string xml;
+        double tmp;
+        int id = 0;
+
+        // XML initialization. Then we tell the XML that 3+2=5, 3-2=1; 4+2=6, 4-2=2; 6+3=9, 6-3=3
+        // The strings' format is "input1,input2,...,inputn;output1,output2,...,outputm"
+        NeuralNet::initXML(xml);
+        xml += NeuralNet::XMLFromSet(id, "3,2;5,1");
+        xml += NeuralNet::XMLFromSet(id, "4,2;6,2");
+        xml += NeuralNet::XMLFromSet(id, "6,3;9,3");
+        NeuralNet::closeXML(xml);
+        cout << xml << endl;
+        
+        net.train(xml, NeuralNet::str);
+        vector<double> v;
+        cout << "Network status: trained\n\n";
+
+        cout << "First number: ";
+        cin >> tmp;
+        v.push_back(tmp);
+
+        cout << "Second number: ";
+        cin >> tmp;
+        v.push_back(tmp);
+
+        net.setInput(v);
+        net.propagate();
+        cout << "Output: " << net.getOutputs()[0] << "; " << net.getOutputs()[1] << endl;
+        return 0;
+}
+
+
+
Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  + +doxygen 1.5.6
+ + diff --git a/doc/html/examples_2networkForSumsAndSubtrations_8cpp-example.html b/doc/html/examples_2networkForSumsAndSubtrations_8cpp-example.html new file mode 100644 index 0000000..b49e1f8 --- /dev/null +++ b/doc/html/examples_2networkForSumsAndSubtrations_8cpp-example.html @@ -0,0 +1,26 @@ + + +Neural++: examples/networkForSumsAndSubtrations.cpp + + + + + +
+

examples/networkForSumsAndSubtrations.cpp

This program creates a neural network from scratch. Its purpose is to take two numbers and learn to compute both their sum and their difference (so the network provides two output values). The training set is auto-generated as an XML string, and then the network is trained on it.

+

+
Generated on Fri Sep 4 11:24:50 2009 for Neural++ by  + +doxygen 1.5.6
+ + diff --git a/doc/html/files.html b/doc/html/files.html index 47d4825..0244c97 100644 --- a/doc/html/files.html +++ b/doc/html/files.html @@ -29,7 +29,7 @@ neural++_exception.hpp [code] -
Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
diff --git a/doc/html/functions.html b/doc/html/functions.html index 560d017..6a7bff9 100644 --- a/doc/html/functions.html +++ b/doc/html/functions.html @@ -94,7 +94,7 @@ Here is a list of all class members with links to the classes they belong to: : CMarkup -
Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
diff --git a/doc/html/functions_0x63.html b/doc/html/functions_0x63.html index 97f2db7..b3ccd40 100644 --- a/doc/html/functions_0x63.html +++ b/doc/html/functions_0x63.html @@ -68,8 +68,6 @@ Here is a list of all class members with links to the classes they belong to: : neuralpp::NeuralNet
  • CMarkup() : CMarkup -
  • commitChanges() -: neuralpp::NeuralNet
  • ContentLen() : CMarkup::ElemPos
  • ConvertEncoding() @@ -78,7 +76,7 @@ Here is a list of all class members with links to the classes they belong to: : CMarkup::NodeStack -
    Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
    Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
    diff --git a/doc/html/functions_0x64.html b/doc/html/functions_0x64.html index bd78e0a..c1411e8 100644 --- a/doc/html/functions_0x64.html +++ b/doc/html/functions_0x64.html @@ -71,7 +71,7 @@ Here is a list of all class members with links to the classes they belong to: : CMarkup -
    Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
    Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
    diff --git a/doc/html/functions_0x65.html b/doc/html/functions_0x65.html index f733c8f..9ebaa1a 100644 --- a/doc/html/functions_0x65.html +++ b/doc/html/functions_0x65.html @@ -72,8 +72,9 @@ Here is a list of all class members with links to the classes they belong to:
  • epochs : neuralpp::netrecord , neuralpp::NeuralNet -
  • error() -: neuralpp::NeuralNet +
  • error +: neuralpp::InvalidXMLException +, neuralpp::NeuralNet
  • EscapeText() : CMarkup
  • ex @@ -84,7 +85,7 @@ Here is a list of all class members with links to the classes they belong to: : neuralpp::NeuralNet -
    Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
    Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
    diff --git a/doc/html/functions_0x66.html b/doc/html/functions_0x66.html index c8f46db..97d5e27 100644 --- a/doc/html/functions_0x66.html +++ b/doc/html/functions_0x66.html @@ -73,7 +73,7 @@ Here is a list of all class members with links to the classes they belong to: : CMarkup::FilePos -
    Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
    Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
    diff --git a/doc/html/functions_0x67.html b/doc/html/functions_0x67.html index f915335..69a2576 100644 --- a/doc/html/functions_0x67.html +++ b/doc/html/functions_0x67.html @@ -115,7 +115,7 @@ Here is a list of all class members with links to the classes they belong to: : neuralpp::Synapsis -
    Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
    Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
    diff --git a/doc/html/functions_0x68.html b/doc/html/functions_0x68.html index b90465b..e73513b 100644 --- a/doc/html/functions_0x68.html +++ b/doc/html/functions_0x68.html @@ -65,7 +65,7 @@ Here is a list of all class members with links to the classes they belong to: : neuralpp::netrecord -
    Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
    Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
    diff --git a/doc/html/functions_0x69.html b/doc/html/functions_0x69.html index dcfc975..17b3d25 100644 --- a/doc/html/functions_0x69.html +++ b/doc/html/functions_0x69.html @@ -93,7 +93,7 @@ Here is a list of all class members with links to the classes they belong to:
  • InvalidSynapticalWeightException() : neuralpp::InvalidSynapticalWeightException
  • InvalidXMLException() -: neuralpp::InvalidXMLException +: neuralpp::InvalidXMLException
  • iPos : CMarkup::SavedPos
  • IsEmptyElement() @@ -102,7 +102,7 @@ Here is a list of all class members with links to the classes they belong to: : CMarkup -
    Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
    Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
    diff --git a/doc/html/functions_0x6c.html b/doc/html/functions_0x6c.html index 186aac3..eb594f0 100644 --- a/doc/html/functions_0x6c.html +++ b/doc/html/functions_0x6c.html @@ -69,13 +69,15 @@ Here is a list of all class members with links to the classes they belong to:
  • Level() : CMarkup::ElemPos
  • link() -: neuralpp::Layer -, neuralpp::NeuralNet +: neuralpp::NeuralNet +, neuralpp::Layer
  • Load() : CMarkup +
  • loadFromBinary() +: neuralpp::NeuralNet -
    Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
    Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
    diff --git a/doc/html/functions_0x6d.html b/doc/html/functions_0x6d.html index 901ed6a..547a2e9 100644 --- a/doc/html/functions_0x6d.html +++ b/doc/html/functions_0x6d.html @@ -171,7 +171,7 @@ Here is a list of all class members with links to the classes they belong to: : neuralpp::Synapsis -
    Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
    Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
    diff --git a/doc/html/functions_0x6e.html b/doc/html/functions_0x6e.html index 4db9890..097ece8 100644 --- a/doc/html/functions_0x6e.html +++ b/doc/html/functions_0x6e.html @@ -134,7 +134,7 @@ Here is a list of all class members with links to the classes they belong to: : CMarkup::NodeStack -
    Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
    Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
    diff --git a/doc/html/functions_0x6f.html b/doc/html/functions_0x6f.html index 1c8a17b..cd69088 100644 --- a/doc/html/functions_0x6f.html +++ b/doc/html/functions_0x6f.html @@ -77,7 +77,7 @@ Here is a list of all class members with links to the classes they belong to: : neuralpp::netrecord -
    Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
    Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
    diff --git a/doc/html/functions_0x70.html b/doc/html/functions_0x70.html index 84a5d96..606fce4 100644 --- a/doc/html/functions_0x70.html +++ b/doc/html/functions_0x70.html @@ -95,7 +95,7 @@ Here is a list of all class members with links to the classes they belong to: : neuralpp::Neuron -
    Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
    Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
    diff --git a/doc/html/functions_0x72.html b/doc/html/functions_0x72.html index 839fd40..7be19ab 100644 --- a/doc/html/functions_0x72.html +++ b/doc/html/functions_0x72.html @@ -86,7 +86,7 @@ Here is a list of all class members with links to the classes they belong to: : CMarkup -
    Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
    Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
    diff --git a/doc/html/functions_0x73.html b/doc/html/functions_0x73.html index d9d49c6..04417e6 100644 --- a/doc/html/functions_0x73.html +++ b/doc/html/functions_0x73.html @@ -59,10 +59,10 @@ Here is a list of all class members with links to the classes they belong to:

    - s -

    -
    Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
    Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
    diff --git a/doc/html/functions_0x74.html b/doc/html/functions_0x74.html index 7ee0df2..64716f2 100644 --- a/doc/html/functions_0x74.html +++ b/doc/html/functions_0x74.html @@ -73,7 +73,7 @@ Here is a list of all class members with links to the classes they belong to: : neuralpp::NeuralNet -
    Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
    Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
    diff --git a/doc/html/functions_0x75.html b/doc/html/functions_0x75.html index 2cdfba0..b4fc8f2 100644 --- a/doc/html/functions_0x75.html +++ b/doc/html/functions_0x75.html @@ -75,7 +75,7 @@ Here is a list of all class members with links to the classes they belong to: : CMarkup -
    Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
    Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
    diff --git a/doc/html/functions_0x77.html b/doc/html/functions_0x77.html index 359d56f..c2b0814 100644 --- a/doc/html/functions_0x77.html +++ b/doc/html/functions_0x77.html @@ -73,7 +73,7 @@ Here is a list of all class members with links to the classes they belong to: : CMarkup -
    Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
    Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
    diff --git a/doc/html/functions_0x78.html b/doc/html/functions_0x78.html index 1f2beee..c428baf 100644 --- a/doc/html/functions_0x78.html +++ b/doc/html/functions_0x78.html @@ -165,7 +165,7 @@ Here is a list of all class members with links to the classes they belong to: : neuralpp::NeuralNet -
    Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
    Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
    diff --git a/doc/html/functions_0x7e.html b/doc/html/functions_0x7e.html index 4f6c270..a5ad4d1 100644 --- a/doc/html/functions_0x7e.html +++ b/doc/html/functions_0x7e.html @@ -71,7 +71,7 @@ Here is a list of all class members with links to the classes they belong to: : CMarkup::SavedPosMapArray -
    Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
    Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
    diff --git a/doc/html/functions_enum.html b/doc/html/functions_enum.html index a0d861e..cc72a15 100644 --- a/doc/html/functions_enum.html +++ b/doc/html/functions_enum.html @@ -47,7 +47,7 @@ : neuralpp::NeuralNet -
    Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
    Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
    diff --git a/doc/html/functions_eval.html b/doc/html/functions_eval.html index cb3e3af..b9c447c 100644 --- a/doc/html/functions_eval.html +++ b/doc/html/functions_eval.html @@ -137,7 +137,7 @@ : neuralpp::NeuralNet -
    Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
    Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
    diff --git a/doc/html/functions_func.html b/doc/html/functions_func.html index d48e878..4a9e97f 100644 --- a/doc/html/functions_func.html +++ b/doc/html/functions_func.html @@ -84,7 +84,7 @@ : CMarkup -
    Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
    Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
    diff --git a/doc/html/functions_func_0x63.html b/doc/html/functions_func_0x63.html index 96f411e..35048b7 100644 --- a/doc/html/functions_func_0x63.html +++ b/doc/html/functions_func_0x63.html @@ -67,8 +67,6 @@ : neuralpp::NeuralNet
  • CMarkup() : CMarkup -
  • commitChanges() -: neuralpp::NeuralNet
  • ContentLen() : CMarkup::ElemPos
  • ConvertEncoding() @@ -77,7 +75,7 @@ : CMarkup::NodeStack -
    Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
    Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
    diff --git a/doc/html/functions_func_0x64.html b/doc/html/functions_func_0x64.html index 22d262d..42cc4a4 100644 --- a/doc/html/functions_func_0x64.html +++ b/doc/html/functions_func_0x64.html @@ -66,7 +66,7 @@ : CMarkup -
    Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
    Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
    diff --git a/doc/html/functions_func_0x65.html b/doc/html/functions_func_0x65.html index c12f282..3cd0666 100644 --- a/doc/html/functions_func_0x65.html +++ b/doc/html/functions_func_0x65.html @@ -74,7 +74,7 @@ : neuralpp::NeuralNet -
    Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
    Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
    diff --git a/doc/html/functions_func_0x66.html b/doc/html/functions_func_0x66.html index 4ee0407..7b6b2c4 100644 --- a/doc/html/functions_func_0x66.html +++ b/doc/html/functions_func_0x66.html @@ -68,7 +68,7 @@ : CMarkup -
    Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
    Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
    diff --git a/doc/html/functions_func_0x67.html b/doc/html/functions_func_0x67.html index 2fc538c..841f765 100644 --- a/doc/html/functions_func_0x67.html +++ b/doc/html/functions_func_0x67.html @@ -114,7 +114,7 @@ : neuralpp::Synapsis -
    Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
    Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
    diff --git a/doc/html/functions_func_0x69.html b/doc/html/functions_func_0x69.html index 5c00476..c5f911e 100644 --- a/doc/html/functions_func_0x69.html +++ b/doc/html/functions_func_0x69.html @@ -77,14 +77,14 @@
  • InvalidSynapticalWeightException() : neuralpp::InvalidSynapticalWeightException
  • InvalidXMLException() -: neuralpp::InvalidXMLException +: neuralpp::InvalidXMLException
  • IsEmptyElement() : CMarkup::ElemPos
  • IsWellFormed() : CMarkup -
    Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
    Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
    diff --git a/doc/html/functions_func_0x6c.html b/doc/html/functions_func_0x6c.html index 7190d8a..ce80b5d 100644 --- a/doc/html/functions_func_0x6c.html +++ b/doc/html/functions_func_0x6c.html @@ -69,9 +69,11 @@ , neuralpp::Layer
  • Load() : CMarkup +
  • loadFromBinary() +: neuralpp::NeuralNet -
    Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
    Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
    diff --git a/doc/html/functions_func_0x6d.html b/doc/html/functions_func_0x6d.html index 8691ead..e6277a9 100644 --- a/doc/html/functions_func_0x6d.html +++ b/doc/html/functions_func_0x6d.html @@ -66,7 +66,7 @@ : neuralpp::Synapsis -
    Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
    Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
    diff --git a/doc/html/functions_func_0x6e.html b/doc/html/functions_func_0x6e.html index cb10d03..ae17b3a 100644 --- a/doc/html/functions_func_0x6e.html +++ b/doc/html/functions_func_0x6e.html @@ -78,7 +78,7 @@ : neuralpp::Neuron -
    Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
    Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
    diff --git a/doc/html/functions_func_0x6f.html b/doc/html/functions_func_0x6f.html index 2c7ef99..630e485 100644 --- a/doc/html/functions_func_0x6f.html +++ b/doc/html/functions_func_0x6f.html @@ -69,7 +69,7 @@ : CMarkup -
    Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
    Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
    diff --git a/doc/html/functions_func_0x70.html b/doc/html/functions_func_0x70.html index 55da19b..4651c36 100644 --- a/doc/html/functions_func_0x70.html +++ b/doc/html/functions_func_0x70.html @@ -70,7 +70,7 @@ : neuralpp::Neuron -
    Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
    Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
    diff --git a/doc/html/functions_func_0x72.html b/doc/html/functions_func_0x72.html index c68c290..c6876fa 100644 --- a/doc/html/functions_func_0x72.html +++ b/doc/html/functions_func_0x72.html @@ -83,7 +83,7 @@ : CMarkup -
    Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
    Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
    diff --git a/doc/html/functions_func_0x73.html b/doc/html/functions_func_0x73.html index c270c56..6f563b8 100644 --- a/doc/html/functions_func_0x73.html +++ b/doc/html/functions_func_0x73.html @@ -58,10 +58,10 @@  

    - s -

    -
    Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
    Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
    diff --git a/doc/html/functions_func_0x74.html b/doc/html/functions_func_0x74.html index 39bfda9..0fb8c6c 100644 --- a/doc/html/functions_func_0x74.html +++ b/doc/html/functions_func_0x74.html @@ -68,7 +68,7 @@ : neuralpp::NeuralNet -
    Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
    Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
    diff --git a/doc/html/functions_func_0x75.html b/doc/html/functions_func_0x75.html index 81e36b4..054e175 100644 --- a/doc/html/functions_func_0x75.html +++ b/doc/html/functions_func_0x75.html @@ -72,7 +72,7 @@ : CMarkup -
    Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
    Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
    diff --git a/doc/html/functions_func_0x77.html b/doc/html/functions_func_0x77.html index 1dfeb64..b3fc992 100644 --- a/doc/html/functions_func_0x77.html +++ b/doc/html/functions_func_0x77.html @@ -68,7 +68,7 @@ : CMarkup -
    Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
    Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
    diff --git a/doc/html/functions_func_0x78.html b/doc/html/functions_func_0x78.html index 70093ab..4ab370d 100644 --- a/doc/html/functions_func_0x78.html +++ b/doc/html/functions_func_0x78.html @@ -164,7 +164,7 @@ : neuralpp::NeuralNet -
    Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
    Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
    diff --git a/doc/html/functions_func_0x7e.html b/doc/html/functions_func_0x7e.html index 8c9c166..b02ab96 100644 --- a/doc/html/functions_func_0x7e.html +++ b/doc/html/functions_func_0x7e.html @@ -70,7 +70,7 @@ : CMarkup::SavedPosMapArray -
    Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
    Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
    diff --git a/doc/html/functions_vars.html b/doc/html/functions_vars.html index 0f04d28..05277c3 100644 --- a/doc/html/functions_vars.html +++ b/doc/html/functions_vars.html @@ -77,6 +77,8 @@
  • epochs : neuralpp::NeuralNet , neuralpp::netrecord +
  • error +: neuralpp::InvalidXMLException
  • ex : neuralpp::netrecord
  • expect @@ -267,7 +269,7 @@ : neuralpp::Synapsis -
    Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
    Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
    diff --git a/doc/html/globals.html b/doc/html/globals.html index 9ab8579..60344a1 100644 --- a/doc/html/globals.html +++ b/doc/html/globals.html @@ -146,7 +146,7 @@ Here is a list of all file members with links to the files they belong to: : neural++.hpp -
    Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
    Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
    diff --git a/doc/html/globals_defs.html b/doc/html/globals_defs.html index 4d82616..f1875c7 100644 --- a/doc/html/globals_defs.html +++ b/doc/html/globals_defs.html @@ -146,7 +146,7 @@ : neural++.hpp -
    Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
    Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
    diff --git a/doc/html/index.html b/doc/html/index.html index 7a4de1e..e810f77 100644 --- a/doc/html/index.html +++ b/doc/html/index.html @@ -20,7 +20,7 @@

    Neural++ Documentation

    0.4

    -
    Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
    Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
    diff --git a/doc/html/namespacemembers.html b/doc/html/namespacemembers.html index 79480c6..d726a41 100644 --- a/doc/html/namespacemembers.html +++ b/doc/html/namespacemembers.html @@ -36,9 +36,17 @@ Here is a list of all namespace members with links to the namespace documentatio : neuralpp
  • df() : neuralpp +
  • split() +: neuralpp::neuralutils +
  • splitLines() +: neuralpp::neuralutils +
  • toLower() +: neuralpp::neuralutils +
  • toUpper() +: neuralpp::neuralutils -
    Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
    Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
    diff --git a/doc/html/namespacemembers_func.html b/doc/html/namespacemembers_func.html index d4e9d6a..8d80006 100644 --- a/doc/html/namespacemembers_func.html +++ b/doc/html/namespacemembers_func.html @@ -36,9 +36,17 @@ : neuralpp
  • df() : neuralpp +
  • split() +: neuralpp::neuralutils +
  • splitLines() +: neuralpp::neuralutils +
  • toLower() +: neuralpp::neuralutils +
  • toUpper() +: neuralpp::neuralutils -
    Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
    Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
    diff --git a/doc/html/namespaceneuralpp.html b/doc/html/namespaceneuralpp.html index 1af6f2e..9f0d9ba 100644 --- a/doc/html/namespaceneuralpp.html +++ b/doc/html/namespaceneuralpp.html @@ -62,6 +62,9 @@ class  InvalidSynapticalWeightException  Exception raised when, while trying the network or directly, the weight of a synapsis is set to a value |w| > 1. More...
    +

    Namespaces

    +namespace  neuralutils +

    Functions

    double df (double(*f)(double), double x) @@ -120,7 +123,7 @@ Main namespace for the library.

    Function Documentation

    -


    Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
    Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
    diff --git a/doc/html/namespaceneuralpp_1_1neuralutils.html b/doc/html/namespaceneuralpp_1_1neuralutils.html new file mode 100644 index 0000000..12b2e2d --- /dev/null +++ b/doc/html/namespaceneuralpp_1_1neuralutils.html @@ -0,0 +1,172 @@ + + +Neural++: neuralpp::neuralutils Namespace Reference + + + + + +
    +

    neuralpp::neuralutils Namespace Reference

    +

    + + + + + + + + + + + + + + + +

    Functions

    std::vector< double > split (char delim, std::string str)
     Split a string into a vector of doubles, given a delimiter.
    std::vector< std::string > splitLines (std::string str)
     Split the lines of a string.
    void toLower (std::string &str)
     Convert the characters of a string to lower case.
    void toUpper (std::string &str)
     Convert the characters of a string to upper case.
    +


    Function Documentation

    + +
    +
    + + + + + + + + + + + + + + + + + + +
    std::vector<double> neuralpp::neuralutils::split (char  delim,
    std::string  str 
    )
    +
    +
    + +

+Split a string into a vector of doubles, given a delimiter. +

    +

    Parameters:
    + + + +
    delim Delimiter
    str String to be split
    +
    +
    Returns:
    Vector of doubles containing the split values
    + +
    +

    + +

    +
    + + + + + + + + + +
    std::vector<std::string> neuralpp::neuralutils::splitLines (std::string  str  ) 
    +
    +
    + +

    +Split the lines of a string. +

    +

    Parameters:
    + + +
str String to be split
    +
    +
    Returns:
    An array of strings containing the lines of the original string
    + +
    +

    + +

    +
    + + + + + + + + + +
    void neuralpp::neuralutils::toLower (std::string &  str  ) 
    +
    +
    + +

    +Convert the characters of a string to lower case. +

    +

    Parameters:
    + + +
    str String to be converted
    +
    + +
    +

    + +

    +
    + + + + + + + + + +
    void neuralpp::neuralutils::toUpper (std::string &  str  ) 
    +
    +
    + +

    +Convert the characters of a string to upper case. +

    +

    Parameters:
    + + +
    str String to be converted
    +
    + +
    +

    +

    +
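The four utility functions documented above are small and self-contained, so a brief usage sketch may help. The snippet below was written for this page only; the include path (neural++.hpp) and the expected results in the comments are assumptions based on the signatures and descriptions shown here, not code taken from the library's own examples.

#include <iostream>
#include <string>
#include <vector>
#include <neural++.hpp>   // assumed include path for the library header

int main() {
    // split(): break a delimited record into numeric values
    std::vector<double> values = neuralpp::neuralutils::split(',', "2.5,3.5,7");

    // splitLines(): break a multi-line string into one string per line
    std::vector<std::string> lines = neuralpp::neuralutils::splitLines("2,3;5\n3,4;7");

    // toLower()/toUpper(): in-place case conversion (note the non-const reference)
    std::string s = "Neural++";
    neuralpp::neuralutils::toLower(s);   // s should now be "neural++"
    neuralpp::neuralutils::toUpper(s);   // s should now be "NEURAL++"

    for (size_t i = 0; i < values.size(); i++)
        std::cout << values[i] << std::endl;   // expected: 2.5, 3.5, 7
    return 0;
}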
    Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  + +doxygen 1.5.6
    + + diff --git a/doc/html/namespaces.html b/doc/html/namespaces.html index 380d4cd..eb60e02 100644 --- a/doc/html/namespaces.html +++ b/doc/html/namespaces.html @@ -25,9 +25,10 @@

    Namespace List

    Here is a list of all namespaces with brief descriptions: +
neuralpp   Main namespace for the library
    neuralpp::neuralutils
    -
    Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
    Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
    diff --git a/doc/html/neural_09_09_8hpp-source.html b/doc/html/neural_09_09_8hpp-source.html index e657264..f674be4 100644 --- a/doc/html/neural_09_09_8hpp-source.html +++ b/doc/html/neural_09_09_8hpp-source.html @@ -37,207 +37,219 @@ 00020 00021 #include "neural++_exception.hpp" 00022 -00024 #define RAND (double) ( (rand() / (RAND_MAX/2)) - 1) +00023 #define RAND (double) ( (rand() / 10.0) / ((double) RAND_MAX) ) +00024 #define BETA0 0.8 00025 -00027 #define BETA0 1.0 -00028 -00033 namespace neuralpp { -00034 class Synapsis; -00035 class Neuron; -00036 class Layer; -00037 class NeuralNet; +00030 namespace neuralpp { +00031 class Synapsis; +00032 class Neuron; +00033 class Layer; +00034 class NeuralNet; +00035 +00036 double df (double (*f)(double), double x); +00037 double __actv(double prop); 00038 -00039 double df (double (*f)(double), double x); -00040 double __actv(double prop); -00041 -00064 class NeuralNet { -00065 int epochs; -00066 int ref_epochs; -00067 double l_rate; -00068 double threshold; -00069 std::vector<double> expect; -00070 -00075 void updateWeights(); -00076 -00082 void commitChanges (Layer& l); -00083 -00089 double error (double ex); -00090 -00095 double (*actv_f)(double); -00096 -00097 public: -00098 Layer* input; -00099 Layer* hidden; -00100 Layer* output; -00101 -00105 typedef enum { file, str } source; +00067 class NeuralNet { +00068 int epochs; +00069 int ref_epochs; +00070 double l_rate; +00071 double threshold; +00072 std::vector<double> expect; +00073 +00078 void updateWeights(); +00079 +00085 double error (double ex); +00086 +00091 double (*actv_f)(double); +00092 +00098 double expected() const; +00099 +00105 std::vector<double> getExpected() const; 00106 -00110 NeuralNet() {} -00111 -00125 NeuralNet (size_t in_size, size_t hidden_size, size_t out_size, double l, -00126 int e, double th = 0.0, double (*a)(double) = __actv); -00127 -00133 NeuralNet (const std::string file) throw(NetworkFileNotFoundException); -00134 -00140 double getOutput() const; -00141 -00146 double getThreshold() const; -00147 -00152 std::vector<double> getOutputs(); -00153 -00159 double expected() const; -00160 -00166 std::vector<double> getExpected() const; -00167 -00172 void setExpected(double ex); -00173 -00178 void setExpected(std::vector<double> ex); -00179 -00185 void update(); -00186 -00191 void propagate(); -00192 -00197 void setInput (std::vector<double> v); -00198 -00203 void link(); -00204 -00211 void save (const char* fname) throw(NetworkFileWriteException); -00212 -00220 void train (std::string xml, source src) throw(InvalidXMLException); -00221 -00226 static void initXML (std::string& xml); -00227 -00234 static std::vector<double> split (char delim, std::string str); -00235 -00248 static std::string XMLFromSet (int& id, std::string set); -00249 -00254 static void closeXML(std::string& xml); -00255 }; -00256 -00262 class Synapsis { -00263 double delta; -00264 double prev_delta; -00265 double weight; -00266 -00267 Neuron *in; -00268 Neuron *out; -00269 -00270 double (*actv_f)(double); -00271 -00272 public: -00280 Synapsis(Neuron* i, Neuron* o, double w, double d); -00281 -00288 Synapsis (Neuron* i, Neuron* o, double(*a)(double)); -00289 -00297 Synapsis (Neuron* i, Neuron* o, -00298 double w, double(*a)(double)); -00299 -00303 Neuron* getIn() const; -00304 -00308 Neuron* getOut() const; -00309 -00314 void setWeight(double w) throw(InvalidSynapticalWeightException); -00315 -00321 void setDelta(double d) throw(InvalidSynapticalWeightException); -00322 -00327 double getWeight() 
const; -00328 -00333 double getDelta() const; -00334 -00339 double getPrevDelta() const; -00340 -00351 double momentum (int N, int x) const; -00352 }; -00353 -00359 class Neuron { -00360 double actv_val; -00361 double prop_val; -00362 double threshold; -00363 -00364 std::vector< Synapsis > in; -00365 std::vector< Synapsis > out; -00366 -00367 double (*actv_f)(double); -00368 -00369 public: -00376 Neuron (double (*a)(double), double th = 0.0); -00377 -00386 Neuron (std::vector<Synapsis> in, std::vector<Synapsis> out, -00387 double (*a)(double), double th = 0.0); -00388 -00394 Synapsis& synIn (size_t i); -00395 -00401 Synapsis& synOut (size_t i); -00402 -00407 void push_in (Synapsis s); -00408 -00413 void push_out (Synapsis s); -00414 -00419 void setActv (double a); -00420 -00425 void setProp (double p); -00426 -00431 double getActv(); -00432 -00437 double getProp(); -00438 -00442 void propagate(); -00443 -00448 size_t nIn(); -00449 -00454 size_t nOut(); -00455 -00459 void synClear(); -00460 }; -00461 -00467 class Layer { -00468 std::vector<Neuron> elements; -00469 double threshold; -00470 -00471 void (*update_weights)(); -00472 double (*actv_f)(double); -00473 -00474 public: -00482 Layer (size_t sz, double (*a)(double), double th = 0.0); +00111 void setExpected(double ex); +00112 +00117 void setExpected(std::vector<double> ex); +00118 +00124 void update(); +00125 +00129 void link(); +00130 +00131 public: +00132 Layer* input; +00133 Layer* hidden; +00134 Layer* output; +00135 +00139 typedef enum { file, str } source; +00140 +00144 NeuralNet() {} +00145 +00159 NeuralNet (size_t in_size, size_t hidden_size, size_t out_size, double l, +00160 int e, double th = 0.0, double (*a)(double) = __actv); +00161 +00167 NeuralNet (const std::string file) throw(NetworkFileNotFoundException); +00168 +00174 double getOutput() const; +00175 +00180 std::vector<double> getOutputs(); +00181 +00186 double getThreshold() const; +00187 +00192 void propagate(); +00193 +00198 void setInput (std::vector<double> v); +00199 +00206 void save (const char* fname) throw(NetworkFileWriteException); +00207 +00218 void loadFromBinary (const std::string fname) throw(NetworkFileNotFoundException); +00219 +00230 void saveToBinary (const char* fname) throw(NetworkFileWriteException); +00231 +00239 void train (std::string xml, source src) throw(InvalidXMLException); +00240 +00245 static void initXML (std::string& xml); +00246 +00259 static std::string XMLFromSet (int& id, std::string set); +00260 +00265 static void closeXML(std::string& xml); +00266 }; +00267 +00273 class Synapsis { +00274 double delta; +00275 double prev_delta; +00276 double weight; +00277 +00278 Neuron *in; +00279 Neuron *out; +00280 +00281 double (*actv_f)(double); +00282 +00283 public: +00287 Synapsis() {} +00288 +00295 Synapsis (Neuron* i, Neuron* o, double(*a)(double)); +00296 +00304 Synapsis (Neuron* i, Neuron* o, +00305 double w, double(*a)(double)); +00306 +00310 Neuron* getIn() const; +00311 +00315 Neuron* getOut() const; +00316 +00321 void setWeight(double w) throw(InvalidSynapticalWeightException); +00322 +00328 void setDelta(double d) throw(InvalidSynapticalWeightException); +00329 +00334 double getWeight() const; +00335 +00340 double getDelta() const; +00341 +00346 double getPrevDelta() const; +00347 +00358 double momentum (int N, int x) const; +00359 }; +00360 +00366 class Neuron { +00367 double actv_val; +00368 double prop_val; +00369 double threshold; +00370 +00371 std::vector< Synapsis > in; +00372 std::vector< Synapsis > out; +00373 +00374 double 
(*actv_f)(double); +00375 +00376 public: +00383 Neuron (double (*a)(double), double th = 0.0); +00384 +00393 Neuron (std::vector<Synapsis> in, std::vector<Synapsis> out, +00394 double (*a)(double), double th = 0.0); +00395 +00401 Synapsis& synIn (size_t i); +00402 +00408 Synapsis& synOut (size_t i); +00409 +00414 void push_in (Synapsis s); +00415 +00420 void push_out (Synapsis s); +00421 +00426 void setActv (double a); +00427 +00432 void setProp (double p); +00433 +00434 void setSynIn (size_t n); +00435 void setSynOut (size_t n); +00436 +00441 double getActv(); +00442 +00447 double getProp(); +00448 +00452 void propagate(); +00453 +00458 size_t nIn(); +00459 +00464 size_t nOut(); +00465 +00469 void synClear(); +00470 }; +00471 +00477 class Layer { +00478 std::vector<Neuron> elements; +00479 double threshold; +00480 +00481 void (*update_weights)(); +00482 double (*actv_f)(double); 00483 -00492 Layer (std::vector<Neuron>& neurons, double(*a)(double), double th = 0.0); +00484 public: +00492 Layer (size_t sz, double (*a)(double), double th = 0.0); 00493 -00499 Neuron& operator[] (size_t i) throw(NetworkIndexOutOfBoundsException); -00500 -00505 void link (Layer& l); -00506 -00511 void setInput (std::vector<double> v); -00512 -00516 void propagate(); -00517 -00521 size_t size() const; -00522 }; -00523 -00524 struct netrecord { -00525 int input_size; -00526 int hidden_size; -00527 int output_size; -00528 -00529 int epochs; -00530 double l_rate; -00531 double ex; +00502 Layer (std::vector<Neuron>& neurons, double(*a)(double), double th = 0.0); +00503 +00509 Neuron& operator[] (size_t i) throw(NetworkIndexOutOfBoundsException); +00510 +00515 void link (Layer& l); +00516 +00521 void setInput (std::vector<double> v); +00522 +00526 void propagate(); +00527 +00531 size_t size() const; 00532 }; 00533 -00534 struct neuronrecord { -00535 double prop; -00536 double actv; -00537 }; +00534 struct netrecord { +00535 int input_size; +00536 int hidden_size; +00537 int output_size; 00538 -00539 struct synrecord { -00540 double w; -00541 double d; +00539 int epochs; +00540 double l_rate; +00541 double ex; 00542 }; -00543 } -00544 -00545 #endif -00546 +00543 +00544 struct neuronrecord { +00545 double prop; +00546 double actv; +00547 }; +00548 +00549 struct synrecord { +00550 double w; +00551 double d; +00552 }; +00553 +00554 namespace neuralutils { +00561 std::vector<double> split (char delim, std::string str); +00562 +00568 std::vector<std::string> splitLines (std::string str); +00569 +00574 void toLower (std::string& str); +00575 +00580 void toUpper (std::string& str); +00581 } +00582 } +00583 +00584 #endif +00585 -
    Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
    Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
    diff --git a/doc/html/neural_09_09_8hpp.html b/doc/html/neural_09_09_8hpp.html index 2b17f33..f7cc6fb 100644 --- a/doc/html/neural_09_09_8hpp.html +++ b/doc/html/neural_09_09_8hpp.html @@ -28,6 +28,8 @@

    Namespaces

    namespace  neuralpp +namespace  neuralpp::neuralutils +

    Classes

    class  neuralpp::NeuralNet @@ -48,17 +50,27 @@ struct  neuralpp::synrecord

    Defines

    -#define RAND   (double) ( (rand() / (RAND_MAX/2)) - 1) +#define RAND   (double) ( (rand() / 10.0) / ((double) RAND_MAX) ) - Default rand value: |sin(rand)|, always >= 0 and <= 1.
    -#define BETA0   1.0 +#define BETA0   0.8 - Initial value for the inertial momentum of the synapses.

    Functions

double neuralpp::df (double(*f)(double), double x) double neuralpp::__actv (double prop) +std::vector< double > neuralpp::neuralutils::split (char delim, std::string str) + + Split a string into a vector of doubles, given a delimiter.
    +std::vector< std::string > neuralpp::neuralutils::splitLines (std::string str) + + Split the lines of a string.
    +void neuralpp::neuralutils::toLower (std::string &str) + + Convert the characters of a string to lower case.
    +void neuralpp::neuralutils::toUpper (std::string &str) + + Convert the characters of a string to upper case.

    Define Documentation

    @@ -66,14 +78,12 @@
    - +
    #define BETA0   1.0 #define BETA0   0.8
    -

    -Initial value for the inertial momentum of the synapses.

    @@ -83,20 +93,18 @@ Initial value for the inertial momentum of the synapses.
    - +
    #define RAND   (double) ( (rand() / (RAND_MAX/2)) - 1) #define RAND   (double) ( (rand() / 10.0) / ((double) RAND_MAX) )
    -

    -Default rand value: |sin(rand)|, always >= 0 and <= 1.

    -
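As an illustration of what the new macro values evaluate to, here is a small standalone sketch; it simply copies the two #define lines from the header shown above and is not part of the library itself.

#include <cstdio>
#include <cstdlib>

// Copied from the new neural++.hpp listed above
#define RAND (double) ( (rand() / 10.0) / ((double) RAND_MAX) )
#define BETA0 0.8

int main() {
    srand(42);
    // rand() lies in [0, RAND_MAX], so (rand() / 10.0) / RAND_MAX lies in [0, 0.1]:
    // the new RAND therefore yields small, non-negative initial values.
    for (int i = 0; i < 5; i++)
        printf("RAND = %f (BETA0 = %.1f)\n", RAND, BETA0);
    return 0;
}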


    Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
    Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
    diff --git a/doc/html/neural_09_09__exception_8hpp-source.html b/doc/html/neural_09_09__exception_8hpp-source.html index 3818972..c33d865 100644 --- a/doc/html/neural_09_09__exception_8hpp-source.html +++ b/doc/html/neural_09_09__exception_8hpp-source.html @@ -31,44 +31,52 @@ 00014 #ifndef __NEURALPP_EXCEPTION 00015 #define __NEURALPP_EXCEPTION 00016 -00017 #include <exception> -00018 -00019 namespace neuralpp { -00024 class NetworkFileNotFoundException : public std::exception { -00025 public: -00026 NetworkFileNotFoundException() {} -00027 const char* what() const throw() { return "Attempt to load a neural network from an invalid network file"; } -00028 }; -00029 -00035 class NetworkFileWriteException : public std::exception { -00036 public: -00037 NetworkFileWriteException() {} -00038 const char* what() const throw() { return "There was an error while writing the network file"; } -00039 }; -00040 -00045 class InvalidXMLException : public std::exception { -00046 public: -00047 InvalidXMLException() {} -00048 const char* what() const throw() { return "Attempt to load an invalid XML file"; } -00049 }; -00050 -00056 class NetworkIndexOutOfBoundsException : public std::exception { -00057 public: -00058 NetworkIndexOutOfBoundsException() {} -00059 const char* what() const throw() { return "Attempt to access a non-existing neuron"; } -00060 }; -00061 -00067 class InvalidSynapticalWeightException : public std::exception { -00068 public: -00069 InvalidSynapticalWeightException() {} -00070 const char* what() const throw() { return "Attempt to set an invalid weight for the synapsis"; } -00071 }; -00072 } -00073 -00074 #endif -00075 +00017 #include <cstdio> +00018 #include <cstring> +00019 #include <exception> +00020 +00021 namespace neuralpp { +00026 class NetworkFileNotFoundException : public std::exception { +00027 public: +00028 NetworkFileNotFoundException() {} +00029 const char* what() const throw() { return "Attempt to load a neural network from an invalid network file"; } +00030 }; +00031 +00037 class NetworkFileWriteException : public std::exception { +00038 public: +00039 NetworkFileWriteException() {} +00040 const char* what() const throw() { return "There was an error while writing the network file"; } +00041 }; +00042 +00047 class InvalidXMLException : public std::exception { +00048 char *error; +00049 +00050 public: +00051 InvalidXMLException(const char *err = " ") { +00052 error = new char[strlen(err)+40]; +00053 sprintf (error, "Attempt to load an invalid XML file: %s", err); +00054 } +00055 +00056 const char* what() const throw() { return error; } +00057 }; +00058 +00064 class NetworkIndexOutOfBoundsException : public std::exception { +00065 public: +00066 NetworkIndexOutOfBoundsException() {} +00067 const char* what() const throw() { return "Attempt to access a non-existing neuron"; } +00068 }; +00069 +00075 class InvalidSynapticalWeightException : public std::exception { +00076 public: +00077 InvalidSynapticalWeightException() {} +00078 const char* what() const throw() { return "Attempt to set an invalid weight for the synapsis"; } +00079 }; +00080 } +00081 +00082 #endif +00083 -
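The listing above shows that InvalidXMLException now accepts an optional reason string and embeds it in the message returned by what(). A minimal sketch of how that might look to a caller follows; the include path and the sample reason text are assumptions made for illustration.

#include <iostream>
#include <neural++_exception.hpp>   // assumed include path for the exception header

int main() {
    try {
        // The new constructor prepends a fixed prefix to the supplied reason
        throw neuralpp::InvalidXMLException("sample parse error");
    } catch (const std::exception& e) {
        // Expected output:
        // Attempt to load an invalid XML file: sample parse error
        std::cout << e.what() << std::endl;
    }
    return 0;
}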
    Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
    Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
    diff --git a/doc/html/neural_09_09__exception_8hpp.html b/doc/html/neural_09_09__exception_8hpp.html index 751129f..e62727a 100644 --- a/doc/html/neural_09_09__exception_8hpp.html +++ b/doc/html/neural_09_09__exception_8hpp.html @@ -17,7 +17,9 @@
    -

    neural++_exception.hpp File Reference

    #include <exception>
    +

    neural++_exception.hpp File Reference

    #include <cstdio>
    +#include <cstring>
    +#include <exception>

    Go to the source code of this file. @@ -43,7 +45,7 @@
     Exception raised when, while trying the network or directly, the weight of a synapsis is set to a value |w| > 1. More...

    -
    Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
    Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
    diff --git a/doc/html/structCMarkup_1_1ConvertEncoding-members.html b/doc/html/structCMarkup_1_1ConvertEncoding-members.html index 3a282cb..a1cd9d6 100644 --- a/doc/html/structCMarkup_1_1ConvertEncoding-members.html +++ b/doc/html/structCMarkup_1_1ConvertEncoding-members.html @@ -32,7 +32,7 @@ strFromEncodingCMarkup::ConvertEncoding strToEncodingCMarkup::ConvertEncoding -
    Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
    Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
    diff --git a/doc/html/structCMarkup_1_1ConvertEncoding.html b/doc/html/structCMarkup_1_1ConvertEncoding.html index 3a21a55..4dcbc5e 100644 --- a/doc/html/structCMarkup_1_1ConvertEncoding.html +++ b/doc/html/structCMarkup_1_1ConvertEncoding.html @@ -198,7 +198,7 @@
    The documentation for this struct was generated from the following file: -
    Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
    Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
    diff --git a/doc/html/structCMarkup_1_1ElemPos-members.html b/doc/html/structCMarkup_1_1ElemPos-members.html index 9974420..6e61a32 100644 --- a/doc/html/structCMarkup_1_1ElemPos-members.html +++ b/doc/html/structCMarkup_1_1ElemPos-members.html @@ -48,7 +48,7 @@ StartContent() const CMarkup::ElemPos [inline] StartTagLen() const CMarkup::ElemPos [inline] -
    Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
    Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
    diff --git a/doc/html/structCMarkup_1_1ElemPos.html b/doc/html/structCMarkup_1_1ElemPos.html index 4c72ed7..8ad0405 100644 --- a/doc/html/structCMarkup_1_1ElemPos.html +++ b/doc/html/structCMarkup_1_1ElemPos.html @@ -528,7 +528,7 @@
    The documentation for this struct was generated from the following file: -
    Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
    Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
    diff --git a/doc/html/structCMarkup_1_1FilePos-members.html b/doc/html/structCMarkup_1_1FilePos-members.html index 994a117..0b99f44 100644 --- a/doc/html/structCMarkup_1_1FilePos-members.html +++ b/doc/html/structCMarkup_1_1FilePos-members.html @@ -34,7 +34,7 @@ strEncodingCMarkup::FilePos strIOResultCMarkup::FilePos -
    Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
    Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
    diff --git a/doc/html/structCMarkup_1_1FilePos.html b/doc/html/structCMarkup_1_1FilePos.html index 7e38b66..04e448f 100644 --- a/doc/html/structCMarkup_1_1FilePos.html +++ b/doc/html/structCMarkup_1_1FilePos.html @@ -196,7 +196,7 @@
    The documentation for this struct was generated from the following file: -
    Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
    Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
    diff --git a/doc/html/structCMarkup_1_1NodePos-members.html b/doc/html/structCMarkup_1_1NodePos-members.html index 96c128a..0838b97 100644 --- a/doc/html/structCMarkup_1_1NodePos-members.html +++ b/doc/html/structCMarkup_1_1NodePos-members.html @@ -32,7 +32,7 @@ nStartCMarkup::NodePos strMetaCMarkup::NodePos -
    Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
    Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
    diff --git a/doc/html/structCMarkup_1_1NodePos.html b/doc/html/structCMarkup_1_1NodePos.html index 49ac3bc..251a86c 100644 --- a/doc/html/structCMarkup_1_1NodePos.html +++ b/doc/html/structCMarkup_1_1NodePos.html @@ -177,7 +177,7 @@
    The documentation for this struct was generated from the following file: -
    Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
    Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
    diff --git a/doc/html/structCMarkup_1_1NodeStack-members.html b/doc/html/structCMarkup_1_1NodeStack-members.html index cc770e2..dbb25e7 100644 --- a/doc/html/structCMarkup_1_1NodeStack-members.html +++ b/doc/html/structCMarkup_1_1NodeStack-members.html @@ -37,7 +37,7 @@ TopIndex()CMarkup::NodeStack [inline] ~NodeStack()CMarkup::NodeStack [inline] -
    Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
    Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
    diff --git a/doc/html/structCMarkup_1_1NodeStack.html b/doc/html/structCMarkup_1_1NodeStack.html index d3e1bc7..b145c5b 100644 --- a/doc/html/structCMarkup_1_1NodeStack.html +++ b/doc/html/structCMarkup_1_1NodeStack.html @@ -310,7 +310,7 @@
    The documentation for this struct was generated from the following file: -
    Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
    Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
    diff --git a/doc/html/structCMarkup_1_1PosArray-members.html b/doc/html/structCMarkup_1_1PosArray-members.html index aa8c5c5..31bc5e5 100644 --- a/doc/html/structCMarkup_1_1PosArray-members.html +++ b/doc/html/structCMarkup_1_1PosArray-members.html @@ -38,7 +38,7 @@ SegsUsed() const CMarkup::PosArray [inline] ~PosArray()CMarkup::PosArray [inline] -
    Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
    Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
    diff --git a/doc/html/structCMarkup_1_1PosArray.html b/doc/html/structCMarkup_1_1PosArray.html index 10f1254..1be108f 100644 --- a/doc/html/structCMarkup_1_1PosArray.html +++ b/doc/html/structCMarkup_1_1PosArray.html @@ -317,7 +317,7 @@
    The documentation for this struct was generated from the following file: -
    Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
    Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
    diff --git a/doc/html/structCMarkup_1_1SavedPos-members.html b/doc/html/structCMarkup_1_1SavedPos-members.html index 03405d9..add381e 100644 --- a/doc/html/structCMarkup_1_1SavedPos-members.html +++ b/doc/html/structCMarkup_1_1SavedPos-members.html @@ -33,7 +33,7 @@ SPM_USED enum valueCMarkup::SavedPos strNameCMarkup::SavedPos -
    Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
    Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
    diff --git a/doc/html/structCMarkup_1_1SavedPos.html b/doc/html/structCMarkup_1_1SavedPos.html index d371a5c..ebcc414 100644 --- a/doc/html/structCMarkup_1_1SavedPos.html +++ b/doc/html/structCMarkup_1_1SavedPos.html @@ -146,7 +146,7 @@
    The documentation for this struct was generated from the following file: -
    Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
    Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
    diff --git a/doc/html/structCMarkup_1_1SavedPosMap-members.html b/doc/html/structCMarkup_1_1SavedPosMap-members.html index eec355c..f06344f 100644 --- a/doc/html/structCMarkup_1_1SavedPosMap-members.html +++ b/doc/html/structCMarkup_1_1SavedPosMap-members.html @@ -29,7 +29,7 @@ SavedPosMap(int nSize)CMarkup::SavedPosMap [inline] ~SavedPosMap()CMarkup::SavedPosMap [inline] -
    Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
    Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
    diff --git a/doc/html/structCMarkup_1_1SavedPosMap.html b/doc/html/structCMarkup_1_1SavedPosMap.html index 81cf427..9773202 100644 --- a/doc/html/structCMarkup_1_1SavedPosMap.html +++ b/doc/html/structCMarkup_1_1SavedPosMap.html @@ -124,7 +124,7 @@
    The documentation for this struct was generated from the following file: -
    Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
    Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
    diff --git a/doc/html/structCMarkup_1_1SavedPosMapArray-members.html b/doc/html/structCMarkup_1_1SavedPosMapArray-members.html index 87be25d..23c5bec 100644 --- a/doc/html/structCMarkup_1_1SavedPosMapArray-members.html +++ b/doc/html/structCMarkup_1_1SavedPosMapArray-members.html @@ -29,7 +29,7 @@ SavedPosMapArray()CMarkup::SavedPosMapArray [inline] ~SavedPosMapArray()CMarkup::SavedPosMapArray [inline] -
    Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
    Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
    diff --git a/doc/html/structCMarkup_1_1SavedPosMapArray.html b/doc/html/structCMarkup_1_1SavedPosMapArray.html index 8b93392..81847cf 100644 --- a/doc/html/structCMarkup_1_1SavedPosMapArray.html +++ b/doc/html/structCMarkup_1_1SavedPosMapArray.html @@ -130,7 +130,7 @@
    The documentation for this struct was generated from the following file: -
    Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
    Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
    diff --git a/doc/html/structCMarkup_1_1TokenPos-members.html b/doc/html/structCMarkup_1_1TokenPos-members.html index 5b3e30b..d57536c 100644 --- a/doc/html/structCMarkup_1_1TokenPos-members.html +++ b/doc/html/structCMarkup_1_1TokenPos-members.html @@ -37,7 +37,7 @@ StrNIACmp(MCD_PCSZ p1, MCD_PCSZ p2, int n)CMarkup::TokenPos [inline] TokenPos(MCD_CSTR sz, int n)CMarkup::TokenPos [inline] -
    Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
    Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
    diff --git a/doc/html/structCMarkup_1_1TokenPos.html b/doc/html/structCMarkup_1_1TokenPos.html index 3c9ed47..f364b7f 100644 --- a/doc/html/structCMarkup_1_1TokenPos.html +++ b/doc/html/structCMarkup_1_1TokenPos.html @@ -315,7 +315,7 @@
    The documentation for this struct was generated from the following file: -
    Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
    Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
    diff --git a/doc/html/structMCD__CSTR-members.html b/doc/html/structMCD__CSTR-members.html index b308a85..14f2078 100644 --- a/doc/html/structMCD__CSTR-members.html +++ b/doc/html/structMCD__CSTR-members.html @@ -30,7 +30,7 @@ operator MCD_PCSZ() const MCD_CSTR [inline] pcszMCD_CSTR -
    Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
    Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
    diff --git a/doc/html/structMCD__CSTR.html b/doc/html/structMCD__CSTR.html index d794817..3031a43 100644 --- a/doc/html/structMCD__CSTR.html +++ b/doc/html/structMCD__CSTR.html @@ -149,7 +149,7 @@
    The documentation for this struct was generated from the following file: -
    Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
    Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
    diff --git a/doc/html/structneuralpp_1_1netrecord-members.html b/doc/html/structneuralpp_1_1netrecord-members.html index f207ba9..352333d 100644 --- a/doc/html/structneuralpp_1_1netrecord-members.html +++ b/doc/html/structneuralpp_1_1netrecord-members.html @@ -31,7 +31,7 @@ l_rateneuralpp::netrecord output_sizeneuralpp::netrecord -
    Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
    Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
    diff --git a/doc/html/structneuralpp_1_1netrecord.html b/doc/html/structneuralpp_1_1netrecord.html index 1067452..877abab 100644 --- a/doc/html/structneuralpp_1_1netrecord.html +++ b/doc/html/structneuralpp_1_1netrecord.html @@ -139,7 +139,7 @@
    The documentation for this struct was generated from the following file: -
    Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
    Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
    diff --git a/doc/html/structneuralpp_1_1neuronrecord-members.html b/doc/html/structneuralpp_1_1neuronrecord-members.html index 97153a6..e2b8918 100644 --- a/doc/html/structneuralpp_1_1neuronrecord-members.html +++ b/doc/html/structneuralpp_1_1neuronrecord-members.html @@ -27,7 +27,7 @@ actvneuralpp::neuronrecord propneuralpp::neuronrecord -
    Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
    Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
    diff --git a/doc/html/structneuralpp_1_1neuronrecord.html b/doc/html/structneuralpp_1_1neuronrecord.html index ba87269..c761c88 100644 --- a/doc/html/structneuralpp_1_1neuronrecord.html +++ b/doc/html/structneuralpp_1_1neuronrecord.html @@ -71,7 +71,7 @@
    The documentation for this struct was generated from the following file: -
    Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
    Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
    diff --git a/doc/html/structneuralpp_1_1synrecord-members.html b/doc/html/structneuralpp_1_1synrecord-members.html index e553f48..8ea8f7e 100644 --- a/doc/html/structneuralpp_1_1synrecord-members.html +++ b/doc/html/structneuralpp_1_1synrecord-members.html @@ -27,7 +27,7 @@ dneuralpp::synrecord wneuralpp::synrecord -
    Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
    Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
    diff --git a/doc/html/structneuralpp_1_1synrecord.html b/doc/html/structneuralpp_1_1synrecord.html index b19c30e..080c39b 100644 --- a/doc/html/structneuralpp_1_1synrecord.html +++ b/doc/html/structneuralpp_1_1synrecord.html @@ -71,7 +71,7 @@
    The documentation for this struct was generated from the following file: -
    Generated on Sun Aug 16 20:53:42 2009 for Neural++ by  +
    Generated on Fri Sep 4 11:25:49 2009 for Neural++ by  doxygen 1.5.6
    diff --git a/doc/latex/classneuralpp_1_1InvalidXMLException.tex b/doc/latex/classneuralpp_1_1InvalidXMLException.tex index 9622c60..99c39bd 100644 --- a/doc/latex/classneuralpp_1_1InvalidXMLException.tex +++ b/doc/latex/classneuralpp_1_1InvalidXMLException.tex @@ -8,10 +8,15 @@ Exception thrown when trying parsing an invalid XML. \subsection*{Public Member Functions} \begin{CompactItemize} \item -{\bf InvalidXMLException} () +{\bf InvalidXMLException} (const char $\ast$err=\char`\"{} \char`\"{}) \item const char $\ast$ {\bf what} () const throw () \end{CompactItemize} +\subsection*{Private Attributes} +\begin{CompactItemize} +\item +char $\ast$ {\bf error} +\end{CompactItemize} \subsection{Detailed Description} @@ -20,11 +25,13 @@ Exception thrown when trying parsing an invalid XML. \subsection{Constructor \& Destructor Documentation} \index{neuralpp::InvalidXMLException@{neuralpp::InvalidXMLException}!InvalidXMLException@{InvalidXMLException}} \index{InvalidXMLException@{InvalidXMLException}!neuralpp::InvalidXMLException@{neuralpp::InvalidXMLException}} -\subsubsection[InvalidXMLException]{\setlength{\rightskip}{0pt plus 5cm}neuralpp::InvalidXMLException::InvalidXMLException ()\hspace{0.3cm}{\tt [inline]}}\label{classneuralpp_1_1InvalidXMLException_10279e6f42a1ccb934afcfef2770c537} +\subsubsection[InvalidXMLException]{\setlength{\rightskip}{0pt plus 5cm}neuralpp::InvalidXMLException::InvalidXMLException (const char $\ast$ {\em err} = {\tt \char`\"{}~\char`\"{}})\hspace{0.3cm}{\tt [inline]}}\label{classneuralpp_1_1InvalidXMLException_793d311be88606908abf8c5be1348586} +References error. + \subsection{Member Function Documentation} \index{neuralpp::InvalidXMLException@{neuralpp::InvalidXMLException}!what@{what}} \index{what@{what}!neuralpp::InvalidXMLException@{neuralpp::InvalidXMLException}} @@ -33,6 +40,18 @@ Exception thrown when trying parsing an invalid XML. +References error. + +\subsection{Member Data Documentation} +\index{neuralpp::InvalidXMLException@{neuralpp::InvalidXMLException}!error@{error}} +\index{error@{error}!neuralpp::InvalidXMLException@{neuralpp::InvalidXMLException}} +\subsubsection[error]{\setlength{\rightskip}{0pt plus 5cm}char$\ast$ {\bf neuralpp::InvalidXMLException::error}\hspace{0.3cm}{\tt [private]}}\label{classneuralpp_1_1InvalidXMLException_e0c63b17f7b5b0df20d8361bc9ee92b4} + + + + +Referenced by InvalidXMLException(), and what(). + The documentation for this class was generated from the following file:\begin{CompactItemize} \item {\bf neural++\_\-exception.hpp}\end{CompactItemize} diff --git a/doc/latex/classneuralpp_1_1NeuralNet.tex b/doc/latex/classneuralpp_1_1NeuralNet.tex index 512fcdc..975e384 100644 --- a/doc/latex/classneuralpp_1_1NeuralNet.tex +++ b/doc/latex/classneuralpp_1_1NeuralNet.tex @@ -23,28 +23,20 @@ enum {\bf source} \{ {\bf file}, \begin{CompactList}\small\item\em Constructor. \item\end{CompactList}\item double {\bf getOutput} () const \begin{CompactList}\small\item\em It gets the output of the network (note: the layer output should contain an only neuron). \item\end{CompactList}\item -double {\bf getThreshold} () const -\begin{CompactList}\small\item\em Get the threshold of the neurons in the network. \item\end{CompactList}\item std::vector$<$ double $>$ {\bf getOutputs} () \begin{CompactList}\small\item\em It gets the output of the network in case the output layer contains more neurons. 
\item\end{CompactList}\item -double {\bf expected} () const -\begin{CompactList}\small\item\em Get the expected value (in case you have an only neuron in output layer). \item\end{CompactList}\item -std::vector$<$ double $>$ {\bf getExpected} () const -\begin{CompactList}\small\item\em Get the expected value (in case you have an only neuron in output layer). \item\end{CompactList}\item -void {\bf setExpected} (double ex) -\begin{CompactList}\small\item\em It sets the value you expect from your network (in case the network has an only neuron in its output layer). \item\end{CompactList}\item -void {\bf setExpected} (std::vector$<$ double $>$ ex) -\begin{CompactList}\small\item\em Set the values you expect from your network. \item\end{CompactList}\item -void {\bf update} () -\begin{CompactList}\small\item\em It updates through back-propagation the weights of the synapsis and computes again the output value for {\em epochs\/} times, calling back updateWeights and commitChanges functions. \item\end{CompactList}\item +double {\bf getThreshold} () const +\begin{CompactList}\small\item\em Get the threshold of the neurons in the network. \item\end{CompactList}\item void {\bf propagate} () \begin{CompactList}\small\item\em It propagates values through the network. \item\end{CompactList}\item void {\bf setInput} (std::vector$<$ double $>$ v) \begin{CompactList}\small\item\em It sets the input for the network. \item\end{CompactList}\item -void {\bf link} () -\begin{CompactList}\small\item\em It links the layers of the network (input, hidden, output). \item\end{CompactList}\item void {\bf save} (const char $\ast$fname) throw (NetworkFileWriteException) \begin{CompactList}\small\item\em Save a trained neural network to a binary file. \item\end{CompactList}\item +void {\bf loadFromBinary} (const std::string fname) throw (NetworkFileNotFoundException) +\begin{CompactList}\small\item\em DEPRECATED. \item\end{CompactList}\item +void {\bf saveToBinary} (const char $\ast$fname) throw (NetworkFileWriteException) +\begin{CompactList}\small\item\em DEPRECATED. \item\end{CompactList}\item void {\bf train} (std::string xml, {\bf source} src) throw (InvalidXMLException) \begin{CompactList}\small\item\em Train a network using a training set loaded from an XML file. \item\end{CompactList}\end{CompactItemize} \subsection*{Static Public Member Functions} @@ -52,8 +44,6 @@ void {\bf train} (std::string xml, {\bf source} src) throw (InvalidXMLException \item static void {\bf initXML} (std::string \&xml) \begin{CompactList}\small\item\em Initialize the training XML for the neural network. \item\end{CompactList}\item -static std::vector$<$ double $>$ {\bf split} (char delim, std::string str) -\begin{CompactList}\small\item\em Splits a string into a vector of doubles, given a delimitator. \item\end{CompactList}\item static std::string {\bf XMLFromSet} (int \&id, std::string set) \begin{CompactList}\small\item\em Get a training set from a string and copies it to an XML For example, these strings could be training sets for making sums: \char`\"{}2,3;5\char`\"{} - \char`\"{}5,6;11\char`\"{} - \char`\"{}2,2;4\char`\"{} - \char`\"{}4,5:9\char`\"{} This method called on the first string will return an XML such this: '$<$training id=\char`\"{}0\char`\"{}$>$$<$input id=\char`\"{}0\char`\"{}$>$2$<$/input$>$$<$input id=\char`\"{}1\char`\"{}$>$3$<$/input$>$$<$output id=\char`\"{}0\char`\"{}$>$5$<$/output$>$ \</training$>$'. 
\item\end{CompactList}\item static void {\bf closeXML} (std::string \&xml) @@ -72,10 +62,20 @@ static void {\bf closeXML} (std::string \&xml) \item void {\bf updateWeights} () \begin{CompactList}\small\item\em It updates the weights of the net's synapsis through back-propagation. \item\end{CompactList}\item -void {\bf commitChanges} ({\bf Layer} \&l) -\begin{CompactList}\small\item\em It commits the changes made by \doxyref{updateWeights()}{p.}{classneuralpp_1_1NeuralNet_94169c89a7cd47122ab5dbf1d5c5e108} to the layer l. \item\end{CompactList}\item double {\bf error} (double ex) -\begin{CompactList}\small\item\em Get the error made on the expected result as squared deviance. \item\end{CompactList}\end{CompactItemize} +\begin{CompactList}\small\item\em Get the error made on the expected result as squared deviance. \item\end{CompactList}\item +double {\bf expected} () const +\begin{CompactList}\small\item\em Get the expected value (in case you have an only neuron in output layer). \item\end{CompactList}\item +std::vector$<$ double $>$ {\bf getExpected} () const +\begin{CompactList}\small\item\em Get the expected value (in case you have an only neuron in output layer). \item\end{CompactList}\item +void {\bf setExpected} (double ex) +\begin{CompactList}\small\item\em It sets the value you expect from your network (in case the network has an only neuron in its output layer). \item\end{CompactList}\item +void {\bf setExpected} (std::vector$<$ double $>$ ex) +\begin{CompactList}\small\item\em Set the values you expect from your network. \item\end{CompactList}\item +void {\bf update} () +\begin{CompactList}\small\item\em It updates through back-propagation the weights of the synapsis and computes again the output value for {\em epochs\/} times, calling back updateWeights and commitChanges functions. \item\end{CompactList}\item +void {\bf link} () +\begin{CompactList}\small\item\em It links the layers of the network (input, hidden, output). \item\end{CompactList}\end{CompactItemize} \subsection*{Private Attributes} \begin{CompactItemize} \item @@ -100,7 +100,7 @@ Use $\ast$ONLY$\ast$ this class, unless you know what you're doing \begin{Desc} \item[Examples: ]\par -{\bf examples/adderFromScratch.cpp}, {\bf examples/doAdd.cpp}, and {\bf examples/learnAdd.cpp}.\end{Desc} +{\bf examples/adderFromString.cpp}, {\bf examples/doAdd.cpp}, {\bf examples/learnAdd.cpp}, and {\bf examples/networkForSumsAndSubtractions.cpp}.\end{Desc} \subsection{Member Enumeration Documentation} @@ -170,19 +170,7 @@ Constructor. It updates the weights of the net's synapsis through back-propagation. -In-class use only \index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!commitChanges@{commitChanges}} -\index{commitChanges@{commitChanges}!neuralpp::NeuralNet@{neuralpp::NeuralNet}} -\subsubsection[commitChanges]{\setlength{\rightskip}{0pt plus 5cm}void neuralpp::NeuralNet::commitChanges ({\bf Layer} \& {\em l})\hspace{0.3cm}{\tt [private]}}\label{classneuralpp_1_1NeuralNet_f697a8d9967ad8f03e5a16a42cd110c5} - - -It commits the changes made by \doxyref{updateWeights()}{p.}{classneuralpp_1_1NeuralNet_94169c89a7cd47122ab5dbf1d5c5e108} to the layer l. 
- -In-class use only \begin{Desc} -\item[Parameters:] -\begin{description} -\item[{\em l}]\doxyref{Layer}{p.}{classneuralpp_1_1Layer} to commit the changes \end{description} -\end{Desc} -\index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!error@{error}} +In-class use only \index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!error@{error}} \index{error@{error}!neuralpp::NeuralNet@{neuralpp::NeuralNet}} \subsubsection[error]{\setlength{\rightskip}{0pt plus 5cm}double neuralpp::NeuralNet::error (double {\em ex})\hspace{0.3cm}{\tt [private]}}\label{classneuralpp_1_1NeuralNet_8a140d28e6dd4097470c7c138801ad01} @@ -196,42 +184,9 @@ Get the error made on the expected result as squared deviance. \end{Desc} \begin{Desc} \item[Returns:]Mean error \end{Desc} -\index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!getOutput@{getOutput}} -\index{getOutput@{getOutput}!neuralpp::NeuralNet@{neuralpp::NeuralNet}} -\subsubsection[getOutput]{\setlength{\rightskip}{0pt plus 5cm}double neuralpp::NeuralNet::getOutput () const}\label{classneuralpp_1_1NeuralNet_961dce8913264bf64c899dce4e25f810} - - -It gets the output of the network (note: the layer output should contain an only neuron). - -\begin{Desc} -\item[Returns:]The output value of the network \end{Desc} -\begin{Desc} -\item[Examples: ]\par -{\bf examples/doAdd.cpp}.\end{Desc} -\index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!getThreshold@{getThreshold}} -\index{getThreshold@{getThreshold}!neuralpp::NeuralNet@{neuralpp::NeuralNet}} -\subsubsection[getThreshold]{\setlength{\rightskip}{0pt plus 5cm}double neuralpp::NeuralNet::getThreshold () const}\label{classneuralpp_1_1NeuralNet_e08cdcf4b70f987700e553d9914f6179} - - -Get the threshold of the neurons in the network. - -\begin{Desc} -\item[Returns:]The threshold of the neurons \end{Desc} -\index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!getOutputs@{getOutputs}} -\index{getOutputs@{getOutputs}!neuralpp::NeuralNet@{neuralpp::NeuralNet}} -\subsubsection[getOutputs]{\setlength{\rightskip}{0pt plus 5cm}std::vector$<$double$>$ neuralpp::NeuralNet::getOutputs ()}\label{classneuralpp_1_1NeuralNet_e6d2215ecc8b560db2f6797db642191c} - - -It gets the output of the network in case the output layer contains more neurons. - -\begin{Desc} -\item[Returns:]A vector containing the output values of the network \end{Desc} -\begin{Desc} -\item[Examples: ]\par -{\bf examples/adderFromScratch.cpp}.\end{Desc} \index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!expected@{expected}} \index{expected@{expected}!neuralpp::NeuralNet@{neuralpp::NeuralNet}} -\subsubsection[expected]{\setlength{\rightskip}{0pt plus 5cm}double neuralpp::NeuralNet::expected () const}\label{classneuralpp_1_1NeuralNet_562dfe9fb8d73bf25a23ce608451d3aa} +\subsubsection[expected]{\setlength{\rightskip}{0pt plus 5cm}double neuralpp::NeuralNet::expected () const\hspace{0.3cm}{\tt [private]}}\label{classneuralpp_1_1NeuralNet_562dfe9fb8d73bf25a23ce608451d3aa} Get the expected value (in case you have an only neuron in output layer). 
@@ -240,7 +195,7 @@ Of course you should specify this when you build your network by using setExpect \item[Returns:]The expected output value for a certain training phase \end{Desc} \index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!getExpected@{getExpected}} \index{getExpected@{getExpected}!neuralpp::NeuralNet@{neuralpp::NeuralNet}} -\subsubsection[getExpected]{\setlength{\rightskip}{0pt plus 5cm}std::vector$<$double$>$ neuralpp::NeuralNet::getExpected () const}\label{classneuralpp_1_1NeuralNet_51a1851ed07b85bec091c9053ae99cf7} +\subsubsection[getExpected]{\setlength{\rightskip}{0pt plus 5cm}std::vector$<$double$>$ neuralpp::NeuralNet::getExpected () const\hspace{0.3cm}{\tt [private]}}\label{classneuralpp_1_1NeuralNet_51a1851ed07b85bec091c9053ae99cf7} Get the expected value (in case you have an only neuron in output layer). @@ -249,7 +204,7 @@ Of course you should specify this when you build your network by using setExpect \item[Returns:]The expected output value for a certain training phase \end{Desc} \index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!setExpected@{setExpected}} \index{setExpected@{setExpected}!neuralpp::NeuralNet@{neuralpp::NeuralNet}} -\subsubsection[setExpected]{\setlength{\rightskip}{0pt plus 5cm}void neuralpp::NeuralNet::setExpected (double {\em ex})}\label{classneuralpp_1_1NeuralNet_b6475762b7e9eab086befdc511f7c236} +\subsubsection[setExpected]{\setlength{\rightskip}{0pt plus 5cm}void neuralpp::NeuralNet::setExpected (double {\em ex})\hspace{0.3cm}{\tt [private]}}\label{classneuralpp_1_1NeuralNet_b6475762b7e9eab086befdc511f7c236} It sets the value you expect from your network (in case the network has an only neuron in its output layer). @@ -261,7 +216,7 @@ It sets the value you expect from your network (in case the network has an only \end{Desc} \index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!setExpected@{setExpected}} \index{setExpected@{setExpected}!neuralpp::NeuralNet@{neuralpp::NeuralNet}} -\subsubsection[setExpected]{\setlength{\rightskip}{0pt plus 5cm}void neuralpp::NeuralNet::setExpected (std::vector$<$ double $>$ {\em ex})}\label{classneuralpp_1_1NeuralNet_e649edc3d86bec7c0e178d5c892b4fd7} +\subsubsection[setExpected]{\setlength{\rightskip}{0pt plus 5cm}void neuralpp::NeuralNet::setExpected (std::vector$<$ double $>$ {\em ex})\hspace{0.3cm}{\tt [private]}}\label{classneuralpp_1_1NeuralNet_e649edc3d86bec7c0e178d5c892b4fd7} Set the values you expect from your network. @@ -273,11 +228,51 @@ Set the values you expect from your network. \end{Desc} \index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!update@{update}} \index{update@{update}!neuralpp::NeuralNet@{neuralpp::NeuralNet}} -\subsubsection[update]{\setlength{\rightskip}{0pt plus 5cm}void neuralpp::NeuralNet::update ()}\label{classneuralpp_1_1NeuralNet_b0bd1daadb06980dff1f50d33a7c098e} +\subsubsection[update]{\setlength{\rightskip}{0pt plus 5cm}void neuralpp::NeuralNet::update ()\hspace{0.3cm}{\tt [private]}}\label{classneuralpp_1_1NeuralNet_b0bd1daadb06980dff1f50d33a7c098e} It updates through back-propagation the weights of the synapsis and computes again the output value for {\em epochs\/} times, calling back updateWeights and commitChanges functions. 
+\index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!link@{link}} +\index{link@{link}!neuralpp::NeuralNet@{neuralpp::NeuralNet}} +\subsubsection[link]{\setlength{\rightskip}{0pt plus 5cm}void neuralpp::NeuralNet::link ()\hspace{0.3cm}{\tt [private]}}\label{classneuralpp_1_1NeuralNet_46f23f462318a4ffc037a4e806364c3f} + + +It links the layers of the network (input, hidden, output). + +\index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!getOutput@{getOutput}} +\index{getOutput@{getOutput}!neuralpp::NeuralNet@{neuralpp::NeuralNet}} +\subsubsection[getOutput]{\setlength{\rightskip}{0pt plus 5cm}double neuralpp::NeuralNet::getOutput () const}\label{classneuralpp_1_1NeuralNet_961dce8913264bf64c899dce4e25f810} + + +It gets the output of the network (note: the layer output should contain an only neuron). + +\begin{Desc} +\item[Returns:]The output value of the network \end{Desc} +\begin{Desc} +\item[Examples: ]\par +{\bf examples/adderFromString.cpp}, and {\bf examples/doAdd.cpp}.\end{Desc} +\index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!getOutputs@{getOutputs}} +\index{getOutputs@{getOutputs}!neuralpp::NeuralNet@{neuralpp::NeuralNet}} +\subsubsection[getOutputs]{\setlength{\rightskip}{0pt plus 5cm}std::vector$<$double$>$ neuralpp::NeuralNet::getOutputs ()}\label{classneuralpp_1_1NeuralNet_e6d2215ecc8b560db2f6797db642191c} + + +It gets the output of the network in case the output layer contains more neurons. + +\begin{Desc} +\item[Returns:]A vector containing the output values of the network \end{Desc} +\begin{Desc} +\item[Examples: ]\par +{\bf examples/networkForSumsAndSubtractions.cpp}.\end{Desc} +\index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!getThreshold@{getThreshold}} +\index{getThreshold@{getThreshold}!neuralpp::NeuralNet@{neuralpp::NeuralNet}} +\subsubsection[getThreshold]{\setlength{\rightskip}{0pt plus 5cm}double neuralpp::NeuralNet::getThreshold () const}\label{classneuralpp_1_1NeuralNet_e08cdcf4b70f987700e553d9914f6179} + + +Get the threshold of the neurons in the network. + +\begin{Desc} +\item[Returns:]The threshold of the neurons \end{Desc} \index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!propagate@{propagate}} \index{propagate@{propagate}!neuralpp::NeuralNet@{neuralpp::NeuralNet}} \subsubsection[propagate]{\setlength{\rightskip}{0pt plus 5cm}void neuralpp::NeuralNet::propagate ()}\label{classneuralpp_1_1NeuralNet_c129c180647362da963758bfd1ba6890} @@ -287,7 +282,7 @@ It propagates values through the network. Use this when you want to give an already trained network some new values the get to the output \begin{Desc} \item[Examples: ]\par -{\bf examples/adderFromScratch.cpp}, and {\bf examples/doAdd.cpp}.\end{Desc} +{\bf examples/adderFromString.cpp}, {\bf examples/doAdd.cpp}, and {\bf examples/networkForSumsAndSubtractions.cpp}.\end{Desc} \index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!setInput@{setInput}} \index{setInput@{setInput}!neuralpp::NeuralNet@{neuralpp::NeuralNet}} \subsubsection[setInput]{\setlength{\rightskip}{0pt plus 5cm}void neuralpp::NeuralNet::setInput (std::vector$<$ double $>$ {\em v})}\label{classneuralpp_1_1NeuralNet_405b32d2928344314ecf0469070b0f17} @@ -302,15 +297,8 @@ It sets the input for the network. 
\end{Desc} \begin{Desc} \item[Examples: ]\par -{\bf examples/adderFromScratch.cpp}, and {\bf examples/doAdd.cpp}.\end{Desc} -\index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!link@{link}} -\index{link@{link}!neuralpp::NeuralNet@{neuralpp::NeuralNet}} -\subsubsection[link]{\setlength{\rightskip}{0pt plus 5cm}void neuralpp::NeuralNet::link ()}\label{classneuralpp_1_1NeuralNet_46f23f462318a4ffc037a4e806364c3f} - - -It links the layers of the network (input, hidden, output). - -Don't use unless you exactly know what you're doing, it is already called by the constructor \index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!save@{save}} +{\bf examples/adderFromString.cpp}, {\bf examples/doAdd.cpp}, and {\bf examples/networkForSumsAndSubtractions.cpp}.\end{Desc} +\index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!save@{save}} \index{save@{save}!neuralpp::NeuralNet@{neuralpp::NeuralNet}} \subsubsection[save]{\setlength{\rightskip}{0pt plus 5cm}void neuralpp::NeuralNet::save (const char $\ast$ {\em fname}) throw ({\bf NetworkFileWriteException})}\label{classneuralpp_1_1NeuralNet_fdf94c276720c25e565cac834fe8a407} @@ -330,6 +318,40 @@ Save a trained neural network to a binary file. \begin{Desc} \item[Examples: ]\par {\bf examples/learnAdd.cpp}.\end{Desc} +\index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!loadFromBinary@{loadFromBinary}} +\index{loadFromBinary@{loadFromBinary}!neuralpp::NeuralNet@{neuralpp::NeuralNet}} +\subsubsection[loadFromBinary]{\setlength{\rightskip}{0pt plus 5cm}void neuralpp::NeuralNet::loadFromBinary (const std::string {\em fname}) throw ({\bf NetworkFileNotFoundException})}\label{classneuralpp_1_1NeuralNet_a060e28b438613a6cc9e0895ddbc292b} + + +DEPRECATED. + +Load a trained neural network from a binary file. This function is deprecated and kept for back-compatibility. Use the XML format instead to load and neural networks and, respectly, the NeuralNetwork(const std::string) constructor or the \doxyref{save(const char$\ast$)}{p.}{classneuralpp_1_1NeuralNet_fdf94c276720c25e565cac834fe8a407} methods. \begin{Desc} +\item[Parameters:] +\begin{description} +\item[{\em fname}]Name of the file to be loaded \end{description} +\end{Desc} +\begin{Desc} +\item[Exceptions:] +\begin{description} +\item[{\em \doxyref{NetworkFileNotFoundException}{p.}{classneuralpp_1_1NetworkFileNotFoundException}}]When you're trying to load an invalid network file \end{description} +\end{Desc} +\index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!saveToBinary@{saveToBinary}} +\index{saveToBinary@{saveToBinary}!neuralpp::NeuralNet@{neuralpp::NeuralNet}} +\subsubsection[saveToBinary]{\setlength{\rightskip}{0pt plus 5cm}void neuralpp::NeuralNet::saveToBinary (const char $\ast$ {\em fname}) throw ({\bf NetworkFileWriteException})}\label{classneuralpp_1_1NeuralNet_520147d9b47b69565567bd3fdcfd8897} + + +DEPRECATED. + +Save a trained neural network to a binary file. This function is deprecated and kept for back-compatibility. Use the XML format instead to load and neural networks and, respectly, the NeuralNetwork(const std::string) constructor or the \doxyref{save(const char$\ast$)}{p.}{classneuralpp_1_1NeuralNet_fdf94c276720c25e565cac834fe8a407} methods. 
\begin{Desc} +\item[Parameters:] +\begin{description} +\item[{\em fname}]Name of the file to be saved with the network information \end{description} +\end{Desc} +\begin{Desc} +\item[Exceptions:] +\begin{description} +\item[{\em \doxyref{NetworkFileWriteException}{p.}{classneuralpp_1_1NetworkFileWriteException}}]When you try to write the network information to an invalid file \end{description} +\end{Desc} \index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!train@{train}} \index{train@{train}!neuralpp::NeuralNet@{neuralpp::NeuralNet}} \subsubsection[train]{\setlength{\rightskip}{0pt plus 5cm}void neuralpp::NeuralNet::train (std::string {\em xml}, \/ {\bf source} {\em src}) throw ({\bf InvalidXMLException})}\label{classneuralpp_1_1NeuralNet_1c9e17437d41a7048611e21a3cc1c7dd} @@ -349,7 +371,7 @@ A sample XML file is available in examples/adder.xml \begin{Desc} \end{Desc} \begin{Desc} \item[Examples: ]\par -{\bf examples/adderFromScratch.cpp}, and {\bf examples/learnAdd.cpp}.\end{Desc} +{\bf examples/adderFromString.cpp}, {\bf examples/learnAdd.cpp}, and {\bf examples/networkForSumsAndSubtractions.cpp}.\end{Desc} \index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!initXML@{initXML}} \index{initXML@{initXML}!neuralpp::NeuralNet@{neuralpp::NeuralNet}} \subsubsection[initXML]{\setlength{\rightskip}{0pt plus 5cm}static void neuralpp::NeuralNet::initXML (std::string \& {\em xml})\hspace{0.3cm}{\tt [static]}}\label{classneuralpp_1_1NeuralNet_96da6712a72051cf34ad961761ef6e08} @@ -362,20 +384,6 @@ Initialize the training XML for the neural network. \begin{description} \item[{\em xml}]String that will contain the XML \end{description} \end{Desc} -\index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!split@{split}} -\index{split@{split}!neuralpp::NeuralNet@{neuralpp::NeuralNet}} -\subsubsection[split]{\setlength{\rightskip}{0pt plus 5cm}static std::vector$<$double$>$ neuralpp::NeuralNet::split (char {\em delim}, \/ std::string {\em str})\hspace{0.3cm}{\tt [static]}}\label{classneuralpp_1_1NeuralNet_83c6555520856d5867752904349ab6ca} - - -Splits a string into a vector of doubles, given a delimitator. - -\begin{Desc} -\item[Parameters:] -\begin{description} -\item[{\em delim}]Delimitator \item[{\em str}]String to be splitted \end{description} -\end{Desc} -\begin{Desc} -\item[Returns:]Vector of doubles containing splitted values \end{Desc} \index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!XMLFromSet@{XMLFromSet}} \index{XMLFromSet@{XMLFromSet}!neuralpp::NeuralNet@{neuralpp::NeuralNet}} \subsubsection[XMLFromSet]{\setlength{\rightskip}{0pt plus 5cm}static std::string neuralpp::NeuralNet::XMLFromSet (int \& {\em id}, \/ std::string {\em set})\hspace{0.3cm}{\tt [static]}}\label{classneuralpp_1_1NeuralNet_0a2733037af912b3e6a10146e7b7172f} diff --git a/doc/latex/classneuralpp_1_1Neuron.tex b/doc/latex/classneuralpp_1_1Neuron.tex index c6f0ba2..620a4ae 100644 --- a/doc/latex/classneuralpp_1_1Neuron.tex +++ b/doc/latex/classneuralpp_1_1Neuron.tex @@ -24,6 +24,10 @@ void {\bf setActv} (double a) \begin{CompactList}\small\item\em Change the activation value of the neuron. \item\end{CompactList}\item void {\bf setProp} (double p) \begin{CompactList}\small\item\em Change the propagation value of the neuron. \item\end{CompactList}\item +void {\bf setSynIn} (size\_\-t n) +\item +void {\bf setSynOut} (size\_\-t n) +\item double {\bf getActv} () \begin{CompactList}\small\item\em Get the activation value of the neuron. \item\end{CompactList}\item double {\bf getProp} () @@ -162,6 +166,16 @@ Change the propagation value of the neuron. 
\begin{description} \item[{\em p}]Propagation value \end{description} \end{Desc} +\index{neuralpp::Neuron@{neuralpp::Neuron}!setSynIn@{setSynIn}} +\index{setSynIn@{setSynIn}!neuralpp::Neuron@{neuralpp::Neuron}} +\subsubsection[setSynIn]{\setlength{\rightskip}{0pt plus 5cm}void neuralpp::Neuron::setSynIn (size\_\-t {\em n})}\label{classneuralpp_1_1Neuron_6fa3e8afc1c6e6e427773ba89a6fcb68} + + +\index{neuralpp::Neuron@{neuralpp::Neuron}!setSynOut@{setSynOut}} +\index{setSynOut@{setSynOut}!neuralpp::Neuron@{neuralpp::Neuron}} +\subsubsection[setSynOut]{\setlength{\rightskip}{0pt plus 5cm}void neuralpp::Neuron::setSynOut (size\_\-t {\em n})}\label{classneuralpp_1_1Neuron_d6ae3a93ecd11c345d931aee4ca248c1} + + \index{neuralpp::Neuron@{neuralpp::Neuron}!getActv@{getActv}} \index{getActv@{getActv}!neuralpp::Neuron@{neuralpp::Neuron}} \subsubsection[getActv]{\setlength{\rightskip}{0pt plus 5cm}double neuralpp::Neuron::getActv ()}\label{classneuralpp_1_1Neuron_55993867179f0ac7d1e0e2c460ceb611} diff --git a/doc/latex/classneuralpp_1_1Synapsis.tex b/doc/latex/classneuralpp_1_1Synapsis.tex index db47e64..5ee504b 100644 --- a/doc/latex/classneuralpp_1_1Synapsis.tex +++ b/doc/latex/classneuralpp_1_1Synapsis.tex @@ -8,8 +8,8 @@ Class for managing synapsis. \subsection*{Public Member Functions} \begin{CompactItemize} \item -{\bf Synapsis} ({\bf Neuron} $\ast$i, {\bf Neuron} $\ast$o, double w, double d) -\begin{CompactList}\small\item\em Constructor. \item\end{CompactList}\item +{\bf Synapsis} () +\begin{CompactList}\small\item\em Empty constructor (it does nothing). \item\end{CompactList}\item {\bf Synapsis} ({\bf Neuron} $\ast$i, {\bf Neuron} $\ast$o, double($\ast$a)(double)) \begin{CompactList}\small\item\em Constructor. \item\end{CompactList}\item {\bf Synapsis} ({\bf Neuron} $\ast$i, {\bf Neuron} $\ast$o, double w, double($\ast$a)(double)) @@ -55,16 +55,11 @@ Don't use this class directly unless you know what you're doing, use \doxyref{Ne \subsection{Constructor \& Destructor Documentation} \index{neuralpp::Synapsis@{neuralpp::Synapsis}!Synapsis@{Synapsis}} \index{Synapsis@{Synapsis}!neuralpp::Synapsis@{neuralpp::Synapsis}} -\subsubsection[Synapsis]{\setlength{\rightskip}{0pt plus 5cm}neuralpp::Synapsis::Synapsis ({\bf Neuron} $\ast$ {\em i}, \/ {\bf Neuron} $\ast$ {\em o}, \/ double {\em w}, \/ double {\em d})}\label{classneuralpp_1_1Synapsis_0729de9e737b9967421edcfc4b410bd8} +\subsubsection[Synapsis]{\setlength{\rightskip}{0pt plus 5cm}neuralpp::Synapsis::Synapsis ()\hspace{0.3cm}{\tt [inline]}}\label{classneuralpp_1_1Synapsis_c7760b19c56e9f69994970311703c5fa} -Constructor. +Empty constructor (it does nothing). 
-\begin{Desc} -\item[Parameters:] -\begin{description} -\item[{\em i}]Input neuron \item[{\em o}]Output neuron \item[{\em w}]Weight for the synapsis \item[{\em d}]Delta for the synapsis \end{description} -\end{Desc} \index{neuralpp::Synapsis@{neuralpp::Synapsis}!Synapsis@{Synapsis}} \index{Synapsis@{Synapsis}!neuralpp::Synapsis@{neuralpp::Synapsis}} \subsubsection[Synapsis]{\setlength{\rightskip}{0pt plus 5cm}neuralpp::Synapsis::Synapsis ({\bf Neuron} $\ast$ {\em i}, \/ {\bf Neuron} $\ast$ {\em o}, \/ double($\ast$)(double) {\em a})}\label{classneuralpp_1_1Synapsis_b767332fdce81af25486b8969e5d2fb3} diff --git a/doc/latex/doxygen.sty b/doc/latex/doxygen.sty index 6fde198..a972728 100644 --- a/doc/latex/doxygen.sty +++ b/doc/latex/doxygen.sty @@ -10,8 +10,8 @@ {\fancyplain{}{\bfseries\rightmark}} \rhead[\fancyplain{}{\bfseries\leftmark}] {\fancyplain{}{\bfseries\thepage}} -\rfoot[\fancyplain{}{\bfseries\scriptsize Generated on Sun Aug 16 20:53:42 2009 for Neural++ by Doxygen }]{} -\lfoot[]{\fancyplain{}{\bfseries\scriptsize Generated on Sun Aug 16 20:53:42 2009 for Neural++ by Doxygen }} +\rfoot[\fancyplain{}{\bfseries\scriptsize Generated on Fri Sep 4 11:25:49 2009 for Neural++ by Doxygen }]{} +\lfoot[]{\fancyplain{}{\bfseries\scriptsize Generated on Fri Sep 4 11:25:49 2009 for Neural++ by Doxygen }} \cfoot{} \newenvironment{Code} {\footnotesize} diff --git a/doc/latex/examples.tex b/doc/latex/examples.tex index da45cf7..9546a85 100644 --- a/doc/latex/examples.tex +++ b/doc/latex/examples.tex @@ -1,9 +1,11 @@ \section{Examples} Here is a list of all examples:\begin{CompactItemize} \item -{\bf examples/adderFromScratch.cpp} +{\bf examples/adderFromString.cpp} \item {\bf examples/doAdd.cpp} \item {\bf examples/learnAdd.cpp} +\item +{\bf examples/networkForSumsAndSubtractions.cpp} \end{CompactItemize} diff --git a/doc/latex/examples_2adderFromString_8cpp-example.tex b/doc/latex/examples_2adderFromString_8cpp-example.tex new file mode 100644 index 0000000..f14ae8f --- /dev/null +++ b/doc/latex/examples_2adderFromString_8cpp-example.tex @@ -0,0 +1,49 @@ +\section{examples/adderFromString.cpp} +Similar to learnAdd.cpp, but this time the training XML is generated as a string and not saved to a file, and parsed by the program itself to build the network. Then, the program asks two real numbers, and performs both the sum and the difference between them, putting the sum's output on the first output neuron and the difference's on the second output neuron. Anyway, using more than one neuron in the output layer is strongly discouraged, as the network usually won't set correctly the synaptical weights to give satisfying and accurate answers for all of the operations. + + + +\begin{DocInclude}\begin{verbatim} +#include +#include + +using namespace std; +using namespace neuralpp; + +int main() { + NeuralNet net(2, 2, 1, 0.002, 1000); + string xml; + double tmp; + int id = 0; + + // XML initialization. 
Then, I say XML that 2+3=5, 3+3=6, 5+4=9 + // Strings' format is "input1,input2,...,inputn;output1,output2,...,outputm + NeuralNet::initXML(xml); + xml += NeuralNet::XMLFromSet(id, "3,2;5"); + xml += NeuralNet::XMLFromSet(id, "6,3;9"); + xml += NeuralNet::XMLFromSet(id, "2,3;5"); + xml += NeuralNet::XMLFromSet(id, "4,4;8"); + NeuralNet::closeXML(xml); + cout << xml << endl; + + net.train(xml, NeuralNet::str); + vector v; + cout << "Network status: trained\n\n"; + + cout << "First number to add: "; + cin >> tmp; + v.push_back(tmp); + + cout << "Second number to add: "; + cin >> tmp; + v.push_back(tmp); + + net.setInput(v); + net.propagate(); + cout << "Output: " << net.getOutput() << endl; + return 0; +} + +\end{verbatim} +\end{DocInclude} + \ No newline at end of file diff --git a/doc/latex/examples_2doAdd_8cpp-example.tex b/doc/latex/examples_2doAdd_8cpp-example.tex index 42aee3e..989d259 100644 --- a/doc/latex/examples_2doAdd_8cpp-example.tex +++ b/doc/latex/examples_2doAdd_8cpp-example.tex @@ -10,7 +10,7 @@ Show how to use a network already trained and saved to a binary file. In this ca using namespace std; using namespace neuralpp; -#define NETFILE "adder.net" +#define NETFILE "network.xml" int main() { double a,b; diff --git a/doc/latex/examples_2learnAdd_8cpp-example.tex b/doc/latex/examples_2learnAdd_8cpp-example.tex index 9c4de9c..1ee6d68 100644 --- a/doc/latex/examples_2learnAdd_8cpp-example.tex +++ b/doc/latex/examples_2learnAdd_8cpp-example.tex @@ -21,10 +21,10 @@ int main() { // => 2 neurons for the input layer // => 2 neurons for the hidden layer // => 1 neuron for the output layer - // => a learning rate == 0.005 (just get it doing some tests until satisfied) + // => a learning rate == 0.002 (just get it doing some tests until satisfied, but remember to keep its value quite low and ~ 0 to keep the network stable) // => 1000 learning steps (i.e. the network will be ready after 1000 training steps to adjust the synaptical weights // => 0.1 as neural threshold (the threshold above which a neuron activates) - NeuralNet net(2, 2, 1, 0.005, 1000, 0.1); + NeuralNet net(2, 2, 1, 0.002, 2000); // Initialize a training XML as a string in 'xml' NeuralNet::initXML(xml); @@ -41,6 +41,12 @@ int main() { xml += NeuralNet::XMLFromSet(id, "-1,-2;-3"); xml += NeuralNet::XMLFromSet(id, "8,9;17"); xml += NeuralNet::XMLFromSet(id, "10,10;20"); + xml += NeuralNet::XMLFromSet(id, "4,1;5"); + xml += NeuralNet::XMLFromSet(id, "2,6;8"); + xml += NeuralNet::XMLFromSet(id, "2,7;9"); + xml += NeuralNet::XMLFromSet(id, "8,9;17"); + xml += NeuralNet::XMLFromSet(id, "4,7;11"); + xml += NeuralNet::XMLFromSet(id, "5,2;7"); NeuralNet::closeXML(xml); // Save the XML string just created to a file @@ -57,7 +63,7 @@ int main() { // Save the trained network to a binary file, that can be reloaded from any // application that is going to use that network - net.save("adder.net"); + net.save("network.xml"); cout << "Network trained in " << (t2-t1) << " seconds. You can use adder.net file now to load this network\n"; return 0; } diff --git a/doc/latex/examples_2networkForSumsAndSubtractions_8cpp-example.tex b/doc/latex/examples_2networkForSumsAndSubtractions_8cpp-example.tex new file mode 100644 index 0000000..f057bfb --- /dev/null +++ b/doc/latex/examples_2networkForSumsAndSubtractions_8cpp-example.tex @@ -0,0 +1,48 @@ +\section{examples/networkForSumsAndSubtractions.cpp} +This program creates a neural network from scratch. 
Its purpose is to get two numbers and learn to compute their sum and difference (so the network provides two output values). The training set is auto-generated to an XML string, and then the network is trained. + + + +\begin{DocInclude}\begin{verbatim} +#include +#include + +using namespace std; +using namespace neuralpp; + +int main() { + NeuralNet net(2, 2, 2, 0.002, 1000); + string xml; + double tmp; + int id = 0; + + // XML initialization. Then, I say XML that 3+2=5, 3-2=1; 4+2=6, 4-2=2; 6+3=9, 6-3=3 + // Strings' format is "input1,input2,...,inputn;output1,output2,...,outputm + NeuralNet::initXML(xml); + xml += NeuralNet::XMLFromSet(id, "3,2;5,1"); + xml += NeuralNet::XMLFromSet(id, "4,2;6,2"); + xml += NeuralNet::XMLFromSet(id, "6,3;9,3"); + NeuralNet::closeXML(xml); + cout << xml << endl; + + net.train(xml, NeuralNet::str); + vector v; + cout << "Network status: trained\n\n"; + + cout << "First number: "; + cin >> tmp; + v.push_back(tmp); + + cout << "Second number: "; + cin >> tmp; + v.push_back(tmp); + + net.setInput(v); + net.propagate(); + cout << "Output: " << net.getOutputs()[0] << "; " << net.getOutputs()[1] << endl; + return 0; +} + +\end{verbatim} +\end{DocInclude} + \ No newline at end of file diff --git a/doc/latex/examples_2networkForSumsAndSubtrations_8cpp-example.tex b/doc/latex/examples_2networkForSumsAndSubtrations_8cpp-example.tex new file mode 100644 index 0000000..ec154c7 --- /dev/null +++ b/doc/latex/examples_2networkForSumsAndSubtrations_8cpp-example.tex @@ -0,0 +1,8 @@ +\section{examples/networkForSumsAndSubtrations.cpp} +This program creates a neural network from scratch. Its purpose is to get two numbers and learn to compute their sum and difference (so the network provides two output values). The training set is auto-generated to an XML string, and then the network is trained. + + + +\begin{DocInclude}\begin{verbatim}\end{verbatim} +\end{DocInclude} + \ No newline at end of file diff --git a/doc/latex/namespaceneuralpp.tex b/doc/latex/namespaceneuralpp.tex index d088738..f91ca82 100644 --- a/doc/latex/namespaceneuralpp.tex +++ b/doc/latex/namespaceneuralpp.tex @@ -30,6 +30,11 @@ class {\bf NetworkIndexOutOfBoundsException} \begin{CompactList}\small\item\em Exception raised when trying to access a neuron whose index is larger than the number of neurons in the network. \item\end{CompactList}\item class {\bf InvalidSynapticalWeightException} \begin{CompactList}\small\item\em Exception raised when, while trying the network or directly, the weight of a synapsis is set to a value $|$w$|$ $>$ 1. \item\end{CompactList}\end{CompactItemize} +\subsection*{Namespaces} +\begin{CompactItemize} +\item +namespace {\bf neuralutils} +\end{CompactItemize} \subsection*{Functions} \begin{CompactItemize} \item diff --git a/doc/latex/namespaceneuralpp_1_1neuralutils.tex b/doc/latex/namespaceneuralpp_1_1neuralutils.tex new file mode 100644 index 0000000..f00109a --- /dev/null +++ b/doc/latex/namespaceneuralpp_1_1neuralutils.tex @@ -0,0 +1,70 @@ +\section{neuralpp::neuralutils Namespace Reference} +\label{namespaceneuralpp_1_1neuralutils}\index{neuralpp::neuralutils@{neuralpp::neuralutils}} + + +\subsection*{Functions} +\begin{CompactItemize} +\item +std::vector$<$ double $>$ {\bf split} (char delim, std::string str) +\begin{CompactList}\small\item\em Split a string into a vector of doubles, given a delimitator. \item\end{CompactList}\item +std::vector$<$ std::string $>$ {\bf splitLines} (std::string str) +\begin{CompactList}\small\item\em Split the lines of a string. 
\item\end{CompactList}\item +void {\bf toLower} (std::string \&str) +\begin{CompactList}\small\item\em Convert the characters of a string to lower case. \item\end{CompactList}\item +void {\bf toUpper} (std::string \&str) +\begin{CompactList}\small\item\em Convert the characters of a string to upper case. \item\end{CompactList}\end{CompactItemize} + + +\subsection{Function Documentation} +\index{neuralpp::neuralutils@{neuralpp::neuralutils}!split@{split}} +\index{split@{split}!neuralpp::neuralutils@{neuralpp::neuralutils}} +\subsubsection[split]{\setlength{\rightskip}{0pt plus 5cm}std::vector$<$double$>$ neuralpp::neuralutils::split (char {\em delim}, \/ std::string {\em str})}\label{namespaceneuralpp_1_1neuralutils_68719b3d63ca48ed264e1b730a1aaa4a} + + +Split a string into a vector of doubles, given a delimitator. + +\begin{Desc} +\item[Parameters:] +\begin{description} +\item[{\em delim}]Delimitator \item[{\em str}]String to be splitted \end{description} +\end{Desc} +\begin{Desc} +\item[Returns:]Vector of doubles containing splitted value \end{Desc} +\index{neuralpp::neuralutils@{neuralpp::neuralutils}!splitLines@{splitLines}} +\index{splitLines@{splitLines}!neuralpp::neuralutils@{neuralpp::neuralutils}} +\subsubsection[splitLines]{\setlength{\rightskip}{0pt plus 5cm}std::vector$<$std::string$>$ neuralpp::neuralutils::splitLines (std::string {\em str})}\label{namespaceneuralpp_1_1neuralutils_1d887e4bcc7ef2d50cbeca984767a78b} + + +Split the lines of a string. + +\begin{Desc} +\item[Parameters:] +\begin{description} +\item[{\em str}]String to be splitted \end{description} +\end{Desc} +\begin{Desc} +\item[Returns:]An array of strings containing the lines of the original string \end{Desc} +\index{neuralpp::neuralutils@{neuralpp::neuralutils}!toLower@{toLower}} +\index{toLower@{toLower}!neuralpp::neuralutils@{neuralpp::neuralutils}} +\subsubsection[toLower]{\setlength{\rightskip}{0pt plus 5cm}void neuralpp::neuralutils::toLower (std::string \& {\em str})}\label{namespaceneuralpp_1_1neuralutils_f7932c25bd82b19173d2f3d2e5cef488} + + +Convert the characters of a string to lower case. + +\begin{Desc} +\item[Parameters:] +\begin{description} +\item[{\em str}]String to be converted \end{description} +\end{Desc} +\index{neuralpp::neuralutils@{neuralpp::neuralutils}!toUpper@{toUpper}} +\index{toUpper@{toUpper}!neuralpp::neuralutils@{neuralpp::neuralutils}} +\subsubsection[toUpper]{\setlength{\rightskip}{0pt plus 5cm}void neuralpp::neuralutils::toUpper (std::string \& {\em str})}\label{namespaceneuralpp_1_1neuralutils_265b22d1a6110646b42693b96c21ca8b} + + +Convert the characters of a string to upper case. 
+ +\begin{Desc} +\item[Parameters:] +\begin{description} +\item[{\em str}]String to be converted \end{description} +\end{Desc} diff --git a/doc/latex/namespaces.tex b/doc/latex/namespaces.tex index 735d3ad..67d79b7 100644 --- a/doc/latex/namespaces.tex +++ b/doc/latex/namespaces.tex @@ -1,4 +1,5 @@ \section{Namespace List} Here is a list of all namespaces with brief descriptions:\begin{CompactList} \item\contentsline{section}{{\bf neuralpp} (Main namespace for the library )}{\pageref{namespaceneuralpp}}{} +\item\contentsline{section}{{\bf neuralpp::neuralutils} }{\pageref{namespaceneuralpp_1_1neuralutils}}{} \end{CompactList} diff --git a/doc/latex/neural_09_09_8hpp.tex b/doc/latex/neural_09_09_8hpp.tex index d7fe7ba..4ea06ec 100644 --- a/doc/latex/neural_09_09_8hpp.tex +++ b/doc/latex/neural_09_09_8hpp.tex @@ -8,6 +8,8 @@ \begin{CompactItemize} \item namespace {\bf neuralpp} +\item +namespace {\bf neuralpp::neuralutils} \end{CompactItemize} \subsection*{Classes} \begin{CompactItemize} @@ -29,31 +31,35 @@ struct {\bf neuralpp::synrecord} \subsection*{Defines} \begin{CompactItemize} \item -\#define {\bf RAND}~(double) ( (rand() / (RAND\_\-MAX/2)) - 1) -\begin{CompactList}\small\item\em Default rand value: $|$sin(rand)$|$, always $>$= 0 and $<$= 1. \item\end{CompactList}\item -\#define {\bf BETA0}~1.0 -\begin{CompactList}\small\item\em Initial value for the inertial momentum of the synapses. \item\end{CompactList}\end{CompactItemize} +\#define {\bf RAND}~(double) ( (rand() / 10.0) / ((double) RAND\_\-MAX) ) +\item +\#define {\bf BETA0}~0.8 +\end{CompactItemize} \subsection*{Functions} \begin{CompactItemize} \item double {\bf neuralpp::df} (double($\ast$f)(double), double x) \item double {\bf neuralpp::\_\-\_\-actv} (double prop) -\end{CompactItemize} +\item +std::vector$<$ double $>$ {\bf neuralpp::neuralutils::split} (char delim, std::string str) +\begin{CompactList}\small\item\em Split a string into a vector of doubles, given a delimitator. \item\end{CompactList}\item +std::vector$<$ std::string $>$ {\bf neuralpp::neuralutils::splitLines} (std::string str) +\begin{CompactList}\small\item\em Split the lines of a string. \item\end{CompactList}\item +void {\bf neuralpp::neuralutils::toLower} (std::string \&str) +\begin{CompactList}\small\item\em Convert the characters of a string to lower case. \item\end{CompactList}\item +void {\bf neuralpp::neuralutils::toUpper} (std::string \&str) +\begin{CompactList}\small\item\em Convert the characters of a string to upper case. \item\end{CompactList}\end{CompactItemize} \subsection{Define Documentation} \index{neural++.hpp@{neural++.hpp}!BETA0@{BETA0}} \index{BETA0@{BETA0}!neural++.hpp@{neural++.hpp}} -\subsubsection[BETA0]{\setlength{\rightskip}{0pt plus 5cm}\#define BETA0~1.0}\label{neural_09_09_8hpp_05e2bb5b9fc32f0b6b4d84fe43177d72} +\subsubsection[BETA0]{\setlength{\rightskip}{0pt plus 5cm}\#define BETA0~0.8}\label{neural_09_09_8hpp_05e2bb5b9fc32f0b6b4d84fe43177d72} -Initial value for the inertial momentum of the synapses. - \index{neural++.hpp@{neural++.hpp}!RAND@{RAND}} \index{RAND@{RAND}!neural++.hpp@{neural++.hpp}} -\subsubsection[RAND]{\setlength{\rightskip}{0pt plus 5cm}\#define RAND~(double) ( (rand() / (RAND\_\-MAX/2)) - 1)}\label{neural_09_09_8hpp_839a9222721835f53c5b248241f535f4} +\subsubsection[RAND]{\setlength{\rightskip}{0pt plus 5cm}\#define RAND~(double) ( (rand() / 10.0) / ((double) RAND\_\-MAX) )}\label{neural_09_09_8hpp_839a9222721835f53c5b248241f535f4} -Default rand value: $|$sin(rand)$|$, always $>$= 0 and $<$= 1. 
- diff --git a/doc/latex/neural_09_09__exception_8hpp.tex b/doc/latex/neural_09_09__exception_8hpp.tex index ca36c7c..ba29a28 100644 --- a/doc/latex/neural_09_09__exception_8hpp.tex +++ b/doc/latex/neural_09_09__exception_8hpp.tex @@ -1,5 +1,7 @@ \section{neural++\_\-exception.hpp File Reference} \label{neural_09_09__exception_8hpp}\index{neural++\_\-exception.hpp@{neural++\_\-exception.hpp}} +{\tt \#include $<$cstdio$>$}\par +{\tt \#include $<$cstring$>$}\par {\tt \#include $<$exception$>$}\par \subsection*{Namespaces} \begin{CompactItemize} diff --git a/doc/latex/refman.tex b/doc/latex/refman.tex index 43ad1c9..0e8e641 100644 --- a/doc/latex/refman.tex +++ b/doc/latex/refman.tex @@ -20,7 +20,7 @@ \vspace*{1cm} {\large Generated by Doxygen 1.5.6}\\ \vspace*{0.5cm} -{\small Sun Aug 16 20:53:42 2009}\\ +{\small Fri Sep 4 11:25:49 2009}\\ \end{center} \end{titlepage} \clearemptydoublepage @@ -36,6 +36,7 @@ \input{files} \chapter{Namespace Documentation} \input{namespaceneuralpp} +\include{namespaceneuralpp_1_1neuralutils} \chapter{Class Documentation} \input{classCMarkup} \include{structCMarkup_1_1ConvertEncoding} @@ -66,8 +67,9 @@ \include{neural_09_09_8hpp} \include{neural_09_09__exception_8hpp} \chapter{Example Documentation} -\input{examples_2adderFromScratch_8cpp-example} +\input{examples_2adderFromString_8cpp-example} \include{examples_2doAdd_8cpp-example} \include{examples_2learnAdd_8cpp-example} +\include{examples_2networkForSumsAndSubtractions_8cpp-example} \printindex \end{document} diff --git a/examples/Makefile b/examples/Makefile index 45d7a1a..75e14a6 100644 --- a/examples/Makefile +++ b/examples/Makefile @@ -1,10 +1,11 @@ all: g++ -Wall -o learnAdd learnAdd.cpp -lneural++ g++ -Wall -o doAdd doAdd.cpp -lneural++ - g++ -Wall -o adderFromScratch adderFromScratch.cpp -lneural++ - g++ -Wall -o Add Add.cpp -lneural++ + g++ -Wall -o networkForSumsAndSubtractions networkForSumsAndSubtractions.cpp -lneural++ + g++ -Wall -o adderFromString adderFromString.cpp -lneural++ clean: rm learnAdd rm doAdd - rm adderFromScratch + rm networkForSumsAndSubtractions + rm adderFromString diff --git a/examples/README b/examples/README deleted file mode 100644 index 74a07a1..0000000 --- a/examples/README +++ /dev/null @@ -1,19 +0,0 @@ -* @example examples/learnAdd.cpp Show how to train a network that performs sums between -* two real numbers. The training XML is built from scratch, then saved to a file, then -* the network is initialized using that XML file, trained, and the resulting trained -* network is saved to adder.net. Then, you should take a look at doAdd.cpp to see how -* to use that file to use the network. - -* @example examples/doAdd.cpp Show how to use a network already trained and saved to a -* binary file. In this case, a network trained to simply perform sums between two real -* numbers, that should have already been created using learnAdd. - -* @example examples/adderFromScratch.cpp Similar to learnAdd.cpp, but this time the -* training XML is generated as a string and not saved to a file, and parsed by the -* program itself to build the network. Then, the program asks two real numbers, and -* performs both the sum and the difference between them, putting the sum's output on -* the first output neuron and the difference's on the second output neuron. Anyway, -* using more than one neuron in the output layer is strongly discouraged, as the network -* usually won't set correctly the synaptical weights to give satisfying and accurate -* answers for all of the operations. 
- diff --git a/examples/adder.xml b/examples/adder.xml new file mode 100644 index 0000000..a9e6d20 --- /dev/null +++ b/examples/adder.xml @@ -0,0 +1,91 @@ + + + + + + + 2 + 3 + 5 + + + + 3 + 2 + 5 + + + + 6 + 2 + 8 + + + + 2 + 2 + 4 + + + + 1 + 2 + 3 + + + + -1 + -2 + -3 + + + + 8 + 9 + 17 + + + + 10 + 10 + 20 + + + + 4 + 1 + 5 + + + + 2 + 6 + 8 + + + + 2 + 7 + 9 + + + + 8 + 9 + 17 + + + + 4 + 7 + 11 + + + + 5 + 2 + 7 + + + + diff --git a/examples/adderFromScratch.cpp b/examples/adderFromString.cpp similarity index 82% rename from examples/adderFromScratch.cpp rename to examples/adderFromString.cpp index 648cfe3..f3b0020 100644 --- a/examples/adderFromScratch.cpp +++ b/examples/adderFromString.cpp @@ -18,7 +18,7 @@ using namespace std; using namespace neuralpp; int main() { - NeuralNet net(2, 2, 2, 0.005, 100); + NeuralNet net(2, 2, 1, 0.002, 1000); string xml; double tmp; int id = 0; @@ -26,9 +26,10 @@ int main() { // XML initialization. Then, I say XML that 2+3=5, 3+3=6, 5+4=9 // Strings' format is "input1,input2,...,inputn;output1,output2,...,outputm NeuralNet::initXML(xml); - xml += NeuralNet::XMLFromSet(id, "3,2;5,1"); - xml += NeuralNet::XMLFromSet(id, "4,2;6,2"); - xml += NeuralNet::XMLFromSet(id, "6,3;9,3"); + xml += NeuralNet::XMLFromSet(id, "3,2;5"); + xml += NeuralNet::XMLFromSet(id, "6,3;9"); + xml += NeuralNet::XMLFromSet(id, "2,3;5"); + xml += NeuralNet::XMLFromSet(id, "4,4;8"); NeuralNet::closeXML(xml); cout << xml << endl; @@ -46,7 +47,7 @@ int main() { net.setInput(v); net.propagate(); - cout << "Output: " << net.getOutputs()[0] << "; " << net.getOutputs()[1] << endl; + cout << "Output: " << net.getOutput() << endl; return 0; } diff --git a/examples/learnAdd.cpp b/examples/learnAdd.cpp index 911eb59..93e8572 100644 --- a/examples/learnAdd.cpp +++ b/examples/learnAdd.cpp @@ -49,6 +49,8 @@ int main() { xml += NeuralNet::XMLFromSet(id, "2,6;8"); xml += NeuralNet::XMLFromSet(id, "2,7;9"); xml += NeuralNet::XMLFromSet(id, "8,9;17"); + xml += NeuralNet::XMLFromSet(id, "4,7;11"); + xml += NeuralNet::XMLFromSet(id, "5,2;7"); NeuralNet::closeXML(xml); // Save the XML string just created to a file diff --git a/examples/network.xml b/examples/network.xml new file mode 100644 index 0000000..75278ae --- /dev/null +++ b/examples/network.xml @@ -0,0 +1,16 @@ + + + + + + + + + + + + + + + + diff --git a/examples/networkForSumsAndSubtractions.cpp b/examples/networkForSumsAndSubtractions.cpp new file mode 100644 index 0000000..78a7cee --- /dev/null +++ b/examples/networkForSumsAndSubtractions.cpp @@ -0,0 +1,48 @@ +/** + * This program creates a neural network from scratch. Its purpose is to get + * two numbers and learn to compute their sum and difference (so the network + * provides two output values). The training set is auto-generated to an XML + * string, and then the network is trained. + * + * by BlackLight, 2009 + */ + +#include +#include + +using namespace std; +using namespace neuralpp; + +int main() { + NeuralNet net(2, 2, 2, 0.002, 1000); + string xml; + double tmp; + int id = 0; + + // XML initialization. 
Then, I say XML that 3+2=5, 3-2=1; 4+2=6, 4-2=2; 6+3=9, 6-3=3 + // Strings' format is "input1,input2,...,inputn;output1,output2,...,outputm + NeuralNet::initXML(xml); + xml += NeuralNet::XMLFromSet(id, "3,2;5,1"); + xml += NeuralNet::XMLFromSet(id, "4,2;6,2"); + xml += NeuralNet::XMLFromSet(id, "6,3;9,3"); + NeuralNet::closeXML(xml); + cout << xml << endl; + + net.train(xml, NeuralNet::str); + vector v; + cout << "Network status: trained\n\n"; + + cout << "First number: "; + cin >> tmp; + v.push_back(tmp); + + cout << "Second number: "; + cin >> tmp; + v.push_back(tmp); + + net.setInput(v); + net.propagate(); + cout << "Output: " << net.getOutputs()[0] << "; " << net.getOutputs()[1] << endl; + return 0; +} + diff --git a/include/neural++.hpp b/include/neural++.hpp index 8a05518..9f9f0c3 100644 --- a/include/neural++.hpp +++ b/include/neural++.hpp @@ -39,6 +39,7 @@ namespace neuralpp { /** * @class NeuralNet * @brief Main project's class. Use *ONLY* this class, unless you know what you're doing + * * @example examples/learnAdd.cpp Show how to train a network that performs sums between * two real numbers. The training XML is built from scratch, then saved to a file, then * the network is initialized using that XML file, trained, and the resulting trained @@ -49,7 +50,7 @@ namespace neuralpp { * binary file. In this case, a network trained to simply perform sums between two real * numbers, that should have already been created using learnAdd. * - * @example examples/adderFromScratch.cpp Similar to learnAdd.cpp, but this time the + * @example examples/adderFromString.cpp Similar to learnAdd.cpp, but this time the * training XML is generated as a string and not saved to a file, and parsed by the * program itself to build the network. Then, the program asks two real numbers, and * performs both the sum and the difference between them, putting the sum's output on @@ -57,6 +58,11 @@ namespace neuralpp { * using more than one neuron in the output layer is strongly discouraged, as the network * usually won't set correctly the synaptical weights to give satisfying and accurate * answers for all of the operations. + * + * @example examples/networkForSumsAndSubtractions.cpp This program creates a neural + * network from scratch. Its purpose is to get two numbers and learn to compute their + * sum and difference (so the network provides two output values). The training set is + * auto-generated to an XML string, and then the network is trained. 
*/ class NeuralNet { int epochs; @@ -122,14 +128,6 @@ namespace neuralpp { */ void link(); - /** - * @brief Splits a string into a vector of doubles, given a delimitator - * @param delim Delimitator - * @param str String to be splitted - * @return Vector of doubles containing splitted values - */ - static std::vector split (char delim, std::string str); - public: Layer* input; Layer* hidden; @@ -552,6 +550,35 @@ namespace neuralpp { double w; double d; }; + + namespace neuralutils { + /** + * @brief Split a string into a vector of doubles, given a delimitator + * @param delim Delimitator + * @param str String to be splitted + * @return Vector of doubles containing splitted value + */ + std::vector split (char delim, std::string str); + + /** + * @brief Split the lines of a string + * @param str String to be splitted + * @return An array of strings containing the lines of the original string + */ + std::vector splitLines (std::string str); + + /** + * @brief Convert the characters of a string to lower case + * @param str String to be converted + */ + void toLower (std::string& str); + + /** + * @brief Convert the characters of a string to upper case + * @param str String to be converted + */ + void toUpper (std::string& str); + } } #endif diff --git a/include/neural++_exception.hpp b/include/neural++_exception.hpp index 65f5834..13722fb 100644 --- a/include/neural++_exception.hpp +++ b/include/neural++_exception.hpp @@ -14,6 +14,8 @@ #ifndef __NEURALPP_EXCEPTION #define __NEURALPP_EXCEPTION +#include +#include #include namespace neuralpp { @@ -43,9 +45,15 @@ namespace neuralpp { * @brief Exception thrown when trying parsing an invalid XML */ class InvalidXMLException : public std::exception { + char *error; + public: - InvalidXMLException() {} - const char* what() const throw() { return "Attempt to load an invalid XML file"; } + InvalidXMLException(const char *err = " ") { + error = new char[strlen(err)+40]; + sprintf (error, "Attempt to load an invalid XML file: %s", err); + } + + const char* what() const throw() { return error; } }; /** diff --git a/src/neuralnet.cpp b/src/neuralnet.cpp index 7a434d8..5d0cd20 100644 --- a/src/neuralnet.cpp +++ b/src/neuralnet.cpp @@ -254,16 +254,16 @@ namespace neuralpp { xml.Load(fname.c_str()); if (!xml.IsWellFormed()) { - throw InvalidXMLException(); + throw InvalidXMLException("Malformed XML"); return; } if (xml.FindElem("network")) { if (xml.GetAttrib("epochs").empty()) - throw InvalidXMLException(); + throw InvalidXMLException("'epochs' parameter not defined"); if (xml.GetAttrib("learning_rate").empty()) - throw InvalidXMLException(); + throw InvalidXMLException("'learning_rate' parameter not defined"); epochs = atoi(xml.GetAttrib("epochs").c_str()); l_rate = atof(xml.GetAttrib("learning_rate").c_str()); @@ -274,10 +274,10 @@ namespace neuralpp { while (xml.FindChildElem("layer")) { if (xml.GetChildAttrib("class").empty()) - throw InvalidXMLException(); + throw InvalidXMLException("'layer' tag with no class specified"); if (xml.GetChildAttrib("size").empty()) - throw InvalidXMLException(); + throw InvalidXMLException("'layer' tag without size specification"); if (!xml.GetChildAttrib("class").compare("input")) in_size = atoi(xml.GetChildAttrib("size").c_str()); @@ -286,7 +286,7 @@ namespace neuralpp { else if (!xml.GetChildAttrib("class").compare("output")) out_size = atoi(xml.GetChildAttrib("size").c_str()); else - throw InvalidXMLException(); + throw InvalidXMLException("Invalid attribute inside 'layer' tag"); } if (in_size && hid_size && out_size) 
{ @@ -299,37 +299,38 @@ namespace neuralpp { for (unsigned int i=0; i < hid_size; i++) hid_out_synapses[i] = vector(out_size); - } + } else + throw InvalidXMLException ("In your XML all the specifications about input, hidden and output layers should be present"); while (xml.FindChildElem("synapsis")) { if (!(in_size && hid_size && out_size)) - throw InvalidXMLException(); + throw InvalidXMLException("In your XML all the specifications about input, hidden and output layers should be present before defining a synapsis"); if (xml.GetChildAttrib("class").empty()) - throw InvalidXMLException(); + throw InvalidXMLException("'synapsis' tag with no class specified"); if (xml.GetChildAttrib("input").empty()) - throw InvalidXMLException(); + throw InvalidXMLException("'synapsis' tag with no input neuron specified"); if (xml.GetChildAttrib("output").empty()) - throw InvalidXMLException(); + throw InvalidXMLException("'synapsis' tag with no output neuron specified"); if (xml.GetChildAttrib("weight").empty()) - throw InvalidXMLException(); + throw InvalidXMLException("'synapsis' tag with no weight specified"); unsigned int in = atoi(xml.GetChildAttrib("input").c_str()); unsigned int out = atoi(xml.GetChildAttrib("output").c_str()); if (xml.GetChildAttrib("class") == "inhid") { if (in >= in_size || out >= hid_size) - throw InvalidXMLException(); + throw InvalidXMLException("The id of the input or output neuron is greater than the size of the layer"); in_hid_synapses[in][out] = atof(xml.GetChildAttrib("weight").c_str()); } if (xml.GetChildAttrib("class") == "hidout") { if (in >= hid_size || out >= out_size) - throw InvalidXMLException(); + throw InvalidXMLException("The id of the input or output neuron is greater than the size of the layer"); hid_out_synapses[in][out] = atof(xml.GetChildAttrib("weight").c_str()); } @@ -619,18 +620,16 @@ namespace neuralpp { else xml.SetDoc(xmlsrc.c_str()); - if (!xml.IsWellFormed()) { - throw InvalidXMLException(); - return; - } + if (!xml.IsWellFormed()) + throw InvalidXMLException("Malformed XML"); - if (xml.FindElem("NETWORK")) { - while (xml.FindChildElem("TRAINING")) { + if (xml.FindElem("network")) { + while (xml.FindChildElem("training")) { vector input; vector output; xml.IntoElem(); - while (xml.FindChildElem("INPUT")) { + while (xml.FindChildElem("input")) { xml.IntoElem(); input.push_back(atof( xml.GetData().c_str())); @@ -638,7 +637,7 @@ namespace neuralpp { xml.OutOfElem(); } - while (xml.FindChildElem("OUTPUT")) { + while (xml.FindChildElem("output")) { xml.IntoElem(); output.push_back( atof(xml.GetData().c_str()) ); xml.OutOfElem(); @@ -650,7 +649,8 @@ namespace neuralpp { setExpected(output); update(); } - } + } else + throw InvalidXMLException("No 'network' tag specified"); } void NeuralNet::initXML(string& xml) { @@ -658,24 +658,7 @@ namespace neuralpp { ("\n" "\n" "\n\n" - "\n"); - } - - vector NeuralNet::split(char delim, string str) { - char tmp[1024]; - vector v; - memset(tmp, 0x0, sizeof(tmp)); - - for (unsigned int i = 0, j = 0; i <= str.length(); i++) { - if (str[i] == delim || i == str.length()) { - v.push_back(atof(tmp)); - memset(tmp, 0x0, sizeof(tmp)); - j = 0; - } else - tmp[j++] = str[i]; - } - - return v; + "\n"); } string NeuralNet::XMLFromSet (int& id, string set) { @@ -696,38 +679,85 @@ namespace neuralpp { string inStr = set.substr(0, delimPos); string outStr = set.substr(delimPos + 1, set.length()); - in = split(',', inStr); - out = split(',', outStr); + in = neuralutils::split(',', inStr); + out = neuralutils::split(',', outStr); ss 
<< (id++); - xml += "\t\n"; + xml += "\t\n"; for (unsigned int i = 0; i < in.size(); i++, id++) { ss.str(string()); ss << id; - xml += "\t\t"; + xml += "\t\t"; ss.str(string()); ss << in[i]; - xml += ss.str() + "\n"; + xml += ss.str() + "\n"; } for (unsigned int i = 0; i < out.size(); i++, id++) { ss.str(string()); ss << id; - xml += "\t\t"; + xml += "\t\t"; ss.str(string()); ss << out[i]; - xml += ss.str() + "\n"; + xml += ss.str() + "\n"; } - xml += "\t\n\n"; + xml += "\t\n\n"; return xml; } void NeuralNet::closeXML(string & xml) { - xml.append("\n\n"); + xml.append("\n\n"); + } + + vector neuralutils::split(char delim, string str) { + char tmp[1024]; + vector v; + memset(tmp, 0x0, sizeof(tmp)); + + for (unsigned int i = 0, j = 0; i <= str.length(); i++) { + if (str[i] == delim || i == str.length()) { + v.push_back(atof(tmp)); + memset(tmp, 0x0, sizeof(tmp)); + j = 0; + } else + tmp[j++] = str[i]; + } + + return v; + } + + vector neuralutils::splitLines (string str) { + vector v; + string buf = ""; + + for (unsigned int i=0; i < str.size(); i++) { + buf += str[i]; + + if (str[i] == '\n') { + v.push_back(str); + buf = ""; + } + } + + return v; + } + + void neuralutils::toLower (string& str) { + for (unsigned int i=0; i < str.size(); i++) { + if (str[i] >= 'A' && str[i] <= 'Z') + str[i] = (str[i] - 'A') + 'a'; + } + } + + void neuralutils::toUpper (string& str) { + for (unsigned int i=0; i < str.size(); i++) { + if (str[i] >= 'a' && str[i] <= 'z') + str[i] = (str[i] - 'a') + 'A'; + } } }
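
The neuralutils::splitLines() definition added to src/neuralnet.cpp above pushes the whole input string on every newline (v.push_back(str)) instead of the line collected in buf, and it silently drops a final line that has no trailing '\n'. A minimal corrected sketch, keeping the declaration from include/neural++.hpp and the original behaviour of leaving the '\n' at the end of each returned line:

#include <string>
#include <vector>

namespace neuralpp {
    namespace neuralutils {
        std::vector<std::string> splitLines (std::string str) {
            std::vector<std::string> v;
            std::string buf = "";

            for (unsigned int i = 0; i < str.size(); i++) {
                buf += str[i];

                if (str[i] == '\n') {
                    v.push_back(buf);   // push the accumulated line, not the whole string
                    buf = "";
                }
            }

            if (!buf.empty())
                v.push_back(buf);       // keep a last line that has no trailing '\n'

            return v;
        }
    }
}

The toLower()/toUpper() loops above are equivalent, for ASCII input, to applying std::tolower()/std::toupper() from <cctype> to each character; either approach works.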
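
The new InvalidXMLException constructor in include/neural++_exception.hpp builds its message with new char[] and sprintf() but never delete[]s the buffer, so every thrown or copied exception leaks it. A sketch of an alternative that keeps the same constructor signature and throw() specifications but stores the message in a std::string, removing the need for manual memory management (a suggested variant, not what the library currently ships):

#include <exception>
#include <string>

namespace neuralpp {
    class InvalidXMLException : public std::exception {
        std::string error;

    public:
        InvalidXMLException(const char *err = " ")
            : error(std::string("Attempt to load an invalid XML file: ") + err) {}

        // std::exception's destructor is declared throw(), so this one must be too
        virtual ~InvalidXMLException() throw() {}

        // the returned pointer stays valid as long as the exception object is alive
        const char* what() const throw() { return error.c_str(); }
    };
}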
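
Since NeuralNet::train() now throws InvalidXMLException with a description of the missing tag or attribute, callers can report what was wrong with the training XML instead of just failing. A short usage sketch, assuming the public header is installed as <neural++.hpp> and reusing the NeuralNet(2, 2, 1, 0.002, 1000) constructor and NeuralNet::str source selector from the examples above:

#include <iostream>
#include <neural++.hpp>

using namespace std;
using namespace neuralpp;

int main() {
    NeuralNet net(2, 2, 1, 0.002, 1000);
    string xml;
    int id = 0;

    // Build a deliberately minimal training set: 3+2=5
    NeuralNet::initXML(xml);
    xml += NeuralNet::XMLFromSet(id, "3,2;5");
    NeuralNet::closeXML(xml);

    try {
        net.train(xml, NeuralNet::str);
    } catch (InvalidXMLException& e) {
        // e.what() now says which tag or attribute was missing or malformed
        cerr << "Training failed: " << e.what() << endl;
        return 1;
    }

    cout << "Network status: trained" << endl;
    return 0;
}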