\section { neuralpp::NeuralNet Class Reference}
\label { classneuralpp_ 1_ 1NeuralNet} \index { neuralpp::NeuralNet@{ neuralpp::NeuralNet} }
The project's main class.
{ \tt \# include $ < $ neural++.hpp$ > $ }
\subsection * { Public Types}
\begin { CompactItemize}
\item
enum { \bf source} \{ { \bf file} ,
{ \bf str}
\}
\begin { CompactList} \small \item \em Enum to choose the training source for the network (XML from a file or from a string). \item \end { CompactList} \end { CompactItemize}
\subsection * { Public Member Functions}
\begin { CompactItemize}
\item
{ \bf NeuralNet} ()
\begin { CompactList} \small \item \em Empty constructor for the class; it does nothing. \item \end { CompactList} \item
{ \bf NeuralNet} (size\_ \- t in\_ \- size, size\_ \- t hidden\_ \- size, size\_ \- t out\_ \- size, double l, int e, double th=0.0, double($ \ast $ a)(double)=\_ \- \_ \- actv)
\begin { CompactList} \small \item \em Constructor. \item \end { CompactList} \item
{ \bf NeuralNet} (const std::string file) throw (NetworkFileNotFoundException)
\begin { CompactList} \small \item \em Constructor. \item \end { CompactList} \item
double { \bf getOutput} () const
\begin { CompactList} \small \item \em It gets the output of the network (note: the output layer should contain only one neuron). \item \end { CompactList} \item
double { \bf getThreshold} () const
\begin { CompactList} \small \item \em Get the threshold of the neurons in the network. \item \end { CompactList} \item
std::vector$ < $ double $ > $ { \bf getOutputs} ()
\begin { CompactList} \small \item \em It gets the output of the network in case the output layer contains more than one neuron. \item \end { CompactList} \item
double { \bf expected} () const
\begin { CompactList} \small \item \em Get the expected value (in case you have only one neuron in the output layer). \item \end { CompactList} \item
std::vector$ < $ double $ > $ { \bf getExpected} () const
\begin { CompactList} \small \item \em Get the expected output values (in case the output layer contains more than one neuron). \item \end { CompactList} \item
void { \bf setExpected} (double ex)
\begin { CompactList} \small \item \em It sets the value you expect from your network (in case the network has only one neuron in its output layer). \item \end { CompactList} \item
void { \bf setExpected} (std::vector$ < $ double $ > $ ex)
\begin { CompactList} \small \item \em Set the values you expect from your network. \item \end { CompactList} \item
void { \bf update} ()
\begin { CompactList} \small \item \em It updates the weights of the net's synapses through back-propagation and recomputes the output value { \em epochs\/ } times, calling the updateWeights and commitChanges functions. \item \end { CompactList} \item
void { \bf propagate} ()
\begin { CompactList} \small \item \em It propagates values through the network. \item \end { CompactList} \item
void { \bf setInput} (std::vector$ < $ double $ > $ v)
\begin { CompactList} \small \item \em It sets the input for the network. \item \end { CompactList} \item
void { \bf link} ()
\begin { CompactList} \small \item \em It links the layers of the network (input, hidden, output). \item \end { CompactList} \item
void { \bf save} (const char $ \ast $ fname) throw (NetworkFileWriteException)
\begin { CompactList} \small \item \em Save a trained neural network to a binary file. \item \end { CompactList} \item
void { \bf train} (std::string xml, { \bf source} src) throw (InvalidXMLException)
\begin { CompactList} \small \item \em Train a network using a training set loaded from an XML file. \item \end { CompactList} \end { CompactItemize}
\subsection * { Static Public Member Functions}
\begin { CompactItemize}
\item
static void { \bf initXML} (std::string \& xml)
\begin { CompactList} \small \item \em Initialize the training XML for the neural network. \item \end { CompactList} \item
static std::vector$ < $ double $ > $ { \bf split} (char delim, std::string str)
\begin { CompactList} \small \item \em Splits a string into a vector of doubles, given a delimiter. \item \end { CompactList} \item
static std::string { \bf XMLFromSet} (int \& id, std::string set)
\begin { CompactList} \small \item \em Gets a training set from a string and copies it to an XML string. For example, these strings could be training sets for making sums: \char `\" { } 2,3;5\char `\" { } - \char `\" { } 5,6;11\char `\" { } - \char `\" { } 2,2;4\char `\" { } - \char `\" { } 4,5;9\char `\" { } . Called on the first string, this method returns an XML fragment like this: '$ < $ training id=\char `\" { } 0\char `\" { } $ > $ $ < $ input id=\char `\" { } 0\char `\" { } $ > $ 2$ < $ /input$ > $ $ < $ input id=\char `\" { } 1\char `\" { } $ > $ 3$ < $ /input$ > $ $ < $ output id=\char `\" { } 0\char `\" { } $ > $ 5$ < $ /output$ > $ $ < $ /training$ > $ '. \item \end { CompactList} \item
static void { \bf closeXML} (std::string \& xml)
\begin { CompactList} \small \item \em Closes an open XML document generated by \char `\" { } initXML\char `\" { } and \char `\" { } XMLFromSet\char `\" { } . \item \end { CompactList} \end { CompactItemize}
\subsection * { Public Attributes}
\begin { CompactItemize}
\item
{ \bf Layer} $ \ast $ { \bf input}
\item
{ \bf Layer} $ \ast $ { \bf hidden}
\item
{ \bf Layer} $ \ast $ { \bf output}
\end { CompactItemize}
\subsection * { Private Member Functions}
\begin { CompactItemize}
\item
void { \bf updateWeights} ()
\begin { CompactList} \small \item \em It updates the weights of the net's synapses through back-propagation. \item \end { CompactList} \item
void { \bf commitChanges} ({ \bf Layer} \& l)
\begin { CompactList} \small \item \em It commits the changes made by \doxyref { updateWeights()} { p.} { classneuralpp_ 1_ 1NeuralNet_ 94169c89a7cd47122ab5dbf1d5c5e108} to the layer l. \item \end { CompactList} \item
double { \bf error} (double ex)
\begin { CompactList} \small \item \em Get the error on the expected result as a squared deviation. \item \end { CompactList} \end { CompactItemize}
\subsection * { Private Attributes}
\begin { CompactItemize}
\item
int { \bf epochs}
\item
int { \bf ref\_ \- epochs}
\item
double { \bf l\_ \- rate}
\item
double { \bf threshold}
\item
std::vector$ < $ double $ > $ { \bf expect}
\item
double($ \ast $ { \bf actv\_ \- f} )(double)
\begin { CompactList} \small \item \em Private function pointer holding the function to be used as the activation function. \item \end { CompactList} \end { CompactItemize}
\subsection { Detailed Description}
The project's main class.
Use $ \ast $ ONLY$ \ast $ this class, unless you know what you're doing. \begin { Desc}
\item [Examples: ] \par
{ \bf examples/adderFromScratch.cpp} , { \bf examples/doAdd.cpp} , and { \bf examples/learnAdd.cpp} .\end { Desc}
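A minimal usage sketch (the network geometry, the learning parameters and the input values below are placeholders, not taken from the examples above):

\begin{verbatim}
#include <neural++.hpp>
#include <iostream>
#include <vector>

using namespace neuralpp;

int main() {
        // Placeholder geometry: 2 inputs, 2 hidden neurons, 1 output,
        // learning rate 0.005, 1000 epochs.
        NeuralNet net(2, 2, 1, 0.005, 1000);

        std::vector<double> in;
        in.push_back(2.0);
        in.push_back(3.0);

        net.setInput(in);       // feed the values to the input layer
        net.propagate();        // propagate them through the network
        std::cout << net.getOutput() << std::endl;
        return 0;
}
\end{verbatim}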
\subsection { Member Enumeration Documentation}
\index { neuralpp::NeuralNet@{ neuralpp::NeuralNet} !source@{ source} }
\index { source@{ source} !neuralpp::NeuralNet@{ neuralpp::NeuralNet} }
\subsubsection [source] { \setlength { \rightskip } { 0pt plus 5cm} enum { \bf neuralpp::NeuralNet::source} } \label { classneuralpp_ 1_ 1NeuralNet_ 94c36c94060e785ea67a0014c4182f8f}
Enum to choose the training source for the network (XML from a file or from a string).
\begin { Desc}
\item [Enumerator: ] \par
\begin { description}
\index { file@{ file} !neuralpp::NeuralNet@{ neuralpp::NeuralNet} } \index { neuralpp::NeuralNet@{ neuralpp::NeuralNet} !file@{ file} } \item [{ \em
file\label { classneuralpp_ 1_ 1NeuralNet_ 94c36c94060e785ea67a0014c4182f8f5ec2727c0756ddb097b53efe49b81afb}
} ]\index { str@{ str} !neuralpp::NeuralNet@{ neuralpp::NeuralNet} } \index { neuralpp::NeuralNet@{ neuralpp::NeuralNet} !str@{ str} } \item [{ \em
str\label { classneuralpp_ 1_ 1NeuralNet_ 94c36c94060e785ea67a0014c4182f8f6d06b4fe9414a158c97aee1a3679a904}
} ]\end { description}
\end { Desc}
\subsection { Constructor \& Destructor Documentation}
\index { neuralpp::NeuralNet@{ neuralpp::NeuralNet} !NeuralNet@{ NeuralNet} }
\index { NeuralNet@{ NeuralNet} !neuralpp::NeuralNet@{ neuralpp::NeuralNet} }
\subsubsection [NeuralNet] { \setlength { \rightskip } { 0pt plus 5cm} neuralpp::NeuralNet::NeuralNet ()\hspace { 0.3cm} { \tt [inline]} } \label { classneuralpp_ 1_ 1NeuralNet_ 92b145f2f6f00bf1ba645ce2235882c2}
Empty constructor for the class; it does nothing.
\index { neuralpp::NeuralNet@{ neuralpp::NeuralNet} !NeuralNet@{ NeuralNet} }
\index { NeuralNet@{ NeuralNet} !neuralpp::NeuralNet@{ neuralpp::NeuralNet} }
\subsubsection [NeuralNet] { \setlength { \rightskip } { 0pt plus 5cm} neuralpp::NeuralNet::NeuralNet (size\_ \- t { \em in\_ \- size} , \/ size\_ \- t { \em hidden\_ \- size} , \/ size\_ \- t { \em out\_ \- size} , \/ double { \em l} , \/ int { \em e} , \/ double { \em th} = { \tt 0.0} , \/ double($ \ast $ )(double) { \em a} = { \tt \_ \- \_ \- actv} )} \label { classneuralpp_ 1_ 1NeuralNet_ 3d602f3988a9a3e2c77dc6955674f412}
Constructor.
\begin { Desc}
\item [Parameters:]
\begin { description}
\item [{\em in\_\-size}] Size of the input layer \item [{\em hidden\_\-size}] Size of the hidden layer \item [{\em out\_\-size}] Size of the output layer \item [{\em l}] Learning rate (tune it experimentally; keeping it quite low generally gives more accurate results) \item [{\em e}] Epochs (cycles) to execute (the more epochs you execute, the more accurate the network can become) \item [{\em th}] Threshold, a value in [0,1] that establishes how sensitive a neuron is to variations of the input values \item [{\em a}] Activation function to use (default: f(x)=x) \end { description}
\end { Desc}
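For instance, a network with a custom activation function could be built as follows (a sketch; {\tt sigmoid} is a hypothetical user-supplied function and the numeric values are placeholders):

\begin{verbatim}
#include <neural++.hpp>
#include <cmath>

// Hypothetical activation function supplied by the user.
static double sigmoid(double x) {
        return 1.0 / (1.0 + std::exp(-x));
}

int main() {
        // 2 input, 2 hidden and 1 output neurons; learning rate 0.005,
        // 1000 epochs, threshold 0.0, sigmoid as activation function.
        neuralpp::NeuralNet net(2, 2, 1, 0.005, 1000, 0.0, sigmoid);
        return 0;
}
\end{verbatim}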
\index { neuralpp::NeuralNet@{ neuralpp::NeuralNet} !NeuralNet@{ NeuralNet} }
\index { NeuralNet@{ NeuralNet} !neuralpp::NeuralNet@{ neuralpp::NeuralNet} }
\subsubsection [NeuralNet] { \setlength { \rightskip } { 0pt plus 5cm} neuralpp::NeuralNet::NeuralNet (const std::string { \em file} ) throw ({ \bf NetworkFileNotFoundException} )} \label { classneuralpp_ 1_ 1NeuralNet_ b4bfa407d28bb17abf7f735a049987d9}
Constructor.
\begin { Desc}
\item [Parameters:]
\begin { description}
\item [{\em file}] Binary file containing a neural network previously saved by the \doxyref { save()} { p.} { classneuralpp_ 1_ 1NeuralNet_ fdf94c276720c25e565cac834fe8a407} method \end { description}
\end { Desc}
\begin { Desc}
\item [Exceptions:]
\begin { description}
\item [{\em \doxyref{NetworkFileNotFoundException}{p.}{classneuralpp_1_1NetworkFileNotFoundException}}] \end { description}
\end { Desc}
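A sketch of loading a previously saved network ({\tt adder.net} is a placeholder file name):

\begin{verbatim}
#include <neural++.hpp>
#include <iostream>

int main() {
        try {
                // Load a network previously stored with save().
                neuralpp::NeuralNet net("adder.net");
        } catch (neuralpp::NetworkFileNotFoundException&) {
                std::cerr << "Could not load the network file" << std::endl;
                return 1;
        }
        return 0;
}
\end{verbatim}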
\subsection { Member Function Documentation}
\index { neuralpp::NeuralNet@{ neuralpp::NeuralNet} !updateWeights@{ updateWeights} }
\index { updateWeights@{ updateWeights} !neuralpp::NeuralNet@{ neuralpp::NeuralNet} }
\subsubsection [updateWeights] { \setlength { \rightskip } { 0pt plus 5cm} void neuralpp::NeuralNet::updateWeights ()\hspace { 0.3cm} { \tt [private]} } \label { classneuralpp_ 1_ 1NeuralNet_ 94169c89a7cd47122ab5dbf1d5c5e108}
It updates the weights of the net's synapses through back-propagation.
For in-class use only. \index { neuralpp::NeuralNet@{ neuralpp::NeuralNet} !commitChanges@{ commitChanges} }
\index { commitChanges@{ commitChanges} !neuralpp::NeuralNet@{ neuralpp::NeuralNet} }
\subsubsection [commitChanges] { \setlength { \rightskip } { 0pt plus 5cm} void neuralpp::NeuralNet::commitChanges ({ \bf Layer} \& { \em l} )\hspace { 0.3cm} { \tt [private]} } \label { classneuralpp_ 1_ 1NeuralNet_ f697a8d9967ad8f03e5a16a42cd110c5}
It commits the changes made by \doxyref { updateWeights()} { p.} { classneuralpp_ 1_ 1NeuralNet_ 94169c89a7cd47122ab5dbf1d5c5e108} to the layer l.
For in-class use only. \begin { Desc}
\item [Parameters:]
\begin { description}
\item [{\em l}] \doxyref { Layer} { p.} { classneuralpp_ 1_ 1Layer} on which to commit the changes \end { description}
\end { Desc}
\index { neuralpp::NeuralNet@{ neuralpp::NeuralNet} !error@{ error} }
\index { error@{ error} !neuralpp::NeuralNet@{ neuralpp::NeuralNet} }
\subsubsection [error] { \setlength { \rightskip } { 0pt plus 5cm} double neuralpp::NeuralNet::error (double { \em ex} )\hspace { 0.3cm} { \tt [private]} } \label { classneuralpp_ 1_ 1NeuralNet_ 8a140d28e6dd4097470c7c138801ad01}
Get the error on the expected result as a squared deviation.
\begin { Desc}
\item [Parameters:]
\begin { description}
\item [{\em ex}] Expected value \end { description}
\end { Desc}
\begin { Desc}
\item [Returns:] Mean error \end { Desc}
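Assuming the usual half squared error convention of back-propagation (the exact normalization is not specified here), the computed quantity is along the lines of
\[
E(ex) = \frac{1}{2}\,(ex - y)^2 ,
\]
where $y$ denotes the current output of the network.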
\index { neuralpp::NeuralNet@{ neuralpp::NeuralNet} !getOutput@{ getOutput} }
\index { getOutput@{ getOutput} !neuralpp::NeuralNet@{ neuralpp::NeuralNet} }
\subsubsection [getOutput] { \setlength { \rightskip } { 0pt plus 5cm} double neuralpp::NeuralNet::getOutput () const} \label { classneuralpp_ 1_ 1NeuralNet_ 961dce8913264bf64c899dce4e25f810}
It gets the output of the network (note: the output layer should contain only one neuron).
\begin { Desc}
\item [Returns:] The output value of the network \end { Desc}
\begin { Desc}
\item [Examples: ] \par
{ \bf examples/doAdd.cpp} .\end { Desc}
\index { neuralpp::NeuralNet@{ neuralpp::NeuralNet} !getThreshold@{ getThreshold} }
\index { getThreshold@{ getThreshold} !neuralpp::NeuralNet@{ neuralpp::NeuralNet} }
\subsubsection [getThreshold] { \setlength { \rightskip } { 0pt plus 5cm} double neuralpp::NeuralNet::getThreshold () const} \label { classneuralpp_ 1_ 1NeuralNet_ e08cdcf4b70f987700e553d9914f6179}
Get the threshold of the neurons in the network.
\begin { Desc}
\item [Returns:] The threshold of the neurons \end { Desc}
\index { neuralpp::NeuralNet@{ neuralpp::NeuralNet} !getOutputs@{ getOutputs} }
\index { getOutputs@{ getOutputs} !neuralpp::NeuralNet@{ neuralpp::NeuralNet} }
\subsubsection [getOutputs] { \setlength { \rightskip } { 0pt plus 5cm} std::vector$ < $ double$ > $ neuralpp::NeuralNet::getOutputs ()} \label { classneuralpp_ 1_ 1NeuralNet_ e6d2215ecc8b560db2f6797db642191c}
It gets the output of the network in case the output layer contains more than one neuron.
\begin { Desc}
\item [Returns:] A vector containing the output values of the network \end { Desc}
\begin { Desc}
\item [Examples: ] \par
{ \bf examples/adderFromScratch.cpp} .\end { Desc}
\index { neuralpp::NeuralNet@{ neuralpp::NeuralNet} !expected@{ expected} }
\index { expected@{ expected} !neuralpp::NeuralNet@{ neuralpp::NeuralNet} }
\subsubsection [expected] { \setlength { \rightskip } { 0pt plus 5cm} double neuralpp::NeuralNet::expected () const} \label { classneuralpp_ 1_ 1NeuralNet_ 562dfe9fb8d73bf25a23ce608451d3aa}
Get the expected value (in case you have only one neuron in the output layer).
Of course, you should have set this value beforehand by using setExpected. \begin { Desc}
\item [Returns:] The expected output value for a certain training phase \end { Desc}
\index { neuralpp::NeuralNet@{ neuralpp::NeuralNet} !getExpected@{ getExpected} }
\index { getExpected@{ getExpected} !neuralpp::NeuralNet@{ neuralpp::NeuralNet} }
\subsubsection [getExpected] { \setlength { \rightskip } { 0pt plus 5cm} std::vector$ < $ double$ > $ neuralpp::NeuralNet::getExpected () const} \label { classneuralpp_ 1_ 1NeuralNet_ 51a1851ed07b85bec091c9053ae99cf7}
Get the expected output values (in case the output layer contains more than one neuron).
Of course, you should have set these values beforehand by using setExpected. \begin { Desc}
\item [Returns:] The expected output values for a certain training phase \end { Desc}
\index { neuralpp::NeuralNet@{ neuralpp::NeuralNet} !setExpected@{ setExpected} }
\index { setExpected@{ setExpected} !neuralpp::NeuralNet@{ neuralpp::NeuralNet} }
\subsubsection [setExpected] { \setlength { \rightskip } { 0pt plus 5cm} void neuralpp::NeuralNet::setExpected (double { \em ex} )} \label { classneuralpp_ 1_ 1NeuralNet_ b6475762b7e9eab086befdc511f7c236}
It sets the value you expect from your network (in case the network has only one neuron in its output layer).
\begin { Desc}
\item [Parameters:]
\begin { description}
\item [{\em ex}] Expected output value \end { description}
\end { Desc}
\index { neuralpp::NeuralNet@{ neuralpp::NeuralNet} !setExpected@{ setExpected} }
\index { setExpected@{ setExpected} !neuralpp::NeuralNet@{ neuralpp::NeuralNet} }
\subsubsection [setExpected] { \setlength { \rightskip } { 0pt plus 5cm} void neuralpp::NeuralNet::setExpected (std::vector$ < $ double $ > $ { \em ex} )} \label { classneuralpp_ 1_ 1NeuralNet_ e649edc3d86bec7c0e178d5c892b4fd7}
Set the values you expect from your network.
\begin { Desc}
\item [Parameters:]
\begin { description}
\item [{\em ex}] Expected output values \end { description}
\end { Desc}
\index { neuralpp::NeuralNet@{ neuralpp::NeuralNet} !update@{ update} }
\index { update@{ update} !neuralpp::NeuralNet@{ neuralpp::NeuralNet} }
\subsubsection [update] { \setlength { \rightskip } { 0pt plus 5cm} void neuralpp::NeuralNet::update ()} \label { classneuralpp_ 1_ 1NeuralNet_ b0bd1daadb06980dff1f50d33a7c098e}
It updates the weights of the synapses through back-propagation and recomputes the output value { \em epochs\/ } times, calling the updateWeights and commitChanges functions.
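A rough sketch of a single hand-made training step (the pattern 2 + 3 = 5 and the network geometry are placeholders):

\begin{verbatim}
#include <neural++.hpp>
#include <vector>

using namespace neuralpp;

int main() {
        NeuralNet net(2, 2, 1, 0.005, 1000);   // placeholder geometry

        std::vector<double> in;
        in.push_back(2.0);
        in.push_back(3.0);

        net.setInput(in);        // input values of the pattern
        net.setExpected(5.0);    // output we expect for this pattern
        net.update();            // back-propagate for the configured epochs
        return 0;
}
\end{verbatim}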
\index { neuralpp::NeuralNet@{ neuralpp::NeuralNet} !propagate@{ propagate} }
\index { propagate@{ propagate} !neuralpp::NeuralNet@{ neuralpp::NeuralNet} }
\subsubsection [propagate] { \setlength { \rightskip } { 0pt plus 5cm} void neuralpp::NeuralNet::propagate ()} \label { classneuralpp_ 1_ 1NeuralNet_ c129c180647362da963758bfd1ba6890}
It propagates values through the network.
Use this when you want to feed an already trained network some new values and get the output. \begin { Desc}
\item [Examples: ] \par
{ \bf examples/adderFromScratch.cpp} , and { \bf examples/doAdd.cpp} .\end { Desc}
\index { neuralpp::NeuralNet@{ neuralpp::NeuralNet} !setInput@{ setInput} }
\index { setInput@{ setInput} !neuralpp::NeuralNet@{ neuralpp::NeuralNet} }
\subsubsection [setInput] { \setlength { \rightskip } { 0pt plus 5cm} void neuralpp::NeuralNet::setInput (std::vector$ < $ double $ > $ { \em v} )} \label { classneuralpp_ 1_ 1NeuralNet_ 405b32d2928344314ecf0469070b0f17}
It sets the input for the network.
\begin { Desc}
\item [Parameters:]
\begin { description}
\item [{\em v}] Vector of doubles, containing the values to give to your network \end { description}
\end { Desc}
\begin { Desc}
\item [Examples: ] \par
{ \bf examples/adderFromScratch.cpp} , and { \bf examples/doAdd.cpp} .\end { Desc}
\index { neuralpp::NeuralNet@{ neuralpp::NeuralNet} !link@{ link} }
\index { link@{ link} !neuralpp::NeuralNet@{ neuralpp::NeuralNet} }
\subsubsection [link] { \setlength { \rightskip } { 0pt plus 5cm} void neuralpp::NeuralNet::link ()} \label { classneuralpp_ 1_ 1NeuralNet_ 46f23f462318a4ffc037a4e806364c3f}
It links the layers of the network (input, hidden, output).
Don't use it unless you know exactly what you're doing; it is already called by the constructor. \index { neuralpp::NeuralNet@{ neuralpp::NeuralNet} !save@{ save} }
\index { save@{ save} !neuralpp::NeuralNet@{ neuralpp::NeuralNet} }
\subsubsection [save] { \setlength { \rightskip } { 0pt plus 5cm} void neuralpp::NeuralNet::save (const char $ \ast $ { \em fname} ) throw ({ \bf NetworkFileWriteException} )} \label { classneuralpp_ 1_ 1NeuralNet_ fdf94c276720c25e565cac834fe8a407}
Save a trained neural network to a binary file.
\begin { Desc}
\item [Parameters:]
\begin { description}
\item [{\em fname}] Binary file where you're going to save your network \end { description}
\end { Desc}
\begin { Desc}
\item [Exceptions:]
\begin { description}
\item [{\em \doxyref{NetworkFileWriteException}{p.}{classneuralpp_1_1NetworkFileWriteException}}] Thrown when an error occurs while writing the network's information to the file \end { description}
\end { Desc}
\begin { Desc}
\item [Examples: ] \par
{ \bf examples/learnAdd.cpp} .\end { Desc}
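A sketch of saving a network once it has been trained (the file name and the network geometry are placeholders):

\begin{verbatim}
#include <neural++.hpp>
#include <iostream>

int main() {
        neuralpp::NeuralNet net(2, 2, 1, 0.005, 1000);   // placeholder geometry
        // ... train the network here ...
        try {
                net.save("adder.net");
        } catch (neuralpp::NetworkFileWriteException&) {
                std::cerr << "Could not write the network file" << std::endl;
                return 1;
        }
        return 0;
}
\end{verbatim}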
\index { neuralpp::NeuralNet@{ neuralpp::NeuralNet} !train@{ train} }
\index { train@{ train} !neuralpp::NeuralNet@{ neuralpp::NeuralNet} }
\subsubsection [train] { \setlength { \rightskip } { 0pt plus 5cm} void neuralpp::NeuralNet::train (std::string { \em xml} , \/ { \bf source} { \em src} ) throw ({ \bf InvalidXMLException} )} \label { classneuralpp_ 1_ 1NeuralNet_ 1c9e17437d41a7048611e21a3cc1c7dd}
Train a network using a training set loaded from an XML file.
A sample XML file is available in examples/adder.xml. \begin { Desc}
\item [Parameters:]
\begin { description}
\item [{\em xml}] XML file (or XML string, depending on {\em src}) containing the training set \item [{\em src}] Source type from which the XML will be loaded (from a file [default] or from a string) \end { description}
\end { Desc}
\begin { Desc}
\item [Exceptions:]
\begin { description}
\item [{\em \doxyref{InvalidXMLException}{p.}{classneuralpp_1_1InvalidXMLException}}] \end { description}
\end { Desc}
\begin { Desc}
\item [Examples: ] \par
{ \bf examples/adderFromScratch.cpp} , and { \bf examples/learnAdd.cpp} .\end { Desc}
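A sketch of training from an XML file (the file name and the network geometry are placeholders; examples/adder.xml shows the expected format):

\begin{verbatim}
#include <neural++.hpp>
#include <iostream>

using namespace neuralpp;

int main() {
        NeuralNet net(2, 2, 1, 0.005, 1000);   // placeholder geometry
        try {
                // Load the training set from an XML file and train the network.
                net.train("adder.xml", NeuralNet::file);
        } catch (InvalidXMLException&) {
                std::cerr << "Invalid training XML" << std::endl;
                return 1;
        }
        return 0;
}
\end{verbatim}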
\index { neuralpp::NeuralNet@{ neuralpp::NeuralNet} !initXML@{ initXML} }
\index { initXML@{ initXML} !neuralpp::NeuralNet@{ neuralpp::NeuralNet} }
\subsubsection [initXML] { \setlength { \rightskip } { 0pt plus 5cm} static void neuralpp::NeuralNet::initXML (std::string \& { \em xml} )\hspace { 0.3cm} { \tt [static]} } \label { classneuralpp_ 1_ 1NeuralNet_ 96da6712a72051cf34ad961761ef6e08}
Initialize the training XML for the neural network.
\begin { Desc}
\item [Parameters:]
\begin { description}
\item [{\em xml}] String that will contain the XML \end { description}
\end { Desc}
\index { neuralpp::NeuralNet@{ neuralpp::NeuralNet} !split@{ split} }
\index { split@{ split} !neuralpp::NeuralNet@{ neuralpp::NeuralNet} }
\subsubsection [split] { \setlength { \rightskip } { 0pt plus 5cm} static std::vector$ < $ double$ > $ neuralpp::NeuralNet::split (char { \em delim} , \/ std::string { \em str} )\hspace { 0.3cm} { \tt [static]} } \label { classneuralpp_ 1_ 1NeuralNet_ 83c6555520856d5867752904349ab6ca}
Splits a string into a vector of doubles, given a delimiter.
\begin { Desc}
\item [Parameters:]
\begin { description}
\item [{\em delim}] Delimiter \item [{\em str}] String to be split \end { description}
\end { Desc}
\begin { Desc}
\item [Returns:] Vector of doubles containing the split values \end { Desc}
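For example, a call such as the following should yield the vector \{ 2, 3, 5\} (a sketch, assuming the natural semantics of the method):

\begin{verbatim}
#include <neural++.hpp>
#include <vector>

int main() {
        std::vector<double> v = neuralpp::NeuralNet::split(',', "2,3,5");
        return (int) v.size();   // 3 values expected
}
\end{verbatim}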
\index { neuralpp::NeuralNet@{ neuralpp::NeuralNet} !XMLFromSet@{ XMLFromSet} }
\index { XMLFromSet@{ XMLFromSet} !neuralpp::NeuralNet@{ neuralpp::NeuralNet} }
\subsubsection [XMLFromSet] { \setlength { \rightskip } { 0pt plus 5cm} static std::string neuralpp::NeuralNet::XMLFromSet (int \& { \em id} , \/ std::string { \em set} )\hspace { 0.3cm} { \tt [static]} } \label { classneuralpp_ 1_ 1NeuralNet_ 0a2733037af912b3e6a10146e7b7172f}
Gets a training set from a string and copies it to an XML string. For example, these strings could be training sets for making sums: \char `\" { } 2,3;5\char `\" { } - \char `\" { } 5,6;11\char `\" { } - \char `\" { } 2,2;4\char `\" { } - \char `\" { } 4,5;9\char `\" { } . Called on the first string, this method returns an XML fragment like this: '$ < $ training id=\char `\" { } 0\char `\" { } $ > $ $ < $ input id=\char `\" { } 0\char `\" { } $ > $ 2$ < $ /input$ > $ $ < $ input id=\char `\" { } 1\char `\" { } $ > $ 3$ < $ /input$ > $ $ < $ output id=\char `\" { } 0\char `\" { } $ > $ 5$ < $ /output$ > $ $ < $ /training$ > $ '.
\begin { Desc}
\item [Parameters:]
\begin { description}
\item [{\em id}] ID for the given training set (0,1,..,n) \item [{\em set}] String containing input values and expected outputs \end { description}
\end { Desc}
\begin { Desc}
\item [Returns:] XML string \end { Desc}
\index { neuralpp::NeuralNet@{ neuralpp::NeuralNet} !closeXML@{ closeXML} }
\index { closeXML@{ closeXML} !neuralpp::NeuralNet@{ neuralpp::NeuralNet} }
\subsubsection [closeXML] { \setlength { \rightskip } { 0pt plus 5cm} static void neuralpp::NeuralNet::closeXML (std::string \& { \em xml} )\hspace { 0.3cm} { \tt [static]} } \label { classneuralpp_ 1_ 1NeuralNet_ e17732ed578bc4bd6032bfae58a5cf51}
Closes an open XML document generated by \char `\" { } initXML\char `\" { } and \char `\" { } XMLFromSet\char `\" { } .
\begin { Desc}
\item [Parameters:]
\begin { description}
\item [{\em xml}] XML string to be closed \end { description}
\end { Desc}
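A sketch of building a training document from such set strings and feeding it back to the network (the sets and the network geometry are placeholders; {\em id} is passed by reference, presumably so the method can keep the set numbering consistent):

\begin{verbatim}
#include <neural++.hpp>
#include <string>

using namespace neuralpp;

int main() {
        std::string xml;
        int id = 0;

        NeuralNet::initXML(xml);                     // open the document
        xml += NeuralNet::XMLFromSet(id, "2,3;5");   // append training sets
        xml += NeuralNet::XMLFromSet(id, "5,6;11");
        NeuralNet::closeXML(xml);                    // close the document

        NeuralNet net(2, 2, 1, 0.005, 1000);         // placeholder geometry
        net.train(xml, NeuralNet::str);              // train from the string
        return 0;
}
\end{verbatim}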
\subsection { Member Data Documentation}
\index { neuralpp::NeuralNet@{ neuralpp::NeuralNet} !epochs@{ epochs} }
\index { epochs@{ epochs} !neuralpp::NeuralNet@{ neuralpp::NeuralNet} }
\subsubsection [epochs] { \setlength { \rightskip } { 0pt plus 5cm} int { \bf neuralpp::NeuralNet::epochs} \hspace { 0.3cm} { \tt [private]} } \label { classneuralpp_ 1_ 1NeuralNet_ 4cb52dae7b43d03fac73afca7b9f3a51}
\index { neuralpp::NeuralNet@{ neuralpp::NeuralNet} !ref\_ \- epochs@{ ref\_ \- epochs} }
\index { ref\_ \- epochs@{ ref\_ \- epochs} !neuralpp::NeuralNet@{ neuralpp::NeuralNet} }
\subsubsection [ref\_\-epochs] { \setlength { \rightskip } { 0pt plus 5cm} int { \bf neuralpp::NeuralNet::ref\_ \- epochs} \hspace { 0.3cm} { \tt [private]} } \label { classneuralpp_ 1_ 1NeuralNet_ 4f88106c9e542c39eac43b4ca1974a2a}
\index { neuralpp::NeuralNet@{ neuralpp::NeuralNet} !l\_ \- rate@{ l\_ \- rate} }
\index { l\_ \- rate@{ l\_ \- rate} !neuralpp::NeuralNet@{ neuralpp::NeuralNet} }
\subsubsection [l\_\-rate] { \setlength { \rightskip } { 0pt plus 5cm} double { \bf neuralpp::NeuralNet::l\_ \- rate} \hspace { 0.3cm} { \tt [private]} } \label { classneuralpp_ 1_ 1NeuralNet_ 6bd7be443e46b2fdbf1da2edb8e611ab}
\index { neuralpp::NeuralNet@{ neuralpp::NeuralNet} !threshold@{ threshold} }
\index { threshold@{ threshold} !neuralpp::NeuralNet@{ neuralpp::NeuralNet} }
\subsubsection [threshold] { \setlength { \rightskip } { 0pt plus 5cm} double { \bf neuralpp::NeuralNet::threshold} \hspace { 0.3cm} { \tt [private]} } \label { classneuralpp_ 1_ 1NeuralNet_ 327dbfdd72b0a74293f8f29630525aa3}
\index { neuralpp::NeuralNet@{ neuralpp::NeuralNet} !expect@{ expect} }
\index { expect@{ expect} !neuralpp::NeuralNet@{ neuralpp::NeuralNet} }
\subsubsection [expect] { \setlength { \rightskip } { 0pt plus 5cm} std::vector$ < $ double$ > $ { \bf neuralpp::NeuralNet::expect} \hspace { 0.3cm} { \tt [private]} } \label { classneuralpp_ 1_ 1NeuralNet_ a9e4ff43427f56663739c4c7450de8ee}
\index { neuralpp::NeuralNet@{ neuralpp::NeuralNet} !actv\_ \- f@{ actv\_ \- f} }
\index { actv\_ \- f@{ actv\_ \- f} !neuralpp::NeuralNet@{ neuralpp::NeuralNet} }
\subsubsection [actv\_\-f] { \setlength { \rightskip } { 0pt plus 5cm} double($ \ast $ { \bf neuralpp::NeuralNet::actv\_ \- f} )(double)\hspace { 0.3cm} { \tt [private]} } \label { classneuralpp_ 1_ 1NeuralNet_ c1469e6afd87d85b82f14bc246f82457}
Private function pointer holding the function to be used as the activation function.
\index { neuralpp::NeuralNet@{ neuralpp::NeuralNet} !input@{ input} }
\index { input@{ input} !neuralpp::NeuralNet@{ neuralpp::NeuralNet} }
\subsubsection [input] { \setlength { \rightskip } { 0pt plus 5cm} { \bf Layer} $ \ast $ { \bf neuralpp::NeuralNet::input} } \label { classneuralpp_ 1_ 1NeuralNet_ e2b4e8405f9d25edab395d61502bdba9}
\index { neuralpp::NeuralNet@{ neuralpp::NeuralNet} !hidden@{ hidden} }
\index { hidden@{ hidden} !neuralpp::NeuralNet@{ neuralpp::NeuralNet} }
\subsubsection [hidden] { \setlength { \rightskip } { 0pt plus 5cm} { \bf Layer} $ \ast $ { \bf neuralpp::NeuralNet::hidden} } \label { classneuralpp_ 1_ 1NeuralNet_ bbdaa1b6c0a1a95d2b18cd25fda2a266}
\index { neuralpp::NeuralNet@{ neuralpp::NeuralNet} !output@{ output} }
\index { output@{ output} !neuralpp::NeuralNet@{ neuralpp::NeuralNet} }
\subsubsection [output] { \setlength { \rightskip } { 0pt plus 5cm} { \bf Layer} $ \ast $ { \bf neuralpp::NeuralNet::output} } \label { classneuralpp_ 1_ 1NeuralNet_ fa9b2dbcbb39d0fc70f790ac24069a74}
The documentation for this class was generated from the following file:\begin { CompactItemize}
\item
{ \bf neural++.hpp} \end { CompactItemize}