Is it time to release 1.0 version?

This commit is contained in:
blacklight 2009-09-04 11:27:14 +02:00
parent 89b0ad2f8a
commit a7fa05ba40
140 changed files with 1710 additions and 829 deletions

View file

@ -8,10 +8,15 @@ Exception thrown when trying parsing an invalid XML.
\subsection*{Public Member Functions}
\begin{CompactItemize}
\item
{\bf InvalidXMLException} ()
{\bf InvalidXMLException} (const char $\ast$err=\char`\"{} \char`\"{})
\item
const char $\ast$ {\bf what} () const throw ()
\end{CompactItemize}
\subsection*{Private Attributes}
\begin{CompactItemize}
\item
char $\ast$ {\bf error}
\end{CompactItemize}
\subsection{Detailed Description}
@ -20,11 +25,13 @@ Exception thrown when trying parsing an invalid XML.
\subsection{Constructor \& Destructor Documentation}
\index{neuralpp::InvalidXMLException@{neuralpp::InvalidXMLException}!InvalidXMLException@{InvalidXMLException}}
\index{InvalidXMLException@{InvalidXMLException}!neuralpp::InvalidXMLException@{neuralpp::InvalidXMLException}}
\subsubsection[InvalidXMLException]{\setlength{\rightskip}{0pt plus 5cm}neuralpp::InvalidXMLException::InvalidXMLException ()\hspace{0.3cm}{\tt [inline]}}\label{classneuralpp_1_1InvalidXMLException_10279e6f42a1ccb934afcfef2770c537}
\subsubsection[InvalidXMLException]{\setlength{\rightskip}{0pt plus 5cm}neuralpp::InvalidXMLException::InvalidXMLException (const char $\ast$ {\em err} = {\tt \char`\"{}~\char`\"{}})\hspace{0.3cm}{\tt [inline]}}\label{classneuralpp_1_1InvalidXMLException_793d311be88606908abf8c5be1348586}
References error.
\subsection{Member Function Documentation}
\index{neuralpp::InvalidXMLException@{neuralpp::InvalidXMLException}!what@{what}}
\index{what@{what}!neuralpp::InvalidXMLException@{neuralpp::InvalidXMLException}}
@ -33,6 +40,18 @@ Exception thrown when trying parsing an invalid XML.
References error.
\subsection{Member Data Documentation}
\index{neuralpp::InvalidXMLException@{neuralpp::InvalidXMLException}!error@{error}}
\index{error@{error}!neuralpp::InvalidXMLException@{neuralpp::InvalidXMLException}}
\subsubsection[error]{\setlength{\rightskip}{0pt plus 5cm}char$\ast$ {\bf neuralpp::InvalidXMLException::error}\hspace{0.3cm}{\tt [private]}}\label{classneuralpp_1_1InvalidXMLException_e0c63b17f7b5b0df20d8361bc9ee92b4}
Referenced by InvalidXMLException(), and what().
The documentation for this class was generated from the following file:\begin{CompactItemize}
\item
{\bf neural++\_\-exception.hpp}\end{CompactItemize}

View file

@ -23,28 +23,20 @@ enum {\bf source} \{ {\bf file},
\begin{CompactList}\small\item\em Constructor. \item\end{CompactList}\item
double {\bf getOutput} () const
\begin{CompactList}\small\item\em It gets the output of the network (note: the layer output should contain an only neuron). \item\end{CompactList}\item
double {\bf getThreshold} () const
\begin{CompactList}\small\item\em Get the threshold of the neurons in the network. \item\end{CompactList}\item
std::vector$<$ double $>$ {\bf getOutputs} ()
\begin{CompactList}\small\item\em It gets the output of the network in case the output layer contains more neurons. \item\end{CompactList}\item
double {\bf expected} () const
\begin{CompactList}\small\item\em Get the expected value (in case you have an only neuron in output layer). \item\end{CompactList}\item
std::vector$<$ double $>$ {\bf getExpected} () const
\begin{CompactList}\small\item\em Get the expected value (in case you have an only neuron in output layer). \item\end{CompactList}\item
void {\bf setExpected} (double ex)
\begin{CompactList}\small\item\em It sets the value you expect from your network (in case the network has an only neuron in its output layer). \item\end{CompactList}\item
void {\bf setExpected} (std::vector$<$ double $>$ ex)
\begin{CompactList}\small\item\em Set the values you expect from your network. \item\end{CompactList}\item
void {\bf update} ()
\begin{CompactList}\small\item\em It updates through back-propagation the weights of the synapsis and computes again the output value for {\em epochs\/} times, calling back updateWeights and commitChanges functions. \item\end{CompactList}\item
double {\bf getThreshold} () const
\begin{CompactList}\small\item\em Get the threshold of the neurons in the network. \item\end{CompactList}\item
void {\bf propagate} ()
\begin{CompactList}\small\item\em It propagates values through the network. \item\end{CompactList}\item
void {\bf setInput} (std::vector$<$ double $>$ v)
\begin{CompactList}\small\item\em It sets the input for the network. \item\end{CompactList}\item
void {\bf link} ()
\begin{CompactList}\small\item\em It links the layers of the network (input, hidden, output). \item\end{CompactList}\item
void {\bf save} (const char $\ast$fname) throw (NetworkFileWriteException)
\begin{CompactList}\small\item\em Save a trained neural network to a binary file. \item\end{CompactList}\item
void {\bf loadFromBinary} (const std::string fname) throw (NetworkFileNotFoundException)
\begin{CompactList}\small\item\em DEPRECATED. \item\end{CompactList}\item
void {\bf saveToBinary} (const char $\ast$fname) throw (NetworkFileWriteException)
\begin{CompactList}\small\item\em DEPRECATED. \item\end{CompactList}\item
void {\bf train} (std::string xml, {\bf source} src) throw (InvalidXMLException)
\begin{CompactList}\small\item\em Train a network using a training set loaded from an XML file. \item\end{CompactList}\end{CompactItemize}
\subsection*{Static Public Member Functions}
@ -52,8 +44,6 @@ void {\bf train} (std::string xml, {\bf source} src) throw (InvalidXMLException
\item
static void {\bf initXML} (std::string \&xml)
\begin{CompactList}\small\item\em Initialize the training XML for the neural network. \item\end{CompactList}\item
static std::vector$<$ double $>$ {\bf split} (char delim, std::string str)
\begin{CompactList}\small\item\em Splits a string into a vector of doubles, given a delimitator. \item\end{CompactList}\item
static std::string {\bf XMLFromSet} (int \&id, std::string set)
\begin{CompactList}\small\item\em Gets a training set from a string and copies it to an XML. For example, these strings could be training sets for making sums: \char`\"{}2,3;5\char`\"{} - \char`\"{}5,6;11\char`\"{} - \char`\"{}2,2;4\char`\"{} - \char`\"{}4,5;9\char`\"{} This method called on the first string will return an XML such as this: '$<$training id=\char`\"{}0\char`\"{}$>$$<$input id=\char`\"{}0\char`\"{}$>$2$<$/input$>$$<$input id=\char`\"{}1\char`\"{}$>$3$<$/input$>$$<$output id=\char`\"{}0\char`\"{}$>$5$<$/output$>$$<$/training$>$'. \item\end{CompactList}\item
static void {\bf closeXML} (std::string \&xml)
@ -72,10 +62,20 @@ static void {\bf closeXML} (std::string \&xml)
\item
void {\bf updateWeights} ()
\begin{CompactList}\small\item\em It updates the weights of the net's synapsis through back-propagation. \item\end{CompactList}\item
void {\bf commitChanges} ({\bf Layer} \&l)
\begin{CompactList}\small\item\em It commits the changes made by \doxyref{updateWeights()}{p.}{classneuralpp_1_1NeuralNet_94169c89a7cd47122ab5dbf1d5c5e108} to the layer l. \item\end{CompactList}\item
double {\bf error} (double ex)
\begin{CompactList}\small\item\em Get the error made on the expected result as squared deviance. \item\end{CompactList}\end{CompactItemize}
\begin{CompactList}\small\item\em Get the error made on the expected result as squared deviance. \item\end{CompactList}\item
double {\bf expected} () const
\begin{CompactList}\small\item\em Get the expected value (in case you have an only neuron in output layer). \item\end{CompactList}\item
std::vector$<$ double $>$ {\bf getExpected} () const
\begin{CompactList}\small\item\em Get the expected value (in case you have an only neuron in output layer). \item\end{CompactList}\item
void {\bf setExpected} (double ex)
\begin{CompactList}\small\item\em It sets the value you expect from your network (in case the network has an only neuron in its output layer). \item\end{CompactList}\item
void {\bf setExpected} (std::vector$<$ double $>$ ex)
\begin{CompactList}\small\item\em Set the values you expect from your network. \item\end{CompactList}\item
void {\bf update} ()
\begin{CompactList}\small\item\em It updates through back-propagation the weights of the synapsis and computes again the output value for {\em epochs\/} times, calling back updateWeights and commitChanges functions. \item\end{CompactList}\item
void {\bf link} ()
\begin{CompactList}\small\item\em It links the layers of the network (input, hidden, output). \item\end{CompactList}\end{CompactItemize}
\subsection*{Private Attributes}
\begin{CompactItemize}
\item
@ -100,7 +100,7 @@ Use $\ast$ONLY$\ast$ this class, unless you know what you're doing \begin{Desc}
\item[Examples: ]\par
{\bf examples/adderFromScratch.cpp}, {\bf examples/doAdd.cpp}, and {\bf examples/learnAdd.cpp}.\end{Desc}
{\bf examples/adderFromString.cpp}, {\bf examples/doAdd.cpp}, {\bf examples/learnAdd.cpp}, and {\bf examples/networkForSumsAndSubtractions.cpp}.\end{Desc}
\subsection{Member Enumeration Documentation}
@ -170,19 +170,7 @@ Constructor.
It updates the weights of the net's synapsis through back-propagation.
In-class use only \index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!commitChanges@{commitChanges}}
\index{commitChanges@{commitChanges}!neuralpp::NeuralNet@{neuralpp::NeuralNet}}
\subsubsection[commitChanges]{\setlength{\rightskip}{0pt plus 5cm}void neuralpp::NeuralNet::commitChanges ({\bf Layer} \& {\em l})\hspace{0.3cm}{\tt [private]}}\label{classneuralpp_1_1NeuralNet_f697a8d9967ad8f03e5a16a42cd110c5}
It commits the changes made by \doxyref{updateWeights()}{p.}{classneuralpp_1_1NeuralNet_94169c89a7cd47122ab5dbf1d5c5e108} to the layer l.
In-class use only \begin{Desc}
\item[Parameters:]
\begin{description}
\item[{\em l}]\doxyref{Layer}{p.}{classneuralpp_1_1Layer} to commit the changes \end{description}
\end{Desc}
\index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!error@{error}}
In-class use only \index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!error@{error}}
\index{error@{error}!neuralpp::NeuralNet@{neuralpp::NeuralNet}}
\subsubsection[error]{\setlength{\rightskip}{0pt plus 5cm}double neuralpp::NeuralNet::error (double {\em ex})\hspace{0.3cm}{\tt [private]}}\label{classneuralpp_1_1NeuralNet_8a140d28e6dd4097470c7c138801ad01}
@ -196,42 +184,9 @@ Get the error made on the expected result as squared deviance.
\end{Desc}
\begin{Desc}
\item[Returns:]Mean error \end{Desc}
\index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!getOutput@{getOutput}}
\index{getOutput@{getOutput}!neuralpp::NeuralNet@{neuralpp::NeuralNet}}
\subsubsection[getOutput]{\setlength{\rightskip}{0pt plus 5cm}double neuralpp::NeuralNet::getOutput () const}\label{classneuralpp_1_1NeuralNet_961dce8913264bf64c899dce4e25f810}
It gets the output of the network (note: the layer output should contain an only neuron).
\begin{Desc}
\item[Returns:]The output value of the network \end{Desc}
\begin{Desc}
\item[Examples: ]\par
{\bf examples/doAdd.cpp}.\end{Desc}
\index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!getThreshold@{getThreshold}}
\index{getThreshold@{getThreshold}!neuralpp::NeuralNet@{neuralpp::NeuralNet}}
\subsubsection[getThreshold]{\setlength{\rightskip}{0pt plus 5cm}double neuralpp::NeuralNet::getThreshold () const}\label{classneuralpp_1_1NeuralNet_e08cdcf4b70f987700e553d9914f6179}
Get the threshold of the neurons in the network.
\begin{Desc}
\item[Returns:]The threshold of the neurons \end{Desc}
\index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!getOutputs@{getOutputs}}
\index{getOutputs@{getOutputs}!neuralpp::NeuralNet@{neuralpp::NeuralNet}}
\subsubsection[getOutputs]{\setlength{\rightskip}{0pt plus 5cm}std::vector$<$double$>$ neuralpp::NeuralNet::getOutputs ()}\label{classneuralpp_1_1NeuralNet_e6d2215ecc8b560db2f6797db642191c}
It gets the output of the network in case the output layer contains more neurons.
\begin{Desc}
\item[Returns:]A vector containing the output values of the network \end{Desc}
\begin{Desc}
\item[Examples: ]\par
{\bf examples/adderFromScratch.cpp}.\end{Desc}
\index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!expected@{expected}}
\index{expected@{expected}!neuralpp::NeuralNet@{neuralpp::NeuralNet}}
\subsubsection[expected]{\setlength{\rightskip}{0pt plus 5cm}double neuralpp::NeuralNet::expected () const}\label{classneuralpp_1_1NeuralNet_562dfe9fb8d73bf25a23ce608451d3aa}
\subsubsection[expected]{\setlength{\rightskip}{0pt plus 5cm}double neuralpp::NeuralNet::expected () const\hspace{0.3cm}{\tt [private]}}\label{classneuralpp_1_1NeuralNet_562dfe9fb8d73bf25a23ce608451d3aa}
Get the expected value (in case you have an only neuron in output layer).
@ -240,7 +195,7 @@ Of course you should specify this when you build your network by using setExpect
\item[Returns:]The expected output value for a certain training phase \end{Desc}
\index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!getExpected@{getExpected}}
\index{getExpected@{getExpected}!neuralpp::NeuralNet@{neuralpp::NeuralNet}}
\subsubsection[getExpected]{\setlength{\rightskip}{0pt plus 5cm}std::vector$<$double$>$ neuralpp::NeuralNet::getExpected () const}\label{classneuralpp_1_1NeuralNet_51a1851ed07b85bec091c9053ae99cf7}
\subsubsection[getExpected]{\setlength{\rightskip}{0pt plus 5cm}std::vector$<$double$>$ neuralpp::NeuralNet::getExpected () const\hspace{0.3cm}{\tt [private]}}\label{classneuralpp_1_1NeuralNet_51a1851ed07b85bec091c9053ae99cf7}
Get the expected value (in case you have an only neuron in output layer).
@ -249,7 +204,7 @@ Of course you should specify this when you build your network by using setExpect
\item[Returns:]The expected output value for a certain training phase \end{Desc}
\index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!setExpected@{setExpected}}
\index{setExpected@{setExpected}!neuralpp::NeuralNet@{neuralpp::NeuralNet}}
\subsubsection[setExpected]{\setlength{\rightskip}{0pt plus 5cm}void neuralpp::NeuralNet::setExpected (double {\em ex})}\label{classneuralpp_1_1NeuralNet_b6475762b7e9eab086befdc511f7c236}
\subsubsection[setExpected]{\setlength{\rightskip}{0pt plus 5cm}void neuralpp::NeuralNet::setExpected (double {\em ex})\hspace{0.3cm}{\tt [private]}}\label{classneuralpp_1_1NeuralNet_b6475762b7e9eab086befdc511f7c236}
It sets the value you expect from your network (in case the network has an only neuron in its output layer).
@ -261,7 +216,7 @@ It sets the value you expect from your network (in case the network has an only
\end{Desc}
\index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!setExpected@{setExpected}}
\index{setExpected@{setExpected}!neuralpp::NeuralNet@{neuralpp::NeuralNet}}
\subsubsection[setExpected]{\setlength{\rightskip}{0pt plus 5cm}void neuralpp::NeuralNet::setExpected (std::vector$<$ double $>$ {\em ex})}\label{classneuralpp_1_1NeuralNet_e649edc3d86bec7c0e178d5c892b4fd7}
\subsubsection[setExpected]{\setlength{\rightskip}{0pt plus 5cm}void neuralpp::NeuralNet::setExpected (std::vector$<$ double $>$ {\em ex})\hspace{0.3cm}{\tt [private]}}\label{classneuralpp_1_1NeuralNet_e649edc3d86bec7c0e178d5c892b4fd7}
Set the values you expect from your network.
@ -273,11 +228,51 @@ Set the values you expect from your network.
\end{Desc}
\index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!update@{update}}
\index{update@{update}!neuralpp::NeuralNet@{neuralpp::NeuralNet}}
\subsubsection[update]{\setlength{\rightskip}{0pt plus 5cm}void neuralpp::NeuralNet::update ()}\label{classneuralpp_1_1NeuralNet_b0bd1daadb06980dff1f50d33a7c098e}
\subsubsection[update]{\setlength{\rightskip}{0pt plus 5cm}void neuralpp::NeuralNet::update ()\hspace{0.3cm}{\tt [private]}}\label{classneuralpp_1_1NeuralNet_b0bd1daadb06980dff1f50d33a7c098e}
It updates through back-propagation the weights of the synapsis and computes again the output value for {\em epochs\/} times, calling back updateWeights and commitChanges functions.
\index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!link@{link}}
\index{link@{link}!neuralpp::NeuralNet@{neuralpp::NeuralNet}}
\subsubsection[link]{\setlength{\rightskip}{0pt plus 5cm}void neuralpp::NeuralNet::link ()\hspace{0.3cm}{\tt [private]}}\label{classneuralpp_1_1NeuralNet_46f23f462318a4ffc037a4e806364c3f}
It links the layers of the network (input, hidden, output).
\index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!getOutput@{getOutput}}
\index{getOutput@{getOutput}!neuralpp::NeuralNet@{neuralpp::NeuralNet}}
\subsubsection[getOutput]{\setlength{\rightskip}{0pt plus 5cm}double neuralpp::NeuralNet::getOutput () const}\label{classneuralpp_1_1NeuralNet_961dce8913264bf64c899dce4e25f810}
It gets the output of the network (note: the layer output should contain an only neuron).
\begin{Desc}
\item[Returns:]The output value of the network \end{Desc}
\begin{Desc}
\item[Examples: ]\par
{\bf examples/adderFromString.cpp}, and {\bf examples/doAdd.cpp}.\end{Desc}
\index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!getOutputs@{getOutputs}}
\index{getOutputs@{getOutputs}!neuralpp::NeuralNet@{neuralpp::NeuralNet}}
\subsubsection[getOutputs]{\setlength{\rightskip}{0pt plus 5cm}std::vector$<$double$>$ neuralpp::NeuralNet::getOutputs ()}\label{classneuralpp_1_1NeuralNet_e6d2215ecc8b560db2f6797db642191c}
It gets the output of the network in case the output layer contains more neurons.
\begin{Desc}
\item[Returns:]A vector containing the output values of the network \end{Desc}
\begin{Desc}
\item[Examples: ]\par
{\bf examples/networkForSumsAndSubtractions.cpp}.\end{Desc}
\index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!getThreshold@{getThreshold}}
\index{getThreshold@{getThreshold}!neuralpp::NeuralNet@{neuralpp::NeuralNet}}
\subsubsection[getThreshold]{\setlength{\rightskip}{0pt plus 5cm}double neuralpp::NeuralNet::getThreshold () const}\label{classneuralpp_1_1NeuralNet_e08cdcf4b70f987700e553d9914f6179}
Get the threshold of the neurons in the network.
\begin{Desc}
\item[Returns:]The threshold of the neurons \end{Desc}
\index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!propagate@{propagate}}
\index{propagate@{propagate}!neuralpp::NeuralNet@{neuralpp::NeuralNet}}
\subsubsection[propagate]{\setlength{\rightskip}{0pt plus 5cm}void neuralpp::NeuralNet::propagate ()}\label{classneuralpp_1_1NeuralNet_c129c180647362da963758bfd1ba6890}
@ -287,7 +282,7 @@ It propagates values through the network.
Use this when you want to give an already trained network some new values to get to the output \begin{Desc}
\item[Examples: ]\par
{\bf examples/adderFromScratch.cpp}, and {\bf examples/doAdd.cpp}.\end{Desc}
{\bf examples/adderFromString.cpp}, {\bf examples/doAdd.cpp}, and {\bf examples/networkForSumsAndSubtractions.cpp}.\end{Desc}
\index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!setInput@{setInput}}
\index{setInput@{setInput}!neuralpp::NeuralNet@{neuralpp::NeuralNet}}
\subsubsection[setInput]{\setlength{\rightskip}{0pt plus 5cm}void neuralpp::NeuralNet::setInput (std::vector$<$ double $>$ {\em v})}\label{classneuralpp_1_1NeuralNet_405b32d2928344314ecf0469070b0f17}
@ -302,15 +297,8 @@ It sets the input for the network.
\end{Desc}
\begin{Desc}
\item[Examples: ]\par
{\bf examples/adderFromScratch.cpp}, and {\bf examples/doAdd.cpp}.\end{Desc}
\index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!link@{link}}
\index{link@{link}!neuralpp::NeuralNet@{neuralpp::NeuralNet}}
\subsubsection[link]{\setlength{\rightskip}{0pt plus 5cm}void neuralpp::NeuralNet::link ()}\label{classneuralpp_1_1NeuralNet_46f23f462318a4ffc037a4e806364c3f}
It links the layers of the network (input, hidden, output).
Don't use unless you exactly know what you're doing, it is already called by the constructor \index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!save@{save}}
{\bf examples/adderFromString.cpp}, {\bf examples/doAdd.cpp}, and {\bf examples/networkForSumsAndSubtractions.cpp}.\end{Desc}
\index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!save@{save}}
\index{save@{save}!neuralpp::NeuralNet@{neuralpp::NeuralNet}}
\subsubsection[save]{\setlength{\rightskip}{0pt plus 5cm}void neuralpp::NeuralNet::save (const char $\ast$ {\em fname}) throw ({\bf NetworkFileWriteException})}\label{classneuralpp_1_1NeuralNet_fdf94c276720c25e565cac834fe8a407}
@ -330,6 +318,40 @@ Save a trained neural network to a binary file.
\begin{Desc}
\item[Examples: ]\par
{\bf examples/learnAdd.cpp}.\end{Desc}
\index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!loadFromBinary@{loadFromBinary}}
\index{loadFromBinary@{loadFromBinary}!neuralpp::NeuralNet@{neuralpp::NeuralNet}}
\subsubsection[loadFromBinary]{\setlength{\rightskip}{0pt plus 5cm}void neuralpp::NeuralNet::loadFromBinary (const std::string {\em fname}) throw ({\bf NetworkFileNotFoundException})}\label{classneuralpp_1_1NeuralNet_a060e28b438613a6cc9e0895ddbc292b}
DEPRECATED.
Load a trained neural network from a binary file. This function is deprecated and kept for back-compatibility. Use the XML format instead to load and save neural networks and, respectively, the NeuralNetwork(const std::string) constructor or the \doxyref{save(const char$\ast$)}{p.}{classneuralpp_1_1NeuralNet_fdf94c276720c25e565cac834fe8a407} methods. \begin{Desc}
\item[Parameters:]
\begin{description}
\item[{\em fname}]Name of the file to be loaded \end{description}
\end{Desc}
\begin{Desc}
\item[Exceptions:]
\begin{description}
\item[{\em \doxyref{NetworkFileNotFoundException}{p.}{classneuralpp_1_1NetworkFileNotFoundException}}]When you're trying to load an invalid network file \end{description}
\end{Desc}
\index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!saveToBinary@{saveToBinary}}
\index{saveToBinary@{saveToBinary}!neuralpp::NeuralNet@{neuralpp::NeuralNet}}
\subsubsection[saveToBinary]{\setlength{\rightskip}{0pt plus 5cm}void neuralpp::NeuralNet::saveToBinary (const char $\ast$ {\em fname}) throw ({\bf NetworkFileWriteException})}\label{classneuralpp_1_1NeuralNet_520147d9b47b69565567bd3fdcfd8897}
DEPRECATED.
Save a trained neural network to a binary file. This function is deprecated and kept for back-compatibility. Use the XML format instead to load and save neural networks and, respectively, the NeuralNetwork(const std::string) constructor or the \doxyref{save(const char$\ast$)}{p.}{classneuralpp_1_1NeuralNet_fdf94c276720c25e565cac834fe8a407} methods. \begin{Desc}
\item[Parameters:]
\begin{description}
\item[{\em fname}]Name of the file to be saved with the network information \end{description}
\end{Desc}
\begin{Desc}
\item[Exceptions:]
\begin{description}
\item[{\em \doxyref{NetworkFileWriteException}{p.}{classneuralpp_1_1NetworkFileWriteException}}]When you try to write the network information to an invalid file \end{description}
\end{Desc}
\index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!train@{train}}
\index{train@{train}!neuralpp::NeuralNet@{neuralpp::NeuralNet}}
\subsubsection[train]{\setlength{\rightskip}{0pt plus 5cm}void neuralpp::NeuralNet::train (std::string {\em xml}, \/ {\bf source} {\em src}) throw ({\bf InvalidXMLException})}\label{classneuralpp_1_1NeuralNet_1c9e17437d41a7048611e21a3cc1c7dd}
@ -349,7 +371,7 @@ A sample XML file is available in examples/adder.xml \begin{Desc}
\end{Desc}
\begin{Desc}
\item[Examples: ]\par
{\bf examples/adderFromScratch.cpp}, and {\bf examples/learnAdd.cpp}.\end{Desc}
{\bf examples/adderFromString.cpp}, {\bf examples/learnAdd.cpp}, and {\bf examples/networkForSumsAndSubtractions.cpp}.\end{Desc}
\index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!initXML@{initXML}}
\index{initXML@{initXML}!neuralpp::NeuralNet@{neuralpp::NeuralNet}}
\subsubsection[initXML]{\setlength{\rightskip}{0pt plus 5cm}static void neuralpp::NeuralNet::initXML (std::string \& {\em xml})\hspace{0.3cm}{\tt [static]}}\label{classneuralpp_1_1NeuralNet_96da6712a72051cf34ad961761ef6e08}
@ -362,20 +384,6 @@ Initialize the training XML for the neural network.
\begin{description}
\item[{\em xml}]String that will contain the XML \end{description}
\end{Desc}
\index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!split@{split}}
\index{split@{split}!neuralpp::NeuralNet@{neuralpp::NeuralNet}}
\subsubsection[split]{\setlength{\rightskip}{0pt plus 5cm}static std::vector$<$double$>$ neuralpp::NeuralNet::split (char {\em delim}, \/ std::string {\em str})\hspace{0.3cm}{\tt [static]}}\label{classneuralpp_1_1NeuralNet_83c6555520856d5867752904349ab6ca}
Splits a string into a vector of doubles, given a delimiter.
\begin{Desc}
\item[Parameters:]
\begin{description}
\item[{\em delim}]Delimiter \item[{\em str}]String to be split \end{description}
\end{Desc}
\begin{Desc}
\item[Returns:]Vector of doubles containing splitted values \end{Desc}
\index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!XMLFromSet@{XMLFromSet}}
\index{XMLFromSet@{XMLFromSet}!neuralpp::NeuralNet@{neuralpp::NeuralNet}}
\subsubsection[XMLFromSet]{\setlength{\rightskip}{0pt plus 5cm}static std::string neuralpp::NeuralNet::XMLFromSet (int \& {\em id}, \/ std::string {\em set})\hspace{0.3cm}{\tt [static]}}\label{classneuralpp_1_1NeuralNet_0a2733037af912b3e6a10146e7b7172f}

View file

@ -24,6 +24,10 @@ void {\bf setActv} (double a)
\begin{CompactList}\small\item\em Change the activation value of the neuron. \item\end{CompactList}\item
void {\bf setProp} (double p)
\begin{CompactList}\small\item\em Change the propagation value of the neuron. \item\end{CompactList}\item
void {\bf setSynIn} (size\_\-t n)
\item
void {\bf setSynOut} (size\_\-t n)
\item
double {\bf getActv} ()
\begin{CompactList}\small\item\em Get the activation value of the neuron. \item\end{CompactList}\item
double {\bf getProp} ()
@ -162,6 +166,16 @@ Change the propagation value of the neuron.
\begin{description}
\item[{\em p}]Propagation value \end{description}
\end{Desc}
\index{neuralpp::Neuron@{neuralpp::Neuron}!setSynIn@{setSynIn}}
\index{setSynIn@{setSynIn}!neuralpp::Neuron@{neuralpp::Neuron}}
\subsubsection[setSynIn]{\setlength{\rightskip}{0pt plus 5cm}void neuralpp::Neuron::setSynIn (size\_\-t {\em n})}\label{classneuralpp_1_1Neuron_6fa3e8afc1c6e6e427773ba89a6fcb68}
\index{neuralpp::Neuron@{neuralpp::Neuron}!setSynOut@{setSynOut}}
\index{setSynOut@{setSynOut}!neuralpp::Neuron@{neuralpp::Neuron}}
\subsubsection[setSynOut]{\setlength{\rightskip}{0pt plus 5cm}void neuralpp::Neuron::setSynOut (size\_\-t {\em n})}\label{classneuralpp_1_1Neuron_d6ae3a93ecd11c345d931aee4ca248c1}
\index{neuralpp::Neuron@{neuralpp::Neuron}!getActv@{getActv}}
\index{getActv@{getActv}!neuralpp::Neuron@{neuralpp::Neuron}}
\subsubsection[getActv]{\setlength{\rightskip}{0pt plus 5cm}double neuralpp::Neuron::getActv ()}\label{classneuralpp_1_1Neuron_55993867179f0ac7d1e0e2c460ceb611}

View file

@ -8,8 +8,8 @@ Class for managing synapsis.
\subsection*{Public Member Functions}
\begin{CompactItemize}
\item
{\bf Synapsis} ({\bf Neuron} $\ast$i, {\bf Neuron} $\ast$o, double w, double d)
\begin{CompactList}\small\item\em Constructor. \item\end{CompactList}\item
{\bf Synapsis} ()
\begin{CompactList}\small\item\em Empty constructor (it does nothing). \item\end{CompactList}\item
{\bf Synapsis} ({\bf Neuron} $\ast$i, {\bf Neuron} $\ast$o, double($\ast$a)(double))
\begin{CompactList}\small\item\em Constructor. \item\end{CompactList}\item
{\bf Synapsis} ({\bf Neuron} $\ast$i, {\bf Neuron} $\ast$o, double w, double($\ast$a)(double))
@ -55,16 +55,11 @@ Don't use this class directly unless you know what you're doing, use \doxyref{Ne
\subsection{Constructor \& Destructor Documentation}
\index{neuralpp::Synapsis@{neuralpp::Synapsis}!Synapsis@{Synapsis}}
\index{Synapsis@{Synapsis}!neuralpp::Synapsis@{neuralpp::Synapsis}}
\subsubsection[Synapsis]{\setlength{\rightskip}{0pt plus 5cm}neuralpp::Synapsis::Synapsis ({\bf Neuron} $\ast$ {\em i}, \/ {\bf Neuron} $\ast$ {\em o}, \/ double {\em w}, \/ double {\em d})}\label{classneuralpp_1_1Synapsis_0729de9e737b9967421edcfc4b410bd8}
\subsubsection[Synapsis]{\setlength{\rightskip}{0pt plus 5cm}neuralpp::Synapsis::Synapsis ()\hspace{0.3cm}{\tt [inline]}}\label{classneuralpp_1_1Synapsis_c7760b19c56e9f69994970311703c5fa}
Constructor.
Empty constructor (it does nothing).
\begin{Desc}
\item[Parameters:]
\begin{description}
\item[{\em i}]Input neuron \item[{\em o}]Output neuron \item[{\em w}]Weight for the synapsis \item[{\em d}]Delta for the synapsis \end{description}
\end{Desc}
\index{neuralpp::Synapsis@{neuralpp::Synapsis}!Synapsis@{Synapsis}}
\index{Synapsis@{Synapsis}!neuralpp::Synapsis@{neuralpp::Synapsis}}
\subsubsection[Synapsis]{\setlength{\rightskip}{0pt plus 5cm}neuralpp::Synapsis::Synapsis ({\bf Neuron} $\ast$ {\em i}, \/ {\bf Neuron} $\ast$ {\em o}, \/ double($\ast$)(double) {\em a})}\label{classneuralpp_1_1Synapsis_b767332fdce81af25486b8969e5d2fb3}

View file

@ -10,8 +10,8 @@
{\fancyplain{}{\bfseries\rightmark}}
\rhead[\fancyplain{}{\bfseries\leftmark}]
{\fancyplain{}{\bfseries\thepage}}
\rfoot[\fancyplain{}{\bfseries\scriptsize Generated on Sun Aug 16 20:53:42 2009 for Neural++ by Doxygen }]{}
\lfoot[]{\fancyplain{}{\bfseries\scriptsize Generated on Sun Aug 16 20:53:42 2009 for Neural++ by Doxygen }}
\rfoot[\fancyplain{}{\bfseries\scriptsize Generated on Fri Sep 4 11:25:49 2009 for Neural++ by Doxygen }]{}
\lfoot[]{\fancyplain{}{\bfseries\scriptsize Generated on Fri Sep 4 11:25:49 2009 for Neural++ by Doxygen }}
\cfoot{}
\newenvironment{Code}
{\footnotesize}

View file

@ -1,9 +1,11 @@
\section{Examples}
Here is a list of all examples:\begin{CompactItemize}
\item
{\bf examples/adderFromScratch.cpp}
{\bf examples/adderFromString.cpp}
\item
{\bf examples/doAdd.cpp}
\item
{\bf examples/learnAdd.cpp}
\item
{\bf examples/networkForSumsAndSubtractions.cpp}
\end{CompactItemize}

View file

@ -0,0 +1,49 @@
\section{examples/adderFromString.cpp}
Similar to learnAdd.cpp, but this time the training XML is generated as a string instead of being saved to a file, and is parsed by the program itself to build the network. Then, the program asks for two real numbers and computes their sum. Note that using more than one neuron in the output layer is strongly discouraged, as the network usually won't set the synaptic weights accurately enough to give satisfying answers for all of the operations.
\begin{DocInclude}\begin{verbatim}
#include <iostream>
#include <neural++.hpp>
using namespace std;
using namespace neuralpp;
int main() {
NeuralNet net(2, 2, 1, 0.002, 1000);
string xml;
double tmp;
int id = 0;
// XML initialization. Then, I say XML that 2+3=5, 3+3=6, 5+4=9
// Strings' format is "input1,input2,...,inputn;output1,output2,...,outputm"
NeuralNet::initXML(xml);
xml += NeuralNet::XMLFromSet(id, "3,2;5");
xml += NeuralNet::XMLFromSet(id, "6,3;9");
xml += NeuralNet::XMLFromSet(id, "2,3;5");
xml += NeuralNet::XMLFromSet(id, "4,4;8");
NeuralNet::closeXML(xml);
cout << xml << endl;
net.train(xml, NeuralNet::str);
vector<double> v;
cout << "Network status: trained\n\n";
cout << "First number to add: ";
cin >> tmp;
v.push_back(tmp);
cout << "Second number to add: ";
cin >> tmp;
v.push_back(tmp);
net.setInput(v);
net.propagate();
cout << "Output: " << net.getOutput() << endl;
return 0;
}
\end{verbatim}
\end{DocInclude}

View file

@ -10,7 +10,7 @@ Show how to use a network already trained and saved to a binary file. In this ca
using namespace std;
using namespace neuralpp;
#define NETFILE "adder.net"
#define NETFILE "network.xml"
int main() {
double a,b;

View file

@ -21,10 +21,10 @@ int main() {
// => 2 neurons for the input layer
// => 2 neurons for the hidden layer
// => 1 neuron for the output layer
// => a learning rate == 0.005 (just get it doing some tests until satisfied)
// => a learning rate == 0.002 (just tune it by doing some tests until satisfied, but remember to keep its value quite low, close to 0, to keep the network stable)
// => 2000 learning steps (i.e. the network will be ready after 2000 training steps to adjust the synaptic weights)
// => 0.1 as neural threshold (the threshold above which a neuron activates)
NeuralNet net(2, 2, 1, 0.005, 1000, 0.1);
NeuralNet net(2, 2, 1, 0.002, 2000);
// Initialize a training XML as a string in 'xml'
NeuralNet::initXML(xml);
@ -41,6 +41,12 @@ int main() {
xml += NeuralNet::XMLFromSet(id, "-1,-2;-3");
xml += NeuralNet::XMLFromSet(id, "8,9;17");
xml += NeuralNet::XMLFromSet(id, "10,10;20");
xml += NeuralNet::XMLFromSet(id, "4,1;5");
xml += NeuralNet::XMLFromSet(id, "2,6;8");
xml += NeuralNet::XMLFromSet(id, "2,7;9");
xml += NeuralNet::XMLFromSet(id, "8,9;17");
xml += NeuralNet::XMLFromSet(id, "4,7;11");
xml += NeuralNet::XMLFromSet(id, "5,2;7");
NeuralNet::closeXML(xml);
// Save the XML string just created to a file
@ -57,7 +63,7 @@ int main() {
// Save the trained network to a binary file, that can be reloaded from any
// application that is going to use that network
net.save("adder.net");
net.save("network.xml");
cout << "Network trained in " << (t2-t1) << " seconds. You can use adder.net file now to load this network\n";
return 0;
}

View file

@ -0,0 +1,48 @@
\section{examples/networkForSumsAndSubtractions.cpp}
This program creates a neural network from scratch. Its purpose is to get two numbers and learn to compute their sum and difference (so the network provides two output values). The training set is auto-generated to an XML string, and then the network is trained.
\begin{DocInclude}\begin{verbatim}
#include <iostream>
#include <neural++.hpp>
using namespace std;
using namespace neuralpp;
int main() {
NeuralNet net(2, 2, 2, 0.002, 1000);
string xml;
double tmp;
int id = 0;
// XML initialization. Then, I say XML that 3+2=5, 3-2=1; 4+2=6, 4-2=2; 6+3=9, 6-3=3
// Strings' format is "input1,input2,...,inputn;output1,output2,...,outputm
NeuralNet::initXML(xml);
xml += NeuralNet::XMLFromSet(id, "3,2;5,1");
xml += NeuralNet::XMLFromSet(id, "4,2;6,2");
xml += NeuralNet::XMLFromSet(id, "6,3;9,3");
NeuralNet::closeXML(xml);
cout << xml << endl;
net.train(xml, NeuralNet::str);
vector<double> v;
cout << "Network status: trained\n\n";
cout << "First number: ";
cin >> tmp;
v.push_back(tmp);
cout << "Second number: ";
cin >> tmp;
v.push_back(tmp);
net.setInput(v);
net.propagate();
cout << "Output: " << net.getOutputs()[0] << "; " << net.getOutputs()[1] << endl;
return 0;
}
\end{verbatim}
\end{DocInclude}

View file

@ -0,0 +1,8 @@
\section{examples/networkForSumsAndSubtrations.cpp}
This program creates a neural network from scratch. Its purpose is to get two numbers and learn to compute their sum and difference (so the network provides two output values). The training set is auto-generated to an XML string, and then the network is trained.
\begin{DocInclude}\begin{verbatim}\end{verbatim}
\end{DocInclude}

View file

@ -30,6 +30,11 @@ class {\bf NetworkIndexOutOfBoundsException}
\begin{CompactList}\small\item\em Exception raised when trying to access a neuron whose index is larger than the number of neurons in the network. \item\end{CompactList}\item
class {\bf InvalidSynapticalWeightException}
\begin{CompactList}\small\item\em Exception raised when, while trying the network or directly, the weight of a synapsis is set to a value $|$w$|$ $>$ 1. \item\end{CompactList}\end{CompactItemize}
\subsection*{Namespaces}
\begin{CompactItemize}
\item
namespace {\bf neuralutils}
\end{CompactItemize}
\subsection*{Functions}
\begin{CompactItemize}
\item

View file

@ -0,0 +1,70 @@
\section{neuralpp::neuralutils Namespace Reference}
\label{namespaceneuralpp_1_1neuralutils}\index{neuralpp::neuralutils@{neuralpp::neuralutils}}
\subsection*{Functions}
\begin{CompactItemize}
\item
std::vector$<$ double $>$ {\bf split} (char delim, std::string str)
\begin{CompactList}\small\item\em Split a string into a vector of doubles, given a delimiter. \item\end{CompactList}\item
std::vector$<$ std::string $>$ {\bf splitLines} (std::string str)
\begin{CompactList}\small\item\em Split the lines of a string. \item\end{CompactList}\item
void {\bf toLower} (std::string \&str)
\begin{CompactList}\small\item\em Convert the characters of a string to lower case. \item\end{CompactList}\item
void {\bf toUpper} (std::string \&str)
\begin{CompactList}\small\item\em Convert the characters of a string to upper case. \item\end{CompactList}\end{CompactItemize}
\subsection{Function Documentation}
\index{neuralpp::neuralutils@{neuralpp::neuralutils}!split@{split}}
\index{split@{split}!neuralpp::neuralutils@{neuralpp::neuralutils}}
\subsubsection[split]{\setlength{\rightskip}{0pt plus 5cm}std::vector$<$double$>$ neuralpp::neuralutils::split (char {\em delim}, \/ std::string {\em str})}\label{namespaceneuralpp_1_1neuralutils_68719b3d63ca48ed264e1b730a1aaa4a}
Split a string into a vector of doubles, given a delimiter.
\begin{Desc}
\item[Parameters:]
\begin{description}
\item[{\em delim}]Delimiter \item[{\em str}]String to be split \end{description}
\end{Desc}
\begin{Desc}
\item[Returns:]Vector of doubles containing the split values \end{Desc}
\index{neuralpp::neuralutils@{neuralpp::neuralutils}!splitLines@{splitLines}}
\index{splitLines@{splitLines}!neuralpp::neuralutils@{neuralpp::neuralutils}}
\subsubsection[splitLines]{\setlength{\rightskip}{0pt plus 5cm}std::vector$<$std::string$>$ neuralpp::neuralutils::splitLines (std::string {\em str})}\label{namespaceneuralpp_1_1neuralutils_1d887e4bcc7ef2d50cbeca984767a78b}
Split the lines of a string.
\begin{Desc}
\item[Parameters:]
\begin{description}
\item[{\em str}]String to be split \end{description}
\end{Desc}
\begin{Desc}
\item[Returns:]An array of strings containing the lines of the original string \end{Desc}
\index{neuralpp::neuralutils@{neuralpp::neuralutils}!toLower@{toLower}}
\index{toLower@{toLower}!neuralpp::neuralutils@{neuralpp::neuralutils}}
\subsubsection[toLower]{\setlength{\rightskip}{0pt plus 5cm}void neuralpp::neuralutils::toLower (std::string \& {\em str})}\label{namespaceneuralpp_1_1neuralutils_f7932c25bd82b19173d2f3d2e5cef488}
Convert the characters of a string to lower case.
\begin{Desc}
\item[Parameters:]
\begin{description}
\item[{\em str}]String to be converted \end{description}
\end{Desc}
\index{neuralpp::neuralutils@{neuralpp::neuralutils}!toUpper@{toUpper}}
\index{toUpper@{toUpper}!neuralpp::neuralutils@{neuralpp::neuralutils}}
\subsubsection[toUpper]{\setlength{\rightskip}{0pt plus 5cm}void neuralpp::neuralutils::toUpper (std::string \& {\em str})}\label{namespaceneuralpp_1_1neuralutils_265b22d1a6110646b42693b96c21ca8b}
Convert the characters of a string to upper case.
\begin{Desc}
\item[Parameters:]
\begin{description}
\item[{\em str}]String to be converted \end{description}
\end{Desc}

View file

@ -1,4 +1,5 @@
\section{Namespace List}
Here is a list of all namespaces with brief descriptions:\begin{CompactList}
\item\contentsline{section}{{\bf neuralpp} (Main namespace for the library )}{\pageref{namespaceneuralpp}}{}
\item\contentsline{section}{{\bf neuralpp::neuralutils} }{\pageref{namespaceneuralpp_1_1neuralutils}}{}
\end{CompactList}

View file

@ -8,6 +8,8 @@
\begin{CompactItemize}
\item
namespace {\bf neuralpp}
\item
namespace {\bf neuralpp::neuralutils}
\end{CompactItemize}
\subsection*{Classes}
\begin{CompactItemize}
@ -29,31 +31,35 @@ struct {\bf neuralpp::synrecord}
\subsection*{Defines}
\begin{CompactItemize}
\item
\#define {\bf RAND}~(double) ( (rand() / (RAND\_\-MAX/2)) - 1)
\begin{CompactList}\small\item\em Default rand value: $|$sin(rand)$|$, always $>$= 0 and $<$= 1. \item\end{CompactList}\item
\#define {\bf BETA0}~1.0
\begin{CompactList}\small\item\em Initial value for the inertial momentum of the synapses. \item\end{CompactList}\end{CompactItemize}
\#define {\bf RAND}~(double) ( (rand() / 10.0) / ((double) RAND\_\-MAX) )
\item
\#define {\bf BETA0}~0.8
\end{CompactItemize}
\subsection*{Functions}
\begin{CompactItemize}
\item
double {\bf neuralpp::df} (double($\ast$f)(double), double x)
\item
double {\bf neuralpp::\_\-\_\-actv} (double prop)
\end{CompactItemize}
\item
std::vector$<$ double $>$ {\bf neuralpp::neuralutils::split} (char delim, std::string str)
\begin{CompactList}\small\item\em Split a string into a vector of doubles, given a delimitator. \item\end{CompactList}\item
std::vector$<$ std::string $>$ {\bf neuralpp::neuralutils::splitLines} (std::string str)
\begin{CompactList}\small\item\em Split the lines of a string. \item\end{CompactList}\item
void {\bf neuralpp::neuralutils::toLower} (std::string \&str)
\begin{CompactList}\small\item\em Convert the characters of a string to lower case. \item\end{CompactList}\item
void {\bf neuralpp::neuralutils::toUpper} (std::string \&str)
\begin{CompactList}\small\item\em Convert the characters of a string to upper case. \item\end{CompactList}\end{CompactItemize}
\subsection{Define Documentation}
\index{neural++.hpp@{neural++.hpp}!BETA0@{BETA0}}
\index{BETA0@{BETA0}!neural++.hpp@{neural++.hpp}}
\subsubsection[BETA0]{\setlength{\rightskip}{0pt plus 5cm}\#define BETA0~1.0}\label{neural_09_09_8hpp_05e2bb5b9fc32f0b6b4d84fe43177d72}
\subsubsection[BETA0]{\setlength{\rightskip}{0pt plus 5cm}\#define BETA0~0.8}\label{neural_09_09_8hpp_05e2bb5b9fc32f0b6b4d84fe43177d72}
Initial value for the inertial momentum of the synapses.
\index{neural++.hpp@{neural++.hpp}!RAND@{RAND}}
\index{RAND@{RAND}!neural++.hpp@{neural++.hpp}}
\subsubsection[RAND]{\setlength{\rightskip}{0pt plus 5cm}\#define RAND~(double) ( (rand() / (RAND\_\-MAX/2)) - 1)}\label{neural_09_09_8hpp_839a9222721835f53c5b248241f535f4}
\subsubsection[RAND]{\setlength{\rightskip}{0pt plus 5cm}\#define RAND~(double) ( (rand() / 10.0) / ((double) RAND\_\-MAX) )}\label{neural_09_09_8hpp_839a9222721835f53c5b248241f535f4}
Default rand value: a uniform pseudo-random value, always $>$= 0 and $<$= 0.1.

View file

@ -1,5 +1,7 @@
\section{neural++\_\-exception.hpp File Reference}
\label{neural_09_09__exception_8hpp}\index{neural++\_\-exception.hpp@{neural++\_\-exception.hpp}}
{\tt \#include $<$cstdio$>$}\par
{\tt \#include $<$cstring$>$}\par
{\tt \#include $<$exception$>$}\par
\subsection*{Namespaces}
\begin{CompactItemize}

View file

@ -20,7 +20,7 @@
\vspace*{1cm}
{\large Generated by Doxygen 1.5.6}\\
\vspace*{0.5cm}
{\small Sun Aug 16 20:53:42 2009}\\
{\small Fri Sep 4 11:25:49 2009}\\
\end{center}
\end{titlepage}
\clearemptydoublepage
@ -36,6 +36,7 @@
\input{files}
\chapter{Namespace Documentation}
\input{namespaceneuralpp}
\include{namespaceneuralpp_1_1neuralutils}
\chapter{Class Documentation}
\input{classCMarkup}
\include{structCMarkup_1_1ConvertEncoding}
@ -66,8 +67,9 @@
\include{neural_09_09_8hpp}
\include{neural_09_09__exception_8hpp}
\chapter{Example Documentation}
\input{examples_2adderFromScratch_8cpp-example}
\input{examples_2adderFromString_8cpp-example}
\include{examples_2doAdd_8cpp-example}
\include{examples_2learnAdd_8cpp-example}
\include{examples_2networkForSumsAndSubtractions_8cpp-example}
\printindex
\end{document}