\section{neuralpp::NeuralNet Class Reference}
\label{classneuralpp_1_1NeuralNet}\index{neuralpp::NeuralNet@{neuralpp::NeuralNet}}
The project's main class.
{\tt \#include $<$neural++.hpp$>$}
\subsection*{Public Types}
\begin{CompactItemize}
\item
enum {\bf source} \{ {\bf file},
{\bf str}
\}
\begin{CompactList}\small\item\em Enum to choose the eventual training source for our network (XML from a file or from a string). \item\end{CompactList}\end{CompactItemize}
\subsection*{Public Member Functions}
\begin{CompactItemize}
\item
{\bf NeuralNet} ()
\begin{CompactList}\small\item\em Empty constructor for the class; it does nothing. \item\end{CompactList}\item
{\bf NeuralNet} (size\_\-t in\_\-size, size\_\-t hidden\_\-size, size\_\-t out\_\-size, double l, int e)
\begin{CompactList}\small\item\em Constructor. \item\end{CompactList}\item
{\bf NeuralNet} (const string file) throw (NetworkFileNotFoundException)
\begin{CompactList}\small\item\em Constructor. \item\end{CompactList}\item
{\bf NeuralNet} (size\_\-t in\_\-size, size\_\-t hidden\_\-size, size\_\-t out\_\-size, double($\ast$actv)(double), double l, int e)
\begin{CompactList}\small\item\em Constructor. \item\end{CompactList}\item
double {\bf getOutput} () const
\begin{CompactList}\small\item\em It gets the output of the network (note: the output layer should contain only one neuron). \item\end{CompactList}\item
vector$<$ double $>$ {\bf getOutputs} ()
\begin{CompactList}\small\item\em It gets the output of the network in case the output layer contains more than one neuron. \item\end{CompactList}\item
double {\bf expected} () const
\begin{CompactList}\small\item\em It gets the expected value. \item\end{CompactList}\item
void {\bf setExpected} (double {\bf ex})
\begin{CompactList}\small\item\em It sets the value you expect from your network. \item\end{CompactList}\item
void {\bf update} ()
\begin{CompactList}\small\item\em It updates the weights of the synapses through back-propagation and recomputes the output value {\em epochs\/} times, calling the updateWeights and commitChanges functions. \item\end{CompactList}\item
void {\bf propagate} ()
\begin{CompactList}\small\item\em It propagates values through the network. \item\end{CompactList}\item
void {\bf setInput} (vector$<$ double $>$ \&v)
\begin{CompactList}\small\item\em It sets the input for the network. \item\end{CompactList}\item
void {\bf link} ()
\begin{CompactList}\small\item\em It links the layers of the network (input, hidden, output). \item\end{CompactList}\item
void {\bf save} (const char $\ast$fname) throw (NetworkFileWriteException)
\begin{CompactList}\small\item\em Save a trained neural network to a binary file. \item\end{CompactList}\item
void {\bf train} (string xml, {\bf source} src) throw (InvalidXMLException)
\begin{CompactList}\small\item\em Train a network using a training set loaded from an XML file. \item\end{CompactList}\end{CompactItemize}
\subsection*{Static Public Member Functions}
\begin{CompactItemize}
\item
static void {\bf initXML} (string \&xml)
\begin{CompactList}\small\item\em Initialize the training XML for the neural network. \item\end{CompactList}\item
static vector$<$ double $>$ {\bf split} (char delim, string str)
\begin{CompactList}\small\item\em Splits a string into a vector of doubles, given a delimiter. \item\end{CompactList}\item
static string {\bf XMLFromSet} (int id, string set)
\begin{CompactList}\small\item\em Gets a training set from a string and copies it to an XML fragment. For example, these strings could be training sets for making sums: \char`\"{}2,3;5\char`\"{} - \char`\"{}5,6;11\char`\"{} - \char`\"{}2,2;4\char`\"{} - \char`\"{}4,5;9\char`\"{}. Called on the first string, this method will return an XML fragment like '$<$training id=\char`\"{}0\char`\"{}$>$$<$input id=\char`\"{}0\char`\"{}$>$2$<$/input$>$$<$input id=\char`\"{}1\char`\"{}$>$3$<$/input$>$$<$output id=\char`\"{}0\char`\"{}$>$5$<$/output$>$$<$/training$>$'. \item\end{CompactList}\item
static void {\bf closeXML} (string \&xml)
\begin{CompactList}\small\item\em Closes an open XML document generated by \char`\"{}initXML\char`\"{} and \char`\"{}XMLFromSet\char`\"{}. \item\end{CompactList}\end{CompactItemize}
\subsection*{Public Attributes}
\begin{CompactItemize}
\item
{\bf Layer} $\ast$ {\bf input}
\item
{\bf Layer} $\ast$ {\bf hidden}
\item
{\bf Layer} $\ast$ {\bf output}
\end{CompactItemize}
\subsection*{Private Member Functions}
\begin{CompactItemize}
\item
void {\bf updateWeights} ()
\begin{CompactList}\small\item\em It updates the weights of the net's synapsis through back-propagation. \item\end{CompactList}\item
void {\bf commitChanges} ({\bf Layer} $\ast$l)
\begin{CompactList}\small\item\em It commits the changes made by \doxyref{updateWeights()}{p.}{classneuralpp_1_1NeuralNet_94169c89a7cd47122ab5dbf1d5c5e108} to the layer l. \item\end{CompactList}\item
double {\bf error} (double {\bf ex}) const
\begin{CompactList}\small\item\em Get the error made on the expected result as $|$v-v'$|$/v. \item\end{CompactList}\end{CompactItemize}
\subsection*{Private Attributes}
\begin{CompactItemize}
\item
int {\bf epochs}
\item
int {\bf ref\_\-epochs}
\item
double {\bf l\_\-rate}
\item
double {\bf ex}
\item
double($\ast$ {\bf actv\_\-f} )(double)
\begin{CompactList}\small\item\em Private pointer to function, containing the function to be used as activation function. \item\end{CompactList}\end{CompactItemize}
\subsection{Detailed Description}
The project's main class.
Use $\ast$ONLY$\ast$ this class, unless you know exactly what you're doing.
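A minimal usage sketch, assuming only the constructors and methods documented below; the layer sizes, learning rate and epoch count are arbitrary example values, not recommended defaults:

\begin{verbatim}
#include <neural++.hpp>
#include <iostream>
#include <vector>

using namespace neuralpp;
using namespace std;

int main() {
        // 2 input neurons, 2 hidden neurons, 1 output neuron,
        // learning rate 0.005, 1000 epochs (arbitrary example values)
        NeuralNet net(2, 2, 1, 0.005, 1000);

        vector<double> in;
        in.push_back(2);
        in.push_back(3);

        net.setInput(in);       // feed the input values
        net.setExpected(5);     // output we expect for this input
        net.update();           // back-propagate for 'epochs' times

        net.propagate();        // forward pass with the current weights
        cout << net.getOutput() << endl;
        return 0;
}
\end{verbatim}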
\subsection{Member Enumeration Documentation}
\index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!source@{source}}
\index{source@{source}!neuralpp::NeuralNet@{neuralpp::NeuralNet}}
\subsubsection[source]{\setlength{\rightskip}{0pt plus 5cm}enum {\bf neuralpp::NeuralNet::source}}\label{classneuralpp_1_1NeuralNet_94c36c94060e785ea67a0014c4182f8f}
Enum to choose the eventual training source for our network (XML from a file or from a string).
\begin{Desc}
\item[Enumerator: ]\par
\begin{description}
\index{file@{file}!neuralpp::NeuralNet@{neuralpp::NeuralNet}}\index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!file@{file}}\item[{\em
file\label{classneuralpp_1_1NeuralNet_94c36c94060e785ea67a0014c4182f8f5ec2727c0756ddb097b53efe49b81afb}
}]\index{str@{str}!neuralpp::NeuralNet@{neuralpp::NeuralNet}}\index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!str@{str}}\item[{\em
str\label{classneuralpp_1_1NeuralNet_94c36c94060e785ea67a0014c4182f8f6d06b4fe9414a158c97aee1a3679a904}
}]\end{description}
\end{Desc}
\subsection{Constructor \& Destructor Documentation}
\index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!NeuralNet@{NeuralNet}}
\index{NeuralNet@{NeuralNet}!neuralpp::NeuralNet@{neuralpp::NeuralNet}}
\subsubsection[NeuralNet]{\setlength{\rightskip}{0pt plus 5cm}neuralpp::NeuralNet::NeuralNet ()\hspace{0.3cm}{\tt [inline]}}\label{classneuralpp_1_1NeuralNet_92b145f2f6f00bf1ba645ce2235882c2}
Empty constructor for the class; it does nothing.
\index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!NeuralNet@{NeuralNet}}
\index{NeuralNet@{NeuralNet}!neuralpp::NeuralNet@{neuralpp::NeuralNet}}
\subsubsection[NeuralNet]{\setlength{\rightskip}{0pt plus 5cm}neuralpp::NeuralNet::NeuralNet (size\_\-t {\em in\_\-size}, \/ size\_\-t {\em hidden\_\-size}, \/ size\_\-t {\em out\_\-size}, \/ double {\em l}, \/ int {\em e})}\label{classneuralpp_1_1NeuralNet_c79534c7c0dfb20d1d03be2ad7569b78}
Constructor.
\begin{Desc}
\item[Parameters:]
\begin{description}
\item[{\em in\_\-size}]Size of the input layer \item[{\em hidden\_\-size}]Size of the hidden layer \item[{\em out\_\-size}]Size of the output layer \item[{\em l}]Learning rate (tune it through experiments, but generally keep it quite low for better accuracy) \item[{\em e}]Epochs (cycles) to execute (the more you execute, the more accurate the network can become for its purpose) \end{description}
\end{Desc}
\index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!NeuralNet@{NeuralNet}}
\index{NeuralNet@{NeuralNet}!neuralpp::NeuralNet@{neuralpp::NeuralNet}}
\subsubsection[NeuralNet]{\setlength{\rightskip}{0pt plus 5cm}neuralpp::NeuralNet::NeuralNet (const string {\em file}) throw (NetworkFileNotFoundException)}\label{classneuralpp_1_1NeuralNet_7fc7fc3e3220c138ffa5356fef6b9757}
Constructor.
\begin{Desc}
\item[Parameters:]
\begin{description}
\item[{\em file}]Binary file containing a neural network previously saved by \doxyref{save()}{p.}{classneuralpp_1_1NeuralNet_fdf94c276720c25e565cac834fe8a407} method \end{description}
\end{Desc}
\begin{Desc}
\item[Exceptions:]
\begin{description}
\item[{\em NetworkFileNotFoundException}]\end{description}
\end{Desc}
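A sketch of loading a previously saved network; the file name is a hypothetical example and the exception class is assumed to live in the {\tt neuralpp} namespace:

\begin{verbatim}
#include <neural++.hpp>
#include <iostream>

using namespace neuralpp;

int main() {
        try {
                // Hypothetical file previously produced by save()
                NeuralNet net("adder.net");
                // ... use the already trained network ...
        } catch (NetworkFileNotFoundException&) {
                std::cerr << "Could not load the network file" << std::endl;
                return 1;
        }
        return 0;
}
\end{verbatim}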
\index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!NeuralNet@{NeuralNet}}
\index{NeuralNet@{NeuralNet}!neuralpp::NeuralNet@{neuralpp::NeuralNet}}
\subsubsection[NeuralNet]{\setlength{\rightskip}{0pt plus 5cm}neuralpp::NeuralNet::NeuralNet (size\_\-t {\em in\_\-size}, \/ size\_\-t {\em hidden\_\-size}, \/ size\_\-t {\em out\_\-size}, \/ double($\ast$)(double) {\em actv}, \/ double {\em l}, \/ int {\em e})}\label{classneuralpp_1_1NeuralNet_0c16df2e0701503052c63749930b238e}
Constructor.
\begin{Desc}
\item[Parameters:]
\begin{description}
\item[{\em in\_\-size}]Size of the input layer \item[{\em hidden\_\-size}]Size of the hidden layer \item[{\em out\_\-size}]Size of the output layer \item[{\em actv}]Activation function to use (default: f(x)=x) \item[{\em l}]Learning rate (tune it through experiments, but generally keep it quite low for better accuracy) \item[{\em e}]Epochs (cycles) to execute (the more you execute, the more accurate the network can become for its purpose) \end{description}
\end{Desc}
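A sketch of using this constructor with a custom activation function; the logistic sigmoid shown here is only an example (as documented above, the default is f(x)=x):

\begin{verbatim}
#include <neural++.hpp>
#include <cmath>

using namespace neuralpp;

// Example activation function: the logistic sigmoid
double sigmoid(double x) {
        return 1.0 / (1.0 + std::exp(-x));
}

int main() {
        // Same arbitrary sizes as before, but with 'sigmoid' as activation
        NeuralNet net(2, 2, 1, sigmoid, 0.005, 1000);
        // ...
        return 0;
}
\end{verbatim}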
\subsection{Member Function Documentation}
\index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!updateWeights@{updateWeights}}
\index{updateWeights@{updateWeights}!neuralpp::NeuralNet@{neuralpp::NeuralNet}}
\subsubsection[updateWeights]{\setlength{\rightskip}{0pt plus 5cm}void neuralpp::NeuralNet::updateWeights ()\hspace{0.3cm}{\tt [private]}}\label{classneuralpp_1_1NeuralNet_94169c89a7cd47122ab5dbf1d5c5e108}
It updates the weights of the net's synapsis through back-propagation.
In-class use only \index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!commitChanges@{commitChanges}}
\index{commitChanges@{commitChanges}!neuralpp::NeuralNet@{neuralpp::NeuralNet}}
\subsubsection[commitChanges]{\setlength{\rightskip}{0pt plus 5cm}void neuralpp::NeuralNet::commitChanges ({\bf Layer} $\ast$ {\em l})\hspace{0.3cm}{\tt [private]}}\label{classneuralpp_1_1NeuralNet_62695a82dfb1df758a44150921aec8e0}
It commits the changes made by \doxyref{updateWeights()}{p.}{classneuralpp_1_1NeuralNet_94169c89a7cd47122ab5dbf1d5c5e108} to the layer l.
In-class use only \begin{Desc}
\item[Parameters:]
\begin{description}
\item[{\em l}]\doxyref{Layer}{p.}{classneuralpp_1_1Layer} to commit the changes \end{description}
\end{Desc}
\index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!error@{error}}
\index{error@{error}!neuralpp::NeuralNet@{neuralpp::NeuralNet}}
\subsubsection[error]{\setlength{\rightskip}{0pt plus 5cm}double neuralpp::NeuralNet::error (double {\em ex}) const\hspace{0.3cm}{\tt [private]}}\label{classneuralpp_1_1NeuralNet_0616c51404efaca2714e37dd7478997e}
Get the error made on the expected result as $|$v-v'$|$/v.
\begin{Desc}
\item[Parameters:]
\begin{description}
\item[{\em ex}]Expected value \end{description}
\end{Desc}
\begin{Desc}
\item[Returns:]Mean error \end{Desc}
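For instance, with an expected value $v = 5$ and a computed output $v' = 4.8$, the error would be $|5 - 4.8| / 5 = 0.04$.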
\index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!getOutput@{getOutput}}
\index{getOutput@{getOutput}!neuralpp::NeuralNet@{neuralpp::NeuralNet}}
\subsubsection[getOutput]{\setlength{\rightskip}{0pt plus 5cm}double neuralpp::NeuralNet::getOutput () const}\label{classneuralpp_1_1NeuralNet_961dce8913264bf64c899dce4e25f810}
It gets the output of the network (note: the output layer should contain only one neuron).
\begin{Desc}
\item[Returns:]The output value of the network \end{Desc}
\index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!getOutputs@{getOutputs}}
\index{getOutputs@{getOutputs}!neuralpp::NeuralNet@{neuralpp::NeuralNet}}
\subsubsection[getOutputs]{\setlength{\rightskip}{0pt plus 5cm}vector$<$double$>$ neuralpp::NeuralNet::getOutputs ()}\label{classneuralpp_1_1NeuralNet_a6b8bf3800b43b58843c65fc431207ae}
It gets the output of the network in case the output layer contains more than one neuron.
\begin{Desc}
\item[Returns:]A vector containing the output values of the network \end{Desc}
\index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!expected@{expected}}
\index{expected@{expected}!neuralpp::NeuralNet@{neuralpp::NeuralNet}}
\subsubsection[expected]{\setlength{\rightskip}{0pt plus 5cm}double neuralpp::NeuralNet::expected () const}\label{classneuralpp_1_1NeuralNet_562dfe9fb8d73bf25a23ce608451d3aa}
It gets the expected value.
You should specify this via setExpected when setting up your network. \begin{Desc}
\item[Returns:]The expected output value for a certain training phase \end{Desc}
\index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!setExpected@{setExpected}}
\index{setExpected@{setExpected}!neuralpp::NeuralNet@{neuralpp::NeuralNet}}
\subsubsection[setExpected]{\setlength{\rightskip}{0pt plus 5cm}void neuralpp::NeuralNet::setExpected (double {\em ex})}\label{classneuralpp_1_1NeuralNet_b6475762b7e9eab086befdc511f7c236}
It sets the value you expect from your network.
\begin{Desc}
\item[Parameters:]
\begin{description}
\item[{\em ex}]Expected output value \end{description}
\end{Desc}
\index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!update@{update}}
\index{update@{update}!neuralpp::NeuralNet@{neuralpp::NeuralNet}}
\subsubsection[update]{\setlength{\rightskip}{0pt plus 5cm}void neuralpp::NeuralNet::update ()}\label{classneuralpp_1_1NeuralNet_b0bd1daadb06980dff1f50d33a7c098e}
It updates the weights of the synapses through back-propagation and recomputes the output value {\em epochs\/} times, calling the updateWeights and commitChanges functions.
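A sketch of a hand-written training loop built on this method; the training pairs (sums of two numbers) and the single {\tt update()} call per pair are assumptions for illustration, not a prescription of the library:

\begin{verbatim}
#include <neural++.hpp>
#include <vector>

using namespace neuralpp;
using namespace std;

// Train 'net' on a few example pairs (two addends and their sum).
void trainOnSums(NeuralNet &net) {
        double data[][3] = { {2, 3, 5}, {5, 6, 11}, {2, 2, 4} };

        for (int i = 0; i < 3; i++) {
                vector<double> in;
                in.push_back(data[i][0]);
                in.push_back(data[i][1]);

                net.setInput(in);
                net.setExpected(data[i][2]);
                net.update();   // back-propagate 'epochs' times on this pair
        }
}
\end{verbatim}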
\index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!propagate@{propagate}}
\index{propagate@{propagate}!neuralpp::NeuralNet@{neuralpp::NeuralNet}}
\subsubsection[propagate]{\setlength{\rightskip}{0pt plus 5cm}void neuralpp::NeuralNet::propagate ()}\label{classneuralpp_1_1NeuralNet_c129c180647362da963758bfd1ba6890}
It propagates values through the network.
Use this when you want to feed new values to an already trained network and get its output. \index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!setInput@{setInput}}
\index{setInput@{setInput}!neuralpp::NeuralNet@{neuralpp::NeuralNet}}
\subsubsection[setInput]{\setlength{\rightskip}{0pt plus 5cm}void neuralpp::NeuralNet::setInput (vector$<$ double $>$ \& {\em v})}\label{classneuralpp_1_1NeuralNet_0de170e8ab561ad63d0739b4c4b74f68}
It sets the input for the network.
\begin{Desc}
\item[Parameters:]
\begin{description}
\item[{\em v}]Vector of doubles, containing the values to give to your network \end{description}
\end{Desc}
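A sketch of querying an already trained network with new input values; {\tt getOutputs()} is used here assuming the output layer has more than one neuron, otherwise {\tt getOutput()} applies:

\begin{verbatim}
#include <neural++.hpp>
#include <iostream>
#include <vector>

using namespace neuralpp;
using namespace std;

// Feed new values to an already trained network and print its outputs.
void query(NeuralNet &net) {
        vector<double> in;
        in.push_back(4);
        in.push_back(5);

        net.setInput(in);   // new input values
        net.propagate();    // forward pass only, no training

        vector<double> out = net.getOutputs();
        for (size_t i = 0; i < out.size(); i++)
                cout << out[i] << endl;
}
\end{verbatim}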
\index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!link@{link}}
\index{link@{link}!neuralpp::NeuralNet@{neuralpp::NeuralNet}}
\subsubsection[link]{\setlength{\rightskip}{0pt plus 5cm}void neuralpp::NeuralNet::link ()}\label{classneuralpp_1_1NeuralNet_46f23f462318a4ffc037a4e806364c3f}
It links the layers of the network (input, hidden, output).
Don't use it unless you know exactly what you're doing; it is already called by the constructor. \index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!save@{save}}
\index{save@{save}!neuralpp::NeuralNet@{neuralpp::NeuralNet}}
\subsubsection[save]{\setlength{\rightskip}{0pt plus 5cm}void neuralpp::NeuralNet::save (const char $\ast$ {\em fname}) throw (NetworkFileWriteException)}\label{classneuralpp_1_1NeuralNet_fdf94c276720c25e565cac834fe8a407}
Save a trained neural network to a binary file.
\begin{Desc}
\item[Parameters:]
\begin{description}
\item[{\em fname}]Binary file where you're going to save your network \end{description}
\end{Desc}
\begin{Desc}
\item[Exceptions:]
\begin{description}
\item[{\em NetworkFileWriteException}]Thrown when an error occurs while writing the network's information to a file \end{description}
\end{Desc}
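A sketch of saving a trained network with the documented exception handled; the file name is a hypothetical example:

\begin{verbatim}
#include <neural++.hpp>
#include <iostream>

using namespace neuralpp;

void saveNet(NeuralNet &net) {
        try {
                net.save("adder.net");   // hypothetical output file
        } catch (NetworkFileWriteException&) {
                std::cerr << "Could not write the network to a file" << std::endl;
        }
}
\end{verbatim}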
\index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!train@{train}}
\index{train@{train}!neuralpp::NeuralNet@{neuralpp::NeuralNet}}
\subsubsection[train]{\setlength{\rightskip}{0pt plus 5cm}void neuralpp::NeuralNet::train (string {\em xml}, \/ {\bf source} {\em src}) throw (InvalidXMLException)}\label{classneuralpp_1_1NeuralNet_ead4bdef0602a5cadbe3beb685e01f5f}
Train a network using a training set loaded from an XML file.
A sample XML file is available in examples/adder.xml \begin{Desc}
\item[Parameters:]
\begin{description}
\item[{\em xml}]XML file containing our training set \item[{\em src}]Source type from which the XML will be loaded (from a file [default] or from a string) \end{description}
\end{Desc}
\begin{Desc}
\item[Exceptions:]
\begin{description}
\item[{\em InvalidXMLException}]\end{description}
\end{Desc}
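A sketch of training from an XML file with the {\tt file} source; {\tt examples/adder.xml} is the sample file mentioned above:

\begin{verbatim}
#include <neural++.hpp>
#include <iostream>

using namespace neuralpp;

void trainFromFile(NeuralNet &net) {
        try {
                net.train("examples/adder.xml", NeuralNet::file);
        } catch (InvalidXMLException&) {
                std::cerr << "Invalid training XML" << std::endl;
        }
}
\end{verbatim}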
\index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!initXML@{initXML}}
\index{initXML@{initXML}!neuralpp::NeuralNet@{neuralpp::NeuralNet}}
\subsubsection[initXML]{\setlength{\rightskip}{0pt plus 5cm}static void neuralpp::NeuralNet::initXML (string \& {\em xml})\hspace{0.3cm}{\tt [static]}}\label{classneuralpp_1_1NeuralNet_45c7645d4affe65752d37cd230afba24}
Initialize the training XML for the neural network.
\begin{Desc}
\item[Parameters:]
\begin{description}
\item[{\em xml}]String that will contain the XML \end{description}
\end{Desc}
\index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!split@{split}}
\index{split@{split}!neuralpp::NeuralNet@{neuralpp::NeuralNet}}
\subsubsection[split]{\setlength{\rightskip}{0pt plus 5cm}static vector$<$double$>$ neuralpp::NeuralNet::split (char {\em delim}, \/ string {\em str})\hspace{0.3cm}{\tt [static]}}\label{classneuralpp_1_1NeuralNet_e07af23ceb8666518da0c035bf1e0376}
Splits a string into a vector of doubles, given a delimiter.
\begin{Desc}
\item[Parameters:]
\begin{description}
\item[{\em delim}]Delimiter \item[{\em str}]String to be split \end{description}
\end{Desc}
\begin{Desc}
\item[Returns:]Vector of doubles containing the split values \end{Desc}
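A sketch of how {\tt split()} could be used, assuming each token is parsed as a number:

\begin{verbatim}
#include <neural++.hpp>
#include <vector>

using namespace neuralpp;
using namespace std;

int main() {
        // Expected to yield the values 2, 3 and 4
        vector<double> values = NeuralNet::split(',', "2,3,4");
        return 0;
}
\end{verbatim}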
\index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!XMLFromSet@{XMLFromSet}}
\index{XMLFromSet@{XMLFromSet}!neuralpp::NeuralNet@{neuralpp::NeuralNet}}
\subsubsection[XMLFromSet]{\setlength{\rightskip}{0pt plus 5cm}static string neuralpp::NeuralNet::XMLFromSet (int {\em id}, \/ string {\em set})\hspace{0.3cm}{\tt [static]}}\label{classneuralpp_1_1NeuralNet_4be31ecb0b543a192997bd83c6995ccb}
Gets a training set from a string and copies it to an XML fragment. For example, these strings could be training sets for making sums: \char`\"{}2,3;5\char`\"{} - \char`\"{}5,6;11\char`\"{} - \char`\"{}2,2;4\char`\"{} - \char`\"{}4,5;9\char`\"{}. Called on the first string, this method will return an XML fragment like '$<$training id=\char`\"{}0\char`\"{}$>$$<$input id=\char`\"{}0\char`\"{}$>$2$<$/input$>$$<$input id=\char`\"{}1\char`\"{}$>$3$<$/input$>$$<$output id=\char`\"{}0\char`\"{}$>$5$<$/output$>$$<$/training$>$'.
\begin{Desc}
\item[Parameters:]
\begin{description}
\item[{\em id}]ID for the given training set (0,1,..,n) \item[{\em set}]String containing input values and expected outputs \end{description}
\end{Desc}
\begin{Desc}
\item[Returns:]XML string \end{Desc}
\index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!closeXML@{closeXML}}
\index{closeXML@{closeXML}!neuralpp::NeuralNet@{neuralpp::NeuralNet}}
\subsubsection[closeXML]{\setlength{\rightskip}{0pt plus 5cm}static void neuralpp::NeuralNet::closeXML (string \& {\em xml})\hspace{0.3cm}{\tt [static]}}\label{classneuralpp_1_1NeuralNet_28b9966c5f197b8e86d57dd104aa32a6}
Closes an open XML document generated by \char`\"{}initXML\char`\"{} and \char`\"{}XMLFromSet\char`\"{}.
\begin{Desc}
\item[Parameters:]
\begin{description}
\item[{\em xml}]XML string to be closed \end{description}
\end{Desc}
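A sketch that puts {\tt initXML()}, {\tt XMLFromSet()} and {\tt closeXML()} together to build a training document in memory and feed it to {\tt train()} with the {\tt str} source; concatenating the returned fragments with {\tt +=} is an assumption, and the training strings are the sum examples used above:

\begin{verbatim}
#include <neural++.hpp>
#include <string>

using namespace neuralpp;
using namespace std;

void trainFromString(NeuralNet &net) {
        string xml;

        NeuralNet::initXML(xml);                    // open the XML document
        xml += NeuralNet::XMLFromSet(0, "2,3;5");   // training set #0
        xml += NeuralNet::XMLFromSet(1, "5,6;11");  // training set #1
        NeuralNet::closeXML(xml);                   // close the document

        net.train(xml, NeuralNet::str);             // train from the string
}
\end{verbatim}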
\subsection{Member Data Documentation}
\index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!epochs@{epochs}}
\index{epochs@{epochs}!neuralpp::NeuralNet@{neuralpp::NeuralNet}}
\subsubsection[epochs]{\setlength{\rightskip}{0pt plus 5cm}int {\bf neuralpp::NeuralNet::epochs}\hspace{0.3cm}{\tt [private]}}\label{classneuralpp_1_1NeuralNet_4cb52dae7b43d03fac73afca7b9f3a51}
\index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!ref\_\-epochs@{ref\_\-epochs}}
\index{ref\_\-epochs@{ref\_\-epochs}!neuralpp::NeuralNet@{neuralpp::NeuralNet}}
\subsubsection[ref\_\-epochs]{\setlength{\rightskip}{0pt plus 5cm}int {\bf neuralpp::NeuralNet::ref\_\-epochs}\hspace{0.3cm}{\tt [private]}}\label{classneuralpp_1_1NeuralNet_4f88106c9e542c39eac43b4ca1974a2a}
\index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!l\_\-rate@{l\_\-rate}}
\index{l\_\-rate@{l\_\-rate}!neuralpp::NeuralNet@{neuralpp::NeuralNet}}
\subsubsection[l\_\-rate]{\setlength{\rightskip}{0pt plus 5cm}double {\bf neuralpp::NeuralNet::l\_\-rate}\hspace{0.3cm}{\tt [private]}}\label{classneuralpp_1_1NeuralNet_6bd7be443e46b2fdbf1da2edb8e611ab}
\index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!ex@{ex}}
\index{ex@{ex}!neuralpp::NeuralNet@{neuralpp::NeuralNet}}
\subsubsection[ex]{\setlength{\rightskip}{0pt plus 5cm}double {\bf neuralpp::NeuralNet::ex}\hspace{0.3cm}{\tt [private]}}\label{classneuralpp_1_1NeuralNet_261f5f68fcc5be54250cfa03945266dd}
\index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!actv\_\-f@{actv\_\-f}}
\index{actv\_\-f@{actv\_\-f}!neuralpp::NeuralNet@{neuralpp::NeuralNet}}
\subsubsection[actv\_\-f]{\setlength{\rightskip}{0pt plus 5cm}double($\ast$ {\bf neuralpp::NeuralNet::actv\_\-f})(double)\hspace{0.3cm}{\tt [private]}}\label{classneuralpp_1_1NeuralNet_c1469e6afd87d85b82f14bc246f82457}
Private pointer to function, containing the function to be used as activation function.
\index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!input@{input}}
\index{input@{input}!neuralpp::NeuralNet@{neuralpp::NeuralNet}}
\subsubsection[input]{\setlength{\rightskip}{0pt plus 5cm}{\bf Layer}$\ast$ {\bf neuralpp::NeuralNet::input}}\label{classneuralpp_1_1NeuralNet_e2b4e8405f9d25edab395d61502bdba9}
\index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!hidden@{hidden}}
\index{hidden@{hidden}!neuralpp::NeuralNet@{neuralpp::NeuralNet}}
\subsubsection[hidden]{\setlength{\rightskip}{0pt plus 5cm}{\bf Layer}$\ast$ {\bf neuralpp::NeuralNet::hidden}}\label{classneuralpp_1_1NeuralNet_bbdaa1b6c0a1a95d2b18cd25fda2a266}
\index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!output@{output}}
\index{output@{output}!neuralpp::NeuralNet@{neuralpp::NeuralNet}}
\subsubsection[output]{\setlength{\rightskip}{0pt plus 5cm}{\bf Layer}$\ast$ {\bf neuralpp::NeuralNet::output}}\label{classneuralpp_1_1NeuralNet_fa9b2dbcbb39d0fc70f790ac24069a74}
The documentation for this class was generated from the following file:\begin{CompactItemize}
\item
{\bf neural++.hpp}\end{CompactItemize}