https://github.com/BlackLight/neuralpp.git
commit 7861e56f35 (parent d52976e74e)
Documentation re-generated, a lot of minor stuff
144 changed files with 2589 additions and 851 deletions
doc/latex:
_2examples_2learnAdd_8cpp-example.tex
annotated.tex
classneuralpp_1_1InvalidSynapticalWeightException.tex
classneuralpp_1_1InvalidXMLException.tex
classneuralpp_1_1Layer.tex
classneuralpp_1_1NetworkFileNotFoundException.tex
classneuralpp_1_1NetworkFileWriteException.tex
classneuralpp_1_1NetworkIndexOutOfBoundsException.tex
classneuralpp_1_1NeuralNet.tex
classneuralpp_1_1Neuron.tex
classneuralpp_1_1Synapsis.tex
doxygen.sty
examples.tex
examples_2adderFromScratch_8cpp-example.tex
examples_2doAdd_8cpp-example.tex
examples_2learnAdd_8cpp-example.tex
namespaceneuralpp.tex
neural_09_09_8hpp.tex
neural_09_09__exception_8hpp.tex
refman.tex
6
doc/latex/_2examples_2learnAdd_8cpp-example.tex
Normal file
|
@ -0,0 +1,6 @@
|
|||
\section{/examples/learnAdd.cpp}
|
||||
|
||||
|
||||
\begin{DocInclude}\begin{verbatim}\end{verbatim}
|
||||
\end{DocInclude}
|
||||
|
|
@ -11,9 +11,14 @@ Here are the classes, structs, unions and interfaces with brief descriptions:\be
|
|||
\item\contentsline{section}{{\bf CMarkup::SavedPosMap} }{\pageref{structCMarkup_1_1SavedPosMap}}{}
|
||||
\item\contentsline{section}{{\bf CMarkup::SavedPosMapArray} }{\pageref{structCMarkup_1_1SavedPosMapArray}}{}
|
||||
\item\contentsline{section}{{\bf CMarkup::TokenPos} }{\pageref{structCMarkup_1_1TokenPos}}{}
|
||||
\item\contentsline{section}{{\bf neuralpp::InvalidSynapticalWeightException} (Exception raised when the weight of a synapsis is set, while training the network or directly, to a value $|$w$|$ $>$ 1 )}{\pageref{classneuralpp_1_1InvalidSynapticalWeightException}}{}
|
||||
\item\contentsline{section}{{\bf neuralpp::InvalidXMLException} (Exception thrown when trying to parse an invalid XML )}{\pageref{classneuralpp_1_1InvalidXMLException}}{}
|
||||
\item\contentsline{section}{{\bf neuralpp::Layer} (Class for managing layers of neurons )}{\pageref{classneuralpp_1_1Layer}}{}
|
||||
\item\contentsline{section}{{\bf MCD\_\-CSTR} }{\pageref{structMCD__CSTR}}{}
|
||||
\item\contentsline{section}{{\bf neuralpp::netrecord} }{\pageref{structneuralpp_1_1netrecord}}{}
|
||||
\item\contentsline{section}{{\bf neuralpp::NetworkFileNotFoundException} (Exception thrown when attempting to load a network from an invalid file )}{\pageref{classneuralpp_1_1NetworkFileNotFoundException}}{}
|
||||
\item\contentsline{section}{{\bf neuralpp::NetworkFileWriteException} (Exception thrown when trying to write the network's information to a file that cannot be written )}{\pageref{classneuralpp_1_1NetworkFileWriteException}}{}
|
||||
\item\contentsline{section}{{\bf neuralpp::NetworkIndexOutOfBoundsException} (Exception raised when trying to access a neuron whose index is larger than the number of neurons in the network )}{\pageref{classneuralpp_1_1NetworkIndexOutOfBoundsException}}{}
|
||||
\item\contentsline{section}{{\bf neuralpp::NeuralNet} (The project's main class )}{\pageref{classneuralpp_1_1NeuralNet}}{}
|
||||
\item\contentsline{section}{{\bf neuralpp::Neuron} (Class for managing neurons )}{\pageref{classneuralpp_1_1Neuron}}{}
|
||||
\item\contentsline{section}{{\bf neuralpp::neuronrecord} }{\pageref{structneuralpp_1_1neuronrecord}}{}
|
||||
|
|
|
@ -0,0 +1,38 @@
|
|||
\section{neuralpp::InvalidSynapticalWeightException Class Reference}
|
||||
\label{classneuralpp_1_1InvalidSynapticalWeightException}\index{neuralpp::InvalidSynapticalWeightException@{neuralpp::InvalidSynapticalWeightException}}
|
||||
Exception raised when the weight of a synapsis is set, while training the network or directly, to a value $|$w$|$ $>$ 1.
|
||||
|
||||
|
||||
{\tt \#include $<$neural++\_\-exception.hpp$>$}
|
||||
|
||||
\subsection*{Public Member Functions}
|
||||
\begin{CompactItemize}
|
||||
\item
|
||||
{\bf InvalidSynapticalWeightException} ()
|
||||
\item
|
||||
const char $\ast$ {\bf what} () const throw ()
|
||||
\end{CompactItemize}
|
||||
|
||||
|
||||
\subsection{Detailed Description}
|
||||
Exception raised when the weight of a synapsis is set, while training the network or directly, to a value $|$w$|$ $>$ 1.
|
||||
|
||||
\subsection{Constructor \& Destructor Documentation}
|
||||
\index{neuralpp::InvalidSynapticalWeightException@{neuralpp::InvalidSynapticalWeightException}!InvalidSynapticalWeightException@{InvalidSynapticalWeightException}}
|
||||
\index{InvalidSynapticalWeightException@{InvalidSynapticalWeightException}!neuralpp::InvalidSynapticalWeightException@{neuralpp::InvalidSynapticalWeightException}}
|
||||
\subsubsection[InvalidSynapticalWeightException]{\setlength{\rightskip}{0pt plus 5cm}neuralpp::InvalidSynapticalWeightException::InvalidSynapticalWeightException ()\hspace{0.3cm}{\tt [inline]}}\label{classneuralpp_1_1InvalidSynapticalWeightException_42e54de0ef9d9f711a483287f4f95916}
|
||||
|
||||
|
||||
|
||||
|
||||
\subsection{Member Function Documentation}
|
||||
\index{neuralpp::InvalidSynapticalWeightException@{neuralpp::InvalidSynapticalWeightException}!what@{what}}
|
||||
\index{what@{what}!neuralpp::InvalidSynapticalWeightException@{neuralpp::InvalidSynapticalWeightException}}
|
||||
\subsubsection[what]{\setlength{\rightskip}{0pt plus 5cm}const char$\ast$ neuralpp::InvalidSynapticalWeightException::what () const throw ()\hspace{0.3cm}{\tt [inline]}}\label{classneuralpp_1_1InvalidSynapticalWeightException_8d374dd7e8363b2551ae6e306b11473a}
|
||||
|
||||
|
||||
|
||||
|
||||
The documentation for this class was generated from the following file:\begin{CompactItemize}
|
||||
\item
|
||||
{\bf neural++\_\-exception.hpp}\end{CompactItemize}
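As a minimal, hypothetical sketch (which operation actually triggers the exception depends on your own code), handling it only needs the {\tt what()} member documented above:

\begin{verbatim}
#include <neural++.hpp>
#include <iostream>

void runSafely(neuralpp::NeuralNet &net) {
        try {
                net.update();  // any operation that might push a synaptical weight outside [-1,1]
        } catch (neuralpp::InvalidSynapticalWeightException &e) {
                std::cerr << e.what() << std::endl;
        }
}
\end{verbatim}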
|
38
doc/latex/classneuralpp_1_1InvalidXMLException.tex
Normal file
|
@ -0,0 +1,38 @@
|
|||
\section{neuralpp::InvalidXMLException Class Reference}
|
||||
\label{classneuralpp_1_1InvalidXMLException}\index{neuralpp::InvalidXMLException@{neuralpp::InvalidXMLException}}
|
||||
Exception thrown when trying to parse an invalid XML.
|
||||
|
||||
|
||||
{\tt \#include $<$neural++\_\-exception.hpp$>$}
|
||||
|
||||
\subsection*{Public Member Functions}
|
||||
\begin{CompactItemize}
|
||||
\item
|
||||
{\bf InvalidXMLException} ()
|
||||
\item
|
||||
const char $\ast$ {\bf what} () const throw ()
|
||||
\end{CompactItemize}
|
||||
|
||||
|
||||
\subsection{Detailed Description}
|
||||
Exception thrown when trying to parse an invalid XML.
|
||||
|
||||
\subsection{Constructor \& Destructor Documentation}
|
||||
\index{neuralpp::InvalidXMLException@{neuralpp::InvalidXMLException}!InvalidXMLException@{InvalidXMLException}}
|
||||
\index{InvalidXMLException@{InvalidXMLException}!neuralpp::InvalidXMLException@{neuralpp::InvalidXMLException}}
|
||||
\subsubsection[InvalidXMLException]{\setlength{\rightskip}{0pt plus 5cm}neuralpp::InvalidXMLException::InvalidXMLException ()\hspace{0.3cm}{\tt [inline]}}\label{classneuralpp_1_1InvalidXMLException_10279e6f42a1ccb934afcfef2770c537}
|
||||
|
||||
|
||||
|
||||
|
||||
\subsection{Member Function Documentation}
|
||||
\index{neuralpp::InvalidXMLException@{neuralpp::InvalidXMLException}!what@{what}}
|
||||
\index{what@{what}!neuralpp::InvalidXMLException@{neuralpp::InvalidXMLException}}
|
||||
\subsubsection[what]{\setlength{\rightskip}{0pt plus 5cm}const char$\ast$ neuralpp::InvalidXMLException::what () const throw ()\hspace{0.3cm}{\tt [inline]}}\label{classneuralpp_1_1InvalidXMLException_4a8b81678ed0c319e1715ab5ef9112da}
|
||||
|
||||
|
||||
|
||||
|
||||
The documentation for this class was generated from the following file:\begin{CompactItemize}
|
||||
\item
|
||||
{\bf neural++\_\-exception.hpp}\end{CompactItemize}
|
|
@ -8,18 +8,16 @@ Class for managing layers of neurons.
|
|||
\subsection*{Public Member Functions}
|
||||
\begin{CompactItemize}
|
||||
\item
|
||||
{\bf Layer} (size\_\-t sz, double($\ast$a)(double))
|
||||
{\bf Layer} (size\_\-t sz, double($\ast$a)(double), double th=0.0)
|
||||
\begin{CompactList}\small\item\em Constructor. \item\end{CompactList}\item
|
||||
{\bf Layer} (vector$<$ {\bf Neuron} $>$ \&neurons, double($\ast$a)(double))
|
||||
{\bf Layer} (std::vector$<$ {\bf Neuron} $>$ \&neurons, double($\ast$a)(double), double th=0.0)
|
||||
\begin{CompactList}\small\item\em Alternative constructor. \item\end{CompactList}\item
|
||||
{\bf Neuron} \& {\bf operator[$\,$]} (size\_\-t i) throw (NetworkIndexOutOfBoundsException)
|
||||
\begin{CompactList}\small\item\em Redefinition for operator []. \item\end{CompactList}\item
|
||||
void {\bf link} ({\bf Layer} \&l)
|
||||
\begin{CompactList}\small\item\em It links a layer to another. \item\end{CompactList}\item
|
||||
void {\bf setProp} (vector$<$ double $>$ \&v)
|
||||
\begin{CompactList}\small\item\em It sets a vector of propagation values to all its neurons. \item\end{CompactList}\item
|
||||
void {\bf setActv} (vector$<$ double $>$ \&v)
|
||||
\begin{CompactList}\small\item\em It sets a vector of activation values to all its neurons. \item\end{CompactList}\item
|
||||
void {\bf setInput} (std::vector$<$ double $>$ v)
|
||||
\begin{CompactList}\small\item\em Set the input values for the neurons of the layer (just use it for the input layer). \item\end{CompactList}\item
|
||||
void {\bf propagate} ()
|
||||
\begin{CompactList}\small\item\em It propagates its activation values to the output layers. \item\end{CompactList}\item
|
||||
size\_\-t {\bf size} () const
|
||||
|
@ -27,7 +25,9 @@ size\_\-t {\bf size} () const
|
|||
\subsection*{Private Attributes}
|
||||
\begin{CompactItemize}
|
||||
\item
|
||||
vector$<$ {\bf Neuron} $>$ {\bf elements}
|
||||
std::vector$<$ {\bf Neuron} $>$ {\bf elements}
|
||||
\item
|
||||
double {\bf threshold}
|
||||
\item
|
||||
void($\ast$ {\bf update\_\-weights} )()
|
||||
\item
|
||||
|
@ -43,7 +43,7 @@ Don't use this class directly unless you know what you're doing, use \doxyref{Ne
|
|||
\subsection{Constructor \& Destructor Documentation}
|
||||
\index{neuralpp::Layer@{neuralpp::Layer}!Layer@{Layer}}
|
||||
\index{Layer@{Layer}!neuralpp::Layer@{neuralpp::Layer}}
|
||||
\subsubsection[Layer]{\setlength{\rightskip}{0pt plus 5cm}neuralpp::Layer::Layer (size\_\-t {\em sz}, \/ double($\ast$)(double) {\em a})}\label{classneuralpp_1_1Layer_fb08bddd85d36570dabfe915461f07c5}
|
||||
\subsubsection[Layer]{\setlength{\rightskip}{0pt plus 5cm}neuralpp::Layer::Layer (size\_\-t {\em sz}, \/ double($\ast$)(double) {\em a}, \/ double {\em th} = {\tt 0.0})}\label{classneuralpp_1_1Layer_a235767701b5e9dcf28c5e9e0d04cb0b}
|
||||
|
||||
|
||||
Constructor.
|
||||
|
@ -51,11 +51,11 @@ Constructor.
|
|||
\begin{Desc}
|
||||
\item[Parameters:]
|
||||
\begin{description}
|
||||
\item[{\em sz}]Size of the layer \item[{\em a}]Activation function \end{description}
|
||||
\item[{\em sz}]Size of the layer \item[{\em a}]Activation function \item[{\em th}]Threshold, a value in [0,1] that establishes how sensitive a neuron is to variations of the input values \end{description}
|
||||
\end{Desc}
|
||||
\index{neuralpp::Layer@{neuralpp::Layer}!Layer@{Layer}}
|
||||
\index{Layer@{Layer}!neuralpp::Layer@{neuralpp::Layer}}
|
||||
\subsubsection[Layer]{\setlength{\rightskip}{0pt plus 5cm}neuralpp::Layer::Layer (vector$<$ {\bf Neuron} $>$ \& {\em neurons}, \/ double($\ast$)(double) {\em a})}\label{classneuralpp_1_1Layer_609ed16b8d79dc384b01ba7d16b72bd1}
|
||||
\subsubsection[Layer]{\setlength{\rightskip}{0pt plus 5cm}neuralpp::Layer::Layer (std::vector$<$ {\bf Neuron} $>$ \& {\em neurons}, \/ double($\ast$)(double) {\em a}, \/ double {\em th} = {\tt 0.0})}\label{classneuralpp_1_1Layer_d79f684523f8a6e086b962c8eef37623}
|
||||
|
||||
|
||||
Alternative constructor.
|
||||
|
@ -63,14 +63,14 @@ Alternative constructor.
|
|||
It directly gets a vector of neurons to build the layer \begin{Desc}
|
||||
\item[Parameters:]
|
||||
\begin{description}
|
||||
\item[{\em neurons}]Vector of neurons to be included in the layer \item[{\em a}]Activation function \end{description}
|
||||
\item[{\em neurons}]Vector of neurons to be included in the layer \item[{\em a}]Activation function \item[{\em th}]Threshold, a value in [0,1] that establishes how sensitive a neuron is to variations of the input values \end{description}
|
||||
\end{Desc}
|
||||
|
||||
|
||||
\subsection{Member Function Documentation}
|
||||
\index{neuralpp::Layer@{neuralpp::Layer}!operator[]@{operator[]}}
|
||||
\index{operator[]@{operator[]}!neuralpp::Layer@{neuralpp::Layer}}
|
||||
\subsubsection[operator[]]{\setlength{\rightskip}{0pt plus 5cm}{\bf Neuron}\& neuralpp::Layer::operator[$\,$] (size\_\-t {\em i}) throw (NetworkIndexOutOfBoundsException)}\label{classneuralpp_1_1Layer_45ff7554830558155c6fbce3b6827122}
|
||||
\subsubsection[operator[]]{\setlength{\rightskip}{0pt plus 5cm}{\bf Neuron}\& neuralpp::Layer::operator[$\,$] (size\_\-t {\em i}) throw ({\bf NetworkIndexOutOfBoundsException})}\label{classneuralpp_1_1Layer_45ff7554830558155c6fbce3b6827122}
|
||||
|
||||
|
||||
Redefinition for operator [].
|
||||
|
@ -94,29 +94,17 @@ It links a layer to another.
|
|||
\begin{description}
|
||||
\item[{\em l}]\doxyref{Layer}{p.}{classneuralpp_1_1Layer} to connect to the current as input layer \end{description}
|
||||
\end{Desc}
|
||||
\index{neuralpp::Layer@{neuralpp::Layer}!setProp@{setProp}}
|
||||
\index{setProp@{setProp}!neuralpp::Layer@{neuralpp::Layer}}
|
||||
\subsubsection[setProp]{\setlength{\rightskip}{0pt plus 5cm}void neuralpp::Layer::setProp (vector$<$ double $>$ \& {\em v})}\label{classneuralpp_1_1Layer_dbad95f635343a2998794113d7762cf7}
|
||||
\index{neuralpp::Layer@{neuralpp::Layer}!setInput@{setInput}}
|
||||
\index{setInput@{setInput}!neuralpp::Layer@{neuralpp::Layer}}
|
||||
\subsubsection[setInput]{\setlength{\rightskip}{0pt plus 5cm}void neuralpp::Layer::setInput (std::vector$<$ double $>$ {\em v})}\label{classneuralpp_1_1Layer_88ceffc23f02a9dc24d4355767b7cca7}
|
||||
|
||||
|
||||
It sets a vector of propagation values to all its neurons.
|
||||
Set the input values for the neurons of the layer (just use it for the input layer).
|
||||
|
||||
\begin{Desc}
|
||||
\item[Parameters:]
|
||||
\begin{description}
|
||||
\item[{\em v}]Vector of values to write as propagation values \end{description}
|
||||
\end{Desc}
|
||||
\index{neuralpp::Layer@{neuralpp::Layer}!setActv@{setActv}}
|
||||
\index{setActv@{setActv}!neuralpp::Layer@{neuralpp::Layer}}
|
||||
\subsubsection[setActv]{\setlength{\rightskip}{0pt plus 5cm}void neuralpp::Layer::setActv (vector$<$ double $>$ \& {\em v})}\label{classneuralpp_1_1Layer_98f79281b680b1d18fd91fa5794c0899}
|
||||
|
||||
|
||||
It sets a vector of activation values to all its neurons.
|
||||
|
||||
\begin{Desc}
|
||||
\item[Parameters:]
|
||||
\begin{description}
|
||||
\item[{\em v}]Vector of values to write as activation values \end{description}
|
||||
\item[{\em v}]Vector containing the input values \end{description}
|
||||
\end{Desc}
|
||||
\index{neuralpp::Layer@{neuralpp::Layer}!propagate@{propagate}}
|
||||
\index{propagate@{propagate}!neuralpp::Layer@{neuralpp::Layer}}
|
||||
|
@ -137,7 +125,12 @@ It propagates its activation values to the output layers.
|
|||
\subsection{Member Data Documentation}
|
||||
\index{neuralpp::Layer@{neuralpp::Layer}!elements@{elements}}
|
||||
\index{elements@{elements}!neuralpp::Layer@{neuralpp::Layer}}
|
||||
\subsubsection[elements]{\setlength{\rightskip}{0pt plus 5cm}vector$<${\bf Neuron}$>$ {\bf neuralpp::Layer::elements}\hspace{0.3cm}{\tt [private]}}\label{classneuralpp_1_1Layer_855685c9da56b06b629e13a2f8c719ce}
|
||||
\subsubsection[elements]{\setlength{\rightskip}{0pt plus 5cm}std::vector$<${\bf Neuron}$>$ {\bf neuralpp::Layer::elements}\hspace{0.3cm}{\tt [private]}}\label{classneuralpp_1_1Layer_8188cb5c264e6021cee9979b968a0305}
|
||||
|
||||
|
||||
\index{neuralpp::Layer@{neuralpp::Layer}!threshold@{threshold}}
|
||||
\index{threshold@{threshold}!neuralpp::Layer@{neuralpp::Layer}}
|
||||
\subsubsection[threshold]{\setlength{\rightskip}{0pt plus 5cm}double {\bf neuralpp::Layer::threshold}\hspace{0.3cm}{\tt [private]}}\label{classneuralpp_1_1Layer_02cf4efe1da02a7404d25375c85ed71f}
|
||||
|
||||
|
||||
\index{neuralpp::Layer@{neuralpp::Layer}!update\_\-weights@{update\_\-weights}}
|
||||
|
|
46
doc/latex/classneuralpp_1_1NetworkFileNotFoundException.tex
Normal file
|
@ -0,0 +1,46 @@
|
|||
\section{neuralpp::NetworkFileNotFoundException Class Reference}
|
||||
\label{classneuralpp_1_1NetworkFileNotFoundException}\index{neuralpp::NetworkFileNotFoundException@{neuralpp::NetworkFileNotFoundException}}
|
||||
Exception thrown when attempting to load a network from an invalid file.
|
||||
|
||||
|
||||
{\tt \#include $<$neural++\_\-exception.hpp$>$}
|
||||
|
||||
\subsection*{Public Member Functions}
|
||||
\begin{CompactItemize}
|
||||
\item
|
||||
{\bf NetworkFileNotFoundException} ()
|
||||
\item
|
||||
const char $\ast$ {\bf what} () const throw ()
|
||||
\end{CompactItemize}
|
||||
|
||||
|
||||
\subsection{Detailed Description}
|
||||
Exception thrown when attempting to load a network from an invalid file. \begin{Desc}
|
||||
\item[Examples: ]\par
|
||||
|
||||
|
||||
{\bf examples/doAdd.cpp}.\end{Desc}
|
||||
|
||||
|
||||
\subsection{Constructor \& Destructor Documentation}
|
||||
\index{neuralpp::NetworkFileNotFoundException@{neuralpp::NetworkFileNotFoundException}!NetworkFileNotFoundException@{NetworkFileNotFoundException}}
|
||||
\index{NetworkFileNotFoundException@{NetworkFileNotFoundException}!neuralpp::NetworkFileNotFoundException@{neuralpp::NetworkFileNotFoundException}}
|
||||
\subsubsection[NetworkFileNotFoundException]{\setlength{\rightskip}{0pt plus 5cm}neuralpp::NetworkFileNotFoundException::NetworkFileNotFoundException ()\hspace{0.3cm}{\tt [inline]}}\label{classneuralpp_1_1NetworkFileNotFoundException_9603eebdb841f06c9b1007e65cba60f6}
|
||||
|
||||
|
||||
|
||||
|
||||
\subsection{Member Function Documentation}
|
||||
\index{neuralpp::NetworkFileNotFoundException@{neuralpp::NetworkFileNotFoundException}!what@{what}}
|
||||
\index{what@{what}!neuralpp::NetworkFileNotFoundException@{neuralpp::NetworkFileNotFoundException}}
|
||||
\subsubsection[what]{\setlength{\rightskip}{0pt plus 5cm}const char$\ast$ neuralpp::NetworkFileNotFoundException::what () const throw ()\hspace{0.3cm}{\tt [inline]}}\label{classneuralpp_1_1NetworkFileNotFoundException_24cde7729ab22d343f36e638cfc40702}
|
||||
|
||||
|
||||
\begin{Desc}
|
||||
\item[Examples: ]\par
|
||||
{\bf examples/doAdd.cpp}.\end{Desc}
|
||||
|
||||
|
||||
The documentation for this class was generated from the following file:\begin{CompactItemize}
|
||||
\item
|
||||
{\bf neural++\_\-exception.hpp}\end{CompactItemize}
|
38
doc/latex/classneuralpp_1_1NetworkFileWriteException.tex
Normal file
|
@ -0,0 +1,38 @@
|
|||
\section{neuralpp::NetworkFileWriteException Class Reference}
|
||||
\label{classneuralpp_1_1NetworkFileWriteException}\index{neuralpp::NetworkFileWriteException@{neuralpp::NetworkFileWriteException}}
|
||||
Exception thrown when trying to write the network's information to a file that cannot be written.
|
||||
|
||||
|
||||
{\tt \#include $<$neural++\_\-exception.hpp$>$}
|
||||
|
||||
\subsection*{Public Member Functions}
|
||||
\begin{CompactItemize}
|
||||
\item
|
||||
{\bf NetworkFileWriteException} ()
|
||||
\item
|
||||
const char $\ast$ {\bf what} () const throw ()
|
||||
\end{CompactItemize}
|
||||
|
||||
|
||||
\subsection{Detailed Description}
|
||||
Exception thrown when trying to write the network's information to a file that cannot be written.
|
||||
|
||||
\subsection{Constructor \& Destructor Documentation}
|
||||
\index{neuralpp::NetworkFileWriteException@{neuralpp::NetworkFileWriteException}!NetworkFileWriteException@{NetworkFileWriteException}}
|
||||
\index{NetworkFileWriteException@{NetworkFileWriteException}!neuralpp::NetworkFileWriteException@{neuralpp::NetworkFileWriteException}}
|
||||
\subsubsection[NetworkFileWriteException]{\setlength{\rightskip}{0pt plus 5cm}neuralpp::NetworkFileWriteException::NetworkFileWriteException ()\hspace{0.3cm}{\tt [inline]}}\label{classneuralpp_1_1NetworkFileWriteException_83bc897b0ea11a384e0839a39e9fdb48}
|
||||
|
||||
|
||||
|
||||
|
||||
\subsection{Member Function Documentation}
|
||||
\index{neuralpp::NetworkFileWriteException@{neuralpp::NetworkFileWriteException}!what@{what}}
|
||||
\index{what@{what}!neuralpp::NetworkFileWriteException@{neuralpp::NetworkFileWriteException}}
|
||||
\subsubsection[what]{\setlength{\rightskip}{0pt plus 5cm}const char$\ast$ neuralpp::NetworkFileWriteException::what () const throw ()\hspace{0.3cm}{\tt [inline]}}\label{classneuralpp_1_1NetworkFileWriteException_c8802852cb38d5edf6f490ff3acb6078}
|
||||
|
||||
|
||||
|
||||
|
||||
The documentation for this class was generated from the following file:\begin{CompactItemize}
|
||||
\item
|
||||
{\bf neural++\_\-exception.hpp}\end{CompactItemize}
|
|
@ -0,0 +1,38 @@
|
|||
\section{neuralpp::NetworkIndexOutOfBoundsException Class Reference}
|
||||
\label{classneuralpp_1_1NetworkIndexOutOfBoundsException}\index{neuralpp::NetworkIndexOutOfBoundsException@{neuralpp::NetworkIndexOutOfBoundsException}}
|
||||
Exception raised when trying to access a neuron whose index is larger than the number of neurons in the network.
|
||||
|
||||
|
||||
{\tt \#include $<$neural++\_\-exception.hpp$>$}
|
||||
|
||||
\subsection*{Public Member Functions}
|
||||
\begin{CompactItemize}
|
||||
\item
|
||||
{\bf NetworkIndexOutOfBoundsException} ()
|
||||
\item
|
||||
const char $\ast$ {\bf what} () const throw ()
|
||||
\end{CompactItemize}
|
||||
|
||||
|
||||
\subsection{Detailed Description}
|
||||
Exception raised when trying to access a neuron whose index is larger than the number of neurons in the network.
|
||||
|
||||
\subsection{Constructor \& Destructor Documentation}
|
||||
\index{neuralpp::NetworkIndexOutOfBoundsException@{neuralpp::NetworkIndexOutOfBoundsException}!NetworkIndexOutOfBoundsException@{NetworkIndexOutOfBoundsException}}
|
||||
\index{NetworkIndexOutOfBoundsException@{NetworkIndexOutOfBoundsException}!neuralpp::NetworkIndexOutOfBoundsException@{neuralpp::NetworkIndexOutOfBoundsException}}
|
||||
\subsubsection[NetworkIndexOutOfBoundsException]{\setlength{\rightskip}{0pt plus 5cm}neuralpp::NetworkIndexOutOfBoundsException::NetworkIndexOutOfBoundsException ()\hspace{0.3cm}{\tt [inline]}}\label{classneuralpp_1_1NetworkIndexOutOfBoundsException_07141d2225ea9f2d486e755a3660d594}
|
||||
|
||||
|
||||
|
||||
|
||||
\subsection{Member Function Documentation}
|
||||
\index{neuralpp::NetworkIndexOutOfBoundsException@{neuralpp::NetworkIndexOutOfBoundsException}!what@{what}}
|
||||
\index{what@{what}!neuralpp::NetworkIndexOutOfBoundsException@{neuralpp::NetworkIndexOutOfBoundsException}}
|
||||
\subsubsection[what]{\setlength{\rightskip}{0pt plus 5cm}const char$\ast$ neuralpp::NetworkIndexOutOfBoundsException::what () const throw ()\hspace{0.3cm}{\tt [inline]}}\label{classneuralpp_1_1NetworkIndexOutOfBoundsException_8b65d77cb375a0aa40d5a9ea692bd5c3}
|
||||
|
||||
|
||||
|
||||
|
||||
The documentation for this class was generated from the following file:\begin{CompactItemize}
|
||||
\item
|
||||
{\bf neural++\_\-exception.hpp}\end{CompactItemize}
|
|
@ -17,42 +17,46 @@ enum {\bf source} \{ {\bf file},
|
|||
\item
|
||||
{\bf NeuralNet} ()
|
||||
\begin{CompactList}\small\item\em Empty constructor for the class - it does nothing. \item\end{CompactList}\item
|
||||
{\bf NeuralNet} (size\_\-t in\_\-size, size\_\-t hidden\_\-size, size\_\-t out\_\-size, double l, int e)
|
||||
{\bf NeuralNet} (size\_\-t in\_\-size, size\_\-t hidden\_\-size, size\_\-t out\_\-size, double l, int e, double th=0.0, double($\ast$a)(double)=\_\-\_\-actv)
|
||||
\begin{CompactList}\small\item\em Constructor. \item\end{CompactList}\item
|
||||
{\bf NeuralNet} (const string file) throw (NetworkFileNotFoundException)
|
||||
\begin{CompactList}\small\item\em Constructor. \item\end{CompactList}\item
|
||||
{\bf NeuralNet} (size\_\-t in\_\-size, size\_\-t hidden\_\-size, size\_\-t out\_\-size, double($\ast$actv)(double), double l, int e)
|
||||
{\bf NeuralNet} (const std::string file) throw (NetworkFileNotFoundException)
|
||||
\begin{CompactList}\small\item\em Constructor. \item\end{CompactList}\item
|
||||
double {\bf getOutput} () const
|
||||
\begin{CompactList}\small\item\em It gets the output of the network (note: the output layer should contain only one neuron). \item\end{CompactList}\item
|
||||
vector$<$ double $>$ {\bf getOutputs} ()
|
||||
double {\bf getThreshold} () const
|
||||
\begin{CompactList}\small\item\em Get the threshold of the neurons in the network. \item\end{CompactList}\item
|
||||
std::vector$<$ double $>$ {\bf getOutputs} ()
|
||||
\begin{CompactList}\small\item\em It gets the output of the network in case the output layer contains more than one neuron. \item\end{CompactList}\item
|
||||
double {\bf expected} () const
|
||||
\begin{CompactList}\small\item\em It gets the value expected. \item\end{CompactList}\item
|
||||
void {\bf setExpected} (double {\bf ex})
|
||||
\begin{CompactList}\small\item\em It sets the value you expect from your network. \item\end{CompactList}\item
|
||||
\begin{CompactList}\small\item\em Get the expected value (in case you have only one neuron in the output layer). \item\end{CompactList}\item
|
||||
std::vector$<$ double $>$ {\bf getExpected} () const
|
||||
\begin{CompactList}\small\item\em Get the expected value (in case you have only one neuron in the output layer). \item\end{CompactList}\item
|
||||
void {\bf setExpected} (double ex)
|
||||
\begin{CompactList}\small\item\em It sets the value you expect from your network (in case the network has only one neuron in its output layer). \item\end{CompactList}\item
|
||||
void {\bf setExpected} (std::vector$<$ double $>$ ex)
|
||||
\begin{CompactList}\small\item\em Set the values you expect from your network. \item\end{CompactList}\item
|
||||
void {\bf update} ()
|
||||
\begin{CompactList}\small\item\em It updates the weights of the net's synapses through back-propagation and recomputes the output value for {\em epochs\/} iterations, calling the updateWeights and commitChanges functions. \item\end{CompactList}\item
|
||||
void {\bf propagate} ()
|
||||
\begin{CompactList}\small\item\em It propagates values through the network. \item\end{CompactList}\item
|
||||
void {\bf setInput} (vector$<$ double $>$ \&v)
|
||||
void {\bf setInput} (std::vector$<$ double $>$ v)
|
||||
\begin{CompactList}\small\item\em It sets the input for the network. \item\end{CompactList}\item
|
||||
void {\bf link} ()
|
||||
\begin{CompactList}\small\item\em It links the layers of the network (input, hidden, output). \item\end{CompactList}\item
|
||||
void {\bf save} (const char $\ast$fname) throw (NetworkFileWriteException)
|
||||
\begin{CompactList}\small\item\em Save a trained neural network to a binary file. \item\end{CompactList}\item
|
||||
void {\bf train} (string xml, {\bf source} src) throw (InvalidXMLException)
|
||||
void {\bf train} (std::string xml, {\bf source} src) throw (InvalidXMLException)
|
||||
\begin{CompactList}\small\item\em Train a network using a training set loaded from an XML file. \item\end{CompactList}\end{CompactItemize}
|
||||
\subsection*{Static Public Member Functions}
|
||||
\begin{CompactItemize}
|
||||
\item
|
||||
static void {\bf initXML} (string \&xml)
|
||||
static void {\bf initXML} (std::string \&xml)
|
||||
\begin{CompactList}\small\item\em Initialize the training XML for the neural network. \item\end{CompactList}\item
|
||||
static vector$<$ double $>$ {\bf split} (char delim, string str)
|
||||
static std::vector$<$ double $>$ {\bf split} (char delim, std::string str)
|
||||
\begin{CompactList}\small\item\em Splits a string into a vector of doubles, given a delimiter. \item\end{CompactList}\item
|
||||
static string {\bf XMLFromSet} (int id, string set)
|
||||
static std::string {\bf XMLFromSet} (int \&id, std::string set)
|
||||
\begin{CompactList}\small\item\em Get a training set from a string and copy it to an XML string. For example, these strings could be training sets for making sums: \char`\"{}2,3;5\char`\"{} - \char`\"{}5,6;11\char`\"{} - \char`\"{}2,2;4\char`\"{} - \char`\"{}4,5;9\char`\"{}. Called on the first string, this method will return an XML fragment such as: '$<$training id=\char`\"{}0\char`\"{}$>$$<$input id=\char`\"{}0\char`\"{}$>$2$<$/input$>$$<$input id=\char`\"{}1\char`\"{}$>$3$<$/input$>$$<$output id=\char`\"{}0\char`\"{}$>$5$<$/output$>$$<$/training$>$'. \item\end{CompactList}\item
|
||||
static void {\bf closeXML} (string \&xml)
|
||||
static void {\bf closeXML} (std::string \&xml)
|
||||
\begin{CompactList}\small\item\em Closes an open XML document generated by \char`\"{}initXML\char`\"{} and \char`\"{}XMLFromSet\char`\"{}. \item\end{CompactList}\end{CompactItemize}
|
||||
\subsection*{Public Attributes}
|
||||
\begin{CompactItemize}
|
||||
|
@ -68,10 +72,10 @@ static void {\bf closeXML} (string \&xml)
|
|||
\item
|
||||
void {\bf updateWeights} ()
|
||||
\begin{CompactList}\small\item\em It updates the weights of the net's synapsis through back-propagation. \item\end{CompactList}\item
|
||||
void {\bf commitChanges} ({\bf Layer} $\ast$l)
|
||||
void {\bf commitChanges} ({\bf Layer} \&l)
|
||||
\begin{CompactList}\small\item\em It commits the changes made by \doxyref{updateWeights()}{p.}{classneuralpp_1_1NeuralNet_94169c89a7cd47122ab5dbf1d5c5e108} to the layer l. \item\end{CompactList}\item
|
||||
double {\bf error} (double {\bf ex}) const
|
||||
\begin{CompactList}\small\item\em Get the error made on the expected result as $|$v-v'$|$/v. \item\end{CompactList}\end{CompactItemize}
|
||||
double {\bf error} (double ex)
|
||||
\begin{CompactList}\small\item\em Get the error made on the expected result as squared deviance. \item\end{CompactList}\end{CompactItemize}
|
||||
\subsection*{Private Attributes}
|
||||
\begin{CompactItemize}
|
||||
\item
|
||||
|
@ -81,7 +85,9 @@ int {\bf ref\_\-epochs}
|
|||
\item
|
||||
double {\bf l\_\-rate}
|
||||
\item
|
||||
double {\bf ex}
|
||||
double {\bf threshold}
|
||||
\item
|
||||
std::vector$<$ double $>$ {\bf expect}
|
||||
\item
|
||||
double($\ast$ {\bf actv\_\-f} )(double)
|
||||
\begin{CompactList}\small\item\em Private pointer to function, containing the function to be used as activation function. \item\end{CompactList}\end{CompactItemize}
|
||||
|
@ -90,7 +96,12 @@ double($\ast$ {\bf actv\_\-f} )(double)
|
|||
\subsection{Detailed Description}
|
||||
The project's main class.
|
||||
|
||||
Use $\ast$ONLY$\ast$ this class, unless you know what you're doing
|
||||
Use $\ast$ONLY$\ast$ this class, unless you know what you're doing \begin{Desc}
|
||||
\item[Examples: ]\par
|
||||
|
||||
|
||||
{\bf examples/adderFromScratch.cpp}, {\bf examples/doAdd.cpp}, and {\bf examples/learnAdd.cpp}.\end{Desc}
|
||||
|
||||
|
||||
\subsection{Member Enumeration Documentation}
|
||||
\index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!source@{source}}
|
||||
|
@ -122,7 +133,7 @@ Empty constructor for the class - it just makes nothing.
|
|||
|
||||
\index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!NeuralNet@{NeuralNet}}
|
||||
\index{NeuralNet@{NeuralNet}!neuralpp::NeuralNet@{neuralpp::NeuralNet}}
|
||||
\subsubsection[NeuralNet]{\setlength{\rightskip}{0pt plus 5cm}neuralpp::NeuralNet::NeuralNet (size\_\-t {\em in\_\-size}, \/ size\_\-t {\em hidden\_\-size}, \/ size\_\-t {\em out\_\-size}, \/ double {\em l}, \/ int {\em e})}\label{classneuralpp_1_1NeuralNet_c79534c7c0dfb20d1d03be2ad7569b78}
|
||||
\subsubsection[NeuralNet]{\setlength{\rightskip}{0pt plus 5cm}neuralpp::NeuralNet::NeuralNet (size\_\-t {\em in\_\-size}, \/ size\_\-t {\em hidden\_\-size}, \/ size\_\-t {\em out\_\-size}, \/ double {\em l}, \/ int {\em e}, \/ double {\em th} = {\tt 0.0}, \/ double($\ast$)(double) {\em a} = {\tt \_\-\_\-actv})}\label{classneuralpp_1_1NeuralNet_3d602f3988a9a3e2c77dc6955674f412}
|
||||
|
||||
|
||||
Constructor.
|
||||
|
@ -130,11 +141,11 @@ Constructor.
|
|||
\begin{Desc}
|
||||
\item[Parameters:]
|
||||
\begin{description}
|
||||
\item[{\em in\_\-size}]Size of the input layer \item[{\em hidden\_\-size}]Size of the hidden layer \item[{\em out\_\-size}]Size of the output layer \item[{\em l}]learn rate (get it after doing some experiments, but generally try to keep its value quite low to be more accurate) \item[{\em e}]Epochs (cycles) to execute (the most you execute, the most the network can be accurate for its purpose) \end{description}
|
||||
\item[{\em in\_\-size}]Size of the input layer \item[{\em hidden\_\-size}]Size of the hidden layer \item[{\em out\_\-size}]Size of the output layer \item[{\em l}]Learning rate (tune it experimentally, but generally keep its value quite low for better accuracy) \item[{\em e}]Epochs (training cycles) to execute (the more you execute, the more accurate the network can become for its purpose) \item[{\em th}]Threshold, a value in [0,1] that establishes how sensitive a neuron is to variations of the input values \item[{\em a}]Activation function to use (default: f(x)=x) \end{description}
|
||||
\end{Desc}
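As a rough usage sketch (the numeric values here are purely illustrative, not recommendations), a 2-input, 2-hidden, 1-output network could be built with this constructor as follows:

\begin{verbatim}
#include <neural++.hpp>

// 2 input neurons, 2 hidden neurons, 1 output neuron,
// learning rate 0.005, 1000 epochs, default threshold and activation
neuralpp::NeuralNet net(2, 2, 1, 0.005, 1000);
\end{verbatim}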
|
||||
\index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!NeuralNet@{NeuralNet}}
|
||||
\index{NeuralNet@{NeuralNet}!neuralpp::NeuralNet@{neuralpp::NeuralNet}}
|
||||
\subsubsection[NeuralNet]{\setlength{\rightskip}{0pt plus 5cm}neuralpp::NeuralNet::NeuralNet (const string {\em file}) throw (NetworkFileNotFoundException)}\label{classneuralpp_1_1NeuralNet_7fc7fc3e3220c138ffa5356fef6b9757}
|
||||
\subsubsection[NeuralNet]{\setlength{\rightskip}{0pt plus 5cm}neuralpp::NeuralNet::NeuralNet (const std::string {\em file}) throw ({\bf NetworkFileNotFoundException})}\label{classneuralpp_1_1NeuralNet_b4bfa407d28bb17abf7f735a049987d9}
|
||||
|
||||
|
||||
Constructor.
|
||||
|
@ -147,19 +158,7 @@ Constructor.
|
|||
\begin{Desc}
|
||||
\item[Exceptions:]
|
||||
\begin{description}
|
||||
\item[{\em NetworkFileNotFoundException}]\end{description}
|
||||
\end{Desc}
|
||||
\index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!NeuralNet@{NeuralNet}}
|
||||
\index{NeuralNet@{NeuralNet}!neuralpp::NeuralNet@{neuralpp::NeuralNet}}
|
||||
\subsubsection[NeuralNet]{\setlength{\rightskip}{0pt plus 5cm}neuralpp::NeuralNet::NeuralNet (size\_\-t {\em in\_\-size}, \/ size\_\-t {\em hidden\_\-size}, \/ size\_\-t {\em out\_\-size}, \/ double($\ast$)(double) {\em actv}, \/ double {\em l}, \/ int {\em e})}\label{classneuralpp_1_1NeuralNet_0c16df2e0701503052c63749930b238e}
|
||||
|
||||
|
||||
Constructor.
|
||||
|
||||
\begin{Desc}
|
||||
\item[Parameters:]
|
||||
\begin{description}
|
||||
\item[{\em in\_\-size}]Size of the input layer \item[{\em hidden\_\-size}]Size of the hidden layer \item[{\em out\_\-size}]Size of the output layer \item[{\em actv}]Activation function to use (default: f(x)=x) \item[{\em l}]learn rate (get it after doing some experiments, but generally try to keep its value quite low to be more accurate) \item[{\em e}]Epochs (cycles) to execute (the most you execute, the most the network can be accurate for its purpose) \end{description}
|
||||
\item[{\em \doxyref{NetworkFileNotFoundException}{p.}{classneuralpp_1_1NetworkFileNotFoundException}}]\end{description}
|
||||
\end{Desc}
|
||||
|
||||
|
||||
|
@ -173,7 +172,7 @@ It updates the weights of the net's synapsis through back-propagation.
|
|||
|
||||
In-class use only \index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!commitChanges@{commitChanges}}
|
||||
\index{commitChanges@{commitChanges}!neuralpp::NeuralNet@{neuralpp::NeuralNet}}
|
||||
\subsubsection[commitChanges]{\setlength{\rightskip}{0pt plus 5cm}void neuralpp::NeuralNet::commitChanges ({\bf Layer} $\ast$ {\em l})\hspace{0.3cm}{\tt [private]}}\label{classneuralpp_1_1NeuralNet_62695a82dfb1df758a44150921aec8e0}
|
||||
\subsubsection[commitChanges]{\setlength{\rightskip}{0pt plus 5cm}void neuralpp::NeuralNet::commitChanges ({\bf Layer} \& {\em l})\hspace{0.3cm}{\tt [private]}}\label{classneuralpp_1_1NeuralNet_f697a8d9967ad8f03e5a16a42cd110c5}
|
||||
|
||||
|
||||
It commits the changes made by \doxyref{updateWeights()}{p.}{classneuralpp_1_1NeuralNet_94169c89a7cd47122ab5dbf1d5c5e108} to the layer l.
|
||||
|
@ -185,10 +184,10 @@ In-class use only \begin{Desc}
|
|||
\end{Desc}
|
||||
\index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!error@{error}}
|
||||
\index{error@{error}!neuralpp::NeuralNet@{neuralpp::NeuralNet}}
|
||||
\subsubsection[error]{\setlength{\rightskip}{0pt plus 5cm}double neuralpp::NeuralNet::error (double {\em ex}) const\hspace{0.3cm}{\tt [private]}}\label{classneuralpp_1_1NeuralNet_0616c51404efaca2714e37dd7478997e}
|
||||
\subsubsection[error]{\setlength{\rightskip}{0pt plus 5cm}double neuralpp::NeuralNet::error (double {\em ex})\hspace{0.3cm}{\tt [private]}}\label{classneuralpp_1_1NeuralNet_8a140d28e6dd4097470c7c138801ad01}
|
||||
|
||||
|
||||
Get the error made on the expected result as $|$v-v'$|$/v.
|
||||
Get the error made on the expected result as squared deviance.
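Assuming the usual definition (this formula is inferred from the description above, not taken from the source), the squared deviance between the expected value $d$ and the actual output $y$ would be \[ E = \frac{1}{2}\,(d - y)^2 . \]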
|
||||
|
||||
\begin{Desc}
|
||||
\item[Parameters:]
|
||||
|
@ -206,21 +205,45 @@ It gets the output of the network (note: the layer output should contain an only
|
|||
|
||||
\begin{Desc}
|
||||
\item[Returns:]The output value of the network \end{Desc}
|
||||
\begin{Desc}
|
||||
\item[Examples: ]\par
|
||||
{\bf examples/doAdd.cpp}.\end{Desc}
|
||||
\index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!getThreshold@{getThreshold}}
|
||||
\index{getThreshold@{getThreshold}!neuralpp::NeuralNet@{neuralpp::NeuralNet}}
|
||||
\subsubsection[getThreshold]{\setlength{\rightskip}{0pt plus 5cm}double neuralpp::NeuralNet::getThreshold () const}\label{classneuralpp_1_1NeuralNet_e08cdcf4b70f987700e553d9914f6179}
|
||||
|
||||
|
||||
Get the threshold of the neurons in the network.
|
||||
|
||||
\begin{Desc}
|
||||
\item[Returns:]The threshold of the neurons \end{Desc}
|
||||
\index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!getOutputs@{getOutputs}}
|
||||
\index{getOutputs@{getOutputs}!neuralpp::NeuralNet@{neuralpp::NeuralNet}}
|
||||
\subsubsection[getOutputs]{\setlength{\rightskip}{0pt plus 5cm}vector$<$double$>$ neuralpp::NeuralNet::getOutputs ()}\label{classneuralpp_1_1NeuralNet_a6b8bf3800b43b58843c65fc431207ae}
|
||||
\subsubsection[getOutputs]{\setlength{\rightskip}{0pt plus 5cm}std::vector$<$double$>$ neuralpp::NeuralNet::getOutputs ()}\label{classneuralpp_1_1NeuralNet_e6d2215ecc8b560db2f6797db642191c}
|
||||
|
||||
|
||||
It gets the output of the network in case the output layer contains more than one neuron.
|
||||
|
||||
\begin{Desc}
|
||||
\item[Returns:]A vector containing the output values of the network \end{Desc}
|
||||
\begin{Desc}
|
||||
\item[Examples: ]\par
|
||||
{\bf examples/adderFromScratch.cpp}.\end{Desc}
|
||||
\index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!expected@{expected}}
|
||||
\index{expected@{expected}!neuralpp::NeuralNet@{neuralpp::NeuralNet}}
|
||||
\subsubsection[expected]{\setlength{\rightskip}{0pt plus 5cm}double neuralpp::NeuralNet::expected () const}\label{classneuralpp_1_1NeuralNet_562dfe9fb8d73bf25a23ce608451d3aa}
|
||||
|
||||
|
||||
It gets the value expected.
|
||||
Get the expected value (in case you have only one neuron in the output layer).
|
||||
|
||||
Of course you should specify this when you build your network by using setExpected. \begin{Desc}
|
||||
\item[Returns:]The expected output value for a certain training phase \end{Desc}
|
||||
\index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!getExpected@{getExpected}}
|
||||
\index{getExpected@{getExpected}!neuralpp::NeuralNet@{neuralpp::NeuralNet}}
|
||||
\subsubsection[getExpected]{\setlength{\rightskip}{0pt plus 5cm}std::vector$<$double$>$ neuralpp::NeuralNet::getExpected () const}\label{classneuralpp_1_1NeuralNet_51a1851ed07b85bec091c9053ae99cf7}
|
||||
|
||||
|
||||
Get the expected value (in case you have only one neuron in the output layer).
|
||||
|
||||
Of course you should specify this when you build your network by using setExpected. \begin{Desc}
|
||||
\item[Returns:]The expected output value for a certain training phase \end{Desc}
|
||||
|
@ -229,13 +252,25 @@ Of course you should specify this when you build your network by using setExpect
|
|||
\subsubsection[setExpected]{\setlength{\rightskip}{0pt plus 5cm}void neuralpp::NeuralNet::setExpected (double {\em ex})}\label{classneuralpp_1_1NeuralNet_b6475762b7e9eab086befdc511f7c236}
|
||||
|
||||
|
||||
It sets the value you expect from your network.
|
||||
It sets the value you expect from your network (in case the network has only one neuron in its output layer).
|
||||
|
||||
\begin{Desc}
|
||||
\item[Parameters:]
|
||||
\begin{description}
|
||||
\item[{\em ex}]Expected output value \end{description}
|
||||
\end{Desc}
|
||||
\index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!setExpected@{setExpected}}
|
||||
\index{setExpected@{setExpected}!neuralpp::NeuralNet@{neuralpp::NeuralNet}}
|
||||
\subsubsection[setExpected]{\setlength{\rightskip}{0pt plus 5cm}void neuralpp::NeuralNet::setExpected (std::vector$<$ double $>$ {\em ex})}\label{classneuralpp_1_1NeuralNet_e649edc3d86bec7c0e178d5c892b4fd7}
|
||||
|
||||
|
||||
Set the values you expect from your network.
|
||||
|
||||
\begin{Desc}
|
||||
\item[Parameters:]
|
||||
\begin{description}
|
||||
\item[{\em ex}]Expected output values \end{description}
|
||||
\end{Desc}
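A minimal manual training step, sketched here for a network with a single output neuron (the values and the helper name are illustrative), would then combine setInput, setExpected and update:

\begin{verbatim}
#include <neural++.hpp>
#include <vector>

void trainStep(neuralpp::NeuralNet &net, double a, double b, double sum) {
        std::vector<double> in;
        in.push_back(a);
        in.push_back(b);

        net.setInput(in);      // feed the operands
        net.setExpected(sum);  // e.g. 2 + 3 = 5
        net.update();          // back-propagate for the configured number of epochs
}
\end{verbatim}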
|
||||
\index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!update@{update}}
|
||||
\index{update@{update}!neuralpp::NeuralNet@{neuralpp::NeuralNet}}
|
||||
\subsubsection[update]{\setlength{\rightskip}{0pt plus 5cm}void neuralpp::NeuralNet::update ()}\label{classneuralpp_1_1NeuralNet_b0bd1daadb06980dff1f50d33a7c098e}
|
||||
|
@ -250,9 +285,12 @@ It updates through back-propagation the weights of the synapsis and computes aga
|
|||
|
||||
It propagates values through the network.
|
||||
|
||||
Use this when you want to give an already trained network some new values the get to the output \index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!setInput@{setInput}}
|
||||
Use this when you want to give an already trained network some new values and get its output \begin{Desc}
|
||||
\item[Examples: ]\par
|
||||
{\bf examples/adderFromScratch.cpp}, and {\bf examples/doAdd.cpp}.\end{Desc}
|
||||
\index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!setInput@{setInput}}
|
||||
\index{setInput@{setInput}!neuralpp::NeuralNet@{neuralpp::NeuralNet}}
|
||||
\subsubsection[setInput]{\setlength{\rightskip}{0pt plus 5cm}void neuralpp::NeuralNet::setInput (vector$<$ double $>$ \& {\em v})}\label{classneuralpp_1_1NeuralNet_0de170e8ab561ad63d0739b4c4b74f68}
|
||||
\subsubsection[setInput]{\setlength{\rightskip}{0pt plus 5cm}void neuralpp::NeuralNet::setInput (std::vector$<$ double $>$ {\em v})}\label{classneuralpp_1_1NeuralNet_405b32d2928344314ecf0469070b0f17}
|
||||
|
||||
|
||||
It sets the input for the network.
|
||||
|
@ -262,6 +300,9 @@ It sets the input for the network.
|
|||
\begin{description}
|
||||
\item[{\em v}]Vector of doubles, containing the values to give to your network \end{description}
|
||||
\end{Desc}
|
||||
\begin{Desc}
|
||||
\item[Examples: ]\par
|
||||
{\bf examples/adderFromScratch.cpp}, and {\bf examples/doAdd.cpp}.\end{Desc}
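For inference on an already trained network, a typical flow (a sketch modelled on the doAdd example; the helper name is illustrative) is setInput, propagate, then getOutput:

\begin{verbatim}
#include <neural++.hpp>
#include <vector>

double add(neuralpp::NeuralNet &net, double a, double b) {
        std::vector<double> v;
        v.push_back(a);
        v.push_back(b);

        net.setInput(v);         // load the new input values
        net.propagate();         // run them through the trained network
        return net.getOutput();  // single-output network
}
\end{verbatim}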
|
||||
\index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!link@{link}}
|
||||
\index{link@{link}!neuralpp::NeuralNet@{neuralpp::NeuralNet}}
|
||||
\subsubsection[link]{\setlength{\rightskip}{0pt plus 5cm}void neuralpp::NeuralNet::link ()}\label{classneuralpp_1_1NeuralNet_46f23f462318a4ffc037a4e806364c3f}
|
||||
|
@ -271,7 +312,7 @@ It links the layers of the network (input, hidden, output).
|
|||
|
||||
Don't use it unless you know exactly what you're doing; it is already called by the constructor \index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!save@{save}}
|
||||
\index{save@{save}!neuralpp::NeuralNet@{neuralpp::NeuralNet}}
|
||||
\subsubsection[save]{\setlength{\rightskip}{0pt plus 5cm}void neuralpp::NeuralNet::save (const char $\ast$ {\em fname}) throw (NetworkFileWriteException)}\label{classneuralpp_1_1NeuralNet_fdf94c276720c25e565cac834fe8a407}
|
||||
\subsubsection[save]{\setlength{\rightskip}{0pt plus 5cm}void neuralpp::NeuralNet::save (const char $\ast$ {\em fname}) throw ({\bf NetworkFileWriteException})}\label{classneuralpp_1_1NeuralNet_fdf94c276720c25e565cac834fe8a407}
|
||||
|
||||
|
||||
Save a trained neural network to a binary file.
|
||||
|
@ -284,11 +325,14 @@ Save a trained neural network to a binary file.
|
|||
\begin{Desc}
|
||||
\item[Exceptions:]
|
||||
\begin{description}
|
||||
\item[{\em NetworkFileWriteException}]When you get an error writing the network's information to a file \end{description}
|
||||
\item[{\em \doxyref{NetworkFileWriteException}{p.}{classneuralpp_1_1NetworkFileWriteException}}]When you get an error writing the network's information to a file \end{description}
|
||||
\end{Desc}
|
||||
\begin{Desc}
|
||||
\item[Examples: ]\par
|
||||
{\bf examples/learnAdd.cpp}.\end{Desc}
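A sketch of the save/reload round trip (the file name is illustrative):

\begin{verbatim}
#include <neural++.hpp>
#include <iostream>

void saveAndReload(neuralpp::NeuralNet &trained) {
        try {
                trained.save("adder.net");                // serialize the trained network
                neuralpp::NeuralNet loaded("adder.net");  // load it back from the binary file
        } catch (neuralpp::NetworkFileWriteException &e) {
                std::cerr << e.what() << std::endl;
        } catch (neuralpp::NetworkFileNotFoundException &e) {
                std::cerr << e.what() << std::endl;
        }
}
\end{verbatim}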
|
||||
\index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!train@{train}}
|
||||
\index{train@{train}!neuralpp::NeuralNet@{neuralpp::NeuralNet}}
|
||||
\subsubsection[train]{\setlength{\rightskip}{0pt plus 5cm}void neuralpp::NeuralNet::train (string {\em xml}, \/ {\bf source} {\em src}) throw (InvalidXMLException)}\label{classneuralpp_1_1NeuralNet_ead4bdef0602a5cadbe3beb685e01f5f}
|
||||
\subsubsection[train]{\setlength{\rightskip}{0pt plus 5cm}void neuralpp::NeuralNet::train (std::string {\em xml}, \/ {\bf source} {\em src}) throw ({\bf InvalidXMLException})}\label{classneuralpp_1_1NeuralNet_1c9e17437d41a7048611e21a3cc1c7dd}
|
||||
|
||||
|
||||
Train a network using a training set loaded from an XML file.
|
||||
|
@ -301,11 +345,14 @@ A sample XML file is available in examples/adder.xml \begin{Desc}
|
|||
\begin{Desc}
|
||||
\item[Exceptions:]
|
||||
\begin{description}
|
||||
\item[{\em InvalidXMLException}]\end{description}
|
||||
\item[{\em \doxyref{InvalidXMLException}{p.}{classneuralpp_1_1InvalidXMLException}}]\end{description}
|
||||
\end{Desc}
|
||||
\begin{Desc}
|
||||
\item[Examples: ]\par
|
||||
{\bf examples/adderFromScratch.cpp}, and {\bf examples/learnAdd.cpp}.\end{Desc}
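A sketch of training from such an XML file ({\tt adder.xml} refers to the sample mentioned above; {\bf file} is the documented {\bf source} value for loading from disk):

\begin{verbatim}
#include <neural++.hpp>
#include <iostream>

void trainFromFile(neuralpp::NeuralNet &net) {
        try {
                net.train("adder.xml", neuralpp::NeuralNet::file);
        } catch (neuralpp::InvalidXMLException &e) {
                std::cerr << e.what() << std::endl;
        }
}
\end{verbatim}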
|
||||
\index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!initXML@{initXML}}
|
||||
\index{initXML@{initXML}!neuralpp::NeuralNet@{neuralpp::NeuralNet}}
|
||||
\subsubsection[initXML]{\setlength{\rightskip}{0pt plus 5cm}static void neuralpp::NeuralNet::initXML (string \& {\em xml})\hspace{0.3cm}{\tt [static]}}\label{classneuralpp_1_1NeuralNet_45c7645d4affe65752d37cd230afba24}
|
||||
\subsubsection[initXML]{\setlength{\rightskip}{0pt plus 5cm}static void neuralpp::NeuralNet::initXML (std::string \& {\em xml})\hspace{0.3cm}{\tt [static]}}\label{classneuralpp_1_1NeuralNet_96da6712a72051cf34ad961761ef6e08}
|
||||
|
||||
|
||||
Initialize the training XML for the neural network.
|
||||
|
@ -317,7 +364,7 @@ Initialize the training XML for the neural network.
|
|||
\end{Desc}
|
||||
\index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!split@{split}}
|
||||
\index{split@{split}!neuralpp::NeuralNet@{neuralpp::NeuralNet}}
|
||||
\subsubsection[split]{\setlength{\rightskip}{0pt plus 5cm}static vector$<$double$>$ neuralpp::NeuralNet::split (char {\em delim}, \/ string {\em str})\hspace{0.3cm}{\tt [static]}}\label{classneuralpp_1_1NeuralNet_e07af23ceb8666518da0c035bf1e0376}
|
||||
\subsubsection[split]{\setlength{\rightskip}{0pt plus 5cm}static std::vector$<$double$>$ neuralpp::NeuralNet::split (char {\em delim}, \/ std::string {\em str})\hspace{0.3cm}{\tt [static]}}\label{classneuralpp_1_1NeuralNet_83c6555520856d5867752904349ab6ca}
|
||||
|
||||
|
||||
Splits a string into a vector of doubles, given a delimiter.
|
||||
|
@ -331,7 +378,7 @@ Splits a string into a vector of doubles, given a delimitator.
|
|||
\item[Returns:]Vector of doubles containing the split values \end{Desc}
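For instance (illustrative input), splitting on commas:

\begin{verbatim}
// yields the doubles {2, 3, 5}
std::vector<double> values = neuralpp::NeuralNet::split(',', "2,3,5");
\end{verbatim}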
|
||||
\index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!XMLFromSet@{XMLFromSet}}
|
||||
\index{XMLFromSet@{XMLFromSet}!neuralpp::NeuralNet@{neuralpp::NeuralNet}}
|
||||
\subsubsection[XMLFromSet]{\setlength{\rightskip}{0pt plus 5cm}static string neuralpp::NeuralNet::XMLFromSet (int {\em id}, \/ string {\em set})\hspace{0.3cm}{\tt [static]}}\label{classneuralpp_1_1NeuralNet_4be31ecb0b543a192997bd83c6995ccb}
|
||||
\subsubsection[XMLFromSet]{\setlength{\rightskip}{0pt plus 5cm}static std::string neuralpp::NeuralNet::XMLFromSet (int \& {\em id}, \/ std::string {\em set})\hspace{0.3cm}{\tt [static]}}\label{classneuralpp_1_1NeuralNet_0a2733037af912b3e6a10146e7b7172f}
|
||||
|
||||
|
||||
Get a training set from a string and copy it to an XML string. For example, these strings could be training sets for making sums: \char`\"{}2,3;5\char`\"{} - \char`\"{}5,6;11\char`\"{} - \char`\"{}2,2;4\char`\"{} - \char`\"{}4,5;9\char`\"{}. Called on the first string, this method will return an XML fragment such as: '$<$training id=\char`\"{}0\char`\"{}$>$$<$input id=\char`\"{}0\char`\"{}$>$2$<$/input$>$$<$input id=\char`\"{}1\char`\"{}$>$3$<$/input$>$$<$output id=\char`\"{}0\char`\"{}$>$5$<$/output$>$$<$/training$>$'.
|
||||
|
@ -345,7 +392,7 @@ Get a training set from a string and copies it to an XML For example, these stri
|
|||
\item[Returns:]XML string \end{Desc}
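A sketch of how these XML helpers could be chained to build a small training document from the sum sets quoted above (appending the returned fragments with {\tt +=} is an assumed usage pattern; how the resulting string is then handed to train() depends on the chosen {\bf source} value):

\begin{verbatim}
#include <neural++.hpp>
#include <string>

std::string buildTrainingXML() {
        std::string xml;
        int id = 0;

        neuralpp::NeuralNet::initXML(xml);                     // open the training document
        xml += neuralpp::NeuralNet::XMLFromSet(id, "2,3;5");   // id is taken by reference
        xml += neuralpp::NeuralNet::XMLFromSet(id, "5,6;11");
        neuralpp::NeuralNet::closeXML(xml);                    // close the document

        return xml;
}
\end{verbatim}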
|
||||
\index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!closeXML@{closeXML}}
|
||||
\index{closeXML@{closeXML}!neuralpp::NeuralNet@{neuralpp::NeuralNet}}
|
||||
\subsubsection[closeXML]{\setlength{\rightskip}{0pt plus 5cm}static void neuralpp::NeuralNet::closeXML (string \& {\em xml})\hspace{0.3cm}{\tt [static]}}\label{classneuralpp_1_1NeuralNet_28b9966c5f197b8e86d57dd104aa32a6}
|
||||
\subsubsection[closeXML]{\setlength{\rightskip}{0pt plus 5cm}static void neuralpp::NeuralNet::closeXML (std::string \& {\em xml})\hspace{0.3cm}{\tt [static]}}\label{classneuralpp_1_1NeuralNet_e17732ed578bc4bd6032bfae58a5cf51}
|
||||
|
||||
|
||||
Closes an open XML document generated by \char`\"{}initXML\char`\"{} and \char`\"{}XMLFromSet\char`\"{}.
|
||||
|
@ -373,9 +420,14 @@ Closes an open XML document generated by \char`\"{}initXML\char`\"{} and \char`\
|
|||
\subsubsection[l\_\-rate]{\setlength{\rightskip}{0pt plus 5cm}double {\bf neuralpp::NeuralNet::l\_\-rate}\hspace{0.3cm}{\tt [private]}}\label{classneuralpp_1_1NeuralNet_6bd7be443e46b2fdbf1da2edb8e611ab}
|
||||
|
||||
|
||||
\index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!ex@{ex}}
|
||||
\index{ex@{ex}!neuralpp::NeuralNet@{neuralpp::NeuralNet}}
|
||||
\subsubsection[ex]{\setlength{\rightskip}{0pt plus 5cm}double {\bf neuralpp::NeuralNet::ex}\hspace{0.3cm}{\tt [private]}}\label{classneuralpp_1_1NeuralNet_261f5f68fcc5be54250cfa03945266dd}
|
||||
\index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!threshold@{threshold}}
|
||||
\index{threshold@{threshold}!neuralpp::NeuralNet@{neuralpp::NeuralNet}}
|
||||
\subsubsection[threshold]{\setlength{\rightskip}{0pt plus 5cm}double {\bf neuralpp::NeuralNet::threshold}\hspace{0.3cm}{\tt [private]}}\label{classneuralpp_1_1NeuralNet_327dbfdd72b0a74293f8f29630525aa3}
|
||||
|
||||
|
||||
\index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!expect@{expect}}
|
||||
\index{expect@{expect}!neuralpp::NeuralNet@{neuralpp::NeuralNet}}
|
||||
\subsubsection[expect]{\setlength{\rightskip}{0pt plus 5cm}std::vector$<$double$>$ {\bf neuralpp::NeuralNet::expect}\hspace{0.3cm}{\tt [private]}}\label{classneuralpp_1_1NeuralNet_a9e4ff43427f56663739c4c7450de8ee}
|
||||
|
||||
|
||||
\index{neuralpp::NeuralNet@{neuralpp::NeuralNet}!actv\_\-f@{actv\_\-f}}
|
||||
|
|
|
@ -8,17 +8,17 @@ Class for managing neurons.
|
|||
\subsection*{Public Member Functions}
|
||||
\begin{CompactItemize}
|
||||
\item
|
||||
{\bf Neuron} (double($\ast$a)(double))
|
||||
{\bf Neuron} (double($\ast$a)(double), double th=0.0)
|
||||
\begin{CompactList}\small\item\em Constructor. \item\end{CompactList}\item
|
||||
{\bf Neuron} (vector$<$ {\bf Synapsis} $>$ {\bf in}, vector$<$ {\bf Synapsis} $>$ {\bf out}, double($\ast$a)(double))
|
||||
{\bf Neuron} (std::vector$<$ {\bf Synapsis} $>$ {\bf in}, std::vector$<$ {\bf Synapsis} $>$ {\bf out}, double($\ast$a)(double), double th=0.0)
|
||||
\begin{CompactList}\small\item\em Alternative constructor that also takes the synapses linked to the neuron. \item\end{CompactList}\item
|
||||
{\bf Synapsis} \& {\bf synIn} (size\_\-t i)
|
||||
\begin{CompactList}\small\item\em Get the i-th synapsis connected on the input of the neuron. \item\end{CompactList}\item
|
||||
{\bf Synapsis} \& {\bf synOut} (size\_\-t i)
|
||||
\begin{CompactList}\small\item\em Get the i-th synapsis connected on the output of the neuron. \item\end{CompactList}\item
|
||||
void {\bf push\_\-in} ({\bf Synapsis} \&s)
|
||||
void {\bf push\_\-in} ({\bf Synapsis} s)
|
||||
\begin{CompactList}\small\item\em It pushes a new input synapsis. \item\end{CompactList}\item
|
||||
void {\bf push\_\-out} ({\bf Synapsis} \&s)
|
||||
void {\bf push\_\-out} ({\bf Synapsis} s)
|
||||
\begin{CompactList}\small\item\em It pushes a new output synapsis. \item\end{CompactList}\item
|
||||
void {\bf setActv} (double a)
|
||||
\begin{CompactList}\small\item\em Change the activation value of the neuron. \item\end{CompactList}\item
|
||||
|
@ -28,8 +28,8 @@ double {\bf getActv} ()
|
|||
\begin{CompactList}\small\item\em Get the activation value of the neuron. \item\end{CompactList}\item
|
||||
double {\bf getProp} ()
|
||||
\begin{CompactList}\small\item\em Get the propagation value of the neuron. \item\end{CompactList}\item
|
||||
double {\bf propagate} ()
|
||||
\begin{CompactList}\small\item\em It propagates its activation value to the connected neurons. \item\end{CompactList}\item
|
||||
void {\bf propagate} ()
|
||||
\begin{CompactList}\small\item\em Compute the propagation value of the neuron and set it. \item\end{CompactList}\item
|
||||
size\_\-t {\bf nIn} ()
|
||||
\begin{CompactList}\small\item\em Get the number of input synapsis for the neuron. \item\end{CompactList}\item
|
||||
size\_\-t {\bf nOut} ()
|
||||
|
@ -43,9 +43,11 @@ double {\bf actv\_\-val}
|
|||
\item
|
||||
double {\bf prop\_\-val}
|
||||
\item
|
||||
vector$<$ {\bf Synapsis} $>$ {\bf in}
|
||||
double {\bf threshold}
|
||||
\item
|
||||
vector$<$ {\bf Synapsis} $>$ {\bf out}
|
||||
std::vector$<$ {\bf Synapsis} $>$ {\bf in}
|
||||
\item
|
||||
std::vector$<$ {\bf Synapsis} $>$ {\bf out}
|
||||
\item
|
||||
double($\ast$ {\bf actv\_\-f} )(double)
|
||||
\end{CompactItemize}
|
||||
|
@ -59,7 +61,7 @@ Don't use this class directly unless you know what you're doing, use \doxyref{Ne
|
|||
\subsection{Constructor \& Destructor Documentation}
|
||||
\index{neuralpp::Neuron@{neuralpp::Neuron}!Neuron@{Neuron}}
|
||||
\index{Neuron@{Neuron}!neuralpp::Neuron@{neuralpp::Neuron}}
|
||||
\subsubsection[Neuron]{\setlength{\rightskip}{0pt plus 5cm}neuralpp::Neuron::Neuron (double($\ast$)(double) {\em a})}\label{classneuralpp_1_1Neuron_9863a08b73bc97c8b514aca6c580ff7b}
|
||||
\subsubsection[Neuron]{\setlength{\rightskip}{0pt plus 5cm}neuralpp::Neuron::Neuron (double($\ast$)(double) {\em a}, \/ double {\em th} = {\tt 0.0})}\label{classneuralpp_1_1Neuron_c9bd4f5f618fcf1adcebf1ab63ee0960}
|
||||
|
||||
|
||||
Constructor.
|
||||
|
@ -67,11 +69,11 @@ Constructor.
|
|||
\begin{Desc}
|
||||
\item[Parameters:]
|
||||
\begin{description}
|
||||
\item[{\em a}]Activation function \end{description}
|
||||
\item[{\em a}]Activation function \item[{\em th}]Threshold, a value in [0,1] that establishes how sensitive a neuron is to variations of the input values \end{description}
|
||||
\end{Desc}
|
||||
\index{neuralpp::Neuron@{neuralpp::Neuron}!Neuron@{Neuron}}
|
||||
\index{Neuron@{Neuron}!neuralpp::Neuron@{neuralpp::Neuron}}
|
||||
\subsubsection[Neuron]{\setlength{\rightskip}{0pt plus 5cm}neuralpp::Neuron::Neuron (vector$<$ {\bf Synapsis} $>$ {\em in}, \/ vector$<$ {\bf Synapsis} $>$ {\em out}, \/ double($\ast$)(double) {\em a})}\label{classneuralpp_1_1Neuron_f1bf19ec93174f60b368ee4a91b03f46}
|
||||
\subsubsection[Neuron]{\setlength{\rightskip}{0pt plus 5cm}neuralpp::Neuron::Neuron (std::vector$<$ {\bf Synapsis} $>$ {\em in}, \/ std::vector$<$ {\bf Synapsis} $>$ {\em out}, \/ double($\ast$)(double) {\em a}, \/ double {\em th} = {\tt 0.0})}\label{classneuralpp_1_1Neuron_d15e96b0316d880953288cc6e2550bec}
|
||||
|
||||
|
||||
Alternative constructor that also takes the synapses linked to the neuron.
|
||||
|
@ -79,7 +81,7 @@ Alternative constructor, that gets also the synapsis linked to the neuron.
|
|||
\begin{Desc}
|
||||
\item[Parameters:]
|
||||
\begin{description}
|
||||
\item[{\em in}]Input synapses \item[{\em out}]Output synapses \item[{\em a}]Activation function \end{description}
|
||||
\item[{\em in}]Input synapses \item[{\em out}]Output synapses \item[{\em a}]Activation function \item[{\em th}]Threshold, a value in [0,1] that establishes how sensitive a neuron is to variations of the input values \end{description}
|
||||
\end{Desc}
|
||||
|
||||
|
||||
|
@@ -114,7 +116,7 @@ Get the i-th synapsis connected on the output of the neuron.
\item[Returns:]Reference to the i-th synapsis \end{Desc}
\index{neuralpp::Neuron@{neuralpp::Neuron}!push\_\-in@{push\_\-in}}
\index{push\_\-in@{push\_\-in}!neuralpp::Neuron@{neuralpp::Neuron}}
\subsubsection[push\_\-in]{\setlength{\rightskip}{0pt plus 5cm}void neuralpp::Neuron::push\_\-in ({\bf Synapsis} \& {\em s})}\label{classneuralpp_1_1Neuron_583ada6e1dd3f2e113415b4d89196e62}
\subsubsection[push\_\-in]{\setlength{\rightskip}{0pt plus 5cm}void neuralpp::Neuron::push\_\-in ({\bf Synapsis} {\em s})}\label{classneuralpp_1_1Neuron_4d252151c35839975838539d846d70be}

It pushes a new input synapsis.

@@ -126,7 +128,7 @@ It pushes a new input synapsis.
\end{Desc}
\index{neuralpp::Neuron@{neuralpp::Neuron}!push\_\-out@{push\_\-out}}
\index{push\_\-out@{push\_\-out}!neuralpp::Neuron@{neuralpp::Neuron}}
\subsubsection[push\_\-out]{\setlength{\rightskip}{0pt plus 5cm}void neuralpp::Neuron::push\_\-out ({\bf Synapsis} \& {\em s})}\label{classneuralpp_1_1Neuron_bca65db84f56f9d40694bfbcd25812cb}
\subsubsection[push\_\-out]{\setlength{\rightskip}{0pt plus 5cm}void neuralpp::Neuron::push\_\-out ({\bf Synapsis} {\em s})}\label{classneuralpp_1_1Neuron_2c0acb0e6d413c4e0fc9e7939da1a684}

It pushes a new output synapsis.

@@ -180,10 +182,10 @@ Get the propagation value of the neuron.
\item[Returns:]Propagation value for the neuron \end{Desc}
\index{neuralpp::Neuron@{neuralpp::Neuron}!propagate@{propagate}}
\index{propagate@{propagate}!neuralpp::Neuron@{neuralpp::Neuron}}
\subsubsection[propagate]{\setlength{\rightskip}{0pt plus 5cm}double neuralpp::Neuron::propagate ()}\label{classneuralpp_1_1Neuron_8b0ca61cd0e047c8691ab39aae56dbda}
\subsubsection[propagate]{\setlength{\rightskip}{0pt plus 5cm}void neuralpp::Neuron::propagate ()}\label{classneuralpp_1_1Neuron_928d9bf5aed600119c640779e4034f30}

It propagates its activation value to the connected neurons.
Compute the propagation value of the neuron and set it.

\index{neuralpp::Neuron@{neuralpp::Neuron}!nIn@{nIn}}
\index{nIn@{nIn}!neuralpp::Neuron@{neuralpp::Neuron}}
@@ -223,14 +225,19 @@ Remove input and output synapsis from a neuron.
\subsubsection[prop\_\-val]{\setlength{\rightskip}{0pt plus 5cm}double {\bf neuralpp::Neuron::prop\_\-val}\hspace{0.3cm}{\tt [private]}}\label{classneuralpp_1_1Neuron_da75259de98b1a893c736666af6bfdc3}
\index{neuralpp::Neuron@{neuralpp::Neuron}!threshold@{threshold}}
\index{threshold@{threshold}!neuralpp::Neuron@{neuralpp::Neuron}}
\subsubsection[threshold]{\setlength{\rightskip}{0pt plus 5cm}double {\bf neuralpp::Neuron::threshold}\hspace{0.3cm}{\tt [private]}}\label{classneuralpp_1_1Neuron_7b16632868821d73dedcb8edaf6dbbef}
\index{neuralpp::Neuron@{neuralpp::Neuron}!in@{in}}
\index{in@{in}!neuralpp::Neuron@{neuralpp::Neuron}}
\subsubsection[in]{\setlength{\rightskip}{0pt plus 5cm}vector$<$ {\bf Synapsis} $>$ {\bf neuralpp::Neuron::in}\hspace{0.3cm}{\tt [private]}}\label{classneuralpp_1_1Neuron_ead827210fa18c2baae03927b2c798ff}
\subsubsection[in]{\setlength{\rightskip}{0pt plus 5cm}std::vector$<$ {\bf Synapsis} $>$ {\bf neuralpp::Neuron::in}\hspace{0.3cm}{\tt [private]}}\label{classneuralpp_1_1Neuron_bd07357465bb7b29bb344400e9e08710}
\index{neuralpp::Neuron@{neuralpp::Neuron}!out@{out}}
\index{out@{out}!neuralpp::Neuron@{neuralpp::Neuron}}
\subsubsection[out]{\setlength{\rightskip}{0pt plus 5cm}vector$<$ {\bf Synapsis} $>$ {\bf neuralpp::Neuron::out}\hspace{0.3cm}{\tt [private]}}\label{classneuralpp_1_1Neuron_82a4297f84d6403e52a8386d26117b4f}
\subsubsection[out]{\setlength{\rightskip}{0pt plus 5cm}std::vector$<$ {\bf Synapsis} $>$ {\bf neuralpp::Neuron::out}\hspace{0.3cm}{\tt [private]}}\label{classneuralpp_1_1Neuron_982aceef324b7764e778d5949720bc55}
\index{neuralpp::Neuron@{neuralpp::Neuron}!actv\_\-f@{actv\_\-f}}
@@ -14,21 +14,21 @@ Class for managing synapsis.
\begin{CompactList}\small\item\em Constructor. \item\end{CompactList}\item
{\bf Synapsis} ({\bf Neuron} $\ast$i, {\bf Neuron} $\ast$o, double w, double($\ast$a)(double))
\begin{CompactList}\small\item\em Constructor. \item\end{CompactList}\item
{\bf Neuron} $\ast$ {\bf getIn} ()
{\bf Neuron} $\ast$ {\bf getIn} () const
\item
{\bf Neuron} $\ast$ {\bf getOut} ()
{\bf Neuron} $\ast$ {\bf getOut} () const
\item
void {\bf setWeight} (double w) throw (InvalidSynapticalWeightException)
\begin{CompactList}\small\item\em Set the weight of the synapsis. \item\end{CompactList}\item
void {\bf setDelta} (double d) throw (InvalidSynapticalWeightException)
\begin{CompactList}\small\item\em It sets the delta (how much to change the weight after an update) of the synapsis. \item\end{CompactList}\item
double {\bf getWeight} ()
double {\bf getWeight} () const
\begin{CompactList}\small\item\em Return the weight of the synapsis. \item\end{CompactList}\item
double {\bf getDelta} ()
double {\bf getDelta} () const
\begin{CompactList}\small\item\em Return the delta of the synapsis. \item\end{CompactList}\item
double {\bf getPrevDelta} ()
double {\bf getPrevDelta} () const
\begin{CompactList}\small\item\em Get the delta of the synapsis at the previous iteration. \item\end{CompactList}\item
double {\bf momentum} (int N, int x)
double {\bf momentum} (int N, int x) const
\begin{CompactList}\small\item\em Get the inertial momentum of a synapsis. \item\end{CompactList}\end{CompactItemize}
\subsection*{Private Attributes}
\begin{CompactItemize}
@@ -94,21 +94,21 @@ Constructor.
\subsection{Member Function Documentation}
\index{neuralpp::Synapsis@{neuralpp::Synapsis}!getIn@{getIn}}
\index{getIn@{getIn}!neuralpp::Synapsis@{neuralpp::Synapsis}}
\subsubsection[getIn]{\setlength{\rightskip}{0pt plus 5cm}{\bf Neuron}$\ast$ neuralpp::Synapsis::getIn ()}\label{classneuralpp_1_1Synapsis_5ba8a93a5741f4855390eb8a46e99435}
\subsubsection[getIn]{\setlength{\rightskip}{0pt plus 5cm}{\bf Neuron}$\ast$ neuralpp::Synapsis::getIn () const}\label{classneuralpp_1_1Synapsis_298fd3c7483ad572899fecec01ac8fdf}
\begin{Desc}
\item[Returns:]Reference to input neuron of the synapsis \end{Desc}
\index{neuralpp::Synapsis@{neuralpp::Synapsis}!getOut@{getOut}}
\index{getOut@{getOut}!neuralpp::Synapsis@{neuralpp::Synapsis}}
\subsubsection[getOut]{\setlength{\rightskip}{0pt plus 5cm}{\bf Neuron}$\ast$ neuralpp::Synapsis::getOut ()}\label{classneuralpp_1_1Synapsis_61c9a04e03291a01f44520cef143cbdd}
\subsubsection[getOut]{\setlength{\rightskip}{0pt plus 5cm}{\bf Neuron}$\ast$ neuralpp::Synapsis::getOut () const}\label{classneuralpp_1_1Synapsis_b46d876761a73a24db87f5a144a0e899}
\begin{Desc}
\item[Returns:]Reference to output neuron of the synapsis \end{Desc}
\index{neuralpp::Synapsis@{neuralpp::Synapsis}!setWeight@{setWeight}}
\index{setWeight@{setWeight}!neuralpp::Synapsis@{neuralpp::Synapsis}}
\subsubsection[setWeight]{\setlength{\rightskip}{0pt plus 5cm}void neuralpp::Synapsis::setWeight (double {\em w}) throw (InvalidSynapticalWeightException)}\label{classneuralpp_1_1Synapsis_acee77d0fdf9889464ab5ed27beae0ff}
\subsubsection[setWeight]{\setlength{\rightskip}{0pt plus 5cm}void neuralpp::Synapsis::setWeight (double {\em w}) throw ({\bf InvalidSynapticalWeightException})}\label{classneuralpp_1_1Synapsis_acee77d0fdf9889464ab5ed27beae0ff}

Set the weight of the synapsis.

@@ -120,7 +120,7 @@ Set the weight of the synapsis.
\end{Desc}
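As a concrete illustration of the setter documented above, here is a minimal, hypothetical sketch that is not taken from the library's examples. It builds two neurons and a synapsis with the constructors listed earlier, then sets the weight inside a try/catch block, since values with $|$w$|$ $>$ 1 are documented to raise InvalidSynapticalWeightException. The identity() helper is an assumption standing in for any activation function, and the call to what() assumes this exception exposes it, as the other exception classes in this documentation do.

\begin{verbatim}
// Illustrative sketch only; identity() is a placeholder activation function.
#include <iostream>
#include <neural++.hpp>

using namespace std;
using namespace neuralpp;

static double identity(double x) { return x; }

int main() {
    Neuron a(identity), b(identity);
    Synapsis s(&a, &b, 0.5, identity);  // Synapsis(Neuron*, Neuron*, double w, double(*)(double))

    try {
        s.setWeight(0.7);   // fine, |w| <= 1
        s.setWeight(1.5);   // |w| > 1: expected to raise InvalidSynapticalWeightException
    } catch (InvalidSynapticalWeightException e) {
        cerr << "Invalid weight: " << e.what() << endl;
    }

    cout << "Current weight: " << s.getWeight() << endl;
    return 0;
}
\end{verbatim}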
\index{neuralpp::Synapsis@{neuralpp::Synapsis}!setDelta@{setDelta}}
\index{setDelta@{setDelta}!neuralpp::Synapsis@{neuralpp::Synapsis}}
\subsubsection[setDelta]{\setlength{\rightskip}{0pt plus 5cm}void neuralpp::Synapsis::setDelta (double {\em d}) throw (InvalidSynapticalWeightException)}\label{classneuralpp_1_1Synapsis_429ad5b25930faf436a9d725582802e1}
\subsubsection[setDelta]{\setlength{\rightskip}{0pt plus 5cm}void neuralpp::Synapsis::setDelta (double {\em d}) throw ({\bf InvalidSynapticalWeightException})}\label{classneuralpp_1_1Synapsis_429ad5b25930faf436a9d725582802e1}

It sets the delta (how much to change the weight after an update) of the synapsis.

@@ -132,7 +132,7 @@ It sets the delta (how much to change the weight after an update) of the synapsi
\end{Desc}
\index{neuralpp::Synapsis@{neuralpp::Synapsis}!getWeight@{getWeight}}
\index{getWeight@{getWeight}!neuralpp::Synapsis@{neuralpp::Synapsis}}
\subsubsection[getWeight]{\setlength{\rightskip}{0pt plus 5cm}double neuralpp::Synapsis::getWeight ()}\label{classneuralpp_1_1Synapsis_aa79c16ec6b59949e5d2f75a3f10d530}
\subsubsection[getWeight]{\setlength{\rightskip}{0pt plus 5cm}double neuralpp::Synapsis::getWeight () const}\label{classneuralpp_1_1Synapsis_bcbf7228632ff4d6bbb67703323d2db0}

Return the weight of the synapsis.

@@ -141,7 +141,7 @@ Return the weight of the synapsis.
\item[Returns:]Weight of the synapsis \end{Desc}
\index{neuralpp::Synapsis@{neuralpp::Synapsis}!getDelta@{getDelta}}
\index{getDelta@{getDelta}!neuralpp::Synapsis@{neuralpp::Synapsis}}
\subsubsection[getDelta]{\setlength{\rightskip}{0pt plus 5cm}double neuralpp::Synapsis::getDelta ()}\label{classneuralpp_1_1Synapsis_18f15b920609be8b818d43a0227aada5}
\subsubsection[getDelta]{\setlength{\rightskip}{0pt plus 5cm}double neuralpp::Synapsis::getDelta () const}\label{classneuralpp_1_1Synapsis_00c8e9c0804662f2b3247d6dddb4ca6c}

Return the delta of the synapsis.

@@ -150,7 +150,7 @@ Return the delta of the synapsis.
\item[Returns:]Delta of the synapsis \end{Desc}
\index{neuralpp::Synapsis@{neuralpp::Synapsis}!getPrevDelta@{getPrevDelta}}
\index{getPrevDelta@{getPrevDelta}!neuralpp::Synapsis@{neuralpp::Synapsis}}
\subsubsection[getPrevDelta]{\setlength{\rightskip}{0pt plus 5cm}double neuralpp::Synapsis::getPrevDelta ()}\label{classneuralpp_1_1Synapsis_2fe3e9ec97542f1476d8b9306aa09756}
\subsubsection[getPrevDelta]{\setlength{\rightskip}{0pt plus 5cm}double neuralpp::Synapsis::getPrevDelta () const}\label{classneuralpp_1_1Synapsis_0148b9c8db870c928711168702ae51c5}

Get the delta of the synapsis at the previous iteration.

@@ -159,7 +159,7 @@ Get the delta of the synapsis at the previous iteration.
\item[Returns:]The previous delta \end{Desc}
\index{neuralpp::Synapsis@{neuralpp::Synapsis}!momentum@{momentum}}
\index{momentum@{momentum}!neuralpp::Synapsis@{neuralpp::Synapsis}}
\subsubsection[momentum]{\setlength{\rightskip}{0pt plus 5cm}double neuralpp::Synapsis::momentum (int {\em N}, \/ int {\em x})}\label{classneuralpp_1_1Synapsis_ecdb17182de791f7fdd417232e184350}
\subsubsection[momentum]{\setlength{\rightskip}{0pt plus 5cm}double neuralpp::Synapsis::momentum (int {\em N}, \/ int {\em x}) const}\label{classneuralpp_1_1Synapsis_cff10a022d4c021688e4df944c05d8bd}

Get the inertial momentum of a synapsis.
@@ -10,8 +10,8 @@
{\fancyplain{}{\bfseries\rightmark}}
\rhead[\fancyplain{}{\bfseries\leftmark}]
{\fancyplain{}{\bfseries\thepage}}
\rfoot[\fancyplain{}{\bfseries\scriptsize Generated on Sat Aug 15 02:56:02 2009 for Neural++ by Doxygen }]{}
\lfoot[]{\fancyplain{}{\bfseries\scriptsize Generated on Sat Aug 15 02:56:02 2009 for Neural++ by Doxygen }}
\rfoot[\fancyplain{}{\bfseries\scriptsize Generated on Sun Aug 16 20:53:42 2009 for Neural++ by Doxygen }]{}
\lfoot[]{\fancyplain{}{\bfseries\scriptsize Generated on Sun Aug 16 20:53:42 2009 for Neural++ by Doxygen }}
\cfoot{}
\newenvironment{Code}
{\footnotesize}
doc/latex/examples.tex (Normal file, 9 lines)
@@ -0,0 +1,9 @@
\section{Examples}
Here is a list of all examples:\begin{CompactItemize}
\item
{\bf examples/adderFromScratch.cpp}
\item
{\bf examples/doAdd.cpp}
\item
{\bf examples/learnAdd.cpp}
\end{CompactItemize}
doc/latex/examples_2adderFromScratch_8cpp-example.tex (Normal file, 48 lines)
@@ -0,0 +1,48 @@
\section{examples/adderFromScratch.cpp}
Similar to learnAdd.cpp, but this time the training XML is generated as a string rather than saved to a file, and is parsed by the program itself to build the network. The program then asks for two real numbers and computes both their sum and their difference, putting the sum on the first output neuron and the difference on the second. Note that using more than one neuron in the output layer is strongly discouraged, as the network usually won't set the synaptical weights accurately enough to give satisfying answers for all of the operations.


\begin{DocInclude}\begin{verbatim}
#include <iostream>
#include <neural++.hpp>

using namespace std;
using namespace neuralpp;

int main() {
    NeuralNet net(2, 2, 2, 0.005, 100);
    string xml;
    double tmp;
    int id = 0;

    // XML initialization. Then tell the XML that 3+2=5 (diff 1), 4+2=6 (diff 2), 6+3=9 (diff 3).
    // Strings' format is "input1,input2,...,inputn;output1,output2,...,outputm"
    NeuralNet::initXML(xml);
    xml += NeuralNet::XMLFromSet(id, "3,2;5,1");
    xml += NeuralNet::XMLFromSet(id, "4,2;6,2");
    xml += NeuralNet::XMLFromSet(id, "6,3;9,3");
    NeuralNet::closeXML(xml);
    cout << xml << endl;

    net.train(xml, NeuralNet::str);
    vector<double> v;
    cout << "Network status: trained\n\n";

    cout << "First number to add: ";
    cin >> tmp;
    v.push_back(tmp);

    cout << "Second number to add: ";
    cin >> tmp;
    v.push_back(tmp);

    net.setInput(v);
    net.propagate();
    cout << "Output: " << net.getOutputs()[0] << "; " << net.getOutputs()[1] << endl;
    return 0;
}
\end{verbatim}
\end{DocInclude}
doc/latex/examples_2doAdd_8cpp-example.tex (Normal file, 50 lines)
@@ -0,0 +1,50 @@
\section{examples/doAdd.cpp}
Shows how to use a network that has already been trained and saved to a binary file: in this case, a network trained to perform sums between two real numbers, which should already have been created using learnAdd.cpp.


\begin{DocInclude}\begin{verbatim}
#include <iostream>
#include <neural++.hpp>

using namespace std;
using namespace neuralpp;

#define NETFILE "adder.net"

int main() {
    double a,b;
    NeuralNet net;

    // Load the pre-trained network from "adder.net" file
    try {
        net = NeuralNet(NETFILE);
    }

    catch (NetworkFileNotFoundException e) {
        cerr << "Fatal error while opening " << NETFILE << ": " << e.what();
        return 1;
    }

    cout << "First number to add: ";
    cin >> a;

    cout << "Second number to add: ";
    cin >> b;

    vector<double> v;
    v.push_back(a);
    v.push_back(b);

    // Set the numbers just read as input values, propagate those values, and get
    // the output
    net.setInput(v);
    net.propagate();
    cout << "Neural net output: " << net.getOutput() << endl;

    return 0;
}
\end{verbatim}
\end{DocInclude}
doc/latex/examples_2learnAdd_8cpp-example.tex (Normal file, 67 lines)
@@ -0,0 +1,67 @@
\section{examples/learnAdd.cpp}
Shows how to train a network that performs sums between two real numbers. The training XML is built from scratch and saved to a file, the network is initialized and trained from that XML file, and the resulting trained network is saved to adder.net. Then take a look at doAdd.cpp to see how to load that file and use the network.


\begin{DocInclude}\begin{verbatim}
#include <iostream>
#include <fstream>
#include <ctime>
#include <neural++.hpp>

using namespace std;
using namespace neuralpp;

int main() {
    int id = 0;
    string xml;
    time_t t1, t2;

    // Create the neural network. The network is going to have
    // => 2 neurons for the input layer
    // => 2 neurons for the hidden layer
    // => 1 neuron for the output layer
    // => a learning rate == 0.005 (just get it doing some tests until satisfied)
    // => 1000 learning steps (i.e. the network will be ready after 1000 training steps to adjust the synaptical weights)
    // => 0.1 as neural threshold (the threshold above which a neuron activates)
    NeuralNet net(2, 2, 1, 0.005, 1000, 0.1);

    // Initialize a training XML as a string in 'xml'
    NeuralNet::initXML(xml);

    // Build some training sets for the XML. The format is:
    // "input1,input2,...,inputn;output1,output2,...,outputn"
    // The 'id' variable is passed as reference, starting from 0,
    // and it's used to enumerate the sets in the XML file.
    xml += NeuralNet::XMLFromSet(id, "2,3;5");
    xml += NeuralNet::XMLFromSet(id, "3,2;5");
    xml += NeuralNet::XMLFromSet(id, "6,2;8");
    xml += NeuralNet::XMLFromSet(id, "2,2;4");
    xml += NeuralNet::XMLFromSet(id, "1,2;3");
    xml += NeuralNet::XMLFromSet(id, "-1,-2;-3");
    xml += NeuralNet::XMLFromSet(id, "8,9;17");
    xml += NeuralNet::XMLFromSet(id, "10,10;20");
    NeuralNet::closeXML(xml);

    // Save the XML string just created to a file
    ofstream out("adder.xml");
    out << xml;
    out.close();
    cout << "Training file adder.xml has been written\n";

    // Start the training from the XML file
    t1 = time(NULL);
    cout << "Training in progress - This may take a while...\n";
    net.train("adder.xml", NeuralNet::file);
    t2 = time(NULL);

    // Save the trained network to a binary file, that can be reloaded from any
    // application that is going to use that network
    net.save("adder.net");
    cout << "Network trained in " << (t2-t1) << " seconds. You can use adder.net file now to load this network\n";
    return 0;
}
\end{verbatim}
\end{DocInclude}
@@ -19,11 +19,23 @@ struct {\bf netrecord}
struct {\bf neuronrecord}
\item
struct {\bf synrecord}
\end{CompactItemize}
\item
class {\bf NetworkFileNotFoundException}
\begin{CompactList}\small\item\em Exception thrown when attempting to load a network from an invalid file. \item\end{CompactList}\item
class {\bf NetworkFileWriteException}
\begin{CompactList}\small\item\em Exception thrown when trying to write the network's information to a file that cannot be written. \item\end{CompactList}\item
class {\bf InvalidXMLException}
\begin{CompactList}\small\item\em Exception thrown when trying to parse an invalid XML. \item\end{CompactList}\item
class {\bf NetworkIndexOutOfBoundsException}
\begin{CompactList}\small\item\em Exception raised when trying to access a neuron whose index is larger than the number of neurons in the network. \item\end{CompactList}\item
class {\bf InvalidSynapticalWeightException}
\begin{CompactList}\small\item\em Exception raised when the weight of a synapsis is set, either directly or by the network, to a value $|$w$|$ $>$ 1. \item\end{CompactList}\end{CompactItemize}
\subsection*{Functions}
\begin{CompactItemize}
\item
double {\bf df} (double($\ast$f)(double), double x)
\item
double {\bf \_\-\_\-actv} (double prop)
\end{CompactItemize}

@@ -31,6 +43,11 @@ double {\bf df} (double($\ast$f)(double), double x)
Main namespace for the library.

\subsection{Function Documentation}
\index{neuralpp@{neuralpp}!\_\-\_\-actv@{\_\-\_\-actv}}
\index{\_\-\_\-actv@{\_\-\_\-actv}!neuralpp@{neuralpp}}
\subsubsection[\_\-\_\-actv]{\setlength{\rightskip}{0pt plus 5cm}double neuralpp::\_\-\_\-actv (double {\em prop})}\label{namespaceneuralpp_7df3a0de999c7a635f2289749ed4f194}
\index{neuralpp@{neuralpp}!df@{df}}
\index{df@{df}!neuralpp@{neuralpp}}
\subsubsection[df]{\setlength{\rightskip}{0pt plus 5cm}double neuralpp::df (double($\ast$)(double) {\em f}, \/ double {\em x})}\label{namespaceneuralpp_43c8197cc83f65fa9676386579671aec}
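The generated documentation gives no description for df; from its signature, double df(double($\ast$)(double) f, double x), it reads as a helper that evaluates the derivative of the activation function f at x. Purely as a point of reference, and as an assumption rather than a statement about the actual implementation, a central-difference version of such a helper would compute
\[
  \mathrm{df}(f, x) \;\approx\; \frac{f(x + h) - f(x - h)}{2h}, \qquad 0 < h \ll 1 .
\]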
@@ -31,19 +31,21 @@ struct {\bf neuralpp::synrecord}
\item
\#define {\bf RAND}~(double) ( (rand() / (RAND\_\-MAX/2)) - 1)
\begin{CompactList}\small\item\em Default rand value: $|$sin(rand)$|$, always $>$= 0 and $<$= 1. \item\end{CompactList}\item
\#define {\bf BETA0}~0.8
\#define {\bf BETA0}~1.0
\begin{CompactList}\small\item\em Initial value for the inertial momentum of the synapses. \item\end{CompactList}\end{CompactItemize}
\subsection*{Functions}
\begin{CompactItemize}
\item
double {\bf neuralpp::df} (double($\ast$f)(double), double x)
\item
double {\bf neuralpp::\_\-\_\-actv} (double prop)
\end{CompactItemize}

\subsection{Define Documentation}
\index{neural++.hpp@{neural++.hpp}!BETA0@{BETA0}}
\index{BETA0@{BETA0}!neural++.hpp@{neural++.hpp}}
\subsubsection[BETA0]{\setlength{\rightskip}{0pt plus 5cm}\#define BETA0~0.8}\label{neural_09_09_8hpp_05e2bb5b9fc32f0b6b4d84fe43177d72}
\subsubsection[BETA0]{\setlength{\rightskip}{0pt plus 5cm}\#define BETA0~1.0}\label{neural_09_09_8hpp_05e2bb5b9fc32f0b6b4d84fe43177d72}

Initial value for the inertial momentum of the synapses.
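BETA0 (raised here from 0.8 to 1.0) is documented only as the initial value for the inertial momentum of the synapses. For orientation, the standard momentum term in a gradient-descent weight update takes the form below, with learning rate $\eta$, local error $\delta$, input $x$ and momentum coefficient $\beta$ seeded by BETA0; whether Neural++ uses exactly this formulation is an assumption, not something stated in this documentation.
\[
  \Delta w_{t} \;=\; \eta\,\delta\,x \;+\; \beta\,\Delta w_{t-1}, \qquad \beta_{0} = \mathrm{BETA0} .
\]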
@@ -1,2 +1,21 @@
\section{neural++\_\-exception.hpp File Reference}
\label{neural_09_09__exception_8hpp}\index{neural++\_\-exception.hpp@{neural++\_\-exception.hpp}}
{\tt \#include $<$exception$>$}\par
\subsection*{Namespaces}
\begin{CompactItemize}
\item
namespace {\bf neuralpp}
\end{CompactItemize}
\subsection*{Classes}
\begin{CompactItemize}
\item
class {\bf neuralpp::NetworkFileNotFoundException}
\begin{CompactList}\small\item\em Exception thrown when attempting to load a network from an invalid file. \item\end{CompactList}\item
class {\bf neuralpp::NetworkFileWriteException}
\begin{CompactList}\small\item\em Exception thrown when trying to write the network's information to a file that cannot be written. \item\end{CompactList}\item
class {\bf neuralpp::InvalidXMLException}
\begin{CompactList}\small\item\em Exception thrown when trying to parse an invalid XML. \item\end{CompactList}\item
class {\bf neuralpp::NetworkIndexOutOfBoundsException}
\begin{CompactList}\small\item\em Exception raised when trying to access a neuron whose index is larger than the number of neurons in the network. \item\end{CompactList}\item
class {\bf neuralpp::InvalidSynapticalWeightException}
\begin{CompactList}\small\item\em Exception raised when the weight of a synapsis is set, either directly or by the network, to a value $|$w$|$ $>$ 1. \item\end{CompactList}\end{CompactItemize}
@@ -16,11 +16,11 @@
\begin{titlepage}
\vspace*{7cm}
\begin{center}
{\Large Neural++ \\[1ex]\large 0.3 }\\
{\Large Neural++ \\[1ex]\large 0.4 }\\
\vspace*{1cm}
{\large Generated by Doxygen 1.5.6}\\
\vspace*{0.5cm}
{\small Sat Aug 15 02:56:02 2009}\\
{\small Sun Aug 16 20:53:42 2009}\\
\end{center}
\end{titlepage}
\clearemptydoublepage
@@ -48,9 +48,14 @@
\include{structCMarkup_1_1SavedPosMap}
\include{structCMarkup_1_1SavedPosMapArray}
\include{structCMarkup_1_1TokenPos}
\include{classneuralpp_1_1InvalidSynapticalWeightException}
\include{classneuralpp_1_1InvalidXMLException}
\include{classneuralpp_1_1Layer}
\include{structMCD__CSTR}
\include{structneuralpp_1_1netrecord}
\include{classneuralpp_1_1NetworkFileNotFoundException}
\include{classneuralpp_1_1NetworkFileWriteException}
\include{classneuralpp_1_1NetworkIndexOutOfBoundsException}
\include{classneuralpp_1_1NeuralNet}
\include{classneuralpp_1_1Neuron}
\include{structneuralpp_1_1neuronrecord}
@@ -60,5 +65,9 @@
\input{Markup_8h}
\include{neural_09_09_8hpp}
\include{neural_09_09__exception_8hpp}
\chapter{Example Documentation}
\input{examples_2adderFromScratch_8cpp-example}
\include{examples_2doAdd_8cpp-example}
\include{examples_2learnAdd_8cpp-example}
\printindex
\end{document}
|
Loading…
Add table
Add a link
Reference in a new issue