diff --git a/include/neural++.hpp b/include/neural++.hpp
index 5889758..8da7070 100644
--- a/include/neural++.hpp
+++ b/include/neural++.hpp
@@ -26,10 +26,10 @@
using namespace std;
-//! Default rand value: |sin(rand)|, always >= 0 and <= 1
-#define RAND ( abs( sin(rand()) ) )
+//! Default rand value: uniform double in [-1, 1]
+#define RAND (double) ( ((double) rand() / (RAND_MAX/2)) - 1)
//! Initial value for the inertial momentum of the synapses
-#define BETA0 0.7
+#define BETA0 0.8
/**
* @namespace neuralpp
@@ -40,8 +40,6 @@ namespace neuralpp {
class Neuron;
class Layer;
class NeuralNet;
- class NetworkFileNotFoundException;
- class InvalidXMLException;
/**
* @class NeuralNet
@@ -67,11 +65,11 @@ namespace neuralpp {
void commitChanges (Layer *l);
/**
- * @brief It get the error made on the expected result as |v-v'|/v
+ * @brief Get the error made on the expected result as |v-v'|/v
* @param ex Expected value
* @return Mean error
*/
- double error(double ex);
+ double error(double ex) const;
/**
* @brief Private pointer to function, containing the function to
@@ -117,7 +115,7 @@ namespace neuralpp {
* @param file Binary file containing a neural network previously saved by save() method
* @throw NetworkFileNotFoundException
*/
- NeuralNet (const char* file) throw(NetworkFileNotFoundException);
+ NeuralNet (const string file) throw(NetworkFileNotFoundException);
/**
@@ -140,7 +138,7 @@ namespace neuralpp {
* an only neuron)
* @return The output value of the network
*/
- double getOutput();
+ double getOutput() const;
/**
* @brief It gets the output of the network in case the output layer contains more neurons
@@ -153,7 +151,7 @@ namespace neuralpp {
* build your network by using setExpected.
* @return The expected output value for a certain training phase
*/
- double expected();
+ double expected() const;
/**
* @brief It sets the value you expect from your network
@@ -189,8 +187,10 @@ namespace neuralpp {
/**
* @brief Save a trained neural network to a binary file
* @param fname Binary file where you're going to save your network
+ * @throws NetworkFileWriteException When you get an error writing the network's information to
+ * a file
*/
- bool save(const char* fname);
+ void save(const char* fname) throw(NetworkFileWriteException);
/**
* @brief Train a network using a training set loaded from an XML file. A sample XML file
@@ -296,14 +296,14 @@ namespace neuralpp {
* @brief Set the weight of the synapsis
* @param w Weight to be set
*/
- void setWeight(double w);
+ void setWeight(double w) throw(InvalidSynapticalWeightException);
/**
* @brief It sets the delta (how much to change the weight after an update)
* of the synapsis
* @param d Delta to be set
*/
- void setDelta(double d);
+ void setDelta(double d) throw(InvalidSynapticalWeightException);
/**
* @brief Return the weight of the synapsis
@@ -476,7 +476,7 @@ namespace neuralpp {
* @param i Index of the neuron to get in the layer
* @return Reference to the i-th neuron
*/
- Neuron& operator[] (size_t i);
+ Neuron& operator[] (size_t i) throw(NetworkIndexOutOfBoundsException);
/**
* @brief It links a layer to another
@@ -504,7 +504,7 @@ namespace neuralpp {
/**
* @return Number of neurons in the layer
*/
- size_t size();
+ size_t size() const;
};
struct netrecord {
diff --git a/include/neural++_exception.hpp b/include/neural++_exception.hpp
index fee799a..2bf1b6a 100644
--- a/include/neural++_exception.hpp
+++ b/include/neural++_exception.hpp
@@ -31,6 +31,17 @@ namespace neuralpp {
const char* what() const throw() { return "Attempt to load a neural network from an invalid network file"; }
};
+ /**
+ * @class NetworkFileWriteException
+ * @brief Exception thrown when trying to write the network's information to a file that cannot
+ * be written
+ */
+ class NetworkFileWriteException : public std::exception {
+ public:
+ NetworkFileWriteException() {}
+ const char* what() const throw() { return "There was an error while writing the network file"; }
+ };
+
/**
* @class InvalidXMLException
* @brief Exception thrown when trying parsing an invalid XML
@@ -40,6 +51,28 @@ namespace neuralpp {
InvalidXMLException() {}
const char* what() const throw() { return "Attempt to load an invalid XML file"; }
};
+
+ /**
+ * @class NetworkIndexOutOfBoundsException
+ * @brief Exception raised when trying to access a neuron whose index is larger than the number
+ * of neurons in the network
+ */
+ class NetworkIndexOutOfBoundsException : public std::exception {
+ public:
+ NetworkIndexOutOfBoundsException() {}
+ const char* what() const throw() { return "Attempt to access a non-existing neuron"; }
+ };
+
+ /**
+ * @class InvalidSynapticalWeightException
+	 * @brief Exception raised when, while training the network or setting it directly, the weight of a synapsis is
+ * set to a value |w| > 1
+ */
+ class InvalidSynapticalWeightException : public std::exception {
+ public:
+ InvalidSynapticalWeightException() {}
+ const char* what() const throw() { return "Attempt to set an invalid weight for the synapsis"; }
+ };
}
#endif
diff --git a/src/layer.cpp b/src/layer.cpp
index 45d5e53..1f50579 100644
--- a/src/layer.cpp
+++ b/src/layer.cpp
@@ -19,7 +19,9 @@ namespace neuralpp {
for (size_t i = 0; i < sz; i++) {
Neuron n(a, d);
elements.push_back(n);
- } actv_f = a;
+ }
+
+ actv_f = a;
deriv = d;
}
@@ -30,11 +32,14 @@ namespace neuralpp {
deriv = d;
}
- size_t Layer::size() {
+ size_t Layer::size() const {
return elements.size();
}
- Neuron & Layer::operator[](size_t i) {
+ Neuron & Layer::operator[](size_t i) throw(NetworkIndexOutOfBoundsException) {
+		if (i >= size())
+ throw NetworkIndexOutOfBoundsException();
+
return elements[i];
}
diff --git a/src/neuralnet.cpp b/src/neuralnet.cpp
index d50ef5b..06cbaa7 100644
--- a/src/neuralnet.cpp
+++ b/src/neuralnet.cpp
@@ -11,7 +11,9 @@
* this program. If not, see . *
**************************************************************************************************/
+#include
#include
+
#include "neural++.hpp"
#include "Markup.h"
@@ -56,7 +58,7 @@ namespace neuralpp {
link();
}
- double NeuralNet::getOutput() {
+ double NeuralNet::getOutput() const {
return (*output)[0].getActv();
}
@@ -68,7 +70,7 @@ namespace neuralpp {
return v;
}
- double NeuralNet::error(double expected) {
+ double NeuralNet::error(double expected) const {
return abs((getOutput() - expected *
deriv(getOutput())) / (abs(expected)));
}
@@ -92,7 +94,7 @@ namespace neuralpp {
ex = e;
}
- double NeuralNet::expected() {
+ double NeuralNet::expected() const {
return ex;
}
@@ -168,12 +170,12 @@ namespace neuralpp {
}
}
- bool NeuralNet::save(const char *fname) {
- FILE *fp;
+ void NeuralNet::save (const char *fname) throw(NetworkFileWriteException) {
struct netrecord record;
+ ofstream out(fname);
- if (!(fp = fopen(fname, "wb")))
- return false;
+ if (!out)
+ throw NetworkFileWriteException();
record.input_size = input->size();
record.hidden_size = hidden->size();
@@ -183,96 +185,113 @@ namespace neuralpp {
record.l_rate = l_rate;
record.ex = ex;
- if (fwrite(&record, sizeof(struct netrecord), 1, fp) <= 0)
- return false;
+		if (!out.write((char*) &record, sizeof(struct netrecord)))
+ throw NetworkFileWriteException();
// Saving neurons' state
for (unsigned int i = 0; i < input->size(); i++) {
struct neuronrecord r;
r.prop = (*input)[i].getProp();
r.actv = (*input)[i].getActv();
- fwrite(&r, sizeof(struct neuronrecord), 1, fp);
+
+			if (!out.write((char*) &r, sizeof(struct neuronrecord)))
+ throw NetworkFileWriteException();
}
for (unsigned int i = 0; i < hidden->size(); i++) {
struct neuronrecord r;
r.prop = (*hidden)[i].getProp();
r.actv = (*hidden)[i].getActv();
- fwrite(&r, sizeof(struct neuronrecord), 1, fp);
+
+			if (!out.write((char*) &r, sizeof(struct neuronrecord)))
+ throw NetworkFileWriteException();
}
for (unsigned int i = 0; i < output->size(); i++) {
struct neuronrecord r;
r.prop = (*output)[i].getProp();
r.actv = (*output)[i].getActv();
- fwrite(&r, sizeof(struct neuronrecord), 1, fp);
+
+			if (!out.write((char*) &r, sizeof(struct neuronrecord)))
+ throw NetworkFileWriteException();
}
// Saving synapsis' state
for (unsigned int i = 0; i < input->size(); i++) {
int nout = (*input)[i].nOut();
- fwrite(&nout, sizeof(int), 1, fp);
+
+			if (!out.write((char*) &nout, sizeof(int)))
+ throw NetworkFileWriteException();
for (int j = 0; j < nout; j++) {
struct synrecord r;
r.w = (*input)[i].synOut(j).getWeight();
r.d = (*input)[i].synOut(j).getDelta();
- fwrite(&r, sizeof(struct synrecord), 1,
- fp);
+
+				if (!out.write((char*) &r, sizeof(struct synrecord)))
+ throw NetworkFileWriteException();
}
}
for (unsigned int i = 0; i < output->size(); i++) {
int nin = (*output)[i].nIn();
- fwrite(&nin, sizeof(int), 1, fp);
+			if (!out.write((char*) &nin, sizeof(int)))
+ throw NetworkFileWriteException();
+
for (int j = 0; j < nin; j++) {
struct synrecord r;
r.w = (*output)[i].synIn(j).getWeight();
r.d = (*output)[i].synIn(j).getDelta();
- fwrite(&r, sizeof(struct synrecord), 1,
- fp);
+
+				if (!out.write((char*) &r, sizeof(struct synrecord)))
+ throw NetworkFileWriteException();
}
}
for (unsigned int i = 0; i < hidden->size(); i++) {
int nin = (*hidden)[i].nIn();
- fwrite(&nin, sizeof(int), 1, fp);
+
+			if (!out.write((char*) &nin, sizeof(int)))
+ throw NetworkFileWriteException();
for (int j = 0; j < nin; j++) {
struct synrecord r;
r.w = (*hidden)[i].synIn(j).getWeight();
r.d = (*hidden)[i].synIn(j).getDelta();
- fwrite(&r, sizeof(struct synrecord), 1,
- fp);
+
+				if (!out.write((char*) &r, sizeof(struct synrecord)))
+ throw NetworkFileWriteException();
}
}
for (unsigned int i = 0; i < hidden->size(); i++) {
int nout = (*hidden)[i].nOut();
- fwrite(&nout, sizeof(int), 1, fp);
+
+			if (!out.write((char*) &nout, sizeof(int)))
+ throw NetworkFileWriteException();
for (int j = 0; j < nout; j++) {
struct synrecord r;
r.w = (*hidden)[i].synOut(j).getWeight();
r.d = (*hidden)[i].synOut(j).getDelta();
- fwrite(&r, sizeof(struct synrecord), 1,
- fp);
+
+				if (!out.write((char*) &r, sizeof(struct synrecord)))
+ throw NetworkFileWriteException();
}
}
- fclose(fp);
- return true;
+ out.close();
}
- NeuralNet::NeuralNet(const char *fname) throw(NetworkFileNotFoundException) {
+ NeuralNet::NeuralNet(const string fname) throw(NetworkFileNotFoundException) {
struct netrecord record;
- FILE *fp;
+ ifstream in(fname.c_str());
- if (!(fp = fopen(fname, "rb")))
+ if (!in)
throw NetworkFileNotFoundException();
- if (fread(&record, sizeof(struct netrecord), 1, fp) <= 0)
+		if (!in.read((char*) &record, sizeof(struct netrecord)))
throw NetworkFileNotFoundException();
*this =
@@ -283,7 +302,9 @@ namespace neuralpp {
// Restore neurons
for (unsigned int i = 0; i < input->size(); i++) {
struct neuronrecord r;
- fread(&r, sizeof(struct neuronrecord), 1, fp);
+
+			if (!in.read((char*) &r, sizeof(struct neuronrecord)))
+ throw NetworkFileNotFoundException();
(*input)[i].setProp(r.prop);
(*input)[i].setActv(r.actv);
@@ -292,7 +313,9 @@ namespace neuralpp {
for (unsigned int i = 0; i < hidden->size(); i++) {
struct neuronrecord r;
- fread(&r, sizeof(struct neuronrecord), 1, fp);
+
+			if (!in.read((char*) &r, sizeof(struct neuronrecord)))
+ throw NetworkFileNotFoundException();
(*hidden)[i].setProp(r.prop);
(*hidden)[i].setActv(r.actv);
@@ -301,7 +324,9 @@ namespace neuralpp {
for (unsigned int i = 0; i < output->size(); i++) {
struct neuronrecord r;
- fread(&r, sizeof(struct neuronrecord), 1, fp);
+
+			if (!in.read((char*) &r, sizeof(struct neuronrecord)))
+ throw NetworkFileNotFoundException();
(*output)[i].setProp(r.prop);
(*output)[i].setActv(r.actv);
@@ -323,12 +348,16 @@ namespace neuralpp {
// Restore synapsis
for (unsigned int i = 0; i < input->size(); i++) {
int nout;
- fread(&nout, sizeof(int), 1, fp);
+			if (!in.read((char*) &nout, sizeof(int)))
+ throw NetworkFileNotFoundException();
+
for (int j = 0; j < nout; j++) {
struct synrecord r;
- fread(&r, sizeof(struct synrecord), 1, fp);
+				if (!in.read((char*) &r, sizeof(struct synrecord)))
+ throw NetworkFileNotFoundException();
+
(*input)[i].synOut(j).setWeight(r.w);
(*input)[i].synOut(j).setDelta(r.d);
}
@@ -336,11 +365,15 @@ namespace neuralpp {
for (unsigned int i = 0; i < output->size(); i++) {
int nin;
- fread(&nin, sizeof(int), 1, fp);
+
+			if (!in.read((char*) &nin, sizeof(int)))
+ throw NetworkFileNotFoundException();
for (int j = 0; j < nin; j++) {
struct synrecord r;
- fread(&r, sizeof(struct synrecord), 1, fp);
+
+				if (!in.read((char*) &r, sizeof(struct synrecord)))
+ throw NetworkFileNotFoundException();
(*output)[i].synIn(j).setWeight(r.w);
(*output)[i].synIn(j).setDelta(r.d);
@@ -349,11 +382,15 @@ namespace neuralpp {
for (unsigned int i = 0; i < hidden->size(); i++) {
int nin;
- fread(&nin, sizeof(int), 1, fp);
+
+			if (!in.read((char*) &nin, sizeof(int)))
+ throw NetworkFileNotFoundException();
for (int j = 0; j < nin; j++) {
struct synrecord r;
- fread(&r, sizeof(struct synrecord), 1, fp);
+
+				if (!in.read((char*) &r, sizeof(struct synrecord)))
+ throw NetworkFileNotFoundException();
(*hidden)[i].synIn(j).setWeight(r.w);
(*hidden)[i].synIn(j).setDelta(r.d);
@@ -362,18 +399,22 @@ namespace neuralpp {
for (unsigned int i = 0; i < hidden->size(); i++) {
int nout;
- fread(&nout, sizeof(int), 1, fp);
+
+			if (!in.read((char*) &nout, sizeof(int)))
+ throw NetworkFileNotFoundException();
for (int j = 0; j < nout; j++) {
struct synrecord r;
- fread(&r, sizeof(struct synrecord), 1, fp);
+
+				if (!in.read((char*) &r, sizeof(struct synrecord)))
+ throw NetworkFileNotFoundException();
(*hidden)[i].synOut(j).setWeight(r.w);
(*hidden)[i].synOut(j).setDelta(r.d);
}
}
- fclose(fp);
+ in.close();
}
void NeuralNet::train(string xmlsrc, NeuralNet::source src =
@@ -438,7 +479,7 @@ namespace neuralpp {
return;
}
- void NeuralNet::initXML(string & xml) {
+ void NeuralNet::initXML(string& xml) {
xml.append
("\n"
"\n"
diff --git a/src/synapsis.cpp b/src/synapsis.cpp
index 46543e6..8519aa4 100644
--- a/src/synapsis.cpp
+++ b/src/synapsis.cpp
@@ -72,14 +72,17 @@ namespace neuralpp {
return prev_delta;
}
- void Synapsis::setWeight(double w) {
+ void Synapsis::setWeight(double w) throw(InvalidSynapticalWeightException) {
-		if (weight > 1.0)
+		if (w > 1.0 || w < -1.0)
- weight = 1.0;
- else
- weight = w;
+ throw InvalidSynapticalWeightException();
+
+ weight = w;
}
- void Synapsis::setDelta(double d) {
+ void Synapsis::setDelta(double d) throw(InvalidSynapticalWeightException) {
+		if (d > 1.0 || d < -1.0)
+ throw InvalidSynapticalWeightException();
+
prev_delta = delta;
delta = d;
}