Update examples and tests: switch the adder example to three inputs, reduce training epochs, expose NeuralNet layer pointers, replace the C-string "inf" check with a stringstream, and clamp synapsis weights

This commit is contained in:
blacklight 2009-08-09 19:53:21 +02:00
parent b50b2c08ae
commit aac905673b
5 changed files with 22 additions and 15 deletions

View file

@ -10,16 +10,16 @@
using namespace neuralpp; using namespace neuralpp;
int main() { int main() {
NeuralNet net(2, 2, 1, 0.005, 10000); NeuralNet net(3, 3, 1, 0.005, 1000);
string xml; string xml;
double tmp; double tmp;
// XML initialization. Then, I tell the XML that 2+3=5, 3+3=6, 5+4=9 // XML initialization. Then, I tell the XML that 2+3+4=9, 3+3+1=7, 5+4+2=11
// Strings' format is "input1,input2,...,inputn;output1,output2,...,outputm" // Strings' format is "input1,input2,...,inputn;output1,output2,...,outputm"
NeuralNet::initXML(xml); NeuralNet::initXML(xml);
xml += NeuralNet::XMLFromSet(0, "2,3;5"); xml += NeuralNet::XMLFromSet(0, "2,3,4;9");
xml += NeuralNet::XMLFromSet(1, "3,3;6"); xml += NeuralNet::XMLFromSet(1, "3,3,1;7");
xml += NeuralNet::XMLFromSet(2, "5,4;9"); xml += NeuralNet::XMLFromSet(2, "5,4,2;11");
NeuralNet::closeXML(xml); NeuralNet::closeXML(xml);
net.train(xml, NeuralNet::str); net.train(xml, NeuralNet::str);
@ -34,6 +34,10 @@ int main() {
cin >> tmp; cin >> tmp;
v.push_back(tmp); v.push_back(tmp);
cout << "Third number to add: ";
cin >> tmp;
v.push_back(tmp);
net.setInput(v); net.setInput(v);
net.propagate(); net.propagate();
cout << "Output: " << net.getOutput() << endl; cout << "Output: " << net.getOutput() << endl;

View file

@ -11,7 +11,7 @@
using namespace neuralpp; using namespace neuralpp;
int main() { int main() {
NeuralNet net(2, 2, 1, 0.005, 10000); NeuralNet net(2, 2, 1, 0.005, 1000);
cout << "Training in progress - This may take a while...if it gets stuck, interrupt and restart the app\n"; cout << "Training in progress - This may take a while...if it gets stuck, interrupt and restart the app\n";
net.train("adder.xml", NeuralNet::file); net.train("adder.xml", NeuralNet::file);

View file

@ -53,10 +53,6 @@ namespace neuralpp {
double l_rate; double l_rate;
double ex; double ex;
Layer* input;
Layer* hidden;
Layer* output;
/** /**
* @brief It updates the weights of the net's synapsis through back-propagation. * @brief It updates the weights of the net's synapsis through back-propagation.
* In-class use only * In-class use only
@ -90,6 +86,10 @@ namespace neuralpp {
double (*deriv)(double); double (*deriv)(double);
public: public:
Layer* input;
Layer* hidden;
Layer* output;
/** /**
* @brief Enum to choose the eventual training source for our network (XML from a file or from a string) * @brief Enum to choose the eventual training source for our network (XML from a file or from a string)
*/ */

View file

@ -11,6 +11,7 @@
* this program. If not, see <http://www.gnu.org/licenses/>. * * this program. If not, see <http://www.gnu.org/licenses/>. *
**************************************************************************************************/ **************************************************************************************************/
#include <sstream>
#include "neural++.hpp" #include "neural++.hpp"
#include "Markup.h" #include "Markup.h"
@ -417,7 +418,7 @@ namespace neuralpp {
xml.OutOfElem(); xml.OutOfElem();
while (!valid) { while (!valid) {
char str[BUFSIZ]; stringstream ss(stringstream::in | stringstream::out);
setInput(input); setInput(input);
propagate(); propagate();
@ -425,11 +426,10 @@ namespace neuralpp {
update(); update();
out = getOutput(); out = getOutput();
memset(str, 0x0, sizeof(str));
snprintf(str, sizeof(str), "%f",
out);
if (!strstr(str, "inf")) ss << out;
if (ss.str().find("inf") == string::npos)
valid = true; valid = true;
} }
} }

View file

@ -73,6 +73,9 @@ namespace neuralpp {
} }
void Synapsis::setWeight(double w) { void Synapsis::setWeight(double w) {
if (weight > 1.0)
weight = 1.0;
else
weight = w; weight = w;
} }