// neuralpp/src/neuralnet.cpp
/**************************************************************************************************
* LibNeural++ v.0.2 - All-purpose library for managing neural networks *
* Copyright (C) 2009, BlackLight *
* *
* This program is free software: you can redistribute it and/or modify it under the terms of the *
* GNU General Public License as published by the Free Software Foundation, either version 3 of *
* the License, or (at your option) any later version. This program is distributed in the hope *
* that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of *
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for *
* more details. You should have received a copy of the GNU General Public License along with *
* this program. If not, see <http://www.gnu.org/licenses/>. *
**************************************************************************************************/
2009-08-10 18:06:52 +02:00
#include <fstream>
2009-08-09 19:53:21 +02:00
#include <sstream>
2009-08-16 11:09:42 +02:00
using namespace std;
2009-08-10 18:06:52 +02:00
#include "neural++.hpp"
2009-02-18 00:10:57 +01:00
#include "Markup.h"
2009-08-09 11:17:39 +02:00
namespace neuralpp {
/**
 * Default activation function: the identity. Used by the NeuralNet
 * constructor that takes no user-supplied activation function.
 */
double __actv(double x) {
	return x;
}
2009-08-15 02:59:09 +02:00
/**
 * Numerical derivative of f at x.
 *
 * Uses the central difference quotient, whose truncation error is
 * O(h^2), instead of the one-sided forward difference (O(h) error),
 * at the cost of one extra evaluation of f.
 *
 * @param f Function to differentiate
 * @param x Point at which the derivative is evaluated
 * @return Approximation of f'(x)
 */
double df (double (*f)(double), double x) {
	double h = 0.000001;
	return (f(x + h) - f(x - h)) / (2.0 * h);
}
2009-02-18 00:10:57 +01:00
2009-08-09 11:17:39 +02:00
/**
 * Build a 3-layer (input/hidden/output) network using the default
 * identity activation function (__actv) for every layer.
 *
 * @param in_size     Number of input neurons
 * @param hidden_size Number of hidden neurons
 * @param out_size    Number of output neurons
 * @param l           Learning rate
 * @param e           Number of training epochs
 */
NeuralNet::NeuralNet(size_t in_size, size_t hidden_size,
		size_t out_size, double l, int e) {

	epochs = e;
	// ref_epochs keeps the original epoch count; epochs itself is
	// decremented during training (see update()).
	ref_epochs = epochs;
	l_rate = l;
	actv_f = __actv;

	input = new Layer(in_size, __actv);
	hidden = new Layer(hidden_size, __actv);
	output = new Layer(out_size, __actv);
	// Wire up the synapses input->hidden and hidden->output.
	link();
}
2009-02-18 00:10:57 +01:00
2009-08-09 11:17:39 +02:00
/**
 * Build a 3-layer (input/hidden/output) network with a user-supplied
 * activation function applied to every layer.
 *
 * @param in_size     Number of input neurons
 * @param hidden_size Number of hidden neurons
 * @param out_size    Number of output neurons
 * @param a           Activation function (also differentiated via df()
 *                    during back-propagation)
 * @param l           Learning rate
 * @param e           Number of training epochs
 */
NeuralNet::NeuralNet(size_t in_size, size_t hidden_size,
		size_t out_size, double (*a) (double),
		double l, int e) {

	epochs = e;
	// ref_epochs keeps the original epoch count; epochs itself is
	// decremented during training (see update()).
	ref_epochs = epochs;
	l_rate = l;

	actv_f = a;

	input = new Layer(in_size, a);
	hidden = new Layer(hidden_size, a);
	output = new Layer(out_size, a);
	// Wire up the synapses input->hidden and hidden->output.
	link();
}
2009-02-18 00:10:57 +01:00
2009-08-10 18:06:52 +02:00
double NeuralNet::getOutput() const {
2009-08-09 11:17:39 +02:00
return (*output)[0].getActv();
}
2009-02-18 00:10:57 +01:00
2009-08-09 11:17:39 +02:00
/**
 * Snapshot the activation value of every output neuron, in layer order.
 *
 * @return One entry per output neuron.
 */
vector<double> NeuralNet::getOutputs() {
	const size_t n = output->size();
	vector<double> actv;
	actv.reserve(n);

	for (size_t i = 0; i < n; i++)
		actv.push_back((*output)[i].getActv());

	return actv;
}
2009-02-18 00:10:57 +01:00
2009-08-10 18:06:52 +02:00
double NeuralNet::error(double expected) const {
2009-08-15 02:59:09 +02:00
return 0.5*(getOutput()-expected)*(getOutput()-expected);
2009-08-09 11:17:39 +02:00
}
2009-02-18 00:10:57 +01:00
2009-08-09 11:17:39 +02:00
/**
 * Forward pass. The input layer's values are set directly via
 * setInput(), so only the hidden and output layers need to be
 * computed, in feed-forward order.
 */
void NeuralNet::propagate() {
	hidden->propagate();
	output->propagate();
}
2009-02-18 00:10:57 +01:00
2009-08-16 11:09:42 +02:00
/**
 * Load the given values into the input layer's neurons.
 *
 * @param v Input values, one per input neuron.
 *          NOTE(review): taken by value, so the caller's vector is
 *          copied on every call.
 */
void NeuralNet::setInput(vector <double> v) {
	input->setInput(v);
}
2009-08-09 11:17:39 +02:00
/**
 * Build the synapses between consecutive layers:
 * input -> hidden, then hidden -> output.
 */
void NeuralNet::link() {
	hidden->link(*input);
	output->link(*hidden);
}
2009-08-09 11:17:39 +02:00
/**
 * Set the expected output value used by back-propagation, replacing
 * any previously stored value so ex holds exactly one element.
 *
 * @param e Expected (target) output
 */
void NeuralNet::setExpected(double e) {
	ex.assign(1, e);
}
2009-08-10 18:06:52 +02:00
/**
 * The expected output value previously stored by setExpected().
 *
 * NOTE(review): ex[0] is undefined behavior if setExpected() was never
 * called (ex would be empty) — confirm callers always set it first.
 */
double NeuralNet::expected() const {
	return ex[0];
}
2009-02-18 00:10:57 +01:00
2009-08-09 11:17:39 +02:00
/**
 * One back-propagation step: compute the weight delta of every synapsis
 * feeding the output layer, then of every synapsis feeding the hidden
 * layer. Deltas are only stored here; commitChanges() applies them.
 *
 * The delta rule used is
 *     delta = -l_rate * error_term * df(actv_f, prop) * input_actv
 * with a momentum term (weighted previous delta) added after the first
 * epoch (ref_epochs - epochs > 0).
 *
 * NOTE(review): the hidden-layer pass below reuses the LAST out_delta
 * computed in the output-layer pass and reads only synOut(0), so this
 * is only a correct gradient for networks with a single output neuron.
 * out_delta is also left uninitialized if the output layer has no
 * incoming synapses — confirm network shape before relying on this.
 */
void NeuralNet::updateWeights() {
	double out_delta;

	// Output layer: delta for each incoming synapsis.
	for (size_t i = 0; i < output->size(); i++) {
		Neuron *n = &(*output)[i];

		for (size_t j = 0; j < n->nIn(); j++) {
			Synapsis *s = &(n->synIn(j));
			// After the first epoch, add momentum based on the
			// previous delta to damp oscillations.
			if (ref_epochs - epochs > 0)
				out_delta =
				    (-l_rate) * (getOutput() - expected()) *
				    df(actv_f, n->getProp()) * s->getIn()->getActv() +
				    s->momentum(ref_epochs, ref_epochs - epochs) *
				    s->getPrevDelta();
			else
				out_delta =
				    (-l_rate) * (getOutput() - expected()) *
				    df(actv_f, n->getProp()) * s->getIn()->getActv();
			s->setDelta(out_delta);
		}
	}

	// Hidden layer: propagate the output delta backwards through the
	// (first) outgoing synapsis of each hidden neuron.
	for (size_t i = 0; i < hidden->size(); i++) {
		Neuron *n = &(*hidden)[i];
		double d =
		    df(actv_f, n->getProp()) *
		    n->synOut(0).getWeight() * out_delta;
		for (size_t j = 0; j < n->nIn(); j++) {
			Synapsis *s = &(n->synIn(j));
			if (ref_epochs - epochs > 0)
				s->setDelta((-l_rate) * d *
					    s->getIn()->getActv() +
					    s->momentum(ref_epochs,
							ref_epochs
							-
							epochs) *
					    s->getPrevDelta());
			else
				s->setDelta((-l_rate) * d *
					    s->getIn()->getActv());
		}
	}
}
2009-08-16 11:09:42 +02:00
/**
 * Apply the deltas accumulated by updateWeights() to the incoming
 * synapses of every neuron of the given layer, then zero each delta so
 * the next training step starts clean.
 *
 * @param l Layer whose incoming synapsis weights are updated in place
 */
void NeuralNet::commitChanges(Layer& l) {
	for (size_t i = 0; i < l.size(); i++) {
		Neuron &neuron = l[i];

		for (size_t j = 0; j < neuron.nIn(); j++) {
			Synapsis &syn = neuron.synIn(j);
			syn.setWeight(syn.getWeight() + syn.getDelta());
			syn.setDelta(0);
		}
	}
}
2009-08-09 11:17:39 +02:00
/**
 * Run the remaining training epochs: each iteration computes the
 * back-propagation deltas, commits them (output layer first, then
 * hidden), and re-runs the forward pass.
 *
 * NOTE(review): epochs is consumed here (it counts down to -1 and is
 * never reset in this block); ref_epochs retains the original count.
 */
void NeuralNet::update() {
	while ((epochs--) > 0) {
		updateWeights();
		commitChanges(*output);
		commitChanges(*hidden);
		propagate();
	}
}
2009-08-10 18:06:52 +02:00
void NeuralNet::save (const char *fname) throw(NetworkFileWriteException) {
2009-08-09 11:17:39 +02:00
struct netrecord record;
2009-08-10 18:06:52 +02:00
ofstream out(fname);
2009-02-18 00:10:57 +01:00
2009-08-10 18:06:52 +02:00
if (!out)
throw NetworkFileWriteException();
2009-02-18 00:10:57 +01:00
2009-08-09 11:17:39 +02:00
record.input_size = input->size();
record.hidden_size = hidden->size();
record.output_size = output->size();
2009-02-18 00:10:57 +01:00
2009-08-09 11:17:39 +02:00
record.epochs = ref_epochs;
record.l_rate = l_rate;
2009-08-16 11:09:42 +02:00
record.ex = ex[0];
2009-02-18 00:10:57 +01:00
2009-08-10 18:06:52 +02:00
if (out.write((char*) &record, sizeof(struct netrecord)) <= 0)
throw NetworkFileWriteException();
2009-02-18 00:10:57 +01:00
2009-08-09 11:17:39 +02:00
// Saving neurons' state
for (unsigned int i = 0; i < input->size(); i++) {
struct neuronrecord r;
r.prop = (*input)[i].getProp();
r.actv = (*input)[i].getActv();
2009-08-10 18:06:52 +02:00
if (out.write((char*) &r, sizeof(struct neuronrecord)) <= 0)
throw NetworkFileWriteException();
2009-08-09 11:17:39 +02:00
}
2009-02-18 00:10:57 +01:00
2009-08-09 11:17:39 +02:00
for (unsigned int i = 0; i < hidden->size(); i++) {
struct neuronrecord r;
r.prop = (*hidden)[i].getProp();
r.actv = (*hidden)[i].getActv();
2009-08-10 18:06:52 +02:00
if (out.write((char*) &r, sizeof(struct neuronrecord)) <= 0)
throw NetworkFileWriteException();
2009-08-09 11:17:39 +02:00
}
2009-02-18 00:10:57 +01:00
2009-08-09 11:17:39 +02:00
for (unsigned int i = 0; i < output->size(); i++) {
struct neuronrecord r;
r.prop = (*output)[i].getProp();
r.actv = (*output)[i].getActv();
2009-08-10 18:06:52 +02:00
if (out.write((char*) &r, sizeof(struct neuronrecord)) <= 0)
throw NetworkFileWriteException();
2009-02-18 00:10:57 +01:00
}
2009-08-09 11:17:39 +02:00
// Saving synapsis' state
for (unsigned int i = 0; i < input->size(); i++) {
int nout = (*input)[i].nOut();
2009-08-10 18:06:52 +02:00
if (out.write((char*) &nout, sizeof(int)) <= 0)
throw NetworkFileWriteException();
2009-08-09 11:17:39 +02:00
for (int j = 0; j < nout; j++) {
struct synrecord r;
r.w = (*input)[i].synOut(j).getWeight();
r.d = (*input)[i].synOut(j).getDelta();
2009-08-10 18:06:52 +02:00
if (out.write((char*) &r, sizeof(struct synrecord)) <= 0)
throw NetworkFileWriteException();
2009-08-09 11:17:39 +02:00
}
}
2009-02-18 00:10:57 +01:00
2009-08-09 11:17:39 +02:00
for (unsigned int i = 0; i < output->size(); i++) {
int nin = (*output)[i].nIn();
2009-08-10 18:06:52 +02:00
if (out.write((char*) &nin, sizeof(int)) <= 0)
throw NetworkFileWriteException();
2009-08-09 11:17:39 +02:00
for (int j = 0; j < nin; j++) {
struct synrecord r;
r.w = (*output)[i].synIn(j).getWeight();
r.d = (*output)[i].synIn(j).getDelta();
2009-08-10 18:06:52 +02:00
if (out.write((char*) &r, sizeof(struct synrecord)) <= 0)
throw NetworkFileWriteException();
2009-08-09 11:17:39 +02:00
}
2009-02-18 00:10:57 +01:00
}
2009-08-09 11:17:39 +02:00
for (unsigned int i = 0; i < hidden->size(); i++) {
int nin = (*hidden)[i].nIn();
2009-08-10 18:06:52 +02:00
if (out.write((char*) &nin, sizeof(int)) <= 0)
throw NetworkFileWriteException();
2009-02-18 00:10:57 +01:00
2009-08-09 11:17:39 +02:00
for (int j = 0; j < nin; j++) {
struct synrecord r;
r.w = (*hidden)[i].synIn(j).getWeight();
r.d = (*hidden)[i].synIn(j).getDelta();
2009-08-10 18:06:52 +02:00
if (out.write((char*) &r, sizeof(struct synrecord)) <= 0)
throw NetworkFileWriteException();
2009-08-09 11:17:39 +02:00
}
2009-02-18 00:10:57 +01:00
}
2009-08-09 11:17:39 +02:00
for (unsigned int i = 0; i < hidden->size(); i++) {
int nout = (*hidden)[i].nOut();
2009-08-10 18:06:52 +02:00
if (out.write((char*) &nout, sizeof(int)) <= 0)
throw NetworkFileWriteException();
2009-02-18 00:10:57 +01:00
2009-08-09 11:17:39 +02:00
for (int j = 0; j < nout; j++) {
struct synrecord r;
r.w = (*hidden)[i].synOut(j).getWeight();
r.d = (*hidden)[i].synOut(j).getDelta();
2009-08-10 18:06:52 +02:00
if (out.write((char*) &r, sizeof(struct synrecord)) <= 0)
throw NetworkFileWriteException();
2009-08-09 11:17:39 +02:00
}
2009-02-18 00:10:57 +01:00
}
2009-08-10 18:06:52 +02:00
out.close();
2009-08-09 11:17:39 +02:00
}
2009-02-18 00:10:57 +01:00
2009-08-10 18:06:52 +02:00
/**
 * Load a network previously persisted by save().
 *
 * The read order here MUST mirror save()'s write order: netrecord
 * header, then neuron state (input, hidden, output), then synapsis
 * state (input out-going, output incoming, hidden incoming, hidden
 * out-going).
 *
 * NOTE(review): the `in.read(...) <= 0` checks rely on the pre-C++11
 * istream-to-void* conversion and compare a non-null pointer against
 * null; this will not compile as C++11 and should eventually use the
 * stream's fail state instead.
 *
 * @param fname Path of the file to load
 * @throws NetworkFileNotFoundException on any open or read failure
 */
NeuralNet::NeuralNet(const string fname) throw(NetworkFileNotFoundException) {
	struct netrecord record;
	ifstream in(fname.c_str());

	if (!in)
		throw NetworkFileNotFoundException();

	if (in.read((char*) &record, sizeof(struct netrecord)) <= 0)
		throw NetworkFileNotFoundException();

	// Build a correctly-sized network, then copy it into *this.
	// NOTE(review): this uses the implicit copy-assignment, which
	// copies the Layer pointers out of a temporary that is then
	// destroyed — whether that leaks or double-frees depends on the
	// (not visible here) NeuralNet destructor; verify ownership.
	*this =
	    NeuralNet(record.input_size, record.hidden_size,
		      record.output_size, record.l_rate,
		      record.epochs);

	// Restore neurons
	for (unsigned int i = 0; i < input->size(); i++) {
		struct neuronrecord r;

		if (in.read((char*) &r, sizeof(struct neuronrecord)) <= 0)
			throw NetworkFileNotFoundException();

		(*input)[i].setProp(r.prop);
		(*input)[i].setActv(r.actv);
		(*input)[i].synClear();
	}

	for (unsigned int i = 0; i < hidden->size(); i++) {
		struct neuronrecord r;

		if (in.read((char*) &r, sizeof(struct neuronrecord)) <= 0)
			throw NetworkFileNotFoundException();

		(*hidden)[i].setProp(r.prop);
		(*hidden)[i].setActv(r.actv);
		(*hidden)[i].synClear();
	}

	for (unsigned int i = 0; i < output->size(); i++) {
		struct neuronrecord r;

		if (in.read((char*) &r, sizeof(struct neuronrecord)) <= 0)
			throw NetworkFileNotFoundException();

		(*output)[i].setProp(r.prop);
		(*output)[i].setActv(r.actv);
		(*output)[i].synClear();
	}

	// NOTE(review): redundant — every neuron's synapses were already
	// cleared in the loops above; kept as in the original.
	for (unsigned int i = 0; i < input->size(); i++)
		(*input)[i].synClear();

	for (unsigned int i = 0; i < hidden->size(); i++)
		(*hidden)[i].synClear();
	for (unsigned int i = 0; i < output->size(); i++)
		(*output)[i].synClear();
	// Rebuild the synapsis topology before restoring its weights.
	hidden->link(*input);
	output->link(*hidden);
	// Restore synapsis
	for (unsigned int i = 0; i < input->size(); i++) {
		int nout;

		if (in.read((char*) &nout, sizeof(int)) <= 0 )
			throw NetworkFileNotFoundException();

		for (int j = 0; j < nout; j++) {
			struct synrecord r;

			if (in.read((char*) &r, sizeof(struct synrecord)) <= 0)
				throw NetworkFileNotFoundException();

			(*input)[i].synOut(j).setWeight(r.w);
			(*input)[i].synOut(j).setDelta(r.d);
		}
	}

	for (unsigned int i = 0; i < output->size(); i++) {
		int nin;

		if (in.read((char*) &nin, sizeof(int)) <= 0)
			throw NetworkFileNotFoundException();

		for (int j = 0; j < nin; j++) {
			struct synrecord r;

			if (in.read((char*) &r, sizeof(struct synrecord)) <= 0)
				throw NetworkFileNotFoundException();

			(*output)[i].synIn(j).setWeight(r.w);
			(*output)[i].synIn(j).setDelta(r.d);
		}
	}

	for (unsigned int i = 0; i < hidden->size(); i++) {
		int nin;

		if (in.read((char*) &nin, sizeof(int)) <= 0)
			throw NetworkFileNotFoundException();

		for (int j = 0; j < nin; j++) {
			struct synrecord r;

			if (in.read((char*) &r, sizeof(struct synrecord)) <= 0)
				throw NetworkFileNotFoundException();

			(*hidden)[i].synIn(j).setWeight(r.w);
			(*hidden)[i].synIn(j).setDelta(r.d);
		}
	}

	for (unsigned int i = 0; i < hidden->size(); i++) {
		int nout;

		if (in.read((char*) &nout, sizeof(int)) <= 0)
			throw NetworkFileNotFoundException();

		for (int j = 0; j < nout; j++) {
			struct synrecord r;

			if (in.read((char*) &r, sizeof(struct synrecord)) <= 0)
				throw NetworkFileNotFoundException();

			(*hidden)[i].synOut(j).setWeight(r.w);
			(*hidden)[i].synOut(j).setDelta(r.d);
		}
	}

	in.close();
}
2009-08-09 11:17:39 +02:00
/**
 * Train the network from an XML training set (see trainer.dtd): for
 * each <TRAINING> element, feed the <INPUT> values forward, set the
 * <OUTPUT> value as expected, and run the remaining epochs; the step
 * is retried while the resulting output is infinite.
 *
 * Fixes vs. the previous revision:
 *  - the per-training-set locals no longer shadow the `input`/`output`
 *    Layer* members (renamed to trainIn/trainOut);
 *  - trainOut is zero-initialized, so a <TRAINING> element without an
 *    <OUTPUT> child no longer reads an uninitialized double;
 *  - removed the unreachable `return;` after the throw.
 *
 * @param xmlsrc XML document: a file path or the document text itself
 * @param src    Whether xmlsrc is a file name or an XML string
 * @throws InvalidXMLException if the document is not well-formed
 */
void NeuralNet::train(string xmlsrc, NeuralNet::source src =
		      file) throw(InvalidXMLException) {
	double out;
	CMarkup xml;

	if (src == file)
		xml.Load(xmlsrc.c_str());
	else
		xml.SetDoc(xmlsrc.c_str());

	if (!xml.IsWellFormed())
		throw InvalidXMLException();

	if (xml.FindElem("NETWORK")) {
		while (xml.FindChildElem("TRAINING")) {
			vector<double> trainIn;
			double trainOut = 0.0;
			bool valid = false;

			xml.IntoElem();
			while (xml.FindChildElem("INPUT")) {
				xml.IntoElem();
				trainIn.push_back(atof(
					xml.GetData().c_str()));
				xml.OutOfElem();
			}

			if (xml.FindChildElem("OUTPUT")) {
				xml.IntoElem();
				trainOut =
				    atof(xml.GetData().c_str());
				xml.OutOfElem();
			}
			xml.OutOfElem();

			// Retry until the network output is finite: the
			// string round-trip detects "inf"/"-inf" without
			// needing C99 isinf().
			while (!valid) {
				stringstream ss(stringstream::in | stringstream::out);
				setInput(trainIn);
				propagate();
				setExpected(trainOut);
				update();
				out = getOutput();
				ss << out;
				if (ss.str().find("inf") == string::npos)
					valid = true;
			}
		}
	}
}
2009-08-10 18:06:52 +02:00
/**
 * Append the fixed XML prologue of a Neural++ training file to xml:
 * declaration, DOCTYPE, a generator comment and the opening
 * <NETWORK> tag. Closed later by closeXML().
 *
 * @param xml String the prologue is appended to
 */
void NeuralNet::initXML(string& xml) {
	xml.append("<?xml version=\"1.0\" encoding=\"iso-8859-1\"?>\n");
	xml.append("<!DOCTYPE NETWORK SYSTEM \"http://blacklight.gotdns.org/prog/neuralpp/trainer.dtd\">\n");
	xml.append("<!-- Automatically generated by Neural++ library - by BlackLight -->\n\n");
	xml.append("<NETWORK>\n");
}
2009-02-18 00:10:57 +01:00
2009-08-09 11:17:39 +02:00
/**
 * Split a delimited string into numeric values: each token is parsed
 * with atof() (empty tokens yield 0.0, matching the old behavior for
 * empty input and trailing delimiters).
 *
 * Fixes vs. the previous revision:
 *  - no fixed 1024-byte buffer, so tokens of any length are safe
 *    (the old code overflowed tmp[] on tokens >= 1024 chars);
 *  - no read of str[str.length()] inside the loop condition.
 *
 * @param delim Separator character (e.g. ',')
 * @param str   String to split
 * @return One double per token, in order of appearance
 */
vector <double> NeuralNet::split(char delim, string str) {
	vector <double> v;
	size_t start = 0;

	// One iteration per token; the final token runs to end-of-string.
	while (start <= str.length()) {
		size_t end = str.find(delim, start);
		if (end == string::npos)
			end = str.length();

		v.push_back(atof(str.substr(start, end - start).c_str()));
		start = end + 1;
	}

	return v;
}
2009-08-09 11:17:39 +02:00
/**
 * Build a <TRAINING> XML fragment from a compact training-set string
 * of the form "in1,in2,...;out1,out2,..." — inputs and outputs are
 * comma-separated, and a ';' separates the two groups.
 *
 * Uses string::find instead of the previous manual scan loop (which
 * also initialized an unsigned counter to -1).
 *
 * @param id  Value of the TRAINING element's ID attribute
 * @param set Training set string; if it contains no ';' an empty
 *            string is returned
 * @return The XML fragment, or "" when set has no separator
 */
string NeuralNet::XMLFromSet(int id, string set) {
	string xml;
	vector<double> in, out;
	stringstream ss (stringstream::in | stringstream::out);
	char delim = ';';

	// Locate the inputs/outputs separator; without it there is
	// nothing to convert.
	size_t delimPos = set.find(delim);

	if (delimPos == string::npos)
		return xml;

	string inStr = set.substr(0, delimPos);
	string outStr = set.substr(delimPos + 1);

	in = split(',', inStr);
	out = split(',', outStr);

	ss << id;
	xml += "\t<TRAINING ID=\"" + ss.str() + "\">\n";

	for (unsigned int i = 0; i < in.size(); i++) {
		ss.str(string());
		ss << i;
		xml += "\t\t<INPUT ID=\"" + ss.str() + "\">";

		ss.str(string());
		ss << in[i];
		xml += ss.str() + "</INPUT>\n";
	}

	for (unsigned int i = 0; i < out.size(); i++) {
		ss.str(string());
		ss << i;
		xml += "\t\t<OUTPUT ID=\"" + ss.str() + "\">";

		ss.str(string());
		ss << out[i];
		xml += ss.str() + "</OUTPUT>\n";
	}

	xml += "\t</TRAINING>\n\n";
	return xml;
}
/**
 * Terminate the XML document opened by initXML() by appending the
 * closing </NETWORK> tag.
 *
 * @param xml String the epilogue is appended to
 */
void NeuralNet::closeXML(string & xml) {
	xml += "</NETWORK>\n\n";
}
2009-08-09 10:24:52 +02:00
}
2009-02-18 00:10:57 +01:00