mirror of https://github.com/BlackLight/Snort_AIPreproc.git
synced 2024-11-27 22:25:12 +01:00

Adding fsom library for SOM neural networks

This commit is contained in:
parent 5aa118e4e5
commit af14a6b826
10 changed files with 1049 additions and 14 deletions

@@ -25,6 +25,7 @@ bayesian.c \
 cluster.c \
 correlation.c \
 db.c \
+fsom/fsom.c \
 mysql.c \
 outdb.c \
 postgresql.c \

12 Makefile.in
@@ -84,10 +84,10 @@ am_libsf_ai_preproc_la_OBJECTS = libsf_ai_preproc_la-alert_history.lo \
 libsf_ai_preproc_la-cencode.lo libsf_ai_preproc_la-bayesian.lo \
 libsf_ai_preproc_la-cluster.lo \
 libsf_ai_preproc_la-correlation.lo libsf_ai_preproc_la-db.lo \
-libsf_ai_preproc_la-mysql.lo libsf_ai_preproc_la-outdb.lo \
-libsf_ai_preproc_la-postgresql.lo libsf_ai_preproc_la-regex.lo \
-libsf_ai_preproc_la-spp_ai.lo libsf_ai_preproc_la-stream.lo \
-libsf_ai_preproc_la-webserv.lo
+libsf_ai_preproc_la-fsom.lo libsf_ai_preproc_la-mysql.lo \
+libsf_ai_preproc_la-outdb.lo libsf_ai_preproc_la-postgresql.lo \
+libsf_ai_preproc_la-regex.lo libsf_ai_preproc_la-spp_ai.lo \
+libsf_ai_preproc_la-stream.lo libsf_ai_preproc_la-webserv.lo
 nodist_libsf_ai_preproc_la_OBJECTS = \
 libsf_ai_preproc_la-sf_dynamic_preproc_lib.lo \
 libsf_ai_preproc_la-sfPolicyUserData.lo
@@ -266,6 +266,7 @@ bayesian.c \
 cluster.c \
 correlation.c \
 db.c \
+fsom/fsom.c \
 mysql.c \
 outdb.c \
 postgresql.c \
@@ -412,6 +413,9 @@ libsf_ai_preproc_la-correlation.lo: correlation.c
 libsf_ai_preproc_la-db.lo: db.c
 	$(LIBTOOL) --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libsf_ai_preproc_la_CFLAGS) $(CFLAGS) -c -o libsf_ai_preproc_la-db.lo `test -f 'db.c' || echo '$(srcdir)/'`db.c
 
+libsf_ai_preproc_la-fsom.lo: fsom/fsom.c
+	$(LIBTOOL) --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libsf_ai_preproc_la_CFLAGS) $(CFLAGS) -c -o libsf_ai_preproc_la-fsom.lo `test -f 'fsom/fsom.c' || echo '$(srcdir)/'`fsom/fsom.c
+
 libsf_ai_preproc_la-mysql.lo: mysql.c
 	$(LIBTOOL) --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libsf_ai_preproc_la_CFLAGS) $(CFLAGS) -c -o libsf_ai_preproc_la-mysql.lo `test -f 'mysql.c' || echo '$(srcdir)/'`mysql.c

1 TODO
@@ -2,7 +2,6 @@
 AVERAGE/HIGH PRIORITY:
 ======================
 
-- Bayesian network
 - Modules for correlation coefficients
 - Code profiling
 - Comment all the code!!!

@@ -233,8 +233,9 @@ AI_file_alertparser_thread ( void* arg )
  * The first time the thread is called, the flow exits instantly from the while,
  * so this first time the stats structure has to be initialized properly.
  */
-if( last_mod_time == (time_t)0 ){
-	fstats( fd, &stats );
+if ( last_mod_time == (time_t) 0 )
+{
+	fstats ( fd, &stats );
 }
 
 last_mod_time = stats.st_mtime;
@@ -257,14 +258,15 @@ AI_file_alertparser_thread ( void* arg )
 {
 if ( in_alert )
 {
-if ( alert->ip_src_addr && ( alert->ip_proto == IPPROTO_TCP || alert->ip_proto == IPPROTO_UDP ))
+if ( alert->ip_src_addr )
 {
-key.src_ip = alert->ip_src_addr;
-key.dst_port = alert->tcp_dst_port;
 
 if ( alert->ip_proto == IPPROTO_TCP )
 {
-if (( info = AI_get_stream_by_key ( key ) ))
+memset ( &key, 0, sizeof ( key ));
+key.src_ip = alert->ip_src_addr;
+key.dst_port = alert->tcp_dst_port;
+
+if (( info = AI_get_stream_by_key ( key )))
 {
 AI_set_stream_observed ( key );
 alert->stream = info;

@@ -144,7 +144,6 @@ AI_alert_bayesian_correlation ( AI_snort_alert *a, AI_snort_alert *b )
 
 corr /= (double) corr_count;
 corr -= ( events_a->count - corr_count_a ) / events_a->count;
-/* _dpd.logMsg ( "  Number of '%s' alerts correlated to '%s': %u over %u\n", a->desc, b->desc, corr_count_a, events_a->count ); */
 
 if ( found )
 {

@@ -221,6 +221,7 @@ __AI_correlated_alerts_to_json ()
 
 for ( pkt_iterator = alert_iterator->stream; pkt_iterator; pkt_iterator = pkt_iterator->next )
 {
+encoded_pkt = NULL;
 pkt_len = pkt_iterator->pkt->pcap_header->len + pkt_iterator->pkt->payload_size;
 
 if ( !( encoded_pkt = (char*) malloc ( 4*pkt_len + 1 )))

958 fsom/fsom.c Normal file

@@ -0,0 +1,958 @@
/*
 * =====================================================================================
 *
 * Filename: fsom.c
 *
 * Description: Manage a self-organizing map (SOM) as a neural network
 *
 * Version: 0.1
 * Created: 15/10/2010 13:53:31
 * Revision: none
 * Compiler: gcc
 *
 * Author: BlackLight (http://0x00.ath.cx), <blacklight@autistici.org>
 * Licence: GNU GPL v.3
 * Company: DO WHAT YOU WANT CAUSE A PIRATE IS FREE, YOU ARE A PIRATE!
 *
 * =====================================================================================
 */

#include "fsom.h"

#include <alloca.h>
#include <float.h>
#include <limits.h>
#include <math.h>
#include <memory.h>
#include <stdio.h>
#include <stdlib.h>

#ifndef M_E
#define M_E 2.7182818284590452354
#endif

/**
 * \brief Create a new synapsis between two neurons
 * \param input_neuron Input neuron for the synapsis
 * \param output_neuron Output neuron for the synapsis
 * \param weight Weight of the synapsis (set it to 0 for a random value between 0 and 1)
 * \return A pointer representing the new synapsis
 */

static som_synapsis_t*
som_synapsis_new ( som_neuron_t *input_neuron, som_neuron_t *output_neuron, double weight )
{
	som_synapsis_t *synapsis = NULL;

	if ( !( synapsis = ( som_synapsis_t* ) malloc ( sizeof ( som_synapsis_t ))))
	{
		return NULL;
	}

	synapsis->neuron_in = input_neuron;
	synapsis->neuron_out = output_neuron;

	if ( weight == 0.0 )
	{
		synapsis->weight = (double) rand() / (double) UINT_MAX;
	} else {
		synapsis->weight = weight;
	}

	if ( !( input_neuron->synapses = ( som_synapsis_t** ) realloc ( input_neuron->synapses, (++( input_neuron->synapses_count )) * sizeof ( som_synapsis_t ))))
	{
		free ( synapsis );
		return NULL;
	}

	if ( !( output_neuron->synapses = ( som_synapsis_t** ) realloc ( output_neuron->synapses, (++( output_neuron->synapses_count )) * sizeof ( som_synapsis_t ))))
	{
		free ( synapsis );
		return NULL;
	}

	input_neuron->synapses[ input_neuron->synapses_count - 1 ] = synapsis;
	output_neuron->synapses[ output_neuron->synapses_count - 1 ] = synapsis;
	return synapsis;
} /* ----- end of function som_synapsis_new ----- */

/**
 * \brief Create a new neuron
 * \return The new neuron
 */

static som_neuron_t*
som_neuron_new ()
{
	som_neuron_t *neuron = NULL;

	if ( !( neuron = ( som_neuron_t* ) malloc ( sizeof ( som_neuron_t ))))
	{
		return NULL;
	}

	neuron->output = 0.0;
	neuron->input = 0.0;
	neuron->synapses = NULL;
	neuron->synapses_count = 0;

	return neuron;
} /* ----- end of function som_neuron_new ----- */

/**
 * \brief Deallocate a neuron
 * \param neuron Neuron to be deallocated
 */

static void
som_neuron_destroy ( som_neuron_t *neuron )
{
	if ( !neuron )
	{
		return;
	}

	free ( neuron );
	neuron = NULL;
} /* ----- end of function som_neuron_destroy ----- */

/**
 * \brief Create a new input layer
 * \param neurons_count Number of neurons in the new input layer
 * \return The new layer
 */

static som_input_layer_t*
som_input_layer_new ( size_t neurons_count )
{
	size_t i = 0,
	       j = 0;

	som_input_layer_t *layer = NULL;

	if ( !( layer = ( som_input_layer_t* ) malloc ( sizeof ( som_input_layer_t ))))
	{
		return NULL;
	}

	layer->neurons_count = neurons_count;

	if ( !( layer->neurons = ( som_neuron_t** ) malloc ( neurons_count * sizeof ( som_neuron_t* ))))
	{
		free ( layer );
		return NULL;
	}

	for ( i=0; i < neurons_count; i++ )
	{
		if ( !( layer->neurons[i] = som_neuron_new() ))
		{
			for ( j=0; j < i; j++ )
			{
				som_neuron_destroy ( layer->neurons[j] );
				layer->neurons[j] = NULL;
			}

			free ( layer->neurons );
			free ( layer );
			return NULL;
		}
	}

	return layer;
} /* ----- end of function som_input_layer_new ----- */

/**
 * \brief Create a new output layer
 * \param neurons_rows Number of rows in the matrix of output neurons
 * \param neurons_cols Number of cols in the matrix of output neurons
 * \return The new layer
 */

static som_output_layer_t*
som_output_layer_new ( size_t neurons_rows, size_t neurons_cols )
{
	size_t i = 0,
	       j = 0,
	       k = 0,
	       l = 0;

	som_output_layer_t *layer = NULL;

	if ( !( layer = ( som_output_layer_t* ) malloc ( sizeof ( som_output_layer_t ))))
	{
		return NULL;
	}

	layer->neurons_rows = neurons_rows;
	layer->neurons_cols = neurons_cols;

	if ( !( layer->neurons = ( som_neuron_t*** ) malloc ( neurons_rows * neurons_cols * sizeof ( som_neuron_t** ))))
	{
		free ( layer );
		return NULL;
	}

	for ( i=0; i < neurons_rows; i++ )
	{
		if ( !( layer->neurons[i] = ( som_neuron_t** ) malloc ( neurons_cols * sizeof ( som_neuron_t* ))))
		{
			for ( j=0; j < i; j++ )
			{
				free ( layer->neurons[j] );
				layer->neurons[j] = NULL;
			}

			free ( layer->neurons );
			free ( layer );
			return NULL;
		}
	}

	for ( i=0; i < neurons_rows; i++ )
	{
		for ( j=0; j < neurons_cols; j++ )
		{
			if ( !( layer->neurons[i][j] = som_neuron_new() ))
			{
				for ( k=0; k < i; k++ )
				{
					for ( l=0; l < j; l++ )
					{
						som_neuron_destroy ( layer->neurons[k][l] );
						layer->neurons[k][l] = NULL;
					}

					free ( layer->neurons[k] );
					layer->neurons[k] = NULL;
				}

				free ( layer->neurons );
				return NULL;
			}
		}
	}

	return layer;
} /* ----- end of function som_output_layer_new ----- */

/**
 * \brief Connect two layers of a neural SOM
 * \param input_layer Reference to the input layer
 * \param output_layer Reference to the output layer
 */

static void
som_connect_layers ( som_input_layer_t **input_layer, som_output_layer_t **output_layer )
{
	size_t i = 0,
	       j = 0,
	       k = 0;

	for ( i=0; i < (*output_layer)->neurons_rows; i++ )
	{
		for ( j=0; j < (*output_layer)->neurons_cols; j++ )
		{
			for ( k=0; k < (*input_layer)->neurons_count; k++ )
			{
				if ( !( som_synapsis_new ( (*input_layer)->neurons[k], (*output_layer)->neurons[i][j], 0.0 )))
				{
					return;
				}
			}
		}
	}
} /* ----- end of function som_connect_layers ----- */

/**
 * \brief Initialize a new SOM neural network
 * \param input_neurons Number of neurons in the input layer
 * \param output_neurons_rows Number of rows of neurons in the output layer
 * \param output_neurons_cols Number of cols of neurons in the output layer
 * \return The new SOM neural network
 */

som_network_t*
som_network_new ( size_t input_neurons, size_t output_neurons_rows, size_t output_neurons_cols )
{
	som_network_t *net = NULL;
	srand ( time ( NULL ));

	if ( !( net = ( som_network_t* ) malloc ( sizeof ( som_network_t ))))
	{
		return NULL;
	}

	memset ( net, 0, sizeof ( som_network_t ));

	if ( !( net->input_layer = som_input_layer_new ( input_neurons )))
	{
		free ( net );
		return NULL;
	}

	if ( !( net->output_layer = som_output_layer_new ( output_neurons_rows, output_neurons_cols )))
	{
		free ( net->input_layer );
		free ( net );
		return NULL;
	}

	net->T_learning_param = 0.0;
	net->serialization_time = ( time_t ) 0;
	som_connect_layers ( &( net->input_layer ), &( net->output_layer ));
	return net;
} /* ----- end of function som_network_new ----- */

/**
 * \brief Deallocate an input layer
 * \param net Network whose input layer should be deallocated
 */

static void
som_input_layer_destroy ( som_network_t *net )
{
	size_t i = 0,
	       j = 0,
	       k = 0;

	if ( !( net->input_layer ))
	{
		return;
	}

	for ( i=0; i < net->input_layer->neurons_count; i++ )
	{
		for ( j=0; j < net->input_layer->neurons[i]->synapses_count; j++ )
		{
			if ( (int) j < 0 )
			{
				break;
			}

			if ( net->input_layer->neurons[i]->synapses )
			{
				if ( net->input_layer->neurons[i]->synapses[j] )
				{
					if ( net->input_layer->neurons[i]->synapses[j]->neuron_out )
					{
						/* net->input_layer->neurons[i]->synapses[j]->neuron_out->synapses[k]->neuron_in = NULL; */

						for ( k=0; k < net->input_layer->neurons[i]->synapses[j]->neuron_out->synapses_count; k++ )
						{
							if ( net->input_layer->neurons[i]->synapses[j]->neuron_out->synapses[k] )
							{
								net->input_layer->neurons[i]->synapses[j]->neuron_out->synapses[k]->neuron_in = NULL;
								net->input_layer->neurons[i]->synapses[j]->neuron_out->synapses[k] = NULL;
							}
						}
					}

					free ( net->input_layer->neurons[i]->synapses[j] );
					net->input_layer->neurons[i]->synapses[j] = NULL;
				}

				free ( net->input_layer->neurons[i]->synapses );
				net->input_layer->neurons[i]->synapses = NULL;
			}
		}

		som_neuron_destroy ( net->input_layer->neurons[i] );
		net->input_layer->neurons[i] = NULL;
	}

	free ( net->input_layer->neurons );
	net->input_layer->neurons = NULL;

	free ( net->input_layer );
	net->input_layer = NULL;
} /* ----- end of function som_input_layer_destroy ----- */

/**
 * \brief Deallocate an output layer
 * \param net Network whose output layer should be deallocated
 */

static void
som_output_layer_destroy ( som_network_t *net )
{
	size_t i = 0,
	       j = 0,
	       k = 0;

	if ( !( net->output_layer ))
	{
		return;
	}

	for ( i=0; i < net->output_layer->neurons_rows; i++ )
	{
		for ( j=0; j < net->output_layer->neurons_cols; j++ )
		{
			for ( k=0; k < net->output_layer->neurons[i][j]->synapses_count; k++ )
			{
				if ( net->output_layer->neurons[i][j]->synapses )
				{
					if ( net->output_layer->neurons[i][j]->synapses[k] )
					{
						free ( net->output_layer->neurons[i][j]->synapses[k] );
						net->output_layer->neurons[i][j]->synapses[k] = NULL;
					}

					free ( net->output_layer->neurons[i][j]->synapses );
					net->output_layer->neurons[i][j]->synapses = NULL;
				}
			}

			som_neuron_destroy ( net->output_layer->neurons[i][j] );
			net->output_layer->neurons[i][j] = NULL;
		}

		free ( net->output_layer->neurons[i] );
		net->output_layer->neurons[i] = NULL;
	}

	free ( net->output_layer->neurons );
	net->output_layer->neurons = NULL;

	free ( net->output_layer );
	net->output_layer = NULL;
} /* ----- end of function som_output_layer_destroy ----- */

/**
 * \brief Deallocate a SOM neural network
 * \param net Network to be deallocated
 */

void
som_network_destroy ( som_network_t *net )
{
	if ( !net )
	{
		return;
	}

	som_input_layer_destroy ( net );
	som_output_layer_destroy ( net );
	free ( net );
	net = NULL;
} /* ----- end of function som_network_destroy ----- */

/**
 * \brief Set a vector as input for the network
 * \param net SOM neural network
 * \param data Vector to be passed as input for the network
 */

void
som_set_inputs ( som_network_t *net, double *data )
{
	size_t i = 0;

	for ( i=0; i < net->input_layer->neurons_count; i++ )
	{
		net->input_layer->neurons[i]->input = data[i];
	}
} /* ----- end of function som_set_inputs ----- */

/**
 * \brief Get the coordinates of the output neuron closest to the current input data
 * \param net SOM neural network
 * \param x Reference to the X coordinate of the best output neuron
 * \param y Reference to the Y coordinate of the best output neuron
 * \return The value of the module ||X-W|| (squared euclidean distance) for the best neuron
 */

double
som_get_best_neuron_coordinates ( som_network_t *net, size_t *x, size_t *y )
{
	size_t i = 0,
	       j = 0,
	       k = 0;

	double mod = 0.0,
	       best_dist = 0.0;

	for ( i=0; i < net->output_layer->neurons_rows; i++ )
	{
		for ( j=0; j < net->output_layer->neurons_cols; j++ )
		{
			mod = 0.0;

			for ( k=0; k < net->output_layer->neurons[i][j]->synapses_count; k++ )
			{
				mod += ( net->input_layer->neurons[k]->input - net->output_layer->neurons[i][j]->synapses[k]->weight ) *
					( net->input_layer->neurons[k]->input - net->output_layer->neurons[i][j]->synapses[k]->weight );
			}

			if (( i == 0 && j == 0 ) || ( mod < best_dist ))
			{
				best_dist = mod;
				*x = i;
				*y = j;
			}
		}
	}

	return mod;
} /* ----- end of function som_get_best_neuron_coordinates ----- */

/**
 * \brief Get the n-th approximated step of the analytic continuation of the Lambert W-function of a real number x (see "Numerical Evaluation of the Lambert W Function and Application to Generation of Generalized Gaussian Noise With Exponent 1/2" from Chapeau-Blondeau and Monir, IEEE Transactions on Signal Processing, vol.50, no.9, Sep.2002)
 * \param x Input variable of which we're going to compute W[-1](x)
 * \param n Number of steps in the series computation
 * \return W[-1](x)
 */

static double
lambert_W1_function ( double x, int n )
{
	int j = 0,
	    k = 0;

	double *alphas = NULL,
	       *mus = NULL,
	       p = 0.0,
	       res = 0.0;

	if ( !( alphas = (double*) alloca ( (n+1) * sizeof ( double ))))
		return 0.0;

	if ( !( mus = (double*) alloca ( (n+1) * sizeof ( double ))))
		return 0.0;

	p = - sqrt ( 2 * ( M_E * x + 1 ));

	for ( k=0; k < n; k++ )
	{
		if ( k == 0 )
		{
			mus[k] = -1;
			alphas[k] = 2;
		} else if ( k == 1 ) {
			mus[k] = 1;
			alphas[k] = -1;
		} else {
			alphas[k] = 0.0;

			for ( j=2; j < k; j++ )
			{
				alphas[k] += mus[j] * mus[k-j+1];
			}

			mus[k] = ((double) ( k - 1 ) / (double) ( k + 1 )) * ( (mus[k-2] / 2.0) + (alphas[k-2] / 4.0) ) - ( alphas[k] / 2.0 ) - ( mus[k-1] / ((double) k + 1 ));
		}

		res += ( mus[k] * pow ( p, (double) k ));
	}

	return res;
} /* ----- end of function lambert_W1_function ----- */

/**
 * \brief Get the learning rate of a step of the learning process in function of the current iteration number
 * \param net SOM neural network
 * \param t Iteration number
 * \param M Maximum value for the learning rate (in [0:1])
 * \param N Iteration number after which the function equals the "cutoff" value (0.01), i.e. the learning rate becomes almost meaningless
 * \return Learning rate
 */

static double
som_learning_rate ( som_network_t* net, size_t t, double M, size_t N )
{
	double value = 0.0,
	       T = 0.0,
	       K = 0.0,
	       W = 0.0,
	       W_arg = 0.0;

	if ( net->T_learning_param == 0.0 )
	{
		K = ( M * (double) N * M_E ) / 0.01;
		W_arg = -((double) N ) / K;
		W = lambert_W1_function ( W_arg, 1000 );
		T = K * exp ( W );
		net->T_learning_param = T;
	} else {
		T = net->T_learning_param;
	}

	value = M * ( (double) t / T) * exp ( 1 - ( (double) t / T ));
	return value;
} /* ----- end of function som_learning_rate ----- */

/**
 * \brief Training iteration for the network given a single input data set
 * \param net SOM neural network
 * \param data Input data
 * \param iter Iteration number
 */

static void
som_train_iteration ( som_network_t *net, double *data, size_t iter )
{
	size_t x = 0,
	       y = 0,
	       i = 0,
	       j = 0,
	       k = 0,
	       dist = 0;

	double l_rate = 0.0;

	l_rate = som_learning_rate ( net, iter, 0.8, 200 );
	som_set_inputs ( net, data );
	som_get_best_neuron_coordinates ( net, &x, &y );

	for ( i=0; i < net->output_layer->neurons_rows; i++ )
	{
		for ( j=0; j < net->output_layer->neurons_cols; j++ )
		{
			dist = abs ( x-i ) + abs ( y-j );
			dist = dist * dist * dist * dist;

			for ( k=0; k < net->input_layer->neurons_count; k++ )
			{
				net->output_layer->neurons[i][j]->synapses[k]->weight +=
					(( 1.0 / ((double) dist + 1) ) *
					l_rate * ( net->input_layer->neurons[k]->input - net->output_layer->neurons[i][j]->synapses[k]->weight ));
			}
		}
	}
} /* ----- end of function som_train_loop ----- */

/**
 * \brief Initialize the synaptical weights of the network using the algorithm proposed in "Improving the Self-Organization Feature Map Algorithm Using an Efficient Initialization Scheme", by Su, Liu and Chang, on "Tamkang Journal of Science and Engineering", vol.5, no.1, pp.35-48, 2002
 * \param net SOM neural network
 * \param data Input data set
 * \param n_data Number of vectors in the input set
 */

static void
som_init_weights ( som_network_t *net, double **data, size_t n_data )
{
	size_t i = 0,
	       j = 0,
	       k = 0,
	       out_rows = 0,
	       out_cols = 0,
	       in_size = 0,
	       max_i = 0,
	       max_j = 0,
	       medium_i = 0,
	       medium_j = 0;

	double dist = 0.0,
	       max_dist = 0.0;

	double *avg_data = NULL;

	if ( !( avg_data = (double*) alloca ( net->input_layer->neurons_count * sizeof ( double ))))
	{
		return;
	}

	/* Find the couple of data sets with the maximum distance */
	for ( i=0; i < n_data; i++ )
	{
		for ( j=0; j < n_data; j++ )
		{
			if ( i != j )
			{
				dist = 0.0;

				for ( k=0; k < net->input_layer->neurons_count; k++ )
				{
					dist += fabs ( data[i][k] - data[j][k] );
				}

				if ( dist > max_dist )
				{
					max_dist = dist;
					max_i = i;
					max_j = j;
				}
			}
		}
	}

	/* Compute the avg_data vector as the vector containing the average values of (data[max_i], data[max_j]) */
	for ( i=0; i < net->input_layer->neurons_count; i++ )
	{
		avg_data[i] = fabs ( data[max_i][i] + data[max_j][i] ) / 2.0;
	}

	/* Initialize the upper-right and bottom-left vertex of the output matrix with these values */
	for ( i=0; i < net->input_layer->neurons_count; i++ )
	{
		net->output_layer->neurons[0][ net->output_layer->neurons_cols - 1 ]->synapses[i]->weight = data[max_i][i];
		net->output_layer->neurons[ net->output_layer->neurons_rows - 1 ][0]->synapses[i]->weight = data[max_j][i];
	}

	/* Find the vector having the maximum distance from the maximum distance vectors */
	max_dist = DBL_MAX;

	for ( i=0; i < n_data; i++ )
	{
		if ( i != max_i && i != max_j )
		{
			dist = 0.0;

			for ( k=0; k < net->input_layer->neurons_count; k++ )
			{
				dist += fabs ( data[i][k] - avg_data[i] );

				if ( dist < max_dist )
				{
					max_dist = dist;
					medium_i = i;
				}
			}
		}
	}

	/* Initialize the upper-left corner with the values of this vector */
	for ( i=0; i < net->input_layer->neurons_count; i++ )
	{
		net->output_layer->neurons[0][0]->synapses[i]->weight = data[medium_i][i];
	}

	/* avg_data contains the average values of the 3 vectors computed above */
	for ( i=0; i < net->input_layer->neurons_count; i++ )
	{
		avg_data[i] = fabs ( data[max_i][i] + data[max_j][i] + data[medium_i][i] ) / 3.0;
	}

	/* Find the vector having the maximum distance from the 3 vectors above */
	max_dist = DBL_MAX;

	for ( i=0; i < n_data; i++ )
	{
		if ( i != max_i && i != max_j && i != medium_i )
		{
			dist = 0.0;

			for ( k=0; k < net->input_layer->neurons_count; k++ )
			{
				dist += fabs ( data[i][k] - avg_data[i] );

				if ( dist < max_dist )
				{
					max_dist = dist;
					medium_j = i;
				}
			}
		}
	}

	/* Initialize the bottom-right corner with the values of this vector */
	for ( i=0; i < net->input_layer->neurons_count; i++ )
	{
		net->output_layer->neurons[ net->output_layer->neurons_rows - 1 ][ net->output_layer->neurons_cols - 1 ]->synapses[i]->weight = data[medium_j][i];
	}

	/* Initialize the weights on the 4 edges */
	out_rows = net->output_layer->neurons_rows;
	out_cols = net->output_layer->neurons_cols;
	in_size = net->input_layer->neurons_count;

	for ( j=1; j < out_cols - 1; j++ )
	{
		for ( k=0; k < in_size; k++ )
		{
			net->output_layer->neurons[0][j]->synapses[k]->weight =
				( ((double) j - 1) / ( out_cols - 1 )) * net->output_layer->neurons[0][ out_cols - 1 ]->synapses[k]->weight +
				( (double) ( out_cols - j ) / ((double) out_cols - 1 )) * net->output_layer->neurons[0][0]->synapses[k]->weight;
		}
	}

	for ( j=1; j < out_cols - 1; j++ )
	{
		for ( k=0; k < in_size; k++ )
		{
			net->output_layer->neurons[ out_rows - 1 ][j]->synapses[k]->weight =
				( ((double) j - 1) / ((double) out_cols - 1 )) * net->output_layer->neurons[ out_rows - 1 ][ out_cols - 1 ]->synapses[k]->weight +
				( (double) ( out_cols - j ) / ((double) out_cols - 1 )) * net->output_layer->neurons[ out_rows - 1 ][0]->synapses[k]->weight;
		}
	}

	for ( i=1; i < out_rows - 1; i++ )
	{
		for ( k=0; k < in_size; k++ )
		{
			net->output_layer->neurons[i][0]->synapses[k]->weight =
				( ((double) i - 1) / ((double) out_rows - 1 )) * net->output_layer->neurons[ out_rows-1 ][0]->synapses[k]->weight +
				( (double) ( out_rows - i ) / ((double) out_rows - 1 )) * net->output_layer->neurons[0][0]->synapses[k]->weight;
		}
	}

	for ( i=1; i < out_rows - 1; i++ )
	{
		for ( k=0; k < in_size; k++ )
		{
			net->output_layer->neurons[i][ out_cols - 1 ]->synapses[k]->weight =
				( ((double) i - 1) / ((double) out_rows - 1 )) * net->output_layer->neurons[ out_rows - 1 ][ out_cols - 1 ]->synapses[k]->weight +
				( (double) ( out_rows - i ) / ((double) out_rows - 1 )) * net->output_layer->neurons[0][ out_cols - 1 ]->synapses[k]->weight;
		}
	}

	/* Initialize the weights in the middle of the matrix */
	for ( i=1; i < out_rows - 1; i++ )
	{
		for ( j=1; j < out_cols - 1; j++ )
		{
			for ( k=0; k < in_size; k++ )
			{
				net->output_layer->neurons[i][j]->synapses[k]->weight =
					( (((double) j - 1)*((double) i - 1)) / (((double) out_rows - 1)*((double) out_cols - 1))) * net->output_layer->neurons[ out_rows - 1 ][ out_cols - 1 ]->synapses[k]->weight +
					( (((double) j - 1)*(double) (out_rows - i)) / (((double) out_rows - 1)*((double) out_cols - 1))) * net->output_layer->neurons[0][ out_cols - 1 ]->synapses[k]->weight +
					( ((double) (out_cols - j)*((double) i - 1)) / (((double) out_rows - 1)*((double) out_cols - 1))) * net->output_layer->neurons[ out_rows - 1 ][0]->synapses[k]->weight +
					( ((double) (out_cols - j)*(double) (out_rows - i)) / (((double) out_rows - 1)*((double) out_cols - 1))) * net->output_layer->neurons[0][0]->synapses[k]->weight;
			}
		}
	}
} /* ----- end of function som_init_weights ----- */

/**
 * \brief Train the self-organizing map through a data set
 * \param net SOM neural network
 * \param data Data set (set of input vectors)
 * \param n_data Number of input vectors in data
 * \param iter Number of iterations
 */

void
som_train ( som_network_t *net, double **data, size_t n_data, size_t iter )
{
	size_t n = 0,
	       k = 0,
	       x = 0,
	       y = 0;

	som_init_weights ( net, data, n_data );

	for ( n=0; n < n_data; n++ )
	{
		for ( k=1; k <= iter; k++ )
		{
			som_train_iteration ( net, data[n], k );

			if ( som_get_best_neuron_coordinates ( net, &x, &y ) == 0.0 )
				break;
		}
	}
} /* ----- end of function som_train ----- */

/**
 * \brief Serialize a neural network on a binary file
 * \param net SOM network to be serialized
 * \param fname Output file name
 */

void
som_serialize ( som_network_t *net, const char *fname )
{
	FILE *fp = NULL;
	size_t i = 0,
	       j = 0,
	       k = 0;

	if ( !( fp = fopen ( fname, "w" )))
	{
		return;
	}

	net->serialization_time = time ( NULL );
	fwrite ( &(net->serialization_time), sizeof ( time_t ), 1, fp );
	fwrite ( &(net->T_learning_param), sizeof ( double ), 1, fp );
	fwrite ( &(net->input_layer->neurons_count), sizeof ( size_t ), 1, fp );
	fwrite ( &(net->output_layer->neurons_rows), sizeof ( size_t ), 1, fp );
	fwrite ( &(net->output_layer->neurons_cols), sizeof ( size_t ), 1, fp );

	for ( i=0; i < net->output_layer->neurons_rows; i++ )
	{
		for ( j=0; j < net->output_layer->neurons_cols; j++ )
		{
			for ( k=0; k < net->output_layer->neurons[i][j]->synapses_count; k++ )
			{
				fwrite ( &(net->output_layer->neurons[i][j]->synapses[k]->weight), sizeof ( double ), 1, fp );
			}
		}
	}

	fclose ( fp );
} /* ----- end of function som_serialize ----- */

/**
 * \brief Initialize a SOM neural network from a serialized one on a file
 * \param fname Binary file containing the network
 * \return The initialized network in case of success, NULL otherwise
 */

som_network_t*
som_deserialize ( const char* fname )
{
	som_network_t *net = NULL;
	FILE *fp = NULL;
	double weight = 0.0;
	size_t i = 0,
	       j = 0,
	       k = 0,
	       input_neurons = 0,
	       output_neurons_rows = 0,
	       output_neurons_cols = 0;

	if ( !( fp = fopen ( fname, "r" )))
	{
		return NULL;
	}

	if ( !( net = ( som_network_t* ) malloc ( sizeof ( som_network_t ))))
	{
		return NULL;
	}

	memset ( net, 0, sizeof ( som_network_t ));

	fread ( &(net->serialization_time), sizeof ( time_t ), 1, fp );
	fread ( &(net->T_learning_param ), sizeof ( double ), 1, fp );
	fread ( &input_neurons, sizeof ( size_t ), 1, fp );
	fread ( &output_neurons_rows, sizeof ( size_t ), 1, fp );
	fread ( &output_neurons_cols, sizeof ( size_t ), 1, fp );

	if ( !( net->input_layer = som_input_layer_new ( input_neurons )))
	{
		free ( net );
		return NULL;
	}

	if ( !( net->output_layer = som_output_layer_new ( output_neurons_rows, output_neurons_cols )))
	{
		free ( net->input_layer );
		free ( net );
		return NULL;
	}

	for ( i=0; i < output_neurons_rows; i++ )
	{
		for ( j=0; j < output_neurons_cols; j++ )
		{
			for ( k=0; k < input_neurons; k++ )
			{
				fread ( &weight, sizeof ( double ), 1, fp );

				if ( !( som_synapsis_new ( net->input_layer->neurons[k], net->output_layer->neurons[i][j], weight )))
				{
					som_input_layer_destroy ( net );
					som_output_layer_destroy ( net );
					return NULL;
				}
			}
		}
	}

	return net;
} /* ----- end of function som_deserialize ----- */
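
Editor's side note, not part of the commit: as far as I can read som_learning_rate() and som_train_iteration() above, they implement the schedule and update below. T is fixed once per network through the W_{-1} branch of the Lambert W function so that the learning rate decays to the 0.01 cutoff after N iterations, and every weight is pulled towards the current input, damped by the fourth power of the Manhattan distance from the best-matching unit.

% Sketch of the math implemented above (my reading, not authoritative).
% M = peak learning rate, N = cutoff iteration, t = current iteration,
% d = Manhattan distance of neuron (i,j) from the best-matching unit (x,y).
\[
  \eta(t) = M \, \frac{t}{T} \, e^{\,1 - t/T},
  \qquad
  T = K\,e^{\,W_{-1}(-N/K)} = -\frac{N}{W_{-1}\!\bigl(-\tfrac{0.01}{M e}\bigr)},
  \qquad
  K = \frac{M N e}{0.01}
\]
\[
  w_{ij,k} \;\leftarrow\; w_{ij,k} + \frac{\eta(t)}{d^{4} + 1}\,\bigl( x_{k} - w_{ij,k} \bigr),
  \qquad
  d = |i - x| + |j - y|
\]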

67 fsom/fsom.h Normal file

@@ -0,0 +1,67 @@
/*
 * =====================================================================================
 *
 * Filename: neural_som.h
 *
 * Description: Header file for neural_som mini-library
 *
 * Version: 0.1
 * Created: 15/10/2010 15:31:50
 * Revision: none
 * Compiler: gcc
 *
 * Author: BlackLight (http://0x00.ath.cx), <blacklight@autistici.org>
 * Licence: GNU GPL v.3
 * Company: DO WHAT YOU WANT CAUSE A PIRATE IS FREE, YOU ARE A PIRATE!
 *
 * =====================================================================================
 */

#ifndef __NEURAL_SOM_H
#define __NEURAL_SOM_H

#include <stddef.h>
#include <time.h>

typedef struct {
	double output;
	double input;

	struct som_synapsis_s **synapses;
	size_t synapses_count;
} som_neuron_t;

typedef struct som_synapsis_s {
	som_neuron_t *neuron_in;
	som_neuron_t *neuron_out;
	double weight;
} som_synapsis_t;

typedef struct {
	som_neuron_t **neurons;
	size_t neurons_count;
} som_input_layer_t;

typedef struct {
	som_neuron_t ***neurons;
	size_t neurons_rows;
	size_t neurons_cols;
} som_output_layer_t;

typedef struct {
	som_input_layer_t *input_layer;
	som_output_layer_t *output_layer;
	double T_learning_param;
	time_t serialization_time;
} som_network_t;

void som_network_destroy ( som_network_t* );
void som_set_inputs ( som_network_t*, double* );
void som_train ( som_network_t*, double**, size_t, size_t );
void som_serialize ( som_network_t*, const char* );
double som_get_best_neuron_coordinates ( som_network_t*, size_t*, size_t* );
som_network_t* som_deserialize ( const char* fname );
som_network_t* som_network_new ( size_t, size_t, size_t );

#endif
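
For orientation, a minimal usage sketch of the API declared in fsom.h above. This is my own illustration, not part of the commit; the dataset, grid size, iteration count and output file name are made up.

/* Hypothetical example: train a small SOM on a few 4-dimensional vectors,
 * look up the best-matching unit for one of them, then serialize the
 * trained map to disk with the functions this commit adds. */
#include <stdio.h>
#include "fsom.h"

int main ( void )
{
	double v0[] = { 0.10, 0.20, 0.30, 0.40 };
	double v1[] = { 0.90, 0.80, 0.70, 0.60 };
	double v2[] = { 0.15, 0.25, 0.35, 0.45 };
	double v3[] = { 0.85, 0.75, 0.65, 0.55 };
	double *dataset[] = { v0, v1, v2, v3 };

	size_t x = 0, y = 0;

	/* 4 input neurons, 8x8 output grid */
	som_network_t *net = som_network_new ( 4, 8, 8 );

	if ( !net )
		return 1;

	/* 4 training vectors, at most 100 iterations each */
	som_train ( net, dataset, 4, 100 );

	/* Best-matching unit for v0 */
	som_set_inputs ( net, v0 );
	som_get_best_neuron_coordinates ( net, &x, &y );
	printf ( "BMU for v0: (%lu, %lu)\n", (unsigned long) x, (unsigned long) y );

	/* Save the weights; som_deserialize() can load them back later */
	som_serialize ( net, "som.dat" );
	som_network_destroy ( net );
	return 0;
}

It should build against the new files with something like gcc example.c fsom/fsom.c -lm.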

@@ -220,6 +220,10 @@ window.onload = function() {
 var to_gid = json[correlationToIndex].snortGID;
 var to_rev = json[correlationToIndex].snortREV;
 
+// If I'm correlating an alert to itself, STOP!!
+if ( from_sid == to_sid && from_gid == to_gid && from_rev == to_rev )
+	return;
+
 var corr_req = new XMLHttpRequest();
 corr_req.open ( 'GET', 'http://' + window.location.host +
 '/correlate.cgi?' +

@@ -628,7 +628,7 @@ AI_webserv_thread ( void *arg )
 {
 int on = 1,
 sd,
-sockaddr_size;
+sockaddr_size = sizeof ( struct sockaddr );
 
 struct sockaddr_in addr;
 pthread_t servlet_thread;