B. Source Code

B.1 The library


B.1.1 fann.h

/* This file defines the user interface to the fann library.
It is included from fixedfann.h, floatfann.h and doublefann.h and should
NOT be included directly.
*/

#include "fann_data.h"
#include "fann_internal.h"

#ifdef __cplusplus
extern "C" {
#endif


/* --- Initialisation and configuration --- */

/* Constructs a backpropagation neural network from a connection rate,
a learning rate, the number of layers and the number of neurons in each
of the layers.

The connection rate controls how many connections there will be in the
network. If the connection rate is set to 1, the network will be fully
connected, but if it is set to 0.5, only half of the connections will be set.

There will be a bias neuron in each layer (except the output layer),
and this bias neuron will be connected to all neurons in the next layer.
When running the network, the bias neurons always emit 1.
*/
struct fann * fann_create(float connection_rate, float learning_rate,
    /* the number of layers, including the input and output layer */
    unsigned int num_layers,
    /* the number of neurons in each of the layers, starting with
    the input layer and ending with the output layer */
    ...);

/* Constructs a backpropagation neural network from a configuration file.
*/
struct fann * fann_create_from_file(const char *configuration_file);

/* Destructs the entire network.
Be sure to call this function when you have finished using the network.
*/
void fann_destroy(struct fann *ann);

/* Saves the entire network to a configuration file.
*/
void fann_save(struct fann *ann, const char *configuration_file);

/* Saves the entire network to a configuration file.
It is saved in fixed point format no matter which
format it is currently in.

This is useful for training a network in floating point,
and then later executing it in fixed point.

The function returns the bit position of the fix point, which
can be used to find out how accurate the fixed point network will be.
A high value indicates high precision, and a low value indicates low
precision.

A negative value indicates very low precision, and a very
strong possibility for overflow.
(The actual fix point will be set to 0, since a negative
fix point does not make sense.)

Generally, a fix point lower than 6 is bad, and should be avoided.
The best way to avoid this is to have fewer connections to each neuron,
or just fewer neurons in each layer.

The fixed point use of this network is only intended for use on machines that
have no floating point processor, like an iPAQ. On normal computers the floating
point version is actually faster.
*/
int fann_save_to_fixed(struct fann *ann, const char *configuration_file);
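/* Worked example (comment added, not part of the original header): if
fann_save_to_fixed() returns 10, fixed point values are stored multiplied
by 2^10 = 1024, so a weight of 0.5 is stored as 512 and the smallest
representable step is 1/1024, roughly 0.001. */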

/* --- Some stuff to set options on the network on the fly. --- */

/* Set the learning rate.
*/
void fann_set_learning_rate(struct fann *ann, float learning_rate);

/* The possible activation functions.
Threshold cannot be used when training the network.
*/
#define FANN_SIGMOID 1
#define FANN_THRESHOLD 2

/* Set the activation function for the hidden layers (default SIGMOID).
*/
void fann_set_activation_function_hidden(struct fann *ann, unsigned int activation_function);

/* Set the activation function for the output layer (default SIGMOID).
*/
void fann_set_activation_function_output(struct fann *ann, unsigned int activation_function);

/* Set the steepness of the sigmoid function used in the hidden layers.
Only useful if the sigmoid function is used in the hidden layers (default 0.5).
*/
void fann_set_activation_hidden_steepness(struct fann *ann, fann_type steepness);

/* Set the steepness of the sigmoid function used in the output layer.
Only useful if the sigmoid function is used in the output layer (default 0.5).
*/
void fann_set_activation_output_steepness(struct fann *ann, fann_type steepness);

/* --- Some stuff to read network options from the network. --- */

/* Get the learning rate.
*/
float fann_get_learning_rate(struct fann *ann);

/* Get the number of input neurons.
*/
unsigned int fann_get_num_input(struct fann *ann);

/* Get the number of output neurons.
*/
unsigned int fann_get_num_output(struct fann *ann);

/* Get the activation function used in the hidden layers.
*/
unsigned int fann_get_activation_function_hidden(struct fann *ann);

/* Get the activation function used in the output layer.
*/
unsigned int fann_get_activation_function_output(struct fann *ann);

/* Get the steepness parameter for the sigmoid function used in the hidden layers.
*/
fann_type fann_get_activation_hidden_steepness(struct fann *ann);

/* Get the steepness parameter for the sigmoid function used in the output layer.
*/
fann_type fann_get_activation_output_steepness(struct fann *ann);

/* Get the total number of neurons in the entire network.
*/
unsigned int fann_get_total_neurons(struct fann *ann);

/* Get the total number of connections in the entire network.
*/
unsigned int fann_get_total_connections(struct fann *ann);

/* Randomize weights (from the beginning the weights are random between -0.1 and 0.1).
*/
void fann_randomize_weights(struct fann *ann, fann_type min_weight, fann_type max_weight);

/* --- Training --- */

#ifndef FIXEDFANN
/* Train one iteration with a set of inputs and a set of desired outputs.
*/
void fann_train(struct fann *ann, fann_type *input, fann_type *desired_output);
#endif

/* Test with a set of inputs and a set of desired outputs.
This operation updates the mean square error, but does not
change the network in any way.
*/
fann_type *fann_test(struct fann *ann, fann_type *input, fann_type *desired_output);

/* Reads a file that stores training data, in the format:
num_train_data num_input num_output\n
input data separated by space\n
output data separated by space\n

.
.
.

input data separated by space\n
output data separated by space\n
*/
struct fann_train_data* fann_read_train_from_file(char *filename);
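/* Illustrative example (comment added, not part of the original header):
a 4-pattern XOR training file with 2 inputs and 1 output could look like:

4 2 1
0 0
0
0 1
1
1 0
1
1 1
0
*/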

/* Destructs the training data.
Be sure to call this function when you have finished using the training data.
*/
void fann_destroy_train(struct fann_train_data* train_data);

#ifndef FIXEDFANN
/* Trains on an entire dataset, for a maximum of max_epochs
epochs or until the mean square error is lower than desired_error.
A report on progress is given every
epochs_between_reports epochs.
If epochs_between_reports is zero, no reports are given.
*/
void fann_train_on_data(struct fann *ann, struct fann_train_data *data, unsigned int max_epochs, unsigned int epochs_between_reports, float desired_error);

/* Does the same as fann_train_on_data, but reads the data directly from a file.
*/
void fann_train_on_file(struct fann *ann, char *filename, unsigned int max_epochs, unsigned int epochs_between_reports, float desired_error);
#endif

/* Saves the training structure to a file.
*/
void fann_save_train(struct fann_train_data* data, char *filename);

/* Saves the training structure to a fixed point data file.
(Very useful for testing the quality of a fixed point network.)
*/
void fann_save_train_to_fixed(struct fann_train_data* data, char *filename, unsigned int decimal_point);

/* Reads the mean square error from the network.
*/
float fann_get_error(struct fann *ann);

/* Resets the mean square error of the network.
*/
void fann_reset_error(struct fann *ann);

/* --- Running --- */

/* Runs an input through the network, and returns the output.
*/
fann_type* fann_run(struct fann *ann, fann_type *input);

#ifdef FIXEDFANN

/* Returns the position of the decimal point.
*/
unsigned int fann_get_decimal_point(struct fann *ann);

/* Returns the multiplier that fixed point data is multiplied with.
*/
unsigned int fann_get_multiplier(struct fann *ann);
#endif

#ifdef __cplusplus
}
#endif
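
A minimal usage sketch of the interface above (added for illustration; it condenses what xor_train.c and xor_test.c in Section B.2 do, and the file names and parameter values are simply taken from those programs):

#include <stdio.h>
#include "floatfann.h"

int main()
{
    fann_type input[2] = {0, 1};
    fann_type *out;

    /* fully connected (rate 1), learning rate 0.7, 3 layers: 2 in, 4 hidden, 1 out */
    struct fann *ann = fann_create(1, 0.7, 3, 2, 4, 1);

    /* train for at most 500000 epochs, report every 1000 epochs,
       stop when the mean square error drops below 0.0001 */
    fann_train_on_file(ann, "xor.data", 500000, 1000, 0.0001);

    out = fann_run(ann, input);
    printf("xor(0,1) = %f\n", out[0]);

    fann_save(ann, "xor_float.net");
    fann_destroy(ann);
    return 0;
}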


B.1.2 fann_data.h

#ifndef __fann_data_h__
#define __fann_data_h__

/* --- Data structures ---
 * No data within these structures should be altered directly by the user.
 */

/* A single neuron in the network (descriptive comment added; the field roles
   are inferred from how the structure is used elsewhere in the library):
   weights and connected_neurons describe the incoming connections of the
   neuron, num_connections is the number of incoming connections, and value
   holds the output of the neuron after the activation function is applied.
*/
struct fann_neuron
{
    fann_type *weights;
    struct fann_neuron **connected_neurons;
    unsigned int num_connections;
    fann_type value;
}__attribute__((packed));

/* A single layer in the neural network.
*/
struct fann_layer
{
    /* A pointer to the first neuron in the layer.
     * When allocated, all the neurons in all the layers are actually
     * in one long array; this is because we want to easily clear all
     * the neurons at once.
     */
    struct fann_neuron *first_neuron;

    /* A pointer to the neuron past the last neuron in the layer */
    /* the number of neurons is last_neuron - first_neuron */
    struct fann_neuron *last_neuron;
};

/* The fast artificial neural network (fann) structure.
*/
struct fann
{
    /* the learning rate of the network */
    float learning_rate;

    /* the connection rate of the network
     * between 0 and 1, 1 meaning fully connected
     */
    float connection_rate;

    /* pointer to the first layer (input layer) in an array of all the layers,
     * including the input and output layers
     */
    struct fann_layer *first_layer;

    /* pointer to the layer past the last layer in an array of all the layers,
     * including the input and output layers
     */
    struct fann_layer *last_layer;

    /* Total number of neurons.
     * Very useful, because the actual neurons are allocated in one long array.
     */
    unsigned int total_neurons;

    /* Number of input neurons (not counting bias) */
    unsigned int num_input;

    /* Number of output neurons (not counting bias) */
    unsigned int num_output;

    /* Used to contain the error deltas used during training.
     * It is allocated during the first training session,
     * which means that if we do not train, it is never allocated.
     */
    fann_type *train_deltas;

    /* Used to choose which activation function to use.

       Sometimes it can be smart to set the activation function for the hidden
       neurons to THRESHOLD and the activation function for the output neurons
       to SIGMOID; in this way you get a very fast network that is still
       capable of producing real valued output.
     */
    unsigned int activation_function_hidden, activation_function_output;

    /* Parameters for the activation function */
    fann_type activation_hidden_steepness;
    fann_type activation_output_steepness;

#ifdef FIXEDFANN
    /* the decimal point, used for shifting the fix point
       in fixed point integer operations.
     */
    unsigned int decimal_point;

    /* the multiplier, used for multiplying the fix point
       in fixed point integer operations.
       Only used in special cases, since the decimal_point is much faster.
     */
    unsigned int multiplier;

    /* When in fixed point, the sigmoid function is calculated as a stepwise
       linear function. In the activation_results array the results are saved,
       and in the two values arrays the values that give the results are saved.
     */
    fann_type activation_results[6];
    fann_type activation_hidden_values[6];
    fann_type activation_output_values[6];

#endif

    /* Total number of connections.
     * Very useful, because the actual connections
     * are allocated in one long array.
     */
    unsigned int total_connections;

    /* used to store outputs in */
    fann_type *output;

    /* the number of data used to calculate the error.
     */
    unsigned int num_errors;

    /* the total error value.
       The real mean square error is error_value/num_errors.
     */
    float error_value;
};

/* Structure used to store data, for use with training. */
struct fann_train_data
{
    unsigned int num_data;
    unsigned int num_input;
    unsigned int num_output;
    fann_type **input;
    fann_type **output;
};

#endif


B.1.3 floatfann.h

#ifndef __floatfann_h__
#define __floatfann_h__

typedef float fann_type;
#define FLOATFANN
#define FANNPRINTF "%.20e"
#define FANNSCANF "%f"

#include "fann.h"

#endif


B.1.4 doublefann.h

#ifndef __doublefann_h__
#define __doublefann_h__

typedef double fann_type;
#define DOUBLEFANN
#define FANNPRINTF "%.20e"
#define FANNSCANF "%le"

#include "fann.h"

#endif


B.1.5 fixedfann.h

#ifndef __fixedfann_h__
#define __fixedfann_h__

typedef int fann_type;
#define FIXEDFANN
#define FANNPRINTF "%d"
#define FANNSCANF "%d"

#include "fann.h"

#endif
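
Note (added for clarity): an application includes exactly one of floatfann.h, doublefann.h or fixedfann.h, never fann.h directly. The chosen header fixes fann_type to float, double or int, defines FLOATFANN, DOUBLEFANN or FIXEDFANN, and supplies matching printf/scanf format strings before pulling in fann.h, which is how the same source can be compiled in all three precisions (the test programs below have the include supplied by their makefile).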


B.1.6 fann_internal.h

#ifndef __fann_internal_h__
#define __fann_internal_h__
/* internal include file, not to be included directly
*/

#include <math.h>
#include "fann_data.h"

#define FANN_FIX_VERSION "FANN_FIX_0.1"
#define FANN_FLO_VERSION "FANN_FLO_0.1"

#ifdef FIXEDFANN
#define FANN_VERSION FANN_FIX_VERSION
#else
#define FANN_VERSION FANN_FLO_VERSION
#endif

struct fann * fann_allocate_structure(float learning_rate, unsigned int num_layers);
void fann_allocate_neurons(struct fann *ann);

void fann_allocate_connections(struct fann *ann);

int fann_save_internal(struct fann *ann, const char *configuration_file, unsigned int save_as_fixed);
void fann_save_train_internal(struct fann_train_data* data, char *filename, unsigned int save_as_fixed, unsigned int decimal_point);

int fann_compare_connections(const void* c1, const void* c2);
void fann_seed_rand();

/* called fann_max, in order not to interfere with predefined versions of max */
#define fann_max(x, y) (((x) > (y)) ? (x) : (y))
#define fann_min(x, y) (((x) < (y)) ? (x) : (y))

#define fann_rand(min_value, max_value) (((double)(min_value))+(((double)(max_value)-((double)(min_value)))*rand()/(RAND_MAX+1.0)))

#define fann_abs(value) (((value) > 0) ? (value) : -(value))

#ifdef FIXEDFANN

#define fann_mult(x,y) ((x*y) >> decimal_point)
#define fann_div(x,y) (((x) << decimal_point)/y)
#define fann_random_weight() (fann_type)(fann_rand(-multiplier/10,multiplier/10))
/* sigmoid calculated with use of floats, only as reference */
#define fann_sigmoid(steepness, value) ((fann_type)(0.5+((1.0/(1.0 + exp(-2.0 * ((float)steepness/multiplier) * ((float)value/multiplier))))*multiplier)))
/* sigmoid as a stepwise linear function */
#define fann_linear(v1, r1, v2, r2, value) ((((r2-r1) * (value-v1))/(v2-v1)) + r1)
#define fann_sigmoid_stepwise(v1, v2, v3, v4, v5, v6, r1, r2, r3, r4, r5, r6, value, multiplier) (value < v5 ? (value < v3 ? (value < v2 ? (value < v1 ? 0 : fann_linear(v1, r1, v2, r2, value)) : fann_linear(v2, r2, v3, r3, value)) : (value < v4 ? fann_linear(v3, r3, v4, r4, value) : fann_linear(v4, r4, v5, r5, value))) : (value < v6 ? fann_linear(v5, r5, v6, r6, value) : multiplier))
#else

#define fann_mult(x,y) (x*y)
#define fann_div(x,y) (x/y)
#define fann_random_weight() (fann_rand(-0.1,0.1))
#define fann_sigmoid(steepness, value) (1.0/(1.0 + exp(-2.0 * steepness * value)))
#define fann_sigmoid_derive(steepness, value) (2.0 * steepness * value * (1.0 - value))

#endif

#endif
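
As a reading aid (note added): the floating point macros above implement the sigmoid sigmoid(x) = 1/(1 + exp(-2*s*x)) and its derivative 2*s*y*(1-y), where s is the steepness parameter and y is the sigmoid output. The fixed point fann_sigmoid_stepwise macro approximates the same curve by linear interpolation between six precomputed points, clamping to 0 below the first point and to the multiplier above the last.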

B.2 Test programs


B.2.1 xor_train.c

#include <stdio.h>

/* In this file I do not need to include floatfann or doublefann,
because it is included in the makefile. Normally you would need
to do a #include "floatfann.h".
*/

int main()
{
    fann_type *calc_out;
    const float connection_rate = 1;
    const float learning_rate = 0.7;
    const unsigned int num_input = 2;
    const unsigned int num_output = 1;
    const unsigned int num_layers = 3;
    const unsigned int num_neurons_hidden = 4;
    const float desired_error = 0.0001;
    const unsigned int max_iterations = 500000;
    const unsigned int iterations_between_reports = 1000;
    struct fann *ann;
    struct fann_train_data *data;

    unsigned int i = 0;
    unsigned int decimal_point;

    printf("Creating network.\n");

    ann = fann_create(connection_rate, learning_rate, num_layers,
        num_input,
        num_neurons_hidden,
        num_output);

    printf("Training network.\n");

    data = fann_read_train_from_file("xor.data");
    fann_train_on_data(ann, data, max_iterations, iterations_between_reports, desired_error);

    printf("Testing network.\n");

    for(i = 0; i < data->num_data; i++){
        calc_out = fann_run(ann, data->input[i]);
        printf("XOR test (%f,%f) -> %f, should be %f, difference=%f\n",
            data->input[i][0], data->input[i][1], *calc_out, data->output[i][0], fann_abs(*calc_out - data->output[i][0]));
    }

    printf("Saving network.\n");

    fann_save(ann, "xor_float.net");

    decimal_point = fann_save_to_fixed(ann, "xor_fixed.net");
    fann_save_train_to_fixed(data, "xor_fixed.data", decimal_point);

    printf("Cleaning up.\n");
    fann_destroy_train(data);
    fann_destroy(ann);

    return 0;
}


B.2.2 xor_test.c

#include <time.h>
#include <sys/time.h>
#include <stdio.h>

/* In this file I do not need to include fixedfann, floatfann or doublefann,
because it is included in the makefile. Normally you would need
to do a #include "floatfann.h" or #include "fixedfann.h".
*/

int main()
{
    fann_type *calc_out;
    unsigned int i;
    int ret = 0;

    struct fann *ann;
    struct fann_train_data *data;

    printf("Creating network.\n");

#ifdef FIXEDFANN
    ann = fann_create_from_file("xor_fixed.net");
#else
    ann = fann_create_from_file("xor_float.net");
#endif

    if(!ann){
        printf("Error creating ann -- ABORTING.\n");
        return 0;
    }

    printf("Testing network.\n");

#ifdef FIXEDFANN
    data = fann_read_train_from_file("xor_fixed.data");
#else
    data = fann_read_train_from_file("xor.data");
#endif

    for(i = 0; i < data->num_data; i++){
        fann_reset_error(ann);
        calc_out = fann_test(ann, data->input[i], data->output[i]);
#ifdef FIXEDFANN
        printf("XOR test (%d, %d) -> %d, should be %d, difference=%f\n",
            data->input[i][0], data->input[i][1], *calc_out, data->output[i][0], (float)fann_abs(*calc_out - data->output[i][0])/fann_get_multiplier(ann));

        if((float)fann_abs(*calc_out - data->output[i][0])/fann_get_multiplier(ann) > 0.1){
            printf("Test failed\n");
            ret = -1;
        }
#else
        printf("XOR test (%f, %f) -> %f, should be %f, difference=%f\n",
            data->input[i][0], data->input[i][1], *calc_out, data->output[i][0], (float)fann_abs(*calc_out - data->output[i][0]));
#endif
    }

    printf("Cleaning up.\n");
    fann_destroy(ann);

    return ret;
}


B.2.3 steepness_train.c

/* In this file I do not need to include floatfann or doublefann,
because it is included in the makefile. Normally you would need
to do a #include "floatfann.h".
*/

#include <stdio.h>

void train_on_steepness_file(struct fann *ann, char *filename,
    unsigned int max_epochs, unsigned int epochs_between_reports,
    float desired_error, float steepness_start,
    float steepness_step, float steepness_end)
{
    float error;
    unsigned int i, j;

    struct fann_train_data *data = fann_read_train_from_file(filename);
    if(epochs_between_reports){
        printf("Max epochs %8d. Desired error: %.10f\n",
            max_epochs, desired_error);
    }

    fann_set_activation_hidden_steepness(ann, steepness_start);
    fann_set_activation_output_steepness(ann, steepness_start);
    for(i = 1; i <= max_epochs; i++){
        /* train */
        fann_reset_error(ann);

        for(j = 0; j != data->num_data; j++){
            fann_train(ann, data->input[j], data->output[j]);
        }

        error = fann_get_error(ann);

        /* print current output */
        if(epochs_between_reports &&
            (i % epochs_between_reports == 0
            || i == max_epochs
            || i == 1
            || error < desired_error)){
            printf("Epochs %8d. Current error: %.10f\n", i, error);
        }

        if(error < desired_error){
            steepness_start += steepness_step;
            if(steepness_start <= steepness_end){
                printf("Steepness: %f\n", steepness_start);
                fann_set_activation_hidden_steepness(ann, steepness_start);
                fann_set_activation_output_steepness(ann, steepness_start);
            }else{
                break;
            }
        }
    }
    fann_destroy_train(data);
}

int main()
{
    const float connection_rate = 1;
    const float learning_rate = 0.7;
    const unsigned int num_input = 2;
    const unsigned int num_output = 1;
    const unsigned int num_layers = 3;
    const unsigned int num_neurons_hidden = 4;
    const float desired_error = 0.0001;
    const unsigned int max_iterations = 500000;
    const unsigned int iterations_between_reports = 1000;
    unsigned int i;
    fann_type *calc_out;

    struct fann_train_data *data;

    struct fann *ann = fann_create(connection_rate,
        learning_rate, num_layers,
        num_input, num_neurons_hidden, num_output);

    data = fann_read_train_from_file("xor.data");

    train_on_steepness_file(ann, "xor.data", max_iterations,
        iterations_between_reports, desired_error, 0.5, 0.1, 20.0);

    fann_set_activation_function_hidden(ann, FANN_THRESHOLD);
    fann_set_activation_function_output(ann, FANN_THRESHOLD);

    for(i = 0; i != data->num_data; i++){
        calc_out = fann_run(ann, data->input[i]);
        printf("XOR test (%f, %f) -> %f, should be %f, difference=%f\n",
            data->input[i][0], data->input[i][1], *calc_out, data->output[i][0],
            (float)fann_abs(*calc_out - data->output[i][0]));
    }


    fann_save(ann, "xor_float.net");

    fann_destroy(ann);
    fann_destroy_train(data);

    return 0;
}

B.3 Benchmark programs


B.3.1 quality.cc

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

#include "nets/backprop.h"
#include "ctimer.h"
#include "lwneuralnet.h"

unsigned int num_errors = 0;
double error_value = 0;

void clear_error()
{
    num_errors = 0;
    error_value = 0;
}

void update_error(fann_type *output, fann_type *desired_output, unsigned int num_output)
{
    unsigned int i = 0;
    /* calculate the error */
    for(i = 0; i < num_output; i++){
        error_value += (desired_output[i] - output[i]) * (desired_output[i] - output[i]);
    }
    num_errors++;
}

double mean_error()
{
    return error_value/(double)num_errors;
}
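// Note (added): mean_error() returns error_value/num_errors, i.e. the squared
// error summed over the outputs of each pattern and averaged over the patterns,
// mirroring the error_value/num_errors convention documented in fann_data.h.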


void quality_benchmark_jneural(
    struct fann_train_data *train_data,
    struct fann_train_data *test_data,
    FILE *train_out, FILE *test_out,
    unsigned int num_input, unsigned int num_neurons_hidden1,
    unsigned int num_neurons_hidden2, unsigned int num_output,
    unsigned int seconds_of_training, unsigned int seconds_between_reports)
{
    float train_error, test_error;
    unsigned int i;
    unsigned int epochs = 0;
    double elapsed = 0;
    double total_elapsed = 0;
    fann_type *output;
    struct backprop *ann;

    if(num_neurons_hidden2){
        ann = new backprop(0.7, num_input, num_output, 2, num_neurons_hidden1, num_neurons_hidden2);
    }else{
        ann = new backprop(0.7, num_input, num_output, 1, num_neurons_hidden1);
    }

    calibrate_timer();

    while(total_elapsed < (double)seconds_of_training){
        /* train */
        elapsed = 0;
        start_timer();
        while(elapsed < (double)seconds_between_reports){
            for(i = 0; i != train_data->num_data; i++){
                ann->set_input(train_data->input[i]);
                ann->train_on(train_data->output[i]);
            }

            elapsed = time_elapsed();
            epochs++;
        }
        stop_timer();
        total_elapsed += getSecs();

        /* make report */
        clear_error();
        for(i = 0; i != train_data->num_data; i++){
            ann->set_input(train_data->input[i]);
            output = ann->query_output();
            update_error(output, train_data->output[i], train_data->num_output);
        }
        train_error = mean_error();

        clear_error();
        for(i = 0; i != test_data->num_data; i++){
            ann->set_input(test_data->input[i]);
            output = ann->query_output();
            update_error(output, test_data->output[i], test_data->num_output);
        }
        test_error = mean_error();

        fprintf(train_out, "%f %.20e %d\n", total_elapsed, train_error, epochs);
        fprintf(test_out, "%f %.20e %d\n", total_elapsed, test_error, epochs);
        fprintf(stderr, ".");
    }

    delete ann;
}

void quality_benchmark_fann(float connection_rate,
    char *filename,
    struct fann_train_data *train_data,
    struct fann_train_data *test_data,
    FILE *train_out, FILE *test_out,
    unsigned int num_input, unsigned int num_neurons_hidden1,
    unsigned int num_neurons_hidden2, unsigned int num_output,
    unsigned int seconds_of_training, unsigned int seconds_between_reports)
{
    float train_error, test_error;
    unsigned int i, decimal_point;
    unsigned int epochs = 0;
    double elapsed = 0;
    double total_elapsed = 0;
    fann_type *output;
    struct fann *ann;
    char fixed_point_file[256];

    if(num_neurons_hidden2){
        ann = fann_create(connection_rate, 0.7, 4,
            num_input, num_neurons_hidden1, num_neurons_hidden2, num_output);
    }else{
        ann = fann_create(connection_rate, 0.7, 3,
            num_input, num_neurons_hidden1, num_output);
    }

    calibrate_timer();

    while(total_elapsed < (double)seconds_of_training){
        /* train */
        elapsed = 0;
        start_timer();
        while(elapsed < (double)seconds_between_reports){
            for(i = 0; i != train_data->num_data; i++){
                fann_train(ann, train_data->input[i], train_data->output[i]);
            }

            elapsed = time_elapsed();
            epochs++;
        }
        stop_timer();
        total_elapsed += getSecs();

        /* make report */
        clear_error();
        for(i = 0; i != train_data->num_data; i++){
            output = fann_run(ann, train_data->input[i]);
            update_error(output, train_data->output[i], train_data->num_output);
        }
        train_error = mean_error();

        clear_error();
        for(i = 0; i != test_data->num_data; i++){
            output = fann_run(ann, test_data->input[i]);
            update_error(output, test_data->output[i], test_data->num_output);
        }
        test_error = mean_error();

        fprintf(train_out, "%f %.20e %d\n", total_elapsed, train_error, epochs);
        fprintf(test_out, "%f %.20e %d\n", total_elapsed, test_error, epochs);
        fprintf(stderr, ".");

        /* Save the data as fixed point, to allow for drawing of
           a fixed point graph */
        if(connection_rate == 1){
            /* buffer overflow could occur here */
            sprintf(fixed_point_file, "%05d_%f_%s_fixed", epochs, total_elapsed, filename);
            decimal_point = fann_save_to_fixed(ann, fixed_point_file);

            sprintf(fixed_point_file, "%s_fixed_train_%d", filename, decimal_point);
            fann_save_train_to_fixed(train_data, fixed_point_file, decimal_point);

            sprintf(fixed_point_file, "%s_fixed_test_%d", filename, decimal_point);
            fann_save_train_to_fixed(test_data, fixed_point_file, decimal_point);
        }
    }

    fann_destroy(ann);
}

void quality_benchmark_lwnn(
    struct fann_train_data *train_data,
    struct fann_train_data *test_data,
    FILE *train_out, FILE *test_out,
    unsigned int num_input, unsigned int num_neurons_hidden1,
    unsigned int num_neurons_hidden2, unsigned int num_output,
    unsigned int seconds_of_training, unsigned int seconds_between_reports)
{
    float train_error = 0;
    float test_error = 0;
    unsigned int i;
    unsigned int epochs = 0;
    double elapsed = 0;
    double total_elapsed = 0;
    fann_type *output;
    network_t *ann;

    if(num_neurons_hidden2){
        ann = net_allocate(4, num_input, num_neurons_hidden1, num_neurons_hidden2, num_output);
    }else{
        ann = net_allocate(3, num_input, num_neurons_hidden1, num_output);
    }

    net_set_learning_rate(ann, 0.7);

    calibrate_timer();

    output = (fann_type *)calloc(num_output, sizeof(fann_type));

    while(total_elapsed < (double)seconds_of_training){
        /* train */
        elapsed = 0;
        start_timer();
        while(elapsed < (double)seconds_between_reports){
            for(i = 0; i != train_data->num_data; i++){
                /* compute the outputs for inputs(i) */
                net_compute(ann, train_data->input[i], output);

                /* find the error with respect to targets(i) */
                net_compute_output_error(ann, train_data->output[i]);

                /* train the network one step */
                net_train(ann);
            }

            elapsed = time_elapsed();
            epochs++;
        }
        stop_timer();
        total_elapsed += getSecs();

        /* make report */

        clear_error();
        for(i = 0; i != train_data->num_data; i++){
            net_compute(ann, train_data->input[i], output);
            update_error(output, train_data->output[i], train_data->num_output);
        }
        train_error = mean_error();

        clear_error();
        for(i = 0; i != test_data->num_data; i++){
            net_compute(ann, test_data->input[i], output);
            update_error(output, test_data->output[i], test_data->num_output);
        }
        test_error = mean_error();


        fprintf(train_out, "%f %.20e %d\n", total_elapsed, train_error, epochs);
        fprintf(test_out, "%f %.20e %d\n", total_elapsed, test_error, epochs);
        fprintf(stderr, ".");
    }

    net_free(ann);
}

int main(int argc, char* argv[])
{
    /* parameters */
    unsigned int num_neurons_hidden1;
    unsigned int num_neurons_hidden2;
    unsigned int seconds_of_training;
    unsigned int seconds_between_reports;

    struct fann_train_data *train_data, *test_data;
    FILE *train_out, *test_out;

    if(argc != 10){
        printf("usage %s net train_file test_file train_file_out test_file_out num_hidden1 num_hidden2 seconds_of_training seconds_between_reports\n", argv[0]);
        return -1;
    }

    num_neurons_hidden1 = atoi(argv[6]);
    num_neurons_hidden2 = atoi(argv[7]);
    seconds_of_training = atoi(argv[8]);
    seconds_between_reports = atoi(argv[9]);

    train_data = fann_read_train_from_file(argv[2]);
    test_data = fann_read_train_from_file(argv[3]);

    if(strlen(argv[4]) == 1 && argv[4][0] == '-'){
        train_out = stdout;
    }else{
        train_out = fopen(argv[4], "w");
    }

    if(strlen(argv[5]) == 1 && argv[5][0] == '-'){
        test_out = stdout;
    }else{
        test_out = fopen(argv[5], "w");
    }

    fprintf(stderr, "Quality test of %s %s ", argv[1], argv[2]);

    if(strcmp(argv[1], "lwnn") == 0){
        quality_benchmark_lwnn(train_data, test_data,
            train_out, test_out,
            train_data->num_input, num_neurons_hidden1,
            num_neurons_hidden2, train_data->num_output,
            seconds_of_training, seconds_between_reports);
    }else if(strcmp(argv[1], "fann") == 0){
        quality_benchmark_fann(1, argv[4], train_data, test_data,
            train_out, test_out,
            train_data->num_input, num_neurons_hidden1,
            num_neurons_hidden2, train_data->num_output,
            seconds_of_training, seconds_between_reports);
    }else if(strcmp(argv[1], "fann_half") == 0){
        quality_benchmark_fann(0.75, NULL, train_data, test_data,
            train_out, test_out,
            train_data->num_input, num_neurons_hidden1,
            num_neurons_hidden2, train_data->num_output,
            seconds_of_training, seconds_between_reports);
    }else if(strcmp(argv[1], "jneural") == 0){
        quality_benchmark_jneural(train_data, test_data,
            train_out, test_out,
            train_data->num_input, num_neurons_hidden1,
            num_neurons_hidden2, train_data->num_output,
            seconds_of_training, seconds_between_reports);
    }

    fprintf(stderr, "\n");

    fann_destroy_train(train_data);
    fann_destroy_train(test_data);

    return 0;
}


B.3.2 quality_fixed.c

#include <stdio.h>
#include <string.h> /* for strlen (include added) */
#include "fixedfann.h"

int main(int argc, char* argv[])
{
    struct fann_train_data *train_data, *test_data;
    FILE *train_out, *test_out;
    struct fann *ann;
    float train_error, test_error;
    unsigned int i, j;
    unsigned int epochs = 0;
    double total_elapsed = 0;
    char file[256];

    if(argc < 6){
        printf("usage %s train_file test_file train_file_out test_file_out fixed_conf_files\n", argv[0]);
        return -1;
    }

    if(strlen(argv[3]) == 1 && argv[3][0] == '-'){
        train_out = stdout;
    }else{
        train_out = fopen(argv[3], "w");
    }

    if(strlen(argv[4]) == 1 && argv[4][0] == '-'){
        test_out = stdout;
    }else{
        test_out = fopen(argv[4], "w");
    }

    for(j = 5; j < argc; j++){
        ann = fann_create_from_file(argv[j]);

        sprintf(file, "%s_%d", argv[1], fann_get_decimal_point(ann));
        train_data = fann_read_train_from_file(file);

        sprintf(file, "%s_%d", argv[2], fann_get_decimal_point(ann));
        test_data = fann_read_train_from_file(file);

        fann_reset_error(ann);
        for(i = 0; i != train_data->num_data; i++){
            fann_test(ann, train_data->input[i], train_data->output[i]);
        }
        train_error = fann_get_error(ann);

        fann_reset_error(ann);
        for(i = 0; i != test_data->num_data; i++){
            fann_test(ann, test_data->input[i], test_data->output[i]);
        }
        test_error = fann_get_error(ann);

        /* the epoch count and elapsed time are encoded in the name of the
           fixed point configuration file written by quality.cc */
        sscanf(argv[j], "%d_%lf", &epochs, &total_elapsed);
        fprintf(train_out, "%f %.20e %d\n", total_elapsed, train_error, epochs);
        fprintf(test_out, "%f %.20e %d\n", total_elapsed, test_error, epochs);
        fprintf(stderr, ".");

        fann_destroy(ann);
    }

    return 0;
}


B.3.3 performance.cc

#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include "ctimer.h"

#ifndef FIXEDFANN

#include "nets/backprop.h"
#include "lwneuralnet.h"

void performance_benchmark_jneural(FILE *out, fann_type *input,
    unsigned int num_neurons, unsigned int seconds_per_test)
{
    unsigned int i, total_connections;
    fann_type *output;

    backprop *ann = new backprop(0.7, num_neurons, num_neurons, 2, num_neurons, num_neurons);

    total_connections = (num_neurons+1) * num_neurons * 3;

    start_timer();

    for(i = 0; time_elapsed() < (double)seconds_per_test; i++){
        ann->set_input(input);
        output = ann->query_output();
    }

    stop_timer();

    fprintf(out, "%d %.20e\n", num_neurons, getNanoPerN(i)/total_connections);
    fprintf(stderr, "%d ", num_neurons);

    delete ann;
}

void performance_benchmark_lwnn(FILE *out, fann_type *input,
    unsigned int num_neurons, unsigned int seconds_per_test)
{
    unsigned int i, total_connections;
    fann_type *output;

    output = (fann_type*)calloc(num_neurons, sizeof(fann_type));
    network_t *ann = net_allocate(4, num_neurons, num_neurons, num_neurons, num_neurons);

    total_connections = num_neurons * num_neurons * 3;

    start_timer();

    for(i = 0; time_elapsed() < (double)seconds_per_test; i++){
        net_compute(ann, input, output);
    }

    stop_timer();

    fprintf(out, "%d %.20e\n", num_neurons, getNanoPerN(i)/total_connections);
    fprintf(stderr, "%d ", num_neurons);

    net_free(ann);
    free(output);
}

void performance_benchmark_fann_noopt(FILE *out, fann_type *input,
    unsigned int num_neurons, unsigned int seconds_per_test)
{
    unsigned int i, total_connections;
    fann_type *output;

    struct fann *ann = fann_create(1, 0.7, 4,
        num_neurons, num_neurons, num_neurons, num_neurons);

    // just to fool the optimizer into thinking that the network is not fully connected
    ann->connection_rate = 0.9;

    total_connections = (num_neurons+1) * num_neurons * 3;

    start_timer();

    for(i = 0; time_elapsed() < (double)seconds_per_test; i++){
        output = fann_run(ann, input);
    }

    stop_timer();

    fprintf(out, "%d %.20e\n", num_neurons, getNanoPerN(i)/total_connections);
    fprintf(stderr, "%d ", num_neurons);
    fann_destroy(ann);
}

void performance_benchmark_fann_thres(FILE *out, fann_type *input,
    unsigned int num_neurons, unsigned int seconds_per_test)
{
    unsigned int i, total_connections;
    fann_type *output;

    struct fann *ann = fann_create(1, 0.7, 4,
        num_neurons, num_neurons, num_neurons, num_neurons);

    fann_set_activation_function_hidden(ann, FANN_THRESHOLD);
    fann_set_activation_function_output(ann, FANN_THRESHOLD);

    total_connections = (num_neurons+1) * num_neurons * 3;

    start_timer();

    for(i = 0; time_elapsed() < (double)seconds_per_test; i++){
        output = fann_run(ann, input);
    }

    stop_timer();

    fprintf(out, "%d %.20e\n", num_neurons, getNanoPerN(i)/total_connections);
    fprintf(stderr, "%d ", num_neurons);
    fann_destroy(ann);
}

#endif

void performance_benchmark_fann(FILE *out, fann_type *input,
    unsigned int num_neurons, unsigned int seconds_per_test)
{
    unsigned int i, total_connections;
    fann_type *output;

    struct fann *ann = fann_create(1, 0.7, 4,
        num_neurons, num_neurons, num_neurons, num_neurons);

    total_connections = (num_neurons+1) * num_neurons * 3;

    start_timer();

    for(i = 0; time_elapsed() < (double)seconds_per_test; i++){
        output = fann_run(ann, input);
    }

    stop_timer();

    fprintf(out, "%d %.20e\n", num_neurons, getNanoPerN(i)/total_connections);
    fprintf(stderr, "%d ", num_neurons);
    fann_destroy(ann);
}

int main(int argc, char* argv[])
{
    /* parameters */
    unsigned int num_neurons_first;
    unsigned int num_neurons_last;
    double multiplier;
    unsigned int seconds_per_test;
    FILE *out;

    fann_type *input;
    unsigned int num_neurons, i;

    if(argc != 7){
        printf("usage %s net file_out num_neurons_first num_neurons_last multiplier seconds_per_test\n", argv[0]);
        return -1;
    }

    calibrate_timer();

    num_neurons_first = atoi(argv[3]);
    num_neurons_last = atoi(argv[4]);
    multiplier = atof(argv[5]);
    seconds_per_test = atoi(argv[6]);

    if(strlen(argv[2]) == 1 && argv[2][0] == '-'){
        out = stdout;
    }else{
        out = fopen(argv[2], "w");
    }

    fprintf(stderr, "Performance test of %s %s ", argv[1], argv[2]);

    input = (fann_type*)calloc(num_neurons_last, sizeof(fann_type));
    for(i = 0; i < num_neurons_last; i++){
        input[i] = fann_random_weight(); // fill input with random variables
    }

    for(num_neurons = num_neurons_first;
        num_neurons <= num_neurons_last; num_neurons = (int)(num_neurons * multiplier)){

#ifndef FIXEDFANN
        if(strcmp(argv[1], "lwnn") == 0){
            performance_benchmark_lwnn(out, input,
                num_neurons, seconds_per_test);
        }else if(strcmp(argv[1], "fann") == 0){
#endif
            performance_benchmark_fann(out, input,
                num_neurons, seconds_per_test);
#ifndef FIXEDFANN
        }else if(strcmp(argv[1], "fann_noopt") == 0){
            performance_benchmark_fann_noopt(out, input,
                num_neurons, seconds_per_test);
        }else if(strcmp(argv[1], "fann_thres") == 0){
            performance_benchmark_fann_thres(out, input,
                num_neurons, seconds_per_test);
        }else if(strcmp(argv[1], "jneural") == 0){
            performance_benchmark_jneural(out, input,
                num_neurons, seconds_per_test);
        }
#endif

    }

    fprintf(stderr, "\n");
    free(input);

    return 0;
}


B.3.4 benchmark.sh

#!/bin/sh
test/performance fann fann_performance.out 1 2048 2 20
test/performance fann_noopt fann_noopt_performance.out 1 2048 2 20
test/performance fann_thres fann_thres_performance.out 1 2048 2 20
test/performance_fixed fann fann_fixed_performance.out 1 2048 2 20
test/performance lwnn lwnn_performance.out 1 2048 2 20
test/performance jneural jneural_performance.out 1 512 2 20

#./performance_arm fann fann_performance_arm.out 1 512 2 20
#./performance_arm fann_noopt fann_noopt_performance_arm.out 1 512 2 20
#./performance_arm fann_thres fann_thres_performance_arm.out 1 512 2 20
#./performance_fixed_arm fann fann_fixed_performance_arm.out 1 512 2 20
#./performance_arm lwnn lwnn_performance_arm.out 1 512 2 20
#./performance_arm jneural jneural_performance_arm.out 1 512 2 20

rm -f *_fixed
test/quality fann datasets/mydata/building.train datasets/mydata/building.test building_fann_train.out building_fann_test.out 16 0 200 1
test/quality_fixed building_fann_train.out_fixed_train building_fann_train.out_fixed_test building_fann_fixed_train.out building_fann_fixed_test.out *_fixed
test/quality fann_half datasets/mydata/building.train datasets/mydata/building.test building_fann_half_train.out building_fann_half_test.out 16 0 200 1
test/quality lwnn datasets/mydata/building.train datasets/mydata/building.test building_lwnn_train.out building_lwnn_test.out 16 0 200 1
test/quality jneural datasets/mydata/building.train datasets/mydata/building.test building_jneural_train.out building_jneural_test.out 16 0 200 1

rm -f *_fixed
test/quality fann datasets/mydata/card.train datasets/mydata/card.test card_fann_train.out card_fann_test.out 32 0 200 1
test/quality_fixed card_fann_train.out_fixed_train card_fann_train.out_fixed_test card_fann_fixed_train.out card_fann_fixed_test.out *_fixed
test/quality fann_half datasets/mydata/card.train datasets/mydata/card.test card_fann_half_train.out card_fann_half_test.out 32 0 200 1
test/quality lwnn datasets/mydata/card.train datasets/mydata/card.test card_lwnn_train.out card_lwnn_test.out 32 0 200 1
test/quality jneural datasets/mydata/card.train datasets/mydata/card.test card_jneural_train.out card_jneural_test.out 32 0 200 1

rm -f *_fixed
test/quality fann datasets/mydata/gene.train datasets/mydata/gene.test gene_fann_train.out gene_fann_test.out 4 2 200 1
test/quality_fixed gene_fann_train.out_fixed_train gene_fann_train.out_fixed_test gene_fann_fixed_train.out gene_fann_fixed_test.out *_fixed
test/quality fann_half datasets/mydata/gene.train datasets/mydata/gene.test gene_fann_half_train.out gene_fann_half_test.out 4 2 200 1
test/quality lwnn datasets/mydata/gene.train datasets/mydata/gene.test gene_lwnn_train.out gene_lwnn_test.out 4 2 200 1
test/quality jneural datasets/mydata/gene.train datasets/mydata/gene.test gene_jneural_train.out gene_jneural_test.out 4 2 200 1

rm -f *_fixed
test/quality fann datasets/mydata/mushroom.train datasets/mydata/mushroom.test mushroom_fann_train.out mushroom_fann_test.out 32 0 200 1
test/quality_fixed mushroom_fann_train.out_fixed_train mushroom_fann_train.out_fixed_test mushroom_fann_fixed_train.out mushroom_fann_fixed_test.out *_fixed
test/quality fann_half datasets/mydata/mushroom.train datasets/mydata/mushroom.test mushroom_fann_half_train.out mushroom_fann_half_test.out 32 0 200 1
test/quality lwnn datasets/mydata/mushroom.train datasets/mydata/mushroom.test mushroom_lwnn_train.out mushroom_lwnn_test.out 32 0 200 1
test/quality jneural datasets/mydata/mushroom.train datasets/mydata/mushroom.test mushroom_jneural_train.out mushroom_jneural_test.out 32 0 200 1

rm -f *_fixed
test/quality fann datasets/mydata/soybean.train datasets/mydata/soybean.test soybean_fann_train.out soybean_fann_test.out 16 8 200 1
test/quality_fixed soybean_fann_train.out_fixed_train soybean_fann_train.out_fixed_test soybean_fann_fixed_train.out soybean_fann_fixed_test.out *_fixed
test/quality fann_half datasets/mydata/soybean.train datasets/mydata/soybean.test soybean_fann_half_train.out soybean_fann_half_test.out 16 8 200 1
test/quality lwnn datasets/mydata/soybean.train datasets/mydata/soybean.test soybean_lwnn_train.out soybean_lwnn_test.out 16 8 200 1
test/quality jneural datasets/mydata/soybean.train datasets/mydata/soybean.test soybean_jneural_train.out soybean_jneural_test.out 16 8 200 1

rm -f *_fixed
test/quality fann datasets/mydata/thyroid.train datasets/mydata/thyroid.test thyroid_fann_train.out thyroid_fann_test.out 16 8 200 1
test/quality_fixed thyroid_fann_train.out_fixed_train thyroid_fann_train.out_fixed_test thyroid_fann_fixed_train.out thyroid_fann_fixed_test.out *_fixed
test/quality fann_half datasets/mydata/thyroid.train datasets/mydata/thyroid.test thyroid_fann_half_train.out thyroid_fann_half_test.out 16 8 200 1
test/quality lwnn datasets/mydata/thyroid.train datasets/mydata/thyroid.test thyroid_lwnn_train.out thyroid_lwnn_test.out 16 8 200 1
test/quality jneural datasets/mydata/thyroid.train datasets/mydata/thyroid.test thyroid_jneural_train.out thyroid_jneural_test.out 16 8 200 1
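
Note (added for clarity): in the quality runs above, the four numbers after each pair of output files are num_hidden1, num_hidden2, seconds_of_training and seconds_between_reports, matching the usage string printed by quality.cc; the numbers in the performance runs are num_neurons_first, num_neurons_last, multiplier and seconds_per_test, matching performance.cc.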

Steffen Nissen 2003-11-07