/*
    main.cpp

    Main driver for adaline network

    Written by Keith Hughes, 21 May, 1999
    CSC 143, North Seattle Community College
*/

#include <fstream.h>
#include <iostream.h>
#include <math.h>
#include <stdlib.h>
#include <time.h>

#include "adaline.h"

// How many random samples we take when measuring the network's
// current error rate.
static const int ERROR_RESOLUTION = 1000;

// The overall error rate we want the network to reach before we
// stop training it.
static const double NETWORK_ERROR_TOLERANCE = 0.001;

// When training on a particular problem, we want the error on that
// problem to fall below this value.
static const double INDIVIDUAL_ERROR_TOLERANCE = 0.01;

double ErrorRate(AdalineNet &);
void GroupTest(AdalineNet &, const char *);
void Train(AdalineNet &, double);

/*
    double f(x, y)

    The function we want to train the network to mimic.

    It doesn't really matter what the function is, as long as the
    classification it defines is linearly separable.
*/

double f(double x, double y) { 
    return ((-5.0 * x - 2.0) / 4.0 >= y) ? 1.0 : -1.0;
}
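
/*
    Concretely, the decision boundary is the line y = (-5x - 2) / 4:
    points on or below that line map to +1.0, and points above it map
    to -1.0. For example, f(0.0, -0.5) returns 1.0 (since
    (-5*0 - 2)/4 = -0.5 >= -0.5), while f(0.0, 0.0) returns -1.0.
*/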

/*
    int main(void)

    Main driver for the neural simulator.

    Creates a simple two-layer Adaline network, trains it on random
    samples of the f() function given above, then runs a final test to
    see how it performs.
*/

int
main(void)

{
    // srand((unsigned int) time(NULL));

    // cout << "Creating an AdalineNet an1(3)..." << endl;
    // AdalineNet an1(3);
    // cout << "Write to file \"testfile.txt\" " << endl;
    // ofstream test("testfile.txt);
    // an1.WriteToFile(test);

    ifstream test1("testfile.txt");
    cout << "Creating an empty AdalineNet an..." << endl;
    AdalineNet an;
    cout << "Reading from file \"testfile.txt\" " << endl;
    an.ReadFromFile(test1);
    cout << "Write to file \"testfile2.txt\" " << endl;
    ofstream test2("testfile2.txt");
    an.WriteToFile(test2);

    // Starting and ending times of our training run.
    time_t begin, end;

    cout << "Welcome to the Adaline Network Simulator V1.0\n\n";

    // Tell the user what we're doing.
    cout << "Creating network.\n\n";

    // Create a two-layer network with two nodes in the input layer and
    // one in the output layer, and connect the neurons together.
    AdalineNet n(2);

    // Let's see how the initial network does.
    GroupTest(n, "Initial test");

    // Train the network, timing how long it takes.
    cout << "\n\nTraining" << endl;
    begin = time(NULL);
    Train(n, NETWORK_ERROR_TOLERANCE);
    end = time(NULL);
    cout << "\n\nDone training (" << end - begin << 
            " seconds)" << endl;

    // Now demonstrate that our error has gone down, but isn't
    // exact.
    cout << "\n\n";
    GroupTest(n, "Final test");

    // yeah, it all worked.
    return 0;
}

/*
    double ErrorRate(AdalineNet &n)

    Calculate the current error rate of Adaline network <n>.
    We hand the network a series of random problems, count how many
    it gets wrong, and return the fraction it got wrong.
*/
double
ErrorRate(AdalineNet &n)

{
    // Inputs to the problem
    double x, y;

    // How many wrong answers we've seen.
    int count = 0;

    // Run a series of problems.
    for (int problem = 0; problem < ERROR_RESOLUTION; problem++) {
        // Pick random inputs in the range [-1.0, +1.0].
        x = ((double)rand()/(double)RAND_MAX) * 2.0 - 1.0;
        y = ((double)rand()/(double)RAND_MAX) * 2.0 - 1.0;

        // Make the network calculate an answer
        n.SetInputValue(0, x);
        n.SetInputValue(1, y);
        n.Fire();
        
        // Increase error count if we didn't get it.
        if (f(x,y) != n.GetOutputValue(0))
            count++;
    }

    // Return the fraction of problems the network got wrong.
    return (double)count/(double)ERROR_RESOLUTION;
}

/*
    void GroupTest(AdalineNet &n, const char *label)

    Run several error rate tests on the network, printing out the
    error rate of each test. Label each test with <label>.
*/

void
GroupTest(
    AdalineNet &n,
    const char *label
)

{

    cout << label << " 1: " << ErrorRate(n) << endl;
    cout << label << " 2: " << ErrorRate(n) << endl;
    cout << label << " 3: " << ErrorRate(n) << endl;
    cout << label << " 4: " << ErrorRate(n) << endl;
    cout << label << " 5: " << ErrorRate(n) << endl;
    cout << label << " 6: " << ErrorRate(n) << endl;
}

/*
    void Train(AdalineNet &n, double errorThreshold)

    Train network <n> by handing it problems until the error
    rate for the network drops below a given threshold.
*/

void
Train(
    AdalineNet &n,
    double errorThreshold
)

{
    // The output we want from the network for a given set of inputs.
    double desiredOutput;

    // The inputs we'll give the network for the function at hand.
    double x, y;

    // Current error between the network's calculated output and
    // what we want it to be.
    double error;

    // Create a learning situation. We'll feed test cases with
    // known answers into the network until the overall error
    // rate drops below the threshold.
    while (ErrorRate(n) >= errorThreshold) {
        // Generate two random numbers between -1.0 and +1.0.
        x = ((double)rand()/(double)RAND_MAX) * 2.0 - 1.0;
        y = ((double)rand()/(double)RAND_MAX) * 2.0 - 1.0;

        // What our desired output is.
        desiredOutput = f(x,y);

        // These numbers will be our inputs into the network
        n.SetInputValue(0, x);
        n.SetInputValue(1, y);

        // We'll keep feeding these two numbers to the network and
        // firing it to calculate a result that we would like to be
        // the same as f(x,y).
        // We'll calculate the error between what the network
        // calculates and what f(x,y) actually returns. While
        // the error isn't within the desired error tolerance,
        // we'll force the network to learn.
        do {
            n.Fire();
            error = desiredOutput - n.GetOutputValue(0);

            // Tell the network the answer we wanted and let it
            // adjust its weights.
            n.DesiredOutput(desiredOutput);
            n.Learn();
        } while (fabs(error) >= INDIVIDUAL_ERROR_TOLERANCE);
    }
}
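
/*
    For context, Learn() on a classic Adaline applies the Widrow-Hoff
    (LMS) delta rule. A minimal sketch of that weight update, assuming
    the network keeps one weight per input plus a bias and a learning
    rate (the names below are hypothetical, not taken from adaline.h):

        double delta = learningRate * (desiredOutput - output);
        for (int i = 0; i < inputCount; i++)
            weight[i] += delta * input[i];
        bias += delta;
*/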