Classification Library (ClassificationLib)


NeuralNetwork
Performs classification using a Neural Network.




FUNCTIONS

NN - NeuralNetwork(double[][] input_sequence, int[] target, double miu, int[] hidden_unit)
NN - NeuralNetwork(double[][] input_sequence, int[] target, double miu, int[] hidden_unit, String[] activation_function)
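
For reference, the parameter roles as used in the examples below can be summarized in one annotated call; treating miu as the learning rate is an assumption inferred from the examples, not something stated by the library:

// Annotated constructor call (a sketch; parameter roles inferred from the examples below)
double[][] input_sequence = {{0, 0}, {0, 1}, {1, 0}, {1, 1}}; // one row per training pattern
int[] target = {0, 1, 1, 0};                         // class label of each pattern (0, 1, 2, ...)
double miu = 0.1;                                    // assumed learning rate
int[] hidden_unit = {5, 3};                          // number of units in each hidden layer
String[] activation_function = {"sigmoid", "tanh"};  // per-layer activation (optional overload)

ClassificationLib clib = new ClassificationLib();
NN nn = clib.NeuralNetwork(input_sequence, target, miu, hidden_unit, activation_function);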




EXAMPLES

Example ClassificationLib clib = new ClassificationLib();

double[][] input_sequence = {{0, 0}, {0, 1}, {1, 0}, {1, 1}};
int[] target = {0, 1, 1, 0}; // 2 class labels

double miu=0.1;
int[] hidden_unit={5}; //1 hidden layer with 5 hidden units
int epoch=10000;

// Activation function: sigmoid (default)
NN nn = clib.NeuralNetwork(input_sequence, target, miu, hidden_unit);
nn.Learning(epoch);
Output Learning process is running....
-------------------------------------------------- 10000 epoch
..................................................
Iteration for learning: 10000 epoch
Final MSE: 0.0037
Accuracy: 100.0%
Error ratio: 0.0%
<Learning process done>


Example ClassificationLib clib = new ClassificationLib();

double[][] input_sequence = {{0, 0}, {0, 1}, {1, 0}, {1, 1}};
int[] target = {0, 1, 1, 2}; // 3 class labels

double miu=0.1;
int[] hidden_unit={5}; //1 hidden layer with 5 hidden units
int epoch=10000;

// Activation function: sigmoid (default)
NN nn = clib.NeuralNetwork(input_sequence, target, miu, hidden_unit);
nn.Learning(epoch);
Output Learning process is running....
-------------------------------------------------- 10000 epoch
..................................................
Iteration for learning: 10000 epoch
Final MSE: 0.0024
Accuracy: 100.0%
Error ratio: 0.0%
<Learning process done>
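
With three class labels the library appears to allocate one output unit per class, so Testing should return three values, one per class; this mirrors the two-value outputs shown in the two-class examples further below and is an assumption, not documented behavior. A minimal sketch:

// Sketch: inspect the per-class outputs for one training pattern (assumes one output unit per class).
double[] out = nn.Testing(new double[]{1, 1});
for (int k = 0; k < out.length; k++) {
    System.out.println("class " + k + ": " + out[k]);
}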


Example ClassificationLib clib = new ClassificationLib();

double[][] input_sequence = {{0, 0}, {0, 1}, {1, 0}, {1, 1}};
int[] target = {0, 1, 1, 0};

double miu=0.1;
int[] hidden_unit={5, 3}; //1st hidden layer with 5 hidden units, 2nd hidden layer with 3 hidden units
int epoch=10000;

// Activation function: sigmoid (default)
NN nn = clib.NeuralNetwork(input_sequence, target, miu, hidden_unit);
nn.Learning(epoch);
Output Learning process is running....
-------------------------------------------------- 10000 epoch
..................................................
Iteration for learning: 10000 epoch
Final MSE: 0.0037
Accuracy: 100.0%
Error ratio: 0.0%
<Learning process done>


Example ClassificationLib clib = new ClassificationLib();

double[][] input_sequence = {{0, 0}, {0, 1}, {1, 0}, {1, 1}};
int[] target = {0, 1, 1, 0};

double miu=0.1;
int[] hidden_unit={5};
int epoch=10000;

NN nn = clib.NeuralNetwork(input_sequence, target, miu, hidden_unit);

//Pass true to display the MSE gradient descent during learning
nn.Learning(epoch, true);
Output Learning process is running....
-------------------------------------------------- 10000 epoch
..................................................
Iteration for learning: 10000 epoch
Final MSE: 0.0037
Accuracy: 100.0%
Error ratio: 0.0%
<Learning process done>



Example ClassificationLib clib = new ClassificationLib();

double[][] input_sequence = {{0, 0}, {0, 1}, {1, 0}, {1, 1}};
int[] target = {0, 1, 1, 0};

double miu=0.1;
int[] hidden_unit={5};
int epoch=10000;

NN nn = clib.NeuralNetwork(input_sequence, target, miu, hidden_unit);
nn.Learning(epoch);

//To test the network on the training data
nn.viewTesting();
Output Learning process is running....
-------------------------------------------------- 10000 epoch
..................................................
Iteration for learning: 10000 epoch
Final MSE: 0.003
Accuracy: 100.0%
Error ratio: 0.0%
<Learning process done>

0: correct --> [0.9699      0.0293] = 0
1: correct --> [0.028      0.974] = 1
2: correct --> [0.0495      0.9515] = 1
3: correct --> [0.9536      0.0448] = 0
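
Each row above lists the raw values of the two output units and, after the equals sign, the predicted class. The predicted class appears to be the index of the largest output value, which is the same quantity getTestingClass returns further below; a minimal sketch of that interpretation, assuming one output unit per class:

// Sketch: recover the predicted class as the argmax of the output vector.
double[] out = nn.Testing(new double[]{0, 1});
int predicted = 0;
for (int k = 1; k < out.length; k++) {
    if (out[k] > out[predicted]) predicted = k;
}
System.out.println("predicted class: " + predicted); // expected to match nn.getTestingClass(...)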


Example ClassificationLib clib = new ClassificationLib();

double[][] input_sequence = {{0, 0}, {0, 1}, {1, 0}, {1, 1}};
int[] target = {0, 1, 1, 0};

double miu=0.1;
int[] hidden_unit={5};
int epoch=10000;

NN nn = clib.NeuralNetwork(input_sequence, target, miu, hidden_unit);
nn.Learning(epoch);

//To save the learning parameters to a file with the .nn extension
nn.save("my_nn");
Output Learning process is running....
-------------------------------------------------- 10000 epoch
..................................................
Iteration for learning: 10000 epoch
Final MSE: 0.0037
Accuracy: 100.0%
Error ratio: 0.0%
<Learning process done>

Learning parameters saved successfully in my_nn.nn


Example ClassificationLib clib = new ClassificationLib();

double[][] input_sequence = {{0, 0}, {0, 1}, {1, 0}, {1, 1}};
int[] target = {0, 1, 1, 0};

double miu=0.1;
int[] hidden_unit={5};

NN nn = clib.NeuralNetwork(input_sequence, target, miu, hidden_unit);

//To load the learning parameters from a file with the .nn extension
nn.load("my_nn");
nn.viewTesting();
Output Learning parameters loaded successfully from my_nn.nn

0: correct --> [0.9559      0.0416] = 0
1: correct --> [0.0387      0.9619] = 1
2: correct --> [0.0392      0.961] = 1
3: correct --> [0.9622      0.0395] = 0


Example ClassificationLib clib = new ClassificationLib();

double[][] input_sequence = {{0, 0}, {0, 1}, {1, 0}, {1, 1}};
int[] target = {0, 1, 1, 0};

double miu=0.1;
int[] hidden_unit={5};

NN nn = clib.NeuralNetwork(input_sequence, target, miu, hidden_unit);

//To load the learning parameters from a file with the .nn extension
nn.load("my_nn");
nn.viewMSE();
Output Learning parameters loaded successfully from my_nn.nn


Example VectorLib vlib = new VectorLib();
ClassificationLib clib = new ClassificationLib();

double[][] input_sequence = {{0, 0}, {0, 1}, {1, 0}, {1, 1}};
int[] target = {0, 1, 1, 0};

double miu=0.1;
int[] hidden_unit={5};

NN nn = clib.NeuralNetwork(input_sequence, target, miu, hidden_unit);

//To load the learning parameters from a file with the .nn extension
nn.load("my_nn");
double[][][] w=nn.getWeight();
double[][] wi=w[0];
vlib.view("W in input layer", wi);

double[][] wh=w[1];
vlib.view("W in 1st hidden layer", wh);
Output Learning parameters loaded successfully from my_nn.nn

W in input layer =
0.43825038209949224      2.3797197244449295      6.503495317107739      2.3375066689811144      -0.914394706921757
-0.9704703493723167      -6.193625128133846      -3.8114405983677835      4.701293933770404      1.8533936590108075
1.7071578689229496      -5.507367495088548      -4.701364284885894      -3.9626942802098184      -2.6096717307872583

W in 1st hidden layer =
0.14337971658122545      0.5244080821737196
1.011204678902618      -1.5654061146903337
6.780196378148771      -6.675024541139949
-6.260002785410893      6.066026640923934
3.631455355952464      -3.8321165438650406
-2.265525113190952      2.1406898058511126
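
The shapes above are consistent with a fully connected network that stores one extra bias row per layer: the input-layer matrix is 3 x 5 (two inputs plus an assumed bias row, feeding five hidden units) and the hidden-layer matrix is 6 x 2 (five hidden units plus an assumed bias row, feeding two output units, one per class). A sketch that prints the dimensions of every matrix returned by getWeight:

// Sketch: print the dimensions of each weight matrix (the bias-row reading is an assumption).
double[][][] weights = nn.getWeight();
for (int layer = 0; layer < weights.length; layer++) {
    System.out.println("layer " + layer + ": " + weights[layer].length + " x " + weights[layer][0].length);
}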


Example VectorLib vlib = new VectorLib();
ClassificationLib clib = new ClassificationLib();

double[][] input_sequence = {{0, 0}, {0, 1}, {1, 0}, {1, 1}};
int[] target = {0, 1, 1, 0};

double miu=0.1;
int[] hidden_unit={5};
int epoch=10000;

NN nn = clib.NeuralNetwork(input_sequence, target, miu, hidden_unit);
nn.Learning(epoch);

double err=nn.getError();
vlib.view("Error ratio (%)", err);
Output Learning process is running....
-------------------------------------------------- 10000 epoch
..................................................
Iteration for learning: 10000 epoch
Final MSE: 0.0037
Accuracy: 100.0%
Error ratio: 0.0%
<Learning process done>

Error ratio (%) = 0.0


Example VectorLib vlib = new VectorLib();
ClassificationLib clib = new ClassificationLib();

double[][] input_sequence = {{0, 0}, {0, 1}, {1, 0}, {1, 1}};
int[] target = {0, 1, 1, 0};

double miu=0.1;
int[] hidden_unit={5};
int epoch=10000;

NN nn = clib.NeuralNetwork(input_sequence, target, miu, hidden_unit);
nn.Learning(epoch);

double acc=nn.getAccuracy();
vlib.view("Accuracy (%)", acc);
Output Learning process is running....
-------------------------------------------------- 10000 epoch
..................................................
Iteration for learning: 10000 epoch
Final MSE: 0.0037
Accuracy: 100.0%
Error ratio: 0.0%
<Learning process done>

Accuracy (%) = 100.0


Example VectorLib vlib = new VectorLib();
ClassificationLib clib = new ClassificationLib();

double[][] input_sequence = {{0, 0}, {0, 1}, {1, 0}, {1, 1}};
int[] target = {0, 1, 1, 0};

double miu=0.1;
int[] hidden_unit={5};
int epoch=10000;

NN nn = clib.NeuralNetwork(input_sequence, target, miu, hidden_unit);
nn.Learning(epoch);

double[] testingdata={0,1};

//To get the output vector for the test data
double[] output=nn.Testing(testingdata);
vlib.view("Output", output);

//To get the output target (predicted class) for the test data
int outputtarget=nn.getTestingClass(testingdata);
vlib.view("Output target", outputtarget);
Output Learning process is running....
-------------------------------------------------- 10000 epoch
..................................................
Iteration for learning: 10000 epoch
Final MSE: 0.0037
Accuracy: 100.0%
Error ratio: 0.0%
<Learning process done>

Output =
0.03868347854348802      0.9618823315604247

Output target = 1
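
The same calls accept inputs that did not appear in the training data; whether the prediction is sensible for such points depends on the trained network, so the following is only a sketch:

// Sketch: classify a hypothetical input that is close to, but not identical to, a training pattern.
double[] unseen = {0.1, 0.9};
int predictedClass = nn.getTestingClass(unseen);
System.out.println("Predicted class for {0.1, 0.9}: " + predictedClass);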


Example ClassificationLib clib = new ClassificationLib();

double[][] input_sequence = {{0, 0}, {0, 1}, {1, 0}, {1, 1}};
int[] target = {0, 1, 1, 0};

double miu=0.1;
int[] hidden_unit={10, 5, 3};
int epoch=10000;

// Activation function (sigmoid, tanh, relu, leakyrelu, swish)
String[] af={"sigmoid"}; // all hidden layers use sigmoid

NN nn = clib.NeuralNetwork(input_sequence, target, miu, hidden_unit,af);
nn.Learning(epoch);
Output Learning process is running....
-------------------------------------------------- 10000 epoch
..................................................
Iteration for learning: 10000 epoch
Final MSE: 0.0015
Accuracy: 100.0%
Error ratio: 0.0%
<Learning process done>


Example ClassificationLib clib = new ClassificationLib();

double[][] input_sequence = {{0, 0}, {0, 1}, {1, 0}, {1, 1}};
int[] target = {0, 1, 1, 0};

double miu=0.1;
int[] hidden_unit={10, 5, 3};
int epoch=10000;

// Activation function (sigmoid, tanh, relu, leakyrelu, swish)
String[] af={"sigmoid", "swish"}; // First and middle hidden layer use sigmoid, the last uses swish

NN nn = clib.NeuralNetwork(input_sequence, target, miu, hidden_unit, af);
nn.Learning(epoch);
Output Learning process is running....
-------------------------------------------------- 10000 epoch
..................................................
Iteration for learning: 10000 epoch
Final MSE: 0.0015
Accuracy: 100.0%
Error ratio: 0.0%
<Learning process done>


Example ClassificationLib clib = new ClassificationLib();

double[][] input_sequence = {{0, 0}, {0, 1}, {1, 0}, {1, 1}};
int[] target = {0, 1, 1, 0};

double miu=0.1;
int[] hidden_unit={10, 5, 3};
int epoch=10000;

// Activation function (sigmoid, tanh, relu, leakyrelu, swish)
String[] af={"sigmoid", "tanh", "swish"}; // First hidden layer uses sigmoid, the middle uses tanh, the last uses swish

NN nn = clib.NeuralNetwork(input_sequence, target, miu, hidden_unit, af);
nn.Learning(epoch);
Output Learning process is running....
-------------------------------------------------- 10000 epoch
..................................................
Iteration for learning: 10000 epoch
Final MSE: 0.0015
Accuracy: 100.0%
Error ratio: 0.0%
<Learning process done>