Skip to content

Commit 301467a

Browse files
committed
Made some code independent by adding another dataset
1 parent 44a0cd2 commit 301467a

File tree

7 files changed

+33
-99
lines changed

7 files changed

+33
-99
lines changed

.gitignore

Lines changed: 5 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1 +1,5 @@
1-
/.DS_Store
1+
/.DS_Store
2+
/.mnist_train.csv
3+
/.mnist_train.csv
4+
/.emnist-letters-test.csv
5+
/.emnist-letters-train.csv

Main.java

Lines changed: 7 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -3,22 +3,24 @@
33
public class Main {
44
public static void main(String[] args) {
55
parameter.setOutputFile("output.txt",true);
6-
parameter.setLayerArray(784, 32, 16, 16, 10);
7-
parameter.setTrainingFileReader("mnist_train.csv", "mnist");
8-
parameter.setTestingFileReader("mnist_test.csv", "mnist");
6+
parameter.setLayerArray(784, 32, 16, 16, 26);
7+
parameter.setTrainingFileReader("emnist-letters-train.csv", "mnist");
8+
parameter.setTestingFileReader("emnist-letters-test.csv", "mnist");
9+
910
parameter.setLearningRate(1);
1011
parameter.setBiasLearningRate(1);
1112
parameter.setEpsillion(0);
1213
parameter.setBatchsize(10);
1314
parameter.setRectificationFunction("sigmoid");
15+
1416
Trainer myTrainer = new Trainer();
15-
myTrainer.train(50,10);
17+
myTrainer.train(88800,1);
1618
NeuronObserver myNeuronObserver = new NeuronObserver();
1719
myNeuronObserver.setModel(myTrainer.getLayerManager());
1820
myTrainer.test(9990);
1921

2022
myNeuronObserver.addNeuronToBeObserved(1, 31);
21-
myTrainer.printConfusionMatrix();
23+
//myTrainer.printConfusionMatrix();
2224
myTrainer.test(2);
2325

2426
myNeuronObserver.clear();

ann4j/MNISTDataBaseFileReader.java

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -35,6 +35,7 @@ public void next() {
3535
inputArray = generateInputFromBigArray(array);
3636

3737
} catch (IOException ex) {
38+
ex.printStackTrace();
3839
}
3940
}
4041

@@ -68,7 +69,7 @@ public ArrayList<Double> generateExpectedOutputArrayFromLabel() {
6869
// this needs to be overridden for changing the dataset
6970
// responsible for generating the output neurons.
7071
ArrayList<Double> expectedOutputArray = new ArrayList<Double>();
71-
for (int i = 0; i < 10; i++) {
72+
for (int i = 0; i < parameter.numberOfOutputNeurons; i++) {
7273
if (i == label) {
7374
// Works only for 0-9 digits, 0 output neuron corresponds to 0, 1st to 1 etc.
7475
// But you can change this and jumble the order as required. This may be one of

ann4j/ModelEvaluator.java

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -13,7 +13,7 @@ public class ModelEvaluator {
1313

1414
public void updateConfusionMatrix(ArrayList<Double> expectedOutputArray, int predictedNeuronNum) {
1515
ArrayList<Double> temp = new ArrayList<Double>();
16-
for (int i = 0; i <= expectedOutputArray.size(); i++) {
16+
for (int i = 0; i <= parameter.numberOfOutputNeurons; i++) {
1717
if (i == predictedNeuronNum) {
1818
temp.add(i, 1.0);
1919
} else {
@@ -42,10 +42,10 @@ public void updateConfusionMatrix(ArrayList<Double> expectedOutputArray, ArrayLi
4242
// This is checking if the confusion matrix has been initialized, if it has not
4343
// been
4444
// initialized, it initializes it.
45-
if (confusionMatrix.size() != expectedOutputArray.size()) {// arraylist has not been initialized
46-
initializeList(expectedOutputArray.size());
45+
if (confusionMatrix.size() != parameter.numberOfOutputNeurons) {// arraylist has not been initialized
46+
initializeList(parameter.numberOfOutputNeurons);
4747
}
48-
for (int i = 0; i < expectedOutputArray.size(); i++) {
48+
for (int i = 0; i < parameter.numberOfOutputNeurons; i++) {
4949

5050
// This is updating the confusion matrix.
5151
if (expectedOutputArray.get(i) == 1.0 & actualOutputArray.get(i) == 1.0) { // True positives

ann4j/Trainer.java

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -30,7 +30,6 @@ public LayerManager getLayerManager() {
3030
public void train(int noOfSamples, int epochs) {
3131
for(int j = 0; j < epochs; j++){
3232

33-
parameter.setTrainingFileReader("mnist_train.csv", "mnist");
3433
this.trainingFileReader = parameter.getTrainingFileReader();
3534
for (int i = 0; i < noOfSamples; i++) {
3635

ann4j/parameter.java

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -15,6 +15,9 @@ public class parameter {
1515
private static int batchsize;
1616
private static double biasLearningRate;
1717
private static double epsillion;
18+
// Used to store the number of input and output neurons.
19+
public static int numberOfOutputNeurons;
20+
public static int numberOfInputNeurons;
1821

1922
public static double getBiasLearningRate() {
2023
return biasLearningRate;
@@ -51,6 +54,7 @@ public static void setTestingFileReader(String fileName, String type) {
5154
// The layer array must be initialized before this always
5255
}
5356
} catch (FileNotFoundException ex) {
57+
ex.printStackTrace();
5458
}
5559
}
5660

@@ -64,6 +68,7 @@ public static void setTrainingFileReader(String fileName, String type) {
6468
parameter.trainingFileReader = new MNISTDataBaseFileReader(fileName, layerArray[layerArray.length - 1]);
6569
}
6670
} catch (FileNotFoundException ex) {
71+
ex.printStackTrace();
6772
}
6873
}
6974

@@ -97,6 +102,8 @@ public static double rectify(double numToBeRectified) {
97102
}
98103

99104
public static void setLayerArray(int... LayerArray) {
105+
parameter.numberOfInputNeurons = LayerArray[0];
106+
parameter.numberOfOutputNeurons = LayerArray[LayerArray.length-1];
100107
parameter.layerArray = LayerArray;
101108
}
102109

output.txt

Lines changed: 8 additions & 87 deletions
Original file line numberDiff line numberDiff line change
@@ -1,95 +1,16 @@
1-
Training accuracy in epoch 0 is 10.0
2-
Training accuracy in epoch 1 is 10.0
3-
Training accuracy in epoch 2 is 12.0
4-
Training accuracy in epoch 3 is 10.0
5-
Training accuracy in epoch 4 is 10.0
6-
Training accuracy in epoch 5 is 10.0
7-
Training accuracy in epoch 6 is 10.0
8-
Training accuracy in epoch 7 is 12.0
9-
Training accuracy in epoch 8 is 12.0
10-
Training accuracy in epoch 9 is 12.0
11-
Testing accuracy 10.13013013013013
12-
Label:0.0
13-
True Positive :0.0
14-
True Negative :9011.0
15-
False Positive :0.0
16-
False Negative :979.0
17-
18-
19-
Label:1.0
20-
True Positive :0.0
21-
True Negative :8856.0
22-
False Positive :0.0
23-
False Negative :1134.0
24-
25-
26-
Label:2.0
27-
True Positive :0.0
28-
True Negative :8959.0
29-
False Positive :0.0
30-
False Negative :1031.0
31-
32-
33-
Label:3.0
34-
True Positive :5.0
35-
True Negative :8896.0
36-
False Positive :85.0
37-
False Negative :1004.0
38-
39-
40-
Label:4.0
41-
True Positive :0.0
42-
True Negative :9009.0
43-
False Positive :0.0
44-
False Negative :981.0
45-
46-
47-
Label:5.0
48-
True Positive :0.0
49-
True Negative :9099.0
50-
False Positive :0.0
51-
False Negative :891.0
52-
53-
54-
Label:6.0
55-
True Positive :0.0
56-
True Negative :9033.0
57-
False Positive :0.0
58-
False Negative :957.0
59-
60-
61-
Label:7.0
62-
True Positive :0.0
63-
True Negative :8963.0
64-
False Positive :0.0
65-
False Negative :1027.0
66-
67-
68-
Label:8.0
69-
True Positive :0.0
70-
True Negative :9017.0
71-
False Positive :0.0
72-
False Negative :973.0
73-
74-
75-
Label:9.0
76-
True Positive :1007.0
77-
True Negative :89.0
78-
False Positive :8893.0
79-
False Negative :1.0
80-
81-
1+
Training accuracy in epoch 0 is 10.66891891891892
2+
Testing accuracy 28.92892892892893
823
The neuron 31 in layer 1 has been updated by forward propagation
83-
Neuron #31 has activation 0.44070545272313627
4+
Neuron #31 has activation 6.567825572210979E-4
845

856
The neuron 31 in layer 1 has been updated by forward propagation
86-
Neuron #31 has activation 0.805525963558943
7+
Neuron #31 has activation 0.003181628304117291
878

88-
Testing accuracy 10.128102481985588
9+
Testing accuracy 28.93314651721377
8910
The neuron 0 in layer 2 has been updated by forward propagation
90-
Neuron #0 has activation 0.7362380271929501
11+
Neuron #0 has activation 0.25373727956231534
9112

9213
The neuron 0 in layer 2 has been updated by forward propagation
93-
Neuron #0 has activation 0.6814481765071231
14+
Neuron #0 has activation 0.7220061457959416
9415

95-
Testing accuracy 10.136081648989393
16+
Testing accuracy 28.94736842105263

0 commit comments

Comments
 (0)