// NN.java — simple 4-layer feed-forward neural network (71 lines in original listing).
// (scraped line-number gutter removed — no source content was on these lines)
import java.util.*;
import java.lang.*;
/*
 * Each neuron computes A(sum over previous-layer outputs * weights),
 * where the activation function is the logistic sigmoid:
 * A(v) = 1 / (1 + e^-v)
 */
public class NN {
//layer 1
double[][] weights1to2;
double[][] weights2to3;
double[][] weights3to4;
double[] layer1;
double[] layer2;
double[] layer3;
double[] layer4;
public NN(int i, int m1, int m2, int f) {
weights1to2 = new double[i][m1];
weights2to3 = new double[m1][m2];
weights3to4 = new double[m2][f];
layer1 = new double[i];
layer2 = new double[m1];
layer3 = new double[m3];
layer4 = new double[m4];
// random weights
for(int a = 0, a < i; i++) {
for(int j = 0; j < m1; j++) {
weights1to2[a][j] = 10.0 * Math.random();
}
}
// random weights
for(int a = 0, a < m1; i++) {
for(int j = 0; j < m2; j++) {
weights1to2[a][j] = 10.0 * Math.random();
}
}
// random weights
for(int a = 0, a < m2; i++) {
for(int j = 0; j < f; j++) {
weights1to2[a][j] = 10.0 * Math.random();
}
}
}
public int run(double[] input) {
layer1 = input;
for(int i = 0; i < layer2.length; i++) {
layer2[i] = activate(layer1[0]*weights1to2[0][0] + layer1[1]*weights1to2[0][1] + layer1[2]*weights1to2[0][2]);
}
// layer 2
// layer 3
// layer 4
return z;
}
public void train(Data[] train) {
}
}