This example illustrates the use of the DiscriminantAnalysis class: linear discrimination with equal prior probabilities is performed on Fisher's (1936) iris data.
import com.imsl.math.PrintMatrix;
import com.imsl.stat.DiscriminantAnalysis;

public class DiscriminantAnalysisEx1 {

    public static void main(String[] args) throws Exception {
        // Fisher's iris data. Column 0 holds the group number
        // (1 = Iris setosa, 2 = Iris versicolor, 3 = Iris virginica);
        // columns 1-4 hold sepal length, sepal width, petal length,
        // and petal width.
        double[][] xorig = {
            // Group 1: Iris setosa
            {1.0, 5.1, 3.5, 1.4, .2},
            {1.0, 4.9, 3.0, 1.4, .2},
            {1.0, 4.7, 3.2, 1.3, .2},
            {1.0, 4.6, 3.1, 1.5, .2},
            {1.0, 5.0, 3.6, 1.4, .2},
            {1.0, 5.4, 3.9, 1.7, .4},
            {1.0, 4.6, 3.4, 1.4, .3},
            {1.0, 5.0, 3.4, 1.5, .2},
            {1.0, 4.4, 2.9, 1.4, .2},
            {1.0, 4.9, 3.1, 1.5, .1},
            {1.0, 5.4, 3.7, 1.5, .2},
            {1.0, 4.8, 3.4, 1.6, .2},
            {1.0, 4.8, 3.0, 1.4, .1},
            {1.0, 4.3, 3.0, 1.1, .1},
            {1.0, 5.8, 4.0, 1.2, .2},
            {1.0, 5.7, 4.4, 1.5, .4},
            {1.0, 5.4, 3.9, 1.3, .4},
            {1.0, 5.1, 3.5, 1.4, .3},
            {1.0, 5.7, 3.8, 1.7, .3},
            {1.0, 5.1, 3.8, 1.5, .3},
            {1.0, 5.4, 3.4, 1.7, .2},
            {1.0, 5.1, 3.7, 1.5, .4},
            {1.0, 4.6, 3.6, 1.0, .2},
            {1.0, 5.1, 3.3, 1.7, .5},
            {1.0, 4.8, 3.4, 1.9, .2},
            {1.0, 5.0, 3.0, 1.6, .2},
            {1.0, 5.0, 3.4, 1.6, .4},
            {1.0, 5.2, 3.5, 1.5, .2},
            {1.0, 5.2, 3.4, 1.4, .2},
            {1.0, 4.7, 3.2, 1.6, .2},
            {1.0, 4.8, 3.1, 1.6, .2},
            {1.0, 5.4, 3.4, 1.5, .4},
            {1.0, 5.2, 4.1, 1.5, .1},
            {1.0, 5.5, 4.2, 1.4, .2},
            {1.0, 4.9, 3.1, 1.5, .2},
            {1.0, 5.0, 3.2, 1.2, .2},
            {1.0, 5.5, 3.5, 1.3, .2},
            {1.0, 4.9, 3.6, 1.4, .1},
            {1.0, 4.4, 3.0, 1.3, .2},
            {1.0, 5.1, 3.4, 1.5, .2},
            {1.0, 5.0, 3.5, 1.3, .3},
            {1.0, 4.5, 2.3, 1.3, .3},
            {1.0, 4.4, 3.2, 1.3, .2},
            {1.0, 5.0, 3.5, 1.6, .6},
            {1.0, 5.1, 3.8, 1.9, .4},
            {1.0, 4.8, 3.0, 1.4, .3},
            {1.0, 5.1, 3.8, 1.6, .2},
            {1.0, 4.6, 3.2, 1.4, .2},
            {1.0, 5.3, 3.7, 1.5, .2},
            {1.0, 5.0, 3.3, 1.4, .2},
            // Group 2: Iris versicolor
            {2.0, 7.0, 3.2, 4.7, 1.4},
            {2.0, 6.4, 3.2, 4.5, 1.5},
            {2.0, 6.9, 3.1, 4.9, 1.5},
            {2.0, 5.5, 2.3, 4.0, 1.3},
            {2.0, 6.5, 2.8, 4.6, 1.5},
            {2.0, 5.7, 2.8, 4.5, 1.3},
            {2.0, 6.3, 3.3, 4.7, 1.6},
            {2.0, 4.9, 2.4, 3.3, 1.0},
            {2.0, 6.6, 2.9, 4.6, 1.3},
            {2.0, 5.2, 2.7, 3.9, 1.4},
            {2.0, 5.0, 2.0, 3.5, 1.0},
            {2.0, 5.9, 3.0, 4.2, 1.5},
            {2.0, 6.0, 2.2, 4.0, 1.0},
            {2.0, 6.1, 2.9, 4.7, 1.4},
            {2.0, 5.6, 2.9, 3.6, 1.3},
            {2.0, 6.7, 3.1, 4.4, 1.4},
            {2.0, 5.6, 3.0, 4.5, 1.5},
            {2.0, 5.8, 2.7, 4.1, 1.0},
            {2.0, 6.2, 2.2, 4.5, 1.5},
            {2.0, 5.6, 2.5, 3.9, 1.1},
            {2.0, 5.9, 3.2, 4.8, 1.8},
            {2.0, 6.1, 2.8, 4.0, 1.3},
            {2.0, 6.3, 2.5, 4.9, 1.5},
            {2.0, 6.1, 2.8, 4.7, 1.2},
            {2.0, 6.4, 2.9, 4.3, 1.3},
            {2.0, 6.6, 3.0, 4.4, 1.4},
            {2.0, 6.8, 2.8, 4.8, 1.4},
            {2.0, 6.7, 3.0, 5.0, 1.7},
            {2.0, 6.0, 2.9, 4.5, 1.5},
            {2.0, 5.7, 2.6, 3.5, 1.0},
            {2.0, 5.5, 2.4, 3.8, 1.1},
            {2.0, 5.5, 2.4, 3.7, 1.0},
            {2.0, 5.8, 2.7, 3.9, 1.2},
            {2.0, 6.0, 2.7, 5.1, 1.6},
            {2.0, 5.4, 3.0, 4.5, 1.5},
            {2.0, 6.0, 3.4, 4.5, 1.6},
            {2.0, 6.7, 3.1, 4.7, 1.5},
            {2.0, 6.3, 2.3, 4.4, 1.3},
            {2.0, 5.6, 3.0, 4.1, 1.3},
            {2.0, 5.5, 2.5, 4.0, 1.3},
            {2.0, 5.5, 2.6, 4.4, 1.2},
            {2.0, 6.1, 3.0, 4.6, 1.4},
            {2.0, 5.8, 2.6, 4.0, 1.2},
            {2.0, 5.0, 2.3, 3.3, 1.0},
            {2.0, 5.6, 2.7, 4.2, 1.3},
            {2.0, 5.7, 3.0, 4.2, 1.2},
            {2.0, 5.7, 2.9, 4.2, 1.3},
            {2.0, 6.2, 2.9, 4.3, 1.3},
            {2.0, 5.1, 2.5, 3.0, 1.1},
            {2.0, 5.7, 2.8, 4.1, 1.3},
            // Group 3: Iris virginica
            {3.0, 6.3, 3.3, 6.0, 2.5},
            {3.0, 5.8, 2.7, 5.1, 1.9},
            {3.0, 7.1, 3.0, 5.9, 2.1},
            {3.0, 6.3, 2.9, 5.6, 1.8},
            {3.0, 6.5, 3.0, 5.8, 2.2},
            {3.0, 7.6, 3.0, 6.6, 2.1},
            {3.0, 4.9, 2.5, 4.5, 1.7},
            {3.0, 7.3, 2.9, 6.3, 1.8},
            {3.0, 6.7, 2.5, 5.8, 1.8},
            {3.0, 7.2, 3.6, 6.1, 2.5},
            {3.0, 6.5, 3.2, 5.1, 2.0},
            {3.0, 6.4, 2.7, 5.3, 1.9},
            {3.0, 6.8, 3.0, 5.5, 2.1},
            {3.0, 5.7, 2.5, 5.0, 2.0},
            {3.0, 5.8, 2.8, 5.1, 2.4},
            {3.0, 6.4, 3.2, 5.3, 2.3},
            {3.0, 6.5, 3.0, 5.5, 1.8},
            {3.0, 7.7, 3.8, 6.7, 2.2},
            {3.0, 7.7, 2.6, 6.9, 2.3},
            {3.0, 6.0, 2.2, 5.0, 1.5},
            {3.0, 6.9, 3.2, 5.7, 2.3},
            {3.0, 5.6, 2.8, 4.9, 2.0},
            {3.0, 7.7, 2.8, 6.7, 2.0},
            {3.0, 6.3, 2.7, 4.9, 1.8},
            {3.0, 6.7, 3.3, 5.7, 2.1},
            {3.0, 7.2, 3.2, 6.0, 1.8},
            {3.0, 6.2, 2.8, 4.8, 1.8},
            {3.0, 6.1, 3.0, 4.9, 1.8},
            {3.0, 6.4, 2.8, 5.6, 2.1},
            {3.0, 7.2, 3.0, 5.8, 1.6},
            {3.0, 7.4, 2.8, 6.1, 1.9},
            {3.0, 7.9, 3.8, 6.4, 2.0},
            {3.0, 6.4, 2.8, 5.6, 2.2},
            {3.0, 6.3, 2.8, 5.1, 1.5},
            {3.0, 6.1, 2.6, 5.6, 1.4},
            {3.0, 7.7, 3.0, 6.1, 2.3},
            {3.0, 6.3, 3.4, 5.6, 2.4},
            {3.0, 6.4, 3.1, 5.5, 1.8},
            {3.0, 6.0, 3.0, 4.8, 1.8},
            {3.0, 6.9, 3.1, 5.4, 2.1},
            {3.0, 6.7, 3.1, 5.6, 2.4},
            {3.0, 6.9, 3.1, 5.1, 2.3},
            {3.0, 5.8, 2.7, 5.1, 1.9},
            {3.0, 6.8, 3.2, 5.9, 2.3},
            {3.0, 6.7, 3.3, 5.7, 2.5},
            {3.0, 6.7, 3.0, 5.2, 2.3},
            {3.0, 6.3, 2.5, 5.0, 1.9},
            {3.0, 6.5, 3.0, 5.2, 2.0},
            {3.0, 6.2, 3.4, 5.4, 2.3},
            {3.0, 5.9, 3.0, 5.1, 1.8}
        };
        // update expects the group number in the last column, so rotate
        // the group column from the front of xorig to the end.
        double[][] x = new double[xorig.length][xorig[0].length];
        for (int i = 0; i < xorig.length; i++) {
            for (int j = 1; j < xorig[0].length; j++) {
                x[i][j - 1] = xorig[i][j];
            }
            x[i][4] = xorig[i][0];
        }

        // Linear discrimination: a pooled covariance matrix, with the
        // training observations reclassified to build the class table.
        int nvar = x[0].length - 1;
        DiscriminantAnalysis da = new DiscriminantAnalysis(nvar, 3);
        da.setCovarianceComputation(DiscriminantAnalysis.POOLED);
        da.setClassificationMethod(DiscriminantAnalysis.RECLASSIFICATION);
        da.update(x);

        new PrintMatrix("Xmean are: ").print(da.getMeans());
        new PrintMatrix("Coef: ").print(da.getCoefficients());
        new PrintMatrix("Counts: ").print(da.getGroupCounts());
        new PrintMatrix("Stats: ").print(da.getStatistics());
        new PrintMatrix("ClassMembership: ").print(da.getClassMembership());
        new PrintMatrix("ClassTable: ").print(da.getClassTable());

        double[][][] cov = da.getCovariance();
        for (int i = 0; i < cov.length; i++) {
            new PrintMatrix("Covariance Matrix " + i + " : ").print(cov[i]);
        }

        new PrintMatrix("Prior : ").print(da.getPrior());
        new PrintMatrix("PROB: ").print(da.getProbability());
        new PrintMatrix("MAHALANOBIS: ").print(da.getMahalanobis());
        System.out.println("nrmiss = " + da.getNRowsMissing());
    }
}

Output
Xmean are:
0 1 2 3
0 5.006 3.428 1.462 0.246
1 5.936 2.77 4.26 1.326
2 6.588 2.974 5.552 2.026
Coef:
0 1 2 3 4
0 -86.308 23.544 23.588 -16.431 -17.398
1 -72.853 15.698 7.073 5.211 6.434
2 -104.368 12.446 3.685 12.767 21.079
Counts:
0
0 50
1 50
2 50
Stats:
0
0 147
1 ?
2 ?
3 ?
4 ?
5 ?
6 ?
7 -9.959
8 50
9 50
10 50
11 150
ClassMembership:
0
0 1
1 1
2 1
3 1
4 1
5 1
6 1
7 1
8 1
9 1
10 1
11 1
12 1
13 1
14 1
15 1
16 1
17 1
18 1
19 1
20 1
21 1
22 1
23 1
24 1
25 1
26 1
27 1
28 1
29 1
30 1
31 1
32 1
33 1
34 1
35 1
36 1
37 1
38 1
39 1
40 1
41 1
42 1
43 1
44 1
45 1
46 1
47 1
48 1
49 1
50 2
51 2
52 2
53 2
54 2
55 2
56 2
57 2
58 2
59 2
60 2
61 2
62 2
63 2
64 2
65 2
66 2
67 2
68 2
69 2
70 3
71 2
72 2
73 2
74 2
75 2
76 2
77 2
78 2
79 2
80 2
81 2
82 2
83 3
84 2
85 2
86 2
87 2
88 2
89 2
90 2
91 2
92 2
93 2
94 2
95 2
96 2
97 2
98 2
99 2
100 3
101 3
102 3
103 3
104 3
105 3
106 3
107 3
108 3
109 3
110 3
111 3
112 3
113 3
114 3
115 3
116 3
117 3
118 3
119 3
120 3
121 3
122 3
123 3
124 3
125 3
126 3
127 3
128 3
129 3
130 3
131 3
132 3
133 2
134 3
135 3
136 3
137 3
138 3
139 3
140 3
141 3
142 3
143 3
144 3
145 3
146 3
147 3
148 3
149 3
ClassTable:
0 1 2
0 50 0 0
1 0 48 2
2 0 1 49
Covariance Matrix 0 :
0 1 2 3
0 0.265 0.093 0.168 0.038
1 0.093 0.115 0.055 0.033
2 0.168 0.055 0.185 0.043
3 0.038 0.033 0.043 0.042
Prior :
0
0 0.333
1 0.333
2 0.333
PROB:
0 1 2
0 1 0 0
1 1 0 0
2 1 0 0
3 1 0 0
4 1 0 0
5 1 0 0
6 1 0 0
7 1 0 0
8 1 0 0
9 1 0 0
10 1 0 0
11 1 0 0
12 1 0 0
13 1 0 0
14 1 0 0
15 1 0 0
16 1 0 0
17 1 0 0
18 1 0 0
19 1 0 0
20 1 0 0
21 1 0 0
22 1 0 0
23 1 0 0
24 1 0 0
25 1 0 0
26 1 0 0
27 1 0 0
28 1 0 0
29 1 0 0
30 1 0 0
31 1 0 0
32 1 0 0
33 1 0 0
34 1 0 0
35 1 0 0
36 1 0 0
37 1 0 0
38 1 0 0
39 1 0 0
40 1 0 0
41 1 0 0
42 1 0 0
43 1 0 0
44 1 0 0
45 1 0 0
46 1 0 0
47 1 0 0
48 1 0 0
49 1 0 0
50 0 1 0
51 0 0.999 0.001
52 0 0.996 0.004
53 0 1 0
54 0 0.996 0.004
55 0 0.999 0.001
56 0 0.986 0.014
57 0 1 0
58 0 1 0
59 0 1 0
60 0 1 0
61 0 0.999 0.001
62 0 1 0
63 0 0.994 0.006
64 0 1 0
65 0 1 0
66 0 0.981 0.019
67 0 1 0
68 0 0.96 0.04
69 0 1 0
70 0 0.253 0.747
71 0 1 0
72 0 0.816 0.184
73 0 1 0
74 0 1 0
75 0 1 0
76 0 0.998 0.002
77 0 0.689 0.311
78 0 0.993 0.007
79 0 1 0
80 0 1 0
81 0 1 0
82 0 1 0
83 0 0.143 0.857
84 0 0.964 0.036
85 0 0.994 0.006
86 0 0.998 0.002
87 0 0.999 0.001
88 0 1 0
89 0 1 0
90 0 0.999 0.001
91 0 0.998 0.002
92 0 1 0
93 0 1 0
94 0 1 0
95 0 1 0
96 0 1 0
97 0 1 0
98 0 1 0
99 0 1 0
100 0 0 1
101 0 0.001 0.999
102 0 0 1
103 0 0.001 0.999
104 0 0 1
105 0 0 1
106 0 0.049 0.951
107 0 0 1
108 0 0 1
109 0 0 1
110 0 0.013 0.987
111 0 0.002 0.998
112 0 0 1
113 0 0 1
114 0 0 1
115 0 0 1
116 0 0.006 0.994
117 0 0 1
118 0 0 1
119 0 0.221 0.779
120 0 0 1
121 0 0.001 0.999
122 0 0 1
123 0 0.097 0.903
124 0 0 1
125 0 0.003 0.997
126 0 0.188 0.812
127 0 0.134 0.866
128 0 0 1
129 0 0.104 0.896
130 0 0 1
131 0 0.001 0.999
132 0 0 1
133 0 0.729 0.271
134 0 0.066 0.934
135 0 0 1
136 0 0 1
137 0 0.006 0.994
138 0 0.193 0.807
139 0 0.001 0.999
140 0 0 1
141 0 0 1
142 0 0.001 0.999
143 0 0 1
144 0 0 1
145 0 0 1
146 0 0.006 0.994
147 0 0.003 0.997
148 0 0 1
149 0 0.018 0.982
MAHALANOBIS:
0 1 2
0 0 89.864 179.385
1 89.864 0 17.201
2 179.385 17.201 0
nrmiss = 0
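
A "?" in the Stats listing marks an entry reported as NaN (PrintMatrix prints NaN values as "?"). Each row of the Coef matrix is a group's linear discriminant function: the constant term first, followed by the coefficients of the four variables. With equal priors, an observation is assigned to the group whose function produces the largest score. The following sketch is not part of the library example; it applies the printed coefficients by hand to a made-up observation.

// Minimal sketch (hypothetical observation values): score a new flower
// with the linear discriminant functions printed under "Coef:" above
// and assign it to the group with the largest score.
public class ScoreByHand {

    public static void main(String[] args) {
        // Rows of the "Coef" matrix from the output above:
        // constant term, then the four variable coefficients.
        double[][] coef = {
            {-86.308, 23.544, 23.588, -16.431, -17.398}, // group 1
            {-72.853, 15.698, 7.073, 5.211, 6.434},      // group 2
            {-104.368, 12.446, 3.685, 12.767, 21.079}    // group 3
        };
        // Hypothetical measurements: sepal length and width,
        // petal length and width.
        double[] obs = {5.0, 3.4, 1.5, 0.2};

        int best = 0;
        double bestScore = Double.NEGATIVE_INFINITY;
        for (int k = 0; k < coef.length; k++) {
            double score = coef[k][0];            // constant term
            for (int j = 0; j < obs.length; j++) {
                score += coef[k][j + 1] * obs[j]; // variable terms
            }
            if (score > bestScore) {
                bestScore = score;
                best = k + 1;                     // groups are 1-based
            }
        }
        System.out.println("Assigned to group " + best);
    }
}

For this observation the three scores are roughly 83.5, 38.8, and -6.2, so it is assigned to group 1 (Iris setosa). This agrees with the ClassTable, in which all 50 setosa observations are reclassified correctly, and with the Mahalanobis matrix, which shows group 1 lying far from groups 2 and 3.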