demo_classify

Load the data and set up the training and test datasets

from numpy import *
from iaOPF import iafit, iapredict, iaconfmtx
from scikits.learn import datasets, metrics
from libopf_py import OPF

def load_iris():
    # load the Iris dataset and shuffle the samples
    iris = datasets.load_iris()
    feats = iris.data
    labels = iris.target
    rnd = random.permutation(arange(labels.shape[0]))
    return feats[rnd], labels[rnd]


def euclidian(X, Y):
    # squared Euclidean distance between two feature vectors
    E = X - Y
    return dot(E, E)

feats, labels = load_iris()
# merge class 2 into class 0 so the problem becomes binary (classes 0 and 1)
for i in arange(labels.shape[0]):
    if labels[i] == 2:
        labels[i] = 0


ntrain = 0.5  # fraction of the data used for training
n = int(feats.shape[0] * ntrain)  # slice indices must be integers
feats_train = feats[:n, :]
labels_train = labels[:n]
feats_test = feats[n:, :]
labels_test = labels[n:]
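
Because the split depends on the random permutation done inside load_iris, it can be worth confirming that both classes appear in each half. A minimal sanity check with plain numpy (the counts will vary from run to run):

from numpy import bincount

print 'training class counts:', bincount(labels_train)
print 'test class counts:    ', bincount(labels_test)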

iafit

opf_pc, _, opf_labels = iafit(feats_train, labels_train, euclidian)

iapredict

predicted = iapredict(feats_test, feats_train, opf_pc, opf_labels, euclidian)
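
Before building the confusion matrix, the prediction vector can be compared against the ground-truth labels directly. A minimal sketch using only numpy, assuming iapredict returns an array-like of class labels:

from numpy import sum, asarray

pred = asarray(predicted)  # iapredict is assumed to return an array-like of class labels
correct = sum(pred == labels_test)
print 'correct predictions: %d out of %d' % (correct, labels_test.shape[0])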

Confusion Matrix

CM = iaconfmtx(labels_test, predicted)
print 'Confusion Matrix'
print CM
# class 0 is taken as the positive class
TP = CM[0,0]  # true positives
TN = CM[1,1]  # true negatives
FN = CM[1,0]  # false negatives
FP = CM[0,1]  # false positives
Confusion Matrix
[[ 50.   2.]
 [  1.  22.]]
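
As a cross-check of iaconfmtx, the same matrix can be accumulated directly with numpy. The helper below, confmtx, is a hypothetical sketch and assumes the convention of true labels along the rows and predicted labels along the columns:

from numpy import zeros, asarray

def confmtx(true_labels, pred_labels, nclasses=2):
    # rows: true label, columns: predicted label (assumed convention)
    M = zeros((nclasses, nclasses))
    for t, p in zip(asarray(true_labels), asarray(pred_labels)):
        M[int(t), int(p)] += 1
    return M

print confmtx(labels_test, predicted)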

Classification Metrics

from scikits.learn.metrics import *

print 'Accuracy'
print (TP+TN)/sum(sum(CM))
print 'Precision'
precision = TP/(TP+FP)  # precision_score(labels_test, predicted)
print precision
print 'Recall'
recall = TP/(TP+FN)  # recall_score(labels_test, predicted)
print recall
print 'f1 Score'
print 2 * (precision * recall) / (precision + recall)  # f1_score(labels_test, predicted)
Accuracy
0.96
Precision
0.961538461538
Recall
0.980392156863
f1 Score
0.970873786408
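
The scikits.learn helpers referenced in the comments above can be used to cross-check the hand-computed values. Note that precision_score, recall_score, and f1_score follow the library's own choice of positive label, which may differ from treating class 0 as positive as done above, so the numbers are not guaranteed to match exactly:

from scikits.learn.metrics import precision_score, recall_score, f1_score

print 'precision_score:', precision_score(labels_test, predicted)
print 'recall_score:   ', recall_score(labels_test, predicted)
print 'f1_score:       ', f1_score(labels_test, predicted)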

Equations