evaluation_metrics.py
import numpy as np
from sklearn import metrics
from sklearn.metrics import confusion_matrix
def prec_rec_f1_acc_mcc(y_true, y_pred):
    """Return precision, recall, F1, accuracy, MCC and the confusion-matrix
    counts (TP/FP/TN/FN) for binary labels, keyed by score name."""
    performance_threshold_dict = dict()

    ### ADDED on 28 July 2020 by YIP YEW MUN ###
    # Convert each element to a native Python scalar via .item() so that
    # scikit-learn receives plain lists rather than tensor/array elements.
    y_true = [each_y_true.item() for each_y_true in y_true]
    y_pred = [each_y_pred.item() for each_y_pred in y_pred]
    ### ADDED on 28 July 2020 by YIP YEW MUN ###

    precision = metrics.precision_score(y_true, y_pred)
    recall = metrics.recall_score(y_true, y_pred)
    f1_score = metrics.f1_score(y_true, y_pred)
    accuracy = metrics.accuracy_score(y_true, y_pred)
    mcc = metrics.matthews_corrcoef(y_true, y_pred)
    tn, fp, fn, tp = confusion_matrix(y_true, y_pred).ravel()

    performance_threshold_dict["Precision"] = precision
    performance_threshold_dict["Recall"] = recall
    performance_threshold_dict["F1-Score"] = f1_score
    performance_threshold_dict["Accuracy"] = accuracy
    performance_threshold_dict["MCC"] = mcc
    performance_threshold_dict["TP"] = tp
    performance_threshold_dict["FP"] = fp
    performance_threshold_dict["TN"] = tn
    performance_threshold_dict["FN"] = fn
    return performance_threshold_dict
def get_list_of_scores():
    """Return the score names used as keys by prec_rec_f1_acc_mcc()."""
    return ["Precision", "Recall", "F1-Score", "Accuracy", "MCC", "TP", "FP", "TN", "FN"]