-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathperformance_evaluation.R
103 lines (90 loc) · 3.19 KB
/
performance_evaluation.R
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
# -------------------------- Performance Evaluation ---------------------------
# Libraries ----
# Install only when missing, so re-running the script does not trigger a
# network download every time (the original called install.packages()
# unconditionally).
for (pkg in c("ROCR", "pROC")) {
  if (!requireNamespace(pkg, quietly = TRUE)) {
    install.packages(pkg)
  }
  # character.only = TRUE is required because `pkg` is a string, not a symbol.
  library(pkg, character.only = TRUE)
}
# NOTE(review): confusionMatrix() used below comes from caret, which is not
# loaded here — presumably loaded by an earlier script in the project; confirm.
# Utils ----

# opt.cut: find the optimal ROC cut-off by the closest-to-(0, 1) criterion.
#
# Args:
#   perf: a ROCR performance object (e.g. from performance(pred, "tpr", "fpr"));
#         @x.values holds FPR vectors, @y.values holds TPR vectors.
#   pred: the matching ROCR prediction object; @cutoffs holds the thresholds.
#
# Returns: a matrix (one column per x/y/cutoff set) with rows
#   sensitivity, specificity, cutoff at the ROC point nearest (FPR=0, TPR=1).
opt.cut <- function(perf, pred) {
  mapply(FUN = function(fpr, tpr, cut) {
    # Squared Euclidean distance from each ROC point to the ideal corner
    # (FPR = 0, TPR = 1).
    d <- (fpr - 0)^2 + (tpr - 1)^2
    # which.min() always returns a single index; the original
    # which(d == min(d)) could return several indices on ties, making the
    # [[ ]] extraction below fail.
    best <- which.min(d)
    c(
      sensitivity = tpr[[best]],
      specificity = 1 - fpr[[best]],
      cutoff = cut[[best]]
    )
  }, perf@x.values, perf@y.values, pred@cutoffs)
}
# Performance ----
# Print full confusion-matrix statistics (mode = "everything") for every
# fitted classifier, treating class "1" as the positive class. The prediction
# objects (svm.pred, Prediction, nb_test.predicted) are created by earlier
# scripts in this project.
model.predictions <- list(
  "Support Vector Machine" = svm.pred,
  "Decision Tree" = Prediction,
  "Naive Bayes" = nb_test.predicted
)
for (model.name in names(model.predictions)) {
  print(model.name)
  print(confusionMatrix(
    model.predictions[[model.name]], testset$is_safe,
    mode = "everything", positive = "1"
  ))
}
# ROC ~ AUC & cut-off ----
## SVM ----
# Re-predict on the feature columns with probability = TRUE so that
# attr(pred, "probabilities") is populated (the SVM must have been trained
# with probability = TRUE for this to work).
pred <- predict(svm.model, testset[, !names(testset) %in% c("is_safe")], probability = TRUE)
pred.prob <- attr(pred, "probabilities")
# NOTE(review): column 2 is assumed to be the probability of the positive
# class ("1"); e1071 orders probability columns by factor level — confirm
# with colnames(pred.prob).
pred.to.roc <- pred.prob[, 2]
pred.rocr <- prediction(pred.to.roc, testset$is_safe)
perf.rocr <- performance(pred.rocr, measure = "auc", x.measure = "cutoff")
perf.tpr.rocr <- performance(pred.rocr, "tpr", "fpr")
## Plot: ROC curve with AUC in the title ----
# colorize = TRUE (not the reassignable shorthand T) colours the curve by
# cut-off value.
plot(perf.tpr.rocr, colorize = TRUE, main = paste("Support Vector Machine ROC Curve \n AUC:", (perf.rocr@y.values)))
abline(a = 0, b = 1)  # chance-level diagonal for reference
## Cut-off ----
# Closest-to-(0, 1) optimal operating point.
print(opt.cut(perf.tpr.rocr, pred.rocr))
# Accuracy as a function of cut-off.
acc.perf <- performance(pred.rocr, measure = "acc")
### Plot ----
plot(acc.perf)
### Values: cut-off that maximises accuracy ----
ind <- which.max(slot(acc.perf, "y.values")[[1]])
acc <- slot(acc.perf, "y.values")[[1]][ind]
cutoff <- slot(acc.perf, "x.values")[[1]][ind]
print(c(accuracy = acc, cutoff = cutoff))
## Decision Tree ----
### Probability prediction ----
# NOTE(review): testset[1:20] selects the first 20 COLUMNS of the data frame,
# not rows — presumably the predictor columns, excluding is_safe; confirm
# against the dataset layout (the SVM section above excludes is_safe by name).
tree_prob <- predict(prunedDecisionTree, newdata = testset[1:20], type = "prob")
# Column 2 is assumed to be the probability of the positive class ("1").
tree_pred <- prediction(tree_prob[, 2], testset$is_safe)
### Performance ----
tree_perf <- performance(tree_pred, measure = "auc", x.measure = "cutoff")
tree_perf.tpr <- performance(tree_pred, "tpr", "fpr")
## Plot: ROC curve with AUC in the title ----
# colorize = TRUE (not the reassignable shorthand T) colours the curve by
# cut-off value.
plot(tree_perf.tpr, colorize = TRUE, main = paste("Decision Tree ROC Curve \n AUC:", (tree_perf@y.values)))
abline(a = 0, b = 1)  # chance-level diagonal for reference
## Cut-off ----
# Closest-to-(0, 1) optimal operating point.
print(opt.cut(tree_perf.tpr, tree_pred))
# Accuracy as a function of cut-off.
acc.perf <- performance(tree_pred, measure = "acc")
### Plot ----
plot(acc.perf)
### Values: cut-off that maximises accuracy ----
ind <- which.max(slot(acc.perf, "y.values")[[1]])
acc <- slot(acc.perf, "y.values")[[1]][ind]
cutoff <- slot(acc.perf, "x.values")[[1]][ind]
print(c(accuracy = acc, cutoff = cutoff))
## Naive Bayes ----
### Probability prediction ----
# NOTE(review): testset[1:20] selects the first 20 COLUMNS of the data frame,
# not rows — presumably the predictor columns, excluding is_safe; confirm
# against the dataset layout (the SVM section above excludes is_safe by name).
nb_prob <- predict(nb_classifier, testset[1:20], type = "prob")
# Column 2 is assumed to be the probability of the positive class ("1").
nb_pred <- prediction(nb_prob[, 2], testset$is_safe)
### Performance ----
nb_perf <- performance(nb_pred, measure = "auc", x.measure = "cutoff")
nb_perf.tpr <- performance(nb_pred, "tpr", "fpr")
## Plot: ROC curve with AUC in the title ----
# colorize = TRUE (not the reassignable shorthand T) colours the curve by
# cut-off value.
plot(nb_perf.tpr, colorize = TRUE, main = paste("Naive Bayes ROC Curve \n AUC:", (nb_perf@y.values)))
abline(a = 0, b = 1)  # chance-level diagonal for reference
## Cut-off ----
# Closest-to-(0, 1) optimal operating point.
print(opt.cut(nb_perf.tpr, nb_pred))
# Accuracy as a function of cut-off.
acc.perf <- performance(nb_pred, measure = "acc")
### Plot ----
plot(acc.perf)
### Values: cut-off that maximises accuracy ----
ind <- which.max(slot(acc.perf, "y.values")[[1]])
acc <- slot(acc.perf, "y.values")[[1]][ind]
cutoff <- slot(acc.perf, "x.values")[[1]][ind]
print(c(accuracy = acc, cutoff = cutoff))