Transformer running

Steffen Illium
2021-03-04 12:01:08 +01:00
parent b5e3e5aec1
commit f89f0f8528
14 changed files with 349 additions and 80 deletions


@@ -2,7 +2,9 @@ from itertools import cycle
 import numpy as np
 import torch
-from sklearn.metrics import f1_score, roc_curve, auc, roc_auc_score, ConfusionMatrixDisplay, confusion_matrix
+from pytorch_lightning.metrics import Recall
+from sklearn.metrics import f1_score, roc_curve, auc, roc_auc_score, ConfusionMatrixDisplay, confusion_matrix, \
+    recall_score
 from ml_lib.metrics._base_score import _BaseScores
 from ml_lib.utils.tools import to_one_hot
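`to_one_hot` comes from the project-internal `ml_lib.utils.tools`, so its implementation isn't shown in this diff; a minimal sketch of what such a helper typically does, assuming integer labels in and an `(N, n_classes)` float matrix out (name and signature are guesses, not the library's actual code):

```python
import numpy as np

def to_one_hot(labels, n_classes):
    # Hypothetical stand-in for ml_lib.utils.tools.to_one_hot:
    # maps an array of integer class labels to an (N, n_classes) one-hot matrix.
    one_hot = np.zeros((len(labels), n_classes), dtype=np.float32)
    one_hot[np.arange(len(labels)), labels] = 1.0
    return one_hot

print(to_one_hot(np.array([0, 2, 1]), 3))
# [[1. 0. 0.]
#  [0. 0. 1.]
#  [0. 1. 0.]]
```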
@@ -16,20 +18,21 @@ class MultiClassScores(_BaseScores):
         super(MultiClassScores, self).__init__(*args)
         pass

-    def __call__(self, outputs):
+    def __call__(self, outputs, class_names=None):
         summary_dict = dict()
+        class_names = class_names or range(self.model.params.n_classes)
         #######################################################################################
         # Additional Score - UAR - ROC - Conf. Matrix - F1
         #######################################################################################
         #
         # INIT
         y_true = torch.cat([output['batch_y'] for output in outputs]).cpu().numpy()
-        y_true_one_hot = to_one_hot(y_true, self.model.n_classes)
+        y_true_one_hot = to_one_hot(y_true, self.model.params.n_classes)
         y_pred = torch.cat([output['y'] for output in outputs]).squeeze().cpu().float().numpy()
         y_pred_max = np.argmax(y_pred, axis=1)
-        class_names = {val: key for key, val in self.model.dataset.test_dataset.classes.items()}
+        class_names = {val: key for val, key in enumerate(class_names)}
         ######################################################################################
         #
         # F1 SCORE
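The reworked `class_names` handling decouples the scorer from `self.model.dataset`: a caller may pass display names, otherwise bare class indices are used. A small sketch of the two fallback paths (the names and `n_classes` value here are made up for illustration):

```python
# Minimal sketch of the new class_names fallback, with a made-up n_classes of 3.
n_classes = 3

class_names = None
class_names = class_names or range(n_classes)
print({val: key for val, key in enumerate(class_names)})
# {0: 0, 1: 1, 2: 2} -- bare indices when no names are passed

class_names = ['cat', 'dog', 'bird']
print({val: key for val, key in enumerate(class_names)})
# {0: 'cat', 1: 'dog', 2: 'bird'} -- index -> display name otherwise
```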
@@ -38,7 +41,12 @@ class MultiClassScores(_BaseScores):
         macro_f1_score = f1_score(y_true, y_pred_max, labels=None, pos_label=1, average='macro', sample_weight=None,
                                   zero_division=True)
         summary_dict.update(dict(micro_f1_score=micro_f1_score, macro_f1_score=macro_f1_score))
+        ######################################################################################
+        #
+        # Unweighted Average Recall
+        uar = recall_score(y_true, y_pred_max, labels=[0, 1, 2, 3, 4], average='macro',
+                           sample_weight=None, zero_division='warn')
+        summary_dict.update(dict(uar_score=uar))
         #######################################################################################
         #
         # ROC Curve
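The new `uar_score` is simply macro-averaged recall: every class contributes its own recall with equal weight, independent of class support (note the hardcoded `labels=[0, 1, 2, 3, 4]` assumes exactly five classes). A toy check against `sklearn.metrics.recall_score`:

```python
import numpy as np
from sklearn.metrics import recall_score

# UAR (unweighted average recall) == macro-averaged recall:
# mean of the per-class recalls, ignoring how many samples each class has.
y_true = np.array([0, 0, 0, 1, 2])
y_pred = np.array([0, 0, 1, 1, 0])

per_class = [np.mean(y_pred[y_true == c] == c) for c in (0, 1, 2)]
print(np.mean(per_class))                             # 0.555... (2/3 + 1 + 0) / 3
print(recall_score(y_true, y_pred, average='macro'))  # 0.555... same value
```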
@@ -47,7 +55,7 @@ class MultiClassScores(_BaseScores):
         fpr = dict()
         tpr = dict()
         roc_auc = dict()
-        for i in range(self.model.n_classes):
+        for i in range(self.model.params.n_classes):
             fpr[i], tpr[i], _ = roc_curve(y_true_one_hot[:, i], y_pred[:, i])
             roc_auc[i] = auc(fpr[i], tpr[i])
@@ -56,15 +64,15 @@ class MultiClassScores(_BaseScores):
         roc_auc["micro"] = auc(fpr["micro"], tpr["micro"])

         # First aggregate all false positive rates
-        all_fpr = np.unique(np.concatenate([fpr[i] for i in range(self.model.n_classes)]))
+        all_fpr = np.unique(np.concatenate([fpr[i] for i in range(self.model.params.n_classes)]))

         # Then interpolate all ROC curves at these points
         mean_tpr = np.zeros_like(all_fpr)
-        for i in range(self.model.n_classes):
+        for i in range(self.model.params.n_classes):
             mean_tpr += np.interp(all_fpr, fpr[i], tpr[i])

         # Finally average it and compute AUC
-        mean_tpr /= self.model.n_classes
+        mean_tpr /= self.model.params.n_classes
         fpr["macro"] = all_fpr
         tpr["macro"] = mean_tpr
@@ -83,7 +91,7 @@ class MultiClassScores(_BaseScores):
         colors = cycle(['firebrick', 'orangered', 'gold', 'olive', 'limegreen', 'aqua',
                         'dodgerblue', 'slategrey', 'royalblue', 'indigo', 'fuchsia'], )
-        for i, color in zip(range(self.model.n_classes), colors):
+        for i, color in zip(range(self.model.params.n_classes), colors):
             plt.plot(fpr[i], tpr[i], color=color, lw=2, label=f'{class_names[i]} ({round(roc_auc[i], 2)})')

         plt.plot([0, 1], [0, 1], 'k--', lw=2)
@@ -116,9 +124,9 @@ class MultiClassScores(_BaseScores):
         fig1, ax1 = plt.subplots(dpi=96)
         cm = confusion_matrix([class_names[x] for x in y_true], [class_names[x] for x in y_pred_max],
                               labels=[class_names[key] for key in class_names.keys()],
-                              normalize='all')
+                              normalize='true')
         disp = ConfusionMatrixDisplay(confusion_matrix=cm,
-                                      display_labels=[class_names[i] for i in range(self.model.n_classes)]
+                                      display_labels=[class_names[i] for i in range(self.model.params.n_classes)]
                                       )
         disp.plot(include_values=True, ax=ax1)
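The switch from `normalize='all'` to `normalize='true'` changes the denominator: `'all'` divides each cell by the total sample count, while `'true'` divides each row by its true-class support, so every row sums to 1 and minority classes stay readable. A quick comparison on toy labels:

```python
from sklearn.metrics import confusion_matrix

# 'all' divides by the total sample count (5 here);
# 'true' divides each row by that class's number of true samples.
y_true = [0, 0, 0, 0, 1]
y_pred = [0, 0, 0, 1, 1]

print(confusion_matrix(y_true, y_pred, normalize='all'))
# [[0.6 0.2]
#  [0.  0.2]]
print(confusion_matrix(y_true, y_pred, normalize='true'))
# [[0.75 0.25]
#  [0.   1.  ]]  -- rows sum to 1, per-class accuracy on the diagonal
```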