bug in metric calculation

Steffen Illium
2021-03-27 16:39:07 +01:00
parent 6816e423ff
commit 1d1b154460
5 changed files with 38 additions and 15 deletions


@@ -5,6 +5,7 @@ from sklearn.ensemble import IsolationForest
from sklearn.metrics import recall_score, roc_auc_score, average_precision_score
from ml_lib.metrics._base_score import _BaseScores
from ml_lib.utils.tools import to_one_hot
class BinaryScores(_BaseScores):
@@ -17,16 +18,27 @@ class BinaryScores(_BaseScores):
# Additional Score like the unweighted Average Recall:
#########################
# INIT
if isinstance(outputs['batch_y'], torch.Tensor):
    y_true = outputs['batch_y'].cpu().numpy()
else:
    y_true = torch.cat([output['batch_y'] for output in outputs]).cpu().numpy()
if isinstance(outputs['y'], torch.Tensor):
    y_pred = outputs['y'].cpu().numpy()
else:
    y_pred = torch.cat([output['y'] for output in outputs]).squeeze().cpu().float().numpy()
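The branching above accepts outputs either as a single dict of already-aggregated tensors or as a list of per-step dicts, as a Lightning-style epoch-end hook would collect them. A minimal, self-contained sketch of that normalization, using hypothetical step outputs and a hypothetical gather() helper (not part of this repo):

import torch

# Hypothetical per-step outputs; the keys 'batch_y' (targets) and 'y'
# (predictions) mirror the ones used in the diff.
step_outputs = [
    {'batch_y': torch.tensor([0, 1]), 'y': torch.tensor([[0.2], [0.8]])},
    {'batch_y': torch.tensor([1, 0]), 'y': torch.tensor([[0.7], [0.1]])},
]

def gather(outputs, key):
    # Aggregated dict: the tensor can be converted directly.
    if isinstance(outputs, dict) and isinstance(outputs[key], torch.Tensor):
        return outputs[key].cpu().numpy()
    # List of per-step dicts: concatenate along the batch dimension first.
    return torch.cat([step[key] for step in outputs]).squeeze().cpu().numpy()

y_true = gather(step_outputs, 'batch_y')  # array([0, 1, 1, 0])
y_pred = gather(step_outputs, 'y')        # array([0.2, 0.8, 0.7, 0.1], dtype=float32)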
# UnweightedAverageRecall
y_true = torch.cat([output['batch_y'] for output in outputs]).cpu().numpy()
y_pred = torch.cat([output['element_wise_recon_error'] for output in outputs]).squeeze().cpu().numpy()
# y_true = torch.cat([output['batch_y'] for output in outputs]).cpu().numpy()
# y_pred = torch.cat([output['element_wise_recon_error'] for output in outputs]).squeeze().cpu().numpy()
# How to apply a threshold manually
# y_pred = (y_pred >= 0.5).astype(np.float32)
# How to apply a threshold by IF (Isolation Forest)
clf = IsolationForest(random_state=self.model.seed)
y_score = clf.fit_predict(y_pred.reshape(-1,1))
clf = IsolationForest()
y_score = clf.fit_predict(y_pred.reshape(-1, 1))
y_score = (np.asarray(y_score) == -1).astype(np.float32)
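fit_predict labels each sample 1 (inlier) or -1 (outlier), so the comparison above maps outliers to the positive class 1.0. A minimal, self-contained sketch with synthetic reconstruction errors (hypothetical data; the fixed random_state stands in for self.model.seed and keeps the fitted forest, and therefore the derived labels, reproducible):

import numpy as np
from sklearn.ensemble import IsolationForest

# Synthetic element-wise reconstruction errors: mostly small, a few large.
rng = np.random.default_rng(0)
errors = np.concatenate([rng.normal(0.1, 0.02, 95), rng.normal(0.9, 0.05, 5)])

clf = IsolationForest(random_state=42)        # seed for reproducible labels
raw = clf.fit_predict(errors.reshape(-1, 1))  # 1 = inlier, -1 = outlier

y_score = (raw == -1).astype(np.float32)      # outliers become class 1.0
print(int(y_score.sum()))                     # roughly the 5 planted outliers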
uar_score = recall_score(y_true, y_score, labels=[0, 1], average='macro',
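With average='macro', recall_score computes recall per class and averages the per-class values with equal weight, which is exactly the unweighted average recall (UAR). A small worked example with hypothetical labels:

import numpy as np
from sklearn.metrics import recall_score

y_true  = np.array([0, 0, 0, 1, 1])
y_score = np.array([0, 1, 0, 1, 0])

# recall(class 0) = 2/3, recall(class 1) = 1/2
# UAR = (2/3 + 1/2) / 2 = 0.5833...
uar = recall_score(y_true, y_score, labels=[0, 1], average='macro')
print(round(uar, 4))  # 0.5833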