Grid Clusters.
@@ -15,6 +15,7 @@ import matplotlib.pyplot as plt
 from torch import nn
 from torch.optim import Adam
 from torch.utils.data import DataLoader
+from torch_geometric.data import Data
 from torchcontrib.optim import SWA
 from torchvision.transforms import Compose
 
@@ -61,11 +62,11 @@ class BaseTrainMixin:
     # Batch To Data
     batch_to_data = BatchToData()
 
-    def training_step(self, batch_pos_x_y, batch_nb, *_, **__):
+    def training_step(self, batch_pos_x_n_y_c, batch_nb, *_, **__):
         assert isinstance(self, LightningBaseModule)
-        data = self.batch_to_data(*batch_pos_x_y)
+        data = self.batch_to_data(*batch_pos_x_n_y_c) if not isinstance(batch_pos_x_n_y_c, Data) else batch_pos_x_n_y_c
         y = self(data).main_out
-        nll_loss = self.nll_loss(y, data.y)
+        nll_loss = self.nll_loss(y, data.yl)
         return dict(loss=nll_loss, log=dict(batch_nb=batch_nb))
 
     def training_epoch_end(self, outputs):
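The new guard lets training_step accept either the raw tuple a DataLoader yields (positions, features, normals, labels, cluster ids, going by the batch_pos_x_n_y_c name) or an already assembled torch_geometric Data object, in which case the BatchToData conversion is skipped. BatchToData itself is not part of this diff; the following is only a minimal sketch of the kind of tuple-to-Data packing the guard assumes, and every attribute name besides pos and x is an assumption, not the project's actual schema.

from torch_geometric.data import Data

def tuple_to_data(pos, x, norm, y, cluster):
    # Hypothetical stand-in for a BatchToData-style call (the real class is
    # not shown in this diff). It packs the per-point tensors into one Data
    # object so the model and losses can read them as attributes, e.g.
    # data.pos or data.yl -- the attribute the mixins now feed to nll_loss.
    return Data(pos=pos, x=x, norm=norm, yl=y, cluster=cluster)

With that shape, data = tuple_to_data(*batch) handles the tuple case, while loaders that already emit Data objects pass straight through the isinstance check.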
@@ -86,14 +87,16 @@ class BaseValMixin:
     nll_loss = nn.NLLLoss()
+    # Binary Cross Entropy
+    bce_loss = nn.BCELoss()
     # Batch To Data
     batch_to_data = BatchToData()
 
-    def validation_step(self, batch_pos_x_y, batch_idx, *_, **__):
+    def validation_step(self, batch_pos_x_n_y_c, batch_idx, *_, **__):
         assert isinstance(self, LightningBaseModule)
-        data = self.batch_to_data(*batch_pos_x_y)
+        data = self.batch_to_data(*batch_pos_x_n_y_c) if not isinstance(batch_pos_x_n_y_c, Data) else batch_pos_x_n_y_c
         y = self(data).main_out
-        nll_loss = self.nll_loss(y, data.y)
+        nll_loss = self.nll_loss(y, data.yl)
         return dict(val_nll_loss=nll_loss,
-                    batch_idx=batch_idx, y=y, batch_y=data.y)
+                    batch_idx=batch_idx, y=y, batch_y=data.yl)
 
     def validation_epoch_end(self, outputs, *_, **__):
         assert isinstance(self, LightningBaseModule)
@@ -114,12 +117,12 @@ class BaseValMixin:
         #
         # INIT
         y_true = torch.cat([output['batch_y'] for output in outputs]).cpu().numpy()
-        y_true_one_hot = to_one_hot(y_true)
+        y_true_one_hot = to_one_hot(y_true, self.n_classes)
 
         y_pred = torch.cat([output['y'] for output in outputs]).squeeze().cpu().numpy()
         y_pred_max = np.argmax(y_pred, axis=1)
 
-        class_names = {val: key for key, val in GlobalVar.classes.__dict__().items()}
+        class_names = {val: key for key, val in GlobalVar.classes.items()}
         ######################################################################################
         #
         # F1 SCORE
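Passing self.n_classes into to_one_hot pins the width of the one-hot matrix. The earlier to_one_hot(y_true) had to infer the number of classes from whichever labels happened to appear in the epoch's outputs, so a class missing from validation would shrink the matrix and misalign it with the (n_samples, n_classes) score array used for ROC below. The project's helper is not shown in this diff; a minimal sketch of the assumed behaviour, with the signature taken from the call site:

import numpy as np

def to_one_hot(labels, n_classes=None):
    # Sketch only -- the real to_one_hot is not part of this diff. An explicit
    # n_classes keeps the output width fixed even when some classes never
    # occur in `labels`; without it the width is inferred from the data.
    labels = np.asarray(labels, dtype=int)
    if n_classes is None:
        n_classes = int(labels.max()) + 1
    return np.eye(n_classes, dtype=np.float32)[labels]

The class_names change in the same hunk swaps the GlobalVar.classes.__dict__() call for a plain .items() iteration, presumably because classes is now an ordinary mapping rather than a namespace-style object.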
@@ -167,7 +170,7 @@ class BaseValMixin:
                  color='deeppink', linestyle=':', linewidth=4)
 
         plt.plot(fpr["macro"], tpr["macro"],
-                 label=f'macro ROC({round(roc_auc["macro"], 2)})]',
+                 label=f'macro ROC({round(roc_auc["macro"], 2)})',
                  color='navy', linestyle=':', linewidth=4)
 
         colors = cycle(['firebrick', 'orangered', 'gold', 'olive', 'limegreen', 'aqua',
@@ -190,25 +193,32 @@ class BaseValMixin:
         #
         # ROC SCORE
 
-        macro_roc_auc_ovr = roc_auc_score(y_true_one_hot, y_pred, multi_class="ovr",
-                                          average="macro")
-        summary_dict['log'].update(macro_roc_auc_ovr=macro_roc_auc_ovr)
+        try:
+            macro_roc_auc_ovr = roc_auc_score(y_true_one_hot, y_pred, multi_class="ovr",
+                                              average="macro")
+            summary_dict['log'].update(macro_roc_auc_ovr=macro_roc_auc_ovr)
+        except ValueError:
+            micro_roc_auc_ovr = roc_auc_score(y_true_one_hot, y_pred, multi_class="ovr",
+                                              average="micro")
+            summary_dict['log'].update(micro_roc_auc_ovr=micro_roc_auc_ovr)
 
         #######################################################################################
         #
         # Confusion matrix
 
-        cm = confusion_matrix(y_true, y_pred_max, labels=[class_name for class_name in class_names], normalize='all')
+        cm = confusion_matrix([class_names[x] for x in y_true], [class_names[x] for x in y_pred_max],
+                              labels=[class_names[key] for key in class_names.keys()],
+                              normalize='all')
         disp = ConfusionMatrixDisplay(confusion_matrix=cm)
         disp.plot(include_values=True)
-        self.logger.log_image('Confusion Matrix', image=plt.gcf(), step=self.current_epoch)
+        self.logger.log_image('Confusion Matrix', image=disp.figure_, step=self.current_epoch)
 
         return summary_dict
 
 
 class DatasetMixin:
 
-    def build_dataset(self, dataset_class):
+    def build_dataset(self, dataset_class, **kwargs):
         assert isinstance(self, LightningBaseModule)
 
         # Dataset
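Wrapping the ROC-AUC computation in try/except covers epochs in which some class never occurs in y_true: its one-hot column is then constant, the per-class AUC is undefined, and scikit-learn raises ValueError for average='macro'. Micro averaging pools all label/score pairs into a single binary problem, so it still produces a number. An illustrative, standalone example with synthetic arrays (not the project's data):

import numpy as np
from sklearn.metrics import roc_auc_score

# Class 2 never appears in y_true, so its indicator column is constant and the
# macro average is undefined; the micro average still works.
y_true_one_hot = np.array([[1, 0, 0], [0, 1, 0], [1, 0, 0], [0, 1, 0]])
y_pred = np.array([[0.7, 0.2, 0.1], [0.1, 0.8, 0.1], [0.6, 0.3, 0.1], [0.2, 0.7, 0.1]])

try:
    score = roc_auc_score(y_true_one_hot, y_pred, multi_class="ovr", average="macro")
except ValueError:
    score = roc_auc_score(y_true_one_hot, y_pred, multi_class="ovr", average="micro")

The confusion-matrix change follows the same idea: mapping y_true and y_pred_max through class_names and passing the full labels list keeps the matrix at a fixed size and class order even when classes are absent from an epoch, and logging disp.figure_ instead of plt.gcf() ties the logged image to the figure ConfusionMatrixDisplay actually drew into rather than whichever figure happens to be current.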
@@ -221,17 +231,16 @@ class DatasetMixin:
         dataset = Namespace(
             **dict(
                 # TRAIN DATASET
-                train_dataset=dataset_class(self.params.root, setting=GlobalVar.data_split.train,
-                                            transforms=transforms
-                                            ),
+                train_dataset=dataset_class(self.params.root, split=GlobalVar.data_split.train,
+                                            transforms=transforms, **kwargs),
 
                 # VALIDATION DATASET
-                val_dataset=dataset_class(self.params.root, setting=GlobalVar.data_split.devel,
-                                          ),
+                val_dataset=dataset_class(self.params.root, split=GlobalVar.data_split.devel,
+                                          **kwargs),
 
                 # TEST DATASET
-                test_dataset=dataset_class(self.params.root, setting=GlobalVar.data_split.test,
-                                           ),
+                test_dataset=dataset_class(self.params.root, split=GlobalVar.data_split.test,
+                                           **kwargs),
             )
         )
         return dataset
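build_dataset now takes **kwargs and forwards them to every split, and the split selector is passed as split= instead of setting=, so dataset-specific options can be configured once at the call site. A small self-contained sketch of the forwarding pattern; DummyDataset, the refresh flag, and the example root path are placeholders, not the project's API:

from argparse import Namespace

class DummyDataset:
    # Placeholder dataset that just records what it was constructed with.
    def __init__(self, root, split, transforms=None, **options):
        self.root, self.split, self.transforms, self.options = root, split, transforms, options

def build_dataset(dataset_class, root, transforms=None, **kwargs):
    # Same forwarding idea as the mixin: every split receives the caller's kwargs.
    splits = Namespace(train='train', devel='devel', test='test')
    return Namespace(
        train_dataset=dataset_class(root, split=splits.train, transforms=transforms, **kwargs),
        val_dataset=dataset_class(root, split=splits.devel, **kwargs),
        test_dataset=dataset_class(root, split=splits.test, **kwargs),
    )

datasets = build_dataset(DummyDataset, '/data/clouds', refresh=True)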