import torch
from torch import nn
from torch.optim import Adam

from ml_lib.modules.blocks import ConvModule
from ml_lib.modules.utils import LightningBaseModule


class BinaryClassifier(LightningBaseModule):
    """Binary classifier Lightning module built from three chained ConvModules.

    Hyperparameters are read from ``hparams``:
      - ``hparams.model_params.in_shape``: input shape fed to the first conv.
      - ``hparams.model_params``: extra kwargs forwarded to every ConvModule.
      - ``hparams.train.lr``: Adam learning rate.

    NOTE(review): ``forward`` is currently an identity stub — the conv layers
    built in ``__init__`` are never applied. Left as-is to preserve behavior;
    presumably the real forward pass is still to be written — confirm.
    """

    @classmethod
    def name(cls):
        """Return the class name (used as a model identifier)."""
        return cls.__name__

    def __init__(self, hparams):
        super(BinaryClassifier, self).__init__(hparams)
        # Binary cross-entropy: expects sigmoid-activated outputs in [0, 1].
        self.criterion = nn.BCELoss()

        # Additional parameters
        self.in_shape = self.hparams.model_params.in_shape

        # Model Modules — each conv consumes the output shape of the previous.
        self.conv_1 = ConvModule(self.in_shape, 32, 5, conv_stride=4,
                                 **hparams.model_params)
        self.conv_2 = ConvModule(self.conv_1.shape, 64, 7, conv_stride=2,
                                 **hparams.model_params)
        # BUG FIX: was chained off conv_1's shape; conv_3 follows conv_2.
        self.conv_3 = ConvModule(self.conv_2.shape, 128, 9, conv_stride=2,
                                 **hparams.model_params)

    def configure_optimizers(self):
        """Build the Adam optimizer over this module's parameters.

        BUG FIX: ``Adam`` requires the iterable of parameters to optimize as
        its first argument; the original call omitted it and would raise.
        """
        return Adam(self.parameters(), lr=self.hparams.train.lr)

    def training_step(self, batch_xy, batch_nb, *args, **kwargs):
        """One training step: forward pass on inputs, BCE loss against labels."""
        batch_x, batch_y = batch_xy
        # BUG FIX: forward pass must run on the inputs, not the labels.
        y = self(batch_x)
        loss = self.criterion(y, batch_y)
        return dict(loss=loss)

    def validation_step(self, batch_xy, *args, **kwargs):
        """One validation step; mirrors training_step but reports val_loss."""
        batch_x, batch_y = batch_xy
        # BUG FIX: forward pass must run on the inputs, not the labels.
        # *args added so a positionally-passed batch index is accepted.
        y = self(batch_x)
        val_loss = self.criterion(y, batch_y)
        return dict(val_loss=val_loss)

    def validation_epoch_end(self, outputs):
        """Average the per-step validation losses for epoch-level logging."""
        overall_val_loss = torch.mean(
            torch.stack([output['val_loss'] for output in outputs])
        )
        return dict(log=dict(mean_val_loss=overall_val_loss))

    def forward(self, batch, **kwargs):
        # NOTE(review): identity pass-through — conv_1..conv_3 are unused here.
        # Preserved as-is; wiring up the conv stack would change behavior.
        return batch