data shapes retrieval and hparam passing

steffen
2020-04-17 18:07:04 +02:00
parent f51d73ce6d
commit f607324606
3 changed files with 15 additions and 8 deletions


@@ -27,7 +27,10 @@ class BinaryClassifier(LightningBaseModule):
        return dict(val_loss=val_loss)

    def validation_epoch_end(self, outputs):
-        over_all_val_loss = torch.mean(torch.stack([output['val_loss'] for output in outputs]))
+        overall_val_loss = torch.mean(torch.stack([output['val_loss'] for output in outputs]))
+        return dict(log=dict(
+            mean_val_loss=overall_val_loss)
+        )

    def __init__(self, hparams):
        super(BinaryClassifier, self).__init__(hparams)
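
Here `outputs` is the list of per-batch dicts returned by `validation_step`; stacking the `val_loss` tensors and taking their mean gives the epoch-level metric, and returning it under the `log` key hands it to the logger (the convention of the Lightning API of this period). A minimal standalone sketch of that aggregation, with made-up loss values:

    import torch

    # Hypothetical per-batch results, shaped like the dicts validation_step returns.
    outputs = [dict(val_loss=torch.tensor(v)) for v in (0.61, 0.48, 0.55)]

    # Same aggregation as in validation_epoch_end above.
    overall_val_loss = torch.mean(torch.stack([o['val_loss'] for o in outputs]))
    print(overall_val_loss)  # tensor(0.5467)
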
@@ -35,14 +38,12 @@ class BinaryClassifier(LightningBaseModule):
        self.criterion = nn.BCELoss()

        # Additional parameters
-        self.in_shape = ()
-        #
+        self.in_shape = self.hparams.model_params.in_shape

        # Model Modules
-        self.conv_1 = ConvModule(self.in_shape, 32, 5, )
-        self.conv_2 = ConvModule(64)
-        self.conv_3 = ConvModule(128)
+        self.conv_1 = ConvModule(self.in_shape, 32, 5, conv_stride=4, **hparams.model_params)
+        self.conv_2 = ConvModule(self.conv_1.shape, 64, 7, conv_stride=2, **hparams.model_params)
+        self.conv_3 = ConvModule(self.conv_1.shape, 128, 9, conv_stride=2, **hparams.model_params)

    def forward(self, batch, **kwargs):
        return batch
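
The change relies on each ConvModule exposing the shape of its own output so the next block can be built from it ("data shapes retrieval"), and on forwarding the remaining hyperparameters via `**hparams.model_params` ("hparam passing"). The ConvModule implementation is not part of this commit; the following is a minimal sketch of that interface under those assumptions, with the `conv_padding` keyword and the example input shape invented for illustration:

    import torch
    from torch import nn

    class ConvModule(nn.Module):
        # Hypothetical sketch of the ConvModule interface used above (not the real one).
        # It stores the shape of its own output so downstream blocks can be constructed
        # from `previous_block.shape`, and it accepts unused hyperparameters arriving
        # via **kwargs (e.g. **hparams.model_params).
        def __init__(self, in_shape, filters, kernel_size, conv_stride=1, **kwargs):
            super().__init__()
            in_channels, h, w = in_shape
            padding = kwargs.get('conv_padding', 0)  # assumed parameter name
            self.conv = nn.Conv2d(in_channels, filters, kernel_size,
                                  stride=conv_stride, padding=padding)
            self.activation = nn.ReLU()
            # Retrieve the output shape once with a dummy forward pass.
            with torch.no_grad():
                out = self.conv(torch.zeros(1, *in_shape))
            self.shape = tuple(out.shape[1:])  # (channels, height, width)

        def forward(self, x):
            return self.activation(self.conv(x))

    # Chained construction, mirroring __init__ above (input shape is made up):
    conv_1 = ConvModule((3, 64, 64), 32, 5, conv_stride=4)
    conv_2 = ConvModule(conv_1.shape, 64, 7, conv_stride=2)
    print(conv_1.shape, conv_2.shape)  # (32, 15, 15) (64, 5, 5)

Note that the committed code still builds `conv_3` from `self.conv_1.shape` rather than `self.conv_2.shape`; under this reading that only yields the intended input shape when the two happen to coincide.
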