data shapes retrieval and hparam passing

This commit is contained in:
steffen
2020-04-17 18:07:04 +02:00
parent f51d73ce6d
commit f607324606
3 changed files with 15 additions and 8 deletions

View File

@@ -15,6 +15,10 @@ class BinaryMasksDataset(Dataset):
_to_label['mask'] = V.MASK
settings = ['test', 'devel', 'train']
@property
def sample_shape(self):
    """Shape of one data sample, read off the first item in the dataset."""
    first_item = self[0]
    sample, _ = first_item[0], first_item[1]
    return sample.shape
def __init__(self, data_root, setting, transforms=None):
assert isinstance(setting, str), f'Setting has to be a string, but was: {self.settings}.'
assert setting in self.settings, f'Setting must match one of: {self.settings}.'

View File

@@ -108,7 +108,9 @@ def run_lightning_loop(config_obj):
# Model
# =============================================================================
# Build and Init its Weights
model: LightningBaseModule = config_obj.build_and_init_model(weight_init_function=torch.nn.init.xavier_normal_)
config_obj.set('model', 'in_shape', str(tuple(train_dataset.sample_shape)))
model: LightningBaseModule = config_obj.build_and_init_model(weight_init_function=torch.nn.init.xavier_normal_
)
# Trainer
# =============================================================================

View File

@@ -27,7 +27,10 @@ class BinaryClassifier(LightningBaseModule):
return dict(val_loss=val_loss)
def validation_epoch_end(self, outputs):
    """Aggregate per-batch validation losses at the end of a validation epoch.

    Args:
        outputs: List of dicts produced by `validation_step`, each holding a
            scalar tensor under the key 'val_loss'.

    Returns:
        Dict with a 'log' sub-dict carrying the mean validation loss, as
        expected by the Lightning logging convention.
    """
    # Stack the per-batch scalar losses into one tensor and take the mean.
    # (The dead duplicate assignment to `over_all_val_loss` — leftover diff
    # residue of a rename — is removed.)
    overall_val_loss = torch.mean(torch.stack([output['val_loss'] for output in outputs]))
    return dict(log=dict(
        mean_val_loss=overall_val_loss)
    )
def __init__(self, hparams):
super(BinaryClassifier, self).__init__(hparams)
@@ -35,14 +38,12 @@ class BinaryClassifier(LightningBaseModule):
self.criterion = nn.BCELoss()
# Additional parameters
self.in_shape = ()
#
self.in_shape = self.hparams.model_params.in_shape
# Model Modules
self.conv_1 = ConvModule(self.in_shape, 32, 5, )
self.conv_2 = ConvModule(64)
self.conv_3 = ConvModule(128)
self.conv_1 = ConvModule(self.in_shape, 32, 5, conv_stride=4, **hparams.model_params)
self.conv_2 = ConvModule(self.conv_1.shape, 64, 7, conv_stride=2, **hparams.model_params)
self.conv_3 = ConvModule(self.conv_1.shape, 128, 9, conv_stride=2, **hparams.model_params)
def forward(self, batch, **kwargs):
    """Identity forward pass: returns the input batch unchanged.

    NOTE(review): this looks like a placeholder — the conv modules built in
    `__init__` are never applied here; confirm whether the real forward pass
    is still to be implemented.
    """
    return batch