import inspect
from argparse import Namespace
from torch import nn
from ml_lib.modules.blocks import LinearModule
from ml_lib.modules.model_parts import CNNEncoder
from ml_lib.modules.util import LightningBaseModule
from util.module_mixins import CombinedModelMixins


class CNNBaseline(CombinedModelMixins, LightningBaseModule):

    def __init__(self, in_shape, n_classes, weight_init, activation,
                 use_bias, use_norm, dropout, lat_dim, filters,
                 lr, weight_decay, sto_weight_avg, lr_warm_restart_epochs, opt_reset_interval,
                 loss, scheduler, lr_scheduler_parameter
                 ):
        # TODO: Move this to the parent class, or make it much easier to access...
        # Collect the constructor arguments by name and hand them to the base class as hparams.
        a = dict(locals())
        params = {arg: a[arg] for arg in inspect.signature(self.__init__).parameters.keys() if arg != 'self'}
        super(CNNBaseline, self).__init__(params)

        # Model
        # =============================================================================
        # Additional parameters
        self.in_shape = in_shape
        assert len(self.in_shape) == 3, 'in_shape must have exactly three dimensions: (channels, height, width)'
        channels, height, width = self.in_shape

        # Modules with parameters
        self.encoder = CNNEncoder(in_shape=self.in_shape, **self.params.module_kwargs)

        # Decide between binary and multi-class classification:
        # binary tasks get a single sigmoid logit, multi-class tasks get one softmax logit per class.
        logits = n_classes if n_classes > 2 else 1
        module_kwargs = self.params.module_kwargs
        module_kwargs.update(activation=(nn.Softmax if logits > 1 else nn.Sigmoid))
        self.classifier = LinearModule(self.encoder.shape, logits, **module_kwargs)

    def forward(self, x, mask=None, return_attn_weights=False):
        """
        :param x: the input batch for the encoder (required).
        :param mask: mask for the input (optional, unused by this model).
        :param return_attn_weights: kept for interface compatibility (unused by this model).
        :return: a Namespace holding the classifier output as `main_out`.
        """
        tensor = self.encoder(x)
        tensor = self.classifier(tensor)
        return Namespace(main_out=tensor)
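

if __name__ == '__main__':
    # Minimal usage sketch (not part of the original file). The hyperparameter values below are
    # illustrative assumptions only; the exact kwargs consumed by CNNEncoder / LinearModule depend
    # on how LightningBaseModule builds `self.params.module_kwargs` in the ml_lib version in use.
    import torch

    model = CNNBaseline(
        in_shape=(1, 128, 128), n_classes=2, weight_init='xavier_normal_', activation='leaky_relu',
        use_bias=True, use_norm=True, dropout=0.2, lat_dim=32, filters=[16, 32, 64],
        lr=1e-3, weight_decay=1e-5, sto_weight_avg=False, lr_warm_restart_epochs=0,
        opt_reset_interval=0, loss='binary_cross_entropy', scheduler='LambdaLR',
        lr_scheduler_parameter=0.97,
    )
    out = model(torch.randn(4, 1, 128, 128))  # forward returns a Namespace; logits live in `out.main_out`
    print(out.main_out.shape)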