# ae_toolbox_torch/run_models.py
from torch.distributions import Normal
from torch.cuda import is_available
import time
import os
from argparse import Namespace
from argparse import ArgumentParser
from distutils.util import strtobool
from networks.auto_encoder import AutoEncoder, AutoEncoder_LO
from networks.variational_auto_encoder import VariationalAE, VAE_LO
from networks.adverserial_auto_encoder import AdversarialAE_LO, AdversarialAE
from networks.seperating_adversarial_auto_encoder import SeperatingAAE, SeparatingAAE_LO
from networks.modules import LightningModule
from pytorch_lightning import Trainer
from test_tube import Experiment

# Command-line options shared by all model variants below.
args = ArgumentParser()
args.add_argument('--step', type=int, default=5)
args.add_argument('--features', type=int, default=6)
args.add_argument('--size', type=int, default=9)
args.add_argument('--latent_dim', type=int, default=2)
args.add_argument('--model', type=str, default='AE_Model')
args.add_argument('--refresh', type=strtobool, default=False)
args.add_argument('--future_predictions', type=strtobool, default=False)
args.add_argument('--use_norm', type=strtobool, default=True)
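# The boolean flags go through distutils.strtobool, so spellings such as
# "true"/"false", "yes"/"no" or "1"/"0" are accepted on the command line.
# The value of --model must match the name of one of the model classes defined
# below, because the __main__ block resolves it via globals().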


class AE_Model(AutoEncoder_LO, LightningModule):
    """Plain auto-encoder model."""

    def __init__(self, parameters):
        assert all(x in parameters for x in ['step', 'size', 'latent_dim', 'features'])
        self.size = parameters.size
        self.latent_dim = parameters.latent_dim
        self.features = parameters.features
        self.step = parameters.step
        super(AE_Model, self).__init__(train_on_predictions=parameters.future_predictions)
        self.network = AutoEncoder(self.latent_dim, self.features, use_norm=parameters.use_norm)


class VAE_Model(VAE_LO, LightningModule):
    """Variational auto-encoder model."""

    def __init__(self, parameters):
        assert all(x in parameters for x in ['step', 'size', 'latent_dim', 'features'])
        self.size = parameters.size
        self.latent_dim = parameters.latent_dim
        self.features = parameters.features
        self.step = parameters.step
        super(VAE_Model, self).__init__(train_on_predictions=parameters.future_predictions)
        self.network = VariationalAE(self.latent_dim, self.features, use_norm=parameters.use_norm)


class AAE_Model(AdversarialAE_LO, LightningModule):
    """Adversarial auto-encoder model."""

    def __init__(self, parameters: Namespace):
        assert all(x in parameters for x in ['step', 'size', 'latent_dim', 'features'])
        self.size = parameters.size
        self.latent_dim = parameters.latent_dim
        self.features = parameters.features
        self.step = parameters.step
        super(AAE_Model, self).__init__(train_on_predictions=parameters.future_predictions)
        self.normal = Normal(0, 1)  # standard normal distribution N(0, 1)
        self.network = AdversarialAE(self.latent_dim, self.features, use_norm=parameters.use_norm)


class SAAE_Model(SeparatingAAE_LO, LightningModule):
    """Separating adversarial auto-encoder model."""

    def __init__(self, parameters: Namespace):
        assert all(x in parameters for x in ['step', 'size', 'latent_dim', 'features'])
        self.size = parameters.size
        self.latent_dim = parameters.latent_dim
        self.features = parameters.features
        self.step = parameters.step
        super(SAAE_Model, self).__init__(train_on_predictions=parameters.future_predictions)
        self.normal = Normal(0, 1)  # standard normal distribution N(0, 1)
        self.network = SeperatingAAE(self.latent_dim, self.features, use_norm=parameters.use_norm)


if __name__ == '__main__':
    arguments = args.parse_args()
    # Look up the requested model class by name and instantiate it.
    model = globals()[arguments.model](arguments)

    # test_tube Experiment: a PyTorch SummaryWriter with a few bells and whistles.
    outpath = os.path.join(os.getcwd(), 'output', model.name, time.asctime().replace(' ', '_').replace(':', '-'))
    os.makedirs(outpath, exist_ok=True)
    exp = Experiment(save_dir=outpath)
    exp.tag(tag_dict=arguments.__dict__)

    from pytorch_lightning.callbacks import ModelCheckpoint
    checkpoint_callback = ModelCheckpoint(
        filepath=os.path.join(outpath, 'weights'),
        save_best_only=False,
        verbose=True,
        period=4
    )

    trainer = Trainer(experiment=exp,
                      max_nb_epochs=60,
                      gpus=[0] if is_available() else None,
                      row_log_interval=1000,
                      checkpoint_callback=checkpoint_callback
                      )
    trainer.fit(model)
    trainer.save_checkpoint(os.path.join(outpath, 'weights.ckpt'))

    # View the TensorBoard logs:
    print(f'View tensorboard logs by running\ntensorboard --logdir {outpath}')
    print('and going to http://localhost:6006 in your browser')
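
# Example invocation (flag values chosen only for illustration, not taken from the original file):
#   python run_models.py --model VAE_Model --latent_dim 2 --features 6 --use_norm true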