ae_toolbox_torch/run_models.py
Si11ium 744c0c50b7 Done: First VIsualization
ToDo: Visualization for all classes, latent space setups
2019-08-21 07:56:31 +02:00


import os
import time

from networks.auto_encoder import *
from networks.variational_auto_encoder import *
from networks.adverserial_auto_encoder import *
from networks.seperating_adversarial_auto_encoder import *
from networks.modules import LightningModule

from torch.distributions import Normal  # used by the adversarial models below
from torch.nn import BatchNorm1d
from torch.optim import Adam
from torch.utils.data import DataLoader

from pytorch_lightning import Trainer, data_loader
from test_tube import Experiment

from dataset import DataContainer


# ToDo: How to implement this better? (a factory sketch follows the class below)
# other_classes = [AutoEncoder, AutoEncoderLightningOverrides]
class Model(VariationalAutoEncoderLightningOverrides, LightningModule):

    def __init__(self, dataParams: dict):
        super(Model, self).__init__()
        self.dataParams = dataParams
        self.network = VariationalAutoEncoder(self.dataParams)

    def configure_optimizers(self):
        return [Adam(self.parameters(), lr=0.02)]

    @data_loader
    def tng_dataloader(self):
        return DataLoader(DataContainer('data', **self.dataParams), shuffle=True, batch_size=100)
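

# --- Hypothetical sketch, not part of the original file ---------------------
# The ToDo above asks how to avoid one hand-written class per network type.
# One option is a small factory that builds the LightningModule subclass from
# an (overrides, network) pair. `make_model` and `_Model` are illustrative
# names; this assumes every network class takes `dataParams` as its only
# constructor argument, as VariationalAutoEncoder does above.
def make_model(overrides_cls, network_cls):
    class _Model(overrides_cls, LightningModule):
        def __init__(self, dataParams: dict):
            super(_Model, self).__init__()
            self.dataParams = dataParams
            self.network = network_cls(self.dataParams)

        def configure_optimizers(self):
            return [Adam(self.parameters(), lr=0.02)]

        @data_loader
        def tng_dataloader(self):
            return DataLoader(DataContainer('data', **self.dataParams), shuffle=True, batch_size=100)

    return _Model

# Usage sketch: AutoEncoderModel = make_model(AutoEncoderLightningOverrides, AutoEncoder)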


class AdversarialModel(AdversarialAELightningOverrides, LightningModule):

    @property
    def name(self):
        return self.network.name

    def __init__(self, dataParams: dict):
        super(AdversarialModel, self).__init__()
        self.dataParams = dataParams
        self.normal = Normal(0, 1)
        self.network = AdversarialAutoEncoder(self.dataParams)

    # With multiple optimizers, Lightning expects configure_optimizers to
    # return the tuple (optimizer_list, scheduler_list); the trailing empty
    # list is the scheduler list, unused here (a decay sketch follows below).
    def configure_optimizers(self):
        return [Adam(self.network.discriminator.parameters(), lr=0.02),
                Adam([*self.network.encoder.parameters(), *self.network.decoder.parameters()], lr=0.02)], \
               []

    @data_loader
    def tng_dataloader(self):
        return DataLoader(DataContainer('data', **self.dataParams), shuffle=True, batch_size=100)
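

# --- Hypothetical sketch, not part of the original file ---------------------
# If learning-rate decay were wanted, the empty scheduler list above could
# carry one torch.optim.lr_scheduler instance per optimizer to be scheduled.
# `AdversarialModelWithDecay` is an illustrative name, not an original class.
from torch.optim.lr_scheduler import StepLR

class AdversarialModelWithDecay(AdversarialModel):

    def configure_optimizers(self):
        disc_opt = Adam(self.network.discriminator.parameters(), lr=0.02)
        ae_opt = Adam([*self.network.encoder.parameters(), *self.network.decoder.parameters()], lr=0.02)
        # Halve the autoencoder learning rate every five epochs.
        return [disc_opt, ae_opt], [StepLR(ae_opt, step_size=5, gamma=0.5)]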


class SeparatingAdversarialModel(SeparatingAdversarialAELightningOverrides, LightningModule):

    def __init__(self, latent_dim, dataParams: dict):
        super(SeparatingAdversarialModel, self).__init__()
        self.latent_dim = latent_dim
        self.dataParams = dataParams
        self.normal = Normal(0, 1)
        self.network = SeperatingAdversarialAutoEncoder(self.latent_dim, self.dataParams)

    # Same (optimizer_list, scheduler_list) contract as above: one optimizer
    # per adversarial game (spatial, temporal) and one for the reconstruction
    # path; the empty list is the unused scheduler list.
    def configure_optimizers(self):
        return [Adam([*self.network.spatial_discriminator.parameters(),
                      *self.network.spatial_encoder.parameters()], lr=0.02),
                Adam([*self.network.temporal_discriminator.parameters(),
                      *self.network.temporal_encoder.parameters()], lr=0.02),
                Adam([*self.network.temporal_encoder.parameters(),
                      *self.network.spatial_encoder.parameters(),
                      *self.network.decoder.parameters()], lr=0.02)], []

    @data_loader
    def tng_dataloader(self):
        return DataLoader(DataContainer('data', **self.dataParams), shuffle=True, batch_size=100)
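

# --- Hypothetical sketch, not part of the original file ---------------------
# The ModelCheckpoint below monitors 'val_loss', which needs a validation
# loop. Assuming DataContainer can stand in for a held-out split and that the
# overrides log 'val_loss' in their validation step, a mixin like this could
# provide the matching loader. `ValidationLoaderMixin` is an illustrative name.
class ValidationLoaderMixin:

    @data_loader
    def val_dataloader(self):
        return DataLoader(DataContainer('data', **self.dataParams), shuffle=False, batch_size=100)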


if __name__ == '__main__':
    features = 6
    latent_dim = 4
    model = SeparatingAdversarialModel(latent_dim=latent_dim,
                                       dataParams=dict(refresh=False, size=5, step=5,
                                                       features=features,
                                                       transforms=[BatchNorm1d(features)]))

    # test_tube: a PyTorch SummaryWriter with a few bells and whistles
    outpath = os.path.join(os.getcwd(), 'output', model.name, time.asctime().replace(' ', '_').replace(':', '-'))
    os.makedirs(outpath, exist_ok=True)
    exp = Experiment(save_dir=outpath)

    from pytorch_lightning.callbacks import ModelCheckpoint

    checkpoint_callback = ModelCheckpoint(
        filepath=os.path.join(outpath, 'weights.ckpt'),
        save_best_only=True,
        verbose=True,
        monitor='val_loss',
        mode='min',
    )

    trainer = Trainer(experiment=exp, checkpoint_callback=checkpoint_callback, max_nb_epochs=15)  # gpus=[0] to train on a GPU
    trainer.fit(model)
    trainer.save_checkpoint(os.path.join(outpath, 'weights.ckpt'))

    # View the TensorBoard logs:
    print(f'View tensorboard logs by running\ntensorboard --logdir {outpath}')
    print('and going to http://localhost:6006 in your browser')
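
    # --- Hypothetical sketch, not part of the original file -----------------
    # Reloading the saved weights for inference, assuming this Lightning
    # version stores the model weights under the checkpoint's 'state_dict' key.
    import torch
    checkpoint = torch.load(os.path.join(outpath, 'weights.ckpt'), map_location='cpu')
    model.load_state_dict(checkpoint['state_dict'])
    model.eval()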