From f1a8d8184ba65d855af9903565fb93ff13552bf5 Mon Sep 17 00:00:00 2001
From: Si11ium
Date: Mon, 4 Mar 2019 08:39:44 +0100
Subject: [PATCH] Path Corrections for Multi-OS Support

---
 code/experiment.py | 39 ++++++++++++++++++++++-----------------
 code/network.py    |  2 --
 code/test.py       | 38 +++++++++++++++++++-------------------
 3 files changed, 41 insertions(+), 38 deletions(-)

diff --git a/code/experiment.py b/code/experiment.py
index 4fe6cc9..b8c2a4b 100644
--- a/code/experiment.py
+++ b/code/experiment.py
@@ -1,9 +1,9 @@
 import sys
 import os
 import time
-import copy
 import dill
+
 
 
 class Experiment:
     @staticmethod
@@ -11,22 +11,24 @@ class Experiment:
         with open(path, "rb") as dill_file:
             return dill.load(dill_file)
 
-    def __init__(self, name=None, id=None):
-        self.experiment_id = id or time.time()
-        this_file = os.path.realpath(os.getcwd() + "/" + sys.argv[0])
-        self.experiment_name = name or os.path.basename(this_file)
-        self.base_dir = os.path.realpath((os.path.dirname(this_file) + "/..")) + "/"
+    def __init__(self, name=None, ident=None):
+        self.experiment_id = ident or time.time()
+        # TODO: repair this path
+        this_file = os.path.realpath(os.getcwd())
+        # What were you trying to do here? The previous number made no sense.
+        self.experiment_name = name or os.path.basename(this_file).split('.')[0]
+        self.base_dir = os.path.join(os.getcwd(), self.experiment_name)
         self.next_iteration = 0
         self.log_messages = []
-        self.initialize_more()
-
-    def initialize_more(self):
-        pass
 
     def __enter__(self):
-        self.dir = self.base_dir + "experiments/exp-" + str(self.experiment_name) + "-" + str(self.experiment_id) + "-" + str(self.next_iteration) + "/"
-        os.mkdir(self.dir)
-        print("** created " + str(self.dir))
+        self.dir = os.path.join(self.base_dir, 'experiments', 'exp-{name}-{id}-{it}'.format(
+            name=self.experiment_name, id=self.experiment_id, it=self.next_iteration)
+        )
+        # Use makedirs for subfolder creation
+        os.makedirs(self.dir)
+        # os.mkdir(self.dir)
+        print("** created {dir} **".format(dir=self.dir))
         return self
 
     def __exit__(self, exc_type, exc_value, traceback):
@@ -39,27 +41,30 @@ class Experiment:
         print(message, **kwargs)
 
     def save_log(self, log_name="log"):
-        with open(self.dir + "/" + str(log_name) + ".txt", "w") as log_file:
+        with open(os.path.join(self.dir, "{name}.txt".format(name=log_name)), "w") as log_file:
             for log_message in self.log_messages:
                 print(str(log_message), file=log_file)
 
     def save(self, **kwargs):
-        for name,value in kwargs.items():
-            with open(self.dir + "/" + str(name) + ".dill", "wb") as dill_file:
+        for name, value in kwargs.items():
+            with open(os.path.join(self.dir, "{name}.dill".format(name=name)), "wb") as dill_file:
                 dill.dump(value, dill_file)


 class FixpointExperiment(Experiment):

-    def initialize_more(self):
+    def __init__(self):
+        super().__init__()
         self.counters = dict(divergent=0, fix_zero=0, fix_other=0, fix_sec=0, other=0)
         self.interesting_fixpoints = []
+
     def run_net(self, net, step_limit=100):
         i = 0
         while i < step_limit and not net.is_diverged() and not net.is_fixpoint():
             net.self_attack()
             i += 1
         self.count(net)
+
     def count(self, net):
         if net.is_diverged():
             self.counters['divergent'] += 1
diff --git a/code/network.py b/code/network.py
index 221bf9c..688597d 100644
--- a/code/network.py
+++ b/code/network.py
@@ -314,7 +314,6 @@ class RecurrentNeuralNetwork(NeuralNetwork):
         return new_weights


-
 if __name__ == '__main__':
     with FixpointExperiment() as exp:
         for run_id in tqdm(range(100)):
@@ -324,4 +323,3 @@ if __name__ == '__main__':
             # net.print_weights()
             exp.run_net(net, 100)
             exp.log(exp.counters)
-
diff --git a/code/test.py b/code/test.py
index ec2fc4e..115fe6d 100644
--- a/code/test.py
+++ b/code/test.py
@@ -10,23 +10,23 @@ def vary(e=0.0, f=0.0):
         np.array([[1.0+e], [0.0+f]], dtype=np.float32)
     ]

-net = WeightwiseNeuralNetwork(width=2, depth=2).with_keras_params(activation='sigmoid')
-if False:
-    net.set_weights([
-        np.array([[1.0, 0.0], [0.0, 0.0], [0.0, 0.0], [0.0, 0.0]], dtype=np.float32),
-        np.array([[1.0, 0.0], [0.0, 0.0]], dtype=np.float32),
-        np.array([[1.0], [0.0]], dtype=np.float32)
-    ])
-    print(net.get_weights())
-    net.self_attack(100)
-    print(net.get_weights())
-    print(net.is_fixpoint())
-if True:
-    net.set_weights(vary(0.01, 0.0))
-    print(net.get_weights())
-    for _ in range(5):
-        net.self_attack()
+if __name__ == '__main__':
+
+    net = WeightwiseNeuralNetwork(width=2, depth=2).with_keras_params(activation='sigmoid')
+    if False:
+        net.set_weights([
+            np.array([[1.0, 0.0], [0.0, 0.0], [0.0, 0.0], [0.0, 0.0]], dtype=np.float32),
+            np.array([[1.0, 0.0], [0.0, 0.0]], dtype=np.float32),
+            np.array([[1.0], [0.0]], dtype=np.float32)
+        ])
     print(net.get_weights())
-    print(net.is_fixpoint())
-    
-    
\ No newline at end of file
+        net.self_attack(100)
+        print(net.get_weights())
+        print(net.is_fixpoint())
+    if True:
+        net.set_weights(vary(0.01, 0.0))
+        print(net.get_weights())
+        for _ in range(5):
+            net.self_attack()
+        print(net.get_weights())
+        print(net.is_fixpoint())
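
Below the patch itself, a minimal sketch (not part of the commit) of the path handling the patch introduces: it mirrors the os.path.join plus os.makedirs logic of the patched Experiment.__enter__ so the resulting directory layout can be checked on both Windows and POSIX. The demo_run_dir helper and the 'toy' experiment name are illustrative only and do not exist in the repository.

import os
import time

def demo_run_dir(name='toy', ident=None, iteration=0):
    # Mirrors the patched Experiment.__enter__: join path parts with
    # os.path.join so the separator is correct on Windows and POSIX.
    base_dir = os.path.join(os.getcwd(), name)
    run_dir = os.path.join(base_dir, 'experiments', 'exp-{name}-{id}-{it}'.format(
        name=name, id=ident or time.time(), it=iteration))
    # os.makedirs also creates the intermediate 'experiments' folder,
    # which plain os.mkdir would not.
    os.makedirs(run_dir)
    return run_dir

if __name__ == '__main__':
    print(demo_run_dir())

Running it twice with the same ident and iteration makes os.makedirs raise FileExistsError, which is why the patched __init__ falls back to a time.time() based experiment_id to keep run directories unique.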