Refactor:

Step 1 - Introduction of a Weights object for global weight operations
Step 2 - Cleanup
Step 3 - Reworked Weightwise network updates in clean numpy code
This commit is contained in:
Si11ium 2019-06-06 21:57:22 +02:00
parent f3987cdbb5
commit 50f7f84084
14 changed files with 193 additions and 865 deletions

View File

@ -1,96 +0,0 @@
import os
from experiment import Experiment
# noinspection PyUnresolvedReferences
from soup import Soup
from typing import List
from collections import defaultdict
from argparse import ArgumentParser
import numpy as np
import plotly as pl
import plotly.graph_objs as go
import colorlover as cl
import dill
def build_args():
arg_parser = ArgumentParser()
arg_parser.add_argument('-i', '--in_file', nargs=1, type=str)
arg_parser.add_argument('-o', '--out_file', nargs='?', default='out', type=str)
return arg_parser.parse_args()
def plot_bars(names_bars_tuple, filename='histogram_plot'):
# categorical
ryb = cl.scales['10']['div']['RdYlBu']
names, bars = names_bars_tuple
situations = list(bars[0].keys())
names = ['Weightwise', 'Aggregating', 'Recurrent'] # [name.split(' ')[0] for name in names]
data_dict = {}
for idx, name in enumerate(names):
data_dict[name] = bars[idx]
data = []
for idx, situation in enumerate(situations):
bar = go.Bar(
y=[data_dict[name][situation] for name in names],
# x=[key for key in data_dict[name].keys()],
x=names,
name=situation,
showlegend=True,
)
data.append(bar)
layout = dict(xaxis=dict(title="Networks", titlefont=dict(size=20)),
barmode='stack',
# height=400, width=400,
# margin=dict(l=20, r=20, t=20, b=20)
legend=dict(orientation="h", x=0.05)
)
fig = go.Figure(data=data, layout=layout)
pl.offline.plot(fig, auto_open=True, filename=filename)
pass
def search_and_apply(absolut_file_or_folder, plotting_function, files_to_look_for=[]):
if os.path.isdir(absolut_file_or_folder):
for sub_file_or_folder in os.scandir(absolut_file_or_folder):
search_and_apply(sub_file_or_folder.path, plotting_function, files_to_look_for=files_to_look_for)
elif absolut_file_or_folder.endswith('.dill'):
file_or_folder = os.path.split(absolut_file_or_folder)[-1]
if file_or_folder in files_to_look_for and not os.path.exists('{}.html'.format(file_or_folder[:-5])):
print('Apply Plotting function "{func}" on file "{file}"'.format(func=plotting_function.__name__,
file=absolut_file_or_folder)
)
with open(absolut_file_or_folder, 'rb') as in_f:
bars = dill.load(in_f)
names_dill_location = os.path.join(*os.path.split(absolut_file_or_folder)[:-1], 'all_names.dill')
with open(names_dill_location, 'rb') as in_f:
names = dill.load(in_f)
plotting_function((names, bars), filename='{}.html'.format(absolut_file_or_folder[:-5]))
else:
pass
# This was not a file I should look for.
else:
# This was either another file type, or the plot .html already exists.
pass
if __name__ == '__main__':
args = build_args()
in_file = args.in_file[0]
out_file = args.out_file
search_and_apply(in_file, plot_bars, files_to_look_for=['all_counters.dill'])
# , 'all_names.dill', 'all_notable_nets.dill'])

View File

@ -1,129 +0,0 @@
import os
from experiment import Experiment
# noinspection PyUnresolvedReferences
from soup import Soup
from typing import List
from collections import defaultdict
from argparse import ArgumentParser
import numpy as np
import plotly as pl
import plotly.graph_objs as go
import colorlover as cl
import dill
def build_args():
arg_parser = ArgumentParser()
arg_parser.add_argument('-i', '--in_file', nargs=1, type=str)
arg_parser.add_argument('-o', '--out_file', nargs='?', default='out', type=str)
return arg_parser.parse_args()
def plot_box(exp: Experiment, filename='histogram_plot'):
# categorical
ryb = cl.scales['10']['div']['RdYlBu']
data = []
for d in range(exp.depth):
names = ['D 10e-{}'.format(d)] * exp.trials
data.extend(names)
trace_list = []
vergence_box = go.Box(
y=exp.ys,
x=data,
name='Time to Vergence',
boxpoints=False,
showlegend=True,
marker=dict(
color=ryb[3]
),
)
fixpoint_box = go.Box(
y=exp.zs,
x=data,
name='Time as Fixpoint',
boxpoints=False,
showlegend=True,
marker=dict(
color=ryb[-1]
),
)
trace_list.extend([vergence_box, fixpoint_box])
layout = dict(title='{}'.format('Known Fixpoint Variation'),
titlefont=dict(size=30),
legend=dict(
orientation="h",
x=.1, y=-0.1,
font=dict(
size=20,
color='black'
),
),
boxmode='group',
boxgap=0,
# barmode='group',
bargap=0,
xaxis=dict(showgrid=False,
zeroline=True,
tickangle=0,
showticklabels=True),
yaxis=dict(
title='Steps',
zeroline=False,
titlefont=dict(
size=30
)
),
# height=400, width=400,
margin=dict(t=50)
)
fig = go.Figure(data=trace_list, layout=layout)
pl.offline.plot(fig, auto_open=True, filename=filename)
pass
def search_and_apply(absolut_file_or_folder, plotting_function, files_to_look_for=[]):
if os.path.isdir(absolut_file_or_folder):
for sub_file_or_folder in os.scandir(absolut_file_or_folder):
search_and_apply(sub_file_or_folder.path, plotting_function, files_to_look_for=files_to_look_for)
elif absolut_file_or_folder.endswith('.dill'):
file_or_folder = os.path.split(absolut_file_or_folder)[-1]
if file_or_folder in files_to_look_for and not os.path.exists('{}.html'.format(file_or_folder[:-5])):
print('Apply Plotting function "{func}" on file "{file}"'.format(func=plotting_function.__name__,
file=absolut_file_or_folder)
)
with open(absolut_file_or_folder, 'rb') as in_f:
exp = dill.load(in_f)
try:
plotting_function(exp, filename='{}.html'.format(absolut_file_or_folder[:-5]))
except AttributeError:
pass
else:
pass
# This was not a file I should look for.
else:
# This was either another file type, or the plot .html already exists.
pass
if __name__ == '__main__':
args = build_args()
in_file = args.in_file[0]
out_file = args.out_file
search_and_apply(in_file, plot_box, files_to_look_for=['experiment.dill'])
# , 'all_names.dill', 'all_notable_nets.dill'])

View File

@ -1,118 +0,0 @@
import os
from experiment import Experiment
# noinspection PyUnresolvedReferences
from soup import Soup
from argparse import ArgumentParser
import numpy as np
import plotly as pl
import plotly.graph_objs as go
import colorlover as cl
import dill
from sklearn.manifold.t_sne import TSNE, PCA
def build_args():
arg_parser = ArgumentParser()
arg_parser.add_argument('-i', '--in_file', nargs=1, type=str)
arg_parser.add_argument('-o', '--out_file', nargs='?', default='out', type=str)
return arg_parser.parse_args()
def line_plot(names_exp_tuple, filename='lineplot'):
names, line_dict_list = names_exp_tuple
names = ['Weightwise', 'Aggregating', 'Recurrent']
if False:
data = []
base_scale = cl.scales['10']['div']['RdYlGn']
scale = cl.interp(base_scale, len(line_dict_list) + 1) # Map color scale to N bins
for ld_id, line_dict in enumerate(line_dict_list):
for data_point in ['ys', 'zs']:
trace = go.Scatter(
x=line_dict['xs'],
y=line_dict[data_point],
name='{} {}zero-fixpoints'.format(names[ld_id], 'non-' if data_point == 'zs' else ''),
line=dict(
# color=scale[ld_id],
width=5,
# dash='dash' if data_point == 'ys' else ''
),
)
data.append(trace)
if True:
data = []
base_scale = cl.scales['10']['div']['RdYlGn']
scale = cl.interp(base_scale, len(line_dict_list) + 1) # Map color scale to N bins
for ld_id, line_dict in enumerate(line_dict_list):
trace = go.Scatter(
x=line_dict['xs'],
y=line_dict['ys'],
name=names[ld_id],
line=dict( # color=scale[ld_id],
width=5
),
)
data.append(trace)
layout = dict(xaxis=dict(title='Trains per self-application', titlefont=dict(size=20)),
yaxis=dict(title='Average amount of fixpoints found',
titlefont=dict(size=20),
# type='log',
# range=[0, 2]
),
legend=dict(orientation='h', x=0.3, y=-0.3),
# height=800, width=800,
margin=dict(b=0)
)
fig = go.Figure(data=data, layout=layout)
pl.offline.plot(fig, auto_open=True, filename=filename)
pass
def search_and_apply(absolut_file_or_folder, plotting_function, files_to_look_for=[]):
if os.path.isdir(absolut_file_or_folder):
for sub_file_or_folder in os.scandir(absolut_file_or_folder):
search_and_apply(sub_file_or_folder.path, plotting_function, files_to_look_for=files_to_look_for)
elif absolut_file_or_folder.endswith('.dill'):
file_or_folder = os.path.split(absolut_file_or_folder)[-1]
if file_or_folder in files_to_look_for and not os.path.exists('{}.html'.format(absolut_file_or_folder[:-5])):
print('Apply Plotting function "{func}" on file "{file}"'.format(func=plotting_function.__name__,
file=absolut_file_or_folder)
)
with open(absolut_file_or_folder, 'rb') as in_f:
exp = dill.load(in_f)
names_dill_location = os.path.join(*os.path.split(absolut_file_or_folder)[:-1], 'all_names.dill')
with open(names_dill_location, 'rb') as in_f:
names = dill.load(in_f)
try:
plotting_function((names, exp), filename='{}.html'.format(absolut_file_or_folder[:-5]))
except ValueError:
pass
except AttributeError:
pass
else:
# This was either another file type, or the plot .html already exists.
pass
if __name__ == '__main__':
args = build_args()
in_file = args.in_file[0]
out_file = args.out_file
search_and_apply(in_file, line_plot, ["all_data.dill"])

View File

@ -1,11 +1,12 @@
import numpy as np
from abc import abstractmethod, ABC
from typing import List, Union
from keras.models import Sequential
from keras.callbacks import Callback
from keras.layers import SimpleRNN, Dense
import keras.backend as K
from tensorflow.python.keras.models import Sequential
from tensorflow.python.keras.callbacks import Callback
from tensorflow.python.keras.layers import SimpleRNN, Dense
from tensorflow.python.keras import backend as K
from util import *
from experiment import *
# Suppress warnings and info messages
@ -13,12 +14,12 @@ os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'
class SaveStateCallback(Callback):
def __init__(self, net, epoch=0):
def __init__(self, network, epoch=0):
super(SaveStateCallback, self).__init__()
self.net = net
self.net = network
self.init_epoch = epoch
def on_epoch_end(self, epoch, logs={}):
def on_epoch_end(self, epoch, logs=None):
description = dict(time=epoch+self.init_epoch)
description['action'] = 'train_self'
description['counterpart'] = None
@ -26,67 +27,116 @@ class SaveStateCallback(Callback):
return
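For orientation, here is a minimal, self-contained sketch of the Keras callback pattern that SaveStateCallback follows; the model, the data, and the LoggingCallback name are illustrative stand-ins, not code from this commit:

# Hypothetical usage sketch, assuming the tensorflow.python.keras imports used above.
import numpy as np
from tensorflow.python.keras.models import Sequential
from tensorflow.python.keras.layers import Dense
from tensorflow.python.keras.callbacks import Callback

class LoggingCallback(Callback):
    # Records which epochs have finished; a stand-in for SaveStateCallback.
    def __init__(self):
        super(LoggingCallback, self).__init__()
        self.seen_epochs = []
    def on_epoch_end(self, epoch, logs=None):
        self.seen_epochs.append(epoch)

model = Sequential([Dense(units=1, input_dim=4, activation='linear', use_bias=False)])
model.compile(optimizer='sgd', loss='mse')
x, y = np.random.rand(8, 4), np.random.rand(8, 1)
model.fit(x, y, epochs=2, verbose=0, callbacks=[LoggingCallback()])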
class NeuralNetwork(PrintingObject):
class Weights:
@staticmethod
def weights_to_string(weights):
s = ""
for layer_id, layer in enumerate(weights):
for cell_id, cell in enumerate(layer):
s += "[ "
for weight_id, weight in enumerate(cell):
s += str(weight) + " "
s += "]"
s += "\n"
return s
def __reshape_flat_array__(array, shapes):
sizes: List[int] = [int(np.prod(shape)) for shape in shapes]
# Split the incoming array into slices for layers
slices = [array[x: y] for x, y in zip(np.cumsum([0]+sizes), np.cumsum([0]+sizes)[1:])]
# reshape them in accordance with the given shapes
weights = [np.reshape(weight_slice, shape) for weight_slice, shape in zip(slices, shapes)]
return weights
@staticmethod
def are_weights_diverged(network_weights):
for layer_id, layer in enumerate(network_weights):
for cell_id, cell in enumerate(layer):
for weight_id, weight in enumerate(cell):
if np.isnan(weight):
return True
if np.isinf(weight):
return True
return False
def __init__(self, weight_vector: Union[List[np.ndarray], np.ndarray], flat_array_shape=None):
"""
Weights class, for easy manipulation of weight vectors from Keras models
@staticmethod
def are_weights_within(network_weights, lower_bound, upper_bound):
for layer_id, layer in enumerate(network_weights):
for cell_id, cell in enumerate(layer):
for weight_id, weight in enumerate(cell):
# could be a chained comparison: "lower_bound <= weight <= upper_bound"
if not (lower_bound <= weight and weight <= upper_bound):
return False
return True
:param weight_vector: A numpy array holding weights
:type weight_vector: List[np.ndarray]
"""
self.__iter_idx = [0, 0]
if flat_array_shape:
weight_vector = self.__reshape_flat_array__(weight_vector, flat_array_shape)
@staticmethod
def fill_weights(old_weights, new_weights_list):
new_weights = copy.deepcopy(old_weights)
self.layers = weight_vector
# TODO: implement a way to access the cells directly
# self.cells = len(self)
# TODO: implement a way to access the weights directly
# self.weights = self.to_flat_array() ?
def __iter__(self):
self.__iter_idx = [0, 0]
return self
def __getitem__(self, item):
return self.layers[item]
def __len__(self):
return sum([x.size for x in self.layers])
def shapes(self):
return [x.shape for x in self.layers]
def num_layers(self):
return len(self.layers)
def __copy__(self):
return copy.deepcopy(self)
def __next__(self):
# ToDo: Check iteration progress over layers
# ToDo: There is still a problem with iteration; currently the cell level is the last loop stage.
# Do we need this?
if self.__iter_idx[0] >= len(self.layers):
if self.__iter_idx[1] >= len(self.layers[self.__iter_idx[0]]):
raise StopIteration
result = self.layers[self.__iter_idx[0]][self.__iter_idx[1]]
if self.__iter_idx[1] >= len(self.layers[self.__iter_idx[0]]):
self.__iter_idx[0] += 1
self.__iter_idx[1] = 0
else:
self.__iter_idx[1] += 1
return result
def __repr__(self):
return f'Weights({self.to_flat_array().tolist()})'
def to_flat_array(self) -> np.ndarray:
return np.hstack([weight.flatten() for weight in self.layers])
def from_flat_array(self, array):
new_weights = self.__reshape_flat_array__(array, self.shapes())
return new_weights
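For intuition, a standalone round trip of what to_flat_array and __reshape_flat_array__/from_flat_array do together; the layer shapes below are made up:

import numpy as np
shapes = [(4, 2), (2, 1)]                                    # per-layer weight shapes
layers = [np.arange(np.prod(s), dtype=float).reshape(s) for s in shapes]
flat = np.hstack([w.flatten() for w in layers])              # one flat vector, 10 values
sizes = [int(np.prod(s)) for s in shapes]
bounds = np.cumsum([0] + sizes)
slices = [flat[a:b] for a, b in zip(bounds, bounds[1:])]     # split per layer
restored = [s.reshape(shape) for s, shape in zip(slices, shapes)]
assert all(np.array_equal(a, b) for a, b in zip(layers, restored))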
def are_diverged(self):
return any([np.isnan(x).any() for x in self.layers]) or any([np.isinf(x).any() for x in self.layers])
def are_within_bounds(self, lower_bound: float, upper_bound: float):
return all(((lower_bound <= x) & (x <= upper_bound)).all() for x in self.layers)
def apply_new_weights(self, weights: np.ndarray):
# TODO: Make this more Pythonic
new_weights = copy.deepcopy(self.layers)
current_weight_id = 0
for layer_id, layer in enumerate(new_weights):
for cell_id, cell in enumerate(layer):
for weight_id, weight in enumerate(cell):
new_weight = new_weights_list[current_weight_id]
new_weight = weights[current_weight_id]
new_weights[layer_id][cell_id][weight_id] = new_weight
current_weight_id += 1
return new_weights
class NeuralNetwork(ABC):
"""
This is the Base Network Class, including abstract functions that must be implemented.
"""
def __init__(self, **params):
super().__init__()
self.params = dict(epsilon=1e-14)
self.params.update(params)
self.keras_params = dict(activation='linear', use_bias=False)
self.states = []
self.model: Sequential
def get_model(self):
raise NotImplementedError
def get_params(self):
def get_params(self) -> dict:
return self.params
def get_keras_params(self):
def get_keras_params(self) -> dict:
return self.keras_params
def with_params(self, **kwargs):
@ -97,96 +147,96 @@ class NeuralNetwork(PrintingObject):
self.keras_params.update(kwargs)
return self
def get_weights(self):
return self.model.get_weights()
def get_weights(self) -> Weights:
return Weights(self.model.get_weights())
def get_weights_flat(self):
return np.hstack([weight.flatten() for weight in self.get_weights()])
def get_weights_flat(self) -> np.ndarray:
return self.get_weights().to_flat_array()
def set_weights(self, new_weights):
def set_weights(self, new_weights: Weights):
return self.model.set_weights(new_weights)
def apply_to_weights(self, old_weights):
@abstractmethod
def apply_to_weights(self, old_weights) -> Weights:
# TODO: add a docstring, telling the user what this does, e.g. what is applied?
raise NotImplementedError
def apply_to_network(self, other_network):
def apply_to_network(self, other_network) -> Weights:
# TODO: add a docstring, telling the user what this does, e.g. what is applied?
new_weights = self.apply_to_weights(other_network.get_weights())
return new_weights
def attack(self, other_network):
# TODO: add a docstring, telling the user what this does, e.g. what is an attack?
other_network.set_weights(self.apply_to_network(other_network))
return self
def fuck(self, other_network):
# TODO: add a docstring, telling the user what this does, e.g. what is fucking?
self.set_weights(self.apply_to_network(other_network))
return self
def self_attack(self, iterations=1):
# TODO: add a docstring, telling the user what this does, e.g. what is a self-attack?
for _ in range(iterations):
self.attack(self)
return self
def meet(self, other_network):
# TODO: add a docstring, telling the user what this does, e.g. what is meeting?
new_other_network = copy.deepcopy(other_network)
return self.attack(new_other_network)
def is_diverged(self):
return self.are_weights_diverged(self.get_weights())
return self.get_weights().are_diverged()
def is_zero(self, epsilon=None):
epsilon = epsilon or self.get_params().get('epsilon')
return self.are_weights_within(self.get_weights(), -epsilon, epsilon)
return self.get_weights().are_within_bounds(-epsilon, epsilon)
def is_fixpoint(self, degree=1, epsilon=None):
def is_fixpoint(self, degree: int = 1, epsilon: float = None) -> bool:
assert degree >= 1, "degree must be >= 1"
epsilon = epsilon or self.get_params().get('epsilon')
old_weights = self.get_weights()
new_weights = copy.deepcopy(old_weights)
new_weights = copy.deepcopy(self.get_weights())
for _ in range(degree):
new_weights = self.apply_to_weights(new_weights)
if new_weights.are_diverged():
return False
if NeuralNetwork.are_weights_diverged(new_weights):
return False
for layer_id, layer in enumerate(old_weights):
for cell_id, cell in enumerate(layer):
for weight_id, weight in enumerate(cell):
new_weight = new_weights[layer_id][cell_id][weight_id]
if abs(new_weight - weight) >= epsilon:
return False
return True
biggerEpsilon = (np.abs(new_weights.to_flat_array() - self.get_weights().to_flat_array()) >= epsilon).any()
def repr_weights(self, weights=None):
return self.weights_to_string(weights or self.get_weights())
# The boolean value needs to be flipped to answer "is_fixpoint"
return not biggerEpsilon
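Read as plain numpy, the fixpoint test boils down to an elementwise tolerance check; a tiny sketch with invented values:

import numpy as np
epsilon = 1e-14
old = np.array([0.5, -0.25, 1.0])
new = old + np.array([0.0, 5e-15, 0.0])     # perturbation smaller than epsilon
bigger_epsilon = (np.abs(new - old) >= epsilon).any()
is_fixpoint = not bigger_epsilon            # True: every delta stays below epsilon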
def print_weights(self, weights=None):
print(self.repr_weights(weights))
print(weights or self.get_weights())
class ParticleDecorator:
next_uid = 0
def __init__(self, net):
def __init__(self, network):
# ToDo: Add a docstring. What does it do?
self.uid = self.__class__.next_uid
self.__class__.next_uid += 1
self.net = net
self.network = network
self.states = []
self.save_state(time=0,
action='init',
counterpart=None
)
self.save_state(time=0, action='init', counterpart=None)
def __getattr__(self, name):
return getattr(self.net, name)
return getattr(self.network, name)
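The decorator resolves any attribute it does not define itself on the wrapped network; a minimal sketch of that __getattr__ delegation pattern, with made-up classes:

class Inner:
    def greet(self):
        return 'hello'
class Wrapper:
    def __init__(self, inner):
        self.inner = inner
    def __getattr__(self, name):
        # only invoked when normal lookup on Wrapper fails
        return getattr(self.inner, name)
assert Wrapper(Inner()).greet() == 'hello'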
def get_uid(self):
return self.uid
def make_state(self, **kwargs):
weights = self.net.get_weights_flat()
if any(np.isinf(weights)) or any(np.isnan(weights)):
if self.network.is_diverged():
return None
state = {'class': self.net.__class__.__name__, 'weights': weights}
state = {'class': self.network.__class__.__name__, 'weights': self.network.get_weights_flat()}
state.update(kwargs)
return state
@ -196,6 +246,7 @@ class ParticleDecorator:
self.states += [state]
else:
pass
return True
def update_state(self, number, **kwargs):
raise NotImplementedError('Result is vague')
@ -212,81 +263,33 @@ class ParticleDecorator:
class WeightwiseNeuralNetwork(NeuralNetwork):
@staticmethod
def normalize_id(value, norm):
if norm > 1:
return float(value) / float(norm)
else:
return float(value)
def __init__(self, width, depth, **kwargs):
# ToDo: Insert Docstring
super().__init__(**kwargs)
self.width = width
self.depth = depth
self.width: int = width
self.depth: int = depth
self.model = Sequential()
self.model.add(Dense(units=self.width, input_dim=4, **self.keras_params))
for _ in range(self.depth-1):
self.model.add(Dense(units=self.width, **self.keras_params))
self.model.add(Dense(units=1, **self.keras_params))
def get_model(self):
return self.model
def apply(self, inputs):
# TODO: Write about it... What does it do?
return self.model.predict(inputs)
def apply(self, *inputs):
stuff = np.transpose(np.array([[inputs[0]], [inputs[1]], [inputs[2]], [inputs[3]]]))
return self.model.predict(stuff)[0][0]
@classmethod
def compute_all_duplex_weight_points(cls, old_weights):
points = []
normal_points = []
max_layer_id = len(old_weights) - 1
for layer_id, layer in enumerate(old_weights):
max_cell_id = len(layer) - 1
for cell_id, cell in enumerate(layer):
max_weight_id = len(cell) - 1
for weight_id, weight in enumerate(cell):
normal_layer_id = cls.normalize_id(layer_id, max_layer_id)
normal_cell_id = cls.normalize_id(cell_id, max_cell_id)
normal_weight_id = cls.normalize_id(weight_id, max_weight_id)
points += [[weight, layer_id, cell_id, weight_id]]
normal_points += [[weight, normal_layer_id, normal_cell_id, normal_weight_id]]
return points, normal_points
@classmethod
def compute_all_weight_points(cls, all_weights):
return cls.compute_all_duplex_weight_points(all_weights)[0]
@classmethod
def compute_all_normal_weight_points(cls, all_weights):
return cls.compute_all_duplex_weight_points(all_weights)[1]
def apply_to_weights(self, old_weights):
new_weights = copy.deepcopy(self.get_weights())
for (weight_point, normal_weight_point) in zip(*self.__class__.compute_all_duplex_weight_points(old_weights)):
weight, layer_id, cell_id, weight_id = weight_point
_, normal_layer_id, normal_cell_id, normal_weight_id = normal_weight_point
new_weight = self.apply(*normal_weight_point)
new_weights[layer_id][cell_id][weight_id] = new_weight
if self.params.get("print_all_weight_updates", False) and not self.is_silent():
print("updated old weight {weight}\t @ ({layer},{cell},{weight_id}) "
"to new value {new_weight}\t calling @ ({normal_layer},{normal_cell},{normal_weight_id})").format(
weight=weight, layer=layer_id, cell=cell_id, weight_id=weight_id, new_weight=new_weight,
normal_layer=normal_layer_id, normal_cell=normal_cell_id, normal_weight_id=normal_weight_id)
return new_weights
def compute_samples(self):
samples = []
for normal_weight_point in self.compute_all_normal_weight_points(self.get_weights()):
weight, normal_layer_id, normal_cell_id, normal_weight_id = normal_weight_point
sample = np.transpose(np.array([[weight], [normal_layer_id], [normal_cell_id], [normal_weight_id]]))
samples += [sample[0]]
samples_array = np.asarray(samples)
return samples_array, samples_array[:, 0]
def apply_to_weights(self, weights) -> Weights:
# ToDo: Insert DocString
# Transform the weight matrix into a horizontal stack: array([[weight, layer, cell, position], ...])
transformed_weights = np.asarray([
[weight, idx, *x] for idx, layer in enumerate(weights.layers) for x, weight in np.ndenumerate(layer)
])
# normalize [layer, cell, position]
for idx in range(1, transformed_weights.shape[1]):
transformed_weights[:, idx] = transformed_weights[:, idx] / np.max(transformed_weights[:, idx])
new_weights = self.apply(transformed_weights)
# use the original weight shapes to reshape the new tensor
return Weights(new_weights, flat_array_shape=weights.shapes())
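A toy, standalone version of the stacking and normalization above (layer shapes invented; the real method then feeds the normalized rows through the model and reshapes the prediction back):

import numpy as np
layers = [np.array([[0.1, 0.2], [0.3, 0.4]]), np.array([[0.5], [0.6]])]
rows = np.asarray([
    [weight, idx, *pos] for idx, layer in enumerate(layers)
    for pos, weight in np.ndenumerate(layer)
])
# rows[:, 0] holds the weights, rows[:, 1:] the (layer, cell, position) indices
for col in range(1, rows.shape[1]):
    rows[:, col] = rows[:, col] / np.max(rows[:, col])  # normalize indices to [0, 1]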
class AggregatingNeuralNetwork(NeuralNetwork):
@ -332,9 +335,6 @@ class AggregatingNeuralNetwork(NeuralNetwork):
self.model.add(Dense(units=width, **self.keras_params))
self.model.add(Dense(units=self.aggregates, **self.keras_params))
def get_model(self):
return self.model
def get_aggregator(self):
return self.params.get('aggregator', self.aggregate_average)
@ -378,11 +378,11 @@ class AggregatingNeuralNetwork(NeuralNetwork):
new_weights = self.fill_weights(old_weights, new_weights_list)
# return results
if self.params.get("print_all_weight_updates", False) and not self.is_silent():
print("updated old weight aggregations " + str(old_aggregations))
print("to new weight aggregations " + str(new_aggregations))
print("resulting in network weights ...")
print(self.weights_to_string(new_weights))
# if self.params.get("print_all_weight_updates", False) and not self.is_silent():
# print("updated old weight aggregations " + str(old_aggregations))
# print("to new weight aggregations " + str(new_aggregations))
# print("resulting in network weights ...")
# print(self.weights_to_string(new_weights))
return new_weights
@staticmethod
@ -420,23 +420,23 @@ class AggregatingNeuralNetwork(NeuralNetwork):
assert degree >= 1, "degree must be >= 1"
epsilon = epsilon or self.get_params().get('epsilon')
old_weights = self.get_weights()
old_aggregations, _ = self.get_aggregated_weights()
new_weights = copy.deepcopy(self.get_weights())
new_weights = copy.deepcopy(old_weights)
for _ in range(degree):
new_weights = self.apply_to_weights(new_weights)
if NeuralNetwork.are_weights_diverged(new_weights):
return False
if new_weights.are_diverged():
return False
# ToDo: Explain This, what the heck is happening?
collection_size = self.get_amount_of_weights() // self.aggregates
collections, leftovers = self.__class__.collect_weights(new_weights, collection_size)
new_aggregations = [self.get_aggregator()(collection) for collection in collections]
for aggregation_id, old_aggregation in enumerate(old_aggregations):
new_aggregation = new_aggregations[aggregation_id]
if abs(new_aggregation - old_aggregation) >= epsilon:
return False, new_aggregations
return True, new_aggregations
# ToDo: Explain This, why are you additionally checking tolerances of aggregated weights?
biggerEpsilon = (np.abs(np.asarray(old_aggregations) - np.asarray(new_aggregations)) >= epsilon).any()
# The boolean value has to be flipped to answer the question.
return not biggerEpsilon, new_aggregations
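As a rough numpy-only illustration of the aggregation check (group size, aggregator, and values are invented; collect_weights itself may treat leftovers differently):

import numpy as np
flat_weights = np.arange(12, dtype=float)            # stand-in for the network's weights
aggregates = 4
collections = np.split(flat_weights, aggregates)     # 4 groups of 3 weights each
new_aggregations = [float(np.mean(c)) for c in collections]
old_aggregations = list(new_aggregations)            # e.g. taken before the update
epsilon = 1e-14
bigger_epsilon = (np.abs(np.asarray(old_aggregations)
                         - np.asarray(new_aggregations)) >= epsilon).any()
assert not bigger_epsilon                            # unchanged aggregations: fixpoint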
class FFTNeuralNetwork(NeuralNetwork):
@ -473,9 +473,6 @@ class FFTNeuralNetwork(NeuralNetwork):
self.model.add(Dense(units=width, **self.keras_params))
self.model.add(Dense(units=self.aggregates, **self.keras_params))
def get_model(self):
return self.model
def get_shuffler(self):
return self.params.get('shuffler', self.shuffle_not)
@ -508,11 +505,11 @@ class FFTNeuralNetwork(NeuralNetwork):
new_weights = self.fill_weights(old_weights, new_weights_list)
# return results
if self.params.get("print_all_weight_updates", False) and not self.is_silent():
print("updated old weight aggregations " + str(old_aggregation))
print("to new weight aggregations " + str(new_aggregation))
print("resulting in network weights ...")
print(self.__class__.weights_to_string(new_weights))
# if self.params.get("print_all_weight_updates", False) and not self.is_silent():
# print("updated old weight aggregations " + str(old_aggregation))
# print("to new weight aggregations " + str(new_aggregation))
# print("resulting in network weights ...")
# print(self.weights_to_string(new_weights))
return new_weights
def compute_samples(self):
@ -534,9 +531,6 @@ class RecurrentNeuralNetwork(NeuralNetwork):
self.model.add(SimpleRNN(units=width, return_sequences=True, **self.keras_params))
self.model.add(SimpleRNN(units=self.features, return_sequences=True, **self.keras_params))
def get_model(self):
return self.model
def apply(self, *inputs):
stuff = np.transpose(np.array([[[inputs[i]] for i in range(len(inputs))]]))
return self.model.predict(stuff)[0].flatten()
@ -645,7 +639,7 @@ if __name__ == '__main__':
K.clear_session()
exp.log(exp.counters)
if True:
if False:
# Aggregating Neural Network
with FixpointExperiment() as exp:
for run_id in tqdm(range(100)):
@ -655,7 +649,7 @@ if __name__ == '__main__':
K.clear_session()
exp.log(exp.counters)
if True:
if False:
#FFT Neural Network
with FixpointExperiment() as exp:
for run_id in tqdm(range(100)):
@ -665,7 +659,7 @@ if __name__ == '__main__':
K.clear_session()
exp.log(exp.counters)
if True:
if False:
# ok so this works quite reliably
with FixpointExperiment() as exp:
for i in range(1):

View File

@ -4,7 +4,6 @@ import os
# Concat the top-level dir onto the module search path
sys.path += [os.path.join('..', '.')]
from util import *
from experiment import *
from network import *

View File

@ -3,16 +3,18 @@ import os
# Concat the top-level dir onto the module search path
sys.path += [os.path.join('..', '.')]
from util import *
from experiment import *
from network import *
import keras.backend
import tensorflow.python.keras.backend as K
def generate_counters():
return {'divergent': 0, 'fix_zero': 0, 'fix_other': 0, 'fix_sec': 0, 'other': 0}
def count(counters, net, notable_nets=[]):
def count(counters, net, notable_nets=None):
notable_nets = notable_nets or []
if net.is_diverged():
counters['divergent'] += 1
elif net.is_fixpoint():
@ -52,7 +54,7 @@ if __name__ == '__main__':
net = ParticleDecorator(net)
name = str(net.__class__.__name__) + " activation='" + str(net.get_keras_params().get('activation')) + "' use_bias='" + str(net.get_keras_params().get('use_bias')) + "'"
count(counters, net, notable_nets)
keras.backend.clear_session()
K.clear_session()
all_counters += [counters]
# all_notable_nets += [notable_nets]
all_names += [name]

View File

@ -5,12 +5,11 @@ import os
# Concat the top-level dir onto the module search path
sys.path += [os.path.join('..', '.')]
from util import *
from experiment import *
from network import *
from soup import prng
import keras.backend
import tensorflow.python.keras.backend as K
from statistics import mean
@ -85,7 +84,7 @@ if __name__ == '__main__':
exp.ys += [time_to_something]
# time steps still regarded as the initial fixpoint
exp.zs += [time_as_fixpoint]
keras.backend.clear_session()
K.clear_session()
current_scale /= 10.0
for d in range(exp.depth):
exp.log('variation 10e-' + str(d))

View File

@ -6,13 +6,12 @@ sys.path += os.path.join('..', '.')
from typing import Tuple
from util import *
from experiment import *
from network import *
from soup import *
import keras.backend
import tensorflow.python.keras.backend as K
from statistics import mean
avg = mean
@ -28,7 +27,7 @@ def generate_counters():
return {'divergent': 0, 'fix_zero': 0, 'fix_other': 0, 'fix_sec': 0, 'other': 0}
def count(counters, soup, notable_nets=[]):
def count(counters, soup, notable_nets=None):
"""
Count the occurrences of the types of weight trajectories.
@ -40,6 +39,7 @@ def count(counters, soup, notable_nets=[]):
:return: Both the counter dictionary and the list of interesting nets.
"""
notable_nets = notable_nets or list()
for net in soup.particles:
if net.is_diverged():
counters['divergent'] += 1

View File

@ -6,11 +6,10 @@ from typing import Tuple
# Concat the top-level dir onto the module search path
sys.path += [os.path.join('..', '.')]
from util import *
from experiment import *
from network import *
import keras.backend
import tensorflow.python.keras.backend as K
def generate_counters():
@ -23,7 +22,7 @@ def generate_counters():
return {'divergent': 0, 'fix_zero': 0, 'fix_other': 0, 'fix_sec': 0, 'other': 0}
def count(counters, net, notable_nets=[]):
def count(counters, net, notable_nets=None):
"""
Count the occurrences of the types of weight trajectories.
@ -34,7 +33,7 @@ def count(counters, net, notable_nets=[]):
:rtype: Tuple[dict, list]
:return: Both the counter dictionary and the list of interesting nets.
"""
notable_nets = notable_nets or list()
if net.is_diverged():
counters['divergent'] += 1
elif net.is_fixpoint():

View File

@ -6,12 +6,11 @@ sys.path += os.path.join('..', '.')
from typing import Tuple
from util import *
from experiment import *
from network import *
from soup import *
import keras.backend
import tensorflow.python.keras.backend as K
def generate_counters():
@ -24,7 +23,7 @@ def generate_counters():
return {'divergent': 0, 'fix_zero': 0, 'fix_other': 0, 'fix_sec': 0, 'other': 0}
def count(counters, soup, notable_nets=[]):
def count(counters, soup, notable_nets=None):
"""
Count the occurrences of the types of weight trajectories.
@ -36,6 +35,7 @@ def count(counters, soup, notable_nets=[]):
:return: Both the counter dictionary and the list of interesting nets.
"""
notable_nets = notable_nets or list()
for net in soup.particles:
if net.is_diverged():
counters['divergent'] += 1

View File

@ -4,16 +4,16 @@ import os
# Concat the top-level dir onto the module search path
sys.path += [os.path.join('..', '.')]
from util import *
from experiment import *
from network import *
import keras.backend as K
import tensorflow.python.keras.backend as K
def generate_counters():
return {'divergent': 0, 'fix_zero': 0, 'fix_other': 0, 'fix_sec': 0, 'other': 0}
def count(counters, net, notable_nets=[]):
def count(counters, net, notable_nets=None):
notable_nets = notable_nets or list()
if net.is_diverged():
counters['divergent'] += 1
elif net.is_fixpoint():

View File

@ -61,7 +61,7 @@ class LearningNeuralNetwork(NeuralNetwork):
print("updated old weight aggregations " + str(old_aggregation))
print("to new weight aggregations " + str(new_aggregation))
print("resulting in network weights ...")
print(self.__class__.weights_to_string(new_weights))
print(self.weights_to_string(new_weights))
return new_weights
def with_compile_params(self, **kwargs):

View File

@ -1,39 +0,0 @@
class PrintingObject:
class SilenceSignal():
def __init__(self, obj, value):
self.obj = obj
self.new_silent = value
def __enter__(self):
self.old_silent = self.obj.get_silence()
self.obj.set_silence(self.new_silent)
def __exit__(self, exception_type, exception_value, traceback):
self.obj.set_silence(self.old_silent)
def __init__(self):
self.silent = True
def is_silent(self):
return self.silent
def get_silence(self):
return self.is_silent()
def set_silence(self, value=True):
self.silent = value
return self
def unset_silence(self):
self.silent = False
return self
def with_silence(self, value=True):
self.set_silence(value)
return self
def silence(self, value=True):
return self.__class__.SilenceSignal(self, value)
def _print(self, *args, **kwargs):
if not self.silent:
print(*args, **kwargs)

View File

@ -1,283 +0,0 @@
import os
from experiment import Experiment
# noinspection PyUnresolvedReferences
from soup import Soup
from argparse import ArgumentParser
import numpy as np
import plotly as pl
import plotly.graph_objs as go
import colorlover as cl
import dill
from sklearn.manifold.t_sne import TSNE, PCA
def build_args():
arg_parser = ArgumentParser()
arg_parser.add_argument('-i', '--in_file', nargs=1, type=str)
arg_parser.add_argument('-o', '--out_file', nargs='?', default='out', type=str)
return arg_parser.parse_args()
def build_from_soup_or_exp(soup):
particles = soup.historical_particles
particle_list = []
for particle in particles.values():
particle_dict = dict(
trajectory=[event['weights'] for event in particle],
time=[event['time'] for event in particle],
action=[event.get('action', None) for event in particle],
counterpart=[event.get('counterpart', None) for event in particle]
)
if any([x is not None for x in particle_dict['counterpart']]):
print('counterpart')
particle_list.append(particle_dict)
return particle_list
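For reference, the rough shape of one particle_dict this helper yields; the concrete values are invented:

particle_dict = dict(
    trajectory=[[0.1, 0.2], [0.11, 0.19]],  # flattened weights, one entry per event
    time=[0, 1],                            # event timestamps
    action=['init', 'train_self'],          # recorded actions
    counterpart=[None, None],               # uids of interaction partners, if any
)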
def plot_latent_trajectories(soup_or_experiment, filename='latent_trajectory_plot'):
assert isinstance(soup_or_experiment, (Experiment, Soup))
bupu = cl.scales['11']['div']['RdYlGn']
data_dict = build_from_soup_or_exp(soup_or_experiment)
scale = cl.interp(bupu, len(data_dict)+1) # Map color scale to N bins
# Fit the embedding space
transformer = TSNE()
for particle_dict in data_dict:
array = np.asarray([np.hstack([x.flatten() for x in timestamp]).flatten()
for timestamp in particle_dict['trajectory']])
particle_dict['trajectory'] = array
transformer.fit(array)
# Transform data accordingly and plot it
data = []
for p_id, particle_dict in enumerate(data_dict):
transformed = transformer._fit(np.asarray(particle_dict['trajectory']))
line_trace = go.Scatter(
x=transformed[:, 0],
y=transformed[:, 1],
text='Hovertext goes here',
line=dict(color=scale[p_id]),
# legendgroup='Position -{}'.format(pos),
name='Particle - {}'.format(p_id),
showlegend=True,
# hoverinfo='text',
mode='lines')
line_start = go.Scatter(mode='markers', x=[transformed[0, 0]], y=[transformed[0, 1]],
marker=dict(
color='rgb(255, 0, 0)',
size=4
),
showlegend=False
)
line_end = go.Scatter(mode='markers', x=[transformed[-1, 0]], y=[transformed[-1, 1]],
marker=dict(
color='rgb(0, 0, 0)',
size=4
),
showlegend=False
)
data.extend([line_trace, line_start, line_end])
layout = dict(title='{} - Latent Trajectory Movement'.format('Penis'),
height=800, width=800, margin=dict(l=0, r=0, t=0, b=0))
# import plotly.io as pio
# pio.write_image(fig, filename)
fig = go.Figure(data=data, layout=layout)
pl.offline.plot(fig, auto_open=True, filename=filename)
pass
def plot_latent_trajectories_3D(soup_or_experiment, filename='plot'):
def norm(val, a=0, b=0.25):
return (val - a) / (b - a)
data_list = build_from_soup_or_exp(soup_or_experiment)
if not data_list:
return
base_scale = cl.scales['9']['div']['RdYlGn']
# base_scale = cl.scales['9']['qual']['Set1']
scale = cl.interp(base_scale, len(data_list)+1) # Map color scale to N bins
# Fit the embedding space
transformer = PCA(n_components=2)
array = []
for particle_dict in data_list:
array.append(particle_dict['trajectory'])
transformer.fit(np.vstack(array))
# Transform data accordingly and plot it
data = []
for p_id, particle_dict in enumerate(data_list):
transformed = transformer.transform(particle_dict['trajectory'])
line_trace = go.Scatter3d(
x=transformed[:, 0],
y=transformed[:, 1],
z=np.asarray(particle_dict['time']),
text='Particle: {}<br> It had {} lifes.'.format(p_id, len(particle_dict['trajectory'])),
line=dict(
color=scale[p_id],
width=4
),
# legendgroup='Particle - {}'.format(p_id),
name='Particle -{}'.format(p_id),
showlegend=False,
hoverinfo='text',
mode='lines')
line_start = go.Scatter3d(mode='markers', x=[transformed[0, 0]], y=[transformed[0, 1]],
z=np.asarray(particle_dict['time'][0]),
marker=dict(
color='rgb(255, 0, 0)',
size=4
),
showlegend=False
)
line_end = go.Scatter3d(mode='markers', x=[transformed[-1, 0]], y=[transformed[-1, 1]],
z=np.asarray(particle_dict['time'][-1]),
marker=dict(
color='rgb(0, 0, 0)',
size=4
),
showlegend=False
)
data.extend([line_trace, line_start, line_end])
axis_layout = dict(gridcolor='rgb(255, 255, 255)',
gridwidth=3,
zerolinecolor='rgb(255, 255, 255)',
showbackground=True,
backgroundcolor='rgb(230, 230,230)',
titlefont=dict(
color='black',
size=30
)
)
layout = go.Layout(scene=dict(
# aspectratio=dict(x=2, y=2, z=2),
xaxis=dict(title='Transformed X', **axis_layout),
yaxis=dict(title='Transformed Y', **axis_layout),
zaxis=dict(title='Epoch', **axis_layout)),
# title='{} - Latent Trajectory Movement'.format('Soup'),
width=1024, height=1024,
margin=dict(l=0, r=0, b=0, t=0)
)
fig = go.Figure(data=data, layout=layout)
pl.offline.plot(fig, auto_open=True, filename=filename, validate=True)
pass
def plot_histogram(bars_dict_list, filename='histogram_plot'):
# categorical
ryb = cl.scales['10']['div']['RdYlBu']
data = []
for bar_id, bars_dict in enumerate(bars_dict_list):
hist = go.Histogram(
histfunc="count",
y=bars_dict.get('value', 14),
x=bars_dict.get('name', 'gimme a name'),
showlegend=False,
marker=dict(
color=ryb[bar_id]
),
)
data.append(hist)
layout=dict(title='{} Histogram Plot'.format('Experiment Name Penis'),
height=400, width=400, margin=dict(l=0, r=0, t=0, b=0))
fig = go.Figure(data=data, layout=layout)
pl.offline.plot(fig, auto_open=True, filename=filename)
pass
def line_plot(line_dict_list, filename='lineplot'):
# lines with standard deviation
# Transform data accordingly and plot it
data = []
rdylgn = cl.scales['10']['div']['RdYlGn']
rdylgn_background = [scale + (0.4,) for scale in cl.to_numeric(rdylgn)]
for line_id, line_dict in enumerate(line_dict_list):
name = line_dict.get('name', 'gimme a name')
upper_bound = go.Scatter(
name='Upper Bound',
x=line_dict['x'],
y=line_dict['upper_y'],
mode='lines',
marker=dict(color="#444"),
line=dict(width=0),
fillcolor=rdylgn_background[line_id],
)
trace = go.Scatter(
x=line_dict['x'],
y=line_dict['main_y'],
mode='lines',
name=name,
line=dict(color=line_id),
fillcolor=rdylgn_background[line_id],
fill='tonexty')
lower_bound = go.Scatter(
name='Lower Bound',
x=line_dict['x'],
y=line_dict['lower_y'],
marker=dict(color="#444"),
line=dict(width=0),
mode='lines')
data.extend([upper_bound, trace, lower_bound])
layout=dict(title='{} Line Plot'.format('Experiment Name Penis'),
height=800, width=800, margin=dict(l=0, r=0, t=0, b=0))
fig = go.Figure(data=data, layout=layout)
pl.offline.plot(fig, auto_open=True, filename=filename)
pass
def search_and_apply(absolut_file_or_folder, plotting_function, files_to_look_for=[]):
if os.path.isdir(absolut_file_or_folder):
for sub_file_or_folder in os.scandir(absolut_file_or_folder):
search_and_apply(sub_file_or_folder.path, plotting_function, files_to_look_for=files_to_look_for)
elif absolut_file_or_folder.endswith('.dill'):
file_or_folder = os.path.split(absolut_file_or_folder)[-1]
if file_or_folder in files_to_look_for and not os.path.exists('{}.html'.format(absolut_file_or_folder[:-5])):
print('Apply Plotting function "{func}" on file "{file}"'.format(func=plotting_function.__name__,
file=absolut_file_or_folder)
)
with open(absolut_file_or_folder, 'rb') as in_f:
exp = dill.load(in_f)
try:
plotting_function(exp, filename='{}.html'.format(absolut_file_or_folder[:-5]))
except ValueError:
pass
except AttributeError:
pass
else:
# This was either another file type, or the plot .html already exists.
pass
if __name__ == '__main__':
args = build_args()
in_file = args.in_file[0]
out_file = args.out_file
search_and_apply(in_file, plot_latent_trajectories_3D, ["trajectorys.dill", "soup.dill"])