Journal TeX Text

Steffen Illium
2021-09-13 16:04:48 +02:00
parent 6c1a964f31
commit 5f6c658068
7 changed files with 109 additions and 30 deletions


@@ -30,11 +30,10 @@
 - [x] Box-Plot of Avg. Distance of clones from parent
-- [ ] Search subspace between two fixpoints by lineage (10**-5), check where they end up
-- [x] How are basins / "attractor areas" shaped?
-
+- [x] Search subspace between two fixpoints by lineage (10**-5), check where they end up
+- [ ] How are basins / "attractor areas" shaped?
+- Weird.... tbc...
 # Future Todos:


@@ -63,13 +63,15 @@ class SoupExperiment:
         net = Net(self.net_input_size, self.net_hidden_size, self.net_out_size, net_name)
         self.population.append(net)
 
-    def evolve(self):
-        """ Evolving consists of attacking & self-training. """
-
-        loop_epochs = tqdm(range(self.epochs))
-        for i in loop_epochs:
-            loop_epochs.set_description("Evolving soup %s" % i)
+    def population_self_train(self):
+        # Self-training each network in the population
+        for j in range(self.population_size):
+            net = self.population[j]
+            for _ in range(self.ST_steps):
+                net.self_train(1, self.log_step_size, self.net_learning_rate)
 
+    def population_attack(self):
         # A network attacking another network with a given percentage
         if random.randint(1, 100) <= self.attack_chance:
             random_net1, random_net2 = random.sample(range(self.population_size), 2)
@@ -78,12 +80,18 @@
             print(f"\n Attack: {random_net1.name} -> {random_net2.name}")
             random_net1.attack(random_net2)
 
-            # Self-training each network in the population
-            for j in range(self.population_size):
-                net = self.population[j]
-                for _ in range(self.ST_steps):
-                    net.self_train(1, self.log_step_size, self.net_learning_rate)
+    def evolve(self):
+        """ Evolving consists of attacking & self-training. """
+
+        loop_epochs = tqdm(range(self.epochs))
+        for i in loop_epochs:
+            loop_epochs.set_description("Evolving soup %s" % i)
+
+            # A network attacking another network with a given percentage
+            self.population_attack()
+            # Self-training each network in the population
+            self.population_self_train()
 
             # Testing for fixpoints after each batch of ST steps to see relevant data
             if i % self.ST_steps == 0:


@@ -0,0 +1,50 @@
+import random
+
+from tqdm import tqdm
+
+from experiments.soup_exp import SoupExperiment
+from functionalities_test import test_for_fixpoints
+
+
+class MeltingSoupExperiment(SoupExperiment):
+
+    def __init__(self, melt_chance, *args, keep_population_size=True, **kwargs):
+        super(MeltingSoupExperiment, self).__init__(*args, **kwargs)
+        self.keep_population_size = keep_population_size
+        self.melt_chance = melt_chance
+
+    def population_melt(self):
+        # A network melting with another network by a given percentage
+        if random.randint(1, 100) <= self.melt_chance:
+            random_net1_idx, random_net2_idx, destroy_idx = random.sample(range(self.population_size), 3)
+            random_net1 = self.population[random_net1_idx]
+            random_net2 = self.population[random_net2_idx]
+            print(f"\n Melt: {random_net1.name} -> {random_net2.name}")
+            melted_network = random_net1.melt(random_net2)
+            if self.keep_population_size:
+                del self.population[destroy_idx]
+            self.population.append(melted_network)
+
+    def evolve(self):
+        """ Evolving consists of attacking, melting & self-training. """
+        loop_epochs = tqdm(range(self.epochs))
+        for i in loop_epochs:
+            loop_epochs.set_description("Evolving soup %s" % i)
+
+            self.population_attack()
+            self.population_melt()
+            self.population_self_train()
+
+            # Testing for fixpoints after each batch of ST steps to see relevant data
+            if i % self.ST_steps == 0:
+                test_for_fixpoints(self.fixpoint_counters, self.population)
+                fixpoints_percentage = round(self.fixpoint_counters["identity_func"] / self.population_size, 1)
+                self.fixpoint_counters_history.append(fixpoints_percentage)
+
+                # Resetting the fixpoint counter. Last iteration not to be reset -
+                # it is important for the bar_chart_fixpoints().
+                if i < self.epochs:
+                    self.reset_fixpoint_counters()
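The melt, like the attack, is gated by a simple percentage check once per epoch. A quick standalone sanity check of that semantics (trial count is arbitrary):

    import random

    melt_chance = 10  # interpreted as a percentage per epoch
    trials = 100_000
    hits = sum(random.randint(1, 100) <= melt_chance for _ in range(trials))
    print(hits / trials)  # ~0.10, i.e. melt_chance percent of epochs trigger a melt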


@@ -262,7 +262,7 @@ if __name__ == "__main__":
     # Countplot of all fixpoint clone after training per class. Uncomment and manually adjust xticklabels if x-ax size gets too small.
     ax = sns.catplot(kind="count", data=df, x="noise", hue="class", height=5.27, aspect=11.7/5.27)
     ax.set_axis_labels("Noise Levels", "Clone Fixpoints After Training Count ", fontsize=15)
-    #ax.set_xticklabels(labels=('10e-10', '10e-9', '10e-8', '10e-7', '10e-6', '10e-5', '10e-4', '10e-3', '10e-2', '10e-1'), fontsize=15)
+    ax.set_xticklabels(labels=('10e-10', '10e-9', '10e-8', '10e-7', '10e-6', '10e-5', '10e-4', '10e-3', '10e-2', '10e-1'), fontsize=15)
     plt.savefig(f"{directory}/clone_status_after_countplot_{ST_name_hash}.png")
     plt.clf()
@@ -274,5 +274,6 @@ if __name__ == "__main__":
     ax.map(sns.boxplot, "State", "Distance", "noise", linewidth=0.8, order=["MAE_pre", "MAE_post"], whis=[0, 100])
     ax.set_axis_labels("", "Manhattan Distance To Parent Weights", fontsize=15)
     ax.set_xticklabels(labels=('after noise application', 'after training'), fontsize=15)
+    plt.ticklabel_format(style='sci', axis='x')
     plt.savefig(f"{directory}/before_after_distance_catplot_{ST_name_hash}.png")
     plt.clf()


@@ -6,6 +6,8 @@ import random
 import copy
 from pathlib import Path
 
+from matplotlib.ticker import ScalarFormatter
+
 from tqdm import tqdm
 from tabulate import tabulate
@@ -125,7 +127,7 @@ class RobustnessComparisonExperiment:
         # or multi network settings with a singlee seed
         df = pd.DataFrame(columns=['setting', 'Noise Level', 'Self Train Steps', 'absolute_loss',
-                                   'Time to vergence', 'Time as fixpoint'])
+                                   'Time to convergence', 'Time as fixpoint'])
         with tqdm(total=max(len(self.id_functions), seeds)) as pbar:
             for i, fixpoint in enumerate(self.id_functions):  # 1 / n
                 row_headers.append(fixpoint.name)
@@ -157,7 +159,7 @@ class RobustnessComparisonExperiment:
                     # When this raises a Type Error, we found a second order fixpoint!
                     steps += 1
-                df.loc[df.shape[0]] = [setting, f'10e-{noise_level}', steps, absolute_loss,
+                df.loc[df.shape[0]] = [setting, f'$10^{{-{noise_level}}}$', steps, absolute_loss,
                                        time_to_vergence[setting][noise_level],
                                        time_as_fixpoint[setting][noise_level]]
                 pbar.update(1)
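The new noise-level label relies on doubled braces to produce literal braces from inside an f-string. A quick standalone check of what it yields (the value is just an example):

    noise_level = 7
    label = f'$10^{{-{noise_level}}}$'
    assert label == '$10^{-7}$'  # doubled braces become the literal braces around the exponent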
@@ -165,14 +167,19 @@ class RobustnessComparisonExperiment:
         # Get the measuremts at the highest time_time_to_vergence
         df_sorted = df.sort_values('Self Train Steps', ascending=False).drop_duplicates(['setting', 'Noise Level'])
         df_melted = df_sorted.reset_index().melt(id_vars=['setting', 'Noise Level', 'Self Train Steps'],
-                                                 value_vars=['Time to vergence', 'Time as fixpoint'],
+                                                 value_vars=['Time to convergence', 'Time as fixpoint'],
                                                  var_name="Measurement",
                                                  value_name="Steps").sort_values('Noise Level')
 
         # Plotting
+        plt.rcParams.update({
+            "text.usetex": True,
+            "font.family": "sans-serif",
+            "font.size": 12,
+            "font.weight": 'bold',
+            "font.sans-serif": ["Helvetica"]})
         sns.set(style='whitegrid', font_scale=2)
         bf = sns.boxplot(data=df_melted, y='Steps', x='Noise Level', hue='Measurement', palette=PALETTE)
         synthetic = 'synthetic' if self.is_synthetic else 'natural'
-        # bf.set_title(f'Robustness as self application steps per noise level for {synthetic} fixpoints.')
         plt.tight_layout()
         # sns.set(rc={'figure.figsize': (10, 50)})
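Setting "text.usetex": True makes matplotlib hand all text rendering to a local LaTeX installation, which is what lets labels such as $10^{-7}$ come out as real math text. A minimal standalone check, assuming a working LaTeX toolchain on the machine (output file name is arbitrary):

    from matplotlib import pyplot as plt

    # Requires a LaTeX installation; without one, savefig raises a RuntimeError.
    plt.rcParams.update({"text.usetex": True, "font.family": "sans-serif"})
    fig, ax = plt.subplots()
    ax.set_xlabel(r"Noise Level $10^{-7}$")
    fig.savefig("usetex_check.png")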
@@ -206,7 +213,6 @@ class RobustnessComparisonExperiment:
         plot_loss(self.loss_history, self.directory)
 
 
 if __name__ == "__main__":
     NET_INPUT_SIZE = 4
     NET_OUT_SIZE = 1
@@ -214,12 +220,11 @@ if __name__ == "__main__":
     ST_steps = 1000
     ST_epochs = 5
     ST_log_step_size = 10
-    ST_population_size = 500
+    ST_population_size = 1000
     ST_net_hidden_size = 2
     ST_net_learning_rate = 0.004
     ST_name_hash = random.getrandbits(32)
-    ST_synthetic = True
+    ST_synthetic = False
 
     print(f"Running the robustness comparison experiment:")
     exp = RobustnessComparisonExperiment(


@@ -7,6 +7,7 @@ from typing import Union
 import numpy as np
 import pandas as pd
 import seaborn as sns
+from matplotlib.ticker import ScalarFormatter
 from tqdm import tqdm
 from matplotlib import pyplot as plt
 from torch.nn import functional as F
@@ -158,8 +159,10 @@ class SoupRobustnessExperiment:
         df = df.replace([np.inf, -np.inf], np.nan)
         df = df.dropna()
         # sns.set(rc={'figure.figsize': (10, 50)})
+        sns.set_theme(style="ticks")
         bx = sns.catplot(data=df[df['absolute_loss'] < 1], y='absolute_loss', x='application_step', kind='box',
                          col='noise_level', col_wrap=3, showfliers=False)
         directory = Path('output') / 'robustness'
         filename = f"absolute_loss_perapplication_boxplot_grid.png"
         filepath = directory / filename
@@ -167,7 +170,7 @@ class SoupRobustnessExperiment:
         plt.savefig(str(filepath))
 
         if print_it:
-            col_headers = [str(f"10e-{d}") for d in range(noise_levels)]
+            col_headers = [str(f"10-{d}") for d in range(noise_levels)]
             print(f"\nAppplications steps until divergence / zero: ")
             print(tabulate(avg_time_to_vergence, showindex=row_headers, headers=col_headers, tablefmt='orgtbl'))
@@ -221,7 +224,7 @@ if __name__ == "__main__":
     # soup_SA_steps = 10
 
     # Define number of networks & their architecture
-    soup_population_size = 20
+    soup_population_size = 4
     soup_net_hidden_size = 2
     soup_net_learning_rate = 0.04


@@ -1,5 +1,6 @@
 # from __future__ import annotations
 import copy
+import inspect
 import random
 
 from typing import Union
@@ -167,18 +168,30 @@ class Net(nn.Module):
             """ See after how many steps of SA is the output not changing anymore: """
             # print(f"Self-app. step {i+1}: {Experiment.changing_rate(output2, output)}")
 
-            self = self.apply_weights(output)
+            _ = self.apply_weights(output)
 
         return self
 
     def attack(self, other_net):
         other_net_weights = other_net.input_weight_matrix()
         my_evaluation = self(other_net_weights)
-        SA_steps = 1
 
         return other_net.apply_weights(my_evaluation)
 
+    def melt(self, other_net):
+        try:
+            melted_name = self.name + other_net.name
+        except AttributeError:
+            melted_name = None
+        melted_weights = self.create_target_weights(other_net.input_weight_matrix())
+        self_weights = self.create_target_weights(self.input_weight_matrix())
+        weight_indxs = list(range(len(self_weights)))
+        random.shuffle(weight_indxs)
+        for weight_idx in weight_indxs[:len(melted_weights) // 2]:
+            melted_weights[weight_idx] = self_weights[weight_idx]
+        melted_net = Net(i_size=self.input_size, h_size=self.hidden_size, o_size=self.out_size, name=melted_name)
+        melted_net.apply_weights(melted_weights)
+        return melted_net
+
     def apply_noise(self, noise_size: float):
         """ Changing the weights of a network to values + noise """
         for layer_id, layer_name in enumerate(self.state_dict()):
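The new melt() amounts to uniform crossover over half of the flattened weights: the child starts from other_net's weights and a random half of the positions are overwritten with this net's values. A minimal standalone sketch of that step, using plain tensors in place of the flattened weight matrices returned by create_target_weights() (sizes are arbitrary, not the repo's weight layout):

    import random
    import torch

    self_weights = torch.randn(10)   # stands in for this net's flattened weights
    other_weights = torch.randn(10)  # stands in for other_net's flattened weights

    melted = other_weights.clone()
    idxs = list(range(len(self_weights)))
    random.shuffle(idxs)
    for i in idxs[:len(melted) // 2]:
        melted[i] = self_weights[i]  # half of the positions come from this net
    print(melted)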