functionalities_test.py updated
@@ -1,5 +1,5 @@
-from mixed_setting_exp import run_mixed_experiment
-from robustness_exp import run_robustness_experiment
-from self_application_exp import run_SA_experiment
-from self_train_exp import run_ST_experiment
-from soup_exp import run_soup_experiment
+from .mixed_setting_exp import run_mixed_experiment
+from .robustness_exp import run_robustness_experiment
+from .self_application_exp import run_SA_experiment
+from .self_train_exp import run_ST_experiment
+from .soup_exp import run_soup_experiment

@@ -5,7 +5,7 @@ from pathlib import Path
 from visualization import line_chart_fixpoints, bar_chart_fixpoints
 
 
-def summary_fixpoint_experiment(runs, population_size, epochs, experiments, net_learning_rate, directory_name,
+def summary_fixpoint_experiment(runs, population_size, epochs, experiments, net_learning_rate, directory,
                                 summary_pre_title):
     avg_fixpoint_counters = {
         "avg_identity_func": 0,
@@ -36,7 +36,7 @@ def summary_fixpoint_experiment(runs, population_size, epochs, experiments, net_
     # Plotting the summary
     source_checker = "summary"
     exp_details = f"{summary_pre_title}: {runs} runs & {epochs} epochs each."
-    bar_chart_fixpoints(avg_fixpoint_counters, population_size, directory_name, net_learning_rate, exp_details,
+    bar_chart_fixpoints(avg_fixpoint_counters, population_size, directory, net_learning_rate, exp_details,
                         source_checker)
@@ -71,12 +71,10 @@ class MixedSettingExperiment:
                     input_data = net.input_weight_matrix()
                     target_data = net.create_target_weights(input_data)
                     net.self_train(1, self.log_step_size, self.net_learning_rate, input_data, target_data)
-                input_data = net.input_weight_matrix()
-                net.self_application(input_data, self.SA_steps, self.log_step_size)
+                net.self_application(self.SA_steps, self.log_step_size)
 
             elif self.train_nets == "after_SA":
-                input_data = net.input_weight_matrix()
-                net.self_application(input_data, self.SA_steps, self.log_step_size)
+                net.self_application(self.SA_steps, self.log_step_size)
                 for _ in range(self.ST_steps_between_SA):
                     input_data = net.input_weight_matrix()
                     target_data = net.create_target_weights(input_data)

@@ -1,5 +1,6 @@
 import os.path
 import pickle
+from pathlib import Path
 
 from tqdm import tqdm
 
@@ -82,13 +83,13 @@ class SelfTrainExperiment:
 def run_ST_experiment(population_size, batch_size, net_input_size, net_hidden_size, net_out_size, net_learning_rate,
                       epochs, runs, run_name, name_hash):
     experiments = {}
-    check_folder("self_training")
+    logging_directory = Path('output') / 'self_training'
+    logging_directory.mkdir(parents=True, exist_ok=True)
 
     # Running the experiments
     for i in range(runs):
-        ST_directory_name = f"experiments/self_training/{run_name}_run_{i}_{str(population_size)}_nets_{epochs}_epochs_{str(name_hash)}"
+        experiment_name = f"{run_name}_run_{i}_{str(population_size)}_nets_{epochs}_epochs_{str(name_hash)}"
+        this_exp_directory = logging_directory / experiment_name
         ST_experiment = SelfTrainExperiment(
             population_size,
             batch_size,
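Note (not part of the commit): the pathlib calls that replace the old check_folder helper create the whole output tree in one step and tolerate repeated runs. A minimal standalone sketch of that behaviour, using the same paths as the hunk:

    from pathlib import Path

    logging_directory = Path('output') / 'self_training'
    # parents=True builds 'output' and 'output/self_training' together;
    # exist_ok=True makes a second run a no-op instead of raising FileExistsError.
    logging_directory.mkdir(parents=True, exist_ok=True)
    print(logging_directory.is_dir())  # True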
@@ -97,17 +98,19 @@ def run_ST_experiment(population_size, batch_size, net_input_size, net_hidden_si
             net_out_size,
             net_learning_rate,
             epochs,
-            ST_directory_name
+            this_exp_directory
         )
-        pickle.dump(ST_experiment, open(f"{ST_directory_name}/full_experiment_pickle.p", "wb"))
+        with (this_exp_directory / 'full_experiment_pickle.p').open('wb') as f:
+            pickle.dump(ST_experiment, f)
         experiments[i] = ST_experiment
 
     # Building a summary of all the runs
-    directory_name = f"experiments/self_training/summary_{run_name}_{runs}_runs_{str(population_size)}_nets_{epochs}_epochs_{str(name_hash)}"
-    os.mkdir(directory_name)
+    summary_name = f"/summary_{run_name}_{runs}_runs_{str(population_size)}_nets_{epochs}_epochs_{str(name_hash)}"
+    summary_directory_name = logging_directory / summary_name
+    summary_directory_name.mkdir(parents=True, exist_ok=True)
 
     summary_pre_title = "ST"
-    summary_fixpoint_experiment(runs, population_size, epochs, experiments, net_learning_rate, directory_name,
+    summary_fixpoint_experiment(runs, population_size, epochs, experiments, net_learning_rate, summary_directory_name,
                                 summary_pre_title)
 
 if __name__ == '__main__':
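Note (not part of the commit): the new summary_name f-string keeps a leading slash, and pathlib discards everything to the left of an absolute segment, so logging_directory / summary_name resolves to the filesystem root rather than to output/self_training. A small runnable sketch of that behaviour, with the summary name shortened for illustration:

    from pathlib import Path

    logging_directory = Path('output') / 'self_training'

    # An absolute right-hand segment wins: the left side is dropped entirely (POSIX).
    print(logging_directory / '/summary_demo')   # -> /summary_demo
    # Stripping the leading slash keeps the summary inside the logging directory.
    print(logging_directory / 'summary_demo')    # -> output/self_training/summary_demo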

@@ -5,25 +5,12 @@ from torch import Tensor
 from network import Net
 
 
-def overall_fixpoint_test(network: Net, epsilon: float, input_data) -> bool:
-    predicted_values = network(input_data)
-
-    check_smaller_epsilon = all(epsilon > predicted_values)
-    check_greater_epsilon = all(-epsilon < predicted_values)
-
-    if check_smaller_epsilon and check_greater_epsilon:
-        return True
-    else:
-        return False
-
-
 def is_divergent(network: Net) -> bool:
     for i in network.input_weight_matrix():
         weight_value = i[0].item()
 
-        if np.isnan(weight_value) or np.isinf(weight_value):
+        if np.isnan(weight_value).all() or np.isinf(weight_value).all():
             return True
 
     return False
@@ -33,26 +20,26 @@ def is_identity_function(network: Net, epsilon=pow(10, -5)) -> bool:
     target_data = network.create_target_weights(input_data)
     predicted_values = network(input_data)
 
-    return np.allclose(target_data.detach().numpy(), predicted_values.detach().numpy(), 0, epsilon)
+    return np.allclose(target_data.detach().numpy(), predicted_values.detach().numpy(),
+                       rtol=0, atol=epsilon)
 
 
-def is_zero_fixpoint(network: Net, input_data: Tensor, epsilon=pow(10, -5)) -> bool:
-    # FIXME: Is the the correct test?
-    raise NotImplementedError
-    result = overall_fixpoint_test(network, epsilon, input_data)
+def is_zero_fixpoint(network: Net) -> bool:
+    result = bool(len(np.nonzero(network.create_target_weights(network.input_weight_matrix()))))
 
     return result
 
 
-def is_secondary_fixpoint(network: Net, input_data: Tensor, epsilon: float) -> bool:
+def is_secondary_fixpoint(network: Net, epsilon: float = pow(10, -5)) -> bool:
     """ Secondary fixpoint check is done like this: compare first INPUT with second OUTPUT.
         If they are within the boundaries, then is secondary fixpoint. """
 
+    input_data = network.input_weight_matrix()
+    target_data = network.create_target_weights(input_data)
+
     # Calculating first output
     first_output = network(input_data)
 
     # Getting the second output by initializing a new net with the weights of the original net.
-    # FixMe: Is this correct? I Think it should be the same function thus the same network
     net_copy = copy.deepcopy(network)
     net_copy.apply_weights(first_output)
     input_data_2 = net_copy.input_weight_matrix()
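Note (not part of the commit): passing 0 and epsilon positionally to np.allclose already meant rtol=0, atol=epsilon, so spelling the keywords out changes readability rather than behaviour. With rtol=0 the check reduces to an element-wise |a - b| <= epsilon comparison, as this standalone sketch shows:

    import numpy as np

    eps = pow(10, -5)
    a = np.array([0.10, 0.20, 0.30])

    # With rtol=0 the comparison is purely absolute: |a - b| <= eps element-wise.
    print(np.allclose(a, a + 5e-6, rtol=0, atol=eps))   # True, within eps
    print(np.allclose(a, a + 5e-5, rtol=0, atol=eps))   # False, outside eps
    print(np.allclose(a, a + 5e-6, 0, eps))             # same result via positional args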
@@ -60,50 +47,33 @@ def is_secondary_fixpoint(network: Net, input_data: Tensor, epsilon: float) -> b
     # Calculating second output
     second_output = network(input_data_2)
 
-    # Perform the Check:
-    check_abs_within_epsilon = all(epsilon > abs(input_data - second_output))
-
-    # FIXME: This is wrong, is it?
-    # check_smaller_epsilon = all(epsilon > second_output)
-    # check_greater_epsilon = all(-epsilon < second_output)
-
-    return True if check_abs_within_epsilon else False
-
-
-def is_weak_fixpoint(network: Net, input_data: Tensor, epsilon: float) -> bool:
-    result = overall_fixpoint_test(network, epsilon, input_data)
-    return result
+    # Perform the Check: all(epsilon > abs(input_data - second_output))
+    check_abs_within_epsilon = np.allclose(target_data.detach().numpy(), second_output.detach().numpy(),
+                                           rtol=0, atol=epsilon)
+    return check_abs_within_epsilon
 
 
 def test_for_fixpoints(fixpoint_counter: Dict, nets: List, id_functions=None):
     id_functions = id_functions or None
-    zero_epsilon = pow(10, -5)
-    epsilon = pow(10, -3)
 
     for i in range(len(nets)):
         net = nets[i]
-        input_data = net.input_weight_matrix()
 
         if is_divergent(nets[i]):
             fixpoint_counter["divergent"] += 1
             nets[i].is_fixpoint = "divergent"
-        elif is_identity_function(nets[i], zero_epsilon):
+        elif is_identity_function(nets[i]):  # is default value
             fixpoint_counter["identity_func"] += 1
             nets[i].is_fixpoint = "identity_func"
             id_functions.append(nets[i])
-        elif is_zero_fixpoint(nets[i], input_data, zero_epsilon):
+        elif is_zero_fixpoint(nets[i]):
             fixpoint_counter["fix_zero"] += 1
             nets[i].is_fixpoint = "fix_zero"
-        elif is_weak_fixpoint(nets[i], input_data, epsilon):
-            fixpoint_counter["fix_weak"] += 1
-            nets[i].is_fixpoint = "fix_weak"
-        elif is_secondary_fixpoint(nets[i], input_data, zero_epsilon):
+        elif is_secondary_fixpoint(nets[i]):
             fixpoint_counter["fix_sec"] += 1
             nets[i].is_fixpoint = "fix_sec"
         else:
             fixpoint_counter["other_func"] += 1
             nets[i].is_fixpoint = "other_func"
 
     return id_functions
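Note (not part of the commit): in the rewritten is_zero_fixpoint, len(np.nonzero(...)) gives the number of dimensions of the weight array (one index array per axis), not the number of nonzero entries, so the expression is truthy even for an all-zero weight vector; np.count_nonzero is the call that counts entries. A standalone illustration:

    import numpy as np

    weights = np.zeros(5)             # an all-zero weight vector

    # np.nonzero returns one index array per dimension, so len() is the ndim (1 here),
    # no matter how many entries are actually nonzero.
    print(len(np.nonzero(weights)))   # 1  -> bool(...) is True even for all zeros
    print(np.count_nonzero(weights))  # 0  -> the actual number of nonzero entries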

main.py (30 changed lines)
@@ -1,5 +1,4 @@
-from experiments import run_ST_experiment, run_SA_experiment, run_soup_experiment, run_mixed_experiment, \
-    run_robustness_experiment
+from experiments import *
 import random
 
 
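Note (not part of the commit): from experiments import * only works cleanly because the package now re-exports the runner functions through the relative imports in the first hunk of this commit. A hypothetical sketch of that package __init__ with an explicit __all__; the file name and the __all__ list are assumptions on top of the commit, not code from it:

    # Hypothetical experiments/__init__.py (file name assumed, not shown in the diff).
    from .mixed_setting_exp import run_mixed_experiment
    from .robustness_exp import run_robustness_experiment
    from .self_application_exp import run_SA_experiment
    from .self_train_exp import run_ST_experiment
    from .soup_exp import run_soup_experiment

    # An explicit __all__ pins down exactly what "from experiments import *" brings into main.py.
    __all__ = ['run_mixed_experiment', 'run_robustness_experiment', 'run_SA_experiment',
               'run_ST_experiment', 'run_soup_experiment']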
@@ -19,17 +18,20 @@ def run_experiments(run_ST, run_SA, run_soup, run_mixed, run_robustness):
     if run_soup:
         print(f"\n Running the soup experiment:")
         run_soup_experiment(soup_population_size, soup_attack_chance, NET_INPUT_SIZE, soup_net_hidden_size,
-                            NET_OUT_SIZE, soup_net_learning_rate, soup_epochs, soup_log_step_size, soup_runs, soup_runs_name,
-                            soup_name_hash, soup_ST_steps, soup_train_nets)
+                            NET_OUT_SIZE, soup_net_learning_rate, soup_epochs, soup_log_step_size, soup_runs,
+                            soup_runs_name, soup_name_hash, soup_ST_steps, soup_train_nets)
     if run_mixed:
         print(f"\n Running the mixed experiment:")
         run_mixed_experiment(mixed_population_size, NET_INPUT_SIZE, mixed_net_hidden_size, NET_OUT_SIZE,
                              mixed_net_learning_rate, mixed_train_nets, mixed_epochs, mixed_SA_steps,
-                             mixed_ST_steps_between_SA, mixed_log_step_size, mixed_name_hash, mixed_total_runs, mixed_runs_name)
+                             mixed_ST_steps_between_SA, mixed_log_step_size, mixed_name_hash, mixed_total_runs,
+                             mixed_runs_name)
     if run_robustness:
         print(f"Running the robustness experiment:")
         run_robustness_experiment(rob_population_size, rob_log_step_size, NET_INPUT_SIZE, rob_net_hidden_size,
-                                  NET_OUT_SIZE, rob_net_learning_rate, rob_ST_steps, rob_runs, rob_runs_name, rob_name_hash)
+                                  NET_OUT_SIZE, rob_net_learning_rate, rob_ST_steps, rob_runs, rob_runs_name,
+                                  rob_name_hash)
 
     if not run_ST and not run_SA and not run_soup and not run_mixed and not run_robustness:
         print(f"No experiments to be run.")
@@ -38,9 +40,13 @@ if __name__ == '__main__':
     # Constants:
     NET_INPUT_SIZE = 4
     NET_OUT_SIZE = 1
+    run_ST_experiment_bool = True
+    run_SA_experiment_bool = False
+    run_soup_experiment_bool = False
+    run_mixed_experiment_bool = False
+    run_robustness_bool = False
 
     """ ------------------------------------- Self-training (ST) experiment ------------------------------------- """
-    run_ST_experiment_bool = False
 
     # Define number of runs & name:
     ST_runs = 1
@@ -57,9 +63,6 @@ if __name__ == '__main__':
     ST_name_hash = random.getrandbits(32)
 
     """ ----------------------------------- Self-application (SA) experiment ----------------------------------- """
-
-    run_SA_experiment_bool = False
-
     # Define number of runs, name, etc.:
     SA_runs_name = "test-17"
     SA_runs = 2
@@ -81,9 +84,6 @@ if __name__ == '__main__':
     SA_name_hash = random.getrandbits(32)
 
     """ -------------------------------------------- Soup experiment -------------------------------------------- """
-
-    run_soup_experiment_bool = False
-
     # Define number of runs, name, etc.:
     soup_runs = 1
     soup_runs_name = "test-16"
@@ -107,8 +107,6 @@ if __name__ == '__main__':
 
     """ ------------------------------------------- Mixed experiment -------------------------------------------- """
 
-    run_mixed_experiment_bool = False
-
     # Define number of runs, name, etc.:
     mixed_runs_name = "test-17"
     mixed_total_runs = 2
@@ -132,8 +130,6 @@ if __name__ == '__main__':
     mixed_name_hash = random.getrandbits(32)
 
     """ ----------------------------------------- Robustness experiment ----------------------------------------- """
-    run_robustness_bool = True
-
     # Define number of runs & name:
     rob_runs = 3
     rob_runs_name = "test-07"

@@ -1,6 +1,5 @@
 from pathlib import Path
-from tokenize import String
-from typing import List, Dict
+from typing import List, Dict, Union
 
 from tqdm import tqdm
 import matplotlib.pyplot as plt
@@ -19,7 +18,7 @@ def plot_output(output):
     plt.show()
 
 
-def plot_loss(loss_array, directory, batch_size=1):
+def plot_loss(loss_array, directory: Union[str, Path], batch_size=1):
     """ Plotting the evolution of the loss function."""
 
     fig = plt.figure()
@@ -41,8 +40,8 @@ def plot_loss(loss_array, directory, batch_size=1):
     plt.clf()
 
 
-def bar_chart_fixpoints(fixpoint_counter: Dict, population_size: int, directory: String, learning_rate: float,
-                        exp_details: String, source_check=None):
+def bar_chart_fixpoints(fixpoint_counter: Dict, population_size: int, directory: Union[str, Path], learning_rate: float,
+                        exp_details: str, source_check=None):
     """ Plotting the number of fixpoints in a barchart. """
 
     fig = plt.figure()
@@ -73,8 +72,8 @@ def bar_chart_fixpoints(fixpoint_counter: Dict, population_size: int, directory:
     plt.clf()
 
 
-def plot_3d(matrices_weights_history, directory, population_size, z_axis_legend, exp_name="experiment", is_trained="",
-            batch_size=1):
+def plot_3d(matrices_weights_history, directory: Union[str, Path], population_size, z_axis_legend,
+            exp_name="experiment", is_trained="", batch_size=1):
     """ Plotting the the weights of the nets in a 3d form using principal component analysis (PCA) """
 
     fig = plt.figure()
@@ -109,7 +108,10 @@ def plot_3d(matrices_weights_history, directory, population_size, z_axis_legend,
     population_size = mpatches.Patch(color="white", label=f"Population: {population_size} networks")
 
     if z_axis_legend == "Self-application":
-        trained = mpatches.Patch(color="white", label=f"Trained: true") if is_trained == "_trained" else mpatches.Patch(color="white", label=f"Trained: false")
+        if is_trained == '_trained':
+            trained = mpatches.Patch(color="white", label=f"Trained: true")
+        else:
+            trained = mpatches.Patch(color="white", label=f"Trained: false")
         ax.legend(handles=[steps, population_size, trained])
     else:
         ax.legend(handles=[steps, population_size])
@@ -134,7 +136,7 @@ def plot_3d(matrices_weights_history, directory, population_size, z_axis_legend,
     plt.show()
 
 
-def plot_3d_self_train(nets_array: List, exp_name: String, directory: String, batch_size: int):
+def plot_3d_self_train(nets_array: List, exp_name: str, directory: Union[str, Path], batch_size: int):
     """ Plotting the evolution of the weights in a 3D space when doing self training. """
 
     matrices_weights_history = []
@@ -150,7 +152,7 @@ def plot_3d_self_train(nets_array: List, exp_name: String, directory: String, ba
     return plot_3d(matrices_weights_history, directory, len(nets_array), z_axis_legend, exp_name, "", batch_size)
 
 
-def plot_3d_self_application(nets_array: List, exp_name: String, directory_name: String, batch_size: int) -> None:
+def plot_3d_self_application(nets_array: List, exp_name: str, directory_name: Union[str, Path], batch_size: int) -> None:
     """ Plotting the evolution of the weights in a 3D space when doing self application. """
 
     matrices_weights_history = []
@@ -171,7 +173,7 @@ def plot_3d_self_application(nets_array: List, exp_name: String, directory_name:
     plot_3d(matrices_weights_history, directory_name, len(nets_array), z_axis_legend, exp_name, is_trained, batch_size)
 
 
-def plot_3d_soup(nets_list, exp_name, directory):
+def plot_3d_soup(nets_list, exp_name, directory: Union[str, Path]):
     """ Plotting the evolution of the weights in a 3D space for the soup environment. """
 
     # This batch size is not relevant for soups. To not affect the number of epochs shown in the 3D plot,
@@ -182,7 +184,7 @@ def plot_3d_soup(nets_list, exp_name, directory):
 
 
 def line_chart_fixpoints(fixpoint_counters_history: list, epochs: int, ST_steps_between_SA: int,
-                         SA_steps, directory: String, population_size: int):
+                         SA_steps, directory: Union[str, Path], population_size: int):
     """ Plotting the percentage of fixpoints after each iteration of SA & ST steps. """
 
     fig = plt.figure()
@@ -211,7 +213,7 @@ def line_chart_fixpoints(fixpoint_counters_history: list, epochs: int, ST_steps_
     plt.clf()
 
 
-def box_plot(data, directory, population_size):
+def box_plot(data, directory: Union[str, Path], population_size):
     fig, axs = plt.subplots(nrows=1, ncols=2, figsize=(10, 7))
 
     # ax = fig.add_axes([0, 0, 1, 1])
@@ -232,7 +234,7 @@ def box_plot(data, directory, population_size):
     plt.clf()
 
 
-def write_file(text, directory):
+def write_file(text, directory: Union[str, Path]):
     directory = Path(directory)
     filepath = directory / 'experiment.txt'
     with filepath.open('w+') as f:
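Note (not part of the commit): the visualization signatures above swap the unrelated tokenize.String annotation for typing's Union[str, Path], and write_file shows the companion pattern of normalising the argument with Path(directory) once at the top. A minimal standalone sketch of that pattern; save_note and its file name are illustrative, not from the repository:

    from pathlib import Path
    from typing import Union


    def save_note(text: str, directory: Union[str, Path]) -> Path:
        directory = Path(directory)              # a str or a Path both end up as a Path
        directory.mkdir(parents=True, exist_ok=True)
        filepath = directory / 'note.txt'
        filepath.write_text(text)
        return filepath


    # Callers can pass either form interchangeably.
    save_note('hello', 'output/demo')
    save_note('hello again', Path('output') / 'demo')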