deleted all files from the old code

Cristian Lenta
2021-05-03 06:36:23 +00:00
parent 9ce36c2d0e
commit 9bd65713fe
111 changed files with 2 additions and 8338 deletions

View File

@@ -1,70 +0,0 @@
import sys
import os

# Add the top-level directory to the module search path
sys.path.append(os.path.join('..', '.'))

from util import *
from experiment import *
from network import *

import keras.backend as K


def generate_counters():
    return {'divergent': 0, 'fix_zero': 0, 'fix_other': 0, 'fix_sec': 0, 'other': 0}


def count(counters, net, notable_nets=[]):
    if net.is_diverged():
        counters['divergent'] += 1
    elif net.is_fixpoint():
        if net.is_zero():
            counters['fix_zero'] += 1
        else:
            counters['fix_other'] += 1
            notable_nets += [net]
    elif net.is_fixpoint(2):
        counters['fix_sec'] += 1
        notable_nets += [net]
    else:
        counters['other'] += 1
    return counters, notable_nets


if __name__ == '__main__':
    with Experiment('applying_fixpoint') as exp:
        exp.trials = 50
        exp.run_count = 100
        exp.epsilon = 1e-4
        net_generators = []
        for activation in ['linear']:  # , 'sigmoid', 'relu']:
            for use_bias in [False]:
                net_generators += [lambda activation=activation, use_bias=use_bias: WeightwiseNeuralNetwork(width=2, depth=2).with_keras_params(activation=activation, use_bias=use_bias)]
                net_generators += [lambda activation=activation, use_bias=use_bias: AggregatingNeuralNetwork(aggregates=4, width=2, depth=2).with_keras_params(activation=activation, use_bias=use_bias)]
                net_generators += [lambda activation=activation, use_bias=use_bias: RecurrentNeuralNetwork(width=2, depth=2).with_keras_params(activation=activation, use_bias=use_bias)]
        all_counters = []
        all_notable_nets = []
        all_names = []
        for net_generator_id, net_generator in enumerate(net_generators):
            counters = generate_counters()
            notable_nets = []
            for _ in tqdm(range(exp.trials)):
                net = ParticleDecorator(net_generator())
                net.with_params(epsilon=exp.epsilon)
                name = str(net.net.__class__.__name__) + " activation='" + str(net.get_keras_params().get('activation')) + "' use_bias=" + str(net.get_keras_params().get('use_bias'))
                for run_id in range(exp.run_count):
                    loss = net.self_attack()
                count(counters, net, notable_nets)
            all_counters += [counters]
            all_notable_nets += [notable_nets]
            all_names += [name]
            K.clear_session()
        exp.save(all_counters=all_counters)
        exp.save(trajectorys=exp.without_particles())
        # net types reached in the end
        # exp.save(all_notable_nets=all_notable_nets)
        exp.save(all_names=all_names)  # experiment setups
        for exp_id, counter in enumerate(all_counters):
            exp.log(all_names[exp_id])
            exp.log(all_counters[exp_id])
            exp.log('\n')
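Aside (not part of the deleted file): the generator lambdas above bind the loop variables as default arguments because Python closures capture names late. A minimal illustration with a hypothetical tag variable:

    funcs = []
    for tag in ['a', 'b']:
        funcs += [lambda: tag]          # late binding: every call returns 'b'
    print([f() for f in funcs])         # ['b', 'b']

    funcs = []
    for tag in ['a', 'b']:
        funcs += [lambda tag=tag: tag]  # default argument freezes the current value
    print([f() for f in funcs])         # ['a', 'b']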

File diff suppressed because one or more lines are too long

View File

@@ -1,4 +0,0 @@
TrainingNeuralNetworkDecorator activation='linear' use_bias=False
{'xs': [0, 10, 20, 30, 40, 50, 60, 70, 80, 90, 100], 'ys': [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], 'zs': [0.0, 1.2, 5.2, 7.4, 8.1, 9.1, 9.6, 9.8, 10.0, 9.9, 9.9]}

File diff suppressed because one or more lines are too long

Binary file not shown.


File diff suppressed because one or more lines are too long

View File

@@ -1,12 +0,0 @@
WeightwiseNeuralNetwork activation='linear' use_bias=False
{'divergent': 0, 'fix_zero': 0, 'fix_other': 50, 'fix_sec': 0, 'other': 0}
AggregatingNeuralNetwork activation='linear' use_bias=False
{'divergent': 0, 'fix_zero': 0, 'fix_other': 0, 'fix_sec': 0, 'other': 50}
RecurrentNeuralNetwork activation='linear' use_bias=False
{'divergent': 38, 'fix_zero': 0, 'fix_other': 0, 'fix_sec': 0, 'other': 12}

View File

@@ -1 +0,0 @@
{'divergent': 11, 'fix_zero': 9, 'fix_other': 0, 'fix_sec': 0, 'other': 0}

View File

@@ -1,67 +0,0 @@
import sys
import os

# Add the top-level directory to the module search path
sys.path.append(os.path.join('..', '.'))

from util import *
from experiment import *
from network import *

import keras.backend


def generate_counters():
    return {'divergent': 0, 'fix_zero': 0, 'fix_other': 0, 'fix_sec': 0, 'other': 0}


def count(counters, net, notable_nets=[]):
    if net.is_diverged():
        counters['divergent'] += 1
    elif net.is_fixpoint():
        if net.is_zero():
            counters['fix_zero'] += 1
        else:
            counters['fix_other'] += 1
            notable_nets += [net]
    elif net.is_fixpoint(2):
        counters['fix_sec'] += 1
        notable_nets += [net]
    else:
        counters['other'] += 1
    return counters, notable_nets


if __name__ == '__main__':
    with Experiment('fixpoint-density') as exp:
        # NOTE: settings could/should stay this way
        # FFT doesn't work though
        exp.trials = 100000
        exp.epsilon = 1e-4
        net_generators = []
        for activation in ['linear']:
            net_generators += [lambda activation=activation: WeightwiseNeuralNetwork(width=2, depth=2).with_keras_params(activation=activation, use_bias=False)]
            net_generators += [lambda activation=activation: AggregatingNeuralNetwork(aggregates=4, width=2, depth=2).with_keras_params(activation=activation, use_bias=False)]
            # net_generators += [lambda activation=activation: FFTNeuralNetwork(aggregates=4, width=2, depth=2).with_keras_params(activation=activation, use_bias=False)]
            # net_generators += [lambda activation=activation: RecurrentNeuralNetwork(width=2, depth=2).with_keras_params(activation=activation, use_bias=False)]
        all_counters = []
        all_notable_nets = []
        all_names = []
        for net_generator_id, net_generator in enumerate(net_generators):
            counters = generate_counters()
            notable_nets = []
            for _ in tqdm(range(exp.trials)):
                net = net_generator().with_params(epsilon=exp.epsilon)
                net = ParticleDecorator(net)
                name = str(net.__class__.__name__) + " activation='" + str(net.get_keras_params().get('activation')) + "' use_bias='" + str(net.get_keras_params().get('use_bias')) + "'"
                count(counters, net, notable_nets)
                keras.backend.clear_session()
            all_counters += [counters]
            # all_notable_nets += [notable_nets]
            all_names += [name]
        exp.save(all_counters=all_counters)
        exp.save(all_notable_nets=all_notable_nets)
        exp.save(all_names=all_names)
        for exp_id, counter in enumerate(all_counters):
            exp.log(all_names[exp_id])
            exp.log(all_counters[exp_id])
            exp.log('\n')
        print('Done')

View File

@@ -1,93 +0,0 @@
import sys
import os

# Add the top-level directory to the module search path
sys.path.append(os.path.join('..', '.'))

import copy

import numpy as np

from util import *
from experiment import *
from network import *
from soup import prng

import keras.backend

from statistics import mean

avg = mean


def generate_fixpoint_weights():
    return [
        np.array([[1.0, 0.0], [0.0, 0.0], [0.0, 0.0], [0.0, 0.0]], dtype=np.float32),
        np.array([[1.0, 0.0], [0.0, 0.0]], dtype=np.float32),
        np.array([[1.0], [0.0]], dtype=np.float32)
    ]


def generate_fixpoint_net():
    # NOTE: Weightwise only is all we can do right now IMO
    net = WeightwiseNeuralNetwork(width=2, depth=2).with_keras_params(activation='sigmoid')
    # I don't know if this works for aggregating. We don't actually need it, though.
    # net = AggregatingNeuralNetwork(aggregates=4, width=2, depth=2).with_keras_params(activation='sigmoid')
    net.set_weights(generate_fixpoint_weights())
    return net


def vary(old_weights, e=1.0):
    # Shift every single weight up or down by a random amount drawn from [0, e)
    new_weights = copy.deepcopy(old_weights)
    for layer_id, layer in enumerate(new_weights):
        for cell_id, cell in enumerate(layer):
            for weight_id, weight in enumerate(cell):
                if prng() < 0.5:
                    new_weights[layer_id][cell_id][weight_id] = weight + prng() * e
                else:
                    new_weights[layer_id][cell_id][weight_id] = weight - prng() * e
    return new_weights


if __name__ == '__main__':
    with Experiment('known-fixpoint-variation') as exp:
        exp.depth = 10
        exp.trials = 100
        exp.max_steps = 100
        exp.epsilon = 1e-4
        exp.xs = []
        exp.ys = []
        exp.zs = []
        exp.notable_nets = []
        current_scale = 1.0
        for _ in range(exp.depth):
            print('variation scale ' + str(current_scale))
            for _ in tqdm(range(exp.trials)):
                net = generate_fixpoint_net().with_params(epsilon=exp.epsilon)
                net = ParticleDecorator(net)
                net.set_weights(vary(net.get_weights(), current_scale))
                time_to_something = 0
                time_as_fixpoint = 0
                still_fixpoint = True
                for _ in range(exp.max_steps):
                    net.self_attack()
                    if net.is_zero() or net.is_diverged():
                        break
                    if net.is_fixpoint():
                        if still_fixpoint:
                            time_as_fixpoint += 1
                        else:
                            print('remarkable')
                            exp.notable_nets += [net.get_weights()]
                            still_fixpoint = True
                    else:
                        still_fixpoint = False
                    time_to_something += 1
                exp.xs += [current_scale]
                # time steps taken to reach divergence or zero (reaching another fixpoint basically never happens)
                exp.ys += [time_to_something]
                # time steps still regarded as the initial fixpoint
                exp.zs += [time_as_fixpoint]
                keras.backend.clear_session()
            current_scale /= 10.0
        for d in range(exp.depth):
            exp.log('variation 10e-' + str(d))
            exp.log('avg time to vergence ' + str(avg(exp.ys[d * exp.trials:(d + 1) * exp.trials])))
            exp.log('avg time as fixpoint ' + str(avg(exp.zs[d * exp.trials:(d + 1) * exp.trials])))
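For orientation, a quick hypothetical smoke test of vary() (not part of the deleted file), runnable given the definitions above; the deviation should stay below the chosen scale, assuming prng() yields values in [0, 1):

    weights = generate_fixpoint_weights()
    for scale in (1.0, 1e-3):
        varied = vary(weights, e=scale)
        max_dev = max(np.max(np.abs(v - w)) for v, w in zip(varied, weights))
        print('scale', scale, 'max deviation', max_dev)  # stays below scale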

View File

@@ -1,110 +0,0 @@
import sys
import os

# Add the top-level directory to the module search path
sys.path.append(os.path.join('..', '.'))

from typing import Tuple

from util import *
from experiment import *
from network import *
from soup import *

import keras.backend

from statistics import mean

avg = mean


def generate_counters():
    """
    Initial build of the counter dict, to store counts.

    :rtype: dict
    :return: dictionary holding counters for: 'divergent', 'fix_zero', 'fix_other', 'fix_sec', 'other'
    """
    return {'divergent': 0, 'fix_zero': 0, 'fix_other': 0, 'fix_sec': 0, 'other': 0}


def count(counters, soup, notable_nets=[]):
    """
    Count the occurrences of the types of weight trajectories.

    :param counters: A counter dictionary.
    :param soup: A Soup
    :param notable_nets: A list to store and save interesting candidates

    :rtype: Tuple[dict, list]
    :return: Both the counter dictionary and the list of interesting nets.
    """
    for net in soup.particles:
        if net.is_diverged():
            counters['divergent'] += 1
        elif net.is_fixpoint():
            if net.is_zero():
                counters['fix_zero'] += 1
            else:
                counters['fix_other'] += 1
                # notable_nets += [net]
        # elif net.is_fixpoint(2):
        #     counters['fix_sec'] += 1
        #     notable_nets += [net]
        else:
            counters['other'] += 1
    return counters, notable_nets


if __name__ == '__main__':
    with SoupExperiment('learn-from-soup') as exp:
        exp.soup_size = 10
        exp.soup_life = 100
        exp.trials = 10
        exp.learn_from_severity_values = [10 * i for i in range(11)]
        exp.epsilon = 1e-4
        net_generators = []
        for activation in ['linear']:  # ['sigmoid', 'linear', 'relu']:
            for use_bias in [False]:
                net_generators += [lambda activation=activation, use_bias=use_bias: WeightwiseNeuralNetwork(width=2, depth=2).with_keras_params(activation=activation, use_bias=use_bias)]
                # net_generators += [lambda activation=activation, use_bias=use_bias: AggregatingNeuralNetwork(aggregates=4, width=2, depth=2).with_keras_params(activation=activation, use_bias=use_bias)]
                # net_generators += [lambda activation=activation, use_bias=use_bias: RecurrentNeuralNetwork(width=2, depth=2).with_keras_params(activation=activation, use_bias=use_bias)]
        all_names = []
        all_data = []
        for net_generator_id, net_generator in enumerate(net_generators):
            xs = []
            ys = []
            zs = []
            notable_nets = []
            for learn_from_severity in exp.learn_from_severity_values:
                counters = generate_counters()
                results = []
                for _ in tqdm(range(exp.trials)):
                    soup = Soup(exp.soup_size, lambda net_generator=net_generator, exp=exp: TrainingNeuralNetworkDecorator(net_generator()).with_params(epsilon=exp.epsilon))
                    soup.with_params(attacking_rate=-1, learn_from_rate=0.1, train=0, learn_from_severity=learn_from_severity)
                    soup.seed()
                    name = str(soup.particles[0].net.__class__.__name__) + " activation='" + str(soup.particles[0].get_keras_params().get('activation')) + "' use_bias=" + str(soup.particles[0].get_keras_params().get('use_bias'))
                    for time in range(exp.soup_life):
                        soup.evolve()
                    count(counters, soup, notable_nets)
                    keras.backend.clear_session()
                xs += [learn_from_severity]
                ys += [float(counters['fix_zero']) / float(exp.trials)]
                zs += [float(counters['fix_other']) / float(exp.trials)]
            all_names += [name]
            # xs: learn_from_severity values according to exp.learn_from_severity_values
            # ys: zero fixpoints after life time
            # zs: non-zero fixpoints after life time
            all_data += [{'xs': xs, 'ys': ys, 'zs': zs}]
        exp.save(all_names=all_names)
        exp.save(all_data=all_data)
        exp.save(soup=soup.without_particles())
        for exp_id, name in enumerate(all_names):
            exp.log(all_names[exp_id])
            exp.log(all_data[exp_id])
            exp.log('\n')
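A hedged sketch (not in the original repo) of how the saved curves could be inspected afterwards. It assumes exp.save writes one dill file per keyword into the experiment's result directory, as the soup.dill comment further down in this commit suggests; the path and the matplotlib usage are assumptions:

    import dill
    import matplotlib.pyplot as plt

    with open('all_data.dill', 'rb') as f:  # path is an assumption
        all_data = dill.load(f)

    curves = all_data[0]
    plt.plot(curves['xs'], curves['ys'], label='zero fixpoints')
    plt.plot(curves['xs'], curves['zs'], label='non-zero fixpoints')
    plt.xlabel('learn_from_severity')
    plt.ylabel('fixpoints per soup (avg over trials)')
    plt.legend()
    plt.show()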

View File

@@ -1,101 +0,0 @@
import sys
import os

from typing import Tuple

# Add the top-level directory to the module search path
sys.path.append(os.path.join('..', '.'))

from util import *
from experiment import *
from network import *

import keras.backend


def generate_counters():
    """
    Initial build of the counter dict, to store counts.

    :rtype: dict
    :return: dictionary holding counters for: 'divergent', 'fix_zero', 'fix_other', 'fix_sec', 'other'
    """
    return {'divergent': 0, 'fix_zero': 0, 'fix_other': 0, 'fix_sec': 0, 'other': 0}


def count(counters, net, notable_nets=[]):
    """
    Count the occurrences of the types of weight trajectories.

    :param counters: A counter dictionary.
    :param net: A Neural Network
    :param notable_nets: A list to store and save interesting candidates

    :rtype: Tuple[dict, list]
    :return: Both the counter dictionary and the list of interesting nets.
    """
    if net.is_diverged():
        counters['divergent'] += 1
    elif net.is_fixpoint():
        if net.is_zero():
            counters['fix_zero'] += 1
        else:
            counters['fix_other'] += 1
            notable_nets += [net]
    elif net.is_fixpoint(2):
        counters['fix_sec'] += 1
        notable_nets += [net]
    else:
        counters['other'] += 1
    return counters, notable_nets


if __name__ == '__main__':
    with Experiment('mixed-self-fixpoints') as exp:
        exp.trials = 20
        exp.selfattacks = 4
        exp.trains_per_selfattack_values = [50 * i for i in range(11)]
        exp.epsilon = 1e-4
        net_generators = []
        for activation in ['linear']:  # , 'sigmoid', 'relu']:
            for use_bias in [False]:
                net_generators += [lambda activation=activation, use_bias=use_bias: WeightwiseNeuralNetwork(width=2, depth=2).with_keras_params(activation=activation, use_bias=use_bias)]
                net_generators += [lambda activation=activation, use_bias=use_bias: AggregatingNeuralNetwork(aggregates=4, width=2, depth=2).with_keras_params(activation=activation, use_bias=use_bias)]
                # net_generators += [lambda activation=activation, use_bias=use_bias: FFTNeuralNetwork(aggregates=4, width=2, depth=2).with_keras_params(activation=activation, use_bias=use_bias)]
                net_generators += [lambda activation=activation, use_bias=use_bias: RecurrentNeuralNetwork(width=2, depth=2).with_keras_params(activation=activation, use_bias=use_bias)]
        all_names = []
        all_data = []
        for net_generator_id, net_generator in enumerate(net_generators):
            xs = []
            ys = []
            for trains_per_selfattack in exp.trains_per_selfattack_values:
                counters = generate_counters()
                notable_nets = []
                for _ in tqdm(range(exp.trials)):
                    net = ParticleDecorator(net_generator())
                    net = TrainingNeuralNetworkDecorator(net).with_params(epsilon=exp.epsilon)
                    name = str(net.net.net.__class__.__name__) + " activation='" + str(net.get_keras_params().get('activation')) + "' use_bias=" + str(net.get_keras_params().get('use_bias'))
                    for selfattack_id in range(exp.selfattacks):
                        net.self_attack()
                        for train_id in range(trains_per_selfattack):
                            loss = net.compiled().train(epoch=selfattack_id * trains_per_selfattack + train_id)
                            if net.is_diverged() or net.is_fixpoint():
                                break
                    count(counters, net, notable_nets)
                    keras.backend.clear_session()
                xs += [trains_per_selfattack]
                ys += [float(counters['fix_zero'] + counters['fix_other']) / float(exp.trials)]
            all_names += [name]
            # xs: how many trains per self-attack, from exp.trains_per_selfattack_values
            # ys: average number of fixpoints found
            all_data += [{'xs': xs, 'ys': ys}]
        exp.save(all_names=all_names)
        exp.save(all_data=all_data)
        for exp_id, name in enumerate(all_names):
            exp.log(all_names[exp_id])
            exp.log(all_data[exp_id])
            exp.log('\n')

View File

@@ -1,108 +0,0 @@
import sys
import os

# Add the top-level directory to the module search path
sys.path.append(os.path.join('..', '.'))

from typing import Tuple

from util import *
from experiment import *
from network import *
from soup import *

import keras.backend


def generate_counters():
    """
    Initial build of the counter dict, to store counts.

    :rtype: dict
    :return: dictionary holding counters for: 'divergent', 'fix_zero', 'fix_other', 'fix_sec', 'other'
    """
    return {'divergent': 0, 'fix_zero': 0, 'fix_other': 0, 'fix_sec': 0, 'other': 0}


def count(counters, soup, notable_nets=[]):
    """
    Count the occurrences of the types of weight trajectories.

    :param counters: A counter dictionary.
    :param soup: A Soup
    :param notable_nets: A list to store and save interesting candidates

    :rtype: Tuple[dict, list]
    :return: Both the counter dictionary and the list of interesting nets.
    """
    for net in soup.particles:
        if net.is_diverged():
            counters['divergent'] += 1
        elif net.is_fixpoint():
            if net.is_zero():
                counters['fix_zero'] += 1
            else:
                counters['fix_other'] += 1
                # notable_nets += [net]
        # elif net.is_fixpoint(2):
        #     counters['fix_sec'] += 1
        #     notable_nets += [net]
        else:
            counters['other'] += 1
    return counters, notable_nets


if __name__ == '__main__':
    with Experiment('mixed-soup') as exp:
        exp.trials = 10
        exp.soup_size = 10
        exp.soup_life = 5
        exp.trains_per_selfattack_values = [10 * i for i in range(11)]
        exp.epsilon = 1e-4
        net_generators = []
        for activation in ['linear']:  # ['linear', 'sigmoid', 'relu']:
            for use_bias in [False]:
                net_generators += [lambda activation=activation, use_bias=use_bias: WeightwiseNeuralNetwork(width=2, depth=2).with_keras_params(activation=activation, use_bias=use_bias)]
                net_generators += [lambda activation=activation, use_bias=use_bias: AggregatingNeuralNetwork(aggregates=4, width=2, depth=2).with_keras_params(activation=activation, use_bias=use_bias)]
                # net_generators += [lambda activation=activation, use_bias=use_bias: RecurrentNeuralNetwork(width=2, depth=2).with_keras_params(activation=activation, use_bias=use_bias)]
        all_names = []
        all_data = []
        for net_generator_id, net_generator in enumerate(net_generators):
            xs = []
            ys = []
            zs = []
            for trains_per_selfattack in exp.trains_per_selfattack_values:
                counters = generate_counters()
                notable_nets = []
                for soup_idx in tqdm(range(exp.trials)):
                    soup = Soup(exp.soup_size,
                                lambda net_generator=net_generator, exp=exp: TrainingNeuralNetworkDecorator(
                                    net_generator()).with_params(epsilon=exp.epsilon))
                    soup.with_params(attacking_rate=0.1, learn_from_rate=-1, train=trains_per_selfattack,
                                     learn_from_severity=-1)
                    soup.seed()
                    name = str(soup.particles[0].net.__class__.__name__) + " activation='" + str(
                        soup.particles[0].get_keras_params().get('activation')) + "' use_bias=" + str(
                        soup.particles[0].get_keras_params().get('use_bias'))
                    for _ in range(exp.soup_life):
                        soup.evolve()
                    count(counters, soup, notable_nets)
                    keras.backend.clear_session()
                xs += [trains_per_selfattack]
                ys += [float(counters['fix_zero']) / float(exp.trials)]
                zs += [float(counters['fix_other']) / float(exp.trials)]
            all_names += [name]
            # xs: how many trains per self-attack, from exp.trains_per_selfattack_values
            # ys: average number of zero fixpoints found
            # zs: average number of non-zero fixpoints
            all_data += [{'xs': xs, 'ys': ys, 'zs': zs}]
        exp.save(all_names=all_names)
        exp.save(all_data=all_data)
        for exp_id, name in enumerate(all_names):
            exp.log(all_names[exp_id])
            exp.log(all_data[exp_id])
            exp.log('\n')

View File

@@ -1,112 +0,0 @@
import sys
import os

# Add the top-level directory to the module search path
sys.path.append(os.path.join('..', '.'))

from soup import *
from experiment import *

import keras.backend as K

if __name__ == '__main__':

    def run_exp(net, prints=False):
        # INFO: run_id needs to be greater than 0, so that exp stores the trajectories!
        # NOTE: relies on 'run_id' (and, for prints, 'loss') from the enclosing scope.
        exp.run_net(net, 100, run_id=run_id + 1)
        exp.historical_particles[run_id] = net
        if prints:
            print("Fixpoint? " + str(net.is_fixpoint()))
            print("Loss " + str(loss))

    if True:
        # Weightwise Neural Network
        with FixpointExperiment(name="weightwise_self_application") as exp:
            for run_id in tqdm(range(20)):
                net = ParticleDecorator(WeightwiseNeuralNetwork(width=2, depth=2)
                                        .with_keras_params(activation='linear'))
                run_exp(net)
                K.clear_session()
            exp.log(exp.counters)
            exp.save(trajectorys=exp.without_particles())

    if False:
        # Aggregating Neural Network
        with FixpointExperiment(name="aggregating_self_application") as exp:
            for run_id in tqdm(range(10)):
                net = ParticleDecorator(AggregatingNeuralNetwork(aggregates=4, width=2, depth=2)
                                        .with_keras_params(activation='linear'))
                run_exp(net)
                K.clear_session()
            exp.log(exp.counters)
            exp.save(trajectorys=exp.without_particles())

    if False:
        # FFT Neural Network
        with FixpointExperiment() as exp:
            for run_id in tqdm(range(10)):
                net = ParticleDecorator(FFTNeuralNetwork(aggregates=4, width=2, depth=2)
                                        .with_keras_params(activation='linear'))
                run_exp(net)
                K.clear_session()
            exp.log(exp.counters)
            exp.save(trajectorys=exp.without_particles())

    if False:
        # ok, so this works quite reliably
        with FixpointExperiment(name="weightwise_learning") as exp:
            for i in range(10):
                run_count = 100
                net = TrainingNeuralNetworkDecorator(ParticleDecorator(WeightwiseNeuralNetwork(width=2, depth=2)))
                net.with_params(epsilon=0.0001).with_keras_params(activation='linear')
                exp.historical_particles[net.get_uid()] = net
                for run_id in tqdm(range(run_count + 1)):
                    net.compiled()
                    loss = net.train(epoch=run_id)
                    # run_exp(net)
                    # net.save_state(time=run_id)
                K.clear_session()
            exp.save(trajectorys=exp.without_particles())

    if False:
        # ok, so this works quite reliably
        with FixpointExperiment(name="aggregating_learning") as exp:
            for i in range(10):
                run_count = 100
                net = TrainingNeuralNetworkDecorator(ParticleDecorator(AggregatingNeuralNetwork(4, width=2, depth=2)))
                net.with_params(epsilon=0.0001).with_keras_params(activation='linear')
                exp.historical_particles[net.get_uid()] = net
                for run_id in tqdm(range(run_count + 1)):
                    net.compiled()
                    loss = net.train(epoch=run_id)
                    # run_exp(net)
                    # net.save_state(time=run_id)
                K.clear_session()
            exp.save(trajectorys=exp.without_particles())

    if False:
        # this explodes in our faces completely... NaN everywhere
        # TODO: What is happening here?
        with FixpointExperiment() as exp:
            run_count = 10000
            net = TrainingNeuralNetworkDecorator(RecurrentNeuralNetwork(width=2, depth=2)) \
                .with_params(epsilon=0.1e-2).with_keras_params(optimizer='sgd', activation='linear')
            for run_id in tqdm(range(run_count + 1)):
                loss = net.compiled().train()
                if run_id % 500 == 0:
                    net.print_weights()
                    # print(net.apply_to_network(net))
                    print("Fixpoint? " + str(net.is_fixpoint()))
                    print("Loss " + str(loss))
                    print()

    if False:
        # and this gets somewhat interesting... we can still achieve non-trivial fixpoints
        # over multiple applications when training enough in-between
        with MixedFixpointExperiment() as exp:
            for run_id in range(10):
                net = TrainingNeuralNetworkDecorator(FFTNeuralNetwork(2, width=2, depth=2)) \
                    .with_params(epsilon=0.0001, activation='sigmoid')
                exp.run_net(net, 500, 10)
                net.print_weights()
                print("Fixpoint? " + str(net.is_fixpoint()))
            exp.log(exp.counters)

View File

@@ -1,32 +0,0 @@
import sys
import os

# Add the top-level directory to the module search path
sys.path.append(os.path.join('..', '.'))

from soup import *
from experiment import *

if __name__ == '__main__':
    if True:
        with SoupExperiment("soup") as exp:
            for run_id in range(1):
                net_generator = lambda: TrainingNeuralNetworkDecorator(WeightwiseNeuralNetwork(2, 2)) \
                    .with_keras_params(activation='linear').with_params(epsilon=0.0001)
                # net_generator = lambda: TrainingNeuralNetworkDecorator(AggregatingNeuralNetwork(4, 2, 2)) \
                #     .with_keras_params(activation='linear')
                # net_generator = lambda: TrainingNeuralNetworkDecorator(FFTNeuralNetwork(4, 2, 2)) \
                #     .with_keras_params(activation='linear')
                # net_generator = lambda: RecurrentNeuralNetwork(2, 2).with_keras_params(activation='linear').with_params()
                soup = Soup(20, net_generator).with_params(remove_divergent=True, remove_zero=True,
                                                           train=30,
                                                           learn_from_rate=-1)
                soup.seed()
                for _ in tqdm(range(100)):
                    soup.evolve()
                exp.log(soup.count())
                # You can access soup.historical_particles[particle_uid].states[time_step]['loss']
                # or soup.historical_particles[particle_uid].states[time_step]['weights']
                # from soup.dill (see the loading sketch below).
                exp.save(soup=soup.without_particles())
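As referenced in the comment above, a minimal loading sketch (not part of the deleted file). The attribute names come from that comment; the file path is an assumption about where exp.save puts soup.dill:

    import dill

    with open('soup.dill', 'rb') as f:  # path relative to the experiment's result directory (assumed)
        soup = dill.load(f)

    particle = next(iter(soup.historical_particles.values()))
    print(particle.states[0]['loss'], particle.states[0]['weights'])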

View File

@@ -1,70 +0,0 @@
import sys
import os

# Add the top-level directory to the module search path
sys.path.append(os.path.join('..', '.'))

from util import *
from experiment import *
from network import *

import keras.backend as K


def generate_counters():
    return {'divergent': 0, 'fix_zero': 0, 'fix_other': 0, 'fix_sec': 0, 'other': 0}


def count(counters, net, notable_nets=[]):
    if net.is_diverged():
        counters['divergent'] += 1
    elif net.is_fixpoint():
        if net.is_zero():
            counters['fix_zero'] += 1
        else:
            counters['fix_other'] += 1
            notable_nets += [net]
    elif net.is_fixpoint(2):
        counters['fix_sec'] += 1
        notable_nets += [net]
    else:
        counters['other'] += 1
    return counters, notable_nets


if __name__ == '__main__':
    with Experiment('training_fixpoint') as exp:
        exp.trials = 50
        exp.run_count = 1000
        exp.epsilon = 1e-4
        net_generators = []
        for activation in ['linear']:  # , 'sigmoid', 'relu']:
            for use_bias in [False]:
                net_generators += [lambda activation=activation, use_bias=use_bias: WeightwiseNeuralNetwork(width=2, depth=2).with_keras_params(activation=activation, use_bias=use_bias)]
                net_generators += [lambda activation=activation, use_bias=use_bias: AggregatingNeuralNetwork(aggregates=4, width=2, depth=2).with_keras_params(activation=activation, use_bias=use_bias)]
                net_generators += [lambda activation=activation, use_bias=use_bias: RecurrentNeuralNetwork(width=2, depth=2).with_keras_params(activation=activation, use_bias=use_bias)]
        all_counters = []
        all_notable_nets = []
        all_names = []
        for net_generator_id, net_generator in enumerate(net_generators):
            counters = generate_counters()
            notable_nets = []
            for _ in tqdm(range(exp.trials)):
                net = ParticleDecorator(net_generator())
                net = TrainingNeuralNetworkDecorator(net).with_params(epsilon=exp.epsilon)
                name = str(net.net.net.__class__.__name__) + " activation='" + str(net.get_keras_params().get('activation')) + "' use_bias=" + str(net.get_keras_params().get('use_bias'))
                for run_id in range(exp.run_count):
                    loss = net.compiled().train(epoch=run_id + 1)
                count(counters, net, notable_nets)
            all_counters += [counters]
            all_notable_nets += [notable_nets]
            all_names += [name]
            K.clear_session()
        exp.save(all_counters=all_counters)
        exp.save(trajectorys=exp.without_particles())
        # net types reached in the end
        # exp.save(all_notable_nets=all_notable_nets)
        exp.save(all_names=all_names)  # experiment setups
        for exp_id, counter in enumerate(all_counters):
            exp.log(all_names[exp_id])
            exp.log(all_counters[exp_id])
            exp.log('\n')