bar plots
Binary file not shown.
@@ -1 +0,0 @@
-{'divergent': 0, 'fix_zero': 0, 'fix_other': 13, 'fix_sec': 0, 'other': 7}
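These dump files hold the fixpoint counters that the new bar plots presumably visualize. As a minimal sketch, assuming matplotlib (the helper name and labels are illustrative, not part of the commit), one such dict could be rendered like this:

# Hypothetical helper: render one fixpoint-counter dict as a bar chart.
# Only the dict layout is taken from the dump above; everything else is assumed.
import matplotlib.pyplot as plt

def plot_counters(counters, title='fixpoint counters'):
    labels = list(counters.keys())
    values = [counters[label] for label in labels]
    plt.bar(range(len(values)), values, tick_label=labels)
    plt.ylabel('count')
    plt.title(title)
    plt.tight_layout()
    plt.show()

plot_counters({'divergent': 0, 'fix_zero': 0, 'fix_other': 13, 'fix_sec': 0, 'other': 7})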
Binary file not shown.
Binary file not shown.
File diff suppressed because one or more lines are too long
Binary file not shown.
Binary file not shown.
@@ -0,0 +1,4 @@
+TrainingNeuralNetworkDecorator activation='linear' use_bias=False
+{'xs': [0, 10, 20, 30, 40, 50, 60, 70, 80, 90, 100], 'ys': [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], 'zs': [0.0, 1.2, 5.2, 7.4, 8.1, 9.1, 9.6, 9.8, 10.0, 9.9, 9.9]}
+
+
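Per the comments later in this diff, xs are the learn_from_severity values, ys the average zero-fixpoints per soup and zs the average non-zero fixpoints. A hedged plotting sketch for one such record (matplotlib assumed; nothing here is from the commit itself):

# Hypothetical grouped bars: zero- vs non-zero-fixpoint averages over learn_from_severity.
import matplotlib.pyplot as plt

data = {'xs': [0, 10, 20, 30, 40, 50, 60, 70, 80, 90, 100],
        'ys': [0.0] * 11,
        'zs': [0.0, 1.2, 5.2, 7.4, 8.1, 9.1, 9.6, 9.8, 10.0, 9.9, 9.9]}

width = 4  # bar width in x-units; the xs are spaced 10 apart
plt.bar([x - width / 2 for x in data['xs']], data['ys'], width=width, label='zero fixpoints (ys)')
plt.bar([x + width / 2 for x in data['xs']], data['zs'], width=width, label='non-zero fixpoints (zs)')
plt.xlabel('learn_from_severity')
plt.ylabel('avg fixpoints per soup')
plt.legend()
plt.show()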
Binary file not shown.
File diff suppressed because one or more lines are too long
Binary file not shown.
Image added: 207 KiB (preview not shown).
Binary file not shown.
File diff suppressed because one or more lines are too long
Binary file not shown.
Binary file not shown.
@@ -0,0 +1,4 @@
+TrainingNeuralNetworkDecorator activation='linear' use_bias=False
+{'xs': [0, 10, 20, 30, 40, 50, 60, 70, 80, 90, 100], 'ys': [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], 'zs': [0.0, 0.0, 0.6, 1.8, 2.7, 5.1, 5.8, 7.8, 8.5, 9.0, 8.8]}
+
+
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -0,0 +1,12 @@
+WeightwiseNeuralNetwork activation='linear' use_bias=False
+{'xs': [0, 50, 100, 150, 200, 250, 300, 350, 400, 450, 500], 'ys': [0.2, 0.3, 0.15, 0.55, 0.7, 0.85, 0.8, 0.95, 0.9, 1.0, 1.0]}
+
+
+AggregatingNeuralNetwork activation='linear' use_bias=False
+{'xs': [0, 50, 100, 150, 200, 250, 300, 350, 400, 450, 500], 'ys': [1.0, 0.95, 1.0, 1.0, 0.95, 0.9, 0.8, 1.0, 0.85, 1.0, 0.9]}
+
+
+RecurrentNeuralNetwork activation='linear' use_bias=False
+{'xs': [0, 50, 100, 150, 200, 250, 300, 350, 400, 450, 500], 'ys': [0.05, 0.0, 0.05, 0.0, 0.0, 0.1, 0.1, 0.05, 0.1, 0.0, 0.0]}
+
+
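These records pair each trains_per_selfattack value (xs, matching the [50 * i for i in range(11)] grid below) with a per-network outcome rate (ys, plausibly a counter over exp.trials). A hedged comparison plot of the three records above (matplotlib assumed):

# Hypothetical comparison plot of the three records above.
import matplotlib.pyplot as plt

xs = [0, 50, 100, 150, 200, 250, 300, 350, 400, 450, 500]
series = {
    'Weightwise':  [0.2, 0.3, 0.15, 0.55, 0.7, 0.85, 0.8, 0.95, 0.9, 1.0, 1.0],
    'Aggregating': [1.0, 0.95, 1.0, 1.0, 0.95, 0.9, 0.8, 1.0, 0.85, 1.0, 0.9],
    'Recurrent':   [0.05, 0.0, 0.05, 0.0, 0.0, 0.1, 0.1, 0.05, 0.1, 0.0, 0.0],
}
for label, ys in series.items():
    plt.plot(xs, ys, marker='o', label=label)
plt.xlabel('trains_per_selfattack')
plt.ylabel('rate')
plt.legend()
plt.show()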
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -0,0 +1,12 @@
+WeightwiseNeuralNetwork activation='linear' use_bias=False
+{'divergent': 0, 'fix_zero': 0, 'fix_other': 50, 'fix_sec': 0, 'other': 0}
+
+
+AggregatingNeuralNetwork activation='linear' use_bias=False
+{'divergent': 0, 'fix_zero': 0, 'fix_other': 0, 'fix_sec': 0, 'other': 50}
+
+
+RecurrentNeuralNetwork activation='linear' use_bias=False
+{'divergent': 38, 'fix_zero': 0, 'fix_other': 0, 'fix_sec': 0, 'other': 12}
+
+
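One natural reading of the commit title: per-network counter dumps like the three above become grouped bar charts. A hedged sketch (matplotlib assumed; only the numbers come from the dump):

# Hypothetical grouped bar chart comparing fixpoint counters across networks.
import matplotlib.pyplot as plt

results = {
    'Weightwise':  {'divergent': 0,  'fix_zero': 0, 'fix_other': 50, 'fix_sec': 0, 'other': 0},
    'Aggregating': {'divergent': 0,  'fix_zero': 0, 'fix_other': 0,  'fix_sec': 0, 'other': 50},
    'Recurrent':   {'divergent': 38, 'fix_zero': 0, 'fix_other': 0,  'fix_sec': 0, 'other': 12},
}
categories = ['divergent', 'fix_zero', 'fix_other', 'fix_sec', 'other']
width = 0.25
for i, (net_name, counters) in enumerate(results.items()):
    positions = [x + i * width for x in range(len(categories))]
    plt.bar(positions, [counters[c] for c in categories], width=width, label=net_name)
plt.xticks([x + width for x in range(len(categories))], categories)
plt.ylabel('nets out of 50')
plt.legend()
plt.show()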
Binary file not shown.
Binary file not shown.
@@ -0,0 +1 @@
+{'divergent': 11, 'fix_zero': 9, 'fix_other': 0, 'fix_sec': 0, 'other': 0}
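The generate_counters() and count() helpers used below sit outside this diff; judging from the dumps above, the counter dict presumably starts from zeroed categories. A sketch of the assumed shape (the real helper lives elsewhere in the repo):

# Assumed shape of the counter dict used throughout these experiments.
def generate_counters():
    return {'divergent': 0, 'fix_zero': 0, 'fix_other': 0, 'fix_sec': 0, 'other': 0}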
Binary file not shown.
File diff suppressed because one or more lines are too long
@@ -57,52 +57,54 @@ def count(counters, soup, notable_nets=[]):
     return counters, notable_nets
 
 
-with SoupExperiment('learn-from-soup') as exp:
-    exp.soup_size = 10
-    exp.soup_life = 100
-    exp.trials = 10
-    exp.learn_from_severity_values = [10 * i for i in range(11)]
-    exp.epsilon = 1e-4
-    net_generators = []
-    for activation in ['sigmoid']:  # ['linear', 'sigmoid', 'relu']:
-        for use_bias in [False]:
-            # net_generators += [lambda activation=activation, use_bias=use_bias: WeightwiseNeuralNetwork(width=2, depth=2).with_keras_params(activation=activation, use_bias=use_bias)]
-            net_generators += [lambda activation=activation, use_bias=use_bias: AggregatingNeuralNetwork(aggregates=4, width=2, depth=2).with_keras_params(activation=activation, use_bias=use_bias)]
-            # net_generators += [lambda activation=activation, use_bias=use_bias: RecurrentNeuralNetwork(width=2, depth=2).with_keras_params(activation=activation, use_bias=use_bias)]
-
-    all_names = []
-    all_data = []
-    for net_generator_id, net_generator in enumerate(net_generators):
-        xs = []
-        ys = []
-        zs = []
-        notable_nets = []
-        for learn_from_severity in exp.learn_from_severity_values:
-            counters = generate_counters()
-            results = []
-            for _ in tqdm(range(exp.trials)):
-                soup = Soup(exp.soup_size, lambda net_generator=net_generator, exp=exp: TrainingNeuralNetworkDecorator(net_generator()).with_params(epsilon=exp.epsilon))
-                soup.with_params(attacking_rate=-1, learn_from_rate=0.1, train=0, learn_from_severity=learn_from_severity)
-                soup.seed()
-                name = str(soup.particles[0].net.__class__.__name__) + " activation='" + str(soup.particles[0].get_keras_params().get('activation')) + "' use_bias=" + str(soup.particles[0].get_keras_params().get('use_bias'))
-                for time in range(exp.soup_life):
-                    soup.evolve()
-                count(counters, soup, notable_nets)
-                keras.backend.clear_session()
-
-            xs += [learn_from_severity]
-            ys += [float(counters['fix_zero']) / float(exp.trials)]
-            zs += [float(counters['fix_other']) / float(exp.trials)]
-        all_names += [name]
-        # xs: learn_from_severity according to exp.learn_from_severity_values
-        # ys: zero-fixpoints after life time
-        # zs: non-zero-fixpoints after life time
-        all_data += [{'xs': xs, 'ys': ys, 'zs': zs}]
-
-    exp.save(all_names=all_names)
-    exp.save(all_data=all_data)
-    exp.save(soup=soup.without_particles())
-    for exp_id, name in enumerate(all_names):
-        exp.log(all_names[exp_id])
-        exp.log(all_data[exp_id])
-        exp.log('\n')
+if __name__ == '__main__':
+
+    with SoupExperiment('learn-from-soup') as exp:
+        exp.soup_size = 10
+        exp.soup_life = 100
+        exp.trials = 10
+        exp.learn_from_severity_values = [10 * i for i in range(11)]
+        exp.epsilon = 1e-4
+        net_generators = []
+        for activation in ['linear']:  # ['sigmoid', 'linear', 'relu']:
+            for use_bias in [False]:
+                net_generators += [lambda activation=activation, use_bias=use_bias: WeightwiseNeuralNetwork(width=2, depth=2).with_keras_params(activation=activation, use_bias=use_bias)]
+                # net_generators += [lambda activation=activation, use_bias=use_bias: AggregatingNeuralNetwork(aggregates=4, width=2, depth=2).with_keras_params(activation=activation, use_bias=use_bias)]
+                # net_generators += [lambda activation=activation, use_bias=use_bias: RecurrentNeuralNetwork(width=2, depth=2).with_keras_params(activation=activation, use_bias=use_bias)]
+
+        all_names = []
+        all_data = []
+        for net_generator_id, net_generator in enumerate(net_generators):
+            xs = []
+            ys = []
+            zs = []
+            notable_nets = []
+            for learn_from_severity in exp.learn_from_severity_values:
+                counters = generate_counters()
+                results = []
+                for _ in tqdm(range(exp.trials)):
+                    soup = Soup(exp.soup_size, lambda net_generator=net_generator, exp=exp: TrainingNeuralNetworkDecorator(net_generator()).with_params(epsilon=exp.epsilon))
+                    soup.with_params(attacking_rate=-1, learn_from_rate=0.1, train=0, learn_from_severity=learn_from_severity)
+                    soup.seed()
+                    name = str(soup.particles[0].net.__class__.__name__) + " activation='" + str(soup.particles[0].get_keras_params().get('activation')) + "' use_bias=" + str(soup.particles[0].get_keras_params().get('use_bias'))
+                    for time in range(exp.soup_life):
+                        soup.evolve()
+                    count(counters, soup, notable_nets)
+                    keras.backend.clear_session()
+
+                xs += [learn_from_severity]
+                ys += [float(counters['fix_zero']) / float(exp.trials)]
+                zs += [float(counters['fix_other']) / float(exp.trials)]
+            all_names += [name]
+            # xs: learn_from_severity according to exp.learn_from_severity_values
+            # ys: zero-fixpoints after life time
+            # zs: non-zero-fixpoints after life time
+            all_data += [{'xs': xs, 'ys': ys, 'zs': zs}]
+
+        exp.save(all_names=all_names)
+        exp.save(all_data=all_data)
+        exp.save(soup=soup.without_particles())
+        for exp_id, name in enumerate(all_names):
+            exp.log(all_names[exp_id])
+            exp.log(all_data[exp_id])
+            exp.log('\n')
@@ -55,15 +55,15 @@ if __name__ == '__main__':
     with Experiment('mixed-self-fixpoints') as exp:
         exp.trials = 20
         exp.selfattacks = 4
-        exp.trains_per_selfattack_values = [100 * i for i in range(11)]
+        exp.trains_per_selfattack_values = [50 * i for i in range(11)]
         exp.epsilon = 1e-4
         net_generators = []
         for activation in ['linear']:  # , 'sigmoid', 'relu']:
             for use_bias in [False]:
                 net_generators += [lambda activation=activation, use_bias=use_bias: WeightwiseNeuralNetwork(width=2, depth=2).with_keras_params(activation=activation, use_bias=use_bias)]
-                # net_generators += [lambda activation=activation, use_bias=use_bias: AggregatingNeuralNetwork(aggregates=4, width=2, depth=2).with_keras_params(activation=activation, use_bias=use_bias)]
+                net_generators += [lambda activation=activation, use_bias=use_bias: AggregatingNeuralNetwork(aggregates=4, width=2, depth=2).with_keras_params(activation=activation, use_bias=use_bias)]
                 # net_generators += [lambda activation=activation, use_bias=use_bias: FFTNeuralNetwork(aggregates=4, width=2, depth=2).with_keras_params(activation=activation, use_bias=use_bias)]
-                # net_generators += [lambda activation=activation, use_bias=use_bias: RecurrentNeuralNetwork(width=2, depth=2).with_keras_params(activation=activation, use_bias=use_bias)]
+                net_generators += [lambda activation=activation, use_bias=use_bias: RecurrentNeuralNetwork(width=2, depth=2).with_keras_params(activation=activation, use_bias=use_bias)]
 
         all_names = []
         all_data = []
@@ -77,7 +77,7 @@ if __name__ == '__main__':
            for _ in tqdm(range(exp.trials)):
                net = ParticleDecorator(net_generator())
                net = TrainingNeuralNetworkDecorator(net).with_params(epsilon=exp.epsilon)
-               name = str(net.net.__class__.__name__) + " activation='" + str(net.get_keras_params().get('activation')) + "' use_bias=" + str(net.get_keras_params().get('use_bias'))
+               name = str(net.net.net.__class__.__name__) + " activation='" + str(net.get_keras_params().get('activation')) + "' use_bias=" + str(net.get_keras_params().get('use_bias'))
                for selfattack_id in range(exp.selfattacks):
                    net.self_attack()
                    for train_id in range(trains_per_selfattack):
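The one-line change above fixes the logged class name: after wrapping, net is a TrainingNeuralNetworkDecorator around a ParticleDecorator around the actual network, so the concrete class sits two .net hops down. A minimal stand-in illustrating the chain (these stub classes are not the repo's real ones):

# Stub classes only, to show the wrapper nesting assumed by the fix above.
class WeightwiseNeuralNetwork: pass

class ParticleDecorator:
    def __init__(self, net): self.net = net

class TrainingNeuralNetworkDecorator:
    def __init__(self, net): self.net = net

net = TrainingNeuralNetworkDecorator(ParticleDecorator(WeightwiseNeuralNetwork()))
print(net.net.__class__.__name__)      # ParticleDecorator -- the old, wrong label
print(net.net.net.__class__.__name__)  # WeightwiseNeuralNetwork -- the corrected label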
@@ -52,51 +52,57 @@ def count(counters, soup, notable_nets=[]):
             counters['other'] += 1
     return counters, notable_nets
 
 
 if __name__ == '__main__':
 
     with Experiment('mixed-self-fixpoints') as exp:
         exp.trials = 10
         exp.soup_size = 10
         exp.soup_life = 5
         exp.trains_per_selfattack_values = [10 * i for i in range(11)]
         exp.epsilon = 1e-4
         net_generators = []
         for activation in ['linear']:  # ['linear', 'sigmoid', 'relu']:
             for use_bias in [False]:
-                # net_generators += [lambda activation=activation, use_bias=use_bias: WeightwiseNeuralNetwork(width=2, depth=2).with_keras_params(activation=activation, use_bias=use_bias)]
+                net_generators += [lambda activation=activation, use_bias=use_bias: WeightwiseNeuralNetwork(width=2, depth=2).with_keras_params(activation=activation, use_bias=use_bias)]
                 net_generators += [lambda activation=activation, use_bias=use_bias: AggregatingNeuralNetwork(aggregates=4, width=2, depth=2).with_keras_params(activation=activation, use_bias=use_bias)]
                 # net_generators += [lambda activation=activation, use_bias=use_bias: RecurrentNeuralNetwork(width=2, depth=2).with_keras_params(activation=activation, use_bias=use_bias)]
 
         all_names = []
         all_data = []
         for net_generator_id, net_generator in enumerate(net_generators):
             xs = []
             ys = []
             zs = []
             for trains_per_selfattack in exp.trains_per_selfattack_values:
                 counters = generate_counters()
                 notable_nets = []
                 for soup_idx in tqdm(range(exp.trials)):
-                    soup = Soup(exp.soup_size, lambda net_generator=net_generator, exp=exp: TrainingNeuralNetworkDecorator(net_generator()).with_params(epsilon=exp.epsilon))
-                    soup.with_params(attacking_rate=0.1, learn_from_rate=-1, train=trains_per_selfattack, learn_from_severity=-1)
+                    soup = Soup(exp.soup_size,
+                                lambda net_generator=net_generator, exp=exp: TrainingNeuralNetworkDecorator(
+                                    net_generator()).with_params(epsilon=exp.epsilon))
+                    soup.with_params(attacking_rate=0.1, learn_from_rate=-1, train=trains_per_selfattack,
+                                     learn_from_severity=-1)
                     soup.seed()
-                    name = str(soup.particles[0].net.__class__.__name__) + " activation='" + str(soup.particles[0].get_keras_params().get('activation')) + "' use_bias=" + str(soup.particles[0].get_keras_params().get('use_bias'))
+                    name = str(soup.particles[0].net.__class__.__name__) + " activation='" + str(
+                        soup.particles[0].get_keras_params().get('activation')) + "' use_bias=" + str(
+                        soup.particles[0].get_keras_params().get('use_bias'))
                     for _ in range(exp.soup_life):
                         soup.evolve()
                     count(counters, soup, notable_nets)
                     keras.backend.clear_session()
 
                 xs += [trains_per_selfattack]
                 ys += [float(counters['fix_zero']) / float(exp.trials)]
                 zs += [float(counters['fix_other']) / float(exp.trials)]
             all_names += [name]
             # xs: how many trains per self-attack from exp.trains_per_selfattack_values
             # ys: average amount of zero-fixpoints found
             # zs: average amount of non-zero fixpoints
-            all_data += [{'xs':xs, 'ys':ys, 'zs':zs}]
+            all_data += [{'xs': xs, 'ys': ys, 'zs': zs}]
 
         exp.save(all_names=all_names)
         exp.save(all_data=all_data)
         for exp_id, name in enumerate(all_names):
             exp.log(all_names[exp_id])
             exp.log(all_data[exp_id])
             exp.log('\n')
@@ -19,19 +19,18 @@ if __name__ == '__main__':
 
     if True:
         # WeightWise Neural Network
-        for _ in range(10):
-            with FixpointExperiment() as exp:
-                for run_id in tqdm(range(20)):
-                    net = ParticleDecorator(WeightwiseNeuralNetwork(width=2, depth=2)
-                                            .with_keras_params(activation='linear'))
-                    run_exp(net)
-                    K.clear_session()
-                exp.log(exp.counters)
-                exp.save(trajectorys=exp.without_particles())
+        with FixpointExperiment(name="weightwise_self_application") as exp:
+            for run_id in tqdm(range(20)):
+                net = ParticleDecorator(WeightwiseNeuralNetwork(width=2, depth=2)
+                                        .with_keras_params(activation='linear'))
+                run_exp(net)
+                K.clear_session()
+            exp.log(exp.counters)
+            exp.save(trajectorys=exp.without_particles())
 
     if False:
         # Aggregating Neural Network
-        with FixpointExperiment() as exp:
+        with FixpointExperiment(name="aggregating_self_application") as exp:
             for run_id in tqdm(range(10)):
                 net = ParticleDecorator(AggregatingNeuralNetwork(aggregates=4, width=2, depth=2)
                                         .with_keras_params(activation='linear'))
@@ -53,31 +52,33 @@ if __name__ == '__main__':
 
     if False:
         # ok so this works quite reliably
-        with FixpointExperiment() as exp:
+        with FixpointExperiment(name="weightwise_learning") as exp:
             for i in range(10):
                 run_count = 100
                 net = TrainingNeuralNetworkDecorator(ParticleDecorator(WeightwiseNeuralNetwork(width=2, depth=2)))
                 net.with_params(epsilon=0.0001).with_keras_params(activation='linear')
                 exp.historical_particles[net.get_uid()] = net
                 for run_id in tqdm(range(run_count+1)):
                     net.compiled()
                     loss = net.train(epoch=run_id)
                     if run_id % 10 == 0:
                         run_exp(net)
+                    # run_exp(net)
                     # net.save_state(time=run_id)
                 K.clear_session()
             exp.save(trajectorys=exp.without_particles())
 
     if False:
         # ok so this works quite reliably
-        with FixpointExperiment() as exp:
+        with FixpointExperiment(name="aggregating_learning") as exp:
             for i in range(10):
                 run_count = 100
                 net = TrainingNeuralNetworkDecorator(ParticleDecorator(AggregatingNeuralNetwork(4, width=2, depth=2)))
                 net.with_params(epsilon=0.0001).with_keras_params(activation='linear')
                 exp.historical_particles[net.get_uid()] = net
                 for run_id in tqdm(range(run_count+1)):
                     net.compiled()
                     loss = net.train(epoch=run_id)
                     if run_id % 10 == 0:
                         run_exp(net)
+                    # run_exp(net)
                     # net.save_state(time=run_id)
                 K.clear_session()
             exp.save(trajectorys=exp.without_particles())
@@ -40,8 +40,8 @@ if __name__ == '__main__':
         for activation in ['linear']:  # , 'sigmoid', 'relu']:
             for use_bias in [False]:
                 net_generators += [lambda activation=activation, use_bias=use_bias: WeightwiseNeuralNetwork(width=2, depth=2).with_keras_params(activation=activation, use_bias=use_bias)]
-                # net_generators += [lambda activation=activation, use_bias=use_bias: AggregatingNeuralNetwork(aggregates=4, width=2, depth=2).with_keras_params(activation=activation, use_bias=use_bias)]
-                # net_generators += [lambda activation=activation, use_bias=use_bias: RecurrentNeuralNetwork(width=2, depth=2).with_keras_params(activation=activation, use_bias=use_bias)]
+                net_generators += [lambda activation=activation, use_bias=use_bias: AggregatingNeuralNetwork(aggregates=4, width=2, depth=2).with_keras_params(activation=activation, use_bias=use_bias)]
+                net_generators += [lambda activation=activation, use_bias=use_bias: RecurrentNeuralNetwork(width=2, depth=2).with_keras_params(activation=activation, use_bias=use_bias)]
         all_counters = []
         all_notable_nets = []
         all_names = []
@@ -51,7 +51,7 @@ if __name__ == '__main__':
            for _ in tqdm(range(exp.trials)):
                net = ParticleDecorator(net_generator())
                net = TrainingNeuralNetworkDecorator(net).with_params(epsilon=exp.epsilon)
-               name = str(net.net.__class__.__name__) + " activation='" + str(net.get_keras_params().get('activation')) + "' use_bias=" + str(net.get_keras_params().get('use_bias'))
+               name = str(net.net.net.__class__.__name__) + " activation='" + str(net.get_keras_params().get('activation')) + "' use_bias=" + str(net.get_keras_params().get('use_bias'))
                for run_id in range(exp.run_count):
                    loss = net.compiled().train(epoch=run_id+1)
                count(counters, net, notable_nets)