upped some setups, data incoming
@@ -31,14 +31,16 @@ def count(counters, net, notable_nets=[]):
 if __name__ == '__main__':
     with Experiment('fixpoint-density') as exp:
-        exp.trials = 100
+        #NOTE: settings could/should stay this way
+        #FFT doesn't work though
+        exp.trials = 100000
         exp.epsilon = 1e-4
         net_generators = []
         for activation in ['linear', 'sigmoid', 'relu']:
-            # net_generators += [lambda activation=activation: WeightwiseNeuralNetwork(width=2, depth=2).with_keras_params(activation=activation, use_bias=False)]
+            net_generators += [lambda activation=activation: WeightwiseNeuralNetwork(width=2, depth=2).with_keras_params(activation=activation, use_bias=False)]
             net_generators += [lambda activation=activation: AggregatingNeuralNetwork(aggregates=4, width=2, depth=2).with_keras_params(activation=activation, use_bias=False)]
-            # net_generators += [lambda activation=activation: FFTNeuralNetwork(aggregates=4, width=2, depth=2).with_keras_params(activation=activation, use_bias=False)]
-            # net_generators += [lambda activation=activation: RecurrentNeuralNetwork(width=2, depth=2).with_keras_params(activation=activation, use_bias=False)]
+            #net_generators += [lambda activation=activation: FFTNeuralNetwork(aggregates=4, width=2, depth=2).with_keras_params(activation=activation, use_bias=False)]
+            net_generators += [lambda activation=activation: RecurrentNeuralNetwork(width=2, depth=2).with_keras_params(activation=activation, use_bias=False)]
         all_counters = []
         all_notable_nets = []
         all_names = []

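A side note on the pattern in this hunk: each generator is written as `lambda activation=activation: ...` rather than a bare closure because Python closures bind loop variables late. A minimal sketch in plain Python (no project imports; `bad` and `good` are illustrative names, not from the repo) showing the difference:

bad, good = [], []
for activation in ['linear', 'sigmoid', 'relu']:
    bad.append(lambda: activation)                          # late binding: every lambda ends up seeing the last loop value
    good.append(lambda activation=activation: activation)   # default argument freezes the current value at definition time
print([f() for f in bad])    # ['relu', 'relu', 'relu']
print([f() for f in good])   # ['linear', 'sigmoid', 'relu']

The same default-argument trick is what lets each entry in net_generators build a net with its own activation when it is finally called.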
@@ -26,8 +26,9 @@ def generate_fixpoint_weights():


 def generate_fixpoint_net():
-    # net = WeightwiseNeuralNetwork(width=2, depth=2).with_keras_params(activation='sigmoid')
-    net = AggregatingNeuralNetwork(width=2, depth=2).with_keras_params(activation='sigmoid')
+    #NOTE: Weightwise only is all we can do right now IMO
+    net = WeightwiseNeuralNetwork(width=2, depth=2).with_keras_params(activation='sigmoid')
+    # net = AggregatingNeuralNetwork(aggregates=4, width=2, depth=2).with_keras_params(activation='sigmoid')  # I don't know if this work for aggregaeting. We don't actually need it, though.
     net.set_weights(generate_fixpoint_weights())
     return net

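The hunks keep swapping network classes while the fixpoint machinery stays fixed. As a hedged sketch of the underlying check (not the repo's code: `apply_to_weights` is a hypothetical stand-in for whatever the project's networks actually do, and the epsilon test is an assumption matching exp.epsilon = 1e-4), a net's weights count as a fixpoint when one self-application moves them by less than epsilon:

import numpy as np

def is_fixpoint(weights, apply_to_weights, epsilon=1e-4):
    # One self-application; a fixpoint moves by less than epsilon in every coordinate.
    new_weights = np.asarray(apply_to_weights(weights))
    return bool(np.max(np.abs(new_weights - np.asarray(weights))) < epsilon)

# Trivial usage: under the identity map, every weight vector is a fixpoint.
print(is_fixpoint(np.zeros(8), lambda w: w))  # True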
@@ -33,15 +33,15 @@ def count(counters, net, notable_nets=[]):
 if __name__ == '__main__':

     with Experiment('training_fixpoint') as exp:
-        exp.trials = 20
-        exp.run_count = 500
+        exp.trials = 50
+        exp.run_count = 1000
         exp.epsilon = 1e-4
         net_generators = []
         for activation in ['linear']: # , 'sigmoid', 'relu']:
             for use_bias in [False]:
-                # net_generators += [lambda activation=activation, use_bias=use_bias: WeightwiseNeuralNetwork(width=2, depth=2).with_keras_params(activation=activation, use_bias=use_bias)]
+                net_generators += [lambda activation=activation, use_bias=use_bias: WeightwiseNeuralNetwork(width=2, depth=2).with_keras_params(activation=activation, use_bias=use_bias)]
                 net_generators += [lambda activation=activation, use_bias=use_bias: AggregatingNeuralNetwork(aggregates=4, width=2, depth=2).with_keras_params(activation=activation, use_bias=use_bias)]
-                # net_generators += [lambda activation=activation, use_bias=use_bias: RecurrentNeuralNetwork(width=2, depth=2).with_keras_params(activation=activation, use_bias=use_bias)]
+                net_generators += [lambda activation=activation, use_bias=use_bias: RecurrentNeuralNetwork(width=2, depth=2).with_keras_params(activation=activation, use_bias=use_bias)]
         all_counters = []
         all_notable_nets = []
         all_names = []
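For context on where the generators end up: the hunk headers show the signature count(counters, net, notable_nets=[]) but not the loop that calls it. A hedged sketch of how the pieces might be wired together, assuming only that each generator returns a fresh net and that count mutates the counters it is handed; everything else here (run_trials, the Counter, the name fallback) is illustrative, not from the repo:

from collections import Counter

def run_trials(exp, net_generators, count):
    all_counters, all_notable_nets, all_names = [], [], []
    for generator in net_generators:
        counters, notable_nets = Counter(), []
        for _ in range(exp.trials):
            net = generator()  # fresh net per trial, activation/use_bias already bound
            count(counters, net, notable_nets=notable_nets)
        all_counters.append(counters)
        all_notable_nets.append(notable_nets)
        # Fall back to the class name if the net carries no explicit name.
        all_names.append(getattr(net, 'name', type(net).__name__))
    return all_counters, all_notable_nets, all_names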