def test_mix_blueprints(self):
    """Mixing two blueprints yields a mutant built from one of the parents.

    The mutant's row count must match one of the two parents' row counts.

    NOTE(review): renamed from ``test_mutate_layout`` — that name duplicated
    the mutation test's name (shadowing this test if both are defined in the
    same TestCase, so it would never run) and misdescribed the content: this
    test exercises ``mix_blueprints``, not ``mutate_blueprint``.
    """
    layout = Layout(
        input_size=100,
        output_size=10,
        output_activation='softmax')
    # All hyper-parameters are left as None/search so the whole space is open.
    training = Training(
        objective=None,
        optimizer=None,
        metric=None,
        stopping=None,
        batch_size=None)
    experiment = Experiment(
        'test',
        layout,
        training,
        batch_iterator=None,
        test_batch_iterator=None,
        environment=None,
        parameters=ExperimentParameters(use_default_values=False))
    experiment.parameters.all_search_parameters(True)
    check_experiment_parameters(experiment)
    # Repeat to exercise the randomness in blueprint creation and mixing.
    for _ in range(10):
        blueprint1 = create_random_blueprint(experiment)
        blueprint2 = create_random_blueprint(experiment)
        mutant = mix_blueprints(
            blueprint1,
            blueprint2,
            parameters=experiment.parameters,
            p_mutate_param=0.1)
        parent_rows = [len(b.layout.rows) for b in (blueprint1, blueprint2)]
        # assertIn gives a clearer failure report than assertTrue(x in y).
        self.assertIn(
            len(mutant.layout.rows), parent_rows,
            'Should have used one of the parents')
def test_random_blueprint(self):
    """Random blueprints must honor every configured search parameter.

    Checks the generated optimizer's parameters against the experiment's
    reference ranges, then validates the layout's row/block/layer counts
    (Merge layers are excluded from the per-block layer count).
    """
    layout = Layout(
        input_size=100, output_size=10, output_activation='softmax')
    training = Training(
        objective=None, optimizer=None, metric=None,
        stopping=None, batch_size=None)
    experiment = Experiment(
        'test', layout, training,
        batch_iterator=None, test_batch_iterator=None, environment=None,
        parameters=ExperimentParameters(use_default_values=False))
    experiment.parameters.all_search_parameters(True)
    check_experiment_parameters(experiment)
    for _ in range(10):
        blueprint = create_random_blueprint(experiment)
        self.assertIsNotNone(blueprint, 'Should have created a blueprint')
        optimizer = blueprint.training.optimizer
        self.assertIsNotNone(
            optimizer.optimizer, 'Should have created an optimizer')
        # Every generated optimizer parameter must fall inside its
        # reference search range.
        ref_parameters = experiment.parameters.get_optimizers_parameters()
        for name, param in ref_parameters[optimizer.optimizer].items():
            self.assertTrue(
                is_valid_param_value(param, optimizer.parameters[name]),
                'Invalid param value')
        self.assertIsNotNone(blueprint.layout, 'Should have created a layout')
        get_layout_param = experiment.parameters.get_layout_parameter
        row_count = len(blueprint.layout.get_rows())
        self.assertTrue(
            is_valid_param_value(get_layout_param('rows'), row_count),
            'Invalid value')
        for row in blueprint.layout.get_rows():
            block_count = len(row.get_blocks())
            self.assertTrue(
                is_valid_param_value(get_layout_param('blocks'), block_count),
                'Invalid value')
            for block in row.get_blocks():
                # Merge layers are structural glue, not searchable layers.
                layer_count = sum(
                    1 for layer in block.get_layers()
                    if layer.layer_type != 'Merge')
                self.assertTrue(
                    is_valid_param_value(
                        get_layout_param('layers'), layer_count),
                    'Invalid value')
def test_search_parameters(self):
    """An experiment with no concrete parameters must fail validation.

    ``use_default_values=False`` leaves every parameter unset, so
    ``check_experiment_parameters`` is expected to reject the experiment.
    """
    experiment = Experiment(
        label='test',
        parameters=ExperimentParameters(use_default_values=False))
    # assertRaises replaces the original try/except + boolean-flag pattern:
    # it fails the test automatically if no exception is raised, and lets
    # unexpected exception types propagate as errors.
    with self.assertRaises(InvalidParametersException):
        check_experiment_parameters(experiment)
def test_mutate_layout(self):
    """Mutating one layout dimension at a time must change that dimension.

    For each of 'rows', 'blocks' and 'layers', a forced mutation
    (p_mutate_layout=1, a single mutation) should produce a mutant whose
    count for that dimension differs from the original blueprint's.
    """
    layout = Layout(
        input_size=100, output_size=10, output_activation='softmax')
    training = Training(
        objective=None, optimizer=None, metric=None,
        stopping=None, batch_size=None)
    experiment = Experiment(
        'test', layout, training,
        batch_iterator=None, test_batch_iterator=None, environment=None,
        parameters=ExperimentParameters(use_default_values=False))
    experiment.parameters.all_search_parameters(True)
    check_experiment_parameters(experiment)
    # (mutable name, layout accessor, failure message) — one entry per
    # layout dimension, replacing three copy-pasted stanzas.
    dimension_checks = (
        ('rows', lambda lay: lay.rows, 'Should have mutated rows'),
        ('blocks', lambda lay: lay.get_blocks(), 'Should have mutated blocks'),
        ('layers', lambda lay: lay.get_layers(), 'Should have mutated layers'),
    )
    for _ in range(10):
        blueprint = create_random_blueprint(experiment)
        for mutable, accessor, message in dimension_checks:
            mutant = mutate_blueprint(
                blueprint,
                parameters=experiment.parameters,
                p_mutate_layout=1,
                layout_mutation_count=1,
                layout_mutables=[mutable],
                mutate_in_place=False)
            self.assertTrue(
                len(accessor(mutant.layout)) != len(accessor(blueprint.layout)),
                message)
def test_mutate_w_custom_definitions(self):
    """Parameter-only mutation must change every layer parameter, including
    those of a registered custom layer ('Dense2').

    With p_mutate_layout=0 the layout structure is untouched, so mutant and
    original layouts can be walked in lockstep; with p_mutate_param=1 every
    parameter value is expected to differ from the original's.
    """
    def custom_activation(x):
        return x
    register_custom_activation('custom_activation', custom_activation)
    register_custom_layer(
        'Dense2', Dense,
        deepcopy(reference_parameters['layers']['Dense']))
    layout = Layout(
        input_size=100,
        output_size=10,
        output_activation='softmax',
        block=['Dense', 'Dense2'])
    training = Training(
        objective=Objective('categorical_crossentropy'),
        optimizer=None,
        metric=Metric('categorical_accuracy'),
        stopping=EpochStoppingCondition(5),
        batch_size=250)
    experiment_parameters = ExperimentParameters(use_default_values=False)
    experiment_parameters.layout_parameter('blocks', int_param(1, 5))
    experiment_parameters.layout_parameter('layers', int_param(1, 5))
    experiment_parameters.layer_parameter(
        'Dense2.output_dim', int_param(10, 500))
    experiment_parameters.layer_parameter('Dropout.p', float_param(0.1, 0.9))
    experiment_parameters.all_search_parameters(True)
    experiment = Experiment(
        'test', layout, training,
        batch_iterator=None, test_batch_iterator=None, environment=None,
        parameters=experiment_parameters)
    check_experiment_parameters(experiment)
    for _ in range(10):
        blueprint = create_random_blueprint(experiment)
        mutant = mutate_blueprint(
            blueprint,
            parameters=experiment.parameters,
            p_mutate_layout=0,
            p_mutate_param=1,
            mutate_in_place=False)
        # Walk mutant and original layouts in parallel; structure is
        # identical because layout mutation was disabled above.
        for mutant_row, original_row in zip(
                mutant.layout.rows, blueprint.layout.rows):
            for mutant_block, original_block in zip(
                    mutant_row.blocks, original_row.blocks):
                for mutant_layer, original_layer in zip(
                        mutant_block.layers, original_block.layers):
                    for name, value in mutant_layer.parameters.items():
                        self.assertTrue(
                            value != original_layer.parameters[name],
                            'Should have mutated parameter')
def test_build(self):
    """Random, mixed, and mutated blueprints must all build into models."""
    layout = Layout(
        input_size=100, output_size=10, output_activation='softmax')
    training = Training(
        objective=Objective('categorical_crossentropy'),
        optimizer=None,
        metric=Metric('categorical_accuracy'),
        stopping=EpochStoppingCondition(10),
        batch_size=250)
    experiment_parameters = ExperimentParameters(use_default_values=False)
    experiment_parameters.layout_parameter('blocks', int_param(1, 5))
    experiment_parameters.layout_parameter('layers', int_param(1, 5))
    experiment_parameters.layer_parameter(
        'Dense.output_dim', int_param(10, 500))
    experiment_parameters.layer_parameter(
        'Dense.activation', string_param(['relu', 'tanh']))
    experiment_parameters.layer_parameter('Dropout.p', float_param(0.1, 0.9))
    experiment_parameters.all_search_parameters(True)
    experiment = Experiment(
        'test', layout, training,
        batch_iterator=None, test_batch_iterator=None, environment=None,
        parameters=experiment_parameters)
    check_experiment_parameters(experiment)

    def build_and_check(candidate):
        # Each candidate blueprint must produce a non-None CPU model.
        model = ModelBuilder().build(candidate, cpu_device())
        self.assertIsNotNone(model, 'Should have built a model')

    for _ in range(5):
        # Build immediately after each derivation, preserving the original
        # create/build interleaving (and therefore RNG consumption order).
        blueprint1 = create_random_blueprint(experiment)
        build_and_check(blueprint1)
        blueprint2 = create_random_blueprint(experiment)
        build_and_check(blueprint2)
        blueprint3 = mix_blueprints(
            blueprint1, blueprint2, experiment_parameters)
        build_and_check(blueprint3)
        blueprint4 = mutate_blueprint(
            blueprint1, experiment_parameters, mutate_in_place=False)
        build_and_check(blueprint4)
def test_build_w_custom_definitions(self):
    """Blueprints constrained to a custom layer type must use it exclusively
    and still build into models.

    Registers 'Dense2' (a Dense clone with a custom activation), restricts
    the search space to it, then checks random/mixed/mutated blueprints.
    """
    def custom_activation(x):
        return x
    register_custom_activation('custom_activation', custom_activation)
    register_custom_layer(
        'Dense2', Dense,
        deepcopy(reference_parameters['layers']['Dense']), True)
    layout = Layout(
        input_size=100,
        output_size=10,
        output_activation='softmax',
        block=['Dense2'])
    training = Training(
        objective=Objective('categorical_crossentropy'),
        optimizer=None,
        metric=Metric('categorical_accuracy'),
        stopping=EpochStoppingCondition(5),
        batch_size=250)
    experiment_parameters = ExperimentParameters(use_default_values=False)
    experiment_parameters.layout_parameter('blocks', int_param(1, 5))
    experiment_parameters.layout_parameter('layers', int_param(1, 5))
    experiment_parameters.layout_parameter(
        'layer.type', string_param(['Dense2']))
    experiment_parameters.layer_parameter(
        'Dense2.output_dim', int_param(10, 500))
    experiment_parameters.layer_parameter(
        'Dense2.activation', string_param(['custom_activation']))
    experiment_parameters.layer_parameter('Dropout.p', float_param(0.1, 0.9))
    experiment_parameters.all_search_parameters(True)
    experiment = Experiment(
        'test', layout, training,
        batch_iterator=None, test_batch_iterator=None, environment=None,
        parameters=experiment_parameters)
    check_experiment_parameters(experiment)

    def verify_and_build(candidate):
        # Every layer must be the registered custom type, and the
        # blueprint must still build into a non-None CPU model.
        for layer in candidate.layout.get_layers():
            self.assertEqual(
                'Dense2', layer.layer_type, 'Should have used custom layer')
        model = ModelBuilder().build(candidate, cpu_device())
        self.assertIsNotNone(model, 'Should have built a model')

    for _ in range(5):
        # Verify/build right after each derivation, preserving the original
        # statement order (and therefore RNG consumption order).
        blueprint1 = create_random_blueprint(experiment)
        verify_and_build(blueprint1)
        blueprint2 = create_random_blueprint(experiment)
        verify_and_build(blueprint2)
        blueprint3 = mix_blueprints(
            blueprint1, blueprint2, experiment_parameters)
        verify_and_build(blueprint3)
        blueprint4 = mutate_blueprint(
            blueprint1, experiment_parameters, mutate_in_place=False)
        verify_and_build(blueprint4)