def test_mix_layout(self):
    layout = Layout(
        input_size=100,
        output_size=10,
        output_activation='softmax')
    training = Training(
        objective=None,
        optimizer=None,
        metric=None,
        stopping=None,
        batch_size=None)
    experiment = Experiment(
        'test',
        layout,
        training,
        batch_iterator=None,
        test_batch_iterator=None,
        environment=None,
        parameters=ExperimentParameters(use_default_values=False))
    experiment.parameters.all_search_parameters(True)
    check_experiment_parameters(experiment)
    for _ in range(10):
        blueprint1 = create_random_blueprint(experiment)
        blueprint2 = create_random_blueprint(experiment)
        mutant = mix_blueprints(
            blueprint1,
            blueprint2,
            parameters=experiment.parameters,
            p_mutate_param=0.1)
        parent_rows = [len(b.layout.rows) for b in (blueprint1, blueprint2)]
        self.assertIn(
            len(mutant.layout.rows),
            parent_rows,
            'Should have used one of the parents')

def build_layout(input_size, output_size):
    """Define a minimal layout without specifying the architecture.

    Layouts will be randomly generated using the min and max numbers
    of rows, blocks and layers.
    """
    return Layout(
        input_size=input_size,
        output_size=output_size,
        output_activation='softmax')

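# A minimal usage sketch (illustrative, not part of the test suite): a layout
# from build_layout is handed to an Experiment, and concrete architectures are
# then drawn at random within the row/block/layer bounds set on the
# parameters. The bounds and training settings below are assumptions chosen
# for illustration, not values mandated by the library.
def _example_minimal_layout_search():
    layout = build_layout(input_size=1000, output_size=10)
    parameters = ExperimentParameters(use_default_values=False)
    parameters.layout_parameter('rows', int_param(1, 2))
    parameters.layout_parameter('blocks', int_param(1, 3))
    parameters.layout_parameter('layers', int_param(1, 5))
    parameters.all_search_parameters(True)
    training = Training(
        objective=Objective('categorical_crossentropy'),
        optimizer=None,
        metric=Metric('categorical_accuracy'),
        stopping=EpochStoppingCondition(5),
        batch_size=32)
    experiment = Experiment(
        'example_minimal_layout',
        layout,
        training,
        batch_iterator=None,
        test_batch_iterator=None,
        environment=None,
        parameters=parameters)
    check_experiment_parameters(experiment)
    # Every call can yield a different architecture within the bounds above.
    return create_random_blueprint(experiment)
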
def test_predefined_parameters(self):
    layout = Layout(
        input_size=100,
        output_size=10,
        output_activation='softmax')
    training = Training(
        objective=None,
        optimizer=Optimizer('SGD', {'lr': 1}),
        metric=None,
        stopping=None,
        batch_size=None)
    experiment = Experiment(
        'test',
        layout,
        training,
        batch_iterator=None,
        test_batch_iterator=None,
        environment=None)
    for _ in range(10):
        blueprint = create_random_blueprint(experiment)
        self.assertIsNotNone(blueprint, 'Should have created a blueprint')
        self.assertEqual(
            training.optimizer.optimizer,
            blueprint.training.optimizer.optimizer,
            'Should have created an optimizer')
        self.assertEqual(
            blueprint.training.optimizer.parameters['lr'],
            training.optimizer.parameters['lr'],
            'Should have copied predefined parameter')

def test_train(self):
    n_jobs = 2
    with Pool(n_jobs) as pool,\
            tempfile.TemporaryDirectory() as tmp_dir:
        layout = Layout(
            input_size=1000,
            output_size=nb_classes,
            output_activation='softmax')
        training = Training(
            objective=Objective('categorical_crossentropy'),
            optimizer=Optimizer(optimizer='Adam'),
            metric=Metric('categorical_accuracy'),
            stopping=EpochStoppingCondition(10),
            batch_size=batch_size)
        experiment_parameters = ExperimentParameters(
            use_default_values=True)
        experiment_parameters.layout_parameter('rows', 1)
        experiment_parameters.layout_parameter('blocks', 1)
        experiment_parameters.layout_parameter('layers', 1)
        experiment = Experiment(
            'test__reuters_experiment',
            layout,
            training,
            batch_iterator,
            test_batch_iterator,
            CpuEnvironment(n_jobs=1, data_dir=tmp_dir),
            parameters=experiment_parameters)
        blueprints = [
            create_random_blueprint(experiment)
            for _ in range(n_jobs)]
        pool.map(multiprocess_fit, blueprints)

def test_predefined_layer_type(self):
    layout = Layout(
        input_size=100,
        output_size=10,
        output_activation='softmax')
    training = Training(
        objective=None,
        optimizer=Optimizer('SGD', {'lr': 1}),
        metric=None,
        stopping=None,
        batch_size=None)
    experiment = Experiment(
        'test',
        layout,
        training,
        batch_iterator=None,
        test_batch_iterator=None,
        environment=None)
    layer_types = ['Dropout']
    experiment.parameters.layer_types(string_param(layer_types))
    for _ in range(10):
        blueprint = create_random_blueprint(experiment)
        self.assertIsNotNone(blueprint, 'Should have created a blueprint')
        for layer in blueprint.layout.get_layers():
            self.assertIn(
                layer.layer_type,
                layer_types,
                'Should have used predefined layer types')

def test_random_blueprint(self):
    layout = Layout(
        input_size=100,
        output_size=10,
        output_activation='softmax')
    training = Training(
        objective=None,
        optimizer=None,
        metric=None,
        stopping=None,
        batch_size=None)
    experiment = Experiment(
        'test',
        layout,
        training,
        batch_iterator=None,
        test_batch_iterator=None,
        environment=None,
        parameters=ExperimentParameters(use_default_values=False))
    experiment.parameters.all_search_parameters(True)
    check_experiment_parameters(experiment)
    for _ in range(10):
        blueprint = create_random_blueprint(experiment)
        self.assertIsNotNone(blueprint, 'Should have created a blueprint')
        optimizer = blueprint.training.optimizer
        self.assertIsNotNone(
            optimizer.optimizer, 'Should have created an optimizer')
        ref_parameters = experiment.parameters.get_optimizers_parameters()
        for name, param in ref_parameters[optimizer.optimizer].items():
            self.assertTrue(
                is_valid_param_value(param, optimizer.parameters[name]),
                'Invalid param value')
        self.assertIsNotNone(blueprint.layout, 'Should have created a layout')
        rows = len(blueprint.layout.get_rows())
        self.assertTrue(
            is_valid_param_value(
                experiment.parameters.get_layout_parameter('rows'),
                rows),
            'Invalid value')
        for row in blueprint.layout.get_rows():
            blocks = len(row.get_blocks())
            self.assertTrue(
                is_valid_param_value(
                    experiment.parameters.get_layout_parameter('blocks'),
                    blocks),
                'Invalid value')
            for block in row.get_blocks():
                layers = len([
                    layer for layer in block.get_layers()
                    if layer.layer_type != 'Merge'])
                self.assertTrue(
                    is_valid_param_value(
                        experiment.parameters.get_layout_parameter('layers'),
                        layers),
                    'Invalid value')

def test_predefined_multiple_blocklayout(self):
    layout = Layout(
        input_size=100,
        output_size=10,
        output_activation='softmax',
        block=[
            [('Dense', {'activation': 'relu'})],
            [('Dense', {'activation': 'relu'}),
             'Dropout',
             ('Dense', {'output_dim': 100})]])
    training = Training(
        objective=None,
        optimizer=Optimizer(),
        metric=None,
        stopping=None,
        batch_size=None)
    experiment = Experiment(
        'test',
        layout,
        training,
        batch_iterator=None,
        test_batch_iterator=None,
        environment=None,
        parameters=ExperimentParameters(use_default_values=False))
    experiment.parameters.search_parameter('layout', False)
    experiment.parameters.search_parameter('parameters', True)
    experiment.parameters.search_parameter('optimizer', True)
    for _ in range(10):
        blueprint1 = create_random_blueprint(experiment)
        blueprint2 = create_random_blueprint(experiment)
        blueprint3 = mix_blueprints(
            blueprint1, blueprint2, experiment.parameters)
        blueprint4 = mutate_blueprint(
            blueprint1, experiment.parameters, mutate_in_place=False)
        for blueprint in [blueprint1, blueprint2, blueprint3, blueprint4]:
            self.assertIsNotNone(blueprint, 'Should have created a blueprint')
            self.assertIsNotNone(
                blueprint.layout, 'Should have created a layout')
            self.assertEqual(
                1, len(blueprint.layout.get_rows()), 'Should have 1 row')
            self.assertEqual(
                1, len(blueprint.layout.get_blocks()), 'Should have 1 block')
            for row in blueprint.layout.get_rows():
                blocks = len(row.get_blocks())
                self.assertTrue(
                    is_valid_param_value(
                        experiment.parameters.get_layout_parameter('blocks'),
                        blocks),
                    'Invalid value')
                for block in row.get_blocks():
                    self.assertIn(
                        len(block.layers),
                        [len(template) for template in layout.block],
                        'Should have used template')

def test_mutate_layout(self):
    layout = Layout(
        input_size=100,
        output_size=10,
        output_activation='softmax')
    training = Training(
        objective=None,
        optimizer=None,
        metric=None,
        stopping=None,
        batch_size=None)
    experiment = Experiment(
        'test',
        layout,
        training,
        batch_iterator=None,
        test_batch_iterator=None,
        environment=None,
        parameters=ExperimentParameters(use_default_values=False))
    experiment.parameters.all_search_parameters(True)
    check_experiment_parameters(experiment)
    for _ in range(10):
        blueprint = create_random_blueprint(experiment)
        mutant = mutate_blueprint(
            blueprint,
            parameters=experiment.parameters,
            p_mutate_layout=1,
            layout_mutation_count=1,
            layout_mutables=['rows'],
            mutate_in_place=False)
        self.assertNotEqual(
            len(mutant.layout.rows),
            len(blueprint.layout.rows),
            'Should have mutated rows')
        mutant = mutate_blueprint(
            blueprint,
            parameters=experiment.parameters,
            p_mutate_layout=1,
            layout_mutation_count=1,
            layout_mutables=['blocks'],
            mutate_in_place=False)
        self.assertNotEqual(
            len(mutant.layout.get_blocks()),
            len(blueprint.layout.get_blocks()),
            'Should have mutated blocks')
        mutant = mutate_blueprint(
            blueprint,
            parameters=experiment.parameters,
            p_mutate_layout=1,
            layout_mutation_count=1,
            layout_mutables=['layers'],
            mutate_in_place=False)
        self.assertNotEqual(
            len(mutant.layout.get_layers()),
            len(blueprint.layout.get_layers()),
            'Should have mutated layers')

def test_mutate_w_custom_definitions(self):
    def custom_activation(x):
        return x

    register_custom_activation('custom_activation', custom_activation)
    register_custom_layer(
        'Dense2',
        Dense,
        deepcopy(reference_parameters['layers']['Dense']))
    layout = Layout(
        input_size=100,
        output_size=10,
        output_activation='softmax',
        block=['Dense', 'Dense2'])
    training = Training(
        objective=Objective('categorical_crossentropy'),
        optimizer=None,
        metric=Metric('categorical_accuracy'),
        stopping=EpochStoppingCondition(5),
        batch_size=250)
    experiment_parameters = ExperimentParameters(use_default_values=False)
    experiment_parameters.layout_parameter('blocks', int_param(1, 5))
    experiment_parameters.layout_parameter('layers', int_param(1, 5))
    experiment_parameters.layer_parameter(
        'Dense2.output_dim', int_param(10, 500))
    experiment_parameters.layer_parameter(
        'Dropout.p', float_param(0.1, 0.9))
    experiment_parameters.all_search_parameters(True)
    experiment = Experiment(
        'test',
        layout,
        training,
        batch_iterator=None,
        test_batch_iterator=None,
        environment=None,
        parameters=experiment_parameters)
    check_experiment_parameters(experiment)
    for _ in range(10):
        blueprint = create_random_blueprint(experiment)
        mutant = mutate_blueprint(
            blueprint,
            parameters=experiment.parameters,
            p_mutate_layout=0,
            p_mutate_param=1,
            mutate_in_place=False)
        for row_idx, row in enumerate(mutant.layout.rows):
            for block_idx, block in enumerate(row.blocks):
                for layer_idx, layer in enumerate(block.layers):
                    original_row = blueprint.layout.rows[row_idx]
                    original_block = original_row.blocks[block_idx]
                    original_layer = original_block.layers[layer_idx]
                    for name, value in layer.parameters.items():
                        self.assertNotEqual(
                            value,
                            original_layer.parameters[name],
                            'Should have mutated parameter')

def build_layout(input_size, output_size):
    """Define a layout with a predefined architecture.

    Some parameters that we don't want to evolve are fixed in the block
    template; the remaining layer parameters will be tested with random
    values.
    """
    return Layout(
        input_size=input_size,
        output_size=output_size,
        output_activation='softmax',
        block=[
            ('Dense', {'activation': 'relu'}),
            'Dropout',
            ('Dense', {'output_dim': 100})])

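# Illustrative sketch (assumptions noted below): with a predefined block, the
# layout itself is not searched, only the layer parameters left free by the
# template. The parameter ranges here are illustrative assumptions; the
# template's fixed values ('activation': 'relu', 'output_dim': 100) stay as
# defined above.
def _example_template_parameters():
    parameters = ExperimentParameters(use_default_values=False)
    # Freeze the architecture, search only the free layer parameters.
    parameters.search_parameter('layout', False)
    parameters.search_parameter('parameters', True)
    parameters.layer_parameter('Dense.output_dim', int_param(10, 500))
    parameters.layer_parameter('Dropout.p', float_param(0.1, 0.9))
    return parameters
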
def test_early_stopping_condition(self):
    disable_sysout()
    with tempfile.TemporaryDirectory() as tmp_dir:
        batch_size = 50
        min_epoch = 10
        max_epoch = 15
        batch_iterator, test_batch_iterator, nb_classes = get_reuters_dataset(
            batch_size, 1000)
        layout = Layout(
            input_size=1000,
            output_size=nb_classes,
            output_activation='softmax')
        training = Training(
            objective=Objective('categorical_crossentropy'),
            optimizer=Optimizer(optimizer='Adam'),
            metric=Metric('categorical_accuracy'),
            stopping=AccuracyDecreaseStoppingCondition(
                metric='categorical_accuracy',
                noprogress_count=2,
                min_epoch=min_epoch,
                max_epoch=max_epoch),
            batch_size=batch_size)
        experiment_parameters = ExperimentParameters(
            use_default_values=True)
        experiment_parameters.layout_parameter('rows', 1)
        experiment_parameters.layout_parameter('blocks', 1)
        experiment_parameters.layout_parameter('layers', 1)
        experiment = Experiment(
            'test__reuters_experiment',
            layout,
            training,
            batch_iterator,
            test_batch_iterator,
            CpuEnvironment(n_jobs=1, data_dir=tmp_dir),
            parameters=experiment_parameters)
        _assert_valid_training_parameters(experiment)
        blueprint = create_random_blueprint(experiment)
        trainer = ModelTrainer(batch_iterator, test_batch_iterator)
        model, history, _duration = trainer.train(
            blueprint,
            cpu_device(),
            save_best_model=False)
        self.assertGreaterEqual(
            len(history.epoch),
            min_epoch,
            'Should have trained for at least min_epoch epochs')
        self.assertLessEqual(
            len(history.epoch),
            max_epoch,
            'Should have trained for at most max_epoch epochs')
        self.assertIsNotNone(model, 'Should have fit the model')
        score = model.evaluate_generator(
            test_batch_iterator,
            val_samples=test_batch_iterator.sample_count)
        self.assertIsNotNone(score, 'Should have evaluated the model')

def _mix_layouts(parent_layouts, parameters, p_mutate_param=0.05):
    layout = Layout(
        input_size=parent_layouts[0].input_size,
        output_size=parent_layouts[0].output_size,
        output_activation=parent_layouts[0].output_activation,
        block=parent_layouts[0].block,
        block_input=parent_layouts[0].block_input)
    if parameters.is_layout_search():
        # Pick a row count from one of the parents, then mix each row from
        # the parents that are deep enough to provide it.
        rows = random_list_element([len(p.rows) for p in parent_layouts])
        for row_idx in range(rows):
            parent_rows = [
                p.rows[row_idx]
                for p in parent_layouts
                if row_idx < len(p.rows)]
            layout.rows.append(
                _mix_row(layout, row_idx, parent_rows, parameters))
        _setup_block_inputs(layout, parameters)
        if parameters.is_parameters_search():
            _mutate_layer_parameters(
                layout, parameters, p_mutate_param=p_mutate_param)
    else:
        # Layout search is disabled: inherit one parent's rows as-is.
        layout.rows = random_list_element([
            parent_layouts[0].rows,
            parent_layouts[1].rows])
    return layout

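# Hedged usage sketch: _mix_layouts is an internal helper, normally reached
# through mix_blueprints rather than called directly. Assuming an experiment
# whose parameters have passed check_experiment_parameters, crossing two
# random parents would look like this.
def _example_mix_layouts(experiment):
    parent1 = create_random_blueprint(experiment).layout
    parent2 = create_random_blueprint(experiment).layout
    # Each child row is mixed from the parents deep enough to provide it;
    # ~5% of layer parameters are mutated along the way.
    return _mix_layouts(
        [parent1, parent2], experiment.parameters, p_mutate_param=0.05)
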
def test_save(self):
    disable_sysout()

    def custom_activation(x):
        return x

    register_custom_activation('custom_activation', custom_activation)
    register_custom_layer(
        'custom_layer',
        CustomLayer,
        {'output_dim': int_param(1, 100)})
    with tempfile.TemporaryDirectory() as tmp_dir:
        batch_size = 50
        batch_iterator, test_batch_iterator, nb_classes = get_reuters_dataset(
            batch_size, 1000)
        layout = Layout(
            input_size=1000,
            output_size=nb_classes,
            output_activation='softmax')
        training = Training(
            objective=Objective('categorical_crossentropy'),
            optimizer=Optimizer(optimizer='Adam'),
            metric=Metric('categorical_accuracy'),
            stopping=EpochStoppingCondition(10),
            batch_size=batch_size)
        experiment_parameters = ExperimentParameters(
            use_default_values=True)
        experiment_parameters.layout_parameter('rows', 1)
        experiment_parameters.layout_parameter('blocks', 1)
        experiment_parameters.layout_parameter('layers', 1)
        experiment_parameters.layout_parameter(
            'block.layer_type', 'custom_layer')
        experiment = Experiment(
            'test__reuters_experiment',
            layout,
            training,
            batch_iterator,
            test_batch_iterator,
            CpuEnvironment(n_jobs=1, data_dir=tmp_dir),
            parameters=experiment_parameters)
        blueprint = create_random_blueprint(experiment)
        model = ModelBuilder().build(blueprint, default_device())
        model.fit_generator(
            generator=batch_iterator,
            samples_per_epoch=batch_iterator.samples_per_epoch,
            nb_epoch=10,
            validation_data=test_batch_iterator,
            nb_val_samples=test_batch_iterator.sample_count)
        filepath = join(tmp_dir, 'model')
        model.save(filepath)
        model = load_keras_model(filepath)
        self.assertIsNotNone(model, 'Should have loaded the model')

def _random_layout(layout, experiment_parameters):
    # Start from a fresh copy of the template layout, then populate it.
    layout = Layout(
        layout.input_size,
        layout.output_size,
        layout.output_activation,
        layout.block)
    if experiment_parameters.is_layout_search():
        # Architecture search: draw a random row count and build each row.
        rows = random_initial_param_value(
            experiment_parameters.get_layout_parameter('rows'))
        for row_idx in range(rows):
            layout.rows.append(
                _random_layout_row(layout, row_idx, experiment_parameters))
    else:
        # Fixed architecture: a single row holding one template block.
        layout.rows = [
            Row(blocks=[
                _instantiate_layout_block(layout, 0, experiment_parameters)])]
    if experiment_parameters.is_parameters_search():
        _set_layout_random_parameters(layout, experiment_parameters)
    return layout

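# Hedged sketch: _random_layout takes the experiment's template layout and
# its parameters and returns a concrete randomized layout. This mirrors what
# create_random_blueprint is expected to do internally (an assumption based
# on the tests in this file, not a documented contract).
def _example_random_layout(experiment):
    return _random_layout(experiment.layout, experiment.parameters)
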
def test_build(self):
    layout = Layout(
        input_size=100,
        output_size=10,
        output_activation='softmax')
    training = Training(
        objective=Objective('categorical_crossentropy'),
        optimizer=None,
        metric=Metric('categorical_accuracy'),
        stopping=EpochStoppingCondition(10),
        batch_size=250)
    experiment_parameters = ExperimentParameters(use_default_values=False)
    experiment_parameters.layout_parameter('blocks', int_param(1, 5))
    experiment_parameters.layout_parameter('layers', int_param(1, 5))
    experiment_parameters.layer_parameter(
        'Dense.output_dim', int_param(10, 500))
    experiment_parameters.layer_parameter(
        'Dense.activation', string_param(['relu', 'tanh']))
    experiment_parameters.layer_parameter(
        'Dropout.p', float_param(0.1, 0.9))
    experiment_parameters.all_search_parameters(True)
    experiment = Experiment(
        'test',
        layout,
        training,
        batch_iterator=None,
        test_batch_iterator=None,
        environment=None,
        parameters=experiment_parameters)
    check_experiment_parameters(experiment)
    for _ in range(5):
        blueprint1 = create_random_blueprint(experiment)
        blueprint2 = create_random_blueprint(experiment)
        blueprint3 = mix_blueprints(
            blueprint1, blueprint2, experiment_parameters)
        blueprint4 = mutate_blueprint(
            blueprint1, experiment_parameters, mutate_in_place=False)
        for blueprint in [blueprint1, blueprint2, blueprint3, blueprint4]:
            model = ModelBuilder().build(blueprint, cpu_device())
            self.assertIsNotNone(model, 'Should have built a model')

def create_experiment(input_size, output_size, batch_size):
    training = Training(
        objective=Objective('categorical_crossentropy'),
        optimizer=Optimizer(optimizer='Adam'),
        metric=Metric('categorical_accuracy'),
        stopping=AccuracyDecreaseStoppingCondition(
            metric='categorical_accuracy',
            min_epoch=5,
            max_epoch=25,
            noprogress_count=5),
        batch_size=batch_size)
    parameters = ExperimentParameters(use_default_values=True)
    layout = Layout(
        input_size=input_size,
        output_size=output_size,
        output_activation='softmax')
    experiment = Experiment(
        label='reuters_train_multi_gpu',
        layout=layout,
        training=training,
        parameters=parameters)
    return experiment

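# Illustrative sketch: the experiment built by create_experiment would
# typically be handed to run_ga_search_experiment (as in the GA test in this
# repo). create_experiment does not wire in data iterators or an environment,
# so a real run must supply them; assigning them as attributes here is an
# assumption about the Experiment object, and the population/generation
# counts are illustrative.
def _example_ga_run(batch_iterator, test_batch_iterator, tmp_dir):
    experiment = create_experiment(
        input_size=1000, output_size=46, batch_size=50)
    experiment.batch_iterator = batch_iterator
    experiment.test_batch_iterator = test_batch_iterator
    experiment.environment = CpuEnvironment(n_jobs=2, data_dir=tmp_dir)
    run_ga_search_experiment(
        experiment, population_size=10, generations=5)
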
def test_mutate_parameters(self):
    layout = Layout(
        input_size=100,
        output_size=10,
        output_activation='softmax')
    training = Training(
        objective=None,
        optimizer=None,
        metric=None,
        stopping=None,
        batch_size=None)
    experiment = Experiment(
        'test',
        layout,
        training,
        batch_iterator=None,
        test_batch_iterator=None,
        environment=None,
        parameters=ExperimentParameters(use_default_values=False))
    experiment.parameters.all_search_parameters(True)
    for _ in range(10):
        blueprint = create_random_blueprint(experiment)
        mutant = mutate_blueprint(
            blueprint,
            parameters=experiment.parameters,
            p_mutate_layout=0,
            p_mutate_param=1,
            mutate_in_place=False)
        for row_idx, row in enumerate(mutant.layout.rows):
            for block_idx, block in enumerate(row.blocks):
                for layer_idx, layer in enumerate(block.layers):
                    original_row = blueprint.layout.rows[row_idx]
                    original_block = original_row.blocks[block_idx]
                    original_layer = original_block.layers[layer_idx]
                    for name, value in layer.parameters.items():
                        self.assertNotEqual(
                            value,
                            original_layer.parameters[name],
                            'Should have mutated parameter')

def test_train(self):
    disable_sysout()
    with tempfile.TemporaryDirectory() as tmp_dir:
        batch_size = 50
        batch_iterator, test_batch_iterator, nb_classes = get_reuters_dataset(
            batch_size, 1000)
        layout = Layout(
            input_size=1000,
            output_size=nb_classes,
            output_activation='softmax')
        training = Training(
            objective=Objective('categorical_crossentropy'),
            optimizer=Optimizer(optimizer='Adam'),
            metric=Metric('categorical_accuracy'),
            stopping=EpochStoppingCondition(10),
            batch_size=batch_size)
        experiment_parameters = ExperimentParameters(
            use_default_values=True)
        experiment_parameters.layout_parameter('rows', 1)
        experiment_parameters.layout_parameter('blocks', 1)
        experiment_parameters.layout_parameter('layers', 1)
        experiment = Experiment(
            'test__reuters_experiment',
            layout,
            training,
            batch_iterator,
            test_batch_iterator,
            CpuEnvironment(n_jobs=1, data_dir=tmp_dir),
            parameters=experiment_parameters)
        blueprint = create_random_blueprint(experiment)
        model = ModelBuilder().build(blueprint, default_device())
        result = model.fit_generator(
            generator=batch_iterator,
            samples_per_epoch=batch_iterator.samples_per_epoch,
            nb_epoch=10,
            validation_data=test_batch_iterator,
            nb_val_samples=test_batch_iterator.sample_count)
        self.assertIsNotNone(result, 'Should have fit the model')
        score = model.evaluate_generator(
            test_batch_iterator,
            val_samples=test_batch_iterator.sample_count)
        self.assertIsNotNone(score, 'Should have evaluated the model')

def test_model_trainer(self):
    disable_sysout()
    with tempfile.TemporaryDirectory() as tmp_dir:
        batch_size = 50
        batch_iterator, test_batch_iterator, nb_classes = get_reuters_dataset(
            batch_size, 1000)
        layout = Layout(
            input_size=1000,
            output_size=nb_classes,
            output_activation='softmax')
        training = Training(
            objective=Objective('categorical_crossentropy'),
            optimizer=Optimizer(optimizer='Adam'),
            metric=Metric('categorical_accuracy'),
            stopping=EpochStoppingCondition(10),
            batch_size=batch_size)
        experiment_parameters = ExperimentParameters(
            use_default_values=True)
        experiment_parameters.layout_parameter('rows', 1)
        experiment_parameters.layout_parameter('blocks', 1)
        experiment_parameters.layout_parameter('layers', 1)
        experiment = Experiment(
            'test__reuters_experiment',
            layout,
            training,
            batch_iterator,
            test_batch_iterator,
            CpuEnvironment(n_jobs=1, data_dir=tmp_dir),
            parameters=experiment_parameters)
        blueprint = create_random_blueprint(experiment)
        trainer = ModelTrainer(batch_iterator, test_batch_iterator)
        model_filename = join(tmp_dir, 'model')
        model, history, _duration = trainer.train(
            blueprint,
            cpu_device(),
            save_best_model=True,
            model_filename=model_filename)
        self.assertIsNotNone(model, 'Should have fit the model')
        self.assertTrue(
            isfile(model_filename), 'Should have saved the model')
        self.assertIsNotNone(history, 'Should have the training history')

def test_build_w_custom_definitions(self):
    def custom_activation(x):
        return x

    register_custom_activation('custom_activation', custom_activation)
    register_custom_layer(
        'Dense2',
        Dense,
        deepcopy(reference_parameters['layers']['Dense']),
        True)
    layout = Layout(
        input_size=100,
        output_size=10,
        output_activation='softmax',
        block=['Dense2'])
    training = Training(
        objective=Objective('categorical_crossentropy'),
        optimizer=None,
        metric=Metric('categorical_accuracy'),
        stopping=EpochStoppingCondition(5),
        batch_size=250)
    experiment_parameters = ExperimentParameters(use_default_values=False)
    experiment_parameters.layout_parameter('blocks', int_param(1, 5))
    experiment_parameters.layout_parameter('layers', int_param(1, 5))
    experiment_parameters.layout_parameter(
        'layer.type', string_param(['Dense2']))
    experiment_parameters.layer_parameter(
        'Dense2.output_dim', int_param(10, 500))
    experiment_parameters.layer_parameter(
        'Dense2.activation', string_param(['custom_activation']))
    experiment_parameters.layer_parameter(
        'Dropout.p', float_param(0.1, 0.9))
    experiment_parameters.all_search_parameters(True)
    experiment = Experiment(
        'test',
        layout,
        training,
        batch_iterator=None,
        test_batch_iterator=None,
        environment=None,
        parameters=experiment_parameters)
    check_experiment_parameters(experiment)
    for _ in range(5):
        blueprint1 = create_random_blueprint(experiment)
        blueprint2 = create_random_blueprint(experiment)
        blueprint3 = mix_blueprints(
            blueprint1, blueprint2, experiment_parameters)
        blueprint4 = mutate_blueprint(
            blueprint1, experiment_parameters, mutate_in_place=False)
        for blueprint in [blueprint1, blueprint2, blueprint3, blueprint4]:
            for layer in blueprint.layout.get_layers():
                self.assertEqual(
                    'Dense2',
                    layer.layer_type,
                    'Should have used custom layer')
            model = ModelBuilder().build(blueprint, cpu_device())
            self.assertIsNotNone(model, 'Should have built a model')

def test_ga_search(self):
    with tempfile.TemporaryDirectory() as tmp_dir:
        epoch = 3
        generations = 2
        batch_size = 50
        batch_iterator, test_batch_iterator, nb_classes = get_reuters_dataset(
            batch_size, 1000)
        layout = Layout(
            input_size=1000,
            output_size=nb_classes,
            output_activation='softmax')
        training = Training(
            objective=Objective('categorical_crossentropy'),
            optimizer=Optimizer(optimizer='Adam'),
            metric=Metric('categorical_accuracy'),
            stopping=EpochStoppingCondition(epoch),
            batch_size=batch_size)
        experiment_parameters = ExperimentParameters(
            use_default_values=False)
        experiment_parameters.layout_parameter('rows', 1)
        experiment_parameters.layout_parameter('blocks', 1)
        experiment_parameters.layout_parameter('layers', 1)
        experiment_parameters.layer_parameter(
            'Dense.output_dim', int_param(10, 500))
        experiment_parameters.all_search_parameters(True)
        experiment_label = 'test__reuters_experiment'
        experiment = Experiment(
            experiment_label,
            layout,
            training,
            batch_iterator,
            test_batch_iterator,
            CpuEnvironment(n_jobs=2, data_dir=tmp_dir),
            parameters=experiment_parameters)
        run_ga_search_experiment(
            experiment,
            population_size=2,
            generations=generations)
        self.assertTrue(
            isfile(experiment.get_log_filename()),
            'Should have logged')
        self.assertTrue(
            isfile(experiment.get_step_data_filename(0)),
            'Should have logged')
        self.assertTrue(
            isfile(experiment.get_step_log_filename(0)),
            'Should have logged')
        blueprints = load_experiment_blueprints(
            experiment_label, 0, Environment(data_dir=tmp_dir))
        self.assertTrue(
            len(blueprints) > 0,
            'Should have saved/loaded blueprints')
        model = ModelBuilder().build(blueprints[0], cpu_device())
        disable_sysout()
        model.fit_generator(
            generator=batch_iterator,
            samples_per_epoch=batch_iterator.samples_per_epoch,
            nb_epoch=5,
            validation_data=test_batch_iterator,
            nb_val_samples=test_batch_iterator.sample_count)
        score = model.evaluate_generator(
            test_batch_iterator,
            val_samples=test_batch_iterator.sample_count)
        self.assertTrue(score[1] > 0, 'Should have valid score')
        step, population = load_experiment_checkpoint(experiment)
        self.assertEqual(
            generations - 1, step, 'Should have loaded checkpoint')
        self.assertIsNotNone(population, 'Should have loaded checkpoint')
        blueprint = load_experiment_best_blueprint(
            experiment.label,
            environment=CpuEnvironment(n_jobs=2, data_dir=tmp_dir))
        model = ModelBuilder().build(
            blueprint, cpu_device(), compile_model=False)
        self.assertIsNotNone(
            model, 'Should have loaded and built best model from experiment')

def test_predefined_layout(self):
    layout = Layout(
        input_size=100,
        output_size=10,
        output_activation='softmax',
        block=[
            ('Dense', {'activation': 'relu'}),
            'Dropout',
            ('Dense', {'output_dim': 100})])
    training = Training(
        objective=None,
        optimizer=Optimizer(),
        metric=None,
        stopping=None,
        batch_size=None)
    experiment = Experiment(
        'test',
        layout,
        training,
        batch_iterator=None,
        test_batch_iterator=None,
        environment=None,
        parameters=ExperimentParameters(use_default_values=False))
    experiment.parameters.search_parameter('layout', False)
    experiment.parameters.search_parameter('parameters', True)
    experiment.parameters.search_parameter('optimizer', True)
    for _ in range(10):
        blueprint1 = create_random_blueprint(experiment)
        blueprint2 = create_random_blueprint(experiment)
        blueprint3 = mix_blueprints(
            blueprint1, blueprint2, experiment.parameters)
        blueprint4 = mutate_blueprint(
            blueprint1, experiment.parameters, mutate_in_place=False)
        for idx, blueprint in enumerate(
                [blueprint1, blueprint2, blueprint3, blueprint4]):
            self.assertIsNotNone(blueprint, 'Should have created a blueprint')
            self.assertIsNotNone(
                blueprint.layout, 'Should have created a layout')
            self.assertEqual(
                1, len(blueprint.layout.get_rows()), 'Should have 1 row')
            self.assertEqual(
                1, len(blueprint.layout.get_blocks()), 'Should have 1 block')
            self.assertEqual(
                len(layout.block),
                len(blueprint.layout.get_layers()),
                'Should have predefined layers count')
            for row in blueprint.layout.get_rows():
                blocks = len(row.get_blocks())
                self.assertTrue(
                    is_valid_param_value(
                        experiment.parameters.get_layout_parameter('blocks'),
                        blocks),
                    'Invalid value')
                for block in row.get_blocks():
                    self.assertEqual(
                        len(layout.block),
                        len(block.layers),
                        'Should have used template')
                    for i, template_layer in enumerate(layout.block):
                        layer_type = str_param_name(
                            template_layer[0]
                            if isinstance(template_layer, tuple)
                            else template_layer)
                        params = (
                            template_layer[1]
                            if isinstance(template_layer, tuple)
                            else dict())
                        self.assertEqual(
                            layer_type,
                            block.layers[i].layer_type,
                            'Should have used the predefined layer type')
                        for name, value in params.items():
                            self.assertEqual(
                                value,
                                block.layers[i].parameters[name],
                                'Should have used the predefined parameter '
                                'value for blueprint %d' % idx)