Example #1
def custom_experiment_parameters():
    """ Here we define the experiment parameters.
    We are using use_default_values=True, which will initialize
    all the parameters with their default values. These parameters are then fixed
    for the duration of the experiment and won't evolve.
    That means that we need to manually specify which parametres we want to test,
    and the possible values, either intervals or lists of values.

    If we want to test all the parameters and possible values, we can
    set use_default_values to False. In that case, random values will be generated
    and tested during the experiment. We can redefine some parameters if we want to
    fix their values.
    Reference parameters and default values are defined in minos.model.parameters

    We set the rows, blocks and layers parameters to 1 as we have specified a fixed layout.
    We also set the 'layout' search parameter to False to disable the layout search
    """
    experiment_parameters = ExperimentParameters(use_default_values=True)
    experiment_parameters.search_parameter('layout', False)
    experiment_parameters.layout_parameter('rows', 1)
    experiment_parameters.layout_parameter('blocks', 1)
    experiment_parameters.layout_parameter('layers', 1)
    experiment_parameters.layer_parameter('Dense.output_dim',
                                          int_param(10, 500))
    experiment_parameters.layer_parameter('Dense.activation',
                                          string_param(['relu', 'tanh']))
    experiment_parameters.layer_parameter('Dropout.p', float_param(0.1, 0.9))
    return experiment_parameters
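
The docstring above also describes the opposite setup, where use_default_values is set to False so that every parameter is searched and only selected ones are pinned. Below is a minimal sketch of that variant, reusing only the helpers shown in these examples (ExperimentParameters, int_param); the function name is made up for illustration, and passing a raw value to layer_parameter to fix a parameter is an assumption mirrored from the layout_parameter('blocks', 1) calls above.

def custom_experiment_parameters_search_all():
    # Sketch only: with use_default_values=False, random values are generated
    # and tested for all parameters during the experiment.
    experiment_parameters = ExperimentParameters(use_default_values=False)
    # Still constrain the Dense output dimension to an interval
    experiment_parameters.layer_parameter('Dense.output_dim', int_param(10, 500))
    # Pin the dropout probability to a fixed value instead of letting it evolve
    # (raw value assumed to be accepted, as in layout_parameter('blocks', 1) above)
    experiment_parameters.layer_parameter('Dropout.p', 0.5)
    return experiment_parameters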
Example #2
    def test_mutate_w_custom_definitions(self):

        def custom_activation(x):
            return x

        register_custom_activation('custom_activation', custom_activation)
        register_custom_layer('Dense2', Dense, deepcopy(reference_parameters['layers']['Dense']))

        layout = Layout(
            input_size=100,
            output_size=10,
            output_activation='softmax',
            block=['Dense', 'Dense2'])
        training = Training(
            objective=Objective('categorical_crossentropy'),
            optimizer=None,
            metric=Metric('categorical_accuracy'),
            stopping=EpochStoppingCondition(5),
            batch_size=250)

        experiment_parameters = ExperimentParameters(use_default_values=False)
        experiment_parameters.layout_parameter('blocks', int_param(1, 5))
        experiment_parameters.layout_parameter('layers', int_param(1, 5))
        experiment_parameters.layer_parameter('Dense2.output_dim', int_param(10, 500))
        experiment_parameters.layer_parameter('Dropout.p', float_param(0.1, 0.9))
        experiment_parameters.all_search_parameters(True)
        experiment = Experiment(
            'test',
            layout,
            training,
            batch_iterator=None,
            test_batch_iterator=None,
            environment=None,
            parameters=experiment_parameters)
        check_experiment_parameters(experiment)
        for _ in range(10):
            blueprint = create_random_blueprint(experiment)
            mutant = mutate_blueprint(
                blueprint,
                parameters=experiment.parameters,
                p_mutate_layout=0,
                p_mutate_param=1,
                mutate_in_place=False)
            for row_idx, row in enumerate(mutant.layout.rows):
                for block_idx, block in enumerate(row.blocks):
                    for layer_idx, layer in enumerate(block.layers):
                        original_row = blueprint.layout.rows[row_idx]
                        original_block = original_row.blocks[block_idx]
                        original_layer = original_block.layers[layer_idx]
                        for name, value in layer.parameters.items():
                            self.assertTrue(
                                value != original_layer.parameters[name],
                                'Should have mutated parameter')
Example #3
    def test_random_value(self):
        param = int_param(values=list(range(10)))
        val = random_param_value(param)
        self.assertTrue(
            isinstance(val, int),
            'Should be an int')
        self.assertTrue(
            val in param.values,
            'Value should be in predefined values')

        param = float_param(values=[i * 0.1 for i in range(10)])
        val = random_param_value(param)
        self.assertTrue(
            isinstance(val, float),
            'Should be a float')
        self.assertTrue(
            val in param.values,
            'Value should be in predefined values')

        param = float_param(lo=.5, hi=.7)
        for _ in range(100):
            val = random_param_value(param)
            self.assertTrue(
                isinstance(val, float),
                'Should be a float')
            self.assertTrue(
                val <= param.hi and val >= param.lo,
                'Value should be in range')

        param = {
            'a': float_param(optional=False),
            'b': float_param(optional=False)}
        for _ in range(10):
            val = random_param_value(param)
            self.assertTrue(
                isinstance(val, dict),
                'Should be a dict')
            self.assertEqual(
                len(param), len(val),
                'Should respect non optional setting')

        param = {
            'a': float_param(optional=True),
            'b': float_param(optional=True)}
        for _ in range(10):
            val = random_param_value(param)
            self.assertTrue(
                isinstance(val, dict),
                'Should be a dict')
            self.assertTrue(
                len(val) >= 0 and len(val) <= len(param),
                'Should respect optional setting')
Example #4
    def test_build(self):
        layout = Layout(input_size=100,
                        output_size=10,
                        output_activation='softmax')
        training = Training(objective=Objective('categorical_crossentropy'),
                            optimizer=None,
                            metric=Metric('categorical_accuracy'),
                            stopping=EpochStoppingCondition(10),
                            batch_size=250)

        experiment_parameters = ExperimentParameters(use_default_values=False)
        experiment_parameters.layout_parameter('blocks', int_param(1, 5))
        experiment_parameters.layout_parameter('layers', int_param(1, 5))
        experiment_parameters.layer_parameter('Dense.output_dim',
                                              int_param(10, 500))
        experiment_parameters.layer_parameter('Dense.activation',
                                              string_param(['relu', 'tanh']))
        experiment_parameters.layer_parameter('Dropout.p',
                                              float_param(0.1, 0.9))
        experiment_parameters.all_search_parameters(True)
        experiment = Experiment('test',
                                layout,
                                training,
                                batch_iterator=None,
                                test_batch_iterator=None,
                                environment=None,
                                parameters=experiment_parameters)
        check_experiment_parameters(experiment)
        for _ in range(5):
            blueprint1 = create_random_blueprint(experiment)
            model = ModelBuilder().build(blueprint1, cpu_device())
            self.assertIsNotNone(model, 'Should have built a model')
            blueprint2 = create_random_blueprint(experiment)
            model = ModelBuilder().build(blueprint2, cpu_device())
            self.assertIsNotNone(model, 'Should have built a model')
            blueprint3 = mix_blueprints(blueprint1, blueprint2,
                                        experiment_parameters)
            model = ModelBuilder().build(blueprint3, cpu_device())
            self.assertIsNotNone(model, 'Should have built a model')
            blueprint4 = mutate_blueprint(blueprint1,
                                          experiment_parameters,
                                          mutate_in_place=False)
            model = ModelBuilder().build(blueprint4, cpu_device())
            self.assertIsNotNone(model, 'Should have built a model')
Example #5
    def test_build_w_custom_definitions(self):
        def custom_activation(x):
            return x

        register_custom_activation('custom_activation', custom_activation)
        register_custom_layer(
            'Dense2', Dense, deepcopy(reference_parameters['layers']['Dense']),
            True)

        layout = Layout(input_size=100,
                        output_size=10,
                        output_activation='softmax',
                        block=['Dense2'])
        training = Training(objective=Objective('categorical_crossentropy'),
                            optimizer=None,
                            metric=Metric('categorical_accuracy'),
                            stopping=EpochStoppingCondition(5),
                            batch_size=250)

        experiment_parameters = ExperimentParameters(use_default_values=False)
        experiment_parameters.layout_parameter('blocks', int_param(1, 5))
        experiment_parameters.layout_parameter('layers', int_param(1, 5))
        experiment_parameters.layout_parameter('layer.type',
                                               string_param(['Dense2']))
        experiment_parameters.layer_parameter('Dense2.output_dim',
                                              int_param(10, 500))
        experiment_parameters.layer_parameter(
            'Dense2.activation', string_param(['custom_activation']))
        experiment_parameters.layer_parameter('Dropout.p',
                                              float_param(0.1, 0.9))
        experiment_parameters.all_search_parameters(True)
        experiment = Experiment('test',
                                layout,
                                training,
                                batch_iterator=None,
                                test_batch_iterator=None,
                                environment=None,
                                parameters=experiment_parameters)
        check_experiment_parameters(experiment)
        for _ in range(5):
            blueprint1 = create_random_blueprint(experiment)
            for layer in blueprint1.layout.get_layers():
                self.assertEqual('Dense2', layer.layer_type,
                                 'Should have used custom layer')
            model = ModelBuilder().build(blueprint1, cpu_device())
            self.assertIsNotNone(model, 'Should have built a model')
            blueprint2 = create_random_blueprint(experiment)
            for layer in blueprint2.layout.get_layers():
                self.assertEqual('Dense2', layer.layer_type,
                                 'Should have used custom layer')
            model = ModelBuilder().build(blueprint2, cpu_device())
            self.assertIsNotNone(model, 'Should have built a model')
            blueprint3 = mix_blueprints(blueprint1, blueprint2,
                                        experiment_parameters)
            for layer in blueprint3.layout.get_layers():
                self.assertEqual('Dense2', layer.layer_type,
                                 'Should have used custom layer')
            model = ModelBuilder().build(blueprint3, cpu_device())
            self.assertIsNotNone(model, 'Should have built a model')
            blueprint4 = mutate_blueprint(blueprint1,
                                          experiment_parameters,
                                          mutate_in_place=False)
            for layer in blueprint4.layout.get_layers():
                self.assertEqual('Dense2', layer.layer_type,
                                 'Should have used custom layer')
            model = ModelBuilder().build(blueprint4, cpu_device())
            self.assertIsNotNone(model, 'Should have built a model')
Example #6
reference_parameters = {
    'search': {
        'layout': boolean_param(default=True),
        'parameters': boolean_param(default=True),
        'optimizer': boolean_param(default=True)
    },
    'layout': {
        'rows': int_param(lo=1, hi=3, default=1),
        'blocks': int_param(lo=1, hi=5, default=1),
        'layers': int_param(lo=1, hi=5, default=1),
        'block': {
            'input_type':
            string_param(['concat', 'random+concat', 'concat+random'],
                         default='concat'),
            'input_size':
            float_param(default=1.)
        },
        'layer': {
            'type': string_param(['Dense', 'Dropout', 'BatchNormalization']),
            'stackable': string_param([])
        }
    },
    'layers': {
        'Dense': {
            'output_dim':
            int_param(1, 1000, default=100),
            'init':
            string_param(
                [
                    'uniform',
                    'lecun_uniform',