Example #1
0
def custom_experiment_parameters():
    """Define the experiment parameters.

    We are using use_default_values=True, which will initialize
    all the parameters with their default values. These parameters are then fixed
    for the duration of the experiment and won't evolve.
    That means that we need to manually specify which parameters we want to test,
    and the possible values, either intervals or lists of values.

    If we want to test all the parameters and possible values, we can
    set use_default_values to False. In that case, random values will be generated
    and tested during the experiment. We can redefine some parameters if we want to
    fix their values.
    Reference parameters and default values are defined in minos.model.parameters

    We set the rows, blocks and layers parameters to 1 as we have specified a fixed layout.
    We also set the 'layout' search parameter to False to disable the layout search.

    Returns:
        ExperimentParameters: the configured parameter set for the experiment.
    """
    experiment_parameters = ExperimentParameters(use_default_values=True)
    # Fixed layout: disable layout search entirely.
    experiment_parameters.search_parameter('layout', False)
    # BUG FIX: the original set 'blocks' twice; per the docstring intent,
    # rows, blocks and layers must each be pinned to 1.
    experiment_parameters.layout_parameter('rows', 1)
    experiment_parameters.layout_parameter('blocks', 1)
    experiment_parameters.layout_parameter('layers', 1)
    # Searchable per-layer hyperparameters: size, activation and dropout rate.
    experiment_parameters.layer_parameter('Dense.output_dim',
                                          int_param(10, 500))
    experiment_parameters.layer_parameter('Dense.activation',
                                          string_param(['relu', 'tanh']))
    experiment_parameters.layer_parameter('Dropout.p', float_param(0.1, 0.9))
    return experiment_parameters
Example #2
0
 def test_predefined_layer_type(self):
     """Random blueprints must only use the layer types we whitelist."""
     allowed_types = ['Dropout']
     simple_layout = Layout(
         input_size=100,
         output_size=10,
         output_activation='softmax')
     dummy_training = Training(
         objective=None,
         optimizer=Optimizer('SGD', {'lr': 1}),
         metric=None,
         stopping=None,
         batch_size=None)
     experiment = Experiment(
         'test',
         simple_layout,
         dummy_training,
         batch_iterator=None,
         test_batch_iterator=None,
         environment=None)
     # Restrict the search space to the whitelisted layer types only.
     experiment.parameters.layer_types(string_param(allowed_types))
     for _ in range(10):
         blueprint = create_random_blueprint(experiment)
         self.assertIsNotNone(blueprint, 'Should have created a blueprint')
         for layer in blueprint.layout.get_layers():
             self.assertTrue(
                 layer.layer_type in allowed_types,
                 'Should have used predefined layer types')
Example #3
0
 def test_custom_parameters(self):
     """Setting layout/layer parameters must be readable back unchanged."""
     parameters = ExperimentParameters()
     # Interval parameters: set then read back, checking both bounds.
     for name, lower, upper in (('blocks', 1, 10), ('layers', 1, 3)):
         parameters.layout_parameter(name, int_param(lower, upper))
         stored = parameters.get_layout_parameter(name)
         self.assertTrue(
             lower == stored.lo and upper == stored.hi,
             'Should have set values')
     # Categorical parameter: values must keep their order.
     parameters.layer_parameter('Dense.activation', string_param(['relu', 'tanh']))
     stored = parameters.get_layer_parameter('Dense.activation')
     self.assertTrue(
         'relu' == stored.values[0] and 'tanh' == stored.values[1],
         'Should have set values')
Example #4
0
    def test_build(self):
        """Random, mixed and mutated blueprints must all build into models."""
        layout = Layout(input_size=100,
                        output_size=10,
                        output_activation='softmax')
        training = Training(objective=Objective('categorical_crossentropy'),
                            optimizer=None,
                            metric=Metric('categorical_accuracy'),
                            stopping=EpochStoppingCondition(10),
                            batch_size=250)

        # No defaults: every parameter below is part of the search space.
        search_space = ExperimentParameters(use_default_values=False)
        search_space.layout_parameter('blocks', int_param(1, 5))
        search_space.layout_parameter('layers', int_param(1, 5))
        search_space.layer_parameter('Dense.output_dim', int_param(10, 500))
        search_space.layer_parameter('Dense.activation',
                                     string_param(['relu', 'tanh']))
        search_space.layer_parameter('Dropout.p', float_param(0.1, 0.9))
        search_space.all_search_parameters(True)
        experiment = Experiment('test',
                                layout,
                                training,
                                batch_iterator=None,
                                test_batch_iterator=None,
                                environment=None,
                                parameters=search_space)
        check_experiment_parameters(experiment)

        def build_and_check(blueprint):
            # Every blueprint, however produced, must yield a CPU model.
            model = ModelBuilder().build(blueprint, cpu_device())
            self.assertIsNotNone(model, 'Should have built a model')

        for _ in range(5):
            parent1 = create_random_blueprint(experiment)
            build_and_check(parent1)
            parent2 = create_random_blueprint(experiment)
            build_and_check(parent2)
            build_and_check(mix_blueprints(parent1, parent2, search_space))
            build_and_check(mutate_blueprint(parent1,
                                             search_space,
                                             mutate_in_place=False))
Example #5
0
    def test_build_w_custom_definitions(self):
        """Custom layers/activations must survive creation, mixing and mutation."""
        def custom_activation(x):
            return x

        # Register an identity activation and a Dense clone under a new name.
        register_custom_activation('custom_activation', custom_activation)
        register_custom_layer(
            'Dense2', Dense, deepcopy(reference_parameters['layers']['Dense']),
            True)

        layout = Layout(input_size=100,
                        output_size=10,
                        output_activation='softmax',
                        block=['Dense2'])
        training = Training(objective=Objective('categorical_crossentropy'),
                            optimizer=None,
                            metric=Metric('categorical_accuracy'),
                            stopping=EpochStoppingCondition(5),
                            batch_size=250)

        # Constrain the search space so only the custom layer can appear.
        search_space = ExperimentParameters(use_default_values=False)
        search_space.layout_parameter('blocks', int_param(1, 5))
        search_space.layout_parameter('layers', int_param(1, 5))
        search_space.layout_parameter('layer.type', string_param(['Dense2']))
        search_space.layer_parameter('Dense2.output_dim', int_param(10, 500))
        search_space.layer_parameter('Dense2.activation',
                                     string_param(['custom_activation']))
        search_space.layer_parameter('Dropout.p', float_param(0.1, 0.9))
        search_space.all_search_parameters(True)
        experiment = Experiment('test',
                                layout,
                                training,
                                batch_iterator=None,
                                test_batch_iterator=None,
                                environment=None,
                                parameters=search_space)
        check_experiment_parameters(experiment)

        def verify(blueprint):
            # Every layer must be the custom type, and the blueprint must build.
            for layer in blueprint.layout.get_layers():
                self.assertEqual('Dense2', layer.layer_type,
                                 'Should have used custom layer')
            model = ModelBuilder().build(blueprint, cpu_device())
            self.assertIsNotNone(model, 'Should have built a model')

        for _ in range(5):
            parent1 = create_random_blueprint(experiment)
            verify(parent1)
            parent2 = create_random_blueprint(experiment)
            verify(parent2)
            verify(mix_blueprints(parent1, parent2, search_space))
            verify(mutate_blueprint(parent1,
                                    search_space,
                                    mutate_in_place=False))
Example #6
0
from minos.model.parameter import int_param, string_param, boolean_param,\
    float_param

reference_parameters = {
    'search': {
        'layout': boolean_param(default=True),
        'parameters': boolean_param(default=True),
        'optimizer': boolean_param(default=True)
    },
    'layout': {
        'rows': int_param(lo=1, hi=3, default=1),
        'blocks': int_param(lo=1, hi=5, default=1),
        'layers': int_param(lo=1, hi=5, default=1),
        'block': {
            'input_type':
            string_param(['concat', 'random+concat', 'concat+random'],
                         default='concat'),
            'input_size':
            float_param(default=1.)
        },
        'layer': {
            'type': string_param(['Dense', 'Dropout', 'BatchNormalization']),
            'stackable': string_param([])
        }
    },
    'layers': {
        'Dense': {
            'output_dim':
            int_param(1, 1000, default=100),
            'init':
            string_param(
                [