    def test_merge_optimizer_configs_same_optimizer(
            self,
            global_decouple=None,
            local_decouple=None,
            local_learning_rate_multiplier=2.0,
            add_local_parameters=True,
            local_optimizer_name=None):
        global_optimization_parameters = {
            'optimizer_name': 'RMSPropOptimizer',
            'learning_rate': 100,
            'epsilon': 10,
            'decay': 5,
            'gradient_clip': 2,
            'gradient_noise_std': 0.1,
            'decouple_regularization': global_decouple
        }
        local_optimization_parameters = {
            'optimizer_name': local_optimizer_name,
            'decouple_regularization': local_decouple,
            'learning_rate_multiplier': local_learning_rate_multiplier
        }
        if add_local_parameters:
            local_optimization_parameters['decay'] = 2
        global_config = opt_configs.create_and_validate_optimization_config(
            **global_optimization_parameters, is_global=True)
        local_config = opt_configs.create_and_validate_optimization_config(
            **local_optimization_parameters)
        local_config_merged = opt_configs.merge_optimization_configs(
            global_config, local_config)
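        # Expected optimizer parameters after the merge: the global
        # optimizer parameters carry over only when the local config neither
        # names its own optimizer nor supplies parameters of its own.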
        if local_optimizer_name is not None:
            optimizer_parameters_must = ({
                'decay': 2
            } if add_local_parameters else None)
        else:
            optimizer_parameters_must = ({
                'decay': 2
            } if add_local_parameters else {
                'epsilon': 10,
                'decay': 5
            })

        self.assertEqual(local_config_merged.optimizer_name,
                         'RMSPropOptimizer')
        decouple_must = (local_decouple
                         if local_decouple is not None else global_decouple)
        self.assertEqual(local_config_merged.decouple_regularization,
                         decouple_must)
        self.assertEqual(2, local_config_merged.gradient_clip)
        self.assertEqual(0.1, local_config_merged.gradient_noise_std)
        if optimizer_parameters_must is None:
            self.assertIsNone(local_config_merged.optimizer_parameters)
        else:
            self.assertDictEqual(optimizer_parameters_must,
                                 local_config_merged.optimizer_parameters)
        self.assertIsNone(local_config_merged.learning_rate)
Example 2
def build_optimization_config(
        config: dict,
        is_global: bool = True) -> opt_configs.OptimizationConfig:
    """
    Build the optimization config object based on the config.

    Will also build the learning rate manipulator if its configuration was
    provided

    Parameters
    ----------
    config
        config of the optimization config
    is_global
        defines if it must be treated as a global optimization or not

    Returns
    -------
    optimization_config
        optimization config
    """
    config = copy.deepcopy(config)
    learning_rate_manipulator_config = config.pop('learning_rate_manipulator',
                                                  None)
    if learning_rate_manipulator_config:
        learning_rate_manipulator = build_learning_rate_manipulator(
            **learning_rate_manipulator_config)
    else:
        learning_rate_manipulator = None
    optimization_config = opt_configs.create_and_validate_optimization_config(
        learning_rate_manipulator=learning_rate_manipulator,
        is_global=is_global,
        **config)
    return optimization_config
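
# Minimal usage sketch for build_optimization_config (an illustration, not part
# of the library): the config keys are assumed to mirror the keyword arguments
# of opt_configs.create_and_validate_optimization_config, and an optional
# 'learning_rate_manipulator' sub-dict would be forwarded as keyword arguments
# to build_learning_rate_manipulator.
example_config = {
    'optimizer_name': 'RMSPropOptimizer',
    'learning_rate': 1e-3,
    'gradient_clip': 2,
}
example_optimization_config = build_optimization_config(
    example_config, is_global=True)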
Example 3
    def test_merge_optimizer_configs_different_optimizers(
            self,
            global_decouple=None,
            local_decouple=None,
            local_learning_rate_multiplier=2.0):
        global_optimization_parameters = {
            'optimizer_name': 'RMSPropOptimizer',
            'learning_rate': 100,
            'optimizer_parameters': {
                'epsilon': 10,
                'decay': 5
            },
            'gradient_clip': 2,
            'gradient_noise_std': 0.1,
            'decouple_regularization': global_decouple
        }
        local_optimization_parameters = {
            'optimizer_name': 'AdadeltaOptimizer',
            'optimizer_parameters': {
                'rho': 5,
            },
            'gradient_clip': 20,
            'decouple_regularization': local_decouple,
            'learning_rate_multiplier': local_learning_rate_multiplier
        }
        global_config = opt_configs.create_and_validate_optimization_config(
            **global_optimization_parameters, is_global=True)
        local_config = opt_configs.create_and_validate_optimization_config(
            **local_optimization_parameters)
        local_config_merged = opt_configs.merge_optimization_configs(
            global_config, local_config)

        decouple_must = (local_decouple
                         if local_decouple is not None else global_decouple)
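        # The local optimizer name, optimizer parameters and gradient_clip are
        # expected to override the global ones, while gradient_noise_std falls
        # back to the global value because the local config does not set it.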
        self.assertEqual('AdadeltaOptimizer',
                         local_config_merged.optimizer_name)
        self.assertEqual(decouple_must,
                         local_config_merged.decouple_regularization)
        self.assertEqual(20, local_config_merged.gradient_clip)
        self.assertEqual(0.1, local_config_merged.gradient_noise_std)
        self.assertEqual({'rho': 5}, local_config_merged.optimizer_parameters)
        self.assertIsNone(local_config_merged.learning_rate)
        self.assertEqual(local_learning_rate_multiplier or 1.0,
                         local_config_merged.learning_rate_multiplier)
Example 4
    def setUp(self):
        tf.reset_default_graph()
        self.random_seed = 5475
        tf.set_random_seed(self.random_seed)
        np.random.seed(self.random_seed)
        self.learning_rate = 0.1
        self.num_classes = 3
        self.single_cpu_device = ['/cpu:0']
        self.multiple_cpu_gpu_devices = ['/cpu:0', '/gpu:0'] * 2
        self.batch_size = 8
        self.data_dim = 20
        self.model = ModelMock(num_classes=self.num_classes,
                               regularization_l1=0.5).build()
        self.global_optim_config = create_and_validate_optimization_config(
            optimizer_name='GradientDescentOptimizer',
            learning_rate=self.learning_rate, is_global=True)
Example 5
    def test_create_optimizer_config_local(self):
        optimization_parameters = {
            'optimizer_name': 'RMSPropOptimizer',
            'optimizer_parameters': {
                'epsilon': 10,
                'decay': 5
            },
            'gradient_clip': 2,
            'gradient_noise_std': 0.1,
            'decouple_regularization': True,
            'learning_rate_multiplier': 2
        }
        config = opt_configs.create_and_validate_optimization_config(
            **optimization_parameters, is_global=False)
        self.assertEqual(config.optimizer_name, "RMSPropOptimizer")
        self.assertIsNone(config.learning_rate)
        self.assertEqual(config.decouple_regularization, True)
        self.assertEqual(config.gradient_clip, 2)
        self.assertEqual(config.gradient_noise_std, 0.1)
        self.assertDictEqual(config.optimizer_parameters, {
            "epsilon": 10,
            'decay': 5
        })
        self.assertEqual(config.learning_rate_multiplier, 2)
        self.assertNotIn('learning_rate', config.optimizer_parameters)

        with self.assertRaises(AssertionError):
            opt_configs.create_and_validate_optimization_config(
                learning_rate=10, is_global=False)
        with self.assertRaises(AssertionError):
            opt_configs.create_and_validate_optimization_config(
                learning_rate_manipulator=object(), is_global=False)

        optimization_parameters = {
            'epsilon': 10,
            'decay': 5,
            'gradient_clip': 2,
            'gradient_noise_std': 0.1,
            'decouple_regularization': True,
            'learning_rate_multiplier': None
        }
        config = opt_configs.create_and_validate_optimization_config(
            **optimization_parameters, is_global=False)
        self.assertIsNone(config.optimizer_name)
        self.assertEqual(config.learning_rate_multiplier, 1.0)
        self.assertNotIn('learning_rate', config.optimizer_parameters)
Example 6
    def test_build(self):
        tf.reset_default_graph()
        batch_size = {'train': 4, 'eval': 2}
        samples_per_epoch = {'train': 100, 'eval': 100}
        iters_per_epoch = {
            k: v // batch_size[k]
            for k, v in samples_per_epoch.items()
        }
        project_dir = self.get_temp_dir()

        model = self._get_model()
        callbacks_handler_train = self._get_callbacks_handler('train')
        callbacks_handler_eval = self._get_callbacks_handler('eval')

        datasets = {k: self._get_dataset() for k in ['train', 'eval']}
        optimization_config = create_and_validate_optimization_config(
            optimizer_name='rmsprop', learning_rate=1e-3, is_global=True)
        run_config = cconfigs.create_and_validate_trainer_run_config(
            batch_size=batch_size, samples_per_epoch=samples_per_epoch)
        save_config = cconfigs.create_and_validate_trainer_save_config()
        trainer = Trainer(model=model,
                          project_dir=project_dir,
                          datasets=datasets,
                          run_config=run_config,
                          save_config=save_config,
                          optimization_config=optimization_config,
                          callbacks_handler_train=callbacks_handler_train,
                          callbacks_handler_eval=callbacks_handler_eval)
        trainer.build()
        self.assertIsInstance(trainer.estimator, tf.estimator.Estimator)
        self.assertIsInstance(trainer.estimator_train_spec,
                              tf.estimator.TrainSpec)
        self.assertIsInstance(trainer.estimator_eval_spec,
                              tf.estimator.EvalSpec)

        trainer.model.build_dna.assert_called_once_with(
            incoming_nucleotides=datasets['train'])

        for mode in ['train', 'eval']:
            callbacks_handler = trainer.callbacks_handler[mode]
            build_dna_handler_args, build_dna_handler_kwargs = (
                callbacks_handler.build_dna.call_args)
            self.assertEqual(0, len(build_dna_handler_args))
            self.assertSetEqual({'incoming_nucleotides'},
                                set(build_dna_handler_kwargs))
            incoming_nucleotides = set(
                build_dna_handler_kwargs['incoming_nucleotides'])
            incoming_nucleotides_must = list(
                trainer.model.all_nucleotides.values())
            incoming_nucleotides_must.append(datasets['train'])
            self.assertSetEqual(set(incoming_nucleotides_must),
                                incoming_nucleotides)
            self.assertEqual(mode, callbacks_handler.mode)
            self.assertEqual(
                os.path.join(trainer.project_dirs.callbacks, mode),
                callbacks_handler.log_dir)
            self.assertEqual(iters_per_epoch[mode],
                             callbacks_handler.number_iterations_per_epoch)
        self.assertTrue(
            os.path.isdir(
                os.path.join(project_dir, ProjectDirs.TRAINER.callbacks)))
        self.assertTrue(
            os.path.isdir(
                os.path.join(project_dir, ProjectDirs.TRAINER.summaries)))
        self.assertTrue(
            os.path.isdir(
                os.path.join(project_dir, ProjectDirs.TRAINER.saved_models)))
        self.assertTrue(
            os.path.isdir(
                os.path.join(project_dir, ProjectDirs.TRAINER.checkpoints)))