Example no. 1
0
def run(force_rerun: bool, params_path: Optional[str],
        changed_params_path: Optional[str], device_id: Optional[int]) -> None:
    """Load the classifier config, optionally patch it, and run training.

    Args:
        force_rerun: rerun even if results for this config already exist.
        params_path: path to a params module; 'cl_default_config' is the
            fallback module name passed to ``get_params_module``.
        changed_params_path: optional path to a JSON file whose key/value
            pairs are applied on top of the config via ``patch_config``.
        device_id: explicit GPU id to use; when None, falls back to
            ``get_current_device()``.
    """
    from contextlib import nullcontext

    module = get_params_module(params_path, 'cl_default_config')
    config = module.classifier_config
    if changed_params_path:
        with open(changed_params_path, 'r') as f:
            patch = dict(jsons.loads(f.read()))
        # Patch outside the `with` — the file is only needed for reading.
        config = patch_config(config, patch)
    # Pick the device context once so the run call is not duplicated per branch.
    if gpu.gpu_available():
        gpu_id_to_use = device_id if device_id is not None else get_current_device()
        logger.debug(f'Using gpu with id: {gpu_id_to_use}')
        device_ctx = device(gpu_id_to_use)
    else:
        device_ctx = nullcontext()
    with device_ctx:
        run_on_device(config, force_rerun)
Example no. 2
0
def run(find_lr: bool, force_rerun: bool, params_path: Optional[str],
        changed_params_path: Optional[str], device_id: Optional[int]) -> None:
    """Load the LM (or LR-finder) config, optionally patch it, and run training.

    Args:
        find_lr: when True, load the learning-rate-finder config
            (``lm_lr_config``) instead of the regular LM config.
        force_rerun: rerun even if results for this config already exist.
        params_path: path to a params module; the default module name
            depends on ``find_lr``.
        changed_params_path: optional path to a JSON file whose key/value
            pairs are applied on top of the config via ``patch_config``.
        device_id: explicit GPU id to use; when None, falls back to
            ``get_current_device()``.
    """
    from contextlib import nullcontext

    if find_lr:
        module = get_params_module(params_path, 'lm_lr_default_config')
        config = module.lm_lr_config
    else:
        module = get_params_module(params_path, 'lm_default_config')
        config = module.lm_config
    if changed_params_path:
        with open(changed_params_path, 'r') as f:
            patch = dict(jsons.loads(f.read()))
        # Patch outside the `with` — the file is only needed for reading.
        config = patch_config(config, patch)
    logger.info(f'Using config: {jsons.dumps(config)}')
    # Pick the device context once so the run call is not duplicated per branch.
    if gpu.gpu_available():
        gpu_id_to_use = device_id if device_id is not None else get_current_device()
        # Log the chosen gpu, consistent with the classifier entry point.
        logger.debug(f'Using gpu with id: {gpu_id_to_use}')
        device_ctx = device(gpu_id_to_use)
    else:
        device_ctx = nullcontext()
    with device_ctx:
        run_on_device(config, find_lr, force_rerun)
Example no. 3
0
    def test_patch(self):
        """Patching a nested dotted key updates the matching attribute."""
        new_lr = 1000.0
        patched = patch_config(classifier_config, {'training.lrs.base_lr': new_lr})
        self.assertEqual(patched.training.lrs.base_lr, new_lr)
Example no. 4
0
 def test_patch_no_attribute2(self):
     """A patch whose first path segment does not exist raises AttributeError."""
     bad_patch = {'not_existent_attr.lrs.base_lr': 1000.0}
     with self.assertRaises(AttributeError):
         patch_config(classifier_config, bad_patch)
Example no. 5
0
 def test_patch_no_attribute1(self):
     """A patch whose leaf path segment does not exist raises AttributeError."""
     bad_patch = {'training.lrs.not_existent_attr': 1000.0}
     with self.assertRaises(AttributeError):
         patch_config(classifier_config, bad_patch)