def main() -> None:
    """
    Entry point for the runner script: makes sure the InnerEye package is importable
    (adding it to sys.path if necessary), then starts the InnerEye runner from the
    repository root using the settings YAML file.
    """
    try:
        from InnerEye import ML  # noqa: 411
    except ImportError:
        # Only an ImportError means the package is not on sys.path. The original bare
        # `except:` would also have swallowed SystemExit/KeyboardInterrupt and masked
        # genuine errors raised while importing the package.
        add_package_to_sys_path_if_needed()
    from InnerEye.ML import runner
    from InnerEye.Common import fixed_paths
    # NOTE(review): `repository_root` is presumably a module-level constant defined
    # outside this view — confirm it is set before main() runs.
    print(f"Repository root: {repository_root}")
    runner.run(project_root=repository_root,
               yaml_config_file=fixed_paths.SETTINGS_YAML_FILE,
               post_cross_validation_hook=None)
# NOTE(review): this definition of `test_runner1` is shadowed by a later definition of
# the same name in this file, so it is dead code and never collected by pytest. It also
# looks like an older vintage (uses `runner.LOG_FILE_NAME` and a subscripted
# `ModelConfigLoader[ScalarModelBase]`, unlike the later copy) — confirm and delete or
# rename one of the two.
def test_runner1(test_output_dirs: OutputFolderForTests) -> None:
    """
    Test starting a classification model via the commandline runner. Test if we can provide overrides
    for parameters that live inside the DeepLearningConfig, and ones that are specific to classification models.
    :return:
    """
    # Value passed on the commandline to verify that overrides reach DeepLearningConfig.
    set_from_commandline = 12345
    # JSON-style string override for a classification-specific list parameter.
    scalar1 = '["label"]'
    model_name = "DummyClassification"
    # Sanity check: the default config has no non-image feature channels before overriding.
    initial_config = ModelConfigLoader[ScalarModelBase](
    ).create_model_config_from_name(model_name)
    assert initial_config.non_image_feature_channels == []
    output_root = str(test_output_dirs.root_dir)
    args = [
        "",
        "--model",
        model_name,
        "--train",
        "True",
        "--random_seed",
        str(set_from_commandline),
        "--non_image_feature_channels",
        scalar1,
        "--output_to",
        output_root,
        "--max_num_gpus",
        "1"
    ]
    # Simulate a commandline invocation by patching sys.argv for the runner.
    with mock.patch("sys.argv", args):
        config, _ = runner.run(
            project_root=fixed_paths.repository_root_directory(),
            yaml_config_file=fixed_paths.SETTINGS_YAML_FILE)
    assert isinstance(config, ScalarModelBase)
    assert config.model_name == "DummyClassification"
    assert config.get_effective_random_seed() == set_from_commandline
    assert config.non_image_feature_channels == ["label"]
    assert str(config.outputs_folder).startswith(output_root)
    # The runner should have created its log file inside the logs folder.
    assert (config.logs_folder / runner.LOG_FILE_NAME).exists()
def test_runner1(test_output_dirs: OutputFolderForTests) -> None:
    """
    Start a classification model via the commandline runner and verify that overrides
    for DeepLearningConfig fields and for classification-specific fields are applied,
    and that recovery checkpoints are written and pruned as configured.
    :return:
    """
    seed_override = 12345
    channels_override = '["label"]'
    name = "DummyClassification"
    # Before any override, the model's default config must have no non-image channels.
    default_config = ModelConfigLoader().create_model_config_from_name(name)
    assert default_config.non_image_feature_channels == []
    out_dir = str(test_output_dirs.root_dir)
    # Build the simulated commandline as flag/value pairs.
    cli_args = ["",
                "--model", name,
                "--train", "True",
                "--random_seed", str(seed_override),
                "--non_image_feature_channels", channels_override,
                "--output_to", out_dir,
                "--max_num_gpus", "1",
                "--recovery_checkpoint_save_interval", "2",
                "--recovery_checkpoints_save_last_k", "2",
                "--num_epochs", "6",
                ]
    with mock.patch("sys.argv", cli_args):
        loaded_config, _ = runner.run(
            project_root=fixed_paths.repository_root_directory(),
            yaml_config_file=fixed_paths.SETTINGS_YAML_FILE)
    assert isinstance(loaded_config, ScalarModelBase)
    assert loaded_config.model_name == "DummyClassification"
    assert loaded_config.get_effective_random_seed() == seed_override
    assert loaded_config.non_image_feature_channels == ["label"]
    assert str(loaded_config.outputs_folder).startswith(out_dir)
    assert (loaded_config.logs_folder / LOG_FILE_NAME).exists()
    # Check that we saved one checkpoint every second epoch and that we kept only that last 2 and that last.ckpt has
    # been renamed to best.ckpt
    assert len(os.listdir(loaded_config.checkpoint_folder)) == 3
    assert (loaded_config.checkpoint_folder
            / str(RECOVERY_CHECKPOINT_FILE_NAME + "_epoch=3" + CHECKPOINT_SUFFIX)).exists()
    assert (loaded_config.checkpoint_folder
            / str(RECOVERY_CHECKPOINT_FILE_NAME + "_epoch=5" + CHECKPOINT_SUFFIX)).exists()
    assert (loaded_config.checkpoint_folder / BEST_CHECKPOINT_FILE_NAME_WITH_SUFFIX).exists()
def test_runner2(test_output_dirs: OutputFolderForTests) -> None:
    """
    Start a classification model via the commandline runner, supplying the same
    arguments that would normally come from the YAML files, and verify the resulting
    config is a classification model with the expected name.
    :return:
    """
    out_dir = str(test_output_dirs.root_dir)
    cli_args = ["",
                "--model", "DummyClassification",
                "--train", "True",
                "--output_to", out_dir,
                "--max_num_gpus", "1",
                ]
    # Patch sys.argv so the runner parses our simulated commandline.
    with mock.patch("sys.argv", cli_args):
        loaded_config, _ = runner.run(
            project_root=fixed_paths.repository_root_directory(),
            yaml_config_file=fixed_paths.SETTINGS_YAML_FILE)
    assert isinstance(loaded_config, ScalarModelBase)
    assert loaded_config.name.startswith("DummyClassification")