# Example 1
def test_TestSectionLoader_should_load_data_from_path_override_data():
    """TestSectionLoader should use explicitly-passed seismic/label paths.

    Builds a throwaway dataset layout (volume + empty split file) inside a
    temporary directory, then constructs the loader with path overrides and
    checks the loaded volume dimensions.
    """
    with tempfile.TemporaryDirectory() as data_dir:
        volume_dir = os.path.join(data_dir, "volume_name")
        os.makedirs(volume_dir)
        os.makedirs(os.path.join(data_dir, "splits"))

        seismic_path = os.path.join(volume_dir, "seismic.npy")
        generate_npy_files(seismic_path, np.zeros([IL, XL, D]))

        label_path = os.path.join(volume_dir, "labels.npy")
        generate_npy_files(label_path, np.ones([IL, XL, D]))

        # The loader expects the split file to exist, even if empty.
        split_file = os.path.join(data_dir, "splits", "section_volume_name.txt")
        open(split_file, "a").close()

        TestSectionLoader = get_test_loader(config)
        config.merge_from_list(["DATASET.ROOT", data_dir])
        test_set = TestSectionLoader(
            config,
            split="volume_name",
            is_transform=True,
            augmentations=None,
            seismic_path=seismic_path,
            label_path=label_path,
        )

        assert_dimensions(test_set)
# Example 2
def get_config(
    config_paths: Optional[Union[List[str], str]] = None,
    opts: Optional[list] = None,
) -> CN:
    r"""Build a frozen config: defaults, overridden by :p:`config_paths`,
    then by :p:`opts`.

    :param config_paths: List of config paths or a single string containing
        a separator-joined list of config paths.
    :param opts: Config options (keys, values) in a flat list (e.g., passed
        from the command line), such as :py:`opts = ['FOO.BAR', 0.5]`.
        Useful for parameter sweeping or quick tests.
    """
    config = _C.clone()

    if config_paths:
        # Normalize to a list: a string may hold several separator-joined paths.
        if isinstance(config_paths, str):
            if CONFIG_FILE_SEPARATOR in config_paths:
                config_paths = config_paths.split(CONFIG_FILE_SEPARATOR)
            else:
                config_paths = [config_paths]
        # Later files win over earlier ones.
        for path in config_paths:
            config.merge_from_file(path)

    if opts:
        # Command-line overrides have the highest precedence.
        config.merge_from_list(opts)

    config.freeze()
    return config
# Example 3
def test_TrainPatchLoaderWithDepth_should_load_with_one_train_and_label_file(
        tmpdir):
    """
    Check for successful class instantiation w/ single npy file for train & label
    """
    # Setup: fake dataset layout (volume dir + empty split file) under tmpdir.
    volume_dir = os.path.join(tmpdir, "volume_name")
    os.makedirs(volume_dir)
    os.makedirs(os.path.join(tmpdir, "splits"))

    seismic_path = os.path.join(volume_dir, "seismic.npy")
    generate_npy_files(seismic_path, np.zeros([IL, XL, D]))

    label_path = os.path.join(volume_dir, "labels.npy")
    generate_npy_files(label_path, np.ones([IL, XL, D]))

    # The split file only needs to exist; its contents are irrelevant here.
    open(os.path.join(tmpdir, "splits", "patch_volume_name.txt"), "a").close()

    config.merge_from_list(["DATASET.ROOT", str(tmpdir)])

    # Test
    train_set = TrainPatchLoaderWithDepth(
        config,
        split="volume_name",
        is_transform=True,
        augmentations=None,
        seismic_path=seismic_path,
        label_path=label_path,
    )

    # Depth-aware loader pads the depth axis by PATCH_SIZE on each side.
    expected_shape = (IL, XL, D + 2 * config.TRAIN.PATCH_SIZE)
    assert train_set.labels.shape == expected_shape
    assert train_set.seismic.shape == expected_shape
# Example 4
def test_TrainPatchLoaderWithDepth_should_fail_on_missing_label_file(tmpdir):
    """
    Check for exception when training param is empty
    """
    # Setup: write the seismic volume but deliberately omit labels.npy.
    volume_dir = os.path.join(tmpdir, "volume_name")
    os.makedirs(volume_dir)
    os.makedirs(os.path.join(tmpdir, "splits"))

    generate_npy_files(os.path.join(volume_dir, "seismic.npy"),
                       np.zeros([IL, XL, D]))

    # Empty split file — only its existence matters.
    open(os.path.join(tmpdir, "splits", "patch_volume_name.txt"), "a").close()

    config.merge_from_list(["DATASET.ROOT", str(tmpdir)])

    # Test: the loader must raise because the label file is missing.
    with pytest.raises(Exception) as excinfo:
        _ = TrainPatchLoaderWithDepth(
            config,
            split="volume_name",
            is_transform=True,
            augmentations=None,
            seismic_path=os.path.join(volume_dir, "seismic.npy"),
            label_path=os.path.join(volume_dir, "labels.npy"),
        )

    assert "does not exist" in str(excinfo.value)
# Example 5
def get_config(
    config_paths: Optional[Union[List[str], str]] = None,
    opts: Optional[list] = None,
) -> CN:
    r"""Build a frozen config: defaults, overridden by :p:`config_paths`,
    then by :p:`opts`, with MULTI_TASK.TASKS expanded into full config nodes.

    :param config_paths: List of config paths or a single string containing
        a separator-joined list of config paths.
    :param opts: Config options (keys, values) in a flat list (e.g., passed
        from the command line), such as :py:`opts = ['FOO.BAR', 0.5]`.
        Useful for parameter sweeping or quick tests.
    """
    config = _C.clone()

    if config_paths:
        # Normalize to a list: a string may hold several separator-joined paths.
        if isinstance(config_paths, str):
            if CONFIG_FILE_SEPARATOR in config_paths:
                config_paths = config_paths.split(CONFIG_FILE_SEPARATOR)
            else:
                config_paths = [config_paths]
        for path in config_paths:
            config.merge_from_file(path)

    if opts:
        config.merge_from_list(opts)

    # Multi-task handling: turn each raw task entry into a config node seeded
    # with the `TASK` defaults, then overlaid with the task's own values.
    expanded_tasks = []
    for raw_task in config.MULTI_TASK.TASKS:
        node = _C.TASK.clone()
        # Each task may define its own dataset; otherwise the global one is used.
        node.DATASET = config.DATASET.clone()
        # Same fallback for the episode iterator options.
        node.EPISODE_ITERATOR_OPTIONS = (
            config.ENVIRONMENT.ITERATOR_OPTIONS.clone()
        )
        # Task-specific values override the defaults seeded above.
        node.merge_from_other_cfg(CN(init_dict=raw_task))
        expanded_tasks.append(node)
    config.MULTI_TASK.TASKS = expanded_tasks

    config.freeze()
    return config
# Example 6
def load_config() -> yacs.config.CfgNode:
    """Parse CLI arguments and return a frozen config.

    Precedence: defaults < ``--config`` file < trailing ``options`` overrides.
    Falls back to CPU (and disables pinned memory) when CUDA is unavailable.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--config', type=str)
    # Everything after the known flags is treated as key/value overrides.
    parser.add_argument('options', default=None, nargs=argparse.REMAINDER)
    args = parser.parse_args()

    config = get_default_config()
    if args.config is not None:
        config.merge_from_file(args.config)
    config.merge_from_list(args.options)

    if not torch.cuda.is_available():
        # No GPU: run on CPU; pinned memory is only useful for GPU transfers.
        config.device = 'cpu'
        for dataloader_cfg in (config.train.train_dataloader,
                               config.train.val_dataloader,
                               config.test.dataloader):
            dataloader_cfg.pin_memory = False

    config.freeze()
    return config
# Example 7
def test_TestSectionLoader_should_load_data_from_test2_set():
    """TestSectionLoader should locate the 'test2' volumes under
    DATASET.ROOT/test_once when only a split name is given (no path overrides).
    """
    with tempfile.TemporaryDirectory() as data_dir:
        os.makedirs(os.path.join(data_dir, "test_once"))
        os.makedirs(os.path.join(data_dir, "splits"))

        seismic = np.zeros([IL, XL, D])
        generate_npy_files(
            os.path.join(data_dir, "test_once", "test2_seismic.npy"), seismic)

        labels = np.ones([IL, XL, D])
        generate_npy_files(
            os.path.join(data_dir, "test_once", "test2_labels.npy"), labels)

        # The loader requires the split file to exist, even if empty.
        txt_path = os.path.join(data_dir, "splits", "section_test2.txt")
        open(txt_path, "a").close()

        TestSectionLoader = get_test_loader(config)
        config.merge_from_list(["DATASET.ROOT", data_dir])
        test_set = TestSectionLoader(config, split="test2")

        assert_dimensions(test_set)