Example #1
def test_open_missing(erepo):
    # Remove cache to make foo missing
    shutil.rmtree(erepo.dvc.cache.local.cache_dir)

    repo_url = "file://" + erepo.dvc.root_dir
    with pytest.raises(OutputFileMissingError):
        api.read(erepo.FOO, repo=repo_url)
Example #2
def test_read_with_subrepos(tmp_dir, scm, local_cloud, local_repo):
    tmp_dir.scm_gen("foo.txt", "foo.txt", commit="add foo.txt")
    subrepo = tmp_dir / "dir" / "subrepo"
    make_subrepo(subrepo, scm, config=local_cloud.config)
    with subrepo.chdir():
        subrepo.scm_gen({"lorem": "lorem"}, commit="add lorem")
        subrepo.dvc_gen({"dir": {"file.txt": "file.txt"}}, commit="add dir")
        subrepo.dvc_gen("dvc-file", "dvc-file", commit="add dir")
        subrepo.dvc.push()

    repo_path = None if local_repo else f"file:///{tmp_dir}"
    subrepo_path = os.path.join("dir", "subrepo")

    assert api.read("foo.txt", repo=repo_path) == "foo.txt"
    assert (
        api.read(os.path.join(subrepo_path, "lorem"), repo=repo_path)
        == "lorem"
    )
    assert (
        api.read(os.path.join(subrepo_path, "dvc-file"), repo=repo_path)
        == "dvc-file"
    )
    assert (
        api.read(os.path.join(subrepo_path, "dir", "file.txt"), repo=repo_path)
        == "file.txt"
    )
Example #3
def test_missing(remote_url, tmp_dir, dvc):
    tmp_dir.dvc_gen("foo", "foo")
    run_dvc("remote", "add", "-d", "upstream", remote_url)

    # Remove cache to make foo missing
    shutil.rmtree(dvc.cache.local.cache_dir)

    with pytest.raises(FileMissingError):
        api.read("foo")
Example #4
def test_missing(repo_dir, dvc_repo, remote_url):
    run_dvc("add", repo_dir.FOO)
    run_dvc("remote", "add", "-d", "upstream", remote_url)

    # Remove cache to make foo missing
    shutil.rmtree(dvc_repo.cache.local.cache_dir)

    with pytest.raises(FileMissingError):
        api.read(repo_dir.FOO)
Example #5
def test_missing(tmp_dir, dvc, remote):
    tmp_dir.dvc_gen("foo", "foo")

    # Remove cache to make foo missing
    remove(dvc.cache.local.cache_dir)

    api.read("foo")

    remove("foo")

    with pytest.raises(FileMissingError):
        api.read("foo")
Example #6
def test_missing(remote_url, tmp_dir, dvc):
    tmp_dir.dvc_gen("foo", "foo")
    run_dvc("remote", "add", "-d", TEST_REMOTE, remote_url)
    ensure_dir(dvc, remote_url)

    # Remove cache to make foo missing
    remove(dvc.cache.local.cache_dir)

    api.read("foo")

    remove("foo")

    with pytest.raises(FileMissingError):
        api.read("foo")
Example #7
def test_open_not_cached(dvc):
    metric_file = "metric.txt"
    metric_content = "0.6"
    metric_code = "open('{}', 'w').write('{}')".format(metric_file,
                                                       metric_content)
    dvc.run(
        metrics_no_cache=[metric_file],
        cmd=('python -c "{}"'.format(metric_code)),
    )

    with api.open(metric_file) as fd:
        assert fd.read() == metric_content

    os.remove(metric_file)
    with pytest.raises(FileMissingError):
        api.read(metric_file)
Example #8
def test_api_missing_local_cache_exists_on_remote(tmp_dir, scm, dvc,
                                                  as_external, remote, files,
                                                  to_read):
    tmp_dir.dvc_gen(files, commit="DVC track files")
    dvc.push()

    # Remove cache to make foo missing
    remove(dvc.odb.local.cache_dir)
    remove(first(files))

    repo_url = f"file://{tmp_dir}" if as_external else None
    file_content = get_in(files, to_read.split(os.sep))
    assert api.read(to_read, repo=repo_url) == file_content
Example #9
def test_open_external(repo_dir, dvc_repo, erepo, remote_url):
    erepo.dvc.scm.checkout("branch")
    _set_remote_url_and_commit(erepo.dvc, remote_url)
    erepo.dvc.scm.checkout("master")
    _set_remote_url_and_commit(erepo.dvc, remote_url)

    erepo.dvc.push(all_branches=True)

    # Remove cache to force download
    shutil.rmtree(erepo.dvc.cache.local.cache_dir)

    # Using file url to force clone to tmp repo
    repo_url = "file://" + erepo.dvc.root_dir
    with api.open("version", repo=repo_url) as fd:
        assert fd.read() == "master"

    assert api.read("version", repo=repo_url, rev="branch") == "branch"
Example #10
def resnext101_32x4d(num_classes=1000, pretrained='imagenet'):
    model = ResNeXt101_32x4d(num_classes=num_classes)
    if pretrained is not None:
        settings = pretrained_settings['resnext101_32x4d'][pretrained]
        assert num_classes == settings['num_classes'], \
            "num_classes should be {}, but is {}".format(settings['num_classes'], num_classes)

        weights = dvcapi.read('model_weights/resnext101_32x4d-29e315fa.pth',
                              remote='gsremote',
                              mode="rb",
                              encoding=None)
        model_weights = torch.load(io.BytesIO(weights))
        model.load_state_dict(model_weights)
        model.input_space = settings['input_space']
        model.input_size = settings['input_size']
        model.input_range = settings['input_range']
        model.mean = settings['mean']
        model.std = settings['std']

    return model
Example #11
def test_open_external(tmp_dir, erepo_dir, cloud):
    erepo_dir.add_remote(config=cloud.config)

    with erepo_dir.chdir():
        erepo_dir.dvc_gen("version", "master", commit="add version")

        with erepo_dir.branch("branch", new="True"):
            # NOTE: need file to be other size for Mac
            erepo_dir.dvc_gen("version", "branchver", commit="add version")

    erepo_dir.dvc.push(all_branches=True)

    # Remove cache to force download
    remove(erepo_dir.dvc.cache.local.cache_dir)

    # Using file url to force clone to tmp repo
    repo_url = f"file://{erepo_dir}"
    with api.open("version", repo=repo_url) as fd:
        assert fd.read() == "master"

    assert api.read("version", repo=repo_url, rev="branch") == "branchver"
Example #12
def test_open_external(remote_url, erepo_dir):
    _set_remote_url_and_commit(erepo_dir.dvc, remote_url)

    with erepo_dir.chdir():
        erepo_dir.dvc_gen("version", "master", commit="add version")

        with erepo_dir.branch("branch", new="True"):
            # NOTE: need file to be other size for Mac
            erepo_dir.dvc_gen("version", "branchver", commit="add version")

    erepo_dir.dvc.push(all_branches=True)

    # Remove cache to force download
    shutil.rmtree(erepo_dir.dvc.cache.local.cache_dir)

    # Using file url to force clone to tmp repo
    repo_url = "file://{}".format(erepo_dir)
    with api.open("version", repo=repo_url) as fd:
        assert fd.read() == "master"

    assert api.read("version", repo=repo_url, rev="branch") == "branchver"
Example #13
def init_detector(config, device='cuda:0'):
    """Initialize a detector from a config file, loading its weights via DVC.

    Args:
        config (str or :obj:`mmcv.Config`): Config file path or the config
            object.
        device (str): Device to place the model on, e.g. 'cuda:0'.

    Returns:
        nn.Module: The constructed detector.
    """
    if isinstance(config, str):
        config = Config.fromfile(config)
    elif not isinstance(config, Config):
        raise TypeError('config must be a filename or Config object, '
                        'but got {}'.format(type(config)))
    config.model.pretrained = None
    model = build_detector(config.model, test_cfg=config.test_cfg)

    weights = dvcapi.read('model_weights/faster_rcnn_r50_c4_2x-6e4fdf4f.pth',
                          remote='gsremote',
                          mode="rb",
                          encoding=None)
    checkpoint = torch.load(io.BytesIO(weights))
    state_dict = checkpoint['state_dict']

    model.load_state_dict(state_dict, strict=False)

    if 'CLASSES' in checkpoint['meta']:
        model.CLASSES = checkpoint['meta']['CLASSES']
    else:
        model.CLASSES = get_classes('coco')

    model.cfg = config  # save the config in the model for convenience
    model.to(device)
    model.eval()
    return model
Example #14
def init(model_path, metadata):
    # Deserialize the pickled model and preprocessing pipeline that are
    # tracked in the DVC repository referenced by the metadata.
    ctx["model"] = pickle.loads(
        api.read(metadata["model_path"], metadata["dvc_repo"], mode="rb"))
    ctx["pipeline"] = pickle.loads(
        api.read(metadata["pipeline_path"], metadata["dvc_repo"], mode="rb"))