예제 #1
0
def check_save_dotfiles(to_git, save_path, path):
    """Check that ``Dataset.save`` commits all files found under `save_path`.

    Parameters
    ----------
    to_git : bool
        If True, files are expected to be committed straight to git (no
        annex key in their annexinfo); otherwise they must be annexed.
    save_path : str or None
        Subdirectory (relative to `path`) to save, or None to save the
        whole dataset.
    path : str
        Root of the (pre-populated) dataset; ``create(force=True)`` turns
        it into a dataset without wiping existing content.
    """
    # Note: Take relpath to work with Travis "TMPDIR=/var/tmp/sym\ link" run.
    paths = [
        Path(op.relpath(op.join(root, fname), path))
        for root, _, fnames in os.walk(op.join(path, save_path or ""))
        for fname in fnames
    ]
    # sanity: the walk must have found something to check
    ok_(paths)
    ds = Dataset(path).create(force=True)
    ds.save(save_path, to_git=to_git)
    if save_path is None:
        # a full save must leave the repository clean
        assert_repo_status(ds.path)
    repo = ds.repo
    annexinfo = repo.get_content_annexinfo()

    def _check(fn, p):
        # `fn` asserts presence/absence of an annex "key" for file `p`
        fn("key", annexinfo[repo.pathobj / p], p)

    if to_git:

        def check(p):
            _check(assert_not_in, p)
    else:

        def check(p):
            _check(assert_in, p)

    # Use a distinct loop variable: the original iterated with `path`,
    # clobbering the `path` parameter for the remainder of the function.
    for p in paths:
        check(p)
예제 #2
0
def test_add_archive_use_archive_dir(repo_path=None):
    """Test `add_archive_content` extraction location options.

    Covers: refusing untracked archives with an informative error,
    extracting into the current directory (`use_current_dir=True`),
    and extracting under the archive's own directory (the default).
    """
    ds = Dataset(repo_path).create(force=True)
    with chpwd(repo_path):
        # Let's add first archive to the repo with default setting
        archive_path = opj('4u', '1.tar.gz')
        # check it gives informative error if archive is not already added.
        res = add_archive_content(archive_path, on_failure='ignore')
        # Explicit if/else instead of the original backslash-continued
        # conditional expression, whose hanging-indent layout was fragile
        # and easy to break silently when edited.
        if on_windows:
            message = "Can not add an untracked archive. Run 'datalad save 4u\\1.tar.gz'"
        else:
            message = "Can not add an untracked archive. Run 'datalad save 4u/1.tar.gz'"
        assert_in_results(res,
                          action='add-archive-content',
                          message=message,
                          status='impossible')

        with swallow_outputs():
            ds.save(archive_path)

        ok_archives_caches(ds.path, 0)
        # with use_current_dir the content lands in CWD, not under '4u'
        add_archive_content(archive_path,
                            strip_leading_dirs=True,
                            use_current_dir=True)
        ok_(not exists(opj('4u', '1 f.txt')))
        ok_file_under_git(ds.path, '1 f.txt', annexed=True)
        ok_archives_caches(ds.path, 0)

        # and now let's extract under archive dir
        add_archive_content(archive_path, strip_leading_dirs=True)
        ok_file_under_git(ds.path, opj('4u', '1 f.txt'), annexed=True)
        ok_archives_caches(ds.path, 0)

        add_archive_content(opj('4u', 'sub.tar.gz'))
        ok_file_under_git(ds.path, opj('4u', 'sub', '2 f.txt'), annexed=True)
        ok_archives_caches(ds.path, 0)
예제 #3
0
def test_sibling_enable_sameas(repo=None, clone_path=None):
    """Enabling a special remote in a clone requires its (correct) name.

    Assumes `repo` is a fixture object exposing ``.path`` for a dataset
    pre-configured with special remotes "r_dir" and "r_rsync" -- TODO
    confirm against the fixture that supplies it.
    """
    ds = Dataset(repo.path)
    create_tree(ds.path, {"f0": "0"})
    ds.save(path="f0")
    # publish content to the special remote and drop it locally, so the
    # clone below can only obtain it after enabling the remote
    ds.push(["f0"], to="r_dir")
    ds.repo.drop(["f0"])

    ds_cloned = clone(ds.path, clone_path)

    assert_false(ds_cloned.repo.file_has_content("f0"))
    # does not work without a name
    res = ds_cloned.siblings(
        action="enable",
        result_renderer='disabled',
        on_failure='ignore',
    )
    assert_in_results(res,
                      status='error',
                      message='require `name` of sibling to enable')
    # does not work with the wrong name
    res = ds_cloned.siblings(
        action="enable",
        name='wrong',
        result_renderer='disabled',
        on_failure='ignore',
    )
    assert_in_results(res,
                      status='impossible',
                      message=("cannot enable sibling '%s', not known",
                               'wrong'))
    # works with the right name
    res = ds_cloned.siblings(action="enable", name="r_rsync")
    assert_status("ok", res)
    # content becomes retrievable once the remote is enabled
    ds_cloned.get(path=["f0"])
    ok_(ds_cloned.repo.file_has_content("f0"))
예제 #4
0
File: test_run.py  Project: datalad/datalad
def test_io_substitution(path=None):
    """`run` placeholder substitution works for list and string specs.

    Assumes the fixture pre-populated `path` with the files listed in
    `files` (the ``create(force=True)`` + ``save()`` below commits them)
    -- TODO confirm against the decorator/fixture providing `path`.
    """
    files = [OBSCURE_FILENAME + ".t", "normal.txt"]
    ds = Dataset(path).create(force=True)
    ds.save()
    # prefix the content of any given file with 'mod::'
    cmd = "import sys; from pathlib import Path; t = [(Path(p), 'mod::' + Path(p).read_text()) for p in sys.argv[1:]]; [k.write_text(v) for k, v in t]"
    cmd_str = "{} -c \"{}\" {{inputs}}".format(sys.executable, cmd)
    # this should run and not crash with permission denied
    # ({inputs} is itself substituted from {outputs}, given as a list)
    ds.run(cmd_str,
           inputs=["{outputs}"],
           outputs=["*.t*"],
           result_renderer='disabled')
    # all filecontent got the prefix
    for f in files:
        ok_((ds.pathobj / f).read_text().startswith('mod::'))

    # we could just ds.rerun() now, and it should work, but this would make
    # rerun be a dependency of a core test
    # instead just double-run, but with a non-list input-spec.
    # should have same outcome
    ds.run(cmd_str,
           inputs="{outputs}",
           outputs="*.t*",
           result_renderer='disabled')
    for f in files:
        ok_((ds.pathobj / f).read_text().startswith('mod::mod::'))
예제 #5
0
File: test_log.py  Project: datalad/datalad
def test_with_result_progress_generator():
    """`with_result_progress` must wrap plain functions and generators alike."""
    # Tests ability for the decorator to decorate a regular function
    # or a generator function (then it returns a generator function)

    @with_result_progress
    def func(l):
        return l

    # side-channel to observe how far the generator has advanced
    generated = []
    @with_result_progress
    def gen(l):
        for i in l:
            generated.append(i)
            yield i

    recs = [{'status': 'ok', 'unrelated': i} for i in range(2)]
    # still works for a func and returns provided list
    ok_(not inspect.isgeneratorfunction(func))
    assert_equal(func(recs), recs)

    # generator should still yield and next iteration should only happen
    # when requested
    ok_(inspect.isgeneratorfunction(gen))
    g = gen(recs)

    ok_generator(g)
    assert_equal(generated, [])  # nothing yet
    # laziness: each next() advances the underlying generator exactly once
    assert_equal(next(g), recs[0])
    assert_equal(generated, recs[:1])
    assert_equal(next(g), recs[1])
    assert_equal(generated, recs)

    # just to make sure all good to redo
    assert_equal(list(gen(recs)), recs)
예제 #6
0
def test_url_base():
    """Basic construction, field access, equality and repr of the URL class."""
    # Basic checks
    # mixing a URL string with explicit field kwargs is rejected
    assert_raises(ValueError,
                  URL,
                  "http://example.com",
                  hostname='example.com')
    url = URL("http://example.com")
    eq_(url.hostname, 'example.com')
    eq_(url.scheme, 'http')
    eq_(url.port, '')  # not specified -- empty strings
    eq_(url.username, '')  # not specified -- empty strings
    eq_(repr(url), "URL(hostname='example.com', scheme='http')")
    eq_(url, "http://example.com")  # automagic coercion in __eq__

    neq_(URL(), URL(hostname='x'))

    # a bare word is not treated as a hostname
    smth = URL('smth')
    eq_(smth.hostname, '')
    ok_(bool(smth))
    nok_(bool(URL()))  # an empty URL is falsy

    assert_raises(ValueError, url._set_from_fields, unknown='1')

    with swallow_logs(new_level=logging.WARNING) as cml:
        # we don't "care" about params ATM so there is a warning if there are any
        purl = URL("http://example.com/;param")
        eq_(str(purl),
            'http://example.com/;param')  # but we do maintain original string
        assert_in('ParseResults contains params', cml.out)
        # params are dropped from the normalized rendering
        eq_(purl.as_str(), 'http://example.com/')
예제 #7
0
def test_get_most_obscure_supported_name():
    """The obscure-name helper returns a prefixed, non-trivial filename."""
    n = get_most_obscure_supported_name()
    ok_startswith(n, OBSCURE_PREFIX)
    ok_(len(OBSCURE_FILENAMES) > 1)
    # from more complex to simpler ones
    ok_(len(OBSCURE_FILENAMES[0]) > len(OBSCURE_FILENAMES[-1]))
    # printed for debugging on platforms with unusual filesystem support
    print(repr(n))
예제 #8
0
def test_install_dataset_from_just_source(src_repo=None, path=None):
    """`install(source=...)` without an explicit path clones into CWD.

    Exercised against a local path, a file:// URL and (network permitting)
    a GitHub URL of an equivalent repository.
    """
    src_ds = Dataset(src_repo).create(result_renderer='disabled', force=True)
    src_ds.save(['INFO.txt', 'test.dat'], to_git=True)
    src_ds.save('test-annex.dat', to_git=False)
    # equivalent repo on github:
    src_url = "https://github.com/datalad/testrepo--basic--r1.git"
    sources = [
        src_ds.path,
        get_local_file_url(src_ds.path, compatibility='git')
    ]
    if not dl_cfg.get('datalad.tests.nonetwork'):
        sources.append(src_url)

    for url in sources:

        # no `path` argument -- clone must land inside the CWD
        with chpwd(path, mkdir=True):
            ds = install(source=url)

        ok_startswith(ds.path, path)
        ok_(ds.is_installed())
        ok_(GitRepo.is_valid_repo(ds.path))
        assert_repo_status(ds.path, annex=None)
        assert_in('INFO.txt', ds.repo.get_indexed_files())

        # cleanup before next iteration
        rmtree(path)
예제 #9
0
def test_install_known_subdataset(src=None, path=None):
    """Install a registered ('known') subdataset by its relative path.

    Also verifies that the correct submodule is obtained (not a fresh
    repository) and that data can subsequently be fetched via `get`.
    """
    _mk_submodule_annex(src, fname="test-annex.dat", fcontent="whatever")

    # get the superdataset:
    ds = install(path, source=src)
    # subdataset not installed:
    subds = Dataset(opj(path, 'subm 1'))
    assert_false(subds.is_installed())
    assert_in('subm 1', ds.subdatasets(state='absent', result_xfm='relpaths'))
    assert_not_in('subm 1',
                  ds.subdatasets(state='present', result_xfm='relpaths'))
    # install it:
    ds.install('subm 1')
    ok_(subds.is_installed())
    ok_(AnnexRepo.is_valid_repo(subds.path, allow_noninitialized=False))
    # Verify that it is the correct submodule installed and not
    # new repository initiated
    # (stray trailing comma removed: it wrapped the statement in a
    # one-element tuple -- harmless at runtime, but a lint error)
    assert_in("test-annex.dat", subds.repo.get_indexed_files())
    assert_not_in('subm 1',
                  ds.subdatasets(state='absent', result_xfm='relpaths'))
    assert_in('subm 1', ds.subdatasets(state='present', result_xfm='relpaths'))

    # now, get the data by reinstalling with -g:
    ok_(subds.repo.file_has_content('test-annex.dat') is False)
    with chpwd(ds.path):
        result = get(path='subm 1', dataset=os.curdir)
        assert_in_results(result, path=opj(subds.path, 'test-annex.dat'))
        ok_(subds.repo.file_has_content('test-annex.dat') is True)
        ok_(subds.is_installed())
예제 #10
0
def test_ssh_get_connection():
    """SSHManager hands out (and caches) connections for valid SSH targets.

    Behavior differs between multiplex-capable and non-multiplex setups;
    `_ssh_manager_is_multiplex` is presumably a module-level capability
    flag -- confirm against the test module's setup.
    """
    manager = SSHManager()
    if _ssh_manager_is_multiplex:
        assert manager._socket_dir is None, \
            "Should be unset upon initialization. Got %s" % str(manager._socket_dir)
    c1 = manager.get_connection('ssh://datalad-test')

    if _ssh_manager_is_multiplex:
        assert manager._socket_dir, "Should be set after interactions with the manager"
        assert_is_instance(c1, MultiplexSSHConnection)
        # subsequent call returns the very same instance:
        ok_(manager.get_connection('ssh://datalad-test') is c1)
    else:
        assert_is_instance(c1, NoMultiplexSSHConnection)

    # fail on malformed URls (meaning: our fancy URL parser can't correctly
    # deal with them):
    #assert_raises(ValueError, manager.get_connection, 'localhost')
    # we now allow those simple specifications of host to get_connection
    c2 = manager.get_connection('datalad-test')
    assert_is_instance(c2, SSHConnection)

    # but should fail if it looks like something else
    assert_raises(ValueError, manager.get_connection, 'datalad-test/')
    assert_raises(ValueError, manager.get_connection, ':datalad-test')

    # we can do what urlparse cannot
    # assert_raises(ValueError, manager.get_connection, 'someone@localhost')
    # next one is considered a proper url by urlparse (netloc:'',
    # path='/localhost), but eventually gets turned into SSHRI(hostname='ssh',
    # path='/localhost') -- which is fair IMHO -> invalid test
    # assert_raises(ValueError, manager.get_connection, 'ssh:/localhost')

    manager.close()
예제 #11
0
def test_non_master_branch(src_path=None, target_path=None):
    """`create_sibling`/`push` must preserve non-default branch names.

    Each (sub)dataset's HEAD on the sibling should match whatever branch
    was checked out in the source, even when the default branch never
    existed there.
    """
    src_path = Path(src_path)
    target_path = Path(target_path)

    ds_a = Dataset(src_path).create()
    # Rename rather than checking out another branch so that the default branch
    # doesn't exist in any state.
    ds_a.repo.call_git(["branch", "-m", DEFAULT_BRANCH, "other"])
    (ds_a.pathobj / "afile").write_text("content")
    sa = ds_a.create("sub-a")
    # sub-a gets its own non-default branch; sub-b stays on the default
    sa.repo.checkout("other-sub", ["-b"])
    ds_a.create("sub-b")

    ds_a.save()
    ds_a.create_sibling(name="sib",
                        recursive=True,
                        sshurl="ssh://datalad-test" + str(target_path / "b"))
    ds_a.push(to="sib", data="anything")

    ds_b = Dataset(target_path / "b")

    def get_branch(repo):
        # prefer the corresponding branch (adjusted/managed modes)
        return repo.get_corresponding_branch() or repo.get_active_branch()

    # The HEAD for the create-sibling matches what the branch was in
    # the original repo.
    eq_(get_branch(ds_b.repo), "other")
    ok_((ds_b.pathobj / "afile").exists())

    eq_(get_branch(Dataset(target_path / "b" / "sub-a").repo), "other-sub")
    eq_(get_branch(Dataset(target_path / "b" / "sub-b").repo), DEFAULT_BRANCH)
예제 #12
0
def test_install_list(path=None, top_path=None):
    """Installing several known subdatasets at once via a path list.

    Passing `source` together with multiple paths must be rejected, and a
    follow-up `get` on already-installed subdatasets yields 'notneeded'.
    """
    _mk_submodule_annex(path, fname="test-annex.dat", fcontent="whatever")

    # we want to be able to install several things, if these are known
    # (no 'source' allowed). Therefore first toplevel:
    ds = install(top_path, source=path, recursive=False)
    assert_not_in('annex.hardlink', ds.config)
    ok_(ds.is_installed())
    sub1 = Dataset(opj(top_path, 'subm 1'))
    sub2 = Dataset(opj(top_path, '2'))
    ok_(not sub1.is_installed())
    ok_(not sub2.is_installed())

    # fails, when `source` is passed:
    assert_raises(ValueError,
                  ds.install,
                  path=['subm 1', '2'],
                  source='something')

    # now should work:
    result = ds.install(path=['subm 1', '2'], result_xfm='paths')
    ok_(sub1.is_installed())
    ok_(sub2.is_installed())
    eq_(set(result), {sub1.path, sub2.path})
    # and if we request it again via get, result should be empty
    get_result = ds.get(path=['subm 1', '2'], get_data=False)
    assert_status('notneeded', get_result)
예제 #13
0
def test_get_autoresolve_recurse_subdatasets(src=None, path=None):
    """Recursive `get` on a subdataset path installs nested subdatasets
    and fetches their file content by default.
    """
    origin = Dataset(src).create()
    origin_sub = origin.create('sub')
    origin_subsub = origin_sub.create('subsub')
    with open(opj(origin_subsub.path, 'file_in_annex.txt'), "w") as f:
        f.write('content')
    origin.save(recursive=True)

    ds = install(path,
                 source=src,
                 result_xfm='datasets',
                 return_type='item-or-list')
    # nothing installed below the top level yet
    eq_(len(ds.subdatasets(state='present')), 0)

    with chpwd(ds.path):
        results = get(opj(ds.path, 'sub'),
                      recursive=True,
                      result_xfm='datasets')
    # both 'sub' and 'sub/subsub' got installed
    eq_(len(ds.subdatasets(state='present', recursive=True)), 2)
    subsub = Dataset(opj(ds.path, 'sub', 'subsub'))
    ok_(subsub.is_installed())
    assert_in(subsub, results)
    # all file handles are fulfilled by default
    # (reuse `subsub` instead of re-instantiating the same dataset)
    ok_(subsub.repo.file_has_content("file_in_annex.txt") is True)
예제 #14
0
def test_get_mixed_hierarchy(src=None, path=None):
    """`get` across a mixed hierarchy: plain-git superdataset, annex sub.

    Only the annexed file in the subdataset needs fetching; the git-only
    super yields 'notneeded'.
    """
    origin = Dataset(src).create(annex=False)
    origin_sub = origin.create('subds')
    with open(opj(origin.path, 'file_in_git.txt'), "w") as f:
        f.write('no idea')
    with open(opj(origin_sub.path, 'file_in_annex.txt'), "w") as f:
        f.write('content')
    origin.save('file_in_git.txt', to_git=True)
    origin_sub.save('file_in_annex.txt')
    origin.save()

    # now, install that thing:
    ds, subds = install(path,
                        source=src,
                        recursive=True,
                        result_xfm='datasets',
                        return_type='item-or-list',
                        result_filter=None)
    # annexed content is not present right after install
    ok_(subds.repo.file_has_content("file_in_annex.txt") is False)

    # and get:
    result = ds.get(curdir, recursive=True)
    # git repo and subds
    assert_status(['ok', 'notneeded'], result)
    # exactly one actual retrieval: the annexed file in the subdataset
    assert_result_count(result,
                        1,
                        path=opj(subds.path, "file_in_annex.txt"),
                        status='ok')
    ok_(subds.repo.file_has_content("file_in_annex.txt") is True)
예제 #15
0
def test_get_install_missing_subdataset(src=None, path=None):
    """`get` installs subdatasets only when a requested path needs them.

    A plain non-recursive `get` of the top installs nothing; a path into
    an uninstalled subdataset triggers its installation; recursive `get`
    installs everything.
    """
    _mk_submodule_annex(src, 'test-annex.dat', 'irrelevant')

    ds = install(path=path,
                 source=src,
                 result_xfm='datasets',
                 return_type='item-or-list')
    ds.create(force=True)  # force, to cause dataset initialization
    subs = ds.subdatasets(result_xfm='datasets')
    ok_(all([not sub.is_installed() for sub in subs]))

    # we don't install anything, if no explicitly given path points into a
    # not yet installed subdataset:
    ds.get(curdir)
    ok_(all([not sub.is_installed() for sub in subs]))

    # but we do, whenever a given path is contained in such a subdataset:
    file_ = opj(subs[0].path, 'test-annex.dat')
    ds.get(file_)
    ok_(subs[0].is_installed())
    ok_(subs[0].repo.file_has_content('test-annex.dat') is True)

    # but we fulfill any handles, and dataset handles too
    ds.get(curdir, recursive=True)
    ok_(all([sub.is_installed() for sub in subs]))
예제 #16
0
def test_install_into_dataset(source=None, top_path=None):
    """Installing a subdataset into an existing dataset.

    The install itself must not save unrelated changes in the super, and
    a manually created dataset can be registered afterwards via `save`.
    """
    src_ds = Dataset(source).create(result_renderer='disabled', force=True)
    src_ds.save(['INFO.txt', 'test.dat'], to_git=True)
    src_ds.save('test-annex.dat', to_git=False)

    ds = create(top_path)
    assert_repo_status(ds.path)

    subds = ds.install("sub", source=source)
    ok_(isdir(opj(subds.path, '.git')))
    ok_(subds.is_installed())
    assert_in('sub', ds.subdatasets(result_xfm='relpaths'))
    # sub is clean:
    assert_repo_status(subds.path, annex=None)
    # top is too:
    assert_repo_status(ds.path, annex=None)
    ds.save(message='addsub')
    # now it is:
    assert_repo_status(ds.path, annex=None)

    # but we could also save while installing and there should be no side-effect
    # of saving any other changes if we state to not auto-save changes
    # Create a dummy change
    create_tree(ds.path, {'dummy.txt': 'buga'})
    assert_repo_status(ds.path, untracked=['dummy.txt'])
    subds_ = ds.install("sub2", source=source)
    eq_(subds_.path, opj(ds.path, "sub2"))  # for paranoid yoh ;)
    # dummy.txt stayed untracked -- install did not sweep it up
    assert_repo_status(ds.path, untracked=['dummy.txt'])

    # and we should achieve the same behavior if we create a dataset
    # and then decide to add it
    create(_path_(top_path, 'sub3'))
    assert_repo_status(ds.path, untracked=['dummy.txt', 'sub3/'])
    ds.save('sub3')
    assert_repo_status(ds.path, untracked=['dummy.txt'])
예제 #17
0
def test_gitrepo_call_git_methods(path=None):
    """Exercise the GitRepo.call_git* family of helpers.

    Assumes the fixture pre-populated `path` with files "foo" and "bar"
    -- TODO confirm against the decorator providing `path`.
    """
    gr = GitRepo(path).init()
    gr.call_git(['add', "foo", "bar"])
    gr.call_git(['commit', '-m', "foobar"])
    gr.call_git(["mv"], files=["foo", "foo.txt"])
    ok_((gr.pathobj / 'foo.txt').exists())

    # a failing command's stderr shows up in the debug log only when the
    # failure was not expected (expect_fail=False)
    for expect_fail, check in [(False, assert_in),
                               (True, assert_not_in)]:
        with swallow_logs(new_level=logging.DEBUG) as cml:
            with assert_raises(CommandError):
                gr.call_git(["mv"], files=["notthere", "dest"],
                            expect_fail=expect_fail)
            check("fatal: bad source", cml.out)

    eq_(list(gr.call_git_items_(["ls-files"], read_only=True)),
        ["bar", "foo.txt"])
    eq_(list(gr.call_git_items_(["ls-files", "-z"], sep="\0", read_only=True)),
        # Note: The custom separator has trailing empty item, but this is an
        # arbitrary command with unknown output it isn't safe to trim it.
        ["bar", "foo.txt"])

    # call_git_oneline refuses multi-line output
    with assert_raises(AssertionError):
        gr.call_git_oneline(["ls-files"], read_only=True)

    eq_(gr.call_git_oneline(["ls-files"], files=["bar"], read_only=True),
        "bar")

    ok_(gr.call_git_success(["rev-parse", "HEAD^{commit}"], read_only=True))
    with swallow_logs(new_level=logging.DEBUG) as cml:
        assert_false(gr.call_git_success(["rev-parse", "HEAD^{blob}"],
                                         read_only=True))
        # failure of a *_success call is not logged at debug level
        assert_not_in("expected blob type", cml.out)
예제 #18
0
def test_probe_known_failure():
    """`probe_known_failure` honors the datalad.tests.knownfailures.probe flag."""
    # should raise assert error if function no longer fails
    with patch_config({'datalad.tests.knownfailures.probe': True}):
        with assert_raises(Failed):
            probe_known_failure(lambda: True)()

    with patch_config({'datalad.tests.knownfailures.probe': False}):
        # NOTE(review): the decorated function is never called here -- ok_
        # only checks the truthiness of the returned wrapper object;
        # possibly a missing `()` -- confirm intent
        ok_(probe_known_failure(lambda: True))
예제 #19
0
 def check_mkdir(d1):
     """Verify `d1` is an existing, empty directory; record it; seed a file.

     Appends `d1` to `dnames` (a name from the enclosing scope, defined
     outside this view -- presumably for later cleanup/inspection).
     """
     ok_(os.path.exists(d1))
     ok_(os.path.isdir(d1))
     dnames.append(d1)
     eq_(glob(os.path.join(d1, '*')), [])
     # Create a file to assure we can remove later the temporary load
     with open(os.path.join(d1, "test.dat"), "w") as f:
         f.write("TEST LOAD")
예제 #20
0
def test_newthings_coming_down(originpath=None, destpath=None):
    """`update` propagates origin-side changes of repository flavor.

    Covers: origin upgrading from plain git to annex (clone learns about
    annex on update+merge), tags coming down, and the origin destroying
    its annex again (clone prunes the remote annex branch gracefully).
    """
    origin = GitRepo(originpath, create=True)
    create_tree(originpath, {'load.dat': 'heavy'})
    Dataset(originpath).save('load.dat')
    ds = install(source=originpath,
                 path=destpath,
                 result_xfm='datasets',
                 return_type='item-or-list')
    assert_is_instance(ds.repo, GitRepo)
    assert_in(DEFAULT_REMOTE, ds.repo.get_remotes())
    # turn origin into an annex
    origin = AnnexRepo(originpath, create=True)
    # clone doesn't know yet
    assert_false(knows_annex(ds.path))
    # but after an update it should
    # no merge, only one sibling, no parameters should be specific enough
    assert_result_count(ds.update(), 1, status='ok', type='dataset')
    assert (knows_annex(ds.path))
    # no branches appeared
    eq_(ds.repo.get_branches(), [DEFAULT_BRANCH])
    # now merge, and get an annex
    assert_result_count(ds.update(merge=True),
                        1,
                        action='update',
                        status='ok',
                        type='dataset')
    assert_in('git-annex', ds.repo.get_branches())
    assert_is_instance(ds.repo, AnnexRepo)
    # should be fully functional
    testfname = opj(ds.path, 'load.dat')
    assert_false(ds.repo.file_has_content(testfname))
    ds.get('.')
    ok_file_has_content(opj(ds.path, 'load.dat'), 'heavy')
    # check that a new tag comes down
    origin.tag('first!')
    assert_result_count(ds.update(), 1, status='ok', type='dataset')
    eq_(ds.repo.get_tags(output='name')[0], 'first!')

    # and now we destroy the remote annex
    origin.call_git(['config', '--remove-section', 'annex'])
    rmtree(opj(origin.path, '.git', 'annex'), chmod_files=True)
    origin.call_git(['branch', '-D', 'git-annex'])
    origin = GitRepo(originpath)
    assert_false(knows_annex(originpath))

    # and update the local clone
    # for now this should simply not fail (see gh-793), later might be enhanced to a
    # graceful downgrade
    before_branches = ds.repo.get_branches()
    ok_(any("git-annex" in b for b in ds.repo.get_remote_branches()))
    assert_result_count(ds.update(), 1, status='ok', type='dataset')
    eq_(before_branches, ds.repo.get_branches())
    # annex branch got pruned
    assert_false(any("git-annex" in b for b in ds.repo.get_remote_branches()))
    # check that a new tag comes down even if repo types mismatch
    origin.tag('second!')
    assert_result_count(ds.update(), 1, status='ok', type='dataset')
    eq_(ds.repo.get_tags(output='name')[-1], 'second!')
예제 #21
0
def test_remove_recreation(path=None):
    """A dataset can be recreated at a path it was just removed from.

    Guards against stale in-memory state of the removed instance
    interfering with the fresh one (see issue #1311).
    """
    dataset = Dataset(path).create()
    dataset.remove(reckless='availability')
    # recreate at the very same location
    dataset = Dataset(path).create()
    assert_repo_status(dataset.path)
    ok_(dataset.is_installed())
예제 #22
0
def test_download_url_archive_from_subdir(toppath=None,
                                          topurl=None,
                                          path=None):
    """`download_url --archive` run from a subdirectory extracts there."""
    ds = Dataset(path).create()
    # trailing "" yields a directory-style path
    subdir = opj(ds.path, "subdir", "")
    os.mkdir(subdir)
    with chpwd(subdir):
        download_url([topurl + "archive.tar.gz"], archive=True)
    # the archive content must land below the subdirectory
    ok_(ds.repo.file_has_content(opj("subdir", "archive", "file1.txt")))
예제 #23
0
def test_runner_stdout_capture():
    """With StdOutErrCapture, stdout is captured and stderr stays empty."""
    expected = "stdout-Message"
    outcome = Runner().run(
        py2cmd('import sys; print(%r, file=sys.stdout)' % expected),
        protocol=StdOutErrCapture,
    )
    eq_(outcome['stdout'].rstrip(), expected)
    ok_(not outcome['stderr'])
예제 #24
0
def test_rerun_mutator_stem_nonrun_merges(path=None):
    """`rerun` handling of a history where a run commit stems a non-run merge.

    Legend for the graphs below: suffix `_n` = non-run commit, `_r` = run
    commit, `_C`/`_R` = rerun-created copies, `_M` = rerun-created merge.
    """
    ds = Dataset(path).create()
    # keep direct repo accessor to speed things up
    ds_repo = ds.repo
    ds.run("echo foo >>foo")
    with open(op.join(path, "nonrun-file0"), "w") as f:
        f.write("blah")
    ds.save()
    # build a side branch from before the non-run commit, then merge it
    ds_repo.checkout(DEFAULT_BRANCH + "~", options=["-b", "side"])
    with open(op.join(path, "nonrun-file1"), "w") as f:
        f.write("more blah")
    ds.save()
    ds_repo.checkout(DEFAULT_BRANCH)
    ds_repo.merge("side", options=["-m", "Merge side"])
    # o                 e_n
    # |\
    # | o               d_n
    # o |               c_n
    # |/
    # o                 b_r
    # o                 a_n
    ds.rerun(since="", onto="")
    # o                 e_M
    # |\
    # | o               d_C
    # o |               c_C
    # |/
    # o                 b_R
    # o                 a_n
    # merge structure preserved, but history rewritten onto the rerun
    ok_(ds_repo.commit_exists("HEAD^2"))
    neq_(ds_repo.get_hexsha(DEFAULT_BRANCH), ds_repo.get_hexsha())

    ds_repo.checkout(DEFAULT_BRANCH)
    ds.rerun(since="", onto=DEFAULT_BRANCH + "^2")
    # o                 c_C
    # o                 b_R
    # o                 d_n
    # o                 b_r
    # o                 a_n
    # rerunning onto the side parent linearizes the history (no merge)
    assert_false(ds_repo.commit_exists("HEAD^2"))
    eq_(ds_repo.get_hexsha("HEAD~2"),
        ds_repo.get_hexsha(DEFAULT_BRANCH + "^2"))

    ds_repo.checkout(DEFAULT_BRANCH)
    hexsha_before = ds_repo.get_hexsha()
    ds.rerun(since="")
    # o                 b_R
    # o                 e_n
    # |\
    # | o               d_n
    # o |               c_n
    # |/
    # o                 b_r
    # o                 a_n
    # without onto, the rerun is appended on top of the existing history
    eq_(hexsha_before, ds_repo.get_hexsha(DEFAULT_BRANCH + "^"))
    assert_false(ds_repo.commit_exists("HEAD^2"))
예제 #25
0
def test_update_how_subds_different(path=None, *, follow, action):
    """`update` with a subdataset-specific `how_subds` action.

    Parametrized (presumably via a fixture/decorator -- confirm) over
    `follow` ("sibling" vs "parentds") and `action` ("merge" vs
    "checkout" style values feeding `how_subds`).
    """
    path = Path(path)
    ds_src = Dataset(path / "source").create()
    ds_src_sub = ds_src.create("sub")
    ds_src.save()

    ds_clone = install(source=ds_src.path,
                       path=path / "clone",
                       recursive=True,
                       result_xfm="datasets")
    # local-only change so the clone's history diverges from source
    (ds_clone.pathobj / "foo").write_text("foo")
    ds_clone.save()
    ds_clone_sub = Dataset(ds_clone.pathobj / "sub")

    (ds_src_sub.pathobj / "bar").write_text("bar")
    ds_src.save(recursive=True)

    # Add unrecorded state to make --follow=sibling/parentds differ.
    (ds_src_sub.pathobj / "baz").write_text("baz")
    ds_src_sub.save()

    ds_clone_repo = ds_clone.repo
    ds_clone_hexsha_pre = ds_clone_repo.get_hexsha()

    ds_clone_sub_repo = ds_clone_sub.repo
    ds_clone_sub_branch_pre = ds_clone_sub_repo.get_active_branch()

    res = ds_clone.update(follow=follow,
                          how="merge",
                          how_subds=action,
                          recursive=True)

    # the superdataset is merged, the subdataset gets the requested action
    assert_result_count(res,
                        1,
                        action="merge",
                        status="ok",
                        path=ds_clone.path)
    assert_result_count(res,
                        1,
                        action=f"update.{action}",
                        status="ok",
                        path=ds_clone_sub.path)

    ds_clone_hexsha_post = ds_clone_repo.get_hexsha()
    neq_(ds_clone_hexsha_pre, ds_clone_hexsha_post)
    neq_(ds_src.repo.get_hexsha(), ds_clone_hexsha_post)
    # merge preserved the clone's local commit in its ancestry
    ok_(ds_clone_repo.is_ancestor(ds_clone_hexsha_pre, ds_clone_hexsha_post))

    # follow=sibling lands on the sibling's tip; parentds on the recorded state
    eq_(ds_clone_sub.repo.get_hexsha(),
        ds_src_sub.repo.get_hexsha(None if follow == "sibling" else "HEAD~"))
    ds_clone_sub_branch_post = ds_clone_sub_repo.get_active_branch()

    if action == "checkout":
        # checkout leaves the subdataset on a detached HEAD (no branch)
        neq_(ds_clone_sub_branch_pre, ds_clone_sub_branch_post)
        assert_false(ds_clone_sub_branch_post)
    else:
        eq_(ds_clone_sub_branch_pre, ds_clone_sub_branch_post)
예제 #26
0
def test_GitRepo_equals(path1=None, path2=None):
    """GitRepo equality is determined by the repository path."""
    first = GitRepo(path1)
    second = GitRepo(path1)
    # same path -> equal, checked via operator and helper alike
    ok_(first == second)
    eq_(first, second)
    # different path -> unequal
    second = GitRepo(path2)
    neq_(first, second)
    ok_(first != second)
예제 #27
0
def test_runner(tempfile=None):
    """Default Runner.run captures nothing; the command's effect happens."""
    runner = Runner()
    # non-ASCII content exercises encoding handling (ASCII-only on Windows)
    content = 'Testing real run' if on_windows else 'Testing äöü東 real run'
    res = runner.run('echo %s > %s' % (content, tempfile))
    # no capture of any kind, by default
    ok_(not res['stdout'])
    ok_(not res['stderr'])
    ok_file_has_content(tempfile, content, strip=True)
    os.unlink(tempfile)
예제 #28
0
def test_bundle_invariance(path=None):
    """Connections behave the same with and without the remote annex bundle."""
    manager = SSHManager()
    probe = Path(path) / 'dummy'
    for use_bundle in (True, False):
        assert_false(probe.exists())
        conn = manager.get_connection('ssh://datalad-test',
                                      use_remote_annex_bundle=use_bundle)
        # a no-op remote command that creates the probe file via redirection
        conn('cd .>{}'.format(str(probe)))
        ok_(probe.exists())
        probe.unlink()
예제 #29
0
def test_ssh_git_props():
    """Remote git and git-annex versions are discoverable over SSH."""
    manager = SSHManager()
    connection = manager.get_connection('ssh://datalad-test')
    # Note: Avoid comparing these versions directly to the versions in
    # external_versions because the ssh://localhost versions detected might
    # differ depending on how git-annex is installed.
    ok_(connection.get_annex_version())
    ok_(connection.get_git_version())
    # close possibly still present connections
    manager.close()
예제 #30
0
def test_having_annex(path=None):
    """A git-annex branch must be visible among the repo's remote refs."""
    ok_(os.path.exists(os.path.join(path, '.git')))
    repo = GitRepo(path)
    # the branch might not necessarily be present locally upon initial
    # submodule init, so look for it among remote refs instead
    remote_refs = repo.get_remote_branches()
    ok_(DEFAULT_REMOTE + '/git-annex' in remote_refs,
        msg="Didn't find git-annex among refs %s" % remote_refs)