Example #1
def test_uninstall_without_super(path):
    # a parent dataset with a proper subdataset, and another dataset that
    # is just placed underneath the parent, but not an actual subdataset
    parent = Dataset(path).create()
    sub = parent.create('sub')
    ok_clean_git(parent.path)
    nosub = create(opj(parent.path, 'nosub'))
    ok_clean_git(nosub.path)
    subreport = parent.subdatasets()
    assert_result_count(subreport, 1, path=sub.path)
    assert_result_count(subreport, 0, path=nosub.path)
    # it should be possible to uninstall the proper subdataset, even without
    # explicitly calling the uninstall methods of the parent -- things should
    # be figured out by datalad
    uninstall(sub.path)
    assert not sub.is_installed()
    # no present subdatasets anymore
    subreport = parent.subdatasets()
    assert_result_count(subreport, 1)
    assert_result_count(subreport, 1, path=sub.path, state='absent')
    assert_result_count(subreport, 0, path=nosub.path)
    # but we should fail on an attempt to uninstall the non-subdataset
    res = uninstall(nosub.path, on_failure='ignore')
    assert_result_count(
        res, 1, path=nosub.path, status='error',
        message="will not uninstall top-level dataset (consider `remove` command)")
Example #2
def test_safetynet(path):
    ds = Dataset(path).create()
    os.makedirs(opj(ds.path, 'deep', 'down'))
    for p in (ds.path, opj(ds.path, 'deep'), opj(ds.path, 'deep', 'down')):
        with chpwd(p):
            # will never remove PWD, or anything outside the dataset
            for target in (ds.path, os.curdir, os.pardir,
                           opj(os.pardir, os.pardir)):
                assert_status(('error', 'impossible'),
                              uninstall(path=target, on_failure='ignore'))
    sub = ds.create('sub')
    subsub = sub.create('subsub')
    for p in (sub.path, subsub.path):
        with chpwd(p):
            assert_status('error', uninstall(on_failure='ignore'))
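The loop above leans on chpwd from datalad.utils, a context manager that changes the working directory for the duration of the block and restores the previous one on exit. A minimal, self-contained sketch (the target directory is a placeholder):

# minimal sketch of the chpwd behaviour the test relies on; '/tmp' is a
# placeholder directory, not taken from the original
import os
from os.path import realpath
from datalad.utils import chpwd

before = os.getcwd()
with chpwd('/tmp'):
    assert os.getcwd() == realpath('/tmp')
# the previous working directory is restored on exit
assert os.getcwd() == before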
Example #3
def test_safetynet(path):
    ds = Dataset(path).create()
    os.makedirs(opj(ds.path, 'deep', 'down'))
    for p in (ds.path, opj(ds.path, 'deep'), opj(ds.path, 'deep', 'down')):
        with chpwd(p):
            # will never remove PWD, or anything outside the dataset
            for target in (ds.path, os.curdir, os.pardir, opj(os.pardir, os.pardir)):
                assert_status(
                    ('error', 'impossible'),
                    uninstall(path=target, on_failure='ignore'))
Example #4
def test_uninstall_invalid(path):
    assert_raises(InsufficientArgumentsError, uninstall)
    # makes no sense to call uninstall, but ask it to do nothing
    assert_raises(ValueError,
                  uninstall,
                  remove_handles=False,
                  remove_data=False)
    ds = Dataset(path)
    # TODO make these two cases uniform
    if hasattr(ds.repo, 'drop'):
        assert_raises(Exception, uninstall, dataset=ds, path='not_existent')
    else:
        eq_(uninstall(dataset=ds, path='not_existent'), [])
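For reference, the exception and assertion helpers used here live in datalad's support and test-utility modules; the imports below are an assumption based on the older module layout this snippet matches:

# assumed imports for the snippet above (older datalad module layout)
from datalad.api import uninstall
from datalad.distribution.dataset import Dataset
from datalad.support.exceptions import InsufficientArgumentsError
from datalad.tests.utils import assert_raises, eq_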
Example #5
def test_uninstall_subdataset(src, dst):

    ds = install(dst, source=src, recursive=True)
    ok_(ds.is_installed())
    known_subdss = ds.subdatasets(result_xfm='datasets')
    for subds in ds.subdatasets(result_xfm='datasets'):
        ok_(subds.is_installed())

        repo = subds.repo

        annexed_files = repo.get_content_annexinfo(init=None)
        repo.get([str(f) for f in annexed_files])

        # drop data of subds:
        res = ds.drop(path=subds.path, result_xfm='paths')
        ok_(all(str(f) in res for f in annexed_files))
        ainfo = repo.get_content_annexinfo(paths=annexed_files,
                                           eval_availability=True)
        ok_(all(not st["has_content"] for st in ainfo.values()))
        # subdataset is still known
        assert_in(subds.path, ds.subdatasets(result_xfm='paths'))

    eq_(ds.subdatasets(result_xfm='datasets'), known_subdss)

    for subds in ds.subdatasets(result_xfm='datasets'):
        # uninstall subds itself:
        # simulate a cmdline invocation pointing to the subdataset
        # with a relative path from outside the superdataset to catch
        # https://github.com/datalad/datalad/issues/4001
        pwd = Path(dst).parent
        with chpwd(str(pwd)):
            res = uninstall(
                dataset=ds.path,
                path=str(subds.pathobj.relative_to(pwd)),
                result_xfm='datasets',
            )
        eq_(res[0], subds)
        ok_(not subds.is_installed())
        # just a deinit must not remove the subdataset registration
        eq_(ds.subdatasets(result_xfm='datasets'), known_subdss)
        # mountpoint of subdataset should still be there
        ok_(exists(subds.path))
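The test draws the distinction between drop and uninstall: drop removes annexed file content but leaves the dataset installed, while uninstall deinitializes the subdataset yet keeps its registration and mountpoint in the superdataset. A minimal sketch of that distinction, written against the same older datalad API the snippet uses and with a throwaway path:

# minimal sketch of the drop/uninstall distinction exercised above;
# '/tmp/demo_super' is a placeholder location
from datalad.api import uninstall
from datalad.distribution.dataset import Dataset

super_ds = Dataset('/tmp/demo_super').create()
sub_ds = super_ds.create('sub')

# drop: removes annexed file content, the dataset itself stays installed
super_ds.drop(path=sub_ds.path)
assert sub_ds.is_installed()

# uninstall: deinitializes the subdataset; its registration and (now empty)
# mountpoint remain in the superdataset
uninstall(dataset=super_ds.path, path=sub_ds.path)
assert not sub_ds.is_installed()
assert sub_ds.path in super_ds.subdatasets(result_xfm='paths')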
Example #6
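The script below is shown without its imports. Judging by the names it uses, it appears to rely on roughly the following (glob2 is a third-party package; dl is the datalad Python API); treat these as an assumption rather than the original header:

# assumed imports for the script below; inferred from usage, not copied
# from the original source
import logging
import os
import subprocess
import sys
from argparse import ArgumentParser
from os.path import isdir, isfile, join
from shutil import copyfile

import glob2
import datalad.api as dl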
def main(argv):
    parser = ArgumentParser(
        description='This program will copy augmented BIDS sidecar files back '
        'to the original BIDS data and run bidsmri2nidm on it')

    parser.add_argument(
        '-datalad_dir',
        dest='datalad_dir',
        required=True,
        help="Path to directory containing "
        "datalad datasets to copy BIDS sidecar files to (e.g. /datasets.datalad.org/openfmri)"
    )
    parser.add_argument(
        '-new_sidecar_dir',
        dest='sidecar_dir',
        required=True,
        help="Path to directory containing "
        "sub-directories for each BIDS dataset with augmented BIDS sidecar files"
    )
    parser.add_argument(
        '-nidm_dir',
        dest='nidm_dir',
        required=False,
        help="If path is included then "
        "NIDM files will be put in this location under dataset name otherwise they will be"
        "added to the datalad dataset directly.")
    parser.add_argument('-logfile',
                        dest='logfile',
                        required=True,
                        help="Logfile to track progress and errors")

    args = parser.parse_args()

    # open log file
    logger = logging.getLogger('bidsidecar_to_dataset')
    hdlr = logging.FileHandler(args.logfile)
    formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
    hdlr.setFormatter(formatter)
    logger.addHandler(hdlr)
    logger.setLevel(logging.INFO)

    # step 1 check directories exist
    if not isdir(args.datalad_dir):
        logger.error("Datalad directory not found: %s" % args.datalad_dir)
        exit(1)
    if not isdir(args.sidecar_dir):
        logger.error("BIDS augmented sidecar directory not found: %s" %
                     args.sidecar_dir)
        exit(1)

    # set working directory to args.datalad_dir
    #os.chdir(args.datalad_dir)

    # step 2 loop through all datasets in args.sidecar_dir
    bids_datasets = [
        x for x in os.listdir(args.sidecar_dir)
        if isdir(join(args.sidecar_dir, x))
    ]
    # for each dataset
    for ds in bids_datasets:
        # search recursively in ds looking for json files.  Path should be same as found within the dataset in
        # datalad so it'll be a simple copy.
        json_files = glob2.glob(join(args.sidecar_dir, ds, '**', "*.json"))

        # check if we have json files before we download the dataset from datalad
        if len(json_files) == 0:
            continue

        # download datalad dataset and install
        #cmd = ["datalad","get", "-r", join(args.datalad_dir,ds)]
        # replacing with datalad api
        #cmd = ["datalad", "get", "-r", ds]
        try:
            dl.get(path=join(args.datalad_dir, ds), recursive=True)
            logger.info("Running datalad get command on dataset: %s" %
                        join(args.datalad_dir, ds))
            #ret = subprocess.run(cmd, stdout=subprocess.PIPE, text=True)
        except:
            logger.error("Datalad returned error: %s for dataset %s." %
                         (sys.exc_info()[0], ds))
            continue

        # now copy each of the json_files into the datalad dataset
        for file in json_files:
            # changing copy to use copyfile from shutil
            #cmd = ["cp",join(args.sidecar_dir,ds,file),join(args.datalad_dir,ds)]
            if not isdir(join(args.datalad_dir, ds)):
                os.mkdir(join(args.datalad_dir, ds))
            # glob2 returns the full source path, so derive the path relative
            # to this dataset's sidecar directory and mirror it inside the
            # datalad dataset
            rel_path = os.path.relpath(file, join(args.sidecar_dir, ds))
            dest_file = join(args.datalad_dir, ds, rel_path)
            os.makedirs(os.path.dirname(dest_file), exist_ok=True)
            logger.info("Copying file: source=%s, dest=%s" % (file, dest_file))
            copyfile(file, dest_file)
            #ret = subprocess.run(cmd, stdout=subprocess.PIPE, text=True)

            # make sure the copy actually arrived
            if not isfile(dest_file):
                logger.error(
                    "ERROR: copy of file %s to %s didn't complete successfully!" %
                    (file, dest_file))

        # now run bidsmri2nidm
        if args.nidm_dir is not None:
            cmd = [
                "bidsmri2nidm", "-d",
                join(args.datalad_dir, ds), "-o",
                join(args.nidm_dir, ds, "nidm.ttl"), "-bidsignore",
                "-no_concepts"
            ]
        else:
            cmd = [
                "bidsmri2nidm", "-d",
                join(args.datalad_dir, ds), "-o",
                join(args.datalad_dir, ds, "nidm.ttl"), "-bidsignore",
                "-no_concepts"
            ]
        logger.info("Running command: %s" % cmd)
        ret = subprocess.run(cmd, stdout=subprocess.PIPE, text=True)

        # now remove the datalad dataset to save space
        # replacing with datalad api call
        #cmd = ["datalad", "uninstall", "-r", join(args.datalad_dir, ds)]
        dl.uninstall(path=join(args.datalad_dir, ds), recursive=True)
        logger.info("Running datalad uninstall command on dataset: %s" %
                    join(args.datalad_dir, ds))
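Finally, a hypothetical way to wire up and invoke the script; the filename and the sidecar/NIDM/log paths are placeholders (only the openfmri example path comes from the help text above):

# hypothetical entry-point wiring; the script filename and most paths below
# are placeholders, not taken from the original
import sys

if __name__ == "__main__":
    main(sys.argv[1:])

# example command line:
#   python copy_sidecars.py -datalad_dir /datasets.datalad.org/openfmri \
#       -new_sidecar_dir /data/augmented_sidecars \
#       -nidm_dir /data/nidm_output -logfile /tmp/sidecar_copy.log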