Example #1
    def get_raw_dataset(self):
        # Note: This is lazy to avoid building at import time, since the import is part of
        # nose's discovery and runs before the test dependencies are set up. At that point
        # datalad's UI backend is not yet correctly configured, which in turn lets the
        # cloning hang during progressbar generation.
        if not self._dspath:
            import tempfile
            kwargs = get_tempfile_kwargs()
            path = tempfile.mkdtemp(**kwargs)
            f_dicoms = get_dicom_dataset('functional')
            s_dicoms = get_dicom_dataset('structural')
            ds = Dataset.create(path, cfg_proc=['hirni'])
            ds.install(source=f_dicoms, path=op.join('func_acq', 'dicoms'))
            ds.install(source=s_dicoms, path=op.join('struct_acq', 'dicoms'))

            # Note: Recursive, since aggregation wasn't performed in the installed datasets
            ds.meta_aggregate([
                op.join('func_acq', 'dicoms'),
                op.join('struct_acq', 'dicoms')
            ],
                              into='top',
                              recursive=True)

            # TODO: Figure out how to register this path for removal after the tests have run
            self._dspath = ds.path
        return self._dspath
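
Since get_raw_dataset() reads and writes self._dspath, it evidently lives on a small provider object that caches the dataset path across tests. A minimal sketch of how such an object might be consumed; the class name and wiring below are assumptions, not part of the original suite:

class RawDatasetProvider(object):
    # Hypothetical owner of get_raw_dataset(); only _dspath is implied above.
    def __init__(self):
        self._dspath = None
    # ... get_raw_dataset() from Example #1 goes here ...

provider = RawDatasetProvider()
first = provider.get_raw_dataset()          # first call clones DICOMs and aggregates metadata
assert first == provider.get_raw_dataset()  # later calls return the cached path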
Example #2
def test_dicom2spec(path):

    # ###  SETUP ###
    dicoms = get_dicom_dataset('structural')

    ds = Dataset.create(path, cfg_proc=['hirni'])
    ds.install(source=dicoms, path='acq100')
    # Note: Recursive, since aggregation wasn't performed in the installed datasets
    # TODO: Use get_raw_dataset() (Example #1) instead of this setup
    ds.meta_aggregate('acq100', into='top', recursive=True)
    # ### END SETUP ###

    # TODO: should it be 'specfile' or 'acq/specfile'? => At least documentation
    # is needed, if not a change in behavior
    res = ds.hirni_dicom2spec(path='acq100', spec='spec_structural.json')

    # check the actual location of spec_structural:
    # => it ends up in the study dataset's root!

    assert_result_count(res, 2)
    assert_result_count(res, 1, path=op.join(ds.path, 'spec_structural.json'))
    assert_result_count(res, 1, path=op.join(ds.path, '.gitattributes'))
    ok_clean_git(ds.path)

    # repeated execution shouldn't change .gitattributes again:
    from os import stat
    mtime = stat(op.join(ds.path, '.gitattributes')).st_mtime
    res = ds.hirni_dicom2spec(path='acq100', spec='spec_structural.json')
    assert_equal(stat(op.join(ds.path, '.gitattributes')).st_mtime, mtime)
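
To inspect what hirni_dicom2spec actually wrote, note that the spec file is a stream of JSON objects; a hedged sketch using datalad's json_py helpers (the same helpers the commented-out block in Example #6 relies on):

from datalad.support import json_py

snippets = list(json_py.load_stream(op.join(ds.path, 'spec_structural.json')))
# each snippet is a dict describing one entity found in 'acq100',
# with keys such as 'type' and 'location' (see Example #6)
for snippet in snippets:
    print(snippet.get('type'), snippet.get('location'))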
Example #3
def test_dicom_metadata_aggregation(path):
    dicoms = get_dicom_dataset('structural')

    ds = Dataset.create(path)
    ds.install(source=dicoms, path='acq100')
    ds.aggregate_metadata(recursive=True)
    res = ds.metadata(get_aggregates=True)
    assert_result_count(res, 2)
    assert_result_count(res, 1, path=opj(ds.path, 'acq100'))
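
The bare path argument suggests that, as is common in datalad test suites, this function is decorated with with_tempfile, which injects a fresh temporary path per run; a sketch of that wiring (an assumption, since decorators were stripped from these excerpts):

from datalad.tests.utils import with_tempfile

@with_tempfile
def test_dicom_metadata_aggregation(path):
    # body as in Example #3; 'path' is a fresh temporary path supplied by the decorator
    ...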
Example #4
def test_dicom_metadata_aggregation(path):
    dicoms = get_dicom_dataset('structural')

    ds = Dataset.create(path)
    ds.install(source=dicoms, path='acq100')

    # Note: Recursive, since aggregation wasn't performed in the installed datasets
    ds.meta_aggregate('acq100', into='top', recursive=True)
    res = ds.meta_dump(reporton='aggregates', recursive=True)
    assert_result_count(res, 2)
    assert_result_count(res, 1, path=op.join(ds.path, 'acq100'))
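
Examples #3 and #4 are the same test written against two metadata stacks; roughly, the calls correspond as follows (a paraphrase of the two examples, not an official mapping):

# datalad core metadata API (Example #3):
ds.aggregate_metadata(recursive=True)
res = ds.metadata(get_aggregates=True)

# datalad-metalad API (Example #4):
ds.meta_aggregate('acq100', into='top', recursive=True)
res = ds.meta_dump(reporton='aggregates', recursive=True)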
Example #5
def _single_session_dicom2bids(label, path, toolbox_url):

    with patch.dict('os.environ',
                    {'DATALAD_HIRNI_TOOLBOX_URL': toolbox_url}):
        ds = Dataset.create(path, cfg_proc=['hirni'])

    subject = "02"
    acquisition = "{sub}_{label}".format(sub=subject, label=label)

    dicoms = get_dicom_dataset(label)
    ds.install(source=dicoms, path=op.join(acquisition, 'dicoms'))
    # Note: Recursive, since aggregation wasn't performed in the installed datasets
    ds.meta_aggregate(op.join(acquisition, 'dicoms'), into='top', recursive=True)

    spec_file = 'spec_{label}.json'.format(label=label)
    ds.hirni_dicom2spec(path=op.join(acquisition, 'dicoms'),
                        spec=op.join(acquisition, spec_file))

    ds.hirni_spec2bids(op.join(acquisition, spec_file))
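
The helper is parameterized over the session label, so it is presumably invoked once per DICOM flavor used elsewhere in this suite; a hedged sketch of such call sites (the path variables are placeholders, and toolbox_url would come from the test harness):

# 'structural' and 'functional' are the two labels get_dicom_dataset() is asked for above
_single_session_dicom2bids('structural', structural_path, toolbox_url)
_single_session_dicom2bids('functional', functional_path, toolbox_url)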
Example #6
def test_spec2bids(study_path, bids_path, toolbox_url):

    with patch.dict('os.environ',
                    {'DATALAD_HIRNI_TOOLBOX_URL': toolbox_url}):
        study_ds = Dataset(study_path).create(cfg_proc=['hirni'])

    subject = "02"
    acquisition = "{sub}_functional".format(sub=subject)

    dicoms = get_dicom_dataset('functional')
    study_ds.install(source=dicoms, path=op.join(acquisition, 'dicoms'))
    # Note: Recursive, since aggregation wasn't performed in the installed datasets
    study_ds.meta_aggregate(op.join(acquisition, 'dicoms'), into='top', recursive=True)

    study_ds.hirni_dicom2spec(path=op.join(acquisition, 'dicoms'),
                              spec=op.join(acquisition, 'studyspec.json'))

    # add a custom converter script which is just a copy converter
    from shutil import copy
    copy(op.join(op.dirname(datalad_hirni.__file__),
                 'resources', 'dummy_executable.sh'),
         op.join(study_ds.path, 'code', 'my_script.sh'))
    study_ds.save(op.join('code', 'my_script.sh'), to_git=True,
                  message="add a copy converter script")

    # add dummy data to be 'converted' by the copy converter
    makedirs(op.join(study_ds.path, acquisition, 'my_fancy_data'))
    with open(op.join(study_ds.path, acquisition, 'my_fancy_data',
                      'my_raw_data.txt'), 'w') as f:
        f.write("some content")
    study_ds.save(op.join(study_ds.path, acquisition, 'my_fancy_data', 'my_raw_data.txt'),
                  message="added fancy data")

    # add specification snippet for that data:

    # ############
    # TODO: Needs procedure now
    #
    # snippet = {"type": "my_new_type",
    #            "location": op.join('my_fancy_data', 'my_raw_data.txt'),
    #            "subject": {"value": "{sub}".format(sub=subject),
    #                        "approved": True},
    #            "converter": {"value": "{_hs[converter_path]} {_hs[location]} {dspath}/sub-{_hs[bids_subject]}/my_converted_data.txt",
    #                          "approved": True},
    #            "converter_path": {"value": op.join(op.pardir, 'code', 'my_script.sh'),
    #                               "approved": True}
    #            }
    #
    # # TODO: proper spec save helper, not just sort (also to be used in webapp!)
    # from datalad.support import json_py
    # spec_list = [r for r in json_py.load_stream(op.join(study_ds.path, acquisition, spec_file))]
    # spec_list.append(snippet)
    # from ..support.helpers import sort_spec
    # spec_list = sorted(spec_list, key=lambda x: sort_spec(x))
    # json_py.dump2stream(spec_list, op.join(study_ds.path, acquisition, spec_file))
    #
    # study_ds.add(op.join(acquisition, spec_file),
    #              message="Add spec snippet for fancy data",
    #              to_git=True)
    #
    # ##############

    # create the BIDS dataset:
    with patch.dict('os.environ',
                    {'DATALAD_HIRNI_TOOLBOX_URL': toolbox_url}):
        bids_ds = Dataset.create(bids_path, cfg_proc=['hirni'])

    # install the study dataset as "sourcedata":
    bids_ds.install(source=study_ds.path, path="sourcedata")
    # get the toolbox, since procedures can't be discovered otherwise
    bids_ds.get(op.join('sourcedata', 'code', 'hirni-toolbox', 'converters',
                        'heudiconv', 'heudiconv.simg'))

    # make sure we have the target directory "sub-02" for the copy converter,
    # even if heudiconv didn't run before (the order in which the converters
    # execute depends on their order in the spec). This could of course also be
    # part of the converter script itself.
    makedirs(op.join(bids_ds.path, "sub-{sub}".format(sub=subject)))

    bids_ds.hirni_spec2bids(op.join("sourcedata", acquisition, "studyspec.json"))
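
After hirni_spec2bids returns, one might additionally assert that the conversion produced something in the subject directory; a hedged sketch (no assumption is made about the exact file names heudiconv writes):

from glob import glob

converted = glob(op.join(bids_ds.path, 'sub-{sub}'.format(sub=subject), '*'))
assert len(converted) > 0  # the converters wrote into sub-02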