Example #1
def test_integration_multirun(skip_integration, multi_run_file):

    if skip_integration:
        pytest.skip('Skipping integration test')

    test_path, test_filename = split(multi_run_file)
    test_chtrig = 1
    conversion_path = join(test_path, 'code', 'conversion')

    phys2bids(filename=test_filename,
              indir=test_path,
              outdir=test_path,
              chtrig=test_chtrig,
              num_timepoints_expected=[534, 513],
              tr=[1.2, 1.2])

    # Check that files are generated in outdir
    base_filename = 'Test2_samefreq_TWOscans_'
    for suffix in ['.json', '.tsv.gz']:
        for run in ['01', '02']:
            assert isfile(join(test_path, f'{base_filename}{run}{suffix}'))

    assert isfile(join(test_path, 'Test2_samefreq_TWOscans.txt'))

    # Check that files are generated in conversion_path
    for run in ['01', '02']:
        assert isfile(
            join(conversion_path, f'Test2_samefreq_TWOscans_{run}.log'))

    # Check that plots are generated in conversion_path
    # base_filename = 'Test2_samefreq_TWOscans_'
    # for run in ['1', '2']:
    #     assert isfile(join(conversion_path, f'Test2_samefreq_TWOscans_{run}_trigger_time.png'))
    assert isfile(join(conversion_path, 'Test2_samefreq_TWOscans.png'))
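
Note: skip_integration and multi_run_file are pytest fixtures defined in the project's conftest and are not shown in these excerpts. As a rough, hypothetical sketch (not the project's actual fixtures) of how such a skip flag is usually wired up:

# Hypothetical conftest.py sketch:
import os

import pytest


@pytest.fixture
def skip_integration():
    # Let CI or the local environment opt out of the slow integration tests,
    # e.g. by exporting SKIP_INTEGRATION=true before running pytest.
    return os.environ.get('SKIP_INTEGRATION', '').lower() == 'true'
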
Example #2
def test_integration_heuristic():
    """
    Does an integration test of the tutorial file with heuristics
    """
    test_path = resource_filename('phys2bids', 'tests/data')
    test_filename = 'tutorial_file.txt'
    test_full_path = os.path.join(test_path, test_filename)
    test_chtrig = 1
    test_outdir = test_path
    test_ntp = 158
    test_tr = 1.2
    test_thr = 0.735
    heur_path = resource_filename('phys2bids', 'heuristics')
    test_heur = os.path.join(heur_path, 'heur_tutorial.py')
    phys2bids(filename=test_full_path, chtrig=test_chtrig, outdir=test_outdir,
              num_timepoints_expected=test_ntp, tr=test_tr, thr=test_thr, sub='006',
              ses='01', heur_file=test_heur)

    test_path_output = os.path.join(test_path, 'sub-006/ses-01/func')

    # Check that files are generated
    base_filename = 'sub-006_ses-01_task-test_rec-labchart_run-01_physio'
    for suffix in ['.log', '.json', '.tsv.gz']:
        assert os.path.isfile(os.path.join(test_path_output, base_filename + suffix))

    # Read log file (note that this file is not the logger file)
    log_filename = 'sub-006_ses-01_task-test_rec-labchart_run-01_physio.log'
    with open(os.path.join(test_path_output, log_filename)) as log_info:
        log_info = log_info.readlines()

    # Check timepoints expected
    assert check_string(log_info, 'Timepoints expected', '158')
    # Check timepoints found
    assert check_string(log_info, 'Timepoints found', '158')
    # Check sampling frequency
    assert check_string(log_info, 'Sampling Frequency', '1000.0')
    # Check sampling started
    assert check_string(log_info, 'Sampling started', '0.2450')
    # Check start time
    assert check_string(log_info, 'first trigger', 'Time 0', is_num=False)

    # Checks json file
    json_filename = 'sub-006_ses-01_task-test_rec-labchart_run-01_physio.json'
    with open(os.path.join(test_path_output, json_filename)) as json_file:
        json_data = json.load(json_file)

    # Compares values in json file with ground truth
    assert math.isclose(json_data['SamplingFrequency'], 1000.0)
    assert math.isclose(json_data['StartTime'], 0.2450)
    assert json_data['Columns'] == ['time', 'Trigger', 'CO2', 'O2', 'Pulse']

    # Remove generated files
    for filename in glob.glob(os.path.join(test_path, 'phys2bids*')):
        os.remove(filename)
    for filename in glob.glob(os.path.join(test_path, 'Test_belt_pulse_samefreq*')):
        os.remove(filename)
    for filename in glob.glob(os.path.join(test_path_output, '*')):
        os.remove(filename)
Example #3
def test_integration_acq(samefreq_full_acq_file):
    """
    Does the integration test for an acq file
    """

    test_path, test_filename = os.path.split(samefreq_full_acq_file)
    test_chtrig = 3

    phys2bids(filename=test_filename,
              indir=test_path,
              outdir=test_path,
              chtrig=test_chtrig,
              num_timepoints_expected=1)

    # Check that files are generated
    for suffix in ['.log', '.json', '.tsv.gz', '_trigger_time.png']:
        assert os.path.isfile(
            os.path.join(test_path, 'Test_belt_pulse_samefreq' + suffix))

    # Read log file (note that this file is not the logger file)
    with open(os.path.join(test_path,
                           'Test_belt_pulse_samefreq.log')) as log_info:
        log_info = log_info.readlines()

    # Check timepoints expected
    assert check_string(log_info, 'Timepoints expected', '1')
    # Check timepoints found
    assert check_string(log_info, 'Timepoints found', '60')
    # Check sampling frequency
    assert check_string(log_info, 'Sampling Frequency', '10000.0')
    # Check sampling started
    assert check_string(log_info, 'Sampling started', '10.425107798467103')
    # Check start time
    assert check_string(log_info, 'first trigger', 'Time 0', is_num=False)

    # Checks json file
    with open(os.path.join(test_path,
                           'Test_belt_pulse_samefreq.json')) as json_file:
        json_data = json.load(json_file)

    # Compares values in json file with ground truth
    assert math.isclose(json_data['SamplingFrequency'], 10000.0)
    assert math.isclose(json_data['StartTime'], 10.425107798467103)
    assert json_data['Columns'] == [
        'time', 'RESP - RSP100C', 'PULSE - Custom, DA100C',
        'MR TRIGGER - Custom, HLT100C - A 5', 'PPG100C', 'CO2', 'O2'
    ]

    # Remove generated files
    for filename in glob.glob(os.path.join(test_path, 'phys2bids*')):
        os.remove(filename)
    for filename in glob.glob(
            os.path.join(test_path, 'Test_belt_pulse_samefreq*')):
        os.remove(filename)
Example #4
def test_integration_tutorial():
    """
    Does an integration test with the tutorial file
    """
    test_path = resource_filename('phys2bids', 'tests/data')
    test_filename = 'tutorial_file.txt'
    test_full_path = os.path.join(test_path, test_filename)
    test_chtrig = 1
    test_outdir = test_path
    test_ntp = 158
    test_tr = 1.2
    test_thr = 0.735
    phys2bids(filename=test_full_path,
              chtrig=test_chtrig,
              outdir=test_outdir,
              num_timepoints_expected=test_ntp,
              tr=test_tr,
              thr=test_thr)

    # Check that files are generated
    for suffix in ['.log', '.json', '.tsv.gz', '_trigger_time.png']:
        assert os.path.isfile(os.path.join(test_path,
                                           'tutorial_file' + suffix))

    # Read log file (note that this file is not the logger file)
    with open(os.path.join(test_path, 'tutorial_file.log')) as log_info:
        log_info = log_info.readlines()

    # Check timepoints expected
    assert check_string(log_info, 'Timepoints expected', '158')
    # Check timepoints found
    assert check_string(log_info, 'Timepoints found', '158')
    # Check sampling frequency
    assert check_string(log_info, 'Sampling Frequency', '1000.0')
    # Check sampling started
    assert check_string(log_info, 'Sampling started', '0.24499999999989086')
    # Check start time
    assert check_string(log_info, 'first trigger', 'Time 0', is_num=False)

    # Checks json file
    with open(os.path.join(test_path, 'tutorial_file.json')) as json_file:
        json_data = json.load(json_file)

    # Compares values in json file with ground truth
    assert math.isclose(json_data['SamplingFrequency'], 1000.0)
    assert math.isclose(json_data['StartTime'], 0.245)
    assert json_data['Columns'] == ['time', 'Trigger', 'CO2', 'O2', 'Pulse']

    # Remove generated files
    for filename in glob.glob(os.path.join(test_path, 'phys2bids*')):
        os.remove(filename)
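
Note: check_string is a helper imported from the test utilities and is not shown in these excerpts. Judging only from how it is called above, a minimal hypothetical sketch (not the project's actual implementation) might look like this:

import math


def check_string_sketch(log_lines, substring, expected, is_num=True):
    # Hypothetical helper: look at the last log line containing `substring`.
    # When is_num is True, compare its trailing token numerically with a
    # tolerance; otherwise check that `expected` appears verbatim in the line.
    # The signature mirrors the calls above; the matching logic is an assumption.
    matches = [line for line in log_lines if substring in line]
    if not matches:
        return False
    if is_num:
        return math.isclose(float(matches[-1].split()[-1]), float(expected),
                            rel_tol=1e-4)
    return expected in matches[-1]
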
Example #5
def phys2bids_cache(sourcefile: Path):
    """
    A cached version of phys2bids that reads the info structure

    :param sourcefile:  The sourcefile from which the info needs to be read
    :return:            The retrieved phys2bids info structure
    """

    return phys2bids(str(sourcefile), info=True)
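
Note: no caching is visible in this snippet, so the memoization implied by the docstring presumably comes from a decorator applied above the def (not captured here). A minimal sketch under that assumption:

from functools import lru_cache
from pathlib import Path

from phys2bids.phys2bids import phys2bids


@lru_cache(maxsize=None)
def phys2bids_cache_sketch(sourcefile: Path):
    # Hypothetical sketch: Path objects are hashable, so lru_cache can reuse
    # the info structure read on the first call for a given sourcefile.
    return phys2bids(str(sourcefile), info=True)
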
Example #6
def is_sourcefile(file: Path) -> str:
    """
    This plugin function assesses whether a sourcefile is of a supported dataformat

    :param file:    The sourcefile that is assessed
    :return:        The valid / supported dataformat of the sourcefile
    """

    if file.suffix[1:] in SUPPORTED_FTYPES:
        if file.suffix in ('.txt', '.mat'):
            try:
                phys2bids(file, info=True)
            except Exception as phys2bidserror:
                LOGGER.debug(
                    f'The phys2bids-plugin "is_sourcefile()" routine crashed, assessing whether "{file}" has a valid dataformat:\n{phys2bidserror}'
                )
                return ''
        return 'Physio'

    return ''
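
Note: a caller would typically use this predicate to filter a directory tree down to the files the plugin recognizes, as Example #8 below does. A brief usage sketch (the path is illustrative):

from pathlib import Path

# Hypothetical usage: keep only the physio files this plugin can handle.
session = Path('/data/sourcedata/sub-01/ses-01')  # illustrative path
physio_files = [f for f in session.rglob('*') if f.is_file() and is_sourcefile(f)]
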
Example #7
def test_raise_exception(samefreq_full_acq_file):
    test_path, test_filename = os.path.split(samefreq_full_acq_file)
    with raises(Exception) as errorinfo:
        phys2bids.phys2bids(filename=test_filename,
                            indir=test_path,
                            outdir=test_path,
                            chtrig=0)
    assert 'Wrong trigger' in str(errorinfo.value)

    with raises(Exception) as errorinfo:
        phys2bids.phys2bids(filename=test_filename,
                            num_timepoints_expected=[70],
                            tr=[1.3, 2],
                            indir=test_path,
                            outdir=test_path)
    assert "doesn't match" in str(errorinfo.value)

    with raises(Exception) as errorinfo:
        phys2bids.phys2bids(filename=test_filename,
                            num_timepoints_expected=[20, 300],
                            chtrig=3,
                            tr=1.5,
                            indir=test_path,
                            outdir=test_path)
    assert 'stop now' in str(errorinfo.value)
Example #8
def bidscoiner_plugin(session: Path, bidsmap: dict, bidsses: Path) -> None:
    """
    This wrapper function around phys2bids converts the physio data in the session folder and saves it in the bidsfolder.
    Each saved datafile should be accompanied by a json sidecar file. The bidsmap options for this plugin can be found in:

    bidsmap_new['Options']['plugins']['phys2bidscoin']

    See also the dcm2niix2bids plugin for a reference implementation

    :param session:     The full-path name of the subject/session raw data source folder
    :param bidsmap:     The full mapping heuristics from the bidsmap YAML-file
    :param bidsses:     The full-path name of the BIDS output `sub-/ses-` folder
    :return:            Nothing
    """

    # Get the subject identifiers and the BIDS root folder from the bidsses folder
    if bidsses.name.startswith('ses-'):
        bidsfolder = bidsses.parent.parent
        subid = bidsses.parent.name
        sesid = bidsses.name
    else:
        bidsfolder = bidsses.parent
        subid = bidsses.name
        sesid = ''

    # Get started
    plugin = {'phys2bidscoin': bidsmap['Options']['plugins']['phys2bidscoin']}
    datasource = bids.get_datasource(session, plugin)
    sourcefiles = [file for file in session.rglob('*') if is_sourcefile(file)]
    if not sourcefiles:
        LOGGER.info(f"No {__name__} sourcedata found in: {session}")
        return

    # Loop over all source data files and convert them to BIDS
    for sourcefile in sourcefiles:

        # Get a data source, a matching run from the bidsmap
        datasource = bids.DataSource(sourcefile, plugin, datasource.dataformat)
        run, match = bids.get_matching_run(datasource, bidsmap, runtime=True)

        # Check if we should ignore this run
        if datasource.datatype in bidsmap['Options']['bidscoin'][
                'ignoretypes']:
            LOGGER.info(f"Leaving out: {sourcefile}")
            continue

        # Check that we know this run
        if not match:
            LOGGER.error(
                f"Skipping unknown '{datasource.datatype}' run: {sourcefile}\n-> Re-run the bidsmapper and delete the physiological output data in {bidsses} to solve this warning"
            )
            continue

        LOGGER.info(f"Processing: {sourcefile}")

        # Get an ordered list of the func runs from the scans.tsv file (which should have a standardized datetime format)
        scans_tsv = bidsses / f"{subid}{bids.add_prefix('_', sesid)}_scans.tsv"
        if scans_tsv.is_file():
            scans_table = pd.read_csv(scans_tsv,
                                      sep='\t',
                                      index_col='filename')
            scans_table.sort_values(by=['acq_time', 'filename'], inplace=True)
        else:
            LOGGER.error(
                f"Could not read the TR's for phys2bids due to a missing '{scans_tsv}' file"
            )
            continue
        funcscans = []
        for index, row in scans_table.iterrows():
            if index.startswith('func/'):
                funcscans.append(index)

        # Then read the TR's from the associated func sidecar files
        tr = []
        for funcscan in funcscans:
            with (bidsses /
                  funcscan).with_suffix('.json').open('r') as json_fid:
                jsondata = json.load(json_fid)
            tr.append(jsondata['RepetitionTime'])

        # Create a heuristic function for phys2bids
        heur_str = ('def heur(physinfo, run=""):\n'
                    '    info = {}\n'
                    f'    if physinfo == "{sourcefile.name}":')
        for key, val in run['bids'].items():
            heur_str = (f'{heur_str}' f'\n        info["{key}"] = "{val}"')
        heur_str = f'{heur_str}\n    return info'

        # Write heuristic function as file in temporary folder
        heur_file = Path(
            tempfile.mkdtemp()) / f'heuristic_sub-{subid}_ses-{sesid}.py'
        heur_file.write_text(heur_str)

        # Run phys2bids
        physiofiles = phys2bids(
            filename=str(sourcefile),
            outdir=str(bidsfolder),
            heur_file=str(heur_file),
            sub=subid,
            ses=sesid,
            chtrig=int(run['meta'].get('TriggerChannel', 0)),
            num_timepoints_expected=run['meta'].get('ExpectedTimepoints',
                                                    None),
            tr=tr,
            pad=run['meta'].get('Pad', 9),
            ch_name=run['meta'].get('ChannelNames', []),
            yml='',
            debug=True,
            quiet=False)

        # Add user-specified meta-data to the newly produced json files (NB: assumes every physio-file comes with a json-file)
        for physiofile in physiofiles:
            jsonfile = Path(physiofile).with_suffix('.json')
            if not jsonfile.is_file():
                LOGGER.error(
                    f"Could not find the expected json sidecar-file: '{jsonfile}'"
                )
                continue
            with jsonfile.open('r') as json_fid:
                jsondata = json.load(json_fid)
            for metakey, metaval in run['meta'].items():
                metaval = datasource.dynamicvalue(metaval,
                                                  cleanup=False,
                                                  runtime=True)
                try:
                    metaval = ast.literal_eval(str(metaval))
                except (ValueError, SyntaxError):
                    pass
                LOGGER.info(f"Adding '{metakey}: {metaval}' to: {jsonfile}")
                if not metaval:
                    metaval = None
                jsondata[metakey] = metaval
            with jsonfile.open('w') as json_fid:
                json.dump(jsondata, json_fid, indent=4)
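
Note: the heur_str built above renders into a small module like the following; the source filename and bids entities here are illustrative, assuming a run with run['bids'] == {'task': 'rest', 'run': '01'}:

# Illustrative rendering of heur_str for a hypothetical run:
def heur(physinfo, run=""):
    info = {}
    if physinfo == "sub-01_task-rest_physio.acq":
        info["task"] = "rest"
        info["run"] = "01"
    return info
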
Example #9
def test_integration_acq(skip_integration, samefreq_full_acq_file):
    """
    Does the integration test for an acq file
    """

    if skip_integration:
        pytest.skip('Skipping integration test')

    test_path, test_filename = split(samefreq_full_acq_file)
    test_chtrig = 3
    conversion_path = join(test_path, 'code', 'conversion')

    phys2bids(filename=test_filename,
              indir=test_path,
              outdir=test_path,
              chtrig=test_chtrig,
              num_timepoints_expected=60,
              tr=1.5)

    # Check that files are generated
    for suffix in ['.json', '.tsv.gz']:
        assert isfile(join(test_path, 'Test_belt_pulse_samefreq' + suffix))

    # Check files in extra are generated
    for suffix in ['.log']:
        assert isfile(
            join(conversion_path, 'Test_belt_pulse_samefreq' + suffix))

    # Read log file (note that this file is not the logger file)
    with open(join(conversion_path,
                   'Test_belt_pulse_samefreq.log')) as log_info:
        log_info = log_info.readlines()

    # Check timepoints expected
    assert check_string(log_info, 'Timepoints expected', '60')
    # Check timepoints found
    assert check_string(log_info, 'Timepoints found', '60')
    # Check sampling frequency
    assert check_string(log_info, 'Sampling Frequency', '10000.0')
    # Check sampling started
    assert check_string(log_info, 'Sampling started', '10.4251')
    # Check start time
    assert check_string(log_info, 'first trigger', 'Time 0', is_num=False)

    # Checks json file
    with open(join(test_path, 'Test_belt_pulse_samefreq.json')) as json_file:
        json_data = json.load(json_file)

    # Compares values in json file with ground truth
    assert math.isclose(json_data['SamplingFrequency'], 10000.0)
    assert math.isclose(json_data['StartTime'], 10.4251)
    assert json_data['Columns'] == [
        'time', 'RESP - RSP100C', 'PULSE - Custom, DA100C',
        'MR TRIGGER - Custom, HLT100C - A 5', 'PPG100C', 'CO2', 'O2'
    ]

    # Remove generated files
    for filename in glob.glob(join(conversion_path, 'phys2bids*')):
        remove(filename)
    for filename in glob.glob(join(test_path, 'Test_belt_pulse_samefreq*')):
        remove(filename)
    shutil.rmtree(conversion_path)