Example #1
0
def write_bids_metadata_file(
    scan_dir,
    extraction_dicts,
    out_file="bids_metadata.json",
    task='',
):
    """Create a sidecar JSON file according to the BIDS standard.

    Parameters
    ----------
    scan_dir : str
        Path to the scan directory containing the acquisition protocol files.
    extraction_dicts : list of dict
        Dictionaries which contain the keys `query_file` (the file, relative
        to `scan_dir`, which to query), `regex` (a regular expression with a
        named group `value`, tested against each row in the file until a
        match is found), and `field_name` (the field name under which to
        record the value in the JSON file).
        Additionally, the keys `type` (a Python class, e.g. `str`, to which
        the value is converted) and `scale` (a float by which the value is
        multiplied before recording in JSON) are supported.
    out_file : str, optional
        Path under which to save the resulting JSON.
    task : str, optional
        String value to assign to the "TaskName" field in the BIDS JSON.
        If this parameter evaluates to false, no "TaskName" is recorded.

    Returns
    -------
    str
        Absolute path of the JSON file which was written.
    """

    import json
    import re
    from os import path
    from samri.pipelines.utils import parse_paravision_date

    out_file = path.abspath(path.expanduser(out_file))
    scan_dir = path.abspath(path.expanduser(scan_dir))
    metadata = {}

    # Extract nice parameters:
    for extraction_dict in extraction_dicts:
        query_file = path.abspath(
            path.join(scan_dir, extraction_dict['query_file']))
        with open(query_file) as search:
            for line in search:
                # Match once per line and reuse the match object.
                m = re.match(extraction_dict['regex'], line)
                if m:
                    value = m.groupdict()['value']
                    # `type` and `scale` are optional keys; skip when absent.
                    try:
                        value = extraction_dict['type'](value)
                    except KeyError:
                        pass
                    try:
                        value = value * extraction_dict['scale']
                    except KeyError:
                        pass
                    metadata[extraction_dict['field_name']] = value
                    break
    # Extract DelayAfterTrigger: the interval between the start of per-scan
    # adjustments (`AdjStatePerScan`) and acquisition start (`acqp`).
    adjustments_start = None
    try:
        query_file = path.abspath(path.join(scan_dir, 'AdjStatePerScan'))
        read_line = False
        with open(query_file) as search:
            for line in search:
                if '##$AdjScanStateTime=( 2 )' in line:
                    # The timestamps are on the line following this marker.
                    read_line = True
                    continue
                if read_line:
                    m = re.match(r'^<(?P<value>.*?)> <.*?>$', line)
                    if m:
                        adjustments_start = parse_paravision_date(
                            m.groupdict()['value'])
                    break
    except IOError:
        # No adjustments file; DelayAfterTrigger cannot be determined.
        pass
    else:
        adjustments_end = None
        query_file = path.abspath(path.join(scan_dir, 'acqp'))
        with open(query_file) as search:
            for line in search:
                m = re.match(r'^##\$ACQ_time=<(?P<value>.*?)>$', line)
                if m:
                    adjustments_end = parse_paravision_date(
                        m.groupdict()['value'])
                    break
        # Record the delay only if both timestamps were actually found.
        if adjustments_start is not None and adjustments_end is not None:
            adjustments_duration = adjustments_end - adjustments_start
            metadata['DelayAfterTrigger'] = \
                adjustments_duration.total_seconds()

    if task:
        metadata['TaskName'] = task

    with open(out_file, 'w') as out_file_writeable:
        json.dump(metadata, out_file_writeable, indent=1)
        out_file_writeable.write(
            "\n"
        )  # `json.dump` does not add a newline at the end; we do it here.

    return out_file
Example #2
0
def write_bids_metadata_file(scan_dir, extraction_dicts,
        out_file="bids_metadata.json",
        task_name=False,
        ):
    """Write a BIDS-conformant sidecar JSON file for a scan.

    Parameters
    ----------
    scan_dir : str
        Scan directory holding the acquisition protocol files.
    extraction_dicts : list of dict
        Each dict provides `query_file` (the file to query, relative to
        `scan_dir`), `regex` (a pattern with a named group `value`, tried
        against every row of the file until one matches), and `field_name`
        (the JSON field under which to store the value). Optional keys:
        `type` (a class to which the value is converted) and `scale` (a
        float by which the value is multiplied before recording).
    out_file : str, optional
        Where to write the resulting JSON.
    task_name : str, optional
        Value assigned to the "TaskName" JSON field; skipped when falsy.
    """

    import json
    import re
    from os import path
    from samri.pipelines.utils import parse_paravision_date

    out_file = path.abspath(path.expanduser(out_file))
    scan_dir = path.abspath(path.expanduser(scan_dir))
    metadata = {}

    # Pull the plain key/value parameters out of the protocol files named
    # by the extraction specifications.
    for spec in extraction_dicts:
        target = path.abspath(path.join(scan_dir, spec['query_file']))
        with open(target) as protocol:
            for row in protocol:
                hit = re.match(spec['regex'], row)
                if not hit:
                    continue
                extracted = hit.group('value')
                try:
                    extracted = spec['type'](extracted)
                except KeyError:
                    pass
                try:
                    extracted = extracted * spec['scale']
                except KeyError:
                    pass
                metadata[spec['field_name']] = extracted
                break
    # Determine DelayAfterTrigger from the adjustment timestamps.
    try:
        adj_file = path.abspath(path.join(scan_dir, 'AdjStatePerScan'))
        timestamp_next = False
        with open(adj_file) as protocol:
            for row in protocol:
                if '##$AdjScanStateTime=( 2 )' in row:
                    # The timestamp sits on the row after this marker.
                    timestamp_next = True
                    continue
                if timestamp_next:
                    stamp = re.match(r'^<(?P<value>.*?)> <.*?>$', row)
                    adjustments_start = parse_paravision_date(
                        stamp.group('value'))
                    break
    except IOError:
        pass
    else:
        acqp_file = path.abspath(path.join(scan_dir, 'acqp'))
        with open(acqp_file) as protocol:
            for row in protocol:
                stamp = re.match(r'^##\$ACQ_time=<(?P<value>.*?)>$', row)
                if stamp:
                    adjustments_end = parse_paravision_date(
                        stamp.group('value'))
                    break
        delay = adjustments_end - adjustments_start
        metadata['DelayAfterTrigger'] = delay.total_seconds()

    if task_name:
        metadata['TaskName'] = task_name

    with open(out_file, 'w') as sink:
        json.dump(metadata, sink, indent=1)
        # `json.dump` does not terminate its output with a newline.
        sink.write("\n")

    return out_file
Example #3
0
def write_bids_metadata_file(
    scan_dir,
    extraction_dicts,
    out_file="bids_metadata.json",
):
    """Create a sidecar JSON file according to the BIDS standard.

    Parameters
    ----------
    scan_dir : str
        Path to the scan directory containing the acquisition protocol files.
    extraction_dicts : list of dict
        Dictionaries which contain the keys `query_file` (the file, relative
        to `scan_dir`, which to query), `regex` (a regular expression with a
        named group `value`, tested against each row in the file until a
        match is found), and `field_name` (the field name under which to
        record the value in the JSON file).
        Additionally, the keys `type` (a Python class, e.g. `str`, to which
        the value is converted) and `scale` (a float by which the value is
        multiplied before recording in JSON) are supported.
    out_file : str, optional
        Path under which to save the resulting JSON.

    Returns
    -------
    str
        Absolute path of the JSON file which was written.
    """

    import json
    import re
    from os import path
    from samri.pipelines.utils import parse_paravision_date

    out_file = path.abspath(path.expanduser(out_file))
    scan_dir = path.abspath(path.expanduser(scan_dir))
    metadata = {}

    # Extract nice parameters:
    for extraction_dict in extraction_dicts:
        query_file = path.abspath(
            path.join(scan_dir, extraction_dict['query_file']))
        with open(query_file) as search:
            for line in search:
                # Match once per line and reuse the match object.
                m = re.match(extraction_dict['regex'], line)
                if m:
                    value = m.groupdict()['value']
                    # `type` and `scale` are optional keys; skip when absent.
                    try:
                        value = extraction_dict['type'](value)
                    except KeyError:
                        pass
                    try:
                        value = value * extraction_dict['scale']
                    except KeyError:
                        pass
                    metadata[extraction_dict['field_name']] = value
                    break
    # Extract DelayAfterTrigger: the interval between the start of per-scan
    # adjustments (`AdjStatePerScan`) and acquisition start (`acqp`).
    adjustments_start = None
    try:
        query_file = path.abspath(path.join(scan_dir, 'AdjStatePerScan'))
        read_line = False
        with open(query_file) as search:
            for line in search:
                if '##$AdjScanStateTime=( 2 )' in line:
                    # The timestamps are on the line following this marker.
                    read_line = True
                    continue
                if read_line:
                    m = re.match(r'^<(?P<value>.*?)> <.*?>$', line)
                    if m:
                        adjustments_start = parse_paravision_date(
                            m.groupdict()['value'])
                    break
    except IOError:
        # No adjustments file; DelayAfterTrigger cannot be determined.
        pass
    else:
        adjustments_end = None
        query_file = path.abspath(path.join(scan_dir, 'acqp'))
        with open(query_file) as search:
            for line in search:
                m = re.match(r'^##\$ACQ_time=<(?P<value>.*?)>$', line)
                if m:
                    adjustments_end = parse_paravision_date(
                        m.groupdict()['value'])
                    break
        # Record the delay only if both timestamps were actually found.
        if adjustments_start is not None and adjustments_end is not None:
            adjustments_duration = adjustments_end - adjustments_start
            metadata['DelayAfterTrigger'] = \
                adjustments_duration.total_seconds()

    with open(out_file, 'w') as out_file_writeable:
        json.dump(metadata, out_file_writeable, indent=1)
        out_file_writeable.write(
            "\n"
        )  # `json.dump` does not add a newline at the end; we do it here.

    return out_file