def save_physio(output_filename_base, meta_data, physio_data):
    """Write physio recordings as a BIDS-style <base>.tsv.gz / <base>.json pair.

    The json sidecar lists the column names plus fixed StartTime and
    SamplingFrequency values; the tsv.gz carries the data without a header row.
    ``meta_data`` is accepted for interface compatibility but is not written.
    """
    column_names = physio_data.columns.tolist()
    sidecar = {
        "Columns": column_names,
        "StartTime": 0,
        # NOTE(review): sampling rate is hard-coded — confirm against scanner setup
        "SamplingFrequency": 496,
    }
    add_info_to_json(output_filename_base + ".json", sidecar, create_new=True)
    to_tsv(physio_data, output_filename_base + ".tsv.gz", header=False)
def save_physio(output_filename_base, meta_data, physio_data):
    """Write physio data to <base>.tsv and a <base>.json sidecar.

    The sidecar stores the tsv column names under "header" and the raw
    ``meta_data`` under "meta_data"; the tsv is written without a header row.
    """
    sidecar = {
        "header": physio_data.columns.tolist(),
        "meta_data": meta_data,
    }
    add_info_to_json(output_filename_base + ".json", sidecar, create_new=True)
    to_tsv(physio_data, output_filename_base + ".tsv", header=False)
def add_additional_bids_parameters_from_par(par_file, bids_file, parameters=None):
    """Copy selected PAR header fields into a BIDS json sidecar.

    Parameters
    ----------
    par_file : str
        Path to the Philips PAR file to read.
    bids_file : str
        Path to the BIDS json sidecar to update.
    parameters : dict or None
        Maps PAR header keys to the json keys they are stored under.
        Defaults to {"angulation": "Angulation"}.

    Fix: the original used a mutable dict literal as the default argument,
    which is shared across calls; replaced with the None-sentinel idiom.
    """
    if parameters is None:
        parameters = {"angulation": "Angulation"}
    header_params = {}
    for param, param_label in parameters.items():
        header_params[param_label] = get_par_info(par_file, param)[param]
    add_info_to_json(bids_file, header_params)
def add_total_readout_time_from_par(par_file, bids_file):
    """Derive echo spacing and total readout time from a PAR header and
    store them in the BIDS json sidecar.

    Nothing is written when epi_factor == 1: without an EPI readout these
    quantities are not meaningful.
    """
    general_info, _ = read_par(par_file)
    water_fat_shift = general_info["water_fat_shift"]
    epi_factor = general_info["epi_factor"]
    if epi_factor == 1:  # no EPI --> trt not meaningful
        return
    # echo spacing in sec (Philips water-fat-shift based formula)
    echo_spacing = water_fat_shift / (434.215 * (epi_factor + 1))
    total_readout_time = echo_spacing * (epi_factor - 1)  # in sec
    add_info_to_json(bids_file, {"TotalReadoutTime": total_readout_time,
                                 "EffectiveEchoSpacing": echo_spacing})
def dwi_treat_bvecs(abs_par_file, bids_file, bvecs_from_scanner_file, nii_output_dir, par_file):
    """Replace the dcm2niix bvec file with scanner bvecs rotated for
    angulation (converted from RAS to LAS) and record the angulation in
    the json sidecar.
    """
    add_additional_bids_parameters_from_par(abs_par_file, bids_file,
                                            {"angulation": "Angulation"})
    # drop the dcm2niix-generated bvecs; write our own rotated LAS bvecs in their place
    dcm2niix_bvecs_file = glob(os.path.join(nii_output_dir, "*.bvec"))[0]
    os.remove(dcm2niix_bvecs_file)
    scanner_bvecs = np.genfromtxt(bvecs_from_scanner_file)
    bvecs_ras = rotate_bvecs(scanner_bvecs, par_file)
    bvecs_las = bvecs_ras.copy()
    bvecs_las[0] *= -1  # RAS -> LAS: flip the x axis
    np.savetxt(dcm2niix_bvecs_file, bvecs_las.T, fmt="%.5f")
    add_info_to_json(bids_file, {"BvecsInfo": "rotated for angulation and in LAS space"})
def run_dcm2niix(bids_name, bids_modality, bvecs_from_scanner_file, mapping_file, nii_file,
                 nii_output_dir, out_filename, par_file, public_output, task):
    '''
    Converts one par/rec pair to nii.gz.
    Adds scan duration and dcm2niix & docker container version to bids file.

    Returns (bids_file, converter_results).

    Fix: the dwi condition used the bitwise operator `&` on booleans;
    replaced with the logical `and`.
    '''
    abs_par_file = os.path.abspath(par_file)
    abs_rec_file = os.path.splitext(abs_par_file)[0] + ".rec"
    assert os.path.exists(abs_rec_file), "REC file does not exist %s" % abs_rec_file

    # run converter
    converter = Dcm2niix()
    converter.inputs.source_names = [abs_par_file]
    converter.inputs.bids_format = True
    converter.inputs.compress = 'i'
    converter.inputs.has_private = True
    converter.inputs.out_filename = out_filename
    converter.inputs.output_dir = nii_output_dir
    print("XXXXXXX running dcm2niix command")
    print(converter.cmdline)
    converter_results = converter.run()
    bids_file = converter_results.outputs.bids

    # add additional information to json
    ## scan duration
    add_additional_bids_parameters_from_par(abs_par_file, bids_file,
                                            {"scan_duration": "ScanDurationSec",
                                             "technique": "PulseSequenceType",
                                             "protocol_name": "PulseSequenceDetails"})
    add_flip_angle_from_par(abs_par_file, bids_file)
    add_total_readout_time_from_par(abs_par_file, bids_file)
    ## lhab_pipelines
    add_info_to_json(bids_file, {"LhabPipelinesVersion": lhab_pipelines.__version__})
    ## task
    if task:
        add_info_to_json(bids_file, {"TaskName": task})
    ## time
    add_info_to_json(bids_file, {"ConversionTimestamp": str(dt.datetime.now())})

    if not public_output:
        # write par 2 nii mapping file only for private use
        with open(mapping_file, "a") as fi:
            fi.write("%s %s\n" % (abs_par_file, nii_file))
    else:
        # remove info file generated by dcm2niix
        os.remove(os.path.join(nii_output_dir, out_filename + '.txt'))

    # rotate bvecs and add angulation to json for dwi
    if (bids_name == "dwi") and (bids_modality != "fmap"):
        dwi_treat_bvecs(abs_par_file, bids_file, bvecs_from_scanner_file,
                        nii_output_dir, par_file)
        # remove _dwi_ADC.nii.gz file created by dcm2niix
        adc_file = glob(os.path.join(nii_output_dir, "*_dwi_ADC.nii.gz"))[0]
        os.remove(adc_file)
    return bids_file, converter_results
def run_conversion(raw_dir, output_base_dir, analysis_level, info_out_dir, participant_label,
                   session_label, public_output, use_new_ids, ds_version, info_list,
                   dataset_description, new_id_lut_file=None, bvecs_from_scanner_file=None,
                   tp6_raw_lut=None, dry_run=False, demo_file=None, session_duration_min=120):
    """Top-level driver for the PAR/REC -> BIDS conversion.

    analysis_level == "participant": converts each subject in participant_label.
    analysis_level == "group": writes dataset_description.json, exports demos,
    checks the par->nii mapping for duplicates, collects unconverted files,
    runs the BIDS validator and dumps the BIDS layout.

    Fix: the duplicate check used `assert ..., print(...)` — the print ran
    unconditionally on every group run and the assertion message was None.
    Replaced with a proper message string.
    """
    # privacy settings: only a fully anonymized export drops the _PRIVATE suffix
    private_str = "_PRIVATE" if not (public_output and use_new_ids) else ""
    output_dir = Path(output_base_dir) / f"LHAB_{ds_version}{private_str}" / "sourcedata"
    metainfo_dir = Path(output_base_dir) / f"LHAB_{ds_version}{private_str}" / "metainfo"
    metainfo_dir.mkdir(exist_ok=True, parents=True)
    output_dir.mkdir(parents=True, exist_ok=True)

    info_out_dir = Path(info_out_dir) / "PRIVATE"
    info_out_dir.mkdir(parents=True, exist_ok=True)

    if analysis_level == "participant":
        for old_subject_id in participant_label:
            submit_single_subject(old_subject_id,
                                  session_label,
                                  raw_dir,
                                  output_dir,
                                  info_list,
                                  info_out_dir,
                                  bvecs_from_scanner_file=bvecs_from_scanner_file,
                                  public_output=public_output,
                                  use_new_ids=use_new_ids,
                                  new_id_lut_file=new_id_lut_file,
                                  tp6_raw_lut=tp6_raw_lut,
                                  dry_run=dry_run,
                                  session_duration_min=session_duration_min)
        print("\n\n\n\nDONE.\nConverted %d subjects." % len(participant_label))
        print(participant_label)

    elif analysis_level == "group":
        # recreate dataset_description.json from scratch
        ds_desc_file = output_dir / "dataset_description.json"
        if ds_desc_file.is_file():
            ds_desc_file.unlink()
        dataset_description["DataSetVersion"] = ds_version
        add_info_to_json(ds_desc_file, dataset_description, create_new=True)

        # Demos
        print("Exporting demos...")
        pwd = getpass.getpass("Enter the Password for dob file:")
        calc_demos(output_dir, info_out_dir, demo_file, pwd,
                   new_id_lut_file=new_id_lut_file)

        # check for duplicates in the par->nii mapping
        mappings = concat_tsvs(info_out_dir / "parrec_mapping_PRIVATE")
        dups = mappings[mappings.duplicated(subset="from")]
        assert len(dups) == 0, "duplicates found %s" % dups

        # concat notconverted files
        unconv_df = concat_tsvs(info_out_dir / "unconverted_files")
        unconv_df.to_csv(info_out_dir / "unconverted_files.tsv", sep="\t", index=False)

        print("X" * 20 + "\nRuning BIDS validator")
        os.system(f"bids-validator {str(output_dir)}")

        print("\n Get BIDS layout")
        layout = BIDSLayout(output_dir)
        layout.to_df().to_csv(metainfo_dir / "layout.csv", index=False)
    else:
        raise RuntimeError(f"Analysis level unknown {analysis_level}")
def run_dcm2niix(bids_name, bids_modality, bvecs_from_scanner_file, info_out_dir, nii_file,
                 nii_output_dir, out_filename, par_file, task):
    '''
    Converts one par/rec pair to nii.gz.
    Adds scan duration and dcm2niix & docker container version to bids file.

    Returns (bids_file, converter_results, mapping) where mapping is
    [abs_par_file, nii_file].

    Fix: the dwi condition used the bitwise operator `&` on booleans;
    replaced with the logical `and`.
    '''
    abs_par_file = os.path.abspath(par_file)
    abs_rec_file = os.path.splitext(abs_par_file)[0] + ".rec"
    assert os.path.exists(abs_rec_file), "REC file does not exist %s" % abs_rec_file

    # run converter
    converter = Dcm2niix_par()
    converter.inputs.source_names = [abs_par_file]
    converter.inputs.bids_format = True
    converter.inputs.compress = 'i'
    converter.inputs.has_private = True
    converter.inputs.out_filename = out_filename
    converter.inputs.output_dir = nii_output_dir
    print("XXXXXXX running dcm2niix command")
    print(converter.cmdline)
    converter_results = converter.run()

    # converter may report several sidecars; exactly one json is expected
    bids_file = [s for s in converter_results.outputs.bids if s.endswith(".json")]
    assert len(bids_file) == 1, bids_file
    bids_file = bids_file[0]

    # add additional information to json
    ## scan duration
    add_additional_bids_parameters_from_par(abs_par_file, bids_file,
                                            {"scan_duration": "ScanDurationSec",
                                             "technique": "PulseSequenceType",
                                             "protocol_name": "PulseSequenceDetails"})
    add_flip_angle_from_par(abs_par_file, bids_file)
    add_total_readout_time_from_par(abs_par_file, bids_file)
    ## lhab_pipelines
    add_info_to_json(bids_file, {"LhabPipelinesVersion": lhab_pipelines.__version__})
    ## task
    if task:
        add_info_to_json(bids_file, {"TaskName": task})
    ## time
    add_info_to_json(bids_file, {"ConversionTimestamp": str(dt.datetime.now())})

    # move the dcm2niix conversion info file into the private info dir
    dcm_conversion_info_dir = info_out_dir / "dcm2niix_conversion_PRIVATE"
    dcm_conversion_info_dir.mkdir(parents=True, exist_ok=True)
    dcm_conversion_info_file = dcm_conversion_info_dir / f"{out_filename}.txt"
    orig_file = Path(nii_output_dir) / f"{out_filename}.txt"
    shutil.copyfile(str(orig_file), str(dcm_conversion_info_file))
    orig_file.unlink()

    # rotate bvecs and add angulation to json for dwi
    if (bids_name == "dwi") and (bids_modality != "fmap"):
        dwi_treat_bvecs(abs_par_file, bids_file, bvecs_from_scanner_file,
                        nii_output_dir, par_file)
        # remove _dwi_ADC.nii.gz file created by dcm2niix
        adc_file = glob(os.path.join(nii_output_dir, "*_dwi_ADC.nii.gz"))[0]
        os.remove(adc_file)

    mapping = [abs_par_file, nii_file]
    return bids_file, converter_results, mapping
def convert_modality(old_subject_id, old_ses_id, output_dir, info_out_dir, bids_name,
                     bids_modality, search_str, bvecs_from_scanner_file=None, public_sub_id=None,
                     public_output=True, reorient2std=True, task=None, direction=None, acq=None,
                     only_use_last=False, deface=False, physio=False, add_info=None,
                     dry_run=False, post_glob_filter=None):
    """
    runs conversion for one subject and one modality
    public_output: if True: strips all info about original subject_id, file, date

    Returns (par_file_list, physio_in_file_list, mapping).

    Fix: the original used a mutable dict literal `add_info={}` as default
    argument; replaced with the None-sentinel idiom (same behavior).
    """
    if add_info is None:
        add_info = {}
    if (public_output and bids_modality == "anat" and not deface):
        raise Exception("Public output requested, but anatomical images not defaced. exit. %s %s %s"
                        % (old_subject_id, old_ses_id, bids_name))

    new_ses_id = get_clean_ses_id(old_ses_id)
    bids_ses = "ses-" + new_ses_id
    if public_sub_id:
        bids_sub = "sub-" + public_sub_id
    else:
        bids_sub = "sub-" + get_clean_subject_id(old_subject_id)

    if isinstance(search_str, str):
        search_str = [search_str]
    par_file_list = []
    for s_str in search_str:
        par_file_list += sorted(glob("*" + s_str + "*.par"))
    # remove double entries
    par_file_list = list(set(par_file_list))
    if post_glob_filter:
        par_file_list = list(filter(post_glob_filter, par_file_list))

    physio_in_file_list = []
    mapping = []
    if par_file_list:
        sub_output_dir = os.path.join(output_dir, bids_sub)
        nii_output_dir = os.path.join(sub_output_dir, bids_ses, bids_modality)
        if not os.path.exists(nii_output_dir):
            os.makedirs(nii_output_dir)
        if only_use_last:
            par_file_list = par_file_list[-1:]

        # sort files by acquision number
        par_acq_nr = np.array([get_par_info(par_file, "acquisition_nr")["acquisition_nr"]
                               for par_file in par_file_list])
        sort_index = np.argsort(par_acq_nr)

        for run_id, par_file in enumerate(np.array(par_file_list)[sort_index].tolist(), 1):
            # put together bids file name
            # bids run
            bids_run = "run-" + str(run_id)
            out_components = [bids_sub, bids_ses]
            # bids acq
            if acq:
                out_components += ["acq-%s" % acq]
            # bids task
            if task:
                out_components += ["task-%s" % task]
            # bids acq. direction
            if direction:
                out_components += ["dir-%s" % direction]
            out_components += [bids_run, bids_name]
            out_filename = "_".join(out_components)
            out_filename_wo_name = "_".join(out_components[:-1])
            nii_file = os.path.join(nii_output_dir, out_filename + ".nii.gz")

            if not dry_run:
                assert not os.path.exists(nii_file), "file exists. STOP. %s" % nii_file
                bids_file, converter_results, mapping_ = run_dcm2niix(
                    bids_name, bids_modality, bvecs_from_scanner_file, info_out_dir, nii_file,
                    nii_output_dir, out_filename, par_file, task)
                mapping.append(mapping_)
                if reorient2std:
                    reorient = Reorient2Std()
                    reorient.inputs.in_file = converter_results.outputs.converted_files
                    reorient.inputs.out_file = converter_results.outputs.converted_files
                    reorient_results = reorient.run()
                if deface:
                    deface_data(nii_file, nii_output_dir, out_filename)
                add_info_to_json(bids_file, {"Defaced": deface})
                add_info_to_json(bids_file, add_info)
                # finally as a sanity check, check that converted nii exists
                assert os.path.exists(nii_file), "Something went wrong" \
                                                 "converted file does not exist. STOP. %s" % nii_file

            physio_in_file_list = []
            if physio:
                # convert physiological data
                physio_search_str_list = [
                    ".".join(par_file.split(".")[:-1]) + "_*phys*.log",
                    "SCANPHYSLOG_" + ".".join(par_file.split(".")[:-1]) + ".log"
                ]
                physio_in_file_list = []
                for physio_search_str in physio_search_str_list:
                    physio_in_file_list += glob(physio_search_str)
                assert len(physio_in_file_list) < 2, \
                    "more than 1 phyio file found for %s" % physio_search_str
                if physio_in_file_list and not dry_run:
                    physio_out_file_base = os.path.join(nii_output_dir,
                                                        out_filename_wo_name + "_physio")
                    meta_data, physio_data = parse_physio(physio_in_file_list[0])
                    save_physio(physio_out_file_base, meta_data, physio_data)
    return par_file_list, physio_in_file_list, mapping
def convert_modality(old_subject_id, old_ses_id, output_dir, bids_name, bids_modality, search_str,
                     bvecs_from_scanner_file=None, public_sub_id=None, public_output=True,
                     reorient2std=True, task=None, direction=None, acq=None, only_use_last=False,
                     deface=False, physio=False, add_info=None):
    """
    runs conversion for one subject and one modality
    public_output: if True: strips all info about original subject_id, file, date

    Fix: the original used a mutable dict literal `add_info={}` as default
    argument; replaced with the None-sentinel idiom (same behavior).
    """
    if add_info is None:
        add_info = {}
    if (public_output and bids_modality == "anat" and not deface):
        raise Exception("Public output requested, but anatomical images not defaced. exit. %s %s %s" % (
            old_subject_id, old_ses_id, bids_name))

    new_ses_id = get_clean_ses_id(old_ses_id)
    bids_ses = "ses-" + new_ses_id
    if public_sub_id:
        bids_sub = "sub-" + public_sub_id
    else:
        bids_sub = "sub-" + get_clean_subject_id(old_subject_id)
    mapping_file = os.path.join(output_dir, bids_sub, "par2nii_mapping.txt")

    par_file_list = glob("*" + search_str + "*.par")
    if par_file_list:
        sub_output_dir = os.path.join(output_dir, bids_sub)
        nii_output_dir = os.path.join(sub_output_dir, bids_ses, bids_modality)
        if not os.path.exists(nii_output_dir):
            os.makedirs(nii_output_dir)
        if only_use_last:
            par_file_list = par_file_list[-1:]

        for run_id, par_file in enumerate(par_file_list, 1):
            # put together bids file name
            # bids run
            bids_run = "run-" + str(run_id)
            out_components = [bids_sub, bids_ses]
            # bids acq
            if acq:
                out_components += ["acq-%s" % acq]
            # bids task
            if task:
                out_components += ["task-%s" % task]
            # bids acq. direction
            if direction:
                out_components += ["dir-%s" % direction]
            out_components += [bids_run, bids_name]
            out_filename = "_".join(out_components)
            nii_file = os.path.join(nii_output_dir, out_filename + ".nii.gz")

            assert not os.path.exists(nii_file), "file exists. STOP. %s" % nii_file
            bids_file, converter_results = run_dcm2niix(bids_name, bids_modality,
                                                        bvecs_from_scanner_file, mapping_file,
                                                        nii_file, nii_output_dir, out_filename,
                                                        par_file, public_output, task)
            if reorient2std:
                reorient = Reorient2Std()
                reorient.inputs.in_file = converter_results.outputs.converted_files
                reorient.inputs.out_file = converter_results.outputs.converted_files
                reorient_results = reorient.run()
            if deface:
                deface_data(nii_file, nii_output_dir, out_filename)
            add_info_to_json(bids_file, {"Defaced": deface})
            add_info_to_json(bids_file, add_info)
            update_sub_scans_file(output_dir, bids_sub, bids_ses, bids_modality, out_filename,
                                  par_file, public_output)
            # finally as a sanity check, check that converted nii exists
            assert os.path.exists(nii_file), "Something went wrong" \
                                             "converted file does not exist. STOP. %s" % nii_file

            if physio:
                # convert physiological data
                physio_search_str = ".".join(par_file.split(".")[:-1]) + "_physio.log"
                physio_in_file_list = glob(physio_search_str)
                assert len(physio_in_file_list) < 2, \
                    "more than 1 phyio file found for %s" % physio_search_str
                if physio_in_file_list:
                    physio_out_file_base = os.path.join(nii_output_dir, out_filename + "_physio")
                    meta_data, physio_data = parse_physio(physio_in_file_list[0])
                    save_physio(physio_out_file_base, meta_data, physio_data)
def add_flip_angle_from_par(par_file, bids_file):
    """Read the flip angle of the first image definition in the PAR file
    and store it under "FlipAngle" in the BIDS json sidecar."""
    _, image_defs = read_par(par_file)
    flip_angle = image_defs["image_flip_angle"][0]
    add_info_to_json(bids_file, {"FlipAngle": flip_angle})