logging.basicConfig(
    filename=log_fpath,
    level=logging.DEBUG,
    format='LOG ENTRY %(asctime)s - %(levelname)s \n%(message)s \nEND LOG ENTRY\n'
)

settings_str = "Bids directory: {}\n".format(settings.bids_dir) + \
               "Output directory: {}\n".format(settings.output_dir) + \
               "Log directory: {}\n".format(settings.log_dir) + \
               "No. of Threads: {}\n".format(settings.nthreads) + \
               "Overwrite: {}\n".format(settings.overwrite) + \
               "Workflow: {}".format(settings.workflow)

log_output(settings_str, logger=logging)
log_output("Beginning analysis...", logger=logging)

# If the analysis output directory exists, verify that it's either empty or that overwrite
# is allowed. Otherwise create the directory.
if os.path.isdir(settings.output_dir):
    analysis_files = glob(os.path.join(settings.output_dir, '*'))
    if analysis_files:
        if not settings.overwrite:
            raise DuplicateFile("The output directory is not empty, and overwrite is set to False. Aborting...")
        else:
def main():
    env = retro.make(game=params['ENVIRONMENT'], use_restricted_actions=retro.Actions.DISCRETE)
    action_space = env.action_space.n if params['USE_FULL_ACTION_SPACE'] else params['SMALL_ACTION_SPACE']
    env.action_space = spaces.Discrete(action_space)

    # Epsilon Data
    epsilon = params['EPSILON']
    epsilon_gamma = params['EPSILON_GAMMA']
    epsilon_min = params['EPSILON_MIN']

    # Epoch Length Data
    epochs = params['EPOCHS']
    epoch_length = params['EPOCH_MAX_LENGTH']
    use_time_cutoff = params['USE_TIME_CUTOFF']

    # Input Formatting Data
    img_width = params['IMG_WIDTH']
    img_height = params['IMG_HEIGHT']
    channels = 1 if params['GRAYSCALE'] else 3

    # Experience Replay Data
    replay_iterations = params['REPLAY_ITERATIONS']
    replay_sample_size = params['REPLAY_SAMPLE_SIZE']
    replay_memory_size = params['REPLAY_MEMORY_SIZE']
    replay_alpha = params['REPLAY_ALPHA']
    replay_beta = params['REPLAY_BETA']

    # Q-Learning Data
    q_learning_gamma = params['Q_LEARNING_GAMMA']
    frames_since_score_limit = params['FRAMES_SINCE_SCORE_LIMIT']

    # Network initialization; resume from saved checkpoints when available
    model = GalagaAgent(action_space, img_width, img_height, channels)
    target = GalagaAgent(action_space, img_width, img_height, channels)
    target.set_weights(model.get_weights())
    try:
        model.load_weights('m_weights.h5')
        target.load_weights('t_weights.h5')
    except (IOError, OSError):
        # No checkpoint files yet (assumes load_weights raises OSError on a missing file);
        # start training from freshly initialized weights
        pass

    # Logging Initialization
    logpath = log_create()
    log_params(logpath, model.get_summary())

    memory = ReplayMemory(replay_memory_size, params['REPLAY_EPSILON'])

    # Progress Tracking Data
    score_window = deque(maxlen=epochs)
    frame_count = 0

    # Training loop
    for epoch in range(epochs):
        state = env.reset()
        done = False
        time_since_score_up = 0
        last_score = 0
        time = 0
        reward_window = deque(maxlen=epoch_length)

        # Play loop
        while not done:
            state = preprocess(state, img_width, img_height, channels)

            # Epsilon-greedy action selection
            chance = np.random.random()
            if chance > epsilon:
                action, model_Q = model.get_action(state)
            else:
                action, model_Q = map_actions(np.random.randint(0, action_space)), None

            next_state, reward, done, info = env.step(action)

            # Reward shaping: penalize long stretches without scoring
            if info['score'] == last_score:
                time_since_score_up += 1
            else:
                time_since_score_up = 0

            if time_since_score_up >= frames_since_score_limit:
                reward -= 10
            if reward > 0:  # Bound reward to [-10, 1]
                reward = 1

            reward_window.append(reward)
            last_score = info['score']

            # Memory Replay
            pp_next = preprocess(next_state, img_width, img_height, channels)
            experience = (state, pp_next, int(action / 3), reward, done)
            memory.remember(experience)

            state = next_state

            if use_time_cutoff and time > epoch_length:
                break

            if "--play" in sys.argv:
                env.render()

            time += 1
            frame_count += 1

        # Decay epsilon toward its floor
        epsilon = epsilon * epsilon_gamma if epsilon > epsilon_min else epsilon_min

        score_window.append(info['score'])
        mean_score = np.mean(score_window)

        output = "\r Episode: %d/%d, Epsilon: %f, Mean Score: %d, Mean Reward: %f" % (
            epoch + 1, epochs, epsilon, mean_score, np.mean(reward_window))

        # Periodically sync the target network with the online network
        # (the condition must test the loop variable `epoch`, not the constant `epochs`)
        if epoch % params['TARGET_UPDATE_EVERY'] == 0:
            target.set_weights(model.get_weights())

        log_output(logpath, output, "Total frames seen: %d" % frame_count)

        # Linearly anneal the prioritized-replay beta from its initial value to 1 over training
        replay_beta = min(1.0, params['REPLAY_BETA'] +
                          (1.0 - params['REPLAY_BETA']) * (epoch + 1) / epochs)

        memory.replay(model, target, replay_iterations, replay_sample_size,
                      q_learning_gamma, replay_alpha, replay_beta)

        model.save_weights('m_weights.h5')
        target.save_weights('t_weights.h5')

    log_output(logpath, "Total Frames Seen: %d" % frame_count)
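
if __name__ == "__main__":
    # Entry-point guard: a minimal sketch, assuming this module is meant to be run directly
    # (the original excerpt does not show one). Passing --play on the command line makes the
    # training loop above render frames.
    main()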
def dcm_to_nifti(dcm_dir, out_fname, out_dir, conversion_tool, logger=None, bids_meta=False, semaphore=None):

    if conversion_tool == 'dcm2niix':

        dcm2niix_workdir = dcm_dir

        if bids_meta:
            cmd = ["dcm2niix", "-z", "y", "-b", "y", "-f", out_fname, dcm_dir]
        else:
            cmd = ["dcm2niix", "-z", "y", "-f", out_fname, dcm_dir]

        try:
            result = check_output(cmd, stderr=STDOUT, cwd=dcm2niix_workdir, universal_newlines=True)

            # The following line is a hack to recover the actual filename used by the dcm2niix utility.
            # When converting B0 dcm files, files that specify which coil they used, or files that contain
            # phase information, the utility appends prefixes to the filename it saves instead of using the
            # specified output filename. There is no option to turn this off (and the author seemed
            # unwilling to add one), so the filename is parsed out of the utility's output instead.
            # This may break on future updates of dcm2niix.
            actual_fname = \
                [s for s in ([s for s in str(result).split('\n') if "Convert" in s][0].split(" "))
                 if s[0] == '/'][0].split("/")[-1]

            # Move the nifti file and json bids file to the anat folder
            shutil.move(os.path.join(dcm_dir, "{}.nii.gz".format(actual_fname)),
                        os.path.join(out_dir, "{}.nii.gz".format(out_fname)))
            shutil.move(os.path.join(dcm_dir, "{}.json".format(actual_fname)),
                        os.path.join(out_dir, "{}.json".format(out_fname)))

            dcm_file = [f for f in os.listdir(dcm_dir) if ".dcm" in f][0]

            log_str = LOG_MESSAGES['success_converted'].format(os.path.join(dcm_dir, dcm_file), out_fname,
                                                               " ".join(cmd), 0)
            if result:
                log_str += LOG_MESSAGES['output'].format(result)

            log_output(log_str, logger=logger, semaphore=semaphore)

            return ("/".join(dcm2niix_workdir.split("/")[-3:]),
                    os.path.join("/".join(out_dir.split("/")[-4:]), out_fname + ".nii.gz"),
                    True)

        except CalledProcessError as e:
            log_str = LOG_MESSAGES['dcm2niix_error'].format(dcm_dir, " ".join(cmd), e.returncode)
            if e.output:
                log_str += LOG_MESSAGES['output'].format(e.output)

            log_output(log_str, level="ERROR", logger=logger, semaphore=semaphore)

            return ("/".join(dcm2niix_workdir.split("/")[-3:]),
                    os.path.join("/".join(out_dir.split("/")[-4:]), out_fname + ".nii.gz"),
                    False)

        finally:
            # Clean up temporary files
            tmp_files = glob(os.path.join(dcm2niix_workdir, "*.nii.gz"))
            tmp_files.extend(glob(os.path.join(dcm2niix_workdir, "*.json")))
            if tmp_files:
                list(map(os.remove, tmp_files))

    elif conversion_tool == 'dimon':

        dimon_workdir = dcm_dir

        # TODO: Implement generation of BIDS metadata files when using Dimon for conversion of dcm files
        cmd = [
            "Dimon",
            "-infile_pattern", os.path.join(dcm_dir, "*.dcm"),
            "-gert_create_dataset",
            "-gert_quit_on_err",
            "-gert_to3d_prefix", "{}.nii.gz".format(out_fname)
        ]

        dimon_env = os.environ.copy()
        dimon_env['AFNI_TO3D_OUTLIERS'] = 'No'

        try:
            result = check_output(cmd, stderr=STDOUT, env=dimon_env, cwd=dimon_workdir, universal_newlines=True)

            # Check the contents of stdout for the -quit_on_err flag, because to3d returns a success code
            # even when it terminates because that flag was thrown
            if "to3d kept from going into interactive mode by option -quit_on_err" in result:
                log_str = LOG_MESSAGES['dimon_error'].format(dcm_dir, " ".join(cmd), 0)
                if result:
                    log_str += LOG_MESSAGES['output'].format(result)

                log_output(log_str, level="ERROR", logger=logger, semaphore=semaphore)

                return ("/".join(dimon_workdir.split("/")[-3:]),
                        os.path.join("/".join(out_dir.split("/")[-4:]), out_fname + ".nii.gz"),
                        False)

            shutil.move(os.path.join(dimon_workdir, "{}.nii.gz".format(out_fname)),
                        os.path.join(out_dir, "{}.nii.gz".format(out_fname)))

            dcm_file = [f for f in os.listdir(dcm_dir) if ".dcm" in f][0]

            log_str = LOG_MESSAGES['success_converted'].format(os.path.join(dcm_dir, dcm_file), out_fname,
                                                               " ".join(cmd), 0)
            if result:
                log_str += LOG_MESSAGES['output'].format(result)

            log_output(log_str, logger=logger, semaphore=semaphore)

            return ("/".join(dimon_workdir.split("/")[-3:]),
                    os.path.join("/".join(out_dir.split("/")[-4:]), out_fname + ".nii.gz"),
                    True)

        except CalledProcessError as e:
            log_str = LOG_MESSAGES['dimon_error'].format(dcm_dir, " ".join(cmd), e.returncode)
            if e.output:
                log_str += LOG_MESSAGES['output'].format(e.output)

            log_output(log_str, level="ERROR", logger=logger, semaphore=semaphore)

            return ("/".join(dimon_workdir.split("/")[-3:]),
                    os.path.join("/".join(out_dir.split("/")[-4:]), out_fname + ".nii.gz"),
                    False)

        finally:
            # Clean up temporary files
            tmp_files = glob(os.path.join(dimon_workdir, "GERT_Reco_dicom*"))
            tmp_files.extend(glob(os.path.join(dimon_workdir, "dimon.files.run.*")))
            if tmp_files:
                list(map(os.remove, tmp_files))

    else:
        raise NiftyConversionFailure("Tool Error: {} is not a supported conversion tool. "
                                     "Please select 'dcm2niix' or 'dimon'.".format(conversion_tool))
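
# Illustrative call (a sketch, not from the original source): the paths below are hypothetical,
# and dcm2niix must be on the PATH. The function returns a truncated series path, the relative
# BIDS path of the output file, and a success flag.
#
#     series_dir, bids_fpath, ok = dcm_to_nifti("/data/oxygen/subj-01/ses-01/mr_0004",
#                                               "sub-0001_ses-0001_task-fmri_run-0001_bold",
#                                               "/data/bids/sub-0001/ses-0001/func",
#                                               conversion_tool="dcm2niix", bids_meta=True)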
def convert_to_bids(bids_dir, oxygen_dir, mapping_guide=None, conversion_tool='dcm2niix', logger=None,
                    nthreads=MAX_WORKERS, overwrite=False, filters=None, scanner_meta=False):

    if nthreads > 0:
        thread_semaphore = Semaphore(value=1)
    else:
        thread_semaphore = None

    # If the BIDS directory exists, verify that it's either empty or that overwrite is allowed.
    # Otherwise create the directory.
    if os.path.isdir(bids_dir):
        bids_files = glob(os.path.join(bids_dir, '*'))
        if bids_files:
            if not overwrite:
                raise DuplicateFile("The BIDS directory is not empty, and overwrite is set to False. Aborting...")
            else:
                rm_files = glob(os.path.join(bids_dir, '*'))
                list(map(shutil.rmtree, rm_files))
    else:
        create_path(bids_dir)

    # Uncompress any compressed Oxygen DICOM files
    raw_files = os.path.join(oxygen_dir, '*')

    # Check if there are compressed oxygen files, and if so, uncompress them
    compressed_files = [d for d in glob(raw_files) if os.path.isfile(d)]

    log_output("Extracting compressed files...", logger=logger)

    if nthreads > 0:  # Run in multiple threads
        futures = []
        with ThreadPoolExecutor(max_workers=nthreads) as executor:
            for f in compressed_files:
                futures.append(executor.submit(extract_tgz, f, oxygen_dir, logger, thread_semaphore))
        wait(futures)
    else:  # Run sequentially
        for f in compressed_files:
            extract_tgz(f, oxygen_dir, logger=logger)

    log_output("Compressed file extractions complete.", logger=logger)

    # Now we can get a list of uncompressed directories
    uncompressed_files = [d for d in glob(raw_files) if os.path.isdir(d)]

    mapping = {}

    # If a BIDS mapping has not been provided to guide the conversion process, attempt to generate a
    # mapping from the available information.
    if not mapping_guide:
        subject_counter = 1
        for unc_file in uncompressed_files:
            subject_id = unc_file.split("/")[-1].split("-")[-1]
            if subject_id not in mapping.keys():
                mapping[subject_id] = {
                    "bids_subject": "{:0>4d}".format(subject_counter),
                    "sessions": {}
                }
                subject_counter += 1

            session_dirs = [d for d in glob(os.path.join(unc_file, '*')) if os.path.isdir(d)]

            session_counter = 1
            for ses_dir in session_dirs:
                session_id = ses_dir.split("/")[-1]
                mapping[subject_id]["sessions"][session_id] = {
                    "bids_session": "{:0>4d}".format(session_counter),
                    "oxygen_file": "{}-{}-DICOM.tgz".format(ses_dir.split("/")[-2], ses_dir.split("/")[-1]),
                    "scans": {}
                }
                session_counter += 1

                scan_dirs = [d for d in glob(os.path.join(ses_dir, '*')) if os.path.isdir(d) and "mr_" in d]

                scan_counter = 1
                for sc_dir in scan_dirs:
                    scan_id = sc_dir.split("/")[-1]

                    # Filter this series directory
                    if filter_series(sc_dir, filters=filters, logger=logger):
                        continue

                    mapping[subject_id]["sessions"][session_id]["scans"][scan_id] = {
                        "series_dir": "/".join(sc_dir.split("/")[-3:]),
                        "bids_fpath": "",
                        "conversion_status": False,
                        "meta": {
                            "type": "func",
                            "modality": "bold",
                            "description": "task-fmri",
                            "run": "{:0>4d}".format(scan_counter)
                        }
                    }

                    if scanner_meta:
                        meta = get_scanner_meta(sc_dir)
                        mapping[subject_id]["sessions"][session_id]["scans"][scan_id]["scanner_meta"] = meta

                    scan_counter += 1

    # Mapping has been generated.
    # NOTE: a caller-supplied mapping_guide is not consumed here; if one is passed, the mapping
    # remains empty.

    # Iterate through the mapping to create an execution list to be split into threads
    exec_list = []
    for subject in mapping.keys():
        for session in mapping[subject]["sessions"].keys():
            for scan in mapping[subject]["sessions"][session]["scans"].keys():
                series_dir = os.path.join(oxygen_dir,
                                          mapping[subject]["sessions"][session]["scans"][scan]["series_dir"])
                bids_subject = "sub-{}".format(mapping[subject]["bids_subject"])
                bids_session = "ses-{}".format(mapping[subject]["sessions"][session]["bids_session"])
                bids_desc = mapping[subject]["sessions"][session]["scans"][scan]["meta"]["description"]
                bids_type = mapping[subject]["sessions"][session]["scans"][scan]["meta"]["type"]
                bids_modality = mapping[subject]["sessions"][session]["scans"][scan]["meta"]["modality"]
                bids_run = "run-{}".format(mapping[subject]["sessions"][session]["scans"][scan]["meta"]["run"])

                bids_fname = "{}_{}_{}_{}".format(bids_subject, bids_session, bids_desc, bids_run)
                if bids_modality:
                    bids_fname += "_{}".format(bids_modality)
                bids_fname = "{}.nii.gz".format(bids_fname)

                bids_fpath = os.path.join(bids_dir, bids_subject, bids_session, bids_type, bids_fname)

                exec_list.append((series_dir, bids_fpath))

    # Iterate through the execution list and convert to nifti
    if nthreads > 0:  # Run in multiple threads
        futures = []
        with ThreadPoolExecutor(max_workers=nthreads) as executor:
            for dcm_dir, bids_fpath in exec_list:
                out_bdir = "/".join(bids_fpath.split("/")[:-1])
                if not os.path.isdir(out_bdir):
                    create_path(out_bdir)
                out_fname = bids_fpath.split("/")[-1].split(".")[0]
                futures.append(executor.submit(dcm_to_nifti, dcm_dir, out_fname, out_bdir,
                                               conversion_tool=conversion_tool, bids_meta=True,
                                               logger=logger, semaphore=thread_semaphore))
        wait(futures)

        for future in futures:
            series_dir, bids_fpath, success = future.result()
            subject = series_dir.split("/")[0].split("-")[1]
            session = series_dir.split("/")[1]
            scan = series_dir.split("/")[2]
            if success:
                mapping[subject]["sessions"][session]["scans"][scan]["bids_fpath"] = bids_fpath
                mapping[subject]["sessions"][session]["scans"][scan]["conversion_status"] = True
    else:  # Run sequentially
        for dcm_dir, bids_fpath in exec_list:
            out_bdir = "/".join(bids_fpath.split("/")[:-1])
            if not os.path.isdir(out_bdir):
                create_path(out_bdir)
            out_fname = bids_fpath.split("/")[-1].split(".")[0]
            # Use the caller-specified conversion tool (previously hardcoded to 'dcm2niix')
            series_dir, bids_fpath, success = dcm_to_nifti(dcm_dir, out_fname, out_bdir,
                                                           conversion_tool=conversion_tool, bids_meta=True,
                                                           logger=logger)
            subject = series_dir.split("/")[0].split("-")[1]
            session = series_dir.split("/")[1]
            scan = series_dir.split("/")[2]
            if success:
                mapping[subject]["sessions"][session]["scans"][scan]["bids_fpath"] = bids_fpath
                mapping[subject]["sessions"][session]["scans"][scan]["conversion_status"] = True

    return mapping
    level=logging.DEBUG,
    format='LOG ENTRY %(asctime)s - %(levelname)s \n%(message)s \nEND LOG ENTRY\n'
)

# Print the settings
settings_str = "Bids directory: {}\n".format(settings.bids_dir) + \
               "Oxygen data: {}\n".format(settings.oxygen_dir) + \
               "Mapping guide fpath: {}\n".format(settings.mapping_guide) + \
               "Mapping directory: {}\n".format(settings.mapping_dir) + \
               "Overwrite: {}\n".format(settings.overwrite) + \
               "Filter(s) fpath: {}\n".format(settings.filters) + \
               "Log directory: {}\n".format(settings.log_dir) + \
               "Include scanner metadata: {}\n\n".format(settings.scanner_meta)

log_output(settings_str, logger=logging)
log_output("Beginning conversion to BIDS format of data in {} directory.\n"
           "Log located in {}.".format(settings.oxygen_dir, log_fpath), logger=logging)

if settings.filters:
    with open(settings.filters, "r") as filter_file:
        filters = json.load(filter_file)
else:
    filters = None

mapping = convert_to_bids(settings.bids_dir, settings.oxygen_dir, mapping_guide=settings.mapping_guide,
                          conversion_tool='dcm2niix',
def anat_average_wf(session_dir, out_dir, logger=None, semaphore=None):

    base_img = glob(os.path.join(session_dir, "*run-01_T1w.nii*"))[0]
    additional_imgs = [img for img in glob(os.path.join(session_dir, "*.nii*")) if "run-01_T1w" not in img]

    # Build the workflow: register every additional anatomical image to the run-01 base image
    wf = []
    for img in additional_imgs:
        wf.append(_register_anat(base_img, img, out_dir))

    wf_success = True
    for cmd in wf:
        if not wf_success:
            break
        try:
            result = check_output(cmd, cwd=session_dir, stderr=STDOUT, universal_newlines=True)

            log_str = LOG_MESSAGES["success"].format(" ".join(cmd), 0)
            if result:
                log_str += LOG_MESSAGES["output"].format(result)

            log_output(log_str, logger=logger, semaphore=semaphore)
        except CalledProcessError as e:
            log_str = LOG_MESSAGES["error"].format(cmd[0], " ".join(cmd), e.returncode)
            if e.output:
                log_str += LOG_MESSAGES["output"].format(e.output)

            log_output(log_str, logger=logger, semaphore=semaphore)
            wf_success = False

    if wf_success:
        # Average the base image and the registered images with 3dcalc: assign each input a
        # letter flag (-a, -b, -c, ...) and evaluate (a+b+...)/N
        alphabet = list(ascii_lowercase)
        calc_cmd = ["3dcalc"]
        used_letters = []

        volreg_imgs = glob(os.path.join(out_dir, "*_volreg.nii.gz"))
        volreg_imgs.insert(0, base_img)

        for img in volreg_imgs:
            curr_letter = alphabet.pop(0)
            curr_params = ["-{}".format(curr_letter), "{}".format(img)]
            calc_cmd.extend(curr_params)
            used_letters.append(curr_letter)

        expr_string = "({})/{}".format("+".join(used_letters), len(used_letters))
        expr = ["-expr", "{}".format(expr_string)]
        calc_cmd.extend(expr)

        calc_name = "_".join(os.path.basename(base_img).split("_")[:2])
        calc_cmd.extend(["-prefix", "{}_anat_avg.nii.gz".format(os.path.join(out_dir, calc_name))])

        try:
            result = check_output(calc_cmd, cwd=session_dir, stderr=STDOUT, universal_newlines=True)

            log_str = LOG_MESSAGES["success"].format(" ".join(calc_cmd), 0)
            if result:
                log_str += LOG_MESSAGES["output"].format(result)

            log_output(log_str, logger=logger, semaphore=semaphore)
        except CalledProcessError as e:
            log_str = LOG_MESSAGES["error"].format(calc_cmd[0], " ".join(calc_cmd), e.returncode)
            if e.output:
                log_str += LOG_MESSAGES["output"].format(e.output)

            log_output(log_str, logger=logger, semaphore=semaphore)
            return False

        return True

    return False
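
# Illustrative call (a sketch, not from the original source; the paths are hypothetical, and the
# AFNI tools invoked by the workflow must be installed). Returns True only if every registration
# step and the final 3dcalc average succeed.
#
#     ok = anat_average_wf("/data/bids/sub-0001/ses-0001/anat", "/data/averages/sub-0001/ses-0001")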
def seven_tesla_wf(in_file, out_dir, logger=None, semaphore=None):

    if ".nii" in in_file:  # Matches both .nii and .nii.gz
        clean_fname = os.path.basename(in_file).split(".")[0]
    else:
        raise ValueError("Files must be in Nifti (.nii) or compressed Nifti (.nii.gz) formats.")

    cwd = os.path.join(out_dir, clean_fname)
    if not os.path.isdir(cwd):
        create_path(cwd)

    # All *_fname variables below already include the cwd prefix
    despike_fname = "{}_despike".format(os.path.join(cwd, clean_fname))
    despike = [
        "3dDespike",
        "-overwrite",
        "-prefix", "{}.nii.gz".format(despike_fname),
        "{}".format(in_file)
    ]

    tshift_fname = "{}_tshift".format(despike_fname)
    tshift = [
        "3dTshift",
        "-overwrite",
        "-prefix", "{}.nii.gz".format(tshift_fname),
        "{}.nii.gz".format(despike_fname)
    ]

    prereg_fname = "{}_prereg_fwhm.out".format(tshift_fname)
    prereg_fwhm = [
        "3dFWHMx",
        "-input", "{}.nii.gz".format(tshift_fname),
        "-detrend", "1",
        "-combine",
    ]

    oned_file = "{}.1D".format(tshift_fname)
    oned_matrix = "{}.aff12.1D".format(tshift_fname)
    max_disp = "{}_md.1D".format(tshift_fname)

    volreg_fname = "{}_volreg".format(tshift_fname)
    volreg = [
        "3dvolreg",
        "-overwrite",
        "-twopass",
        "-cubic",
        "-base", "3",
        "-zpad", "4",
        "-1Dfile", "{}".format(oned_file),
        "-maxdisp1D", "{}".format(max_disp),
        "-1Dmatrix_save", "{}".format(oned_matrix),
        "-prefix", "{}.nii.gz".format(volreg_fname),
        "{}.nii.gz".format(tshift_fname)
    ]

    postreg_fname = "{}_postreg_fwhm.out".format(volreg_fname)
    postreg_fwhm = [
        "3dFWHMx",
        "-input", "{}.nii.gz".format(volreg_fname),
        "-detrend", "1",
        "-combine",
    ]

    epi_mask_fname = "{}_mask".format(volreg_fname)
    epi_mask = [
        "3dAutomask",
        "-dilate", "1",
        "-prefix", "{}.nii.gz".format(epi_mask_fname),
        "{}.nii.gz".format(volreg_fname)
    ]

    mean_fname = "{}_mean".format(volreg_fname)
    mean = [
        "3dTstat",
        "-overwrite",
        "-mean",
        "-prefix", "{}.nii.gz".format(mean_fname),
        "{}.nii.gz".format(volreg_fname)
    ]

    detrend_fname = "{}_detrend".format(volreg_fname)
    detrend = [
        "3dDetrend",
        "-overwrite",
        "-polort", "1",
        "-prefix", "{}.nii.gz".format(detrend_fname),
        "{}.nii.gz".format(volreg_fname)
    ]

    detrend_with_mean_fname = "{}_detrend_with_mean".format(volreg_fname)
    detrend_with_mean = [
        "3dcalc",
        "-overwrite",
        "-a", "{}.nii.gz".format(mean_fname),
        "-b", "{}.nii.gz".format(detrend_fname),
        "-expr", "a+b",
        "-prefix", "{}.nii.gz".format(detrend_with_mean_fname)
    ]

    workflow = [despike, tshift, prereg_fwhm, volreg, postreg_fwhm, epi_mask, mean, detrend,
                detrend_with_mean]

    wf_success = True
    for cmd in workflow:
        if not wf_success:
            break
        try:
            result = check_output(cmd, cwd=cwd, stderr=STDOUT, universal_newlines=True)

            # The 3dFWHMx command writes its estimates to stdout; capture this into a file
            if "3dFWHMx" in cmd:
                outfname = ""
                if "{}.nii.gz".format(tshift_fname) in cmd:
                    outfname = prereg_fname
                elif "{}.nii.gz".format(volreg_fname) in cmd:
                    outfname = postreg_fname
                if outfname:
                    with open(outfname, "w") as outfile:
                        outfile.write(result)

            log_str = LOG_MESSAGES["success"].format(" ".join(cmd), 0)
            if result:
                log_str += LOG_MESSAGES["output"].format(result)

            log_output(log_str, logger=logger, semaphore=semaphore)
        except CalledProcessError as e:
            log_str = LOG_MESSAGES["error"].format(cmd[0], " ".join(cmd), e.returncode)
            if e.output:
                log_str += LOG_MESSAGES["output"].format(e.output)

            log_output(log_str, logger=logger, semaphore=semaphore)
            wf_success = False

    if wf_success:
        # Compute the TSNR image and the mean TSNR value
        # (these paths already carry the cwd prefix, so no further join with cwd is needed)
        tsnr_fname = "{}_TSNR".format(os.path.join(cwd, clean_fname))
        tsnr_infile = "{}.nii.gz".format(detrend_with_mean_fname)
        epi_mask_path = "{}.nii.gz".format(epi_mask_fname)
        tsnr_val = calc_tsnr(tsnr_fname, tsnr_infile, epi_mask_path)

        # Calculate the FWHM of the dataset before and after registration (using linear detrending)
        pre_fwhm_x, pre_fwhm_y, pre_fwhm_z, pre_fwhm_combined = parse_fwhm(prereg_fname)
        post_fwhm_x, post_fwhm_y, post_fwhm_z, post_fwhm_combined = parse_fwhm(postreg_fname)

        # Calculate the framewise displacement
        fd_fname = os.path.join(cwd, "{}_fd.txt".format(clean_fname))
        fd_res = fd_jenkinson(oned_matrix, out_file=fd_fname)

        # Parse fd results
        mean_fd, num_above_cutoff, perc_above_cutoff = extract_fd_results(fd_res, cutoff=0.2)

        statistics = OrderedDict({
            'tsnr_val': tsnr_val,
            'prereg_fwhm_x': pre_fwhm_x,
            'prereg_fwhm_y': pre_fwhm_y,
            'prereg_fwhm_z': pre_fwhm_z,
            'prereg_fwhm_combined': pre_fwhm_combined,
            'postreg_fwhm_x': post_fwhm_x,
            'postreg_fwhm_y': post_fwhm_y,
            'postreg_fwhm_z': post_fwhm_z,
            'postreg_fwhm_combined': post_fwhm_combined,
            'mean_fd': mean_fd,
            'num_fd_above_cutoff': num_above_cutoff,
            'perc_fd_above_cutoff': perc_above_cutoff
        })

        return clean_fname, statistics

    return clean_fname, None
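
# Illustrative call (a sketch, not from the original source; the input path is hypothetical, and
# the AFNI tools invoked by the workflow must be on the PATH):
#
#     fname, stats = seven_tesla_wf("/data/bids/sub-0001/ses-0001/func/run-0001_bold.nii.gz",
#                                   "/data/qc")
#     if stats is not None:
#         print("Mean TSNR for {}: {}".format(fname, stats['tsnr_val']))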