def common_select(bids_base, out_base, workflow_name, template, registration_mask, functional_match, structural_match, subjects, sessions):
    """Common selection and variable processing function for SAMRI preprocessing workflows.

    Resolves the template and registration mask (supporting the special
    "mouse" and "rat" species shorthands), sets up the output directories,
    runs the BIDS data selection, and derives the scan-type and index
    variables over which the preprocessing workflows iterate.

    Returns
    -------
    tuple
        ``(bids_base, out_base, out_dir, template, registration_mask,
        data_selection, functional_scan_types, structural_scan_types,
        subjects_sessions, func_ind, struct_ind)``
    """
    if template:
        if template == "mouse":
            # Default SAMRI mouse template/mask, as installed by the
            # mouse-brain-atlases package.
            template = '/usr/share/mouse-brain-atlases/dsurqec_200micron.nii'
            registration_mask = '/usr/share/mouse-brain-atlases/dsurqec_200micron_mask.nii'
        elif template == "rat":
            from samri.fetch.templates import fetch_rat_waxholm
            # Fetch once and reuse (the original called the fetcher twice).
            waxholm = fetch_rat_waxholm()
            template = waxholm['template']
            registration_mask = waxholm['mask']
        else:
            # Custom template: normalize the user-supplied paths.
            # (A redundant inner `if template:` check was dropped — this
            # branch is only reachable when `template` is truthy.)
            template = path.abspath(path.expanduser(template))
            if registration_mask:
                registration_mask = path.abspath(path.expanduser(registration_mask))
    else:
        # An unreachable `return -1` after this raise was removed.
        raise ValueError("No species or template path specified")

    bids_base = path.abspath(path.expanduser(bids_base))
    if not out_base:
        out_base = path.join(bids_base, 'preprocessing')
    else:
        out_base = path.abspath(path.expanduser(out_base))
    out_dir = path.join(out_base, workflow_name)

    data_selection = bids_data_selection(bids_base, structural_match, functional_match, subjects, sessions)
    workdir = out_dir + '_work'
    # `exist_ok=True` avoids the check-then-create race of the original.
    os.makedirs(workdir, exist_ok=True)
    data_selection.to_csv(path.join(workdir, 'data_selection.csv'))

    # Generate functional and structural scan types.
    functional_scan_types = data_selection.loc[data_selection.type == 'func']['acq'].values
    structural_scan_types = data_selection.loc[data_selection.type == 'anat']['acq'].values

    # We start to define nipype workflow elements (nodes, connections, meta).
    subjects_sessions = data_selection[["subject", "session"]].drop_duplicates().values.tolist()
    func_ind = data_selection[data_selection["type"] == "func"].index.tolist()
    struct_ind = data_selection[data_selection["type"] == "anat"].index.tolist()

    # Diagnostic output (was wrapped in a pointless `if True:` block).
    print(data_selection)
    print(subjects_sessions)
    return bids_base, out_base, out_dir, template, registration_mask, data_selection, functional_scan_types, structural_scan_types, subjects_sessions, func_ind, struct_ind
def select_template(template, registration_mask):
    """Select the template and mask to be used.

    Supports special string values ("mouse", "rat") which select default
    SAMRI settings; any other truthy value is treated as a filesystem path
    and normalized.

    Parameters
    ----------
    template : str
        Either a species shorthand ("mouse" or "rat") or a path to a
        template image.
    registration_mask : str or bool
        Path to a registration mask, or a falsy value for no mask; it is
        replaced by the species default when a shorthand is used.

    Returns
    -------
    tuple of (str, str)
        Resolved ``(template, registration_mask)`` paths.

    Raises
    ------
    ValueError
        If `template` is falsy (no species or path specified).
    """
    if template:
        if template == "mouse":
            template = '/usr/share/mouse-brain-atlases/dsurqec_200micron.nii'
            registration_mask = '/usr/share/mouse-brain-atlases/dsurqec_200micron_mask.nii'
        elif template == "rat":
            from samri.fetch.templates import fetch_rat_waxholm
            # Fetch once and reuse (the original called the fetcher twice).
            waxholm = fetch_rat_waxholm()
            template = waxholm['template']
            registration_mask = waxholm['mask']
        else:
            # Custom template path; the redundant inner `if template:` check
            # of the original was dropped (this branch implies truthiness).
            template = path.abspath(path.expanduser(template))
            if registration_mask:
                registration_mask = path.abspath(path.expanduser(registration_mask))
    else:
        # An unreachable `return -1` after this raise was removed.
        raise ValueError("No species or template path specified")
    return template, registration_mask
def bruker(measurements_base, template,
        DEBUG=False,
        exclude=None,
        functional_match=None,
        structural_match=None,
        sessions=None,
        subjects=None,
        actual_size=True,
        functional_blur_xy=False,
        functional_registration_method="structural",
        highpass_sigma=225,
        lowpass_sigma=None,
        negative_contrast_agent=False,
        n_procs=N_PROCS,
        realign="time",
        registration_mask=False,
        tr=1,
        very_nasty_bruker_delay_hack=False,
        workflow_name="generic",
        keep_work=False,
        autorotate=False,
        strict=False,
        verbose=False,
        ):
    '''Preprocess Bruker scanner data, converting and registering it to `template`.

    Builds and runs a nipype workflow which converts Bruker directories to
    NIfTI, optionally realigns/slice-time-corrects, registers to the
    template, band-pass filters, and sinks BIDS-named outputs under
    ``<measurements_base>/preprocessing/<workflow_name>``.

    realign: {"space","time","spacetime",""}
        Parameter that dictates slictiming correction and realignment of slices. "time" (FSL.SliceTimer) is default, since it works safely. Use others only with caution!

    Notes
    -----
    `exclude`, `functional_match`, `structural_match`, `sessions` and
    `subjects` previously used mutable default arguments (``{}``/``[]``)
    which this function mutates; they now default to ``None`` (behaviorally
    equivalent for callers).
    '''
    # Guard against shared mutable defaults; copy the match dicts since we
    # add subject/session keys to them below.
    exclude = {} if exclude is None else exclude
    functional_match = dict(functional_match) if functional_match else {}
    structural_match = dict(structural_match) if structural_match else {}
    subjects = subjects or []
    sessions = sessions or []

    if template:
        if template == "mouse":
            template = fetch_mouse_DSURQE()['template']
            registration_mask = fetch_mouse_DSURQE()['mask']
        elif template == "rat":
            template = fetch_rat_waxholm()['template']
            registration_mask = fetch_rat_waxholm()['mask']
    else:
        # An unreachable `return -1` after this raise was removed.
        raise ValueError("No species or template specified")

    measurements_base = path.abspath(path.expanduser(measurements_base))

    # Apply subject and session whitelists to the match dicts if present.
    # (The original assigned to `structural_scan_types`, which is undefined
    # at this point — a NameError whenever subjects/sessions were given.)
    if subjects:
        structural_match['subject'] = subjects
        functional_match['subject'] = subjects
    if sessions:
        structural_match['session'] = sessions
        functional_match['session'] = sessions

    # Empty-array defaults so the `.any()` checks below work even when the
    # corresponding match dict is not given (`.values` of an empty Series is
    # an empty ndarray, avoiding a new numpy import).
    functional_scan_types = pd.Series(dtype=object).values
    structural_scan_types = pd.Series(dtype=object).values

    # Define measurement directories to be processed, populated via an
    # intelligent selection from the match/exclude specifications.
    data_selection = pd.DataFrame([])
    if structural_match:
        s_data_selection = get_data_selection(measurements_base,
            match=structural_match,
            exclude=exclude,
            )
        structural_scan_types = s_data_selection['scan_type'].unique()
        data_selection = pd.concat([data_selection, s_data_selection])
    if functional_match:
        f_data_selection = get_data_selection(measurements_base,
            match=functional_match,
            exclude=exclude,
            )
        functional_scan_types = f_data_selection['scan_type'].unique()
        data_selection = pd.concat([data_selection, f_data_selection])

    # We start to define nipype workflow elements (nodes, connections, meta).
    subjects_sessions = data_selection[["subject", "session"]].drop_duplicates().values.tolist()
    if DEBUG:  # was `if debug:` — an undefined name (parameter is DEBUG)
        print('Data selection:')
        print(data_selection)
        print('Iterating over:')
        print(subjects_sessions)
    infosource = pe.Node(interface=util.IdentityInterface(fields=['subject_session'], mandatory_inputs=False), name="infosource")
    infosource.iterables = [('subject_session', subjects_sessions)]

    get_f_scan = pe.Node(name='get_f_scan', interface=util.Function(
        function=get_scan,
        # getfullargspec replaces getargspec (removed in Python 3.11)
        input_names=inspect.getfullargspec(get_scan)[0],
        output_names=['scan_path', 'scan_type', 'trial']))
    if not strict:
        get_f_scan.inputs.ignore_exception = True
    get_f_scan.inputs.data_selection = data_selection
    get_f_scan.inputs.measurements_base = measurements_base
    get_f_scan.iterables = ("scan_type", functional_scan_types)

    f_bru2nii = pe.Node(interface=bru2nii.Bru2(), name="f_bru2nii")
    f_bru2nii.inputs.actual_size = actual_size

    dummy_scans = pe.Node(name='dummy_scans', interface=util.Function(
        function=force_dummy_scans,
        input_names=inspect.getfullargspec(force_dummy_scans)[0],
        output_names=['out_file']))
    dummy_scans.inputs.desired_dummy_scans = DUMMY_SCANS

    bandpass = pe.Node(interface=fsl.maths.TemporalFilter(), name="bandpass")
    bandpass.inputs.highpass_sigma = highpass_sigma
    if lowpass_sigma:
        bandpass.inputs.lowpass_sigma = lowpass_sigma
    else:
        bandpass.inputs.lowpass_sigma = tr

    bids_filename = pe.Node(name='bids_filename', interface=util.Function(
        function=bids_naming,
        input_names=inspect.getfullargspec(bids_naming)[0],
        output_names=['filename']))
    bids_filename.inputs.metadata = data_selection

    bids_stim_filename = pe.Node(name='bids_stim_filename', interface=util.Function(
        function=bids_naming,
        input_names=inspect.getfullargspec(bids_naming)[0],
        output_names=['filename']))
    bids_stim_filename.inputs.suffix = "events"
    bids_stim_filename.inputs.extension = ".tsv"
    bids_stim_filename.inputs.metadata = data_selection

    events_file = pe.Node(name='events_file', interface=util.Function(
        function=write_events_file,
        input_names=inspect.getfullargspec(write_events_file)[0],
        output_names=['out_file']))
    events_file.inputs.dummy_scans_ms = DUMMY_SCANS * tr * 1000
    events_file.inputs.stim_protocol_dictionary = STIM_PROTOCOL_DICTIONARY
    events_file.inputs.very_nasty_bruker_delay_hack = very_nasty_bruker_delay_hack
    if not (strict or verbose):
        events_file.inputs.ignore_exception = True

    datasink = pe.Node(nio.DataSink(), name='datasink')
    datasink.inputs.base_directory = path.join(measurements_base, "preprocessing", workflow_name)
    datasink.inputs.parameterization = False
    if not (strict or verbose):
        datasink.inputs.ignore_exception = True

    workflow_connections = [
        (infosource, get_f_scan, [('subject_session', 'selector')]),
        (infosource, bids_stim_filename, [('subject_session', 'subject_session')]),
        (get_f_scan, bids_stim_filename, [('scan_type', 'scan_type')]),
        (get_f_scan, f_bru2nii, [('scan_path', 'input_dir')]),
        (f_bru2nii, dummy_scans, [('nii_file', 'in_file')]),
        (get_f_scan, dummy_scans, [('scan_path', 'scan_dir')]),
        (get_f_scan, events_file, [('trial', 'trial'), ('scan_path', 'scan_dir')]),
        (events_file, datasink, [('out_file', 'func.@events')]),
        (bids_stim_filename, events_file, [('filename', 'out_file')]),
        (infosource, datasink, [(('subject_session', ss_to_path), 'container')]),
        (infosource, bids_filename, [('subject_session', 'subject_session')]),
        (get_f_scan, bids_filename, [('scan_type', 'scan_type')]),
        (bids_filename, bandpass, [('filename', 'out_file')]),
        (bandpass, datasink, [('out_file', 'func')]),
        ]

    if realign == "space":
        realigner = pe.Node(interface=spm.Realign(), name="realigner")
        realigner.inputs.register_to_mean = True
        workflow_connections.extend([
            (dummy_scans, realigner, [('out_file', 'in_file')]),
            ])
    elif realign == "spacetime":
        realigner = pe.Node(interface=nipy.SpaceTimeRealigner(), name="realigner")
        realigner.inputs.slice_times = "asc_alt_2"
        realigner.inputs.tr = tr
        realigner.inputs.slice_info = 3  # 3 for coronal slices (2 for horizontal, 1 for sagittal)
        workflow_connections.extend([
            (dummy_scans, realigner, [('out_file', 'in_file')]),
            ])
    elif realign == "time":
        realigner = pe.Node(interface=fsl.SliceTimer(), name="slicetimer")
        realigner.inputs.time_repetition = tr
        workflow_connections.extend([
            (dummy_scans, realigner, [('out_file', 'in_file')]),
            ])

    # ADDING SELECTABLE NODES AND EXTENDING WORKFLOW AS APPROPRIATE:
    if actual_size:
        s_biascorrect, f_biascorrect = real_size_nodes()
    else:
        s_biascorrect, f_biascorrect = inflated_size_nodes()

    if structural_scan_types.any():
        get_s_scan = pe.Node(name='get_s_scan', interface=util.Function(
            function=get_scan,
            input_names=inspect.getfullargspec(get_scan)[0],
            output_names=['scan_path', 'scan_type', 'trial']))
        if not strict:
            get_s_scan.inputs.ignore_exception = True
        get_s_scan.inputs.data_selection = data_selection
        get_s_scan.inputs.measurements_base = measurements_base
        get_s_scan.iterables = ("scan_type", structural_scan_types)

        s_bru2nii = pe.Node(interface=bru2nii.Bru2(), name="s_bru2nii")
        s_bru2nii.inputs.force_conversion = True
        s_bru2nii.inputs.actual_size = actual_size

        s_bids_filename = pe.Node(name='s_bids_filename', interface=util.Function(
            function=bids_naming,
            input_names=inspect.getfullargspec(bids_naming)[0],
            output_names=['filename']))
        s_bids_filename.inputs.metadata = data_selection

        if actual_size:
            s_register, s_warp, _, _ = DSURQEc_structural_registration(template, registration_mask)
            # TODO: incl. in func registration
            if autorotate:
                # FIXME(review): `s_rotated` is only assigned further below by
                # calling `autorotate(template)`, but `autorotate` is the
                # boolean parameter of this function (shadowing whatever node
                # factory was intended) — this path raises when enabled.
                workflow_connections.extend([
                    (s_biascorrect, s_rotated, [('output_image', 'out_file')]),
                    (s_rotated, s_register, [('out_file', 'moving_image')]),
                    ])
            else:
                workflow_connections.extend([
                    (s_biascorrect, s_register, [('output_image', 'moving_image')]),
                    (s_register, s_warp, [('composite_transform', 'transforms')]),
                    (s_bru2nii, s_warp, [('nii_file', 'input_image')]),
                    (s_warp, datasink, [('output_image', 'anat')]),
                    ])
        else:
            s_reg_biascorrect = pe.Node(interface=ants.N4BiasFieldCorrection(), name="s_reg_biascorrect")
            s_reg_biascorrect.inputs.dimension = 3
            s_reg_biascorrect.inputs.bspline_fitting_distance = 95
            s_reg_biascorrect.inputs.shrink_factor = 2
            s_reg_biascorrect.inputs.n_iterations = [500, 500, 500, 500]
            s_reg_biascorrect.inputs.convergence_threshold = 1e-14

            s_cutoff = pe.Node(interface=fsl.ImageMaths(), name="s_cutoff")
            s_cutoff.inputs.op_string = "-thrP 20 -uthrp 98"

            s_BET = pe.Node(interface=fsl.BET(), name="s_BET")
            s_BET.inputs.mask = True
            s_BET.inputs.frac = 0.3
            s_BET.inputs.robust = True

            s_mask = pe.Node(interface=fsl.ApplyMask(), name="s_mask")
            s_register, s_warp, f_warp = structural_registration(template)

            workflow_connections.extend([
                (s_bru2nii, s_reg_biascorrect, [('nii_file', 'input_image')]),
                (s_reg_biascorrect, s_cutoff, [('output_image', 'in_file')]),
                (s_cutoff, s_BET, [('out_file', 'in_file')]),
                (s_biascorrect, s_mask, [('output_image', 'in_file')]),
                (s_BET, s_mask, [('mask_file', 'mask_file')]),
                ])

            # TODO: incl. in func registration
            if autorotate:
                # FIXME(review): same `s_rotated`/`autorotate` shadowing issue
                # as above — broken when `autorotate=True`.
                workflow_connections.extend([
                    (s_mask, s_rotated, [('out_file', 'out_file')]),
                    (s_rotated, s_register, [('out_file', 'moving_image')]),
                    ])
            else:
                workflow_connections.extend([
                    (s_mask, s_register, [('out_file', 'moving_image')]),
                    (s_register, s_warp, [('composite_transform', 'transforms')]),
                    (s_bru2nii, s_warp, [('nii_file', 'input_image')]),
                    (s_warp, datasink, [('output_image', 'anat')]),
                    ])

        if autorotate:
            # FIXME(review): calls the boolean `autorotate` parameter as a
            # function — TypeError whenever this branch is reached.
            s_rotated = autorotate(template)

        workflow_connections.extend([
            (infosource, get_s_scan, [('subject_session', 'selector')]),
            (infosource, s_bids_filename, [('subject_session', 'subject_session')]),
            (get_s_scan, s_bru2nii, [('scan_path', 'input_dir')]),
            (get_s_scan, s_bids_filename, [('scan_type', 'scan_type')]),
            (s_bids_filename, s_warp, [('filename', 'output_image')]),
            (s_bru2nii, s_biascorrect, [('nii_file', 'input_image')]),
            ])

    if functional_registration_method == "structural":
        # `.any()` for consistency with the other checks; `not` on a
        # multi-element ndarray is ambiguous/deprecated.
        if not structural_scan_types.any():
            raise ValueError('The option `registration="structural"` requires there to be a structural scan type.')
        workflow_connections.extend([
            (s_register, f_warp, [('composite_transform', 'transforms')]),
            ])
        if realign == "space":
            workflow_connections.extend([
                (realigner, f_warp, [('realigned_files', 'input_image')]),
                ])
        elif realign == "spacetime":
            workflow_connections.extend([
                (realigner, f_warp, [('out_file', 'input_image')]),
                ])
        elif realign == "time":
            workflow_connections.extend([
                (realigner, f_warp, [('slice_time_corrected_file', 'input_image')]),
                ])
        else:
            workflow_connections.extend([
                (dummy_scans, f_warp, [('out_file', 'input_image')]),
                ])

    if functional_registration_method == "composite":
        if not structural_scan_types.any():
            raise ValueError('The option `registration="composite"` requires there to be a structural scan type.')
        _, _, f_register, f_warp = DSURQEc_structural_registration(template, registration_mask)

        temporal_mean = pe.Node(interface=fsl.MeanImage(), name="temporal_mean")
        merge = pe.Node(util.Merge(2), name='merge')

        workflow_connections.extend([
            (temporal_mean, f_biascorrect, [('out_file', 'input_image')]),
            (f_biascorrect, f_register, [('output_image', 'moving_image')]),
            (s_biascorrect, f_register, [('output_image', 'fixed_image')]),
            (f_register, merge, [('composite_transform', 'in1')]),
            (s_register, merge, [('composite_transform', 'in2')]),
            (merge, f_warp, [('out', 'transforms')]),
            ])
        if realign == "space":
            workflow_connections.extend([
                (realigner, temporal_mean, [('realigned_files', 'in_file')]),
                (realigner, f_warp, [('realigned_files', 'input_image')]),
                ])
        elif realign == "spacetime":
            workflow_connections.extend([
                (realigner, temporal_mean, [('out_file', 'in_file')]),
                (realigner, f_warp, [('out_file', 'input_image')]),
                ])
        elif realign == "time":
            workflow_connections.extend([
                (realigner, temporal_mean, [('slice_time_corrected_file', 'in_file')]),
                (realigner, f_warp, [('slice_time_corrected_file', 'input_image')]),
                ])
        else:
            workflow_connections.extend([
                (dummy_scans, temporal_mean, [('out_file', 'in_file')]),
                (dummy_scans, f_warp, [('out_file', 'input_image')]),
                ])
    elif functional_registration_method == "functional":
        f_register, f_warp = functional_registration(template)

        temporal_mean = pe.Node(interface=fsl.MeanImage(), name="temporal_mean")

        workflow_connections.extend([
            (temporal_mean, f_biascorrect, [('out_file', 'input_image')]),
            (f_biascorrect, f_register, [('output_image', 'moving_image')]),
            (f_register, f_warp, [('composite_transform', 'transforms')]),
            ])
        if realign == "space":
            workflow_connections.extend([
                (realigner, temporal_mean, [('realigned_files', 'in_file')]),
                (realigner, f_warp, [('realigned_files', 'input_image')]),
                ])
        elif realign == "spacetime":
            workflow_connections.extend([
                (realigner, temporal_mean, [('out_file', 'in_file')]),
                (realigner, f_warp, [('out_file', 'input_image')]),
                ])
        elif realign == "time":
            workflow_connections.extend([
                (realigner, temporal_mean, [('slice_time_corrected_file', 'in_file')]),
                (realigner, f_warp, [('slice_time_corrected_file', 'input_image')]),
                ])
        else:
            workflow_connections.extend([
                (dummy_scans, temporal_mean, [('out_file', 'in_file')]),
                (dummy_scans, f_warp, [('out_file', 'input_image')]),
                ])

    invert = pe.Node(interface=fsl.ImageMaths(), name="invert")
    if functional_blur_xy and negative_contrast_agent:
        blur = pe.Node(interface=afni.preprocess.BlurToFWHM(), name="blur")
        blur.inputs.fwhmxy = functional_blur_xy
        workflow_connections.extend([
            (f_warp, blur, [('output_image', 'in_file')]),
            (blur, invert, [(('out_file', fslmaths_invert_values), 'op_string')]),
            (blur, invert, [('out_file', 'in_file')]),
            (invert, bandpass, [('out_file', 'in_file')]),
            ])
    elif functional_blur_xy:
        blur = pe.Node(interface=afni.preprocess.BlurToFWHM(), name="blur")
        blur.inputs.fwhmxy = functional_blur_xy
        workflow_connections.extend([
            (f_warp, blur, [('output_image', 'in_file')]),
            (blur, bandpass, [('out_file', 'in_file')]),
            ])
    elif negative_contrast_agent:
        # The original also created a `blur` node here (never connected, and
        # with `fwhmxy` set to False since `functional_blur_xy` is falsy in
        # this branch) — removed as dead and invalid.
        workflow_connections.extend([
            (f_warp, invert, [(('output_image', fslmaths_invert_values), 'op_string')]),
            (f_warp, invert, [('output_image', 'in_file')]),
            (invert, bandpass, [('out_file', 'in_file')]),
            ])
    else:
        workflow_connections.extend([
            (f_warp, bandpass, [('output_image', 'in_file')]),
            ])

    workflow_config = {
        'execution': {
            'crashdump_dir': path.join(measurements_base, 'preprocessing/crashdump'),
            }
        }
    if DEBUG:  # was `if debug:` — an undefined name (parameter is DEBUG)
        workflow_config['logging'] = {
            'workflow_level': 'DEBUG',
            'utils_level': 'DEBUG',
            'interface_level': 'DEBUG',
            'filemanip_level': 'DEBUG',
            'log_to_file': 'true',
            }

    workdir_name = workflow_name + "_work"
    workflow = pe.Workflow(name=workdir_name)
    workflow.connect(workflow_connections)
    workflow.base_dir = path.join(measurements_base, "preprocessing")
    workflow.config = workflow_config
    workflow.write_graph(dotfilename=path.join(workflow.base_dir, workdir_name, "graph.dot"), graph2use="hierarchical", format="png")

    workflow.run(plugin="MultiProc", plugin_args={'n_procs': n_procs})
    if not keep_work:
        shutil.rmtree(path.join(workflow.base_dir, workdir_name))
def common_select(bids_base, out_base, workflow_name, template, registration_mask, functional_match, structural_match, subjects, sessions, exclude):
    """Common selection and variable processing function for SAMRI preprocessing workflows.

    Parameters
    ----------
    bids_base : string
        Path to the BIDS root directory.
    out_base : string
        Output base directory - inside which a directory named `workflow_name` (as well as associated directories) will be created.
    workflow_name : string
        Top level name for the output directory.
    template : string
        Path to the template to register the data to, or a species shorthand ("mouse" or "rat").
    registration_mask : string
        Mask to use for the registration process.
    functional_match : dict
        Dictionary specifying a whitelist to use for functional data inclusion into the workflow; if dictionary is empty no whitelist is present and all data will be considered.
    structural_match : dict
        Dictionary specifying a whitelist to use for structural data inclusion into the workflow; if dictionary is empty no whitelist is present and all data will be considered.
    subjects : list
        A whitelist of subjects to include in the workflow, if the list is empty there is no whitelist and all sessions will be considered.
    sessions : list
        A whitelist of sessions to include in the workflow, if the list is empty there is no whitelist and all sessions will be considered.
    exclude : dict
        A dictionary with any combination of "sessions", "subjects", "tasks" as keys and corresponding identifiers as values.
        If this is specified, matching entries will be excluded in the analysis.

    Returns
    -------
    bids_base : string
        Path to the BIDS root directory.
    out_base : string
        Output base directory - inside which a directory named `workflow_name` (as well as associated directories) is located.
    out_dir : string
        Directory where output is located (gives path to workflow_name).
    template : string
        Full path to the template.
    registration_mask : string
        Full path to the registration mask.
    data_selection : df
        A Pandas dataframe of data from bids_base filtered according to structural_match, functional_match, subjects, sessions, and exclude.
    functional_scan_types : np array
        Functional scan types.
    structural_scan_types : np array
        Structural scan types.
    subjects_sessions : df
        Pandas dataframe giving names of subjects and sessions selected from bids_base.
    func_ind: list
        List of all functional scan entries.
    struct_ind: list
        List of all structural scan entries.
    """
    if template:
        if template == "mouse":
            # Default SAMRI mouse template/mask, as installed by the
            # mouse-brain-atlases package.
            template = '/usr/share/mouse-brain-atlases/dsurqec_200micron.nii'
            registration_mask = '/usr/share/mouse-brain-atlases/dsurqec_200micron_mask.nii'
        elif template == "rat":
            from samri.fetch.templates import fetch_rat_waxholm
            # Fetch once and reuse (the original called the fetcher twice).
            waxholm = fetch_rat_waxholm()
            template = waxholm['template']
            registration_mask = waxholm['mask']
        else:
            # Custom template path; the redundant inner `if template:` check
            # of the original was dropped (this branch implies truthiness).
            template = path.abspath(path.expanduser(template))
            if registration_mask:
                registration_mask = path.abspath(path.expanduser(registration_mask))
    else:
        # An unreachable `return -1` after this raise was removed.
        raise ValueError("No species or template path specified")

    bids_base = path.abspath(path.expanduser(bids_base))
    if not out_base:
        out_base = path.join(bids_base, 'preprocessing')
    else:
        out_base = path.abspath(path.expanduser(out_base))
    out_dir = path.join(out_base, workflow_name)

    data_selection = bids_data_selection(bids_base, structural_match, functional_match, subjects, sessions)

    # Apply exclusions up front so that the scan types, subject/session
    # iterables, and the saved CSV are all consistent with one another.
    # (The original filtered only after `subjects_sessions` and the scan
    # types were computed, so excluded entries were still iterated over.)
    if exclude:
        for key in exclude:
            data_selection = data_selection[~data_selection[key].isin(exclude[key])]

    workdir = out_dir + '_work'
    # `exist_ok=True` avoids the check-then-create race of the original.
    os.makedirs(workdir, exist_ok=True)
    data_selection.to_csv(path.join(workdir, 'data_selection.csv'))

    # Generate functional and structural scan types.
    # PyBIDS 0.6.5 and 0.10.2 compatibility: newer versions renamed the
    # `type`/`acq` columns to `datatype`/`acquisition`.
    try:
        functional_scan_types = data_selection.loc[data_selection['type'] == 'func']['acq'].values
        structural_scan_types = data_selection.loc[data_selection['type'] == 'anat']['acq'].values
    except KeyError:
        functional_scan_types = data_selection.loc[data_selection['datatype'] == 'func']['acquisition'].values
        structural_scan_types = data_selection.loc[data_selection['datatype'] == 'anat']['acquisition'].values

    # We start to define nipype workflow elements (nodes, connections, meta).
    subjects_sessions = data_selection[["subject", "session"]].drop_duplicates().values.tolist()

    # PyBIDS 0.6.5 and 0.10.2 compatibility (same column rename as above).
    try:
        _func_ind = data_selection[data_selection["type"] == "func"]
        _struct_ind = data_selection[data_selection["type"] == "anat"]
    except KeyError:
        _func_ind = data_selection[data_selection["datatype"] == "func"]
        _struct_ind = data_selection[data_selection["datatype"] == "anat"]
    func_ind = _func_ind.index.tolist()
    struct_ind = _struct_ind.index.tolist()

    # Diagnostic output (was wrapped in a pointless `if True:` block).
    print(data_selection)
    print(subjects_sessions)
    return bids_base, out_base, out_dir, template, registration_mask, data_selection, functional_scan_types, structural_scan_types, subjects_sessions, func_ind, struct_ind
# This example illustrates how to generate a functional connectivity matrix and its respective plot.
import matplotlib
# Select a non-interactive backend so the example also runs headless.
matplotlib.use('Agg')
import os
from os import path

from samri.analysis import fc
from samri.fetch.templates import fetch_rat_waxholm
from samri.plotting import connectivity

# Results directory; it is already absolute and user-expanded here, so the
# original's repeated abspath/expanduser calls were dropped, and
# `exist_ok=True` replaces the racy exists-then-makedirs check.
results_dir = path.abspath(path.expanduser('~/.samri_files/results/fc/'))
os.makedirs(results_dir, exist_ok=True)

# Fetch the template bundle (template, atlas, mask, labels).
template = fetch_rat_waxholm()

trial = 'MhBu'
# Preprocessed functional timeseries for the example subject/session.
ts = path.abspath(path.expanduser('~/ni_data/data/preprocessing/composite/sub-22/ses-noFUSr0/func/sub-22_ses-noFUSr0_acq-seEPI_trial-'+trial+'.nii.gz'))
figsize = (50, 50)

correlation_matrix = fc.correlation_matrix(ts,
    labels_img=template['atlas'],
    mask=template['mask'],
    save_as=results_dir + '/correlation_matrix_'+trial+'.csv',
    )
connectivity.plot_connectivity_matrix(correlation_matrix,
    figsize=figsize,
    labels=template['labels'],
    save_as=results_dir + '/correlation_matrix_'+trial+'.png',
    )
# Also plot the dendrogram (`dendogram` is the spelling of the SAMRI API).
fc.dendogram(correlation_matrix,
    figsize=figsize,
    save_as=results_dir + '/dendogram_'+trial+'.png',
    )