def run_randomise_summary_basics(args: object) \
        -> Tuple[List[CorrpMap], pd.DataFrame]:
    '''Run the randomise output summary (ran from commandline)'''
    # Collect corrp map paths based on the parsed commandline arguments
    corrp_map_locs = get_corrp_map_locs(args)

    # Build a CorrpMap object for every located corrp file
    # ** this is where corrpMaps are formed
    corrp_map_classes = get_corrp_map_classes(corrp_map_locs, args)

    # All corrpMaps in the list are assumed to share the same matrix and
    # contrast, so the first map's matrix information is representative
    print_head('Matrix information')
    first_map = corrp_map_classes[0]
    first_map.print_matrix_info()

    # TODO: copy this to html summary
    if args.print_cov_info and hasattr(first_map, 'covar_info_dict'):
        print_head('Covariate summary')
        for col, table in first_map.covar_info_dict.items():
            print(col)
            print_df(table)

    # Print information of each corrpMap
    df = print_and_return_corrpMaps_summary(corrp_map_classes,
                                            sig_only=args.sig_only)

    return corrp_map_classes, df
def run_atlas_query(args: object, corrp_map_classes: List[object]) -> None:
    '''Atlas query'''
    # skip entirely unless the atlasquery option is on
    if not args.atlasquery:
        return

    print_head('Atlas query of the significant cluster')
    print_head('(atlasquery from FSL)')
    # only significant maps are queried against the atlas
    for corrpMap in (x for x in corrp_map_classes if x.significant):
        corrpMap.get_atlas_query()
        print_df(corrpMap.df_query)
def print_matrix_info(self):
    '''Print a short summary of the design matrix / contrast files.'''
    print_head('Matrix summary')
    print(f'Contrast file : {self.contrast_file}')
    print(f'Matrix file : {self.matrix_file}')
    print()
    # each attribute below is only present when the matrix was parsed
    if hasattr(self, 'matrix_df'):
        print(f'total number of data point : {len(self.matrix_df)}')
    if hasattr(self, 'group_cols'):
        print('Group columns are : ' + ', '.join(self.group_cols))
    if hasattr(self, 'matrix_info'):
        print_df(self.matrix_info)
def get_corrp_map_classes(corrp_map_locs: List, args: object) -> List[object]:
    '''Return corrpMaps from the corrp_map_locs and additional args info

    Key Arguments:
        corrp_map_locs: list of corrp map locations to summarize
        args: argparse namespace carrying threshold, contrast, matrix,
              template, caselist, grouplabels and (optionally)
              merged_img_dir attributes

    Returns:
        list of CorrpMap objects, one per location

    Exits the program when no corrp file was found.
    '''
    print_head('Summarizing information for files below')

    # set caselist as empty string if the caselist is not given
    caselist = args.caselist if args.caselist else ''

    # options shared by every CorrpMap; building them once removes the
    # previously duplicated constructor calls
    corrp_map_kwargs = dict(threshold=args.threshold,
                            contrast_file=args.contrast,
                            matrix_file=args.matrix,
                            template=args.template,
                            caselist=caselist,
                            group_labels=args.grouplabels)
    # merged_img_dir is only forwarded when it was explicitly given
    if args.merged_img_dir:
        corrp_map_kwargs['merged_img_dir'] = args.merged_img_dir

    corrp_map_classes = []
    for corrp_map_loc in corrp_map_locs:
        print(f'\t{corrp_map_loc}')
        corrp_map_classes.append(CorrpMap(corrp_map_loc, **corrp_map_kwargs))

    # if no corrpMap is defined
    # (explicit empty-list check instead of the previous fragile
    #  try/except NameError on the leaked loop variable)
    if not corrp_map_classes:
        sys.exit('Please check there is corrp file')

    return corrp_map_classes
def create_figure(args: object, corrp_map_classes: List[object]) -> None:
    '''Save figures (and tbss_fill outputs) for significant corrp maps.'''
    # nothing to do unless one of the figure options is set
    if not (args.figure or args.tbss_fill):
        return

    print_head('Saving figures')
    for corrpMap in corrp_map_classes:
        # only significant maps get figures
        # (deliberate `is True` check, kept from the original contract)
        if corrpMap.significant is not True:
            continue

        # tbss_fill if tbss_fill=True
        if args.tbss_fill:
            print_head(f'Estimating tbss_fill for {corrpMap.location}')
            # run tbss_fill
            corrpMap.tbss_fill_out = re.sub(
                '.nii.gz', '_filled.nii.gz', str(corrpMap.location))
            corrpMap.tbss_fill()
            corrpMap.get_figure(figure_same_slice=args.figure_same_slice)
            plt.close()

        if args.figure:
            corrpMap.get_figure(figure_same_slice=args.figure_same_slice)
            plt.close()
def print_and_return_corrpMaps_summary(
        corrp_map_classes: List[object],
        sig_only: bool = False) -> pd.DataFrame:
    """Print the information of each corrpMap in the corrp_map_classes list

    Key Arguments:
        corrp_map_classes: list of CorrpMap objects, each carrying a `df`
                           one-row summary DataFrame
        sig_only: when True, only print rows for significant maps

    Returns:
        concatenation of every corrpMap.df, sorted by 'file name'
    """
    print_head('Result summary')

    # concatenate corrpMap.df
    df = pd.concat([x.df for x in corrp_map_classes], sort=False)
    df = df.sort_values('file name')

    if sig_only:
        print_head('Only showing significant maps')
        try:
            df_sig = df.groupby('Significance').get_group(True)
            print_df(df_sig.set_index(df_sig.columns[0]))
        except KeyError:
            # get_group(True) raises KeyError when no map is significant
            # (message typo "return withtout" fixed to "rerun without")
            print('There is no significant corrp map. Please rerun without '
                  'the -so option')
    else:
        print_df(df.set_index(df.columns[0]))

    return df
def get_skeleton_summary(args: object, corrp_map_classes: List[object]) -> None:
    '''skeleton summary

    For each corrpMap that has the required inputs (a parsed design
    matrix, a known modality and a located merged 4d skeleton file),
    run `skeleton_summary` once per unique merged 4d file.

    Key Arguments:
        args: argparse namespace with skeleton_summary and tbss_all_loc
        corrp_map_classes: list of CorrpMap objects
    '''
    # skeleton summary parts
    if not args.skeleton_summary:
        return

    print_head('Running skeleton summary')
    # merged 4d files already summarized - several corrp maps may share
    # one merged skeleton, which should only be processed once
    summarized_merged_maps = []
    for corrpMap in corrp_map_classes:
        if not hasattr(corrpMap, 'matrix_df'):
            # (typo "infromation" fixed; dead `pass` removed)
            print('Please provide correct design matrix. The file is '
                  'required to read in the group information.')
        elif corrpMap.modality == 'unknown':
            print(f'The modality for {corrpMap.location} is unknown to '
                  'the current version of randomise_summary. Please check '
                  'the modality is in the list below.')
            print(' ' + ' '.join(corrpMap.modality_full_list))
        elif corrpMap.merged_4d_file == 'missing':
            print(f'Merged 4d file for {corrpMap.location} is missing. '
                  f'Please check there are all_{corrpMap.modality}'
                  '_skeleton.nii.gz in the same directory.')
        elif hasattr(corrpMap, 'merged_4d_file') and \
                corrpMap.merged_4d_file not in summarized_merged_maps and \
                corrpMap.merged_4d_file != 'missing' and args.tbss_all_loc:
            print_head("Summarizing merged 4d file:"
                       f"{corrpMap.merged_4d_file}")
            # tbss_all layout: <tbss_all_loc>/<modality>/warped
            warp_dir = str(
                Path(args.tbss_all_loc) / corrpMap.modality / 'warped')
            print(warp_dir)
            caselist = str(Path(args.tbss_all_loc) / 'log/caselist.txt')
            skeleton_summary(corrpMap, warp_dir=warp_dir, caselist=caselist)
            summarized_merged_maps.append(corrpMap.merged_4d_file)
            print()
        elif hasattr(corrpMap, 'merged_4d_file') and \
                corrpMap.merged_4d_file not in summarized_merged_maps and \
                corrpMap.merged_4d_file != 'missing':
            # same as above, but without tbss_all_loc: let
            # skeleton_summary use its own defaults
            print_head("Summarizing merged 4d file:"
                       f"{corrpMap.merged_4d_file}")
            skeleton_summary(corrpMap)
            summarized_merged_maps.append(corrpMap.merged_4d_file)
            print()
def __init__(self, location: Union[str, Path], threshold=0.95,
             contrast_file=False, matrix_file=False, **kwargs):
    '''Initialize a CorrpMap from a randomise corrp nifti file path.

    Key Arguments:
        location: path of the corrp nifti file
        threshold: significance threshold on corrp values
                   (default 0.95, i.e. 1 - p)
        contrast_file: contrast (*.con) file path, or False to search the
                       corrp file's directory
        matrix_file: design matrix (*.mat) file path, or False to search
                     the corrp file's directory
        kwargs: may contain 'group_labels', 'caselist',
                'merged_img_dir' and 'template'
    '''
    #TODO add group labels
    #TODO add merged image location
    #TODO add randomise script location and contents
    self.location = Path(location)
    self.name = self.location.name
    self.threshold = threshold

    # an empty string marks "not specified"; the search below will then
    # look for a candidate file next to the corrp map
    if contrast_file == False:  # contrast not specified
        self.contrast_file = ''
    else:
        self.contrast_file = contrast_file

    if matrix_file == False:  # matrix file not specified
        self.matrix_file = ''
    else:
        self.matrix_file = matrix_file

    # group labels
    if 'group_labels' in kwargs:
        self.group_labels = kwargs.get('group_labels')
    else:
        self.group_labels = False

    # if caselist is given
    # in `get_corrp_map_classes`, '' is given as the caselist
    # when there is no caelist is given to the randomise_summary.py
    # NOTE: self.caselist is only set when the file actually exists
    if 'caselist' in kwargs:
        caselist = kwargs.get('caselist')
        if Path(caselist).is_file():
            with open(caselist, 'r') as f:
                self.caselist = [x.strip() for x in f.readlines()]

    # when the given (or empty) contrast/matrix paths do not exist,
    # search the corrp map's own directory for likely candidates
    if not Path(self.contrast_file).is_file():
        self.contrast_file = search_and_select_one('contrast_file',
                                                   self.location.parent,
                                                   ['*.con', 'contrast*'],
                                                   depth=0)
    if not Path(self.matrix_file).is_file():
        self.matrix_file = search_and_select_one('matrix_file',
                                                 self.location.parent,
                                                 ['*.mat', 'matrix*'],
                                                 depth=0)

    # Modality
    # modality must be included in its name
    self.modality_full_list = [
        'FW', 'FA', 'FAt', 'FAc', 'FAk', 'iFW', 'MK', 'MKc', 'MKk', 'MD',
        'MDt', 'RD', 'RDt', 'AD', 'ADt'
    ]
    try:
        # e.g. "tbss_FA_tfce_corrp_tstat1.nii.gz" -> "FA"
        self.modality = re.search(
            '(' + '|'.join(self.modality_full_list) + ')_',
            self.location.name).group(1)
    except AttributeError:
        # re.search returned None: no known modality in the file name
        print_head(f'No modality is detected in the file: {self.name}\n'
                   'Please add modality in the file name')
        self.modality = 'unknown'

    # Merged skeleton file
    # find merged skeleton file
    merged_skel_pattern = [
        f'*all*_{self.modality}[_.]*skel*nii.gz',
        f'*{self.modality}*merged*.nii.gz',
        f'all_{self.modality}_*'
    ]
    if 'merged_img_dir' in kwargs:
        self.merged_4d_file = search_and_select_one(
            'merged_skeleton',
            kwargs.get('merged_img_dir'),
            merged_skel_pattern, depth=0)
    else:
        self.merged_4d_file = search_and_select_one('merged_skeleton',
                                                    self.location.parent,
                                                    merged_skel_pattern,
                                                    depth=0)

    # information from the file name
    # e.g. "...tstat1.nii.gz" -> test_kind 't', stat_num '1'
    self.test_kind = re.search(r'(\w)stat\d+.nii.gz', self.name).group(1)
    self.stat_num = re.search(r'(\d+).nii.gz', self.name).group(1)

    # Below variables are to estimate number of significant voxels in each
    # hemisphere

    # checking significance
    self.check_significance()

    # template settings: If not specified use ENIGMA
    if 'template' in kwargs:
        self.template = kwargs.get('template')
    else:
        self.template = 'enigma'
    self.template_settings()

    if self.significant:
        # if significant read in skeleton mask
        #TODO
        # enigma settings
        skel_img, self.skel_mask_data = get_nifti_img_data(
            self.skel_mask_loc)
        self.get_significant_info()
        self.get_significant_overlap()
        # uncache the skeleton data matrix
        skel_img.uncache()

    # summary in pandas DataFrame
    self.make_df()

    # if matrix or contrast file is given
    # ('missing' is the sentinel returned by search_and_select_one when
    #  no candidate file was found)
    if self.matrix_file != 'missing':
        self.get_matrix_info()
    if self.contrast_file != 'missing':
        self.get_contrast_info()
        self.get_contrast_info_english()
        self.update_with_contrast()