def main():
    """Report subjects that have complete dir96/dir97 diffusion scan pairs
    but only one of the two dir95 phase-encoding scans (LR or RL).
    """
    archive = hcp3t_archive.Hcp3T_Archive()

    for project_name in ['HCP_500', 'HCP_900']:
        for subject_id in archive.available_subject_ids(project_name):
            subject_info = hcp3t_subject.Hcp3TSubjectInfo(
                project_name, subject_id)
            available_scans = archive.available_diffusion_scan_names(
                subject_info)

            def has_scan(dir_tag, encoding):
                # True when the expected scan file name for this subject,
                # direction count, and phase encoding is listed as available.
                name = (subject_info.subject_id + '_3T_DWI_' + dir_tag +
                        '_' + encoding + '.nii.gz')
                return name in available_scans

            # Only subjects with all four dir96/dir97 scans are of interest.
            if not (has_scan('dir96', 'LR') and has_scan('dir96', 'RL') and
                    has_scan('dir97', 'LR') and has_scan('dir97', 'RL')):
                continue

            if has_scan('dir95', 'LR') and not has_scan('dir95', 'RL'):
                _inform(
                    "Subject: " + str(subject_info) +
                    " has all dir96 and dir97 scans and only dir95_LR.")
            elif not has_scan('dir95', 'LR') and has_scan('dir95', 'RL'):
                _inform(
                    "Subject: " + str(subject_info) +
                    " has all dir96 and dir97 scans and only dir95_RL.")
def main():
    """Check Diffusion Preprocessing output size for one or all subjects.

    Parses -p/--project (required) and -s/--subject (default 'all'),
    prints one tab-separated result row per subject, and returns True
    only if every checked subject passed the size check.
    """
    # create a parser object for getting the command line options
    parser = my_argparse.MyArgumentParser(
        description="Program to check Diffusion Preprocessing Output size")

    # mandatory arguments
    parser.add_argument('-p', '--project', dest='project', required=True,
                        type=str)

    # optional arguments
    parser.add_argument('-s', '--subject', dest='subject', required=False,
                        default='all', type=str)

    # parse the command line arguments
    args = parser.parse_args()

    # show parsed arguments
    log.debug("Project: " + args.project)
    log.debug("Subject: " + args.subject)

    # create archive
    archive = hcp3t_archive.Hcp3T_Archive()

    # Create a list of subjects to process
    subject_list = _build_subject_list(archive, args.project, args.subject)

    # Create a DiffusionOutputSizeChecker
    size_checker = DiffusionOutputSizeChecker()

    all_succeeded = True
    print("Subject\tExpected Volumes\tCheck Success")

    for subject in subject_list:
        subject_info = hcp3t_subject.Hcp3TSubjectInfo(args.project, subject)
        try:
            # check the diffusion preprocessing size for this subject
            success, expected_size, msg = \
                size_checker.check_diffusion_preproc_size(archive,
                                                          subject_info)
        except NoDiffusionPreprocResource:
            print(subject_info.subject_id + "\t" + "N/A" + "\t" + "N/A" +
                  "\t" + "No Diff Preproc Resource")
            all_succeeded = False
        except FileNotFoundError:
            print(subject_info.subject_id + "\t" + "N/A" + "\t" + "N/A" +
                  "\t" + "A necessary output file was not found")
            all_succeeded = False
        else:
            print(subject_info.subject_id + "\t" + str(expected_size) +
                  "\t" + str(success) + "\t" + msg)
            all_succeeded = all_succeeded and success

    return all_succeeded
def _simple_interactive_demo():
    """Interactive demo: for a fixed list of HCP_500 subjects, log whether
    the processed resource exists, whether processing is complete, and
    whether the diffusion preproc output has the expected size.
    """
    archive = hcp3t_archive.Hcp3T_Archive()
    completion_checker = OneSubjectCompletionChecker()

    subject_id_list = [
        100307, 100408, 101006, 101107, 101309, 101410, 101915, 102008,
        102311, 102816, 103111, 103414, 103515, 103818, 104012, 104820,
        105014, 105115, 105216, 105923, 106016, 106319, 106521, 107321,
        107422, 108121, 108323, 108525, 108828, 109123, 109325, 110411,
        111312, 901442, 904044, 907656, 910241, 912447, 917255, 922854,
        930449, 932554, 937160, 951457, 957974, 958976, 959574, 965367,
        965771, 978578, 979984, 983773, 984472, 987983, 991267, 992774,
        994273
    ]

    for subject_id in subject_id_list:
        hcp3t_subject_info = hcp3t_subject.Hcp3TSubjectInfo(
            'HCP_500', str(subject_id))
        log.info("Checking subject: " + hcp3t_subject_info.subject_id)

        resource_exists = completion_checker.does_processed_resource_exist(
            archive, hcp3t_subject_info)
        log.info("resource_exists: " + str(resource_exists))

        processing_complete = completion_checker.is_processing_complete(
            archive, hcp3t_subject_info, False)
        log.info("processing_complete: " + str(processing_complete))

        try:
            (success, expected_size, msg) = \
                output_size_checker.check_diffusion_preproc_size(
                    archive, hcp3t_subject_info)
            log.info("expected_size: " + str(expected_size) +
                     "\tsuccess: " + str(success) + "\t" + msg)
        except output_size_checker.NoDiffusionPreprocResource:
            # BUG FIX: original logged "tsuccess" — the tab escape "\t"
            # was missing, so the separator matched neither the success
            # path above nor the FileNotFoundError path below.
            log.info("expected_size: " + "N/A" + "\tsuccess: " + "N/A" +
                     "\t" + "No Diff Preproc Resource")
        except FileNotFoundError:
            # BUG FIX: same missing "\t" escape as above.
            log.info("expected_size: " + "N/A" + "\tsuccess: " + "N/A" +
                     "\t" + "A necessary output file was not found")
def main():
    """Report every diffusion scan whose actual volume count differs from
    its expected volume count, across the HCP_500 and HCP_900 projects.
    """
    archive = hcp3t_archive.Hcp3T_Archive()

    for project_name in ['HCP_500', 'HCP_900']:
        for subject_id in archive.available_subject_ids(project_name):
            subject_info = hcp3t_subject.Hcp3TSubjectInfo(
                project_name, subject_id)
            scan_paths = archive.available_diffusion_scan_fullpaths(
                subject_info)

            for scan_path in scan_paths:
                actual_count = get_volume_count(scan_path)
                expected_count = get_expected_volume_count(scan_path)

                # Only report mismatches; matching scans stay silent.
                if actual_count != expected_count:
                    _inform("diffusion_scan: " +
                            os.path.basename(scan_path) +
                            " has expected volume count: " +
                            str(expected_count) +
                            " and actual volume count: " +
                            str(actual_count))
# Timestamp format used when reporting output resource dates.
DATE_FORMAT = '%Y-%m-%d %H:%M:%S'

if __name__ == '__main__':

    # get list of subjects to check
    subject_file_name = file_utils.get_subjects_file_name(__file__)
    logger.info("Retrieving subject list from: " + subject_file_name)
    subject_list = hcp3t_subject.read_subject_info_list(subject_file_name,
                                                        separator='\t')

    # open complete and incomplete files for writing
    # NOTE(review): opened without a context manager and not closed in this
    # view — presumably written/closed later in the loop body that continues
    # beyond this chunk; confirm against the full script.
    complete_file = open('complete.status', 'w')
    incomplete_file = open('incomplete.status', 'w')

    # create archive access object
    archive = hcp3t_archive.Hcp3T_Archive()

    # create on subject completion checker
    completion_checker = one_subject_completion_checker.OneSubjectCompletionChecker(
    )

    # create and output the header line
    header_line = "\t".join([
        "Project", "Subject ID", "Output Resource Exists",
        "Output Resource Date", "Files Exist"
    ])
    print(header_line)

    # per-subject status checking (body may continue beyond this view)
    for subject in subject_list:
        logger.debug("subject: " + str(subject))
def __init__(self):
    """Initialize the parent class with an HCP 3T archive instance."""
    three_t_archive = hcp3t_archive.Hcp3T_Archive()
    super().__init__(three_t_archive)
def main():
    """Retrieve CinaB-style data for one HCP 7T subject.

    Parses the command line arguments, retrieves the requested phase of
    data from the 7T archive (using the 3T archive as the structural
    reference), and optionally cleans up non-subdirectory, PBS job, and
    XNAT catalog files in the output study directory.
    """
    # create a parser object for getting the command line arguments
    parser = my_argparse.MyArgumentParser()

    # mandatory arguments
    parser.add_argument('-p', '--project', dest='project', required=True,
                        type=str)
    parser.add_argument('-s', '--subject', dest='subject', required=True,
                        type=str)
    parser.add_argument('-d', '--study-dir', dest='output_study_dir',
                        required=True, type=str)
    parser.add_argument('-t', '--structural-reference-project',
                        dest='structural_reference_project', required=True,
                        type=str)

    # optional arguments
    parser.add_argument('-c', '--copy', dest='copy', action='store_true',
                        required=False, default=False)
    parser.add_argument('-l', '--log', dest='log', action='store_true',
                        required=False, default=False)
    parser.add_argument('-r', '--remove-non-subdirs',
                        dest='remove_non_subdirs', action='store_true',
                        required=False, default=False)
    parser.add_argument('-j', '--remove-job-and-catalog-files',
                        dest='remove_job_and_catalog_files',
                        action='store_true', required=False, default=False)

    phase_choices = [
        "FULL", "full",
        "DIFFUSION_PREPROC_VETTING", "diffusion_preproc_vetting",
        "MULTIRUNICAFIX_PREREQS", "multirunicafix_prereqs",
        "ICAFIX", "icafix",
        "MULTIRUNICAFIX", "multirunicafix",
        "POSTFIX", "postfix"
    ]
    parser.add_argument('-ph', '--phase', dest='phase', required=False,
                        choices=phase_choices, default="full")

    # parse the command line arguments
    args = parser.parse_args()

    # show arguments
    log.info("Arguments:")
    log.info(" Project: " + args.project)
    log.info(" Structural Ref Project: " + args.structural_reference_project)
    log.info(" Subject: " + args.subject)
    log.info(" Output Study Dir: " + args.output_study_dir)
    log.info(" Copy: " + str(args.copy))
    log.info(" Phase: " + args.phase)
    log.info(" Log: " + str(args.log))
    log.info(" Remove Non-Subdirs: " + str(args.remove_non_subdirs))
    # CONSISTENCY FIX: this argument was the only parsed option not echoed
    # in the argument summary above.
    log.info(" Remove Job and Catalog Files: " +
             str(args.remove_job_and_catalog_files))

    subject_info = hcp7t_subject.Hcp7TSubjectInfo(
        project=args.project,
        structural_reference_project=args.structural_reference_project,
        subject_id=args.subject)

    archive = hcp7t_archive.Hcp7T_Archive()
    reference_archive = hcp3t_archive.Hcp3T_Archive()

    # create and configure CinabStyleDataRetriever
    data_retriever = CinabStyleDataRetriever(archive, reference_archive)
    data_retriever.copy = args.copy
    data_retriever.show_log = args.log

    # retrieve data based on phase requested
    args.phase = args.phase.upper()

    if args.phase == "FULL":
        data_retriever.get_full_data(subject_info, args.output_study_dir)
    elif args.phase == "DIFFUSION_PREPROC_VETTING":
        data_retriever.get_diffusion_preproc_vetting_data(
            subject_info, args.output_study_dir)
    elif args.phase == "MULTIRUNICAFIX_PREREQS":
        data_retriever.get_multirunicafix_prereqs(subject_info,
                                                  args.output_study_dir)
    elif args.phase == "ICAFIX":
        data_retriever.get_data_through_ICAFIX(subject_info,
                                               args.output_study_dir)
    elif args.phase == "MULTIRUNICAFIX":
        data_retriever.get_data_through_multirun_ICAFIX(
            subject_info, args.output_study_dir)
    elif args.phase == "POSTFIX":
        data_retriever.get_data_through_PostFix(subject_info,
                                                args.output_study_dir)

    if args.remove_non_subdirs:
        # remove any non-subdirectory data at the output study directory level
        data_retriever.remove_non_subdirs(args.output_study_dir)
        data_retriever.remove_non_subdirs(args.output_study_dir + os.sep +
                                          subject_info.subject_id)

    if args.remove_job_and_catalog_files:
        # remove any PBS job files and XNAT catalog files
        data_retriever.remove_pbs_job_files(args.output_study_dir)
        data_retriever.remove_xnat_catalog_files(args.output_study_dir)
def main():
    """Retrieve CinaB-style data for one HCP 3T subject.

    Parses the command line arguments, retrieves the requested phase of
    data from the 3T archive into the output study directory, and removes
    any leftover symlinks from the output study directory.
    """
    # create a parser object for getting the command line arguments
    parser = my_argparse.MyArgumentParser()

    # mandatory arguments
    parser.add_argument('-p', '--project', dest='project', required=True,
                        type=str)
    parser.add_argument('-s', '--subject', dest='subject', required=True,
                        type=str)
    parser.add_argument('-d', '--study-dir', dest='output_study_dir',
                        required=True, type=str)

    # optional arguments
    parser.add_argument('-c', '--copy', dest='copy', action='store_true',
                        required=False, default=False)

    phase_choices = [
        "FULL", "full",
        "DIFFUSION_PREPROC_VETTING", "diffusion_preproc_vetting",
        "STRUCT_PREPROC", "struct_preproc",
        "DIFFUSION_PREPROC", "diffusion_preproc",
        "DIFFUSION_BEDPOSTX", "diffusion_bedpostx",
        "APPLY_HAND_RECLASSIFICATION_PREREQS",
        "apply_hand_reclassification_prereqs",
        "REAPPLYFIX_PREREQS", "reapplyfix_prereqs"
    ]
    default_phase_choice = phase_choices[0]
    parser.add_argument('-ph', '--phase', dest='phase', required=False,
                        choices=phase_choices, default=default_phase_choice)

    # parse the command line arguments
    args = parser.parse_args()

    # show arguments
    module_logger.info("Arguments:")
    module_logger.info(" Project: " + args.project)
    module_logger.info(" Subject: " + args.subject)
    module_logger.info(" Output Study Dir: " + args.output_study_dir)
    module_logger.info(" Copy: " + str(args.copy))
    module_logger.info(" Phase: " + args.phase)

    subject_info = hcp3t_subject.Hcp3TSubjectInfo(args.project, args.subject)
    archive = hcp3t_archive.Hcp3T_Archive()

    # create and configure CinabStyleDataRetriever
    data_retriever = CinabStyleDataRetriever(archive)
    data_retriever.copy = args.copy
    data_retriever.show_log = True

    # IDIOM FIX: normalize the phase once instead of calling .upper() in
    # every branch (matches the 7T variant of this script).
    phase = args.phase.upper()

    # retrieve data based on phase requested
    if phase == "FULL":
        module_logger.debug("phase = FULL")
        data_retriever.get_full_data(subject_info, args.output_study_dir)
    elif phase == "DIFFUSION_PREPROC_VETTING":
        data_retriever.get_diffusion_preproc_vetting_data(
            subject_info, args.output_study_dir)
    elif phase == "STRUCT_PREPROC":
        data_retriever.get_data_through_STRUCT_PREPROC(
            subject_info, args.output_study_dir)
    elif phase == "DIFFUSION_PREPROC":
        data_retriever.get_data_through_DIFFUSION_PREPROC(
            subject_info, args.output_study_dir)
    elif phase == "DIFFUSION_BEDPOSTX":
        data_retriever.get_diffusion_bedpostx_data(subject_info,
                                                   args.output_study_dir)
    elif phase == "APPLY_HAND_RECLASSIFICATION_PREREQS":
        data_retriever.get_apply_hand_reclassification_prereqs(
            subject_info, args.output_study_dir)
    elif phase == "REAPPLYFIX_PREREQS":
        data_retriever.get_reapplyfix_prereqs(subject_info,
                                              args.output_study_dir)

    # remove any symlinks left in the output study directory
    data_retriever.remove_symlinks(args.output_study_dir)