def bb_basic_QC(subject, fileConfig):
    """Check the images referenced by *fileConfig* against the ideal sizes.

    Loads bb_data/ideal_config_sizes.json, compares each image's header
    dimensions (dim[1:5]) with the expected ones, logs mismatches to
    info_basic_QC.txt inside the subject folder, removes the offending keys
    from *fileConfig*, and re-writes logs/file_descriptor.json.

    Parameters
    ----------
    subject : str
        Subject folder; the function chdir's into it and back out.
    fileConfig : dict
        Maps a file-type key to a file name or a list of file names.
        Mutated in place (mismatching keys popped).

    Returns
    -------
    dict
        The (possibly reduced) *fileConfig*.
    """
    keysToPop = []
    global logger
    logger = LT.initLogging(__file__, subject)

    idealConfigFile = os.environ['BB_BIN_DIR'] + '/bb_data/ideal_config_sizes.json'
    with open(idealConfigFile, 'r') as f:
        idealConfig = json.load(f)

    os.chdir(subject)
    fd_fileName = "logs/file_descriptor.json"

    for fileN in fileConfig:
        # Normalise the entry to a list of candidate file names. For single
        # image entries, prefer the "_orig" copy when it exists on disk.
        if not isinstance(fileConfig[fileN], list):
            if bb_path.isImage(fileConfig[fileN]):
                fils = bb_path.removeImageExt(fileConfig[fileN])
                if os.path.isfile(fils + "_orig.nii.gz"):
                    fileList = [fils + "_orig.nii.gz"]
                else:
                    fileList = [fileConfig[fileN]]
            else:
                fileList = [fileConfig[fileN]]
        else:
            fileList = fileConfig[fileN]

        for fileName in fileList:
            # Only QC files that exist and have an expected-size entry.
            if os.path.isfile(fileName) and fileN in idealConfig:
                img = nib.load(fileName)
                dims = img.header['dim'][1:5]
                if not np.all(dims == idealConfig[fileN]['dims']):
                    keysToPop.append(fileN)
                    #make_unusable(fileName, idealConfig[fileName]['dep_dirs'])
                    # Append so notes from earlier QC runs are preserved;
                    # `with` guarantees the handle is closed (original leaked
                    # it on a write error).
                    with open('info_basic_QC.txt', 'a') as qc_log:
                        qc_log.write('Problem in file ' + fileName + '\n')

    for keyToPop in keysToPop:
        fileConfig.pop(keyToPop, None)

    # Persist the surviving configuration for the later pipeline stages.
    with open(fd_fileName, "w") as fd:
        json.dump(fileConfig, fd, sort_keys=True, indent=4)

    os.chdir("..")
    return fileConfig
def main():
    """Entry point for the full BioBank pipeline.

    Parses the subject folder, runs the file manager and basic QC, then
    submits the structural, functional, diffusion and IDP stages (functional
    and diffusion only when fieldmap data allows topup).
    """
    parser = MyParser(description='BioBank Pipeline Manager')
    parser.add_argument("subjectFolder", help='Subject Folder')
    argsa = parser.parse_args()

    # Normalise the subject path: strip whitespace and a trailing slash.
    # endswith() guards against the IndexError the original subject[-1]
    # raised on an empty argument.
    subject = argsa.subjectFolder.strip()
    if subject.endswith('/'):
        subject = subject[:-1]

    logger = LT.initLogging(__file__, subject)

    logger.info('Running file manager')
    fileConfig = bb_file_manager(subject)
    fileConfig = bb_basic_QC(subject, fileConfig)
    logger.info("File configuration after running file manager: " +
                str(fileConfig))

    # runTopup ==> Having fieldmap: both phase-encode directions must be
    # present and non-empty. Equivalent to the original double-negated test.
    runTopup = (fileConfig.get('AP', '') != '') and (fileConfig.get('PA', '') != '')
    if not runTopup:
        logger.error(
            "There is no proper DWI data. Thus, the B0 file cannot be "
            "generated in order to run topup"
        )

    # Default value for job id. SGE does not wait for a job with this id.
    jobSTEP1 = "-1"
    jobSTEP2 = "-1"
    jobSTEP3 = "-1"

    jobSTEP1 = bb_pipeline_struct(subject, runTopup, fileConfig)

    if runTopup:
        jobSTEP2 = bb_pipeline_func(subject, jobSTEP1, fileConfig)
        jobSTEP3 = bb_pipeline_diff(subject, jobSTEP1, fileConfig)

    # IDP stage waits on all previous jobs (comma-separated SGE hold list).
    jobSTEP4 = bb_IDP(subject, jobSTEP1 + "," + jobSTEP2 + "," + jobSTEP3,
                      fileConfig)

    LT.finishLogging(logger)
def main():
    """Entry point for the stand-alone basic QC tool.

    Loads the ideal file configuration and runs bb_basic_QC on the given
    subject folder.
    """
    parser = MyParser(description='BioBank basic QC tool')
    parser.add_argument("subjectFolder", help='Subject Folder')
    argsa = parser.parse_args()

    # Normalise the subject path: strip whitespace and a trailing slash.
    # endswith() guards against the IndexError the original subject[-1]
    # raised on an empty argument.
    subject = argsa.subjectFolder.strip()
    if subject.endswith('/'):
        subject = subject[:-1]

    logger = LT.initLogging(__file__, subject)
    logger.info('Running file manager')

    idealConfigFile = os.environ['BB_BIN_DIR'] + '/bb_data/ideal_config.json'
    with open(idealConfigFile, 'r') as f:
        fileConfig = json.load(f)

    fileConfig = bb_basic_QC(subject, fileConfig)
def main():
    """Entry point for the UK Biobank to BIDS converter."""
    global logger

    parser = MyParser(description='BioBank Pipeline FILE Manager')
    parser.add_argument("subjectFolder", help='Subject Folder')
    argsa = parser.parse_args()

    # Normalise the subject path: strip whitespace and a trailing slash.
    # endswith() guards against the IndexError the original subject[-1]
    # raised on an empty argument.
    subject = argsa.subjectFolder.strip()
    if subject.endswith('/'):
        subject = subject[:-1]

    logger = LT.initLogging(__file__, subject)
    logger.info('Running UK Biobank to BIDS converter')

    bb_UKBB_to_BIDS_converter(subject)

    LT.finishLogging(logger)
def main():
    """Entry point for the stand-alone file manager: classify the subject's
    raw files and log the resulting configuration."""
    parser = MyParser(description='BioBank Pipeline FILE Manager')
    parser.add_argument("subjectFolder", help='Subject Folder')
    argsa = parser.parse_args()

    # Normalise the subject path: strip whitespace and a trailing slash.
    # endswith() guards against the IndexError the original subject[-1]
    # raised on an empty argument.
    subject = argsa.subjectFolder.strip()
    if subject.endswith('/'):
        subject = subject[:-1]

    logger = LT.initLogging(__file__, subject)
    logger.info('Running file manager')

    fileConfig = bb_file_manager(subject)
    logger.info("File configuration after running file manager: \n" +
                str(fileConfig))

    LT.finishLogging(logger)
def bb_file_manager(subject):
    """Classify the raw files in a subject folder into the pipeline layout.

    Creates the expected directory tree, then applies a sequence of
    (glob patterns, action) rules to the files found in the subject folder
    (actions such as manage_struct/manage_fMRI/... populate the global
    ``fileConfig``). If logs/file_descriptor.json already exists, the stored
    configuration is loaded instead of re-classifying.

    Parameters
    ----------
    subject : str
        Subject folder; the function chdir's into it and back out.

    Returns
    -------
    dict
        The global ``fileConfig`` built (or loaded) for this subject.
    """
    global logger
    global idealConfig
    global fileConfig

    logger = LT.initLogging(__file__, subject)

    idealConfigFile = os.environ['BB_BIN_DIR'] + '/bb_data/ideal_config.json'
    with open(idealConfigFile, 'r') as f:
        idealConfig = json.load(f)

    directories = [
        "delete", "unclassified", "raw", "T1", "T2_FLAIR", "SWI",
        "SWI/PHA_TE1", "SWI/PHA_TE2", "SWI/MAG_TE1", "SWI/MAG_TE2",
        "SWI/unclassified", "dMRI", "dMRI/raw", "fMRI", "fieldmap"
    ]

    # Rules are applied in order; the final "*.*" rule sweeps anything the
    # earlier, more specific rules did not claim into unclassified/.
    patterns_actions = [
        [["*.*"], capitalize_and_clean],
        [["dicom", "DICOM"], move_to, "delete/"],
        [["T1*.nii.gz"], manage_struct, "T1"],
        [["T2*FLAIR*.nii.gz"], manage_struct, "T2"],
        [["*FMRI*RESTING*.nii.gz", "MB8*RESTING*.nii.gz"], manage_fMRI, "rfMRI"],
        [["*FMRI*TASK*.nii.gz", "MB8*TASK*.nii.gz"], manage_fMRI, "tfMRI"],
        [["SWI*nii.gz"], manage_SWI],
        [["DIFF_*", "MB3_*"], manage_DWI],
        [["SWI*.*"], move_to, "SWI/unclassified/"],
        [["*.*"], move_to, "unclassified/"],
    ]

    os.chdir(subject)
    fd_fileName = "logs/file_descriptor.json"

    # Check if the subject has already been managed
    if os.path.isfile(fd_fileName):
        with open(fd_fileName, 'r') as f:
            fileConfig = json.load(f)
    else:
        for directory in directories:
            if not os.path.isdir(directory):
                os.mkdir(directory)

        # NOTE: the original also ran glob.glob("*.*") + sort() here, but the
        # result was unconditionally overwritten inside the loop below, so
        # that dead code has been removed.

        # Organize the files in sets
        for patterns_action in patterns_actions:
            patterns = patterns_action[0]
            action = patterns_action[1]
            args = patterns_action[2:]

            # Collect matches for every pattern, de-duplicated, in order.
            listFiles = []
            for fileTy in patterns:
                listFiles.extend(
                    [x for x in glob.glob(fileTy) if x not in listFiles])

            logger.info("Performing action " + action.__name__ +
                        " on files with patterns " + str(patterns))
            action(listFiles, *args)

        # Create file descriptor (with-block replaces the original unclosed
        # open/close pair).
        with open(fd_fileName, "w") as fd:
            json.dump(fileConfig, fd, sort_keys=True, indent=4)

    os.chdir("..")

    # Result unused here; call kept in case formatFileConfig has side effects
    # (e.g. logging) — TODO confirm and drop if it is pure.
    fileConfigFormatted = formatFileConfig()
    return fileConfig