class Sorting:
    # DB access layer for the Sorting table (downloaded scans waiting to be moved into the raw folder structure).
    def __init__(self):
        self.tableName = 'Sorting'
        self.DBClient = DbUtils()
        self.sqlBuilder = SQLBuilder()

    def getObjectFromTuple(self, tuple):
        valuesDict = dict(record_id=tuple[0], study=tuple[1], rid=tuple[2], scan_type=tuple[3],
                          scan_date=tuple[4].strftime("%Y-%m-%d"), scan_time=str(tuple[5]),
                          s_identifier=tuple[6], i_identifier=tuple[7], file_type=tuple[8],
                          download_folder=tuple[9], raw_folder=tuple[10], moved=tuple[11])
        return SortingObject(valuesDict)

    def insertToTable(self, objList):
        for obj in objList:
            self.DBClient.executeNoResult(self.sqlBuilder.getSQL_AddNewEntryToSortingTable(obj.sqlInsert()))

    def getUnmovedFilesPerStudy(self, study):
        unmovedList = self.DBClient.executeAllResults(
            self.sqlBuilder.getSQL_getUnmovedFilesFromSortingTable(study, tuple(sc.ProcessingModalityAndPipelineTypePerStudy[study].keys())))
        return [self.getObjectFromTuple(t) for t in unmovedList]

    def setMovedTrue(self, sortingObj):
        sortingObj.moved = 1
        self.saveObj(sortingObj)

    def saveObj(self, sortingObj):
        self.DBClient.executeNoResult(self.sqlBuilder.getSQL_saveObjSortingTable(sortingObj))
class Processing:
    # DB access layer for the Processing table (converted scans queued for the per-modality pipelines).
    def __init__(self):
        self.DBClient = DbUtils()
        self.sqlBuilder = SQLBuilder()

    def getObjectFromTuple(self, tuple):
        valuesDict = dict(record_id=tuple[0], study=tuple[1], rid=tuple[2], modality=tuple[3],
                          scan_date=tuple[4].strftime("%Y-%m-%d"), scan_time=str(tuple[5]),
                          s_identifier=tuple[6], i_identifier=tuple[7], root_folder=tuple[8],
                          converted_folder=tuple[9], version=tuple[10], processed=tuple[12])
        return ProcessingObject(valuesDict)

    def insertToTable(self, objList):
        for obj in objList:
            self.DBClient.executeNoResult(self.sqlBuilder.getSQL_AddNewEntryToProcessingTable(obj.sqlInsert()))

    def insertFromConvertionObj(self, convertionObj):
        convertionValues = convertionObj.getValuesDict()
        convertionValues['modality'] = sc.ProcessingModalityAndPipelineTypePerStudy[convertionObj.study][convertionObj.scan_type]
        # Drop the trailing 'converted/final' components; the root folder ends at the version directory.
        convertionValues['root_folder'] = '/'.join(convertionObj.converted_folder.split('/')[0:-2])
        self.insertToTable([ProcessingObject(convertionValues)])

    def getToProcessListPerStudy(self, study):
        toProcessList = self.DBClient.executeAllResults(self.sqlBuilder.getSQL_getToBeProcessedFromProcessingTable(study))
        return [self.getObjectFromTuple(t) for t in toProcessList]
class Conversion:
    # DB access layer for the Conversion table (raw scans waiting to be converted).
    def __init__(self):
        self.tableName = 'Conversion'
        self.DBClient = DbUtils()
        self.sqlBuilder = SQLBuilder()

    def getObjectFromTuple(self, tuple):
        valuesDict = dict(record_id=tuple[0], study=tuple[1], rid=tuple[2], scan_type=tuple[3],
                          scan_date=tuple[4].strftime("%Y-%m-%d"), scan_time=str(tuple[5]),
                          s_identifier=tuple[6], i_identifier=tuple[7], file_type=tuple[8],
                          raw_folder=tuple[9], converted_folder=tuple[10], version=tuple[11], converted=tuple[12])
        return ConversionObject(valuesDict)

    def insertToTable(self, objList):
        for obj in objList:
            self.DBClient.executeNoResult(self.sqlBuilder.getSQL_AddNewEntryToConversionTable(obj.sqlInsert()))

    def get_version(self, sortingObj, versionDict):
        # ADNI scans downloaded from a 'Uniform' (pre-processed) collection are handled by the V2 pipelines;
        # everything else uses the per-modality entry in versionDict, or V1 if none is given.
        if sortingObj.study == 'ADNI' and 'Uniform' in sortingObj.download_folder:
            return 'V2'
        modality = sc.ProcessingModalityAndPipelineTypePerStudy[sortingObj.study][sortingObj.scan_type]
        return versionDict[modality] if modality in versionDict else 'V1'

    def insertFromSortingObj(self, sortingObj, versionDict):
        sortingValues = sortingObj.getValuesDict()
        version = self.get_version(sortingObj, versionDict)
        sortingValues['converted_folder'] = '{0}/{1}/{2}/{3}/{4}_{5}_{6}/{7}/converted/final'.format(sc.studyDatabaseRootDict[sortingObj.study], sortingObj.study, sortingObj.scan_type, sortingObj.rid, sortingObj.scan_date, sortingObj.s_identifier, sortingObj.i_identifier, version)
        sortingValues['version'] = version
        sortingValues['converted'] = 0
        self.insertToTable([ConversionObject(sortingValues)])

    def gettoBeConvertedPerStudy(self, study):
        toConvertList = self.DBClient.executeAllResults(self.sqlBuilder.getSQL_getToBeConvertedFileFromConversionTable(study))
        return [self.getObjectFromTuple(t) for t in toConvertList]

    def setConvertedTrue(self, convertionObj):
        convertionObj.converted = 1
        self.saveObj(convertionObj)

    def setConvertedFailed(self, convertionObj):
        convertionObj.skip = 1
        self.saveObj(convertionObj)

    def saveObj(self, convertionObj):
        self.DBClient.executeNoResult(self.sqlBuilder.getSQL_saveObjConversionTable(convertionObj))

    def getConvertedListPerStudy(self, study):
        convertedList = self.DBClient.executeAllResults(self.sqlBuilder.getSQL_getAllConvertedFromConvertionTable(study))
        return [self.getObjectFromTuple(t) for t in convertedList]
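# --- Illustrative usage sketch (not part of the original sources) ---
# A minimal sketch of how the Sorting and Conversion handlers above could be wired together for one study,
# assuming both classes are importable from one place. Sorting, Conversion, getUnmovedFilesPerStudy,
# insertFromSortingObj and setMovedTrue come from the code above; the driver function and the example
# 'study'/'versionDict' values are assumptions for illustration only.
def queue_unmoved_scans_for_conversion(study, versionDict):
    sorting = Sorting()
    conversion = Conversion()
    for sortingObj in sorting.getUnmovedFilesPerStudy(study):
        # get_version() picks V1/V2 and insertFromSortingObj() builds the converted_folder path and inserts the row.
        conversion.insertFromSortingObj(sortingObj, versionDict)
        # In the real pipeline the 'moved' flag is presumably set only after the files are physically moved;
        # it is shown here just to illustrate the setMovedTrue() call.
        sorting.setMovedTrue(sortingObj)

# Example call (hypothetical values): queue_unmoved_scans_for_conversion('ADNI', {'AV45': 'V2'})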
class CoregHandler:
    def __init__(self):
        self.DBClient = DbUtils()

    def requestCoreg(self, study, rid, type, pet_folder, t1_folder, petScanType, t1ScanType, xfm_name):
        regsql = "INSERT IGNORE INTO Coregistration VALUES (Null, '{0}', '{1}', '{2}', '{3}', '{4}', '{5}', '{6}', '{7}', 0, 0, 0, Null)".format(study.upper(), rid, type.upper(), pet_folder, t1_folder, petScanType, t1ScanType, xfm_name)
        self.DBClient.executeNoResult(regsql)
class QCHandler:
    def __init__(self):
        self.DBClient = DbUtils()

    def requestQC(self, study, modal_table, modal_tableId, qcField, qctype, qcFolder):
        qcsql = "INSERT IGNORE INTO QC VALUES (Null, '{0}', '{1}', '{2}', '{3}', '{4}', '{5}', 0, 0, 0, 0, Null)".format(study.upper(), modal_table, modal_tableId, qcField, qctype, qcFolder)
        self.DBClient.executeNoResult(qcsql)

    def checkQCJobs(self, study, modality):
        # Once a modality pipeline row is both QC'd and finished, mark the matching Processing row as processed/QC passed.
        sql = "SELECT * FROM {0}_{1}_Pipeline WHERE QC = 1 AND FINISHED = 1".format(study, modality)
        res = self.DBClient.executeAllResults(sql)
        if len(res) < 1:
            return 0
        else:
            for result in res:
                proc_id = result[1]
                setProcessedSQL = "UPDATE Processing SET PROCESSED = 1, QCPASSED = 1 WHERE RECORD_ID = {0}".format(proc_id)
                self.DBClient.executeNoResult(setProcessedSQL)
# One-off cleanup script: remove a set of image series (by I_IDENTIFIER) from disk and from the Sorting and
# Conversion tables, then look up the related Processing and pipeline-table records.
import os
import shutil
from Utils.DbUtils import DbUtils

DBClient = DbUtils()
IID_list = ['I546612', 'I620366', 'I535767', 'I560359', 'I581738']
for iid in IID_list:
    getDataFolderSQL = "SELECT RAW_FOLDER FROM Sorting WHERE I_IDENTIFIER = '{0}'".format(iid)
    res = DBClient.executeAllResults(getDataFolderSQL)
    if len(res) == 0:
        pass
    else:
        rawFolder = res[0][0]
        dataFolder = os.path.abspath(os.path.join(rawFolder, '../'))
        shutil.rmtree(dataFolder)
        print(dataFolder)
    delsql = "DELETE FROM Sorting WHERE I_IDENTIFIER = '{0}'".format(iid)
    DBClient.executeNoResult(delsql)
    delsql = "DELETE FROM Conversion WHERE I_IDENTIFIER = '{0}'".format(iid)
    DBClient.executeNoResult(delsql)
    getProSQL = "SELECT RECORD_ID, STUDY, MODALITY FROM Processing WHERE I_IDENTIFIER = '{0}'".format(iid)
    res2 = DBClient.executeAllResults(getProSQL)
    if len(res2) == 0:
        pass
    else:
        P_ID = res2[0][0]
        study = res2[0][1]
        mod = res2[0][2]
        print(P_ID)
        getPPSQL = "SELECT RECORD_ID FROM {1}_{2}_Pipeline WHERE PROCESSING_TID = '{0}'".format(P_ID, study, mod)
        res3 = DBClient.executeAllResults(getPPSQL)
    res = DBClient.executeAllResults(sql)
    if len(res) > 0:
        if len(DBClient.executeAllResults("SELECT * FROM Auth WHERE USER = '{0}'".format(args.createUser))) > 0:
            print('User already exists. ')
            sys.exit(0)
        newpass1 = getpass.getpass('Enter password for {0} : '.format(args.createUser))
        newpass2 = getpass.getpass('Re-enter password : ')
        if newpass1 == newpass2:
            # NOTE: the hash call was redacted ('******') in this dump; sha256 is an assumed stand-in for the original function.
            hash_object = hashlib.sha256(newpass1.encode('utf-8'))
            passHex = hash_object.hexdigest()
            sqlInsert = "INSERT INTO Auth VALUES (Null, '{0}', 1, '{1}')".format(args.createUser, passHex)
            DBClient.executeNoResult(sqlInsert)
        else:
            print('Password mismatch. ')
            sys.exit(0)
    else:
        print('Not authorized')
        sys.exit(0)
    sys.exit(0)
elif args.study is not None and args.type is not None and args.user is not None and args.createUser is None:
    passwd = getpass.getpass('Password : ')
__author__ = 'sulantha'
from Utils.DbUtils import DbUtils

CSVFile = '/data/data03/sulantha/Downloads/missing_list_preprocessed.csv'
dbc = DbUtils()
with open(CSVFile, 'rU') as csv_file:
    for line in csv_file:
        lin = line.strip()
        rid = lin.split('/')[6]
        print(rid)
        s_id = lin.split('/')[7].split('_')[-2]
        i_id = lin.split('/')[7].split('_')[-1]
        sql = "UPDATE ADNI_AV45_Pipeline SET SKIP = 0, QC = 0, FINISHED = 0, PROC_Failed = NULL, MANUAL_XFM = 'Req_man_reg' WHERE PROCESSING_TID IN (SELECT RECORD_ID FROM Processing WHERE RID = '{0}' AND MODALITY = 'AV45' AND VERSION = 'V2' AND S_IDENTIFIER = '{1}' )".format(rid, s_id)
        dbc.executeNoResult(sql)
from Utils.DbUtils import DbUtils

DBClient = DbUtils()
getAllTodoSQL = "SELECT JOB_ID FROM externalWaitingJobs WHERE `JOB_ID` NOT LIKE '%CIVETRUN'"
res = DBClient.executeAllResults(getAllTodoSQL)
for job_id in res:
    job_id = job_id[0]
    job_type = job_id.split('_')[-1]
    old_job_id = job_id
    new_job_id = job_id.replace(job_type, 'CIVETRUN')
    ins_sql = "UPDATE externalWaitingJobs SET `JOB_ID` = '{0}' WHERE `JOB_ID` = '{1}'".format(new_job_id, old_job_id)
    try:
        DBClient.executeNoResult(ins_sql)
    except Exception as e:
        print(new_job_id)
class ADNI_T1_Fmri_Helper:
    # Finds the T1 scan that matches a given ADNI fMRI processing item, using the MatchT1 cache table first
    # and falling back to the ADNI meta tables (MRILIST / MPRAGEMETA).
    def __init__(self):
        self.DBClient = DbUtils()
        self.MatchDBClient = DbUtils(database=pc.ADNI_dataMatchDBName)

    def getMatchingT1(self, processingItemObj):
        modalityID = '{0}{1}{2}{3}{4}{5}{6}'.format(processingItemObj.study, processingItemObj.version, processingItemObj.subject_rid, processingItemObj.modality, processingItemObj.scan_date.replace('-', ''), processingItemObj.s_identifier, processingItemObj.i_identifier)
        getFromMatchTableSQL = "SELECT * FROM MatchT1 WHERE MODALITY_ID = '{0}'".format(modalityID)  # Find matching record in the MatchT1 table
        existingMatchedRec = self.DBClient.executeAllResults(getFromMatchTableSQL)
        if len(existingMatchedRec) == 1:
            getConvSQL = "SELECT * FROM Conversion WHERE RECORD_ID = '{0}'".format(existingMatchedRec[0][3])
            return self.DBClient.executeAllResults(getConvSQL)[0]
        else:
            # No cached match; look up the fMRI record in MRILIST to get its visit code.
            getFmriRecordSQL = "SELECT * FROM MRILIST WHERE subject LIKE '%_%_{0}' AND seriesid = {1} AND imageuid = {2}".format(processingItemObj.subject_rid, processingItemObj.s_identifier.replace('S', ''), processingItemObj.i_identifier.replace('I', ''))
            FmriRecord = self.MatchDBClient.executeAllResults(getFmriRecordSQL)
            if not FmriRecord:
                PipelineLogger.log('root', 'error', 'Cannot find Fmri record : {0} - {1} - {2}'.format(processingItemObj.subject_rid, processingItemObj.s_identifier.replace('S', ''), processingItemObj.i_identifier.replace('I', '')))
                return None
            visit_code = pc.ADNI_visitCode_Dict[FmriRecord[0][2]]
            getMRIRecordsSQL = "SELECT * FROM MPRAGEMETA WHERE subjectid LIKE '%_%_{0}'".format(processingItemObj.subject_rid)
            mrirecords = self.MatchDBClient.executeAllResults(getMRIRecordsSQL)
            if not mrirecords:
                PipelineLogger.log('root', 'error', '################################ - Error !!!!! Cannot find any MRI records : {0} - Please check ADNI recs. ################################'.format(processingItemObj.subject_rid))
                return None
            # getMRISecondarySQL = "SELECT * FROM MRILIST WHERE subject LIKE '%_%_{0}'".format(processingItemObj.subject_rid)
            # mriSecondaryRecords = self.MatchDBClient.executeAllResults(getMRISecondarySQL)
            # t_mrirecords = mrirecords
            # for record in mriSecondaryRecords:
            #     distint = 1
            #     for i in t_mrirecords:
            #         if record[7] == i[7] and record[8] == i[8]:
            #             distint = 0
            #     if distint:
            #         mrirecords.append(record)
            matchedT1Recs = []
            for rec in mrirecords:
                if pc.ADNI_visitCode_Dict[rec[2]] == visit_code:
                    matchedT1Recs.append(rec)
            if len(matchedT1Recs) == 0:
                PipelineLogger.log('root', 'error', 'Cannot match visit codes for : {0} - {1} - {2} - Searching based on scan date. +/- 60 days from PET date'.format(processingItemObj.subject_rid, processingItemObj.modality, visit_code))
                pet_date = datetime.strptime(processingItemObj.scan_date, '%Y-%m-%d')
                sortedRecs = sorted(mrirecords, key=lambda x: abs(datetime.strptime(x[5], '%Y-%m-%d') - pet_date))
                closestDate = [k for k, g in itertools.groupby(sortedRecs, key=lambda x: abs(datetime.strptime(x[5], '%Y-%m-%d') - pet_date))][0]
                PipelineLogger.log('root', 'error', 'PET MRI Matching based on dates - match visit codes for : {0} - {1} - {2} - Distance between MRI/PET : {3} days.'.format(processingItemObj.subject_rid, processingItemObj.modality, visit_code, closestDate))
                closestMatchedRecs = [list(g) for k, g in itertools.groupby(sortedRecs, key=lambda x: abs(datetime.strptime(x[5], '%Y-%m-%d') - pet_date))][0]
                matchedT1Recs = closestMatchedRecs
            if len(matchedT1Recs) == 0:
                PipelineLogger.log('root', 'error', 'Cannot match visit codes for : {0} - {1} - {2}'.format(processingItemObj.subject_rid, processingItemObj.modality, visit_code))
                return None
            matchedT1withScanDescriptions = []
            for rec in matchedT1Recs:
                getScanFromConversionSQL = "SELECT * FROM Conversion WHERE STUDY = '{0}' AND S_IDENTIFIER = '{1}' AND I_IDENTIFIER = '{2}' AND SKIP = 0".format(processingItemObj.study, 'S{0}'.format(rec[7]), 'I{0}'.format(rec[8]))
                t1_conversion = self.DBClient.executeAllResults(getScanFromConversionSQL)
                if len(t1_conversion) > 0:
                    matchedT1withScanDescriptions.append(t1_conversion[0])
                else:
                    PipelineLogger.log('root', 'error', 'Corresponding MRI was not found in the system : {0} - {1} - {2}'.format(processingItemObj.subject_rid, 'S{0}'.format(rec[7]), 'I{0}'.format(rec[8])))
                    continue
            if len(matchedT1withScanDescriptions) < 1:
                PipelineLogger.log('root', 'error', 'Matched T1s are not in the database. : Matched T1 s - \n {0}'.format(matchedT1Recs))
                return None
            else:
                if len(matchedT1withScanDescriptions) == 1:
                    # Only one matched T1 - check that it is an accepted scan type.
                    if matchedT1withScanDescriptions[0][3] in pc.ADNI_T1_match_accepted_scantypes:
                        self.addToMatchT1Table(processingItemObj, modalityID, matchedT1withScanDescriptions[0])
                        return matchedT1withScanDescriptions[0]
                    else:
                        PipelineLogger.log('root', 'error', 'Matched T1 is not an accepted scan type. : Matched T1 s - \n {0}'.format(matchedT1withScanDescriptions[0]))
                        return None
                else:
                    # More than one match found - pick one based on scan type priority, then the latest record.
                    sortedList = sorted(matchedT1withScanDescriptions, key=lambda x: (pc.ADNI_T1_match_scantype_priorityList.index(x[3]), -x[5]))
                    self.addToMatchT1Table(processingItemObj, modalityID, sortedList[0])
                    return sortedList[0]

    def checkProcessed(self, t1Record):
        subject_id = t1Record[2]
        version = t1Record[11]
        s_id = t1Record[6]
        i_id = t1Record[7]
        checkProcessedSQL = "SELECT * FROM Processing WHERE RID = '{0}' AND VERSION = '{1}' AND S_IDENTIFIER = '{2}' AND I_IDENTIFIER = '{3}'".format(subject_id, version, s_id, i_id)
        result = self.DBClient.executeAllResults(checkProcessedSQL)[0]
        if len(result) < 1:
            PipelineLogger.log('root', 'error', 'Matched T1 is not added to the processing table. {0} - {1} - {2}'.format(subject_id, s_id, i_id))
            return False
        else:
            if result[12] == 1 and result[13] == 1:
                PipelineLogger.log('root', 'debug', 'Matched T1 is processed and QC passed. {0} - {1} - {2}'.format(subject_id, s_id, i_id))
                return result[8]
            else:
                PipelineLogger.log('root', 'error', 'Matched T1 is not processed or QC failed. {0} - {1} - {2}'.format(subject_id, s_id, i_id))
                self.startProcessOFT1(result)
                return False

    def addToMatchT1Table(self, processingItemObj, modalityID, t1Record):
        pet_date = datetime.strptime(processingItemObj.scan_date, '%Y-%m-%d')
        mri_date = datetime.combine(t1Record[4], datetime.min.time())
        date_diff = abs(mri_date - pet_date)
        t1ID = '{0}{1}{2}_x_{3}_x_{4}{5}{6}'.format(t1Record[1], t1Record[11], t1Record[2], t1Record[3], t1Record[4].strftime('%Y-%m-%d').replace('-', ''), t1Record[6], t1Record[7])
        conversionID = t1Record[0]
        sql = "INSERT IGNORE INTO MatchT1 VALUES (Null, '{0}', '{1}', '{2}', {3})".format(modalityID, t1ID, conversionID, date_diff.days)
        self.DBClient.executeNoResult(sql)

    def startProcessOFT1(self, processTableEntry):
        recordId = processTableEntry[0]
        study = processTableEntry[1]
        sql = "UPDATE {0}_T1_Pipeline SET SKIP = 0 WHERE PROCESSING_TID = {1}".format(study, recordId)
        self.DBClient.executeNoResult(sql)
class PipelineHandler:
    def __init__(self):
        self.processingPPDict = {'ADNI': {'V1': {'T1': ADNI_V1_T1(), 'FMRI': ADNI_V1_FMRI(), 'AV45': ADNI_V1_AV45(), 'FDG': ADNI_V1_FDG(), 'AV1451': ADNI_V1_AV1451()},
                                          'V2': {'T1': ADNI_V1_T1(), 'FMRI': ADNI_V1_FMRI(), 'AV45': ADNI_V2_AV45(), 'FDG': ADNI_V2_FDG(), 'AV1451': ADNI_V2_AV1451()}}}
        self.DBClient = DbUtils()
        self.QCH = QCHandler()

    def checkExternalJobs(self, study, modality):
        getExtJobSql = "SELECT * FROM externalWaitingJobs WHERE JOB_ID LIKE '{0}_{1}_%'".format(study, modality)
        extJobs = self.DBClient.executeAllResults(getExtJobSql)
        for job in extJobs:
            jobType = job[0].split('_')[-1]
            reportTable = job[1]
            tableID = job[0].split('_')[2]
            reportField = job[2]
            subjectScanID = job[0].split('_')[3]
            success = 0
            if jobType == 'CIVETRUN':
                if glob.glob('{0}/{1}_{2}_*'.format(PipelineConfig.T1TempDirForCIVETDownload, study, subjectScanID)):
                    getProccessRecSql = "SELECT * FROM Processing WHERE RECORD_ID IN (SELECT PROCESSING_TID FROM {0}_T1_Pipeline WHERE RECORD_ID = {1})".format(study, tableID)
                    processingEntry = self.DBClient.executeAllResults(getProccessRecSql)[0]
                    civetFolder = '{0}/civet'.format(processingEntry[8])
                    if os.path.exists(civetFolder):
                        shutil.rmtree(civetFolder)
                    try:
                        PipelineLogger.log('manager', 'info', 'Copying - {0} -> {1}'.format(glob.glob('{0}/{1}_{2}_*'.format(PipelineConfig.T1TempDirForCIVETDownload, study, subjectScanID))[0], civetFolder))
                        dir_util.copy_tree(glob.glob('{0}/{1}_{2}_*'.format(PipelineConfig.T1TempDirForCIVETDownload, study, subjectScanID))[0], civetFolder)
                        success = 1
                    except:
                        success = 0
                else:
                    continue
            else:
                PipelineLogger.log('manager', 'error', 'Unknown external job type - {}'.format(jobType))
            if success:
                updateSQL = "UPDATE {0} SET {1} = 1 WHERE RECORD_ID = {2}".format(reportTable, reportField, tableID)
                self.DBClient.executeNoResult(updateSQL)
                if jobType == 'CIVETRUN':
                    finishSQL = "UPDATE {0} SET FINISHED = 1 WHERE RECORD_ID = {1}".format(reportTable, tableID)
                    self.DBClient.executeNoResult(finishSQL)
                    modal_table = reportTable
                    modal_tableId = tableID
                    qcField = 'QC'
                    qctype = 'civet'
                    qcFolder = civetFolder
                    self.QCH.requestQC(study, modal_table, modal_tableId, qcField, qctype, qcFolder)
                rmSql = "DELETE FROM externalWaitingJobs WHERE JOB_ID LIKE '{0}_{1}_{2}_{3}_%'".format(study, modality, tableID, subjectScanID)
                self.DBClient.executeNoResult(rmSql)

    def process(self, study, modality):
        os.environ['PATH'] = ':'.join(libpath.PATH)
        os.environ['LD_LIBRARY_PATH'] = ':'.join(libpath.LD_LIBRARY_PATH)
        os.environ['LD_LIBRARYN32_PATH'] = ':'.join(libpath.LD_LIBRARYN32_PATH)
        os.environ['PERL5LIB'] = ':'.join(libpath.PERL5LIB)
        os.environ['MNI_DATAPATH'] = ':'.join(libpath.MNI_DATAPATH)
        os.environ['ROOT'] = ';'.join(libpath.ROOT)
        os.environ['MINC_TOOLKIT_VERSION'] = libpath.MINC_TOOLKIT_VERSION
        os.environ['MINC_COMPRESS'] = libpath.MINC_COMPRESS
        os.environ['MINC_FORCE_V2'] = libpath.MINC_FORCE_V2
        toProcessinModalityPerStudy = self.DBClient.executeAllResults("SELECT * FROM Processing INNER JOIN (SELECT * FROM {0}_{1}_Pipeline WHERE NOT (FINISHED OR SKIP)) as TMP ON Processing.RECORD_ID=TMP.PROCESSING_TID".format(study, modality))
        for processingItem in toProcessinModalityPerStudy:
            version = processingItem[10]
            # Dispatch to the processing implementation registered for this study, version and modality.
            self.processingPPDict[study][version][modality].process(processingItem)
        return 0

    def addToPipelineTable(self, processingObj):
        study = processingObj.study
        version = processingObj.version
        modality = processingObj.modality
        r_id = processingObj.record_id
        addToTableDict = dict(T1="INSERT IGNORE INTO {0}_T1_Pipeline VALUES (NULL, {1}, \"{2}\", 0, 0, 0, 0, 0, 0, 0, 0, 0, NULL, NULL)".format(study, r_id, PipelineConfig.defaultT1config),
                              AV45="INSERT IGNORE INTO {0}_AV45_Pipeline VALUES (NULL, {1}, \"{2}\", '{3}', 0, 0, 0, NULL, NULL)".format(study, r_id, PipelineConfig.defaultAV45config, ''),
                              AV1451="INSERT IGNORE INTO {0}_AV1451_Pipeline VALUES (NULL, {1}, \"{2}\", '{3}', 0, 0, 0, NULL, NULL)".format(study, r_id, PipelineConfig.defaultAV1451config, ''),
                              FDG="INSERT IGNORE INTO {0}_FDG_Pipeline VALUES (NULL, {1}, \"{2}\", '{3}', 0, 0, 0, NULL, NULL)".format(study, r_id, PipelineConfig.defaultFDGconfig, ''),
                              FMRI="INSERT IGNORE INTO {0}_FMRI_Pipeline VALUES (NULL, {1}, \"{2}\", '{3}', 0, 0, 0, NULL, NULL)".format(study, r_id, PipelineConfig.defaultFMRIconfig, 'NIAK_STH_COMESHERE'))
        self.DBClient.executeNoResult(addToTableDict[modality])
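# --- Illustrative usage sketch (not part of the original sources) ---
# A minimal sketch of the converted-scan handoff, assuming the Conversion, Processing and PipelineHandler classes
# above are importable from one place. getConvertedListPerStudy, insertFromConvertionObj, getToProcessListPerStudy,
# addToPipelineTable and process come from the code above; the driver function and the example arguments are
# assumptions for illustration only.
def run_processing_for_study(study, modality):
    conversion = Conversion()
    processing = Processing()
    pipelineHandler = PipelineHandler()
    # Register every converted scan in the Processing table.
    for convertedObj in conversion.getConvertedListPerStudy(study):
        processing.insertFromConvertionObj(convertedObj)
    # Make sure each queued Processing row has a per-modality pipeline-table entry, then run the modality pipeline.
    for processingObj in processing.getToProcessListPerStudy(study):
        pipelineHandler.addToPipelineTable(processingObj)
    pipelineHandler.process(study, modality)

# Example call (hypothetical values): run_processing_for_study('ADNI', 'AV45')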
import csv
from Utils.DbUtils import DbUtils

inputFile = '/home/sulantha/reRUNAv45.csv'
DBC = DbUtils()
with open(inputFile, 'r') as inputFile:
    csvFile = csv.reader(inputFile)
    for line in csvFile:
        RID = line[0].split('/')[6]
        IID = line[0].split('/')[7].split('_')[-1]
        sql = "UPDATE ADNI_AV45_Pipeline SET FINISHED = 0, SKIP = 0 WHERE PROCESSING_TID IN (SELECT RECORD_ID FROM Processing WHERE RID = {0} AND I_IDENTIFIER = '{1}')".format(RID, IID)
        DBC.executeNoResult(sql)
class ADNI_V1_T1:
    def __init__(self):
        self.DBClient = DbUtils()
        self.QCHandler = QCHandler()

    def process(self, processingItem):
        processingItemObj = ProcessingItemObj(processingItem)
        if processingItemObj.beast_skip and processingItemObj.manual_skip and not processingItemObj.civet:
            self.runCivet(processingItemObj, 'N')
        elif processingItemObj.manual_mask and not processingItemObj.manual_skip and not processingItemObj.civet:
            self.runCivet(processingItemObj, 'M')
        elif processingItemObj.beast_mask == 0 and not processingItemObj.beast_skip and processingItemObj.beast_qc == 0 and not processingItemObj.manual_mask:
            self.runBeast(processingItemObj)
        elif processingItemObj.beast_skip and not processingItemObj.manual_mask and not processingItemObj.manual_skip:
            PipelineLogger.log('manager', 'error', '$$$$$$$$$$$$$$$$$ Manual Mask Requested $$$$$$$$$$$$$$$$$$ - {0}'.format(processingItem))
            pass
        elif processingItemObj.beast_mask == 1 and not processingItemObj.beast_skip and processingItemObj.beast_qc == 1 and not processingItemObj.manual_mask and not processingItemObj.civet:
            self.runCivet(processingItemObj, 'B')
        elif processingItemObj.beast_mask == 1 and not processingItemObj.beast_skip and processingItemObj.beast_qc == 0 and not processingItemObj.manual_mask and not processingItemObj.manual_skip:
            self.requestQC(processingItemObj, 'beast')
        elif processingItemObj.civet == 1 and processingItemObj.civet_qc == 0:
            self.requestQC(processingItemObj, 'civet')
        else:
            if processingItemObj.civet_qc == -1:
                PipelineLogger.log('manager', 'error', 'Civet QC failed. Skipping. - {0}'.format(processingItem))
            PipelineLogger.log('manager', 'error', 'Error handling obj for processing - {0}'.format(processingItem))
        return 0

    def getScanType(self, processingItemObj):
        r = self.DBClient.executeAllResults("SELECT SCAN_TYPE FROM Conversion WHERE STUDY = '{0}' AND RID = '{1}' "
                                            "AND SCAN_DATE = '{2}' AND S_IDENTIFIER = '{3}' "
                                            "AND I_IDENTIFIER = '{4}'".format(processingItemObj.study, processingItemObj.subject_rid, processingItemObj.scan_date, processingItemObj.s_identifier, processingItemObj.i_identifier))
        return r[0][0]

    def checkNative(self, processingItemObj):
        orig_ScanType = self.getScanType(processingItemObj)
        converted_file = '{0}/{1}_{2}{3}{4}{5}_{6}.mnc'.format(processingItemObj.converted_folder, processingItemObj.study, processingItemObj.subject_rid, processingItemObj.scan_date.replace('-', ''), processingItemObj.s_identifier, processingItemObj.i_identifier, orig_ScanType)
        nativeFolder = '{0}/native'.format(processingItemObj.root_folder)
        nativeFileName = '{0}/{1}_{2}{3}{4}{5}_{6}.mnc'.format(nativeFolder, processingItemObj.study, processingItemObj.subject_rid, processingItemObj.scan_date.replace('-', ''), processingItemObj.s_identifier, processingItemObj.i_identifier, processingItemObj.modality.lower())
        if not os.path.exists(nativeFileName):
            try:
                distutils.dir_util.mkpath(nativeFolder)
                shutil.copyfile(converted_file, nativeFileName)
            except Exception as e:
                PipelineLogger.log('manager', 'error', 'Error in creating folders or copying native file. \n {0}'.format(e))
                PipelineLogger.log('manager', 'error', 'Setting to restart conversion. \n {0}'.format(e))
                sql = "UPDATE Conversion SET CONVERTED = 0, SKIP = 0 WHERE S_IDENTIFIER = '{0}' AND I_IDENTIFIER = '{1}'".format(processingItemObj.s_identifier, processingItemObj.i_identifier)
                self.DBClient.executeNoResult(sql)
                return None
        return nativeFileName

    def runBeast(self, processingItemObj):
        nativeFileName = self.checkNative(processingItemObj)
        if not nativeFileName:
            return 0
        beastFolder = '{0}/beast'.format(processingItemObj.root_folder)
        logDir = '{0}/logs'.format(processingItemObj.root_folder)
        PipelineLogger.log('manager', 'info', 'BeAST starting for {0}'.format(nativeFileName))
        PipelineLogger.log('manager', 'info', 'Current working folder : {0}'.format(os.getcwd()))
        try:
            distutils.dir_util.mkpath(logDir)
        except Exception as e:
            PipelineLogger.log('manager', 'error', 'Error in creating log folder \n {0}'.format(e))
            return 0
        id = '{0}{1}{2}{3}'.format(processingItemObj.subject_rid, processingItemObj.scan_date.replace('-', ''), processingItemObj.s_identifier, processingItemObj.i_identifier)
        beastCMD = 'source /opt/minc-toolkit/minc-toolkit-config.sh; Pipelines/ADNI_T1/ADNI_V1_T1_BeAST {0} {1} {2} {3} {4} {5}'.format(id, nativeFileName, beastFolder, logDir, socket.gethostname(), 50500)
        try:
            shutil.rmtree(beastFolder)
        except:
            pass
        try:
            distutils.dir_util.mkpath(beastFolder)
        except Exception as e:
            PipelineLogger.log('manager', 'error', 'Error in creating BeAST folder. \n {0}'.format(e))
            return 0
        PipelineLogger.log('manager', 'debug', 'Command : {0}'.format(beastCMD))
        os.chdir(pc.SourcePath)
        p = subprocess.Popen(beastCMD, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, executable='/bin/bash')
        out, err = p.communicate()
        PipelineLogger.log('manager', 'debug', 'Beast Log Output : \n{0}'.format(out.decode("utf-8")))
        PipelineLogger.log('manager', 'debug', 'Beast Log Err : \n{0}'.format(err.decode("utf-8")))
        QSubJobHandler.submittedJobs[id] = QSubJob(id, '02:00:00', processingItemObj, 'beast')
        return 1

    def runCivet(self, processingItemObj, maskStatus):
        nativeFileName = self.checkNative(processingItemObj)
        if not nativeFileName:
            return 0
        copyFolder = pc.T1TempDirForCIVETProcessing
        subjectFileName_base = '{0}_{1}{2}{3}{4}_{5}'.format(processingItemObj.study, processingItemObj.subject_rid, processingItemObj.scan_date.replace('-', ''), processingItemObj.s_identifier, processingItemObj.i_identifier, processingItemObj.modality.lower())
        jobId = '{0}_{1}_{2}_{3}{4}{5}{6}_CIVETRUN'.format(processingItemObj.study, processingItemObj.modality, processingItemObj.table_id, processingItemObj.subject_rid, processingItemObj.scan_date.replace('-', ''), processingItemObj.s_identifier, processingItemObj.i_identifier)
        checkJobPresentSql = "SELECT * FROM externalWaitingJobs WHERE JOB_ID = '{0}'".format(jobId)
        if len(self.DBClient.executeAllResults(checkJobPresentSql)) == 0:
            beastFileName = '{0}/beast/mask/{1}_skull_mask_native.mnc'.format(processingItemObj.root_folder, subjectFileName_base)
            beastMaskName_base = '{0}_{1}{2}{3}{4}_mask'.format(processingItemObj.study, processingItemObj.subject_rid, processingItemObj.scan_date.replace('-', ''), processingItemObj.s_identifier, processingItemObj.i_identifier)
            beastMaskName = '{0}/{1}.mnc'.format(copyFolder, beastMaskName_base)
            manualFileName = '{0}/manual/mask/{1}_skull_mask_native.mnc'.format(processingItemObj.root_folder, subjectFileName_base)
            manualMaskName_base = '{0}_{1}{2}{3}{4}_mask'.format(processingItemObj.study, processingItemObj.subject_rid, processingItemObj.scan_date.replace('-', ''), processingItemObj.s_identifier, processingItemObj.i_identifier)
            manualMaskName = '{0}/{1}.mnc'.format(copyFolder, manualMaskName_base)
            try:
                distutils.file_util.copy_file(nativeFileName, copyFolder)
                if maskStatus == 'B':
                    distutils.file_util.copy_file(beastFileName, beastMaskName)
                elif maskStatus == 'M':
                    distutils.file_util.copy_file(manualFileName, manualMaskName)
                elif maskStatus == 'N':
                    pass
                else:
                    PipelineLogger.log('manager', 'error', 'Unknown mask status - {0} Entry : Processing ID - {1}, Table ID - {2}'.format(maskStatus, processingItemObj.processing_rid, processingItemObj.table_id))
                addExternalJobSQL = "INSERT INTO externalWaitingJobs VALUES ('{0}', '{1}', '{2}', NULL, NULL, NULL)".format(jobId, '{0}_{1}_Pipeline'.format(processingItemObj.study, processingItemObj.modality), 'CIVET')
                self.DBClient.executeNoResult(addExternalJobSQL)
            except Exception as e:
                PipelineLogger.log('manager', 'error', 'Error copying for CIVET input. Rolling back... - Processing Table ID -> {0} Table ID -> {1}'.format(processingItemObj.processing_rid, processingItemObj.table_id))
                PipelineLogger.log('manager', 'exception', e)
                nativeFileOnCopyFolder = '{0}/{1}'.format(copyFolder, os.path.basename(nativeFileName))
                os.remove(nativeFileOnCopyFolder) if os.path.exists(nativeFileOnCopyFolder) else None
                os.remove(beastMaskName) if os.path.exists(beastMaskName) else None
                os.remove(manualMaskName) if os.path.exists(manualMaskName) else None

    def requestQC(self, processingItemObj, qctype):
        qcFieldDict = dict(civet='QC', beast='BEAST_QC')
        qcFolderDict = {'civet': '{0}/civet'.format(processingItemObj.root_folder),
                        'beast': '{0}/beast'.format(processingItemObj.root_folder)}
        tablename = '{0}_{1}_Pipeline'.format(processingItemObj.study, processingItemObj.modality)
        self.QCHandler.requestQC(processingItemObj.study, tablename, processingItemObj.table_id, qcFieldDict[qctype], qctype, qcFolderDict[qctype])
# One-off cleanup for a single image series: remove the data folder and the Sorting/Conversion entries,
# then look up the related Processing and pipeline-table records.
import os
import shutil
from Utils.DbUtils import DbUtils

DBClient = DbUtils()
IID_list = ['45WL3UA1MPRAGEv0020111115xDICOM']
for iid in IID_list:
    getDataFolderSQL = "SELECT RAW_FOLDER FROM Sorting WHERE I_IDENTIFIER = '{0}'".format(iid)
    res = DBClient.executeAllResults(getDataFolderSQL)
    if len(res) == 0:
        pass
    else:
        rawFolder = res[0][0]
        dataFolder = os.path.abspath(os.path.join(rawFolder, '../'))
        shutil.rmtree(dataFolder)
        print(dataFolder)
    delsql = "DELETE FROM Sorting WHERE I_IDENTIFIER = '{0}'".format(iid)
    DBClient.executeNoResult(delsql)
    delsql = "DELETE FROM Conversion WHERE I_IDENTIFIER = '{0}'".format(iid)
    DBClient.executeNoResult(delsql)
    getProSQL = "SELECT RECORD_ID, STUDY, MODALITY FROM Processing WHERE I_IDENTIFIER = '{0}'".format(iid)
    res2 = DBClient.executeAllResults(getProSQL)
    if len(res2) == 0:
        pass
    else:
        P_ID = res2[0][0]
        study = res2[0][1]
        mod = res2[0][2]
        print(P_ID)
        getPPSQL = "SELECT RECORD_ID FROM {1}_{2}_Pipeline WHERE PROCESSING_TID = '{0}'".format(P_ID, study, mod)
class ADNI_T1_Helper:
    # Finds the T1 scan that matches a given ADNI processing item (PET by default, fMRI via MRILIST),
    # using the MatchT1 cache table first and falling back to the ADNI meta tables.
    def __init__(self):
        self.DBClient = DbUtils()
        self.MatchDBClient = DbUtils(database=pc.ADNI_dataMatchDBName)

    def getMatchingT1(self, processingItemObj):
        modalityID = '{0}{1}{2}{3}{4}{5}{6}'.format(processingItemObj.study, processingItemObj.version, processingItemObj.subject_rid, processingItemObj.modality, processingItemObj.scan_date.replace('-', ''), processingItemObj.s_identifier, processingItemObj.i_identifier)
        getFromMatchTableSQL = "SELECT * FROM MatchT1 WHERE MODALITY_ID = '{0}'".format(modalityID)
        existingMatchedRec = self.DBClient.executeAllResults(getFromMatchTableSQL)
        if len(existingMatchedRec) == 1:
            getConvSQL = "SELECT * FROM Conversion WHERE RECORD_ID = '{0}'".format(existingMatchedRec[0][3])
            return self.DBClient.executeAllResults(getConvSQL)[0]
        else:
            if processingItemObj.modality == 'FMRI':
                # For MRI and fMRI images
                getRecordSQL = "SELECT * FROM MRILIST WHERE subject LIKE '%_%_{0}' AND seriesid = {1} AND imageuid = {2}".format(processingItemObj.subject_rid, processingItemObj.s_identifier.replace('S', ''), processingItemObj.i_identifier.replace('I', ''))
            else:
                # By default, for PET images
                getRecordSQL = "SELECT * FROM PET_META_LIST WHERE subject LIKE '%_%_{0}' AND seriesid = {1} AND imageid = {2}".format(processingItemObj.subject_rid, processingItemObj.s_identifier.replace('S', ''), processingItemObj.i_identifier.replace('I', ''))
            petrecord = self.MatchDBClient.executeAllResults(getRecordSQL)
            if not petrecord:
                PipelineLogger.log('root', 'error', 'Cannot find PET record : {0} - {1} - {2}'.format(processingItemObj.subject_rid, processingItemObj.s_identifier.replace('S', ''), processingItemObj.i_identifier.replace('I', '')))
                return None
            visit_code = pc.ADNI_visitCode_Dict[petrecord[0][2]]
            getMRIRecordsSQL = "SELECT * FROM MPRAGEMETA WHERE subjectid LIKE '%_%_{0}'".format(processingItemObj.subject_rid)
            mrirecords = self.MatchDBClient.executeAllResults(getMRIRecordsSQL)
            if not mrirecords:
                PipelineLogger.log('root', 'error', '################################ - Error !!!!! Cannot find any MRI records : {0} - Please check ADNI recs. ################################'.format(processingItemObj.subject_rid))
                return None
            # getMRISecondarySQL = "SELECT * FROM MRILIST WHERE subject LIKE '%_%_{0}'".format(processingItemObj.subject_rid)
            # mriSecondaryRecords = self.MatchDBClient.executeAllResults(getMRISecondarySQL)
            # t_mrirecords = mrirecords
            # for record in mriSecondaryRecords:
            #     distint = 1
            #     for i in t_mrirecords:
            #         if record[7] == i[7] and record[8] == i[8]:
            #             distint = 0
            #     if distint:
            #         mrirecords.append(record)
            matchedT1Recs = []
            for rec in mrirecords:
                if pc.ADNI_visitCode_Dict[rec[2]] == visit_code:
                    matchedT1Recs.append(rec)
            if len(matchedT1Recs) == 0:
                PipelineLogger.log('root', 'error', 'Cannot match visit codes for : {0} - {1} - {2} - Searching based on scan date. +/- 60 days from PET date'.format(processingItemObj.subject_rid, processingItemObj.modality, visit_code))
                pet_date = datetime.strptime(processingItemObj.scan_date, '%Y-%m-%d')
                sortedRecs = sorted(mrirecords, key=lambda x: abs(datetime.strptime(x[5], '%Y-%m-%d') - pet_date))
                closestDate = [k for k, g in itertools.groupby(sortedRecs, key=lambda x: abs(datetime.strptime(x[5], '%Y-%m-%d') - pet_date))][0]
                PipelineLogger.log('root', 'error', 'PET MRI Matching based on dates - match visit codes for : {0} - {1} - {2} - Distance between MRI/PET : {3} days.'.format(processingItemObj.subject_rid, processingItemObj.modality, visit_code, closestDate))
                closestMatchedRecs = [list(g) for k, g in itertools.groupby(sortedRecs, key=lambda x: abs(datetime.strptime(x[5], '%Y-%m-%d') - pet_date))][0]
                matchedT1Recs = closestMatchedRecs
            if len(matchedT1Recs) == 0:
                PipelineLogger.log('root', 'error', 'Cannot match visit codes for : {0} - {1} - {2}'.format(processingItemObj.subject_rid, processingItemObj.modality, visit_code))
                return None
            matchedT1withScanDescriptions = []
            for rec in matchedT1Recs:
                getScanFromConversionSQL = "SELECT * FROM Conversion WHERE STUDY = '{0}' AND S_IDENTIFIER = '{1}' AND I_IDENTIFIER = '{2}' AND SKIP = 0".format(processingItemObj.study, 'S{0}'.format(rec[7]), 'I{0}'.format(rec[8]))
                t1_conversion = self.DBClient.executeAllResults(getScanFromConversionSQL)
                if len(t1_conversion) > 0:
                    matchedT1withScanDescriptions.append(t1_conversion[0])
                else:
                    PipelineLogger.log('root', 'error', 'Corresponding MRI was not found in the system : {0} - {1} - {2}'.format(processingItemObj.subject_rid, 'S{0}'.format(rec[7]), 'I{0}'.format(rec[8])))
                    continue
            if len(matchedT1withScanDescriptions) < 1:
                PipelineLogger.log('root', 'error', 'Matched T1s are not in the database. : Matched T1 s - \n {0}'.format(matchedT1Recs))
                return None
            else:
                if len(matchedT1withScanDescriptions) == 1:
                    # Only one matched T1 - check that it is an accepted scan type.
                    if matchedT1withScanDescriptions[0][3] in pc.ADNI_T1_match_accepted_scantypes:
                        self.addToMatchT1Table(processingItemObj, modalityID, matchedT1withScanDescriptions[0])
                        return matchedT1withScanDescriptions[0]
                    else:
                        PipelineLogger.log('root', 'error', 'Matched T1 is not an accepted scan type. : Matched T1 s - \n {0}'.format(matchedT1withScanDescriptions[0]))
                        return None
                else:
                    # More than one match found - pick one based on scan type priority, then the latest record.
                    sortedList = sorted(matchedT1withScanDescriptions, key=lambda x: (pc.ADNI_T1_match_scantype_priorityList.index(x[3]), -x[5]))
                    self.addToMatchT1Table(processingItemObj, modalityID, sortedList[0])
                    return sortedList[0]

    def checkProcessed(self, t1Record):
        subject_id = t1Record[2]
        version = t1Record[11]
        s_id = t1Record[6]
        i_id = t1Record[7]
        checkProcessedSQL = "SELECT * FROM Processing WHERE RID = '{0}' AND VERSION = '{1}' AND S_IDENTIFIER = '{2}' AND I_IDENTIFIER = '{3}'".format(subject_id, version, s_id, i_id)
        result = self.DBClient.executeAllResults(checkProcessedSQL)[0]
        if len(result) < 1:
            PipelineLogger.log('root', 'error', 'Matched T1 is not added to the processing table. {0} - {1} - {2}'.format(subject_id, s_id, i_id))
            return False
        else:
            if result[12] == 1 and result[13] == 1:
                return result[8]
            else:
                PipelineLogger.log('root', 'error', 'Matched T1 is not processed or QC failed. {0} - {1} - {2}'.format(subject_id, s_id, i_id))
                self.startProcessOFT1(result)
                return False

    def addToMatchT1Table(self, processingItemObj, modalityID, t1Record):
        pet_date = datetime.strptime(processingItemObj.scan_date, '%Y-%m-%d')
        mri_date = datetime.combine(t1Record[4], datetime.min.time())
        date_diff = abs(mri_date - pet_date)
        t1ID = '{0}{1}{2}_x_{3}_x_{4}{5}{6}'.format(t1Record[1], t1Record[11], t1Record[2], t1Record[3], t1Record[4].strftime('%Y-%m-%d').replace('-', ''), t1Record[6], t1Record[7])
        conversionID = t1Record[0]
        sql = "INSERT IGNORE INTO MatchT1 VALUES (Null, '{0}', '{1}', '{2}', {3})".format(modalityID, t1ID, conversionID, date_diff.days)
        self.DBClient.executeNoResult(sql)

    def startProcessOFT1(self, processTableEntry):
        recordId = processTableEntry[0]
        study = processTableEntry[1]
        sql = "UPDATE {0}_T1_Pipeline SET SKIP = 0 WHERE PROCESSING_TID = {1}".format(study, recordId)
        self.DBClient.executeNoResult(sql)
__author__ = 'sulantha'
import datetime
from Utils.DbUtils import DbUtils

DBClient = DbUtils()
with open('/data/data03/sulantha/Downloads/av45_list.csv', 'r') as file:
    next(file)
    for line in file:
        row = line.split(',')
        rid = row[0]
        date = row[1].strip()
        dateT = datetime.datetime.strptime(date, '%m/%d/%Y')
        dateS = dateT.strftime('%Y-%m-%d')
        findSQL = "SELECT * FROM Processing WHERE RID = {0} AND MODALITY = 'AV45' AND SCAN_DATE = '{1}'".format(rid, dateS)
        res = DBClient.executeAllResults(findSQL)
        print('{0}-{1} {2}'.format(rid, len(res), '############' if len(res) == 0 else '')) if len(res) == 0 else None
        processingSQL = "UPDATE Processing SET SKIP = 0 WHERE RID = {0} AND MODALITY = 'AV45' AND SCAN_DATE = '{1}'".format(rid, dateS)
        DBClient.executeNoResult(processingSQL)
class DIAN_T1_Helper:
    # Finds the T1 scan that matches a given DIAN PET processing item, using the MatchT1 cache table first
    # and falling back to the DIAN PET/MRI match table.
    def __init__(self):
        self.DBClient = DbUtils()
        self.MatchDBClient = DbUtils(database=pc.DIAN_dataMatchDBName)

    def getMatchingT1(self, processingItemObj):
        modalityID = '{0}{1}{2}{3}{4}{5}{6}'.format(processingItemObj.study, processingItemObj.version, processingItemObj.subject_rid, processingItemObj.modality, processingItemObj.scan_date.replace('-', ''), processingItemObj.s_identifier, processingItemObj.i_identifier)
        getFromMatchTableSQL = "SELECT * FROM MatchT1 WHERE MODALITY_ID = '{0}'".format(modalityID)
        existingMatchedRec = self.DBClient.executeAllResults(getFromMatchTableSQL)
        if len(existingMatchedRec) == 1:
            getConvSQL = "SELECT * FROM Conversion WHERE RECORD_ID = '{0}'".format(existingMatchedRec[0][3])
            return self.DBClient.executeAllResults(getConvSQL)[0]
        else:
            if processingItemObj.modality == 'FMRI':
                PipelineLogger.log('root', 'error', 'FMRI T1 Matching not implemented. {0} - {1} - {2}'.format(processingItemObj.subject_rid, processingItemObj.s_identifier.replace('S', ''), processingItemObj.i_identifier.replace('I', '')))
                return None
            else:
                # By default, for PET images: build the PET label and look it up in the PET/MRI match table.
                date_str = processingItemObj.scan_date.replace('-', '')
                name_and_Mod = '{0}{1}'.format(processingItemObj.subject_rid, processingItemObj.modality)
                visit = processingItemObj.i_identifier.split('x')[0].replace(date_str, '').replace(name_and_Mod, '')
                pet_label = '{0}_{1}_{2}'.format(processingItemObj.subject_rid, visit, processingItemObj.modality.lower())
                getRecordSQL = "SELECT * FROM PET_MRI_Proc_Match WHERE Label LIKE '{0}'".format(pet_label)
                petrecord = self.MatchDBClient.executeAllResults(getRecordSQL)
                if not petrecord:
                    PipelineLogger.log('root', 'error', 'Cannot find PET record : {0} - {1} - {2}'.format(processingItemObj.subject_rid, processingItemObj.s_identifier.replace('S', ''), processingItemObj.i_identifier.replace('I', '')))
                    return None
                mr_name = petrecord[0][5]
                if mr_name == '':
                    # Processed-with-MR entry not found; would have to switch to date based matching.
                    PipelineLogger.log('root', 'error', 'Processed with MR entry not found. : {0} - {1} - {2} - Searching based on scan date. +/- 60 days from PET date'.format(processingItemObj.subject_rid, processingItemObj.modality, visit))
                    return None
                mr_fid = petrecord[0][6]
                mr_visit = mr_name.split('_')[1]
                matchedT1withScanDescriptions = []
                for t1_type in ['MPRAGE', 'IRFSPGR', 'MPR', 'FSPGR']:
                    mr_DB_iid = '{0}{3}{1}%x{2}'.format(processingItemObj.subject_rid, mr_visit, mr_fid, t1_type)
                    getScanFromConversionSQL = "SELECT * FROM Conversion WHERE STUDY = '{0}' AND I_IDENTIFIER LIKE '{1}' AND SKIP = 0".format(processingItemObj.study, mr_DB_iid)
                    t1_conversion = self.DBClient.executeAllResults(getScanFromConversionSQL)
                    if len(t1_conversion) > 0:
                        matchedT1withScanDescriptions.append(t1_conversion[0])
                if len(matchedT1withScanDescriptions) < 1:
                    PipelineLogger.log('root', 'error', 'Matched T1s are not in the database. : Subject, visit and FID - {0} {1} {2}'.format(processingItemObj.subject_rid, mr_visit, mr_fid))
                    return None
                else:
                    if len(matchedT1withScanDescriptions) == 1:
                        # Only one matched T1 - good.
                        self.addToMatchT1Table(processingItemObj, modalityID, matchedT1withScanDescriptions[0])
                        return matchedT1withScanDescriptions[0]
                    else:
                        # More than one match found - very unusual for DIAN.
                        PipelineLogger.log('root', 'error', 'MORE THAN ONE T1 Match FOUND. Very unusual for DIAN. : Subject and visit - {0} {1}'.format(processingItemObj.subject_rid, mr_visit))
                        return None

    def checkProcessed(self, t1Record):
        subject_id = t1Record[2]
        version = t1Record[11]
        s_id = t1Record[6]
        i_id = t1Record[7]
        checkProcessedSQL = "SELECT * FROM Processing WHERE RID = '{0}' AND VERSION = '{1}' AND S_IDENTIFIER = '{2}' AND I_IDENTIFIER = '{3}'".format(subject_id, version, s_id, i_id)
        result = self.DBClient.executeAllResults(checkProcessedSQL)[0]
        if len(result) < 1:
            PipelineLogger.log('root', 'error', 'Matched T1 is not added to the processing table. {0} - {1} - {2}'.format(subject_id, s_id, i_id))
            return False
        else:
            if result[12] == 1 and result[13] == 1:
                return result[8]
            else:
                PipelineLogger.log('root', 'error', 'Matched T1 is not processed or QC failed. {0} - {1} - {2}'.format(subject_id, s_id, i_id))
                self.startProcessOFT1(result)
                return False

    def addToMatchT1Table(self, processingItemObj, modalityID, t1Record):
        pet_date = datetime.strptime(processingItemObj.scan_date, '%Y-%m-%d')
        mri_date = datetime.combine(t1Record[4], datetime.min.time())
        date_diff = abs(mri_date - pet_date)
        t1ID = '{0}{1}{2}_x_{3}_x_{4}{5}{6}'.format(t1Record[1], t1Record[11], t1Record[2], t1Record[3], t1Record[4].strftime('%Y-%m-%d').replace('-', ''), t1Record[6], t1Record[7])
        conversionID = t1Record[0]
        sql = "INSERT IGNORE INTO MatchT1 VALUES (Null, '{0}', '{1}', '{2}', {3}, Null)".format(modalityID, t1ID, conversionID, date_diff.days)
        self.DBClient.executeNoResult(sql)

    def startProcessOFT1(self, processTableEntry):
        recordId = processTableEntry[0]
        study = processTableEntry[1]
        sql = "UPDATE {0}_T1_Pipeline SET SKIP = 0 WHERE PROCESSING_TID = {1}".format(study, recordId)
        self.DBClient.executeNoResult(sql)
class QSubJobStatusReporter:
    # Writes the outcome of finished QSUB jobs back to the per-modality pipeline tables and requests QC on success.
    def __init__(self):
        self.DBClient = DbUtils()
        self.QCHandler = QCHandler()

    def setStatus(self, job, status):
        if job.jobType not in ('beast', 'av45', 'av1451', 'fdg', 'pib'):
            return
        nestedJob = job.job
        table = '{0}_{1}_Pipeline'.format(nestedJob.study, nestedJob.modality)
        table_id = nestedJob.table_id
        if job.jobType == 'beast':
            if status == 'Success':
                setSql = 'UPDATE {0} SET BEAST_MASK = 1 WHERE RECORD_ID = {1}'.format(table, table_id)
            elif status == 'Fail':
                setSql = 'UPDATE {0} SET BEAST_MASK = -1, BEAST_SKIP = 1 WHERE RECORD_ID = {1}'.format(table, table_id)
            self.DBClient.executeNoResult(setSql)
        else:
            # The PET job types (av45, av1451, fdg, pib) share the same bookkeeping; only the QC type passed on differs.
            if status == 'Success':
                setSql = "UPDATE {0} SET FINISHED = 1, PROC_Failed = Null WHERE RECORD_ID = {1}".format(table, table_id)
                self.requestQC(nestedJob, job.jobType)
            elif status == 'Fail':
                setSql = "UPDATE {0} SET PROC_Failed = 'Failed' , SKIP = 1 WHERE RECORD_ID = {1}".format(table, table_id)
            self.DBClient.executeNoResult(setSql)
        if status == 'Fail':
            PipelineLogger.log('manager', 'error', 'QSUB job Status Failed: - {0} - Processing Table ID : {1} - Modality Table ID : {2}'.format(job.jobType, nestedJob.processing_rid, nestedJob.table_id))

    def requestQC(self, processingItemObj, qctype):
        qcFieldDict = dict(civet='QC', beast='BEAST_QC', av45='QC', fdg='QC', av1451='QC', pib='QC')
        qcFolderDict = {'civet': '{0}/civet'.format(processingItemObj.root_folder),
                        'beast': '{0}/beast'.format(processingItemObj.root_folder),
                        'av45': '{0}/processed'.format(processingItemObj.root_folder),
                        'av1451': '{0}/processed'.format(processingItemObj.root_folder),
                        'fdg': '{0}/processed'.format(processingItemObj.root_folder),
                        'pib': '{0}/processed'.format(processingItemObj.root_folder)}
        self.QCHandler.requestQC(processingItemObj.study, '{0}_{1}_Pipeline'.format(processingItemObj.study, processingItemObj.modality), processingItemObj.table_id, qcFieldDict[qctype], qctype, qcFolderDict[qctype])
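# --- Illustrative usage sketch (not part of the original sources) ---
# A minimal sketch of reporting the outcome of a tracked QSUB job. QSubJobHandler.submittedJobs and QSubJob appear
# in ADNI_V1_T1.runBeast above, and 'Success'/'Fail' are the status values setStatus() checks; the polling loop
# itself and how the real queue monitor decides the status are assumptions.
reporter = QSubJobStatusReporter()
for job_id, job in list(QSubJobHandler.submittedJobs.items()):
    reporter.setStatus(job, 'Success')  # or 'Fail', as determined by the actual queue monitor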
class ADNI_V1_T1: def __init__(self): self.DBClient = DbUtils() self.QCHandler = QCHandler() def process(self, processingItem): processingItemObj = ProcessingItemObj(processingItem) if processingItemObj.beast_skip and processingItemObj.manual_skip and not processingItemObj.civet: self.runCivet(processingItemObj, 'N') elif processingItemObj.manual_mask and not processingItemObj.manual_skip and not processingItemObj.civet: self.runCivet(processingItemObj, 'M') elif processingItemObj.beast_mask == 0 and not processingItemObj.beast_skip and processingItemObj.beast_qc == 0 and not processingItemObj.manual_mask: self.runBeast(processingItemObj) elif processingItemObj.beast_skip and not processingItemObj.manual_mask and not processingItemObj.manual_skip: PipelineLogger.log( 'manager', 'error', '$$$$$$$$$$$$$$$$$ Manual Mask Requested $$$$$$$$$$$$$$$$$$ - {0}' .format(processingItem)) pass elif processingItemObj.beast_mask == 1 and not processingItemObj.beast_skip and processingItemObj.beast_qc == 1 and not processingItemObj.manual_mask and not processingItemObj.civet: self.runCivet(processingItemObj, 'B') elif processingItemObj.beast_mask == 1 and not processingItemObj.beast_skip and processingItemObj.beast_qc == 0 and not processingItemObj.manual_mask and not processingItemObj.manual_skip: self.requestQC(processingItemObj, 'beast') elif processingItemObj.civet == 1 and processingItemObj.civet_qc == 0: self.requestQC(processingItemObj, 'civet') else: if processingItemObj.civet_qc == -1: PipelineLogger.log( 'manager', 'error', 'Civet QC failed. Skipping. - {0}'.format(processingItem)) PipelineLogger.log( 'manager', 'error', 'Error handling obj for processing - {0}'.format( processingItem)) return 0 def getScanType(self, processingItemObj): r = self.DBClient.executeAllResults( "SELECT SCAN_TYPE FROM Conversion WHERE STUDY = '{0}' AND RID = '{1}' " "AND SCAN_DATE = '{2}' AND S_IDENTIFIER = '{3}' " "AND I_IDENTIFIER = '{4}'".format(processingItemObj.study, processingItemObj.subject_rid, processingItemObj.scan_date, processingItemObj.s_identifier, processingItemObj.i_identifier)) return r[0][0] def checkNative(self, processingItemObj): orig_ScanType = self.getScanType(processingItemObj) converted_file = '{0}/{1}_{2}{3}{4}{5}_{6}.mnc'.format( processingItemObj.converted_folder, processingItemObj.study, processingItemObj.subject_rid, processingItemObj.scan_date.replace('-', ''), processingItemObj.s_identifier, processingItemObj.i_identifier, orig_ScanType) nativeFolder = '{0}/native'.format(processingItemObj.root_folder) nativeFileName = '{0}/{1}_{2}{3}{4}{5}_{6}.mnc'.format( nativeFolder, processingItemObj.study, processingItemObj.subject_rid, processingItemObj.scan_date.replace('-', ''), processingItemObj.s_identifier, processingItemObj.i_identifier, processingItemObj.modality.lower()) if not os.path.exists(nativeFileName): try: distutils.dir_util.mkpath(nativeFolder) shutil.copyfile(converted_file, nativeFileName) except Exception as e: PipelineLogger.log( 'manager', 'error', 'Error in creating folders or copying native file. \n {0}'. format(e)) PipelineLogger.log( 'manager', 'error', 'Setting to restart conversion. 
\n {0}'.format(e)) sql = "UPDATE Conversion SET CONVERTED = 0, SKIP = 0 WHERE S_IDENTIFIER = '{0}' AND I_IDENTIFIER = '{1}'".format( processingItemObj.s_identifier, processingItemObj.i_identifier) self.DBClient.executeNoResult(sql) return None return nativeFileName def runBeast(self, processingItemObj): nativeFileName = self.checkNative(processingItemObj) if not nativeFileName: return 0 beastFolder = '{0}/beast'.format(processingItemObj.root_folder) logDir = '{0}/logs'.format(processingItemObj.root_folder) PipelineLogger.log('manager', 'info', 'BeAST starting for {0}'.format(nativeFileName)) PipelineLogger.log('manager', 'info', 'Current working folder : {0}'.format(os.getcwd())) try: distutils.dir_util.mkpath(logDir) except Exception as e: PipelineLogger.log('manager', 'error', 'Error in creating log folder \n {0}'.format(e)) return 0 id = '{0}{1}{2}{3}'.format( processingItemObj.subject_rid, processingItemObj.scan_date.replace('-', ''), processingItemObj.s_identifier, processingItemObj.i_identifier) beastCMD = 'source /opt/minc-1.9.15/minc-toolkit-config.sh; Pipelines/ADNI_T1/ADNI_V1_T1_BeAST {0} {1} {2} {3} {4} {5}'.format( id, nativeFileName, beastFolder, logDir, socket.gethostname(), 50500) try: shutil.rmtree(beastFolder) except: pass try: distutils.dir_util.mkpath(beastFolder) except Exception as e: PipelineLogger.log( 'manager', 'error', 'Error in creating BeAST folder. \n {0}'.format(e)) return 0 PipelineLogger.log('manager', 'debug', 'Command : {0}'.format(beastCMD)) os.chdir(pc.SourcePath) p = subprocess.Popen(beastCMD, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, executable='/bin/bash') out, err = p.communicate() PipelineLogger.log( 'manager', 'debug', 'Beast Log Output : \n{0}'.format(out.decode("utf-8"))) PipelineLogger.log('manager', 'debug', 'Beast Log Err : \n{0}'.format(err.decode("utf-8"))) QSubJobHandler.submittedJobs[id] = QSubJob(id, '02:00:00', processingItemObj, 'beast') return 1 def runCivet(self, processingItemObj, maskStatus): nativeFileName = self.checkNative(processingItemObj) if not nativeFileName: return 0 copyFolder = pc.T1TempDirForCIVETProcessing subjectFileName_base = '{0}_{1}{2}{3}{4}_{5}'.format( processingItemObj.study, processingItemObj.subject_rid, processingItemObj.scan_date.replace('-', ''), processingItemObj.s_identifier, processingItemObj.i_identifier, processingItemObj.modality.lower()) jobId = '{0}_{1}_{2}_{3}{4}{5}{6}_CIVETRUN'.format( processingItemObj.study, processingItemObj.modality, processingItemObj.table_id, processingItemObj.subject_rid, processingItemObj.scan_date.replace('-', ''), processingItemObj.s_identifier, processingItemObj.i_identifier) checkJobPresentSql = "SELECT * FROM externalWaitingJobs WHERE JOB_ID = '{0}'".format( jobId) if len(self.DBClient.executeAllResults(checkJobPresentSql)) is 0: beastFileName = '{0}/beast/mask/{1}_skull_mask_native.mnc'.format( processingItemObj.root_folder, subjectFileName_base) beastMaskName_base = '{0}_{1}{2}{3}{4}_mask'.format( processingItemObj.study, processingItemObj.subject_rid, processingItemObj.scan_date.replace('-', ''), processingItemObj.s_identifier, processingItemObj.i_identifier) beastMaskName = '{0}/{1}.mnc'.format(copyFolder, beastMaskName_base) manualFileName = '{0}/manual/mask/{1}_skull_mask_native.mnc'.format( processingItemObj.root_folder, subjectFileName_base) manualMaskName_base = '{0}_{1}{2}{3}{4}_mask'.format( processingItemObj.study, processingItemObj.subject_rid, processingItemObj.scan_date.replace('-', ''), processingItemObj.s_identifier, 
processingItemObj.i_identifier) manualMaskName = '{0}/{1}.mnc'.format(copyFolder, manualMaskName_base) try: distutils.file_util.copy_file(nativeFileName, copyFolder) if maskStatus == 'B': distutils.file_util.copy_file(beastFileName, beastMaskName) elif maskStatus == 'M': distutils.file_util.copy_file(manualFileName, manualMaskName) elif maskStatus == 'N': pass else: PipelineLogger.log( 'manager', 'error', 'Unknown mask status - {0} Entry : Processing ID - {1}, Table ID - {2}' .format(maskStatus, processingItemObj.processing_rid, processingItemObj.table_id)) addExternalJobSQL = "INSERT INTO externalWaitingJobs VALUES ('{0}', '{1}', '{2}', NULL, NULL, NULL)".format( jobId, '{0}_{1}_Pipeline'.format(processingItemObj.study, processingItemObj.modality), 'CIVET') self.DBClient.executeNoResult(addExternalJobSQL) except Exception as e: PipelineLogger.log( 'manager', 'error', 'Error copying for CIVET input. Rolling back... - Processing Table ID -> {0} Table ID -> {1}' .format(processingItemObj.processing_rid, processingItemObj.table_id)) PipelineLogger.log('manager', 'exception', e) nativeFileOnCopyFolder = '{0}/{1}'.format( copyFolder, os.path.basename(nativeFileName)) os.remove(nativeFileOnCopyFolder) if os.path.exists( nativeFileOnCopyFolder) else None os.remove(beastMaskName) if os.path.exists( beastMaskName) else None os.remove(manualMaskName) if os.path.exists( manualMaskName) else None def requestQC(self, processingItemObj, qctype): qcFieldDict = dict(civet='QC', beast='BEAST_QC') qcFolderDict = { 'civet': '{0}/civet'.format(processingItemObj.root_folder), 'beast': '{0}/beast'.format(processingItemObj.root_folder) } tablename = '{0}_{1}_Pipeline'.format(processingItemObj.study, processingItemObj.modality) self.QCHandler.requestQC(processingItemObj.study, tablename, processingItemObj.table_id, qcFieldDict[qctype], qctype, qcFolderDict[qctype])
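# --- Illustrative sketch (not part of the pipeline) --------------------------------
# The if/elif chain in ADNI_V1_T1.process() is effectively a small state machine over
# the pipeline-table flags. The helper below restates the same decision logic in one
# place for readability only; choose_t1_step() and its return labels are hypothetical
# names introduced here, not functions the pipeline actually uses.
def choose_t1_step(item):
    """Map the flag columns of an ADNI T1 pipeline row to the next action."""
    if item.beast_skip and item.manual_skip and not item.civet:
        return 'civet_no_mask'            # runCivet(item, 'N')
    if item.manual_mask and not item.manual_skip and not item.civet:
        return 'civet_manual_mask'        # runCivet(item, 'M')
    if item.beast_mask == 0 and not item.beast_skip and item.beast_qc == 0 and not item.manual_mask:
        return 'run_beast'                # runBeast(item)
    if item.beast_skip and not item.manual_mask and not item.manual_skip:
        return 'wait_for_manual_mask'     # logged as "Manual Mask Requested"
    if item.beast_mask == 1 and not item.beast_skip and item.beast_qc == 1 and not item.manual_mask and not item.civet:
        return 'civet_beast_mask'         # runCivet(item, 'B')
    if item.beast_mask == 1 and not item.beast_skip and item.beast_qc == 0 and not item.manual_mask and not item.manual_skip:
        return 'request_beast_qc'         # requestQC(item, 'beast')
    if item.civet == 1 and item.civet_qc == 0:
        return 'request_civet_qc'         # requestQC(item, 'civet')
    return 'no_action'                    # includes the civet_qc == -1 (QC failed) case
# ------------------------------------------------------------------------------------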
__author__ = 'sulantha' from Utils.DbUtils import DbUtils DBClient = DbUtils() RIDList = ['4225','4746','4799','4136','4142','4192','4713','4960','4387','0021','4827','4579','4580','4616','4668','4696','4809','4549','4680','5012','5019','4674','4757','4385','4721','4947','4714','4715','4736','4706','4720','4661','4728','4767','4739','4089','4379','0382','4732','0230','4586','4653','4671','4742','4369','4589','4730','4676','4689','4722','4723','4587','4631','4632','4672','4678','4756','4711','4764'] for rid in RIDList: sql1 = "DELETE FROM Sorting WHERE RID = {0} AND SCAN_TYPE NOT IN ('AV45', 'FDG')".format(rid) DBClient.executeNoResult(sql1) sql2 = "DELETE FROM Conversion WHERE RID = {0} AND SCAN_TYPE NOT IN ('AV45', 'FDG')".format(rid) DBClient.executeNoResult(sql2) sql3 = "SELECT RECORD_ID FROM Processing WHERE RID = {0} AND MODALITY NOT IN ('AV45', 'FDG')".format(rid) recs = DBClient.executeAllResults(sql3) for rec in recs: sql4 = "DELETE FROM ADNI_T1_Pipeline WHERE PROCESSING_TID = {0}".format(rec[0]) DBClient.executeNoResult(sql4) sql5 = "DELETE FROM Processing WHERE RID = {0} AND MODALITY NOT IN ('AV45', 'FDG')".format(rid) DBClient.executeNoResult(sql5)
__author__ = 'Sulantha' from Utils.DbUtils import DbUtils import glob, os, sys, fileinput if __name__ == '__main__': DBClient = DbUtils() sql1 = "SELECT * FROM Processing WHERE PROCESSED = 1 AND MODALITY ='T1'" res = DBClient.executeAllResults(sql1) for result in res: proc_id = result[0] #sql2 = "SELECT * FROM Processing WHERE RECORD_ID = {0}".format(proc_id) #process_rec = DBClient.executeAllResults(sql2)[0] T1Path = result[8] try: civet_nl_xfm_name = '{0}/civet/transforms/nonlinear/*nlfit_It.xfm'.format(T1Path) civet_nl_xfm_file = glob.glob(civet_nl_xfm_name)[0] civet_nl_mnc_name = '{0}/civet/transforms/nonlinear/*nlfit_It_grid_0.mnc'.format(T1Path) civet_nl_mnc_file = glob.glob(civet_nl_mnc_name)[0] civet_nl_mnc_name_base = os.path.basename(civet_nl_mnc_file) for line in fileinput.input(civet_nl_xfm_file, inplace=True): if 'Displacement_Volume' in line: line = 'Displacement_Volume = {0};'.format(civet_nl_mnc_name_base) sys.stdout.write(line) except: s = "UPDATE Processing SET QCPASSED = 0 WHERE RECORD_ID = {0}".format(proc_id) DBClient.executeNoResult(s) print('Files not found - {0} - {1}'.format(proc_id, T1Path))
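# --- Illustrative sketch (not part of the pipeline) --------------------------------
# The script above relies on fileinput.input(..., inplace=True): while iterating,
# stdout is redirected into the file being edited, so whatever is written replaces the
# original line. fix_displacement_volume() is a hypothetical standalone helper showing
# the same rewrite for a single .xfm / grid-file pair.
import fileinput
import os
import sys

def fix_displacement_volume(xfm_file, grid_mnc_file):
    """Point the Displacement_Volume entry of a CIVET nonlinear .xfm at the grid file's basename."""
    grid_base = os.path.basename(grid_mnc_file)
    for line in fileinput.input(xfm_file, inplace=True):
        if 'Displacement_Volume' in line:
            line = 'Displacement_Volume = {0};\n'.format(grid_base)   # keep a newline so any following lines survive
        sys.stdout.write(line)
# ------------------------------------------------------------------------------------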
from Utils.DbUtils import DbUtils import shutil Dbclient = DbUtils() with open(maskList, 'r') as inf: for line in inf: row = line.split(',') if row[0].strip() == 'None' or row[1].strip() == 'None' or row[2].strip() == 'None': pass else: study = row[1].split('/')[-1].split('_')[0].upper() rid = row[2].split('_')[3] t1sid = row[2].split('.')[0].split('_')[-2] t1iid = row[2].split('.')[0].split('_')[-1] uid = 'SKULLMASK_{0}_{1}_{2}_{3}'.format(study, rid, t1sid, t1iid) path = '{0}/{1}.mnc'.format(outputpath, uid) print(study, rid, uid, sep=', ') try: shutil.copyfile(row[0], path) Dbclient.executeNoResult( "INSERT IGNORE INTO MANUAL_MASK VALUES (Null, '{0}', '{1}', '{2}', '{3}')" .format(study, rid, uid, path)) sql2 = "UPDATE ADNI_T1_Pipeline SET MANUAL_MASK = 1 WHERE PROCESSING_TID IN (SELECT RECORD_ID FROM Processing WHERE RID = {0} AND S_IDENTIFIER = '{1}' AND I_IDENTIFIER = '{2}')".format( rid, t1sid, t1iid) Dbclient.executeNoResult(sql2) except Exception as e: print('Error copy. {0}'.format(e))
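# --- Illustrative sketch (not part of the pipeline) --------------------------------
# maskList and outputpath are assumed to be defined earlier in the original script;
# their definitions are not shown above. The index arithmetic on row[2] implies a mask
# filename whose underscore-separated fields carry the RID and end with the T1 S- and
# I-identifiers. The row below is made up purely to demonstrate the parsing.
row = '/tmp/masks/in_mask.mnc,/data/native/adni_native_t1.mnc,ADNI_mask_native_4225_fallback_S123456_I789012.mnc'.split(',')
study = row[1].split('/')[-1].split('_')[0].upper()    # 'ADNI'
rid = row[2].split('_')[3]                             # '4225'
t1sid = row[2].split('.')[0].split('_')[-2]            # 'S123456'
t1iid = row[2].split('.')[0].split('_')[-1]            # 'I789012'
uid = 'SKULLMASK_{0}_{1}_{2}_{3}'.format(study, rid, t1sid, t1iid)
assert uid == 'SKULLMASK_ADNI_4225_S123456_I789012'
# ------------------------------------------------------------------------------------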
__author__ = 'sulantha' import datetime from Utils.DbUtils import DbUtils DBClient = DbUtils() with open('/data/data03/sulantha/Downloads/fdg_list.csv', 'r') as file: next(file) for line in file: row = line.split(',') rid = row[0] date = row[1].strip() dateT = datetime.datetime.strptime(date, '%Y-%m-%d') dateS = dateT.strftime('%Y-%m-%d') findSQL = "SELECT * FROM Processing WHERE RID = {0} AND MODALITY = 'FDG' AND SCAN_DATE = '{1}'".format( rid, dateS) res = DBClient.executeAllResults(findSQL) print('{0}-{1} {2}'.format( rid, len(res), '############' if len(res) == 0 else '')) if len(res) == 0 else None processingSQL = "UPDATE Processing SET SKIP = 0 WHERE RID = {0} AND MODALITY = 'FDG' AND SCAN_DATE = '{1}'".format( rid, dateS) DBClient.executeNoResult(processingSQL)
from Utils.DbUtils import DbUtils import shutil Dbclient = DbUtils() with open(xfmList, 'r') as inf: for line in inf: row = line.split(',') if row[0].strip() == 'None' or row[2].strip() == 'None' or row[4].strip() == 'None': pass else: study = row[0].split('/')[-1].split('_')[0].upper() rid = row[0].split('/')[-1].split('_')[1][2:-2] petsid = row[2].split('.')[0].split('_')[-2] petiid = row[2].split('.')[0].split('_')[-1] t1sid = row[4].split('.')[0].split('_')[-2] t1iid = row[4].split('.')[0].split('_')[-1] uid = 'PET_{0}_{1}_T1_{2}_{3}'.format(petsid, petiid, t1sid, t1iid) path = '{0}/{1}_{2}_{3}.xfm'.format(outputpath, study, rid, uid) if petiid.startswith('I') and petsid.startswith('S'): print(study, rid, uid, sep=', ') else: print('PET - {0}'.format(row[2])) try: shutil.copyfile(row[0], path) except Exception as e: print('Error copy. {0}'.format(e)) Dbclient.executeNoResult("INSERT IGNORE INTO MANUAL_XFM VALUES (Null, '{0}', '{1}', '{2}', '{3}')".format(study, rid, uid, path))
import csv from Utils.DbUtils import DbUtils inputFile = '/home/sulantha/reRUNAv45.csv' DBC = DbUtils() with open(inputFile, 'r') as inputFile: csvFile = csv.reader(inputFile) for line in csvFile: RID = line[0].split('/')[6] IID = line[0].split('/')[7].split('_')[-1] sql = "UPDATE ADNI_AV45_Pipeline SET FINISHED = 0, SKIP = 0 WHERE PROCESSING_TID IN (SELECT RECORD_ID FROM Processing WHERE RID = {0} AND I_IDENTIFIER = '{1}')".format(RID, IID) DBC.executeNoResult(sql)
class ADNI_V1_AV1451: def __init__(self): self.DBClient = DbUtils() self.MatchDBClient = DbUtils(database=pc.ADNI_dataMatchDBName) self.PETHelper = PETHelper() def process(self, processingItem): processingItemObj = ProcessingItemObj(processingItem) matching_t1 = ADNI_T1_Helper().getMatchingT1(processingItemObj) if not matching_t1: PipelineLogger.log('root', 'error', 'PET cannot be processed no matching T1 found. - {0} - {1} - {2}.'.format(processingItemObj.subject_rid, processingItemObj.modality, processingItemObj.scan_date)) return 0 processed = ADNI_T1_Helper().checkProcessed(matching_t1) if not processed: PipelineLogger.log('root', 'error', 'PET cannot be processed due to matching T1 not being processed - {0}'.format(matching_t1)) return 0 else: PipelineLogger.log('root', 'INFO', '+++++++++ PET ready to be processed. Will check for xfm. - {0} - {1}'.format(processingItemObj.subject_rid, processingItemObj.scan_date)) if processingItemObj.manual_xfm == '': manualXFM = self.PETHelper.getManualXFM(processingItemObj, matching_t1) processingItemObj.manual_xfm = manualXFM elif processingItemObj.manual_xfm == 'Req_man_reg': coregDone = self.PETHelper.checkIfAlreadyDone(processingItemObj, matching_t1) if coregDone: manualXFM = coregDone setPPTableSQL = "UPDATE {0}_{1}_Pipeline SET MANUAL_XFM = '{2}' WHERE RECORD_ID = {3}".format(processingItemObj.study, processingItemObj.modality, manualXFM, processingItemObj.table_id) self.DBClient.executeNoResult(setPPTableSQL) else: self.PETHelper.requestCoreg(processingItemObj, matching_t1) PipelineLogger.log('root', 'INFO', 'Manual XFM was not found. Request to create one may have added. - {0} - {1}'.format(processingItemObj.subject_rid, processingItemObj.scan_date)) return 0 else: manualXFM = processingItemObj.manual_xfm if manualXFM: self.processPET(processingItemObj, processed) else: PipelineLogger.log('root', 'INFO', 'Manual XFM was not found. Request to create one may have added. 
- {0} - {1}'.format(processingItemObj.subject_rid, processingItemObj.scan_date)) return 0 def getScanType(self, processingItemObj): r = self.DBClient.executeAllResults("SELECT SCAN_TYPE FROM Conversion WHERE STUDY = '{0}' AND RID = '{1}' " "AND SCAN_DATE = '{2}' AND S_IDENTIFIER = '{3}' " "AND I_IDENTIFIER = '{4}'".format(processingItemObj.study, processingItemObj.subject_rid, processingItemObj.scan_date, processingItemObj.s_identifier, processingItemObj.i_identifier)) return r[0][0] def processPET(self, processingItemObj, matchT1Path): petFileName = '{0}/{1}_{2}{3}{4}{5}_{6}.mnc'.format(processingItemObj.converted_folder, processingItemObj.study, processingItemObj.subject_rid, processingItemObj.scan_date.replace('-', ''), processingItemObj.s_identifier, processingItemObj.i_identifier, self.getScanType(processingItemObj)) processedFolder = '{0}/processed'.format(processingItemObj.root_folder) logDir = '{0}/logs'.format(processingItemObj.root_folder) PipelineLogger.log('manager', 'info', 'PET processing starting for {0}'.format(petFileName)) try: distutils.dir_util.mkpath(logDir) except Exception as e: PipelineLogger.log('manager', 'error', 'Error in creating log folder \n {0}'.format(e)) return 0 id = '{0}{1}{2}{3}'.format(processingItemObj.subject_rid, processingItemObj.scan_date.replace('-', ''), processingItemObj.s_identifier, processingItemObj.i_identifier) paramStrd = ast.literal_eval(processingItemObj.parameters) paramStrt = ' '.join(['[\"{0}\"]=\"{1}\"'.format(k, v) for k,v in paramStrd.items()]) paramStr = '({0})'.format(paramStrt) petCMD = "source /opt/minc-1.9.15/minc-toolkit-config.sh; Pipelines/ADNI_AV1451/ADNI_V1_AV1451_Process {0} {1} {2} {3} {4} {5} '{6}' {7} {8}".format(id, petFileName, processedFolder, matchT1Path, processingItemObj.manual_xfm, logDir, paramStr,socket.gethostname(), 50500) try: processedFolder_del = '{0}/processed_del'.format(processingItemObj.root_folder) os.rename(processedFolder, processedFolder_del) shutil.rmtree(processedFolder_del) except Exception as e: PipelineLogger.log('manager', 'error', 'Error in deleting old processing folder. \n {0}'.format(e)) try: distutils.dir_util.mkpath(processedFolder) except Exception as e: PipelineLogger.log('manager', 'error', 'Error in creating processing folder. \n {0}'.format(e)) return 0 ### This section is new for ADNI Pre processing - Per scanner type blurring. Only required if ### the images are aquired from different scanners and need to get to same PSF. blur_x, blur_y, blur_z = self.PETHelper.getBlurringParams(processingItemObj) ### End pre processing. PipelineLogger.log('manager', 'debug', 'Command : {0}'.format(petCMD)) p = subprocess.Popen(petCMD, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, executable='/bin/bash') out, err = p.communicate() PipelineLogger.log('manager', 'debug', 'Process Log Output : \n{0}'.format(out)) PipelineLogger.log('manager', 'debug', 'Process Log Err : \n{0}'.format(err)) QSubJobHandler.submittedJobs[id] = QSubJob(id, '02:00:00', processingItemObj, 'av1451') return 1
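# --- Illustrative sketch (not part of the pipeline) --------------------------------
# processingItemObj.parameters is stored as a Python dict literal. processPET() turns
# it into a bash associative-array initializer so the downstream shell script can do
# declare -A PARAMS=(...). The dict contents below are made up for illustration.
import ast

parameters = "{'ref_region': 'cerebellum', 'smoothing': '6'}"   # hypothetical value of the parameters column
paramStrd = ast.literal_eval(parameters)
paramStrt = ' '.join(['[\"{0}\"]=\"{1}\"'.format(k, v) for k, v in paramStrd.items()])
paramStr = '({0})'.format(paramStrt)
# paramStr -> '(["ref_region"]="cerebellum" ["smoothing"]="6")'
# ------------------------------------------------------------------------------------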
class PipelineHandler: def __init__(self): self.processingPPDict = { 'ADNI': { 'V1': { 'T1': ADNI_V1_T1(), 'FMRI': ADNI_V1_FMRI(), 'AV45': ADNI_V1_AV45(), 'FDG': ADNI_V1_FDG(), 'AV1451': ADNI_V1_AV1451() }, 'V2': { 'T1': ADNI_V1_T1(), 'FMRI': ADNI_V1_FMRI(), 'AV45': ADNI_V2_AV45(), 'FDG': ADNI_V2_FDG(), 'AV1451': ADNI_V2_AV1451() } }, 'DIAN': { 'V1': { 'T1': DIAN_V1_T1(), 'FDG': DIAN_V1_FDG(), 'PIB': DIAN_V1_PIB() } } } self.DBClient = DbUtils() self.QCH = QCHandler() def checkExternalJobs(self, study, modality): getExtJobSql = "SELECT * FROM externalWaitingJobs WHERE JOB_ID LIKE '{0}_{1}_%'".format( study, modality) extJobs = self.DBClient.executeAllResults(getExtJobSql) for job in extJobs: jobType = job[0].split('_')[-1] reportTable = job[1] tableID = job[0].split('_')[2] reportField = job[2] subjectScanID = job[0].split('_')[3] success = 0 if jobType == 'CIVETRUN': if glob.glob('{0}/{1}_{2}_*'.format( PipelineConfig.T1TempDirForCIVETDownload, study, subjectScanID)): getProccessRecSql = "SELECT * FROM Processing WHERE RECORD_ID IN (SELECT PROCESSING_TID FROM {0}_T1_Pipeline WHERE RECORD_ID = {1})".format( study, tableID) processingEntry = self.DBClient.executeAllResults( getProccessRecSql)[0] civetFolder = '{0}/civet'.format(processingEntry[8]) if os.path.exists(civetFolder): shutil.rmtree(civetFolder) try: PipelineLogger.log( 'manager', 'info', 'Copying - {0} -> {1}'.format( glob.glob('{0}/{1}_{2}_*'.format( PipelineConfig.T1TempDirForCIVETDownload, study, subjectScanID))[0], civetFolder)) dir_util.copy_tree( glob.glob('{0}/{1}_{2}_*'.format( PipelineConfig.T1TempDirForCIVETDownload, study, subjectScanID))[0], civetFolder) success = 1 except: success = 0 else: continue else: PipelineLogger.log( 'manager', 'error', 'Unknown external job type - {}'.format(jobType)) if success: updateSQL = "UPDATE {0} SET {1} = 1 WHERE RECORD_ID = {2}".format( reportTable, reportField, tableID) self.DBClient.executeNoResult(updateSQL) if jobType == 'CIVETRUN': finishSQL = "UPDATE {0} SET FINISHED = 1 WHERE RECORD_ID = {1}".format( reportTable, tableID) self.DBClient.executeNoResult(finishSQL) modal_table = reportTable modal_tableId = tableID qcField = 'QC' qctype = 'civet' qcFolder = civetFolder self.QCH.requestQC(study, modal_table, modal_tableId, qcField, qctype, qcFolder) rmSql = "DELETE FROM externalWaitingJobs WHERE JOB_ID LIKE '{0}_{1}_{2}_{3}_%'".format( study, modality, tableID, subjectScanID) self.DBClient.executeNoResult(rmSql) def process(self, study, modality): os.environ['PATH'] = ':'.join(libpath.PATH) os.environ['LD_LIBRARY_PATH'] = ':'.join(libpath.LD_LIBRARY_PATH) os.environ['LD_LIBRARYN32_PATH'] = ':'.join(libpath.LD_LIBRARYN32_PATH) os.environ['PERL5LIB'] = ':'.join(libpath.PERL5LIB) os.environ['MNI_DATAPATH'] = ':'.join(libpath.MNI_DATAPATH) os.environ['ROOT'] = ';'.join(libpath.ROOT) os.environ['MINC_TOOLKIT_VERSION'] = libpath.MINC_TOOLKIT_VERSION os.environ['MINC_COMPRESS'] = libpath.MINC_COMPRESS os.environ['MINC_FORCE_V2'] = libpath.MINC_FORCE_V2 toProcessinModalityPerStudy = self.DBClient.executeAllResults( "SELECT * FROM Processing INNER JOIN (SELECT * FROM {0}_{1}_Pipeline WHERE NOT (FINISHED OR SKIP)) as TMP ON Processing.RECORD_ID=TMP.PROCESSING_TID" .format(study, modality)) for processingItem in toProcessinModalityPerStudy: version = processingItem[10] # Calling on the process .section of given studies and modalities self.processingPPDict[study][version][modality].process( processingItem) return 0 def addToPipelineTable(self, processingObj): study = processingObj.study 
version = processingObj.version modality = processingObj.modality r_id = processingObj.record_id addToTableDict = dict( T1= "INSERT IGNORE INTO {0}_T1_Pipeline VALUES (NULL, {1}, \"{2}\", 0, 0, 0, 0, 0, 0, 0, 0, 0, NULL, NULL)" .format(study, r_id, PipelineConfig.defaultT1config), AV45= "INSERT IGNORE INTO {0}_AV45_Pipeline VALUES (NULL, {1}, \"{2}\", '{3}', 0, 0, 0, NULL, NULL)" .format(study, r_id, PipelineConfig.defaultAV45config, ''), AV1451= "INSERT IGNORE INTO {0}_AV1451_Pipeline VALUES (NULL, {1}, \"{2}\", '{3}', 0, 0, 0, NULL, NULL)" .format(study, r_id, PipelineConfig.defaultAV1451config, ''), FDG= "INSERT IGNORE INTO {0}_FDG_Pipeline VALUES (NULL, {1}, \"{2}\", '{3}', 0, 0, 0, NULL, NULL)" .format(study, r_id, PipelineConfig.defaultFDGconfig, ''), PIB= "INSERT IGNORE INTO {0}_PIB_Pipeline VALUES (NULL, {1}, \"{2}\", '{3}', 0, 0, 0, NULL, NULL)" .format(study, r_id, PipelineConfig.defaultFDGconfig, ''), FMRI= "INSERT IGNORE INTO {0}_FMRI_Pipeline VALUES (NULL, {1}, \"{2}\", '{3}', 0, 0, 0, NULL, NULL)" .format(study, r_id, PipelineConfig.defaultFMRIconfig, 'NIAK_STH_COMESHERE')) self.DBClient.executeNoResult(addToTableDict[modality])
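# --- Illustrative sketch (not part of the pipeline) --------------------------------
# PipelineHandler.process() routes every joined Processing row through
# self.processingPPDict[study][version][modality].process(row). The snippet below
# shows that lookup in isolation, with a dummy handler standing in for the real
# per-modality pipeline objects; adding a new study, version or modality only means
# registering another handler under the matching keys.
class _DummyHandler:
    def process(self, processingItem):
        print('processing', processingItem)
        return 1

processingPPDict = {'ADNI': {'V1': {'T1': _DummyHandler()}}}
study, version, modality = 'ADNI', 'V1', 'T1'
processingPPDict[study][version][modality].process(('fake', 'processing', 'row'))
# ------------------------------------------------------------------------------------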
class PETHelper: def __init__(self): self.DBClient = DbUtils() self.CoregHand = CoregHandler() self.client = MongoClient('localhost', 27017) self.db = self.client.ADNI_Database self.XML_collection = self.db.Scan_XML_Collection def getManualXFM(self, processingItemObj, matchedT1entry): study = processingItemObj.study rid = processingItemObj.subject_rid pet_sid = processingItemObj.s_identifier pet_iid = processingItemObj.i_identifier t1_sid = matchedT1entry[6] t1_iid = matchedT1entry[7] xfmUID = 'PET_{0}_{1}_T1_{2}_{3}'.format(pet_sid, pet_iid, t1_sid, t1_iid) getXFMSQL = "SELECT * FROM MANUAL_XFM WHERE STUDY = '{0}' AND RID = '{1}' AND XFM_UNIQUEID = '{2}'".format(study, rid, xfmUID) res = self.DBClient.executeAllResults(getXFMSQL) if len(res) > 0: PipelineLogger.log('root', 'INFO', '++ Manual XFM found. - {0} - {1}'.format(processingItemObj.subject_rid, processingItemObj.scan_date)) manXFM = res[0][4] updateSQL = "UPDATE {0}_{1}_Pipeline SET MANUAL_XFM = '{2}' WHERE PROCESSING_TID = {3}".format(study, processingItemObj.modality, manXFM, processingItemObj.processing_rid) self.DBClient.executeNoResult(updateSQL) return manXFM else: PipelineLogger.log('root', 'INFO', '$$$$$$$ Manual XFM not found. Trying to find using uncorrected T1s. - {0} - {1}'.format(processingItemObj.subject_rid, processingItemObj.scan_date)) mustMatchT1SID = t1_sid mustMatchT1IID = t1_iid xfmApproximation = 'PET_{0}_{1}_T1_%_%'.format(pet_sid, pet_iid) getAllT1s = "SELECT * FROM MANUAL_XFM WHERE STUDY = '{0}' AND RID = '{1}' AND XFM_UNIQUEID LIKE '{2}'".format(study, rid, xfmApproximation) approxRes = self.DBClient.executeAllResults(getAllT1s) getFromProcessingSQL = "SELECT * FROM Processing WHERE (STUDY, RID, SCAN_DATE, SCAN_TIME) = (SELECT `STUDY`, `RID`, `SCAN_DATE`, `SCAN_TIME` FROM `Processing` WHERE MODALITY = 'T1' AND `S_IDENTIFIER` = '{0}' AND `I_IDENTIFIER` = '{1}')".format(mustMatchT1SID, mustMatchT1IID) allT1s = self.DBClient.executeAllResults(getFromProcessingSQL) for t1 in allT1s: t1sid = t1[6] t1iid = t1[7] for appRes in approxRes: approxResSID = appRes[3].split('_')[4] approxResIID = appRes[3].split('_')[5] if t1sid == approxResSID and t1iid == approxResIID: PipelineLogger.log('root', 'INFO', '++ Manual XFM found from approximate matching. - {0} - {1}'.format(processingItemObj.subject_rid, processingItemObj.scan_date)) manXFM = appRes[4] updateSQL = "UPDATE {0}_{1}_Pipeline SET MANUAL_XFM = '{2}' WHERE PROCESSING_TID = {3}".format(study, processingItemObj.modality, manXFM, processingItemObj.processing_rid) self.DBClient.executeNoResult(updateSQL) return manXFM self.requestCoreg(processingItemObj, matchedT1entry) return None def getScanType(self, processingItemObj): r = self.DBClient.executeAllResults("SELECT SCAN_TYPE FROM Conversion WHERE STUDY = '{0}' AND RID = '{1}' " "AND SCAN_DATE = '{2}' AND S_IDENTIFIER = '{3}' " "AND I_IDENTIFIER = '{4}'".format(processingItemObj.study, processingItemObj.subject_rid, processingItemObj.scan_date, processingItemObj.s_identifier, processingItemObj.i_identifier)) return r[0][0] def requestCoreg(self, processingItemObj, matchedT1entry): PipelineLogger.log('root', 'INFO', '$$$$$$$ Manual XFM not found. Requesting manual XFM. 
- {0} - {1}'.format(processingItemObj.subject_rid, processingItemObj.scan_date)) study = processingItemObj.study rid = processingItemObj.subject_rid pet_sid = processingItemObj.s_identifier pet_iid = processingItemObj.i_identifier t1_sid = matchedT1entry[6] t1_iid = matchedT1entry[7] pet_folder = processingItemObj.converted_folder pet_scanType = self.getScanType(processingItemObj) t1_civet_root = self.DBClient.executeAllResults("SELECT ROOT_FOLDER FROM Processing WHERE S_IDENTIFIER = '{0}' " "AND I_IDENTIFIER = '{1}' AND PROCESSED = 1".format(t1_sid, t1_iid)) if len(t1_civet_root)>0: t1_folder = t1_civet_root[0][0] t1_scanType = matchedT1entry[3] xfmFileName = '{0}_{1}_PET_{2}_{3}_T1_{4}_{5}'.format(study, rid, pet_sid, pet_iid, t1_sid, t1_iid) self.CoregHand.requestCoreg(study, rid, processingItemObj.modality, pet_folder, t1_folder, pet_scanType, t1_scanType, xfmFileName) else: PipelineLogger.log('root', 'error', 'T1 files not processed and cannot be added for manual coregistration - {0} - {1} - {2}'.format(processingItemObj.subject_rid, processingItemObj.scan_date, matchedT1entry[10])) def checkIfAlreadyDone(self, processingItemObj, matchedT1entry): study = processingItemObj.study rid = processingItemObj.subject_rid pet_sid = processingItemObj.s_identifier pet_iid = processingItemObj.i_identifier t1_sid = matchedT1entry[6] t1_iid = matchedT1entry[7] XFM_UNIQID = 'PET_{0}_{1}_T1_{2}_{3}'.format(pet_sid, pet_iid, t1_sid, t1_iid) checkAlreadyDoneSQL = "SELECT * FROM MANUAL_XFM WHERE XFM_UNIQUEID = '{0}'".format(XFM_UNIQID) result = self.DBClient.executeAllResults(checkAlreadyDoneSQL) if len(result) > 0: return result[0][4] else: return None def getScannerType(self, processingItemObj, source): if source == 'xml': rid = processingItemObj.subject_rid sid = processingItemObj.s_identifier iid = processingItemObj.i_identifier scan_info_dict = {} matched_doc = self.XML_collection.find_one({'_id':'{0}_{1}_{2}'.format(rid, sid, iid)}) if matched_doc: for rec in matched_doc['idaxs']['project']['subject']['study']['series']['imagingProtocol']['protocolTerm']['protocol']: try: scan_info_dict[rec['@term']] = rec['#text'] except KeyError: scan_info_dict[rec['@term']] = None else: matched_doc = self.XML_collection.find_one({'idaxs.project.subject.study.series.seriesIdentifier': sid[1:], 'idaxs.project.subject.study.series.seriesLevelMeta.relatedImageDetail.originalRelatedImage.imageUID': iid[1:]}) for rec in matched_doc['idaxs']['project']['subject']['study']['series']['seriesLevelMeta']['relatedImageDetail'][ 'originalRelatedImage']['protocolTerm']['protocol']: try: scan_info_dict[rec['@term']] = rec['#text'] except KeyError: scan_info_dict[rec['@term']] = None return scan_info_dict elif source == 'dcm': iid = processingItemObj.i_identifier get_raw_folder_sql = "SELECT RAW_FOLDER FROM Conversion WHERE I_IDENTIFIER = '{0}'".format(iid) raw_path = self.DBClient.executeAllResults(get_raw_folder_sql)[0][0] dcm_0 = os.listdir(raw_path)[0] scan_info_dict = self.getDCMScannerDetails('{0}/{1}'.format(raw_path, dcm_0)) return scan_info_dict else: raise NotImplementedError def getDCMScannerDetails(self, dcm_file): cmd = "/data/data02/sulantha/bin/gdcmbin/bin/gdcmdump -d {0} ".format(dcm_file) proc = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE) tmp = proc.stdout.read().decode("utf-8") manufacturer = None model = None pet_version = None rows = None columns = None slices = None pet_scanner_desc = None pet_scanner_mfn = None for line in tmp.split("\n"): if '(0008,0070)' in line: 
manufacturer = line.split('\t')[-1].strip() for line in tmp.split("\n"): if '(0008,1090)' in line: model = line.split('\t')[-1].strip() for line in tmp.split("\n"): if '(0028,0010)' in line: rows = line.split('\t')[-1].strip() for line in tmp.split("\n"): if '(0028,0011)' in line: columns = line.split('\t')[-1].strip() for line in tmp.split("\n"): if '(0054,0081)' in line: slices = line.split('\t')[-1].strip() for line in tmp.split("\n"): if '(0009,1001)' in line: pet_version = line.split('\t')[-1].strip() for line in tmp.split("\n"): if '(0009,1011)' in line: pet_scanner_desc = line.split('\t')[-1].strip() for line in tmp.split("\n"): if '(0009,1012)' in line: pet_scanner_mfn = line.split('\t')[-1].strip() return dict(dcm_file=dcm_file, manufacturer=manufacturer, model=model, pet_version=pet_version, rows=rows, columns=columns, slices=slices, pet_scanner_desc=pet_scanner_desc, pet_scanner_mfn=pet_scanner_mfn) def getBlurVals(self, scanner_type, study): if study == 'DIAN': scanners_dict_lower = {k.lower(): v for k, v in pc.DIAN_scanner_specific_blurs.items()} list_of_scanners = list(scanners_dict_lower.keys()) scanner_str = '{0}_{1}'.format( scanner_type['manufacturer'] if scanner_type['manufacturer'] else scanner_type['pet_scanner_mfn'], scanner_type['model'] if scanner_type['model'] else scanner_type['pet_scanner_desc']) if scanner_type['model'] == 'HR+': return scanners_dict_lower['hr+'] elif scanner_type['model'] == 'HRRT': return scanners_dict_lower['hrrt'] else: best_match = difflib.get_close_matches(scanner_str.lower(), list_of_scanners, 1, 0.3) if not best_match: return None, None, None return scanners_dict_lower[best_match[0]] def getBlurringParams(self, processingItemObj): if processingItemObj.study == 'ADNI': source = 'xml' elif processingItemObj.study == 'DIAN': source = 'dcm' scanner_type = self.getScannerType(processingItemObj, source) return self.getBlurVals(scanner_type, processingItemObj.study)
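# --- Illustrative sketch (not part of the pipeline) --------------------------------
# getBlurVals() lowercases the per-scanner blur table and, when there is no exact
# model match, falls back to difflib fuzzy matching on "<manufacturer>_<model>". The
# table and numbers below are made up; the real values live in
# pc.DIAN_scanner_specific_blurs.
import difflib

scanner_blurs = {'SIEMENS_HR+': (6.0, 6.0, 6.0), 'SIEMENS_HRRT': (2.5, 2.5, 2.5)}   # hypothetical
scanners_dict_lower = {k.lower(): v for k, v in scanner_blurs.items()}

scanner_str = '{0}_{1}'.format('Siemens/CTI', 'HR +')    # as assembled from the DICOM/XML headers
best_match = difflib.get_close_matches(scanner_str.lower(), list(scanners_dict_lower.keys()), 1, 0.3)
blur_x, blur_y, blur_z = scanners_dict_lower[best_match[0]] if best_match else (None, None, None)
# ------------------------------------------------------------------------------------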
class ADNI_V2_AV45: def __init__(self): self.DBClient = DbUtils() self.MatchDBClient = DbUtils(database=pc.ADNI_dataMatchDBName) self.PETHelper = PETHelper() def process(self, processingItem): processingItemObj = ProcessingItemObj(processingItem) matching_t1 = ADNI_T1_Helper().getMatchingT1(processingItemObj) if not matching_t1: PipelineLogger.log('root', 'error', 'PET cannot be processed no matching T1 found. - {0} - {1} - {2}.'.format(processingItemObj.subject_rid, processingItemObj.modality, processingItemObj.scan_date)) return 0 processed = ADNI_T1_Helper().checkProcessed(matching_t1) if not processed: PipelineLogger.log('root', 'error', 'PET cannot be processed due to matching T1 not being processed - {0}'.format(matching_t1)) return 0 else: PipelineLogger.log('root', 'INFO', '+++++++++ PET ready to be processed. Will check for initial xfm. - {0} - {1}'.format(processingItemObj.subject_rid, processingItemObj.scan_date)) if processingItemObj.manual_xfm == 'Req_man_reg': coregDone = self.PETHelper.checkIfAlreadyDone(processingItemObj, matching_t1) if coregDone: manualXFM = coregDone setPPTableSQL = "UPDATE {0}_{1}_Pipeline SET MANUAL_XFM = '{2}' WHERE RECORD_ID = {3}".format(processingItemObj.study, processingItemObj.modality, manualXFM, processingItemObj.table_id) self.DBClient.executeNoResult(setPPTableSQL) self.processPET(processingItemObj, processed) else: self.PETHelper.requestCoreg(processingItemObj, matching_t1) PipelineLogger.log('root', 'INFO', 'Manual XFM was not found. Request to create one may have added. - {0} - {1}'.format(processingItemObj.subject_rid, processingItemObj.scan_date)) return 0 else: self.processPET(processingItemObj, processed) def getScanType(self, processingItemObj): r = self.DBClient.executeAllResults("SELECT SCAN_TYPE FROM Conversion WHERE STUDY = '{0}' AND RID = '{1}' " "AND SCAN_DATE = '{2}' AND S_IDENTIFIER = '{3}' " "AND I_IDENTIFIER = '{4}'".format(processingItemObj.study, processingItemObj.subject_rid, processingItemObj.scan_date, processingItemObj.s_identifier, processingItemObj.i_identifier)) return r[0][0] def processPET(self, processingItemObj, matchT1Path): petFileName = '{0}/{1}_{2}{3}{4}{5}_{6}.mnc'.format(processingItemObj.converted_folder, processingItemObj.study, processingItemObj.subject_rid, processingItemObj.scan_date.replace('-', ''), processingItemObj.s_identifier, processingItemObj.i_identifier, self.getScanType(processingItemObj)) processedFolder = '{0}/processed'.format(processingItemObj.root_folder) logDir = '{0}/logs'.format(processingItemObj.root_folder) PipelineLogger.log('manager', 'info', 'PET processing starting for {0}'.format(petFileName)) try: distutils.dir_util.mkpath(logDir) except Exception as e: PipelineLogger.log('manager', 'error', 'Error in creating log folder \n {0}'.format(e)) return 0 id = '{0}{1}{2}{3}'.format(processingItemObj.subject_rid, processingItemObj.scan_date.replace('-', ''), processingItemObj.s_identifier, processingItemObj.i_identifier) paramStrd = ast.literal_eval(processingItemObj.parameters) paramStrt = ' '.join(['[\"{0}\"]=\"{1}\"'.format(k, v) for k,v in paramStrd.items()]) paramStr = '({0})'.format(paramStrt) petCMD = "source /opt/minc-toolkit/minc-toolkit-config.sh; Pipelines/ADNI_AV45/ADNI_V2_AV45_Process {0} {1} {2} {3} {4} {5} '{6}' {7} {8}".format(id, petFileName, processedFolder, matchT1Path, 'auto' if processingItemObj.manual_xfm == '' else processingItemObj.manual_xfm, logDir, paramStr,socket.gethostname(), 50500) try: processedFolder_del = 
'{0}/processed_del'.format(processingItemObj.root_folder) os.rename(processedFolder, processedFolder_del) shutil.rmtree(processedFolder_del) except Exception as e: PipelineLogger.log('manager', 'error', 'Error in deleting old processing folder. \n {0}'.format(e)) try: distutils.dir_util.mkpath(processedFolder) except Exception as e: PipelineLogger.log('manager', 'error', 'Error in creating processing folder. \n {0}'.format(e)) return 0 PipelineLogger.log('manager', 'debug', 'Command : {0}'.format(petCMD)) p = subprocess.Popen(petCMD, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, executable='/bin/bash') out, err = p.communicate() PipelineLogger.log('manager', 'debug', 'Process Log Output : \n{0}'.format(out)) PipelineLogger.log('manager', 'debug', 'Process Log Err : \n{0}'.format(err)) QSubJobHandler.submittedJobs[id] = QSubJob(id, '02:00:00', processingItemObj, 'av45') return 1
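# --- Illustrative sketch (not part of the pipeline) --------------------------------
# Before re-running a scan, the PET pipelines reset the output directory by renaming
# it (so a half-deleted tree never sits at the live path), removing the renamed copy,
# and recreating an empty folder. reset_processed_folder() is a hypothetical helper
# showing that pattern in isolation.
import os
import shutil
import distutils.dir_util

def reset_processed_folder(root_folder):
    processed = '{0}/processed'.format(root_folder)
    processed_del = '{0}/processed_del'.format(root_folder)
    try:
        os.rename(processed, processed_del)
        shutil.rmtree(processed_del)
    except Exception as e:
        print('Nothing to remove or removal failed: {0}'.format(e))
    distutils.dir_util.mkpath(processed)
    return processed
# ------------------------------------------------------------------------------------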