def process(self, processingItem):
        processingItemObj = ProcessingItemObj(processingItem)
        matching_t1 = ADNI_T1_Helper().getMatchingT1(processingItemObj)
        if not matching_t1:
        PipelineLogger.log('root', 'error', 'PET cannot be processed: no matching T1 found. - {0} - {1} - {2}.'.format(processingItemObj.subject_rid, processingItemObj.modality, processingItemObj.scan_date))
            return 0

        processed = ADNI_T1_Helper().checkProcessed(matching_t1)
        if not processed:
            PipelineLogger.log('root', 'error', 'PET cannot be processed due to matching T1 not being processed - {0}'.format(matching_t1))
            return 0
        else:
            PipelineLogger.log('root', 'INFO', '+++++++++ PET ready to be processed. Will check for xfm. - {0} - {1}'.format(processingItemObj.subject_rid, processingItemObj.scan_date))
            if processingItemObj.manual_xfm == '':
                manualXFM = self.PETHelper.getManualXFM(processingItemObj, matching_t1)
                processingItemObj.manual_xfm = manualXFM
            elif processingItemObj.manual_xfm == 'Req_man_reg':
                coregDone = self.PETHelper.checkIfAlreadyDone(processingItemObj, matching_t1)
                if coregDone:
                    manualXFM = coregDone
                    setPPTableSQL = "UPDATE {0}_{1}_Pipeline SET MANUAL_XFM = '{2}' WHERE RECORD_ID = {3}".format(processingItemObj.study, processingItemObj.modality, manualXFM, processingItemObj.table_id)
                    self.DBClient.executeNoResult(setPPTableSQL)
                else:
                    self.PETHelper.requestCoreg(processingItemObj, matching_t1)
                    PipelineLogger.log('root', 'INFO', 'Manual XFM was not found. A request to create one may have been added. - {0} - {1}'.format(processingItemObj.subject_rid, processingItemObj.scan_date))
                    return 0
            else:
                manualXFM = processingItemObj.manual_xfm
            if manualXFM:
                self.processPET(processingItemObj, processed)
            else:
                PipelineLogger.log('root', 'INFO', 'Manual XFM was not found. A request to create one may have been added. - {0} - {1}'.format(processingItemObj.subject_rid, processingItemObj.scan_date))
                return 0
Example n. 2
 def determineScanType(self, scanTypeRaw):
     try:
         return arc.scanTypeDict[scanTypeRaw]
     except KeyError:
         if 'FDG' in scanTypeRaw:
             PipelineLogger.log('root', 'error', 'Scan Type not defined : {0} -> Close match FDG...'.format(scanTypeRaw))
             return 'FDG'
         if 'AV45' in scanTypeRaw:
             PipelineLogger.log('root', 'error', 'Scan Type not defined : {0} -> Close match AV45...'.format(scanTypeRaw))
             return 'AV45'
         if 'AV1451' in scanTypeRaw or 'AV-1451' in scanTypeRaw or 'AV_1451' in scanTypeRaw:
             PipelineLogger.log('root', 'error', 'Scan Type unidentified : {0} -> Close match AV1451...'.format(scanTypeRaw))
             return 'AV1451'
         if 'MPRAGE' in scanTypeRaw.upper():
             PipelineLogger.log('root', 'error', 'Scan Type unidentified : {0} -> Close match MPRAGE...'.format(scanTypeRaw))
             return 'MPRAGE'
         else:
             PipelineLogger.log('root', 'error', 'Scan Type unidentified : {0} -> No match...'.format(scanTypeRaw))
             return 'unknown'
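For illustration, hypothetical inputs and the values the fallback chain above would return, assuming none of them is an exact key in arc.scanTypeDict:

# determineScanType('ADNI_FDG-uniform')   -> 'FDG'     (substring fallback)
# determineScanType('AV-1451 dynamic')    -> 'AV1451'  (hyphenated spelling)
# determineScanType('Accelerated MPRAGE') -> 'MPRAGE'  (case-insensitive match)
# determineScanType('Localizer')          -> 'unknown' (no match at all)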
 def checkMncFile(self, mncFile):
     cmd = Config.ConverterConfig.mincSource_exec + '; mincinfo ' + mncFile + ' | grep \"time\" '
     p = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
     out, err = p.communicate()
     if not out: # If no output, string empty
         PipelineLogger.log('converter', 'debug', mncFile + ' does not have a time axis!')
         os.remove(mncFile)
 def checkNative(self, processingItemObj):
     orig_ScanType = self.getScanType(processingItemObj)
     converted_file = '{0}/{1}_{2}{3}{4}{5}_{6}.mnc'.format(
         processingItemObj.converted_folder, processingItemObj.study,
         processingItemObj.subject_rid,
         processingItemObj.scan_date.replace('-', ''),
         processingItemObj.s_identifier, processingItemObj.i_identifier,
         orig_ScanType)
     nativeFolder = '{0}/native'.format(processingItemObj.root_folder)
     nativeFileName = '{0}/{1}_{2}{3}{4}{5}_{6}.mnc'.format(
         nativeFolder, processingItemObj.study,
         processingItemObj.subject_rid,
         processingItemObj.scan_date.replace('-', ''),
         processingItemObj.s_identifier, processingItemObj.i_identifier,
         processingItemObj.modality.lower())
     if not os.path.exists(nativeFileName):
         try:
             distutils.dir_util.mkpath(nativeFolder)
             shutil.copyfile(converted_file, nativeFileName)
         except Exception as e:
             PipelineLogger.log(
                 'manager', 'error',
                 'Error in creating folders or copying native file. \n {0}'.
                 format(e))
             PipelineLogger.log(
                 'manager', 'error',
                 'Setting to restart conversion. \n {0}'.format(e))
             sql = "UPDATE Conversion SET CONVERTED = 0, SKIP = 0 WHERE S_IDENTIFIER = '{0}' AND I_IDENTIFIER = '{1}'".format(
                 processingItemObj.s_identifier,
                 processingItemObj.i_identifier)
             self.DBClient.executeNoResult(sql)
             return None
     return nativeFileName
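distutils.dir_util.mkpath works here, but distutils is deprecated and was removed in Python 3.12; the same copy step needs only the standard library. A sketch:

import os
import shutil

# exist_ok avoids raising when the native folder already exists.
os.makedirs(nativeFolder, exist_ok=True)
shutil.copyfile(converted_file, nativeFileName)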
    def createNewScanSession(self,down_most_folder, filelist):
        # Return parts of the folder path, the ones of interest
        try:
            folder = down_most_folder.replace(self.root_folder,"")
            folder_parts = folder.split("/")  # List containing each parts/folders of the full path
            filename_parts = filelist[0].split("_")  # Takes the first filename and create a list of its parts

            rid = folder_parts[1][-4:]  # Get the last 4 characters
            if re.search('[a-zA-Z]', rid) is not None:
                rid = filename_parts[3]
                if re.search('[a-zA-Z]', rid) is not None:
                    PipelineLogger.log('root', 'error', 'File recurse error on Folder RID cannot be identified. - {0}, \n Filelist - {1}'.format(folder, filelist))
                    return None
            scan_type = self.determineScanType(folder_parts[-3])

            scan_date = folder_parts[-2].split('_')[0]
            scan_time = folder_parts[-2].split('_', 1)[-1].replace("_", ":")
            s_identifier = filename_parts[-2]
            i_identifier = filename_parts[-1].split('.', 1)[0]
            file_type = self.determineExtension(filename_parts)
            download_folder = down_most_folder
            raw_folder = '{0}/{1}/{2}/{3}/{4}_{5}_{6}/raw'.format(sc.studyDatabaseRootDict[self.study], 'ADNI', scan_type, rid, scan_date, s_identifier, i_identifier)

            newScanSession = ScanSession\
                ('ADNI', rid, scan_type, scan_date, scan_time,
                 s_identifier, i_identifier, download_folder, raw_folder, file_type)
            if scan_type == 'unknown':
                newScanSession.printObject()
                return None
            return newScanSession
        except Exception:
            # Any parsing failure means the folder layout was unexpected; skip this folder.
            return None
    def convertRawData(self):
        def addTODB(result):
            if result['converted']:
                #### Add to corresponding table
                #self.conversionTable.insertFromConvertionObj(convertionObj, self.version)
                self.conversionTable.setConvertedTrue(result['obj'])
            else:
                PipelineLogger.log(
                    'manager', 'error',
                    'File conversion Error : {0} -> {1}. Moving to next...'.
                    format(result['obj'].raw_folder,
                           result['obj'].converted_folder))
                self.conversionTable.setConvertedFailed(result['obj'])

        for study in self.studyList:
            totalToConv = len(self.toConvertObjListDict[study])
            PipelineLogger.log(
                'manager', 'info',
                'Conversion started for study {0} - Total to be converted : {1}'
                .format(study, totalToConv))
            results = []
            for convObj in self.toConvertObjListDict[study]:
                convertedResult = self.pool.apply_async(
                    self.raw2mincConverter.convert2minc,
                    args=(convObj, ),
                    callback=addTODB)
                results.append(convertedResult)
            for r in results:
                r.wait()
 def restart_mongo(self):
     try:
         if self.old_pid > 0:
             os.kill(self.old_pid, 9)
         mongo_Cmd = '/data/data03/MongoDB/mongodb/bin/mongod --dbpath /data/data03/MongoDB/data/db/'
         Popen(mongo_Cmd, shell=True)
     except OSError as e:
         PipelineLogger.log('root', 'exception', 'MongoDB cannot be stopped or started.\n {0}\n'.format(e))
Example n. 9
 def restart_mongo(self):
     try:
         if self.old_pid > 0:
             os.kill(self.old_pid, 9)
         mongo_Cmd = '/data/data03/MongoDB/mongodb/bin/mongod --logpath Logs/MongoLog.log --dbpath /data/data03/MongoDB/data/db/'
         Popen(mongo_Cmd, shell=True)
     except OSError as e:
         PipelineLogger.log('root', 'exception', 'MongoDB cannot be stopped or started.\n {0}\n'.format(e))
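One caveat in both restart_mongo variants: os.kill(pid, 9) sends SIGKILL, which gives mongod no chance to flush journals or release its lock file. MongoDB documents SIGTERM as the clean-shutdown signal, so a gentler variant of the kill step (a sketch; the surrounding try/except stays as-is) would be:

import os
import signal

# Ask mongod to shut down cleanly instead of killing it outright.
if self.old_pid > 0:
    os.kill(self.old_pid, signal.SIGTERM)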
 def addTODB(result):
     if result['converted']:
         #### Add to corresponding table
         #self.conversionTable.insertFromConvertionObj(convertionObj, self.version)
         self.conversionTable.setConvertedTrue(result['obj'])
     else:
         PipelineLogger.log('manager', 'error', 'File conversion Error : {0} -> {1}. Moving to next...'.format(result['obj'].raw_folder, result['obj'].converted_folder))
         self.conversionTable.setConvertedFailed(result['obj'])
 def runNiak(self, processingItemObj):
     # Run NIAK unless the item is flagged to be skipped.
     if not processingItemObj.skip:
         self.niak.process(processingItemObj)
    def convert_dicom(self, convertionObj):
        rawFolder = convertionObj.raw_folder
        outFile = '{0}/{1}_{2}{3}{4}{5}_{6}.mnc'.format(convertionObj.converted_folder, convertionObj.study,
                                                        convertionObj.rid, convertionObj.scan_date.replace('-', ''),
                                                        convertionObj.s_identifier, convertionObj.i_identifier,
                                                        convertionObj.scan_type)

        self.createNewFolder(convertionObj.converted_folder) # Create output folder
        tempFolder = convertionObj.converted_folder + '/../temp'  # Generate path for temp folder
        self.createNewFolder(tempFolder)  # Create temp folder

        # Move all the non-dicom stuff out of the original folder into tempFolder
        otherFiles = self.removeOtherFilesInFolder(rawFolder, '.dcm', tempFolder)
        # Run dcm2nii
        cmd = Config.ConverterConfig.dcmToNii_exec + ' -a N -e N -p N -g N -o ' + tempFolder + '/ -v Y ' + rawFolder
        PipelineLogger.log('converter', 'info',
                           'dcm2nii conversion starting for : {0} - {1} - {2} - {3}'.format(convertionObj.study,
                                                                                         convertionObj.rid,
                                                                                         convertionObj.scan_date,
                                                                                         convertionObj.scan_type))
        PipelineLogger.log('converter', 'debug', 'Command : {0}'.format(cmd))
        self.runShellCommand(cmd)
        # Move all the non-dicom stuff back into the original folder
        self.addBackOtherFiles(rawFolder, otherFiles, tempFolder)

        # Run nii2mnc
        fake_command = '{0} {1} {2}/../'.format(Config.ConverterConfig.niiToMnc_exec, rawFolder, convertionObj.converted_folder)
        PipelineLogger.log('converter', 'info',
                           'nii2mnc conversion starting for : {0} - {1} - {2} - {3}'.format(convertionObj.study,
                                                                                         convertionObj.rid,
                                                                                         convertionObj.scan_date,
                                                                                         convertionObj.scan_type))
        PipelineLogger.log('converter', 'debug', 'Command : {0}'.format(fake_command))
        iterator = 1
        for niiFile in glob.glob(tempFolder + '/*.nii'):
            tempOutFile = outFile.replace('.mnc', '_run' + str(iterator) + '.mnc')
            cmd = '%s %s %s' % (Config.ConverterConfig.niiToMnc_exec, niiFile, tempOutFile)
            self.runShellCommand(cmd)
            self.checkMncFile(tempOutFile) # Check whether the fMRI files have a time component/axis
            iterator += 1
        # Delete Temporary Folder
        self.deleteFolder(tempFolder)

        # Check how many mnc files were generated
        mncList = []
        for root, dirnames, filenames in os.walk(convertionObj.converted_folder):
            for filename in fnmatch.filter(filenames, '*.mnc'):
                mncList.append(os.path.join(root, filename))
        if len(mncList) == 0:
            PipelineLogger.log('converter', 'error',
                               'MINC Conversion unsuccessful : Check log for : {0} - {1} - {2} - {3}'.format(
                                   convertionObj.study, convertionObj.rid, convertionObj.scan_date,
                                   convertionObj.scan_type))
            return 0
        else:
            return 1
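glob.glob returns files in arbitrary, OS-dependent order, so the _runN suffixes assigned in the loop above are not guaranteed to be stable across conversions; sorting first makes the numbering deterministic. A sketch:

# Deterministic run numbering: sort the NIfTI files before converting.
for iterator, niiFile in enumerate(sorted(glob.glob(tempFolder + '/*.nii')), start=1):
    tempOutFile = outFile.replace('.mnc', '_run{0}.mnc'.format(iterator))
    self.runShellCommand('{0} {1} {2}'.format(Config.ConverterConfig.niiToMnc_exec, niiFile, tempOutFile))
    self.checkMncFile(tempOutFile)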
 def removeCommaIfThere(destFolder):
     PipelineLogger.log('manager', 'debug',
                            'Removing unsupported chars from file names...... :')
     for dpath, dnames, fnames in os.walk(destFolder):
         for f in fnames:
             os.chdir(dpath)
             if ',' in f:
                 os.rename(f, f.replace(',', ''))
     PipelineLogger.log('manager', 'debug',
                            'Removing unsupported chars from file names done ...:')
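os.chdir in removeCommaIfThere changes the working directory for the whole process, which is risky while other threads or the conversion pool are running; joining the directory path avoids the chdir entirely. A sketch:

import os

# Rename comma-containing files in place without touching the working directory.
for dpath, dnames, fnames in os.walk(destFolder):
    for f in fnames:
        if ',' in f:
            os.rename(os.path.join(dpath, f), os.path.join(dpath, f.replace(',', '')))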
 def avgTime(self, inputMNC, outputMNC):
     avgCMD = "/opt/minc-toolkit/bin/mincaverage -short -avgdim time {0} {1}".format(inputMNC, outputMNC)
     p = subprocess.Popen(avgCMD, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
     out, err = p.communicate()
     PipelineLogger.log("converter", "debug", "Averaging Time Output : \n{0}".format(out))
     PipelineLogger.log("converter", "debug", "Averaging Time Err : \n{0}".format(err))
     if os.path.exists(outputMNC):
         return 1
     else:
         return 0
 def runShellCommand(self, cmd):
     p = subprocess.Popen(cmd,
                          shell=True,
                          stdout=subprocess.PIPE,
                          stderr=subprocess.PIPE)
     out, err = p.communicate()
     PipelineLogger.log('converter', 'debug',
                        'Conversion Log Output : \n{0}'.format(out))
     PipelineLogger.log('converter', 'debug',
                        'Conversion Log Err : \n{0}'.format(err))
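Popen.communicate returns bytes here, so the logged output is a bytes repr. On Python 3.7+ the same pattern is shorter with subprocess.run, which also makes the decoding explicit. A sketch, assuming the same PipelineLogger as in the snippets above:

import subprocess

def run_shell_command(cmd):
    # capture_output needs Python 3.7+; text=True decodes stdout/stderr to str.
    result = subprocess.run(cmd, shell=True, capture_output=True, text=True)
    PipelineLogger.log('converter', 'debug', 'Conversion Log Output : \n{0}'.format(result.stdout))
    PipelineLogger.log('converter', 'debug', 'Conversion Log Err : \n{0}'.format(result.stderr))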
    def createNewScanSession(self, down_most_folder, filelist):
        # Return parts of the folder path, the ones of interest
        folder = down_most_folder.replace(self.root_folder, "")
        xmllist = [x for x in filelist if "xml" in x]
        filelist = [x for x in filelist if "xml" not in x]
        if len(filelist) == 0:  # If no file in folder, ignore and skip
            return None
        try:
            folder_parts = folder.split("/")  # List containing each parts/folders of the full path
            filename_parts = filelist[0].split("_")  # Takes the first filename and create a list of its parts
            xmlFileS = open("{0}/{1}".format(down_most_folder, xmllist[0])).read()
            xmlDict = xmltodict.parse(xmlFileS)

            rid = xmlDict["metadata"]["subject"]["@id"].split("_")[-1]
            if re.search("[a-zA-Z]", rid) is not None:
                rid = filename_parts[3]
                if re.search("[a-zA-Z]", rid) is not None:
                    PipelineLogger.log(
                        "root",
                        "error",
                        "File recurse error on Folder RID cannot be identified. - {0}, \n Filelist - {1}".format(
                            folder, filelist
                        ),
                    )
                    return None

            s_identifier = xmlDict["metadata"]["series"]["@uid"]
            i_identifier = xmlDict["metadata"]["image"]["@uid"]
            scan_type = self.determineScanType(folder_parts[-3], self.study, rid, s_identifier, i_identifier)
            scan_date = folder_parts[-2].split("_")[0]
            scan_time = folder_parts[-2].split("_", 1)[-1].replace("_", ":")
            file_type = self.determineExtension(filename_parts)
            download_folder = down_most_folder
            raw_folder = "{0}/{1}/{2}/{3}/{4}_{5}_{6}/raw".format(
                sc.studyDatabaseRootDict[self.study], self.study, scan_type, rid, scan_date, s_identifier, i_identifier
            )
        except Exception:
            PipelineLogger.log(
                "root", "exception", "File recurse error on Folder - {0}, \n Filelist - {1}".format(folder, filelist)
            )
            return None

        newScanSession = ScanSession(
            self.study,
            rid,
            scan_type,
            scan_date,
            scan_time,
            s_identifier,
            i_identifier,
            download_folder,
            raw_folder,
            file_type,
        )
        return newScanSession
 def checkMncFile(self, mncFile):
     cmd = Config.ConverterConfig.mincSource_exec + '; mincinfo ' + mncFile + ' | grep \"time\" '
     p = subprocess.Popen(cmd,
                          shell=True,
                          stdout=subprocess.PIPE,
                          stderr=subprocess.PIPE)
     out, err = p.communicate()
     if not out:  # If no output, string empty
         PipelineLogger.log('converter', 'debug',
                            mncFile + ' does not have a time axis!')
         os.remove(mncFile)
    def createNewScanSession(self, tup):
        down_most_folder, filelist = tup
        # Return parts of the folder path, the ones of interest
        folder = down_most_folder.replace(self.root_folder, "")
        filelist = [x for x in filelist if 'xml' not in x]
        if len(filelist) == 0:  # If no file in folder, ignore and skip
            return None
        try:
            folder_parts = folder.split(
                "/")  # List containing each parts/folders of the full path
            filename_parts = filelist[0].split(
                ".")  # Takes the first filename and create a list of its parts

            rid = filename_parts[0]
            file_type = self.determineExtension(filename_parts)

            scan_date_str, scan_time_str = self.getScanDateTimeDCMHeader(
                '{0}/{1}'.format(down_most_folder, filelist[0]))

            scan_date = '{0}-{1}-{2}'.format(scan_date_str[:4],
                                             scan_date_str[4:6],
                                             scan_date_str[6:8])
            scan_time = '{0}:{1}:{2}'.format(scan_time_str[:2],
                                             scan_time_str[2:4],
                                             scan_time_str[4:6])
            visit = self.get_visit_info('{0}/{1}'.format(
                down_most_folder, filelist[0]))
            if not visit:
                return None
            scan_type = self.determineScanType('{0}/{1}'.format(
                down_most_folder, filelist[0]))
            if not scan_type or scan_type not in self.only_allowed_scan_types_to_move:
                return None
            i_identifier = '{0}{1}{2}{3}x{4}'.format(
                rid, scan_type, visit, scan_date_str,
                re.sub(r'[\W_]+', '', folder_parts[-2]))
            s_identifier = 'ySy'
            download_folder = down_most_folder
            raw_folder = '{0}/{1}/{2}/{3}/{4}_{5}_{6}/raw'.format(
                sc.studyDatabaseRootDict[self.study], self.study, scan_type,
                rid, scan_date, s_identifier, i_identifier)
        except Exception as e:
            PipelineLogger.log(
                'root', 'exception',
                'File recurse error on Folder - {0}, \n Filelist - {1}'.format(
                    folder, filelist))
            PipelineLogger.log('root', 'exception',
                               'Exception - {0}'.format(e))
            return None

        newScanSession = ScanSession(self.study, rid, scan_type, scan_date,
                                     scan_time, s_identifier, i_identifier,
                                     download_folder, raw_folder, file_type)
        return newScanSession
 def removeCommaIfThere(destFolder):
     PipelineLogger.log(
         'manager', 'debug',
         'Removing unsupported chars from file names...... :')
     for dpath, dnames, fnames in os.walk(destFolder):
         for f in fnames:
             os.chdir(dpath)
             if ',' in f:
                 os.rename(f, f.replace(',', ''))
     PipelineLogger.log(
         'manager', 'debug',
         'Removing unsupported chars from file names done ...:')
 def addTODB(result):
     if result['converted']:
         #### Add to corresponding table
         #self.conversionTable.insertFromConvertionObj(convertionObj, self.version)
         self.conversionTable.setConvertedTrue(result['obj'])
     else:
         PipelineLogger.log(
             'manager', 'error',
             'File conversion Error : {0} -> {1}. Moving to next...'.
             format(result['obj'].raw_folder,
                    result['obj'].converted_folder))
         self.conversionTable.setConvertedFailed(result['obj'])
Example n. 21
    def findCorrespondingMRI(self, processingItemObj):
        # Find matching T1
        matching_t1 = ADNI_T1_Fmri_Helper().getMatchingT1(processingItemObj)
        if not matching_t1:
            return 0

        # Find out whether the T1 has been processed
        processed = ADNI_T1_Fmri_Helper().checkProcessed(matching_t1)
        if not processed:
            PipelineLogger.log('root', 'error', 'FMRI cannot be processed due to matching T1 not being processed.')
            return 0
        else:
            return processed
Example n. 22
 def run(self):
     try:
         self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
         self.sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
         self.sock.bind((socket.gethostname(), 50500))
         self.sock.settimeout(300)
         self.sock.listen(1000)
         PipelineLogger.log('manager', 'info',
                            ' ++++++++ QSub Job Handler started.')
         PipelineLogger.log(
             'manager', 'info',
             ' ++++++++ QSub Job Handler listening in Host : {0} at Port : {1}.'
             .format(socket.gethostname(), 50500))
         while not self.QUIT and self.checkJobs():
             try:
                 conn = self.sock.accept()[0]
                 thread = threading.Thread(target=self.doWork,
                                           args=(conn, ))
                 thread.start()
             except socket.timeout:
                 continue
     except Exception as e:
         PipelineLogger.log('manager', 'exception', e)
         PipelineLogger.log(
             'manager', 'error',
             'Cannot create QSubJobHandler... Will not listen for jobs.')
         del self.sock
 def convertMinc(self, convertionObj):
     rawFile = '{0}/*.mnc'.format(convertionObj.raw_folder)
     outFile = '{0}/{1}_{2}{3}{4}{5}_{6}.mnc'.format(convertionObj.converted_folder, convertionObj.study,
                                                     convertionObj.rid, convertionObj.scan_date.replace('-', ''),
                                                     convertionObj.s_identifier, convertionObj.i_identifier,
                                                     convertionObj.scan_type)
     # Move files
     cmd = 'mv {0} {1}'.format(rawFile, outFile)
     PipelineLogger.log('converter', 'info',
                        'MINC transfer starting for : {0} - {1} - {2} - {3}'.format(convertionObj.study,
                                                                                      convertionObj.rid,
                                                                                      convertionObj.scan_date,
                                                                                      convertionObj.scan_type))
     PipelineLogger.log('converter', 'debug', 'Command : {0}'.format(cmd))
     self.runShellCommand(cmd)
    def createNewScanSession(self, down_most_folder, filelist):
        # Return parts of the folder path, the ones of interest
        folder = down_most_folder.replace(self.root_folder, "")
        xmllist = [x for x in filelist if 'xml' in x]
        filelist = [x for x in filelist if 'xml' not in x]
        if len(filelist) == 0:  # If no file in folder, ignore and skip
            return None
        try:
            folder_parts = folder.split(
                "/")  # List containing each parts/folders of the full path
            filename_parts = filelist[0].split(
                "_")  # Takes the first filename and create a list of its parts
            xmlFileS = open('{0}/{1}'.format(down_most_folder,
                                             xmllist[0])).read()
            xmlDict = xmltodict.parse(xmlFileS)

            rid = xmlDict['metadata']['subject']['@id'].split('_')[-1]
            if re.search('[a-zA-Z]', rid) is not None:
                rid = filename_parts[3]
                if re.search('[a-zA-Z]', rid) is not None:
                    PipelineLogger.log(
                        'root', 'error',
                        'File recurse error on Folder RID cannot be identified. - {0}, \n Filelist - {1}'
                        .format(folder, filelist))
                    return None

            s_identifier = xmlDict['metadata']['series']['@uid']
            i_identifier = xmlDict['metadata']['image']['@uid']
            scan_type = self.determineScanType(folder_parts[-3], self.study,
                                               rid, s_identifier, i_identifier)
            scan_date = folder_parts[-2].split('_')[0]
            scan_time = folder_parts[-2].split('_', 1)[-1].replace("_", ":")
            file_type = self.determineExtension(filename_parts)
            download_folder = down_most_folder
            raw_folder = '{0}/{1}/{2}/{3}/{4}_{5}_{6}/raw'.format(
                sc.studyDatabaseRootDict[self.study], self.study, scan_type,
                rid, scan_date, s_identifier, i_identifier)
        except Exception:
            PipelineLogger.log(
                'root', 'exception',
                'File recurse error on Folder - {0}, \n Filelist - {1}'.format(
                    folder, filelist))
            return None

        newScanSession = ScanSession\
            (self.study, rid, scan_type, scan_date, scan_time,
             s_identifier, i_identifier, download_folder, raw_folder, file_type)
        return newScanSession
Example n. 25
    def findCorrespondingMRI(self, processingItemObj):
        # Find Matching T1
        matching_t1 = ADNI_T1_Fmri_Helper().getMatchingT1(processingItemObj)
        if not matching_t1:
            return 0

        # Find out whether T1 has been processed
        processed = ADNI_T1_Fmri_Helper().checkProcessed(matching_t1)
        if not processed:
            PipelineLogger.log(
                'root', 'error',
                'FMRI cannot be processed due to matching T1 not being processed.'
            )
            return 0
        else:
            return processed
    def checkExternalJobs(self, study, modality):
        getExtJobSql = "SELECT * FROM externalWaitingJobs WHERE JOB_ID LIKE '{0}_{1}_%'".format(study, modality)
        extJobs = self.DBClient.executeAllResults(getExtJobSql)
        for job in extJobs:
            jobType = job[0].split('_')[-1]
            reportTable = job[1]
            tableID = job[0].split('_')[2]
            reportField = job[2]
            subjectScanID = job[0].split('_')[3]
            success = 0
            if jobType == 'CIVETRUN':
                if glob.glob('{0}/{1}_{2}_*'.format(PipelineConfig.T1TempDirForCIVETDownload, study, subjectScanID)):
                    getProcessRecSql = "SELECT * FROM Processing WHERE RECORD_ID IN (SELECT PROCESSING_TID FROM {0}_T1_Pipeline WHERE RECORD_ID = {1})".format(study, tableID)
                    processingEntry = self.DBClient.executeAllResults(getProcessRecSql)[0]

                    civetFolder = '{0}/civet'.format(processingEntry[8])

                    if os.path.exists(civetFolder):
                        shutil.rmtree(civetFolder)
                    try:
                        PipelineLogger.log('manager', 'info', 'Copying - {0} -> {1}'.format(glob.glob('{0}/{1}_{2}_*'.format(PipelineConfig.T1TempDirForCIVETDownload, study, subjectScanID))[0], civetFolder))
                        dir_util.copy_tree(glob.glob('{0}/{1}_{2}_*'.format(PipelineConfig.T1TempDirForCIVETDownload, study, subjectScanID))[0], civetFolder)
                        success = 1
                    except:
                        success = 0
                else:
                    continue
            else:
                PipelineLogger.log('manager', 'error', 'Unknown external job type - {}'.format(jobType))

            if success:
                updateSQL = "UPDATE {0} SET {1} = 1 WHERE RECORD_ID = {2}".format(reportTable, reportField, tableID)
                self.DBClient.executeNoResult(updateSQL)

                if jobType == 'CIVETRUN':
                    finishSQL = "UPDATE {0} SET FINISHED = 1 WHERE RECORD_ID = {1}".format(reportTable, tableID)
                    self.DBClient.executeNoResult(finishSQL)
                    modal_table = reportTable
                    modal_tableId = tableID
                    qcField = 'QC'
                    qctype = 'civet'
                    qcFolder = civetFolder
                    self.QCH.requestQC(study, modal_table, modal_tableId, qcField, qctype, qcFolder)


                rmSql = "DELETE FROM externalWaitingJobs WHERE JOB_ID LIKE '{0}_{1}_{2}_{3}_%'".format(study, modality, tableID, subjectScanID)
                self.DBClient.executeNoResult(rmSql)
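In checkExternalJobs, the same glob pattern is evaluated three times per CIVETRUN job; evaluating it once is faster and immune to the download folder changing between calls. A sketch of that branch:

# Hypothetical refactor: evaluate the glob once and reuse the match.
matches = glob.glob('{0}/{1}_{2}_*'.format(PipelineConfig.T1TempDirForCIVETDownload, study, subjectScanID))
if matches:
    PipelineLogger.log('manager', 'info', 'Copying - {0} -> {1}'.format(matches[0], civetFolder))
    dir_util.copy_tree(matches[0], civetFolder)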
 def convertMinc(self, convertionObj):
     rawFile = '{0}/*.mnc'.format(convertionObj.raw_folder)
     outFile = '{0}/{1}_{2}{3}{4}{5}_{6}.mnc'.format(
         convertionObj.converted_folder,
         convertionObj.study, convertionObj.rid,
         convertionObj.scan_date.replace('-',
                                         ''), convertionObj.s_identifier,
         convertionObj.i_identifier, convertionObj.scan_type)
     # Move files
     cmd = 'mv {0} {1}'.format(rawFile, outFile)
     PipelineLogger.log(
         'converter', 'info',
         'MINC transfer starting for : {0} - {1} - {2} - {3}'.format(
             convertionObj.study, convertionObj.rid,
             convertionObj.scan_date, convertionObj.scan_type))
     PipelineLogger.log('converter', 'debug', 'Command : {0}'.format(cmd))
     self.runShellCommand(cmd)
 def checkProcessed(self, t1Record):
     subject_id = t1Record[2]
     version = t1Record[11]
     s_id = t1Record[6]
     i_id = t1Record[7]
     checkProcessedSQL = "SELECT * FROM Processing WHERE RID = '{0}' AND VERSION = '{1}' AND S_IDENTIFIER = '{2}' AND I_IDENTIFIER = '{3}'".format(subject_id, version, s_id, i_id)
     results = self.DBClient.executeAllResults(checkProcessedSQL)
     if len(results) < 1:
         PipelineLogger.log('root', 'error', 'Matched T1 is not added to the processing table. {0} - {1} - {2}'.format(subject_id, s_id, i_id))
         return False
     else:
         result = results[0]
         if result[12] == 1 and result[13] == 1:
             return result[8]
         else:
             PipelineLogger.log('root', 'error', 'Matched T1 is not processed or QC failed. {0} - {1} - {2}'.format(subject_id, s_id, i_id))
             self.startProcessOFT1(result)
             return False
Example n. 30
    def process(self, processingItemObj):
        try:
            matlabScript, nativeFileName, niakFolder = self.readTemplateFile(processingItemObj)
            PipelineLogger.log('manager', 'info', 'NIAK starting for {0}'.format(nativeFileName))
        except:
            return 0

        # Delete the PIPE.lock file, if it exists
        if os.path.isfile("%s/preprocessing/logs/PIPE.lock" % niakFolder):
            os.remove("%s/preprocessing/logs/PIPE.lock" % niakFolder)

        success = self.executeScript(processingItemObj, matlabScript, niakFolder)

        #### Afterwards, if NIAK succeeded, concatenate all runs together using combiningRuns
        if False:  # concatenation currently disabled
            if success:
                self.combiningRuns(processingItemObj)
            else:
                # Error message assumed here; this branch is unreachable while the
                # "if False" guard above is in place.
                PipelineLogger.log('manager', 'error', 'NIAK failed for {0}'.format(nativeFileName))
    def convertRawData(self):

        def addTODB(result):
            if result['converted']:
                #### Add to corresponding table
                #self.conversionTable.insertFromConvertionObj(convertionObj, self.version)
                self.conversionTable.setConvertedTrue(result['obj'])
            else:
                PipelineLogger.log('manager', 'error', 'File conversion Error : {0} -> {1}. Moving to next...'.format(result['obj'].raw_folder, result['obj'].converted_folder))
                self.conversionTable.setConvertedFailed(result['obj'])

        for study in self.studyList:
            totalToConv = len(self.toConvertObjListDict[study])
            PipelineLogger.log('manager', 'info', 'Conversion started for study {0} - Total to be converted : {1}'.format(study, totalToConv))
            results = []
            for convObj in self.toConvertObjListDict[study]:
                convertedResult = self.pool.apply_async(self.raw2mincConverter.convert2minc, args=(convObj,), callback=addTODB)
                results.append(convertedResult)
            for r in results:
                r.wait()
 def copyFile(sourceFolder, destFolder):
     try:
         PipelineLogger.log('manager', 'debug', 'Raw Data Copying : {0} -> {1}'.format(sourceFolder, destFolder))
         distutils.dir_util.copy_tree(sourceFolder, destFolder, update=True)
         PipelineLogger.log('manager', 'debug',
                            'Raw Data Copy Done...... : {0} -> {1}'.format(sourceFolder, destFolder))
         removeCommaIfThere(destFolder)
         return 1
     except Exception as exc:
         PipelineLogger.log('manager', 'error',
                            'Raw Data Move Error : {0} -> {1}'.format(sourceFolder, destFolder))
         PipelineLogger.log('manager', 'exception', exc)
         return 0
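distutils.dir_util.copy_tree, used in copyFile above, was removed together with distutils in Python 3.12. shutil.copytree with dirs_exist_ok=True (Python 3.8+) is the closest standard-library replacement, though it has no equivalent of update=True and re-copies unconditionally. A sketch:

import shutil

# Re-copies everything; copytree has no update=True equivalent.
shutil.copytree(sourceFolder, destFolder, dirs_exist_ok=True)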
Example n. 33
 def requestCoreg(self, processingItemObj, matchedT1entry):
     PipelineLogger.log('root', 'INFO', '$$$$$$$ Manual XFM not found. Requesting manual XFM. - {0} - {1}'.format(processingItemObj.subject_rid, processingItemObj.scan_date))
     study = processingItemObj.study
     rid = processingItemObj.subject_rid
     pet_sid = processingItemObj.s_identifier
     pet_iid = processingItemObj.i_identifier
     t1_sid = matchedT1entry[6]
     t1_iid = matchedT1entry[7]
     pet_folder = processingItemObj.converted_folder
     pet_scanType = self.getScanType(processingItemObj)
     t1_civet_root = self.DBClient.executeAllResults("SELECT ROOT_FOLDER FROM Processing WHERE S_IDENTIFIER = '{0}' "
                                     "AND I_IDENTIFIER = '{1}' AND PROCESSED = 1".format(t1_sid, t1_iid))
     if len(t1_civet_root) > 0:
         t1_folder = t1_civet_root[0][0]
         t1_scanType = matchedT1entry[3]
         xfmFileName = '{0}_{1}_PET_{2}_{3}_T1_{4}_{5}'.format(study, rid, pet_sid, pet_iid, t1_sid, t1_iid)
         self.CoregHand.requestCoreg(study, rid, processingItemObj.modality, pet_folder, t1_folder, pet_scanType, t1_scanType, xfmFileName)
     else:
         PipelineLogger.log('root', 'error', 'T1 files not processed and cannot be added for manual coregistration - {0} - {1} - {2}'.format(processingItemObj.subject_rid,
                                                                                      processingItemObj.scan_date, matchedT1entry[10]))
 def determineScanType(self, scanTypeRaw):
     try:
         return arc.scanTypeDict[scanTypeRaw]
     except KeyError:
         if 'FDG' in scanTypeRaw:
             PipelineLogger.log('root', 'error', 'Scan Type not defined : {0} -> Close match FDG...'.format(scanTypeRaw))
             return 'FDG'
         if 'AV45' in scanTypeRaw:
             PipelineLogger.log('root', 'error', 'Scan Type not defined : {0} -> Close match AV45...'.format(scanTypeRaw))
             return 'AV45'
         if 'AV1451' in scanTypeRaw or 'AV-1451' in scanTypeRaw or 'AV_1451' in scanTypeRaw:
             PipelineLogger.log('root', 'error',
                                'Scan Type unidentified : {0} -> Close match AV1451...'.format(scanTypeRaw))
             return 'AV1451'
         if 'MPRAGE' in scanTypeRaw.upper():
             PipelineLogger.log('root', 'error', 'Scan Type unidentified : {0} -> Close match MPRAGE...'.format(scanTypeRaw))
             return 'MPRAGE'
         else:
             PipelineLogger.log('root', 'error', 'Scan Type unidentified : {0} -> No match...'.format(scanTypeRaw))
             return 'unknown'
Example n. 35
 def add_to_mongoDB(self, file):
     with open(file, 'r') as fh:
         f = fh.read()
     try:
         o = xmltodict.parse(f)
     except Exception:
         print('XML cannot be read - {0}.'.format(file))
         PipelineLogger.log('root', 'exception',
                            'XML cannot be read - {0}.'.format(file))
         return 0
     subId, SID, IID, valid = self.get_subid_sid_iid(file)
     if valid:
         _id = '{0}_{1}_{2}'.format(subId, SID, IID)
         o['_id'] = _id
         client = MongoClient('localhost', 27017)
         db = client.ADNI_Database
         collection = db.Scan_XML_Collection
         try:
             post_id = collection.insert_one(o).inserted_id
         except DuplicateKeyError:
             PipelineLogger.log('root', 'exception',
                                'XML already in DB - {0}.'.format(_id))
             post_id = _id
         try:
             shutil.move(file, moncfg.XMLProcessedArchivePath)
         except shutil.Error:
             PipelineLogger.log(
                 'root', 'Info',
                 'XML already in processed path  - {0}.'.format(_id))
             os.remove(file)
             post_id = _id
         return post_id
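The insert-then-catch-DuplicateKeyError pattern above can also be written as a single upsert against the same pymongo collection, which states the insert-or-overwrite intent directly:

# replace_one with upsert=True inserts when _id is new and overwrites otherwise,
# so no DuplicateKeyError handling is needed.
collection.replace_one({'_id': _id}, o, upsert=True)
post_id = _id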
Example n. 36
    def process(self, processingItem):
        processingItemObj = ProcessingItemObj(processingItem)

        if processingItemObj.beast_skip and processingItemObj.manual_skip and not processingItemObj.civet:
            self.runCivet(processingItemObj, 'N')
        elif processingItemObj.manual_mask and not processingItemObj.manual_skip and not processingItemObj.civet:
            self.runCivet(processingItemObj, 'M')
        elif processingItemObj.beast_mask == 0 and not processingItemObj.beast_skip and processingItemObj.beast_qc == 0 and not processingItemObj.manual_mask:
            self.runBeast(processingItemObj)
        elif processingItemObj.beast_skip and not processingItemObj.manual_mask and not processingItemObj.manual_skip:
            PipelineLogger.log(
                'manager', 'error',
                '$$$$$$$$$$$$$$$$$ Manual Mask Requested $$$$$$$$$$$$$$$$$$ - {0}'
                .format(processingItem))
            pass
        elif processingItemObj.beast_mask == 1 and not processingItemObj.beast_skip and processingItemObj.beast_qc == 1 and not processingItemObj.manual_mask and not processingItemObj.civet:
            self.runCivet(processingItemObj, 'B')
        elif processingItemObj.beast_mask == 1 and not processingItemObj.beast_skip and processingItemObj.beast_qc == 0 and not processingItemObj.manual_mask and not processingItemObj.manual_skip:
            self.requestQC(processingItemObj, 'beast')
        elif processingItemObj.civet == 1 and processingItemObj.civet_qc == 0:
            self.requestQC(processingItemObj, 'civet')
        else:
            if processingItemObj.civet_qc == -1:
                PipelineLogger.log(
                    'manager', 'error',
                    'Civet QC failed. Skipping. - {0}'.format(processingItem))
            PipelineLogger.log(
                'manager', 'error',
                'Error handling obj for processing - {0}'.format(
                    processingItem))
            return 0
 def checkNative(self, processingItemObj):
     orig_ScanType = self.getScanType(processingItemObj)
     converted_file = '{0}/{1}_{2}{3}{4}{5}_{6}.mnc'.format(processingItemObj.converted_folder, processingItemObj.study,
                                                     processingItemObj.subject_rid, processingItemObj.scan_date.replace('-', ''),
                                                     processingItemObj.s_identifier, processingItemObj.i_identifier,
                                                     orig_ScanType)
     nativeFolder = '{0}/native'.format(processingItemObj.root_folder)
     nativeFileName = '{0}/{1}_{2}{3}{4}{5}_{6}.mnc'.format(nativeFolder, processingItemObj.study,
                                                     processingItemObj.subject_rid, processingItemObj.scan_date.replace('-', ''),
                                                     processingItemObj.s_identifier, processingItemObj.i_identifier,
                                                     processingItemObj.modality.lower())
     if not os.path.exists(nativeFileName):
         try:
             distutils.dir_util.mkpath(nativeFolder)
             shutil.copyfile(converted_file, nativeFileName)
         except Exception as e:
             PipelineLogger.log('manager', 'error', 'Error in creating folders or copying native file. \n {0}'.format(e))
             PipelineLogger.log('manager', 'error', 'Setting to restart conversion. \n {0}'.format(e))
             sql = "UPDATE Conversion SET CONVERTED = 0, SKIP = 0 WHERE S_IDENTIFIER = '{0}' AND I_IDENTIFIER = '{1}'".format(processingItemObj.s_identifier, processingItemObj.i_identifier)
             self.DBClient.executeNoResult(sql)
             return None
     return nativeFileName
Example n. 38
    def process(self, processingItemObj):
        try:
            matlabScript, nativeFileName, niakFolder = self.readTemplateFile(
                processingItemObj)
            PipelineLogger.log('manager', 'info',
                               'NIAK starting for {0}'.format(nativeFileName))
        except:
            return 0

        # Delete the PIPE.lock file, if it exists
        if os.path.isfile("%s/preprocessing/logs/PIPE.lock" % niakFolder):
            os.remove("%s/preprocessing/logs/PIPE.lock" % niakFolder)

        success = self.executeScript(processingItemObj, matlabScript,
                                     niakFolder)

        #### Afterwards, if NIAK succeeded, concatenate all runs together using combiningRuns
        if False:  # concatenation currently disabled
            if success:
                self.combiningRuns(processingItemObj)
            else:
                # Error message assumed here; this branch is unreachable while the
                # "if False" guard above is in place.
                PipelineLogger.log('manager', 'error', 'NIAK failed for {0}'.format(nativeFileName))
Example n. 39
    def createNewScanSession(self, down_most_folder, filelist):
        # Return parts of the folder path, the ones of interest
        try:
            folder = down_most_folder.replace(self.root_folder, "")
            folder_parts = folder.split(
                "/")  # List containing each parts/folders of the full path
            filename_parts = filelist[0].split(
                "_")  # Takes the first filename and create a list of its parts

            rid = folder_parts[1][-4:]  # Get the last 4 characters
            if re.search('[a-zA-Z]', rid) is not None:
                rid = filename_parts[3]
                if re.search('[a-zA-Z]', rid) is not None:
                    PipelineLogger.log(
                        'root', 'error',
                        'File recurse error on Folder RID cannot be identified. - {0}, \n Filelist - {1}'
                        .format(folder, filelist))
                    return None
            scan_type = self.determineScanType(folder_parts[-3])

            scan_date = folder_parts[-2].split('_')[0]
            scan_time = folder_parts[-2].split('_', 1)[-1].replace("_", ":")
            s_identifier = filename_parts[-2]
            i_identifier = filename_parts[-1].split('.', 1)[0]
            file_type = self.determineExtension(filename_parts)
            download_folder = down_most_folder
            raw_folder = '{0}/{1}/{2}/{3}/{4}_{5}_{6}/raw'.format(
                sc.studyDatabaseRootDict[self.study], 'ADNI', scan_type, rid,
                scan_date, s_identifier, i_identifier)

            newScanSession = ScanSession\
                ('ADNI', rid, scan_type, scan_date, scan_time,
                 s_identifier, i_identifier, download_folder, raw_folder, file_type)
            if scan_type == 'unknown':
                newScanSession.printObject()
                return None
            return newScanSession
        except Exception:
            # Any parsing failure means the folder layout was unexpected; skip this folder.
            return None
 def add_to_mongoDB(self, file):
     with open(file, 'r') as fh:
         f = fh.read()
     try:
         o = xmltodict.parse(f)
     except Exception:
         print('XML cannot be read - {0}.'.format(file))
         PipelineLogger.log('root', 'exception', 'XML cannot be read - {0}.'.format(file))
         return 0
     subId, SID, IID, valid = self.get_subid_sid_iid(file)
     if valid:
         _id = '{0}_{1}_{2}'.format(subId, SID, IID)
         o['_id'] = _id
         client = MongoClient('localhost', 27017)
         db = client.ADNI_Database
         collection = db.Scan_XML_Collection
         try:
             post_id = collection.insert_one(o).inserted_id
             shutil.move(file, moncfg.XMLProcessedArchivePath)
         except DuplicateKeyError:
             PipelineLogger.log('root', 'exception', 'XML already in DB - {0}.'.format(_id))
             shutil.move(file, moncfg.XMLProcessedArchivePath)
             post_id = _id
         return post_id
Example n. 41
    def executeScript(self, processingItemObj, matlabScript, niakFolder):

        # Create a matlab file to be called later on
        matlabFile = self.createMatlabFile(matlabScript, niakFolder)

        # Prepare matlab command
        matlabCommand = '%s run %s;exit"' % (config.matlab_call, matlabFile)

        # Creating log folder
        logDir = '{0}/logs'.format(processingItemObj.root_folder)
        try:
            distutils.dir_util.mkpath(logDir)
        except Exception as e:
            PipelineLogger.log('manager', 'error', 'Error in creating log folder \n {0}'.format(e))
            return 0

        # Create list of files that should be present
        fmri_file = niakFolder + '/fmri/fmri_subject1_session1_run1.mnc'
        anat_ln_file = niakFolder + '/anat/anat_subject1_nuc_stereolin.mnc'
        anat_nl_file = niakFolder + '/anat/anat_subject1_nuc_stereonl.mnc'
        fmri_mean_file = niakFolder + '/anat/func_subject1_mean_stereonl.mnc'
        func_coregister = niakFolder + '/quality_control/group_coregistration/func_tab_qc_coregister_stereonl.csv'
        anat_ln_coregister = niakFolder + '/quality_control/group_coregistration/anat_tab_qc_coregister_stereolin.csv'
        anat_nl_coregister = niakFolder + '/quality_control/group_coregistration/anat_tab_qc_coregister_stereonl.csv'
        func_motion = niakFolder + '/quality_control/group_motion/qc_scrubbing_group.csv'
        outputFiles = ' '.join([fmri_file, anat_ln_file, anat_nl_file, fmri_mean_file, func_coregister,
                                anat_ln_coregister, anat_nl_coregister, func_motion])

        # Prepare bash command
        id = '{0}{1}{2}{3}'.format(processingItemObj.subject_rid, processingItemObj.scan_date.replace('-', ''),
                                   processingItemObj.s_identifier, processingItemObj.i_identifier)
        command = '%s; Pipelines/ADNI_Fmri/MatlabScripts/startMatlabScript.sh %s %s %s %s %s %s %s' % \
                  (config.sourcing, id, matlabCommand, niakFolder, logDir, socket.gethostname(), '50500', outputFiles)

        # Create NIAK folder
        if not os.path.exists(niakFolder):
            os.makedirs(niakFolder)

        # Run converter command
        PipelineLogger.log('converter', 'debug', 'Command : {0}'.format(command))
        p = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, executable='/bin/bash')
        out, err = p.communicate()
        PipelineLogger.log('converter', 'debug', 'Conversion Log Output : \n{0}'.format(out))
        PipelineLogger.log('converter', 'debug', 'Conversion Log Err : \n{0}'.format(err))

        QSubJobHandler.submittedJobs[id] = QSubJob(id, '01:00:00', processingItemObj, 'niak')
        return 1
    def convert_nii(self, convertionObj):
        # Run nii2mnc
        cmd = '{0} {1} {2}/../'.format(Config.ConverterConfig.niiToMnc_exec,
                                       convertionObj.raw_folder,
                                       convertionObj.converted_folder)
        PipelineLogger.log(
            'converter', 'info',
            'nii2mnc conversion starting for : {0} - {1} - {2} - {3}'.format(
                convertionObj.study, convertionObj.rid,
                convertionObj.scan_date, convertionObj.scan_type))
        PipelineLogger.log('converter', 'debug', 'Command : {0}'.format(cmd))

        outFile = '{0}/{1}_{2}{3}{4}{5}_{6}.mnc'.format(
            convertionObj.converted_folder,
            convertionObj.study, convertionObj.rid,
            convertionObj.scan_date.replace('-',
                                            ''), convertionObj.s_identifier,
            convertionObj.i_identifier, convertionObj.scan_type)

        iterator = 1
        for niiFile in glob.glob(convertionObj.raw_folder + '/*.nii*'):
            if niiFile.endswith('.gz'):
                # Wait for gzip to finish; otherwise nii2mnc can race against
                # the partially decompressed file.
                p = subprocess.Popen('gzip -d ' + niiFile,
                                     shell=True,
                                     stdout=subprocess.PIPE,
                                     stderr=subprocess.PIPE)
                p.communicate()
                niiFile = niiFile.replace('.gz', '')
            tempOutFile = outFile.replace('.mnc',
                                          '_run' + str(iterator) + '.mnc')
            cmd = '%s %s %s' % (Config.ConverterConfig.niiToMnc_exec, niiFile,
                                tempOutFile)
            self.runShellCommand(cmd)
            self.checkMncFile(
                tempOutFile
            )  # Check whether the fMRI files have a time component/axis
            iterator += 1

        # Check how many mnc files were generated
        mncList = []
        for root, dirnames, filenames in os.walk(
                convertionObj.converted_folder):
            for filename in fnmatch.filter(filenames, '*.mnc'):
                mncList.append(os.path.join(root, filename))
        if len(mncList) == 0:
            PipelineLogger.log(
                'converter', 'error',
                'MINC Conversion unsuccessful : Check log for : {0} - {1} - {2} - {3}'
                .format(convertionObj.study, convertionObj.rid,
                        convertionObj.scan_date, convertionObj.scan_type))
            return 0
        else:
            return 1
 def run(self):
     try:
         self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
         self.sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
         self.sock.bind((socket.gethostname(), 50500))
         self.sock.settimeout(300)
         self.sock.listen(1000)
         PipelineLogger.log('manager', 'info',' ++++++++ QSub Job Handler started.')
         PipelineLogger.log('manager', 'info',' ++++++++ QSub Job Handler listening in Host : {0} at Port : {1}.'.format(socket.gethostname(), 50500))
         while not self.QUIT and self.checkJobs():
             try:
                 conn = self.sock.accept()[0]
                 thread = threading.Thread(target=self.doWork, args=(conn, ))
                 thread.start()
             except socket.timeout:
                 continue
     except Exception as e:
          PipelineLogger.log('manager', 'exception', e)
          PipelineLogger.log('manager', 'error', 'Cannot create QSubJobHandler... Will not listen for jobs.')
         del self.sock
Example 45
    def combiningRuns(self, processingItemObj):
        #### Needs a lot more improvement
        command = "%s combiningRuns('%s', '%s', %s, %s, %s)" %\
                  (config.matlab_call, config.fmristat_location, config.emma_tools_location,
                   processingItemObj.root_folder, processingItemObj.subject_rid, '1')

        # Run matlab command
        PipelineLogger.log('processing', 'debug', 'Command : {0}'.format(command))
        p = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, executable='/bin/bash')
        out, err = p.communicate()
        PipelineLogger.log('processing', 'debug', 'combiningRuns Log Output : \n{0}'.format(out))
        PipelineLogger.log('processing', 'debug', 'combiningRuns Log Err : \n{0}'.format(err))

        return out
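# Note on the call string above: the two tool paths are quoted as MATLAB
# strings while root_folder and subject_rid are interpolated bare, and the
# parenthesised expression reaches the shell unquoted. A hedged sketch of the
# same invocation with the expression shell-quoted and the folder passed as a
# MATLAB string (assuming combiningRuns accepts it as one):
def buildCombiningRunsCmd_sketch(matlab_call, fmristat, emma, root_folder, rid):
    return '%s "combiningRuns(\'%s\', \'%s\', \'%s\', %s, %s)"' % (
        matlab_call, fmristat, emma, root_folder, rid, '1')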
    def runCivet(self, processingItemObj, maskStatus):
        nativeFileName = self.checkNative(processingItemObj)
        if not nativeFileName:
            return 0
        copyFolder = pc.T1TempDirForCIVETProcessing
        subjectFileName_base = '{0}_{1}{2}{3}{4}_{5}'.format(processingItemObj.study,
                                                        processingItemObj.subject_rid, processingItemObj.scan_date.replace('-', ''),
                                                        processingItemObj.s_identifier, processingItemObj.i_identifier,
                                                        processingItemObj.modality.lower())
        jobId = '{0}_{1}_{2}_{3}{4}{5}{6}_CIVETRUN'.format(processingItemObj.study, processingItemObj.modality,
                                                           processingItemObj.table_id, processingItemObj.subject_rid,
                                                           processingItemObj.scan_date.replace('-', ''),
                                                            processingItemObj.s_identifier, processingItemObj.i_identifier)
        checkJobPresentSql = "SELECT * FROM externalWaitingJobs WHERE JOB_ID = '{0}'".format(jobId)
        if len(self.DBClient.executeAllResults(checkJobPresentSql)) == 0:
            beastFileName = '{0}/beast/mask/{1}_skull_mask_native.mnc'.format(processingItemObj.root_folder, subjectFileName_base)
            beastMaskName_base = '{0}_{1}{2}{3}{4}_mask'.format(processingItemObj.study,
                                                            processingItemObj.subject_rid, processingItemObj.scan_date.replace('-', ''),
                                                            processingItemObj.s_identifier, processingItemObj.i_identifier)
            beastMaskName = '{0}/{1}.mnc'.format(copyFolder, beastMaskName_base)
            manualFileName = '{0}/manual/mask/{1}_skull_mask_native.mnc'.format(processingItemObj.root_folder, subjectFileName_base)
            manualMaskName_base = '{0}_{1}{2}{3}{4}_mask'.format(processingItemObj.study,
                                                            processingItemObj.subject_rid, processingItemObj.scan_date.replace('-', ''),
                                                            processingItemObj.s_identifier, processingItemObj.i_identifier)
            manualMaskName = '{0}/{1}.mnc'.format(copyFolder, manualMaskName_base)
            try:
                distutils.file_util.copy_file(nativeFileName, copyFolder)
                if maskStatus == 'B':
                    distutils.file_util.copy_file(beastFileName, beastMaskName)
                elif maskStatus == 'M':
                    distutils.file_util.copy_file(manualFileName, manualMaskName)
                elif maskStatus == 'N':
                    pass
                else:
                    PipelineLogger.log('manager', 'error', 'Unknown mask status - {0} Entry : Processing ID - {1}, Table ID - {2}'.format(maskStatus, processingItemObj.processing_rid, processingItemObj.table_id))

                addExternalJobSQL = "INSERT INTO externalWaitingJobs VALUES ('{0}', '{1}', '{2}', NULL, NULL, NULL)".format(jobId, '{0}_{1}_Pipeline'.format(processingItemObj.study, processingItemObj.modality), 'CIVET')
                self.DBClient.executeNoResult(addExternalJobSQL)
            except Exception as e:
                PipelineLogger.log('manager', 'error', 'Error copying for CIVET input. Rolling back... - Processing Table ID -> {0} Table ID -> {1}'.format( processingItemObj.processing_rid, processingItemObj.table_id))
                PipelineLogger.log('manager', 'exception', e)
                nativeFileOnCopyFolder = '{0}/{1}'.format(copyFolder, os.path.basename(nativeFileName))
                # Clean up whatever was copied before the failure.
                for leftover in (nativeFileOnCopyFolder, beastMaskName, manualMaskName):
                    if os.path.exists(leftover):
                        os.remove(leftover)
Example 47
    def getManualXFM(self, processingItemObj, matchedT1entry):
        study = processingItemObj.study
        rid = processingItemObj.subject_rid
        pet_sid = processingItemObj.s_identifier
        pet_iid = processingItemObj.i_identifier
        t1_sid = matchedT1entry[6]
        t1_iid = matchedT1entry[7]

        xfmUID = 'PET_{0}_{1}_T1_{2}_{3}'.format(pet_sid, pet_iid, t1_sid, t1_iid)

        getXFMSQL = "SELECT * FROM MANUAL_XFM WHERE STUDY = '{0}' AND RID = '{1}' AND XFM_UNIQUEID = '{2}'".format(study, rid, xfmUID)
        res = self.DBClient.executeAllResults(getXFMSQL)

        if len(res) > 0:
            PipelineLogger.log('root', 'INFO', '++ Manual XFM found. - {0} - {1}'.format(processingItemObj.subject_rid, processingItemObj.scan_date))
            manXFM = res[0][4]
            updateSQL = "UPDATE {0}_{1}_Pipeline SET MANUAL_XFM = '{2}' WHERE PROCESSING_TID = {3}".format(study, processingItemObj.modality, manXFM, processingItemObj.processing_rid)
            self.DBClient.executeNoResult(updateSQL)
            return manXFM
        else:
            PipelineLogger.log('root', 'INFO', '$$$$$$$ Manual XFM not found. Trying to find using uncorrected T1s. - {0} - {1}'.format(processingItemObj.subject_rid, processingItemObj.scan_date))
            mustMatchT1SID = t1_sid
            mustMatchT1IID = t1_iid
            xfmApproximation = 'PET_{0}_{1}_T1_%_%'.format(pet_sid, pet_iid)
            getAllT1s = "SELECT * FROM MANUAL_XFM WHERE STUDY = '{0}' AND RID = '{1}' AND XFM_UNIQUEID LIKE '{2}'".format(study, rid, xfmApproximation)
            approxRes = self.DBClient.executeAllResults(getAllT1s)
            getFromProcessingSQL = "SELECT * FROM Processing WHERE (STUDY, RID, SCAN_DATE, SCAN_TIME) = (SELECT `STUDY`, `RID`, `SCAN_DATE`, `SCAN_TIME` FROM `Processing` WHERE MODALITY = 'T1' AND `S_IDENTIFIER` = '{0}' AND `I_IDENTIFIER` = '{1}')".format(mustMatchT1SID, mustMatchT1IID)
            allT1s = self.DBClient.executeAllResults(getFromProcessingSQL)
            for t1 in allT1s:
                t1sid = t1[6]
                t1iid = t1[7]
                for appRes in approxRes:
                    approxResSID = appRes[3].split('_')[4]
                    approxResIID = appRes[3].split('_')[5]
                    if t1sid == approxResSID and t1iid == approxResIID:
                        PipelineLogger.log('root', 'INFO', '++ Manual XFM found from approximate matching. - {0} - {1}'.format(processingItemObj.subject_rid, processingItemObj.scan_date))
                        manXFM = appRes[4]
                        updateSQL = "UPDATE {0}_{1}_Pipeline SET MANUAL_XFM = '{2}' WHERE PROCESSING_TID = {3}".format(study, processingItemObj.modality, manXFM, processingItemObj.processing_rid)
                        self.DBClient.executeNoResult(updateSQL)
                        return manXFM


            self.requestCoreg(processingItemObj, matchedT1entry)
            return None
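# The XFM_UNIQUEID queried above packs both scan identifiers into one string,
# 'PET_{pet_sid}_{pet_iid}_T1_{t1_sid}_{t1_iid}', which is why the approximate
# match splits on '_' and reads fields 4 and 5. A worked example with
# hypothetical identifiers:
xfmUID_example = 'PET_S1001_I2002_T1_S3003_I4004'  # illustrative values only
parts = xfmUID_example.split('_')
# parts[4] -> 'S3003' (T1 s_identifier), parts[5] -> 'I4004' (T1 i_identifier)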
Example 48
    def setStatus(self, job, status):
        nestedJob = job.job
        table = '{0}_{1}_Pipeline'.format(nestedJob.study, nestedJob.modality)
        table_id = nestedJob.table_id
        setSql = None
        if job.jobType == 'beast':
            if status == 'Success':
                setSql = 'UPDATE {0} SET BEAST_MASK = 1 WHERE RECORD_ID = {1}'.format(table, table_id)
            elif status == 'Fail':
                setSql = 'UPDATE {0} SET BEAST_MASK = -1, BEAST_SKIP = 1 WHERE RECORD_ID = {1}'.format(table, table_id)
        elif job.jobType in ('av45', 'fdg'):
            if status == 'Success':
                setSql = "UPDATE {0} SET FINISHED = 1, PROC_Failed = Null WHERE RECORD_ID = {1}".format(table, table_id)
                self.requestQC(nestedJob, job.jobType)
            elif status == 'Fail':
                setSql = "UPDATE {0} SET PROC_Failed = 'Failed' , SKIP = 1 WHERE RECORD_ID = {1}".format(table, table_id)
        # Guard against an unknown job type or status leaving setSql unset.
        if setSql is not None:
            self.DBClient.executeNoResult(setSql)
        if status == 'Fail':
            PipelineLogger.log('manager', 'error', 'QSUB job Status Failed: - {0} - Processing Table ID : {1} - Modality Table ID : {2}'.format(job.jobType, nestedJob.processing_rid, nestedJob.table_id))
    def moveRawData(self):
        def removeCommaIfThere(destFolder):
            PipelineLogger.log('manager', 'debug',
                                   'Removing unsupported chars from file names...... :')
            for dpath, dnames, fnames in os.walk(destFolder):
                for f in fnames:
                    if ',' in f:
                        # Rename in place instead of changing the process-wide
                        # working directory with os.chdir.
                        os.rename(os.path.join(dpath, f),
                                  os.path.join(dpath, f.replace(',', '')))
            PipelineLogger.log('manager', 'debug',
                                   'Removing unsupported chars from file names done ...:')

        def copyFile(sourceFolder, destFolder):
            try:
                PipelineLogger.log('manager', 'debug', 'Raw Data Copying : {0} -> {1}'.format(sourceFolder, destFolder))
                distutils.dir_util.copy_tree(sourceFolder, destFolder, update=True)
                PipelineLogger.log('manager', 'debug',
                                   'Raw Data Copy Done...... : {0} -> {1}'.format(sourceFolder, destFolder))
                removeCommaIfThere(destFolder)
                return 1
            except Exception as exc:
                PipelineLogger.log('manager', 'error',
                                   'Raw Data Move Error : {0} -> {1}'.format(sourceFolder, destFolder))
                PipelineLogger.log('manager', 'exception', exc)
                return 0
        for study in self.studyList:
            totalToMove = len(self.moveSortingObjListDict[study])
            PipelineLogger.log('manager', 'info', 'Moving started for study {0} - Total to be moved : {1}'.format(study, totalToMove))
            count = 1
            for sortingObj in self.moveSortingObjListDict[study]:
                PipelineLogger.log('manager', 'info', 'Moving {0}/{1} - {2}'.format(count, totalToMove, sortingObj.download_folder))
                copied = copyFile(sortingObj.download_folder, sortingObj.raw_folder)
                count += 1
                if copied:
                    self.conversionTable.insertFromSortingObj(sortingObj, self.version)
                    self.sortingTable.setMovedTrue(sortingObj)
                else:
                    PipelineLogger.log('manager', 'error', 'File Move Error : {0} -> {1}. Moving to next...'.format(sortingObj.download_folder, sortingObj.raw_folder))
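# The nested copyFile above relies on distutils.dir_util.copy_tree, and
# distutils is deprecated since Python 3.10 (removed in 3.12). A sketch of an
# equivalent built on the standard library's shutil, assuming Python 3.8+:
import shutil

def copyFile_sketch(sourceFolder, destFolder):
    # dirs_exist_ok=True merges into an existing destination tree, the closest
    # analogue of copy_tree; unlike update=True, it recopies files that are
    # already up to date at the destination.
    shutil.copytree(sourceFolder, destFolder, dirs_exist_ok=True)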
    def process(self, processingItem):
        processingItemObj = ProcessingItemObj(processingItem)

        if processingItemObj.beast_skip and processingItemObj.manual_skip and not processingItemObj.civet:
            self.runCivet(processingItemObj, 'N')
        elif processingItemObj.manual_mask and not processingItemObj.manual_skip and not processingItemObj.civet:
            self.runCivet(processingItemObj, 'M')
        elif processingItemObj.beast_mask == 0 and not processingItemObj.beast_skip and processingItemObj.beast_qc == 0 and not processingItemObj.manual_mask:
            self.runBeast(processingItemObj)
        elif processingItemObj.beast_skip and not processingItemObj.manual_mask and not processingItemObj.manual_skip:
            PipelineLogger.log('manager', 'error', '$$$$$$$$$$$$$$$$$ Manual Mask Requested $$$$$$$$$$$$$$$$$$ - {0}'.format(processingItem))
        elif processingItemObj.beast_mask == 1 and not processingItemObj.beast_skip and processingItemObj.beast_qc == 1 and not processingItemObj.manual_mask and not processingItemObj.civet:
            self.runCivet(processingItemObj, 'B')
        elif processingItemObj.beast_mask == 1 and not processingItemObj.beast_skip and processingItemObj.beast_qc == 0 and not processingItemObj.manual_mask and not processingItemObj.manual_skip:
            self.requestQC(processingItemObj, 'beast')
        elif processingItemObj.civet == 1 and processingItemObj.civet_qc == 0:
            self.requestQC(processingItemObj, 'civet')
        else:
            if processingItemObj.civet_qc == -1:
                PipelineLogger.log('manager', 'error', 'Civet QC failed. Skipping. - {0}'.format(processingItem))
            PipelineLogger.log('manager', 'error', 'Error handling obj for processing - {0}'.format(processingItem))
            return 0
Example 53
    def processPET(self, processingItemObj, matchT1Path):
        petFileName = '{0}/{1}_{2}{3}{4}{5}_{6}.mnc'.format(processingItemObj.converted_folder, processingItemObj.study,
                                                        processingItemObj.subject_rid, processingItemObj.scan_date.replace('-', ''),
                                                        processingItemObj.s_identifier, processingItemObj.i_identifier,
                                                        self.getScanType(processingItemObj))
        processedFolder = '{0}/processed'.format(processingItemObj.root_folder)
        logDir = '{0}/logs'.format(processingItemObj.root_folder)
        PipelineLogger.log('manager', 'info', 'PET processing starting for {0}'.format(petFileName))
        try:
            distutils.dir_util.mkpath(logDir)
        except Exception as e:
            PipelineLogger.log('manager', 'error', 'Error in creating log folder \n {0}'.format(e))
            return 0

        id = '{0}{1}{2}{3}'.format(processingItemObj.subject_rid, processingItemObj.scan_date.replace('-', ''), processingItemObj.s_identifier, processingItemObj.i_identifier)
        paramStrd = ast.literal_eval(processingItemObj.parameters)
        paramStrt = ' '.join(['[\"{0}\"]=\"{1}\"'.format(k, v) for k,v in paramStrd.items()])
        paramStr = '({0})'.format(paramStrt)
        petCMD = "source /opt/minc-toolkit/minc-toolkit-config.sh; Pipelines/ADNI_AV45/ADNI_V2_AV45_Process {0} {1} {2} {3} {4} {5} '{6}' {7} {8}".format(id, petFileName, processedFolder, matchT1Path, 'auto' if processingItemObj.manual_xfm == '' else processingItemObj.manual_xfm, logDir, paramStr,socket.gethostname(), 50500)
        try:
            processedFolder_del = '{0}/processed_del'.format(processingItemObj.root_folder)
            os.rename(processedFolder, processedFolder_del)
            shutil.rmtree(processedFolder_del)
        except Exception as e:
            PipelineLogger.log('manager', 'error', 'Error in deleting old processing folder. \n {0}'.format(e))
        try:
            distutils.dir_util.mkpath(processedFolder)
        except Exception as e:
            PipelineLogger.log('manager', 'error', 'Error in creating processing folder. \n {0}'.format(e))
            return 0

        PipelineLogger.log('manager', 'debug', 'Command : {0}'.format(petCMD))
        p = subprocess.Popen(petCMD, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, executable='/bin/bash')
        out, err = p.communicate()
        PipelineLogger.log('manager', 'debug', 'Process Log Output : \n{0}'.format(out))
        PipelineLogger.log('manager', 'debug', 'Process Log Err : \n{0}'.format(err))

        QSubJobHandler.submittedJobs[id] = QSubJob(id, '02:00:00', processingItemObj, 'av45')
        return 1
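# The parameter string handed to the AV45 script is a bash-style associative
# array literal rebuilt from the DB 'parameters' column. A worked example with
# an illustrative parameters value (the keys below are hypothetical, not the
# actual pipeline parameters):
parameters_example = "{'ref_region': 'cerebellum', 'smooth': '6'}"
paramStrd_example = ast.literal_eval(parameters_example)
paramStrt_example = ' '.join('["{0}"]="{1}"'.format(k, v)
                             for k, v in paramStrd_example.items())
# '({0})'.format(paramStrt_example) -> '(["ref_region"]="cerebellum" ["smooth"]="6")'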
    def convert_v(self, convertionObj):
        rawFile = "{0}/*.v".format(convertionObj.raw_folder)
        outFile = "{0}/{1}_{2}{3}{4}{5}_{6}.mnc".format(
            convertionObj.converted_folder,
            convertionObj.study,
            convertionObj.rid,
            convertionObj.scan_date.replace("-", ""),
            convertionObj.s_identifier,
            convertionObj.i_identifier,
            convertionObj.scan_type,
        )
        outDynFile = "{0}/{1}_{2}{3}{4}{5}_{6}_Dyn.mnc".format(
            convertionObj.converted_folder,
            convertionObj.study,
            convertionObj.rid,
            convertionObj.scan_date.replace("-", ""),
            convertionObj.s_identifier,
            convertionObj.i_identifier,
            convertionObj.scan_type,
        )
        outTempFile = "{0}/{1}_{2}{3}{4}{5}_{6}_temp.mnc".format(
            convertionObj.converted_folder,
            convertionObj.study,
            convertionObj.rid,
            convertionObj.scan_date.replace("-", ""),
            convertionObj.s_identifier,
            convertionObj.i_identifier,
            convertionObj.scan_type,
        )
        cmd = "/opt/minc/bin/ecattominc {0} {1}".format(rawFile, outTempFile)
        cmdRes = "mincresample -short {0} {1}".format(outTempFile, outDynFile)
        PipelineLogger.log(
            "converter",
            "info",
            "MINC conversion starting for : {0} - {1} - {2} - {3}".format(
                convertionObj.study, convertionObj.rid, convertionObj.scan_date, convertionObj.scan_type
            ),
        )
        PipelineLogger.log("converter", "debug", "Command : {0}".format(cmd))
        try:
            os.remove(outDynFile)
        except OSError:
            pass
        if os.path.exists(convertionObj.converted_folder):
            try:
                convertedFolder_del = "{0}_del".format(convertionObj.converted_folder)
                os.rename(convertionObj.converted_folder, convertedFolder_del)
                shutil.rmtree(convertedFolder_del)
            except Exception as e:
                PipelineLogger.log("manager", "error", "Error in deleting old converted folder. \n {0}".format(e))
        try:
            distutils.dir_util.mkpath(convertionObj.converted_folder)
        except Exception:
            pass
        p = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        out, err = p.communicate()
        PipelineLogger.log("converter", "debug", "Conversion Log Output : \n{0}".format(out))
        PipelineLogger.log("converter", "debug", "Conversion Log Err : \n{0}".format(err))
        subprocess.Popen(cmdRes, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate()
        if self.avgTime(outDynFile, outFile):
            PipelineLogger.log(
                "converter",
                "info",
                "MINC Conversion success : {0} - {1} - {2} - {3}".format(
                    convertionObj.study, convertionObj.rid, convertionObj.scan_date, convertionObj.scan_type
                ),
            )
            return 1
        else:
            PipelineLogger.log(
                "converter",
                "error",
                "MINC Conversion unsuccessful : Check log for : {0} - {1} - {2} - {3}".format(
                    convertionObj.study, convertionObj.rid, convertionObj.scan_date, convertionObj.scan_type
                ),
            )
            return 0
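# avgTime is called above but not shown here. A minimal sketch, assuming the
# standard MINC 'mincaverage' tool: averaging the dynamic frames over the time
# dimension yields the static volume passed downstream.
def avgTime_sketch(dynFile, outFile):
    cmd = 'mincaverage -clobber -avgdim time {0} {1}'.format(dynFile, outFile)
    p = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE,
                         stderr=subprocess.PIPE)
    p.communicate()
    return 1 if p.returncode == 0 else 0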
    def processPET(self, processingItemObj, matchT1Path):
        petFileName = '{0}/{1}_{2}{3}{4}{5}_{6}.mnc'.format(processingItemObj.converted_folder, processingItemObj.study,
                                                        processingItemObj.subject_rid, processingItemObj.scan_date.replace('-', ''),
                                                        processingItemObj.s_identifier, processingItemObj.i_identifier,
                                                        self.getScanType(processingItemObj))
        processedFolder = '{0}/processed'.format(processingItemObj.root_folder)
        logDir = '{0}/logs'.format(processingItemObj.root_folder)
        PipelineLogger.log('manager', 'info', 'PET processing starting for {0}'.format(petFileName))
        try:
            distutils.dir_util.mkpath(logDir)
        except Exception as e:
            PipelineLogger.log('manager', 'error', 'Error in creating log folder \n {0}'.format(e))
            return 0

        id = '{0}{1}{2}{3}'.format(processingItemObj.subject_rid, processingItemObj.scan_date.replace('-', ''), processingItemObj.s_identifier, processingItemObj.i_identifier)
        paramStrd = ast.literal_eval(processingItemObj.parameters)
        paramStrt = ' '.join(['[\"{0}\"]=\"{1}\"'.format(k, v) for k,v in paramStrd.items()])
        paramStr = '({0})'.format(paramStrt)
        petCMD = "source /opt/minc-1.9.15/minc-toolkit-config.sh; Pipelines/ADNI_AV1451/ADNI_V1_AV1451_Process {0} {1} {2} {3} {4} {5} '{6}' {7} {8}".format(id, petFileName, processedFolder, matchT1Path, processingItemObj.manual_xfm, logDir, paramStr,socket.gethostname(), 50500)
        try:
            processedFolder_del = '{0}/processed_del'.format(processingItemObj.root_folder)
            os.rename(processedFolder, processedFolder_del)
            shutil.rmtree(processedFolder_del)
        except Exception as e:
            PipelineLogger.log('manager', 'error', 'Error in deleting old processing folder. \n {0}'.format(e))
        try:
            distutils.dir_util.mkpath(processedFolder)
        except Exception as e:
            PipelineLogger.log('manager', 'error', 'Error in creating processing folder. \n {0}'.format(e))
            return 0

        ### This section is new for ADNI pre-processing - per-scanner-type blurring. Only required
        ### if the images are acquired on different scanners and need to be brought to the same PSF.
        blur_x, blur_y, blur_z = self.PETHelper.getBlurringParams(processingItemObj)
        ### End pre-processing.

        PipelineLogger.log('manager', 'debug', 'Command : {0}'.format(petCMD))
        p = subprocess.Popen(petCMD, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, executable='/bin/bash')
        out, err = p.communicate()
        PipelineLogger.log('manager', 'debug', 'Process Log Output : \n{0}'.format(out))
        PipelineLogger.log('manager', 'debug', 'Process Log Err : \n{0}'.format(err))

        QSubJobHandler.submittedJobs[id] = QSubJob(id, '02:00:00', processingItemObj, 'av1451')
        return 1
    def processPET(self, processingItemObj, matchT1Path):
        petFileName = '{0}/{1}_{2}{3}{4}{5}_{6}.mnc'.format(
            processingItemObj.converted_folder, processingItemObj.study,
            processingItemObj.subject_rid,
            processingItemObj.scan_date.replace('-', ''),
            processingItemObj.s_identifier, processingItemObj.i_identifier,
            self.getScanType(processingItemObj))
        processedFolder = '{0}/processed'.format(processingItemObj.root_folder)
        logDir = '{0}/logs'.format(processingItemObj.root_folder)
        PipelineLogger.log(
            'manager', 'info',
            'PET processing starting for {0}'.format(petFileName))
        try:
            distutils.dir_util.mkpath(logDir)
        except Exception as e:
            PipelineLogger.log('manager', 'error',
                               'Error in creating log folder \n {0}'.format(e))
            return 0

        id = '{0}{1}{2}{3}'.format(
            processingItemObj.subject_rid,
            processingItemObj.scan_date.replace('-', ''),
            processingItemObj.s_identifier, processingItemObj.i_identifier)
        paramStrd = ast.literal_eval(processingItemObj.parameters)
        paramStrt = ' '.join(
            ['[\"{0}\"]=\"{1}\"'.format(k, v) for k, v in paramStrd.items()])
        paramStr = '({0})'.format(paramStrt)
        petCMD = "source /opt/minc-1.9.15/minc-toolkit-config.sh; Pipelines/ADNI_FDG/ADNI_V2_FDG_Process {0} {1} {2} {3} {4} {5} '{6}' {7} {8}".format(
            id, petFileName, processedFolder, matchT1Path,
            'auto' if processingItemObj.manual_xfm == ''
            else processingItemObj.manual_xfm, logDir, paramStr,
            socket.gethostname(), 50500)
        try:
            processedFolder_del = '{0}/processed_del'.format(
                processingItemObj.root_folder)
            os.rename(processedFolder, processedFolder_del)
            shutil.rmtree(processedFolder_del)
        except Exception as e:
            PipelineLogger.log(
                'manager', 'error',
                'Error in deleting old processing folder. \n {0}'.format(e))
        try:
            distutils.dir_util.mkpath(processedFolder)
        except Exception as e:
            PipelineLogger.log(
                'manager', 'error',
                'Error in creating processing folder. \n {0}'.format(e))
            return 0

        PipelineLogger.log('manager', 'debug', 'Command : {0}'.format(petCMD))
        p = subprocess.Popen(petCMD,
                             shell=True,
                             stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE,
                             executable='/bin/bash')
        out, err = p.communicate()
        PipelineLogger.log('manager', 'debug',
                           'Process Log Output : \n{0}'.format(out))
        PipelineLogger.log('manager', 'debug',
                           'Process Log Err : \n{0}'.format(err))

        QSubJobHandler.submittedJobs[id] = QSubJob(id, '23:00:00',
                                                   processingItemObj, 'fdg')
        return 1
Example 57
__author__ = 'Sulantha'
import logging.config
from Utils.PipelineLogger import PipelineLogger
from Test.logTestSubModule import testSubModuleLogging


logging.config.fileConfig('../Config/LoggingConfig.conf')

PipelineLogger.log('root', 'info', 'Main module log')

lg = testSubModuleLogging()

lg.log()

    def getMatchingT1(self, processingItemObj):
        modalityID = '{0}{1}{2}{3}{4}{5}{6}'.format(
            processingItemObj.study, processingItemObj.version,
            processingItemObj.subject_rid, processingItemObj.modality,
            processingItemObj.scan_date.replace('-', ''),
            processingItemObj.s_identifier, processingItemObj.i_identifier)
        getFromMatchTableSQL = "SELECT * FROM MatchT1 WHERE MODALITY_ID = '{0}'".format(
            modalityID)
        existingMatchedRec = self.DBClient.executeAllResults(
            getFromMatchTableSQL)
        if len(existingMatchedRec) == 1:
            getConvSQL = "SELECT * FROM Conversion WHERE RECORD_ID = '{0}'".format(
                existingMatchedRec[0][3])
            return self.DBClient.executeAllResults(getConvSQL)[0]
        else:

            if processingItemObj.modality == 'FMRI':
                PipelineLogger.log(
                    'root', 'error',
                    'FMRI T1 Matching not implemented. {0} - {1} - {2}'.format(
                        processingItemObj.subject_rid,
                        processingItemObj.s_identifier.replace('S', ''),
                        processingItemObj.i_identifier.replace('I', '')))
                return None

            else:  # By Default, for PET images
                date_str = processingItemObj.scan_date.replace('-', '')
                name_and_Mod = '{0}{1}'.format(processingItemObj.subject_rid,
                                               processingItemObj.modality)
                visit = processingItemObj.i_identifier.split('x')[0].replace(
                    date_str, '').replace(name_and_Mod, '')
                pet_label = '{0}_{1}_{2}'.format(
                    processingItemObj.subject_rid, visit,
                    processingItemObj.modality.lower())
                getRecordSQL = "SELECT * FROM PET_MRI_Proc_Match WHERE Label LIKE '{0}'".format(
                    pet_label)

            petrecord = self.MatchDBClient.executeAllResults(getRecordSQL)
            if not petrecord:
                PipelineLogger.log(
                    'root', 'error',
                    'Cannot find PET record : {0} - {1} - {2}'.format(
                        processingItemObj.subject_rid,
                        processingItemObj.s_identifier.replace('S', ''),
                        processingItemObj.i_identifier.replace('I', '')))
                return None

            mr_name = petrecord[0][5]
            if mr_name == '':
                ### Processed-with-MR entry not found; would need to fall back to date-based matching.
                PipelineLogger.log(
                    'root', 'error',
                    'Processed-with-MR entry not found : {0} - {1} - {2} - date-based matching (+/- 60 days from PET date) not performed here.'
                    .format(processingItemObj.subject_rid,
                            processingItemObj.modality, visit))
                return None

            mr_fid = petrecord[0][6]
            mr_visit = mr_name.split('_')[1]

            matchedT1withScanDescriptions = []

            for t1_type in ['MPRAGE', 'IRFSPGR', 'MPR', 'FSPGR']:
                mr_DB_iid = '{0}{3}{1}%x{2}'.format(
                    processingItemObj.subject_rid, mr_visit, mr_fid, t1_type)
                getScanFromConversionSQL = "SELECT * FROM Conversion WHERE STUDY = '{0}' AND I_IDENTIFIER LIKE '{1}' AND SKIP = 0".format(
                    processingItemObj.study, mr_DB_iid)
                t1_conversion = self.DBClient.executeAllResults(
                    getScanFromConversionSQL)
                if len(t1_conversion) > 0:
                    matchedT1withScanDescriptions.append(t1_conversion[0])
            if len(matchedT1withScanDescriptions) < 1:
                PipelineLogger.log(
                    'root', 'error',
                    'Matched T1s are not in the database. : Subject, visit and FID - {0} {1} {2}'
                    .format(processingItemObj.subject_rid, mr_visit, mr_fid))
                return None
            else:
                if len(matchedT1withScanDescriptions) == 1:
                    ## ONLY ONE MATCHED T1. GOOD. CHECK IF THE T1 is a good scan type and not a bluff !!!
                    self.addToMatchT1Table(processingItemObj, modalityID,
                                           matchedT1withScanDescriptions[0])
                    return matchedT1withScanDescriptions[0]

                else:
                    #### MORE THAN ONE FOUND. Very weird for DIAN.
                    PipelineLogger.log(
                        'root', 'error',
                        'MORE THAN ONE T1 Match FOUND. Very weird for DIAN. : Subject and visit - {0} {1}'
                        .format(processingItemObj.subject_rid, mr_visit))
                    return None
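# The visit code above is recovered by stripping known substrings out of the
# i_identifier. A worked example, assuming an identifier of the hypothetical
# form '{rid}{modality}{visit}{scan_date}x{suffix}':
date_str_example = '20150102'
name_and_Mod_example = '1234AV45'
i_identifier_example = '1234AV45v0620150102x001'  # illustrative value only
visit_example = i_identifier_example.split('x')[0].replace(
    date_str_example, '').replace(name_and_Mod_example, '')
# visit_example -> 'v06'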
Example 59
    def getMatchingT1(self, processingItemObj):
        modalityID = '{0}{1}{2}{3}{4}{5}{6}'.format(
            processingItemObj.study, processingItemObj.version,
            processingItemObj.subject_rid, processingItemObj.modality,
            processingItemObj.scan_date.replace('-', ''),
            processingItemObj.s_identifier, processingItemObj.i_identifier)
        getFromMatchTableSQL = "SELECT * FROM MatchT1 WHERE MODALITY_ID = '{0}'".format(
            modalityID)

        # Find matching record in matching T1 table
        existingMatchedRec = self.DBClient.executeAllResults(
            getFromMatchTableSQL)
        if len(existingMatchedRec) == 1:
            getConvSQL = "SELECT * FROM Conversion WHERE RECORD_ID = '{0}'".format(
                existingMatchedRec[0][3])
            return self.DBClient.executeAllResults(getConvSQL)[0]
        else:
            # If no match is recorded, look in MRILIST for the equivalent fMRI record
            getFmriRecordSQL = "SELECT * FROM MRILIST WHERE subject LIKE '%_%_{0}' AND seriesid = {1} AND imageuid = {2}".format(
                processingItemObj.subject_rid,
                processingItemObj.s_identifier.replace('S', ''),
                processingItemObj.i_identifier.replace('I', ''))
            FmriRecord = self.MatchDBClient.executeAllResults(getFmriRecordSQL)
            if not FmriRecord:
                PipelineLogger.log(
                    'root', 'error',
                    'Cannot find Fmri record : {0} - {1} - {2}'.format(
                        processingItemObj.subject_rid,
                        processingItemObj.s_identifier.replace('S', ''),
                        processingItemObj.i_identifier.replace('I', '')))
                return None

            visit_code = pc.ADNI_visitCode_Dict[FmriRecord[0][2]]
            getMRIRecordsSQL = "SELECT * FROM MPRAGEMETA WHERE subjectid LIKE '%_%_{0}'".format(
                processingItemObj.subject_rid)

            mrirecords = self.MatchDBClient.executeAllResults(getMRIRecordsSQL)
            if not mrirecords:
                PipelineLogger.log(
                    'root', 'error',
                    '################################  - Error !!!!! Cannot find any MRI records : {0} - Please check ADNI recs. ################################'
                    .format(processingItemObj.subject_rid))
                return None

            # getMRISecondarySQL = "SELECT * FROM MRILIST WHERE subject LIKE '%_%_{0}'".format(processingItemObj.subject_rid)
            # mriSecondaryRecords = self.MatchDBClient.executeAllResults(getMRISecondarySQL)
            # t_mrirecords = mrirecords
            # for record in mriSecondaryRecords:
            #     distint = 1
            #     for i in t_mrirecords:
            #         if record[7] == i[7] and record[8] == i[8]:
            #             distint = 0
            #     if distint:
            #         mrirecords.append(record)

            matchedT1Recs = []
            for rec in mrirecords:
                if pc.ADNI_visitCode_Dict[rec[2]] == visit_code:
                    matchedT1Recs.append(rec)
            if len(matchedT1Recs) == 0:
                PipelineLogger.log(
                    'root', 'error',
                    'Cannot match visit codes for : {0} - {1} - {2} - Searching based on scan date. +/- 60 days from PET date'
                    .format(processingItemObj.subject_rid,
                            processingItemObj.modality, visit_code))
                pet_date = datetime.strptime(processingItemObj.scan_date,
                                             '%Y-%m-%d')
                sortedRecs = sorted(
                    mrirecords,
                    key=lambda x: abs(
                        datetime.strptime(x[5], '%Y-%m-%d') - pet_date))
                closestDate = [
                    k for k, g in itertools.groupby(
                        sortedRecs,
                        key=lambda x: abs(
                            datetime.strptime(x[5], '%Y-%m-%d') - pet_date))
                ][0]
                PipelineLogger.log(
                    'root', 'error',
                    'PET MRI Matching based on dates - match visit codes for : {0} - {1} - {2} - Distance between MRI/PET : {3} days.'
                    .format(processingItemObj.subject_rid,
                            processingItemObj.modality, visit_code,
                            closestDate))
                closestMatchedRecs = [
                    list(g) for k, g in itertools.groupby(
                        sortedRecs,
                        key=lambda x: abs(
                            datetime.strptime(x[5], '%Y-%m-%d') - pet_date))
                ][0]
                matchedT1Recs = closestMatchedRecs
            if len(matchedT1Recs) == 0:
                PipelineLogger.log(
                    'root', 'error',
                    'Cannot match visit codes for : {0} - {1} - {2}'.format(
                        processingItemObj.subject_rid,
                        processingItemObj.modality, visit_code))
                return None

            matchedT1withScanDescriptions = []
            for rec in matchedT1Recs:
                getScanFromConversionSQL = "SELECT * FROM Conversion WHERE STUDY = '{0}' AND S_IDENTIFIER = '{1}' AND I_IDENTIFIER = '{2}' AND SKIP = 0".format(
                    processingItemObj.study, 'S{0}'.format(rec[7]),
                    'I{0}'.format(rec[8]))
                t1_conversion = self.DBClient.executeAllResults(
                    getScanFromConversionSQL)
                if len(t1_conversion) > 0:
                    matchedT1withScanDescriptions.append(t1_conversion[0])
                else:
                    PipelineLogger.log(
                        'root', 'error',
                        'Corresponding MRI was not found in the system : {0} - {1} - {2}'
                        .format(processingItemObj.subject_rid,
                                'S{0}'.format(rec[7]), 'I{0}'.format(rec[8])))
                    continue
            if len(matchedT1withScanDescriptions) < 1:
                PipelineLogger.log(
                    'root', 'error',
                    'Matched T1s are not in the database. : Matched T1s - \n {0}'
                    .format(matchedT1Recs))
                return None
            else:
                if len(matchedT1withScanDescriptions) == 1:
                    ## ONLY ONE MATCHED T1. GOOD. CHECK IF THE T1 is a good scan type and not a bluff !!!
                    if matchedT1withScanDescriptions[0][
                            3] in pc.ADNI_T1_match_accepted_scantypes:
                        self.addToMatchT1Table(
                            processingItemObj, modalityID,
                            matchedT1withScanDescriptions[0])
                        return matchedT1withScanDescriptions[0]
                    else:
                        PipelineLogger.log(
                            'root', 'error',
                            'Matched T1 is not an accepted scan type. : Matched T1 - \n {0}'
                            .format(matchedT1withScanDescriptions[0]))
                        return None

                else:
                    #### MORE THAN ONE FOUND. SELECT ONE BASED ON SCAN TYPE PRIORITY
                    sortedList = sorted(matchedT1withScanDescriptions,
                                        key=lambda x:
                                        (pc.ADNI_T1_match_scantype_priorityList
                                         .index(x[3]), -x[5]))
                    self.addToMatchT1Table(processingItemObj, modalityID,
                                           sortedList[0])
                    return sortedList[0]
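# The date-based fallback above sorts candidate MRIs by their distance to the
# PET date, then keeps the whole first itertools.groupby group, so ties on the
# same closest date are all retained. A self-contained sketch of that trick:
import itertools
from datetime import datetime

pet_date_example = datetime(2015, 1, 2)
scan_dates = ['2014-11-01', '2015-01-10', '2015-01-10', '2016-03-05']
dist = lambda d: abs(datetime.strptime(d, '%Y-%m-%d') - pet_date_example)
sortedRecs_example = sorted(scan_dates, key=dist)
closest = [list(g) for k, g in itertools.groupby(sortedRecs_example, key=dist)][0]
# closest -> ['2015-01-10', '2015-01-10'] : both scans 8 days away are kept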
    def determineScanType(self, scanTypeRaw, study, rid, sid, iid):
        try:
            return arc.scanTypeDict[scanTypeRaw]
        except KeyError:
            if 'FDG' in scanTypeRaw:
                PipelineLogger.log(
                    'root', 'error',
                    'Scan Type unidentified : {0} -> Close match FDG...'.format(scanTypeRaw))
                return 'FDG'
            if 'PIB' in scanTypeRaw:
                PipelineLogger.log(
                    'root', 'error',
                    'Scan Type unidentified : {0} -> Close match PIB...'.format(scanTypeRaw))
                return 'PIB'
            if 'AV45' in scanTypeRaw or 'AV-45' in scanTypeRaw or 'AV_45' in scanTypeRaw:
                PipelineLogger.log(
                    'root', 'error',
                    'Scan Type unidentified : {0} -> Close match AV45...'.format(scanTypeRaw))
                return 'AV45'
            if 'AV1451' in scanTypeRaw or 'AV-1451' in scanTypeRaw or 'AV_1451' in scanTypeRaw or 'tau' in scanTypeRaw.lower():
                PipelineLogger.log(
                    'root', 'error',
                    'Scan Type unidentified : {0} -> Close match AV1451...'.format(scanTypeRaw))
                return 'AV1451'
            if 'MPRAGE' in scanTypeRaw.upper():
                PipelineLogger.log(
                    'root', 'error',
                    'Scan Type unidentified : {0} -> Close match MPRAGE...'.format(scanTypeRaw))
                return 'MPRAGE'
            else:
                typeFromXML = self.getScanTypeFromXMLs(sc.xmlPath, study, rid, sid, iid)
                if typeFromXML:
                    if 'FDG' in typeFromXML:
                        PipelineLogger.log(
                            'root', 'error',
                            'Scan Type unidentified : {0} -> Close match from XML - FDG...'.format(typeFromXML))
                        return 'FDG'
                    if 'PIB' in typeFromXML:
                        PipelineLogger.log(
                            'root', 'error',
                            'Scan Type unidentified : {0} -> Close match from XML - PIB...'.format(typeFromXML))
                        return 'PIB'
                    if 'AV45' in typeFromXML or 'AV-45' in typeFromXML or 'AV_45' in typeFromXML:
                        PipelineLogger.log(
                            'root', 'error',
                            'Scan Type unidentified : {0} -> Close match from XML - AV45...'.format(typeFromXML))
                        return 'AV45'
                    if 'AV1451' in typeFromXML or 'AV-1451' in typeFromXML or 'AV_1451' in typeFromXML or 'tau' in typeFromXML.lower():
                        PipelineLogger.log(
                            'root', 'error',
                            'Scan Type unidentified : {0} -> Close match from XML - AV1451...'.format(typeFromXML))
                        return 'AV1451'
                else:
                    PipelineLogger.log(
                        'root', 'error',
                        'Scan Type unidentified : {0} -> No match...'.format(scanTypeRaw))
                    return 'unknown'
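# The chain of substring checks above (raw string first, then the XML-derived
# type) could be collapsed into one table-driven pass. A sketch; the substring
# lists mirror the checks in this function rather than any authoritative ADNI
# vocabulary, and the case-insensitive match is slightly broader than the
# original, which only upper-cased for MPRAGE and lower-cased for 'tau':
CLOSE_MATCHES_SKETCH = [
    ('FDG', ('FDG',)),
    ('PIB', ('PIB',)),
    ('AV45', ('AV45', 'AV-45', 'AV_45')),
    ('AV1451', ('AV1451', 'AV-1451', 'AV_1451', 'TAU')),
    ('MPRAGE', ('MPRAGE',)),
]

def closeMatchScanType_sketch(scanTypeRaw):
    upper = scanTypeRaw.upper()
    for canonical, needles in CLOSE_MATCHES_SKETCH:
        if any(n in upper for n in needles):
            return canonical
    return None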