    def processPET(self, processingItemObj, matchT1Path):
        petFileName = '{0}/{1}_{2}{3}{4}{5}_{6}.mnc'.format(
            processingItemObj.converted_folder, processingItemObj.study,
            processingItemObj.subject_rid,
            processingItemObj.scan_date.replace('-', ''),
            processingItemObj.s_identifier, processingItemObj.i_identifier,
            self.getScanType(processingItemObj))
        processedFolder = '{0}/processed'.format(processingItemObj.root_folder)
        logDir = '{0}/logs'.format(processingItemObj.root_folder)
        PipelineLogger.log(
            'manager', 'info',
            'PET processing starting for {0}'.format(petFileName))
        try:
            distutils.dir_util.mkpath(logDir)
        except Exception as e:
            PipelineLogger.log('manager', 'error',
                               'Error in creating log folder \n {0}'.format(e))
            return 0

        id = '{0}{1}{2}{3}'.format(
            processingItemObj.subject_rid,
            processingItemObj.scan_date.replace('-', ''),
            processingItemObj.s_identifier, processingItemObj.i_identifier)
        paramStrd = ast.literal_eval(processingItemObj.parameters)
        paramStrt = ' '.join(
            ['[\"{0}\"]=\"{1}\"'.format(k, v) for k, v in paramStrd.items()])
        paramStr = '({0})'.format(paramStrt)
        petCMD = "source /opt/minc-1.9.15/minc-toolkit-config.sh; Pipelines/ADNI_FDG/ADNI_V1_FDG_Process {0} {1} {2} {3} {4} {5} '{6}' {7} {8}".format(
            id, petFileName, processedFolder,
            matchT1Path, processingItemObj.manual_xfm, logDir, paramStr,
            socket.gethostname(), 50500)
        try:
            processedFolder_del = '{0}/processed_del'.format(
                processingItemObj.root_folder)
            os.rename(processedFolder, processedFolder_del)
            shutil.rmtree(processedFolder_del)
        except Exception as e:
            PipelineLogger.log(
                'manager', 'error',
                'Error in deleting old processing folder. \n {0}'.format(e))
        try:
            distutils.dir_util.mkpath(processedFolder)
        except Exception as e:
            PipelineLogger.log(
                'manager', 'error',
                'Error in creating processing folder. \n {0}'.format(e))
            return 0

        PipelineLogger.log('manager', 'debug', 'Command : {0}'.format(petCMD))
        p = subprocess.Popen(petCMD,
                             shell=True,
                             stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE,
                             executable='/bin/bash')
        out, err = p.communicate()
        PipelineLogger.log('manager', 'debug',
                           'Process Log Output : \n{0}'.format(out))
        PipelineLogger.log('manager', 'debug',
                           'Process Log Err : \n{0}'.format(err))

        QSubJobHandler.submittedJobs[id] = QSubJob(id, '02:00:00',
                                                   processingItemObj, 'fdg')
        return 1
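The parameters column on the processing item holds a Python dict literal; the code above re-renders it as a bash associative-array body before splicing it into the pipeline command. A minimal sketch of that round trip, using a hypothetical stored value:

import ast

raw = "{'blur_x': '4', 'blur_y': '4', 'blur_z': '4'}"  # hypothetical stored value
params = ast.literal_eval(raw)
paramStr = '({0})'.format(
    ' '.join('["{0}"]="{1}"'.format(k, v) for k, v in params.items()))
print(paramStr)  # e.g. (["blur_x"]="4" ["blur_y"]="4" ["blur_z"]="4")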
Example #2
class QSubJobHandler(threading.Thread):
    submittedJobs = {'xxxx': QSubJob('xxxx', '23:59:59', None, 'beast')}
    QUIT = 0

    def checkJobs(self):
        if not self.submittedJobs:
            return False
        else:
            timeNow = datetime.datetime.now()
            for jobID in list(self.submittedJobs):
                job = self.submittedJobs[jobID]
                submitTime = job.submitTime
                startTime = job.startTime
                wallTime = job.wallTime
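                # Evict jobs that have reported completion, or whose elapsed run
                # time since start has exceeded the wall-time allowance.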
                if (job.startTime and job.Start
                        and timeNow - startTime > wallTime) or job.Fin:
                    self.submittedJobs.pop(jobID)
            return True

    def __init__(self):
        threading.Thread.__init__(self)
        self.sock = None
        self.thread_list = []

    def doWork(self, conn):
        data = conn.recv(1024)
        # PipelineLogger.log('manager', 'info', ' Data received - {0}.'.format(data))
        try:
            jobID, status = data.strip().decode('utf-8').rsplit('_', 1)
            PipelineLogger.log(
                'manager', 'info',
                ' ++++++++ QSub Job Handler received JobID - {0}.'.format(
                    jobID))
            if jobID not in self.submittedJobs:
                PipelineLogger.log(
                    'manager', 'error',
                    ' ++++++++ QSub Job Handler unidentified JobID - {0}.'.
                    format(jobID))
            else:
                jobReporter = QSubJobStatusReporter()
                if status == 'Start':
                    PipelineLogger.log(
                        'manager', 'info',
                        ' ++++++++ JobID - {0} -> Status - {1}.'.format(
                            jobID, status))
                    self.submittedJobs[jobID].Start = True
                    self.submittedJobs[
                        jobID].startTime = datetime.datetime.now()
                elif status in ('Success', 'Fail'):
                    PipelineLogger.log(
                        'manager', 'info',
                        ' ++++++++ JobID - {0} -> Status - {1}.'.format(
                            jobID, status))
                    self.submittedJobs[jobID].Fin = True
                    jobReporter.setStatus(self.submittedJobs[jobID], status)
                else:
                    PipelineLogger.log(
                        'manager', 'info',
                        ' ++++++++ JobID - {0} -> Status (Unhandled)- {1}.'.
                        format(jobID, status))
        except Exception as e:
            PipelineLogger.log('manager', 'error',
                               ' Data received - {0} - {1}.'.format(data, e))

    def run(self):
        try:
            self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            self.sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
            self.sock.bind((socket.gethostname(), 50500))
            self.sock.settimeout(300)
            self.sock.listen(1000)
            PipelineLogger.log('manager', 'info',
                               ' ++++++++ QSub Job Handler started.')
            PipelineLogger.log(
                'manager', 'info',
                ' ++++++++ QSub Job Handler listening in Host : {0} at Port : {1}.'
                .format(socket.gethostname(), 50500))
            while not self.QUIT and self.checkJobs():
                try:
                    conn = self.sock.accept()[0]
                    thread = threading.Thread(target=self.doWork,
                                              args=(conn, ))
                    thread.start()
                except socket.timeout:
                    continue
        except Exception as e:
            PipelineLogger.log('manager', 'exception', e)
            PipelineLogger.log(
                'manager', 'error',
                'Cannot create QSubJobHandler... Will not listen for jobs.')
            del self.sock
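Both processPET() variants pass socket.gethostname() and port 50500 on the pipeline command line, and doWork() splits each incoming message on its last underscore into a job id and a status (Start, Success or Fail). A minimal client-side sketch of that report; the helper name is an assumption, not part of the source:

import socket

def report_status(job_id, status, host=socket.gethostname(), port=50500):
    # Hypothetical helper: send '<jobID>_<status>' to the running QSubJobHandler.
    with socket.create_connection((host, port), timeout=10) as sock:
        sock.sendall('{0}_{1}'.format(job_id, status).encode('utf-8'))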
Example #3
    def processPET(self, processingItemObj, matchT1Path):
        petFileName = '{0}/{1}_{2}{3}{4}{5}_{6}.mnc'.format(
            processingItemObj.converted_folder, processingItemObj.study,
            processingItemObj.subject_rid,
            processingItemObj.scan_date.replace('-', ''),
            processingItemObj.s_identifier, processingItemObj.i_identifier,
            self.getScanType(processingItemObj))
        processedFolder = '{0}/processed'.format(processingItemObj.root_folder)
        logDir = '{0}/logs'.format(processingItemObj.root_folder)
        PipelineLogger.log(
            'manager', 'info',
            'PET processing starting for {0}'.format(petFileName))
        try:
            distutils.dir_util.mkpath(logDir)
        except Exception as e:
            PipelineLogger.log('manager', 'error',
                               'Error in creating log folder \n {0}'.format(e))
            return 0

        id = '{0}{1}{2}{3}'.format(
            processingItemObj.subject_rid,
            processingItemObj.scan_date.replace('-', ''),
            processingItemObj.s_identifier, processingItemObj.i_identifier)

        ### This section is new for ADNI pre-processing: per-scanner-type blurring.
        ### Only required if the images are acquired on different scanners and
        ### need to be brought to the same PSF.
        blur_x, blur_y, blur_z = self.PETHelper.getBlurringParams(
            processingItemObj)
        ### End pre-processing.
        if not blur_x:
            PipelineLogger.log(
                'manager', 'error',
                'Error in identifying scanner specific blurring - {0}'.format(
                    petFileName))
            return 0
        paramStrd = "{{'blur_x':'{0}', 'blur_y':'{1}', 'blur_z':'{2}'}}".format(
            blur_x, blur_y, blur_z)
        self.set_new_param_str(
            '{0}_{1}_Pipeline'.format(processingItemObj.study,
                                      processingItemObj.modality),
            processingItemObj.table_id, paramStrd)
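        # paramStrd above is a Python dict literal; parse it back and render it
        # as a bash associative-array body for the pipeline command.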
        paramStrd = ast.literal_eval(paramStrd)
        paramStrt = ' '.join(
            ['[\"{0}\"]=\"{1}\"'.format(k, v) for k, v in paramStrd.items()])
        paramStr = '({0})'.format(paramStrt)
        petCMD = "source /opt/minc-1.9.15/minc-toolkit-config.sh; Pipelines/DIAN_PIB/DIAN_V1_PIB_Process {0} {1} {2} {3} {4} {5} '{6}' {7} {8}".format(
            id, petFileName, processedFolder, matchT1Path,
            'auto' if processingItemObj.manual_xfm == ''
            else processingItemObj.manual_xfm, logDir, paramStr,
            socket.gethostname(), 50500)

        try:
            processedFolder_del = '{0}/processed_del'.format(
                processingItemObj.root_folder)
            os.rename(processedFolder, processedFolder_del)
            shutil.rmtree(processedFolder_del)
        except Exception as e:
            PipelineLogger.log(
                'manager', 'error',
                'Error in deleting old processing folder. \n {0}'.format(e))
        try:
            distutils.dir_util.mkpath(processedFolder)
        except Exception as e:
            PipelineLogger.log(
                'manager', 'error',
                'Error in creating processing folder. \n {0}'.format(e))
            return 0

        PipelineLogger.log('manager', 'debug', 'Command : {0}'.format(petCMD))
        p = subprocess.Popen(petCMD,
                             shell=True,
                             stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE,
                             executable='/bin/bash')
        out, err = p.communicate()
        PipelineLogger.log('manager', 'debug',
                           'Process Log Output : \n{0}'.format(out))
        PipelineLogger.log('manager', 'debug',
                           'Process Log Err : \n{0}'.format(err))

        QSubJobHandler.submittedJobs[id] = QSubJob(id, '23:00:00',
                                                   processingItemObj, 'pib')
        return 1
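A rough driver sketch, assuming the handler thread is started before items are processed (the surrounding driver code is not part of this listing, so this is illustrative only):

handler = QSubJobHandler()
handler.start()
# ... call processPET() for each processing item; successful submissions add
# entries to QSubJobHandler.submittedJobs keyed by the generated job id ...
handler.join()  # run() returns once QUIT is set or submittedJobs empties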