Example #1
    def setJobDef(self, data):
        """ set values for a job object from a dictionary data
        which is usually from cgi messages from panda server """

        self.jobId = str(data.get('PandaID', '0'))
        self.taskID = data.get('taskID', '')

        self.outputFilesXML = "OutputFiles-%s.xml" % (self.jobId)

        self.homePackage = data.get('homepackage', '')
        self.trf = data.get('transformation', '')

        try:
            self.jobDefinitionID = int(data.get('jobDefinitionID', ''))
        except (TypeError, ValueError):
            # missing or non-numeric jobDefinitionID
            self.jobDefinitionID = ''

        # dict.get() cannot raise here, so no try/except is needed
        self.cloud = data.get('cloud', '')

        # get the input files
        inFiles = data.get('inFiles', '')
        self.inFiles = inFiles.split(",")

        realDatasetsIn = data.get('realDatasetsIn', '')
        self.realDatasetsIn = realDatasetsIn.split(",")

        filesizeIn = data.get('fsize', '')
        self.filesizeIn = filesizeIn.split(",")

        checksumIn = data.get('checksum', '')
        self.checksumIn = checksumIn.split(",")

        dispatchDblock = data.get('dispatchDblock', '')
        self.dispatchDblock = dispatchDblock.split(",")

        prodDBlocks = data.get('prodDBlocks', '')
        self.prodDBlocks = prodDBlocks.split(",")

        prodDBlockToken = data.get('prodDBlockToken', '')
        self.prodDBlockToken = prodDBlockToken.split(",")

        prodDBlockTokenForOutput = data.get('prodDBlockTokenForOutput', '')
        self.prodDBlockTokenForOutput = prodDBlockTokenForOutput.split(",")

        dispatchDBlockToken = data.get('dispatchDBlockToken', '')
        self.dispatchDBlockToken = dispatchDBlockToken.split(",")

        dispatchDBlockTokenForOut = data.get('dispatchDBlockTokenForOut', '')
        self.dispatchDBlockTokenForOut = dispatchDBlockTokenForOut.split(",")

        destinationDBlockToken = data.get('destinationDBlockToken', '')
        self.destinationDBlockToken = destinationDBlockToken.split(",")

        self.ddmEndPointIn = data.get(
            'ddmEndPointIn',
            '').split(',') if data.get('ddmEndPointIn') else []
        self.ddmEndPointOut = data.get(
            'ddmEndPointOut',
            '').split(',') if data.get('ddmEndPointOut') else []

        self.cloneJob = data.get('cloneJob', '')

        self.logFile = data.get('logFile', '')

        # use the log GUID from the server if supplied, otherwise generate one
        self.tarFileGuid = data.get('logGUID', pUtil.getGUID())

        self.prodUserID = data.get('prodUserID', '')

        self.credname = data.get('credname', 'None')
        self.myproxy = data.get('myproxy', 'None')

        outFiles = data.get('outFiles', '')

        self.attemptNr = int(data.get('attemptNr', -1))

        if data.has_key('GUID'):
            self.inFilesGuids = data['GUID'].split(",")
        else:
            self.inFilesGuids = []

        if data.has_key('processingType'):
            self.processingType = str(data['processingType'])
#            self.processingType = 'nightlies'
        else:
            # use default
            pass

        # Event Service variables
        if data.has_key('eventService'):
            if data.get('eventService', '').lower() == "true":
                self.eventService = True
            else:
                self.eventService = False
            pUtil.tolog("eventService = %s" % str(self.eventService))
        else:
            pUtil.tolog("Normal job (not an eventService job)")
        if data.has_key('eventRanges'):
            self.eventRanges = data.get('eventRanges', None)
        if data.has_key('jobsetID'):
            self.jobsetID = data.get('jobsetID', None)
            pUtil.tolog("jobsetID=%s" % (self.jobsetID))
        if not self.eventService and self.processingType == "evtest":
            pUtil.tolog("Turning on Event Service for processing type = %s" %
                        (self.processingType))
            self.eventService = True

        # Event Service Merge variables
        if data.has_key('eventServiceMerge'):
            if data.get('eventServiceMerge', '').lower() == "true":
                self.eventServiceMerge = True
            else:
                self.eventServiceMerge = False
            pUtil.tolog("eventServiceMerge = %s" % str(self.eventServiceMerge))

        # Event Service merge job
        if self.workdir and data.has_key('eventServiceMerge') and \
                data['eventServiceMerge'].lower() == "true":
            if data.has_key('writeToFile'):
                writeToFile = data['writeToFile']
                esFileDictionary, orderedFnameList = pUtil.createESFileDictionary(
                    writeToFile)
                pUtil.tolog("esFileDictionary=%s" % (esFileDictionary))
                pUtil.tolog("orderedFnameList=%s" % (orderedFnameList))
                if esFileDictionary != {}:
                    ec, fnames = pUtil.writeToInputFile(
                        self.workdir, esFileDictionary, orderedFnameList)
                    if ec == 0:
                        data['jobPars'] = pUtil.updateJobPars(
                            data['jobPars'], fnames)

        # HPC job status
        if data.has_key('mode'):
            self.mode = data.get("mode", None)
        if data.has_key('hpcStatus'):
            self.hpcStatus = data.get('hpcStatus', None)

#        self.eventRangeID = data.get('eventRangeID', None)
#        self.startEvent = data.get('startEvent', None)
#        self.lastEvent = data.get('lastEvent', None)
#        pUtil.tolog("eventRangeID = %s" % str(self.eventRangeID))
#        pUtil.tolog("startEvent = %s" % str(self.startEvent))
#        pUtil.tolog("lastEvent = %s" % str(self.lastEvent))
#        if data.has_key('lfn'):
#            self.lfn = data['lfn'].split(",")
#        else:
#            self.lfn = []
#        if data.has_key('guid'):
#            self.guid = data['guid'].split(",")
#        else:
#            self.guid = []

        # Rucio scopes
        if data.has_key('scopeIn'):
            self.scopeIn = data['scopeIn'].split(",")
        else:
            self.scopeIn = []
        if data.has_key('scopeOut'):
            self.scopeOut = data['scopeOut'].split(",")
        else:
            self.scopeOut = []
        if data.has_key('scopeLog'):
            self.scopeLog = data['scopeLog'].split(",")
        else:
            self.scopeLog = []

        self.maxCpuCount = int(data.get('maxCpuCount', 0))
        self.transferType = data.get('transferType', '')
        #PN        self.transferType = 'direct'

        if data.has_key('maxDiskCount'):
            _tmp = int(data['maxDiskCount'])
            if _tmp != 0 and _tmp != self.maxDiskCount:
                self.maxDiskCount = _tmp
        else:
            # use default
            pass

        if data.has_key('cmtConfig'):
            self.cmtconfig = str(data['cmtConfig'])
        else:
            # use default
            pass

        if data.has_key('coreCount'):
            self.coreCount = str(data['coreCount'])
        else:
            # use default
            pass
        # Overwrite the coreCount value with ATHENA_PROC_NUMBER if it is set
        if os.environ.has_key('ATHENA_PROC_NUMBER'):
            try:
                self.coreCount = int(os.environ['ATHENA_PROC_NUMBER'])
            except Exception as e:
                pUtil.tolog(
                    "ATHENA_PROC_NUMBER is not properly set: %s (will use existing job.coreCount value)"
                    % (e))
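
A note on the comma-split convention used throughout setJobDef: the PanDA server ships list-valued fields as flat comma-separated strings, and in Python ''.split(',') yields [''] rather than [], which is why the ddmEndPointIn/ddmEndPointOut reads guard the split behind a truth test. A minimal, self-contained sketch (the payload values are hypothetical):

    # Hypothetical server payload; the keys match those read by setJobDef.
    data = {'inFiles': 'EVNT.01.pool.root,EVNT.02.pool.root', 'ddmEndPointIn': ''}

    print(data.get('inFiles', '').split(','))   # ['EVNT.01.pool.root', 'EVNT.02.pool.root']

    # Pitfall: splitting an empty string gives a one-element list, not [].
    print(''.split(','))                        # ['']

    # The guarded form used for the ddmEndPoint fields avoids that:
    ddmEndPointIn = data.get('ddmEndPointIn', '').split(',') if data.get('ddmEndPointIn') else []
    print(ddmEndPointIn)                        # []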
Example #2
    def setJobDef(self, data):
        """ set values for a job object from a dictionary data
        which is usually from cgi messages from panda server """

        self.jobId = str(data.get('PandaID', '0'))
        self.taskID = str(data.get('taskID', ''))

        self.outputFilesXML = "OutputFiles-%s.xml" % self.jobId

        self.homePackage = data.get('homepackage', '')
        self.trf = data.get('transformation', '')

        try:
            self.jobDefinitionID = int(data.get('jobDefinitionID', ''))
        except (TypeError, ValueError):
            # missing or non-numeric jobDefinitionID
            self.jobDefinitionID = ''

        self.cloud = data.get('cloud', '')

        # get the input files
        self.inFiles = data.get('inFiles', '').split(',')

        # remove zip:// from input files so the mover can stage them in,
        # but record them in inputZipFiles for special handling
        for i in range(len(self.inFiles)):
            if self.inFiles[i].startswith("zip://"):
                self.inFiles[i] = self.inFiles[i].replace("zip://", "")
                self.inputZipFiles.append(self.inFiles[i])

        self.realDatasetsIn = data.get('realDatasetsIn', '').split(',')
        self.filesizeIn = data.get('fsize', '').split(',')
        self.checksumIn = data.get('checksum', '').split(',')

        self.dispatchDblock = data.get('dispatchDblock', '').split(',')
        self.prodDBlocks = data.get('prodDBlocks', '').split(',')

        self.prodDBlockToken = data.get('prodDBlockToken', '').split(',')
        self.prodDBlockTokenForOutput = data.get('prodDBlockTokenForOutput',
                                                 '').split(',')

        self.dispatchDBlockToken = data.get('dispatchDBlockToken',
                                            '').split(',')
        self.dispatchDBlockTokenForOut = data.get('dispatchDBlockTokenForOut',
                                                  '').split(',')

        self.destinationDBlockToken = data.get('destinationDBlockToken',
                                               '').split(',')

        self.ddmEndPointIn = data.get(
            'ddmEndPointIn',
            '').split(',') if data.get('ddmEndPointIn') else []
        self.ddmEndPointOut = data.get(
            'ddmEndPointOut',
            '').split(',') if data.get('ddmEndPointOut') else []
        self.allowNoOutput = data.get(
            'allowNoOutput',
            '').split(',') if data.get('allowNoOutput') else []

        self.altStageOut = data.get('altStageOut', '')  # on, off, force
        self.cloneJob = data.get('cloneJob', '')
        self.logFile = data.get('logFile', '')
        self.prodUserID = data.get('prodUserID', '')

        self.credname = data.get('credname', 'None')
        self.myproxy = data.get('myproxy', 'None')

        self.attemptNr = int(data.get('attemptNr', -1))

        if data.has_key('GUID'):
            self.inFilesGuids = data['GUID'].split(",")
        else:
            self.inFilesGuids = []

        if data.has_key('processingType'):
            self.processingType = str(data['processingType'])
#            self.processingType = 'nightlies'
        else:
            # use default
            pass

        # Event Service variables
        self.eventService = data.get('eventService', '').lower() == "true"
        self.outputZipName = data.get('outputZipName', None)
        self.outputZipBucketID = data.get('outputZipBucketID', None)

        if self.eventService:
            pUtil.tolog("eventService = %s" % self.eventService)
        else:
            pUtil.tolog("Normal job (not an eventService job)")

        self.eventRanges = data.get('eventRanges')
        self.jobsetID = str(data.get('jobsetID'))

        pUtil.tolog("jobsetID=%s" % self.jobsetID)

        self.pandaProxySecretKey = data.get('pandaProxySecretKey')

        if not self.eventService and self.processingType == "evtest":
            pUtil.tolog("Turning on Event Service for processing type = %s" %
                        self.processingType)
            self.eventService = True

        # Event Service Merge variables
        if data.has_key('eventServiceMerge'):
            if data.get('eventServiceMerge', '').lower() == "true":
                self.eventServiceMerge = True
            else:
                self.eventServiceMerge = False
            pUtil.tolog("eventServiceMerge = %s" % str(self.eventServiceMerge))

        # Event Service merge job
        if self.workdir and data.has_key('writeToFile'):
            # previously this block also required:
            #   data.has_key('eventServiceMerge') and data['eventServiceMerge'].lower() == "true"
            writeToFile = data['writeToFile']
            esFileDictionary, orderedFnameList = pUtil.createESFileDictionary(
                writeToFile)
            #pUtil.tolog("esFileDictionary=%s" % (esFileDictionary))
            #pUtil.tolog("orderedFnameList=%s" % (orderedFnameList))
            if esFileDictionary != {}:
                if data.has_key('eventServiceMerge') and data[
                        'eventServiceMerge'].lower() == "true":
                    eventservice = True
                else:
                    eventservice = False
                ec, fnames = pUtil.writeToInputFile(self.workdir,
                                                    esFileDictionary,
                                                    orderedFnameList,
                                                    eventservice)
                if ec == 0:
                    data['jobPars'] = pUtil.updateJobPars(
                        data['jobPars'], fnames)

        # Yoda job status and accounting info
        if data.has_key('mode'):
            self.mode = data.get("mode", None)
        if data.has_key('hpcStatus'):
            self.hpcStatus = data.get('hpcStatus', None)
        if data.has_key('yodaJobMetrics'):
            self.yodaJobMetrics = data.get('yodaJobMetrics', None)
        if self.yodaJobMetrics:
            self.yodaJobMetrics = json.loads(self.yodaJobMetrics)
        if data.has_key('HPCJobId'):
            self.HPCJobId = data.get('HPCJobId', None)


#        self.eventRangeID = data.get('eventRangeID', None)
#        self.startEvent = data.get('startEvent', None)
#        self.lastEvent = data.get('lastEvent', None)
#        pUtil.tolog("eventRangeID = %s" % str(self.eventRangeID))
#        pUtil.tolog("startEvent = %s" % str(self.startEvent))
#        pUtil.tolog("lastEvent = %s" % str(self.lastEvent))
#        if data.has_key('lfn'):
#            self.lfn = data['lfn'].split(",")
#        else:
#            self.lfn = []
#        if data.has_key('guid'):
#            self.guid = data['guid'].split(",")
#        else:
#            self.guid = []

        # Rucio scopes
        if data.has_key('scopeIn'):
            self.scopeIn = data['scopeIn'].split(",")
        else:
            self.scopeIn = []
        if data.has_key('scopeOut'):
            self.scopeOut = data['scopeOut'].split(",")
        else:
            self.scopeOut = []
        if data.has_key('scopeLog'):
            self.scopeLog = data['scopeLog'].split(",")
        else:
            self.scopeLog = []

        self.maxCpuCount = int(data.get('maxCpuCount', 0))
        self.transferType = data.get('transferType', '')

        if data.has_key('maxDiskCount'):
            _tmp = int(data['maxDiskCount'])
            if _tmp != 0 and _tmp != self.maxDiskCount:
                self.maxDiskCount = _tmp
        else:
            # use default
            pass

        if data.has_key('cmtConfig'):
            self.cmtconfig = str(data['cmtConfig'])
        else:
            # use default
            pass

        if data.has_key('coreCount'):
            self.coreCount = str(data['coreCount'])
        else:
            # use default
            pass
        # Overwrite the coreCount value with ATHENA_PROC_NUMBER if it is set
        if os.environ.has_key('ATHENA_PROC_NUMBER'):
            try:
                self.coreCount = int(os.environ['ATHENA_PROC_NUMBER'])
            except Exception as e:
                pUtil.tolog(
                    "ATHENA_PROC_NUMBER is not properly set: %s (will use existing job.coreCount value)"
                    % (e))
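
The zip:// handling introduced in this version can be exercised on its own. A small sketch that mirrors the loop above (the file names are hypothetical):

    # Hypothetical result of data.get('inFiles', '').split(',')
    inFiles = ['zip://EVNT.archive.zip', 'EVNT.03.pool.root']
    inputZipFiles = []

    # Strip the zip:// scheme so the mover can stage the file in,
    # but record the name for special handling later.
    for i in range(len(inFiles)):
        if inFiles[i].startswith('zip://'):
            inFiles[i] = inFiles[i].replace('zip://', '')
            inputZipFiles.append(inFiles[i])

    print(inFiles)          # ['EVNT.archive.zip', 'EVNT.03.pool.root']
    print(inputZipFiles)    # ['EVNT.archive.zip']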
Example #3
    def setJobDef(self, data):
        """ set values for a job object from a dictionary data
        which is usually from cgi messages from panda server """

        self.jobId = data.get('PandaID', '0')
        self.taskID = data.get('taskID', '')

        self.outputFilesXML = "OutputFiles-%s.xml" % (self.jobId)

        self.homePackage = data.get('homepackage', '')
        self.trf = data.get('transformation', '')

        try:
            self.jobDefinitionID = int(data.get('jobDefinitionID', ''))
        except (TypeError, ValueError):
            # missing or non-numeric jobDefinitionID
            self.jobDefinitionID = ''

        # dict.get() cannot raise here, so no try/except is needed
        self.cloud = data.get('cloud', '')

        # get the input files
        inFiles = data.get('inFiles', '')
        self.inFiles = inFiles.split(",")

        realDatasetsIn = data.get('realDatasetsIn', '')
        self.realDatasetsIn = realDatasetsIn.split(",")

        filesizeIn = data.get('fsize', '')
        self.filesizeIn = filesizeIn.split(",")

        checksumIn = data.get('checksum', '')
        self.checksumIn = checksumIn.split(",")

        dispatchDblock = data.get('dispatchDblock', '')
        self.dispatchDblock = dispatchDblock.split(",")

        prodDBlocks = data.get('prodDBlocks', '')
        self.prodDBlocks = prodDBlocks.split(",")

        prodDBlockToken = data.get('prodDBlockToken', '')
        self.prodDBlockToken = prodDBlockToken.split(",")

        prodDBlockTokenForOutput = data.get('prodDBlockTokenForOutput', '')
        self.prodDBlockTokenForOutput = prodDBlockTokenForOutput.split(",")

        dispatchDBlockToken = data.get('dispatchDBlockToken', '')
        self.dispatchDBlockToken = dispatchDBlockToken.split(",")

        dispatchDBlockTokenForOut = data.get('dispatchDBlockTokenForOut', '')
        self.dispatchDBlockTokenForOut = dispatchDBlockTokenForOut.split(",")

        destinationDBlockToken = data.get('destinationDBlockToken', '')
        self.destinationDBlockToken = destinationDBlockToken.split(",")

        self.ddmEndPointIn = data.get('ddmEndPointIn', '').split(',') if data.get('ddmEndPointIn') else []
        self.ddmEndPointOut = data.get('ddmEndPointOut', '').split(',') if data.get('ddmEndPointOut') else []

        self.cloneJob = data.get('cloneJob', '')

        self.logFile = data.get('logFile', '')

        self.prodUserID = data.get('prodUserID', '')

        self.credname = data.get('credname', 'None')
        self.myproxy = data.get('myproxy', 'None')

        outFiles = data.get('outFiles', '')

        self.attemptNr = int(data.get('attemptNr', -1))

        if data.has_key('GUID'):
            self.inFilesGuids = data['GUID'].split(",")
        else:
            self.inFilesGuids = []

        if data.has_key('processingType'):
            self.processingType = str(data['processingType'])
#            self.processingType = 'nightlies'
        else:
            # use default
            pass

        # Event Service variables
        if data.has_key('eventService'):
            if data.get('eventService', '').lower() == "true":
                self.eventService = True
            else:
                self.eventService = False
            pUtil.tolog("eventService = %s" % str(self.eventService))
        else:
            pUtil.tolog("Normal job (not an eventService job)")
        if data.has_key('eventRanges'):
            self.eventRanges = data.get('eventRanges', None)
        if data.has_key('jobsetID'):
            self.jobsetID = data.get('jobsetID', None)
            pUtil.tolog("jobsetID=%s" % (self.jobsetID))
        if not self.eventService and self.processingType == "evtest":
            pUtil.tolog("Turning on Event Service for processing type = %s" % (self.processingType))
            self.eventService = True

        # Event Service Merge variables
        if data.has_key('eventServiceMerge'):
            if data.get('eventServiceMerge', '').lower() == "true":
                self.eventServiceMerge = True
            else:
                self.eventServiceMerge = False
            pUtil.tolog("eventServiceMerge = %s" % str(self.eventServiceMerge))

        # Event Service merge job
        if self.workdir and data.has_key('eventServiceMerge') and data['eventServiceMerge'].lower() == "true":
            if data.has_key('writeToFile'):
                writeToFile = data['writeToFile']
                esFileDictionary, orderedFnameList = pUtil.createESFileDictionary(writeToFile)
                pUtil.tolog("esFileDictionary=%s" % (esFileDictionary))
                pUtil.tolog("orderedFnameList=%s" % (orderedFnameList))
                if esFileDictionary != {}:
                    ec, fnames = pUtil.writeToInputFile(self.workdir, esFileDictionary, orderedFnameList)
                    if ec == 0:
                        data['jobPars'] = pUtil.updateJobPars(data['jobPars'], fnames)

        # HPC job status
        if data.has_key('mode'):
            self.mode = data.get("mode", None)
        if data.has_key('hpcStatus'):
            self.hpcStatus = data.get('hpcStatus', None)

#        self.eventRangeID = data.get('eventRangeID', None)
#        self.startEvent = data.get('startEvent', None)
#        self.lastEvent = data.get('lastEvent', None)
#        pUtil.tolog("eventRangeID = %s" % str(self.eventRangeID))
#        pUtil.tolog("startEvent = %s" % str(self.startEvent))
#        pUtil.tolog("lastEvent = %s" % str(self.lastEvent))
#        if data.has_key('lfn'):
#            self.lfn = data['lfn'].split(",")
#        else:
#            self.lfn = []
#        if data.has_key('guid'):
#            self.guid = data['guid'].split(",")
#        else:
#            self.guid = []

        # Rucio scopes
        if data.has_key('scopeIn'):
            self.scopeIn = data['scopeIn'].split(",")
        else:
            self.scopeIn = []
        if data.has_key('scopeOut'):
            self.scopeOut = data['scopeOut'].split(",")
        else:
            self.scopeOut = []
        if data.has_key('scopeLog'):
            self.scopeLog = data['scopeLog'].split(",")
        else:
            self.scopeLog = []

        self.maxCpuCount = int(data.get('maxCpuCount', 0))
        self.transferType = data.get('transferType', '')
#PN        self.transferType = 'direct'

        if data.has_key('maxDiskCount'):
            _tmp = int(data['maxDiskCount'])
            if _tmp != 0 and _tmp != self.maxDiskCount:
                self.maxDiskCount = _tmp
        else:
            # use default
            pass

        if data.has_key('cmtConfig'):
            self.cmtconfig = str(data['cmtConfig'])
        else:
            # use default
            pass

        if data.has_key('coreCount'):
            self.coreCount = str(data['coreCount'])
        else:
            # use default
            pass
        # Overwrite the coreCount value with ATHENA_PROC_NUMBER if it is set
        if os.environ.has_key('ATHENA_PROC_NUMBER'):
            try:
                self.coreCount = int(os.environ['ATHENA_PROC_NUMBER'])
            except Exception, e:
                pUtil.tolog("ATHENA_PROC_NUMBER is not properly set: %s (will use existing job.coreCount value)" % (e))
Example #4
    def setJobDef(self, data):
        """ set values for a job object from a dictionary data
        which is usually from cgi messages from panda server """

        self.jobId = str(data.get('PandaID', '0'))
        self.taskID = str(data.get('taskID', ''))

        self.outputFilesXML = "OutputFiles-%s.xml" % self.jobId

        self.homePackage = data.get('homepackage', '')
        self.trf = data.get('transformation', '')

        try:
            self.jobDefinitionID = int(data.get('jobDefinitionID', ''))
        except (TypeError, ValueError):
            # missing or non-numeric jobDefinitionID
            self.jobDefinitionID = ''

        self.cloud = data.get('cloud', '')

        # get the input files
        self.inFiles = data.get('inFiles', '').split(',')

        # remove zip:// from input files so the mover can stage them in,
        # but record them in inputZipFiles for special handling
        for i in range(len(self.inFiles)):
            if self.inFiles[i].startswith("zip://"):
                self.inFiles[i] = self.inFiles[i].replace("zip://", "")
                self.inputZipFiles.append(self.inFiles[i])

        self.realDatasetsIn = data.get('realDatasetsIn', '').split(',')
        self.filesizeIn = data.get('fsize', '').split(',')
        self.checksumIn = data.get('checksum', '').split(',')

        self.dispatchDblock = data.get('dispatchDblock', '').split(',')
        self.prodDBlocks = data.get('prodDBlocks', '').split(',')

        self.prodDBlockToken = data.get('prodDBlockToken', '').split(',')
        self.prodDBlockTokenForOutput = data.get('prodDBlockTokenForOutput', '').split(',')

        self.dispatchDBlockToken = data.get('dispatchDBlockToken', '').split(',')
        self.dispatchDBlockTokenForOut = data.get('dispatchDBlockTokenForOut', '').split(',')

        self.destinationDBlockToken = data.get('destinationDBlockToken', '').split(',')

        self.ddmEndPointIn = data.get('ddmEndPointIn', '').split(',') if data.get('ddmEndPointIn') else []
        self.ddmEndPointOut = data.get('ddmEndPointOut', '').split(',') if data.get('ddmEndPointOut') else []
        self.allowNoOutput = data.get('allowNoOutput', '').split(',') if data.get('allowNoOutput') else []

        self.altStageOut = data.get('altStageOut', '') # on, off, force
        self.cloneJob = data.get('cloneJob', '')
        self.logFile = data.get('logFile', '')
        self.prodUserID = data.get('prodUserID', '')

        self.credname = data.get('credname', 'None')
        self.myproxy = data.get('myproxy', 'None')

        self.attemptNr = int(data.get('attemptNr', -1))

        if data.has_key('GUID'):
            self.inFilesGuids = data['GUID'].split(",")
        else:
            self.inFilesGuids = []

        if data.has_key('processingType'):
            self.processingType = str(data['processingType'])
#            self.processingType = 'nightlies'
        else:
            # use default
            pass

        # Event Service variables
        self.eventService = data.get('eventService', '').lower() == "true"
        self.outputZipName = data.get('outputZipName', None)
        self.outputZipBucketID = data.get('outputZipBucketID', None)

        if self.eventService:
            pUtil.tolog("eventService = %s" % self.eventService)
        else:
            pUtil.tolog("Normal job (not an eventService job)")

        self.eventRanges = data.get('eventRanges')
        self.jobsetID = str(data.get('jobsetID'))

        pUtil.tolog("jobsetID=%s" % self.jobsetID)

        self.pandaProxySecretKey = data.get('pandaProxySecretKey')

        if not self.eventService and self.processingType == "evtest":
            pUtil.tolog("Turning on Event Service for processing type = %s" % self.processingType)
            self.eventService = True

        # Event Service Merge variables
        if data.has_key('eventServiceMerge'):
            if data.get('eventServiceMerge', '').lower() == "true":
                self.eventServiceMerge = True
            else:
                self.eventServiceMerge = False
            pUtil.tolog("eventServiceMerge = %s" % str(self.eventServiceMerge))

        # Event Service merge job
        if self.workdir and data.has_key('writeToFile'):
            # previously this block also required:
            #   data.has_key('eventServiceMerge') and data['eventServiceMerge'].lower() == "true"
            writeToFile = data['writeToFile']
            esFileDictionary, orderedFnameList = pUtil.createESFileDictionary(writeToFile)
            #pUtil.tolog("esFileDictionary=%s" % (esFileDictionary))
            #pUtil.tolog("orderedFnameList=%s" % (orderedFnameList))
            if esFileDictionary != {}:
                if data.has_key('eventServiceMerge') and data['eventServiceMerge'].lower() == "true":
                    eventservice = True
                else:
                    eventservice = False
                ec, fnames = pUtil.writeToInputFile(self.workdir, esFileDictionary, orderedFnameList, eventservice)
                if ec == 0:
                    data['jobPars'] = pUtil.updateJobPars(data['jobPars'], fnames)

        # Yoda job status and accounting info
        if data.has_key('mode'):
            self.mode = data.get("mode", None)
        if data.has_key('hpcStatus'):
            self.hpcStatus = data.get('hpcStatus', None)
        if data.has_key('yodaJobMetrics'):
            self.yodaJobMetrics = data.get('yodaJobMetrics', None)
        if self.yodaJobMetrics:
            self.yodaJobMetrics = json.loads(self.yodaJobMetrics)
        if data.has_key('HPCJobId'):
            self.HPCJobId = data.get('HPCJobId', None)

#        self.eventRangeID = data.get('eventRangeID', None)
#        self.startEvent = data.get('startEvent', None)
#        self.lastEvent = data.get('lastEvent', None)
#        pUtil.tolog("eventRangeID = %s" % str(self.eventRangeID))
#        pUtil.tolog("startEvent = %s" % str(self.startEvent))
#        pUtil.tolog("lastEvent = %s" % str(self.lastEvent))
#        if data.has_key('lfn'):
#            self.lfn = data['lfn'].split(",")
#        else:
#            self.lfn = []
#        if data.has_key('guid'):
#            self.guid = data['guid'].split(",")
#        else:
#            self.guid = []

        # Rucio scopes
        if data.has_key('scopeIn'):
            self.scopeIn = data['scopeIn'].split(",")
        else:
            self.scopeIn = []
        if data.has_key('scopeOut'):
            self.scopeOut = data['scopeOut'].split(",")
        else:
            self.scopeOut = []
        if data.has_key('scopeLog'):
            self.scopeLog = data['scopeLog'].split(",")
        else:
            self.scopeLog = []

        self.maxCpuCount = int(data.get('maxCpuCount', 0))
        self.transferType = data.get('transferType', '')
#PN        self.transferType = 'direct'
#        self.transferType = 'fax'

        if data.has_key('maxDiskCount'):
            _tmp = int(data['maxDiskCount'])
            if _tmp != 0 and _tmp != self.maxDiskCount:
                self.maxDiskCount = _tmp
        else:
            # use default
            pass

        if data.has_key('cmtConfig'):
            self.cmtconfig = str(data['cmtConfig'])
        else:
            # use default
            pass

        if data.has_key('coreCount'):
            self.coreCount = str(data['coreCount'])
        else:
            # use default
            pass
        # Overwrite the coreCount value with ATHENA_PROC_NUMBER if it is set
        if os.environ.has_key('ATHENA_PROC_NUMBER'):
            try:
                self.coreCount = int(os.environ['ATHENA_PROC_NUMBER'])
            except Exception as e:
                pUtil.tolog("ATHENA_PROC_NUMBER is not properly set: %s (will use existing job.coreCount value)" % (e))