Example #1
def test_SimpleParametricJob():

    job = Job()
    job.setExecutable('myExec')
    job.setLogLevel('DEBUG')
    parList = [1, 2, 3]
    job.setParameterSequence('JOB_ID', parList, addToWorkflow=True)
    inputDataList = [['/lhcb/data/data1', '/lhcb/data/data2'],
                     ['/lhcb/data/data3', '/lhcb/data/data4'],
                     ['/lhcb/data/data5', '/lhcb/data/data6']]
    job.setParameterSequence('InputData', inputDataList, addToWorkflow=True)

    jdl = job._toJDL()

    try:
        with open('./DIRAC/Interfaces/API/test/testWF.jdl') as fd:
            expected = fd.read()
    except IOError:
        with open('./Interfaces/API/test/testWF.jdl') as fd:
            expected = fd.read()

    assert jdl == expected

    clad = ClassAd('[' + jdl + ']')

    arguments = clad.getAttributeString('Arguments')
    job_id = clad.getAttributeString('JOB_ID')
    inputData = clad.getAttributeString('InputData')

    assert job_id == '%(JOB_ID)s'
    assert inputData == '%(InputData)s'
    assert 'jobDescription.xml' in arguments
    assert '-o LogLevel=DEBUG' in arguments
    assert '-p JOB_ID=%(JOB_ID)s' in arguments
    assert '-p InputData=%(InputData)s' in arguments
Example #2
    def test_SimpleParametricJob(self):

        job = Job()
        job.setExecutable('myExec')
        job.setLogLevel('DEBUG')
        parList = [1, 2, 3]
        job.setParameterSequence('JOB_ID', parList, addToWorkflow=True)
        inputDataList = [['/lhcb/data/data1', '/lhcb/data/data2'],
                         ['/lhcb/data/data3', '/lhcb/data/data4'],
                         ['/lhcb/data/data5', '/lhcb/data/data6']]
        job.setParameterSequence('InputData',
                                 inputDataList,
                                 addToWorkflow=True)

        jdl = job._toJDL()

        print(jdl)

        clad = ClassAd('[' + jdl + ']')

        arguments = clad.getAttributeString('Arguments')
        job_id = clad.getAttributeString('JOB_ID')
        inputData = clad.getAttributeString('InputData')

        print("arguments", arguments)

        self.assertEqual(job_id, '%(JOB_ID)s')
        self.assertEqual(inputData, '%(InputData)s')
        self.assertIn('jobDescription.xml', arguments)
        self.assertIn('-o LogLevel=DEBUG', arguments)
        self.assertIn('-p JOB_ID=%(JOB_ID)s', arguments)
        self.assertIn('-p InputData=%(InputData)s', arguments)
Example #3
 def getJobDefinition(self, job, jobDef=False):
     """Retrieve JDL of the Job and return jobDef dictionary"""
     if not jobDef:
         jobDef = {}
     # If not jdl in jobinfo load it
     if 'jdl' not in jobDef:
         if self.requiredJobInfo == 'jdlOriginal':
             result = self.jobDB.getJobJDL(job, original=True)
             if not result['OK']:
                 self.log.error("No JDL for job", "%s" % job)
                 return S_ERROR("No JDL for job")
             jobDef['jdl'] = result['Value']
         if self.requiredJobInfo == 'jdl':
             result = self.jobDB.getJobJDL(job)
             if not result['OK']:
                 self.log.error("No JDL for job", "%s" % job)
                 return S_ERROR("No JDL for job")
             jobDef['jdl'] = result['Value']
     # Load the classad if needed
     if 'jdl' in jobDef and 'classad' not in jobDef:
         try:
             classad = ClassAd(jobDef['jdl'])
         except BaseException:
             self.log.debug("Cannot load JDL")
             return S_ERROR('Illegal Job JDL')
         if not classad.isOK():
             self.log.debug(
                 "Warning: illegal JDL for job %s, will be marked problematic"
                 % (job))
             return S_ERROR('Illegal Job JDL')
         jobDef['classad'] = classad
     return S_OK(jobDef)
Example #4
 def getJobDefinition(self, job, jobDef=False):
     """Retrieve JDL of the Job and return jobDef dictionary"""
     if not jobDef:
         jobDef = {}
     # If not jdl in jobinfo load it
     if "jdl" not in jobDef:
         if self.requiredJobInfo == "jdlOriginal":
             result = self.jobDB.getJobJDL(job, original=True)
             if not result["OK"]:
                 self.log.error("No JDL for job", "%s" % job)
                 return S_ERROR("No JDL for job")
             jobDef["jdl"] = result["Value"]
         if self.requiredJobInfo == "jdl":
             result = self.jobDB.getJobJDL(job)
             if not result["OK"]:
                 self.log.error("No JDL for job", "%s" % job)
                 return S_ERROR("No JDL for job")
             jobDef["jdl"] = result["Value"]
     # Load the classad if needed
     if "jdl" in jobDef and "classad" not in jobDef:
         try:
             classad = ClassAd(jobDef["jdl"])
         except Exception:
             self.log.debug("Cannot load JDL")
             return S_ERROR(JobMinorStatus.ILLEGAL_JOB_JDL)
         if not classad.isOK():
             self.log.debug(
                 "Warning: illegal JDL for job %s, will be marked problematic"
                 % (job))
             return S_ERROR(JobMinorStatus.ILLEGAL_JOB_JDL)
         jobDef["classad"] = classad
     return S_OK(jobDef)
Example #5
def test_SimpleParametricJob():

    job = Job()
    job.setExecutable("myExec")
    job.setLogLevel("DEBUG")
    parList = [1, 2, 3]
    job.setParameterSequence("JOB_ID", parList, addToWorkflow=True)
    inputDataList = [
        ["/lhcb/data/data1", "/lhcb/data/data2"],
        ["/lhcb/data/data3", "/lhcb/data/data4"],
        ["/lhcb/data/data5", "/lhcb/data/data6"],
    ]
    job.setParameterSequence("InputData", inputDataList, addToWorkflow=True)

    jdl = job._toJDL()

    with open(join(dirname(__file__), "testWF.jdl")) as fd:
        expected = fd.read()

    assert jdl == expected

    clad = ClassAd("[" + jdl + "]")

    arguments = clad.getAttributeString("Arguments")
    job_id = clad.getAttributeString("JOB_ID")
    inputData = clad.getAttributeString("InputData")

    assert job_id == "%(JOB_ID)s"
    assert inputData == "%(InputData)s"
    assert "jobDescription.xml" in arguments
    assert "-o LogLevel=DEBUG" in arguments
    assert "-p JOB_ID=%(JOB_ID)s" in arguments
    assert "-p InputData=%(InputData)s" in arguments
Example #6
  def submitJob( self, jdl ):
    """ Submit one job specified by its JDL to WMS
    """

    if os.path.exists( jdl ):
      fic = open ( jdl, "r" )
      jdlString = fic.read()
      fic.close()
    else:
      # If file JDL does not exist, assume that the JDL is passed as a string
      jdlString = jdl

    # Check the validity of the input JDL
    jdlString = jdlString.strip()
    if jdlString.find( "[" ) != 0:
      jdlString = "[%s]" % jdlString
    classAdJob = ClassAd( jdlString )
    if not classAdJob.isOK():
      return S_ERROR( 'Invalid job JDL' )

    # Check the size and the contents of the input sandbox
    result = self.__uploadInputSandbox( classAdJob )
    if not result['OK']:
      return result

    # Submit the job now and get the new job ID
    if not self.jobManager:
      self.jobManager = RPCClient( 'WorkloadManagement/JobManager',
                                    useCertificates = self.useCertificates,
                                    timeout = self.timeout )
    result = self.jobManager.submitJob( classAdJob.asJDL() )
    if 'requireProxyUpload' in result and result['requireProxyUpload']:
      gLogger.warn( "Need to upload the proxy" )
    return result
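
submitJob first checks whether its argument names an existing file and otherwise treats it as the JDL text itself, so a caller may pass either form. A minimal sketch of the two call styles, assuming `wms` is an instance of the client class this snippet belongs to (the variable name and JDL content are illustrative only):

# Hypothetical usage sketch -- `wms` stands in for the client object exposing submitJob().
jdlText = 'Executable = "myExec"; Arguments = "hello";'

result = wms.submitJob("/path/to/job.jdl")   # existing file: the JDL is read from disk
if not result['OK']:
    print(result['Message'])

result = wms.submitJob(jdlText)              # otherwise the string is parsed as inline JDL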
Example #7
 def getJobDefinition(self, job, jobDef=False):
   """ Retrieve JDL of the Job and return jobDef dictionary
   """
   if not jobDef:
     jobDef = {}
   # If not jdl in jobinfo load it
   if 'jdl' not in jobDef:
     if self.requiredJobInfo == 'jdlOriginal':
       result = self.jobDB.getJobJDL(job, original=True)
       if not result['OK']:
         self.log.error("No JDL for job", "%s" % job)
         return S_ERROR("No JDL for job")
       jobDef['jdl'] = result['Value']
     if self.requiredJobInfo == 'jdl':
       result = self.jobDB.getJobJDL(job)
       if not result['OK']:
         self.log.error("No JDL for job", "%s" % job)
         return S_ERROR("No JDL for job")
       jobDef['jdl'] = result['Value']
   # Load the classad if needed
   if 'jdl' in jobDef and 'classad' not in jobDef:
     try:
       classad = ClassAd(jobDef['jdl'])
     except BaseException:
       self.log.debug("Cannot load JDL")
       return S_ERROR('Illegal Job JDL')
     if not classad.isOK():
       self.log.debug("Warning: illegal JDL for job %s, will be marked problematic" % (job))
       return S_ERROR('Illegal Job JDL')
     jobDef['classad'] = classad
   return S_OK(jobDef)
Example #8
    def submitJob(self, jdl):
        """Submit one job specified by its JDL to WMS"""

        if os.path.exists(jdl):
            fic = open(jdl, "r")
            jdlString = fic.read()
            fic.close()
        else:
            # If file JDL does not exist, assume that the JDL is passed as a string
            jdlString = jdl

        # Check the validity of the input JDL
        jdlString = jdlString.strip()
        if jdlString.find("[") != 0:
            jdlString = "[%s]" % jdlString
        classAdJob = ClassAd(jdlString)
        if not classAdJob.isOK():
            return S_ERROR('Invalid job JDL')

        # Check the size and the contents of the input sandbox
        result = self.__uploadInputSandbox(classAdJob)
        if not result['OK']:
            return result

        # Submit the job now and get the new job ID
        if not self.jobManager:
            self.jobManager = RPCClient('WorkloadManagement/JobManager',
                                        useCertificates=self.useCertificates,
                                        timeout=self.timeout)
        result = self.jobManager.submitJob(classAdJob.asJDL())
        if 'requireProxyUpload' in result and result['requireProxyUpload']:
            gLogger.warn("Need to upload the proxy")
        return result
Example #9
 def _saveJobJDLRequest(self, jobID, jobJDL):
     """Save job JDL local to JobAgent."""
     classAdJob = ClassAd(jobJDL)
     classAdJob.insertAttributeString("LocalCE", self.ceName)
     jdlFileName = jobID + ".jdl"
     jdlFile = open(jdlFileName, "w")
     jdl = classAdJob.asJDL()
     jdlFile.write(jdl)
     jdlFile.close()
Example #10
 def __saveJobJDLRequest( self, jobID, jobJDL ):
   """Save job JDL local to JobAgent.
   """
   classAdJob = ClassAd( jobJDL )
   classAdJob.insertAttributeString( 'LocalCE', self.ceName )
   jdlFileName = jobID + '.jdl'
   jdlFile = open( jdlFileName, 'w' )
   jdl = classAdJob.asJDL()
   jdlFile.write( jdl )
   jdlFile.close()
Example #11
 def _getProcessingType(self, jobID):
     """Get the Processing Type from the JDL, until it is promoted to a real Attribute"""
     processingType = "unknown"
     result = self.jobDB.getJobJDL(jobID, original=True)
     if not result["OK"]:
         return processingType
     classAdJob = ClassAd(result["Value"])
     if classAdJob.lookupAttribute("ProcessingType"):
         processingType = classAdJob.getAttributeString("ProcessingType")
     return processingType
Example #12
    def __getProcessingType(self, jobID):
        """Get the Processing Type from the JDL, until it is promoted to a real Attribute"""
        processingType = 'unknown'
        result = self.jobDB.getJobJDL(jobID, original=True)
        if not result['OK']:
            return processingType
        classAdJob = ClassAd(result['Value'])
        if classAdJob.lookupAttribute('ProcessingType'):
            processingType = classAdJob.getAttributeString('ProcessingType')
        return processingType
Example #13
 def __saveJobJDLRequest(self, jobID, jobJDL):
     """Save job JDL local to JobAgent."""
     classAdJob = ClassAd(jobJDL)
     classAdJob.insertAttributeString('LocalCE', self.ceName)
     jdlFileName = jobID + '.jdl'
     jdlFile = open(jdlFileName, 'w')
     jdl = classAdJob.asJDL()
     jdlFile.write(jdl)
     jdlFile.close()
     return S_OK(jdlFileName)
Example #14
  def __getProcessingType( self, jobID ):
    """ Get the Processing Type from the JDL, until it is promoted to a real Attribute """
    processingType = 'unknown'
    result = self.jobDB.getJobJDL( jobID, original = True )
    if not result['OK']:
      return processingType
    classAdJob = ClassAd( result['Value'] )
    if classAdJob.lookupAttribute( 'ProcessingType' ):
      processingType = classAdJob.getAttributeString( 'ProcessingType' )
    return processingType
Example #15
    def __getProcessingType(self, jobID):
        """Get the Processing Type from the JDL, until it is promoted to a real Attribute"""
        processingType = "unknown"
        result = self.jobDB.getJobJDL(jobID, original=True)
        if not result["OK"]:
            return processingType
        classAdJob = ClassAd(result["Value"])
        if classAdJob.lookupAttribute("ProcessingType"):
            processingType = classAdJob.getAttributeString("ProcessingType")
        return processingType
Example #16
 def __saveJobJDLRequest(self, jobID, jobJDL):
     """Save job JDL local to JobAgent."""
     classAdJob = ClassAd(jobJDL)
     classAdJob.insertAttributeString("LocalCE", self.ceName)
     jdlFileName = jobID + ".jdl"
     jdlFile = open(jdlFileName, "w")
     jdl = classAdJob.asJDL()
     jdlFile.write(jdl)
     jdlFile.close()
     return S_OK(jdlFileName)
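
The saveJobJDLRequest variants above open and close the JDL file by hand; a context manager closes the handle even if the write fails. A minimal sketch under the same assumptions (ClassAd, S_OK and self.ceName available as in the snippets above):

def __saveJobJDLRequest(self, jobID, jobJDL):
    """Save job JDL local to JobAgent (context-manager variant, illustrative only)."""
    classAdJob = ClassAd(jobJDL)
    classAdJob.insertAttributeString("LocalCE", self.ceName)
    jdlFileName = str(jobID) + ".jdl"
    # The with-block guarantees the file is closed even if write() raises.
    with open(jdlFileName, "w") as jdlFile:
        jdlFile.write(classAdJob.asJDL())
    return S_OK(jdlFileName)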
Example #17
    def insertJobInQueue(self, job, classAdJob):
        """Check individual job and add to the Task Queue eventually."""

        jobReq = classAdJob.get_expression("JobRequirements")
        classAdJobReq = ClassAd(jobReq)
        jobReqDict = {}
        for name in self.taskQueueDB.getSingleValueTQDefFields():
            if classAdJobReq.lookupAttribute(name):
                if name == 'CPUTime':
                    jobReqDict[name] = classAdJobReq.getAttributeInt(name)
                else:
                    jobReqDict[name] = classAdJobReq.getAttributeString(name)

        for name in self.taskQueueDB.getMultiValueTQDefFields():
            if classAdJobReq.lookupAttribute(name):
                jobReqDict[name] = classAdJobReq.getListFromExpression(name)

        jobPriority = classAdJobReq.getAttributeInt('UserPriority')

        result = self.taskQueueDB.insertJob(job, jobReqDict, jobPriority)
        if not result['OK']:
            self.log.error("Cannot insert job %s in task queue: %s" %
                           (job, result['Message']))
            # Force removing the job from the TQ if it was actually inserted
            result = self.taskQueueDB.deleteJob(job)
            if result['OK']:
                if result['Value']:
                    self.log.info("Job %s removed from the TQ" % job)
            return S_ERROR("Cannot insert in task queue")
        return S_OK()
Example #18
def test_MPJob(proc, minProc, maxProc, expectedProc, expectedMinProc, expectedMaxProc):

    job = Job()
    job.setExecutable("myExec")
    job.setLogLevel("DEBUG")
    job.setNumberOfProcessors(proc, minProc, maxProc)
    jdl = job._toJDL()
    clad = ClassAd("[" + jdl + "]")
    processors = clad.getAttributeInt("NumberOfProcessors")
    minProcessors = clad.getAttributeInt("MinNumberOfProcessors")
    maxProcessors = clad.getAttributeInt("MaxNumberOfProcessors")
    assert processors == expectedProc
    assert minProcessors == expectedMinProc
    assert maxProcessors == expectedMaxProc
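
test_MPJob receives the processor counts and the expected JDL attributes as arguments, so it is presumably driven by a parametrization decorator defined elsewhere in the test module. A hypothetical sketch of how such cases could be supplied with pytest (the tuple below is illustrative, not taken from the project):

import pytest

@pytest.mark.parametrize(
    "proc, minProc, maxProc, expectedProc, expectedMinProc, expectedMaxProc",
    [
        # illustrative case only: request two processors and expect it echoed in the JDL
        (2, None, None, 2, None, None),
    ],
)
def test_MPJob(proc, minProc, maxProc, expectedProc, expectedMinProc, expectedMaxProc):
    ...  # body as in the example above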
Example #19
def test_MPJob(proc, minProc, maxProc, expectedProc, expectedMinProc,
               expectedMaxProc):

    job = Job()
    job.setExecutable('myExec')
    job.setLogLevel('DEBUG')
    job.setNumberOfProcessors(proc, minProc, maxProc)
    jdl = job._toJDL()
    clad = ClassAd('[' + jdl + ']')
    processors = clad.getAttributeInt('NumberOfProcessors')
    minProcessors = clad.getAttributeInt('MinNumberOfProcessors')
    maxProcessors = clad.getAttributeInt('MaxNumberOfProcessors')
    assert processors == expectedProc
    assert minProcessors == expectedMinProc
    assert maxProcessors == expectedMaxProc
Example #20
 def __getJDLParameters(self, jdl):
     """Returns a dictionary of JDL parameters."""
     try:
         parameters = {}
         #      print jdl
         if not re.search(r'\[', jdl):
             jdl = '[' + jdl + ']'
         classAdJob = ClassAd(jdl)
         paramsDict = classAdJob.contents
         for param, value in paramsDict.items():
             if value.strip().startswith('{'):
                 self.log.debug('Found list type parameter %s' % (param))
                 rawValues = value.replace('{',
                                           '').replace('}', '').replace(
                                               '"', '').split()
                 valueList = []
                 for val in rawValues:
                     if re.search(',$', val):
                         valueList.append(val[:-1])
                     else:
                         valueList.append(val)
                 parameters[param] = valueList
             else:
                 parameters[param] = value.replace('"', '').replace(
                     '{', '"{').replace('}', '}"')
                 self.log.debug('Found standard parameter %s: %s' %
                                (param, parameters[param]))
         return S_OK(parameters)
     except Exception as x:
         self.log.exception(lException=x)
         return S_ERROR('Exception while extracting JDL parameters for job')
Example #21
 def _getJDLParameters(self, jdl):
     """Returns a dictionary of JDL parameters."""
     try:
         parameters = {}
         #      print jdl
         if not re.search(r"\[", jdl):
             jdl = "[" + jdl + "]"
         classAdJob = ClassAd(jdl)
         paramsDict = classAdJob.contents
         for param, value in paramsDict.items():
             if value.strip().startswith("{"):
                 self.log.debug("Found list type parameter %s" % (param))
                 rawValues = value.replace("{",
                                           "").replace("}", "").replace(
                                               '"', "").split()
                 valueList = []
                 for val in rawValues:
                     if re.search(",$", val):
                         valueList.append(val[:-1])
                     else:
                         valueList.append(val)
                 parameters[param] = valueList
             else:
                 parameters[param] = value.replace('"', "").replace(
                     "{", '"{').replace("}", '}"')
                 self.log.debug("Found standard parameter %s: %s" %
                                (param, parameters[param]))
         return S_OK(parameters)
     except Exception as x:
         self.log.exception(lException=x)
         return S_ERROR("Exception while extracting JDL parameters for job")
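
The branch handling values that start with '{' turns a JDL list literal into a Python list by stripping braces and quotes, splitting on whitespace and dropping trailing commas. A standalone sketch of that same transformation on a raw value string (illustrative helper, not part of the DIRAC API):

def parseJDLList(value):
    """Re-implementation of the list branch above, for illustration only."""
    rawValues = value.replace("{", "").replace("}", "").replace('"', "").split()
    return [val[:-1] if val.endswith(",") else val for val in rawValues]

print(parseJDLList('{"/lhcb/data/data1", "/lhcb/data/data2"}'))
# ['/lhcb/data/data1', '/lhcb/data/data2']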
Example #22
    def test_NoParameters(self):

        clad = ClassAd(TEST_JDL_NO_PARAMETERS)
        result = getParameterVectorLength(clad)
        self.assertTrue(result['OK'])
        nParam = result['Value']
        self.assertTrue(nParam is None)
Example #23
  def test_SimpleProgression(self):

    clad = ClassAd( TEST_JDL_SIMPLE_PROGRESSION )
    nParam = getNumberOfParameters( clad )

    self.assertEqual( nParam, 3 )

    result = generateParametricJobs( clad )
    self.assertTrue( result['OK'] )

    jobDescList = result['Value']
    self.assertEqual( nParam, len( jobDescList ) )

    # Check the definition of the 2nd job
    jobClassAd = ClassAd( jobDescList[1] )
    self.assertEqual( jobClassAd.getAttributeString( 'Arguments' ), '3' )
    self.assertEqual( jobClassAd.getAttributeString( 'JobName' ), 'Test_1' )
Example #24
    def test_SimpleProgression(self):

        clad = ClassAd(TEST_JDL_SIMPLE_PROGRESSION)
        nParam = getNumberOfParameters(clad)

        self.assertEqual(nParam, 3)

        result = generateParametricJobs(clad)
        self.assertTrue(result['OK'])

        jobDescList = result['Value']
        self.assertEqual(nParam, len(jobDescList))

        # Check the definition of the 2nd job
        jobClassAd = ClassAd(jobDescList[1])
        self.assertEqual(jobClassAd.getAttributeString('Arguments'), '3')
        self.assertEqual(jobClassAd.getAttributeString('JobName'), 'Test_1')
Example #25
def test_SimpleParametricJob():

  job = Job()
  job.setExecutable('myExec')
  job.setLogLevel('DEBUG')
  parList = [1, 2, 3]
  job.setParameterSequence('JOB_ID', parList, addToWorkflow=True)
  inputDataList = [
      [
          '/lhcb/data/data1',
          '/lhcb/data/data2'
      ],
      [
          '/lhcb/data/data3',
          '/lhcb/data/data4'
      ],
      [
          '/lhcb/data/data5',
          '/lhcb/data/data6'
      ]
  ]
  job.setParameterSequence('InputData', inputDataList, addToWorkflow=True)

  jdl = job._toJDL()

  try:
    with open('./DIRAC/Interfaces/API/test/testWF.jdl') as fd:
      expected = fd.read()
  except IOError:
    with open('./Interfaces/API/test/testWF.jdl') as fd:
      expected = fd.read()

  assert jdl == expected

  clad = ClassAd('[' + jdl + ']')

  arguments = clad.getAttributeString('Arguments')
  job_id = clad.getAttributeString('JOB_ID')
  inputData = clad.getAttributeString('InputData')

  assert job_id == '%(JOB_ID)s'
  assert inputData == '%(InputData)s'
  assert 'jobDescription.xml' in arguments
  assert '-o LogLevel=DEBUG' in arguments
  assert '-p JOB_ID=%(JOB_ID)s' in arguments
  assert '-p InputData=%(InputData)s' in arguments
Example #26
  def insertJobInQueue( self, job, classAdJob ):
    """ Check individual job and add to the Task Queue eventually.
    """

    jobReq = classAdJob.get_expression( "JobRequirements" )
    classAdJobReq = ClassAd( jobReq )
    jobReqDict = {}
    for name in self.taskQueueDB.getSingleValueTQDefFields():
      if classAdJobReq.lookupAttribute( name ):
        if name == 'CPUTime':
          jobReqDict[name] = classAdJobReq.getAttributeInt( name )
        else:
          jobReqDict[name] = classAdJobReq.getAttributeString( name )

    for name in self.taskQueueDB.getMultiValueTQDefFields():
      if classAdJobReq.lookupAttribute( name ):
        jobReqDict[name] = classAdJobReq.getListFromExpression( name )

    jobPriority = classAdJobReq.getAttributeInt( 'UserPriority' )

    result = self.taskQueueDB.insertJob( job, jobReqDict, jobPriority )
    if not result[ 'OK' ]:
      self.log.error( "Cannot insert job %s in task queue: %s" % ( job, result[ 'Message' ] ) )
      # Force removing the job from the TQ if it was actually inserted
      result = self.taskQueueDB.deleteJob( job )
      if result['OK']:
        if result['Value']:
          self.log.info( "Job %s removed from the TQ" % job )
      return S_ERROR( "Cannot insert in task queue" )
    return S_OK()
Example #27
    def submitJob(self, jdl):
        """Submit one job specified by its JDL to WMS"""

        if not self.jobManagerClient:
            jobManager = RPCClient(
                "WorkloadManagement/JobManager", useCertificates=self.useCertificates, timeout=self.timeout
            )
        else:
            jobManager = self.jobManagerClient
        if os.path.exists(jdl):
            fic = open(jdl, "r")
            jdlString = fic.read()
            fic.close()
        else:
            # If file JDL does not exist, assume that the JDL is
            # passed as a string
            jdlString = jdl

        # Check the validity of the input JDL
        jdlString = jdlString.strip()
        if jdlString.find("[") != 0:
            jdlString = "[%s]" % jdlString
        classAdJob = ClassAd(jdlString)
        if not classAdJob.isOK():
            return S_ERROR("Invalid job JDL")

        # Check the size and the contents of the input sandbox
        result = self.__uploadInputSandbox(classAdJob)
        if not result["OK"]:
            return result

        # Submit the job now and get the new job ID
        result = jobManager.submitJob(classAdJob.asJDL())

        if not result["OK"]:
            return result
        jobID = result["Value"]
        if "requireProxyUpload" in result and result["requireProxyUpload"]:
            gProxyManager.uploadProxy()

        # print "Sandbox uploading"
        return S_OK(jobID)
Example #28
  def submitNewBigJob( self ):

    result = jobDB.getJobJDL( str( self.__jobID ) , True )
    classAdJob = ClassAd( result['Value'] )
    executableFile = ""
    if classAdJob.lookupAttribute( 'Executable' ):
      executableFile = classAdJob.getAttributeString( 'Executable' )

    tempPath = self.__tmpSandBoxDir
    dirac = Dirac()
    if not os.path.exists( tempPath ):
      os.makedirs( tempPath )

    settingJobSandBoxDir = dirac.getInputSandbox( self.__jobID, tempPath )
    self.log.info( 'Writing temporary SandboxDir in Server', settingJobSandBoxDir )
    moveData = self.__tmpSandBoxDir + "/InputSandbox" + str( self.__jobID )

    HiveV1Cli = HiveV1Client( self.__User , self.__publicIP )
    returned = HiveV1Cli.dataCopy( moveData, self.__tmpSandBoxDir )
    self.log.info( 'Copy the job content to the Hadoop Master with HIVE: ', returned )

    jobInfo = jobDB.getJobAttributes( self.__jobID )
    if not jobInfo['OK']:
      return S_ERROR( jobInfo['Value'] )
    proxy = ""
    jobInfo = jobInfo['Value']
    if gProxyManager.userHasProxy( jobInfo["OwnerDN"], jobInfo["OwnerGroup"] ):
      proxy = gProxyManager.downloadProxyToFile( jobInfo["OwnerDN"], jobInfo["OwnerGroup"] )
    else:
      proxy = self.__requestProxyFromProxyManager( jobInfo["OwnerDN"], jobInfo["OwnerGroup"] )

    HiveJob = "InputSandbox" + str( self.__jobID ) + "/" + executableFile
    HiveJobOutput = str( self.__jobID ) + "_" + executableFile + "_out"

    returned = HiveV1Cli.jobSubmit( tempPath, HiveJob, proxy['chain'], HiveJobOutput )
    self.log.info( 'Launch Hadoop-Hive job to the Master: ', returned )

    if not returned['OK']:
      return S_ERROR( returned['Message'] )
    else:
      self.log.info( 'Hadoop-Hive Job ID: ', returned['Value'] )

    return S_OK( returned['Value'] )
Example #29
  def test_SimpleBunch(self):

    clad = ClassAd( TEST_JDL_SIMPLE_BUNCH )
    result = getParameterVectorLength( clad )
    self.assertTrue( result['OK'] )
    nParam = result['Value']

    self.assertEqual( nParam, 3 )

    result = generateParametricJobs( clad )
    self.assertTrue(result['OK'])

    jobDescList = result['Value']
    self.assertEqual( nParam, len( jobDescList ) )

    # Check the definition of the 2nd job
    jobClassAd = ClassAd( jobDescList[1] )
    self.assertEqual( jobClassAd.getAttributeString( 'Arguments' ), '5' )
    self.assertEqual( jobClassAd.getAttributeString( 'JobName' ), 'Test_1' )
Example #30
    def test_SimpleBunch(self):

        clad = ClassAd(TEST_JDL_SIMPLE_BUNCH)
        result = getParameterVectorLength(clad)
        self.assertTrue(result["OK"])
        nParam = result["Value"]

        self.assertEqual(nParam, 3)

        result = generateParametricJobs(clad)
        self.assertTrue(result["OK"])

        jobDescList = result["Value"]
        self.assertEqual(nParam, len(jobDescList))

        # Check the definition of the 2nd job
        jobClassAd = ClassAd(jobDescList[1])
        self.assertEqual(jobClassAd.getAttributeString("Arguments"), "5")
        self.assertEqual(jobClassAd.getAttributeString("JobName"), "Test_1")
Example #31
    def test_Simple(self):

        clad = ClassAd(TEST_JDL_SIMPLE)
        result = getParameterVectorLength(clad)
        self.assertTrue(result['OK'])
        nParam = result['Value']

        self.assertEqual(nParam, 3)

        result = generateParametricJobs(clad)
        self.assertTrue(result['OK'])

        jobDescList = result['Value']
        self.assertEqual(nParam, len(jobDescList))

        # Check the definition of the 2nd job
        jobClassAd = ClassAd(jobDescList[1])
        self.assertEqual(jobClassAd.getAttributeString('Arguments'), 'b')
        self.assertEqual(jobClassAd.getAttributeString('JobName'), 'Test_1')
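
TEST_JDL_SIMPLE itself is defined elsewhere in the test module; given the assertions (three parameters, the second job getting Arguments 'b' and JobName 'Test_1', and the '%s'/'%n' substitution seen in the submission code further down), a hypothetical JDL satisfying them might look like the sketch below. This is a stand-in for illustration, not the project's actual constant:

# Hypothetical stand-in for TEST_JDL_SIMPLE -- not the real test fixture.
TEST_JDL_SIMPLE = """
[
  Executable = "testExec";
  Arguments = "%s";
  JobName = "Test_%n";
  Parameters = { "a", "b", "c" };
]
"""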
Example #32
    def submitJob(self, jdl, jobDescriptionObject=None):
        """Submit one job specified by its JDL to WMS"""

        if os.path.exists(jdl):
            fic = open(jdl, "r")
            jdlString = fic.read()
            fic.close()
        else:
            # If file JDL does not exist, assume that the JDL is passed as a string
            jdlString = jdl

        jdlString = jdlString.strip()

        # Strip comments from the JDL string
        newJdlList = []
        for line in jdlString.split('\n'):
            if not line.strip().startswith('#'):
                newJdlList.append(line)
        jdlString = '\n'.join(newJdlList)

        # Check the validity of the input JDL
        if jdlString.find("[") != 0:
            jdlString = "[%s]" % jdlString
        classAdJob = ClassAd(jdlString)
        if not classAdJob.isOK():
            return S_ERROR('Invalid job JDL')

        # Check the size and the contents of the input sandbox
        result = self.__uploadInputSandbox(classAdJob, jobDescriptionObject)
        if not result['OK']:
            return result

        # Submit the job now and get the new job ID
        if not self.jobManager:
            self.jobManager = RPCClient('WorkloadManagement/JobManager',
                                        useCertificates=self.useCertificates,
                                        timeout=self.timeout)
        result = self.jobManager.submitJob(classAdJob.asJDL())
        if 'requireProxyUpload' in result and result['requireProxyUpload']:
            gLogger.warn("Need to upload the proxy")
        return result
Example #33
  def submitJob( self, jdl, jobDescriptionObject = None ):
    """ Submit one job specified by its JDL to WMS
    """

    if os.path.exists( jdl ):
      fic = open ( jdl, "r" )
      jdlString = fic.read()
      fic.close()
    else:
      # If file JDL does not exist, assume that the JDL is passed as a string
      jdlString = jdl

    jdlString = jdlString.strip()

    # Strip comments from the JDL string
    newJdlList = []
    for line in jdlString.split('\n'):
      if not line.strip().startswith( '#' ):
        newJdlList.append( line )
    jdlString = '\n'.join( newJdlList )

    # Check the validity of the input JDL
    if jdlString.find( "[" ) != 0:
      jdlString = "[%s]" % jdlString
    classAdJob = ClassAd( jdlString )
    if not classAdJob.isOK():
      return S_ERROR( 'Invalid job JDL' )

    # Check the size and the contents of the input sandbox
    result = self.__uploadInputSandbox( classAdJob, jobDescriptionObject )
    if not result['OK']:
      return result

    # Submit the job now and get the new job ID
    if not self.jobManager:
      self.jobManager = RPCClient( 'WorkloadManagement/JobManager',
                                    useCertificates = self.useCertificates,
                                    timeout = self.timeout )
    result = self.jobManager.submitJob( classAdJob.asJDL() )
    if 'requireProxyUpload' in result and result['requireProxyUpload']:
      gLogger.warn( "Need to upload the proxy" )
    return result
Example #34
  def test_SimpleParametricJob( self ):

    job = Job()
    job.setExecutable( 'myExec' )
    job.setLogLevel( 'DEBUG' )
    parList = [1,2,3]
    job.setParameterSequence( 'JOB_ID', parList, addToWorkflow=True )
    inputDataList = [
      [
        '/lhcb/data/data1',
        '/lhcb/data/data2'
      ],
      [
        '/lhcb/data/data3',
        '/lhcb/data/data4'
      ],
      [
        '/lhcb/data/data5',
        '/lhcb/data/data6'
      ]
    ]
    job.setParameterSequence( 'InputData', inputDataList, addToWorkflow=True )

    jdl = job._toJDL()

    print(jdl)

    clad = ClassAd( '[' + jdl + ']' )

    arguments = clad.getAttributeString( 'Arguments' )
    job_id = clad.getAttributeString( 'JOB_ID' )
    inputData = clad.getAttributeString( 'InputData' )

    print("arguments", arguments)

    self.assertEqual( job_id, '%(JOB_ID)s' )
    self.assertEqual( inputData, '%(InputData)s' )
    self.assertIn( 'jobDescription.xml', arguments )
    self.assertIn( '-o LogLevel=DEBUG', arguments )
    self.assertIn( '-p JOB_ID=%(JOB_ID)s', arguments )
    self.assertIn( '-p InputData=%(InputData)s', arguments )
Example #35
 def matchJob(self, resourceJDL):
   """  Use Matcher service to retrieve a MPI job from Task Queue.
        Returns: JobID, NumProc required, JDL and MPI flavor
        Input: resourceJDL
        Output: result = {JobID, JobJDL, NumProc, MPIFlavor}
   """
   print("S37")
   matcher = RPCClient('WorkloadManagement/Matcher', timeout = 600)
   dictMatchMPI = {'Setup':'EELA-Production', 'CPUTime':6000, 'JobType':'MPI'}
   result = matcher.getMatchingTaskQueues(dictMatchMPI)
   if not result['OK']:
     print("S38")
     gLogger.info("-------------------------------------------------------------------")
     gLogger.error ("Here I have to call to get normal job")
     gLogger.info("-------------------------------------------------------------------")
     gLogger.error (("Match not found: %s") % (result['Message']))
     gLogger.info("-------------------------------------------------------------------")
     return S_ERROR()
   else:
     if result['Value'] == {}:
       gLogger.info("-------------------------------------------------------------------")
       gLogger.info("Value == Empty")
       gLogger.info("-------------------------------------------------------------------")
       return S_ERROR()
   mpiTaskQueue = result['Value']
   classAdAgent = ClassAd(resourceJDL)
   classAdAgent.insertAttributeString('JobType', 'MPI')
   resourceJDL = str(classAdAgent.asJDL())
   result = matcher.requestJob(resourceJDL)
   if not result['OK']:
     gLogger.error (("Request Job Error: %s") % (result['Message']))
     return S_ERROR()
   elif result['OK']==False:
     gLogger.error (("Request Job False: %s") % (result['Message']))
     return S_ERROR()
   else:
     gLogger.error (("Request Job OK"))
   jobJDL = result['Value']['JDL']
    ### Review how to optimize this part (Important)
   jobID1 = ClassAd(jobJDL)
   jobID = jobID1.getAttributeString('JobID')
   numProc = jobID1.getAttributeString('CPUNumber')
   mpiFlavor = jobID1.getAttributeString('Flavor')
   result = {'JobID':jobID, 'JobJDL':jobJDL, 'NumProc': numProc, 
             'MPIFlavor': mpiFlavor}
   print("S39")
   print(result)
   return S_OK(result)
Example #36
  def execute( self ):
    """Main Agent code:
      1.- Query TaskQueueDB for existing TQs
      2.- Count Pending Jobs
      3.- Submit Jobs
    """
    self.__checkSubmitPools()

    bigDataJobsToSubmit = {}
    bigDataJobIdsToSubmit = {}

    for directorName, directorDict in self.directors.items():
      self.log.verbose( 'Checking Director:', directorName )
      self.log.verbose( 'RunningEndPoints:', directorDict['director'].runningEndPoints )
      for runningEndPointName in directorDict['director'].runningEndPoints:
        runningEndPointDict = directorDict['director'].runningEndPoints[runningEndPointName]
        NameNode = runningEndPointDict['NameNode']
        jobsByEndPoint = 0
        result = BigDataDB.getBigDataJobsByStatusAndEndpoint( 'Submitted', NameNode )
        if result['OK']:
          jobsByEndPoint += len( result['Value'] )
        result = BigDataDB.getBigDataJobsByStatusAndEndpoint( 'Running', NameNode )
        if result['OK']:
          jobsByEndPoint += len( result['Value'] )
        self.log.verbose( 'Checking Jobs By EndPoint %s:' % jobsByEndPoint )
        jobLimitsEndPoint = runningEndPointDict['LimitQueueJobsEndPoint']

        bigDataJobs = 0
        if jobsByEndPoint >= jobLimitsEndPoint:
          self.log.info( '%s >= %s Running jobs reach job limits: %s, skipping' % ( jobsByEndPoint, jobLimitsEndPoint, runningEndPointName ) )
          continue
        else:
          bigDataJobs = jobLimitsEndPoint - jobsByEndPoint
        requirementsDict = runningEndPointDict['Requirements']

        self.log.info( 'Requirements Dict: ', requirementsDict )
        result = taskQueueDB.getMatchingTaskQueues( requirementsDict )
        if not result['OK']:
          self.log.error( 'Could not retrieve TaskQueues from TaskQueueDB', result['Message'] )
          return result

        taskQueueDict = result['Value']
        self.log.info( 'Task Queues Dict: ', taskQueueDict )
        jobs = 0
        priority = 0
        cpu = 0
        jobsID = 0
        self.log.info( 'Pending Jobs from TaskQueue, which did not match before: ', self.pendingTaskQueueJobs )
        for tq in taskQueueDict:
          jobs += taskQueueDict[tq]['Jobs']
          priority += taskQueueDict[tq]['Priority']
          cpu += taskQueueDict[tq]['Jobs'] * taskQueueDict[tq]['CPUTime']

          # Matching of jobs with BigData software.
          # This process follows this sequence:
          #   Retrieve a job from taskQueueDict.
          #   Get the job name and try to match it with the resources.
          #   If there is no match, store it in pendingTaskQueueJobs for the
          #   next iteration.
          #
          # The matching is done with the following JobName pattern:
          #   NameSoftware _ SoftwareVersion _ HighLanguageName _ HighLanguageVersion _ DataSetName
          # Extract a job from the TaskQueue.
          if tq not in self.pendingTaskQueueJobs.keys():
            self.pendingTaskQueueJobs[tq] = {}
          getJobFromTaskQueue = taskQueueDB.matchAndGetJob( taskQueueDict[tq] )
          if not getJobFromTaskQueue['OK']:
            self.log.error( 'Could not get Job and FromTaskQueue', getJobFromTaskQueue['Message'] )
            return getJobFromTaskQueue

          jobInfo = getJobFromTaskQueue['Value']
          jobID = jobInfo['jobId']
          jobAttrInfo = jobDB.getJobAttributes( jobID )

          if not jobAttrInfo['OK']:
            self.log.error( 'Could not get Job Attributes', jobAttrInfo['Message'] )
            return jobAttrInfo
          jobInfoUniq = jobAttrInfo['Value']
          jobName = jobInfoUniq['JobName']
          self.pendingTaskQueueJobs[tq][jobID] = jobName


          result = jobDB.getJobJDL( jobID, True )
          classAdJob = ClassAd( result['Value'] )
          arguments = 0
          if classAdJob.lookupAttribute( 'Arguments' ):
            arguments = classAdJob.getAttributeString( 'Arguments' )
          #if not classAdJob.lookupAttribute( 'Arguments' ):
          #  continue

          jobsToSubmit = self.matchingJobsForBDSubmission( arguments,
                                                       runningEndPointName,
                                                       runningEndPointDict['BigDataSoftware'],
                                                       runningEndPointDict['BigDataSoftwareVersion'],
                                                       runningEndPointDict['HighLevelLanguage']['HLLName'],
                                                       runningEndPointDict['HighLevelLanguage']['HLLVersion'],
                                                       jobID )
          if ( jobsToSubmit == "OK" ):
            if directorName not in bigDataJobsToSubmit:
              bigDataJobsToSubmit[directorName] = {}
            if runningEndPointName not in bigDataJobsToSubmit[directorName]:
              bigDataJobsToSubmit[directorName][runningEndPointName] = {}
            bigDataJobsToSubmit[directorName][runningEndPointName] = { 'JobId': jobID,
                                                        'JobName': jobName,
                                                        'TQPriority': priority,
                                                        'CPUTime': cpu,
                                                        'BigDataEndpoint': runningEndPointName,
                                                        'BigDataEndpointNameNode': runningEndPointDict['NameNode'],
                                                        'BdSoftware': runningEndPointDict['BigDataSoftware'],
                                                        'BdSoftwareVersion': runningEndPointDict['BigDataSoftwareVersion'],
                                                        'HLLName' : runningEndPointDict['HighLevelLanguage']['HLLName'],
                                                        'HLLVersion' : runningEndPointDict['HighLevelLanguage']['HLLVersion'],
                                                        'NumBigDataJobsAllowedToSubmit': bigDataJobs,
                                                        'SiteName': runningEndPointDict['SiteName'],
                                                        'PublicIP': runningEndPointDict['PublicIP'],
                                                        'User': runningEndPointDict['User'],
                                                        'Port': runningEndPointDict['Port'],
                                                        'UsePilot': runningEndPointDict['UsePilot'],
                                                        'IsInteractive': runningEndPointDict['IsInteractive'],
                                                        'Arguments': arguments }
            del self.pendingTaskQueueJobs[tq][jobID]
          else:
            self.log.error( jobsToSubmit )
        self.log.info( 'Pending Jobs from TaskQueue, which did not match after: ', self.pendingTaskQueueJobs )
        for tq in self.pendingTaskQueueJobs.keys():
          for jobid in self.pendingTaskQueueJobs[tq].keys():
            result = jobDB.getJobJDL( jobid, True )
            classAdJob = ClassAd( result['Value'] )
            arguments = 0
            if classAdJob.lookupAttribute( 'Arguments' ):
              arguments = classAdJob.getAttributeString( 'Arguments' )
            #if not classAdJob.lookupAttribute( 'Arguments' ):
            #  continue
            #do the match with the runningEndPoint
            jobsToSubmit = self.matchingJobsForBDSubmission( arguments,
                                                             runningEndPointName,
                                                             runningEndPointDict['BigDataSoftware'],
                                                             runningEndPointDict['BigDataSoftwareVersion'],
                                                             runningEndPointDict['HighLevelLanguage']['HLLName'],
                                                             runningEndPointDict['HighLevelLanguage']['HLLVersion'],
                                                             jobid )
            if ( jobsToSubmit == "OK" ):
              if directorName not in bigDataJobsToSubmit:
                bigDataJobsToSubmit[directorName] = {}
              if runningEndPointName not in bigDataJobsToSubmit[directorName]:
                bigDataJobsToSubmit[directorName][runningEndPointName] = {}
              bigDataJobsToSubmit[directorName][runningEndPointName] = { 'JobId': jobid,
                                                          'JobName': self.pendingTaskQueueJobs[tq][jobid],
                                                          'TQPriority': priority,
                                                          'CPUTime': cpu,
                                                          'BigDataEndpoint': runningEndPointName,
                                                          'BigDataEndpointNameNode': runningEndPointDict['NameNode'],
                                                          'BdSoftware': runningEndPointDict['BigDataSoftware'],
                                                          'BdSoftwareVersion': runningEndPointDict['BigDataSoftwareVersion'],
                                                          'HLLName' : runningEndPointDict['HighLevelLanguage']['HLLName'],
                                                          'HLLVersion' : runningEndPointDict['HighLevelLanguage']['HLLVersion'],
                                                          'NumBigDataJobsAllowedToSubmit': bigDataJobs,
                                                          'SiteName': runningEndPointDict['SiteName'],
                                                          'PublicIP': runningEndPointDict['PublicIP'],
                                                          'User': runningEndPointDict['User'],
                                                          'Port': runningEndPointDict['Port'],
                                                          'UsePilot': runningEndPointDict['UsePilot'],
                                                          'IsInteractive': runningEndPointDict['IsInteractive'],
                                                          'Arguments': arguments  }
              del self.pendingTaskQueueJobs[tq][jobid]
            else:
              self.log.error( jobsToSubmit )
        if not jobs and not self.pendingTaskQueueJobs:
          self.log.info( 'No matching jobs for %s found, skipping' % NameNode )
          continue

        self.log.info( '___BigDataJobsTo Submit:', bigDataJobsToSubmit )

    for directorName, JobsToSubmitDict in bigDataJobsToSubmit.items():
      for runningEndPointName, jobsToSubmitDict in JobsToSubmitDict.items():
        if self.directors[directorName]['isEnabled']:
          self.log.info( 'Requesting submission to %s of %s' % ( runningEndPointName, directorName ) )

          director = self.directors[directorName]['director']
          pool = self.pools[self.directors[directorName]['pool']]

          jobIDs = JobsToSubmitDict[runningEndPointName]['JobId']
          jobName = JobsToSubmitDict[runningEndPointName]['JobName']
          endpoint = JobsToSubmitDict[runningEndPointName]['BigDataEndpoint']
          runningSiteName = JobsToSubmitDict[runningEndPointName]['SiteName']
          NameNode = JobsToSubmitDict[runningEndPointName]['BigDataEndpointNameNode']
          BigDataSoftware = JobsToSubmitDict[runningEndPointName]['BdSoftware']
          BigDataSoftwareVersion = JobsToSubmitDict[runningEndPointName]['BdSoftwareVersion']
          HLLName = JobsToSubmitDict[runningEndPointName]['HLLName']
          HLLVersion = JobsToSubmitDict[runningEndPointName]['HLLVersion']
          PublicIP = JobsToSubmitDict[runningEndPointName]['PublicIP']
          User = JobsToSubmitDict[runningEndPointName]['User']
          Port = JobsToSubmitDict[runningEndPointName]['Port']
          UsePilot = JobsToSubmitDict[runningEndPointName]['UsePilot']
          IsInteractive = JobsToSubmitDict[runningEndPointName]['IsInteractive']
          Arguments = JobsToSubmitDict[runningEndPointName]['Arguments']
          numBigDataJobsAllowed = JobsToSubmitDict[runningEndPointName]['NumBigDataJobsAllowedToSubmit']

          ret = pool.generateJobAndQueueIt( director.submitBigDataJobs,
                                            args = ( endpoint, numBigDataJobsAllowed, runningSiteName, NameNode,
                                                     BigDataSoftware, BigDataSoftwareVersion, HLLName, HLLVersion,
                                                     PublicIP, Port, jobIDs, runningEndPointName, jobName, User, self.jobDataset, UsePilot, IsInteractive ),
                                            oCallback = self.callBack,
                                            oExceptionCallback = director.exceptionCallBack,
                                            blocking = False )
          if not ret['OK']:
            # Disable submission until next iteration
            self.directors[directorName]['isEnabled'] = False
          else:
            time.sleep( self.am_getOption( 'ThreadStartDelay' ) )

    if 'Default' in self.pools:
      # only for those in "Default' thread Pool
      # for pool in self.pools:
      self.pools['Default'].processAllResults()

    return DIRAC.S_OK()
Example #37
    def export_submitJob(self, jobDesc):
        """Submit a single job to DIRAC WMS"""

        if self.peerUsesLimitedProxy:
            return S_ERROR("Can't submit using a limited proxy! (bad boy!)")

        # Check job submission permission
        result = self.jobPolicy.getJobPolicy()
        if not result['OK']:
            return S_ERROR('Failed to get job policies')
        policyDict = result['Value']
        if not policyDict[RIGHT_SUBMIT]:
            return S_ERROR('Job submission not authorized')

        #jobDesc is JDL for now
        jobDesc = jobDesc.strip()
        if jobDesc[0] != "[":
            jobDesc = "[%s" % jobDesc
        if jobDesc[-1] != "]":
            jobDesc = "%s]" % jobDesc

        # Check if the job is a parametric one
        jobClassAd = ClassAd(jobDesc)
        parametricJob = False
        if jobClassAd.lookupAttribute('Parameters'):
            parametricJob = True
            if jobClassAd.isAttributeList('Parameters'):
                parameterList = jobClassAd.getListFromExpression('Parameters')
            else:
                pStep = 0
                pFactor = 1
                pStart = 1
                nParameters = jobClassAd.getAttributeInt('Parameters')
                if not nParameters:
                    value = jobClassAd.get_expression('Parameters')
                    return S_ERROR(
                        'Illegal value for Parameters JDL field: %s' % value)

                if jobClassAd.lookupAttribute('ParameterStart'):
                    value = jobClassAd.get_expression(
                        'ParameterStart').replace('"', '')
                    try:
                        pStart = int(value)
                    except:
                        try:
                            pStart = float(value)
                        except:
                            return S_ERROR(
                                'Illegal value for ParameterStart JDL field: %s'
                                % value)

                if jobClassAd.lookupAttribute('ParameterStep'):
                    pStep = jobClassAd.getAttributeInt('ParameterStep')
                    if not pStep:
                        pStep = jobClassAd.getAttributeFloat('ParameterStep')
                        if not pStep:
                            value = jobClassAd.get_expression('ParameterStep')
                            return S_ERROR(
                                'Illegal value for ParameterStep JDL field: %s'
                                % value)
                if jobClassAd.lookupAttribute('ParameterFactor'):
                    pFactor = jobClassAd.getAttributeInt('ParameterFactor')
                    if not pFactor:
                        pFactor = jobClassAd.getAttributeFloat(
                            'ParameterFactor')
                        if not pFactor:
                            value = jobClassAd.get_expression(
                                'ParameterFactor')
                            return S_ERROR(
                                'Illegal value for ParameterFactor JDL field: %s'
                                % value)

                parameterList = list()
                parameterList.append(pStart)
                for i in range(nParameters - 1):
                    parameterList.append(parameterList[i] * pFactor + pStep)

            if len(parameterList) > self.maxParametricJobs:
                return S_ERROR(
                    'The number of parametric jobs exceeded the limit of %d' %
                    self.maxParametricJobs)

            jobDescList = []
            nParam = len(parameterList) - 1
            for n, p in enumerate(parameterList):
                newJobDesc = jobDesc.replace('%s', str(p)).replace(
                    '%n',
                    str(n).zfill(len(str(nParam))))
                newClassAd = ClassAd(newJobDesc)
                for attr in ['Parameters', 'ParameterStep', 'ParameterFactor']:
                    newClassAd.deleteAttribute(attr)
                if isinstance(p, str) and p.startswith('{'):
                    newClassAd.insertAttributeInt('Parameter', str(p))
                else:
                    newClassAd.insertAttributeString('Parameter', str(p))
                newClassAd.insertAttributeInt('ParameterNumber', n)
                newJDL = newClassAd.asJDL()
                jobDescList.append(newJDL)
        else:
            jobDescList = [jobDesc]

        jobIDList = []
        for jobDescription in jobDescList:
            result = gJobDB.insertNewJobIntoDB(jobDescription, self.owner,
                                               self.ownerDN, self.ownerGroup,
                                               self.diracSetup)
            if not result['OK']:
                return result

            jobID = result['JobID']
            gLogger.info('Job %s added to the JobDB for %s/%s' %
                         (jobID, self.ownerDN, self.ownerGroup))

            gJobLoggingDB.addLoggingRecord(jobID,
                                           result['Status'],
                                           result['MinorStatus'],
                                           source='JobManager')

            jobIDList.append(jobID)

        #Set persistency flag
        retVal = gProxyManager.getUserPersistence(self.ownerDN,
                                                  self.ownerGroup)
        if 'Value' not in retVal or not retVal['Value']:
            gProxyManager.setPersistency(self.ownerDN, self.ownerGroup, True)

        if parametricJob:
            result = S_OK(jobIDList)
        else:
            result = S_OK(jobIDList[0])

        result['JobID'] = result['Value']
        result['requireProxyUpload'] = self.__checkIfProxyUploadIsRequired()
        self.__sendNewJobsToMind(jobIDList)
        return result
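
When Parameters is an integer count rather than an explicit list, the loop above builds the value list with the recurrence parameterList[i] * pFactor + pStep, starting from ParameterStart. A small worked sketch of that progression with illustrative inputs:

# Same recurrence as in export_submitJob above; the input values are illustrative.
pStart, pFactor, pStep, nParameters = 1, 2, 1, 4

parameterList = [pStart]
for i in range(nParameters - 1):
    parameterList.append(parameterList[i] * pFactor + pStep)

print(parameterList)  # [1, 3, 7, 15]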
Example #38
  def _toJDL( self, xmlFile = '' ): #messy but need to account for xml file being in /tmp/guid dir
    """Creates a JDL representation of itself as a Job.
    """
    #Check if we have to do old bootstrap...
    classadJob = ClassAd( '[]' )

    paramsDict = {}
    params = self.workflow.parameters # ParameterCollection object

    paramList = params
    for param in paramList:
      paramsDict[param.getName()] = {'type':param.getType(), 'value':param.getValue()}

    scriptname = 'jobDescription.xml'
    arguments = []
    if self.script:
      if os.path.exists( self.script ):
        scriptname = os.path.abspath( self.script )
        self.log.verbose( 'Found script name %s' % scriptname )
    else:
      if xmlFile:
        self.log.verbose( 'Found XML File %s' % xmlFile )
        scriptname = xmlFile

    arguments.append( os.path.basename( scriptname ) )
    self.addToInputSandbox.append( scriptname )
    if 'LogLevel' in paramsDict:
      if paramsDict['LogLevel']['value']:
        arguments.append( '-o LogLevel=%s' % ( paramsDict['LogLevel']['value'] ) )
      else:
        self.log.warn( 'Job LogLevel defined with null value' )
    if 'DIRACSetup' in paramsDict:
      if paramsDict['DIRACSetup']['value']:
        arguments.append( '-o DIRAC/Setup=%s' % ( paramsDict['DIRACSetup']['value'] ) )
      else:
        self.log.warn( 'Job DIRACSetup defined with null value' )
    if 'JobMode' in paramsDict:
      if paramsDict['JobMode']['value']:
        arguments.append( '-o JobMode=%s' % ( paramsDict['JobMode']['value'] ) )
      else:
        self.log.warn( 'Job Mode defined with null value' )
    if 'JobConfigArgs' in paramsDict:
      if paramsDict['JobConfigArgs']['value']:
        arguments.append( '%s' % ( paramsDict['JobConfigArgs']['value'] ) )
      else:
        self.log.warn( 'JobConfigArgs defined with null value' )

    classadJob.insertAttributeString( 'Executable', self.executable )
    self.addToOutputSandbox.append( self.stderr )
    self.addToOutputSandbox.append( self.stdout )

    #Extract i/o sandbox parameters from steps and any input data parameters
    #to do when introducing step-level api...

    #To add any additional files to input and output sandboxes
    if self.addToInputSandbox:
      extraFiles = ';'.join( self.addToInputSandbox )
      if 'InputSandbox' in paramsDict:
        currentFiles = paramsDict['InputSandbox']['value']
        finalInputSandbox = currentFiles + ';' + extraFiles
        uniqueInputSandbox = uniqueElements( finalInputSandbox.split( ';' ) )
        paramsDict['InputSandbox']['value'] = ';'.join( uniqueInputSandbox )
        self.log.verbose( 'Final unique Input Sandbox %s' % ( ';'.join( uniqueInputSandbox ) ) )
      else:
        paramsDict['InputSandbox'] = {}
        paramsDict['InputSandbox']['value'] = extraFiles
        paramsDict['InputSandbox']['type'] = 'JDL'

    if self.addToOutputSandbox:
      extraFiles = ';'.join( self.addToOutputSandbox )
      if paramsDict.has_key( 'OutputSandbox' ):
        currentFiles = paramsDict['OutputSandbox']['value']
        finalOutputSandbox = currentFiles + ';' + extraFiles
        uniqueOutputSandbox = uniqueElements( finalOutputSandbox.split( ';' ) )
        paramsDict['OutputSandbox']['value'] = ';'.join( uniqueOutputSandbox )
        self.log.verbose( 'Final unique Output Sandbox %s' % ( ';'.join( uniqueOutputSandbox ) ) )
      else:
        paramsDict['OutputSandbox'] = {}
        paramsDict['OutputSandbox']['value'] = extraFiles
        paramsDict['OutputSandbox']['type'] = 'JDL'

    if self.addToInputData:
      extraFiles = ';'.join( self.addToInputData )
      if paramsDict.has_key( 'InputData' ):
        currentFiles = paramsDict['InputData']['value']
        finalInputData = extraFiles
        if currentFiles:
          finalInputData = currentFiles + ';' + extraFiles
        uniqueInputData = uniqueElements( finalInputData.split( ';' ) )
        paramsDict['InputData']['value'] = ';'.join( uniqueInputData )
        self.log.verbose( 'Final unique Input Data %s' % ( ';'.join( uniqueInputData ) ) )
      else:
        paramsDict['InputData'] = {}
        paramsDict['InputData']['value'] = extraFiles
        paramsDict['InputData']['type'] = 'JDL'

    # Handle here the Parametric values
    if self.parametric:
      for pType in ['InputData', 'InputSandbox']:
        if self.parametric.has_key( pType ):
          if paramsDict.has_key( pType ) and paramsDict[pType]['value']:
            pData = self.parametric[pType]
            # List of lists case
            currentFiles = paramsDict[pType]['value'].split( ';' )
            tmpList = []
            if type( pData[0] ) == list:
              for pElement in pData:
                tmpList.append( currentFiles + pElement )
            else:
              for pElement in pData:
                tmpList.append( currentFiles + [pElement] )
            self.parametric[pType] = tmpList

          paramsDict[pType] = {}
          paramsDict[pType]['value'] = "%s"
          paramsDict[pType]['type'] = 'JDL'
          self.parametric['files'] = self.parametric[pType]
          arguments.append( ' -p Parametric' + pType + '=%s' )
          break

      if self.parametric.has_key( 'files' ):
        paramsDict['Parameters'] = {}
        paramsDict['Parameters']['value'] = self.parametric['files']
        paramsDict['Parameters']['type'] = 'JDL'
      if self.parametric.has_key( 'GenericParameters' ):
        paramsDict['Parameters'] = {}
        paramsDict['Parameters']['value'] = self.parametric['GenericParameters']
        paramsDict['Parameters']['type'] = 'JDL'
        arguments.append( ' -p ParametricParameters=%s' )
    ##This needs to be put here so that the InputData and/or InputSandbox parameters for parametric jobs are processed
    classadJob.insertAttributeString( 'Arguments', ' '.join( arguments ) )

    #Add any JDL parameters to classad obeying lists with ';' rule
    requirements = False
    for name, props in paramsDict.items():
      ptype = props['type']
      value = props['value']
      if name.lower() == 'requirements' and ptype == 'JDL':
        self.log.verbose( 'Found existing requirements: %s' % ( value ) )
        requirements = True

      if re.search( '^JDL', ptype ):
        if type( value ) == list:
          if type( value[0] ) == list:
            classadJob.insertAttributeVectorStringList( name, value )
          else:
            classadJob.insertAttributeVectorString( name, value )
        elif value == "%s":
          classadJob.insertAttributeInt( name, value )
        elif not re.search( ';', value ) or name == 'GridRequirements': #not a nice fix...
          classadJob.insertAttributeString( name, value )
        else:
          classadJob.insertAttributeVectorString( name, value.split( ';' ) )

    if not requirements:
      reqtsDict = self.reqParams
      exprn = ''
      plus = ''
      for name, props in paramsDict.items():
        ptype = paramsDict[name]['type']
        value = paramsDict[name]['value']
        if not ptype == 'dict':
          if ptype == 'JDLReqt':
            if value and not value.lower() == 'any':
              plus = ' && '
              if re.search( ';', value ):
                for val in value.split( ';' ):
                  exprn += reqtsDict[name].replace( 'NAME', name ).replace( 'VALUE', str( val ) ) + plus
              else:
                exprn += reqtsDict[name].replace( 'NAME', name ).replace( 'VALUE', str( value ) ) + plus

      if len( plus ):
        exprn = exprn[:-len( plus )]
      if not exprn:
        exprn = 'true'
      self.log.verbose( 'Requirements: %s' % ( exprn ) )
      #classadJob.set_expression('Requirements', exprn)

    self.addToInputSandbox.remove( scriptname )
    self.addToOutputSandbox.remove( self.stdout )
    self.addToOutputSandbox.remove( self.stderr )
    jdl = classadJob.asJDL()
    start = jdl.find( '[' )
    end = jdl.rfind( ']' )
    return jdl[( start + 1 ):( end - 1 )]
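The requirements loop near the end of _toJDL can be exercised on its own; the templates below are hypothetical stand-ins for self.reqParams:

reqtsDict = {'Site': 'other.NAME == "VALUE"',      # hypothetical templates
             'CPUTime': 'other.NAME >= VALUE'}     # (self.reqParams in the class)
paramsDict = {'Site': {'type': 'JDLReqt', 'value': 'LCG.CERN.ch;LCG.IN2P3.fr'},
              'CPUTime': {'type': 'JDLReqt', 'value': '86400'}}

exprn, plus = '', ''
for name, props in paramsDict.items():
    value = props['value']
    if props['type'] == 'JDLReqt' and value and value.lower() != 'any':
        plus = ' && '
        for val in value.split(';'):
            # each ';'-separated value becomes one clause, joined with '&&'
            exprn += reqtsDict[name].replace('NAME', name).replace('VALUE', str(val)) + plus
if plus:
    exprn = exprn[:-len(plus)]
print(exprn or 'true')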
Example No. 39
0
  def __processResourceDescription( self, resourceDescription ):
    # Check and form the resource description dictionary
    resourceDict = {}
    if type( resourceDescription ) in StringTypes:
      classAdAgent = ClassAd( resourceDescription )
      if not classAdAgent.isOK():
        return S_ERROR( 'Illegal Resource JDL' )
      gLogger.verbose( classAdAgent.asJDL() )

      for name in gTaskQueueDB.getSingleValueTQDefFields():
        if classAdAgent.lookupAttribute( name ):
          if name == 'CPUTime':
            resourceDict[name] = classAdAgent.getAttributeInt( name )
          else:
            resourceDict[name] = classAdAgent.getAttributeString( name )

      for name in gTaskQueueDB.getMultiValueMatchFields():
        if classAdAgent.lookupAttribute( name ):
          if name == 'SubmitPool':
            resourceDict[name] = classAdAgent.getListFromExpression( name )      
          else:
            resourceDict[name] = classAdAgent.getAttributeString( name )

      # Check if a JobID is requested
      if classAdAgent.lookupAttribute( 'JobID' ):
        resourceDict['JobID'] = classAdAgent.getAttributeInt( 'JobID' )

      for k in ( 'DIRACVersion', 'ReleaseVersion', 'ReleaseProject', 'VirtualOrganization' ):
        if classAdAgent.lookupAttribute( k ):
          resourceDict[ k ] = classAdAgent.getAttributeString( k )
          
    else:
      for name in gTaskQueueDB.getSingleValueTQDefFields():
        if resourceDescription.has_key( name ):
          resourceDict[name] = resourceDescription[name]

      for name in gTaskQueueDB.getMultiValueMatchFields():
        if resourceDescription.has_key( name ):
          resourceDict[name] = resourceDescription[name]

      if resourceDescription.has_key( 'JobID' ):
        resourceDict['JobID'] = resourceDescription['JobID']

      for k in ( 'DIRACVersion', 'ReleaseVersion', 'ReleaseProject', 'VirtualOrganization',
                 'PilotReference', 'PilotInfoReportedFlag', 'PilotBenchmark' ):
        if k in resourceDescription:
          resourceDict[ k ] = resourceDescription[ k ]

    return resourceDict
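A stand-alone sketch of the dictionary branch above (the field lists are an assumed subset of what gTaskQueueDB actually reports):

SINGLE_VALUE_FIELDS = ['CPUTime', 'Site', 'GridCE', 'OwnerGroup']   # assumed subset
MULTI_VALUE_FIELDS = ['SubmitPool', 'Platform', 'JobType']          # assumed subset

def process_resource_description(resourceDescription):
    # only known matching fields and bookkeeping keys survive the filtering
    resourceDict = {}
    for name in SINGLE_VALUE_FIELDS + MULTI_VALUE_FIELDS:
        if name in resourceDescription:
            resourceDict[name] = resourceDescription[name]
    for k in ('JobID', 'DIRACVersion', 'ReleaseVersion', 'ReleaseProject',
              'VirtualOrganization', 'PilotReference', 'PilotInfoReportedFlag',
              'PilotBenchmark'):
        if k in resourceDescription:
            resourceDict[k] = resourceDescription[k]
    return resourceDict

# unknown keys such as 'Colour' are silently dropped
print(process_resource_description({'Site': 'LCG.CERN.ch', 'CPUTime': 86400, 'Colour': 'blue'}))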
Example No. 40
0
  def export_submitJob( self, jobDesc ):
    """ Submit a single job to DIRAC WMS
    """

    if self.peerUsesLimitedProxy:
      return S_ERROR( "Can't submit using a limited proxy! (bad boy!)" )

    # Check job submission permission
    result = self.jobPolicy.getJobPolicy()
    if not result['OK']:
      return S_ERROR( 'Failed to get job policies' )
    policyDict = result['Value']
    if not policyDict[ RIGHT_SUBMIT ]:
      return S_ERROR( 'Job submission not authorized' )

    #jobDesc is JDL for now
    jobDesc = jobDesc.strip()
    if jobDesc[0] != "[":
      jobDesc = "[%s" % jobDesc
    if jobDesc[-1] != "]":
      jobDesc = "%s]" % jobDesc

    # Check if the job is a parameteric one
    jobClassAd = ClassAd( jobDesc )
    parametricJob = False
    if jobClassAd.lookupAttribute( 'Parameters' ):
      parametricJob = True
      if jobClassAd.isAttributeList( 'Parameters' ):
        parameterList = jobClassAd.getListFromExpression( 'Parameters' )
      else:
        pStep = 0
        pFactor = 1
        pStart = 1
        nParameters = jobClassAd.getAttributeInt( 'Parameters' )
        if not nParameters:
          value = jobClassAd.get_expression( 'Parameters' )
          return S_ERROR( 'Illegal value for Parameters JDL field: %s' % value )

        if jobClassAd.lookupAttribute( 'ParameterStart' ):
          value = jobClassAd.get_expression( 'ParameterStart' ).replace( '"', '' )
          try:
            pStart = int( value )
          except:
            try:
              pStart = float( value )
            except:
              return S_ERROR( 'Illegal value for ParameterStart JDL field: %s' % value )

        if jobClassAd.lookupAttribute( 'ParameterStep' ):
          pStep = jobClassAd.getAttributeInt( 'ParameterStep' )
          if not pStep:
            pStep = jobClassAd.getAttributeFloat( 'ParameterStep' )
            if not pStep:
              value = jobClassAd.get_expression( 'ParameterStep' )
              return S_ERROR( 'Illegal value for ParameterStep JDL field: %s' % value )
        if jobClassAd.lookupAttribute( 'ParameterFactor' ):
          pFactor = jobClassAd.getAttributeInt( 'ParameterFactor' )
          if not pFactor:
            pFactor = jobClassAd.getAttributeFloat( 'ParameterFactor' )
            if not pFactor:
              value = jobClassAd.get_expression( 'ParameterFactor' )
              return S_ERROR( 'Illegal value for ParameterFactor JDL field: %s' % value )

        parameterList = list()
        parameterList.append( pStart )
        for i in range( nParameters - 1 ):
          parameterList.append( parameterList[i] * pFactor + pStep )


      if len( parameterList ) > self.maxParametricJobs:
        return S_ERROR( 'The number of parametric jobs exceeded the limit of %d' % self.maxParametricJobs )

      jobDescList = []
      nParam = len(parameterList) - 1
      for n,p in enumerate(parameterList):
        newJobDesc = jobDesc.replace('%s',str(p)).replace('%n',str(n).zfill(len(str(nParam))))
        newClassAd = ClassAd(newJobDesc)
        for attr in ['Parameters','ParameterStep','ParameterFactor']:
          newClassAd.deleteAttribute(attr)
        if type( p ) == type ( ' ' ) and p.startswith('{'):
          newClassAd.insertAttributeInt( 'Parameter',str(p) )
        else:
          newClassAd.insertAttributeString( 'Parameter', str( p ) )
        newClassAd.insertAttributeInt( 'ParameterNumber', n )
        newJDL = newClassAd.asJDL()
        jobDescList.append( newJDL )
    else:
      jobDescList = [ jobDesc ]

    jobIDList = []
    for jobDescription in jobDescList:
      result = gJobDB.insertNewJobIntoDB( jobDescription, self.owner, self.ownerDN, self.ownerGroup, self.diracSetup )
      if not result['OK']:
        return result

      jobID = result['JobID']
      gLogger.info( 'Job %s added to the JobDB for %s/%s' % ( jobID, self.ownerDN, self.ownerGroup ) )

      gJobLoggingDB.addLoggingRecord( jobID, result['Status'], result['MinorStatus'], source = 'JobManager' )

      jobIDList.append( jobID )

    #Set persistency flag
    retVal = gProxyManager.getUserPersistence( self.ownerDN, self.ownerGroup )
    if 'Value' not in retVal or not retVal[ 'Value' ]:
      gProxyManager.setPersistency( self.ownerDN, self.ownerGroup, True )

    if parametricJob:
      result = S_OK( jobIDList )
    else:
      result = S_OK( jobIDList[0] )

    result['JobID'] = result['Value']
    result[ 'requireProxyUpload' ] = self.__checkIfProxyUploadIsRequired()
    self.__sendNewJobsToMind( jobIDList )
    return result
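The arithmetic behind the parameter list built above (p[0] = ParameterStart, p[i+1] = p[i] * ParameterFactor + ParameterStep) and the %s / %n substitution can be checked with a few lines of plain Python:

def expand_parameters(nParameters, pStart=1, pStep=0, pFactor=1):
    # p[0] = pStart, p[i+1] = p[i] * pFactor + pStep
    parameterList = [pStart]
    for i in range(nParameters - 1):
        parameterList.append(parameterList[i] * pFactor + pStep)
    return parameterList

jobDesc = 'Arguments = "-p JOB_ID=%s"; JobName = "Param_%n";'   # illustrative JDL fragment
params = expand_parameters(4, pStart=10, pStep=5)               # [10, 15, 20, 25]
width = len(str(len(params) - 1))
for n, p in enumerate(params):
    print(jobDesc.replace('%s', str(p)).replace('%n', str(n).zfill(width)))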
Example No. 41
0
  def submitNewBigJob( self ):

    #1.- Create a temporary folder
    self.log.debug( 'Step1::: mkdir temp folder' )
    tempPath = self.__tmpSandBoxDir + str( self.__jobID ) + "/"
    dirac = Dirac()
    if not os.path.exists( tempPath ):
      os.makedirs( tempPath )

    #2.- Put the input sandbox contents into the temporary folder
    self.log.debug( 'Step2::: download the input sandbox to the temp folder' )
    settingJobSandBoxDir = dirac.getInputSandbox( self.__jobID, tempPath )
    self.log.info( 'Writing temporary SandboxDir on the server', settingJobSandBoxDir )
    moveData = tempPath + "/InputSandbox" + str( self.__jobID )

    #3.- Move the data to the Hadoop master
    self.log.debug( 'Step3::: copy the input sandbox to the Hadoop master' )
    HadoopV1InteractiveCli = HadoopV1InteractiveClient( self.__User , self.__publicIP, self.__Port )
    returned = HadoopV1InteractiveCli.dataCopy( tempPath, self.__tmpSandBoxDir )
    self.log.debug( 'Result of copying the job content to the Hadoop Master with HadoopInteractive::: ', returned )

    #4.- Get executable file
    result = jobDB.getJobJDL( str( self.__jobID ) , True )
    classAdJob = ClassAd( result['Value'] )
    executableFile = ""
    if classAdJob.lookupAttribute( 'Executable' ):
      executableFile = classAdJob.getAttributeString( 'Executable' )
    self.log.debug( 'Step4::: Get executable file: ', executableFile )

    jobInfo = jobDB.getJobAttributes( self.__jobID )
    if not jobInfo['OK']:
      return S_ERROR( jobInfo['Value'] )
    proxy = ""
    jobInfo = jobInfo['Value']
    if gProxyManager.userHasProxy( jobInfo["OwnerDN"], jobInfo["OwnerGroup"] ):
      proxy = gProxyManager.downloadProxyToFile( jobInfo["OwnerDN"], jobInfo["OwnerGroup"] )
    else:
      proxy = self.__requestProxyFromProxyManager( jobInfo["OwnerDN"], jobInfo["OwnerGroup"] )

    HadoopInteractiveJob = "InputSandbox" + str( self.__jobID ) + "/" + executableFile
    HadoopInteractiveJobCommand = "InputSandbox" + str( self.__jobID ) + "/" + executableFile + " " + self.__JobName
    HadoopInteractiveJobOutput = tempPath + str( self.__jobID ) + "_" + executableFile + "_out"

    #5.- Creating second part of the job name
    if ( len( re.split( " ", self.__JobName ) ) > 1 ):
    #(name for random writter with -D)name_job = re.split( " ", self.__JobName )[0] + " " + re.split( " ", self.__JobName )[1] + " " + re.split( " ", self.__JobName )[2]
      name_job = re.split( " ", self.__JobName )[0] + " " + re.split( " ", self.__JobName )[1]
    #(name for random writter with -D)output_job = moveData + "/" + re.split( " ", self.__JobName )[3]

    #(name for random writter with -D)cfg_job = ""
    #(name for random writter with -D)if ( len( re.split( " ", self.__JobName ) ) > 4 ):
    #(name for random writter with -D)  cfg_job = moveData + "/" + re.split( " ", self.__JobName )[4]

    #6.- Parsing execution command
    #cmd = "hadoop jar " + tempPath + HadoopInteractiveJob + " " + name_job + " " + output_job + " " + cfg_job
      cmd = "hadoop jar " + tempPath + HadoopInteractiveJob + " " + name_job + " " + tempPath + "/InputSandbox" + str( self.__jobID ) + "/" + "/dataset-USC-a-grep '[and]+'"
    else:
      dataset = re.split( "/", self.__Dataset )
      count = 0
      datasetname = ""
      for dir in dataset:
        count = count + 1
        if ( count > 2 ):
          datasetname = datasetname + "/" + dir
      cmd = "hadoop jar " + tempPath + HadoopInteractiveJob + " " + self.__JobName + " " + datasetname + " " + tempPath + "/" + self.__JobName.replace( " ", "" ) + "_" + str( self.__jobID )
    self.log.debug( 'Step5::: Making CMD for submission: ', cmd )

    self.log.debug( 'Step6::: Submit file to hadoop: ' )
    returned = HadoopV1InteractiveCli.jobSubmit( tempPath, HadoopInteractiveJob, proxy['chain'],
                                                 HadoopInteractiveJobOutput, cmd )
    self.log.info( 'Launch Hadoop-HadoopInteractive job to the Master: ', returned )

    if not returned['OK']:
      return S_ERROR( returned['Message'] )
    else:
      self.log.info( 'Hadoop-HadoopInteractive Job ID: ', returned['Value'] )

    return S_OK( returned['Value'] )
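The dataset-path handling in the else branch above keeps everything after the first two '/'-separated components; a compact stand-alone equivalent:

def dataset_name(dataset_path):
    # mirror the count > 2 loop above: drop the leading empty component and the VO name
    parts = dataset_path.split('/')
    return ''.join('/' + p for p in parts[2:])

print(dataset_name('/lhcb/data/2012/RAW/FULL'))   # -> /data/2012/RAW/FULL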
Example No. 42
0
def generateParametricJobs(jobClassAd):
    """ Generate a series of ClassAd job descriptions expanding
      job parameters

  :param jobClassAd: ClassAd job description object
  :return: list of ClassAd job description objects
  """
    if not jobClassAd.lookupAttribute('Parameters'):
        return S_OK([jobClassAd.asJDL()])

    result = getParameterVectorLength(jobClassAd)
    if not result['OK']:
        return result
    nParValues = result['Value']
    if nParValues is None:
        return S_ERROR(EWMSJDL,
                       'Can not determine the number of job parameters')

    parameterDict = {}
    attributes = jobClassAd.getAttributes()
    for attribute in attributes:
        for key in [
                'Parameters', 'ParameterStart', 'ParameterStep',
                'ParameterFactor'
        ]:
            if attribute.startswith(key):
                seqID = '0' if '.' not in attribute else attribute.split(
                    '.')[1]
                parameterDict.setdefault(seqID, {})
                if key == 'Parameters':
                    if jobClassAd.isAttributeList(attribute):
                        parList = jobClassAd.getListFromExpression(attribute)
                        if len(parList) != nParValues:
                            return S_ERROR(
                                EWMSJDL,
                                'Inconsistent parametric job description')
                        parameterDict[seqID]['ParameterList'] = parList
                    else:
                        if attribute != "Parameters":
                            return S_ERROR(
                                EWMSJDL,
                                'Inconsistent parametric job description')
                        nPar = jobClassAd.getAttributeInt(attribute)
                        if nPar is None:
                            value = jobClassAd.get_expression(attribute)
                            return S_ERROR(
                                EWMSJDL,
                                'Inconsistent parametric job description: %s=%s'
                                % (attribute, value))
                        parameterDict[seqID]['Parameters'] = nPar
                else:
                    value = jobClassAd.getAttributeInt(attribute)
                    if value is None:
                        value = jobClassAd.getAttributeFloat(attribute)
                        if value is None:
                            value = jobClassAd.get_expression(attribute)
                            return S_ERROR(
                                'Illegal value for %s JDL field: %s' %
                                (attribute, value))
                    parameterDict[seqID][key] = value

    if '0' in parameterDict and not parameterDict.get('0'):
        parameterDict.pop('0')

    parameterLists = {}
    for seqID in parameterDict:
        parList = __getParameterSequence(
            nParValues,
            parList=parameterDict[seqID].get('ParameterList', []),
            parStart=parameterDict[seqID].get('ParameterStart', 1),
            parStep=parameterDict[seqID].get('ParameterStep', 0),
            parFactor=parameterDict[seqID].get('ParameterFactor', 1))
        if not parList:
            return S_ERROR(EWMSJDL, 'Inconsistent parametric job description')

        parameterLists[seqID] = parList

    jobDescList = []
    jobDesc = jobClassAd.asJDL()
    # Width of the sequential parameter number
    zLength = len(str(nParValues - 1))
    for n in range(nParValues):
        newJobDesc = jobDesc
        newJobDesc = newJobDesc.replace('%n', str(n).zfill(zLength))
        newClassAd = ClassAd(newJobDesc)
        for seqID in parameterLists:
            parameter = parameterLists[seqID][n]
            for attribute in newClassAd.getAttributes():
                __updateAttribute(newClassAd, attribute, seqID, str(parameter))

        for seqID in parameterLists:
            for attribute in [
                    'Parameters', 'ParameterStart', 'ParameterStep',
                    'ParameterFactor'
            ]:
                if seqID == '0':
                    newClassAd.deleteAttribute(attribute)
                else:
                    newClassAd.deleteAttribute('%s.%s' % (attribute, seqID))

            parameter = parameterLists[seqID][n]
            if seqID == '0':
                attribute = 'Parameter'
            else:
                attribute = 'Parameter.%s' % seqID
            if isinstance(parameter,
                          six.string_types) and parameter.startswith('{'):
                newClassAd.insertAttributeInt(attribute, str(parameter))
            else:
                newClassAd.insertAttributeString(attribute, str(parameter))

        newClassAd.insertAttributeInt('ParameterNumber', n)
        newJDL = newClassAd.asJDL()
        jobDescList.append(newJDL)

    return S_OK(jobDescList)
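A hedged usage sketch (the import path and the JDL values are assumptions, and a DIRAC installation is required): a description with two parameter sequences that generateParametricJobs would expand into three jobs, each carrying Parameter, Parameter.1 and ParameterNumber.

from DIRAC.Core.Utilities.ClassAd.ClassAdLight import ClassAd   # assumed module path

TEST_JDL = """
[
  Executable = "my_script.sh";
  JobName = "Param_%n";
  Parameters = 3;
  ParameterStart = 1;
  ParameterStep = 1;
  Parameters.1 = {"a", "b", "c"};
]
"""

# generateParametricJobs as defined above
result = generateParametricJobs(ClassAd(TEST_JDL))
if result['OK']:
    for jdl in result['Value']:
        print(jdl)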
Example No. 43
0
def generateParametricJobs( jobClassAd ):
  """ Generate a series of ClassAd job descriptions expanding
      job parameters

  :param jobClassAd: ClassAd job description object
  :return: list of ClassAd job description objects
  """
  if not jobClassAd.lookupAttribute( 'Parameters' ):
    return S_OK( [ jobClassAd.asJDL() ] )

  result = getParameterVectorLength( jobClassAd )
  if not result['OK']:
    return result
  nParValues = result['Value']
  if nParValues is None:
    return S_ERROR(EWMSJDL, 'Can not determine the number of job parameters')

  parameterDict = {}
  attributes = jobClassAd.getAttributes()
  for attribute in attributes:
    for key in [ 'Parameters', 'ParameterStart', 'ParameterStep', 'ParameterFactor' ]:
      if attribute.startswith( key ):
        seqID = '0' if not '.' in attribute else attribute.split( '.' )[1]
        parameterDict.setdefault( seqID, {} )
        if key == 'Parameters':
          if jobClassAd.isAttributeList( attribute ):
            parList = jobClassAd.getListFromExpression( attribute )
            if len( parList ) != nParValues:
              return S_ERROR( EWMSJDL, 'Inconsistent parametric job description' )
            parameterDict[seqID]['ParameterList'] = parList
          else:
            if attribute != "Parameters":
              return S_ERROR( EWMSJDL, 'Inconsistent parametric job description' )
            nPar = jobClassAd.getAttributeInt( attribute )
            if nPar is None:
              value = jobClassAd.get_expression( attribute )
              return S_ERROR( EWMSJDL, 'Inconsistent parametric job description: %s=%s' % ( attribute, value ) )
            parameterDict[seqID]['Parameters'] = nPar
        else:
          value = jobClassAd.getAttributeInt( attribute )
          if value is None:
            value = jobClassAd.getAttributeFloat( attribute )
            if value is None:
              value = jobClassAd.get_expression( attribute )
              return S_ERROR( 'Illegal value for %s JDL field: %s' % ( attribute, value ) )
          parameterDict[seqID][key] = value

  if '0' in parameterDict and not parameterDict.get( '0' ):
    parameterDict.pop( '0' )

  parameterLists = {}
  for seqID in parameterDict:
    parList = __getParameterSequence( nParValues,
                                      parList = parameterDict[seqID].get( 'ParameterList', [] ),
                                      parStart = parameterDict[seqID].get( 'ParameterStart', 1 ),
                                      parStep = parameterDict[seqID].get( 'ParameterStep', 0 ),
                                      parFactor = parameterDict[seqID].get( 'ParameterFactor', 1 )
                                    )
    if not parList:
      return S_ERROR( EWMSJDL, 'Inconsistent parametric job description' )

    parameterLists[seqID] = parList

  jobDescList = []
  jobDesc = jobClassAd.asJDL()
  # Width of the sequential parameter number
  zLength = len( str( nParValues - 1 ) )
  for n in range( nParValues ):
    newJobDesc = jobDesc
    newJobDesc = newJobDesc.replace( '%n', str( n ).zfill( zLength ) )
    newClassAd = ClassAd( newJobDesc )
    for seqID in parameterLists:
      parameter = parameterLists[seqID][n]
      for attribute in newClassAd.getAttributes():
        __updateAttribute( newClassAd, attribute, seqID, str( parameter ) )

    for seqID in parameterLists:
      for attribute in ['Parameters', 'ParameterStart', 'ParameterStep', 'ParameterFactor']:
        if seqID == '0':
          newClassAd.deleteAttribute( attribute )
        else:
          newClassAd.deleteAttribute( '%s.%s' % ( attribute, seqID ) )

      parameter = parameterLists[seqID][n]
      if seqID == '0':
        attribute = 'Parameter'
      else:
        attribute = 'Parameter.%s' % seqID
      if isinstance( parameter, basestring) and parameter.startswith( '{' ):
        newClassAd.insertAttributeInt( attribute, str( parameter ) )
      else:
        newClassAd.insertAttributeString( attribute, str( parameter ) )

    newClassAd.insertAttributeInt( 'ParameterNumber', n )
    newJDL = newClassAd.asJDL()
    jobDescList.append( newJDL )

  return S_OK( jobDescList )
Example No. 44
0
def matchQueue(jobJDL, queueDict, fullMatch=False):
    """
    Match the job description to the queue definition

    :param str jobJDL: JDL job description
    :param bool fullMatch: test matching on all the criteria
    :param dict queueDict: queue parameters dictionary

    :return: S_OK/S_ERROR, Value - result of matching, S_OK if matched or
             S_ERROR with the reason for no match
    """

    # Check the job description validity
    job = ClassAd(jobJDL)
    if not job.isOK():
        return S_ERROR("Invalid job description")

    noMatchReasons = []

    # Check job requirements to resource
    # 1. CPUTime
    cpuTime = job.getAttributeInt("CPUTime")
    if not cpuTime:
        cpuTime = 84600
    if cpuTime > int(queueDict.get("CPUTime", 0)):
        noMatchReasons.append("Job CPUTime requirement not satisfied")
        if not fullMatch:
            return S_OK({"Match": False, "Reason": noMatchReasons[0]})

    # 2. Multi-value match requirements
    for parameter in ["Site", "GridCE", "Platform", "JobType"]:
        if parameter in queueDict:
            valueSet = set(job.getListFromExpression(parameter))
            if not valueSet:
                valueSet = set(job.getListFromExpression("%ss" % parameter))
            queueSet = set(fromChar(queueDict[parameter]))
            if valueSet and queueSet and not valueSet.intersection(queueSet):
                valueToPrint = ",".join(valueSet)
                if len(valueToPrint) > 20:
                    valueToPrint = "%s..." % valueToPrint[:20]
                noMatchReasons.append("Job %s %s requirement not satisfied" % (parameter, valueToPrint))
                if not fullMatch:
                    return S_OK({"Match": False, "Reason": noMatchReasons[0]})

    # 3. Banned multi-value match requirements
    for par in ["Site", "GridCE", "Platform", "JobType"]:
        parameter = "Banned%s" % par
        if par in queueDict:
            valueSet = set(job.getListFromExpression(parameter))
            if not valueSet:
                valueSet = set(job.getListFromExpression("%ss" % parameter))
            queueSet = set(fromChar(queueDict[par]))
            if valueSet and queueSet and valueSet.issubset(queueSet):
                valueToPrint = ",".join(valueSet)
                if len(valueToPrint) > 20:
                    valueToPrint = "%s..." % valueToPrint[:20]
                noMatchReasons.append("Job %s %s requirement not satisfied" % (parameter, valueToPrint))
                if not fullMatch:
                    return S_OK({"Match": False, "Reason": noMatchReasons[0]})

    # 4. Tags
    tags = set(job.getListFromExpression("Tag"))
    nProc = job.getAttributeInt("NumberOfProcessors")
    if nProc and nProc > 1:
        tags.add("MultiProcessor")
    wholeNode = job.getAttributeString("WholeNode")
    if wholeNode:
        tags.add("WholeNode")
    queueTags = set(queueDict.get("Tag", []))
    if not tags.issubset(queueTags):
        noMatchReasons.append("Job Tag %s not satisfied" % ",".join(tags))
        if not fullMatch:
            return S_OK({"Match": False, "Reason": noMatchReasons[0]})

    # 5. MultiProcessor requirements
    if nProc and nProc > int(queueDict.get("NumberOfProcessors", 1)):
        noMatchReasons.append("Job NumberOfProcessors %d requirement not satisfied" % nProc)
        if not fullMatch:
            return S_OK({"Match": False, "Reason": noMatchReasons[0]})

    # 6. RAM
    ram = job.getAttributeInt("RAM")
    # If MaxRAM is not specified in the queue description, assume 2GB
    if ram and ram > int(queueDict.get("MaxRAM", 2048) / 1024):
        noMatchReasons.append("Job RAM %d requirement not satisfied" % ram)
        if not fullMatch:
            return S_OK({"Match": False, "Reason": noMatchReasons[0]})

    # Check resource requirements to job
    # 1. OwnerGroup - rare case but still
    if "OwnerGroup" in queueDict:
        result = getProxyInfo(disableVOMS=True)
        if not result["OK"]:
            return S_ERROR("No valid proxy available")
        ownerGroup = result["Value"]["group"]
        if ownerGroup != queueDict["OwnerGroup"]:
            noMatchReasons.append("Resource OwnerGroup %s requirement not satisfied" % queueDict["OwnerGroup"])
            if not fullMatch:
                return S_OK({"Match": False, "Reason": noMatchReasons[0]})

    # 2. Required tags
    requiredTags = set(queueDict.get("RequiredTags", []))
    if not requiredTags.issubset(tags):
        noMatchReasons.append("Resource RequiredTags %s not satisfied" % ",".join(requiredTags))
        if not fullMatch:
            return S_OK({"Match": False, "Reason": noMatchReasons[0]})

    # 3. RunningLimit
    site = queueDict["Site"]
    ce = queueDict.get("GridCE")
    opsHelper = Operations()
    result = opsHelper.getSections("JobScheduling/RunningLimit")
    if result["OK"] and site in result["Value"]:
        result = opsHelper.getSections("JobScheduling/RunningLimit/%s" % site)
        if result["OK"]:
            for parameter in result["Value"]:
                value = job.getAttributeString(parameter)
                if (
                    value
                    and (
                        opsHelper.getValue("JobScheduling/RunningLimit/%s/%s/%s" % (site, parameter, value), 1)
                        or opsHelper.getValue(
                            "JobScheduling/RunningLimit/%s/CEs/%s/%s/%s" % (site, ce, parameter, value), 1
                        )
                    )
                    == 0
                ):
                    noMatchReasons.append("Resource operational %s requirement not satisfied" % parameter)
                    if not fullMatch:
                        return S_OK({"Match": False, "Reason": noMatchReasons[0]})

    return S_OK({"Match": not bool(noMatchReasons), "Reason": noMatchReasons})
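The tag rules above reduce to two subset tests; a minimal illustration with made-up values:

jobTags = {'MultiProcessor'}                     # e.g. derived from NumberOfProcessors > 1
queueTags = {'MultiProcessor', 'WholeNode'}      # queue's Tag list
requiredTags = {'MultiProcessor'}                # queue's RequiredTags list

# the job's tags must all be offered by the queue, and the queue's required
# tags must all be carried by the job
tagsMatch = jobTags.issubset(queueTags) and requiredTags.issubset(jobTags)
print(tagsMatch)   # True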
Example No. 45
0
File: Job.py Project: sposs/DIRAC
  def _toJDL( self, xmlFile = '' ): #messy but need to account for xml file being in /tmp/guid dir
    """Creates a JDL representation of itself as a Job.
    """
    #Check if we have to do old bootstrap...
    classadJob = ClassAd( '[]' )

    paramsDict = {}
    params = self.workflow.parameters # ParameterCollection object

    paramList = params
    for param in paramList:
      paramsDict[param.getName()] = {'type':param.getType(), 'value':param.getValue()}

    scriptname = 'jobDescription.xml'
    arguments = []
    if self.script:
      if os.path.exists( self.script ):
        scriptname = os.path.abspath( self.script )
        self.log.verbose( 'Found script name %s' % scriptname )
    else:
      if xmlFile:
        self.log.verbose( 'Found XML File %s' % xmlFile )
        scriptname = xmlFile

    arguments.append( os.path.basename( scriptname ) )
    self.addToInputSandbox.append( scriptname )
    if paramsDict.has_key( 'LogLevel' ):
      if paramsDict['LogLevel']['value']:
        arguments.append( '-o LogLevel=%s' % ( paramsDict['LogLevel']['value'] ) )
      else:
        self.log.warn( 'Job LogLevel defined with null value' )
    if paramsDict.has_key( 'DIRACSetup' ):
      if paramsDict['DIRACSetup']['value']:
        arguments.append( '-o DIRAC/Setup=%s' % ( paramsDict['DIRACSetup']['value'] ) )
      else:
        self.log.warn( 'Job DIRACSetup defined with null value' )
    if paramsDict.has_key( 'JobMode' ):
      if paramsDict['JobMode']['value']:
        arguments.append( '-o JobMode=%s' % ( paramsDict['JobMode']['value'] ) )
      else:
        self.log.warn( 'Job Mode defined with null value' )
    if paramsDict.has_key( 'JobConfigArgs' ):
      if paramsDict['JobConfigArgs']['value']:
        arguments.append( '%s' % ( paramsDict['JobConfigArgs']['value'] ) )
      else:
        self.log.warn( 'JobConfigArgs defined with null value' )

    classadJob.insertAttributeString( 'Executable', self.executable )
    self.addToOutputSandbox.append( self.stderr )
    self.addToOutputSandbox.append( self.stdout )

    #Extract i/o sandbox parameters from steps and any input data parameters
    #to do when introducing step-level api...

    #To add any additional files to input and output sandboxes
    if self.addToInputSandbox:
      extraFiles = ';'.join( self.addToInputSandbox )
      if paramsDict.has_key( 'InputSandbox' ):
        currentFiles = paramsDict['InputSandbox']['value']
        finalInputSandbox = currentFiles + ';' + extraFiles
        uniqueInputSandbox = uniqueElements( finalInputSandbox.split( ';' ) )
        paramsDict['InputSandbox']['value'] = ';'.join( uniqueInputSandbox )
        self.log.verbose( 'Final unique Input Sandbox %s' % ( ';'.join( uniqueInputSandbox ) ) )
      else:
        paramsDict['InputSandbox'] = {}
        paramsDict['InputSandbox']['value'] = extraFiles
        paramsDict['InputSandbox']['type'] = 'JDL'

    if self.addToOutputSandbox:
      extraFiles = ';'.join( self.addToOutputSandbox )
      if paramsDict.has_key( 'OutputSandbox' ):
        currentFiles = paramsDict['OutputSandbox']['value']
        finalOutputSandbox = currentFiles + ';' + extraFiles
        uniqueOutputSandbox = uniqueElements( finalOutputSandbox.split( ';' ) )
        paramsDict['OutputSandbox']['value'] = ';'.join( uniqueOutputSandbox )
        self.log.verbose( 'Final unique Output Sandbox %s' % ( ';'.join( uniqueOutputSandbox ) ) )
      else:
        paramsDict['OutputSandbox'] = {}
        paramsDict['OutputSandbox']['value'] = extraFiles
        paramsDict['OutputSandbox']['type'] = 'JDL'

    if self.addToInputData:
      extraFiles = ';'.join( self.addToInputData )
      if paramsDict.has_key( 'InputData' ):
        currentFiles = paramsDict['InputData']['value']
        finalInputData = extraFiles
        if currentFiles:
          finalInputData = currentFiles + ';' + extraFiles
        uniqueInputData = uniqueElements( finalInputData.split( ';' ) )
        paramsDict['InputData']['value'] = ';'.join( uniqueInputData )
        self.log.verbose( 'Final unique Input Data %s' % ( ';'.join( uniqueInputData ) ) )
      else:
        paramsDict['InputData'] = {}
        paramsDict['InputData']['value'] = extraFiles
        paramsDict['InputData']['type'] = 'JDL'

    # Handle here the Parametric values
    if self.parametric:
      for pType in ['InputData', 'InputSandbox']:
        if self.parametric.has_key( pType ):
          if paramsDict.has_key( pType ) and paramsDict[pType]['value']:
            pData = self.parametric[pType]
            # List of lists case
            currentFiles = paramsDict[pType]['value'].split( ';' )
            tmpList = []
            if type( pData[0] ) == list:
              for pElement in pData:
                tmpList.append( currentFiles + pElement )
            else:
              for pElement in pData:
                tmpList.append( currentFiles + [pElement] )
            self.parametric[pType] = tmpList

          paramsDict[pType] = {}
          paramsDict[pType]['value'] = "%s"
          paramsDict[pType]['type'] = 'JDL'
          self.parametric['files'] = self.parametric[pType]
          arguments.append( ' -p Parametric' + pType + '=%s' )
          break

      if self.parametric.has_key( 'files' ):
        paramsDict['Parameters'] = {}
        paramsDict['Parameters']['value'] = self.parametric['files']
        paramsDict['Parameters']['type'] = 'JDL'
      if self.parametric.has_key( 'GenericParameters' ):
        paramsDict['Parameters'] = {}
        paramsDict['Parameters']['value'] = self.parametric['GenericParameters']
        paramsDict['Parameters']['type'] = 'JDL'
        arguments.append( ' -p ParametricParameters=%s' )
    ##This needs to be put here so that the InputData and/or InputSandbox parameters for parametric jobs are processed
    classadJob.insertAttributeString( 'Arguments', ' '.join( arguments ) )

    #Add any JDL parameters to classad obeying lists with ';' rule
    requirements = False
    for name, props in paramsDict.items():
      ptype = props['type']
      value = props['value']
      if name.lower() == 'requirements' and ptype == 'JDL':
        self.log.verbose( 'Found existing requirements: %s' % ( value ) )
        requirements = True

      if re.search( '^JDL', ptype ):
        if type( value ) == list:
          if type( value[0] ) == list:
            classadJob.insertAttributeVectorStringList( name, value )
          else:
            classadJob.insertAttributeVectorString( name, value )
        elif value == "%s":
          classadJob.insertAttributeInt( name, value )
        elif not re.search( ';', value ) or name == 'GridRequirements': #not a nice fix...
          classadJob.insertAttributeString( name, value )
        else:
          classadJob.insertAttributeVectorString( name, value.split( ';' ) )

    if not requirements:
      reqtsDict = self.reqParams
      exprn = ''
      plus = ''
      for name, props in paramsDict.items():
        ptype = paramsDict[name]['type']
        value = paramsDict[name]['value']
        if not ptype == 'dict':
          if ptype == 'JDLReqt':
            if value and not value.lower() == 'any':
              plus = ' && '
              if re.search( ';', value ):
                for val in value.split( ';' ):
                  exprn += reqtsDict[name].replace( 'NAME', name ).replace( 'VALUE', str( val ) ) + plus
              else:
                exprn += reqtsDict[name].replace( 'NAME', name ).replace( 'VALUE', str( value ) ) + plus

      if len( plus ):
        exprn = exprn[:-len( plus )]
      if not exprn:
        exprn = 'true'
      self.log.verbose( 'Requirements: %s' % ( exprn ) )
      #classadJob.set_expression('Requirements', exprn)

    self.addToInputSandbox.remove( scriptname )
    self.addToOutputSandbox.remove( self.stdout )
    self.addToOutputSandbox.remove( self.stderr )
    jdl = classadJob.asJDL()
    start = jdl.find( '[' )
    end = jdl.rfind( ']' )
    return jdl[( start + 1 ):( end - 1 )]
Example No. 46
0
  def selectJob( self, resourceDescription ):
    """ Main job selection function to find the highest priority job
        matching the resource capacity
    """

    startTime = time.time()

    # Check and form the resource description dictionary
    resourceDict = {}
    if type( resourceDescription ) in StringTypes:
      classAdAgent = ClassAd( resourceDescription )
      if not classAdAgent.isOK():
        return S_ERROR( 'Illegal Resource JDL' )
      gLogger.verbose( classAdAgent.asJDL() )

      for name in gTaskQueueDB.getSingleValueTQDefFields():
        if classAdAgent.lookupAttribute( name ):
          if name == 'CPUTime':
            resourceDict[name] = classAdAgent.getAttributeInt( name )
          else:
            resourceDict[name] = classAdAgent.getAttributeString( name )

      for name in gTaskQueueDB.getMultiValueMatchFields():
        if classAdAgent.lookupAttribute( name ):
          resourceDict[name] = classAdAgent.getAttributeString( name )

      # Check if a JobID is requested
      if classAdAgent.lookupAttribute( 'JobID' ):
        resourceDict['JobID'] = classAdAgent.getAttributeInt( 'JobID' )

      if classAdAgent.lookupAttribute( 'DIRACVersion' ):
        resourceDict['DIRACVersion'] = classAdAgent.getAttributeString( 'DIRACVersion' )

      if classAdAgent.lookupAttribute( 'VirtualOrganization' ):
        resourceDict['VirtualOrganization'] = classAdAgent.getAttributeString( 'VirtualOrganization' )

    else:
      for name in gTaskQueueDB.getSingleValueTQDefFields():
        if resourceDescription.has_key( name ):
          resourceDict[name] = resourceDescription[name]

      for name in gTaskQueueDB.getMultiValueMatchFields():
        if resourceDescription.has_key( name ):
          resourceDict[name] = resourceDescription[name]

      if resourceDescription.has_key( 'JobID' ):
        resourceDict['JobID'] = resourceDescription['JobID']

      if resourceDescription.has_key( 'DIRACVersion' ):
        resourceDict['DIRACVersion'] = resourceDescription['DIRACVersion']

      if resourceDescription.has_key( 'VirtualOrganization' ):
        resourceDict['VirtualOrganization'] = resourceDescription['VirtualOrganization']

    # Check the pilot DIRAC version
    if self.checkPilotVersion:
      if not 'DIRACVersion' in resourceDict:
        return S_ERROR( 'Version check requested and not provided by Pilot' )

      # Check if the matching Request provides a VirtualOrganization
      if 'VirtualOrganization' in resourceDict:
        voName = resourceDict['VirtualOrganization']
      # Check if the matching Request provides an OwnerGroup
      elif 'OwnerGroup' in resourceDict:
        voName = getVOForGroup( resourceDict['OwnerGroup'] )
      # else take the default VirtualOrganization for the installation
      else:
        voName = getVOForGroup( '' )

      self.pilotVersion = gConfig.getValue( '/Operations/%s/%s/Versions/PilotVersion' % ( voName, self.setup ), '' )
      if self.pilotVersion and resourceDict['DIRACVersion'] != self.pilotVersion:
        return S_ERROR( 'Pilot version does not match the production version %s:%s' % \
                       ( resourceDict['DIRACVersion'], self.pilotVersion ) )

    # Get common site mask and check the agent site
    result = gJobDB.getSiteMask( siteState = 'Active' )
    if result['OK']:
      maskList = result['Value']
    else:
      return S_ERROR( 'Internal error: can not get site mask' )

    if not 'Site' in resourceDict:
      return S_ERROR( 'Missing Site Name in Resource JDL' )

    siteName = resourceDict['Site']
    if resourceDict['Site'] not in maskList:
      if 'GridCE' in resourceDict:
        del resourceDict['Site']
      else:
        return S_ERROR( 'Site not in mask and GridCE not specified' )

    resourceDict['Setup'] = self.serviceInfoDict['clientSetup']

    if DEBUG:
      print "Resource description:"
      for key, value in resourceDict.items():
        print key.rjust( 20 ), value

    # Check if Job Limits are imposed onto the site
    extraConditions = {}
    if self.siteJobLimits:
      result = self.getExtraConditions( siteName )
      if result['OK']:
        extraConditions = result['Value']
    if extraConditions:
      gLogger.info( 'Job Limits for site %s are: %s' % ( siteName, str( extraConditions ) ) )

    result = gTaskQueueDB.matchAndGetJob( resourceDict, extraConditions = extraConditions )

    if DEBUG:
      print result

    if not result['OK']:
      return result
    result = result['Value']
    if not result['matchFound']:
      return S_ERROR( 'No match found' )

    jobID = result['jobId']
    resAtt = gJobDB.getJobAttributes( jobID, ['OwnerDN', 'OwnerGroup', 'Status'] )
    if not resAtt['OK']:
      return S_ERROR( 'Could not retrieve job attributes' )
    if not resAtt['Value']:
      return S_ERROR( 'No attributes returned for job' )
    if not resAtt['Value']['Status'] == 'Waiting':
      gLogger.error( 'Job %s matched by the TQ is not in Waiting state' % str( jobID ) )
      result = gTaskQueueDB.deleteJob( jobID )

    result = gJobDB.setJobStatus( jobID, status = 'Matched', minor = 'Assigned' )
    result = gJobLoggingDB.addLoggingRecord( jobID,
                                           status = 'Matched',
                                           minor = 'Assigned',
                                           source = 'Matcher' )

    result = gJobDB.getJobJDL( jobID )
    if not result['OK']:
      return S_ERROR( 'Failed to get the job JDL' )

    resultDict = {}
    resultDict['JDL'] = result['Value']
    resultDict['JobID'] = jobID

    matchTime = time.time() - startTime
    gLogger.info( "Match time: [%s]" % str( matchTime ) )
    gMonitor.addMark( "matchTime", matchTime )

    # Get some extra stuff into the response returned
    resOpt = gJobDB.getJobOptParameters( jobID )
    if resOpt['OK']:
      for key, value in resOpt['Value'].items():
        resultDict[key] = value
    resAtt = gJobDB.getJobAttributes( jobID, ['OwnerDN', 'OwnerGroup'] )
    if not resAtt['OK']:
      return S_ERROR( 'Could not retrieve job attributes' )
    if not resAtt['Value']:
      return S_ERROR( 'No attributes returned for job' )

    resultDict['DN'] = resAtt['Value']['OwnerDN']
    resultDict['Group'] = resAtt['Value']['OwnerGroup']
    return S_OK( resultDict )
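The pilot version gate above, isolated as plain Python with made-up version strings:

def check_pilot_version(resourceDict, pilotVersion, checkPilotVersion=True):
    # refuse the match when the pilot's DIRACVersion differs from the
    # configured production PilotVersion
    if not checkPilotVersion:
        return True, ''
    if 'DIRACVersion' not in resourceDict:
        return False, 'Version check requested and not provided by Pilot'
    if pilotVersion and resourceDict['DIRACVersion'] != pilotVersion:
        return False, ('Pilot version does not match the production version %s:%s'
                       % (resourceDict['DIRACVersion'], pilotVersion))
    return True, ''

print(check_pilot_version({'DIRACVersion': 'v6r20p3'}, 'v6r20p5'))   # rejected
print(check_pilot_version({'DIRACVersion': 'v6r20p5'}, 'v6r20p5'))   # accepted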
Example No. 47
0
    def __processResourceDescription(self, resourceDescription):
        # Check and form the resource description dictionary
        resourceDict = {}
        if type(resourceDescription) in StringTypes:
            classAdAgent = ClassAd(resourceDescription)
            if not classAdAgent.isOK():
                return S_ERROR('Illegal Resource JDL')
            gLogger.verbose(classAdAgent.asJDL())

            for name in gTaskQueueDB.getSingleValueTQDefFields():
                if classAdAgent.lookupAttribute(name):
                    if name == 'CPUTime':
                        resourceDict[name] = classAdAgent.getAttributeInt(name)
                    else:
                        resourceDict[name] = classAdAgent.getAttributeString(
                            name)

            for name in gTaskQueueDB.getMultiValueMatchFields():
                if classAdAgent.lookupAttribute(name):
                    if name == 'SubmitPool':
                        resourceDict[
                            name] = classAdAgent.getListFromExpression(name)
                    else:
                        resourceDict[name] = classAdAgent.getAttributeString(
                            name)

            # Check if a JobID is requested
            if classAdAgent.lookupAttribute('JobID'):
                resourceDict['JobID'] = classAdAgent.getAttributeInt('JobID')

            for k in ('DIRACVersion', 'ReleaseVersion', 'ReleaseProject',
                      'VirtualOrganization'):
                if classAdAgent.lookupAttribute(k):
                    resourceDict[k] = classAdAgent.getAttributeString(k)

        else:
            for name in gTaskQueueDB.getSingleValueTQDefFields():
                if resourceDescription.has_key(name):
                    resourceDict[name] = resourceDescription[name]

            for name in gTaskQueueDB.getMultiValueMatchFields():
                if resourceDescription.has_key(name):
                    resourceDict[name] = resourceDescription[name]

            if resourceDescription.has_key('JobID'):
                resourceDict['JobID'] = resourceDescription['JobID']

            for k in ('DIRACVersion', 'ReleaseVersion', 'ReleaseProject',
                      'VirtualOrganization', 'PilotReference',
                      'PilotInfoReportedFlag', 'PilotBenchmark'):
                if k in resourceDescription:
                    resourceDict[k] = resourceDescription[k]

        return resourceDict
Example No. 48
0
  def _processResourceDescription( self, resourceDescription ):
    """ Check and form the resource description dictionary

        resourceDescription is a ceDict coming from a JobAgent, for example.
    """

    resourceDict = {}
    if isinstance( resourceDescription, basestring ):
      classAdAgent = ClassAd( resourceDescription )
      if not classAdAgent.isOK():
        raise ValueError( 'Illegal Resource JDL' )
      self.log.verbose( classAdAgent.asJDL() )

      for name in singleValueDefFields:
        if classAdAgent.lookupAttribute( name ):
          if name == 'CPUTime':
            resourceDict[name] = classAdAgent.getAttributeInt( name )
          else:
            resourceDict[name] = classAdAgent.getAttributeString( name )

      for name in multiValueMatchFields:
        if classAdAgent.lookupAttribute( name ):
          if name == 'SubmitPool':
            resourceDict[name] = classAdAgent.getListFromExpression( name )
          else:
            resourceDict[name] = classAdAgent.getAttributeString( name )

      # Check if a JobID is requested
      if classAdAgent.lookupAttribute( 'JobID' ):
        resourceDict['JobID'] = classAdAgent.getAttributeInt( 'JobID' )

      for k in ( 'DIRACVersion', 'ReleaseVersion', 'ReleaseProject', 'VirtualOrganization' ):
        if classAdAgent.lookupAttribute( k ):
          resourceDict[ k ] = classAdAgent.getAttributeString( k )

    else:
      for name in singleValueDefFields:
        if resourceDescription.has_key( name ):
          resourceDict[name] = resourceDescription[name]

      for name in multiValueMatchFields:
        if resourceDescription.has_key( name ):
          resourceDict[name] = resourceDescription[name]

      if 'JobID' in resourceDescription:
        resourceDict['JobID'] = resourceDescription['JobID']

      # Convert MaxRAM and NumberOfCores parameters into a list of tags
      maxRAM = resourceDescription.get( 'MaxRAM' )
      nCores = resourceDescription.get( 'NumberOfProcessors' )
      for param, key in [ ( maxRAM, 'GB' ), ( nCores, 'Cores' ) ]:
        if param:
          try:
            intValue = int( param )/1000
            if intValue <= 128:
              paramList = range( 1, intValue + 1 )
              paramTags = [ '%d%s' % ( par, key ) for par in paramList ]
              resourceDict.setdefault( "Tag", [] ).extend( paramTags )
          except ValueError:
            pass
      if 'Tag' in resourceDict:
        resourceDict['Tag'] = list( set( resourceDict['Tag'] ) )

      for k in ( 'DIRACVersion', 'ReleaseVersion', 'ReleaseProject', 'VirtualOrganization',
                 'PilotReference', 'PilotBenchmark', 'PilotInfoReportedFlag' ):
        if k in resourceDescription:
          resourceDict[ k ] = resourceDescription[ k ]

    return resourceDict
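The MaxRAM branch of the tag derivation above, as a stand-alone sketch (MaxRAM is in MB; the listing relies on Python 2 integer division, written here as //):

def ram_tags(maxRAM):
    # a MaxRAM value in MB becomes cumulative "NGB" tags up to the node size
    intValue = int(maxRAM) // 1000
    if intValue <= 128:
        return ['%dGB' % par for par in range(1, intValue + 1)]
    return []

print(ram_tags(4096))   # ['1GB', '2GB', '3GB', '4GB']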
Example No. 49
0
  def __sendJobToTaskQueue( self, job, classAdJob, siteCandidates, bannedSites ):
    """This method sends jobs to the task queue agent and if candidate sites
       are defined, updates job JDL accordingly.
    """

    reqJDL = classAdJob.get_expression( 'JobRequirements' )
    classAddReq = ClassAd( reqJDL )

    if siteCandidates:
      classAddReq.insertAttributeVectorString( 'Sites', siteCandidates )
    if bannedSites:
      classAddReq.insertAttributeVectorString( 'BannedSites', bannedSites )

    if classAdJob.lookupAttribute( "SubmitPools" ):
      classAddReq.set_expression( 'SubmitPools', classAdJob.get_expression( 'SubmitPools' ) )

    if classAdJob.lookupAttribute( "GridMiddleware" ):
      classAddReq.set_expression( 'GridMiddleware', classAdJob.get_expression( 'GridMiddleware' ) )

    if classAdJob.lookupAttribute( "PilotTypes" ):
      classAddReq.set_expression( 'PilotTypes', classAdJob.get_expression( 'PilotTypes' ) )
    #Hack to migrate old jobs to new ones.
    #DELETE ON 08/09
    else:
      if classAdJob.lookupAttribute( "PilotType" ):
        classAddReq.set_expression( 'PilotTypes', classAdJob.get_expression( 'PilotType' ) )

    if classAdJob.lookupAttribute( "JobType" ):
      jobTypes = [ jt for jt in classAdJob.getListFromExpression( 'JobType' ) if jt ]
      classAddReq.insertAttributeVectorString( 'JobTypes', jobTypes )

    #Required CE's requirements
    gridCEs = [ ce for ce in classAdJob.getListFromExpression( 'GridRequiredCEs' ) if ce ]
    if gridCEs:
      classAddReq.insertAttributeVectorString( 'GridCEs', gridCEs )

    if siteCandidates:
      sites = ','.join( siteCandidates )
      classAdJob.insertAttributeString( "Site", sites )

    reqJDL = classAddReq.asJDL()
    classAdJob.insertAttributeInt( 'JobRequirements', reqJDL )

    jdl = classAdJob.asJDL()
    result = self.jobDB.setJobJDL( job, jdl )
    if not result['OK']:
      return result

    if siteCandidates:
      if len( siteCandidates ) == 1:
        self.log.verbose( 'Individual site candidate for job %s is %s' % ( job, siteCandidates[0] ) )
        self.jobDB.setJobAttribute( job, 'Site', siteCandidates[0] )
      elif bannedSites:
        remainingSites = []
        for site in siteCandidates:
          if not site in bannedSites:
            remainingSites.append( site )
        if remainingSites:
          if len( remainingSites ) == 1:
            self.log.verbose( 'Individual site candidate for job %s is %s' % ( job, remainingSites[0] ) )
            self.jobDB.setJobAttribute( job, 'Site', remainingSites[0] )
          else:
            self.log.verbose( 'Site candidates for job %s are %s' % ( job, str( remainingSites ) ) )
            result = self.jobDB.getJobAttribute(job,'Site')
            siteGroup = "Multiple"  
            if result['OK']:
              if result['Value'].startswith('Group'):
                siteGroup = result['Value']              
            self.jobDB.setJobAttribute( job, 'Site', siteGroup )
      else:
        self.log.verbose( 'Site candidates for job %s are %s' % ( job, str( siteCandidates ) ) )
        result = self.jobDB.getJobAttribute(job,'Site')
        siteGroup = "Multiple"  
        if result['OK']:
          if result['Value'].startswith('Group'):
            siteGroup = result['Value']              
        self.jobDB.setJobAttribute( job, 'Site', siteGroup )
    else:
      self.log.verbose( 'All sites are eligible for job %s' % job )
      self.jobDB.setJobAttribute( job, 'Site', 'ANY' )

    return self.setNextOptimizer( job )
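A hedged sketch of the nested JobRequirements construction used above (requires a DIRAC installation; the import path is an assumption):

from DIRAC.Core.Utilities.ClassAd.ClassAdLight import ClassAd   # assumed module path

classAdJob = ClassAd('[ Executable = "my_script.sh"; ]')
classAddReq = ClassAd('[]')
classAddReq.insertAttributeVectorString('Sites', ['LCG.CERN.ch', 'LCG.IN2P3.fr'])
classAddReq.insertAttributeVectorString('BannedSites', ['LCG.CNAF.it'])

# insertAttributeInt is used, as in the method above, so the nested JDL is not quoted
classAdJob.insertAttributeInt('JobRequirements', classAddReq.asJDL())
print(classAdJob.asJDL())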
Example No. 50
0
    def test_MultiBad(self):

        clad = ClassAd(TEST_JDL_MULTI_BAD)
        result = getParameterVectorLength(clad)
        self.assertTrue(not result['OK'])
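TEST_JDL_MULTI_BAD itself is not reproduced in this listing; a plausible (hypothetical) example of the kind of inconsistency it exercises is two parameter sequences of different lengths, which getParameterVectorLength cannot reconcile:

TEST_JDL_MULTI_BAD_EXAMPLE = """
[
  Executable = "my_script.sh";
  Parameters = {"a", "b", "c"};
  Parameters.1 = {1, 2};
]
"""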
Example No. 51
0
  def selectJob( self, resourceDescription ):
    """ Main job selection function to find the highest priority job
        matching the resource capacity
    """

    startTime = time.time()

    # Check and form the resource description dictionary
    resourceDict = {}
    if type( resourceDescription ) in StringTypes:
      classAdAgent = ClassAd( resourceDescription )
      if not classAdAgent.isOK():
        return S_ERROR( 'Illegal Resource JDL' )
      gLogger.verbose( classAdAgent.asJDL() )

      for name in taskQueueDB.getSingleValueTQDefFields():
        if classAdAgent.lookupAttribute( name ):
          if name == 'CPUTime':
            resourceDict[name] = classAdAgent.getAttributeInt( name )
          else:
            resourceDict[name] = classAdAgent.getAttributeString( name )

      for name in taskQueueDB.getMultiValueMatchFields():
        if classAdAgent.lookupAttribute( name ):
          resourceDict[name] = classAdAgent.getAttributeString( name )

      # Check if a JobID is requested
      if classAdAgent.lookupAttribute( 'JobID' ):
        resourceDict['JobID'] = classAdAgent.getAttributeInt( 'JobID' )

      if classAdAgent.lookupAttribute( 'DIRACVersion' ):
        resourceDict['DIRACVersion'] = classAdAgent.getAttributeString( 'DIRACVersion' )

    else:
      for name in taskQueueDB.getSingleValueTQDefFields():
        if resourceDescription.has_key( name ):
          resourceDict[name] = resourceDescription[name]

      for name in taskQueueDB.getMultiValueMatchFields():
        if resourceDescription.has_key( name ):
          resourceDict[name] = resourceDescription[name]

      if resourceDescription.has_key( 'JobID' ):
        resourceDict['JobID'] = resourceDescription['JobID']
      if resourceDescription.has_key( 'DIRACVersion' ):
        resourceDict['DIRACVersion'] = resourceDescription['DIRACVersion']

    # Check the pilot DIRAC version
    if self.checkPilotVersion:
      if 'DIRACVersion' in resourceDict:
        if self.pilotVersion and resourceDict['DIRACVersion'] != self.pilotVersion:
          return S_ERROR( 'Pilot version does not match the production version %s:%s' % \
                         ( resourceDict['DIRACVersion'], self.pilotVersion ) )

    # Get common site mask and check the agent site
    result = jobDB.getSiteMask( siteState = 'Active' )
    if result['OK']:
      maskList = result['Value']
    else:
      return S_ERROR( 'Internal error: can not get site mask' )

    if not 'Site' in resourceDict:
      return S_ERROR( 'Missing Site Name in Resource JDL' )

    siteName = resourceDict['Site']
    if resourceDict['Site'] not in maskList:
      if 'GridCE' in resourceDict:
        del resourceDict['Site']
      else:
        return S_ERROR( 'Site not in mask and GridCE not specified' )

    resourceDict['Setup'] = self.serviceInfoDict['clientSetup']

    if DEBUG:
      print "Resource description:"
      for k, v in resourceDict.items():
        print k.rjust( 20 ), v

    # Check if Job Limits are imposed onto the site
    extraConditions = {}
    if self.siteJobLimits:
      result = self.getExtraConditions( siteName )
      if result['OK']:
        extraConditions = result['Value']
    if extraConditions:
      gLogger.info( 'Job Limits for site %s are: %s' % ( siteName, str( extraConditions ) ) )

    result = taskQueueDB.matchAndGetJob( resourceDict, extraConditions = extraConditions )

    if DEBUG:
      print result

    if not result['OK']:
      return result
    result = result['Value']
    if not result['matchFound']:
      return S_ERROR( 'No match found' )

    jobID = result['jobId']
    resAtt = jobDB.getJobAttributes( jobID, ['OwnerDN', 'OwnerGroup', 'Status'] )
    if not resAtt['OK']:
      return S_ERROR( 'Could not retrieve job attributes' )
    if not resAtt['Value']:
      return S_ERROR( 'No attributes returned for job' )
    if not resAtt['Value']['Status'] == 'Waiting':
      gLogger.error( 'Job %s matched by the TQ is not in Waiting state' % str( jobID ) )
      result = taskQueueDB.deleteJob( jobID )

    result = jobDB.setJobStatus( jobID, status = 'Matched', minor = 'Assigned' )
    result = jobLoggingDB.addLoggingRecord( jobID,
                                           status = 'Matched',
                                           minor = 'Assigned',
                                           source = 'Matcher' )

    result = jobDB.getJobJDL( jobID )
    if not result['OK']:
      return S_ERROR( 'Failed to get the job JDL' )

    resultDict = {}
    resultDict['JDL'] = result['Value']
    resultDict['JobID'] = jobID

    matchTime = time.time() - startTime
    gLogger.info( "Match time: [%s]" % str( matchTime ) )
    gMonitor.addMark( "matchTime", matchTime )

    # Get some extra stuff into the response returned
    resOpt = jobDB.getJobOptParameters( jobID )
    if resOpt['OK']:
      for key, value in resOpt['Value'].items():
        resultDict[key] = value
    resAtt = jobDB.getJobAttributes( jobID, ['OwnerDN', 'OwnerGroup'] )
    if not resAtt['OK']:
      return S_ERROR( 'Could not retrieve job attributes' )
    if not resAtt['Value']:
      return S_ERROR( 'No attributes returned for job' )

    resultDict['DN'] = resAtt['Value']['OwnerDN']
    resultDict['Group'] = resAtt['Value']['OwnerGroup']
    return S_OK( resultDict )
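
For orientation, a hedged sketch of the two input forms selectJob accepts. The field names (Site, CPUTime, GridCE, DIRACVersion) come from the parsing code above; the concrete values and the matcherHandler instance are illustrative assumptions, since in production this method is reached through the DIRAC service layer rather than called directly.

resourceJDL = ('[ Site = "LCG.CERN.ch"; CPUTime = 86400; '
               'GridCE = "ce.example.org"; DIRACVersion = "v6r20" ]')

resourceDict = {'Site': 'LCG.CERN.ch',     # must be in the Active site mask, or supply GridCE
                'CPUTime': 86400,
                'GridCE': 'ce.example.org',
                'DIRACVersion': 'v6r20'}

# result = matcherHandler.selectJob(resourceJDL)   # JDL string form
# result = matcherHandler.selectJob(resourceDict)  # dictionary form
# On success, result['Value'] carries at least 'JDL', 'JobID', 'DN' and 'Group'.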
Exemplo n.º 52
0
  def export_submitJob(self, jobDesc):
    """ Submit a job to DIRAC WMS.
        The job can be a single job, or a parametric job.
        If it is a parametric job, then the parameters will need to be unpacked.

        :param str jobDesc: job description JDL (of a single or parametric job)
        :return: S_OK/S_ERROR, a list of newly created job IDs in case of S_OK.
    """

    if self.peerUsesLimitedProxy:
      return S_ERROR(EWMSSUBM, "Can't submit using a limited proxy")

    # Check job submission permission
    result = self.jobPolicy.getJobPolicy()
    if not result['OK']:
      return S_ERROR(EWMSSUBM, 'Failed to get job policies')
    policyDict = result['Value']
    if not policyDict[RIGHT_SUBMIT]:
      return S_ERROR(EWMSSUBM, 'Job submission not authorized')

    # jobDesc is JDL for now
    jobDesc = jobDesc.strip()
    if jobDesc[0] != "[":
      jobDesc = "[%s" % jobDesc
    if jobDesc[-1] != "]":
      jobDesc = "%s]" % jobDesc

    # Check if the job is a parametric one
    jobClassAd = ClassAd(jobDesc)
    result = getParameterVectorLength(jobClassAd)
    if not result['OK']:
      return result
    nJobs = result['Value']
    parametricJob = False
    if nJobs > 0:
      # if we are here, then jobDesc was the description of a parametric job. So we start unpacking
      parametricJob = True
      if nJobs > self.maxParametricJobs:
        return S_ERROR(EWMSJDL, "Number of parametric jobs exceeds the limit of %d" % self.maxParametricJobs)
      result = generateParametricJobs(jobClassAd)
      if not result['OK']:
        return result
      jobDescList = result['Value']
    else:
      # if we are here, then jobDesc was the description of a single job.
      jobDescList = [jobDesc]

    jobIDList = []

    if parametricJob:
      initialStatus = 'Submitting'
      initialMinorStatus = 'Bulk transaction confirmation'
    else:
      initialStatus = 'Received'
      initialMinorStatus = 'Job accepted'

    for jobDescription in jobDescList:  # jobDescList because there might be a list generated by a parametric job
      result = gJobDB.insertNewJobIntoDB(jobDescription,
                                         self.owner,
                                         self.ownerDN,
                                         self.ownerGroup,
                                         self.diracSetup,
                                         initialStatus=initialStatus,
                                         initialMinorStatus=initialMinorStatus)
      if not result['OK']:
        return result

      jobID = result['JobID']
      gLogger.info('Job %s added to the JobDB for %s/%s' % (jobID, self.ownerDN, self.ownerGroup))

      gJobLoggingDB.addLoggingRecord(jobID, result['Status'], result['MinorStatus'], source='JobManager')

      jobIDList.append(jobID)

    # Set persistency flag
    retVal = gProxyManager.getUserPersistence(self.ownerDN, self.ownerGroup)
    if 'Value' not in retVal or not retVal['Value']:
      gProxyManager.setPersistency(self.ownerDN, self.ownerGroup, True)

    if parametricJob:
      result = S_OK(jobIDList)
    else:
      result = S_OK(jobIDList[0])

    result['JobID'] = result['Value']
    result['requireProxyUpload'] = self.__checkIfProxyUploadIsRequired()
    return result
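
To show what the parametric branch does outside the service context, here is a hedged sketch of expanding one bulk description with the two helpers called above. The module paths and the bulk JDL are assumptions made only for illustration.

from DIRAC.Core.Utilities.ClassAd.ClassAdLight import ClassAd  # assumed module path
from DIRAC.WorkloadManagementSystem.Utilities.ParametricJob import (  # assumed module path
    getParameterVectorLength, generateParametricJobs)

# Illustrative bulk description: one sequence of three values.
bulkJDL = ('[ Executable = "myExec"; Arguments = "%(JOB_ID)s"; '
           'Parameters = 3; Parameters.JOB_ID = {11, 22, 33} ]')

clad = ClassAd(bulkJDL)
nJobs = getParameterVectorLength(clad)['Value']      # 3 for this description
jobDescList = generateParametricJobs(clad)['Value']  # one concrete JDL per parameter value
assert len(jobDescList) == nJobs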
Exemplo n.º 53
0
File: Job.py Project: petricm/DIRAC

    def _toJDL(
        self, xmlFile="", jobDescriptionObject=None
    ):  # messy but need to account for xml file being in /tmp/guid dir
        """Creates a JDL representation of itself as a Job.
    """
        # Check if we have to do old bootstrap...
        classadJob = ClassAd("[]")

        paramsDict = {}
        params = self.workflow.parameters  # ParameterCollection object

        paramList = params
        for param in paramList:
            paramsDict[param.getName()] = {"type": param.getType(), "value": param.getValue()}

        arguments = []
        scriptname = "jobDescription.xml"

        if jobDescriptionObject is None:
            # if we are here it's because there's a real file, on disk, that is named 'jobDescription.xml'
            if self.script:
                if os.path.exists(self.script):
                    scriptname = os.path.abspath(self.script)
                    self.log.verbose("Found script name %s" % scriptname)
                else:
                    self.log.error("File not found", self.script)
            else:
                if xmlFile:
                    self.log.verbose("Found XML File %s" % xmlFile)
                    scriptname = xmlFile
            self.addToInputSandbox.append(scriptname)

        elif isinstance(jobDescriptionObject, StringIO.StringIO):
            self.log.verbose("jobDescription is passed in as a StringIO object")

        else:
            self.log.error("Where's the job description?")

        arguments.append(os.path.basename(scriptname))
        if paramsDict.has_key("LogLevel"):
            if paramsDict["LogLevel"]["value"]:
                arguments.append("-o LogLevel=%s" % (paramsDict["LogLevel"]["value"]))
            else:
                self.log.warn("Job LogLevel defined with null value")
        if paramsDict.has_key("DIRACSetup"):
            if paramsDict["DIRACSetup"]["value"]:
                arguments.append("-o DIRAC/Setup=%s" % (paramsDict["DIRACSetup"]["value"]))
            else:
                self.log.warn("Job DIRACSetup defined with null value")
        if paramsDict.has_key("JobMode"):
            if paramsDict["JobMode"]["value"]:
                arguments.append("-o JobMode=%s" % (paramsDict["JobMode"]["value"]))
            else:
                self.log.warn("Job Mode defined with null value")
        if paramsDict.has_key("JobConfigArgs"):
            if paramsDict["JobConfigArgs"]["value"]:
                arguments.append("%s" % (paramsDict["JobConfigArgs"]["value"]))
            else:
                self.log.warn("JobConfigArgs defined with null value")

        classadJob.insertAttributeString("Executable", self.executable)
        self.addToOutputSandbox.append(self.stderr)
        self.addToOutputSandbox.append(self.stdout)

        # Extract i/o sandbox parameters from steps and any input data parameters
        # to do when introducing step-level api...

        # To add any additional files to input and output sandboxes
        if self.addToInputSandbox:
            extraFiles = ";".join(self.addToInputSandbox)
            if paramsDict.has_key("InputSandbox"):
                currentFiles = paramsDict["InputSandbox"]["value"]
                finalInputSandbox = currentFiles + ";" + extraFiles
                uniqueInputSandbox = uniqueElements(finalInputSandbox.split(";"))
                paramsDict["InputSandbox"]["value"] = ";".join(uniqueInputSandbox)
                self.log.verbose("Final unique Input Sandbox %s" % (";".join(uniqueInputSandbox)))
            else:
                paramsDict["InputSandbox"] = {}
                paramsDict["InputSandbox"]["value"] = extraFiles
                paramsDict["InputSandbox"]["type"] = "JDL"

        if self.addToOutputSandbox:
            extraFiles = ";".join(self.addToOutputSandbox)
            if paramsDict.has_key("OutputSandbox"):
                currentFiles = paramsDict["OutputSandbox"]["value"]
                finalOutputSandbox = currentFiles + ";" + extraFiles
                uniqueOutputSandbox = uniqueElements(finalOutputSandbox.split(";"))
                paramsDict["OutputSandbox"]["value"] = ";".join(uniqueOutputSandbox)
                self.log.verbose("Final unique Output Sandbox %s" % (";".join(uniqueOutputSandbox)))
            else:
                paramsDict["OutputSandbox"] = {}
                paramsDict["OutputSandbox"]["value"] = extraFiles
                paramsDict["OutputSandbox"]["type"] = "JDL"

        if self.addToInputData:
            extraFiles = ";".join(self.addToInputData)
            if paramsDict.has_key("InputData"):
                currentFiles = paramsDict["InputData"]["value"]
                finalInputData = extraFiles
                if currentFiles:
                    finalInputData = currentFiles + ";" + extraFiles
                uniqueInputData = uniqueElements(finalInputData.split(";"))
                paramsDict["InputData"]["value"] = ";".join(uniqueInputData)
                self.log.verbose("Final unique Input Data %s" % (";".join(uniqueInputData)))
            else:
                paramsDict["InputData"] = {}
                paramsDict["InputData"]["value"] = extraFiles
                paramsDict["InputData"]["type"] = "JDL"

        # Handle here the Parametric values
        if self.parametric:
            for pType in ["InputData", "InputSandbox"]:
                if self.parametric.has_key(pType):
                    if paramsDict.has_key(pType) and paramsDict[pType]["value"]:
                        pData = self.parametric[pType]
                        # List of lists case
                        currentFiles = paramsDict[pType]["value"].split(";")
                        tmpList = []
                        if type(pData[0]) == list:
                            for pElement in pData:
                                tmpList.append(currentFiles + pElement)
                        else:
                            for pElement in pData:
                                tmpList.append(currentFiles + [pElement])
                        self.parametric[pType] = tmpList

                    paramsDict[pType] = {}
                    paramsDict[pType]["value"] = "%s"
                    paramsDict[pType]["type"] = "JDL"
                    self.parametric["files"] = self.parametric[pType]
                    arguments.append(" -p Parametric" + pType + "=%s")
                    break

            if self.parametric.has_key("files"):
                paramsDict["Parameters"] = {}
                paramsDict["Parameters"]["value"] = self.parametric["files"]
                paramsDict["Parameters"]["type"] = "JDL"
            if self.parametric.has_key("GenericParameters"):
                paramsDict["Parameters"] = {}
                paramsDict["Parameters"]["value"] = self.parametric["GenericParameters"]
                paramsDict["Parameters"]["type"] = "JDL"
                arguments.append(" -p ParametricParameters=%s")
        ##This needs to be put here so that the InputData and/or InputSandbox parameters for parametric jobs are processed
        classadJob.insertAttributeString("Arguments", " ".join(arguments))

        # Add any JDL parameters to classad obeying lists with ';' rule
        for name, props in paramsDict.items():
            ptype = props["type"]
            value = props["value"]
            if name.lower() == "requirements" and ptype == "JDL":
                self.log.verbose("Found existing requirements: %s" % (value))

            if re.search("^JDL", ptype):
                if type(value) == list:
                    if type(value[0]) == list:
                        classadJob.insertAttributeVectorStringList(name, value)
                    else:
                        classadJob.insertAttributeVectorString(name, value)
                elif value == "%s":
                    classadJob.insertAttributeInt(name, value)
                elif not re.search(";", value) or name == "GridRequirements":  # not a nice fix...
                    classadJob.insertAttributeString(name, value)
                else:
                    classadJob.insertAttributeVectorString(name, value.split(";"))

        for fToBeRemoved in [scriptname, self.stdout, self.stderr]:
            try:
                self.addToInputSandbox.remove(fToBeRemoved)
            except ValueError:
                pass

        jdl = classadJob.asJDL()
        start = jdl.find("[")
        end = jdl.rfind("]")
        return jdl[(start + 1) : (end - 1)]
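
The final loop above applies a ';' convention when moving workflow parameters into the classad: values without ';' become plain string attributes, values containing ';' become vector attributes. A hedged micro-example using only ClassAdLight calls that appear in the code (the import path is assumed; the attribute names are just examples):

from DIRAC.Core.Utilities.ClassAd.ClassAdLight import ClassAd  # assumed module path

clad = ClassAd('[]')
clad.insertAttributeString('Site', 'LCG.CERN.ch')                    # no ';' -> string attribute
clad.insertAttributeVectorString('OutputSandbox',
                                 'std.out;std.err'.split(';'))       # ';' -> vector attribute
jdl = clad.asJDL()
# jdl now contains something like:
#   Site = "LCG.CERN.ch";
#   OutputSandbox = { "std.out", "std.err" };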
Exemplo n.º 54
0
  def __processResourceDescription( self, resourceDescription ):
    # Check and form the resource description dictionary
    resourceDict = {}
    if type( resourceDescription ) in StringTypes:
      classAdAgent = ClassAd( resourceDescription )
      if not classAdAgent.isOK():
        return S_ERROR( 'Illegal Resource JDL' )
      gLogger.verbose( classAdAgent.asJDL() )

      for name in gTaskQueueDB.getSingleValueTQDefFields():
        if classAdAgent.lookupAttribute( name ):
          if name == 'CPUTime':
            resourceDict[name] = classAdAgent.getAttributeInt( name )
          else:
            resourceDict[name] = classAdAgent.getAttributeString( name )

      for name in gTaskQueueDB.getMultiValueMatchFields():
        if classAdAgent.lookupAttribute( name ):
          resourceDict[name] = classAdAgent.getAttributeString( name )

      # Check if a JobID is requested
      if classAdAgent.lookupAttribute( 'JobID' ):
        resourceDict['JobID'] = classAdAgent.getAttributeInt( 'JobID' )

      if classAdAgent.lookupAttribute( 'DIRACVersion' ):
        resourceDict['DIRACVersion'] = classAdAgent.getAttributeString( 'DIRACVersion' )

      if classAdAgent.lookupAttribute( 'VirtualOrganization' ):
        resourceDict['VirtualOrganization'] = classAdAgent.getAttributeString( 'VirtualOrganization' )

    else:
      for name in gTaskQueueDB.getSingleValueTQDefFields():
        if resourceDescription.has_key( name ):
          resourceDict[name] = resourceDescription[name]

      for name in gTaskQueueDB.getMultiValueMatchFields():
        if resourceDescription.has_key( name ):
          resourceDict[name] = resourceDescription[name]

      if resourceDescription.has_key( 'JobID' ):
        resourceDict['JobID'] = resourceDescription['JobID']

      if resourceDescription.has_key( 'DIRACVersion' ):
        resourceDict['DIRACVersion'] = resourceDescription['DIRACVersion']

      if resourceDescription.has_key( 'VirtualOrganization' ):
        resourceDict['VirtualOrganization'] = resourceDescription['VirtualOrganization']

    return resourceDict
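
One asymmetry worth noting in the translation above: from a JDL string CPUTime is read back with getAttributeInt, while from a dictionary every value is copied verbatim. A minimal, illustrative reminder for callers of the dictionary form:

# Callers passing a dictionary should already provide CPUTime as an integer;
# no conversion is applied on this code path. Values are illustrative.
resourceDescription = {'Site': 'LCG.CNAF.it', 'CPUTime': 86400}   # 86400, not '86400'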
Exemplo n.º 55
0
    def export_submitJob(self, jobDesc):
        """ Submit a single job to DIRAC WMS
    """

        if self.peerUsesLimitedProxy:
            return S_ERROR("Can't submit using a limited proxy! (bad boy!)")

        # Check job submission permission
        result = self.jobPolicy.getJobPolicy()
        if not result['OK']:
            return S_ERROR('Failed to get job policies')
        policyDict = result['Value']
        if not policyDict[RIGHT_SUBMIT]:
            return S_ERROR('Job submission not authorized')

        #jobDesc is JDL for now
        jobDesc = jobDesc.strip()
        if jobDesc[0] != "[":
            jobDesc = "[%s" % jobDesc
        if jobDesc[-1] != "]":
            jobDesc = "%s]" % jobDesc

        # Check if the job is a parametric one
        jobClassAd = ClassAd(jobDesc)
        nParameters = getNumberOfParameters(jobClassAd)
        parametricJob = False
        if nParameters > 0:
            parametricJob = True
            result = generateParametricJobs(jobClassAd)
            if not result['OK']:
                return result
            jobDescList = result['Value']
        else:
            jobDescList = [jobDesc]

        jobIDList = []
        for jobDescription in jobDescList:
            result = gJobDB.insertNewJobIntoDB(jobDescription, self.owner,
                                               self.ownerDN, self.ownerGroup,
                                               self.diracSetup)
            if not result['OK']:
                return result

            jobID = result['JobID']
            gLogger.info('Job %s added to the JobDB for %s/%s' %
                         (jobID, self.ownerDN, self.ownerGroup))

            gJobLoggingDB.addLoggingRecord(jobID,
                                           result['Status'],
                                           result['MinorStatus'],
                                           source='JobManager')

            jobIDList.append(jobID)

        #Set persistency flag
        retVal = gProxyManager.getUserPersistence(self.ownerDN,
                                                  self.ownerGroup)
        if 'Value' not in retVal or not retVal['Value']:
            gProxyManager.setPersistency(self.ownerDN, self.ownerGroup, True)

        if parametricJob:
            result = S_OK(jobIDList)
        else:
            result = S_OK(jobIDList[0])

        result['JobID'] = result['Value']
        result['requireProxyUpload'] = self.__checkIfProxyUploadIsRequired()
        self.__sendJobsToOptimizationMind(jobIDList)
        return result
Exemplo n.º 56
0
File: Job.py Project: ahaupt/DIRAC
  def _toJDL( self, xmlFile = '', jobDescriptionObject = None ):  # messy but need to account for xml file being in /tmp/guid dir
    """Creates a JDL representation of itself as a Job.
    """
    #Check if we have to do old bootstrap...
    classadJob = ClassAd( '[]' )

    paramsDict = {}
    params = self.workflow.parameters # ParameterCollection object

    paramList = params
    for param in paramList:
      paramsDict[param.getName()] = {'type':param.getType(), 'value':param.getValue()}

    arguments = []
    scriptName = 'jobDescription.xml'

    if jobDescriptionObject is None:
      # if we are here it's because there's a real file, on disk, that is named 'jobDescription.xml'
      if self.script:
        if os.path.exists( self.script ):
          scriptName = os.path.abspath( self.script )
          self.log.verbose( 'Found script name %s' % scriptName )
        else:
          self.log.error( "File not found", self.script )
      else:
        if xmlFile:
          self.log.verbose( 'Found XML File %s' % xmlFile )
          scriptName = xmlFile
      self.addToInputSandbox.append( scriptName )

    elif isinstance( jobDescriptionObject, StringIO.StringIO ):
      self.log.verbose( "jobDescription is passed in as a StringIO object" )

    else:
      self.log.error( "Where's the job description?" )

    arguments.append( os.path.basename( scriptName ) )
    if paramsDict.has_key( 'LogLevel' ):
      if paramsDict['LogLevel']['value']:
        arguments.append( '-o LogLevel=%s' % ( paramsDict['LogLevel']['value'] ) )
      else:
        self.log.warn( 'Job LogLevel defined with null value' )
    if paramsDict.has_key( 'DIRACSetup' ):
      if paramsDict['DIRACSetup']['value']:
        arguments.append( '-o DIRAC/Setup=%s' % ( paramsDict['DIRACSetup']['value'] ) )
      else:
        self.log.warn( 'Job DIRACSetup defined with null value' )
    if paramsDict.has_key( 'JobMode' ):
      if paramsDict['JobMode']['value']:
        arguments.append( '-o JobMode=%s' % ( paramsDict['JobMode']['value'] ) )
      else:
        self.log.warn( 'Job Mode defined with null value' )
    if paramsDict.has_key( 'JobConfigArgs' ):
      if paramsDict['JobConfigArgs']['value']:
        arguments.append( '%s' % ( paramsDict['JobConfigArgs']['value'] ) )
      else:
        self.log.warn( 'JobConfigArgs defined with null value' )
    if self.parametricWFArguments:
      for name, value in self.parametricWFArguments.items():
        arguments.append( "-p %s='%s'" % ( name, value ) )

    classadJob.insertAttributeString( 'Executable', self.executable )
    self.addToOutputSandbox.append( self.stderr )
    self.addToOutputSandbox.append( self.stdout )

    #Extract i/o sandbox parameters from steps and any input data parameters
    #to do when introducing step-level api...

    #To add any additional files to input and output sandboxes
    if self.addToInputSandbox:
      extraFiles = ';'.join( self.addToInputSandbox )
      if paramsDict.has_key( 'InputSandbox' ):
        currentFiles = paramsDict['InputSandbox']['value']
        finalInputSandbox = currentFiles + ';' + extraFiles
        uniqueInputSandbox = uniqueElements( finalInputSandbox.split( ';' ) )
        paramsDict['InputSandbox']['value'] = ';'.join( uniqueInputSandbox )
        self.log.verbose( 'Final unique Input Sandbox %s' % ( ';'.join( uniqueInputSandbox ) ) )
      else:
        paramsDict['InputSandbox'] = {}
        paramsDict['InputSandbox']['value'] = extraFiles
        paramsDict['InputSandbox']['type'] = 'JDL'

    if self.addToOutputSandbox:
      extraFiles = ';'.join( self.addToOutputSandbox )
      if paramsDict.has_key( 'OutputSandbox' ):
        currentFiles = paramsDict['OutputSandbox']['value']
        finalOutputSandbox = currentFiles + ';' + extraFiles
        uniqueOutputSandbox = uniqueElements( finalOutputSandbox.split( ';' ) )
        paramsDict['OutputSandbox']['value'] = ';'.join( uniqueOutputSandbox )
        self.log.verbose( 'Final unique Output Sandbox %s' % ( ';'.join( uniqueOutputSandbox ) ) )
      else:
        paramsDict['OutputSandbox'] = {}
        paramsDict['OutputSandbox']['value'] = extraFiles
        paramsDict['OutputSandbox']['type'] = 'JDL'

    if self.addToInputData:
      extraFiles = ';'.join( self.addToInputData )
      if paramsDict.has_key( 'InputData' ):
        currentFiles = paramsDict['InputData']['value']
        finalInputData = extraFiles
        if currentFiles:
          finalInputData = currentFiles + ';' + extraFiles
        uniqueInputData = uniqueElements( finalInputData.split( ';' ) )
        paramsDict['InputData']['value'] = ';'.join( uniqueInputData )
        self.log.verbose( 'Final unique Input Data %s' % ( ';'.join( uniqueInputData ) ) )
      else:
        paramsDict['InputData'] = {}
        paramsDict['InputData']['value'] = extraFiles
        paramsDict['InputData']['type'] = 'JDL'

    # Handle parameter sequences
    if self.numberOfParameters > 0:
      paramsDict, arguments = self._handleParameterSequences( paramsDict, arguments )

    classadJob.insertAttributeString( 'Arguments', ' '.join( arguments ) )

    #Add any JDL parameters to classad obeying lists with ';' rule
    for name, props in paramsDict.iteritems():
      ptype = props['type']
      value = props['value']
      if isinstance( value, basestring) and re.search( ';', value ):
        value = value.split( ';' )
      if name.lower() == 'requirements' and ptype == 'JDL':
        self.log.verbose( 'Found existing requirements: %s' % ( value ) )

      if re.search( '^JDL', ptype ):
        if isinstance( value, list ):
          if isinstance( value[0], list ):
            classadJob.insertAttributeVectorStringList( name, value )
          else:
            classadJob.insertAttributeVectorString( name, value )
        elif isinstance( value, basestring ) and value:
          classadJob.insertAttributeString( name, value )
        elif isinstance( value, ( int, long, float ) ):
          classadJob.insertAttributeInt( name, value )

    if self.numberOfParameters > 0:
      classadJob.insertAttributeInt( 'Parameters', self.numberOfParameters )

    for fToBeRemoved in [scriptName, self.stdout, self.stderr]:
      try:
        self.addToInputSandbox.remove( fToBeRemoved )
      except ValueError:
        pass

    jdl = classadJob.asJDL()
    start = jdl.find( '[' )
    end = jdl.rfind( ']' )
    return jdl[( start + 1 ):( end - 1 )]
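
For the parametric workflow arguments handled near the top of this version, a small sketch of the rendering rule: each (name, value) pair in parametricWFArguments becomes a "-p name='value'" token appended to the executable arguments. The sample dictionary below is an assumption used only to show the formatting.

# Illustrative only: mimics the "-p %s='%s'" rendering used above.
parametricWFArguments = {'ParametricParameters': '%(ParametricParameters)s'}
arguments = ['jobDescription.xml', '-o LogLevel=DEBUG']
for name, value in parametricWFArguments.items():
    arguments.append("-p %s='%s'" % (name, value))
# ' '.join(arguments) ->
# "jobDescription.xml -o LogLevel=DEBUG -p ParametricParameters='%(ParametricParameters)s'"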
Exemplo n.º 57
0
  def export_submitJob( self, jobDesc ):
    """ Submit a single job to DIRAC WMS
    """

    if self.peerUsesLimitedProxy:
      return S_ERROR( "Can't submit using a limited proxy! (bad boy!)" )

    # Check job submission permission
    result = self.jobPolicy.getJobPolicy()
    if not result['OK']:
      return S_ERROR( 'Failed to get job policies' )
    policyDict = result['Value']
    if not policyDict[ RIGHT_SUBMIT ]:
      return S_ERROR('Job submission not authorized')

    #jobDesc is JDL for now
    jobDesc = jobDesc.strip()
    if jobDesc[0] != "[":
      jobDesc = "[%s" % jobDesc
    if jobDesc[-1] != "]":
      jobDesc = "%s]" % jobDesc

    # Check if the job is a parametric one
    jobClassAd = ClassAd(jobDesc)
    parametricJob = False
    if jobClassAd.lookupAttribute('Parameters'):
      parametricJob = True
      if jobClassAd.isAttributeList('Parameters'):
        parameterList = jobClassAd.getListFromExpression('Parameters')
      else:
        nParameters = jobClassAd.getAttributeInt('Parameters')
        if not nParameters:
          value = jobClassAd.get_expression('Parameters')
          return S_ERROR('Illegal value for Parameters JDL field: %s' % value)
        if jobClassAd.lookupAttribute('ParameterStart'):
          pStart = jobClassAd.getAttributeInt('ParameterStart')
        else:
          return S_ERROR('Missing JDL field ParameterStart')
        if jobClassAd.lookupAttribute('ParameterStep'):  
          pStep = jobClassAd.getAttributeInt('ParameterStep')
          if not pStep:
            value = jobClassAd.get_expression('ParameterStep')
            return S_ERROR('Illegal value for ParameterStep JDL field: %s' % value)
        else:
          return S_ERROR('Missing JDL field ParameterStep')  
        parameterList = list( range(pStart,pStart+pStep*nParameters,pStep) )

      if len(parameterList) > MAX_PARAMETRIC_JOBS:
        return S_ERROR('The number of parametric jobs exceeded the limit of %d' % MAX_PARAMETRIC_JOBS  )  
        
      jobDescList = []
      for p in parameterList:
        jobDescList.append( jobDesc.replace('%s',str(p)) )
    else:
      jobDescList = [ jobDesc ]     

    jobIDList = []
    for jobDescription in jobDescList:
      result = gJobDB.insertNewJobIntoDB( jobDescription, self.owner, self.ownerDN, self.ownerGroup, self.diracSetup )
      if not result['OK']:
        return result

      jobID = result['JobID']
      gLogger.info( 'Job %s added to the JobDB for %s/%s' % ( jobID, self.ownerDN, self.ownerGroup ) )

      gJobLoggingDB.addLoggingRecord( jobID, result['Status'], result['MinorStatus'], source = 'JobManager' )

      jobIDList.append(jobID)

    #Set persistency flag
    retVal = gProxyManager.getUserPersistence( self.ownerDN, self.ownerGroup )
    if 'Value' not in retVal or not retVal[ 'Value' ]:
      gProxyManager.setPersistency( self.ownerDN, self.ownerGroup, True )

    if parametricJob:
      result = S_OK(jobIDList)
    else:
      result = S_OK(jobIDList[0])

    result['JobID'] = result['Value']
    result[ 'requireProxyUpload' ] = self.__checkIfProxyUploadIsRequired()
    return result
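
The arithmetic-progression branch above derives the parameter list from three JDL fields; a minimal sketch of that computation with made-up numbers:

# Parameters = number of jobs, ParameterStart = first value, ParameterStep = increment.
nParameters, pStart, pStep = 4, 0, 10
parameterList = list(range(pStart, pStart + pStep * nParameters, pStep))
# parameterList == [0, 10, 20, 30]; each value then replaces '%s' in the job JDL,
# giving one concrete description per parameter.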
Exemplo n.º 58
0
    def submitJob(self, jdl, jobDescriptionObject=None):
        """Submit one job specified by its JDL to WMS.

        The JDL may actually be the description of a parametric job,
        resulting in multiple DIRAC jobs submitted to the DIRAC WMS
        """

        if os.path.exists(jdl):
            with open(jdl, "r") as fic:
                jdlString = fic.read()
        else:
            # If file JDL does not exist, assume that the JDL is passed as a string
            jdlString = jdl

        jdlString = jdlString.strip()

        gLogger.debug("Submitting JDL", jdlString)
        # Strip comments from the JDL string
        newJdlList = []
        for line in jdlString.split("\n"):
            if not line.strip().startswith("#"):
                newJdlList.append(line)
        jdlString = "\n".join(newJdlList)

        # Check the validity of the input JDL
        if jdlString.find("[") != 0:
            jdlString = "[%s]" % jdlString
        classAdJob = ClassAd(jdlString)
        if not classAdJob.isOK():
            return S_ERROR(EWMSJDL, "Invalid job JDL")

        # Check the size and the contents of the input sandbox
        result = self.__uploadInputSandbox(classAdJob, jobDescriptionObject)
        if not result["OK"]:
            return result

        # Submit the job now and get the new job ID
        result = getParameterVectorLength(classAdJob)
        if not result["OK"]:
            return result
        nJobs = result["Value"]
        result = self.jobManager.submitJob(classAdJob.asJDL())

        if nJobs:
            gLogger.debug("Applying transactional job submission")
            # The server applies transactional bulk submission, we should confirm the jobs
            if result["OK"]:
                jobIDList = result["Value"]
                if len(jobIDList) == nJobs:
                    # Confirm the submitted jobs
                    confirmed = False
                    for _attempt in range(3):
                        result = self.jobManager.confirmBulkSubmission(jobIDList)
                        if result["OK"]:
                            confirmed = True
                            break
                        time.sleep(1)
                    if not confirmed:
                        # The bulk submission failed, try to remove the created jobs
                        resultDelete = self.jobManager.removeJob(jobIDList)
                        error = "Job submission failed to confirm bulk transaction"
                        if not resultDelete["OK"]:
                            error += "; removal of created jobs failed"
                        return S_ERROR(EWMSSUBM, error)
                else:
                    return S_ERROR(EWMSSUBM, "The number of submitted jobs does not match job description")

        if result.get("requireProxyUpload"):
            gLogger.warn("Need to upload the proxy")

        return result
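
A hedged usage sketch of the client method above: it accepts either a path to a JDL file or the JDL text itself, and for a bulk (parametric) description it returns the list of confirmed job IDs. The WMSClient class name and import path are assumptions; adjust them to wherever this submitJob actually lives.

from DIRAC.WorkloadManagementSystem.Client.WMSClient import WMSClient  # assumed path

jdl = '[ Executable = "myExec"; Arguments = "jobDescription.xml -o LogLevel=DEBUG" ]'
result = WMSClient().submitJob(jdl)
if result['OK']:
    jobIDs = result['Value']  # a single ID, or a list of IDs for a parametric description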
Exemplo n.º 59
0
  def _processResourceDescription( self, resourceDescription ):
    """ Check and form the resource description dictionary

        resourceDescription is a ceDict coming from a JobAgent, for example.
    """

    resourceDict = {}
    if isinstance( resourceDescription, basestring ):
      classAdAgent = ClassAd( resourceDescription )
      if not classAdAgent.isOK():
        raise ValueError( 'Illegal Resource JDL' )
      self.log.verbose( classAdAgent.asJDL() )

      for name in singleValueDefFields:
        if classAdAgent.lookupAttribute( name ):
          if name == 'CPUTime':
            resourceDict[name] = classAdAgent.getAttributeInt( name )
          else:
            resourceDict[name] = classAdAgent.getAttributeString( name )

      for name in multiValueMatchFields:
        if classAdAgent.lookupAttribute( name ):
          if name == 'SubmitPool':
            resourceDict[name] = classAdAgent.getListFromExpression( name )
          else:
            resourceDict[name] = classAdAgent.getAttributeString( name )

      # Check if a JobID is requested
      if classAdAgent.lookupAttribute( 'JobID' ):
        resourceDict['JobID'] = classAdAgent.getAttributeInt( 'JobID' )

      for k in ( 'DIRACVersion', 'ReleaseVersion', 'ReleaseProject', 'VirtualOrganization' ):
        if classAdAgent.lookupAttribute( k ):
          resourceDict[ k ] = classAdAgent.getAttributeString( k )

    else:
      for name in singleValueDefFields:
        if resourceDescription.has_key( name ):
          resourceDict[name] = resourceDescription[name]

      for name in multiValueMatchFields:
        if resourceDescription.has_key( name ):
          resourceDict[name] = resourceDescription[name]

      if resourceDescription.has_key( 'JobID' ):
        resourceDict['JobID'] = resourceDescription['JobID']

      for k in ( 'DIRACVersion', 'ReleaseVersion', 'ReleaseProject', 'VirtualOrganization',
                 'PilotReference', 'PilotBenchmark', 'PilotInfoReportedFlag' ):
        if k in resourceDescription:
          resourceDict[ k ] = resourceDescription[ k ]

    return resourceDict
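
Since the docstring says the dictionary form is a ceDict coming from a JobAgent, here is a hedged example built only from keys the method actually copies; every value is illustrative.

ceDict = {'Site': 'LCG.GRIDKA.de',            # single-value TQ field
          'CPUTime': 86400,
          'SubmitPool': ['gLite'],            # multi-value match field
          'ReleaseVersion': 'v6r20',
          'PilotReference': 'pilotRef-0001',  # illustrative placeholder
          'PilotBenchmark': 9.5,
          'PilotInfoReportedFlag': False}
# resourceDict = self._processResourceDescription(ceDict)
# resourceDict would contain the same keys, filtered to those the Matcher understands.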
Exemplo n.º 60
0
    def selectJob(self, resourceDescription):
        """ Main job selection function to find the highest priority job
        matching the resource capacity
    """

        startTime = time.time()

        # Check and form the resource description dictionary
        resourceDict = {}
        if type(resourceDescription) in StringTypes:
            classAdAgent = ClassAd(resourceDescription)
            if not classAdAgent.isOK():
                return S_ERROR("Illegal Resource JDL")
            gLogger.verbose(classAdAgent.asJDL())

            for name in taskQueueDB.getSingleValueTQDefFields():
                if classAdAgent.lookupAttribute(name):
                    if name == "CPUTime":
                        resourceDict[name] = classAdAgent.getAttributeInt(name)
                    else:
                        resourceDict[name] = classAdAgent.getAttributeString(name)

            for name in taskQueueDB.getMultiValueMatchFields():
                if classAdAgent.lookupAttribute(name):
                    resourceDict[name] = classAdAgent.getAttributeString(name)

            # Check if a JobID is requested
            if classAdAgent.lookupAttribute("JobID"):
                resourceDict["JobID"] = classAdAgent.getAttributeInt("JobID")

            if classAdAgent.lookupAttribute("DIRACVersion"):
                resourceDict["DIRACVersion"] = classAdAgent.getAttributeString("DIRACVersion")

        else:
            for name in taskQueueDB.getSingleValueTQDefFields():
                if resourceDescription.has_key(name):
                    resourceDict[name] = resourceDescription[name]

            for name in taskQueueDB.getMultiValueMatchFields():
                if resourceDescription.has_key(name):
                    resourceDict[name] = resourceDescription[name]

            if resourceDescription.has_key("JobID"):
                resourceDict["JobID"] = resourceDescription["JobID"]
            if resourceDescription.has_key("DIRACVersion"):
                resourceDict["DIRACVersion"] = resourceDescription["DIRACVersion"]

        # Check the pilot DIRAC version
        if self.checkPilotVersion:
            if "DIRACVersion" in resourceDict:
                if self.pilotVersion and resourceDict["DIRACVersion"] != self.pilotVersion:
                    return S_ERROR(
                        "Pilot version does not match the production version %s:%s"
                        % (resourceDict["DIRACVersion"], self.pilotVersion)
                    )

        # Get common site mask and check the agent site
        result = jobDB.getSiteMask(siteState="Active")
        if result["OK"]:
            maskList = result["Value"]
        else:
            return S_ERROR("Internal error: can not get site mask")

        if not "Site" in resourceDict:
            return S_ERROR("Missing Site Name in Resource JDL")

        siteName = resourceDict["Site"]
        if resourceDict["Site"] not in maskList:
            if "GridCE" in resourceDict:
                del resourceDict["Site"]
            else:
                return S_ERROR("Site not in mask and GridCE not specified")

        resourceDict["Setup"] = self.serviceInfoDict["clientSetup"]

        if DEBUG:
            print "Resource description:"
            for k, v in resourceDict.items():
                print k.rjust(20), v

        # Check if Job Limits are imposed onto the site
        extraConditions = {}
        if self.siteJobLimits:
            result = self.getExtraConditions(siteName)
            if result["OK"]:
                extraConditions = result["Value"]
        if extraConditions:
            gLogger.info("Job Limits for site %s are: %s" % (siteName, str(extraConditions)))

        result = taskQueueDB.matchAndGetJob(resourceDict, extraConditions=extraConditions)

        if DEBUG:
            print result

        if not result["OK"]:
            return result
        result = result["Value"]
        if not result["matchFound"]:
            return S_ERROR("No match found")

        jobID = result["jobId"]
        resAtt = jobDB.getJobAttributes(jobID, ["OwnerDN", "OwnerGroup", "Status"])
        if not resAtt["OK"]:
            return S_ERROR("Could not retrieve job attributes")
        if not resAtt["Value"]:
            return S_ERROR("No attributes returned for job")
        if not resAtt["Value"]["Status"] == "Waiting":
            gLogger.error("Job %s matched by the TQ is not in Waiting state" % str(jobID))
            result = taskQueueDB.deleteJob(jobID)

        result = jobDB.setJobStatus(jobID, status="Matched", minor="Assigned")
        result = jobLoggingDB.addLoggingRecord(jobID, status="Matched", minor="Assigned", source="Matcher")

        result = jobDB.getJobJDL(jobID)
        if not result["OK"]:
            return S_ERROR("Failed to get the job JDL")

        resultDict = {}
        resultDict["JDL"] = result["Value"]
        resultDict["JobID"] = jobID

        matchTime = time.time() - startTime
        gLogger.info("Match time: [%s]" % str(matchTime))
        gMonitor.addMark("matchTime", matchTime)

        # Get some extra stuff into the response returned
        resOpt = jobDB.getJobOptParameters(jobID)
        if resOpt["OK"]:
            for key, value in resOpt["Value"].items():
                resultDict[key] = value
        resAtt = jobDB.getJobAttributes(jobID, ["OwnerDN", "OwnerGroup"])
        if not resAtt["OK"]:
            return S_ERROR("Could not retrieve job attributes")
        if not resAtt["Value"]:
            return S_ERROR("No attributes returned for job")

        resultDict["DN"] = resAtt["Value"]["OwnerDN"]
        resultDict["Group"] = resAtt["Value"]["OwnerGroup"]
        return S_OK(resultDict)