# Methods from grid-control's HTCondor schedd backend. The snippets assume
# the usual module-level context of that backend: os, re, logging, utils,
# HTCJobID, BackendError and clear_current_exception are already imported.
def _getJDLData(self, task, jobNumList, queryArguments):
     jdlData = self._getBaseJDLData(task, queryArguments)
     jdlData.extend([
         'Executable              = %s' %
         self.parentPool._getSandboxFilesIn(task)[0][1],
     ])
     try:
         for authFile in self.parentPool.proxy.getAuthFiles():
             jdlData.extend([
                 'x509userproxy           = %s' % authFile,
                 'use_x509userproxy       = True',
             ])
     except Exception:
         clear_current_exception()
     for jobNum in jobNumList:
         jdlData.extend(self._getRequirementJdlData(task, jobNum))
         jobStageDir = self.getStagingDir(
             htcID=HTCJobID(gcJobNum=jobNum, gcTaskID=task.taskID))
         jdlData.extend([
             '+GcJobNum               = "%s"' % jobNum,
             'arguments               = %s' % jobNum,
             'initialdir              = %s' % jobStageDir,
             'Output                  = %s' %
             os.path.join(jobStageDir, 'gc.stdout'),
             'Error                   = %s' %
             os.path.join(jobStageDir, 'gc.stderr'),
             # HACK: ignore executable (In[0]), stdout (Out[0]) and stderr (Out[1])
             'transfer_input_files    = %s' % ','.join([
                 src for descr, src, trg in
                 self.parentPool._getSandboxFilesIn(task)[1:]
             ] + [self.parentPool.getJobCfgPath(jobNum)[0]]),
             'transfer_output_files   = %s' % ','.join([
                 src for descr, src, trg in
                 self.parentPool._getSandboxFilesOut(task)[2:]
             ]),
             '+rawID                   = "%s"' %
             HTCJobID(gcJobNum=jobNum,
                      gcTaskID=task.taskID,
                      clusterID='$(Cluster)',
                      procID='$(Process)',
                      scheddURI=self.getURI(),
                      typed=False).rawID,
             'Queue',
         ])
     return jdlData
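# Hedged usage sketch: `schedd` and `task` stand in for the backend instance
# and task object that grid-control normally provides. The method returns
# plain JDL text, one attribute per line, so writing a submit file is a join:
#
#   jdlLines = schedd._getJDLData(task, [0, 1, 2], queryArguments=['JobStatus'])
#   with open('gcJobs.jdl', 'w') as jdlFile:
#       jdlFile.write('\n'.join(jdlLines) + '\n')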
 def _digestQueryInfoMap(self, queryInfoMaps, queryArguments):
     """Digest raw queryInfoMaps to maps of HTCjobID : infoMap"""
     dataMap = {}
     for infoMap in queryInfoMaps:
         htcID = HTCJobID(rawID=infoMap['rawID'])
         dataMap[htcID] = {}
         for key in infoMap:
             if key in queryArguments:
                 dataMap[htcID][key] = infoMap[key]
     return dataMap
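# Hedged sketch of the shapes involved (the rawID value below is purely
# illustrative): given classad dumps from the schedd such as
#
#   queryInfoMaps = [{'rawID': '<raw id>', 'JobStatus': '2', 'ServerTime': '...'}]
#
# and queryArguments = ['JobStatus'], the result keeps only requested keys:
#
#   {HTCJobID(rawID='<raw id>'): {'JobStatus': '2'}}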
 def _getJDLData(self, task, jobNumList, queryArguments):
     taskFiles, proxyFile, jobFileMap = self._getSubmitFileMap(
         task, jobNumList)
     jdlData = self._getBaseJDLData(task, queryArguments)
     jdlData.extend([
         'Executable              = %s' % taskFiles[0][2],
     ])
     if proxyFile:
         jdlData.extend([
             'use_x509userproxy       = True',
             'x509userproxy           = %s' % proxyFile[2],
         ])
     for jobNum in jobNumList:
         jdlData.extend(self._getRequirementJdlData(task, jobNum))
         jobStageDir = self.getStagingDir(
             htcID=HTCJobID(gcJobNum=jobNum, gcTaskID=task.taskID))
         jdlData.extend([
             '+GcJobNum               = "%s"' % jobNum,
             'arguments               = %s' % jobNum,
             'initialdir              = %s' % jobStageDir,
             'Output                  = %s' %
             os.path.join(jobStageDir, 'gc.stdout'),
             'Error                   = %s' %
             os.path.join(jobStageDir, 'gc.stderr'),
             # HACK: ignore executable (In[0]), stdout (Out[0]) and stderr (Out[1])
             'transfer_input_files    = %s' % ','.join([
                 schd
                 for descr, gc, schd in taskFiles[1:] + jobFileMap[jobNum]
             ]),
             'transfer_output_files   = %s' % ','.join([
                 src for descr, src, trg in
                 self.parentPool._getSandboxFilesOut(task)[2:]
             ]),
             '+rawID                   = "%s"' %
             HTCJobID(gcJobNum=jobNum,
                      gcTaskID=task.taskID,
                      clusterID='$(Cluster)',
                      procID='$(Process)',
                      scheddURI=self.getURI(),
                      typed=False).rawID,
             'Queue',
         ])
     return jdlData
 def _stageSubmitFiles(self, task, jobNumList):
     """Stage submission files at the schedd."""
     taskFiles, proxyFile, jobFileMap = self._getSubmitFileMap(
         task, jobNumList)
     self._log(logging.DEBUG1, "Staging task files.")
     stagedJobs = []
     if proxyFile:
         taskFiles.append(proxyFile)
     for index, fileInfoBlob in enumerate(taskFiles):
         self._log(
             logging.DEBUG3, "Staging task files (%d/%d): %s" %
             (index + 1, len(taskFiles), fileInfoBlob[0]))
         putProcess = self._adapter.LoggedPut(fileInfoBlob[1],
                                              fileInfoBlob[2])
         if putProcess.wait(timeout=self._adapterMaxWait):
             putProcess.logError(self.parentPool.errorLog, brief=True)
             self._log(
                 logging.INFO1,
                 "Staging failure for %s. Aborting submit." %
                 fileInfoBlob[0])
             return stagedJobs
     for jobNum, jobFiles in jobFileMap.items():
         try:
             for fileInfoBlob in jobFiles:
                 self._log(logging.DEBUG3,
                           "Staging job files: %s" % fileInfoBlob[0])
                 putProcess = self._adapter.LoggedPut(
                     fileInfoBlob[1], fileInfoBlob[2])
                 if putProcess.wait(timeout=self._adapterMaxWait):
                     putProcess.logError(self.parentPool.errorLog,
                                         brief=True)
                     try:
                         self.cleanStagingDir(
                             htcID=HTCJobID(gcJobNum=jobNum,
                                            gcTaskID=task.taskID))
                     except Exception:
                         self._log(logging.INFO1,
                                   'Unable to clean staging dir.')
                     raise BackendError('Failed to stage job file %s' %
                                        fileInfoBlob[0])
         except BackendError:
             continue
         else:
             stagedJobs.append(jobNum)
     return stagedJobs
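# Hedged usage sketch: the return value is the subset of jobNumList whose
# files all reached the schedd, so a caller would submit only those jobs:
#
#   stagedJobs = schedd._stageSubmitFiles(task, jobNumList)
#   failedJobs = [jobNum for jobNum in jobNumList if jobNum not in stagedJobs]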
 def _getBaseJDLData(self, task, queryArguments):
     """Create a sequence of default attributes for a submission JDL"""
     jdlData = [
         '+submitTool              = "GridControl (version %s)"' %
         utils.getVersion(),
         'should_transfer_files    = YES',
         'when_to_transfer_output  = ON_EXIT',
         'periodic_remove          = (JobStatus == 5 && HoldReasonCode != 16)',
         'environment              = CONDOR_WMS_DASHID=https://%s:/$(Cluster).$(Process)'
         % self.parentPool.wmsName,
         'Universe                 = %s' %
         self.parentPool._jobSettings["Universe"],  # TODO: Unhack me
         '+GcID                    = "%s"' % self.parentPool._createGcId(
             HTCJobID(gcJobNum='$(GcJobNum)',
                      gcTaskID=task.taskID,
                      clusterID='$(Cluster)',
                      procID='$(Process)',
                      scheddURI=self.getURI(),
                      typed=False)),
         '+GcJobNumToWmsID         = "$(GcJobNum)@$(Cluster).$(Process)"',
         '+GcJobNumToGcID          = "$(GcJobNum)@$(GcID)"',
         'Log                      = %s' %
         os.path.join(self.getStagingDir(), 'gcJobs.log'),
         'job_ad_information_attrs = %s' % ','.join(
             [arg for arg in queryArguments if arg not in ['JobStatus']]),
     ]
     for key in queryArguments:
         try:
             # Does the key look like a matchmaking attribute? e.g.
             # '+JOB_GLIDEIN_Entry_Name = "$$(GLIDEIN_Entry_Name:Unknown)"'
             # expands after matching to
             # MATCH_GLIDEIN_Entry_Name = "CMS_T2_DE_RWTH_grid-ce2" and
             # MATCH_EXP_JOB_GLIDEIN_Entry_Name = "CMS_T2_DE_RWTH_grid-ce2"
             matchKey = re.match("(?:MATCH_EXP_JOB_|MATCH_|JOB_)(.*)",
                                 key).group(1)
             jdlData.append('+JOB_%s = "$$(%s:Unknown)"' %
                            (matchKey, matchKey))
         except AttributeError:
             clear_current_exception()
     for line in self.parentPool._jobSettings["ClassAd"]:
         jdlData.append('+' + line)
     for line in self.parentPool._jobSettings["JDL"]:
         jdlData.append(line)
     return jdlData
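# For orientation, the head of a generated JDL looks roughly like this
# (values are illustrative, not taken from a real run):
#
#   +submitTool              = "GridControl (version 1.9.x)"
#   should_transfer_files    = YES
#   when_to_transfer_output  = ON_EXIT
#   periodic_remove          = (JobStatus == 5 && HoldReasonCode != 16)
#   Universe                 = vanilla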
 def _splitGcId(self, gcId):
     """Split a GcId, returning wmsName and htcJobID"""
     wmsName, rawId = self._splitId(gcId)
     return (wmsName, HTCJobID(rawID=rawId))
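# Hedged round-trip sketch: _createGcId (used in _getBaseJDLData above)
# embeds the rawID inside the GcId, and _splitGcId recovers it, e.g. to
# locate the staging directory for a finished job:
#
#   wmsName, htcID = schedd._splitGcId(gcId)
#   stagingDir = schedd.getStagingDir(htcID=htcID)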