예제 #1
0
 def submitProbeJobs(self, ce):
   """ Submit a probe job to the given computing element.

       :param ce: name of the CE the probe job is destined for
       :return: the S_ERROR result of the submission on failure,
                otherwise S_OK()
   """
   
   #need credentials, should be there since the initialize
   
   from DIRAC.Interfaces.API.Dirac import Dirac
   d = Dirac()
   from DIRAC.Interfaces.API.Job import Job
   
   from DIRAC.ConfigurationSystem.Client.Helpers.Operations import Operations
   import DIRAC
   
   ops = Operations()
   # NOTE(review): 'SofwareManagementScript' is missing a 't' ('Software');
   # presumably it matches the key actually stored in the CS -- confirm
   # against the configuration before renaming. self.script is the fallback.
   scriptname = ops.getValue("ResourceStatus/SofwareManagementScript", self.script)
   
   # Build a minimal probe job targeted at the CE.
   j = Job()
   j.setDestinationCE(ce)
   j.setCPUTime(1000)
   j.setName("Probe %s" % ce)
   j.setJobGroup("SoftwareProbe")
   j.setExecutable("%s/GlastDIRAC/ResourceStatusSystem/Client/%s" % (DIRAC.rootPath, scriptname), 
                   logFile='SoftwareProbe.log')
   j.setOutputSandbox('*.log')
   res = d.submit(j)
   if not res['OK']:
     return res
     
   return S_OK()
예제 #2
0
class DiracTestCases(unittest.TestCase):
  """ Unit tests for Dirac API JDL-parameter parsing. """

  def setUp(self):
    # A fresh API object for every test case.
    self.dirac = Dirac()

  def tearDown(self):
    pass

  def test_basicJob(self):
    # Parse a raw JDL snippet and check that every parameter survived.
    result = self.dirac._Dirac__getJDLParameters("Parameter=Value;Parameter2=Value2")
    self.assertTrue(result['OK'])
    parameters = result['Value']
    self.assertIn('Parameter', parameters)
    self.assertEqual('Value', parameters['Parameter'])
    self.assertIn('Parameter2', parameters)
    self.assertEqual('Value2', parameters['Parameter2'])

  def test_JobJob(self):
    from DIRAC.Interfaces.API.Job import Job
    # A Job object must be accepted directly and expose its stdout/stderr.
    testJob = Job(stdout='printer', stderr='/dev/null')
    result = self.dirac._Dirac__getJDLParameters(testJob)
    self.assertTrue(result['OK'])
    parameters = result['Value']
    self.assertEqual('printer', parameters['StdOutput'])
    self.assertEqual('/dev/null', parameters['StdError'])
def _getOutputs():
  """ Retrieve the sandboxes (and, when requested, the output data) of every
      job recorded in the repository file given on the command line.

      Exits through dexit() with a non-zero code on any failure.
  """
  repoLocation = ''
  clip = _Params()
  clip.registerSwitches()
  Script.parseCommandLine( ignoreErrors = False )
  repoLocation = clip.repo
  if not repoLocation:
    Script.showHelp()
    dexit(1)
  from DIRAC import gLogger
  from DIRAC.Interfaces.API.Dirac import Dirac

  dirac = Dirac(True, repoLocation)

  exitCode = 0
  res = dirac.monitorRepository(False)
  if not res['OK']:
    gLogger.error("Failed because %s" % res['Message'])
    dexit(1)

  res = dirac.retrieveRepositorySandboxes()
  if not res['OK']:
    gLogger.error("Failed because %s" % res['Message'])
    dexit(1)
  if clip.outputdata:
    res = dirac.retrieveRepositoryData()
    if not res['OK']:
      gLogger.error("Failed because %s" % res['Message'])
      # BUGFIX: was the builtin exit(); use dexit() for a clean DIRAC
      # shutdown like every other error path in this function.
      dexit(1)
  dexit(exitCode)
예제 #4
0
 def do_get(self,args):
   """ Download file from grid and store in a local directory
   
       usage:
       
         get <lfn> [<local_directory>] 
   """
   
   argss = args.split()
   if not argss:
     # Guard: no LFN supplied -- show the usage text instead of raising
     # an IndexError on argss[0].
     print(self.do_get.__doc__)
     return
   lfn = self.getPath(argss[0])
   # 'targetDir' instead of 'dir' so the builtin dir() is not shadowed.
   targetDir = ''
   if len(argss) > 1:
     targetDir = argss[1]

   dirac = Dirac()
   localCWD = ''
   if targetDir:
     # getFile downloads into the current directory, so temporarily
     # chdir to the requested target and restore afterwards.
     localCWD = os.getcwd()
     os.chdir(targetDir)
   result = dirac.getFile(lfn)
   if localCWD:
     os.chdir(localCWD)

   if not result['OK']:
     print('Error: %s' % (result['Message']))
   else:
     print("File %s successfully downloaded" % lfn)
예제 #5
0
 def do_add(self,args):
   """ Upload a new file to a SE and register in the File Catalog
   
       usage:
       
         add <lfn> <pfn> <SE> [<guid>] 
   """
   
   # ToDo - adding directories
   
   argss = args.split()
   
   if len(argss) < 3:
     print("Error: insufficient number of arguments")
     # BUGFIX: previously fell through and raised IndexError on argss[2];
     # also fixed the 'unsufficient' typo in the message.
     return
   
   lfn = self.getPath(argss[0])
   pfn = argss[1]
   se = argss[2]
   # GUID is optional; the catalog generates one when None.
   guid = None
   if len(argss) > 3:
     guid = argss[3]

   dirac = Dirac()
   result = dirac.addFile(lfn, pfn, se, guid, printOutput=False)
   if not result['OK']:
     print('Error: %s' % (result['Message']))
   else:
     print("File %s successfully uploaded to the %s SE" % (lfn, se))
예제 #6
0
 def do_replicate(self,args):
   """ Replicate a given file to a given SE
       
       usage:
         replicate <LFN> <SE> [<SourceSE>]
   """
   argss = args.split()
   if len(argss) < 2:
     # BUGFIX: was len(args) -- the character count of the raw command
     # line, not the number of tokens -- and it fell through to an
     # IndexError instead of returning.
     print("Error: insufficient number of arguments")
     return
   lfn = self.getPath(argss[0])
   se = argss[1]
   sourceSE = ''
   if len(argss) > 2:
     sourceSE = argss[2]
   # (removed: an unused 'localCache = argss[3]' assignment)
   try:
     dirac = Dirac()
     result = dirac.replicate(lfn, se, sourceSE, printOutput=True)
     if not result['OK']:
       print('Error: %s' % (result['Message']))
     elif not result['Value']:
       print("Replica is already present at the target SE")
     else:
       print("File %s successfully replicated to the %s SE" % (lfn, se))
   except Exception as x:
     # Portable except syntax (the old 'except Exception, x' is Py2-only).
     print("Error: replicate failed with exception: %s" % x)
예제 #7
0
파일: Job.py 프로젝트: graciani/DIRAC
  def runLocal( self, dirac = None ):
    """ Submit this job in local execution mode.

        A caller-supplied Dirac API object is used when given; otherwise a
        fresh one is created on the fly.
    """
    apiClient = dirac if dirac is not None else Dirac()
    return apiClient.submit( self, mode = 'local' )
def __downloadJobDescriptionXML(jobID, basepath):
  """
  Download the jobDescription.xml input sandbox of *jobID* into the
  temporary directory *basepath*.

  :param jobID: DIRAC job identifier
  :param basepath: destination directory for the sandbox contents
  :return: the S_OK/S_ERROR structure from Dirac.getInputSandbox, so the
           caller can detect a failed download (previously discarded).
  """
  from DIRAC.Interfaces.API.Dirac import Dirac
  jdXML = Dirac()
  # BUGFIX: propagate the result instead of silently ignoring failures.
  return jdXML.getInputSandbox(jobID, basepath)
예제 #9
0
    def uploadAndRegisterFiles(self, fileList, SE="IHEPD-USER", guid=None, ePoint=""):
        """Upload a set of files to an SE and register them in the DFC.

        The destination directory hierarchy is derived from each file's
        attributes; *ePoint* (the energy point, for scan data) is appended
        as an extra path component when given.

        :param fileList: list of local file paths to upload
        :param SE: destination storage element name
        :param guid: optional GUID to register with each file
        :param ePoint: optional energy-point path component
        :return: S_OK() when every file succeeded, S_ERROR(<failed paths>)
                 otherwise; S_ERROR immediately if attributes are missing
        """
        result_OK = 1
        errorList = []
        # Create the API object once, not once per file (was inside the loop).
        dirac = Dirac()
        for fullpath in fileList:
            # Get the attributes of the file; bail out if they are missing.
            fileAttr = self.__getFileAttributes(fullpath)
            if len(fileAttr) == 0:
                print("failed to get file %s attributes" % fullpath)
                return S_ERROR("failed to get file attributes")
            # Create the directory and attach its metadata when absent.
            lastDir = self.registerHierarchicalDir(fileAttr, rootDir="/bes")
            dirMeta = self.getDirMetaVal(lastDir)
            # 'in' replaces dict.has_key(), which was removed in Python 3.
            if not ("jobOptions" in dirMeta or "description" in dirMeta):
                lastDirMetaDict = {"jobOptions": fileAttr["jobOptions"],
                                   "description": fileAttr["description"]}
                try:
                    self.__registerDirMetadata(lastDir, lastDirMetaDict)
                except Exception:
                    # Best-effort registration, but no longer a bare except
                    # that would also swallow SystemExit/KeyboardInterrupt.
                    pass
            if len(ePoint):
                lastDir = lastDir + os.sep + ePoint
            lfn = lastDir + os.sep + fileAttr["LFN"]
            # Upload and register the file, then its metadata.
            result = dirac.addFile(lfn, fullpath, SE, guid, printOutput=True)
            if not result["OK"]:
                print("ERROR %s" % (result["Message"]))
                errorList.append(fullpath)
                result_OK = 0
            else:
                result = self.__registerFileMetadata(lfn, fileAttr)
                if not result["OK"]:
                    result_OK = 0
                    print("failed to register file metadata")
        if result_OK:
            return S_OK()
        else:
            return S_ERROR(errorList)
def _getOutputData():
  """ Retrieve the output data of every job recorded in the repository file.

      Error handling now mirrors _getOutputs(): failures are logged and
      terminate with a non-zero exit code instead of being silently ignored.
  """
  cliParams = _Params()
  cliParams.registerSwitches()
  Script.parseCommandLine( ignoreErrors = False )
  if not cliParams.repo:
    Script.showHelp()
    dexit(2)
  from DIRAC import gLogger
  from DIRAC.Interfaces.API.Dirac import Dirac

  dirac = Dirac(True, cliParams.repo)

  exitCode = 0
  res = dirac.monitorRepository(False)
  if not res['OK']:
    gLogger.error("Failed because %s" % res['Message'])
    dexit(1)
  res = dirac.retrieveRepositoryData()
  if not res['OK']:
    gLogger.error("Failed because %s" % res['Message'])
    dexit(1)
  dexit(exitCode)
    def do_installonsite(self,argss):
        """ Install a release on a grid site : 
            installonsite tag site
        """
        args = argss.split()
        if len(args) < 2:
            print(self.do_installonsite.__doc__)
            return
        tag = args[0]
        site = args[1]

        from DIRAC.Interfaces.API.Dirac import Dirac
        d = Dirac()
        from DIRAC.Interfaces.API.Job import Job
        import os

        # (removed: an unused Operations() instance and a stale block of
        #  commented-out rsync-server tag validation)

        # Submit a job that runs the installation script at the chosen site.
        scriptname = "InstallSoftware.py"
        j = Job()
        j.setDestination(site)
        j.setCPUTime(1000)
        j.setName("Installation "+tag)
        j.setExecutable(os.environ['DIRAC']+"/GlastDIRAC/ResourceStatusSystem/Client/"+scriptname , logFile='SoftwareInstallation.log')
        j.setOutputSandbox('*.log')
        res = d.submit(j)
        if not res['OK']:
          print("Could not submit the installation at site %s, message %s"%(site,res['Message']))
          return

        print("Job submitted, id = "+str(res['Value']))

        # Record the tag as available at the site.
        print("Add tag :")
        res = self.client.addTagAtSite(tag,site)
        if not res['OK']:
            print("Could not register tag %s at site %s, message %s"%(tag,site,res['Message']))
            return
        print("Added %s to %i CEs"%(tag,len(res['Value'][tag])))
예제 #12
0
파일: dsub.py 프로젝트: yan-tian/dsub
def submitJob(jobPara):
    """Build a DIRAC job from the parameter dict and submit it.

    :param jobPara: dict with keys jobName, jobGroup, jobScript,
                    jobScriptLog, inputSandbox, outputSandbox, outputData,
                    SE, sites and CPUTime
    :return: the S_OK/S_ERROR structure returned by Dirac.submit
    """
    dirac = Dirac()
    j = Job()
    j.setName(jobPara['jobName'])
    j.setJobGroup(jobPara['jobGroup'])
    j.setExecutable(jobPara['jobScript'], logFile = jobPara['jobScriptLog'])
    j.setInputSandbox(jobPara['inputSandbox'])
    j.setOutputSandbox(jobPara['outputSandbox'])
    j.setOutputData(jobPara['outputData'], jobPara['SE'])
    j.setDestination(jobPara['sites'])
    j.setCPUTime(jobPara['CPUTime'])
    result = dirac.submit(j)
    # Parenthesised single-argument prints work under both Python 2 and 3
    # (the old print statements were Py2-only).
    if result['OK']:
        print('Job %s submitted successfully. ID = %d' % (jobPara['jobName'], result['Value']))
    else:
        print('Job %s submitted failed' % jobPara['jobName'])
    return result
예제 #13
0
def main():
  """ Main program entry point. """
  # Expected invocation: <prog> <username> [<site>]
  if len(sys.argv) < 2 or len(sys.argv) > 3:
    usage()
  uname = sys.argv[1]
  site = None
  if len(sys.argv) >= 3:
    site = sys.argv[2]
  print "Fetching job list for user '%s'..." % uname
  jlist = find_jobs(uname, site)
  # NOTE(review): job ID '1' is unconditionally appended to the kill list;
  # this looks like a debugging leftover -- confirm before removing.
  jlist.append('1')
  print "Found %u jobs, killing..." % len(jlist)
  dirac = Dirac()
  # Kill in batches of BATCH_SIZE to keep each RPC call small.
  for loc in xrange(0, len(jlist), BATCH_SIZE):
    print "%u/%u complete." % (loc, len(jlist))
    dirac.killJob(jlist[loc:loc+BATCH_SIZE])
  print "%u/%u complete." % (len(jlist), len(jlist))
  print "Exiting."
예제 #14
0
  def submitNewBigJob( self ):
    """ Stage this job's input sandbox on the Hadoop master and submit the
        job there through the Hive client.

        :return: S_OK( <remote job id> ) or S_ERROR( <error message> )
    """
    # Fetch the JDL and extract the executable name, if any.
    result = jobDB.getJobJDL( str( self.__jobID ) , True )
    classAdJob = ClassAd( result['Value'] )
    executableFile = ""
    if classAdJob.lookupAttribute( 'Executable' ):
      executableFile = classAdJob.getAttributeString( 'Executable' )

    tempPath = self.__tmpSandBoxDir
    dirac = Dirac()
    if not os.path.exists( tempPath ):
      os.makedirs( tempPath )

    settingJobSandBoxDir = dirac.getInputSandbox( self.__jobID, tempPath )
    self.log.info( 'Writting temporal SandboxDir in Server', settingJobSandBoxDir )
    moveData = self.__tmpSandBoxDir + "/InputSandbox" + str( self.__jobID )

    # Copy the sandbox contents over to the Hadoop master.
    HiveV1Cli = HiveV1Client( self.__User , self.__publicIP )
    returned = HiveV1Cli.dataCopy( moveData, self.__tmpSandBoxDir )
    self.log.info( 'Copy the job contain to the Hadoop Master with HIVE: ', returned )

    jobInfo = jobDB.getJobAttributes( self.__jobID )
    if not jobInfo['OK']:
      # BUGFIX: the failure text of a DIRAC result lives under 'Message';
      # 'Value' is not set on error.
      return S_ERROR( jobInfo['Message'] )
    proxy = ""
    jobInfo = jobInfo['Value']
    # Use the owner's proxy, downloading or requesting it as needed.
    if gProxyManager.userHasProxy( jobInfo["OwnerDN"], jobInfo["OwnerGroup"] ):
      proxy = gProxyManager.downloadProxyToFile( jobInfo["OwnerDN"], jobInfo["OwnerGroup"] )
    else:
      proxy = self.__requestProxyFromProxyManager( jobInfo["OwnerDN"], jobInfo["OwnerGroup"] )

    HiveJob = "InputSandbox" + str( self.__jobID ) + "/" + executableFile
    HiveJobOutput = str( self.__jobID ) + "_" + executableFile + "_out"

    returned = HiveV1Cli.jobSubmit( tempPath, HiveJob, proxy['chain'], HiveJobOutput )
    self.log.info( 'Launch Hadoop-Hive job to the Master: ', returned )

    if not returned['OK']:
      return S_ERROR( returned['Message'] )
    else:
      self.log.info( 'Hadoop-Hive Job ID: ', returned['Value'] )

    return S_OK( returned['Value'] )
예제 #15
0
파일: Dirac.py 프로젝트: suosdu/jsub
    def submit(self, param):
        """Create, configure and submit a job described by *param*.

        :param param: dict with keys jobName, jobScript, jobScriptLog,
                      inputSandbox, outputSandbox and outputData
        :return: dict with key 'submit' (bool) and, on success, 'job_id'
        """
        job = Job()
        job.setName(param['jobName'])
        job.setExecutable(param['jobScript'], logFile = param['jobScriptLog'])
        # Destination and group are optional instance-level settings.
        if self.site:
            job.setDestination(self.site)
        if self.jobGroup:
            job.setJobGroup(self.jobGroup)
        job.setInputSandbox(param['inputSandbox'])
        job.setOutputSandbox(param['outputSandbox'])
        job.setOutputData(param['outputData'], outputSE = self.outputSE, outputPath = self.outputPath)

        result = GridDirac().submit(job)

        status = {'submit': result['OK']}
        if status['submit']:
            status['job_id'] = result['Value']

        return status
예제 #16
0
    def downloadFilesByFilelist(self, fileList, destDir=""):
        """Download a set of files from the SE.

        *fileList* is a list of LFNs (for example the result of
        getFilesByDatasetName()); each file is fetched into *destDir*.

           Example usage:
           >>>badger.downloadFilesByFilelist(fileList)
        """
        downloadErrors = {}
        diracApi = Dirac()
        for fileName in fileList:
            ret = diracApi.getFile(fileName, destDir, printOutput=False)
            if not ret["OK"]:
                downloadErrors[fileName] = ret["Message"]
        if not downloadErrors:
            return S_OK("File download successfully.")
        # Attach the per-LFN failure map to the error structure.
        failure = S_ERROR()
        failure["errorDict"] = downloadErrors
        return failure
예제 #17
0
    def submitNewBigPilot(self):
        """Stage this job's input sandbox and a generated job XML on the
        server, then submit a pilot to the Hadoop master.

        :return: S_OK( <hadoop job id> ) or S_ERROR( <error message> )
        """
        tempPath = self.__tmpSandBoxDir + str(self.__jobID)
        dirac = Dirac()
        if not os.path.exists(tempPath):
            os.makedirs(tempPath)

        settingJobSandBoxDir = dirac.getInputSandbox(self.__jobID, tempPath)
        self.log.info("Writting temporal SandboxDir in Server", settingJobSandBoxDir)

        # Wrap the job in a Hadoop job.xml next to the sandbox.
        jobXMLName = "job:" + str(self.__jobID) + ".xml"
        with open(os.path.join(tempPath, jobXMLName), "wb") as temp_file:
            temp_file.write(self.jobWrapper())
        self.log.info("Writting temporal Hadoop Job.xml")

        HadoopV1cli = HadoopV1Client(self.__User, self.__publicIP, self.__Port)
        # returned = HadoopV1cli.dataCopy( tempPath, self.__tmpSandBoxDir )
        # self.log.info( 'Copy the job contain to the Hadoop Master: ', returned )

        jobInfo = jobDB.getJobAttributes(self.__jobID)
        if not jobInfo["OK"]:
            # BUGFIX: the failure text of a DIRAC result lives under
            # 'Message'; 'Value' is not set on error.
            return S_ERROR(jobInfo["Message"])
        proxy = ""
        jobInfo = jobInfo["Value"]
        # Use the owner's proxy, downloading or requesting it as needed.
        if gProxyManager.userHasProxy(jobInfo["OwnerDN"], jobInfo["OwnerGroup"]):
            proxy = gProxyManager.downloadProxyToFile(jobInfo["OwnerDN"], jobInfo["OwnerGroup"])
        else:
            proxy = self.__requestProxyFromProxyManager(jobInfo["OwnerDN"], jobInfo["OwnerGroup"])

        returned = HadoopV1cli.submitPilotJob(tempPath, jobXMLName, proxy["chain"])

        self.log.info("Launch Hadoop pilot to the Hadoop Master: ", returned)

        if not returned["OK"]:
            # BUGFIX: same 'Message' vs 'Value' mix-up on the submit result
            # (the sibling Hadoop/Hive implementations return 'Message').
            return S_ERROR(returned["Message"])
        else:
            self.log.info("Hadoop Job ID: ", returned["Value"])

        return S_OK(returned["Value"])
예제 #18
0
  def submitNewBigJob( self ):
    """ Stage this job's input sandbox and a generated job XML on the
        server, copy them to the Hadoop master and submit the job there.

        :return: S_OK( <hadoop job id> ) or S_ERROR( <error message> )
    """
    tempPath = self.__tmpSandBoxDir + str( self.__jobID )
    dirac = Dirac()
    if not os.path.exists( tempPath ):
      os.makedirs( tempPath )

    settingJobSandBoxDir = dirac.getInputSandbox( self.__jobID, tempPath )
    self.log.info( 'Writting temporal SandboxDir in Server', settingJobSandBoxDir )

    # Wrap the job in a Hadoop job.xml next to the sandbox.
    jobXMLName = "job:" + str( self.__jobID ) + '.xml'
    with open( os.path.join( tempPath, jobXMLName ), 'wb' ) as temp_file:
        temp_file.write( self.jobWrapper() )
    self.log.info( 'Writting temporal Hadoop Job.xml' )

    # Renamed from 'HadoopV1cli': this is the V2 client.
    HadoopV2cli = HadoopV2Client( self.__User , self.__publicIP )
    returned = HadoopV2cli.dataCopy( tempPath, self.__tmpSandBoxDir )
    self.log.info( 'Copy the job contain to the Hadoop Master: ', returned )

    jobInfo = jobDB.getJobAttributes( self.__jobID )
    if not jobInfo['OK']:
      # BUGFIX: the failure text of a DIRAC result lives under 'Message';
      # 'Value' is not set on error.
      return S_ERROR( jobInfo['Message'] )
    proxy = ""
    jobInfo = jobInfo['Value']
    # Use the owner's proxy, downloading or requesting it as needed.
    if gProxyManager.userHasProxy( jobInfo["OwnerDN"], jobInfo["OwnerGroup"] ):
      proxy = gProxyManager.downloadProxyToFile( jobInfo["OwnerDN"], jobInfo["OwnerGroup"] )
    else:
      proxy = self.__requestProxyFromProxyManager( jobInfo["OwnerDN"], jobInfo["OwnerGroup"] )

    returned = HadoopV2cli.jobSubmit( tempPath, jobXMLName, proxy['chain'] )
    self.log.info( 'Launch Hadoop job to the Hadoop Master: ', returned )

    if not returned['OK']:
      return S_ERROR( returned['Message'] )
    else:
      self.log.info( 'Hadoop Job ID: ', returned['Value'] )

    return S_OK( returned['Value'] )
예제 #19
0
  def __init__( self, args = None, apis = None ):
    """ Initialise the CE test.

        :param args: dict of test options; must contain 'TestType' and
                     'executable', may contain 'timeout' (default 1800)
                     and 'VO'
        :param apis: optional dict of pre-built client objects; recognised
                     keys are 'WMSAdministrator' and 'Dirac' (anything
                     missing is constructed here)
    """
    super( CEBaseTest, self ).__init__( args, apis )

    self.timeout = self.args.get( 'timeout', 1800 )
    self.vo = self.args.get( 'VO' )
    self.testType = self.args[ 'TestType' ]
    self.executable = self.args[ 'executable' ]
    # Hard-coded server-side locations for SAM logs and test scripts.
    self.__logPath = '/opt/dirac/pro/BESDIRAC/ResourceStatusSystem/SAM/log'
    self.__scriptPath = '/opt/dirac/pro/BESDIRAC/ResourceStatusSystem/SAM/sam_script'

    # Reuse injected clients when provided (eases testing); otherwise
    # build the default ones.
    if 'WMSAdministrator' in self.apis:
      self.wmsAdmin = self.apis[ 'WMSAdministrator' ]
    else:
      self.wmsAdmin = RPCClient( 'WorkloadManagement/WMSAdministrator' )

    if 'Dirac' in self.apis:
      self.dirac = self.apis[ 'Dirac' ]
    else:
      self.dirac = Dirac()
예제 #20
0
Script.parseCommandLine(ignoreErrors=True)
# Positional arguments may themselves be comma-separated lists of files.
files = []
for oFile in Script.getPositionalArgs():
  files += oFile.split(',')

import DIRAC
from DIRAC.Interfaces.API.Dirac import Dirac
from LHCbDIRAC.Core.Utilities.File import getRootFileGUIDs
from LHCbDIRAC.DataManagementSystem.Client.DMScript import printDMResult

if not files:
  Script.showHelp()
  DIRAC.exit(0)
# Maps a resolved local path or access URL to a human-readable description.
existFiles = {}
nonExisting = []
dirac = Dirac()

for localFile in files:
  if os.path.exists(localFile):
    existFiles[os.path.realpath(localFile)] = localFile
  elif localFile.startswith('/lhcb'):
    res = dirac.getReplicas(localFile, active=True, preferDisk=True)
    if res['OK'] and localFile in res['Value']['Successful']:
      ses = res['Value']['Successful'][localFile].keys()
      for se in ses:
        res = dirac.getAccessURL(localFile, se, protocol=['root', 'xroot'])
        if res['OK'] and localFile in res['Value']['Successful']:
          existFiles[res['Value']['Successful'][localFile]] = "%s @ %s" % (localFile, se)
    else:
      nonExisting.append(localFile)
  elif localFile.startswith('root:'):
예제 #21
0
from DIRAC.Core.Base import Script

Script.setUsageMessage('\n'.join([
    __doc__.split('\n')[1], 'Usage:',
    '  %s [option|cfgfile] ... JobID ...' % Script.scriptName, 'Arguments:',
    '  JobID:    DIRAC Job ID'
]))
Script.registerSwitch("D:", "Dir=", "Store the output in this directory")
Script.parseCommandLine(ignoreErrors=True)
args = Script.getPositionalArgs()

if len(args) < 1:
    Script.showHelp()

from DIRAC.Interfaces.API.Dirac import Dirac, parseArguments
dirac = Dirac()
exitCode = 0
errorList = []

# Optional -D/--Dir switch selects the download destination directory.
outputDir = ''
for sw, v in Script.getUnprocessedSwitches():
    if sw in ('D', 'Dir'):
        outputDir = v

# Retrieve the output data of every requested job, collecting failures.
for job in parseArguments(args):

    result = dirac.getJobOutputData(job, destinationDir=outputDir)
    if result['OK']:
        print 'Job %s output data retrieved' % (job)
    else:
        errorList.append((job, result['Message']))
예제 #22
0
    __doc__.split('\n')[1], 'Usage:',
    '  %s [option|cfgfile] ... PFN SE' % Script.scriptName, 'Arguments:',
    '  PFN:      Physical File Name or file containing PFNs (mandatory)',
    '  SE:       Valid DIRAC SE (mandatory)'
]))
Script.parseCommandLine(ignoreErrors=True)
args = Script.getPositionalArgs()

if len(args) < 2:
    Script.showHelp(exitCode=1)

if len(args) > 2:
    print('Only one PFN SE pair will be considered')

from DIRAC.Interfaces.API.Dirac import Dirac
dirac = Dirac()
exitCode = 0

pfn = args[0]
seName = args[1]
# The PFN argument may itself be a local file listing one PFN per line.
try:
    f = open(pfn, 'r')
    pfns = f.read().splitlines()
    f.close()
except IOError:
    # BUGFIX: was 'except BaseException', which would also swallow
    # SystemExit/KeyboardInterrupt; only a failed open/read means
    # "treat the argument as a literal PFN" (same idiom as the sibling
    # LFN script in this project).
    pfns = [pfn]

for pfn in pfns:
    result = dirac.getPhysicalFileMetadata(pfn, seName, printOutput=True)
    if not result['OK']:
        print('ERROR: ', result['Message'])
예제 #23
0
import os

Script.setUsageMessage('\n'.join([
    __doc__.split('\n')[1], 'Usage:',
    '  %s [option|cfgfile] ... JobID ...' % Script.scriptName, 'Arguments:',
    '  JobID:    DIRAC Job ID'
]))
Script.registerSwitch("D:", "Dir=", "Store the output in this directory")
Script.parseCommandLine(ignoreErrors=True)
args = Script.getPositionalArgs()

if len(args) < 1:
    Script.showHelp()

from DIRAC.Interfaces.API.Dirac import Dirac, parseArguments
dirac = Dirac()
exitCode = 0
errorList = []

# Optional -D/--Dir switch; None lets the API pick its default location.
outputDir = None
for sw, v in Script.getUnprocessedSwitches():
    if sw in ('D', 'Dir'):
        outputDir = v

for job in parseArguments(args):

    result = dirac.getInputSandbox(job, outputDir=outputDir)
    if result['OK']:
        if os.path.exists('InputSandbox%s' % job):
            print 'Job input sandbox retrieved in InputSandbox%s/' % (job)
    else:
예제 #24
0
Usage:
   %s [option] ... JobID ...
""" % Script.scriptName)

# Default catalog client type; may be overridden via -f/--file-catalog.
fcType = 'FileCatalog'
Script.registerSwitch("f:", "file-catalog=",
                      "Catalog client type to use (default %s)" % fcType)
Script.registerSwitch("l:", "lfn=", "File lfn")

Script.parseCommandLine(ignoreErrors=False)
options = Script.getUnprocessedSwitches()
args = Script.getPositionalArgs()

from DIRAC.Interfaces.API.Dirac import Dirac
dirac = Dirac()

from DIRAC.Resources.Catalog.FileCatalogClient import FileCatalogClient
fccType = 'DataManagement/FileCatalog'
fcc = FileCatalogClient(fccType)

import os
import sys
import tarfile


def findFiles(jobIds):
    '''Find specified log files from the job ids'''
    lfns = []

    for jobId in jobIds:
예제 #25
0
Script.parseCommandLine(ignoreErrors=True)
args = Script.getPositionalArgs()

# BUGFIX: 'original' was never initialised, so running without the
# -O/--Original switch raised a NameError at the getJobJDL call below;
# the switch loop was also duplicated (the second pass could never see
# anything new anyway).
original = False
for switch in Script.getUnprocessedSwitches():
    if switch[0] == 'Original' or switch[0] == 'O':
        original = True

if len(args) < 1:
    Script.showHelp()

from DIRAC.Interfaces.API.Dirac import Dirac, parseArguments
dirac = Dirac()
exitCode = 0
errorList = []

# Print the JDL of every requested job, collecting failures.
for job in parseArguments(args):

    result = dirac.getJobJDL(job, original=original, printOutput=True)
    if not result['OK']:
        errorList.append((job, result['Message']))
        exitCode = 2

for error in errorList:
    print("ERROR %s: %s" % error)

DIRAC.exit(exitCode)
예제 #26
0
"""
  Issue a kill signal to a running DIRAC job
"""
from __future__ import print_function
__RCSID__ = "$Id$"
import DIRAC
from DIRAC.Core.Base import Script

Script.setUsageMessage( '\n'.join( [ __doc__.split( '\n' )[1],
                                     'Usage:',
                                     '  %s [option|cfgfile] ... JobID ...' % Script.scriptName,
                                     'Arguments:',
                                     '  JobID:    DIRAC Job ID' ] ) )
Script.parseCommandLine( ignoreErrors = True )
args = Script.getPositionalArgs()

if len( args ) < 1:
  Script.showHelp()

from DIRAC.Interfaces.API.Dirac                              import Dirac, parseArguments

result = Dirac().killJob( parseArguments( args ) )
if result['OK']:
  print('Killed jobs %s' % ','.join([str(j) for j in result['Value']]))
  exitCode = 0
else:
  print('ERROR', result['Message'])
  exitCode = 2

DIRAC.exit( exitCode )
예제 #27
0
            except:
                pass
        elif switch[0] == 'Job':
            try:
                job = int(switch[1])
            except:
                print "Invalid jobID", switch[1]

    if not job:
        args = Script.getPositionalArgs()

        if len(args) == 1:
            requestName = args[0]
    else:
        from DIRAC.Interfaces.API.Dirac import Dirac
        dirac = Dirac()
        res = dirac.attributes(job)
        if not res['OK']:
            print "Error getting job parameters", res['Message']
        else:
            jobName = res['Value'].get('JobName')
            if not jobName:
                print 'Job %d not found' % job
            else:
                requestName = jobname + '_job_%d' % job

    requests = []
    if requestName:
        requests = [requestName]
    elif resetFailed:
        res = reqClient.getRequestNamesList(['Failed'], maxReset)
예제 #28
0
    'Arguments:', '  LFN:      Logical File Name or file containing LFNs',
    '  SE:       Valid DIRAC SE', '  PROTO:    Optional protocol for accessURL'
]))
Script.parseCommandLine(ignoreErrors=True)
args = Script.getPositionalArgs()

# pylint: disable=wrong-import-position
from DIRAC.Interfaces.API.Dirac import Dirac

if len(args) < 2:
    Script.showHelp()

# A third positional argument (the protocol) is allowed; more are ignored.
if len(args) > 3:
    print 'Only one LFN SE pair will be considered'

dirac = Dirac()
exitCode = 0

lfn = args[0]
seName = args[1]
proto = False
if len(args) > 2:
    proto = args[2]

# The LFN argument may itself be a local file listing one LFN per line.
try:
    f = open(lfn, 'r')
    lfns = f.read().splitlines()
    f.close()
except IOError:
    lfns = [lfn]
예제 #29
0
            "  SE:       Valid DIRAC SE",
        ]
    )
)
Script.parseCommandLine(ignoreErrors=True)
args = Script.getPositionalArgs()

if len(args) < 2:
    Script.showHelp()

if len(args) > 2:
    print "Only one LFN SE pair will be considered"

from DIRAC.Interfaces.API.Dirac import Dirac

dirac = Dirac()
exitCode = 0

lfn = args[0]
seName = args[1]

# The LFN argument may itself be a local file listing one LFN per line.
try:
    f = open(lfn, "r")
    lfns = f.read().splitlines()
    f.close()
except:
    # NOTE(review): bare except also hides unexpected errors; IOError looks
    # like the intended case (the sibling script uses it) -- confirm before
    # narrowing.
    lfns = [lfn]

for lfn in lfns:
    result = dirac.getAccessURL(lfn, seName, printOutput=True)
    if not result["OK"]:
예제 #30
0
import sys

from DIRAC.Interfaces.API.Dirac import Dirac

import os

# Expected invocation: <prog> <jobName>, where <jobName> is a directory
# containing a jobIdList.txt file.
if len(sys.argv)< 2 :
  print "Usage %s <jobName>"%sys.argv[0]
  sys.exit(1)

jobName = sys.argv[1]

# Job states considered terminal.
finalStatus = ['Done', 'Failed']

dirac = Dirac()

# One numeric job ID per line in <jobName>/jobIdList.txt.
idstr = open("%s/jobIdList.txt"%jobName, 'r').readlines()
ids = map(int, idstr)
print "found %s jobs"%(len(ids))

res = dirac.getJobSummary(ids)
if not res['OK']:
  print res['Message']
  sys.exit(1)

metadata = res['Value']

for jid in ids:
  jobMeta = metadata.get( jid, None )
  if not jobMeta :
예제 #31
0
import DIRAC
from DIRAC.Core.Base import Script

Script.setUsageMessage( '\n'.join( [ __doc__.split( '\n' )[1],
                                     'Usage:',
                                     '  %s [option|cfgfile] ... JobID ...' % Script.scriptName,
                                     'Arguments:',
                                     '  JobID:    DIRAC Job ID' ] ) )
Script.parseCommandLine( ignoreErrors = True )
args = Script.getPositionalArgs()

if len( args ) < 1:
  Script.showHelp()

from DIRAC.Interfaces.API.Dirac                              import Dirac
dirac = Dirac()
exitCode = 0
errorList = []

# Print the attributes of every requested job, collecting failures.
for job in args:

  result = dirac.attributes( int(job), printOutput = True )
  if not result['OK']:
    errorList.append( ( job, result['Message'] ) )
    exitCode = 2

for error in errorList:
  print "ERROR %s: %s" % error

DIRAC.exit( exitCode )
예제 #32
0
def main():
    """Search the std.out (or a chosen file) of selected jobs for a string.

    Jobs are selected through the status/site/owner/... switches; each
    matching job's output sandbox is downloaded, the requested file is
    scanned for the search string, and the sandbox is removed again.
    """
    Script.registerSwitch("", "Status=", "Primary status")
    Script.registerSwitch("", "MinorStatus=", "Secondary status")
    Script.registerSwitch("", "ApplicationStatus=", "Application status")
    Script.registerSwitch("", "Site=", "Execution site")
    Script.registerSwitch("", "Owner=", "Owner (DIRAC nickname)")
    Script.registerSwitch("", "JobGroup=",
                          "Select jobs for specified job group")
    Script.registerSwitch(
        "", "Date=",
        "Date in YYYY-MM-DD format, if not specified default is today")
    Script.registerSwitch("", "File=",
                          "File name,if not specified default is std.out ")
    # Registering arguments will automatically add their description to the help menu
    Script.registerArgument("String: string to search for")
    _, args = Script.parseCommandLine(ignoreErrors=True)

    # Default values
    status = None
    minorStatus = None
    appStatus = None
    site = None
    owner = None
    jobGroup = None
    date = None
    filename = "std.out"

    if len(args) != 1:
        Script.showHelp()

    searchstring = str(args[0])

    # Map each recognised switch onto its selection variable.
    for switch in Script.getUnprocessedSwitches():
        if switch[0].lower() == "status":
            status = switch[1]
        elif switch[0].lower() == "minorstatus":
            minorStatus = switch[1]
        elif switch[0].lower() == "applicationstatus":
            appStatus = switch[1]
        elif switch[0].lower() == "site":
            site = switch[1]
        elif switch[0].lower() == "owner":
            owner = switch[1]
        elif switch[0].lower() == "jobgroup":
            jobGroup = switch[1]
        elif switch[0].lower() == "date":
            date = switch[1]
        elif switch[0].lower() == "file":
            filename = switch[1]

    # NOTE(review): selDate is computed but never used below -- presumably
    # intended for a status message; confirm before removing.
    selDate = date
    if not date:
        selDate = "Today"

    from DIRAC.Interfaces.API.Dirac import Dirac

    dirac = Dirac()
    exitCode = 0
    errorList = []
    resultDict = {}

    result = dirac.selectJobs(
        status=status,
        minorStatus=minorStatus,
        applicationStatus=appStatus,
        site=site,
        owner=owner,
        jobGroup=jobGroup,
        date=date,
    )
    if result["OK"]:
        jobs = result["Value"]
    else:
        print("Error in selectJob", result["Message"])
        DIRAC.exit(2)

    for job in jobs:

        result = dirac.getOutputSandbox(job)
        if result["OK"]:
            if os.path.exists("%s" % job):

                # Scan the requested file line by line for the search
                # string, then clean up the downloaded sandbox directory.
                lines = []
                try:
                    lines = open(os.path.join(job, filename)).readlines()
                except Exception as x:
                    errorList.append((job, x))
                for line in lines:
                    if line.count(searchstring):
                        resultDict[job] = line
                rmtree("%s" % (job))
        else:
            errorList.append((job, result["Message"]))
            exitCode = 2

    # NOTE(review): errorList is collected but never printed here.
    for result in resultDict.items():
        print(result)

    DIRAC.exit(exitCode)
예제 #33
0
def Flux(args=None):
    """Submit MARS flux-computation grid jobs over lists of stereo DST LFNs.

    Expected ``args`` (exactly five, otherwise help is shown):
        [particle, direction, MOD, exten, zenName]

    The LFN list is read from ``./stereofiles/lfn_<particle>_<exten>_<zenName>_<direction>.lfns``
    and split into groups of ``filesPerJob`` (10 for protons, 20 otherwise).
    For each group a DIRAC Job running ``./flux.sh`` is submitted, unless the
    group's index appears in the corresponding ``./stereofiles/done/...`` file,
    in which case that job is skipped.

    Side effects: rewrites the sandbox list file
    ``lfnStereoFiles_<particle>_<exten>_test_<direction>.txt`` once per group
    and submits jobs via Dirac().submit(); no value is returned.
    """
    from DIRAC.Interfaces.API.Dirac import Dirac
    from DIRAC.Interfaces.API.Job import Job
    import time
    import os.path

    user_script = './flux.sh'
    modmacro = './CTAflux_speeed.C'
    site = "PARANAL"

    if (len(args) != 5):
        Script.showHelp()

    typeofdata = "test"
    particle = args[0]
    direction = args[1]
    MOD = args[2]
    exten = args[3]
    zenName = args[4]

    # List of files over which flux should be run
    LFN_file = "./stereofiles/lfn_%s_%s_%s_%s.lfns" % (particle, exten,
                                                       zenName, direction)

    # Read the LFN list once; the previous version opened the file twice
    # (once just to count lines) and leaked both handles.
    with open(LFN_file, 'r') as lfn_fd:
        lfn_lines = lfn_fd.readlines()
    fileLength = len(lfn_lines)

    if particle == "proton":
        filesPerJob = 10
    else:
        filesPerJob = 20

    fileList = []
    text_file_name = "lfnStereoFiles_%s_%s_%s_%s.txt" % (particle, exten,
                                                         typeofdata, direction)
    text_file = open(text_file_name, "w")

    # File containing the id number of files already produced. The relaunch of these jobs will be skipped
    done_file_name = "./stereofiles/done/done_%s_%s_%s_%s.lfns" % (
        particle, exten, zenName, direction)

    if os.path.exists(done_file_name):
        with open(done_file_name, 'r') as done_fd:
            done_content = [int(line.strip()) for line in done_fd]
    else:
        done_content = []

    loop = 0
    iJob = 0

    for line in lfn_lines:
        loop = loop + 1
        infileLFN = line.strip()

        fileList.append(infileLFN)
        text_file.write("%s\n" % infileLFN)
        remain = loop % filesPerJob

        if remain == 0 or loop == fileLength:
            iJob = iJob + 1

            # Skipping of already finished jobs
            if iJob in done_content:
                text_file.close()
                fileList = []
                text_file = open(text_file_name, "w")
                continue

            j = Job()
            text_file.close()
            j.setInputSandbox([
                user_script, "setupPackageMARS.sh", "CheckFileZombie.C",
                text_file_name, modmacro
            ])

            jobName = "%s_%s_%s_%s_%s_%s_%s" % (user_script, site,
                                                particle, direction, iJob,
                                                exten, zenName)
            jobOut = "%s_%s_%s_%s_%s.out" % (user_script, site, particle,
                                             direction, iJob)
            script_args = "%s %s %s %s %s %s %s" % (
                particle, site, iJob, direction, MOD, exten, zenName)

            j.setExecutable(user_script, script_args)
            j.setOutputSandbox([jobOut, "applicationLog.txt"])
            j.setName(jobName)
            j.setBannedSites([
                'LCG.MSFG.fr', 'LCG.M3PEC.fr', 'LCG.OBSPM.fr',
                'LCG.UNI-DORTMUND.de', 'LCG.UNIV-LILLE.fr',
                'LCG.Prague.cz', 'LCG.GRIF.fr'
            ])
            Script.gLogger.info(j._toJDL())
            # Parentheses keep this line valid under both Python 2 and 3
            # (a single parenthesised expression prints identically in py2).
            print("Submitting job %s %s %s %s %s %s" % (
                user_script, zenName, particle, direction, site, iJob))
            time.sleep(3)
            Dirac().submit(j)
            fileList = []
            text_file = open(text_file_name, "w")

    # The loop always leaves a freshly re-opened sandbox list file behind;
    # close it instead of leaking the handle.
    text_file.close()
Example #34
0
def main():
    """Run one corsika / sim_telarray / read_cta production step on a worker node.

    Flow: register command-line switches, check LFC/DFC catalog coherence,
    create production directory metadata, run corsika, optionally upload the
    corsika output (savecorsika == 'True'), then for each simtel configuration
    move and quality-check the sim_telarray data/log/histogram files, upload
    them, run read_cta to produce DST files, quality-check and upload those,
    and record file-level metadata and ancestry in the DIRAC file catalog.

    Relies on module-level globals (run_number, particle, thetaP, phiP,
    obslev, version, mode, savecorsika, nbShowers, cscat, corsikaDirPath,
    corsikaParticleDirPath, storage_element, ...) set by the switch callbacks
    and by createGlobalsFromConfigFiles(); exits the process via DIRAC.exit()
    on any failure.
    """

    from DIRAC.Core.Base import Script

    Script.registerSwitch("p:", "run_number=", "Run Number", setRunNumber)
    Script.registerSwitch("T:", "template=", "Template", setCorsikaTemplate)
    Script.registerSwitch("E:", "executable=", "Executable", setExecutable)
    Script.registerSwitch("S:", "simtelConfig=", "SimtelConfig", setConfig)
    Script.registerSwitch("V:", "version=", "Version", setVersion)
    Script.registerSwitch("M:", "mode=", "Mode", setMode)
    Script.registerSwitch("C:", "savecorsika=", "Save Corsika", setSaveCorsika)

    from DIRAC.Resources.Catalog.FileCatalogClient import FileCatalogClient
    from DIRAC.Resources.Catalog.FileCatalog import FileCatalog

    Script.parseCommandLine()
    global fcc, fcL, storage_element

    from CTADIRAC.Core.Utilities.SoftwareInstallation import getSoftwareEnviron
    from CTADIRAC.Core.Utilities.SoftwareInstallation import installSoftwareEnviron
    from CTADIRAC.Core.Utilities.SoftwareInstallation import workingArea
    from CTADIRAC.Core.Workflow.Modules.CorsikaApp import CorsikaApp
    from CTADIRAC.Core.Workflow.Modules.Read_CtaApp import Read_CtaApp
    from DIRAC.Core.Utilities.Subprocess import systemCall

    # JOBID is read from the environment (presumably set by the DIRAC pilot
    # — TODO confirm); int() raises if it is missing or non-numeric.
    jobID = os.environ['JOBID']
    jobID = int(jobID)
    global jobReport
    jobReport = JobReport(jobID)

    ###########
    ## Checking MD coherence
    fc = FileCatalog('LcgFileCatalog')
    res = fc._getCatalogConfigDetails('DIRACFileCatalog')
    print 'DFC CatalogConfigDetails:', res
    res = fc._getCatalogConfigDetails('LcgFileCatalog')
    print 'LCG CatalogConfigDetails:', res

    fcc = FileCatalogClient()
    fcL = FileCatalog('LcgFileCatalog')

    from DIRAC.Interfaces.API.Dirac import Dirac
    dirac = Dirac()

    #############
    simtelConfigFilesPath = 'sim_telarray/multi'
    simtelConfigFile = simtelConfigFilesPath + '/multi_cta-ultra5.cfg'
    #simtelConfigFile = simtelConfigFilesPath + '/multi_cta-prod1s.cfg'
    createGlobalsFromConfigFiles('prodConfigFile', corsikaTemplate, version)

    ######################Building prod Directory Metadata #######################
    resultCreateProdDirMD = createProdFileSystAndMD()
    if not resultCreateProdDirMD['OK']:
        DIRAC.gLogger.error('Failed to create prod Directory MD')
        jobReport.setApplicationStatus('Failed to create prod Directory MD')
        DIRAC.gLogger.error('Metadata coherence problem, no file produced')
        DIRAC.exit(-1)
    else:
        print 'prod Directory MD successfully created'

    ######################Building corsika Directory Metadata #######################

    resultCreateCorsikaDirMD = createCorsikaFileSystAndMD()
    if not resultCreateCorsikaDirMD['OK']:
        DIRAC.gLogger.error('Failed to create corsika Directory MD')
        jobReport.setApplicationStatus('Failed to create corsika Directory MD')
        DIRAC.gLogger.error(
            'Metadata coherence problem, no corsikaFile produced')
        DIRAC.exit(-1)
    else:
        print 'corsika Directory MD successfully created'

    ############ Producing Corsika File
    global CorsikaSimtelPack
    CorsikaSimtelPack = os.path.join('corsika_simhessarray', version,
                                     'corsika_simhessarray')
    install_CorsikaSimtelPack(version, 'sim')
    cs = CorsikaApp()
    cs.setSoftwarePackage(CorsikaSimtelPack)
    cs.csExe = executable
    cs.csArguments = [
        '--run-number', run_number, '--run', 'corsika', corsikaTemplate
    ]
    #############################
    corsikaReturnCode = cs.execute()

    if corsikaReturnCode != 0:
        DIRAC.gLogger.error('Corsika Application: Failed')
        jobReport.setApplicationStatus('Corsika Application: Failed')
        DIRAC.exit(-1)

    # Pause 60 s before inspecting the corsika logs; a non-zero shell status
    # (e.g. the sleep was interrupted) aborts the job.
    if (os.system('sleep 60')):
        DIRAC.exit(-1)

    # grep exits 0 when 'Broken' IS found in the log, i.e. a broken pipe.
    if (os.system('grep Broken corsika_autoinputs.log') == 0):
        DIRAC.gLogger.error('Broken string found in corsika_autoinputs.log')
        jobReport.setApplicationStatus('Broken pipe')
        DIRAC.exit(-1)

###################### rename of corsika output file #######################
    rundir = 'run' + run_number
    #  filein = rundir + '/' + corsikaOutputFileName
    corsikaFileName = particle + '_' + thetaP + '_' + phiP + '_alt' + obslev + '_' + 'run' + run_number + '.corsika.gz'
    #  cmd = 'mv ' + filein + ' ' + corsikaFileName
    #  if(os.system(cmd)):
    #    DIRAC.exit( -1 )
    ########################

    ########################
    ## files spread in 1000-runs subDirectories
    # e.g. run 12345 -> series directory '012xxx'
    runNum = int(run_number)
    subRunNumber = '%03d' % runNum
    runNumModMille = runNum % 1000
    runNumTrunc = (runNum - runNumModMille) / 1000
    runNumSeriesDir = '%03dxxx' % runNumTrunc
    print 'runNumSeriesDir=', runNumSeriesDir

    ### create corsika tar luisa ####################
    corsikaTarName = particle + '_' + thetaP + '_' + phiP + '_alt' + obslev + '_' + 'run' + run_number + '.corsika.tar.gz'
    filetar1 = rundir + '/' + 'input'
    filetar2 = rundir + '/' + 'DAT' + run_number + '.dbase'
    filetar3 = rundir + '/run' + str(int(run_number)) + '.log'
    cmdTuple = [
        '/bin/tar', 'zcf', corsikaTarName, filetar1, filetar2, filetar3
    ]
    DIRAC.gLogger.notice('Executing command tuple:', cmdTuple)
    ret = systemCall(0, cmdTuple, sendOutput)
    if not ret['OK']:
        DIRAC.gLogger.error('Failed to execute tar')
        DIRAC.exit(-1)

######################################################
    corsikaOutFileDir = os.path.join(corsikaDirPath, particle, 'Data',
                                     runNumSeriesDir)
    corsikaOutFileLFN = os.path.join(corsikaOutFileDir, corsikaFileName)
    corsikaRunNumberSeriesDirExist = fcc.isDirectory(
        corsikaOutFileDir)['Value']['Successful'][corsikaOutFileDir]
    newCorsikaRunNumberSeriesDir = (
        corsikaRunNumberSeriesDirExist != True
    )  # if new runFileSeries, will need to add new MD

    #### create a file to DISABLE_WATCHDOG_CPU_WALLCLOCK_CHECK ################
    # The file's existence is the flag; it is intentionally left empty.
    f = open('DISABLE_WATCHDOG_CPU_WALLCLOCK_CHECK', 'w')
    f.close()

    if savecorsika == 'True':
        DIRAC.gLogger.notice('Put and register corsika File in LFC and DFC:',
                             corsikaOutFileLFN)
        ret = dirac.addFile(corsikaOutFileLFN, corsikaFileName,
                            storage_element)  # obsolete

        res = CheckCatalogCoherence(corsikaOutFileLFN)

        if res != DIRAC.S_OK:
            DIRAC.gLogger.error('Job failed: Catalog Coherence problem found')
            jobReport.setApplicationStatus('OutputData Upload Error')
            DIRAC.exit(-1)

        if not ret['OK']:
            DIRAC.gLogger.error('Error during addFile call:', ret['Message'])
            jobReport.setApplicationStatus('OutputData Upload Error')
            DIRAC.exit(-1)

    # put and register corsikaTarFile:
        corsikaTarFileDir = os.path.join(corsikaDirPath, particle, 'Log',
                                         runNumSeriesDir)
        corsikaTarFileLFN = os.path.join(corsikaTarFileDir, corsikaTarName)

        DIRAC.gLogger.notice(
            'Put and register corsikaTar File in LFC and DFC:',
            corsikaTarFileLFN)
        ret = dirac.addFile(corsikaTarFileLFN, corsikaTarName,
                            storage_element)  # obsolete

        ####Checking and restablishing catalog coherence #####################
        res = CheckCatalogCoherence(corsikaTarFileLFN)
        if res != DIRAC.S_OK:
            DIRAC.gLogger.error('Job failed: Catalog Coherence problem found')
            jobReport.setApplicationStatus('OutputData Upload Error')
            DIRAC.exit(-1)

        if not ret['OK']:
            DIRAC.gLogger.error('Error during addFile call:', ret['Message'])
            jobReport.setApplicationStatus('OutputData Upload Error')
            DIRAC.exit(-1)
######################################################################

        if newCorsikaRunNumberSeriesDir:
            insertRunFileSeriesMD(corsikaOutFileDir, runNumTrunc)
            insertRunFileSeriesMD(corsikaTarFileDir, runNumTrunc)

###### insert corsika File Level metadata ############################################
        corsikaFileMD = {}
        corsikaFileMD['runNumber'] = int(run_number)
        corsikaFileMD['jobID'] = jobID
        corsikaFileMD['corsikaReturnCode'] = corsikaReturnCode
        corsikaFileMD['nbShowers'] = nbShowers

        result = fcc.setMetadata(corsikaOutFileLFN, corsikaFileMD)
        print "result setMetadata=", result
        if not result['OK']:
            print 'ResultSetMetadata:', result['Message']

        result = fcc.setMetadata(corsikaTarFileLFN, corsikaFileMD)
        print "result setMetadata=", result
        if not result['OK']:
            print 'ResultSetMetadata:', result['Message']

#####  Exit now if only corsika simulation required
    if (mode == 'corsika_standalone'):
        DIRAC.exit()

############ Producing SimTel File
######################Building simtel Directory Metadata #######################

    # Map simtel configuration tags to sim_telarray production config names.
    cfg_dict = {
        "4MSST": 'cta-prod2-4m-dc',
        "SCSST": 'cta-prod2-sc-sst',
        "STD": 'cta-prod2',
        "NSBX3": 'cta-prod2',
        "ASTRI": 'cta-prod2-astri',
        "SCMST": 'cta-prod2-sc3',
        "NORTH": 'cta-prod2n'
    }

    #  if simtelConfig=="6INROW":
    #    all_configs=["4MSST","SCSST","ASTRI","NSBX3","STD","SCMST"]
    #  elif simtelConfig=="5INROW":
    #    all_configs=["4MSST","SCSST","ASTRI","NSBX3","STD"]
    #  elif simtelConfig=="3INROW":
    #    all_configs=["SCSST","STD","SCMST"]
    #  else:
    #    all_configs=[simtelConfig]

    ########## Added as default ###################
    if simtelConfig == "3INROW":
        all_configs = ["SCSST", "STD", "SCMST"]
    else:
        DIRAC.gLogger.error('Not valid simtelConfig: choose 3INROW')
        DIRAC.exit(-1)

    for current_conf in all_configs:
        DIRAC.gLogger.notice('current conf is', current_conf)
        if current_conf == "SCMST":
            current_version = version + '_sc3'
            DIRAC.gLogger.notice('current version is', current_version)
            #      if os.path.isdir('sim_telarray'):
            #        DIRAC.gLogger.notice('Package found in the local area. Removing package...')
            #        cmd = 'rm -R sim_telarray corsika-6990 hessioxxx corsika-run'
            #        if(os.system(cmd)):
            #          DIRAC.exit( -1 )
            #        install_CorsikaSimtelPack(current_version)
            installSoftwareEnviron(CorsikaSimtelPack, workingArea(), 'sim-sc3')
        else:
            current_version = version
            DIRAC.gLogger.notice('current version is', current_version)

########################################################

        global simtelDirPath
        global simtelProdVersion

        simtelProdVersion = current_version + '_simtel'
        simtelDirPath = os.path.join(corsikaParticleDirPath, simtelProdVersion)

        resultCreateSimtelDirMD = createSimtelFileSystAndMD(current_conf)
        if not resultCreateSimtelDirMD['OK']:
            DIRAC.gLogger.error('Failed to create simtelArray Directory MD')
            jobReport.setApplicationStatus(
                'Failed to create simtelArray Directory MD')
            DIRAC.gLogger.error(
                'Metadata coherence problem, no simtelArray File produced')
            DIRAC.exit(-1)
        else:
            DIRAC.gLogger.notice('simtel Directory MD successfully created')

############## check simtel data file LFN exists ########################
        simtelFileName = particle + '_' + str(thetaP) + '_' + str(
            phiP) + '_alt' + str(
                obslev) + '_' + 'run' + run_number + '.simtel.gz'
        simtelDirPath_conf = simtelDirPath + '_' + current_conf
        simtelOutFileDir = os.path.join(simtelDirPath_conf, 'Data',
                                        runNumSeriesDir)
        simtelOutFileLFN = os.path.join(simtelOutFileDir, simtelFileName)

        # Skip configurations whose output is already registered coherently.
        res = CheckCatalogCoherence(simtelOutFileLFN)
        if res == DIRAC.S_OK:
            DIRAC.gLogger.notice('Current conf already done', current_conf)
            continue

#### execute simtelarray ################

####################################

##   check simtel data/log/histo Output File exist
        # NOTE(review): no explicit sim_telarray invocation appears here;
        # its outputs are expected under sim/Data/ — confirm the corsika
        # step above drives the simtel run.
        cfg = cfg_dict[current_conf]
        cmd = 'mv sim/Data/sim_telarray/' + cfg + '/0.0deg/Data/*' + cfg + '_*.simtel.gz ' + simtelFileName
        if (os.system(cmd)):
            DIRAC.exit(-1)

############################################
        simtelRunNumberSeriesDirExist = fcc.isDirectory(
            simtelOutFileDir)['Value']['Successful'][simtelOutFileDir]
        newSimtelRunFileSeriesDir = (
            simtelRunNumberSeriesDirExist != True
        )  # if new runFileSeries, will need to add new MD

        simtelLogFileName = particle + '_' + str(thetaP) + '_' + str(
            phiP) + '_alt' + str(obslev) + '_' + 'run' + run_number + '.log.gz'
        cmd = 'mv sim/Data/sim_telarray/' + cfg + '/0.0deg/Log/*' + cfg + '_*.log.gz ' + simtelLogFileName
        if (os.system(cmd)):
            DIRAC.exit(-1)
        simtelOutLogFileDir = os.path.join(simtelDirPath_conf, 'Log',
                                           runNumSeriesDir)
        simtelOutLogFileLFN = os.path.join(simtelOutLogFileDir,
                                           simtelLogFileName)

        simtelHistFileName = particle + '_' + str(thetaP) + '_' + str(
            phiP) + '_alt' + str(
                obslev) + '_' + 'run' + run_number + '.hdata.gz'
        cmd = 'mv sim/Data/sim_telarray/' + cfg + '/0.0deg/Histograms/*' + cfg + '_*.hdata.gz ' + simtelHistFileName
        if (os.system(cmd)):
            DIRAC.exit(-1)
        simtelOutHistFileDir = os.path.join(simtelDirPath_conf, 'Histograms',
                                            runNumSeriesDir)
        simtelOutHistFileLFN = os.path.join(simtelOutHistFileDir,
                                            simtelHistFileName)

        ########### quality check on Histo #############################################
        # The generated script compares the simulated-shower count in
        # histogram 6 against nbShowers*cscat with a 20-event tolerance.
        fd = open('check_histo.sh', 'w')
        fd.write("""#! /bin/sh  
nsim=$(list_histograms %s|fgrep 'Histogram 6 '|sed 's/^.*contents: //'| sed 's:/.*$::')
nevents=%d
if [ $nsim -lt $(( $nevents - 20 )) ]; then
echo 'nsim found:' $nsim
echo 'nsim expected:' $nevents
exit 1
else
echo 'nsim found:' $nsim
echo 'nsim expected:' $nevents
fi
""" % (simtelHistFileName, int(nbShowers) * int(cscat)))
        fd.close()

        ret = getSoftwareEnviron(CorsikaSimtelPack)

        if not ret['OK']:
            error = ret['Message']
            DIRAC.gLogger.error(error, CorsikaSimtelPack)
            DIRAC.exit(-1)

        corsikaEnviron = ret['Value']

        os.system('chmod u+x check_histo.sh')
        cmdTuple = ['./check_histo.sh']
        DIRAC.gLogger.notice('Executing command tuple:', cmdTuple)
        ret = systemCall(0, cmdTuple, sendOutput, env=corsikaEnviron)
        checkHistoReturnCode, stdout, stderr = ret['Value']

        if not ret['OK']:
            DIRAC.gLogger.error('Failed to execute check_histo.sh')
            DIRAC.gLogger.error('check_histo.sh status is:',
                                checkHistoReturnCode)
            DIRAC.exit(-1)

        if (checkHistoReturnCode != 0):
            DIRAC.gLogger.error('Failure during check_histo.sh')
            DIRAC.gLogger.error('check_histo.sh status is:',
                                checkHistoReturnCode)
            jobReport.setApplicationStatus('Histo check Failed')
            DIRAC.exit(-1)

########## quality check on Log #############################
        # grep exits non-zero when 'Finished.' is absent from the log.
        cmd = 'zcat %s | grep Finished.' % simtelLogFileName
        DIRAC.gLogger.notice('Executing system call:', cmd)
        if (os.system(cmd)):
            jobReport.setApplicationStatus('Log check Failed')
            DIRAC.exit(-1)

################################################
        from DIRAC.Core.Utilities import List
        from DIRAC.ConfigurationSystem.Client.Helpers.Operations import Operations
        opsHelper = Operations()

        global seList
        # Randomize the SE list so uploads spread across storage elements.
        seList = opsHelper.getValue('ProductionOutputs/SimtelProd', [])
        seList = List.randomize(seList)

        DIRAC.gLogger.notice('SeList is:', seList)

        #########  Upload simtel data/log/histo ##############################################

        res = upload_to_seList(simtelOutFileLFN, simtelFileName)

        if res != DIRAC.S_OK:
            DIRAC.gLogger.error('OutputData Upload Error', simtelOutFileLFN)
            jobReport.setApplicationStatus('OutputData Upload Error')
            DIRAC.exit(-1)

        res = CheckCatalogCoherence(simtelOutLogFileLFN)
        if res == DIRAC.S_OK:
            DIRAC.gLogger.notice('Log file already exists. Removing:',
                                 simtelOutLogFileLFN)
            ret = dirac.removeFile(simtelOutLogFileLFN)

        res = upload_to_seList(simtelOutLogFileLFN, simtelLogFileName)

        if res != DIRAC.S_OK:
            DIRAC.gLogger.error('Upload simtel Log Error', simtelOutLogFileLFN)
            DIRAC.gLogger.notice('Removing simtel data file:',
                                 simtelOutFileLFN)
            ret = dirac.removeFile(simtelOutFileLFN)
            jobReport.setApplicationStatus('OutputData Upload Error')
            DIRAC.exit(-1)

        res = CheckCatalogCoherence(simtelOutHistFileLFN)
        if res == DIRAC.S_OK:
            DIRAC.gLogger.notice('Histo file already exists. Removing:',
                                 simtelOutHistFileLFN)
            ret = dirac.removeFile(simtelOutHistFileLFN)

        res = upload_to_seList(simtelOutHistFileLFN, simtelHistFileName)

        if res != DIRAC.S_OK:
            DIRAC.gLogger.error('Upload simtel Histo Error',
                                simtelOutHistFileLFN)
            DIRAC.gLogger.notice('Removing simtel data file:',
                                 simtelOutFileLFN)
            ret = dirac.removeFile(simtelOutFileLFN)
            DIRAC.gLogger.notice('Removing simtel log file:',
                                 simtelOutLogFileLFN)
            ret = dirac.removeFile(simtelOutLogFileLFN)
            jobReport.setApplicationStatus('OutputData Upload Error')
            DIRAC.exit(-1)

#    simtelRunNumberSeriesDirExist = fcc.isDirectory(simtelOutFileDir)['Value']['Successful'][simtelOutFileDir]
#    newSimtelRunFileSeriesDir = (simtelRunNumberSeriesDirExist != True)  # if new runFileSeries, will need to add new MD

        if newSimtelRunFileSeriesDir:
            print 'insertRunFileSeriesMD'
            insertRunFileSeriesMD(simtelOutFileDir, runNumTrunc)
            insertRunFileSeriesMD(simtelOutLogFileDir, runNumTrunc)
            insertRunFileSeriesMD(simtelOutHistFileDir, runNumTrunc)
        else:
            print 'NotinsertRunFileSeriesMD'

###### simtel File level metadata ############################################
        simtelFileMD = {}
        simtelFileMD['runNumber'] = int(run_number)
        simtelFileMD['jobID'] = jobID
        # NOTE(review): simtelReturnCode is copied from corsikaReturnCode —
        # confirm this is intentional and not a leftover.
        simtelReturnCode = corsikaReturnCode
        simtelFileMD['simtelReturnCode'] = simtelReturnCode

        result = fcc.setMetadata(simtelOutFileLFN, simtelFileMD)
        print "result setMetadata=", result
        if not result['OK']:
            print 'ResultSetMetadata:', result['Message']

        result = fcc.setMetadata(simtelOutLogFileLFN, simtelFileMD)
        print "result setMetadata=", result
        if not result['OK']:
            print 'ResultSetMetadata:', result['Message']

        result = fcc.setMetadata(simtelOutHistFileLFN, simtelFileMD)
        print "result setMetadata=", result
        if not result['OK']:
            print 'ResultSetMetadata:', result['Message']

        if savecorsika == 'True':
            result = fcc.addFileAncestors(
                {simtelOutFileLFN: {
                    'Ancestors': [corsikaOutFileLFN]
                }})
            print 'result addFileAncestor:', result

            result = fcc.addFileAncestors(
                {simtelOutLogFileLFN: {
                    'Ancestors': [corsikaOutFileLFN]
                }})
            print 'result addFileAncestor:', result

            result = fcc.addFileAncestors(
                {simtelOutHistFileLFN: {
                    'Ancestors': [corsikaOutFileLFN]
                }})
            print 'result addFileAncestor:', result

#####  Exit now if only corsika simulation required
        if (mode == 'corsika_simtel'):
            continue

######### run read_cta #######################################

        rcta = Read_CtaApp()
        rcta.setSoftwarePackage(CorsikaSimtelPack)
        rcta.rctaExe = 'read_cta'

        # Spectral index passed to read_cta's --powerlaw option, per primary.
        powerlaw_dict = {
            'gamma': '-2.57',
            'gamma_ptsrc': '-2.57',
            'proton': '-2.70',
            'electron': '-3.21'
        }
        dstFileName = particle + '_' + str(thetaP) + '_' + str(
            phiP) + '_alt' + str(
                obslev) + '_' + 'run' + run_number + '.simtel-dst0.gz'
        dstHistoFileName = particle + '_' + str(thetaP) + '_' + str(
            phiP) + '_alt' + str(
                obslev) + '_' + 'run' + run_number + '.hdata-dst0.gz'

        #    rcta.rctaArguments = ['-r', '4', '-u', '--integration-scheme', '4', '--integration-window', '7,3', '--tail-cuts', '6,8', '--min-pix', '2', '--min-amp', '20', '--type', '1,0,0,400', '--tail-cuts', '9,12', '--min-amp', '20', '--type', '2,0,0,100', '--tail-cuts', '8,11', '--min-amp', '19', '--type', '3,0,0,40', '--tail-cuts', '6,9', '--min-amp', '15', '--type', '4,0,0,15', '--tail-cuts', '3.7,5.5', '--min-amp', '8', '--dst-level', '0', '--dst-file', dstFileName, '--histogram-file', dstHistoFileName, '--powerlaw', powerlaw_dict[particle], simtelFileName]

        ## added some options starting from Armazones_2K prod.

        rcta.rctaArguments = [
            '-r', '4', '-u', '--integration-scheme', '4',
            '--integration-window', '7,3', '--tail-cuts', '6,8', '--min-pix',
            '2', '--min-amp', '20', '--type', '1,0,0,400', '--tail-cuts',
            '9,12', '--min-amp', '20', '--type', '2,0,0,100', '--tail-cuts',
            '8,11', '--min-amp', '19', '--type', '3,0,0,40', '--tail-cuts',
            '6,9', '--min-amp', '15', '--type', '4,0,0,15', '--tail-cuts',
            '3.7,5.5', '--min-amp', '8', '--type', '5,0,0,70,5.6',
            '--tail-cuts', '2.4,3.2', '--min-amp', '5.6', '--dst-level', '0',
            '--dst-file', dstFileName, '--histogram-file', dstHistoFileName,
            '--powerlaw', powerlaw_dict[particle], simtelFileName
        ]

        rctaReturnCode = rcta.execute()

        if rctaReturnCode != 0:
            DIRAC.gLogger.error('read_cta Application: Failed')
            jobReport.setApplicationStatus('read_cta Application: Failed')
            DIRAC.exit(-1)

######## run dst quality checks ######################################

        # The generated script cross-checks DST histogram event counts
        # (histograms 6/12001/12002) against the DST file's event count.
        fd = open('check_dst_histo.sh', 'w')
        fd.write("""#! /bin/bash  
dsthistfilename=%s
dstfile=%s
n6="$(list_histograms -h 6 ${dsthistfilename} | grep 'Histogram of type' | sed 's/.*bins, //' | sed 's/ entries.//')" 
n12001="$(list_histograms -h 12001 ${dsthistfilename} | grep 'Histogram of type' | sed 's/.*bins, //' | sed 's/ entries.//')" 
if [ $n6 -ne $n12001 ]; then
echo 'n6 found:' $n6
echo 'n12001 found:' $n12001
exit 1
else
echo 'n6 found:' $n6
echo 'n12001 found:' $n12001
fi

n12002="$(list_histograms -h 12002 ${dsthistfilename} | grep 'Histogram of type' | sed 's/.*bins, //' | sed 's/ entries.//')" 
nev="$(statio ${dstfile} | egrep '^2010' | cut -f2)"
if [ -z "$nev" ]; then nev="0"; fi

if [ $nev -ne $n12002 ]; then
echo 'nev found:' $nev
echo 'n12002 found:' $n12002
exit 1
else
echo 'nev found:' $nev
echo 'n12002 found:' $n12002
fi
""" % (dstHistoFileName, dstFileName))
        fd.close()

        os.system('chmod u+x check_dst_histo.sh')
        cmdTuple = ['./check_dst_histo.sh']
        DIRAC.gLogger.notice('Executing command tuple:', cmdTuple)
        ret = systemCall(0, cmdTuple, sendOutput, env=corsikaEnviron)
        checkHistoReturnCode, stdout, stderr = ret['Value']

        if not ret['OK']:
            DIRAC.gLogger.error('Failed to execute check_dst_histo.sh')
            DIRAC.gLogger.error('check_dst_histo.sh status is:',
                                checkHistoReturnCode)
            DIRAC.exit(-1)

        if (checkHistoReturnCode != 0):
            DIRAC.gLogger.error('Failure during check_dst_histo.sh')
            DIRAC.gLogger.error('check_dst_histo.sh status is:',
                                checkHistoReturnCode)
            jobReport.setApplicationStatus('Histo check Failed')
            DIRAC.exit(-1)

############create MD and upload dst data/histo ##########################################################

        global dstDirPath
        global dstProdVersion

        dstProdVersion = current_version + '_dst'
        dstDirPath = os.path.join(simtelDirPath_conf, dstProdVersion)

        dstOutFileDir = os.path.join(dstDirPath, 'Data', runNumSeriesDir)
        dstOutFileLFN = os.path.join(dstOutFileDir, dstFileName)

        resultCreateDstDirMD = createDstFileSystAndMD()
        if not resultCreateDstDirMD['OK']:
            DIRAC.gLogger.error('Failed to create Dst Directory MD')
            jobReport.setApplicationStatus('Failed to create Dst Directory MD')
            DIRAC.gLogger.error(
                'Metadata coherence problem, no Dst File produced')
            DIRAC.exit(-1)
        else:
            DIRAC.gLogger.notice('Dst Directory MD successfully created')
############################################################

        res = CheckCatalogCoherence(dstOutFileLFN)
        if res == DIRAC.S_OK:
            DIRAC.gLogger.notice('dst file already exists. Removing:',
                                 dstOutFileLFN)
            ret = dirac.removeFile(dstOutFileLFN)

        res = upload_to_seList(dstOutFileLFN, dstFileName)

        if res != DIRAC.S_OK:
            DIRAC.gLogger.error('Upload dst Error', dstOutFileLFN)
            jobReport.setApplicationStatus('OutputData Upload Error')
            DIRAC.exit(-1)

##############################################################
        dstHistoFileDir = os.path.join(dstDirPath, 'Histograms',
                                       runNumSeriesDir)
        dstHistoFileLFN = os.path.join(dstHistoFileDir, dstHistoFileName)

        res = CheckCatalogCoherence(dstHistoFileLFN)
        if res == DIRAC.S_OK:
            DIRAC.gLogger.notice('dst histo file already exists. Removing:',
                                 dstHistoFileLFN)
            ret = dirac.removeFile(dstHistoFileLFN)

        res = upload_to_seList(dstHistoFileLFN, dstHistoFileName)

        if res != DIRAC.S_OK:
            DIRAC.gLogger.error('Upload dst Error', dstHistoFileName)
            jobReport.setApplicationStatus('OutputData Upload Error')
            DIRAC.exit(-1)

########### Insert RunNumSeries MD ##########################

        dstRunNumberSeriesDirExist = fcc.isDirectory(
            dstOutFileDir)['Value']['Successful'][dstOutFileDir]
        newDstRunFileSeriesDir = (
            dstRunNumberSeriesDirExist != True
        )  # if new runFileSeries, will need to add new MD

        if newDstRunFileSeriesDir:
            insertRunFileSeriesMD(dstOutFileDir, runNumTrunc)
            insertRunFileSeriesMD(dstHistoFileDir, runNumTrunc)

####### dst File level metadata ###############################################
        dstFileMD = {}
        dstFileMD['runNumber'] = int(run_number)
        dstFileMD['jobID'] = jobID
        dstFileMD['rctaReturnCode'] = rctaReturnCode

        result = fcc.setMetadata(dstOutFileLFN, dstFileMD)
        print "result setMetadata=", result
        if not result['OK']:
            print 'ResultSetMetadata:', result['Message']

        result = fcc.setMetadata(dstHistoFileLFN, dstFileMD)
        print "result setMetadata=", result
        if not result['OK']:
            print 'ResultSetMetadata:', result['Message']

########## set the ancestors for dst #####################################

        result = fcc.addFileAncestors(
            {dstOutFileLFN: {
                'Ancestors': [simtelOutFileLFN]
            }})
        print 'result addFileAncestor:', result

        result = fcc.addFileAncestors(
            {dstHistoFileLFN: {
                'Ancestors': [simtelOutFileLFN]
            }})
        print 'result addFileAncestor:', result


######################################################

    DIRAC.exit()
Example #35
0
    jobGroup = switch[1]
  elif switch[0].lower() == "date":
    date = switch[1]

# For the printed summary use 'Today' when no explicit date was given;
# the actual selectJobs() call below still receives the raw (possibly
# empty) date value.
selDate = date
if not date:
  selDate = 'Today'
# Keep the selection criteria so the non-empty ones can be echoed back
# to the user after a successful query.
conditions = { 'Status':status,
               'MinorStatus':minorStatus,
               'ApplicationStatus':appStatus,
               'Owner':owner,
               'JobGroup':jobGroup,
               'Date':selDate }

from DIRAC.Interfaces.API.Dirac import Dirac
dirac = Dirac()
# Ask the WMS for all job IDs matching the criteria (None values act
# as wildcards).
result = dirac.selectJobs( Status = status,
                           MinorStatus = minorStatus,
                           ApplicationStatus = appStatus,
                           Site = site,
                           Owner = owner,
                           JobGroup = jobGroup,
                           Date = date )
if not result['OK']:
  print 'ERROR %s' % result['Message']
  exitCode = 2
else:
  # Build a human-readable "<name> = <value>" list of the criteria that
  # were actually set.
  conds = []
  for n, v in conditions.items():
    if v:
      conds.append( '%s = %s' % ( n, v ) )
예제 #36
0
 def submitJob(self):
     """Submit the prepared job (``self.__job``) to the DIRAC WMS.

     Returns the S_OK/S_ERROR dictionary from ``Dirac.submitJob``; on
     success ``result['Value']`` contains the new job ID.
     """
     dirac = Dirac()
     res = dirac.submitJob(self.__job)
     if res['OK']:
         gLogger.notice('Job submitted: {0}'.format(res['Value']))
     else:
         # Previously res["Value"] was read unconditionally, which raised
         # KeyError whenever the submission failed; report the error instead.
         gLogger.error('Job submission failed: {0}'.format(res['Message']))
     return res
예제 #37
0
except:
  print 'Failed to get DIRAC username. No proxy set up?'
  sys.exit(1)

# Group every job of this submission under a per-user job group so the
# outputs can later be fetched in one go with --JobGroup.
jobGroup = diracUsername + '.rmsynthesis.000' 
jdl += 'JobGroup = "' + jobGroup + '";\n'

# Echo the final JDL so the user can see exactly what is submitted.
print 'Will submit this DIRAC JDL:'
print '====='
print jdl
print '====='
print
# Submit the job(s)
print 'Attempting to submit job(s) in JobGroup ' + jobGroup
print
dirac = Dirac()
result = dirac.submitJob(jdl)
print
print '====='
print
print 'Submission Result: ',result
print
print '====='
print

# On success, tell the user how to collect the sandbox output later.
if result['OK']:
  print 'Retrieve output with  dirac-wms-job-get-output --JobGroup ' + jobGroup
else:
  print 'There was a problem submitting your job(s) - see above!!!'
print
예제 #38
0
def HapLookupExample(destination=None):
    """Build and submit a HAP lookup-table production job to DIRAC.

    :param destination: optional DIRAC site name; when given, the job is
        pinned to that site via ``setDestination``.
    """
    from CTADIRAC.Interfaces.API.HapApplicationJob import HapApplicationJob
    from DIRAC.Interfaces.API.Dirac import Dirac

    HapVersion = 'v0.16'
    ####### DoCtaIrf option values for Table prod #######################
    AnalysisType = 'ScaleParam'
    EnergyMethod = 'NoEnergy'
    CutsConfig = 'mva_40pe_Wm_E_MST_PSFA_cta0909'
    RunList = 'dstrun'
    Zenith = '20'
    Offset = '0'
    Array = 'array-E.lis'
    ParticleType = 'gamma'
    #### for Energy prod ##################
    #  AnalysisType = 'Energy'
    #  EnergyMethod = 'Oak'

    # DST input files processed by the job (read via the 'Protocol' input
    # data policy set below).
    infileLFNList = [
        '/vo.cta.in2p3.fr/user/a/arrabito/HAP/mini-prod3/DST/gamma/dst_CTA_00283000.root',
        '/vo.cta.in2p3.fr/user/a/arrabito/HAP/mini-prod3/DST/gamma/dst_CTA_00283001.root',
        '/vo.cta.in2p3.fr/user/a/arrabito/HAP/mini-prod3/DST/gamma/dst_CTA_00283002.root',
        '/vo.cta.in2p3.fr/user/a/arrabito/HAP/mini-prod3/DST/gamma/dst_CTA_00283003.root',
        '/vo.cta.in2p3.fr/user/a/arrabito/HAP/mini-prod3/DST/gamma/dst_CTA_00283004.root',
        '/vo.cta.in2p3.fr/user/a/arrabito/HAP/mini-prod3/DST/gamma/dst_CTA_00283005.root',
        '/vo.cta.in2p3.fr/user/a/arrabito/HAP/mini-prod3/DST/gamma/dst_CTA_00283006.root',
        '/vo.cta.in2p3.fr/user/a/arrabito/HAP/mini-prod3/DST/gamma/dst_CTA_00283007.root',
        '/vo.cta.in2p3.fr/user/a/arrabito/HAP/mini-prod3/DST/gamma/dst_CTA_00283008.root',
        '/vo.cta.in2p3.fr/user/a/arrabito/HAP/mini-prod3/DST/gamma/dst_CTA_00283009.root'
    ]

    # Command line options passed through to the HAP application.
    general_opts = ['-V', HapVersion]

    DoCtaIrf_opts = [
        '-A', AnalysisType, '-C', CutsConfig, '-R', RunList, '-Z', Zenith,
        '-O', Offset, '-T', Array, '-M', EnergyMethod, '-P', ParticleType
    ]

    opts = general_opts + DoCtaIrf_opts

    j = HapApplicationJob(opts)

    if destination:
        j.setDestination(destination)

    j.setInputSandbox([
        'LFN:/vo.cta.in2p3.fr/user/a/arrabito/HAP/mini-prod3/conf/v0.1/AnalysisConfig.tar.gz',
        'passphrase'
    ])

    j.setOutputSandbox(['DoCtaIrf.log'])

    j.setName(AnalysisType)

    j.setInputData(infileLFNList)
    j.setInputDataPolicy('Protocol')

    # NOTE(review): if AnalysisType were ever set to anything other than
    # 'ScaleParam' or 'Energy', `outfile` would be unbound below.
    if (AnalysisType == 'ScaleParam'):
        outfile = 'ScaleInfo_' + RunList + '.root'
    elif (AnalysisType == 'Energy'):
        outfile = 'EnergyCalib_' + RunList + '.root'

    j.setOutputData([outfile])

    j.setCPUTime(100000)
    # `Script` is expected to be imported at module level by this example.
    Script.gLogger.info(j._toJDL())
    Dirac().submit(j)
예제 #39
0
class DIRACBackend(GridBackend):
    """Grid backend using the DIRAC API and the GFAL command line tools `gfal-*`.

    Catalogue operations go through the DIRAC FileCatalog / DataManager,
    while direct storage-element access is delegated to the `gfal-*`
    command line utilities driven via the `sh` module.
    """

    def __init__(self, **kwargs):
        GridBackend.__init__(self, catalogue_prefix='', **kwargs)

        from DIRAC.Core.Base import Script
        Script.initialize()
        from DIRAC.FrameworkSystem.Client.ProxyManagerClient import ProxyManagerClient
        self.pm = ProxyManagerClient()

        # Fail early if no usable proxy information is available.
        proxy = self.pm.getUserProxiesInfo()
        if not proxy['OK']:
            raise BackendException("Proxy error.")

        from DIRAC.Interfaces.API.Dirac import Dirac
        self.dirac = Dirac()

        from DIRAC.Resources.Catalog.FileCatalog import FileCatalog
        self.fc = FileCatalog()
        from DIRAC.DataManagementSystem.Client.DataManager import DataManager
        self.dm = DataManager()

        # Pre-baked gfal/dirac commands; `_tty_out=False` stops `sh` from
        # emulating a terminal, which would alter the captured output.
        self._xattr_cmd = sh.Command('gfal-xattr').bake(_tty_out=False)
        self._replica_checksum_cmd = sh.Command('gfal-sum').bake(_tty_out=False)
        self._bringonline_cmd = sh.Command('gfal-legacy-bringonline').bake(_tty_out=False)
        self._cp_cmd = sh.Command('gfal-copy').bake(_tty_out=False)
        self._ls_se_cmd = sh.Command('gfal-ls').bake(color='never', _tty_out=False)
        self._move_cmd = sh.Command('gfal-rename').bake(_tty_out=False)
        self._mkdir_cmd = sh.Command('gfal-mkdir').bake(_tty_out=False)

        self._replicate_cmd = sh.Command('dirac-dms-replicate-lfn').bake(_tty_out=False)
        self._add_cmd = sh.Command('dirac-dms-add-file').bake(_tty_out=False)

    @staticmethod
    def _check_return_value(ret):
        """Raise the appropriate exception if a DIRAC return dict reports failure."""
        if not ret['OK']:
            # BUGFIX: interpolate the message; exception constructors do not
            # support logging-style lazy "%s" argument lists, so the previous
            # form displayed an unformatted tuple.
            raise BackendException("Failed: %s" % (ret['Message'],))
        for path, error in ret['Value']['Failed'].items():
            if ('No such' in error) or ('Directory does not' in error):
                raise DoesNotExistException("No such file or directory.")
            else:
                raise BackendException(error)

    def _is_dir(self, lurl):
        """Return True if `lurl` is a directory in the catalogue."""
        isdir = self.fc.isDirectory(lurl)
        self._check_return_value(isdir)
        return isdir['Value']['Successful'][lurl]

    def _is_file(self, lurl):
        """Return True if `lurl` is a file in the catalogue."""
        isfile = self.fc.isFile(lurl)
        self._check_return_value(isfile)
        return isfile['Value']['Successful'][lurl]

    def _get_dir_entry(self, lurl, infodict=None):
        """Take a lurl and return a DirEntry."""
        # If no dictionary with the information is specified, get it from the catalogue
        try:
            md = infodict['MetaData']
        except TypeError:
            md = self.fc.getFileMetadata(lurl)
            if not md['OK']:
                # BUGFIX: interpolate the message (see _check_return_value).
                raise BackendException("Failed to list path '%s': %s" % (lurl, md['Message']))
            for path, error in md['Value']['Failed'].items():
                if 'No such file' in error:
                    # File does not exist, maybe a directory?
                    md = self.fc.getDirectoryMetadata(lurl)
                    for path, error in md['Value']['Failed'].items():
                        raise DoesNotExistException("No such file or directory.")
                else:
                    raise BackendException(md['Value']['Failed'][lurl])
            md = md['Value']['Successful'][lurl]
        return DirEntry(posixpath.basename(lurl), mode=oct(md.get('Mode', -1)), links=md.get('links', -1), gid=md['OwnerGroup'], uid=md['Owner'], size=md.get('Size', -1), modified=str(md.get('ModificationDate', '?')))

    def _iter_directory(self, lurl):
        """Iterate over entries in a directory, yielding (path, infodict) pairs."""

        ret = self.fc.listDirectory(lurl)
        if not ret['OK']:
            # BUGFIX: interpolate the message (see _check_return_value).
            raise BackendException("Failed to list path '%s': %s" % (lurl, ret['Message']))
        for path, error in ret['Value']['Failed'].items():
            if 'Directory does not' in error:
                # Dir does not exist, maybe a File?
                # BUGFIX: isFile() returns an S_OK/S_ERROR dict that is always
                # truthy; inspect the actual per-lurl boolean instead.
                isfile = self.fc.isFile(lurl)
                if isfile['OK'] and isfile['Value']['Successful'].get(lurl, False):
                    lst = [(lurl, None)]
                    break
                else:
                    raise DoesNotExistException("No such file or Directory.")
            else:
                raise BackendException(ret['Value']['Failed'][lurl])
        else:
            # Sort items by keys, i.e. paths
            lst = sorted(ret['Value']['Successful'][lurl]['Files'].items() + ret['Value']['Successful'][lurl]['SubDirs'].items())

        for item in lst:
            yield item # = path, dict

    def _ls(self, lurl, **kwargs):
        """Yield DirEntry objects for `lurl` (or its children) from the catalogue."""
        # Translate keyword arguments
        d = kwargs.pop('directory', False)

        if d:
            # Just the requested entry itself
            yield self._get_dir_entry(lurl)
            return

        for path, info in self._iter_directory(lurl):
            yield self._get_dir_entry(path, info)

    def _ls_se(self, surl, **kwargs):
        """Yield DirEntry objects for `surl` as reported by `gfal-ls -l`."""
        # Translate keyword arguments
        d = kwargs.pop('directory', False)
        args = []
        # BUGFIX: was `if -d:` -- a typo that only worked by accident
        # because -False == 0 and -True == -1.
        if d:
            args.append('-d')
        args.append('-l')
        args.append(surl)
        try:
            output = self._ls_se_cmd(*args, **kwargs)
        except sh.ErrorReturnCode as e:
            if 'No such file' in e.stderr:
                raise DoesNotExistException("No such file or Directory.")
            else:
                raise BackendException(e.stderr)
        for line in output:
            # `gfal-ls -l` output: mode links gid uid size <date fields> name
            fields = line.split()
            mode, links, gid, uid, size = fields[:5]
            name = fields[-1]
            modified = ' '.join(fields[5:-1])
            yield DirEntry(name, mode=mode, links=int(links), gid=gid, uid=uid, size=int(size), modified=modified)

    def _replicas(self, lurl, **kwargs):
        """Return the list of replica SURLs of `lurl`."""
        # Check the lurl actually exists
        self._ls(lurl, directory=True)

        rep = self.dirac.getReplicas(lurl)
        self._check_return_value(rep)
        rep = rep['Value']['Successful'][lurl]

        return rep.values()

    def _exists(self, surl, **kwargs):
        """Return True if `surl` exists on the SE and is not a directory."""
        try:
            ret = self._ls_se_cmd(surl, '-d', '-l', **kwargs).strip()
        except sh.ErrorReturnCode as e:
            if 'No such file' in e.stderr:
                return False
            else:
                if len(e.stderr) == 0:
                    raise BackendException(e.stdout)
                else:
                    raise BackendException(e.stderr)
        else:
            return ret[0] != 'd' # Return `False` for directories

    def _register(self, surl, lurl, verbose=False, **kwargs):
        """Register an existing physical copy (`surl`) in the file catalogue."""
        se = storage.get_SE(surl).name
        # See if file already exists in DFC
        ret = self.fc.getFileMetadata(lurl)
        try:
            self._check_return_value(ret)
        except DoesNotExistException:
            # Add new file
            size = next(self._ls_se(surl, directory=True)).size
            checksum = self.checksum(surl)
            guid = str(uuid.uuid4()) # The guid does not seem to be important. Make it unique if possible.
            ret = self.dm.registerFile((lurl, surl, size, se, guid, checksum))
        else:
            # Add new replica
            ret = self.dm.registerReplica((lurl, surl, se))

        self._check_return_value(ret)
        if verbose:
            print_("Successfully registered replica %s of %s from %s."%(surl, lurl, se))
        return True

    def _deregister(self, surl, lurl, verbose=False, **kwargs):
        """Remove the replica record of `lurl` on `surl`'s SE from the catalogue."""
        # DIRAC only needs to know the SE name to deregister a replica
        se = storage.get_SE(surl).name
        ret = self.dm.removeReplicaFromCatalog(se, [lurl])
        self._check_return_value(ret)
        if verbose:
            print_("Successfully deregistered replica of %s from %s."%(lurl, se))
        return True

    def _state(self, surl, **kwargs):
        """Return the replica status attribute of `surl`, or '?' if unavailable."""
        try:
            state = self._xattr_cmd(surl, 'user.status', **kwargs).strip()
        except sh.ErrorReturnCode as e:
            if "No such file" in e.stderr:
                raise DoesNotExistException("No such file or Directory.")
            state = '?'
        except sh.SignalException_SIGSEGV:
            state = '?'
        return state

    def _checksum(self, surl, **kwargs):
        """Return the ADLER32 checksum of `surl`, or '?' if it cannot be obtained."""
        try:
            checksum = self._replica_checksum_cmd(surl, 'ADLER32', **kwargs).split()[1]
        except sh.ErrorReturnCode:
            checksum = '?'
        except sh.SignalException_SIGSEGV:
            checksum = '?'
        except IndexError:
            checksum = '?'
        return checksum

    def _bringonline(self, surl, timeout, verbose=False, **kwargs):
        """Request staging of `surl` from tape and poll until online or `timeout` (s)."""
        if verbose:
            out = sys.stdout
        else:
            out = None
        # gfal does not notice when files come online, it seems
        # Just send a single short request, then check regularly
        # (BUGFIX: a duplicated verbose/out assignment block was removed here.)

        end = time.time() + timeout

        try:
            self._bringonline_cmd('-t', 10, surl, _out=out, **kwargs)
        except sh.ErrorReturnCode as e:
            # The command fails if the file is not online
            # To be expected after 10 seconds
            if "No such file" in e.stderr:
                # Except when the file does not actually exist on the tape storage
                raise DoesNotExistException("No such file or Directory.")

        # Poll with exponential back-off, capped by the remaining time.
        wait = 5
        while(True):
            if verbose:
                print_("Checking replica state...")
            if self.is_online(surl):
                if verbose:
                    print_("Replica brought online.")
                return True

            time_left = end - time.time()
            if time_left <= 0:
                if verbose:
                    print_("Could not bring replica online.")
                return False

            wait *= 2
            if time_left < wait:
                wait = time_left

            if verbose:
                print_("Timeout remaining: %d s"%(time_left))
                print_("Checking again in: %d s"%(wait))
            time.sleep(wait)

    def _replicate(self, source_surl, destination_surl, lurl, verbose=False, **kwargs):
        """Replicate `lurl` from the source SE to the destination SE."""
        if verbose:
            out = sys.stdout
        else:
            out = None

        source = storage.get_SE(source_surl).name
        destination = storage.get_SE(destination_surl).name
        try:
            self._replicate_cmd(lurl, destination, source, _out=out, **kwargs)
        except sh.ErrorReturnCode as e:
            if 'No such file' in e.stderr:
                raise DoesNotExistException("No such file or directory.")
            else:
                if len(e.stderr) == 0:
                    raise BackendException(e.stdout)
                else:
                    raise BackendException(e.stderr)

        return True

    def _get(self, surl, localpath, verbose=False, **kwargs):
        """Download `surl` to `localpath`, verifying the ADLER32 checksum."""
        if verbose:
            out = sys.stdout
        else:
            out = None
        try:
            self._cp_cmd('-f', '--checksum', 'ADLER32', surl, localpath, _out=out, **kwargs)
        except sh.ErrorReturnCode as e:
            if 'No such file' in e.stderr:
                raise DoesNotExistException("No such file or directory.")
            else:
                if len(e.stderr) == 0:
                    raise BackendException(e.stdout)
                else:
                    raise BackendException(e.stderr)
        return os.path.isfile(localpath)

    def _put(self, localpath, surl, lurl, verbose=False, **kwargs):
        """Upload `localpath` to the SE of `surl` and register it as `lurl`."""
        if verbose:
            out = sys.stdout
        else:
            out = None
        se = storage.get_SE(surl).name

        try:
            self._add_cmd(lurl, localpath, se, _out=out, **kwargs)
        except sh.ErrorReturnCode as e:
            if 'No such file' in e.stderr:
                raise DoesNotExistException("No such file or directory.")
            else:
                if len(e.stderr) == 0:
                    raise BackendException(e.stdout)
                else:
                    raise BackendException(e.stderr)
        return True

    def _remove(self, surl, lurl, last=False, verbose=False, **kwargs):
        """Remove the replica of `lurl` on `surl`'s SE; remove the LFN itself if `last`."""
        se = storage.get_SE(surl).name

        if last:
            # Delete lfn
            if verbose:
                print_("Removing all replicas of %s."%(lurl,))
            ret = self.dm.removeFile([lurl])
        else:
            if verbose:
                print_("Removing replica of %s from %s."%(lurl, se))
            ret = self.dm.removeReplica(se, [lurl])

        if not ret['OK']:
            raise BackendException('Failed: %s'%(ret['Message']))

        for lurl, error in ret['Value']['Failed'].items():
            if 'No such file' in error:
                raise DoesNotExistException("No such file or directory.")
            else:
                raise BackendException(error)

        return True

    def _rmdir(self, lurl, verbose=False):
        """Remove an empty directory from the catalogue."""
        rep = self.fc.removeDirectory(lurl)
        self._check_return_value(rep)
        return True

    def _move_replica(self, surl, new_surl, verbose=False, **kwargs):
        """Rename a physical replica on the SE, creating the target folder first."""
        if verbose:
            out = sys.stdout
        else:
            out = None

        try:
            folder = posixpath.dirname(new_surl)
            self._mkdir_cmd(folder, '-p', _out=out, **kwargs)
            self._move_cmd(surl, new_surl, _out=out, **kwargs)
        except sh.ErrorReturnCode as e:
            if 'No such file' in e.stderr:
                raise DoesNotExistException("No such file or directory.")
            else:
                if len(e.stderr) == 0:
                    raise BackendException(e.stdout)
                else:
                    raise BackendException(e.stderr)
        return True
예제 #40
0
                                     'Usage:',
                                     '  %s [option|cfgfile] ... JobID ...' % Script.scriptName,
                                     'Arguments:',
                                     '  JobID:    DIRAC Job ID or a name of the file with JobID per line' ] ) )

Script.registerSwitch( "D:", "Dir=", "Store the output in this directory" )
Script.registerSwitch( "f:", "File=", "Get output for jobs with IDs from the file" )
Script.registerSwitch( "g:", "JobGroup=", "Get output for jobs in the given group" )

Script.parseCommandLine( ignoreErrors = True )
args = Script.getPositionalArgs()

from DIRAC.Interfaces.API.Dirac  import Dirac, parseArguments
from DIRAC.Core.Utilities.Time import toString, date, day

dirac = Dirac()
exitCode = 0
errorList = []

outputDir = None
group = None
jobs = []
for sw, value in Script.getUnprocessedSwitches():
  if sw in ( 'D', 'Dir' ):
    outputDir = value
  elif sw.lower() in ( 'f', 'file' ):
    if os.path.exists( value ):
      jFile = open( value )
      jobs += jFile.read().split()
      jFile.close()
  elif sw.lower() in ( 'g', 'jobgroup' ):
예제 #41
0
Script.setUsageMessage('\n'.join([
    __doc__.split('\n')[1], '\nUsage:\n',
    '  %s [option|cfgfile] ... JobID ...' % Script.scriptName,
    '\nArguments:\n', '  JobID:    DIRAC Job ID'
]))

Script.registerSwitch("f:", "File=",
                      "Get status for jobs with IDs from the file")
Script.registerSwitch("g:", "JobGroup=",
                      "Get status for jobs in the given group")

# parseCommandLine must run before the DIRAC API import below.
Script.parseCommandLine(ignoreErrors=True)
args = Script.getPositionalArgs()

from DIRAC.Interfaces.API.Dirac import Dirac
dirac = Dirac()
exitCode = 0

# Collect job IDs from the switches: either read whitespace-separated IDs
# from a file, or select by job group.
jobs = []
for key, value in Script.getUnprocessedSwitches():
    if key.lower() in ('f', 'file'):
        if os.path.exists(value):
            jFile = open(value)
            jobs += jFile.read().split()
            jFile.close()
    elif key.lower() in ('g', 'jobgroup'):
        # Choose jobs no more than 30 days old.
        # (toString/date/day are presumably imported at module level -- TODO confirm)
        jobDate = toString(date() - 30 * day)
        # Choose jobs no more than 30 days old
        result = dirac.selectJobs(jobGroup=value, date=jobDate)
        if not result['OK']:
            print "Error:", result['Message']
예제 #42
0
def main():
    """List and summarise DIRAC jobs at a (BOINC) site.

    Command-line switches select jobs by site, status, worker node, batch
    ID and date range; the script then prints per-status counters and a
    per-worker-node breakdown, optionally with the full job list.
    """
    # Default selection criteria, overridable via switches below.
    site = "BOINC.World.org"
    status = ["Running"]
    minorStatus = None
    workerNodes = None
    since = None
    date = "today"
    full = False
    until = None
    batchIDs = None
    Script.registerSwitch("", "Site=", "   Select site (default: %s)" % site)
    Script.registerSwitch("", "Status=",
                          "   Select status (default: %s)" % status)
    Script.registerSwitch("", "MinorStatus=", "   Select minor status")
    Script.registerSwitch("", "WorkerNode=", "  Select WN")
    Script.registerSwitch("", "BatchID=", "  Select batch jobID")
    Script.registerSwitch(
        "", "Since=",
        "   Date since when to select jobs, or number of days (default: today)"
    )
    Script.registerSwitch("", "Date=",
                          "   Specify the date (check for a full day)")
    Script.registerSwitch(
        "", "Full",
        "   Printout full list of job (default: False except if --WorkerNode)")

    # parseCommandLine must run before importing the DIRAC API modules.
    Script.parseCommandLine()
    from DIRAC import gLogger
    from DIRAC.Interfaces.API.Dirac import Dirac
    from DIRAC.WorkloadManagementSystem.Client.JobMonitoringClient import JobMonitoringClient

    switches = Script.getUnprocessedSwitches()
    for switch in switches:
        if switch[0] == "Site":
            site = switch[1]
        elif switch[0] == "MinorStatus":
            minorStatus = switch[1]
        elif switch[0] == "Status":
            if switch[1].lower() == "all":
                status = [None]
            else:
                status = switch[1].split(",")
        elif switch[0] == "WorkerNode":
            workerNodes = switch[1].split(",")
        elif switch[0] == "BatchID":
            try:
                batchIDs = [int(id) for id in switch[1].split(",")]
            except Exception:
                gLogger.error("Invalid jobID", switch[1])
                DIRAC.exit(1)
        elif switch[0] == "Full":
            full = True
        elif switch[0] == "Date":
            # --Date selects a single full day: [since, until)
            since = switch[1].split()[0]
            until = str(
                datetime.datetime.strptime(since, "%Y-%m-%d") +
                datetime.timedelta(days=1)).split()[0]
        elif switch[0] == "Since":
            # --Since accepts keywords ("today", "yesterday", "ever"),
            # a number of days, or an explicit date string.
            date = switch[1].lower()
            if date == "today":
                since = None
            elif date == "yesterday":
                since = 1
            elif date == "ever":
                since = 2 * 365
            elif date.isdigit():
                since = int(date)
                date += " days"
            else:
                since = date
            if isinstance(since, int):
                since = str(datetime.datetime.now() -
                            datetime.timedelta(days=since)).split()[0]

    # Selecting by WN or batch ID implies a full per-job listing.
    if workerNodes or batchIDs:
        # status = [None]
        full = True

    monitoring = JobMonitoringClient()
    dirac = Dirac()

    # Get jobs according to selection
    jobs = set()
    for stat in status:
        res = dirac.selectJobs(site=site,
                               date=since,
                               status=stat,
                               minorStatus=minorStatus)
        if not res["OK"]:
            gLogger.error("Error selecting jobs", res["Message"])
            DIRAC.exit(1)
        allJobs = set(int(job) for job in res["Value"])
        if until:
            # Subtract jobs newer than the end of the selected day.
            res = dirac.selectJobs(site=site, date=until, status=stat)
            if not res["OK"]:
                gLogger.error("Error selecting jobs", res["Message"])
                DIRAC.exit(1)
            allJobs -= set(int(job) for job in res["Value"])
        jobs.update(allJobs)
    if not jobs:
        gLogger.always("No jobs found...")
        DIRAC.exit(0)
    # res = monitoring.getJobsSummary( jobs )
    # print eval( res['Value'] )[jobs[0]]

    allJobs = set()
    result = {}
    wnJobs = {}
    gLogger.always("%d jobs found" % len(jobs))
    # Get host name
    for job in jobs:
        res = monitoring.getJobParameter(job, "HostName")
        node = res.get("Value", {}).get("HostName", "Unknown")
        res = monitoring.getJobParameter(job, "LocalJobID")
        batchID = res.get("Value", {}).get("LocalJobID", "Unknown")
        # Apply the WN / batch-ID filters (prefix match on the node name).
        # NOTE(review): batchID comes back from the parameter store and may
        # be a string while batchIDs holds ints -- confirm the comparison.
        if workerNodes:
            if not [wn for wn in workerNodes if node.startswith(wn)]:
                continue
            allJobs.add(job)
        if batchIDs:
            if batchID not in batchIDs:
                continue
            allJobs.add(job)
        if full or status == [None]:
            allJobs.add(job)
        result.setdefault(job, {})["Status"] = status
        result[job]["Node"] = node
        result[job]["LocalJobID"] = batchID
        wnJobs[node] = wnJobs.setdefault(node, 0) + 1

    # If necessary get jobs' status
    statusCounters = {}
    if allJobs:
        allJobs = sorted(allJobs, reverse=True)
        res = monitoring.getJobsStates(allJobs)
        if not res["OK"]:
            gLogger.error("Error getting job parameter", res["Message"])
        else:
            jobStates = res["Value"]
            for job in allJobs:
                # Compose "Status; MinorStatus; ApplicationStatus" per job.
                stat = (
                    jobStates.get(job, {}).get("Status", "Unknown") + "; " +
                    jobStates.get(job, {}).get("MinorStatus", "Unknown") +
                    "; " +
                    jobStates.get(job, {}).get("ApplicationStatus", "Unknown"))
                result[job]["Status"] = stat
                statusCounters[stat] = statusCounters.setdefault(stat, 0) + 1
    elif not workerNodes and not batchIDs:
        allJobs = sorted(jobs, reverse=True)

    # Print out result
    if workerNodes or batchIDs:
        gLogger.always("Found %d jobs at %s, WN %s (since %s):" %
                       (len(allJobs), site, workerNodes, date))
        if allJobs:
            gLogger.always("List of jobs:",
                           ",".join([str(job) for job in allJobs]))
    else:
        if status == [None]:
            gLogger.always("Found %d jobs at %s (since %s):" %
                           (len(allJobs), site, date))
            for stat in sorted(statusCounters):
                gLogger.always("%d jobs %s" % (statusCounters[stat], stat))
        else:
            gLogger.always("Found %d jobs %s at %s (since %s):" %
                           (len(allJobs), status, site, date))
        # WNs ordered by descending job count.
        gLogger.always(
            "List of WNs:",
            ",".join([
                "%s (%d)" % (node, wnJobs[node]) for node in sorted(
                    wnJobs,
                    key=cmp_to_key(lambda n1, n2: (wnJobs[n2] - wnJobs[n1])))
            ]),
        )
    if full:
        # Full listing: group by worker node when filtering by WN/batch ID,
        # otherwise list jobs directly.
        if workerNodes or batchIDs:
            nodeJobs = {}
            for job in allJobs:
                status = result[job]["Status"]
                node = result[job]["Node"].split(".")[0]
                jobID = result[job].get("LocalJobID")
                nodeJobs.setdefault(node, []).append((jobID, job, status))
            if not workerNodes:
                workerNodes = sorted(nodeJobs)
            for node in workerNodes:
                for job in nodeJobs.get(node.split(".")[0], []):
                    gLogger.always("%s " % node + "(%s): %s - %s" % job)
        else:
            for job in allJobs:
                status = result[job]["Status"]
                node = result[job]["Node"]
                jobID = result[job].get("LocalJobID")
                gLogger.always("%s (%s): %s - %s" % (node, jobID, job, status))
예제 #43
0
import DIRAC
from DIRAC.Core.Base import Script

Script.setUsageMessage('\n'.join([
    __doc__.split('\n')[1], 'Usage:',
    '  %s [option|cfgfile] ... JobID ...' % Script.scriptName, 'Arguments:',
    '  JobID:    DIRAC Job ID'
]))
# parseCommandLine must run before importing the DIRAC API below.
Script.parseCommandLine(ignoreErrors=True)
args = Script.getPositionalArgs()

if len(args) < 1:
    Script.showHelp()

from DIRAC.Interfaces.API.Dirac import Dirac
dirac = Dirac()
exitCode = 0
errorList = []

# Peek at the standard output of each requested job; collect failures
# and report them at the end, exiting non-zero if any occurred.
for job in args:

    result = dirac.peek(job, printout=True)
    if not result['OK']:
        errorList.append((job, result['Message']))
        exitCode = 2

for error in errorList:
    print "ERROR %s: %s" % error

DIRAC.exit(exitCode)
    result = gProxyManager.downloadProxyToFile(shifter, shifter_group, requiredTimeLeft=10000)
    print result
    if not result["OK"]:
        sys.stderr.write(result["Message"] + "\n")
        sys.stderr.write("No valid proxy found.\n")
        exit(1)

    proxy = result["Value"]
    os.environ["X509_USER_PROXY"] = proxy
    print ("*INFO* using proxy %s" % proxy)

    print "*********************************************************************************************************"
    print "Execution at : '" + time.strftime("%d/%m/%y %H:%M", time.localtime()) + "'"

    try:
        d = Dirac()
    except AttributeError:
        sys.stderr.write(time.strftime("%d/%m/%y %H:%M", time.localtime()) + " => Error loading Dirac monitor\n")
        raise Exception("Error loading Dirac monitor")

    w = RPCClient("WorkloadManagement/JobMonitoring")

    delTime = str(Time.dateTime() - delay_job_handled * Time.day)

    jobid_handled = []

    file_jobhandled = open(filename_jobhandled, "r")
    for line in file_jobhandled:
        try:
            jobid_handled.append(str(int(line)))
        except ValueError:
예제 #45
0
def main():
    """Print information about grid pilots and (optionally) their jobs.

    For each pilot reference given on the command line, fetch the pilot
    info from the DIRAC admin interface; with -e/--extended also print
    selected attributes of every job run by the pilot.
    """
    global extendedPrint
    Script.registerSwitch('e', 'extended', 'Get extended printout',
                          setExtendedPrint)
    # parseCommandLine must run before importing the DIRAC API modules.
    Script.parseCommandLine(ignoreErrors=True)

    from DIRAC import exit as DIRACExit
    from DIRAC.Interfaces.API.Dirac import Dirac
    from DIRAC.Interfaces.API.DiracAdmin import DiracAdmin

    args = Script.getPositionalArgs()

    if len(args) < 1:
        Script.showHelp()

    diracAdmin = DiracAdmin()
    dirac = Dirac()
    exitCode = 0
    errorList = []

    for gridID in args:
        result = diracAdmin.getPilotInfo(gridID)
        if not result['OK']:
            errorList.append((gridID, result['Message']))
            exitCode = 2
        else:
            res = result['Value'][gridID]
            if extendedPrint:
                # Print selected pilot attributes; `tab` indents every
                # line after the first one.
                tab = ''
                for key in [
                        'PilotJobReference',
                        'Status',
                        'OwnerDN',
                        'OwnerGroup',
                        'SubmissionTime',
                        'DestinationSite',
                        'GridSite',
                ]:
                    if key in res:
                        diracAdmin.log.notice('%s%s: %s' %
                                              (tab, key, res[key]))
                        if not tab:
                            tab = '  '
                diracAdmin.log.notice('')
                # For each job run by this pilot, print its main attributes.
                for jobID in res['Jobs']:
                    tab = '  '
                    result = dirac.getJobAttributes(int(jobID))
                    if not result['OK']:
                        errorList.append((gridID, result['Message']))
                        exitCode = 2
                    else:
                        job = result['Value']
                        diracAdmin.log.notice('%sJob ID: %s' % (tab, jobID))
                        tab += '  '
                        for key in [
                                'OwnerDN', 'OwnerGroup', 'JobName', 'Status',
                                'StartExecTime', 'LastUpdateTime',
                                'EndExecTime'
                        ]:
                            if key in job:
                                diracAdmin.log.notice('%s%s:' % (tab, key),
                                                      job[key])
                diracAdmin.log.notice('')
            else:
                # Default: pretty-print the raw pilot info dictionary.
                print(diracAdmin.pPrint.pformat({gridID: res}))

    for error in errorList:
        print("ERROR %s: %s" % error)

    DIRACExit(exitCode)
from DIRAC.Core.Base import Script

Script.setUsageMessage( '\n'.join( [ __doc__.split( '\n' )[1],
                                     'Usage:',
                                     '  %s [option|cfgfile] ... JobID ...' % Script.scriptName,
                                     'Arguments:',
                                     '  JobID:    DIRAC Job ID' ] ) )
Script.registerSwitch( "D:", "Dir=", "Store the output in this directory" )
# parseCommandLine must run before importing the DIRAC API below.
Script.parseCommandLine( ignoreErrors = True )
args = Script.getPositionalArgs()

if len( args ) < 1:
  Script.showHelp()

from DIRAC.Interfaces.API.Dirac                              import Dirac, parseArguments
dirac = Dirac()
exitCode = 0
errorList = []

# Optional -D/--Dir switch: where to store the retrieved output data.
outputDir = ''
for sw, v in Script.getUnprocessedSwitches():
  if sw in ( 'D', 'Dir' ):
    outputDir = v

# Download the output data of each job; collect failures for reporting.
for job in parseArguments( args ):

  result = dirac.getJobOutputData( job, destinationDir = outputDir )
  if result['OK']:
    print 'Job %s output data retrieved' % ( job )
  else:
    errorList.append( ( job, result['Message'] ) )
예제 #47
0
        "../tools/StorageElement.py"
    ]
# these files are created by the job and shipped back to the user
# in the output sandbox when the job finishes
outputSandbox =\
    [
        stdout,
        stderr,
        logfile
    ]

# the executible here '' is later set, so don't confuse users later on
# (the real program is attached below via setExecutable)
diracJob = Job('', stdout, stderr)

# give a descriptive name
diracJob.setName('ND280Custom')

# set the program/executable, arguments, logFile, ...
diracJob.setExecutable(exe, arguments=args, logFile=logfile)

# set the job length (CPU-time limit for the job)
diracJob.setCPUTime(3600)

# environment variables, input files and files returned by the job
diracJob.setExecutionEnv(environmentDict)
diracJob.setInputSandbox(inputSandbox)
diracJob.setOutputSandbox(outputSandbox)

# submit the job to the DIRAC WMS and report the raw submission result
print 'job being submitted...'
dirac = Dirac()
result = dirac.submit(diracJob)
print 'Submission Result: ', result
예제 #48
0
# Command-line arguments: LFN SE [sourceSE [localCache]]
args = Script.getPositionalArgs()

if len(args) < 2 or len(args) > 4:
  Script.showHelp()

lfn = args[0]
seName = args[1]
sourceSE = ''
localCache = ''
if len(args) > 2:
  sourceSE = args[2]
if len(args) == 4:
  localCache = args[3]

from DIRAC.Interfaces.API.Dirac import Dirac
dirac = Dirac()
exitCode = 0

# The first argument may be a single LFN or a local file holding one LFN
# per line.  If it cannot be read as a file, treat it as a literal LFN.
# Use a context manager so the handle is always closed, and catch only
# file-access errors instead of a bare except that hides real bugs.
try:
  with open(lfn, 'r') as lfnFile:
    lfns = lfnFile.read().splitlines()
except (IOError, OSError):
  lfns = [lfn]

# Replicate each LFN to the requested storage element.
finalResult = {"Failed": [], "Successful": []}
for lfn in lfns:
  result = dirac.replicateFile(lfn, seName, sourceSE, localCache, printOutput=True)
  if not result['OK']:
    finalResult["Failed"].append(lfn)
    print('ERROR %s' % result['Message'])
예제 #49
0
""" simple hello world job
"""

from __future__ import print_function
from DIRAC.Interfaces.API.Job import Job
from DIRAC.Interfaces.API.Dirac import Dirac
from DIRAC.DataManagementSystem.Utilities.DMSHelpers import DMSHelpers

j = Job()

j.setName("helloWorld-test")

j.setExecutable("exe-script.py", "", "Executable.log")

# <-- user settings
j.setCPUTime(172800)
tier1s = DMSHelpers().getTiers(tier=(0, 1))
j.setBannedSites(tier1s)
# user settings -->

# print j.workflow

# submit the job to dirac
result = Dirac().submitJob(j)
print(result)
예제 #50
0
    from DIRAC.Interfaces.API.Dirac import Dirac
    from DIRAC.FrameworkSystem.Client.ProxyManagerClient import gProxyManager
    from DIRAC.ConfigurationSystem.Client.Helpers.Operations import Operations

    # Get the necessary credentials: download a proxy for the configured
    # pipeline shifter and point X509_USER_PROXY at the downloaded file.
    op = Operations("glast.org")
    shifter = op.getValue("Pipeline/Shifter", "/DC=org/DC=doegrids/OU=People/CN=Stephan Zimmer 799865")
    shifter_group = op.getValue("Pipeline/ShifterGroup", "glast_user")
    result = gProxyManager.downloadProxyToFile(shifter, shifter_group, requiredTimeLeft=10000)
    if not result['OK']:
        gLogger.error("ERROR: No valid proxy found; ", result['Message'])
        dexit(1)
    proxy = result['Value']
    environ['X509_USER_PROXY'] = proxy
    gLogger.info("using proxy %s" % proxy)

    # Use a local job repository so the handled jobs are tracked.
    dirac = Dirac(True, "myRepo.rep")
    exitCode = 0
    errorList = []
    if len(args) < 1:
        Script.showHelp()

    # Kill every job given on the command line, collecting failures.
    for job in args:
        result = dirac.kill(job)
        if result['OK']:
            gLogger.info('Killed job %s' % job)
        else:
            errorList.append((job, result['Message']))
            exitCode = 2
    for error in errorList:
        gLogger.error("ERROR %s: %s" % error)
    # BUG FIX: dexit is the DIRAC exit *function* itself (used as dexit(1)
    # above), not a module, so dexit.exit(exitCode) raised AttributeError.
    dexit(exitCode)
예제 #51
0
    '  %s [option|cfgfile] ... JobID ...' % Script.scriptName, 'Arguments:',
    '  JobID:    DIRAC Job ID'
]))
Script.parseCommandLine(ignoreErrors=True)
args = Script.getPositionalArgs()

# At least one JobID must be supplied.
if len(args) < 1:
    Script.showHelp()

# Optional comma-separated list of parameter names to report.
parameters = None
for switch in Script.getUnprocessedSwitches():
    if switch[0] == 'Parameters':
        parameters = switch[1].split(',')

from DIRAC.Interfaces.API.Dirac import Dirac, parseArguments

dirac = Dirac()

# Aggregate per-job results in the usual S_OK-like return structure.
results = {'OK': True, 'Value': {'Successful': {}, 'Failed': {}}}
success = results['Value']['Successful']
failed = results['Value']['Failed']
for job in parseArguments(args):
    jobStr = 'Job %s' % job
    result = dirac.getJobParameters(job, printOutput=False)
    if not result['OK']:
        failed.update({jobStr: result['Message']})
    elif not result['Value']:
        failed.update({jobStr: 'Job not found'})
    elif parameters:
        # BUG FIX: dict.iteritems() does not exist on Python 3; items()
        # iterates identically on both interpreters for this filtering.
        params = dict((key, val) for key, val in result['Value'].items()
                      if key in parameters)
        success.update({jobStr: params})
예제 #52
0
    jobGroup = switch[1]
  elif switch[0].lower() == "date":
    date = switch[1]

# Default the selection window to 'Today' when no date was supplied.
selDate = date if date else 'Today'

# Printable description of the selection criteria actually used.
conditions = {'Status': status,
              'MinorStatus': minorStatus,
              'ApplicationStatus': appStatus,
              'Owner': owner,
              'JobGroup': jobGroup,
              'Date': selDate}

from DIRAC.Interfaces.API.Dirac import Dirac

dirac = Dirac()
result = dirac.selectJobs(status=status,
                          minorStatus=minorStatus,
                          applicationStatus=appStatus,
                          site=site,
                          owner=owner,
                          jobGroup=jobGroup,
                          date=date)
if not result['OK']:
  print('ERROR %s' % result['Message'])
  exitCode = 2
else:
  # Keep only the criteria that were actually set.
  conds = ['%s = %s' % (name, value) for name, value in conditions.items() if value]
예제 #53
0
    def __init__(self, *args, **kwargs):
        """Set up clients, caches and the table of recovery rules.

        ``self.todo`` maps a job category to an ordered list of rules.
        Each rule has a ``Check`` predicate deciding whether it applies to
        a job and an ``Actions`` callable listing the recovery steps.
        Order matters: the first matching rule wins, the "One of many
        Successful" entry must stay first and "Failed Hard" must stay last
        (see the inline notes below).
        """
        AgentModule.__init__(self, *args, **kwargs)
        self.name = "DataRecoveryAgent"
        self.enabled = False
        self.getJobInfoFromJDLOnly = False

        self.__getCSOptions()

        # This needs to be both otherwise we cannot account for all cases
        self.jobStatus = [JobStatus.DONE, JobStatus.FAILED]

        # Clients used to inspect jobs, files, transformations and requests.
        self.jobMon = JobMonitoringClient()
        self.fcClient = FileCatalogClient()
        self.tClient = TransformationClient()
        self.reqClient = ReqClient()
        self.diracAPI = Dirac()
        # LFNs already accounted for by an earlier task in this cycle.
        self.inputFilesProcessed = set()
        self.todo = {
            "NoInputFiles": [
                dict(
                    Message="NoInputFiles: OutputExists: Job 'Done'",
                    ShortMessage="NoInputFiles: job 'Done' ",
                    Counter=0,
                    Check=lambda job: job.allFilesExist() and job.status == JobStatus.FAILED,
                    Actions=lambda job, tInfo: [job.setJobDone(tInfo)],
                ),
                dict(
                    Message="NoInputFiles: OutputMissing: Job 'Failed'",
                    ShortMessage="NoInputFiles: job 'Failed' ",
                    Counter=0,
                    Check=lambda job: job.allFilesMissing() and job.status == JobStatus.DONE,
                    Actions=lambda job, tInfo: [job.setJobFailed(tInfo)],
                ),
            ],
            "InputFiles": [  # must always be first!
                dict(
                    Message="One of many Successful: clean others",
                    ShortMessage="Other Tasks --> Keep",
                    Counter=0,
                    Check=lambda job: job.allFilesExist()
                    and job.otherTasks
                    and not set(job.inputFiles).issubset(self.inputFilesProcessed),
                    Actions=lambda job, tInfo: [
                        self.inputFilesProcessed.update(job.inputFiles),
                        job.setJobDone(tInfo),
                        job.setInputProcessed(tInfo),
                    ],
                ),
                dict(
                    Message="Other Task processed Input, no Output: Fail",
                    ShortMessage="Other Tasks --> Fail",
                    Counter=0,
                    Check=lambda job: set(job.inputFiles).issubset(self.inputFilesProcessed)
                    and job.allFilesMissing()
                    and job.status != JobStatus.FAILED,
                    Actions=lambda job, tInfo: [job.setJobFailed(tInfo)],
                ),
                dict(
                    Message="Other Task processed Input: Fail and clean",
                    ShortMessage="Other Tasks --> Cleanup",
                    Counter=0,
                    Check=lambda job: set(job.inputFiles).issubset(self.inputFilesProcessed)
                    and not job.allFilesMissing(),
                    Actions=lambda job, tInfo: [job.setJobFailed(tInfo), job.cleanOutputs(tInfo)],
                ),
                dict(
                    Message="InputFile(s) missing: mark job 'Failed', mark input 'Deleted', clean",
                    ShortMessage="Input Missing --> Job 'Failed, Input 'Deleted', Cleanup",
                    Counter=0,
                    Check=lambda job: job.inputFiles and job.allInputFilesMissing() and not job.allTransFilesDeleted(),
                    Actions=lambda job, tInfo: [
                        job.cleanOutputs(tInfo),
                        job.setJobFailed(tInfo),
                        job.setInputDeleted(tInfo),
                    ],
                ),
                dict(
                    Message="InputFile(s) Deleted, output Exists: mark job 'Failed', clean",
                    ShortMessage="Input Deleted --> Job 'Failed, Cleanup",
                    Counter=0,
                    Check=lambda job: job.inputFiles
                    and job.allInputFilesMissing()
                    and job.allTransFilesDeleted()
                    and not job.allFilesMissing(),
                    Actions=lambda job, tInfo: [job.cleanOutputs(tInfo), job.setJobFailed(tInfo)],
                ),
                # All Output Exists
                dict(
                    Message="Output Exists, job Failed, input not Processed --> Job Done, Input Processed",
                    ShortMessage="Output Exists --> Job Done, Input Processed",
                    Counter=0,
                    Check=lambda job: job.allFilesExist()
                    and not job.otherTasks
                    and job.status == JobStatus.FAILED
                    and not job.allFilesProcessed()
                    and job.allInputFilesExist(),
                    Actions=lambda job, tInfo: [job.setJobDone(tInfo), job.setInputProcessed(tInfo)],
                ),
                dict(
                    Message="Output Exists, job Failed, input Processed --> Job Done",
                    ShortMessage="Output Exists --> Job Done",
                    Counter=0,
                    Check=lambda job: job.allFilesExist()
                    and not job.otherTasks
                    and job.status == JobStatus.FAILED
                    and job.allFilesProcessed()
                    and job.allInputFilesExist(),
                    Actions=lambda job, tInfo: [job.setJobDone(tInfo)],
                ),
                dict(
                    Message="Output Exists, job Done, input not Processed --> Input Processed",
                    ShortMessage="Output Exists --> Input Processed",
                    Counter=0,
                    Check=lambda job: job.allFilesExist()
                    and not job.otherTasks
                    and job.status == JobStatus.DONE
                    and not job.allFilesProcessed()
                    and job.allInputFilesExist(),
                    Actions=lambda job, tInfo: [job.setInputProcessed(tInfo)],
                ),
                # outputmissing
                dict(
                    Message="Output Missing, job Failed, input Assigned, MaxError --> Input MaxReset",
                    ShortMessage="Max ErrorCount --> Input MaxReset",
                    Counter=0,
                    Check=lambda job: job.allFilesMissing()
                    and not job.otherTasks
                    and job.status == JobStatus.FAILED
                    and job.allFilesAssigned()
                    and not set(job.inputFiles).issubset(self.inputFilesProcessed)
                    and job.allInputFilesExist()
                    and job.checkErrorCount(),
                    Actions=lambda job, tInfo: [job.setInputMaxReset(tInfo)],
                ),
                dict(
                    Message="Output Missing, job Failed, input Assigned --> Input Unused",
                    ShortMessage="Output Missing --> Input Unused",
                    Counter=0,
                    Check=lambda job: job.allFilesMissing()
                    and not job.otherTasks
                    and job.status == JobStatus.FAILED
                    and job.allFilesAssigned()
                    and not set(job.inputFiles).issubset(self.inputFilesProcessed)
                    and job.allInputFilesExist(),
                    Actions=lambda job, tInfo: [job.setInputUnused(tInfo)],
                ),
                dict(
                    Message="Output Missing, job Done, input Assigned --> Job Failed, Input Unused",
                    ShortMessage="Output Missing --> Job Failed, Input Unused",
                    Counter=0,
                    Check=lambda job: job.allFilesMissing()
                    and not job.otherTasks
                    and job.status == JobStatus.DONE
                    and job.allFilesAssigned()
                    and not set(job.inputFiles).issubset(self.inputFilesProcessed)
                    and job.allInputFilesExist(),
                    Actions=lambda job, tInfo: [job.setInputUnused(tInfo), job.setJobFailed(tInfo)],
                ),
                # some files missing, needing cleanup. Only checking for
                # assigned, because processed could mean an earlier job was
                # succesful and this one is just the duplicate that needed
                # to be removed! But we check for other tasks earlier, so
                # this should not happen
                dict(
                    Message="Some missing, job Failed, input Assigned --> cleanup, Input 'Unused'",
                    ShortMessage="Output Missing --> Cleanup, Input Unused",
                    Counter=0,
                    Check=lambda job: job.someFilesMissing()
                    and not job.otherTasks
                    and job.status == JobStatus.FAILED
                    and job.allFilesAssigned()
                    and job.allInputFilesExist(),
                    Actions=lambda job, tInfo: [job.cleanOutputs(tInfo), job.setInputUnused(tInfo)],
                ),
                dict(
                    Message="Some missing, job Done, input Assigned --> cleanup, job Failed, Input 'Unused'",
                    ShortMessage="Output Missing --> Cleanup, Job Failed, Input Unused",
                    Counter=0,
                    Check=lambda job: job.someFilesMissing()
                    and not job.otherTasks
                    and job.status == JobStatus.DONE
                    and job.allFilesAssigned()
                    and job.allInputFilesExist(),
                    Actions=lambda job, tInfo: [
                        job.cleanOutputs(tInfo),
                        job.setInputUnused(tInfo),
                        job.setJobFailed(tInfo),
                    ],
                ),
                dict(
                    Message="Some missing, job Done --> job Failed",
                    ShortMessage="Output Missing, Done --> Job Failed",
                    Counter=0,
                    # CONSISTENCY FIX: use the JobStatus constant instead of the
                    # bare "Done" literal used everywhere else in this table.
                    Check=lambda job: not job.allFilesExist() and job.status == JobStatus.DONE,
                    Actions=lambda job, tInfo: [job.setJobFailed(tInfo)],
                ),
                dict(
                    Message="Something Strange",
                    ShortMessage="Strange",
                    Counter=0,
                    Check=lambda job: job.status not in (JobStatus.FAILED, JobStatus.DONE),
                    Actions=lambda job, tInfo: [],
                ),
                # should always be the last one!
                dict(
                    Message="Failed Hard",
                    ShortMessage="Failed Hard",
                    Counter=0,
                    Check=lambda job: False,  # never
                    Actions=lambda job, tInfo: [],
                ),
            ],
        }
        # Per-transformation cache of (cycle counter, job count) pairs.
        self.jobCache = defaultdict(lambda: (0, 0))
        # Notification options
        self.notesToSend = ""
        self.subject = "DataRecoveryAgent"
        self.startTime = time.time()
import os

# Build the usage banner from the second line of the module docstring.
Script.setUsageMessage( '\n'.join( [ __doc__.split( '\n' )[1],
                                     'Usage:',
                                     '  %s [option|cfgfile] ... JobID ...' % Script.scriptName,
                                     'Arguments:',
                                     '  JobID:    DIRAC Job ID' ] ) )
Script.registerSwitch( "D:", "Dir=", "Store the output in this directory" )
Script.parseCommandLine( ignoreErrors = True )
args = Script.getPositionalArgs()

# At least one JobID is required.
if len( args ) < 1:
  Script.showHelp()

# Imported after parseCommandLine so the DIRAC configuration is initialised.
from DIRAC.Interfaces.API.Dirac                              import Dirac
dirac = Dirac()
exitCode = 0
errorList = []

# Optional destination directory (the -D/--Dir switch).
outputDir = None
for sw, v in Script.getUnprocessedSwitches():
  if sw in ( 'D', 'Dir' ):
    outputDir = v

# Download the input sandbox of each job; by default it is unpacked
# into a local InputSandbox<jobID>/ directory.
for job in args:

  result = dirac.getInputSandbox( job, outputDir = outputDir )
  if result['OK']:
    if os.path.exists( 'InputSandbox%s' % job ):
      print 'Job input sandbox retrieved in InputSandbox%s/' % ( job )
  else:
예제 #55
0
Script.registerSwitch("f:", "File=",
                      "Get output for jobs with IDs from the file")
Script.registerSwitch("g:", "JobGroup=",
                      "Get output for jobs in the given group")

Script.parseCommandLine(ignoreErrors=True)
args = Script.getPositionalArgs()

import os.path

if __name__ == "__main__":

    from DIRAC.Interfaces.API.Dirac import Dirac, parseArguments
    from DIRAC.Core.Utilities.Time import toString, date, day
    dirac = Dirac()

    jobs = []
    for sw, value in Script.getUnprocessedSwitches():
        if sw.lower() in ('f', 'file'):
            if os.path.exists(value):
                jFile = open(value)
                jobs += jFile.read().split()
                jFile.close()
        elif sw.lower() in ('g', 'jobgroup'):
            group = value
            jobDate = toString(date() - 30 * day)
            result = dirac.selectJobs(jobGroup=value, date=jobDate)
            if not result['OK']:
                if not "No jobs selected" in result['Message']:
                    print "Error:", result['Message']
                                     'Usage:',
                                     '  %s [option|cfgfile] ... LFN SE' % Script.scriptName,
                                     'Arguments:',
                                     '  LFN:      Logical File Name or file containing LFNs',
                                     '  SE:       Valid DIRAC SE' ] ) )
Script.parseCommandLine( ignoreErrors = True )
args = Script.getPositionalArgs()

if len( args ) < 2:
  Script.showHelp()

if len( args ) > 2:
  print 'Only one LFN SE pair will be considered'

from DIRAC.Interfaces.API.Dirac                       import Dirac
dirac = Dirac()
exitCode = 0

lfn = args[0]
seName = args[1]

try:
  f = open( lfn, 'r' )
  lfns = f.read().splitlines()
  f.close()
except:
  lfns = [lfn]

for lfn in lfns:
  result = dirac.removeReplica( lfn, seName, printOutput = True )
  if not result['OK']:
    elif switch[0].lower() == "owner":
        owner = switch[1]
    elif switch[0].lower() == "jobgroup":
        jobGroup = switch[1]
    elif switch[0].lower() == "date":
        date = switch[1]
    elif switch[0].lower() == "file":
        filename = switch[1]

# Default the selection window to 'Today' when no date was supplied.
selDate = date if date else 'Today'

from DIRAC.Interfaces.API.Dirac import Dirac

dirac = Dirac()
exitCode = 0
errorList = []
resultDict = {}

# Query the WMS for all jobs matching the given selection criteria.
result = dirac.selectJobs(
    status=status,
    minorStatus=minorStatus,
    applicationStatus=appStatus,
    site=site,
    owner=owner,
    jobGroup=jobGroup,
    date=date)
if not result['OK']:
    print "Error in selectJob", result['Message']
else:
    jobs = result['Value']