def insertApplicationToCS(self, name, csParameter):
    """add given application found via CVMFS to the CS"""

    pars = dict(self.parameter)
    pars['name'] = name

    gLogger.notice("%(name)s: Adding version %(version)s to the CS" % pars)

    existingVersions = gConfig.getSections("%(softSec)s/%(platform)s/%(name)s" % pars, [])
    if not existingVersions['OK']:
      gLogger.error("Could not find all versions available in CS: %s" % existingVersions['Message'])
      dexit(255)
    if pars['version'] in existingVersions['Value']:
      gLogger.always('Application %s %s for %s already in CS, nothing to do' % (name.lower(),
                                                                                pars['version'],
                                                                                pars['platform']))
      return S_OK()

    csPath = self.softSec + ("/%(platform)s/%(name)s/%(version)s/" % pars)
    for par, val in csParameter.iteritems():
      gLogger.notice("Add: %s = %s" %(csPath+par, val))
      result = self.csAPI.setOption(csPath+par, val)
      if result['OK']:
        self.modifiedCS = True
      else:
        gLogger.error("Failure to add to CS", result['Message'])
        return S_ERROR("")

    return S_OK()
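# Hypothetical usage sketch (not part of the original snippet): how insertApplicationToCS
# might be driven. The csParameter keys below are illustrative assumptions; the real
# option names depend on how the CS section for CVMFS applications is laid out.
def _exampleInsertApplication(adder):
  """Sketch: register one CVMFS application with a couple of assumed CS options."""
  from DIRAC import gLogger
  csParameter = {"CVMFSPath": "/cvmfs/clicdp.cern.ch/...",  # assumed option name
                 "CVMFSEnvScript": "init_ilcsoft.sh",       # assumed option name
                }
  res = adder.insertApplicationToCS("marlin", csParameter)
  if not res['OK']:
    gLogger.error("Could not add application to CS:", res['Message'])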
def _runFSTAgent():
  """ read commands line params and run FST agent for a given transformation ID """
  params = _Params()
  params.registerSwitches()
  Script.parseCommandLine()
  if not params.checkSettings()['OK']:
    Script.showHelp()
    dexit(1)

  from ILCDIRAC.ILCTransformationSystem.Agent.FileStatusTransformationAgent import FileStatusTransformationAgent
  fstAgent = FileStatusTransformationAgent('ILCTransformation/FileStatusTransformationAgent',
                                           'ILCTransformation/FileStatusTransformationAgent',
                                           'dirac-ilc-filestatus-transformation')
  fstAgent.log = gLogger
  fstAgent.enabled = params.enabled

  res = fstAgent.getTransformations(transID=params.transID)
  if not res['OK']:
    dexit(1)

  if not res['Value']:
    print("Transformation Not Found")
    dexit(1)

  trans = res['Value'][0]

  res = fstAgent.processTransformation(
      int(params.transID), trans['SourceSE'], trans['TargetSE'], trans['DataTransType'])
  if not res["OK"]:
    dexit(1)

  dexit(0)
  def uploadProcessListToFileCatalog(self, path_to_process_list, appVersion):
    """Upload the new processList to the FileCatalog
    """
    from ILCDIRAC.Core.Utilities.FileUtils                       import upload
    from DIRAC.DataManagementSystem.Client.DataManager           import DataManager
    from DIRAC import gConfig, exit as dexit

    datMan = DataManager()
    LOG.notice("Removing process list from file catalog" + path_to_process_list)
    res = datMan.removeFile(path_to_process_list)
    if not res['OK']:
      LOG.error("Could not remove process list from file catalog, do it by hand")
      dexit(2)
    LOG.notice("Done removing process list from file catalog")

    res = upload(os.path.dirname(path_to_process_list) + "/", self.location )
    if not res['OK']:
      LOG.error("something went wrong in the copy")
      dexit(2)

    LOG.notice("Putting process list to local processlist directory")
    localprocesslistpath = gConfig.getOption("/LocalSite/ProcessListPath", "")
    if localprocesslistpath['Value']:

      try:
        localSvnRepo = "/afs/cern.ch/eng/clic/software/whizard/whizard_195/"
        shutil.copy(self.location, localSvnRepo) ## because it does not make a difference if we hardcode it here or in ${DIRAC}/etc/dirac.cfg, yours truly APS, JFS
      except OSError as err:
        LOG.error("Copy of process list to %s failed with error %s!" % (localSvnRepo, str(err)))

      try:
        subprocess.call( ["svn","ci", os.path.join( localSvnRepo, os.path.basename(localprocesslistpath['Value'] )), "-m'Process list for whizard version %s'" % appVersion ], shell=False )
      except OSError as err:
        LOG.error("Commit failed! Error: %s" % str(err))
def main(vo):
    # thanks to Stephane for suggesting this fix!  
    #res1 = gConfig.getSections( 'Resources/Sites/LCG/', listOrdered = True )
    res = getQueues()
    if not res['OK']:
        gLogger.error(res['Message'])
        gLogger.error("Cannot obtain Queues")
        dexit(1)
    sites = res['Value'].keys()
    values = [res['Value'][key].keys() for key in sites]
    sites_ce = dict(zip(sites,values))
    vo_ces = ldapCEs(vo)
    final_dict = {}
    for site in sites_ce:
        final_dict[site]={"Tags":[],"CE":[]}
        ces_current_site = sites_ce[site]
        for ce in ces_current_site:
            if ce in vo_ces:
                curr_ces = final_dict[site]["CE"]
                curr_ces.append(ce)
                final_dict[site].update({"Tags":ldapTag(ce,vo),"CE":curr_ces})
                #final_dict[site]={"Tags":ldapTag(ce,vo),"CE":[ce]}
    ret_dict = {}
    for key in final_dict:
        if len(final_dict[key]['CE'])!=0:
            ret_dict[key]=final_dict[key]
    return ret_dict
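# Hypothetical driver sketch (not part of the original example): run main() for one VO
# and pretty-print the resulting {site: {"Tags": [...], "CE": [...]}} mapping. The VO
# name "glast.org" is only an assumption borrowed from another snippet in this collection.
def _printTagsPerSite():
    import json
    tagsPerSite = main("glast.org")
    print(json.dumps(tagsPerSite, indent=2))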
def runTests():
  """runs the tests"""
  clip = CLIParams()
  clip.registerSwitches()
  Script.parseCommandLine()

  overlayrun = clip.testOverlay
  myMarlinSteeringFile = "clic_ild_cdr_steering_overlay_1400.0.xml" if overlayrun else "clic_ild_cdr_steering.xml"

  myLCSimPreSteeringFile = "clic_cdr_prePandoraOverlay_1400.0.lcsim" if overlayrun else "clic_cdr_prePandora.lcsim"
  myLCSimPostSteeringFile = "clic_cdr_postPandoraOverlay.lcsim"
  parameterDict = dict( mokkaVersion="0706P08",
                        mokkaSteeringFile="clic_ild_cdr.steer",
                        detectorModel="CLIC_ILD_CDR",
                        steeringFileVersion="V22",
                        machine="clic_cdr",
                        backgroundType="gghad",
                        energy=1400,
                        marlinVersion="v0111Prod",
                        rootVersion="5.34",
                        marlinSteeringFile=myMarlinSteeringFile,
                        marlinInputdata = "/ilc/user/s/sailer/testFiles/prod_clic_ild_e2e2_o_sim_2214_26.slcio",
                        gearFile='clic_ild_cdr.gear',
                        lcsimPreSteeringFile=myLCSimPreSteeringFile,
                        lcsimPostSteeringFile=myLCSimPostSteeringFile
                      )

  myTests = TestCreater(clip, parameterDict)
  res = myTests.checkForTests()
  if not res['OK']:
    dexit(1)
  myTests.run()

  return
  def addMD5SumToCS(self):
    """adds the MD5Sum of the Tarball to the CS"""
    gLogger.notice("Adding MD5Sum to CS")
    md5sum = md5.md5(open(self.appTar).read()).hexdigest()
    result = self.diracAdmin.csSetOption("%(softSec)s/%(platform)s/%(appname)s/%(appVersion)s/Md5Sum" % self.parameter,
                                         md5sum)
    if result['OK']:
      self.modifiedCS = True
    else:
      gLogger.error("Could not add md5sum to CS")
      dexit(255)

  def commitToCS(self):
    """write changes to the CS to the server"""
    gLogger.notice("Committing changes to the CS")
    if self.modifiedCS:
      result = self.diracAdmin.csCommitChanges(False)
      if not result['OK']:
        gLogger.error('Commit failed with message = %s' % (result['Message']))
        dexit(255)
      gLogger.info('Successfully committed changes to CS')
    else:
      gLogger.info('No modifications to CS required')

  def uploadTarBall(self):
    """get the tarballURL from the CS and upload the tarball there. Exits when an error is encountered"""
    gLogger.notice("Uploading TarBall to the Grid")
    from ILCDIRAC.Core.Utilities.FileUtils import upload
    tarballurl = gConfig.getOption("%(softSec)s/%(platform)s/%(appname)s/TarBallURL" % self.parameter, "")
    if not tarballurl['OK'] or not tarballurl['Value']:
      gLogger.error('TarBallURL for application %(appname)s not defined' % self.parameter)
      dexit(255)
    res = upload(tarballurl['Value'], self.appTar)
    if not res['OK']:
      gLogger.error("Upload to %s failed" % tarballurl['Value'], res['Message'])
      dexit(255)
def getFiles():
  """ Get the lfns: This is not the point of this example, so keep it out of the main
  """
  fc = FileCatalogClient()
  
  meta = {}
  meta['ProdID'] = 1543
  meta["Datatype"] = "DST"
  
  result = fc.findFilesByMetadata(meta, "/ilc/prod/clic")
  if not result["OK"]:
    gLogger.error(result["Message"])
    dexit(1)
  return result['Value']
def _findInFC():
  """Find something in the FileCatalog"""
  from DIRAC import exit as dexit
  clip = _Params()
  clip.registerSwitches()
  Script.parseCommandLine()

  args = Script.getPositionalArgs()
  if len(args)<2:
    Script.showHelp('ERROR: Not enough arguments')
    gLogger.error("Run %s --help" % SCRIPTNAME )
    dexit(1)
    
  path = args[0]
  if path == '.':
    path = '/'

  ## Check that the first argument is not a MetaQuery
  if any( op in path for op in OPLIST ):
    gLogger.error("ERROR: Path '%s' is not a valid path! The first argument must be a path" % path)
    gLogger.error("Run %s --help" % SCRIPTNAME )
    dexit(1)

  gLogger.verbose("Path:", path)
  metaQuery = args[1:]
  metaDataDict = _createQueryDict(metaQuery)
  gLogger.verbose("Query:",str(metaDataDict))
  if not metaDataDict:
    gLogger.info("No query")
    dexit(1)
  
  fc = FileCatalogClient()
  res = fc.findFilesByMetadata(metaDataDict, path)
  if not res['OK']:
    gLogger.error(res['Message'])
    dexit(1)
  if not res['Value']:
    gLogger.notice("No files found")

  listToPrint = None

  if clip.printOnlyDirectories:
    listToPrint = set( "/".join(fullpath.split("/")[:-1]) for fullpath in res['Value'] )
  else:
    listToPrint = res['Value']

  for entry in listToPrint:
    print(entry)

  dexit(0)
def _getOutputs():
  repoLocation = ''
  clip = _Params()
  clip.registerSwitches()
  Script.parseCommandLine( ignoreErrors = False )
  repoLocation = clip.repo
  if not repoLocation:
    Script.showHelp()
    dexit(1)
  from DIRAC import gLogger
  from DIRAC.Interfaces.API.Dirac import Dirac

  dirac = Dirac(True, repoLocation)
  
  exitCode = 0
  res = dirac.monitorRepository(False)
  if not res['OK']:
    gLogger.error("Failed because %s" % res['Message'])
    dexit(1)
    
  res = dirac.retrieveRepositorySandboxes()
  if not res['OK']:
    gLogger.error("Failed because %s" % res['Message'])
    dexit(1)
  if clip.outputdata:
    res = dirac.retrieveRepositoryData()
    if not res['OK']:
      gLogger.error("Failed because %s" % res['Message'])
      dexit(1)
  dexit(exitCode)
def getJob():
  """ produce a job: it's always the same, so we don't need to put it in the main
  """
  j = UserJob()
  ma = Marlin()
  ma.setVersion("v0111Prod")
  ma.setSteeringFile("clic_ild_cdr_steering.xml")
  ma.setGearFile("clic_ild_cdr.gear")
  result = j.append(ma)
  if not result['OK']:
    gLogger.error(result["Message"])
    dexit(1)
  j.setCPUTime(10000)
  j.setOutputSandbox("*.log")
  return j
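# Hypothetical driver sketch (not part of the original example): combine getFiles() and
# getJob() into a simple submission loop, one job per DST file. The repository file name
# and the one-LFN-per-job layout are illustrative assumptions.
def _submitAll():
  from DIRAC import gLogger, exit as dexit
  from ILCDIRAC.Interfaces.API.DiracILC import DiracILC
  dirac = DiracILC(True, "myjobs.rep")
  for lfn in getFiles():
    job = getJob()
    job.setInputData(lfn)  # attach one input DST per job (assumed workflow)
    res = job.submit(dirac)
    if not res['OK']:
      gLogger.error("Submission failed:", res['Message'])
      dexit(1)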
def _getOutputData():
  cliParams = _Params()
  cliParams.registerSwitches()
  Script.parseCommandLine( ignoreErrors = False )
  if not cliParams.repo:
    Script.showHelp()
    dexit(2)
  from DIRAC.Interfaces.API.Dirac import Dirac
  
  dirac = Dirac(True, cliParams.repo)

  exitCode = 0
  dirac.monitorRepository(False)
  dirac.retrieveRepositoryData()

  dexit(exitCode)
def getUserInfoFromPhonebook(client, clip):
  """return user information from the phonebook"""
  sudsUser = client.factory.create("ns0:MemberType")
  comm = "phonebook --login %s --terse firstname --terse surname --terse ccid --terse email" % clip.uname
  from DIRAC.Core.Utilities.Subprocess import shellCall
  res = shellCall(0, comm)
  if not res['OK']:
    gLogger.error("Failed getting user info:",res['Message'])
    gLogger.error("Please add user in e-group by hand.")
    dexit(1)
  if res['Value'][0]:
    gLogger.error("phonebook command returned an error:",res['Value'][2])
    gLogger.error("Please add user in e-group by hand.")
    dexit(1)
  output = res['Value'][1]
  if output:
    output = output.split("\n")
    if len(output)>2:
      gLogger.error("This username somehow has more than one account, please choose the right one and register by hand")
      gLogger.error("%s"%output)
      dexit(1)
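    # phonebook --terse output: one line per account, fields ordered "firstname;surname;ccid;email"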
    user_fname = output[0].split(";")[0] #firstname
    user_sname = output[0].split(";")[1] # surname
    phoneBookEmail = output[0].split(";")[3] #email
    if phoneBookEmail != clip.email and clip.email:
      gLogger.error( "Email for user (%s) does not match the email given in the command line (%s))" \
                     %(phoneBookEmail, clip.email))
      dexit(1)
    else:
      clip.email = phoneBookEmail
    sudsUser['PrimaryAccount'] = clip.uname.upper()
    sudsUser['ID'] = output[0].split(";")[2] # CCID
    sudsUser['Type'] = 'Person'
    sudsUser['Name'] = '%s, %s' %(user_sname.upper(), user_fname)
    sudsUser['Email'] = output[0].split(";")[3] #email
  else:
    gLogger.notice("User '%s' does not appear to be in the CERN phonebook" % clip.uname)
    if not clip.email:
      gLogger.error("Email address for external user is not given, please add -E<address>")
      dexit(1)

    sudsUser['Type'] = 'External'
    sudsUser['Email'] = clip.email

  return sudsUser
def addSoftware():
  """uploads, registers, and sends email about new software package"""
  cliParams = Params()
  cliParams.registerSwitches()
  Script.parseCommandLine( ignoreErrors = True )
  platform = cliParams.platform
  appName = cliParams.name
  appVersion = cliParams.version
  comment = cliParams.comment
  tarball_loc = cliParams.tarball
  if not platform or not appName or not comment:
    Script.showHelp()
    dexit(2)

  softAdder = SoftwareAdder(platform, appName, tarball_loc, appVersion, comment)
  softAdder.addSoftware()

  gLogger.notice("All done!")
  dexit(0)
def addUserToCS(clip, userProps):
  """Add the user to the CS, return list of errors"""
  from DIRAC.Interfaces.API.DiracAdmin import DiracAdmin
  diracAdmin = DiracAdmin()
  exitCode = 0
  errorList = []

  if not diracAdmin.csModifyUser( clip.uname, userProps, createIfNonExistant = True )['OK']:
    errorList.append( ( "add user", "Cannot register user: '******'" % clip.uname ) )
    exitCode = 255
  else:
    result = diracAdmin.csCommitChanges()
    if not result[ 'OK' ]:
      errorList.append( ( "commit", result[ 'Message' ] ) )
      exitCode = 255
  for error in errorList:
    gLogger.error( "%s: %s" % error )
  if exitCode:
    dexit(exitCode)
def addUser():
  """Add user to configuration service and other things"""
  clip = Params()
  clip.registerSwitches()
  Script.parseCommandLine()
  if not ( clip.certCN and clip.groups and clip.certDN and clip.uname):
    gLogger.error("Username, DN, CN, and groups have to be given")
    Script.showHelp()
  gLogger.notice("Add User to Egroup")
  addUserToEgroup(clip)
  if not clip.email:
    gLogger.fatal("No email defined and not found in phonebook, you have to provide it: -E<email>")
    dexit(1)
  userProps = {'DN': clip.certDN, 'Email': clip.email, 'CN': clip.certCN, 'Groups': clip.groups}
  gLogger.notice("Add User to CS")
  addUserToCS(clip, userProps)
  gLogger.notice("Add User to FC")
  addUserToFC(clip)
  gLogger.notice("Done")
  def checkForTarBall(self,tarball_loc):
    """checks if the tarball exists"""
    gLogger.info("Check if tarball exists at %s" % tarball_loc)
    appTar = ''
    if tarball_loc:
      appTar = tarball_loc
      if self.appName == 'slic':
        self.appVersion = os.path.basename(tarball_loc).split("_")[0].split("-")[1]
    else:
      if self.appVersion:
        appTar = "%s%s.tgz" % (self.appName, self.appVersion)
      else:
        gLogger.notice("Version not defined")

    if not os.path.exists(appTar):
      gLogger.error("Cannot find the file %s, exiting" % appTar)
      dexit(1)

    return appTar
def printUsers():
  """Print the list of users in the VO"""
  clip = Params()
  clip.registerSwitches()
  Script.parseCommandLine()
  clip.setURLs()
  
  from DIRAC.Core.Security.VOMSService import VOMSService
  voms = VOMSService(clip.adminUrl, clip.attributeUrl)
  res = voms.admListMembers()
  if not res['OK']:
    gLogger.error(res['Message'])
    dexit(1)
  users = res['Value']
  for user in users:
    if not clip.username:
      printUser(user, clip.addPrint)
    else:
      if user['DN'].lower().count(clip.username.lower()):
        printUser(user, clip.addPrint)
def printUsers():
  """Print the list of users in the VO"""
  clip = Params()
  clip.registerSwitches()
  Script.parseCommandLine()
  clip.setURLs()
  
  from DIRAC.Core.Security.VOMSService import VOMSService
  voms = VOMSService(vo=clip.voName)
  res = voms.getUsers()
  if not res['OK']:
    gLogger.error(res['Message'])
    dexit(1)
  users = res['Value']
  for userDN, userInfo in users.iteritems():
    userInfo['DN'] = userDN
    if not clip.username:
      printUser(userInfo, clip.addPrint)
    else:
      if userDN.lower().count(clip.username.lower()):
        printUser(userInfo, clip.addPrint)
def _createLFNList():
  """create the LFnList"""
  cliparams = _Params()
  cliparams.registerSwitches()
  Script.parseCommandLine( ignoreErrors = False )
  
  repoLocation =  cliparams.repo
  if not repoLocation:
    Script.showHelp()
    dexit(2)
  from ILCDIRAC.Interfaces.API.DiracILC import DiracILC
  dirac = DiracILC(True, repoLocation)
  
  dirac.monitorRepository(False)
  lfns = []
  lfns = dirac.retrieveRepositoryOutputDataLFNs()
  LOG.notice("lfnlist=[")
  for lfn in lfns :
    LOG.notice('"LFN:%s",' % lfn)
  LOG.notice("]")
  dexit(0)
def _extend():
  """Extends all the tasks"""
  clip = _Params()
  clip.registerSwitches()
  Script.parseCommandLine()
  
  from DIRAC import gLogger, exit as dexit
  
  if not clip.prod or not clip.tasks:
    gLogger.error("Production ID is 0 or Tasks is 0, cannot be")
    dexit(1)
    
  from DIRAC.TransformationSystem.Client.TransformationClient import TransformationClient
  tc = TransformationClient()
  res = tc.getTransformation(clip.prod)
  if not res['OK']:
    gLogger.error("Failed to get transformation %s:" % clip.prod, res['Message'])
    dexit(1)
  trans = res['Value']
  transp = trans['Plugin']
  if transp != 'Limited':
    gLogger.error("This cannot be used on productions that are not using the 'Limited' plugin")
    dexit(0)
  
  gLogger.info("Prod %s has %s tasks registered" % (clip.prod, trans['MaxNumberOfTasks']) )
  if clip.tasks >0:
    max_tasks = trans['MaxNumberOfTasks'] + clip.tasks  
    groupsize = trans['GroupSize']
    gLogger.notice("Adding %s tasks (%s file(s)) to production %s" %(clip.tasks, clip.tasks*groupsize, clip.prod))
  elif clip.tasks <0:
    max_tasks = -1
    gLogger.notice("Now all existing files in the DB for production %s will be processed." % clip.prod)
  else:
    gLogger.error("Number of tasks must be different from 0")
    dexit(1)
  res = tc.setTransformationParameter(clip.prod, 'MaxNumberOfTasks', max_tasks)
  if not res['OK']:
    gLogger.error(res['Message'])
    dexit(1)
  gLogger.notice("Production %s extended!" % clip.prod)
    
  dexit(0)
def _createTrafo():
  """reads command line parameters, makes check and creates replication transformation"""
  from DIRAC import exit as dexit
  clip = _Params()
  clip.registerSwitches()
  Script.parseCommandLine()
  if not clip.checkSettings()['OK']:
    gLogger.error("ERROR: Missing settings")
    dexit(1)
  resCreate = _createReplication( clip.targetSE, clip.sourceSE, clip.prodID, clip.datatype, clip.extraname )
  if not resCreate['OK']:
    dexit(1)
  dexit(0)
def _getProdLogs():
  """get production log files from LogSE"""
  clip = _Params()
  clip.registerSwitch()
  Script.parseCommandLine()
  if not ( clip.logF or clip.logD or clip.prodid ):
    Script.showHelp()
    dexit(1)
  from DIRAC.ConfigurationSystem.Client.Helpers.Operations import Operations
  ops = Operations()
  storageElementName = ops.getValue('/LogStorage/LogSE', 'LogSE')
  from DIRAC.Resources.Storage.StorageElement import StorageElementItem as StorageElement
  logSE = StorageElement(storageElementName)

  if clip.prodid and not ( clip.logD or clip.logF ):
    result = _getLogFolderFromID( clip )
    if not result['OK']:
      gLogger.error( result['Message'] )
      dexit(1)

  if clip.logD:
    if not clip.noPromptBeforeDL:
      res = promptUser('Are you sure you want to get ALL the files in this directory?')
      if not res['OK']:
        dexit()
      choice = res['Value']
      if choice.lower()=='n':
        dexit(0)
  
    if isinstance(clip.logD, str):
      res = logSE.getDirectory(clip.logD, localPath=clip.outputdir)
      _printErrorReport(res)
    elif isinstance(clip.logD, list):
      for logdir in clip.logD:
        gLogger.notice('Getting log files from '+str(logdir))
        res = logSE.getDirectory(logdir, localPath=clip.outputdir)
        _printErrorReport(res)

  if clip.logF:
    res = logSE.getFile(clip.logF, localPath = clip.outputdir)
    _printErrorReport(res)
  def checkConsistency(self):
    """checks if platform is defined, application exists, etc."""
    gLogger.notice("Checking consistency")
    av_platforms = gConfig.getSections(self.softSec, [])
    if av_platforms['OK']:
      if not self.platform in av_platforms['Value']:
        gLogger.error("Platform %s unknown, available are %s." % (self.platform, ", ".join(av_platforms['Value'])))
        gLogger.error("If yours is missing add it in CS")
        dexit(255)
    else:
      gLogger.error("Could not find all platforms available in CS")
      dexit(255)

    av_apps = gConfig.getSections("%(softSec)s/%(platform)s" % self.parameter, [])
    if not av_apps['OK']:
      gLogger.error("Could not find all applications available in CS")
      dexit(255)
  def addVersionToCS(self):
    """adds the version of the application to the CS"""
    gLogger.notice("Adding version %(appVersion)s to the CS" % self.parameter)
    existingVersions = gConfig.getSections("%(softSec)s/%(platform)s/%(appname)s" % self.parameter, [])
    if not existingVersions['OK']:
      gLogger.error("Could not find all versions available in CS: %s" % existingVersions['Message'])
      dexit(255)
    if self.appVersion in existingVersions['Value']:
      gLogger.always('Application %s %s for %s already in CS, nothing to do' % (self.appName.lower(),
                                                                                self.appVersion,
                                                                                self.platform))
      dexit(0)

    result = self.diracAdmin.csSetOption("%(softSec)s/%(platform)s/%(appname)s/%(appVersion)s/TarBall" % self.parameter,
                                         self.parameter['appTar_name'])
    if result['OK']:
      self.modifiedCS = True
    else:
      gLogger.error ("Could not add version to CS")
      dexit(255)
def addSoftware():
  """uploads, registers, and sends email about new software package"""
  cliParams = Params()
  cliParams.registerSwitches()
  Script.parseCommandLine( ignoreErrors = True )

  consistent = cliParams.checkConsistency()
  if not consistent['OK']:
    gLogger.error("Error checking consistency:", consistent['Message'])
    Script.showHelp()
    dexit(2)

  softAdder = CVMFSAdder(cliParams)
  resCheck = softAdder.checkConsistency()

  if not resCheck['OK']:
    Script.showHelp()
    dexit(2)

  softAdder.addSoftware()

  gLogger.notice("All done!")
  dexit(0)
                                  'Arguments:',
                                  '  JobID:    DIRAC Job ID' ] ) )
 args = Script.getPositionalArgs() 
 from DIRAC import gLogger, exit as dexit
 from DIRAC.Interfaces.API.Dirac import Dirac
 from DIRAC.FrameworkSystem.Client.ProxyManagerClient import gProxyManager
 from DIRAC.ConfigurationSystem.Client.Helpers.Operations import Operations
 
 # get necessary credentials
 op = Operations("glast.org")
 shifter = op.getValue("Pipeline/Shifter","/DC=org/DC=doegrids/OU=People/CN=Stephan Zimmer 799865")
 shifter_group = op.getValue("Pipeline/ShifterGroup","glast_user")
 result = gProxyManager.downloadProxyToFile(shifter,shifter_group,requiredTimeLeft=10000)
 if not result['OK']:
     gLogger.error("ERROR: No valid proxy found; ",result['Message'])
     dexit(1)
 proxy = result[ 'Value' ]
 environ['X509_USER_PROXY'] = proxy
 gLogger.info("using proxy %s"%proxy)
 dirac = Dirac(True,"myRepo.rep")
 exitCode = 0
 errorList = []
 if len( args ) < 1:
     Script.showHelp()
 for job in args:
     result = dirac.kill( job )
     if result['OK']:
         gLogger.info('Killed job %s' % ( job ))
     else:
         errorList.append( ( job, result['Message'] ) )
         exitCode = 2
def main():
    '''
    This is the script main method, which will hold all the logic.
    '''

    cliParams = Params()
    cliParams.registerSwitches()
    Script.parseCommandLine(ignoreErrors=True)

    consistent = cliParams.checkConsistency()
    if not consistent['OK']:
        gLogger.error("Error checking consistency:", consistent['Message'])
        Script.showHelp()
        dexit(2)

    ##Get prodID
    prodID = float(cliParams.prodID)

    ##Get all possible input files
    inputFiles = getInputFiles(prodID)

    ##Get suffix
    suffix = cliParams.suffix
    if suffix: suffix = "_" + suffix

    ##Jet clustering
    algorithm = "ValenciaPlugin {radius:.1f} {beta:.1f} {gamma:.1f}"
    jetRadius = float(cliParams.jetRadius)
    jetBeta = float(cliParams.jetBeta)
    jetGamma = float(cliParams.jetGamma)

    jetCluster = "ExclusiveNJets 2"
    jetRecomb = "E_scheme"

    ##Top tagger
    deltaR = float(cliParams.deltaR)
    deltaP = float(cliParams.deltaP)
    cos_theta_W_max = float(cliParams.cos_theta_W_max)

    ##Sixfermion sample
    sixFermionSample = cliParams.sixFermionSample

    ##Number of files per job
    nrFilesPerJob = int(cliParams.nrFilesPerJob)

    from ILCDIRAC.Interfaces.API.DiracILC import DiracILC
    repDir = "/afs/cern.ch/user/l/lstroem/clicdp/analysis/steering/chain/topasymmetry_wflavourtag/submit/{prodID:04.0f}/rep/".format(
        prodID=prodID)
    subprocess.call("mkdir -p " + repDir, shell=True)
    dirac = DiracILC(False)  #, repDir+"topasymmetry_vlc{suffix}.rep".format(suffix = suffix))

    jetAlgo = algorithm.format(radius=jetRadius, beta=jetBeta, gamma=jetGamma)
    inputFileList = []
    i = 0
    j = 1
    for inputFile in inputFiles:
        inputFileList.append(inputFile)
        i += 1
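        # submit one job per bunch of nrFilesPerJob input files (or whatever is left at the end)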
        if (i >= nrFilesPerJob * j) or (i == len(inputFiles)):

            jobName = "topasymmetry_chain_{jetSettings}_dR{deltaR:.2f}_dP{deltaP:.2f}_cthetaWmax{cos_theta_W_max:.2f}_1Jun2017_part{index}_{prodID:04.0f}".format(
                jetSettings=getJetSettings(jetAlgo, jetCluster, jetRecomb),
                deltaR=deltaR,
                deltaP=deltaP,
                cos_theta_W_max=cos_theta_W_max,
                index=j,
                prodID=prodID)
            jobGroup = "topasymmetry_chain_{prodID:04.0f}".format(
                prodID=prodID)
            job, outputFile, rootFile = defGridJob(jobName, jobGroup,
                                                   inputFileList)

            ##Check if outputfile already exists
            pOutCheck = subprocess.Popen(
                "dirac-dms-lfn-replicas /ilc/user/r/rstrom/" + jobGroup + "/" +
                rootFile,
                shell=True,
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE)
            outCheck, errCheck = pOutCheck.communicate()
            #gLogger.notice(jobName)
            if "no such file" not in outCheck.lower():
                gLogger.notice('File exists! Skipping!')
                inputFileList = []
                j += 1
                continue  #continue #use break if only part1, use continue of run over all parts
            #gLogger.notice(jetAlgo)
            #doLogger(jobName, inputFileList, outputFile, rootFile)
            res = job.append(
                defMarlin(outputFile,
                          rootFile,
                          jetAlgo=jetAlgo,
                          jetCluster=jetCluster,
                          jetRecomb=jetRecomb,
                          deltaR=deltaR,
                          deltaP=deltaP,
                          cos_theta_W_max=cos_theta_W_max,
                          sixFermionSample=sixFermionSample))

            if not res['OK']:  # catch if there is an error
                print(res['Message'])  # print the error message
                dexit(1)

            ##Job submission
            print(job.submit(dirac))
            inputFileList = []
            j += 1
            #break #add break of only part1

    gLogger.notice("All done!")
    dexit(0)
def _createTrafo():
  """reads command line parameters, makes check and creates replication transformation"""
  clip = Params()
  clip.registerSwitches(Script)
  Script.parseCommandLine()
  if not clip.checkSettings(Script)['OK']:
    gLogger.error("ERROR: Missing settings")
    return 1
  for metaValue in clip.metaValues:
    resCreate = createDataTransformation(flavour=clip.flavour,
                                         targetSE=clip.targetSE,
                                         sourceSE=clip.sourceSE,
                                         metaKey=clip.metaKey,
                                         metaValue=metaValue,
                                         extraData=clip.extraData,
                                         extraname=clip.extraname,
                                         groupSize=clip.groupSize,
                                         tGroup=clip.groupName,
                                         plugin=clip.plugin,
                                         enable=clip.enable,
                                         )
    if not resCreate['OK']:
      gLogger.error("Failed to create Transformation", resCreate['Message'])
      return 1

  return 0


if __name__ == '__main__':
  dexit(_createTrafo())
  Script.parseCommandLine()
  
  from DIRAC import gLogger, exit as dexit
  
  from GlastDIRAC.ResourceStatusSystem.Client.SoftwareTagClient import SoftwareTagClient
  from DIRAC.ConfigurationSystem.Client.Helpers.Resources                import getQueues

  sw = SoftwareTagClient()
  mytag  = 'SomeTag'
  mysite = 'LCG.LAL.fr'
  
  #This is what the siteadmin does
  res = sw.addTagAtSite(mytag, mysite)
  if not res['OK']:
    gLogger.error(res['Message'])
    dexit(1)
  else:
    gLogger.notice("Added %s to %s" % (mytag, mysite))
    
  #This is the most common Call from Clients
  res = sw.getSitesForTag(mytag)
  if not res['OK']:
    gLogger.error(res['Message'])
  else:
    gLogger.notice("Sites: ", res['Value'])
  
  #Get the tags with valid status
  res = sw.getTagsWithStatus("New")
  if not res['OK']:
    gLogger.error(res['Message'])
  else:
    gLogger.notice("Tags with status 'New':", res['Value'])
from DIRAC import exit as dexit
from DIRAC import gLogger
Script.setUsageMessage( """
Remove the given file or a list of files from the File Catalog

Usage:
   %s <LFN | fileContainingLFNs>
""" % Script.scriptName )

Script.parseCommandLine()

from DIRAC.Core.Security.ProxyInfo import getProxyInfo
res = getProxyInfo()
if not res['OK']:
  gLogger.fatal( "Can't get proxy info", res['Message'] )
  dexit( 1 )
properties = res['Value'].get( 'groupProperties', [] )
if not 'FileCatalogManagement' in properties:
  gLogger.error( "You need to use a proxy from a group with FileCatalogManagement" )
  dexit( 5 )

from DIRAC.Resources.Catalog.FileCatalog import FileCatalog
fc = FileCatalog()
import os

args = Script.getPositionalArgs()

if len( args ) < 1:
  Script.showHelp()
  dexit( -1 )
else:
        break
    ov = OverlayInput()
    ov.setBXOverlay(60)
    ov.setGGToHadInt(3.2)
    ov.setNbSigEvtsPerJob(n_evts_per_job)
    ov.setBkgEvtType("gghad")
    ov.setDetectorModel("CLIC_ILD_CDR")

    res = j.append(ov)
    if not res['OK']:
        print(res['Message'])
        break
    ma2 = Marlin()
    ma2.setVersion("v0111Prod")
    ma2.setSteeringFile("clic_ild_cdr_steering_overlay.xml")
    ma2.getInputFromApp(mo)
    ma2.setOutputDstFile("mydst_ov_%s.slcio" % i)
    res = j.append(ma2)
    if not res['OK']:
        print(res['Message'])
        break
    j.setOutputSandbox(
        ["mydst_no_ov_%s.slcio" % i,
         "mydst_ov_%s.slcio" % i, "*.log"])
    j.setName("SingleElectron_%s" % i)
    j.setJobGroup("singleElectrons")

    j.submit(d)

dexit(0)
def doTheWhizardInstallation():
  """Do the instalation for new whizard version Copy libraries, create tarball,
  upload processList file add entry in configuration system

  """
  res = checkSLCVersion()
  if not res['OK']:
    gLogger.error(res['Message'])
    dexit(1)

  res = checkGFortranVersion()
  if not res['OK']:
    gLogger.error(res['Message'])
    dexit(1)

  cliParams = Params()
  cliParams.registerSwitches()
  Script.parseCommandLine( ignoreErrors= False)
  
  whizardResultFolder = cliParams.path
  platform = cliParams.platform
  whizard_version = cliParams.version
  appVersion = whizard_version
  beam_spectra_version = cliParams.beam_spectra

  if not whizardResultFolder or not whizard_version or not beam_spectra_version:
    Script.showHelp()
    dexit(2)
  
  from ILCDIRAC.Core.Utilities.ProcessList                     import ProcessList
  from DIRAC.ConfigurationSystem.Client.Helpers.Operations     import Operations 
  from DIRAC.Interfaces.API.DiracAdmin                         import DiracAdmin
  from ILCDIRAC.Core.Utilities.FileUtils                       import upload
  from DIRAC.DataManagementSystem.Client.DataManager           import DataManager
  diracAdmin = DiracAdmin()

  modifiedCS = False

  softwareSection = "/Operations/Defaults/AvailableTarBalls"
  processlistLocation = "ProcessList/Location"

  appName = "whizard"

  ops = Operations()
  path_to_process_list = ops.getValue(processlistLocation, "")
  if not path_to_process_list:
    gLogger.error("Could not find process list location in CS")
    dexit(2)
    
  gLogger.verbose("Getting process list from file catalog")
  datMan = DataManager()
  res = datMan.getFile(path_to_process_list)
  if not res['OK']:
    gLogger.error("Error while getting process list from storage")
    dexit(2)
  gLogger.verbose("done")

  ##just the name of the local file in current working directory
  processlist = os.path.basename(path_to_process_list)
  if not os.path.exists(processlist):
    gLogger.error("Process list does not exist locally")
    dexit(2)


  pl = ProcessList(processlist)
  
  startDir = os.getcwd()
  inputlist = {}
  os.chdir(whizardResultFolder)
  folderlist = os.listdir(whizardResultFolder)

  whiz_here = folderlist.count("whizard")
  if whiz_here == 0:
    gLogger.error("whizard executable not found in %s, please check" % whizardResultFolder)
    os.chdir(startDir)
    dexit(2)

  whizprc_here = folderlist.count("whizard.prc")
  if whizprc_here == 0:
    gLogger.error("whizard.prc not found in %s, please check" % whizardResultFolder)
    os.chdir(startDir)
    dexit(2)

  whizmdl_here = folderlist.count("whizard.mdl")
  if whizmdl_here == 0:
    gLogger.error("whizard.mdl not found in %s, please check" % whizardResultFolder)
    os.chdir(startDir)
    dexit(2)
   
    
  gLogger.verbose("Preparing process list")

  ## FIXME:: What is this doing exactly? Is this necessary? -- APS, JFS
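  ## (The loop below scans the whizard "*.template.in" files for their "decay_description"
  ##  and "process_id" lines and seeds the inputlist entries with the Detail, Model,
  ##  Generator and Restrictions information looked up in whizard.prc.)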
  for f in folderlist:
    if f.count(".in"):
      infile = open(f, "r")
      found_detail = False
      
      for line in infile:
        if line.count("decay_description"):
          currprocess = f.split(".template.in")[0] 
          inputlist[currprocess] = {}        
          inputlist[currprocess]["InFile"] = f.rstrip("~")
          inputlist[currprocess]["Detail"] = line.split("\"")[1]
          found_detail = True
        if line.count("process_id") and found_detail:
          process_id = line.split("\"")[1]
          inputlist[currprocess]["Model"] = ""
          inputlist[currprocess]["Generator"] = ""
          inputlist[currprocess]["Restrictions"] = ""
          for process in process_id.split():
            print "Looking for detail of process %s" % (process)
            process_detail = getDetailsFromPRC("whizard.prc", process)  
            inputlist[currprocess]["Model"] = process_detail["Model"]
            inputlist[currprocess]["Generator"] = process_detail["Generator"]
            if len(inputlist[currprocess]["Restrictions"]):
              inputlist[currprocess]["Restrictions"] = inputlist[currprocess]["Restrictions"] + ", " + process_detail["Restrictions"]
            else:
              inputlist[currprocess]["Restrictions"] = process_detail["Restrictions"]
      #if len(inputlist[currprocess].items()):
      #  inputlist.append(processdict)    
  ## END FIXME


  ##Update inputlist with what was found looking in the prc file
  processes = readPRCFile("whizard.prc")
  inputlist.update(processes)
  
  ##get from cross section files the cross sections for the processes in inputlist
  #Need full process list
  for f in folderlist:
    if f.count("cross_sections_"):
      crossfile = open(f, "r")
      for line in crossfile:
        line = line.rstrip().lstrip()
        if not len(line):
          continue
        if line[0] == "#" or line[0] == "!":
          continue
        if len(line.split()) < 2:
          continue
        currprocess = line.split()[0]
        if currprocess in inputlist:
          inputlist[currprocess]['CrossSection'] = line.split()[1]
  
  
  gLogger.notice("Preparing Tarball")

  ##Make a folder in the current directory of the user to store the whizard libraries, executable et al.
  localWhizardFolderRel = ("whizard" + whizard_version) # relative path
  localWhizardFolder = os.path.join(startDir, localWhizardFolderRel)

  if not os.path.exists(localWhizardFolder):
    os.makedirs(localWhizardFolder)
  
  localWhizardLibFolder = os.path.join(localWhizardFolder,'lib')
  if os.path.exists(localWhizardLibFolder):
    shutil.rmtree(localWhizardLibFolder)
  os.makedirs(localWhizardLibFolder) ##creates the lib folder

  whizardLibraries = getListOfLibraries(os.path.join(whizardResultFolder, "whizard"))
  copyLibsCall = ["rsync","-avzL"]
  for lib in whizardLibraries:
    copyLibsCall.append(lib)
  copyLibsCall.append(localWhizardLibFolder)
  copyProc = subprocess.Popen(copyLibsCall, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
  copyProc.communicate()  ## wait for the rsync copy to finish before the files are tarred

  for fileName in folderlist:
    shutil.copy(fileName, localWhizardFolder)

  ##Get the list of md5 sums for all the files in the folder to be tarred
  os.chdir( localWhizardFolder )
  subprocess.call(["find . -type f -exec md5sum {} > ../md5_checksum.md5 \\; && mv ../md5_checksum.md5 ."], shell=True)
  os.chdir(startDir)

  ##Create the Tarball
  gLogger.notice("Creating Tarball...")
  appTar = localWhizardFolder + ".tgz"
  myappTar = tarfile.open(appTar, "w:gz")
  myappTar.add(localWhizardFolderRel)
  myappTar.close()
  
  md5sum = md5.md5(open( appTar, 'r' ).read()).hexdigest()
  
  gLogger.notice("...Done")

  gLogger.notice("Registering new Tarball in CS")
  tarballurl = {}
  
  av_platforms = gConfig.getSections(softwareSection, [])
  if av_platforms['OK']:
    if platform not in av_platforms['Value']:
      gLogger.error("Platform %s unknown, available are %s." % (platform, ", ".join(av_platforms['Value'])))
      gLogger.error("If yours is missing add it in CS")
      dexit(255)
  else:
    gLogger.error("Could not find all platforms available in CS")
    dexit(255)
  
  av_apps = gConfig.getSections("%s/%s" % (softwareSection, platform), [])
  if not av_apps['OK']:
    gLogger.error("Could not find all applications available in CS")
    dexit(255)
  
  if appName.lower() in av_apps['Value']:
    versions = gConfig.getSections("%s/%s/%s" % (softwareSection, platform, appName.lower()), 
                                   [])
    if not versions['OK']:
      gLogger.error("Could not find all versions available in CS")
      dexit(255)
    if appVersion in versions['Value']:
      gLogger.error('Application %s %s for %s already in CS, nothing to do' % (appName.lower(), appVersion, platform))
      dexit(0)
    else:
      result = diracAdmin.csSetOption("%s/%s/%s/%s/TarBall" % (softwareSection, platform, appName.lower(), appVersion),
                                      os.path.basename(appTar))
      if result['OK']:
        modifiedCS = True
        tarballurl = gConfig.getOption("%s/%s/%s/TarBallURL" % (softwareSection, platform, appName.lower()), "")
        if len(tarballurl['Value']) > 0:
          res = upload(tarballurl['Value'], appTar)
          if not res['OK']:
            gLogger.error("Upload to %s failed" % tarballurl['Value'])
            dexit(255)
      result = diracAdmin.csSetOption("%s/%s/%s/%s/Md5Sum" % (softwareSection, platform, appName.lower(), appVersion),
                                      md5sum)
      if result['OK']:
        modifiedCS = True      
      result = diracAdmin.csSetOption("%s/%s/%s/%s/Dependencies/beam_spectra/version" % (softwareSection,
                                                                                         platform,
                                                                                         appName.lower(),
                                                                                         appVersion),
                                      beam_spectra_version)
      
  
  else:
    result = diracAdmin.csSetOption("%s/%s/%s/%s/TarBall" % (softwareSection, platform,
                                                             appName.lower(), appVersion),
                                    os.path.basename(appTar))
    if result['OK']:  
      modifiedCS = True
      tarballurl = gConfig.getOption("%s/%s/%s/TarBallURL" % (softwareSection, platform, appName.lower()),
                                     "")
      if len(tarballurl['Value']) > 0:
        res = upload(tarballurl['Value'], appTar)
        if not res['OK']:
          gLogger.error("Upload to %s failed" % tarballurl['Value'])
          dexit(255)

    result = diracAdmin.csSetOption("%s/%s/%s/%s/Md5Sum" % (softwareSection, platform, appName.lower(), appVersion),
                                    md5sum)
          
    result = diracAdmin.csSetOption("%s/%s/%s/%s/Dependencies/beam_spectra/version" % (softwareSection,
                                                                                       platform,
                                                                                       appName.lower(),
                                                                                       appVersion),
                                    beam_spectra_version)

  gLogger.verbose("Done uploading the tar ball")
  
  os.remove(appTar)

  #Set for all new processes the TarBallURL
  for process in inputlist.keys():
    inputlist[process]['TarBallCSPath'] = tarballurl['Value'] + os.path.basename(appTar)
  
  pl.updateProcessList(inputlist)

  pl.writeProcessList()
  
  raw_input("Do you want to upload the process list? Press ENTER to proceed or CTRL-C to abort!")

  pl.uploadProcessListToFileCatalog(path_to_process_list, appVersion)

  #Commit the changes if nothing has failed and the CS has been modified
  if modifiedCS:
    result = diracAdmin.csCommitChanges(False)
    gLogger.verbose(result)
  gLogger.notice('All done OK!')
  dexit(0)
from DIRAC import exit as dexit
from DIRAC import gLogger
Script.setUsageMessage("""
Remove the given file or a list of files from the File Catalog

Usage:
   %s <LFN | fileContainingLFNs>
""" % Script.scriptName)

Script.parseCommandLine()

from DIRAC.Core.Security.ProxyInfo import getProxyInfo
res = getProxyInfo()
if not res['OK']:
    gLogger.fatal("Can't get proxy info", res['Message'])
    dexit(1)
properties = res['Value'].get('groupProperties', [])
if 'FileCatalogManagement' not in properties:
    gLogger.error(
        "You need to use a proxy from a group with FileCatalogManagement")
    dexit(5)

from DIRAC.Resources.Catalog.FileCatalog import FileCatalog
fc = FileCatalog()
import os

args = Script.getPositionalArgs()

if len(args) < 1:
    Script.showHelp()
    dexit(-1)
def _getProductionSummary():
    clip = _Params()
    clip.registerSwitch()
    Script.parseCommandLine()
    from ILCDIRAC.Core.Utilities.HTML import Table
    from ILCDIRAC.Core.Utilities.ProcessList import ProcessList
    from DIRAC.TransformationSystem.Client.TransformationClient import TransformationClient
    from DIRAC.Resources.Catalog.FileCatalogClient import FileCatalogClient
    from DIRAC import gConfig, gLogger
    prod = clip.prod
    full_detail = clip.full_det
    fc = FileCatalogClient()

    processlist = gConfig.getValue('/LocalSite/ProcessListPath')
    prl = ProcessList(processlist)
    processesdict = prl.getProcessesDict()

    trc = TransformationClient()
    prodids = []
    if not prod:
        conddict = {}
        conddict['Status'] = clip.statuses
        if clip.ptypes:
            conddict['Type'] = clip.ptypes
        res = trc.getTransformations(conddict)
        if res['OK']:
            for transfs in res['Value']:
                prodids.append(transfs['TransformationID'])
    else:
        prodids.extend(prod)

    metadata = []

    gLogger.info("Will run on prods %s" % str(prodids))

    for prodID in prodids:
        if prodID < clip.minprod:
            continue
        meta = {}
        meta['ProdID'] = prodID
        res = trc.getTransformation(str(prodID))
        if not res['OK']:
            gLogger.error("Error getting transformation %s" % prodID)
            continue
        prodtype = res['Value']['Type']
        proddetail = res['Value']['Description']
        if prodtype == 'MCReconstruction' or prodtype == 'MCReconstruction_Overlay':
            meta['Datatype'] = 'DST'
        elif prodtype == 'MCGeneration':
            meta['Datatype'] = 'gen'
        elif prodtype == 'MCSimulation':
            meta['Datatype'] = 'SIM'
        elif prodtype in ['Split', 'Merge']:
            gLogger.warn("Invalid query for %s productions" % prodtype)
            continue
        else:
            gLogger.error("Unknown production type %s" % prodtype)
            continue
        res = fc.findFilesByMetadata(meta)
        if not res['OK']:
            gLogger.error(res['Message'])
            continue
        lfns = res['Value']
        nb_files = len(lfns)
        path = ""
        if not len(lfns):
            gLogger.warn("No files found for prod %s" % prodID)
            continue
        path = os.path.dirname(lfns[0])
        res = fc.getDirectoryUserMetadata(path)
        if not res['OK']:
            gLogger.warn('No meta data found for %s' % path)
            continue
        dirmeta = {}
        dirmeta['proddetail'] = proddetail
        dirmeta['prodtype'] = prodtype
        dirmeta['nb_files'] = nb_files
        dirmeta.update(res['Value'])
        lumi = 0.
        nbevts = 0
        addinfo = None
        files = 0
        xsec = 0.0
        if not full_detail:
            lfn = lfns[0]
            info = _getFileInfo(lfn)
            nbevts = info[1] * len(lfns)
            lumi = info[0] * len(lfns)
            addinfo = info[2]
            if 'xsection' in addinfo:
                if 'sum' in addinfo['xsection']:
                    if 'xsection' in addinfo['xsection']['sum']:
                        xsec += addinfo['xsection']['sum']['xsection']
                        files += 1
        else:
            for lfn in lfns:
                info = _getFileInfo(lfn)
                lumi += info[0]
                nbevts += info[1]
                addinfo = info[2]
                if 'xsection' in addinfo:
                    if 'sum' in addinfo['xsection']:
                        if 'xsection' in addinfo['xsection']['sum']:
                            xsec += addinfo['xsection']['sum']['xsection']
                            files += 1
        if not lumi:
            xsec = 0
            files = 0
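            # no luminosity stored in the files themselves: fall back to their deepest
            # ancestors (via getFileAncestors) to recover luminosity and cross section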
            depthDict = {}
            depSet = set()
            res = fc.getFileAncestors(lfns, [1, 2, 3, 4])
            temp_ancestorlist = []
            if res['OK']:
                for lfn, ancestorsDict in res['Value']['Successful'].items():
                    for ancestor, dep in ancestorsDict.items():
                        depthDict.setdefault(dep, [])
                        if ancestor not in temp_ancestorlist:
                            depthDict[dep].append(ancestor)
                            depSet.add(dep)
                            temp_ancestorlist.append(ancestor)
            depList = list(depSet)
            depList.sort()
            for ancestor in depthDict[depList[-1]]:
                info = _getFileInfo(ancestor)
                lumi += info[0]
                addinfo = info[2]
                if 'xsection' in addinfo:
                    if 'sum' in addinfo['xsection']:
                        if 'xsection' in addinfo['xsection']['sum']:
                            xsec += addinfo['xsection']['sum']['xsection']
                            files += 1
        if xsec and files:
            xsec /= files
            dirmeta['CrossSection'] = xsec
        else:
            dirmeta['CrossSection'] = 0.0

        if nbevts:
            dirmeta['NumberOfEvents'] = nbevts
        #if not lumi:
        #  dirmeta['Luminosity']=0
        #  dirmeta['CrossSection']=0
        #else:
        #  if nbevts:
        #    dirmeta['CrossSection']=nbevts/lumi
        #  else:
        #    dirmeta['CrossSection']=0
        #if addinfo:
        #  if 'xsection' in addinfo:
        #    if 'sum' in addinfo['xsection']:
        #      if 'xsection' in addinfo['xsection']['sum']:
        #        dirmeta['CrossSection']=addinfo['xsection']['sum']['xsection']
        if 'NumberOfEvents' not in dirmeta:
            dirmeta['NumberOfEvents'] = 0
        #print processesdict[dirmeta['EvtType']]
        dirmeta['detail'] = ''
        detail = dirmeta['EvtType']
        if dirmeta['EvtType'] in processesdict and 'Detail' in processesdict[dirmeta['EvtType']]:
            detail = processesdict[dirmeta['EvtType']]['Detail']

        if not prodtype == 'MCGeneration':
            res = trc.getTransformationInputDataQuery(str(prodID))
            if res['OK']:
                if 'ProdID' in res['Value']:
                    dirmeta['MomProdID'] = res['Value']['ProdID']
        if 'MomProdID' not in dirmeta:
            dirmeta['MomProdID'] = 0
        dirmeta['detail'] = _translate(detail)

        metadata.append(dirmeta)

    detectors = {}
    detectors['ILD'] = {}
    corres = {
        "MCGeneration": 'gen',
        "MCSimulation": 'SIM',
        "MCReconstruction": "REC",
        "MCReconstruction_Overlay": "REC"
    }
    detectors['ILD']['SIM'] = []
    detectors['ILD']['REC'] = []
    detectors['SID'] = {}
    detectors['SID']['SIM'] = []
    detectors['SID']['REC'] = []
    detectors['sid'] = {}
    detectors['sid']['SIM'] = []
    detectors['sid']['REC'] = []
    detectors['gen'] = []
    for channel in metadata:
        if 'DetectorType' not in channel:
            detectors['gen'].append(
                (channel['detail'], channel['Energy'], channel['ProdID'],
                 channel['nb_files'],
                 channel['NumberOfEvents'] / channel['nb_files'],
                 channel['NumberOfEvents'], channel['CrossSection'],
                 str(channel['proddetail'])))
        else:
            if not channel['DetectorType'] in detectors:
                gLogger.error("This is unknown detector",
                              channel['DetectorType'])
                continue
            detectors[channel['DetectorType']][corres[
                channel['prodtype']]].append(
                    (channel['detail'], channel['Energy'],
                     channel['DetectorType'], channel['ProdID'],
                     channel['nb_files'],
                     channel['NumberOfEvents'] / channel['nb_files'],
                     channel['NumberOfEvents'], channel['CrossSection'],
                     channel['MomProdID'], str(channel['proddetail'])))

    with open("tables.html", "w") as of:
        of.write("""<!DOCTYPE html>
<html>
 <head>
<title> Production summary </title>
</head>
<body>
""")
        if len(detectors['gen']):
            of.write("<h1>gen prods</h1>\n")
            table = Table(header_row=('Channel', 'Energy', 'ProdID', 'Tasks',
                                      'Average Evts/task', 'Statistics',
                                      'Cross Section (fb)', 'Comment'))
            for item in detectors['gen']:
                table.rows.append(item)
            of.write(str(table))
            gLogger.info("Gen prods")
            gLogger.info(str(table))

        if len(detectors['ILD']):
            of.write("<h1>ILD prods</h1>\n")
            for ptype in detectors['ILD'].keys():
                if len(detectors['ILD'][ptype]):
                    of.write("<h2>%s</h2>\n" % ptype)
                    table = Table(header_row=('Channel', 'Energy', 'Detector',
                                              'ProdID', 'Number of Files',
                                              'Events/File', 'Statistics',
                                              'Cross Section (fb)',
                                              'Origin ProdID', 'Comment'))
                    for item in detectors['ILD'][ptype]:
                        table.rows.append(item)
                    of.write(str(table))
                    gLogger.info("ILC CDR prods %s" % ptype)
                    gLogger.info(str(table))

        if len(detectors['SID']):
            of.write("<h1>SID prods</h1>\n")
            for ptype in detectors['SID'].keys():
                if len(detectors['SID'][ptype]):
                    of.write("<h2>%s</h2>\n" % ptype)
                    table = Table(header_row=('Channel', 'Energy', 'Detector',
                                              'ProdID', 'Number of Files',
                                              'Events/File', 'Statistics',
                                              'Cross Section (fb)',
                                              'Origin ProdID', 'Comment'))
                    for item in detectors['SID'][ptype]:
                        table.rows.append(item)
                    of.write(str(table))
                    gLogger.info("SID CDR prods %s" % ptype)
                    gLogger.info(str(table))

        if len(detectors['sid']):
            of.write("<h1>sid dbd prods</h1>\n")
            for ptype in detectors['sid'].keys():
                if len(detectors['sid'][ptype]):
                    of.write("<h2>%s</h2>\n" % ptype)
                    table = Table(header_row=('Channel', 'Energy', 'Detector',
                                              'ProdID', 'Number of Files',
                                              'Events/File', 'Statistics',
                                              'Cross Section (fb)',
                                              'Origin ProdID', 'Comment'))
                    for item in detectors['sid'][ptype]:
                        table.rows.append(item)
                    of.write(str(table))
                    gLogger.info("sid DBD prods %s" % ptype)
                    gLogger.info(str(table))

        of.write("""
</body>
</html>
""")
    gLogger.notice("Check ./tables.html in any browser for the results")
    dexit(0)
    #define your applications
    gen_app1 = GenericApplication()
    gen_app1.setScript("hello.sh")
    gen_app1.setArguments('something or another')
    gen_app1.setOutputFile("something.ext")

    gen_app2 = GenericApplication()
    gen_app2.setScript("something.sh")
    gen_app2.setOutputFile("other.ext")

    #define the job
    j = UserJob()
    j.setName("DummyJob")
    j.setCPUTime(1000)

    res = j.append(gen_app1)
    if not res['OK']:
        gLogger.error(res['Message'])
        dexit(1)
    res = j.append(gen_app2)
    if not res['OK']:
        gLogger.error(res['Message'])
        dexit(1)
    j.setOutputSandbox(["*.log", "something.ext", "other.ext"])

    #submit it
    res = j.submit(d)
    if not res['OK']:
        gLogger.error(res['Message'])
        dexit(1)
    dexit(0)
Usage:
   %s <LFN | fileContainingLFNs> <SE>
""" % Script.scriptName)

Script.parseCommandLine()

from DIRAC.ConfigurationSystem.Client.Helpers.Operations import Operations
allowUsers = Operations().getValue("DataManagement/AllowUserReplicaManagement",
                                   False)

from DIRAC.Core.Security.ProxyInfo import getProxyInfo
res = getProxyInfo()
if not res['OK']:
    gLogger.fatal("Can't get proxy info", res['Message'])
    dexit(1)
properties = res['Value'].get('groupProperties', [])

if not allowUsers:
    if 'FileCatalogManagement' not in properties:
        gLogger.error(
            "You need to use a proxy from a group with FileCatalogManagement")
        dexit(5)

from DIRAC.DataManagementSystem.Client.DataManager import DataManager
dm = DataManager()
import os
import sys
args = Script.getPositionalArgs()
if len(args) < 2:
    Script.showHelp()
    if "JobID" not in specialOptions:
        my_dict = {}
        #my_dict['Status']=['Matched','Staging','Completed','Done','Failed','Rescheduled','Stalled','Waiting','Running','Checking'] # monitor all states
        my_dict['Owner'] = [user]
        local_time = datetime.datetime.utcnow()
        timedelta = local_time - datetime.timedelta(seconds=86400)
        if "dayspassed" in specialOptions:
            timedelta = local_time - datetime.timedelta(
                seconds=float(specialOptions["dayspassed"]) * 3600)
        res = w.getJobs(my_dict, timedelta.strftime('%Y-%m-%d %H:%M:%S'))

        if not res['OK']:
            gLogger.error("Could not get list of running jobs.",
                          res['Message'])
            dexit(1)
        job_list = res['Value']
    else:
        job_list = specialOptions["JobID"].split(",")
        doLogging = True
    status = {}
    sites = {}

    for chunk in breakListIntoChunks(job_list, 1000):
        res = d.getJobSummary(chunk)
        if not res['OK']:
            gLogger.error("Could not get status of job list chunk,",
                          res['Message'])
            if do_xml:
                dexit(1)
            continue
def _uploadGenFiles():
  """uploads the generator files"""
  clip = _Params()
  clip.registerSwitches()
  Script.parseCommandLine()

  
  from DIRAC import gLogger, exit as dexit

  if not clip.dir:
    gLogger.error('You need to set the path')
    Script.showHelp()
    dexit(1)
  if not clip.storageElement:
    gLogger.error('You need a storage element')
    Script.showHelp()
    dexit(1)
  
  for key in MANDATORY_KEYS:
    if key not in clip.fmeta:
      gLogger.error("Not all mandatory meta data defined, please check and add key: ", key)
      Script.showHelp()
      dexit(1)
    
  # resolve the input files
  flist = []
  if os.path.isdir(clip.dir):
    flistd = os.listdir(clip.dir)
    for filename in flistd:
      if filename.count(".stdhep"):
        flist.append( os.path.join(clip.dir, filename) )
  elif os.path.isfile(clip.dir):
    flist.append(clip.dir)
  else:
    gLogger.error("%s is not a file nor a directory" % clip.dir)
    dexit(1)  
  
  gLogger.notice("Will eventually upload %s file(s)" % len(flist))
    
  from DIRAC.Core.Utilities.PromptUser import promptUser
    
  from DIRAC.ConfigurationSystem.Client.Helpers.Operations import Operations
  basepath = Operations().getValue('Production/ILC_ILD/BasePath','')
  if not basepath:
    gLogger.error('Failed to contact CS, please try again')
    dexit(1)
  
  basepath = "/".join(basepath.split("/")[:-2])+"/" #need to get rid of the ild/ part at the end
    
  finalpath = os.path.join(basepath, 'generated', clip.energy+"-"+clip.machineParams, clip.evtclass, str(clip.fmeta['GenProcessID']))
  gLogger.notice("Will upload the file(s) under %s" % finalpath)
  if not clip.force:
    res = promptUser('Continue?', ['y','n'], 'n')
    if not res['OK']:
      gLogger.error(res['Message'])
      dexit(1)
    if not res['Value'].lower()=='y':
      dexit(0)
  
  dirmeta = []
  dirmeta.append({'path':os.path.join(basepath, 'generated'), 'meta':{'Datatype':'gen'}})
  dirmeta.append({'path':os.path.join(basepath, 'generated', clip.energy+"-"+clip.machineParams), 'meta':{'Energy':clip.energy, 'MachineParams':clip.machineParams}})
  dirmeta.append({'path':os.path.join(basepath, 'generated', clip.energy+"-"+clip.machineParams, clip.evtclass), 'meta':{'EvtClass':clip.evtclass }})
  dirmeta.append({'path':finalpath, 'meta': {'EvtType':clip.evttype ,'Luminosity':clip.lumi, 'ProcessID': clip.fmeta['GenProcessID']} })
  
  final_fname_base = 'E'+clip.energy+"-"+clip.machineParams+".P"+clip.fmeta['GenProcessName']+".G"+clip.fmeta['ProgramNameVersion'] + "."+clip.particle1+clip.pol1+"."+clip.particle2+clip.pol2+".I"+str(clip.fmeta['GenProcessID'])
  gLogger.notice("Final file name(s) will be %s where XX will be replaced by file number, and ext by the input file extension" % (final_fname_base+".XX.ext") )
  if not clip.force:
    res = promptUser('Continue?', ['y','n'], 'n')
    if not res['OK']:
      gLogger.error(res['Message'])
      dexit(1)
    if not res['Value'].lower()=='y':
      dexit(0)    

  
  from DIRAC.DataManagementSystem.Client.DataManager import DataManager
  from DIRAC.Resources.Catalog.FileCatalogClient import FileCatalogClient
  fc = FileCatalogClient()
  
  for pathdict in dirmeta:
    res = fc.createDirectory(pathdict['path'])
    if not res['OK']:
      gLogger.error("Could not create this directory in FileCatalog, abort:", pathdict['path'] )
      dexit(1)

    res = fc.setMetadata(pathdict['path'], pathdict['meta'])
    if not res['OK']:
      gLogger.error( "Failed to set meta data %s to %s\n" %(pathdict['meta'], pathdict['path']), res['Message'] )

  datMan = DataManager()
  for filename in flist:
    fnum = filename.split(".")[-2]
    fext = filename.split(".")[-1]
    final_fname = final_fname_base + '.' + fnum + "." + fext
    gLogger.notice("Uploading %s to" % filename, finalpath+"/"+final_fname)
    if not clip.force:
      res = promptUser('Continue?', ['y','n'], 'n')
      if not res['OK']:
        gLogger.error(res['Message'])
        break
      if not res['Value'].lower()=='y':
        break    

    res = datMan.putAndRegister(finalpath+"/"+final_fname, filename, clip.storageElement)
    if not res['OK']:
      gLogger.error("Failed to upload %s:" % filename, res['Message'])
      continue
    res = fc.setMetadata(finalpath+"/"+final_fname, clip.fmeta)
    if not res['OK']:
      gLogger.error("Failed setting the metadata to %s:" % filename, res['Message'])
      
  dexit(0)
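The directory metadata registered above is what makes the uploaded generator files findable later. A minimal sketch of the corresponding catalog query; the metadata values used here are illustrative only:

def _findUploadedGenFiles():
  """Hedged sketch: look up generator files through the directory metadata set
  by _uploadGenFiles. The metadata values below are illustrative only."""
  from DIRAC import gLogger, exit as dexit
  from DIRAC.Resources.Catalog.FileCatalogClient import FileCatalogClient
  fc = FileCatalogClient()
  res = fc.findFilesByMetadata({'Datatype': 'gen',
                                'Energy': '500',
                                'MachineParams': 'TDR_ws',
                                'ProcessID': 106001})
  if not res['OK']:
    gLogger.error("Metadata query failed:", res['Message'])
    dexit(1)
  gLogger.notice("Found %s matching file(s)" % len(res['Value']))
  dexit(0)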
Example #41
def _getInfo():
  """gets info about transformation"""
  clip = _Params()
  clip.registerSwitches()
  Script.parseCommandLine()

  if not clip.prodid and not clip.filename:
    Script.showHelp()
    dexit(1)

  from DIRAC import gLogger
  import os

  from DIRAC.TransformationSystem.Client.TransformationClient import TransformationClient
  tc = TransformationClient()

  from DIRAC.Resources.Catalog.FileCatalogClient import FileCatalogClient
  fc = FileCatalogClient()
  fmeta = {}
  trans = None
  info = []

  if clip.prodid:
    res = tc.getTransformation(clip.prodid)
    if not res['OK']:
      gLogger.error(res['Message'])
      dexit(1)
    trans = res['Value']
    res = tc.getTransformationInputDataQuery( clip.prodid )
    if res['OK']:
      trans['InputDataQuery'] = res['Value']
    res = tc.getAdditionalParameters ( clip.prodid )
    if res['OK']:
      trans['AddParams'] = res['Value']
    # collect the directory and file metadata associated with this production
    res1 = fc.findDirectoriesByMetadata({'ProdID':clip.prodid})
    if res1['OK'] and len(res1['Value'].values()):
      gLogger.verbose("Found %i directory matching the metadata" % len(res1['Value'].values()) )
      for dirs in res1['Value'].values():
        res = fc.getDirectoryUserMetadata(dirs)
        if res['OK']:
          fmeta.update(res['Value'])
        else:
          gLogger.error("Failed to get metadata for %s, SKIPPING" % dirs)
          continue
        res = fc.listDirectory(dirs)
        if not res['OK']:
          continue
        content = res['Value']['Successful'][dirs]
        if content["Files"]:
          for f_ex in content["Files"].keys():
            res = fc.getFileUserMetadata(f_ex)
            if res['OK']:
              fmeta.update(res['Value'])
              break

    # at this point both trans and fmeta are available
    info.append("")
    info.append("Production %s has the following parameters:" % trans['TransformationID'])
    info.extend(_createTransfoInfo(trans))

    if fmeta:
      info.append('The files created by this production have the following metadata:')
      info.extend(_createFileInfo(fmeta))
      info.append("It's possible that some meta data was not brought back,")
      info.append("in particular file level metadata, so check some individual files")

  if clip.filename:
    pid = ""
    if clip.filename.count("/"):
      fpath = os.path.dirname(clip.filename)
      res = fc.getDirectoryUserMetadata(fpath)
      if not res['OK']:
        gLogger.error(res['Message'])
        dexit(0)
      fmeta.update(res['Value'])
      res = fc.getFileUserMetadata(clip.filename)
      if not res['OK']:
        gLogger.error(res['Message'])
        dexit(1)
      fmeta.update(res['Value'])
      if 'ProdID' in fmeta:
        pid = str(fmeta['ProdID'])
      res = fc.getFileAncestors([clip.filename], 1)
      if res["OK"]:
        for dummy_lfn,ancestorsDict in res['Value']['Successful'].items():
          if ancestorsDict.keys():
            fmeta["Ancestors"] = ancestorsDict.keys()
      res = fc.getFileDescendents([clip.filename], 1)
      if res["OK"]:
        for dummy_lfn,descendDict in res['Value']['Successful'].items():
          if descendDict.keys():
            fmeta['Descendants'] = descendDict.keys()
    else:
      ext = clip.filename.split(".")[-1]
      fitems = []
      for i in clip.filename.split('.')[:-1]:
        fitems.extend(i.split('_'))
      pid = ''
      if ext == 'stdhep':
        pid = fitems[fitems.index('gen')+1]
      if ext == 'slcio':
        if 'rec' in fitems:
          pid = fitems[fitems.index('rec')+1]
        elif 'dst' in fitems:
          pid = fitems[fitems.index('dst')+1]
        elif 'sim' in fitems:
          pid = fitems[fitems.index('sim')+1]
        else:
          gLogger.error("This file does not follow the ILCDIRAC production conventions!")
          gLogger.error("Please specify a prod ID directly or check the file.")
          dexit(0)
      if not pid:
        gLogger.error("This file does not follow the ILCDIRAC production conventions!")
        gLogger.error("Please specify a prod ID directly or check the file.")
        dexit(0)
      # the task ID follows the prod ID in the file name tokens, so take the next item
      tid = fitems[fitems.index(pid)+1]
      last_folder = str(int(tid)/1000).zfill(3)
      res = fc.findDirectoriesByMetadata({'ProdID':int(pid)})
      if not res['OK']:
        gLogger.error(res['Message'])
        dexit(1)
      dir_ex = res['Value'].values()[0]
      fpath = ""
      if int(dir_ex.split("/")[-1]) == int(pid):
        fpath = dir_ex+last_folder+"/"
      elif int(dir_ex.split("/")[-2]) == int(pid):
        fpath = "/".join(dir_ex.split('/')[:-2])+"/"+pid.zfill(8)+"/"+last_folder+"/"
      else:
        gLogger.error('Path does not follow conventions, will not get file family')

      if fpath:
        fpath += clip.filename
        res = fc.getFileAncestors([fpath], 1)
        if res["OK"]:
          for dummy_lfn,ancestorsDict in res['Value']['Successful'].items():
            fmeta["Ancestors"] = ancestorsDict.keys()
        res = fc.getFileDescendents([fpath], 1)
        if res["OK"]:
          for dummy_lfn,descendDict in res['Value']['Successful'].items():
            fmeta['Descendants'] = descendDict.keys()

      res = fc.getDirectoryUserMetadata(dir_ex)
      if not res['OK']:
        gLogger.error(res['Message'])
      else:
        fmeta.update(res['Value'])
    res = tc.getTransformation(pid)
    if not res['OK']:
      gLogger.error(res['Message'])
      gLogger.error('Will proceed anyway')
    else:
      trans = res['Value']
      res = tc.getTransformationInputDataQuery( pid )
      if res['OK']:
        trans['InputDataQuery'] = res['Value']
      res = tc.getAdditionalParameters ( pid )
      if res['OK']:
        trans['AddParams'] = res['Value']
    info.append("")
    info.append("Input file has the following properties:")
    info.extend(_createFileInfo(fmeta))
    info.append("")
    info.append('It was created with the production %s:' % pid)
    if trans:
      info.extend(_createTransfoInfo(trans))

  gLogger.notice("\n".join(info))

  dexit(0)
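The file-name branch above relies on the ILCDIRAC production naming convention: after splitting the name on '.' and '_', the prod ID is the token that follows 'gen'/'sim'/'rec'/'dst' and the task ID is the token that follows the prod ID. A standalone sketch of that parsing on a made-up name:

def _prodAndTaskFromName(filename):
  """Hedged sketch of the naming convention used in _getInfo; the example in the
  comment below is made up and only illustrates the token layout."""
  fitems = []
  for part in filename.split('.')[:-1]:
    fitems.extend(part.split('_'))
  for marker in ('gen', 'sim', 'rec', 'dst'):
    if marker in fitems:
      pid = fitems[fitems.index(marker) + 1]
      tid = fitems[fitems.index(pid) + 1]
      return pid, tid
  return None, None

# e.g. _prodAndTaskFromName("E500.Pwhatever_rec_1234_12_003.slcio") gives ('1234', '12'),
# and the storage sub-folder would then be str(int('12')/1000).zfill(3) == '000'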