Example #1
  def __initLogger( self, componentName, logSection ):
    gLogger.initialize( componentName, logSection )
    if self.__debugMode == 1:
      gLogger.setLevel( "VERBOSE" )
    elif self.__debugMode == 2:
      gLogger.setLevel( "VERBOSE" )
      gLogger.showHeaders( True )
    elif self.__debugMode >= 3:
      gLogger.setLevel( "DEBUG" )
      gLogger.showHeaders( True )
Example #2
  def setUp( self ):
    """c'tor

    :param self: self reference
    """
    gLogger.showHeaders( True )
    self.log = gLogger.getSubLogger( self.__class__.__name__ )
    self.processPool = ProcessPool( 4, 8, 8,
                                    poolCallback = self.poolCallback, 
                                    poolExceptionCallback = self.poolExceptionCallback )
    self.processPool.daemonize()
Example #3
  def copy_file(self, filename, source_se, dest_se, dry_run=True):
    """
    copies a file from one SE to another and registers it in
    the DIRAC file catalogue
    """
    res = self.__rpcclient.getReplicas(filename, False)
    if not res["OK"]:
      print "Could not get replica status for %s" % filename
      return False
  
    ses = res['Value']['Successful'][filename].keys()
    
    if source_se not in ses:
      # print "File %s not at source SE" % filename
      return False
    
    if (source_se in ses) and (dest_se not in ses):
      print "%s" % filename
      if not dry_run:
        res = self.__dm.replicateAndRegister(filename, dest_se, source_se)
        if not res['OK']:
          print "Replicate and register failed for: %s" % filename
          print res
          sleep(5)
          print "Trying again to register %s" %filename
          gLogger.setLevel( "DEBUG" )
          gLogger.showHeaders( True )
          res = self.__dm.replicateAndRegister(filename, dest_se, source_se)
          gLogger.setLevel( "INFO" )
          gLogger.showHeaders( False )
          if not res['OK']:
            print "Replicate and register failed again for: %s" % filename
            self.__badfiles.add(filename)
          return False
      return True

    # file already exists on destination SE
    return False
Example #4
import os
import sys

import DIRAC
from DIRAC.Core.Base import Script

# Register workflow parameter switch
Script.registerSwitch('p:', 'parameter=', 'Parameters that are passed directly to the workflow')
Script.parseCommandLine()

# from DIRAC.Core.Workflow.Parameter import *
from DIRAC import gLogger
from DIRAC.Core.Workflow.Workflow import fromXMLFile
from DIRAC.WorkloadManagementSystem.Client.JobReport import JobReport
from DIRAC.AccountingSystem.Client.DataStoreClient import DataStoreClient
from DIRAC.RequestManagementSystem.Client.Request import Request

# Forcing the current directory to be the first in the PYTHONPATH
sys.path.insert(0, os.path.realpath('.'))
gLogger.showHeaders(True)


def jobexec(jobxml, wfParameters):
  jobfile = os.path.abspath(jobxml)
  if not os.path.exists(jobfile):
    gLogger.warn('Path to specified workflow %s does not exist' % (jobfile))
    sys.exit(1)
  workflow = fromXMLFile(jobfile)
  gLogger.debug(workflow)
  code = workflow.createCode()
  gLogger.debug(code)
  jobID = 0
  if 'JOBID' in os.environ:
    jobID = os.environ['JOBID']
    gLogger.info('DIRAC JobID %s is running at site %s' % (jobID, DIRAC.siteName()))
Example #5
  def setUp( self ):
    gLogger.showHeaders( True )
    self.log = gLogger.getSubLogger( self.__class__.__name__ )
    self.processPool = ProcessPool( 4, 8, 8 )
    self.processPool.daemonize()
Example #6
import os
import sys

from DIRAC.Core.Base import Script

# Register workflow parameter switch
Script.registerSwitch('p:', 'parameter=',
                      'Parameters that are passed directly to the workflow')
Script.parseCommandLine()

# from DIRAC.Core.Workflow.Parameter import *
from DIRAC import gLogger
from DIRAC.Core.Workflow.Workflow import fromXMLFile
from DIRAC.WorkloadManagementSystem.Client.JobReport import JobReport
from DIRAC.AccountingSystem.Client.DataStoreClient import DataStoreClient
from DIRAC.RequestManagementSystem.Client.Request import Request

# Forcing the current directory to be the first in the PYTHONPATH
sys.path.insert(0, os.path.realpath('.'))
gLogger.showHeaders(True)


def jobexec(jobxml, wfParameters):
    jobfile = os.path.abspath(jobxml)
    if not os.path.exists(jobfile):
        gLogger.warn('Path to specified workflow %s does not exist' %
                     (jobfile))
        sys.exit(1)
    workflow = fromXMLFile(jobfile)
    gLogger.debug(workflow)
    code = workflow.createCode()
    gLogger.debug(code)
    jobID = 0
    if 'JOBID' in os.environ:
        jobID = os.environ['JOBID']
Example #7
in the DIRAC file catalogue
"""
from DIRAC.Core.Base import Script
Script.initialize()

import sys
import os
import getopt
from time import sleep

from DIRAC.Core.DISET.RPCClient import RPCClient
from DIRAC.DataManagementSystem.Client.DataManager import DataManager
from DIRAC.Core.Security.ProxyInfo import getProxyInfo
from DIRAC import gLogger
gLogger.setLevel( "INFO" )
gLogger.showHeaders( False )


class RecursiveCp(object):
  def __init__(self):
    self.__rpcclient = RPCClient( "DataManagement/FileCatalog" )
    self.__dm = DataManager()
    self.__n_files = 0
    self.__space_copied = 0L
    self.__badfiles = set()

  def search_directory(self, directory_path, source_se, dest_se, dry_run=True):
    """
    finds all files and subdirectories in a directory
    """
    print "Searching directory: %s" % directory_path
Example #8
def processPool():
    gLogger.showHeaders(True)
    log = gLogger.getSubLogger("TaskCallbacksTests")
    processPool = ProcessPool(4, 8, 8)
    processPool.daemonize()
    yield processPool
Example #9
import os
import sys

import DIRAC
from DIRAC.Core.Base import Script


def main():
    # Register workflow parameter switch
    Script.registerSwitch(
        "p:", "parameter=",
        "Parameters that are passed directly to the workflow")
    # Registering arguments will automatically add their description to the help menu
    Script.registerArgument(
        "jobXMLfile: specify path to the Job XML file description")
    Script.parseCommandLine()

    # from DIRAC.Core.Workflow.Parameter import *
    from DIRAC import gLogger
    from DIRAC.Core.Workflow.Workflow import fromXMLFile
    from DIRAC.Core.Utilities.Proxy import executeWithoutServerCertificate
    from DIRAC.WorkloadManagementSystem.Client.JobReport import JobReport
    from DIRAC.AccountingSystem.Client.DataStoreClient import DataStoreClient
    from DIRAC.RequestManagementSystem.Client.Request import Request

    # Forcing the current directory to be the first in the PYTHONPATH
    sys.path.insert(0, os.path.realpath("."))
    gLogger.showHeaders(True)

    @executeWithoutServerCertificate
    def jobexec(jobxml, wfParameters):
        jobfile = os.path.abspath(jobxml)
        if not os.path.exists(jobfile):
            gLogger.warn("Path to specified workflow %s does not exist" %
                         (jobfile))
            sys.exit(1)
        workflow = fromXMLFile(jobfile)
        gLogger.debug(workflow)
        code = workflow.createCode()
        gLogger.debug(code)
        jobID = 0
        if "JOBID" in os.environ:
            jobID = os.environ["JOBID"]
            gLogger.info("DIRAC JobID %s is running at site %s" %
                         (jobID, DIRAC.siteName()))

        workflow.addTool("JobReport", JobReport(jobID))
        workflow.addTool("AccountingReport", DataStoreClient())
        workflow.addTool("Request", Request())

        # Propagate the command line parameters to the workflow if any
        for pName, pValue in wfParameters.items():
            workflow.setValue(pName, pValue)

        # Propagate the command line parameters to the workflow module instances of each step
        for stepdefinition in workflow.step_definitions.values():
            for moduleInstance in stepdefinition.module_instances:
                for pName, pValue in wfParameters.items():
                    if moduleInstance.parameters.find(pName):
                        moduleInstance.parameters.setValue(pName, pValue)

        return workflow.execute()

    positionalArgs = Script.getPositionalArgs()
    if len(positionalArgs) != 1:
        gLogger.debug("Positional arguments were %s" % (positionalArgs))
        DIRAC.abort(1, "Must specify the Job XML file description")

    if "JOBID" in os.environ:
        gLogger.info("JobID: %s" % (os.environ["JOBID"]))

    jobXMLfile = positionalArgs[0]
    parList = Script.getUnprocessedSwitches()
    parDict = {}
    for switch, parameter in parList:
        if switch == "p":
            name, value = parameter.split("=")
            value = value.strip()

            # The comma separated list in curly brackets is interpreted as a list
            if value.startswith("{"):
                value = value[1:-1].replace('"', "").replace(" ",
                                                             "").split(",")
                value = ";".join(value)

            parDict[name] = value

    gLogger.debug("PYTHONPATH:\n%s" % ("\n".join(sys.path)))
    jobExec = jobexec(jobXMLfile, parDict)
    if not jobExec["OK"]:
        gLogger.debug("Workflow execution finished with errors, exiting")
        if jobExec["Errno"]:
            os._exit(jobExec["Errno"])
        else:
            os._exit(1)
    else:
        gLogger.debug("Workflow execution successful, exiting")
        # dirac_jobexec might interact with ARC library which cannot be closed using a simple sys.exit(0)
        # See https://bugzilla.nordugrid.org/show_bug.cgi?id=4022 for further details
        os._exit(0)
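
The snippets above all exercise the same small gLogger surface: gLogger.showHeaders(True) prefixes every message with a timestamp and component header, gLogger.setLevel(...) controls the verbosity threshold, and gLogger.getSubLogger(...) creates a named sub-logger for a component or test case. A minimal standalone sketch of that pattern, assuming only a working DIRAC installation (the component name "MyComponent" is illustrative):

from DIRAC import gLogger

gLogger.setLevel("DEBUG")    # raise verbosity, as Examples #1 and #3 do
gLogger.showHeaders(True)    # prepend timestamp/system headers to every line

# named sub-logger, as in the setUp() and processPool() examples
log = gLogger.getSubLogger("MyComponent")
log.info("sub-logger inherits the global level and header settings")
log.debug("emitted because the level was raised to DEBUG")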