Code example #1
  def __checkDestination( self ):
    if not self.params.destination:
      self.params.destination = tempfile.mkdtemp( 'DIRACTarball' )

    gLogger.notice( "Will generate tarball in %s" % self.params.destination )
    mkDir(self.params.destination)
    return S_OK()
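
Every example on this page calls mkDir() in the same fire-and-forget way: the directory (including any missing parent directories) is expected to exist afterwards, and an already existing directory is not treated as an error, since none of the callers inspect a return value. The helper itself is not shown in these snippets; the following is only a minimal stand-in written under that assumption, not the actual DIRAC implementation.

import os

def mkDir(path):
    # Minimal stand-in: behave like 'mkdir -p', silently accepting an existing directory.
    if path and not os.path.isdir(path):
        os.makedirs(path)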
Code example #2
 def __prepareSecurityDetails(self):
   """ Obtains the connection details for the client
   """
   try:
     credDict = self.getRemoteCredentials()
     clientDN = credDict['DN']
     clientUsername = credDict['username']
     clientGroup = credDict['group']
     gLogger.debug( "Getting proxy for %s@%s (%s)" % ( clientUsername, clientGroup, clientDN ) )
     res = gProxyManager.downloadVOMSProxy( clientDN, clientGroup )
     if not res['OK']:
       return res
     chain = res['Value']
     proxyBase = "%s/proxies" % BASE_PATH
     mkDir(proxyBase)
     proxyLocation = "%s/proxies/%s-%s" % ( BASE_PATH, clientUsername, clientGroup )
     gLogger.debug("Obtained proxy chain, dumping to %s." % proxyLocation)
     res = gProxyManager.dumpProxyToFile( chain, proxyLocation )
     if not res['OK']:
       return res
     gLogger.debug("Updating environment.")
     os.environ['X509_USER_PROXY'] = res['Value']
     return res
   except Exception as error:
     exStr = "__getConnectionDetails: Failed to get client connection details."
     gLogger.exception( exStr, '', error )
     return S_ERROR(exStr)
Code example #3
  def __networkToFile( self, fileHelper, destFileName = False ):
    """
    Dump incoming network data to a temporary file
    """
    tfd = None
    if not destFileName:
      try:
        tfd, destFileName = tempfile.mkstemp( prefix = "DSB." )
        # wrap the raw descriptor in a file object; it is used as the write target below
        tfd = os.fdopen( tfd, "wb" )
      except Exception as e:
        gLogger.error( "%s" % repr( e ).replace( ',)', ')' ) )
        return S_ERROR( "Cannot create temporary file" )

    destFileName = os.path.realpath( destFileName )
    mkDir( os.path.dirname( destFileName ) )

    try:
      if tfd is not None:
        fd = tfd
      else:
        fd = open( destFileName, "wb" )
      result = fileHelper.networkToDataSink( fd, maxFileSize = self.__maxUploadBytes )
      fd.close()
    except Exception as e:
      gLogger.error( "Cannot open to write destination file", "%s: %s" % ( destFileName, repr( e ).replace( ',)', ')' ) ) )
      return S_ERROR( "Cannot open to write destination file" )
    if not result[ 'OK' ]:
      return result
    return S_OK( destFileName )
Code example #4
def initializeStorageElementProxyHandler( serviceInfo ):
  """ handler initialisation """

  global BASE_PATH, HTTP_FLAG, HTTP_PORT, HTTP_PATH
  cfgPath = serviceInfo['serviceSectionPath']

  BASE_PATH = gConfig.getValue( "%s/BasePath" % cfgPath, BASE_PATH )
  if not BASE_PATH:
    gLogger.error( 'Failed to get the base path' )
    return S_ERROR( 'Failed to get the base path' )

  BASE_PATH = os.path.abspath( BASE_PATH )
  gLogger.info('The base path obtained is %s. Checking its existence...' % BASE_PATH)
  mkDir(BASE_PATH)

  HTTP_FLAG = gConfig.getValue( "%s/HttpAccess" % cfgPath, False )
  if HTTP_FLAG:
    HTTP_PATH = '%s/httpCache' % BASE_PATH
    HTTP_PATH = gConfig.getValue( "%s/HttpCache" % cfgPath, HTTP_PATH )
    mkDir( HTTP_PATH )
    HTTP_PORT = gConfig.getValue( "%s/HttpPort" % cfgPath, 9180 )
    gLogger.info('Creating HTTP server thread, port:%d, path:%s' % ( HTTP_PORT, HTTP_PATH ) )
    _httpThread = HttpThread( HTTP_PORT, HTTP_PATH )

  return S_OK()
Code example #5
def createLocalDirectory(directory):
  """
  Create local directory
  """
  mkDir(directory)
  if not os.path.exists(directory):
    return S_ERROR('Directory creation failed')
  return S_OK('Created directory successfully')
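
Example #5 also illustrates the S_OK/S_ERROR result convention used throughout these snippets: callers check result['OK'] and then read either result['Value'] or result['Message']. A hypothetical caller (the path is made up for illustration) could look like this:

result = createLocalDirectory('/tmp/dirac-example')  # illustrative path
if result['OK']:
    gLogger.info(result['Value'])
else:
    gLogger.error(result['Message'])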
Code example #6
  def __writeSub(self, executable, nJobs):
    """ Create the Sub File for submission
    """

    self.log.debug("Working directory: %s " % self.workingDirectory)
    # We randomize the location of the pilotoutput and log, because there are just too many of them
    pre1 = makeGuid()[:3]
    pre2 = makeGuid()[:3]
    mkDir(os.path.join(self.workingDirectory, pre1, pre2))
    initialDirPrefix = "%s/%s" % (pre1, pre2)

    self.log.debug("InitialDir: %s" % os.path.join(self.workingDirectory, initialDirPrefix))

    self.log.debug("ExtraSubmitString:\n### \n %s \n###" % self.extraSubmitString)

    fd, name = tempfile.mkstemp(suffix='.sub', prefix='HTCondorCE_', dir=self.workingDirectory)
    subFile = os.fdopen(fd, 'w')

    executable = os.path.join(self.workingDirectory, executable)

    localScheddOptions = """
ShouldTransferFiles = YES
WhenToTransferOutput = ON_EXIT_OR_EVICT
""" if self.useLocalSchedd else ""

    targetUniverse = "grid" if self.useLocalSchedd else "vanilla"

    sub = """
executable = %(executable)s
universe = %(targetUniverse)s
use_x509userproxy = true
output = $(Cluster).$(Process).out
error = $(Cluster).$(Process).err
log = $(Cluster).$(Process).log
environment = "HTCONDOR_JOBID=$(Cluster).$(Process)"
initialdir = %(initialDir)s
grid_resource = condor %(ceName)s %(ceName)s:9619
transfer_output_files = ""

%(localScheddOptions)s

kill_sig=SIGTERM

%(extraString)s

Queue %(nJobs)s

""" % dict(executable=executable,
           nJobs=nJobs,
           ceName=self.ceName,
           extraString=self.extraSubmitString,
           initialDir=os.path.join(self.workingDirectory, initialDirPrefix),
           localScheddOptions=localScheddOptions,
           targetUniverse=targetUniverse,
           )
    subFile.write(sub)
    subFile.close()
    return name
Code example #7
File: Utils.py  Project: DIRACGrid/DIRAC
def createJobWrapper(jobID, jobParams, resourceParams, optimizerParams,
                     extraOptions='',
                     defaultWrapperLocation='DIRAC/WorkloadManagementSystem/JobWrapper/JobWrapperTemplate.py',
                     log=gLogger, logLevel='INFO'):
  """ This method creates a job wrapper filled with the CE and Job parameters to execute the job.
      Main user is the JobAgent
  """

  arguments = {'Job': jobParams,
               'CE': resourceParams,
               'Optimizer': optimizerParams}
  log.verbose('Job arguments are: \n %s' % (arguments))

  siteRoot = gConfig.getValue('/LocalSite/Root', os.getcwd())
  log.debug('SiteRootPythonDir is:\n%s' % siteRoot)
  workingDir = gConfig.getValue('/LocalSite/WorkingDirectory', siteRoot)
  mkDir('%s/job/Wrapper' % (workingDir))

  diracRoot = os.path.dirname(os.path.dirname(os.path.dirname(os.path.dirname(__file__))))

  jobWrapperFile = '%s/job/Wrapper/Wrapper_%s' % (workingDir, jobID)
  if os.path.exists(jobWrapperFile):
    log.verbose('Removing existing Job Wrapper for %s' % (jobID))
    os.remove(jobWrapperFile)
  with open(os.path.join(diracRoot, defaultWrapperLocation), 'r') as fd:
    wrapperTemplate = fd.read()

  if 'LogLevel' in jobParams:
    logLevel = jobParams['LogLevel']
    log.info('Found Job LogLevel JDL parameter with value: %s' % (logLevel))
  else:
    log.info('Applying default LogLevel JDL parameter with value: %s' % (logLevel))

  dPython = sys.executable
  realPythonPath = os.path.realpath(dPython)
  log.debug('Real python path after resolving links is: ', realPythonPath)
  dPython = realPythonPath

  # Making real substitutions
  # wrapperTemplate = wrapperTemplate.replace( "@JOBARGS@", str( arguments ) )
  wrapperTemplate = wrapperTemplate.replace("@SITEPYTHON@", str(siteRoot))

  jobWrapperJsonFile = jobWrapperFile + '.json'
  with io.open(jobWrapperJsonFile, 'w', encoding='utf8') as jsonFile:
    json.dump(unicode(arguments), jsonFile, ensure_ascii=False)

  with open(jobWrapperFile, "w") as wrapper:
    wrapper.write(wrapperTemplate)

  jobExeFile = '%s/job/Wrapper/Job%s' % (workingDir, jobID)
  jobFileContents = \
      """#!/bin/sh
%s %s %s -o LogLevel=%s -o /DIRAC/Security/UseServerCertificate=no
""" % (dPython, jobWrapperFile, extraOptions, logLevel)
  with open(jobExeFile, 'w') as jobFile:
    jobFile.write(jobFileContents)

  return S_OK(jobExeFile)
Code example #8
File: RRDManager.py  Project: DIRACGrid-test/DIRAC
 def __init__( self, rrdLocation, graphLocation ):
   """
   Initialize RRDManager
   """
   self.rrdLocation = rrdLocation
   self.graphLocation = graphLocation
   self.log = gLogger.getSubLogger( "RRDManager" )
   self.rrdExec = gConfig.getValue( "%s/RRDExec" % getServiceSection( "Framework/Monitoring" ), "rrdtool" )
   for path in ( self.rrdLocation, self.graphLocation ):
     mkDir( path )
Code example #9
File: dirac-distribution.py  Project: DIRACGrid/DIRAC
  def isOK(self):
    if not self.cliParams.releasesToBuild:
      gLogger.error("Missing releases to build!")
      Script.showHelp()
      return False

    if not self.cliParams.destination:
      self.cliParams.destination = tempfile.mkdtemp('DiracDist')
    else:
      mkDir(self.cliParams.destination)
    gLogger.notice("Will generate tarballs in %s" % self.cliParams.destination)
    return True
Code example #10
def __runSystemDefaults(jobID, vo):
  """
  Creates the environment for running the job and returns
  the path for the other functions.

  """


  tempdir = str(vo) + "job" + str(jobID) + "temp"

  mkDir(tempdir)
  basepath = os.getcwd()
  return basepath + os.path.sep + tempdir + os.path.sep
Code example #11
File: AgentModule.py  Project: DIRACGrid/DIRAC
  def am_initialize(self, *initArgs):
    """ Common initialization for all the agents.

        This is executed every time an agent (re)starts.
        This is called by the AgentReactor, should not be overridden.
    """
    agentName = self.am_getModuleParam('fullName')
    result = self.initialize(*initArgs)
    if not isReturnStructure(result):
      return S_ERROR("initialize must return S_OK/S_ERROR")
    if not result['OK']:
      return S_ERROR("Error while initializing %s: %s" % (agentName, result['Message']))
    mkDir(self.am_getControlDirectory())
    workDirectory = self.am_getWorkDirectory()
    mkDir(workDirectory)
    # Set the work directory in an environment variable available to subprocesses if needed
    os.environ['AGENT_WORKDIRECTORY'] = workDirectory

    self.__moduleProperties['shifterProxy'] = self.am_getOption('shifterProxy')
    if self.am_monitoringEnabled():
      self.monitor.enable()
    if len(self.__moduleProperties['executors']) < 1:
      return S_ERROR("At least one executor method has to be defined")
    if not self.am_Enabled():
      return S_ERROR("Agent is disabled via the configuration")
    self.log.notice("=" * 40)
    self.log.notice("Loaded agent module %s" % self.__moduleProperties['fullName'])
    self.log.notice(" Site: %s" % DIRAC.siteName())
    self.log.notice(" Setup: %s" % gConfig.getValue("/DIRAC/Setup"))
    self.log.notice(" Base Module version: %s " % __RCSID__)
    self.log.notice(" Agent version: %s" % self.__codeProperties['version'])
    self.log.notice(" DIRAC version: %s" % DIRAC.version)
    self.log.notice(" DIRAC platform: %s" % DIRAC.getPlatform())
    pollingTime = int(self.am_getOption('PollingTime'))
    if pollingTime > 3600:
      self.log.notice(" Polling time: %s hours" % (pollingTime / 3600.))
    else:
      self.log.notice(" Polling time: %s seconds" % self.am_getOption('PollingTime'))
    self.log.notice(" Control dir: %s" % self.am_getControlDirectory())
    self.log.notice(" Work dir: %s" % self.am_getWorkDirectory())
    if self.am_getOption('MaxCycles') > 0:
      self.log.notice(" Cycles: %s" % self.am_getMaxCycles())
    else:
      self.log.notice(" Cycles: unlimited")
    if self.am_getWatchdogTime() > 0:
      self.log.notice(" Watchdog interval: %s" % self.am_getWatchdogTime())
    else:
      self.log.notice(" Watchdog interval: disabled ")
    self.log.notice("=" * 40)
    self.__initialized = True
    return S_OK()
Code example #12
 def transfer_fromClient( self, fileID, token, fileSize, fileHelper ):
   """ Method to receive file from clients.
       fileID is the local file name in the SE.
       fileSize can be Xbytes or -1 if unknown.
       token is used for access rights confirmation.
   """
   if not self.__checkForDiskSpace( BASE_PATH, fileSize ):
     return S_ERROR( 'Not enough disk space' )
   file_path = self.__resolveFileID( fileID )
   try:
     mkDir(os.path.dirname( file_path ))
     fd = open( file_path, "wb" )
   except Exception as error:
     return S_ERROR( "Cannot open to write destination file %s: %s" % ( file_path, str( error ) ) )
Code example #13
File: SecurityFileLog.py  Project: mesmith75/DIRAC
 def run(self):
   while True:
     secMsg = self.__messagesQueue.get()
     msgTime = secMsg[ 0 ]
     path = "%s/%s/%02d" % ( self.__basePath, msgTime.year, msgTime.month )
     mkDir( path )
     logFile = "%s/%s%02d%02d.security.log.csv" % ( path, msgTime.year, msgTime.month, msgTime.day )
     if not os.path.isfile( logFile ):
       fd = open( logFile, "w" )
       fd.write( "Time, Success, Source IP, Source Port, source Identity, destinationIP, destinationPort, destinationService, action\n" )
     else:
       fd = open( logFile, "a" )
     fd.write( "%s\n" % ", ".join( [ str( item ) for item in secMsg ] ) )
     fd.close()
Code example #15
  def initializeHandler( cls, serviceInfo ):
    cls.__db = MonitoringDB()
    reportSection = serviceInfo[ 'serviceSectionPath' ]
    dataPath = gConfig.getValue( "%s/DataLocation" % reportSection, "data/monitoringPlots" )
    gLogger.info( "Data will be written into %s" % dataPath )
    mkDir( dataPath )
    try:
      testFile = "%s/moni.plot.test" % dataPath
      with open( testFile, "w" ) as _fd:
        os.unlink( testFile )
    except IOError as err:
      gLogger.fatal( "Can't write to %s" % dataPath, err )
      return S_ERROR( "Data location is not writable: %s" % repr( err ) )
    gDataCache.setGraphsLocation( dataPath )

    return S_OK()
Code example #16
File: MonitoringHandler.py  Project: sparsh35/DIRAC
  def initializeHandler(cls, serviceInfo):
    cls.__db = MonitoringDB()
    reportSection = serviceInfo['serviceSectionPath']
    dataPath = gConfig.getValue("%s/DataLocation" % reportSection, "data/monitoringPlots")
    gLogger.info("Data will be written into %s" % dataPath)
    mkDir(dataPath)
    try:
      testFile = "%s/moni.plot.test" % dataPath
      with open(testFile, "w") as _:
        os.unlink(testFile)
    except IOError as err:
      gLogger.fatal("Can't write to %s" % dataPath, err)
      return S_ERROR("Data location is not writable: %s" % repr(err))
    gDataCache.setGraphsLocation(dataPath)

    return S_OK()
Code example #17
File: AgentModule.py  Project: ltomassetti/DIRAC
    def am_initialize(self, *initArgs):
        agentName = self.am_getModuleParam('fullName')
        result = self.initialize(*initArgs)
        if not isReturnStructure(result):
            return S_ERROR("initialize must return S_OK/S_ERROR")
        if not result['OK']:
            return S_ERROR("Error while initializing %s: %s" %
                           (agentName, result['Message']))
        mkDir(self.am_getControlDirectory())
        workDirectory = self.am_getWorkDirectory()
        mkDir(workDirectory)
        # Set the work directory in an environment variable available to subprocesses if needed
        os.environ['AGENT_WORKDIRECTORY'] = workDirectory

        self.__moduleProperties['shifterProxy'] = self.am_getOption(
            'shifterProxy')
        if self.am_monitoringEnabled():
            self.monitor.enable()
        if len(self.__moduleProperties['executors']) < 1:
            return S_ERROR("At least one executor method has to be defined")
        if not self.am_Enabled():
            return S_ERROR("Agent is disabled via the configuration")
        self.log.notice("=" * 40)
        self.log.notice("Loaded agent module %s" %
                        self.__moduleProperties['fullName'])
        self.log.notice(" Site: %s" % DIRAC.siteName())
        self.log.notice(" Setup: %s" % gConfig.getValue("/DIRAC/Setup"))
        self.log.notice(" Base Module version: %s " % __RCSID__)
        self.log.notice(" Agent version: %s" %
                        self.__codeProperties['version'])
        self.log.notice(" DIRAC version: %s" % DIRAC.version)
        self.log.notice(" DIRAC platform: %s" % DIRAC.getPlatform())
        pollingTime = int(self.am_getOption('PollingTime'))
        if pollingTime > 3600:
            self.log.notice(" Polling time: %s hours" % (pollingTime / 3600.))
        else:
            self.log.notice(" Polling time: %s seconds" %
                            self.am_getOption('PollingTime'))
        self.log.notice(" Control dir: %s" % self.am_getControlDirectory())
        self.log.notice(" Work dir: %s" % self.am_getWorkDirectory())
        if self.am_getOption('MaxCycles') > 0:
            self.log.notice(" Cycles: %s" % self.am_getMaxCycles())
        else:
            self.log.notice(" Cycles: unlimited")
        self.log.notice("=" * 40)
        self.__initialized = True
        return S_OK()
Code example #18
 def __backupCurrentConfiguration( self, backupName ):
   configurationFilename = "%s.cfg" % self.getName()
   configurationFile = os.path.join( DIRAC.rootPath, "etc", configurationFilename )
   today = Time.date()
   backupPath = os.path.join( self.getBackupDir(), str( today.year ), "%02d" % today.month )
   mkDir(backupPath)
   backupFile = os.path.join( backupPath, configurationFilename.replace( ".cfg", ".%s.zip" % backupName ) )
   if os.path.isfile( configurationFile ):
     gLogger.info( "Making a backup of configuration in %s" % backupFile )
     try:
       with zipfile.ZipFile( backupFile, "w", zipfile.ZIP_DEFLATED ) as zf:
         zf.write( configurationFile, "%s.backup.%s" % ( os.path.split( configurationFile )[1], backupName ) )
     except Exception:
       gLogger.exception()
       gLogger.error( "Cannot backup configuration data file", "file %s" % backupFile )
   else:
     gLogger.warn( "CS data file does not exist", configurationFile )
Code example #19
 def __backupCurrentConfiguration(self, backupName):
   configurationFilename = "%s.cfg" % self.getName()
   configurationFile = os.path.join(DIRAC.rootPath, "etc", configurationFilename)
   today = Time.date()
   backupPath = os.path.join(self.getBackupDir(), str(today.year), "%02d" % today.month)
   mkDir(backupPath)
   backupFile = os.path.join(backupPath, configurationFilename.replace(".cfg", ".%s.zip" % backupName))
   if os.path.isfile(configurationFile):
     gLogger.info("Making a backup of configuration in %s" % backupFile)
     try:
       with zipfile.ZipFile(backupFile, "w", zipfile.ZIP_DEFLATED) as zf:
         zf.write(configurationFile, "%s.backup.%s" % (os.path.split(configurationFile)[1], backupName))
     except Exception:
       gLogger.exception()
       gLogger.error("Cannot backup configuration data file", "file %s" % backupFile)
   else:
     gLogger.warn("CS data file does not exist", configurationFile)
Code example #20
File: Shifter.py  Project: sparsh35/DIRAC
def getShifterProxy(shifterType, fileName=False):
  """ This method returns a shifter's proxy

      :param str shifterType: ProductionManager / DataManager...
      :param str fileName: file name

      :return: S_OK(dict)/S_ERROR()
  """
  if fileName:
    mkDir(os.path.dirname(fileName))
  opsHelper = Operations()
  userName = opsHelper.getValue(cfgPath('Shifter', shifterType, 'User'), '')
  if not userName:
    return S_ERROR("No shifter User defined for %s" % shifterType)
  result = Registry.getDNForUsername(userName)
  if not result['OK']:
    return result
  userDN = result['Value'][0]
  result = Registry.findDefaultGroupForDN(userDN)
  if not result['OK']:
    return result
  defaultGroup = result['Value']
  userGroup = opsHelper.getValue(cfgPath('Shifter', shifterType, 'Group'), defaultGroup)
  vomsAttr = Registry.getVOMSAttributeForGroup(userGroup)
  if vomsAttr:
    gLogger.info("Getting VOMS [%s] proxy for shifter %s@%s (%s)" % (vomsAttr, userName,
                                                                     userGroup, userDN))
    result = gProxyManager.downloadVOMSProxyToFile(userDN, userGroup,
                                                   filePath=fileName,
                                                   requiredTimeLeft=86400,
                                                   cacheTime=86400)
  else:
    gLogger.info("Getting proxy for shifter %s@%s (%s)" % (userName, userGroup, userDN))
    result = gProxyManager.downloadProxyToFile(userDN, userGroup,
                                               filePath=fileName,
                                               requiredTimeLeft=86400,
                                               cacheTime=86400)
  if not result['OK']:
    return result
  chain = result['chain']
  fileName = result['Value']
  return S_OK({'DN': userDN,
               'username': userName,
               'group': userGroup,
               'chain': chain,
               'proxyFile': fileName})
Code example #21
File: Shifter.py  Project: DIRACGrid/DIRAC
def getShifterProxy(shifterType, fileName=False):
  """
  This method returns a shifter's proxy

  :param shifterType: ProductionManager / DataManager...

  """
  if fileName:
    mkDir(os.path.dirname(fileName))
  opsHelper = Operations()
  userName = opsHelper.getValue(cfgPath('Shifter', shifterType, 'User'), '')
  if not userName:
    return S_ERROR("No shifter User defined for %s" % shifterType)
  result = CS.getDNForUsername(userName)
  if not result['OK']:
    return result
  userDN = result['Value'][0]
  result = CS.findDefaultGroupForDN(userDN)
  if not result['OK']:
    return result
  defaultGroup = result['Value']
  userGroup = opsHelper.getValue(cfgPath('Shifter', shifterType, 'Group'), defaultGroup)
  vomsAttr = CS.getVOMSAttributeForGroup(userGroup)
  if vomsAttr:
    gLogger.info("Getting VOMS [%s] proxy for shifter %s@%s (%s)" % (vomsAttr, userName,
                                                                     userGroup, userDN))
    result = gProxyManager.downloadVOMSProxyToFile(userDN, userGroup,
                                                   filePath=fileName,
                                                   requiredTimeLeft=86400,
                                                   cacheTime=86400)
  else:
    gLogger.info("Getting proxy for shifter %s@%s (%s)" % (userName, userGroup, userDN))
    result = gProxyManager.downloadProxyToFile(userDN, userGroup,
                                               filePath=fileName,
                                               requiredTimeLeft=86400,
                                               cacheTime=86400)
  if not result['OK']:
    return result
  chain = result['chain']
  fileName = result['Value']
  return S_OK({'DN': userDN,
               'username': userName,
               'group': userGroup,
               'chain': chain,
               'proxyFile': fileName})
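
A hedged usage sketch for the getShifterProxy() variants above; the shifter type and target file name are illustrative only. On success the returned dictionary carries the DN, username, group, proxy chain and the path of the downloaded proxy file, exactly as assembled at the end of the function.

result = getShifterProxy('DataManager', fileName='/tmp/shifterProxy.pem')  # illustrative arguments
if result['OK']:
    shifter = result['Value']
    gLogger.info('Shifter proxy for %s@%s written to %s'
                 % (shifter['username'], shifter['group'], shifter['proxyFile']))
else:
    gLogger.error(result['Message'])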
Code example #22
  def __prepareFileForHTTP( self, lfn, key ):
    """ Prepare proxied file for HTTP """
    global HTTP_PATH

    res = self.__prepareSecurityDetails()
    if not res['OK']:
      return res

    # Clear the local cache
    getFileDir = "%s/%s" % ( HTTP_PATH, key )
    mkDir(getFileDir)

    # Get the file to the cache
    from DIRAC.DataManagementSystem.Client.DataManager import DataManager
    dataMgr = DataManager()
    result = dataMgr.getFile( lfn, destinationDir = getFileDir )
    result['CachePath'] = getFileDir
    return result
Code example #24
  def __writeSub( self, executable, nJobs ):
    """ Create the Sub File for submission

    """
    workingDirectory = self.ceParameters['WorkingDirectory']
    initialDir = os.path.dirname( workingDirectory )
    self.log.debug( "Working directory: %s " % workingDirectory )
    ##We randomize the location of the pilotoutput and log, because there are just too many of them
    pre1 = makeGuid()[:3]
    pre2 = makeGuid()[:3]
    mkDir( os.path.join( initialDir, pre1, pre2 ) )
    initialDirPrefix = "%s/%s" %( pre1, pre2 )

    self.log.debug( "InitialDir: %s" % os.path.join(initialDir,initialDirPrefix) )

    fd, name = tempfile.mkstemp( suffix = '.sub', prefix = 'HTCondorCE_', dir = workingDirectory )
    subFile = os.fdopen( fd, 'w' )

    executable = os.path.join( workingDirectory, executable )

    sub = """
executable = %(executable)s
universe = grid
use_x509userproxy = true
output = $(Cluster).$(Process).out
error = $(Cluster).$(Process).err
log = $(Cluster).$(Process).log
environment = "HTCONDOR_JOBID=$(Cluster).$(Process)"
initialdir = %(initialDir)s
grid_resource = condor %(ceName)s %(ceName)s:9619
ShouldTransferFiles = YES
WhenToTransferOutput = ON_EXIT_OR_EVICT
kill_sig=SIGTERM
Queue %(nJobs)s

""" % dict( executable=executable,
            nJobs=nJobs,
            ceName=self.ceName,
            initialDir=os.path.join(initialDir,initialDirPrefix),
          )
    subFile.write( sub )
    subFile.close()
    return name
Code example #25
  def __moveToFinalLocation( self, localFilePath, sbPath ):
    if self.__useLocalStorage:
      hdFilePath = self.__sbToHDPath( sbPath )
      result = S_OK( ( self.__localSEName, sbPath ) )
      if os.path.isfile( hdFilePath ):
        gLogger.info( "There was already a sandbox with that name, skipping copy", sbPath )
      else:
        hdDirPath = os.path.dirname( hdFilePath )
        mkDir(hdDirPath)
        try:
          os.rename( localFilePath, hdFilePath )
        except OSError as e:
          errMsg = "Cannot move temporal file to final path"
          gLogger.error( errMsg, repr( e ).replace( ',)', ')' ) )
          result = S_ERROR( errMsg )
    else:
      result = self.__copyToExternalSE( localFilePath, sbPath )

    return result
Code example #26
    def __moveToFinalLocation(self, localFilePath, sbPath):
        if self.__useLocalStorage:
            hdFilePath = self.__sbToHDPath(sbPath)
            result = S_OK((self.__localSEName, sbPath))
            if os.path.isfile(hdFilePath):
                gLogger.info("There was already a sandbox with that name, skipping copy", sbPath)
            else:
                hdDirPath = os.path.dirname(hdFilePath)
                mkDir(hdDirPath)
                try:
                    os.rename(localFilePath, hdFilePath)
                except OSError as e:
                    errMsg = "Cannot move temporal file to final path"
                    gLogger.error(errMsg, repr(e).replace(",)", ")"))
                    result = S_ERROR(errMsg)
        else:
            result = self.__copyToExternalSE(localFilePath, sbPath)

        return result
Code example #27
  def transfer_fromClient( self, fileID, token, fileSize, fileHelper ):
    """ Method to receive file from clients.
        fileID is the local file name in the SE.
        fileSize can be Xbytes or -1 if unknown.
        token is used for access rights confirmation.
    """
    if not self.__checkForDiskSpace( BASE_PATH, fileSize ):
      return S_ERROR('Not enough disk space')

    file_path = "%s/%s" % ( BASE_PATH, fileID )
    mkDir( os.path.dirname( file_path ) )
    result = fileHelper.getFileDescriptor( file_path, 'w' )
    if not result['OK']:
      return S_ERROR('Failed to get file descriptor')

    fileDescriptor = result['Value']
    result = fileHelper.networkToFD(fileDescriptor)
    if not result['OK']:
      return S_ERROR('Failed to put file %s' % fileID )
    return result
Code example #28
 def export_createDirectory(self, dir_path):
   """ Creates the directory on the storage
   """
   path = self.__resolveFileID(dir_path)
   gLogger.info("StorageElementHandler.createDirectory: Attempting to create %s." % path)
   if os.path.exists(path):
     if os.path.isfile(path):
       errStr = "Supplied path exists and is a file"
       gLogger.error("StorageElementHandler.createDirectory: %s." % errStr, path)
       return S_ERROR(errStr)
     gLogger.info("StorageElementHandler.createDirectory: %s already exists." % path)
     return S_OK()
   # Need to think about permissions.
   try:
     mkDir(path)
     return S_OK()
   except Exception as x:
     errStr = "Exception creating directory."
     gLogger.error("StorageElementHandler.createDirectory: %s" % errStr, repr(x))
     return S_ERROR(errStr)
Code example #30
    def __prepareFile(self, se, pfn):
        """ proxied prepare file """

        res = self.__prepareSecurityDetails()
        if not res['OK']:
            return res
        # Clear the local cache
        base = gConfig.getValue(
            "Systems/DataManagement/boincInstance/Services/StorageElementProxy/BasePath"
        )
        getFileDir = "%s/getFile" % base
        mkDir(getFileDir)
        # Get the file to the cache
        storageElement = StorageElement(se)
        res = returnSingleResult(
            storageElement.getFile(pfn, localPath=getFileDir))
        if not res['OK']:
            gLogger.error("prepareFile: Failed to get local copy of file.",
                          res['Message'])
            return res
        return res
Code example #31
 def __init__(self):
     CLI.__init__(self)
     self.connected = False
     self.masterURL = "unset"
     self.writeEnabled = False
     self.modifiedData = False
     self.rpcClient = None
     self.do_connect()
     if self.connected:
         self.modificator = Modificator(self.rpcClient)
     else:
         self.modificator = Modificator()
     self.indentSpace = 20
     self.backupFilename = "dataChanges"
     # store history
     histfilename = os.path.basename(sys.argv[0])
     historyFile = os.path.expanduser("~/.dirac/%s.history" % histfilename[0:-3])
     mkDir(os.path.dirname(historyFile))
     if os.path.isfile(historyFile):
         readline.read_history_file(historyFile)
     readline.set_history_length(1000)
     atexit.register(readline.write_history_file, historyFile)
Code example #32
  def __init__( self, host = None ):

    CLI.__init__( self )
    # Check if Port is given
    self.host = None
    self.port = None
    self.prompt = '[%s]> ' % colorize( "no host", "yellow" )
    if host:
      self.__setHost( host )
    self.cwd = ''
    self.previous_cwd = ''
    self.homeDir = ''
    self.runitComponents = [ "service", "agent", "executor", "consumer" ]

    # store history
    histfilename = os.path.basename(sys.argv[0])
    historyFile = os.path.expanduser( "~/.dirac/%s.history" % histfilename[0:-3])
    mkDir(os.path.dirname(historyFile))
    if os.path.isfile( historyFile ):
      readline.read_history_file( historyFile )
    readline.set_history_length(1000)
    atexit.register( readline.write_history_file, historyFile )
Code example #33
File: ServiceInterface.py  Project: sparsh35/DIRAC
  def __loadConfigurationData(self):
    mkDir(os.path.join(DIRAC.rootPath, "etc", "csbackup"))
    gConfigurationData.loadConfigurationData()
    if gConfigurationData.isMaster():
      bBuiltNewConfiguration = False
      if not gConfigurationData.getName():
        DIRAC.abort(10, "Missing name for the configuration to be exported!")
      gConfigurationData.exportName()
      sVersion = gConfigurationData.getVersion()
      if sVersion == "0":
        gLogger.info("There's no version. Generating a new one")
        gConfigurationData.generateNewVersion()
        bBuiltNewConfiguration = True

      if self.sURL not in gConfigurationData.getServers():
        gConfigurationData.setServers(self.sURL)
        bBuiltNewConfiguration = True

      gConfigurationData.setMasterServer(self.sURL)

      if bBuiltNewConfiguration:
        gConfigurationData.writeRemoteConfigurationToDisk()
Code example #34
File: CSCLI.py  Project: ahaupt/DIRAC
 def __init__( self ):
   CLI.__init__( self )
   self.connected = False
   self.masterURL = "unset"
   self.writeEnabled = False
   self.modifiedData = False
   self.rpcClient = None
   self.do_connect()
   if self.connected:
     self.modificator = Modificator ( self.rpcClient )
   else:
     self.modificator = Modificator()
   self.indentSpace = 20
   self.backupFilename = "dataChanges"
   # store history
   histfilename = os.path.basename(sys.argv[0])
   historyFile = os.path.expanduser( "~/.dirac/%s.history" % histfilename[0:-3])
   mkDir(os.path.dirname(historyFile))
   if os.path.isfile( historyFile ):
     readline.read_history_file( historyFile )
   readline.set_history_length(1000)
   atexit.register( readline.write_history_file, historyFile )
Code example #35
 def initializeHandler(cls, serviceInfo):
     multiPath = PathFinder.getDatabaseSection("Accounting/MultiDB")
     cls.__acDB = MultiAccountingDB(multiPath, readOnly=True)
     # Get data location
     reportSection = serviceInfo["serviceSectionPath"]
     dataPath = gConfig.getValue("%s/DataLocation" % reportSection,
                                 "data/accountingGraphs")
     dataPath = dataPath.strip()
     if "/" != dataPath[0]:
         dataPath = os.path.realpath(f"{rootPath}/{dataPath}")
     gLogger.info(f"Data will be written into {dataPath}")
     mkDir(dataPath)
     try:
         testFile = "%s/acc.jarl.test" % dataPath
         with open(testFile, "w"):
             pass
         os.unlink(testFile)
     except IOError:
         gLogger.fatal("Can't write to %s" % dataPath)
         return S_ERROR("Data location is not writable")
     gDataCache.setGraphsLocation(dataPath)
     return S_OK()
Code example #37
File: ServiceInterface.py  Project: DIRACGrid/DIRAC
    def __loadConfigurationData(self):
        mkDir(os.path.join(DIRAC.rootPath, "etc", "csbackup"))
        gConfigurationData.loadConfigurationData()
        if gConfigurationData.isMaster():
            bBuiltNewConfiguration = False
            if not gConfigurationData.getName():
                DIRAC.abort(10, "Missing name for the configuration to be exported!")
            gConfigurationData.exportName()
            sVersion = gConfigurationData.getVersion()
            if sVersion == "0":
                gLogger.info("There's no version. Generating a new one")
                gConfigurationData.generateNewVersion()
                bBuiltNewConfiguration = True

            if self.sURL not in gConfigurationData.getServers():
                gConfigurationData.setServers(self.sURL)
                bBuiltNewConfiguration = True

            gConfigurationData.setMasterServer(self.sURL)

            if bBuiltNewConfiguration:
                gConfigurationData.writeRemoteConfigurationToDisk()
Code example #38
def initializeStorageElementHandler(serviceInfo):
  """  Initialize Storage Element global settings
  """

  global BASE_PATH
  global USE_TOKENS
  global MAX_STORAGE_SIZE

  BASE_PATH = getServiceOption(serviceInfo, "BasePath", BASE_PATH)
  if not BASE_PATH:
    gLogger.error('Failed to get the base path')
    return S_ERROR('Failed to get the base path')
  mkDir(BASE_PATH)

  USE_TOKENS = getServiceOption(serviceInfo, "UseTokens", USE_TOKENS)
  MAX_STORAGE_SIZE = convertSizeUnits(getServiceOption(serviceInfo, "MaxStorageSize", MAX_STORAGE_SIZE), 'MB', 'B')

  gLogger.info('Starting DIRAC Storage Element')
  gLogger.info('Base Path: %s' % BASE_PATH)
  gLogger.info('Max size: %d Bytes' % MAX_STORAGE_SIZE)
  gLogger.info('Use access control tokens: ' + str(USE_TOKENS))
  return S_OK()
Code example #39
File: PopularityAgent.py  Project: antolu/LHCbDIRAC
    def initialize(self):
        """ agent initialisation """
        self.am_setOption('PollingTime', 43200)
        if self.am_getOption('DirectDB', False):
            self.__stDB = StorageUsageDB()
            # self.bkClient = BookkeepingClient()#the necessary method is still not available in Bookk. client
        else:
            self.__stDB = RPCClient('DataManagement/DataUsage')
            timeout = 600
        self.__bkClient = BookkeepingClient()
        self.__dataUsageClient = DataUsageClient()
        self.__workDirectory = self.am_getOption("WorkDirectory")
        mkDir(self.__workDirectory)
        self.log.info("Working directory is %s" % self.__workDirectory)
        # by default, collects raw records from Popularity table inserted in the last day
        self.timeInterval = self.am_getOption(
            'timeIntervalForPopularityRecords', 1)
        self.queryTimeout = self.am_getOption('queryTimeout', 3600)
        self.cacheMetadata = {}
        self.limitForCommit = self.am_getOption("LimitForCommit", 1000)

        return S_OK()
Code example #40
  def transfer_fromClient(self, fileID, token, fileSize, fileHelper):
    """ Method to receive file from clients.
        fileID is the local file name in the SE.
        fileSize can be Xbytes or -1 if unknown.
        token is used for access rights confirmation.
    """
    result = self.__checkForDiskSpace(fileSize)
    if not result['OK']:
      return S_ERROR('Failed to get available free space')
    elif not result['Value']:
      return S_ERROR('Not enough disk space')
    file_path = self.__resolveFileID(fileID)

    if "NoCheckSum" in token:
      fileHelper.disableCheckSum()

    try:
      mkDir(os.path.dirname(file_path))
      with open(file_path, "wb") as fd:
        return fileHelper.networkToDataSink(fd, maxFileSize=(MAX_STORAGE_SIZE))
    except Exception as error:
      return S_ERROR("Cannot open to write destination file %s: %s" % (file_path, str(error)))
Code example #41
def initializeSecurityLoggingHandler(serviceInfo):
    global gSecurityFileLog

    serviceCS = serviceInfo["serviceSectionPath"]
    dataPath = gConfig.getValue("%s/DataLocation" % serviceCS, "data/securityLog")
    dataPath = dataPath.strip()
    if "/" != dataPath[0]:
        dataPath = os.path.realpath("%s/%s" % (gConfig.getValue("/LocalSite/InstancePath", rootPath), dataPath))
    gLogger.info("Data will be written into %s" % dataPath)
    mkDir(dataPath)

    try:
        testFile = "%s/seclog.jarl.test" % dataPath
        fd = file(testFile, "w")
        fd.close()
        os.unlink(testFile)
    except IOError:
        gLogger.fatal("Can't write to %s" % dataPath)
        return S_ERROR("Data location is not writable")
    # Define globals
    gSecurityFileLog = SecurityFileLog(dataPath)
    SecurityLogClient().setLogStore(gSecurityFileLog)
    return S_OK()
Code example #42
 def initializeHandler(cls, serviceInfo):
   multiPath = PathFinder.getDatabaseSection("Accounting/MultiDB")
   cls.__acDB = MultiAccountingDB(multiPath, readOnly=True)
   # Get data location
   reportSection = serviceInfo['serviceSectionPath']
   dataPath = gConfig.getValue("%s/DataLocation" % reportSection, "data/accountingGraphs")
   dataPath = dataPath.strip()
   if "/" != dataPath[0]:
     dataPath = os.path.realpath("%s/%s" % (gConfig.getValue('/LocalSite/InstancePath', rootPath), dataPath))
   gLogger.info("Data will be written into %s" % dataPath)
   mkDir(dataPath)
   try:
     testFile = "%s/acc.jarl.test" % dataPath
     fd = file(testFile, "w")
     fd.close()
     os.unlink(testFile)
   except IOError:
     gLogger.fatal("Can't write to %s" % dataPath)
     return S_ERROR("Data location is not writable")
   gDataCache.setGraphsLocation(dataPath)
   gMonitor.registerActivity("plotsDrawn", "Drawn plot images", "Accounting reports", "plots", gMonitor.OP_SUM)
   gMonitor.registerActivity("reportsRequested", "Generated reports", "Accounting reports", "reports", gMonitor.OP_SUM)
   return S_OK()
Code example #43
 def initializeHandler( cls, serviceInfo ):
   multiPath = PathFinder.getDatabaseSection( "Accounting/MultiDB" )
   cls.__acDB = MultiAccountingDB( multiPath, readOnly = True )
   #Get data location
   reportSection = serviceInfo[ 'serviceSectionPath' ]
   dataPath = gConfig.getValue( "%s/DataLocation" % reportSection, "data/accountingGraphs" )
   dataPath = dataPath.strip()
   if "/" != dataPath[0]:
     dataPath = os.path.realpath( "%s/%s" % ( gConfig.getValue( '/LocalSite/InstancePath', rootPath ), dataPath ) )
   gLogger.info( "Data will be written into %s" % dataPath )
   mkDir( dataPath )
   try:
     testFile = "%s/acc.jarl.test" % dataPath
     fd = file( testFile, "w" )
     fd.close()
     os.unlink( testFile )
   except IOError:
     gLogger.fatal( "Can't write to %s" % dataPath )
     return S_ERROR( "Data location is not writable" )
   gDataCache.setGraphsLocation( dataPath )
   gMonitor.registerActivity( "plotsDrawn", "Drawn plot images", "Accounting reports", "plots", gMonitor.OP_SUM )
   gMonitor.registerActivity( "reportsRequested", "Generated reports", "Accounting reports", "reports", gMonitor.OP_SUM )
   return S_OK()
Code example #44
def initializeStorageElementHandler(serviceInfo):
    """Initialize Storage Element global settings"""

    global BASE_PATH
    global USE_TOKENS
    global MAX_STORAGE_SIZE

    BASE_PATH = getServiceOption(serviceInfo, "BasePath", "")
    if not BASE_PATH:
        gLogger.error("Failed to get the base path")
        return S_ERROR("Failed to get the base path")
    mkDir(BASE_PATH)

    USE_TOKENS = getServiceOption(serviceInfo, "UseTokens", USE_TOKENS)
    MAX_STORAGE_SIZE = convertSizeUnits(
        getServiceOption(serviceInfo, "MaxStorageSize", MAX_STORAGE_SIZE),
        "MB", "B")

    gLogger.info("Starting DIRAC Storage Element")
    gLogger.info("Base Path: %s" % BASE_PATH)
    gLogger.info("Max size: %d Bytes" % MAX_STORAGE_SIZE)
    gLogger.info("Use access control tokens: " + str(USE_TOKENS))
    return S_OK()
Code example #45
def initializeSecurityLoggingHandler(serviceInfo):
    global gSecurityFileLog

    serviceCS = serviceInfo["serviceSectionPath"]
    dataPath = gConfig.getValue("%s/DataLocation" % serviceCS, "data/securityLog")
    dataPath = dataPath.strip()
    if "/" != dataPath[0]:
        dataPath = os.path.realpath("%s/%s" % (gConfig.getValue("/LocalSite/InstancePath", rootPath), dataPath))
    gLogger.info("Data will be written into %s" % dataPath)
    mkDir(dataPath)

    try:
        testFile = "%s/seclog.jarl.test" % dataPath
        with open(testFile, "w"):
            pass
        os.unlink(testFile)
    except IOError:
        gLogger.fatal("Can't write to %s" % dataPath)
        return S_ERROR("Data location is not writable")
    # Define globals
    gSecurityFileLog = SecurityFileLog(dataPath)
    SecurityLogClient().setLogStore(gSecurityFileLog)
    return S_OK()
Code example #46
    def downloadSandbox(self,
                        sbLocation,
                        destinationDir="",
                        inMemory=False,
                        unpack=True):
        """
    Download a sandbox file and keep it in bundled form
    """
        if sbLocation.find("SB:") != 0:
            return S_ERROR("Invalid sandbox URL")
        sbLocation = sbLocation[3:]
        sbSplit = sbLocation.split("|")
        if len(sbSplit) < 2:
            return S_ERROR("Invalid sandbox URL")
        seName = sbSplit[0]
        sePFN = "|".join(sbSplit[1:])

        try:
            tmpSBDir = tempfile.mkdtemp(prefix="TMSB.")
        except IOError as e:
            return S_ERROR("Cannot create temporary file: %s" % repr(e))

        se = StorageElement(seName, vo=self.__vo)
        result = returnSingleResult(se.getFile(sePFN, localPath=tmpSBDir))

        if not result['OK']:
            return result
        sbFileName = os.path.basename(sePFN)

        result = S_OK()
        tarFileName = os.path.join(tmpSBDir, sbFileName)

        if inMemory:
            try:
                with open(tarFileName, 'r') as tfile:
                    data = tfile.read()
            except IOError as e:
                return S_ERROR('Failed to read the sandbox archive: %s' %
                               repr(e))
            finally:
                os.unlink(tarFileName)
                os.rmdir(tmpSBDir)
            return S_OK(data)

        # If destination dir is not specified use current working dir
        # If its defined ensure the dir structure is there
        if not destinationDir:
            destinationDir = os.getcwd()
        else:
            mkDir(destinationDir)

        if not unpack:
            result['Value'] = tarFileName
            return result

        try:
            sandboxSize = 0
            with tarfile.open(name=tarFileName, mode="r") as tf:
                for tarinfo in tf:
                    tf.extract(tarinfo, path=destinationDir)
                    sandboxSize += tarinfo.size
            # FIXME: here we return the size, but otherwise we always return the location: inconsistent
            # FIXME: looks like this size is used by the JobWrapper
            result['Value'] = sandboxSize
        except IOError as e:
            result = S_ERROR("Could not open bundle: %s" % repr(e))

        try:
            os.unlink(tarFileName)
            os.rmdir(tmpSBDir)
        except OSError as e:
            gLogger.warn("Could not remove temporary dir %s: %s" %
                         (tmpSBDir, repr(e)))

        return result
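
For orientation, a hypothetical call to the downloadSandbox() method shown above; the client object, sandbox URL and destination directory are placeholders rather than values from a real installation. With unpack=True the method returns the unpacked size in bytes, otherwise the path of the downloaded archive.

result = client.downloadSandbox('SB:SomeSandboxSE|/SandBox/a/ab/guid.tar.bz2',  # placeholder location
                                destinationDir='/tmp/sandbox-example', unpack=True)
if result['OK']:
    gLogger.info('Sandbox unpacked, total size: %s bytes' % result['Value'])
else:
    gLogger.error(result['Message'])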
Code example #47
File: LHCbJob.py  Project: antolu/LHCbDIRAC
    def setBenderModule(self,
                        benderVersion,
                        modulePath,
                        inputData=None,
                        numberOfEvents=-1):
        """Specifies Bender module to be executed.

       Any additional files should be specified in the job input sandbox.  Input data for
       Bender should be specified here (can be string or list).

       Example usage:

       >>> job = LHCbJob()
       >>> job.setBenderModule('v8r3','BenderExample.PhiMC',
       inputData=['LFN:/lhcb/production/DC06/phys-v2-lumi2/00001758/DST/00001758_00000001_5.dst'],numberOfEvents=100)

       :param benderVersion: Bender Project Version
       :type benderVersion: string
       :param modulePath: Import path to module e.g. BenderExample.PhiMC
       :type modulePath: string
       :param inputData: Input data for application
       :type inputData: single LFN or list of LFNs
       :param numberOfEvents: Number of events to process e.g. -1
       :type numberOfEvents: integer
    """
        kwargs = {
            'benderVersion': benderVersion,
            'modulePath': modulePath,
            'inputData': inputData,
            'numberOfEvents': numberOfEvents
        }
        if not isinstance(benderVersion, str):
            return self._reportError('Bender version should be a string',
                                     __name__, **kwargs)
        if not isinstance(modulePath, str):
            return self._reportError('Bender module path should be a string',
                                     __name__, **kwargs)
        if not isinstance(numberOfEvents, int):
            try:
                numberOfEvents = int(numberOfEvents)
            except ValueError:
                return self._reportError(
                    'Number of events should be an integer or convertible to an integer',
                    __name__, **kwargs)
        if not inputData:
            return S_ERROR("Need input data for Bender applications")

        if isinstance(inputData, str):
            inputData = [inputData]
        if not isinstance(inputData, list):
            return self._reportError(
                'Input data should be specified as a list or a string',
                __name__, **kwargs)

        poolCatName = 'xmlcatalog_file:pool_xml_catalog.xml'
        benderScript = ['#!/usr/bin/env python']
        benderScript.append('from Gaudi.Configuration import FileCatalog')
        benderScript.append('FileCatalog   (Catalogs = ["%s"] )' % poolCatName)
        benderScript.append('import %s as USERMODULE' % modulePath)
        benderScript.append('USERMODULE.configure()')
        benderScript.append('gaudi = USERMODULE.appMgr()')
        benderScript.append('evtSel = gaudi.evtSel()')
        benderScript.append('evtSel.open ( %s ) ' % inputData)
        benderScript.append('USERMODULE.run( %s )\n' % numberOfEvents)
        guid = makeGuid()
        tmpdir = '/tmp/' + guid
        self.log.verbose('Created temporary directory for submission %s' %
                         (tmpdir))
        mkDir(tmpdir)
        with open('%s/BenderScript.py' % tmpdir, 'w') as fopen:
            self.log.verbose('Bender script is: %s/BenderScript.py' % tmpdir)
            fopen.write('\n'.join(benderScript))
        # should try all components of the PYTHONPATH before giving up...
        userModule = '%s.py' % (modulePath.split('.')[-1])
        self.log.verbose('Looking for user module with name: %s' % userModule)
        if os.path.exists(userModule):
            self.addToInputSandbox.append(userModule)
        self.setInputData(inputData)
        self.setApplicationScript('Bender',
                                  benderVersion,
                                  '%s/BenderScript.py' % tmpdir,
                                  logFile='Bender%s.log' % benderVersion)
        return S_OK(benderScript)
Code example #48
    def __writeSub(self, executable, nJobs):
        """ Create the Sub File for submission

    """

        self.log.debug("Working directory: %s " % self.workingDirectory)
        ##We randomize the location of the pilotoutput and log, because there are just too many of them
        pre1 = makeGuid()[:3]
        pre2 = makeGuid()[:3]
        mkDir(os.path.join(self.workingDirectory, pre1, pre2))
        initialDirPrefix = "%s/%s" % (pre1, pre2)

        self.log.debug("InitialDir: %s" %
                       os.path.join(self.workingDirectory, initialDirPrefix))

        self.log.debug("ExtraSubmitString:\n### \n %s \n###" %
                       self.extraSubmitString)

        fd, name = tempfile.mkstemp(suffix='.sub',
                                    prefix='HTCondorCE_',
                                    dir=self.workingDirectory)
        subFile = os.fdopen(fd, 'w')

        executable = os.path.join(self.workingDirectory, executable)

        localScheddOptions = """
ShouldTransferFiles = YES
WhenToTransferOutput = ON_EXIT_OR_EVICT
""" if self.useLocalSchedd else ""

        targetUniverse = "grid" if self.useLocalSchedd else "vanilla"

        sub = """
executable = %(executable)s
universe = %(targetUniverse)s
use_x509userproxy = true
output = $(Cluster).$(Process).out
error = $(Cluster).$(Process).err
log = $(Cluster).$(Process).log
environment = "HTCONDOR_JOBID=$(Cluster).$(Process)"
initialdir = %(initialDir)s
grid_resource = condor %(ceName)s %(ceName)s:9619
transfer_output_files = "" 

%(localScheddOptions)s

kill_sig=SIGTERM

%(extraString)s

Queue %(nJobs)s

""" % dict(
            executable=executable,
            nJobs=nJobs,
            ceName=self.ceName,
            extraString=self.extraSubmitString,
            initialDir=os.path.join(self.workingDirectory, initialDirPrefix),
            localScheddOptions=localScheddOptions,
            targetUniverse=targetUniverse,
        )
        subFile.write(sub)
        subFile.close()
        return name
Code example #49
File: dirac_configure.py  Project: DIRACGrid/DIRAC
def runDiracConfigure(params):
    Script.registerSwitch("S:", "Setup=", "Set <setup> as DIRAC setup", params.setSetup)
    Script.registerSwitch("e:", "Extensions=", "Set <extensions> as DIRAC extensions", params.setExtensions)
    Script.registerSwitch("C:", "ConfigurationServer=", "Set <server> as DIRAC configuration server", params.setServer)
    Script.registerSwitch("I", "IncludeAllServers", "include all Configuration Servers", params.setAllServers)
    Script.registerSwitch("n:", "SiteName=", "Set <sitename> as DIRAC Site Name", params.setSiteName)
    Script.registerSwitch("N:", "CEName=", "Determine <sitename> from <cename>", params.setCEName)
    Script.registerSwitch("V:", "VO=", "Set the VO name", params.setVO)

    Script.registerSwitch("W:", "gateway=", "Configure <gateway> as DIRAC Gateway for the site", params.setGateway)

    Script.registerSwitch("U", "UseServerCertificate", "Configure to use Server Certificate", params.setServerCert)
    Script.registerSwitch("H", "SkipCAChecks", "Configure to skip check of CAs", params.setSkipCAChecks)
    Script.registerSwitch("D", "SkipCADownload", "Configure to skip download of CAs", params.setSkipCADownload)
    Script.registerSwitch(
        "M", "SkipVOMSDownload", "Configure to skip download of VOMS info", params.setSkipVOMSDownload
    )

    Script.registerSwitch("v", "UseVersionsDir", "Use versions directory", params.setUseVersionsDir)

    Script.registerSwitch("A:", "Architecture=", "Configure /Architecture=<architecture>", params.setArchitecture)
    Script.registerSwitch("L:", "LocalSE=", "Configure LocalSite/LocalSE=<localse>", params.setLocalSE)

    Script.registerSwitch(
        "F",
        "ForceUpdate",
        "Force Update of cfg file (i.e. dirac.cfg) (otherwise nothing happens if dirac.cfg already exists)",
        params.forceUpdate,
    )

    Script.registerSwitch("O:", "output=", "output configuration file", params.setOutput)

    Script.parseCommandLine(ignoreErrors=True)

    if not params.logLevel:
        params.logLevel = DIRAC.gConfig.getValue(cfgInstallPath("LogLevel"), "")
        if params.logLevel:
            DIRAC.gLogger.setLevel(params.logLevel)
    else:
        DIRAC.gConfig.setOptionValue(cfgInstallPath("LogLevel"), params.logLevel)

    if not params.gatewayServer:
        newGatewayServer = DIRAC.gConfig.getValue(cfgInstallPath("Gateway"), "")
        if newGatewayServer:
            params.setGateway(newGatewayServer)

    if not params.configurationServer:
        newConfigurationServer = DIRAC.gConfig.getValue(cfgInstallPath("ConfigurationServer"), "")
        if newConfigurationServer:
            params.setServer(newConfigurationServer)

    if not params.includeAllServers:
        newIncludeAllServer = DIRAC.gConfig.getValue(cfgInstallPath("IncludeAllServers"), False)
        if newIncludeAllServer:
            params.setAllServers(True)

    if not params.setup:
        newSetup = DIRAC.gConfig.getValue(cfgInstallPath("Setup"), "")
        if newSetup:
            params.setSetup(newSetup)

    if not params.siteName:
        newSiteName = DIRAC.gConfig.getValue(cfgInstallPath("SiteName"), "")
        if newSiteName:
            params.setSiteName(newSiteName)

    if not params.ceName:
        newCEName = DIRAC.gConfig.getValue(cfgInstallPath("CEName"), "")
        if newCEName:
            params.setCEName(newCEName)

    if not params.useServerCert:
        newUserServerCert = DIRAC.gConfig.getValue(cfgInstallPath("UseServerCertificate"), False)
        if newUserServerCert:
            params.setServerCert(newUserServerCert)

    if not params.skipCAChecks:
        newSkipCAChecks = DIRAC.gConfig.getValue(cfgInstallPath("SkipCAChecks"), False)
        if newSkipCAChecks:
            params.setSkipCAChecks(newSkipCAChecks)

    if not params.skipCADownload:
        newSkipCADownload = DIRAC.gConfig.getValue(cfgInstallPath("SkipCADownload"), False)
        if newSkipCADownload:
            params.setSkipCADownload(newSkipCADownload)

    if not params.useVersionsDir:
        newUseVersionsDir = DIRAC.gConfig.getValue(cfgInstallPath("UseVersionsDir"), False)
        if newUseVersionsDir:
            params.setUseVersionsDir(newUseVersionsDir)
            # Set proper defaults in the configuration (even if they will later be overwritten by gComponentInstaller)
            instancePath = os.path.dirname(os.path.dirname(DIRAC.rootPath))
            rootPath = os.path.join(instancePath, "pro")
            DIRAC.gConfig.setOptionValue(cfgInstallPath("InstancePath"), instancePath)
            DIRAC.gConfig.setOptionValue(cfgInstallPath("RootPath"), rootPath)

    if not params.architecture:
        newArchitecture = DIRAC.gConfig.getValue(cfgInstallPath("Architecture"), "")
        if newArchitecture:
            params.setArchitecture(newArchitecture)

    if not params.vo:
        newVO = DIRAC.gConfig.getValue(cfgInstallPath("VirtualOrganization"), "")
        if newVO:
            params.setVO(newVO)

    if not params.extensions:
        newExtensions = DIRAC.gConfig.getValue(cfgInstallPath("Extensions"), "")
        if newExtensions:
            params.setExtensions(newExtensions)

    DIRAC.gLogger.notice("Executing: %s " % (" ".join(sys.argv)))
    DIRAC.gLogger.notice('Checking DIRAC installation at "%s"' % DIRAC.rootPath)

    if params.update:
        if params.outputFile:
            DIRAC.gLogger.notice("Will update the output file %s" % params.outputFile)
        else:
            DIRAC.gLogger.notice("Will update %s" % DIRAC.gConfig.diracConfigFilePath)

    if params.setup:
        DIRAC.gLogger.verbose("/DIRAC/Setup =", params.setup)
    if params.vo:
        DIRAC.gLogger.verbose("/DIRAC/VirtualOrganization =", params.vo)
    if params.configurationServer:
        DIRAC.gLogger.verbose("/DIRAC/Configuration/Servers =", params.configurationServer)

    if params.siteName:
        DIRAC.gLogger.verbose("/LocalSite/Site =", params.siteName)
    if params.architecture:
        DIRAC.gLogger.verbose("/LocalSite/Architecture =", params.architecture)
    if params.localSE:
        DIRAC.gLogger.verbose("/LocalSite/localSE =", params.localSE)

    if not params.useServerCert:
        DIRAC.gLogger.verbose("/DIRAC/Security/UseServerCertificate =", "no")
        # Being sure it was not there before
        Script.localCfg.deleteOption("/DIRAC/Security/UseServerCertificate")
        Script.localCfg.addDefaultEntry("/DIRAC/Security/UseServerCertificate", "no")
    else:
        DIRAC.gLogger.verbose("/DIRAC/Security/UseServerCertificate =", "yes")
        # Being sure it was not there before
        Script.localCfg.deleteOption("/DIRAC/Security/UseServerCertificate")
        Script.localCfg.addDefaultEntry("/DIRAC/Security/UseServerCertificate", "yes")

    host = DIRAC.gConfig.getValue(cfgInstallPath("Host"), "")
    if host:
        DIRAC.gConfig.setOptionValue(cfgPath("DIRAC", "Hostname"), host)

    if params.skipCAChecks:
        DIRAC.gLogger.verbose("/DIRAC/Security/SkipCAChecks =", "yes")
        # Being sure it was not there before
        Script.localCfg.deleteOption("/DIRAC/Security/SkipCAChecks")
        Script.localCfg.addDefaultEntry("/DIRAC/Security/SkipCAChecks", "yes")
    else:
        # Necessary to allow initial download of CA's
        if not params.skipCADownload:
            DIRAC.gConfig.setOptionValue("/DIRAC/Security/SkipCAChecks", "yes")
    if not params.skipCADownload:
        Script.enableCS()
        try:
            dirName = os.path.join(DIRAC.rootPath, "etc", "grid-security", "certificates")
            mkDir(dirName)
        except Exception:
            DIRAC.gLogger.exception()
            DIRAC.gLogger.fatal("Fail to create directory:", dirName)
            DIRAC.exit(-1)
        try:
            bdc = BundleDeliveryClient()
            result = bdc.syncCAs()
            if result["OK"]:
                result = bdc.syncCRLs()
        except Exception as e:
            DIRAC.gLogger.error("Failed to sync CAs and CRLs: %s" % str(e))

        Script.localCfg.deleteOption("/DIRAC/Security/SkipCAChecks")

    if params.ceName or params.siteName:
        # This is used in the pilot context, we should have a proxy, or a certificate, and access to CS
        if params.useServerCert:
            # Being sure it was not there before
            Script.localCfg.deleteOption("/DIRAC/Security/UseServerCertificate")
            Script.localCfg.addDefaultEntry("/DIRAC/Security/UseServerCertificate", "yes")
        Script.enableCS()
        # Get the site resource section
        gridSections = DIRAC.gConfig.getSections("/Resources/Sites/")
        if not gridSections["OK"]:
            DIRAC.gLogger.warn("Could not get grid sections list")
            grids = []
        else:
            grids = gridSections["Value"]
        # try to get siteName from ceName or Local SE from siteName using Remote Configuration
        for grid in grids:
            siteSections = DIRAC.gConfig.getSections("/Resources/Sites/%s/" % grid)
            if not siteSections["OK"]:
                DIRAC.gLogger.warn("Could not get %s site list" % grid)
                sites = []
            else:
                sites = siteSections["Value"]

            if not params.siteName:
                if params.ceName:
                    for site in sites:
                        res = DIRAC.gConfig.getSections("/Resources/Sites/%s/%s/CEs/" % (grid, site), [])
                        if not res["OK"]:
                            DIRAC.gLogger.warn("Could not get %s CEs list" % site)
                        if params.ceName in res["Value"]:
                            params.siteName = site
                            break
            if params.siteName:
                DIRAC.gLogger.notice("Setting /LocalSite/Site = %s" % params.siteName)
                Script.localCfg.addDefaultEntry("/LocalSite/Site", params.siteName)
                DIRAC.__siteName = False
                if params.ceName:
                    DIRAC.gLogger.notice("Setting /LocalSite/GridCE = %s" % params.ceName)
                    Script.localCfg.addDefaultEntry("/LocalSite/GridCE", params.ceName)

                if not params.localSE and params.siteName in sites:
                    params.localSE = getSEsForSite(params.siteName)
                    if params.localSE["OK"] and params.localSE["Value"]:
                        params.localSE = ",".join(params.localSE["Value"])
                        DIRAC.gLogger.notice("Setting /LocalSite/LocalSE =", params.localSE)
                        Script.localCfg.addDefaultEntry("/LocalSite/LocalSE", params.localSE)
                    break

    if params.gatewayServer:
        DIRAC.gLogger.verbose("/DIRAC/Gateways/%s =" % DIRAC.siteName(), params.gatewayServer)
        Script.localCfg.addDefaultEntry("/DIRAC/Gateways/%s" % DIRAC.siteName(), params.gatewayServer)

    # Create the local cfg if it is not yet there
    if not params.outputFile:
        params.outputFile = DIRAC.gConfig.diracConfigFilePath
    params.outputFile = os.path.abspath(params.outputFile)
    if not os.path.exists(params.outputFile):
        configDir = os.path.dirname(params.outputFile)
        mkDir(configDir)
        params.update = True
        DIRAC.gConfig.dumpLocalCFGToFile(params.outputFile)

    if params.includeAllServers:
        # We need user proxy or server certificate to continue in order to get all the CS URLs
        if not params.useServerCert:
            Script.enableCS()
            result = getProxyInfo()
            if not result["OK"]:
                DIRAC.gLogger.notice("Configuration is not completed because no user proxy is available")
                DIRAC.gLogger.notice("Create one using dirac-proxy-init and execute again with -F option")
                return 1
        else:
            Script.localCfg.deleteOption("/DIRAC/Security/UseServerCertificate")
            # When using Server Certs CA's will be checked, the flag only disables initial download
            # this will be replaced by the use of SkipCADownload
            Script.localCfg.addDefaultEntry("/DIRAC/Security/UseServerCertificate", "yes")
            Script.enableCS()

        DIRAC.gConfig.setOptionValue("/DIRAC/Configuration/Servers", ",".join(DIRAC.gConfig.getServersList()))
        DIRAC.gLogger.verbose("/DIRAC/Configuration/Servers =", ",".join(DIRAC.gConfig.getServersList()))

    if params.useServerCert:
        # always removing before dumping
        Script.localCfg.deleteOption("/DIRAC/Security/UseServerCertificate")
        Script.localCfg.deleteOption("/DIRAC/Security/SkipCAChecks")
        Script.localCfg.deleteOption("/DIRAC/Security/SkipVOMSDownload")

    if params.update:
        DIRAC.gConfig.dumpLocalCFGToFile(params.outputFile)

    # ## LAST PART: do the vomsdir/vomses magic

    # This has to be done for all VOs in the installation

    if params.skipVOMSDownload:
        return 0

    result = Registry.getVOMSServerInfo()
    if not result["OK"]:
        return 1

    error = ""
    vomsDict = result["Value"]
    for vo in vomsDict:
        voName = vomsDict[vo]["VOMSName"]
        vomsDirPath = os.path.join(DIRAC.rootPath, "etc", "grid-security", "vomsdir", voName)
        vomsesDirPath = os.path.join(DIRAC.rootPath, "etc", "grid-security", "vomses")
        for path in (vomsDirPath, vomsesDirPath):
            mkDir(path)
        vomsesLines = []
        for vomsHost in vomsDict[vo].get("Servers", {}):
            hostFilePath = os.path.join(vomsDirPath, "%s.lsc" % vomsHost)
            try:
                DN = vomsDict[vo]["Servers"][vomsHost]["DN"]
                CA = vomsDict[vo]["Servers"][vomsHost]["CA"]
                port = vomsDict[vo]["Servers"][vomsHost]["Port"]
                if not DN or not CA or not port:
                    DIRAC.gLogger.error("DN = %s" % DN)
                    DIRAC.gLogger.error("CA = %s" % CA)
                    DIRAC.gLogger.error("Port = %s" % port)
                    DIRAC.gLogger.error("Missing Parameter for %s" % vomsHost)
                    continue
                with open(hostFilePath, "wt") as fd:
                    fd.write("%s\n%s\n" % (DN, CA))
                vomsesLines.append('"%s" "%s" "%s" "%s" "%s" "24"' % (voName, vomsHost, port, DN, voName))
                DIRAC.gLogger.notice("Created vomsdir file %s" % hostFilePath)
            except Exception:
                DIRAC.gLogger.exception("Could not generate vomsdir file for host", vomsHost)
                error = "Could not generate vomsdir file for VO %s, host %s" % (voName, vomsHost)
        try:
            vomsesFilePath = os.path.join(vomsesDirPath, voName)
            with open(vomsesFilePath, "wt") as fd:
                fd.write("%s\n" % "\n".join(vomsesLines))
            DIRAC.gLogger.notice("Created vomses file %s" % vomsesFilePath)
        except Exception:
            DIRAC.gLogger.exception("Could not generate vomses file")
            error = "Could not generate vomses file for VO %s" % voName

    if params.useServerCert:
        Script.localCfg.deleteOption("/DIRAC/Security/UseServerCertificate")
        # When using Server Certs CA's will be checked, the flag only disables initial download
        # this will be replaced by the use of SkipCADownload
        Script.localCfg.deleteOption("/DIRAC/Security/SkipCAChecks")

    if error:
        return 1

    return 0
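
For reference, a minimal standalone sketch of the vomsdir/vomses layout produced by the loop above. The installation root, VO name, host, DN, CA and port are hypothetical placeholders (real values come from Registry.getVOMSServerInfo()), and os.makedirs stands in for DIRAC's mkDir:

import os

rootPath = "/opt/dirac"                                  # hypothetical installation root
voName = "myvo"                                          # hypothetical VO
vomsHost = "voms.example.org"
DN = "/DC=org/DC=example/CN=voms.example.org"
CA = "/DC=org/DC=example/CN=Example CA"
port = 15000

vomsDirPath = os.path.join(rootPath, "etc", "grid-security", "vomsdir", voName)
vomsesDirPath = os.path.join(rootPath, "etc", "grid-security", "vomses")
for path in (vomsDirPath, vomsesDirPath):
    os.makedirs(path, exist_ok=True)                     # stand-in for mkDir

# vomsdir/<vo>/<host>.lsc: server DN and issuing CA, one per line
with open(os.path.join(vomsDirPath, "%s.lsc" % vomsHost), "wt") as fd:
    fd.write("%s\n%s\n" % (DN, CA))

# vomses/<vo>: one quoted line per VOMS server
with open(os.path.join(vomsesDirPath, voName), "wt") as fd:
    fd.write('"%s" "%s" "%s" "%s" "%s" "24"\n' % (voName, vomsHost, port, DN, voName))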
コード例 #50
0
ファイル: dirac-configure.py プロジェクト: mesmith75/DIRAC
  DIRAC.gConfig.setOptionValue( cfgPath( "DIRAC", "Hostname" ), host )

if skipCAChecks:
  DIRAC.gLogger.verbose( '/DIRAC/Security/SkipCAChecks =', 'yes' )
  #Being sure it was not there before
  Script.localCfg.deleteOption( '/DIRAC/Security/SkipCAChecks' )
  Script.localCfg.addDefaultEntry( '/DIRAC/Security/SkipCAChecks', 'yes' )
else:
  # Necessary to allow initial download of CA's
  if not skipCADownload:
    DIRAC.gConfig.setOptionValue( '/DIRAC/Security/SkipCAChecks', 'yes' )
if not skipCADownload:
  Script.enableCS()
  try:
    dirName = os.path.join( DIRAC.rootPath, 'etc', 'grid-security', 'certificates' )
    mkDir(dirName)
  except:
    DIRAC.gLogger.exception()
    DIRAC.gLogger.fatal( 'Fail to create directory:', dirName )
    DIRAC.exit( -1 )
  try:
    from DIRAC.FrameworkSystem.Client.BundleDeliveryClient import BundleDeliveryClient
    bdc = BundleDeliveryClient()
    result = bdc.syncCAs()
    if result[ 'OK' ]:
      result = bdc.syncCRLs()
  except:
    DIRAC.gLogger.exception( 'Could not import BundleDeliveryClient' )
    pass
  if not skipCAChecks:
    Script.localCfg.deleteOption( '/DIRAC/Security/SkipCAChecks' )
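
A self-contained sketch of the CA/CRL synchronisation step shown in the fragment above, with the imports the excerpt omits. It assumes an initialised DIRAC client configuration (e.g. after Script.parseCommandLine()); the import paths are the usual DIRAC ones but are not part of the excerpt:

import os

from DIRAC import exit as diracExit, gLogger, rootPath
from DIRAC.Core.Utilities.File import mkDir
from DIRAC.FrameworkSystem.Client.BundleDeliveryClient import BundleDeliveryClient

certDir = os.path.join(rootPath, "etc", "grid-security", "certificates")
try:
    mkDir(certDir)
except Exception:
    gLogger.exception("Failed to create directory:", certDir)
    diracExit(-1)

bdc = BundleDeliveryClient()
result = bdc.syncCAs()
if result["OK"]:
    result = bdc.syncCRLs()
if not result["OK"]:
    gLogger.error("CA/CRL synchronisation failed:", result.get("Message", ""))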
コード例 #51
0
def main():
    global logLevel
    global setup
    global configurationServer
    global includeAllServers
    global gatewayServer
    global siteName
    global useServerCert
    global skipCAChecks
    global skipCADownload
    global useVersionsDir
    global architecture
    global localSE
    global ceName
    global vo
    global update
    global outputFile
    global skipVOMSDownload
    global extensions

    Script.disableCS()

    Script.registerSwitch("S:", "Setup=", "Set <setup> as DIRAC setup",
                          setSetup)
    Script.registerSwitch("e:", "Extensions=",
                          "Set <extensions> as DIRAC extensions",
                          setExtensions)
    Script.registerSwitch("C:", "ConfigurationServer=",
                          "Set <server> as DIRAC configuration server",
                          setServer)
    Script.registerSwitch("I", "IncludeAllServers",
                          "include all Configuration Servers", setAllServers)
    Script.registerSwitch("n:", "SiteName=",
                          "Set <sitename> as DIRAC Site Name", setSiteName)
    Script.registerSwitch("N:", "CEName=",
                          "Determiner <sitename> from <cename>", setCEName)
    Script.registerSwitch("V:", "VO=", "Set the VO name", setVO)

    Script.registerSwitch("W:", "gateway=",
                          "Configure <gateway> as DIRAC Gateway for the site",
                          setGateway)

    Script.registerSwitch("U", "UseServerCertificate",
                          "Configure to use Server Certificate", setServerCert)
    Script.registerSwitch("H", "SkipCAChecks",
                          "Configure to skip check of CAs", setSkipCAChecks)
    Script.registerSwitch("D", "SkipCADownload",
                          "Configure to skip download of CAs",
                          setSkipCADownload)
    Script.registerSwitch("M", "SkipVOMSDownload",
                          "Configure to skip download of VOMS info",
                          setSkipVOMSDownload)

    Script.registerSwitch("v", "UseVersionsDir", "Use versions directory",
                          setUseVersionsDir)

    Script.registerSwitch("A:", "Architecture=",
                          "Configure /Architecture=<architecture>",
                          setArchitecture)
    Script.registerSwitch("L:", "LocalSE=",
                          "Configure LocalSite/LocalSE=<localse>", setLocalSE)

    Script.registerSwitch(
        "F", "ForceUpdate",
        "Force Update of cfg file (i.e. dirac.cfg) (otherwise nothing happens if dirac.cfg already exists)",
        forceUpdate)

    Script.registerSwitch("O:", "output=", "output configuration file",
                          setOutput)

    Script.setUsageMessage('\n'.join([
        __doc__.split('\n')[1], '\nUsage:',
        '  %s [options] ...\n' % Script.scriptName
    ]))

    Script.parseCommandLine(ignoreErrors=True)
    args = Script.getExtraCLICFGFiles()

    if not logLevel:
        logLevel = DIRAC.gConfig.getValue(cfgInstallPath('LogLevel'), '')
        if logLevel:
            DIRAC.gLogger.setLevel(logLevel)
    else:
        DIRAC.gConfig.setOptionValue(cfgInstallPath('LogLevel'), logLevel)

    if not gatewayServer:
        newGatewayServer = DIRAC.gConfig.getValue(cfgInstallPath('Gateway'),
                                                  '')
        if newGatewayServer:
            setGateway(newGatewayServer)

    if not configurationServer:
        newConfigurationServer = DIRAC.gConfig.getValue(
            cfgInstallPath('ConfigurationServer'), '')
        if newConfigurationServer:
            setServer(newConfigurationServer)

    if not includeAllServers:
        newIncludeAllServer = DIRAC.gConfig.getValue(
            cfgInstallPath('IncludeAllServers'), False)
        if newIncludeAllServer:
            setAllServers(True)

    if not setup:
        newSetup = DIRAC.gConfig.getValue(cfgInstallPath('Setup'), '')
        if newSetup:
            setSetup(newSetup)

    if not siteName:
        newSiteName = DIRAC.gConfig.getValue(cfgInstallPath('SiteName'), '')
        if newSiteName:
            setSiteName(newSiteName)

    if not ceName:
        newCEName = DIRAC.gConfig.getValue(cfgInstallPath('CEName'), '')
        if newCEName:
            setCEName(newCEName)

    if not useServerCert:
        newUserServerCert = DIRAC.gConfig.getValue(
            cfgInstallPath('UseServerCertificate'), False)
        if newUserServerCert:
            setServerCert(newUserServerCert)

    if not skipCAChecks:
        newSkipCAChecks = DIRAC.gConfig.getValue(
            cfgInstallPath('SkipCAChecks'), False)
        if newSkipCAChecks:
            setSkipCAChecks(newSkipCAChecks)

    if not skipCADownload:
        newSkipCADownload = DIRAC.gConfig.getValue(
            cfgInstallPath('SkipCADownload'), False)
        if newSkipCADownload:
            setSkipCADownload(newSkipCADownload)

    if not useVersionsDir:
        newUseVersionsDir = DIRAC.gConfig.getValue(
            cfgInstallPath('UseVersionsDir'), False)
        if newUseVersionsDir:
            setUseVersionsDir(newUseVersionsDir)
            # Set proper defaults in the configuration (even if they will later be overwritten by gComponentInstaller)
            instancePath = os.path.dirname(os.path.dirname(DIRAC.rootPath))
            rootPath = os.path.join(instancePath, 'pro')
            DIRAC.gConfig.setOptionValue(cfgInstallPath('InstancePath'),
                                         instancePath)
            DIRAC.gConfig.setOptionValue(cfgInstallPath('RootPath'), rootPath)

    if not architecture:
        newArchitecture = DIRAC.gConfig.getValue(
            cfgInstallPath('Architecture'), '')
        if newArchitecture:
            setArchitecture(newArchitecture)

    if not vo:
        newVO = DIRAC.gConfig.getValue(cfgInstallPath('VirtualOrganization'),
                                       '')
        if newVO:
            setVO(newVO)

    if not extensions:
        newExtensions = DIRAC.gConfig.getValue(cfgInstallPath('Extensions'),
                                               '')
        if newExtensions:
            setExtensions(newExtensions)

    DIRAC.gLogger.notice('Executing: %s ' % (' '.join(sys.argv)))
    DIRAC.gLogger.notice('Checking DIRAC installation at "%s"' %
                         DIRAC.rootPath)

    if update:
        if outputFile:
            DIRAC.gLogger.notice('Will update the output file %s' % outputFile)
        else:
            DIRAC.gLogger.notice('Will update %s' %
                                 DIRAC.gConfig.diracConfigFilePath)

    if setup:
        DIRAC.gLogger.verbose('/DIRAC/Setup =', setup)
    if vo:
        DIRAC.gLogger.verbose('/DIRAC/VirtualOrganization =', vo)
    if configurationServer:
        DIRAC.gLogger.verbose('/DIRAC/Configuration/Servers =',
                              configurationServer)

    if siteName:
        DIRAC.gLogger.verbose('/LocalSite/Site =', siteName)
    if architecture:
        DIRAC.gLogger.verbose('/LocalSite/Architecture =', architecture)
    if localSE:
        DIRAC.gLogger.verbose('/LocalSite/localSE =', localSE)

    if not useServerCert:
        DIRAC.gLogger.verbose('/DIRAC/Security/UseServerCertificate =', 'no')
        # Being sure it was not there before
        Script.localCfg.deleteOption('/DIRAC/Security/UseServerCertificate')
        Script.localCfg.addDefaultEntry('/DIRAC/Security/UseServerCertificate',
                                        'no')
    else:
        DIRAC.gLogger.verbose('/DIRAC/Security/UseServerCertificate =', 'yes')
        # Being sure it was not there before
        Script.localCfg.deleteOption('/DIRAC/Security/UseServerCertificate')
        Script.localCfg.addDefaultEntry('/DIRAC/Security/UseServerCertificate',
                                        'yes')

    host = DIRAC.gConfig.getValue(cfgInstallPath("Host"), "")
    if host:
        DIRAC.gConfig.setOptionValue(cfgPath("DIRAC", "Hostname"), host)

    if skipCAChecks:
        DIRAC.gLogger.verbose('/DIRAC/Security/SkipCAChecks =', 'yes')
        # Being sure it was not there before
        Script.localCfg.deleteOption('/DIRAC/Security/SkipCAChecks')
        Script.localCfg.addDefaultEntry('/DIRAC/Security/SkipCAChecks', 'yes')
    else:
        # Necessary to allow initial download of CA's
        if not skipCADownload:
            DIRAC.gConfig.setOptionValue('/DIRAC/Security/SkipCAChecks', 'yes')
    if not skipCADownload:
        Script.enableCS()
        try:
            dirName = os.path.join(DIRAC.rootPath, 'etc', 'grid-security',
                                   'certificates')
            mkDir(dirName)
        except BaseException:
            DIRAC.gLogger.exception()
            DIRAC.gLogger.fatal('Fail to create directory:', dirName)
            DIRAC.exit(-1)
        try:
            bdc = BundleDeliveryClient()
            result = bdc.syncCAs()
            if result['OK']:
                result = bdc.syncCRLs()
        except Exception as e:
            DIRAC.gLogger.error('Failed to sync CAs and CRLs: %s' % str(e))

        if not skipCAChecks:
            Script.localCfg.deleteOption('/DIRAC/Security/SkipCAChecks')

    if ceName or siteName:
        # This is used in the pilot context, we should have a proxy, or a certificate, and access to CS
        if useServerCert:
            # Being sure it was not there before
            Script.localCfg.deleteOption(
                '/DIRAC/Security/UseServerCertificate')
            Script.localCfg.addDefaultEntry(
                '/DIRAC/Security/UseServerCertificate', 'yes')
        Script.enableCS()
        # Get the site resource section
        gridSections = DIRAC.gConfig.getSections('/Resources/Sites/')
        if not gridSections['OK']:
            DIRAC.gLogger.warn('Could not get grid sections list')
            grids = []
        else:
            grids = gridSections['Value']
        # try to get siteName from ceName or Local SE from siteName using Remote Configuration
        for grid in grids:
            siteSections = DIRAC.gConfig.getSections('/Resources/Sites/%s/' %
                                                     grid)
            if not siteSections['OK']:
                DIRAC.gLogger.warn('Could not get %s site list' % grid)
                sites = []
            else:
                sites = siteSections['Value']

            if not siteName:
                if ceName:
                    for site in sites:
                        res = DIRAC.gConfig.getSections(
                            '/Resources/Sites/%s/%s/CEs/' % (grid, site), [])
                        if not res['OK']:
                            DIRAC.gLogger.warn('Could not get %s CEs list' % site)
                            continue
                        if ceName in res['Value']:
                            siteName = site
                            break
            if siteName:
                DIRAC.gLogger.notice('Setting /LocalSite/Site = %s' % siteName)
                Script.localCfg.addDefaultEntry('/LocalSite/Site', siteName)
                DIRAC.__siteName = False
                if ceName:
                    DIRAC.gLogger.notice('Setting /LocalSite/GridCE = %s' %
                                         ceName)
                    Script.localCfg.addDefaultEntry('/LocalSite/GridCE',
                                                    ceName)

                if not localSE and siteName in sites:
                    localSE = getSEsForSite(siteName)
                    if localSE['OK'] and localSE['Value']:
                        localSE = ','.join(localSE['Value'])
                        DIRAC.gLogger.notice('Setting /LocalSite/LocalSE =',
                                             localSE)
                        Script.localCfg.addDefaultEntry(
                            '/LocalSite/LocalSE', localSE)
                    break

    if gatewayServer:
        DIRAC.gLogger.verbose('/DIRAC/Gateways/%s =' % DIRAC.siteName(),
                              gatewayServer)
        Script.localCfg.addDefaultEntry(
            '/DIRAC/Gateways/%s' % DIRAC.siteName(), gatewayServer)

    # Create the local cfg if it is not yet there
    if not outputFile:
        outputFile = DIRAC.gConfig.diracConfigFilePath
    outputFile = os.path.abspath(outputFile)
    if not os.path.exists(outputFile):
        configDir = os.path.dirname(outputFile)
        mkDir(configDir)
        update = True
        DIRAC.gConfig.dumpLocalCFGToFile(outputFile)

    if includeAllServers:
        # We need user proxy or server certificate to continue in order to get all the CS URLs
        if not useServerCert:
            Script.enableCS()
            result = getProxyInfo()
            if not result['OK']:
                DIRAC.gLogger.notice(
                    'Configuration is not completed because no user proxy is available'
                )
                DIRAC.gLogger.notice(
                    'Create one using dirac-proxy-init and execute again with -F option'
                )
                sys.exit(1)
        else:
            Script.localCfg.deleteOption(
                '/DIRAC/Security/UseServerCertificate')
            # When using Server Certs CA's will be checked, the flag only disables initial download
            # this will be replaced by the use of SkipCADownload
            Script.localCfg.addDefaultEntry(
                '/DIRAC/Security/UseServerCertificate', 'yes')
            Script.enableCS()

        DIRAC.gConfig.setOptionValue('/DIRAC/Configuration/Servers',
                                     ','.join(DIRAC.gConfig.getServersList()))
        DIRAC.gLogger.verbose('/DIRAC/Configuration/Servers =',
                              ','.join(DIRAC.gConfig.getServersList()))

    if useServerCert:
        # always removing before dumping
        Script.localCfg.deleteOption('/DIRAC/Security/UseServerCertificate')
        Script.localCfg.deleteOption('/DIRAC/Security/SkipCAChecks')
        Script.localCfg.deleteOption('/DIRAC/Security/SkipVOMSDownload')

    if update:
        DIRAC.gConfig.dumpLocalCFGToFile(outputFile)

    # ## LAST PART: do the vomsdir/vomses magic

    # This has to be done for all VOs in the installation

    if skipVOMSDownload:
        # We stop here
        sys.exit(0)

    result = Registry.getVOMSServerInfo()
    if not result['OK']:
        sys.exit(1)

    error = ''
    vomsDict = result['Value']
    for vo in vomsDict:
        voName = vomsDict[vo]['VOMSName']
        vomsDirPath = os.path.join(DIRAC.rootPath, 'etc', 'grid-security',
                                   'vomsdir', voName)
        vomsesDirPath = os.path.join(DIRAC.rootPath, 'etc', 'grid-security',
                                     'vomses')
        for path in (vomsDirPath, vomsesDirPath):
            mkDir(path)
        vomsesLines = []
        for vomsHost in vomsDict[vo].get('Servers', {}):
            hostFilePath = os.path.join(vomsDirPath, "%s.lsc" % vomsHost)
            try:
                DN = vomsDict[vo]['Servers'][vomsHost]['DN']
                CA = vomsDict[vo]['Servers'][vomsHost]['CA']
                port = vomsDict[vo]['Servers'][vomsHost]['Port']
                if not DN or not CA or not port:
                    DIRAC.gLogger.error('DN = %s' % DN)
                    DIRAC.gLogger.error('CA = %s' % CA)
                    DIRAC.gLogger.error('Port = %s' % port)
                    DIRAC.gLogger.error('Missing Parameter for %s' % vomsHost)
                    continue
                with open(hostFilePath, "wt") as fd:
                    fd.write("%s\n%s\n" % (DN, CA))
                vomsesLines.append('"%s" "%s" "%s" "%s" "%s" "24"' %
                                   (voName, vomsHost, port, DN, voName))
                DIRAC.gLogger.notice("Created vomsdir file %s" % hostFilePath)
            except Exception:
                DIRAC.gLogger.exception(
                    "Could not generate vomsdir file for host", vomsHost)
                error = "Could not generate vomsdir file for VO %s, host %s" % (
                    voName, vomsHost)
        try:
            vomsesFilePath = os.path.join(vomsesDirPath, voName)
            with open(vomsesFilePath, "wt") as fd:
                fd.write("%s\n" % "\n".join(vomsesLines))
            DIRAC.gLogger.notice("Created vomses file %s" % vomsesFilePath)
        except Exception:
            DIRAC.gLogger.exception("Could not generate vomses file")
            error = "Could not generate vomses file for VO %s" % voName

    if useServerCert:
        Script.localCfg.deleteOption('/DIRAC/Security/UseServerCertificate')
        # When using Server Certs CA's will be checked, the flag only disables initial download
        # this will be replaced by the use of SkipCADownload
        Script.localCfg.deleteOption('/DIRAC/Security/SkipCAChecks')

    if error:
        sys.exit(1)

    sys.exit(0)
コード例 #52
0
    def __getJobOutput(self, jobID, outTypes):
        """Get job outputs: output, error and logging files from HTCondor

        :param str jobID: job identifier
        :param list outTypes: output types targeted (output, error and/or logging)
        """
        _job, pathToResult, condorID = condorIDAndPathToResultFromJobRef(jobID)
        iwd = os.path.join(self.workingDirectory, pathToResult)

        try:
            mkDir(iwd)
        except OSError as e:
            errorMessage = "Failed to create the pilot output directory"
            self.log.exception(errorMessage, iwd)
            return S_ERROR(e.errno, "%s (%s)" % (errorMessage, iwd))

        if not self.useLocalSchedd:
            cmd = [
                "condor_transfer_data", "-pool",
                "%s:9619" % self.ceName, "-name", self.ceName, condorID
            ]
            result = executeGridCommand(self.proxy, cmd, self.gridEnv)
            self.log.verbose(result)

            # Getting 'logging' without 'error' and 'output' is possible but will generate command errors
            # We do not check the command errors if we only want 'logging'
            if "error" in outTypes or "output" in outTypes:
                errorMessage = "Failed to get job output from htcondor"
                if not result["OK"]:
                    self.log.error(errorMessage, result["Message"])
                    return result
                # Even if result is OK, the actual exit code of cmd can still be an error
                if result["OK"] and result["Value"][0] != 0:
                    outMessage = result["Value"][1].strip()
                    errMessage = result["Value"][2].strip()
                    varMessage = outMessage + " " + errMessage
                    self.log.error(errorMessage, varMessage)
                    return S_ERROR("%s: %s" % (errorMessage, varMessage))

        outputsSuffix = {"output": "out", "error": "err", "logging": "log"}
        outputs = {}
        for output, suffix in outputsSuffix.items():
            resOut = findFile(self.workingDirectory,
                              "%s.%s" % (condorID, suffix), pathToResult)
            if not resOut["OK"]:
                # Return an error if the output type was targeted, else we continue
                if output in outTypes:
                    self.log.error("Failed to find",
                                   "%s for condor job %s" % (output, jobID))
                    return resOut
                continue
            outputfilename = resOut["Value"]

            try:
                # Only read the file if its output type was requested; otherwise fall through to the removal below
                if output in outTypes:
                    with open(outputfilename) as outputfile:
                        outputs[output] = outputfile.read()
                # If a local schedd is used, we cannot retrieve the outputs again if we delete them
                if not self.useLocalSchedd:
                    os.remove(outputfilename)
            except IOError as e:
                self.log.error("Failed to open",
                               "%s file: %s" % (output, str(e)))
                return S_ERROR("Failed to get pilot %s" % output)

        return S_OK(outputs)
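
For context, the condor_transfer_data call assembled above corresponds to a command line like the one below; the CE name and Condor ID are hypothetical, and in the real code executeGridCommand runs it with the pilot proxy and grid environment rather than a bare subprocess:

import subprocess

ceName = "ce.example.org"   # hypothetical CE host
condorID = "1234.0"         # hypothetical ClusterId.ProcId
cmd = ["condor_transfer_data", "-pool", "%s:9619" % ceName, "-name", ceName, condorID]
proc = subprocess.run(cmd, capture_output=True, text=True, check=False)
print(proc.returncode, proc.stdout, proc.stderr)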
コード例 #53
0
    def __writeSub(self, executable, nJobs, location, processors):
        """ Create the Sub File for submission.

    :param str executable: name of the script to execute
    :param int nJobs: number of desired jobs
    :param str location: directory that should contain the result of the jobs
    :param int processors: number of CPU cores to allocate
    """

        self.log.debug("Working directory: %s " % self.workingDirectory)
        mkDir(os.path.join(self.workingDirectory, location))

        self.log.debug("InitialDir: %s" %
                       os.path.join(self.workingDirectory, location))

        self.log.debug("ExtraSubmitString:\n### \n %s \n###" %
                       self.extraSubmitString)

        fd, name = tempfile.mkstemp(suffix='.sub',
                                    prefix='HTCondorCE_',
                                    dir=self.workingDirectory)
        subFile = os.fdopen(fd, 'w')

        executable = os.path.join(self.workingDirectory, executable)

        # This is used to remove outputs from the remote schedd
        # Used in case a local schedd is not used
        periodicRemove = "periodic_remove = "
        periodicRemove += "(JobStatus == 4) && "
        periodicRemove += "(time() - EnteredCurrentStatus) > (%s * 24 * 3600)" % self.daysToKeepRemoteLogs

        localScheddOptions = """
ShouldTransferFiles = YES
WhenToTransferOutput = ON_EXIT_OR_EVICT
""" if self.useLocalSchedd else periodicRemove

        targetUniverse = "grid" if self.useLocalSchedd else "vanilla"

        sub = """
executable = %(executable)s
universe = %(targetUniverse)s
use_x509userproxy = true
output = $(Cluster).$(Process).out
error = $(Cluster).$(Process).err
log = $(Cluster).$(Process).log
environment = "HTCONDOR_JOBID=$(Cluster).$(Process)"
initialdir = %(initialDir)s
grid_resource = condor %(ceName)s %(ceName)s:9619
transfer_output_files = ""
+xcount = %(processors)s
%(localScheddOptions)s

kill_sig=SIGTERM

%(extraString)s

Queue %(nJobs)s

""" % dict(
            executable=executable,
            nJobs=nJobs,
            processors=processors,
            ceName=self.ceName,
            extraString=self.extraSubmitString,
            initialDir=os.path.join(self.workingDirectory, location),
            localScheddOptions=localScheddOptions,
            targetUniverse=targetUniverse,
        )
        subFile.write(sub)
        subFile.close()
        return name
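
As a follow-up sketch, the submit file returned by __writeSub would typically be passed to condor_submit. The path below is a placeholder for the returned name, and '-terse' (print only the submitted job IDs) is a standard HTCondor option; the excerpt above does not show how DIRAC actually performs the submission:

import subprocess

subFileName = "/opt/dirac/work/HTCondorCE_abc123.sub"   # placeholder for the name returned by __writeSub
proc = subprocess.run(
    ["condor_submit", "-terse", subFileName],
    capture_output=True, text=True, check=False,
)
# With "Queue 10" in the sub file, stdout would look like "1234.0 - 1234.9"
print(proc.returncode, proc.stdout.strip(), proc.stderr.strip())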
コード例 #54
0
    def getJobOutput(self, jobID, _localDir=None):
        """ TODO: condor can copy the output automatically back to the
    submission, so we just need to pick it up from the proper folder
    """
        self.log.verbose("Getting job output for jobID: %s " % jobID)
        _job, pathToResult, condorID = condorIDAndPathToResultFromJobRef(jobID)
        # FIXME: the WMSAdministrator does not know about the
        # SiteDirector WorkingDirectory, it might not even run on the
        # same machine
        # workingDirectory = self.ceParameters.get( 'WorkingDirectory', DEFAULT_WORKINGDIRECTORY )

        if not self.useLocalSchedd:
            iwd = None

            # TOREMOVE: once v7r0 is in general use, remove the following block, which was only useful
            # when the path to the output was not deterministic
            status, stdout_q = commands.getstatusoutput(
                'condor_q %s %s -af SUBMIT_Iwd' %
                (self.remoteScheddOptions, condorID))
            self.log.verbose('condor_q:', stdout_q)
            if status == 0 and self.workingDirectory in stdout_q:
                iwd = stdout_q
                pathToResult = iwd

            # Use the path extracted from the pilotID
            if iwd is None:
                iwd = os.path.join(self.workingDirectory, pathToResult)

            try:
                mkDir(iwd)
            except OSError as e:
                errorMessage = "Failed to create the pilot output directory"
                self.log.exception(errorMessage, iwd)
                return S_ERROR(e.errno, '%s (%s)' % (errorMessage, iwd))

            cmd = [
                'condor_transfer_data', '-pool',
                '%s:9619' % self.ceName, '-name', self.ceName, condorID
            ]
            result = executeGridCommand(self.proxy, cmd, self.gridEnv)
            self.log.verbose(result)

            errorMessage = "Failed to get job output from htcondor"
            if not result['OK']:
                self.log.error(errorMessage, result['Message'])
                return result
            # Even if result is OK, the actual exit code of cmd can still be an error
            if result['OK'] and result['Value'][0] != 0:
                varMessage = result['Value'][1].strip()
                self.log.error(errorMessage, varMessage)
                return S_ERROR('%s: %s' % (errorMessage, varMessage))

        output = ''
        error = ''

        resOut = findFile(self.workingDirectory, '%s.out' % condorID,
                          pathToResult)
        if not resOut['OK']:
            self.log.error("Failed to find output file for condor job", jobID)
            return resOut
        outputfilename = resOut['Value'][0]

        resErr = findFile(self.workingDirectory, '%s.err' % condorID,
                          pathToResult)
        if not resErr['OK']:
            self.log.error("Failed to find error file for condor job", jobID)
            return resErr
        errorfilename = resErr['Value'][0]

        try:
            with open(outputfilename) as outputfile:
                output = outputfile.read()
        except IOError as e:
            self.log.error("Failed to open outputfile", str(e))
            return S_ERROR("Failed to get pilot output")
        try:
            with open(errorfilename) as errorfile:
                error = errorfile.read()
        except IOError as e:
            self.log.error("Failed to open errorfile", str(e))
            return S_ERROR("Failed to get pilot error")

        return S_OK((output, error))
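
Note that the condor_q call above relies on the Python 2-only commands module; under Python 3 the drop-in replacement is subprocess.getstatusoutput, roughly as sketched below (remoteScheddOptions and condorID keep the meaning they have in the code above, the concrete values are hypothetical):

import subprocess

remoteScheddOptions = "-pool ce.example.org:9619 -name ce.example.org"   # hypothetical
condorID = "1234.0"                                                      # hypothetical
status, stdout_q = subprocess.getstatusoutput(
    "condor_q %s %s -af SUBMIT_Iwd" % (remoteScheddOptions, condorID)
)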
コード例 #55
0
ファイル: SandboxStoreClient.py プロジェクト: DIRACGrid/DIRAC
  def downloadSandbox(self, sbLocation, destinationDir="", inMemory=False, unpack=True):
    """
    Download a sandbox file and keep it in bundled form
    """
    if sbLocation.find("SB:") != 0:
      return S_ERROR("Invalid sandbox URL")
    sbLocation = sbLocation[3:]
    sbSplit = sbLocation.split("|")
    if len(sbSplit) < 2:
      return S_ERROR("Invalid sandbox URL")
    seName = sbSplit[0]
    sePFN = "|".join(sbSplit[1:])

    try:
      tmpSBDir = tempfile.mkdtemp(prefix="TMSB.")
    except IOError as e:
      return S_ERROR("Cannot create temporary file: %s" % repr(e))

    se = StorageElement(seName, vo=self.__vo)
    result = returnSingleResult(se.getFile(sePFN, localPath=tmpSBDir))

    if not result['OK']:
      return result
    sbFileName = os.path.basename(sePFN)

    result = S_OK()
    tarFileName = os.path.join(tmpSBDir, sbFileName)

    if inMemory:
      try:
        # Read as bytes: the sandbox archive is binary
        with open(tarFileName, 'rb') as tfile:
          data = tfile.read()
      except IOError as e:
        return S_ERROR('Failed to read the sandbox archive: %s' % repr(e))
      finally:
        os.unlink(tarFileName)
        os.rmdir(tmpSBDir)
      return S_OK(data)

    # If the destination dir is not specified, use the current working dir
    # If it is defined, ensure the directory structure exists
    if not destinationDir:
      destinationDir = os.getcwd()
    else:
      mkDir(destinationDir)

    if not unpack:
      result['Value'] = tarFileName
      return result

    try:
      sandboxSize = 0
      with tarfile.open(name=tarFileName, mode="r") as tf:
        for tarinfo in tf:
          tf.extract(tarinfo, path=destinationDir)
          sandboxSize += tarinfo.size
      # FIXME: here we return the size, but otherwise we always return the location: inconsistent
      # FIXME: looks like this size is used by the JobWrapper
      result['Value'] = sandboxSize
    except IOError as e:
      result = S_ERROR("Could not open bundle: %s" % repr(e))

    try:
      os.unlink(tarFileName)
      os.rmdir(tmpSBDir)
    except OSError as e:
      gLogger.warn("Could not remove temporary dir %s: %s" % (tmpSBDir, repr(e)))

    return result
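
A minimal usage sketch for the method above, assuming an initialised DIRAC client with a valid proxy; the sandbox location string is a hypothetical example of the "SB:<SEName>|<PFN>" form parsed at the top of downloadSandbox, and the import path is the usual one for this module:

from DIRAC.WorkloadManagementSystem.Client.SandboxStoreClient import SandboxStoreClient

client = SandboxStoreClient()
sbLocation = "SB:SandboxSE|/SandBox/u/user/0a1b2c3d.tar.bz2"   # hypothetical sandbox URL
result = client.downloadSandbox(sbLocation, destinationDir="/tmp/sandbox", unpack=True)
if result["OK"]:
    print("Sandbox unpacked, total size in bytes:", result["Value"])
else:
    print("Download failed:", result["Message"])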