Example #1
class ProcessList(object):
  """ The ProcessList uses internally the CFG utility to store the processes and their properties.
  """
  def __init__(self, location):
    self.cfg = CFG()
    self.location = location
    self.goodProcessList = True
    if os.path.exists(self.location):
      self.cfg.loadFromFile(self.location)
      if not self.cfg.existsKey('Processes'):
        self.cfg.createNewSection('Processes')
    else:
      self.goodProcessList = False  
      
  def _writeProcessList(self, path):
    """ Write to text
    """
    handle, tmpName = tempfile.mkstemp()
    written = self.cfg.writeToFile(tmpName)
    os.close(handle)
    if not written:
      if os.path.exists(tmpName):
        os.remove(tmpName)
      return written
    if os.path.exists(path):
      LOG.debug("Replacing %s" % path)
    try:
      shutil.move(tmpName, path)
      return True
    except OSError as err:
      LOG.error("Failed to overwrite process list.", err)
      LOG.info("If your process list is corrupted a backup can be found %s" % tmpName)
      return False
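
Both ProcessList variants in Example #1 and Example #2 write the list the same way: serialise the CFG to a temporary file first, and only then move it over the target path, so a failed write never clobbers the existing list. A minimal standalone sketch of that pattern, assuming only the CFG.writeToFile call shown in the examples (the saveConfig helper name is illustrative, not part of DIRAC):

import os
import shutil
import tempfile

def saveConfig(cfg, path):
  """ Illustrative helper: write a CFG object to path via a temporary file """
  handle, tmpName = tempfile.mkstemp()
  os.close(handle)
  written = cfg.writeToFile(tmpName)  # CFG.writeToFile reports success as a truthy value
  if not written:
    if os.path.exists(tmpName):
      os.remove(tmpName)  # clean up the temporary file on failure
    return False
  try:
    shutil.move(tmpName, path)  # replace the target only after the write succeeded
    return True
  except OSError:
    return False
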
Example #2
class ProcessList(object):
  """ The ProcessList uses internally the CFG utility to store the processes and their properties.
  """
  def __init__(self, location):
    self.cfg = CFG()
    self.location = location
    self.goodProcessList = True
    if os.path.exists(self.location):
      self.cfg.loadFromFile(self.location)
      if not self.cfg.existsKey('Processes'):
        self.cfg.createNewSection('Processes')
    else:
      self.goodProcessList = False  
      
  def _writeProcessList(self, path):
    """ Write to text
    """
    handle, tmpName = tempfile.mkstemp()
    written = self.cfg.writeToFile(tmpName)
    os.close(handle)
    if not written:
      if os.path.exists(tmpName):
        os.remove(tmpName)
      return written
    if os.path.exists(path):
      gLogger.debug("Replacing %s" % path)
    try:
      shutil.move(tmpName, path)
      return True
    except OSError as err:
      gLogger.error("Failed to overwrite process list.", err)
      gLogger.info("If your process list is corrupted a backup can be found %s" % tmpName)
      return False
def checkFunction():
  """ gets CPU normalisation from MFJ or calculate itself """
  from DIRAC.WorkloadManagementSystem.Client.CPUNormalization import getPowerFromMJF
  from ILCDIRAC.Core.Utilities.CPUNormalization import getCPUNormalization
  from DIRAC import gLogger, gConfig
  import DIRAC  # DIRAC.exit() is used below

  result = getCPUNormalization()

  if not result['OK']:
    gLogger.error( result['Message'] )
    DIRAC.exit( 1 )

  norm = round( result['Value']['NORM'], 1 )

  gLogger.notice( 'Estimated CPU power is %.1f %s' % ( norm, result['Value']['UNIT'] ) )

  mjfPower = getPowerFromMJF()
  if mjfPower:
    gLogger.notice( 'CPU power from MJF is %.1f HS06' % mjfPower )
  else:
    gLogger.notice( 'MJF not available on this node' )

  if update and not configFile:
    gConfig.setOptionValue( '/LocalSite/CPUScalingFactor', mjfPower if mjfPower else norm )
    gConfig.setOptionValue( '/LocalSite/CPUNormalizationFactor', norm )

    gConfig.dumpLocalCFGToFile( gConfig.diracConfigFilePath )
  if configFile:
    from DIRAC.Core.Utilities.CFG import CFG
    cfg = CFG()
    try:
      # Attempt to open the given file
      cfg.loadFromFile( configFile )
    except:
      pass
    # Create the section if it does not exist
    if not cfg.existsKey( 'LocalSite' ):
      cfg.createNewSection( 'LocalSite' )
    cfg.setOption( '/LocalSite/CPUScalingFactor', mjfPower if mjfPower else norm )
    cfg.setOption( '/LocalSite/CPUNormalizationFactor', norm )

    cfg.writeToFile( configFile )


  DIRAC.exit()
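
The checkFunction above ends with one CFG round trip: load (or start) a configuration file, make sure the LocalSite section exists, set the factor and write the file back. A condensed sketch of just that round trip, assuming only the CFG calls already used above (the helper name, file name and value are placeholders):

from DIRAC.Core.Utilities.CFG import CFG

def writeNormalization(configFile, norm):
  """ Illustrative helper: persist a CPU normalisation factor to configFile """
  cfg = CFG()
  try:
    cfg.loadFromFile(configFile)  # keep any options already present in the file
  except Exception:
    pass  # a missing or unreadable file simply starts from an empty CFG
  if not cfg.existsKey('LocalSite'):
    cfg.createNewSection('LocalSite')  # mirror the snippet above: create the section first
  cfg.setOption('/LocalSite/CPUNormalizationFactor', norm)
  return cfg.writeToFile(configFile)

# writeNormalization('myLocal.cfg', 12.5)  # placeholder file name and value
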
Example #5
class JobRepository(object):

  def __init__(self, repository=None):
    self.location = repository
    if not self.location:
      if "HOME" in os.environ:
        self.location = '%s/.dirac.repo.rep' % os.environ['HOME']
      else:
        self.location = '%s/.dirac.repo.rep' % os.getcwd()
    self.repo = CFG()
    if os.path.exists(self.location):
      self.repo.loadFromFile(self.location)
      if not self.repo.existsKey('Jobs'):
        self.repo.createNewSection('Jobs')
    else:
      self.repo.createNewSection('Jobs')
    self.OK = True
    written = self._writeRepository(self.location)
    if not written:
      self.OK = False

  def isOK(self):
    return self.OK

  def readRepository(self):
    return S_OK(self.repo.getAsDict('Jobs'))

  def writeRepository(self, alternativePath=None):
    destination = self.location
    if alternativePath:
      destination = alternativePath
    written = self._writeRepository(destination)
    if not written:
      return S_ERROR("Failed to write repository")
    return S_OK(destination)

  def resetRepository(self, jobIDs=[]):
    if not jobIDs:
      jobs = self.readRepository()['Value']
      jobIDs = jobs.keys()
    paramDict = {'State': 'Submitted',
                 'Retrieved': 0,
                 'OutputData': 0}
    for jobID in jobIDs:
      self._writeJob(jobID, paramDict, True)
    self._writeRepository(self.location)
    return S_OK()

  def _writeRepository(self, path):
    handle, tmpName = tempfile.mkstemp()
    written = self.repo.writeToFile(tmpName)
    os.close(handle)
    if not written:
      if os.path.exists(tmpName):
        os.remove(tmpName)
      return written
    if os.path.exists(path):
      gLogger.debug("Replacing %s" % path)
    try:
      shutil.move(tmpName, path)
      return True
    except Exception as x:
      gLogger.error("Failed to overwrite repository.", x)
      gLogger.info("If your repository is corrupted a backup can be found %s" % tmpName)
      return False

  def appendToRepository(self, repoLocation):
    if not os.path.exists(repoLocation):
      gLogger.error("Secondary repository does not exist", repoLocation)
      return S_ERROR("Secondary repository does not exist")
    self.repo = CFG().loadFromFile(repoLocation).mergeWith(self.repo)
    self._writeRepository(self.location)
    return S_OK()

  def addJob(self, jobID, state='Submitted', retrieved=0, outputData=0, update=False):
    paramDict = {'State': state,
                 'Time': self._getTime(),
                 'Retrieved': int(retrieved),
                 'OutputData': outputData}
    self._writeJob(jobID, paramDict, update)
    self._writeRepository(self.location)
    return S_OK(jobID)

  def updateJob(self, jobID, paramDict):
    if self._existsJob(jobID):
      paramDict['Time'] = self._getTime()
      self._writeJob(jobID, paramDict, True)
      self._writeRepository(self.location)
    return S_OK()

  def updateJobs(self, jobDict):
    for jobID, paramDict in jobDict.items():
      if self._existsJob(jobID):
        paramDict['Time'] = self._getTime()
        self._writeJob(jobID, paramDict, True)
    self._writeRepository(self.location)
    return S_OK()

  def _getTime(self):
    runtime = time.ctime()
    return runtime.replace(" ", "_")

  def _writeJob(self, jobID, paramDict, update):
    jobID = str(jobID)
    jobExists = self._existsJob(jobID)
    if jobExists and (not update):
      gLogger.warn("Job exists and not overwriting")
      return S_ERROR("Job exists and not overwriting")
    if not jobExists:
      self.repo.createNewSection('Jobs/%s' % jobID)
    for key, value in paramDict.items():
      self.repo.setOption('Jobs/%s/%s' % (jobID, key), value)
    return S_OK()

  def removeJob(self, jobID):
    res = self.repo['Jobs'].deleteKey(str(jobID))  # pylint: disable=no-member
    if res:
      self._writeRepository(self.location)
    return S_OK()

  def existsJob(self, jobID):
    return S_OK(self._existsJob(jobID))

  def _existsJob(self, jobID):
    return self.repo.isSection('Jobs/%s' % jobID)

  def getLocation(self):
    return S_OK(self.location)

  def getSize(self):
    return S_OK(len(self.repo.getAsDict('Jobs')))
Example #6
class JobRepository( object ):

  def __init__( self, repository = None ):
    self.location = repository
    if not self.location:
      if "HOME" in os.environ:
        self.location = '%s/.dirac.repo.rep' % os.environ['HOME']
      else:
        self.location = '%s/.dirac.repo.rep' % os.getcwd()
    self.repo = CFG()
    if os.path.exists( self.location ):
      self.repo.loadFromFile( self.location )
      if not self.repo.existsKey( 'Jobs' ):
        self.repo.createNewSection( 'Jobs' )
    else:
      self.repo.createNewSection( 'Jobs' )
    self.OK = True
    written = self._writeRepository( self.location )
    if not written:
      self.OK = False

  def isOK( self ):
    return self.OK

  def readRepository( self ):
    return S_OK( self.repo.getAsDict( 'Jobs' ) )

  def writeRepository( self, alternativePath = None ):
    destination = self.location
    if alternativePath:
      destination = alternativePath
    written = self._writeRepository( destination )
    if not written:
      return S_ERROR( "Failed to write repository" )
    return S_OK( destination )

  def resetRepository( self, jobIDs = [] ):
    if not jobIDs:
      jobs = self.readRepository()['Value']
      jobIDs = jobs.keys()
    paramDict = {'State'       : 'Submitted',
                 'Retrieved'   : 0,
                 'OutputData'  : 0}
    for jobID in jobIDs:
      self._writeJob( jobID, paramDict, True )
    self._writeRepository( self.location )
    return S_OK()

  def _writeRepository( self, path ):
    handle, tmpName = tempfile.mkstemp()
    written = self.repo.writeToFile( tmpName )
    os.close( handle )
    if not written:
      if os.path.exists( tmpName ):
        os.remove( tmpName )
      return written
    if os.path.exists( path ):
      gLogger.debug( "Replacing %s" % path )
    try:
      shutil.move( tmpName, path )
      return True
    except Exception as x:
      gLogger.error( "Failed to overwrite repository.", x )
      gLogger.info( "If your repository is corrupted a backup can be found %s" % tmpName )
      return False

  def appendToRepository( self, repoLocation ):
    if not os.path.exists( repoLocation ):
      gLogger.error( "Secondary repository does not exist", repoLocation )
      return S_ERROR( "Secondary repository does not exist" )
    self.repo = CFG().loadFromFile( repoLocation ).mergeWith( self.repo )
    self._writeRepository( self.location )
    return S_OK()

  def addJob( self, jobID, state = 'Submitted', retrieved = 0, outputData = 0, update = False ):
    paramDict = { 'State'       : state,
                  'Time'        : self._getTime(),
                  'Retrieved'   : int( retrieved ),
                  'OutputData'  : outputData}
    self._writeJob( jobID, paramDict, update )
    self._writeRepository( self.location )
    return S_OK( jobID )

  def updateJob( self, jobID, paramDict ):
    if self._existsJob( jobID ):
      paramDict['Time'] = self._getTime()
      self._writeJob( jobID, paramDict, True )
      self._writeRepository( self.location )
    return S_OK()

  def updateJobs( self, jobDict ):
    for jobID, paramDict in jobDict.items():
      if self._existsJob( jobID ):
        paramDict['Time'] = self._getTime()
        self._writeJob( jobID, paramDict, True )
    self._writeRepository( self.location )
    return S_OK()

  def _getTime( self ):
    runtime = time.ctime()
    return runtime.replace( " ", "_" )

  def _writeJob( self, jobID, paramDict, update ):
    jobID = str( jobID )
    jobExists = self._existsJob( jobID )
    if jobExists and ( not update ):
      gLogger.warn( "Job exists and not overwriting" )
      return S_ERROR( "Job exists and not overwriting" )
    if not jobExists:
      self.repo.createNewSection( 'Jobs/%s' % jobID )
    for key, value in paramDict.items():
      self.repo.setOption( 'Jobs/%s/%s' % ( jobID, key ), value )
    return S_OK()

  def removeJob( self, jobID ):
    res = self.repo['Jobs'].deleteKey( str( jobID ) ) #pylint: disable=no-member
    if res:
      self._writeRepository( self.location )
    return S_OK()

  def existsJob( self, jobID ):
    return S_OK( self._existsJob( jobID ) )

  def _existsJob( self, jobID ):
    return self.repo.isSection( 'Jobs/%s' % jobID )

  def getLocation( self ):
    return S_OK( self.location )

  def getSize( self ):
    return S_OK( len( self.repo.getAsDict( 'Jobs' ) ) )
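
A hedged usage sketch for the JobRepository class shown in Examples #5 and #6; the repository path and job ID are made up for illustration, and every call used here (isOK, addJob, updateJob, readRepository, getSize) appears in the class above:

# Assumes the same imports the class relies on (CFG, S_OK, S_ERROR, gLogger, os, tempfile, shutil, time).
repo = JobRepository('/tmp/example.dirac.repo.rep')  # hypothetical repository file
if repo.isOK():
  repo.addJob(101, state='Submitted')  # record a freshly submitted job
  repo.updateJob(101, {'State': 'Done', 'Retrieved': 1})
  print(repo.readRepository()['Value'])  # dict mapping job IDs to their parameters
  print(repo.getSize()['Value'])  # number of tracked jobs
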
Example #7
    mjfPower = getPowerFromMJF()
    if mjfPower:
        gLogger.notice('CPU power from MJF is %.1f HS06' % mjfPower)
    else:
        gLogger.notice('MJF not available on this node')

    if update and not configFile:
        gConfig.setOptionValue('/LocalSite/CPUScalingFactor',
                               mjfPower if mjfPower else norm)
        gConfig.setOptionValue('/LocalSite/CPUNormalizationFactor', norm)

        gConfig.dumpLocalCFGToFile(gConfig.diracConfigFilePath)
    if configFile:
        from DIRAC.Core.Utilities.CFG import CFG
        cfg = CFG()
        try:
            # Attempt to open the given file
            cfg.loadFromFile(configFile)
        except:
            pass
        # Create the section if it does not exist
        if not cfg.existsKey('LocalSite'):
            cfg.createNewSection('LocalSite')
        cfg.setOption('/LocalSite/CPUScalingFactor',
                      mjfPower if mjfPower else norm)
        cfg.setOption('/LocalSite/CPUNormalizationFactor', norm)

        cfg.writeToFile(configFile)

    DIRAC.exit()
    result = getCPUNormalization()

    if not result["OK"]:
        DIRAC.gLogger.error(result["Message"])

    norm = int((result["Value"]["NORM"] + 0.05) * 10) / 10.0

    DIRAC.gLogger.notice("Normalization for current CPU is %.1f %s" % (norm, result["Value"]["UNIT"]))

    if update:
        DIRAC.gConfig.setOptionValue("/LocalSite/CPUNormalizationFactor", norm)
        DIRAC.gConfig.dumpLocalCFGToFile(DIRAC.gConfig.diracConfigFilePath)
    if configFile:
        from DIRAC.Core.Utilities.CFG import CFG

        cfg = CFG()
        try:
            # Attempt to open the given file
            cfg.loadFromFile(configFile)
        except:
            pass
        # Create the section if it does not exist
        if not cfg.existsKey("LocalSite"):
            cfg.createNewSection("LocalSite")
        cfg.setOption("/LocalSite/CPUNormalizationFactor", norm)

        cfg.writeToFile(configFile)

    DIRAC.exit()
#           DBName = FileCatalogDB
#         }
#       }
#   }
# }

for sct in [
        'Systems/DataManagement',
        'Systems/DataManagement/Production',
        'Systems/DataManagement/Production/URLs',
        'Systems/DataManagement/Production/Services',
        'Systems/DataManagement/Production/Services/FileCatalog',
        'Systems/DataManagement/Production/Databases',
        'Systems/DataManagement/Production/Databases/FileCatalogDB',
]:
    if not localCfg.existsKey(sct):
        localCfg.createNewSection(sct)

localCfg.setOption(
    'Systems/DataManagement/Production/Services/FileCatalog/DirectoryManager',
    'DirectoryClosure')
localCfg.setOption(
    'Systems/DataManagement/Production/Services/FileCatalog/FileManager',
    'FileManagerPs')
localCfg.setOption(
    'Systems/DataManagement/Production/Services/FileCatalog/SecurityManager',
    'FullSecurityManager')

localCfg.setOption(
    'Systems/DataManagement/Production/Databases/FileCatalogDB/DBName',
    'FileCatalogDB')
#     }
#     Databases
#       {
#         FileCatalogDB
#         {
#           DBName = FileCatalogDB
#         }
#       }
#   }
# }


for sct in ['Systems/DataManagement',
            'Systems/DataManagement/Production',
            'Systems/DataManagement/Production/URLs',
            'Systems/DataManagement/Production/Services',
            'Systems/DataManagement/Production/Services/FileCatalog',
            'Systems/DataManagement/Production/Databases',
            'Systems/DataManagement/Production/Databases/FileCatalogDB', ]:
  if not localCfg.existsKey( sct ):
    localCfg.createNewSection( sct )

localCfg.setOption( 'Systems/DataManagement/Production/Services/FileCatalog/DirectoryManager', 'DirectoryClosure' )
localCfg.setOption( 'Systems/DataManagement/Production/Services/FileCatalog/FileManager', 'FileManagerPs' )
localCfg.setOption( 'Systems/DataManagement/Production/Services/FileCatalog/SecurityManager', 'FullSecurityManager' )

localCfg.setOption( 'Systems/DataManagement/Production/Databases/FileCatalogDB/DBName', 'FileCatalogDB' )


localCfg.writeToFile( localConfigFile )
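
If the writes above succeed, the result can be checked by re-reading the file with the same CFG utility; a small sketch, assuming CFG.getOption is the read counterpart of the setOption calls used above (localConfigFile is the file written on the last line):

from DIRAC.Core.Utilities.CFG import CFG

check = CFG()
check.loadFromFile( localConfigFile )  # re-read the file written above
# getOption is assumed to mirror setOption in the CFG utility
print( check.getOption( 'Systems/DataManagement/Production/Services/FileCatalog/FileManager' ) )
# expected output: FileManagerPs
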