Example #1
class Synchronizer(object):
    """ Class encapsulating a lock
  allowing it to be used as a synchronizing
  decorator making the call thread-safe"""
    def __init__(self, lockName="", recursive=False):
        from DIRAC.Core.Utilities.LockRing import LockRing
        self.__lockName = lockName
        self.__lr = LockRing()
        self.__lock = self.__lr.getLock(lockName, recursive=recursive)

    def __call__(self, funcToCall):
        def lockedFunc(*args, **kwargs):
            try:
                if self.__lockName:
                    print("LOCKING", self.__lockName)
                self.__lock.acquire()
                return funcToCall(*args, **kwargs)
            finally:
                if self.__lockName:
                    print("UNLOCKING", self.__lockName)
                self.__lock.release()

        return lockedFunc

    def lock(self):
        return self.__lock.acquire()

    def unlock(self):
        return self.__lock.release()
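A minimal usage sketch for the decorator above, assuming the Synchronizer class from this example is in scope (which requires DIRAC's LockRing to be importable); the lock name, counter and function are invented for illustration.

_syncAdd = Synchronizer(lockName="counterLock")

counter = {"value": 0}

@_syncAdd
def addToCounter(amount):
    # Only one thread at a time executes this body; LOCKING/UNLOCKING are printed around the call.
    counter["value"] += amount

addToCounter(5)
print(counter["value"])  # 5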
Example #2
    def __init__(self, loadDefaultCFG=True):
        envVar = os.environ.get("DIRAC_FEWER_CFG_LOCKS", "no").lower()
        self.__locksEnabled = envVar not in ("y", "yes", "t", "true", "on",
                                             "1")
        if self.__locksEnabled:
            lr = LockRing()
            self.threadingEvent = lr.getEvent()
            self.threadingEvent.set()
            self.threadingLock = lr.getLock()
            self.runningThreadsNumber = 0

        self.__compressedConfigurationData = None
        self.configurationPath = "/DIRAC/Configuration"
        self.backupsDir = os.path.join(DIRAC.rootPath, "etc", "csbackup")
        self._isService = False
        self.localCFG = CFG()
        self.remoteCFG = CFG()
        self.mergedCFG = CFG()
        self.remoteServerList = []
        if loadDefaultCFG:
            defaultCFGFile = os.path.join(DIRAC.rootPath, "etc", "dirac.cfg")
            gLogger.debug("dirac.cfg should be at", "%s" % defaultCFGFile)
            retVal = self.loadFile(defaultCFGFile)
            if not retVal["OK"]:
                gLogger.warn("Can't load %s file" % defaultCFGFile)
        self.sync()
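The lock-creation toggle at the top of this constructor can be exercised on its own; a small sketch of the same parsing, with the helper name made up for illustration:

import os

def cfgLocksEnabled():
    # Mirrors the check above: locks stay enabled unless the variable holds a truthy token.
    envVar = os.environ.get("DIRAC_FEWER_CFG_LOCKS", "no").lower()
    return envVar not in ("y", "yes", "t", "true", "on", "1")

os.environ["DIRAC_FEWER_CFG_LOCKS"] = "true"
print(cfgLocksEnabled())  # False: the event, lock and thread counter above would not be created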
Example #3
class Synchronizer:
    """ Class encapsulating a lock
  allowing it to be used as a synchronizing
  decorator making the call thread-safe"""

    def __init__(self, lockName="", recursive=False):
        from DIRAC.Core.Utilities.LockRing import LockRing

        self.__lockName = lockName
        self.__lr = LockRing()
        self.__lock = self.__lr.getLock(lockName, recursive=recursive)

    def __call__(self, funcToCall):
        def lockedFunc(*args, **kwargs):
            try:
                if self.__lockName:
                    print "LOCKING", self.__lockName
                self.__lock.acquire()
                return funcToCall(*args, **kwargs)
            finally:
                if self.__lockName:
                    print "UNLOCKING", self.__lockName
                self.__lock.release()

        return lockedFunc

    def lock(self):
        return self.__lock.acquire()

    def unlock(self):
        return self.__lock.release()
Example #4
 def __init__( self, loadDefaultCFG = True ):
   lr = LockRing()
   self.threadingEvent = lr.getEvent()
   self.threadingEvent.set()
   self.threadingLock = lr.getLock()
   self.runningThreadsNumber = 0
   self.compressedConfigurationData = ""
   self.configurationPath = "/DIRAC/Configuration"
   self.backupsDir = os.path.join( DIRAC.rootPath, "etc", "csbackup" )
   self._isService = False
   self.localCFG = CFG()
   self.remoteCFG = CFG()
   self.mergedCFG = CFG()
   self.remoteServerList = []
   if loadDefaultCFG:
     defaultCFGFile = os.path.join( DIRAC.rootPath, "etc", "dirac.cfg" )
     gLogger.debug( "dirac.cfg should be at", "%s" % defaultCFGFile )
     retVal = self.loadFile( defaultCFGFile )
     if not retVal[ 'OK' ]:
       gLogger.warn( "Can't load %s file" % defaultCFGFile )
   self.sync()
Example #6
class Cache(object):
    """
    Cache basic class.
    
    WARNING: None of its methods is thread safe. Acquire / Release lock when
    using them !
  """
    def __init__(self, lifeTime, updateFunc):
        """
    Constructor
    
    :Parameters:
      **lifeTime** - `int`
        Lifetime of the elements in the cache ( seconds ! )
      **updateFunc** - `function`
        This function MUST return a S_OK | S_ERROR object. In the case of the first,
        its value must be a dictionary.
    
    """

        # We set a 20% of the lifetime randomly, so that if we have thousands of jobs
        # starting at the same time, all the caches will not end at the same time.
        randomLifeTimeBias = 0.2 * random.random()

        self.log = gLogger.getSubLogger(self.__class__.__name__)

        self.__lifeTime = int(lifeTime * (1 + randomLifeTimeBias))
        self.__updateFunc = updateFunc
        # The records returned from the cache must be valid at least 30 seconds.
        self.__validSeconds = 30

        # Cache
        self.__cache = DictCache()
        self.__cacheLock = LockRing()
        self.__cacheLock.getLock(self.__class__.__name__)

    #.............................................................................
    # internal cache object getter

    def cacheKeys(self):
        """
    Cache keys getter
      
    :returns: list with valid keys on the cache
    """

        return self.__cache.getKeys(validSeconds=self.__validSeconds)

    #.............................................................................
    # acquire / release Locks

    def acquireLock(self):
        """
    Acquires Cache lock
    """

        self.__cacheLock.acquire(self.__class__.__name__)

    def releaseLock(self):
        """
    Releases Cache lock
    """

        self.__cacheLock.release(self.__class__.__name__)

    #.............................................................................
    # Cache getters

    def get(self, cacheKeys):
        """
    Gets values for the given cacheKeys. If all are found ( present in the cache and
    valid ), returns S_OK with the results. If any is missing or no longer
    valid, returns S_ERROR.
    
    :Parameters:
      **cacheKeys** - `list`
        list of keys to be extracted from the cache
        
    :return: S_OK | S_ERROR
    """

        result = {}

        for cacheKey in cacheKeys:

            cacheRow = self.__cache.get(cacheKey,
                                        validSeconds=self.__validSeconds)
            if not cacheRow:
                self.log.error(str(cacheKey))
                return S_ERROR('Cannot get %s' % str(cacheKey))
            result.update({cacheKey: cacheRow})

        return S_OK(result)

    #.............................................................................
    # Cache refreshers

    def refreshCache(self):
        """     
    Purges the cache and gets fresh data from the update function.
    
    :return: S_OK | S_ERROR. If the first, its content is the new cache.    
    """

        self.log.verbose('refreshing...')

        self.__cache.purgeAll()

        newCache = self.__updateFunc()
        if not newCache['OK']:
            self.log.error(newCache['Message'])
            return newCache

        newCache = self.__updateCache(newCache['Value'])

        self.log.verbose('refreshed')

        return newCache

    #.............................................................................
    # Private methods

    def __updateCache(self, newCache):
        """
    Given the new cache dictionary, updates the internal cache with it. It sets
    a duration to the entries of <self.__lifeTime> seconds.
    
    :Parameters:
      **newCache** - `dict`
        dictionary containing a new cache
    
    :return: dictionary. It is newCache argument.    
    """

        for cacheKey, cacheValue in newCache.items():
            self.__cache.add(cacheKey, self.__lifeTime, value=cacheValue)

        # We are assuming nothing will fail while inserting in the cache. There is
        # no apparent reason to suspect from that piece of code.
        return S_OK(newCache)
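A short usage sketch for the Cache class above, assuming it is in scope together with its DIRAC imports; the update function, the keys and the stand-in S_OK helper are illustrative only.

def S_OK(value=None):
    # Minimal stand-in for DIRAC's S_OK return helper; refreshCache() only needs 'OK' and 'Value'.
    return {"OK": True, "Value": value}

def fetchStatuses():
    # Update function: must return S_OK whose value is a dictionary, as the constructor docstring requires.
    return S_OK({("SiteA",): "Active", ("SiteB",): "Banned"})

statusCache = Cache(lifeTime=600, updateFunc=fetchStatuses)
statusCache.acquireLock()
try:
    statusCache.refreshCache()            # purge and repopulate from fetchStatuses
    print(statusCache.get([("SiteA",)]))  # S_OK whose value is {('SiteA',): 'Active'}
finally:
    statusCache.releaseLock()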
Example #7
class WORM(object):
    """
  Write One - Read Many
  """
    def __init__(self, maxReads=10):
        from DIRAC.Core.Utilities.LockRing import LockRing
        self.__lr = LockRing()
        self.__lock = self.__lr.getLock()
        self.__maxReads = maxReads
        self.__semaphore = threading.Semaphore(maxReads)

    def write(self, funcToCall):
        """
    Write decorator
    """
        def __doWriteLock(*args, **kwargs):
            try:
                self.__startWriteZone()
                return funcToCall(*args, **kwargs)
            finally:
                self.__endWriteZone()

        return __doWriteLock

    def read(self, funcToCall):
        """
    Read decorator
    """
        def __doReadLock(*args, **kwargs):
            try:
                self.__startReadZone()
                return funcToCall(*args, **kwargs)
            finally:
                self.__endReadZone()

        return __doReadLock

    def __startWriteZone(self):
        """
    Locks Event to prevent further threads from reading.
    Stops current thread until no other thread is accessing.
    PRIVATE USE
    """
        self.__lock.acquire()
        for i in range(self.__maxReads):
            self.__semaphore.acquire()
        self.__lock.release()

    def __endWriteZone(self):
        """
    Unlocks Event.
    PRIVATE USE
    """
        for i in range(self.__maxReads):
            self.__semaphore.release()

    def __startReadZone(self):
        """
    Start of the danger zone. This zone may or may not be a mutual exclusion zone.
    A counter is maintained to track how many threads are inside, so mutual exclusion can be enabled and disabled.
    PRIVATE USE
    """
        self.__semaphore.acquire()

    def __endReadZone(self):
        """
    End of danger zone.
    PRIVATE USE
    """
        self.__semaphore.release()
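A brief usage sketch for the WORM decorators above, assuming the WORM class from this example is in scope; the shared state and function names are invented.

worm = WORM(maxReads=5)
shared = {"value": 0}

@worm.write
def setValue(v):
    # Exclusive: every read slot is drained before the body runs.
    shared["value"] = v

@worm.read
def getValue():
    # Up to maxReads threads may run this concurrently.
    return shared["value"]

setValue(42)
print(getValue())  # 42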
Example #8
class Cache( object ):
  """
    Cache basic class.
    
    WARNING: None of its methods is thread safe. Acquire / Release lock when
    using them !
  """
  
  def __init__( self, lifeTime, updateFunc ):
    """
    Constructor
    
    :Parameters:
      **lifeTime** - `int`
        Lifetime of the elements in the cache ( seconds ! )
      **updateFunc** - `function`
        This function MUST return a S_OK | S_ERROR object. In the case of the first,
        its value must be a dictionary.
    
    """
    
    # We set a 20% of the lifetime randomly, so that if we have thousands of jobs
    # starting at the same time, all the caches will not end at the same time.
    randomLifeTimeBias  = 0.2 * random.random()
    
    self.log            = gLogger.getSubLogger( self.__class__.__name__ )
    
    self.__lifeTime     = int( lifeTime * ( 1 + randomLifeTimeBias ) )
    self.__updateFunc   = updateFunc
    # The records returned from the cache must be valid at least 10 seconds.
    self.__validSeconds = 10
    
    # Cache
    self.__cache       = DictCache()
    self.__cacheLock   = LockRing()
    self.__cacheLock.getLock( self.__class__.__name__ )
  
  #.............................................................................
  # internal cache object getter
  
  def cacheKeys( self ):
    """
    Cache keys getter
      
    :returns: list with valid keys on the cache
    """
    
    return self.__cache.getKeys( validSeconds = self.__validSeconds ) 

  #.............................................................................
  # acquire / release Locks

  def acquireLock( self ):
    """
    Acquires Cache lock
    """
    
    self.__cacheLock.acquire( self.__class__.__name__ )

  def releaseLock( self ):
    """
    Releases Cache lock
    """
    
    self.__cacheLock.release( self.__class__.__name__)
  
  #.............................................................................
  # Cache getters

  def get( self, cacheKeys ):
    """
    Gets values for the given cacheKeys. If all are found ( present in the cache and
    valid ), returns S_OK with the results. If any is missing or no longer
    valid, returns S_ERROR.
    
    :Parameters:
      **cacheKeys** - `list`
        list of keys to be extracted from the cache
        
    :return: S_OK | S_ERROR
    """

    result = {}

    for cacheKey in cacheKeys:

      cacheRow = self.__cache.get( cacheKey, validSeconds = self.__validSeconds )
      if not cacheRow:
        self.log.error( str( cacheKey ) )
        return S_ERROR( 'Cannot get %s' % str( cacheKey ) )
      result.update( { cacheKey : cacheRow } )
      
    return S_OK( result )

  #.............................................................................
  # Cache refreshers

  def refreshCache( self ):
    """     
    Purges the cache and gets fresh data from the update function.
    
    :return: S_OK | S_ERROR. If the first, its content is the new cache.    
    """

    self.log.verbose( 'refreshing...' )
    
    self.__cache.purgeAll()
    
    newCache = self.__updateFunc()
    if not newCache[ 'OK' ]:
      self.log.error( newCache[ 'Message' ] )
      return newCache
    
    newCache = self.__updateCache( newCache[ 'Value' ] )
    
    self.log.verbose( 'refreshed' )
    
    return newCache

  #.............................................................................
  # Private methods    
     
  def __updateCache( self, newCache ):
    """
    Given the new cache dictionary, updates the internal cache with it. It sets
    a duration to the entries of <self.__lifeTime> seconds.
    
    :Parameters:
      **newCache** - `dict`
        dictionary containing a new cache
    
    :return: dictionary. It is newCache argument.    
    """
    
    for cacheKey, cacheValue in newCache.items():
      self.__cache.add( cacheKey, self.__lifeTime, value = cacheValue )
    
    # We are assuming nothing will fail while inserting in the cache. There is
    # no apparent reason to suspect from that piece of code.     
    return S_OK( newCache )
Example #9
class DictCache:

  def __init__( self, deleteFunction = False ):
    """
    Initialize the dict cache.
      If a delete function is specified it will be invoked when deleting a cached object
    """
    
    self.__lock = LockRing()
    self.__lock.getLock( self.__class__.__name__, recursive = True )
    
    self.__cache = {}
    self.__deleteFunction = deleteFunction

  def exists( self, cKey, validSeconds = 0 ):
    """
    Returns True/False if the key exists for the given number of seconds
      Arguments:
        - cKey : identification key of the record
        - validSeconds : The amount of seconds the key has to be valid for
    """
    self.__lock.acquire( self.__class__.__name__ )
    try:
      #Is the key in the cache?
      if cKey in self.__cache:
        expTime = self.__cache[ cKey ][ 'expirationTime' ]
        #If it's valid return True!
        if expTime > datetime.datetime.now() + datetime.timedelta( seconds = validSeconds ):
          return True
        else:
          #Delete expired
          self.delete( cKey )
      return False
    finally:
      self.__lock.release( self.__class__.__name__ )

  def delete( self, cKey ):
    """
    Delete a key from the cache
      Arguments:
        - cKey : identification key of the record
    """
    self.__lock.acquire( self.__class__.__name__ )
    try:
      if cKey not in self.__cache:
        return
      if self.__deleteFunction:
        self.__deleteFunction( self.__cache[ cKey ][ 'value' ] )
      del( self.__cache[ cKey ] )
    finally:
      self.__lock.release( self.__class__.__name__ )

  def add( self, cKey, validSeconds, value = None ):
    """
    Add a record to the cache
      Arguments:
        - cKey : identification key of the record
        - validSeconds : valid seconds of this record
        - value : value of the record
    """
    if max( 0, validSeconds ) == 0:
      return
    self.__lock.acquire( self.__class__.__name__ )
    try:
      vD = { 'expirationTime' : datetime.datetime.now() + datetime.timedelta( seconds = validSeconds ),
             'value' : value }
      self.__cache[ cKey ] = vD
    finally:
      self.__lock.release( self.__class__.__name__ )

  def get( self, cKey, validSeconds = 0 ):
    """
    Get a record from the cache
      Arguments:
        - cKey : identification key of the record
        - validSeconds : The amount of seconds the key has to be valid for
    """
    self.__lock.acquire( self.__class__.__name__ )
    try:
      #Is the key in the cache?
      if cKey in self.__cache:
        expTime = self.__cache[ cKey ][ 'expirationTime' ]
        # If it's still valid, return the cached value
        if expTime > datetime.datetime.now() + datetime.timedelta( seconds = validSeconds ):
          return self.__cache[ cKey ][ 'value' ]
        else:
          #Delete expired
          self.delete( cKey )
      return False
    finally:
      self.__lock.release( self.__class__.__name__ )

  def showContentsInString( self ):
    """
    Return a human readable string to represent the contents
    """
    self.__lock.acquire( self.__class__.__name__ )
    try:
      data = []
      for cKey in self.__cache:
        data.append( "%s:" % str( cKey ) )
        data.append( "\tExp: %s" % self.__cache[ cKey ][ 'expirationTime' ] )
        if self.__cache[ cKey ][ 'value' ]:
          data.append( "\tVal: %s" % self.__cache[ cKey ][ 'value' ] )
      return "\n".join( data )
    finally:
      self.__lock.release( self.__class__.__name__ )

  def getKeys( self, validSeconds = 0 ):
    """
    Get keys for all contents
    """
    self.__lock.acquire( self.__class__.__name__ )
    try:
      keys = []
      limitTime = datetime.datetime.now() + datetime.timedelta( seconds = validSeconds )
      for cKey in self.__cache:
        if self.__cache[ cKey ][ 'expirationTime' ] > limitTime:
          keys.append( cKey )
      return keys
    finally:
      self.__lock.release( self.__class__.__name__ )

  def purgeExpired( self, expiredInSeconds = 0 ):
    """
    Purge all entries that are expired or will be expired in <expiredInSeconds>
    """
    self.__lock.acquire( self.__class__.__name__ )
    try:
      keys = []
      limitTime = datetime.datetime.now() + datetime.timedelta( seconds = expiredInSeconds )
      for cKey in self.__cache:
        if self.__cache[ cKey ][ 'expirationTime' ] < limitTime:
          keys.append( cKey )
      for cKey in keys:
        if self.__deleteFunction:
          self.__deleteFunction( self.__cache[ cKey ][ 'value' ] )
        del( self.__cache[ cKey ] )
    finally:
      self.__lock.release( self.__class__.__name__ )

  def purgeAll( self ):
    """
    Purge all entries
    """
    self.__lock.acquire( self.__class__.__name__ )
    try:
      keys = self.__cache.keys()
      for cKey in keys:
        if self.__deleteFunction:
          self.__deleteFunction( self.__cache[ cKey ][ 'value' ] )
        del( self.__cache[ cKey ] )
    finally:
      self.__lock.release( self.__class__.__name__)
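A usage sketch for DictCache, assuming the class above and its LockRing/datetime imports are available; the key name and lifetimes are illustrative.

cache = DictCache()
cache.add("pilot:123", validSeconds=300, value={"Status": "Running"})

if cache.exists("pilot:123"):
    print(cache.get("pilot:123"))      # {'Status': 'Running'}

print(cache.getKeys(validSeconds=60))  # keys still valid for at least another 60 seconds
cache.purgeExpired()                   # drop anything already expired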
Example #10
class WORM:
    """
  Write One - Read Many
  """

    def __init__(self, maxReads=10):
        from DIRAC.Core.Utilities.LockRing import LockRing

        self.__lr = LockRing()
        self.__lock = self.__lr.getLock()
        self.__maxReads = maxReads
        self.__semaphore = threading.Semaphore(maxReads)

    def write(self, funcToCall):
        """
    Write decorator
    """

        def __doWriteLock(*args, **kwargs):
            try:
                self.__startWriteZone()
                return funcToCall(*args, **kwargs)
            finally:
                self.__endWriteZone()

        return __doWriteLock

    def read(self, funcToCall):
        """
    Read decorator
    """

        def __doReadLock(*args, **kwargs):
            try:
                self.__startReadZone()
                return funcToCall(*args, **kwargs)
            finally:
                self.__endReadZone()

        return __doReadLock

    def __startWriteZone(self):
        """
    Locks Event to prevent further threads from reading.
    Stops current thread until no other thread is accessing.
    PRIVATE USE
    """
        self.__lock.acquire()
        for i in range(self.__maxReads):
            self.__semaphore.acquire()
        self.__lock.release()

    def __endWriteZone(self):
        """
    Unlocks Event.
    PRIVATE USE
    """
        for i in range(self.__maxReads):
            self.__semaphore.release()

    def __startReadZone(self):
        """
    Start of the danger zone. This zone may or may not be a mutual exclusion zone.
    A counter is maintained to track how many threads are inside, so mutual exclusion can be enabled and disabled.
    PRIVATE USE
    """
        self.__semaphore.acquire()

    def __endReadZone(self):
        """
    End of danger zone.
    PRIVATE USE
    """
        self.__semaphore.release()
Example #11
class Cache:
    """
    Cache basic class.

    WARNING: None of its methods is thread safe. Acquire / Release lock when
    using them !
    """
    def __init__(self, lifeTime, updateFunc):
        """
        Constructor

        :Parameters:
          **lifeTime** - `int`
            Lifetime of the elements in the cache ( seconds ! )
          **updateFunc** - `function`
            This function MUST return a S_OK | S_ERROR object. In the case of the first,
            its value must be a dictionary.

        """

        # We set a 20% of the lifetime randomly, so that if we have thousands of jobs
        # starting at the same time, all the caches will not end at the same time.
        randomLifeTimeBias = 0.2 * random.random()

        self.log = gLogger.getSubLogger(self.__class__.__name__)

        self.__lifeTime = int(lifeTime * (1 + randomLifeTimeBias))
        self.__updateFunc = updateFunc
        # The records returned from the cache must be valid at least 30 seconds.
        self.__validSeconds = 30

        # Cache
        self.__cache = DictCache()
        self.__cacheLock = LockRing()
        self.__cacheLock.getLock(self.__class__.__name__)

    # internal cache object getter

    def cacheKeys(self):
        """
        Cache keys getter

        :returns: list with keys in the cache valid for at least twice the validity period of the element
        """

        # Here we need to have more than the validity period because of the logic of the matching:
        # * get all the keys with validity T
        # * for each key K, get the element K with validity T
        # This logic fails for elements just at the limit of the required time
        return self.__cache.getKeys(validSeconds=self.__validSeconds * 2)

    # acquire / release Locks

    def acquireLock(self):
        """
        Acquires Cache lock
        """
        self.__cacheLock.acquire(self.__class__.__name__)

    def releaseLock(self):
        """
        Releases Cache lock
        """
        self.__cacheLock.release(self.__class__.__name__)

    # Cache getters

    def get(self, cacheKeys):
        """
        Gets values for the given cacheKeys. If all are found ( present in the cache and
        valid ), returns S_OK with the results. If any is missing or no longer
        valid, returns S_ERROR.

        :Parameters:
          **cacheKeys** - `list`
            list of keys to be extracted from the cache

        :return: S_OK | S_ERROR
        """

        result = {}

        for cacheKey in cacheKeys:
            cacheRow = self.__cache.get(cacheKey,
                                        validSeconds=self.__validSeconds)

            if not cacheRow:
                return S_ERROR("Cannot get %s" % str(cacheKey))
            result.update({cacheKey: cacheRow})

        return S_OK(result)

    def check(self, cacheKeys, vO):
        """
        Modified get() method. Attempts to find keys with a vO value appended or 'all'
        value appended. The cacheKeys passed in are 'flattened' cache keys (no vO).
        Gets values for the given cacheKeys. If all are found ( present in the cache and
        valid ), returns S_OK with the results. If any is missing or no longer
        valid, returns S_ERROR.

        :Parameters:
          **cacheKeys** - `list`
            list of keys to be extracted from the cache

        :return: S_OK | S_ERROR
        """

        result = {}

        for cacheKey in cacheKeys:
            longCacheKey = cacheKey + ("all", )
            cacheRow = self.__cache.get(longCacheKey,
                                        validSeconds=self.__validSeconds)
            if not cacheRow:
                longCacheKey = cacheKey + (vO, )
                cacheRow = self.__cache.get(longCacheKey,
                                            validSeconds=self.__validSeconds)
                if not cacheRow:
                    return S_ERROR(
                        'Cannot get extended %s (neither for VO = %s nor for "all" Vos)'
                        % (str(cacheKey), vO))
            result.update({longCacheKey: cacheRow})

        return S_OK(result)

    # Cache refreshers

    def refreshCache(self):
        """
        Purges the cache and gets fresh data from the update function.

        :return: S_OK | S_ERROR. If the first, its content is the new cache.
        """

        self.log.verbose("refreshing...")

        self.__cache.purgeAll()

        newCache = self.__updateFunc()
        if not newCache["OK"]:
            self.log.error(newCache["Message"])
            return newCache

        newCache = self.__updateCache(newCache["Value"])

        self.log.verbose("refreshed")

        return newCache

    # Private methods

    def __updateCache(self, newCache):
        """
        Given the new cache dictionary, updates the internal cache with it. It sets
        a duration to the entries of <self.__lifeTime> seconds.

        :Parameters:
          **newCache** - `dict`
            dictionary containing a new cache

        :return: dictionary. It is newCache argument.
        """

        for cacheKey, cacheValue in newCache.items():
            self.__cache.add(cacheKey, self.__lifeTime, value=cacheValue)

        # We are assuming nothing will fail while inserting in the cache. There is
        # no apparent reason to suspect from that piece of code.
        return S_OK(newCache)
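The check() method above first probes the key extended with "all" and only then the key extended with the given VO; a tiny illustration of that lookup order, with made-up key and VO values:

cacheKey = ("SomeSite", "ReadAccess")  # a 'flattened' cache key, as passed to check()
vO = "lhcb"                            # example VO name
probes = [cacheKey + ("all",), cacheKey + (vO,)]
print(probes)  # [('SomeSite', 'ReadAccess', 'all'), ('SomeSite', 'ReadAccess', 'lhcb')]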
Example #12
class DictCache:
    def __init__(self, deleteFunction=False):
        """
    Initialize the dict cache.
      If a delete function is specified it will be invoked when deleting a cached object
    """

        self.__lock = LockRing()
        self.__lock.getLock(self.__class__.__name__, recursive=True)

        self.__cache = {}
        self.__deleteFunction = deleteFunction

    def exists(self, cKey, validSeconds=0):
        """
    Returns True/False if the key exists for the given number of seconds
      Arguments:
        - cKey : identification key of the record
        - validSeconds : The amount of seconds the key has to be valid for
    """
        self.__lock.acquire(self.__class__.__name__)
        try:
            #Is the key in the cache?
            if cKey in self.__cache:
                expTime = self.__cache[cKey]['expirationTime']
                #If it's valid return True!
                if expTime > datetime.datetime.now() + datetime.timedelta(
                        seconds=validSeconds):
                    return True
                else:
                    #Delete expired
                    self.delete(cKey)
            return False
        finally:
            self.__lock.release(self.__class__.__name__)

    def delete(self, cKey):
        """
    Delete a key from the cache
      Arguments:
        - cKey : identification key of the record
    """
        self.__lock.acquire(self.__class__.__name__)
        try:
            if cKey not in self.__cache:
                return
            if self.__deleteFunction:
                self.__deleteFunction(self.__cache[cKey]['value'])
            del (self.__cache[cKey])
        finally:
            self.__lock.release(self.__class__.__name__)

    def add(self, cKey, validSeconds, value=None):
        """
    Add a record to the cache
      Arguments:
        - cKey : identification key of the record
        - validSeconds : valid seconds of this record
        - value : value of the record
    """
        if max(0, validSeconds) == 0:
            return
        self.__lock.acquire(self.__class__.__name__)
        try:
            vD = {
                'expirationTime': datetime.datetime.now() + datetime.timedelta(seconds=validSeconds),
                'value': value
            }
            self.__cache[cKey] = vD
        finally:
            self.__lock.release(self.__class__.__name__)

    def get(self, cKey, validSeconds=0):
        """
    Get a record from the cache
      Arguments:
        - cKey : identification key of the record
        - validSeconds : The amount of seconds the key has to be valid for
    """
        self.__lock.acquire(self.__class__.__name__)
        try:
            #Is the key in the cache?
            if cKey in self.__cache:
                expTime = self.__cache[cKey]['expirationTime']
                # If it's still valid, return the cached value
                if expTime > datetime.datetime.now() + datetime.timedelta(
                        seconds=validSeconds):
                    return self.__cache[cKey]['value']
                else:
                    #Delete expired
                    self.delete(cKey)
            return False
        finally:
            self.__lock.release(self.__class__.__name__)

    def showContentsInString(self):
        """
    Return a human readable string to represent the contents
    """
        self.__lock.acquire(self.__class__.__name__)
        try:
            data = []
            for cKey in self.__cache:
                data.append("%s:" % str(cKey))
                data.append("\tExp: %s" % self.__cache[cKey]['expirationTime'])
                if self.__cache[cKey]['value']:
                    data.append("\tVal: %s" % self.__cache[cKey]['value'])
            return "\n".join(data)
        finally:
            self.__lock.release(self.__class__.__name__)

    def getKeys(self, validSeconds=0):
        """
    Get keys for all contents
    """
        self.__lock.acquire(self.__class__.__name__)
        try:
            keys = []
            limitTime = datetime.datetime.now() + datetime.timedelta(
                seconds=validSeconds)
            for cKey in self.__cache:
                if self.__cache[cKey]['expirationTime'] > limitTime:
                    keys.append(cKey)
            return keys
        finally:
            self.__lock.release(self.__class__.__name__)

    def purgeExpired(self, expiredInSeconds=0):
        """
    Purge all entries that are expired or will be expired in <expiredInSeconds>
    """
        self.__lock.acquire(self.__class__.__name__)
        try:
            keys = []
            limitTime = datetime.datetime.now() + datetime.timedelta(
                seconds=expiredInSeconds)
            for cKey in self.__cache:
                if self.__cache[cKey]['expirationTime'] < limitTime:
                    keys.append(cKey)
            for cKey in keys:
                if self.__deleteFunction:
                    self.__deleteFunction(self.__cache[cKey]['value'])
                del (self.__cache[cKey])
        finally:
            self.__lock.release(self.__class__.__name__)

    def purgeAll(self):
        """
    Purge all entries
    """
        self.__lock.acquire(self.__class__.__name__)
        try:
            keys = list(self.__cache)  # copy the keys so entries can be deleted while iterating
            for cKey in keys:
                if self.__deleteFunction:
                    self.__deleteFunction(self.__cache[cKey]['value'])
                del (self.__cache[cKey])
        finally:
            self.__lock.release(self.__class__.__name__)