Example #1
 def test_clean(self):
     cache = LRUCache(3)
     cache[1] = '1'
     cache[2] = '2'
     cache[3] = '3'
     self.assertEqual(3, len(cache))
     cache.clean()
     self.assertEqual(0, len(cache))
Example #2
 def test_instanciation_with_timeout_5sec(self):
     cache = LRUCache(5, 5)
     cache.put('first', 'first')
     cache.put('second', 'second')
     self.assertEqual(len(cache.keys()), 2)
     time.sleep(7)
     self.assertEqual(len(cache.keys()), 0)
     cache.stop_timer()
Example #3
    def __init__(self):
        edr_config = EDRConfig()

        try:
            with open(self.EDR_FACTIONS_CACHE, 'rb') as handle:
                self.factions_cache = pickle.load(handle)
        except:
            self.factions_cache = LRUCache(edr_config.lru_max_size(),
                                           edr_config.factions_max_age())
Example #4
 def test_get_move_front(self):
     cache = LRUCache(2)
     cache.set('foo', 10)
     cache.set('bar', 20)
     cache.get('foo')
     cache.set('baz', 30)
     self.assertEqual(cache.get('foo'), 10)
     self.assertEqual(cache.get('bar'), None)
     self.assertEqual(cache.get('baz'), 30)
Example #5
    def test_reset(self):
        c = LRUCache(3)
        c.put(4, 2)
        c.put(1, 1)
        c.put(2, 3)

        # after reset, nothing existing in the cache
        c.reset()
        self.assertEqual(c.size(), 0)
        self.assertEqual(c.cache(), {})
Example #6
 def test_get_element(self):
     cache = LRUCache(3)
     cache[1] = '1'
     cache[2] = '2'
     cache[3] = '3'
     self.assertEqual('test', cache.get(11, 'test'))
     self.assertEqual("[(1, '1'), (2, '2'), (3, '3')]", str(cache))
     self.assertEqual('1', cache.get(1, 'test'))
     self.assertEqual("[(2, '2'), (3, '3'), (1, '1')]", str(cache))
     self.assertEqual(3, len(cache))
Example #7
 def test_pop_element(self):
     cache = LRUCache(3)
     cache[1] = '1'
     cache[2] = '2'
     cache[3] = '3'
     self.assertEqual("[(1, '1'), (2, '2'), (3, '3')]", str(cache))
     self.assertEqual('1', cache.pop(1))
     self.assertEqual("[(2, '2'), (3, '3')]", str(cache))
     self.assertEqual(2, len(cache))
     self.assertRaises(KeyError, cache.pop, 1)
Example #8
    def test_get_dict_copy_and_keys(self):
        cache = LRUCache(3)
        cache[1] = '1'
        cache[2] = '2'
        cache[3] = '3'
        self.assertEqual({1: '1', 2: '2', 3: '3'}, cache.dict_copy())
        keys = cache.keys()

        # Sort keys to make sure they are in the same order on all platforms.
        keys.sort()
        self.assertEqual([1, 2, 3], keys)
        self.assertEqual(3, len(cache))
Example #9
def test_insert_over_capacity():
    cache = LRUCache(3)
    cache.put('key1', 'val1')
    cache.put('key2', 'val2')
    cache.put('key3', 'val3')
    cache.put('key4', 'val4')

    with pytest.raises(CacheMissException) as ex:
        cache.get('key1')
    # the assertion must sit outside the with-block, otherwise it never runs
    assert 'key1' in str(ex.value)
    assert cache.get('key2') == 'val2'
    assert cache.get('key3') == 'val3'
    assert cache.get('key4') == 'val4'
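Example #9 above (and the later put/get tests in Examples #39, #42 and #44) assumes a cache with a put/get interface that raises CacheMissException on a miss. None of those projects' implementations are shown on this page, so the following is only a minimal OrderedDict-based sketch of a cache that would satisfy such tests; the class and exception names are taken from the tests themselves, not from any of the original projects.

from collections import OrderedDict

class CacheMissException(KeyError):
    """Raised when a key is absent or has been evicted (name taken from the tests)."""

class LRUCache:
    def __init__(self, capacity):
        self.capacity = capacity
        self._data = OrderedDict()

    def put(self, key, value):
        if key in self._data:
            self._data.move_to_end(key)      # re-putting a key also refreshes it
        self._data[key] = value
        if len(self._data) > self.capacity:
            self._data.popitem(last=False)   # evict the least recently used entry

    def get(self, key):
        if key not in self._data:
            raise CacheMissException(key)    # the missing key ends up in str(ex.value)
        self._data.move_to_end(key)          # a hit makes the entry most recently used
        return self._data[key]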
Example #10
 def test_keys_appended_to_lru_cache_are_in_correct_order_after_get(self):
     context = LRUCache(capacity=5)
     context.set("Michael", "Jordan")
     context.set("Scotty", "Pippen")
     context.get("Michael")
     expected = OrderedDict([("Scotty", "Pippen"), ("Michael", "Jordan")])
     assert context.cache == expected
Example #11
 def __init__(self, server):
     self.server = server
     
     self.timespan = None
     self.records_check_interval = None
     config = EDRConfig()
     try:
         with open(self.EDR_LEGAL_RECORDS_CACHE, 'rb') as handle:
             self.records = pickle.load(handle)
     except:
         self.records = LRUCache(config.lru_max_size(), config.legal_records_max_age())
     
     self.timespan = config.legal_records_recent_threshold()
     self.records_check_interval = config.legal_records_check_interval()
Example #12
 def test_reset(self):
     cache = LRUCache(3,60)
     cache.set("a", 34)
     cache.reset()
     self.assertEqual(cache.keys(), [])
     self.assertEqual(cache.values(), [])
     self.assertEqual(cache.last_updated, None)
Example #13
 def test_key_is_removed_from_lru_cache_after_accessed_10_times(self):
     context = LRUCache(capacity=5)
     context.set("Michael", "Jordan")
     context.set("Scotty", "Pippen")
     for _ in range(11):
         context.get("Michael")
     expected = OrderedDict([("Scotty", "Pippen")])
     assert context.cache == expected
Example #14
def test_updating_item():
    lru_cache = LRUCache(capacity=3)
    lru_cache[10] = 5
    lru_cache[20] = 7
    lru_cache[40] = 9
    lru_cache[20] = 10
    assert list(lru_cache) == [(20, 10), (40, 9), (10, 5)]
Example #15
 def test_lru_remember_everything(self):
     cache = LRUCache(5)
     cache[1] = '1'
     cache[2] = '2'
     cache[3] = '3'
     self.assertEqual("[(1, '1'), (2, '2'), (3, '3')]", str(cache))
     self.assertEqual(3, len(cache))
Example #16
def test_getting_middle_item():
    lru_cache = LRUCache(capacity=3)
    lru_cache[10] = 5
    lru_cache[20] = 7
    lru_cache[40] = 9
    assert lru_cache[20] == 7
    assert list(lru_cache) == [(20, 7), (40, 9), (10, 5)]
Example #17
def test_getting_last_item_2():
    lru_cache = LRUCache(capacity=3)
    lru_cache[10] = 5
    lru_cache[20] = 7
    lru_cache[40] = 9
    assert lru_cache[10] == 5
    assert list(lru_cache) == [(10, 5), (40, 9), (20, 7)]
Example #18
    def __init__(self, cache_dir,
            cache_size_unblock=4096, cache_size_block=8192):
        """Cache for block test result.

        Two caches for block/unblock are used, so we can set different
        cache size for blocked or unblocked url cache.
        """
        self.save_trigger = 64
        self.cache_dir = cache_dir
        self.cache_filename_unblock = os.path.join(cache_dir,
                self.filename_unblock)
        self.cache_filename_block = os.path.join(cache_dir, self.filename_block)

        from lrucache import LRUCache
        self.cache_unblock = LRUCache(cache_size_unblock)
        self.cache_block = LRUCache(cache_size_block)
Example #20
def test_removing_item_from_cache():
    lru_cache = LRUCache(capacity=3)
    lru_cache[10] = 5
    lru_cache[20] = 7
    lru_cache[40] = 9
    del lru_cache[10]
    assert list(lru_cache) == [(40, 9), (20, 7)]
Example #21
def test_adding_single_item_to_full_cache():
    lru_cache = LRUCache(capacity=3)
    lru_cache[10] = 5
    lru_cache[20] = 5
    lru_cache[40] = 7
    lru_cache[50] = 9
    assert list(lru_cache) == [(50, 9), (40, 7), (20, 5)]
Example #22
 def __getitem__(self, key):
     item = None
     self.acquire()
     try:
         item = LRUCache.__getitem__(self, key)
     finally:
         self.release()
     return item
Example #23
def test_adding_multiple_items_to_full_cache():
    lru_cache = LRUCache(capacity=3)
    lru_cache[10] = 5
    lru_cache[20] = 5
    lru_cache[40] = 7
    lru_cache[50] = 9
    lru_cache[60] = 11
    assert list(lru_cache) == [(60, 11), (50, 9), (40, 7)]
Example #24
 def test_lru_keep_fresh(self):
     cache = LRUCache(3)
     cache[1] = '1'
     cache[2] = '2'
     cache[3] = '3'
     cache[4] = '4'
     self.assertEqual("[(2, '2'), (3, '3'), (4, '4')]", str(cache))
     self.assertEqual(3, len(cache))
Example #25
 def __getitem__(self, key):
     item = None
     self.acquire()
     try:
         item = LRUCache.__getitem__(self, key)
     finally:
         self.release()
     return item
Example #26
def test_add_item_after_removing():
    lru_cache = LRUCache(capacity=3)
    lru_cache[10] = 5
    lru_cache[20] = 7
    lru_cache[40] = 9
    del lru_cache[10]
    lru_cache[50] = 11
    assert list(lru_cache) == [(50, 11), (40, 9), (20, 7)]
Example #27
    def test_basics(self):
        cache = LRUCache(5, 60)
        self.assertEqual(cache.capacity, 5)

        sample = {"test": True, "foo": "bar"}
        cache.set("a", sample)
        result = cache.get("a")
        self.assertEqual(result, sample)
        result = cache.get("a")
        self.assertDictEqual(result, sample)

        cache.evict("a")
        result = cache.get("a")
        self.assertEqual(result, None)
Example #28
 def test_del_element(self):
     cache = LRUCache(4)
     cache[1] = '1'
     cache[2] = '2'
     cache[3] = '3'
     cache[4] = '4'
     del cache[3]
     self.assertEqual("[(1, '1'), (2, '2'), (4, '4')]", str(cache))
     self.assertEqual(3, len(cache))
Example #29
def test_add_multiple_items_after_removing():
    lru_cache = LRUCache(capacity=3)
    lru_cache[10] = 5
    lru_cache[20] = 7
    lru_cache[40] = 9
    del lru_cache[10]
    lru_cache[50] = 11
    lru_cache[60] = 13
    assert list(lru_cache) == [(60, 13), (50, 11), (40, 9)]
Example #30
    def test_lru_fresh_on_access(self):
        cache = LRUCache(3)
        cache[1] = '1'
        cache[2] = '2'
        cache[3] = '3'

        # Just access to first element.
        _ = cache[1]
        self.assertEqual("[(2, '2'), (3, '3'), (1, '1')]", str(cache))
        self.assertEqual(3, len(cache))
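Examples #1, #6, #7, #8, #15, #24, #28 and #30 all exercise a dict-style LRUCache: item assignment, get with a default, pop, del, clean, and a str() that lists entries from least to most recently used. As an illustration only, and not the actual implementation behind those tests, such a cache can be sketched on top of OrderedDict like this:

from collections import OrderedDict

class LRUCache:
    """Illustrative sketch; entries are ordered least to most recently used."""

    def __init__(self, capacity):
        self.capacity = capacity
        self._data = OrderedDict()

    def __setitem__(self, key, value):
        if key in self._data:
            self._data.move_to_end(key)
        self._data[key] = value
        if len(self._data) > self.capacity:
            self._data.popitem(last=False)   # drop the least recently used entry

    def __getitem__(self, key):
        self._data.move_to_end(key)          # plain access refreshes the entry
        return self._data[key]

    def __delitem__(self, key):
        del self._data[key]

    def __len__(self):
        return len(self._data)

    def __str__(self):
        return str(list(self._data.items()))

    def get(self, key, default=None):
        if key not in self._data:
            return default                   # a miss does not disturb the ordering
        return self[key]

    def pop(self, key):
        return self._data.pop(key)           # raises KeyError for an unknown key

    def keys(self):
        return list(self._data)

    def dict_copy(self):
        return dict(self._data)

    def clean(self):
        self._data.clear()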
Example #31
	def setup(self):
		self.lruc = LRUCache(10)
		self.lruc2 = LRUCache(10)
		self.lruc3 = LRUCache(10)
		self.lruc4 = LRUCache(10)
		self.lruc5 = LRUCache(10)
		self.lruc6 = LRUCache(10)
		self.lruc_test_store= LRUCache(6)
		self.lruc_test_store_same_item= LRUCache(10)
Example #32
 def test_stale(self):
     cache = LRUCache(5, 1)
     sample = {"test": True, "foo": "bar"}
     key = "a"
     cache.set(key, sample)
     self.assertFalse(cache.is_stale(key))
     time.sleep(1.2)
     self.assertTrue(cache.is_stale(key))
     self.assertEqual(cache.get("a"), None)
 def __init__(self):
     self.max = 0
     self.previous_max = 0
     self.min = 100.0
     self.previous_min = 0
     self.sum_scanned = 0
     self.sum_awarded = 0
     self.distribution = {"last_index": 0, "bins": [0]*25}
     self.scanned_nb = 0
     self.awarded_nb = 0
     self.awarded_bounties = deque(maxlen=20)
     self.scans = deque(maxlen=20)
     self.efficiency = deque(maxlen=20)
     self.max_efficiency = 5000000
     self.max_normal_bounty = 350000 * 4 # appears to be the highest bounty per faction for NPC and the 4x boost from Nov 30th 2020
     now = EDTime.py_epoch_now()
     self.start = now
     self.current = now
     edr_config = EDRConfig()
     self.scans_cache = LRUCache(edr_config.lru_max_size(), edr_config.blips_max_age())
     self.last = {"timestamp": now, "bounty": None}
Example #34
 def __init__(self, func, targets=None, **kwargs):
     if targets is None:
         targets = []
     elif not hasattr(targets, '__iter__'):
         targets = [targets]
     self.targets = targets
     self.func = func
     #   Raise errors by default.
     self.error = kwargs.pop('error', None)#'raise')
     self.name = (
         kwargs.get('name') or self.name or
         (
             self.func.__class__.__name__
             if hasattr(func, '__class__') else self.func.__name__
         )
     )
     # self.ids = set()
     self.ids = LRUCache(max_size=10000)
     self.idx = 0
     self.idx_skip = 0
     #   Make sure we clean up if the program exits before the finalize
     #   functions are called.
     if hasattr(self.func, 'finalize'):
         atexit.register(self.finalize)
Example #35
class TestLRUCache(object):

	def setup(self):
		self.lruc = LRUCache(10)
		self.lruc2 = LRUCache(10)
		self.lruc3 = LRUCache(10)
		self.lruc4 = LRUCache(10)
		self.lruc5 = LRUCache(10)
		self.lruc6 = LRUCache(10)
		self.lruc_test_store= LRUCache(6)
		self.lruc_test_store_same_item= LRUCache(10)

	##############################
	# test construction
	##############################	
	def test_init(self):
		assert 0 == self.lruc._curSize
		assert 10 == self.lruc.maxSize

	##############################
	# test private methods
	##############################	
	def test_exceedesMaxSize(self):
		assert True == self.lruc._willExceedesMaxSize(41)
		assert False == self.lruc._willExceedesMaxSize(1)

	def test_isInCache(self):
		self.lruc2._add(Entry('b','boy',3))
		assert True==self.lruc2._isInCache('b')
		assert False==self.lruc2._isInCache('a') # we didn't put 'a' in this cache
		self.lruc2.store('c','cat',3)
		assert True ==self.lruc2._isInCache('c')

	def test_add_entry(self):
		beforeCacheLen  = len(self.lruc._cache)
		beforePQLen  = len(self.lruc._ordering)
		ent = Entry('a','apple',5)
		self.lruc._add_entry(ent);
		assert self.lruc._curSize == 5
		assert self.lruc._cache['a'].value=='apple'
		afterCacheLen  = len(self.lruc._cache)
		afterPQLen  = len(self.lruc._ordering)
		assert beforeCacheLen == beforePQLen ==0
		assert afterCacheLen ==afterPQLen ==1

	def test_add_entry_correct_priority(self):
		ent = Entry('a','apple',5)	
		ent2 = Entry('b','cat',3)	
		# add some entries to the cache
		self.lruc3._add_entry(ent)
		self.lruc3._add_entry(ent2)
		# test that they are in the right order
		smallest = heapq.heappop(self.lruc3._ordering)
		assert smallest == ent

	def test_update_entry_correct_priority(self):
		ent = Entry('a','apple',5)	
		time.sleep(0.001)
		ent2 = Entry('b','cat',3)	
		self.lruc4._add_entry(ent)
		self.lruc4._add_entry(ent2)
		time.sleep(0.001)
		ent.touch()
		self.lruc4._update_entry(ent)
		smallest = heapq.heappop(self.lruc4._ordering)
		assert smallest == ent2

	@raises(KeyError)
	def test_evict(self):
		ent = Entry('a','apple',5)	
		ent2 = Entry('b','boy',3)	
		self.lruc6._add_entry(ent)
		self.lruc6._add_entry(ent2)
		self.lruc6._evict()
		assert len(self.lruc6._ordering) == len(self.lruc6._cache) ==  1
		assert_raises(KeyError, self.lruc6.fetch('a'), "a")
		assert 'boy' == self.lruc6.fetch('b')

	##############################
	# test public api methods
	##############################
	def test_fetch(self):
		ent = Entry('a','apple',5)	
		self.lruc =  LRUCache(10)
		self.lruc._add_entry(ent)
		fetched = self.lruc.fetch(ent.key)
		assert fetched =='apple'
		

	@raises(KeyError)
	def test_store(self):
		assert len(self.lruc_test_store._ordering) == len(self.lruc_test_store._cache) ==  0
		self.lruc_test_store.store('a','apple',5)
		assert len(self.lruc_test_store._ordering) == len(self.lruc_test_store._cache) ==  1
		self.lruc_test_store.store('b','boy',3)
		assert len(self.lruc_test_store._ordering) == len(self.lruc_test_store._cache) ==  1 # a got evicted
		assert_raises(KeyError, self.lruc6.fetch('a'), "a") 
		assert 'boy' == self.lruc6.fetch('b') # only b is left

	def store_same_value_test_helper(self):
		lruc_test_store_same_item = LRUCache(10)
		assert len(self.lruc_test_store_same_item._ordering) == len(self.lruc_test_store_same_item._cache) ==  0
		self.lruc_test_store_same_item.store('b','boy',3)
		time.sleep(0.001)
		self.lruc_test_store_same_item.store('c','cat',3)
		time.sleep(0.001)
		self.lruc_test_store_same_item.store('d','dog',3)
		time.sleep(0.001)
		assert len(self.lruc_test_store_same_item._ordering) == len(self.lruc_test_store_same_item._cache) ==  3

		boy = self.lruc_test_store_same_item.fetch('b') # update the oldest entry's timestamp
		self.lruc_test_store_same_item.store('f','fun',3) # this evicts 'c' - cat, the next least recently used

		assert len(self.lruc_test_store_same_item._ordering) == len(self.lruc_test_store_same_item._cache) ==  3

	# test to see that when trying to store same item again, updates the item's timestamp 
	def test_store_same_value(self):
		self.store_same_value_test_helper()
		fun = self.lruc_test_store_same_item.fetch('f')
		assert 'fun' == fun
		dog = self.lruc_test_store_same_item.fetch('d')
		assert 'dog' == dog
		boy = self.lruc_test_store_same_item.fetch('b')
		assert 'boy' == boy

	@raises(KeyError)
	def test_store_same_value_with_keyerror(self):
		self.store_same_value_test_helper()
		cat = self.lruc_test_store_same_item.fetch('c') # raises key error, because 'c' was evicted
Example #36
 def __setitem__(self, key, obj):
     self.acquire()
     LRUCache.__setitem__(self, key, obj)
     self.release()
Example #37
class BlockCache:
    filename_unblock = "lookup-cache-unblock.json"
    filename_block = "lookup-cache-block.json"
    def __init__(self, cache_dir,
            cache_size_unblock=4096, cache_size_block=8192):
        """Cache for block test result.

        Two caches for block/unblock are used, so we can set different
        cache size for blocked or unblocked url cache.
        """
        self.save_trigger = 64
        self.cache_dir = cache_dir
        self.cache_filename_unblock = os.path.join(cache_dir,
                self.filename_unblock)
        self.cache_filename_block = os.path.join(cache_dir, self.filename_block)

        from lrucache import LRUCache
        self.cache_unblock = LRUCache(cache_size_unblock)
        self.cache_block = LRUCache(cache_size_block)

    def load(self):
        self.cache_unblock.load(self.cache_filename_unblock)
        self.cache_block.load(self.cache_filename_block)

    def make_key(self, url, *args):
        key = url + json.dumps(args, sort_keys=True, indent=None,
                separators= (',', ':'))
        return key

    def save(self, force=False):
        trigger = 0 if force else self.save_trigger
        if self.cache_unblock.insert_count > trigger:
            self.cache_unblock.save(self.cache_filename_unblock)
            self.cache_unblock.reset_insert_count()
        if self.cache_block.insert_count > trigger:
            self.cache_block.save(self.cache_filename_block)
            self.cache_block.reset_insert_count()

    def __getitem__(self, key):
        if key in self.cache_unblock:
            ret = self.cache_unblock[key]
        else:
            ret = self.cache_block[key]
        return ret

    def __setitem__(self, key, value):
        if value:
            self.cache_block[key] = value
        else:
            self.cache_unblock[key] = value

    def __contains__(self, key):
        return key in self.cache_block or key in self.cache_unblock
Example #38
	def test_fetch(self):
		ent = Entry('a','apple',5)	
		self.lruc =  LRUCache(10)
		self.lruc._add_entry(ent)
		fetched = self.lruc.fetch(ent.key)
		assert fetched =='apple'
Example #39
def test_update_one():
    cache = LRUCache(3)
    cache.put('key', 'val')
    cache.put('key', 'new val')
    assert cache.get('key') == 'new val'
Example #40
class Layer(object):
    """
    Class Layer

     Description:

     Constructors:
              Layer(map, layername)

     Methods:
              IO operations
              --------------
              open(mode)                Opens a MapSend layer filepair
                                        .lay/.clt for read (mode='r') or
                                        write (mode='w').
              close()                   Closes an opened MapSend layer filepair

              read()                    Read index file and header of layer file

              write(outlayername)       Read contents of layer that is opened for
                                        read and write a copy of the layer to
                                        layer name outlayername.


    """
    def __init__(self, m, name, filename, layertype=None, nlevels=0,
                 fileidentifier=None):
        self.map = m

        ## Copy resolution and reference point from map
        self._scale = m.scale                           # [xscale, yscale]
        self._refpoint = m.refpoint                     # Reference point for conversion to discrete coordinate
        self._bbox = None
        self._dbbox = None

        self.nlevels = nlevels # Cell levels

        if m.bboxrec:
            self.dbboxrec = m.bboxrec.todiscrete(self._refpoint, self._scale)
            self.estimator = None
        else:
            ## Create layer parameter estimator object
            self.estimator = LayerParamEstimator(self)

        if filename[0:len(m.mapnumstr)] != m.mapnumstr:
            filename = m.mapnumstr + filename

        if len(filename) > 8:
            raise Exception('Length of filename %s must not exceed 8'%filename)

        self.name = name
        self.filename = filename
        self.cellelementid = 0
        self.isopen = False

        self.clearCells()

        self.mode = None
        self.bigendian = m.bigendian
        self.writedrc = False         # Write DRC file needed by mapsend software

        self.nobjects = 0
        self.category = 0 # 0=Normal layer, 1=Artificial layer
        self.fileidentifier = fileidentifier
        self.layertype = layertype

        ## Statistics from header
        self.firstcell = None
        self.lastcell = None

        ## Cell position in layer file (for use in read mode)
        self.cellfilepos = {}                          
        
        self.fhlay = None

        self.draworder = 0

        self.packed = False
        self.packer = None

    def __eq__(self, a):
        return isinstance(a, Layer) and self.name == a.name
    
    def __hash__(self):
        return hash(self.name)

    def clearCells(self):
        self.modifiedcells = {}        # Dictionary of modified cells keyed by cellnumber
        self.cellcache = LRUCache(size=32)
        self.cellfilepos = {}
        self.cellnumbers = []

    def setUnpackTable(self, filename):
        self.packed = True
        self.packer = layerpacker.LayerPacker(self.map.mapdir.open(filename).read())

    def open(self, mode):
        self.mode = mode
        if not self.isopen:
            if mode=='r' or mode=='a':
                try:
                    # First try open the layer as little endian
                    self.layerfilename = self.filename+".lay"
                    self.indexfilename = self.filename+".clt"
                    self.fhlay = self.map.mapdir.open(self.layerfilename,"r")
                    self.bigendian = False
                except IOError:
                    self.layerfilename = self.filename+".yal"
                    self.indexfilename = self.filename+".tlc"
                    self.fhlay = self.map.mapdir.open(self.layerfilename,"r")
                    self.bigendian = True
            elif mode=='w':
                if not self.bigendian:
                    self.layerfilename = self.filename+".lay"
                    self.indexfilename = self.filename+".clt"
                else:
                    self.layerfilename = self.filename+".yal"
                    self.indexfilename = self.filename+".tlc"

                if not self.map.inmemory:
                    self.shelffile = tempfile.mktemp()
                    self.shelf = shelve.open(self.shelffile)

                self.fhlay = self.map.mapdir.open(self.layerfilename,"wb")

            self.isopen = True

            if self.mode in ('r','a'):
                self.read_index()
                self.read_header()

    def optimize(self):
        """Optimize nlevels parameter and calculate bounding box

        This function only works when the cell is opened in write only mode
        without bounding box.

        The function works like this. A proper value for the nlevel parameter and a bounding box
        are first estimated.
        At the beginning there is only one cell but with the new nlevel value the number of cells
        may grow. Hence the cell elements might have to be placed in new cells.

        Returns a dictionary of mapping between old and new cellreferences
        
        """

        if self.mode == 'w' and self._bbox == None:
            remapdict = {}

            logging.debug("Optimizing layer "+self.name)

            dbboxrec = self.estimator.calculateDBBox()

            if dbboxrec:
                self.dbboxrec = dbboxrec

            ## Get nlevels estimate
            self.nlevels = self.estimator.calculateNlevels()

            ## Adjust bounding box borders to get integer cellsize
            if dbboxrec:
                self.dbboxrec = dbboxrec

            ## Update the bounding box of cell 1
            self.getCell(1).setbbox()

            if self.nlevels > 0:
                cellelements = []
                cellelementrefs = []

                oldcell1 = self.getCell(1)

                self.clearCells()

                ## Loop over the elements in the old cell 1 
                ## The elements need to be accessed in reversed order, otherwise the 
                ## cellelement numbers would change
                ## during the loop
                for i in range(len(oldcell1)-1,-1,-1):
                    ce = oldcell1.pop(i)
                    newcellrefs = self.addCellElement(ce)
                    remapdict[(oldcell1.cellnum, i)] = newcellrefs[0]

            return remapdict
        else:
            return {}

    def close(self):
        if (self.mode=='w') or (self.mode=='a') and len(self.modifiedcells)>0:

            ## Use estimator to calculate bounding box
            if self._bbox == None:
                self.optimize()
            
            tmplay = tempfile.NamedTemporaryFile('wb', delete=False)

            self.write_header(tmplay)

            ## The cells must be written in cell number order
            self.cellnumbers.sort()

            # Merge unchanged cells with modified cells
            for cellnum in self.cellnumbers:
                if cellnum in self.modifiedcells:
                    cell = self.modifiedcells[cellnum]
                    celldata = cell.serialize()
                    # Update index
                    self.cellfilepos[cellnum] = [tmplay.tell(), len(celldata)]
                    tmplay.write(celldata)
                else:
                    self.fhlay.seek(self.cellfilepos[cellnum][0])
                    celldata = self.fhlay.read(self.cellfilepos[cellnum][1])
                    self.cellfilepos[cellnum][0] = tmplay.tell()
                    tmplay.write(celldata)

            # Rewind and write the header again with the new cell index statistics
            tmplay.seek(0)
            self.write_header(tmplay)

            # Copy temporary file to layer file
            tmplay.close()
            tmplay = open(tmplay.name, 'rb')
            shutil.copyfileobj(tmplay, self.fhlay)
            tmplay.close()
            os.unlink(tmplay.name)

            # Create index file
            fhidx = self.map.mapdir.open(self.indexfilename,"wb")
            fhdrc = None
            if self.writedrc:
                fhdrc = self.map.mapdir.open(self.filename+".drc", "wb")

            if fhdrc:
                fhdrc.write( self.pack("I", len(self.cellnumbers)))


            for cellnum in self.cellnumbers:
                fhidx.write( self.pack("III", cellnum, self.cellfilepos[cellnum][0], self.cellfilepos[cellnum][1]) )
                if fhdrc:
                    fhdrc.write( self.pack("III", cellnum, self.cellfilepos[cellnum][0], self.cellfilepos[cellnum][1]) )
                
            fhidx.close()
            if fhdrc:
                fhdrc.close()
            
            if self.fhlay:
                self.fhlay.close()

        if self.mode == 'w' and not self.map.inmemory:
            os.unlink(self.shelffile)
        
        self.isopen = False

    def read_index(self):
        if self.mode in ['r','a']:
            fhidx = self.map.mapdir.open(self.indexfilename, "r")

            self.cellfilepos = {}
            self.cellnumbers = []
            while 1:
                data = fhidx.read(12)

                if len(data) == 0: break

                [cellnum,offset,cellsize] = self.unpack("3i",data)
                self.cellfilepos[cellnum] = [offset,cellsize]
                self.cellnumbers.append(cellnum)

    def read_header(self):
        self.dheader = self.fhlay.read(0x80)

        [self.category] = self.unpack("i", self.dheader[4:])
        [self.fileidentifier] = self.unpack("H", self.dheader[8:])

        tmp = self.unpack("4f", self.dheader[0xa:])
        self._bbox = Rec(N.array([tmp[0],tmp[2]]), 
                         N.array([tmp[1],tmp[3]]))

        [self.nlevels] = self.unpack("h", self.dheader[0x1a:])
        [self.nobjects] = self.unpack("i", self.dheader[0x1c:])

        [xscale] = self.unpack("d", self.dheader[0x20:])
        [yscale] = self.unpack("d", self.dheader[0x28:])

        self._scale = N.array([xscale, yscale])

        self._refpoint = N.array(self.unpack("2f", self.dheader[0x30:]))

        tmp = self.unpack("4i", self.dheader[0x38:])
        self._dbbox = Rec(N.array([tmp[0],tmp[1]]), 
                          N.array([tmp[2],tmp[3]]))

        [self.layertype] = self.unpack("b", self.dheader[0x48:])
        [unknown49] = self.unpack("b", self.dheader[0x49:])
        [self.largestcellsize] = self.unpack("i", self.dheader[0x4a:])
        [self.firstcell] = self.unpack("i", self.dheader[0x4e:])
        [self.lastcell] = self.unpack("i", self.dheader[0x52:])

        assert unknown49 == 0  # asserting a tuple is always true; compare the value instead

    def header_info(self):
        info = ""
        info += "Category: 0x%x"%self.category + '\n'
        info += "Fileidentifier: 0x%x"%self.fileidentifier +'\n'
        info += "Number of levels: %d"%self.nlevels + '\n'
        info += "Number of elements: %d"%self.nobjects + '\n'
        info += "Bbox: %s"%self._bbox +'\n'
        info += "Scale: %s"%str(self._scale) + '\n'
        info += "Refpoint: %s"%str(self._refpoint) + '\n'
        info += "Layer type: 0x%x"%self.layertype + '\n'
        info += "Largest cell size: 0x%x"%self.largestcellsize + '\n'
        info += "First cell: 0x%x"%self.firstcell + '\n'
        info += "Last cell: 0x%x"%self.lastcell + '\n'

        info += "Discrete Bbox: %s"%self._dbbox +'\n'
        return info

    def write_header(self, fh):
        header = "MHGO"
        header = header + self.pack("i", self.category)
        if self.fileidentifier == None:
            fileidentifier = 0xc000 | self.map.getLayerIndex(self)
        else:
            fileidentifier = self.fileidentifier

        header = header + self.pack("H", fileidentifier)

        header = header + self.pack("4f", self._bbox.minX(), self._bbox.maxX(),
                                    self._bbox.minY(), self._bbox.maxY())
        header = header + self.pack("h", self.nlevels)
        header = header + self.pack("i", self.nobjects)
        header = header + self.pack("d", self._scale[0])
        header = header + self.pack("d", self._scale[1])
        header = header + self.pack("f", self._refpoint[0])
        header = header + self.pack("f", self._refpoint[1])
        header = header + self.pack("4i", self._dbbox.minX(), self._dbbox.minY(), self._dbbox.maxX(), self._dbbox.maxY())

        header = header + self.pack("b", self.layertype)
        header = header + self.pack("b", 0)

        if len(self.cellfilepos)>0:
            largestcellsize = max([d[1] for d in self.cellfilepos.values()])
        else:
            largestcellsize = 0
        header = header + self.pack("i", largestcellsize)

        if len(self.cellnumbers) == 0:
            header = header + self.pack("i", 0) # First cell number
            header = header + self.pack("i", 0) # Last cell number
        else:
            header = header + self.pack("i", self.cellnumbers[0]) # First cell number
            header = header + self.pack("i", self.cellnumbers[-1]) # Last cell number
        
        header = header + chr(0)*(128-len(header))
                                    
        fh.write(header)
        return len(header)

    def unpack(self,types,data):
        if self.bigendian:
            prefix=">"
        else:
            prefix="<"
        return struct.unpack(prefix + types,
                             data[0:struct.calcsize(prefix+types)])

    def pack(self, types, *data):
        if self.bigendian:
            prefix=">"
        else:
            prefix="<"
        return struct.pack(prefix+types, *data)

    def markCellModified(self, cellnum):
        self.modifiedcells[cellnum] = self.getCell(cellnum)

    def getCells(self):
        for cn in self.cellnumbers:
            yield self.getCell(cn)
    
    def getCell(self, cellnum):
        if cellnum in self.modifiedcells:
            return self.modifiedcells[cellnum]
        
        if cellnum in self.cellcache:
            return self.cellcache[cellnum]

        # New cell
        if self.mode == 'w':
            if self.map.inmemory:
                cell = CellInMemory(self, cellnum)
            elif self.nlevels == 0:
                cell = CellShelve(self, cellnum)
            else:
                cell = CellCommonShelve(self, cellnum, self.shelf)
        else:
            cell = CellInMemory(self, cellnum)

        # Deserialize cell if present in the cell index
        if cellnum in self.cellfilepos:
            self.fhlay.seek(self.cellfilepos[cellnum][0])
            celldata = self.fhlay.read(self.cellfilepos[cellnum][1])

            if self.packed:
                celldata = self.packer.unpack(celldata)
            
            cell.deSerialize(celldata)
        
        self.cellcache[cellnum] = cell

        return cell

    def close_cell(self, cellnum):
        self.cellcache.pop(cellnum)

    def getCellElements(self):
        for c in self.getCells():
            for s in c.getCellElements():
                yield s
	
    def getCellElementsAndRefs(self):
        for c in self.getCells():
            for nincell, s in enumerate(c.getCellElements()):
                yield (s, (c.cellnum, nincell))
	
    def getCellElement(self, cellref):
        """Get cell element from a (cellnum, num_in_cell) pair """
        (cellnum, num_in_cell) = cellref
        
        if self.map.debug:
            print "Cellcache: "+str(self.cellcache.keys())
        cell = self.getCell(cellnum)
        try:
            cellelement = cell.getCellElement(num_in_cell)
        except IndexError:
            raise IndexError,"num_in_cell (%d) is outside the # of cellelements (%d) in cell %d, layer %s"%(num_in_cell,len(cell),cellnum, self.name)

        return cellelement
	
    def addCellElement(self, cellelem, cellnum = None):
        """Add cell element to layer. The element might be divided into smaller elements.
         Returns list of (cellnum,# in cell) pairs"""
        if self.mode in ('r', None):
            raise ValueError('Layer must be opened in write or append mode to add cell elements')

        ## Calculate the minimum cell that contains the extents of the new element
        if self._bbox != None:
            if cellnum == None:
                cellnum, level, dcellrec = get_best_cell(self._dbbox,
                                                        cellelem.dbboxrec.negY(),
                                                        self.nlevels)

#            assert cellelem.bboxrec(self).iscoveredby(self.bboxrec, xmargin=self._scale[0], ymargin=self._scale[1]), "CellElement is outside layer boundaries:" + \
#                   str(self.bboxrec(self)) + " cellelement:" + str(cellelem.bboxrec(self))
        else:
            if self.nlevels > 0:
                raise ValueError('Cannot add cell element to layer with nlevels>0 and no bounding box')
            cellnum = 1

            self.estimator.addCellElement(cellelem)
         
        cellelem.cellnum = cellnum
        
        cell = self.getCell(cellnum)

        assert cell.bboxrec == None or \
            cellelem.dbboxrec.iscoveredby(cell.dbboxrec), \
            "Incorrect cell %d with bbox %s for cell element with bbox %s"%(cellnum, cell.dbboxrec, str(cellelem.dbboxrec))
        
        nincell = cell.addCellElement(cellelem)
        
        assert self.nlevels == 0 or nincell < 2**16
        
        if not cell in self.modifiedcells:
            self.modifiedcells[cellnum] = cell
        if not cellnum in self.cellnumbers:
            self.cellnumbers.append(cellnum)

        self.nobjects += 1

        return [(cellnum, nincell)]

    def updateCellElement(self, cellelementref, cellelement):
        """the updateCellElement must be called when a cell element has been updated"""
        self.getCell(cellelementref[0]).updateElement(cellelementref[1], cellelement)

    def getName(self):
        return self.name

    def getFileName(self):
        return self.filename

    def getNObjects(self):
        return self.nobjects

    ## Bounding box property
    def get_bboxrec(self):
        if self._bbox:
            return self._bbox.negY()
        else:
            return None
    def set_bboxrec(self, rec):
        if self.mode == 'r':
            raise ValueError("Can't change boundary rectangle in read-only mode")

        self.dbboxrec = rec.todiscrete(self._refpoint, self._scale)

        # If in append mode all cell elements must be re-added to fit the new
        # cell boundaries
        if self.mode == 'a':
            cellelements = [e for e in self.getCellElements()]
            
            self.clearCells()

            for e in cellelements:
                self.addCellElement(e)            

    bboxrec = property(get_bboxrec, set_bboxrec, "Bounding box rectangle")

    def get_dbboxrec(self):
        if self._dbbox:
            return self._dbbox.negY()
        else:
            return None

    def set_dbboxrec(self, drec):
        first_time = self._dbbox == None

        if not first_time and self.mode == 'r':
            raise ValueError("Can't change boundary rectangle in read-only mode")

        self._dbbox = drec.negY()
        self._bbox = self._dbbox.tocontinous(self._refpoint, self._scale)

        if self._dbbox.width % (2 ** self.nlevels) != 0 or self._dbbox.height % (2 ** self.nlevels) != 0:
            logging.warn("bbox should be a multiple of minimum cell size, adjusting bbox borders")
            n = 2 ** (self.nlevels + 1)
            width = self._dbbox.width
            height = self._dbbox.height

            width += -width % n
            height += -height % n
                
            self._dbbox = Rec(self._dbbox.c1, self._dbbox.c1 + N.array([width, height]))

        # If in append mode all cell elements must be re-added to fit the new
        # cell boundaries
        if not first_time and self.mode == 'a':
            cellelements = [e for e in self.getCellElements()]
            
            self.clearCells()

            for e in cellelements:
                self.addCellElement(e)            

    dbboxrec = property(get_dbboxrec, set_dbboxrec, "Bounding box rectangle discrete coordinates")

    @property
    def refpoint(self): return self._refpoint
    @property
    def scale(self): return self._scale

    def getLayerType(self): return self.layertype    

    def calc_cell_extents(self, cellnum): 
        """
        Calculate discrete bounding box of a cell

        Note, the extents return is in the internal coordinates with negated Y-values
        """
        ## Calculate cell level
        level=0
        while cellnum > totcells_at_level(level):
           level=level+1

        n = 2**level             # Number of rows/cols 

        lbb = self._dbbox
        layerwidth = lbb.width
        layerheight = lbb.height
        layersize = N.array([layerwidth, layerheight])

        relcnum = cellnum - (totcells_at_level(level-1)+1)

        cellsize = layersize / n

        if relcnum < n*n:
            mincorner = N.array([relcnum % n, relcnum / n]) * cellsize
            maxcorner = mincorner + cellsize
        else:
            relcnum = relcnum-n*n
            mincorner = N.array([relcnum % (n + 1), relcnum / (n + 1)]) * cellsize - cellsize/2
            maxcorner = mincorner + layersize / n

            mincorner[N.where(mincorner < 0)] = 0
            maxcorner[0] = min(maxcorner[0], layerwidth)
            maxcorner[1] = min(maxcorner[1], layerheight)

        return Rec(mincorner + lbb.c1, maxcorner + lbb.c1)

    def layer_header_nok(self, pcnt):
        """Header check from magsendtool"""
	
 	pcnt /= 100.0
        rc = 0
        if abs(((self._bbox.maxY() -self._bbox.minY())/self._scale[1]) - (self._dbbox.maxY() - self._dbbox.minY())) > pcnt * (self._dbbox.maxY() - self._dbbox.minY()):
            rc |= 1
        if abs(((self._bbox.maxX() -self._bbox.minX())/self._scale[0]) - (self._dbbox.maxX() - self._dbbox.minX())) > pcnt * (self._dbbox.maxX() - self._dbbox.minX()):
            rc |= 2
        if self._refpoint[1] != 0.0 and abs(self._bbox.centerY() - self._refpoint[1])/self._scale[1] > 0.75:
            rc |= 4
        if self._refpoint[0] != 0.0 and abs(self._bbox.centerX() - self._refpoint[0])/self._scale[0] > 0.75:
            rc |= 8
        return rc


    def check(self):
        version=1
        if self.layer_header_nok(0.1):
            version+=1
            if self.layer_header_nok(1.0):
                version+=1
                if self.layer_header_nok(5.0):
                    raise ValueError('Incorrect layer format rc=%d at 5%% error'%self.layer_header_nok(5.0))
        return version

    @property
    def ncells(self):
        """Return the number of cells in the layer"""
        return len(self.cellnumbers)

    @property
    def info(self):
        res = "Name: "+self.getName()+"\n"
        res += "Number of objects: "+str(self.getNObjects())+"\n"
        res += "Number of cells: "+str(len(self.cellnumbers))+"\n"
        res += "Reference point: "+str(self._refpoint)+"\n"
        res += "Scale: "+str(self._scale)+"\n"
        res += "Boundaries: "+str(self._bbox)+"\n"
        res += "Discrete Boundaries: "+str(self._dbbox)+"\n"
        if self.fileidentifier:
            res += "Identifier: 0x%x\n"%self.fileidentifier
        res += "# of levels: %d\n"%self.nlevels
        res += "category: %d\n"%self.category
        if self.layertype != None:
            res += "layertype: %d\n"%self.layertype
        res += 'reflat: %f\n'%self._refpoint[1]
        res += 'reflon: %f\n'%self._refpoint[0]
        if self.firstcell:
            res += 'first cell: %d\n'%self.firstcell
        if self.lastcell:
            res += 'last cell: %d\n'%self.lastcell
        return res


    def float2discrete(self, points):
        """Convert list of coordinates from floating point to discrete coordinates"""
        return ((N.array(points) - self.refpoint) / self.scale).round().astype(int)

    def discrete2float(self, points):
        """Convert list of coordinates from discrete to floating point coordinates"""
        return N.array(points) * self.scale + self.refpoint
    
    def __repr__(self):
        return self.__class__.__name__ + '(' + self.getName() + ')'
Example #41
 def clearCells(self):
     self.modifiedcells = {}        # Dictionary of modified cells keyed by cellnumber
     self.cellcache = LRUCache(size=32)
     self.cellfilepos = {}
     self.cellnumbers = []
Example #42
def test_insert_many():
    cache = LRUCache(3)
    cache.put('key', 'val')
    cache.put('key2', 'val2')
    cache.put('key3', 'val3')
    assert cache.get('key3') == 'val3'
Example #43
    number_of_connections = int ( sys.argv[3] )
    size_of_cache = int ( sys.argv[4] )
else:
    print "Usage: \"python cache-routine.py server_name port_number number_of_connections size_of_cache\": Insufficient parameters"
    exit()

s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)

# for reusing the same port without waiting for TIME_WAIT to expire
s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
s.bind((server_name, port_number))

# Will listen to only one connection at a time
s.listen( number_of_connections )

LRU = LRUCache( size_of_cache )

while True:
    conn, addr = s.accept()
    data = conn.recv(1024)
    print "DEBUG: " + data

    # convert the received data into JSON
    json_data = json.loads(data)

    """
    fetch the request method
    GET denotes a GET cache request to check for hit/miss
    POST denotes a SAVE request once a miss happens
    """
    request_method = json_data["method"]
Example #44
def test_insert_one():
    cache = LRUCache(3)
    cache.put('key', 'val')
    assert cache.get('key') == 'val'
Example #45
class CoroutineProcessor(object):
    name = None
    log_rate = 1000
    log_rate_skip = 50

    def __init__(self, func, targets=None, **kwargs):
        if targets is None:
            targets = []
        elif not hasattr(targets, '__iter__'):
            targets = [targets]
        self.targets = targets
        self.func = func
        #   Raise errors by default.
        self.error = kwargs.pop('error', None)#'raise')
        self.name = (
            kwargs.get('name') or self.name or
            (
                self.func.__class__.__name__
                if hasattr(func, '__class__') else self.func.__name__
            )
        )
        # self.ids = set()
        self.ids = LRUCache(max_size=10000)
        self.idx = 0
        self.idx_skip = 0
        #   Make sure we clean up if the program exits before the finalize
        #   functions are called.
        if hasattr(self.func, 'finalize'):
            atexit.register(self.finalize)

    @coroutine
    def run(self, *args, **kwargs):
        self.targets = [ t.run(*args, **kwargs) for t in self.targets ]
        #   =============================================================
        def send(res):
            if isinstance(res, dict) or not hasattr(res, '__iter__'):
                res = [res]
            for item in res:
                ###############
                LOGGER.debug(pformat(item))
                ###############
                _id = item.get('id')
                #   ----------------------------------------
                if self.idx % self.log_rate == 0:
                    LOGGER.debug(
                        "[{0}] ({1}) {2}".format(self.idx, self.name, _id)
                    )
                #   ----------------------------------------
                self.idx += 1
                if _id:
                    exists = self.ids.get(_id)
                    # if _id in self.ids:
                    if exists:
                        LOGGER.warning(
                            "[{0}] ({1}) Document already processed!".format(
                            _id, self.name
                        ))
                    else:
                        # self.ids.add(_id)
                        self.ids[_id] = 1
                #   Stop processing this document?
                meta = item.get('__meta__', {}) or {}
                procs = set(meta.get('processors', []) or [])
                procs.add(self.name)
                meta['processors'] = list(procs)
                item['__meta__'] = meta
                if meta.get('skip'):
                    #   -----------------------------------
                    if self.idx_skip % self.log_rate_skip == 0:
                        LOGGER.info("[{0}] ({1}) Skipped ({2}). Current: {3} ".format(
                            self.idx_skip, self.name, _id, pformat(item['__meta__'])
                        ))
                    #   -----------------------------------
                    self.idx_skip += 1
                    continue
                #   Pass the item to the next stage
                #   in the pipeline.
                for target in self.targets:
                    target.send(item)
        #   =============================================================
        try:
            while True:
                doc = (yield)
                ###############
                LOGGER.debug(pformat(doc))
                ###############
                if doc is StopIteration:
                    raise StopIteration
                try:
                    res = self.func(doc, *args, **kwargs)
                except Exception as e:
                    if self.error == 'raise':
                        raise
                    else:
                        msg = """
                        Error processing document:
                            Task: {}
                            Exception: {}
                            Traceback: {}
                        """.format(self.name, e, traceback.format_exc())
                        LOGGER.error(textwrap.dedent(msg))
                    if DEBUG:
                        self.to_pickle(doc)
                else:
                    if res:
                        if res is StopIteration:
                            raise StopIteration
                        else:
                            send(res)
        except StopIteration as e:
            res = self.finalize()
            if res:
                #   Process final batch, if any.
                send(res)
            for target in self.targets:
                try:
                    target.send(StopIteration)
                except StopIteration:
                    pass
        except GeneratorExit:
            LOGGER.warning("[{0}] Quitting...".format(self.name))

    def to_pickle(self, doc):
        with open(os.path.join(self.path_pickle, self.name or 'UNK'), 'ab') as f:
            pickle.dump(doc, f)

    def get_state(self):
        try:
            self.func.get_state()
        except AttributeError as e:
            raise NotImplementedError

    def to_df(self):
        try:
            self.func.to_df()
        except AttributeError as e:
            raise NotImplementedError

    def to_csv(self, _id=None):
        try:
            self.func.to_csv(_id=_id)
        except AttributeError as e:
            raise NotImplementedError

    def to_json(self):
        try:
            self.func.to_json()
        except AttributeError as e:
            raise NotImplementedError

    def finalize(self):
        try:
            res = self.func.finalize()
        except AttributeError as e:
            ########################
            LOGGER.error(
                """
                Processor doesn't have a 'finalize' method: {}
                """.format(
                    self.func.name if hasattr(self.func, 'name')
                    else self.func.__name__
                )
            )
            ########################
            raise NotImplementedError
        except Exception as e:
            msg = """
            Error finalizing task:
                Processor: {}
                Exception: {}
                Traceback: {}
            """.format(self.name, e, traceback.format_exc())
            LOGGER.error(textwrap.dedent(msg))
        else:
            return res

    def __eq__(self, other):
        eq = (
            other and
            self.__class__ is other.__class__ and
            self.__dict__ == other.__dict__
        )
        return eq

    def __or__(self, other):
        if isinstance(other, CoroutineProcessor):
            return CoroutineProcessor(
                self.func, self.targets + [other], error=self.error
            )
        elif callable(other):
            return CoroutineProcessor(
                self.func,
                self.targets + [CoroutineProcessor(other, error=self.error)]
            )
        else:
            raise Exception(
                "{0} is not composable with {0}".format(self, other)
            )

    def __getitem__(self, item):
        return self.targets.__getitem__(item)

    def __len__(self):
        return len(self.targets)

    def __repr__(self):
        return "{0}({1})".format(
            self.__class__.__name__,
            ", ".join(repr(prs) for prs in self.targets)
        )

    def __call__(self, *args, **kwargs):
        return self.run(*args, **kwargs)
Example #46
 def __init__(self, num):
     LRUCache.__init__(self, num)
     self.lock = threading.RLock()
Example #47
 def __setitem__(self, key, obj):
     self.acquire()
     try:
         LRUCache.__setitem__(self, key, obj)
     finally:
         self.release()
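The fragments in Examples #22, #25, #36, #46 and #47 are pieces of the same pattern: a subclass that makes an existing LRUCache thread-safe by guarding every dictionary operation with a reentrant lock. Put together, and assuming the base class comes from the same lrucache module that the BlockCache example above imports (the subclass name here is hypothetical), the pattern looks roughly like this:

import threading

from lrucache import LRUCache   # assumed to provide __getitem__/__setitem__

class ThreadSafeLRUCache(LRUCache):   # hypothetical name for the combined pattern
    def __init__(self, num):
        LRUCache.__init__(self, num)
        self.lock = threading.RLock()   # reentrant, so nested calls do not deadlock

    def acquire(self):
        self.lock.acquire()

    def release(self):
        self.lock.release()

    def __getitem__(self, key):
        self.acquire()
        try:
            return LRUCache.__getitem__(self, key)
        finally:
            self.release()

    def __setitem__(self, key, obj):
        self.acquire()
        try:
            LRUCache.__setitem__(self, key, obj)
        finally:
            self.release()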