def __init__(self, block_store, gossip, cache_keep_time=1200,
             cache_purge_frequency=30, requested_keep_time=300):
    """
    :param block_store (dictionary) The block store shared with the journal
    :param gossip (gossip.Gossip) Broadcasts block and batch request to
        peers
    :param cache_keep_time (float) Time in seconds to keep values in
        TimedCaches.
    :param cache_purge_frequency (float) Time between purging the
        TimedCaches.
    :param requested_keep_time (float) Time in seconds to keep the ids of
        requested objects. WARNING this time must always be less than
        cache_keep_time or the validator can get into a state where it
        fails to make progress because it thinks it has already requested
        something that it is missing.
    """
    # NOTE: defaults were swapped from (cache_keep_time=300,
    # requested_keep_time=1200); the old values violated the invariant
    # above (requested ids outlived the caches they guard), which can
    # stall request progress.
    self.gossip = gossip
    self.batch_cache = TimedCache(cache_keep_time, cache_purge_frequency)
    self.block_cache = BlockCache(block_store, cache_keep_time,
                                  cache_purge_frequency)
    self._block_store = block_store
    # avoid throwing away the genesis block
    self.block_cache[NULL_BLOCK_IDENTIFIER] = None
    self._seen_txns = TimedCache(cache_keep_time, cache_purge_frequency)
    self._incomplete_batches = TimedCache(cache_keep_time,
                                          cache_purge_frequency)
    self._incomplete_blocks = TimedCache(cache_keep_time,
                                         cache_purge_frequency)
    self._requested = TimedCache(requested_keep_time, cache_purge_frequency)
    self._on_block_received = None
    self._on_batch_received = None
    self._has_block = None
    self.lock = RLock()
def __init__(self, block_store, gossip, cache_keep_time=1200,
             cache_purge_frequency=30, requested_keep_time=300,
             metrics_registry=None):
    """Set up the caches, callbacks, and metrics for block/batch completion.

    :param block_store (dictionary) The block store shared with the journal
    :param gossip (gossip.Gossip) Broadcasts block and batch request to
        peers
    :param cache_keep_time (float) Seconds to keep values in TimedCaches.
    :param cache_purge_frequency (float) Seconds between TimedCache purges.
    :param requested_keep_time (float) Seconds to keep the ids of requested
        objects. WARNING this time should always be less than
        cache_keep_time or the validator can get into a state where it
        fails to make progress because it thinks it has already requested
        something that it is missing.
    :param metrics_registry Optional metrics registry; when None, no-op
        counter/gauge wrappers are installed instead.
    """
    self.lock = RLock()
    self.gossip = gossip
    self._block_store = block_store

    self.batch_cache = TimedCache(cache_keep_time, cache_purge_frequency)
    self.block_cache = BlockCache(
        block_store, cache_keep_time, cache_purge_frequency)
    # avoid throwing away the genesis block
    self.block_cache[NULL_BLOCK_IDENTIFIER] = None

    self._seen_txns = TimedCache(cache_keep_time, cache_purge_frequency)
    self._incomplete_batches = TimedCache(
        cache_keep_time, cache_purge_frequency)
    self._incomplete_blocks = TimedCache(
        cache_keep_time, cache_purge_frequency)
    self._requested = TimedCache(requested_keep_time, cache_purge_frequency)

    self._on_block_received = None
    self._on_batch_received = None
    self._has_block = None

    if metrics_registry:
        # how many times an unsatisfied dependency was found
        self._unsatisfied_dependency_count = CounterWrapper(
            metrics_registry.counter(
                'completer.unsatisfied_dependency_count'))
        # current lengths of _seen_txns, _incomplete_blocks and
        # _incomplete_batches, respectively
        self._seen_txns_length = GaugeWrapper(
            metrics_registry.gauge('completer.seen_txns_length'))
        self._incomplete_blocks_length = GaugeWrapper(
            metrics_registry.gauge('completer.incomplete_blocks_length'))
        self._incomplete_batches_length = GaugeWrapper(
            metrics_registry.gauge('completer.incomplete_batches_length'))
    else:
        self._unsatisfied_dependency_count = CounterWrapper()
        self._seen_txns_length = GaugeWrapper()
        self._incomplete_blocks_length = GaugeWrapper()
        self._incomplete_batches_length = GaugeWrapper()
def __init__(self, block_store):
    """Initialize batch-tracking state backed by the given block store."""
    self._lock = RLock()
    self._block_store = block_store
    # cached batch info and invalid-batch entries both age out after
    # CACHE_KEEP_TIME seconds
    self._batch_info = TimedCache(keep_time=CACHE_KEEP_TIME)
    self._invalid = TimedCache(keep_time=CACHE_KEEP_TIME)
    self._pending = set()
    self._observers = {}
def __init__(self, block_store, cache_keep_time=600,
             cache_purge_frequency=30):
    """Initialize batch-tracking state backed by the given block store.

    :param block_store: block store shared with the journal
    :param cache_keep_time: seconds entries survive in the TimedCaches
    :param cache_purge_frequency: seconds between TimedCache purges
    """
    self._lock = RLock()
    self._block_store = block_store
    self._batch_info = TimedCache(cache_keep_time, cache_purge_frequency)
    self._invalid = TimedCache(cache_keep_time, cache_purge_frequency)
    self._pending = set()
    self._observers = {}
def __init__(self, metrics_registry=None):
    """Create the seen-batch cache and the dropped-batch counter.

    When no metrics registry is supplied, a no-op CounterWrapper is used.
    """
    self._seen_cache = TimedCache()
    self._batch_dropped_count = (
        CounterWrapper(
            metrics_registry.counter(
                'already_validated_batch_dropped_count'))
        if metrics_registry
        else CounterWrapper())
def __init__(self, completer, cache_keep_time=300,
             cache_purge_frequency=30):
    """Initialize the handler's completer reference and request cache.

    :param completer: completer consulted by this handler's other methods
    :param cache_keep_time: seconds pending requests survive in the cache
    :param cache_purge_frequency: seconds between cache purges
    """
    self._lock = RLock()
    self.completer = completer
    self.pending_requests = TimedCache(cache_keep_time,
                                       cache_purge_frequency)
def test_cache(self):
    """A missing key raises KeyError; set, len, and del behave dict-like."""
    cache = TimedCache(keep_time=1)
    with self.assertRaises(KeyError):
        cache["test"]
    cache["test"] = "value"
    self.assertEqual(len(cache), 1)
    del cache["test"]
    self.assertFalse("test" in cache)
def __init__(self, block_store, gossip, cache_purge_frequency=30):
    """
    :param block_store (dictionary) The block store shared with the journal
    :param gossip (gossip.Gossip) Broadcasts block and batch request to
        peers
    :param cache_purge_frequency (int) The time between purging the
        TimedCaches.
    """
    self.lock = RLock()
    self.gossip = gossip
    # NOTE(review): cache_purge_frequency is passed as the first positional
    # argument of TimedCache/BlockCache here, which later revisions treat
    # as keep_time — confirm this is intentional.
    self.batch_cache = TimedCache(cache_purge_frequency)
    self.block_cache = BlockCache(block_store, cache_purge_frequency)
    # avoid throwing away the genesis block
    self.block_cache[NULL_BLOCK_IDENTIFIER] = None
    self._seen_txns = TimedCache(cache_purge_frequency)
    self._incomplete_batches = TimedCache(cache_purge_frequency)
    self._incomplete_blocks = TimedCache(cache_purge_frequency)
    self._on_block_received = None
    self._on_batch_received = None
    self._cache_purge_frequency = cache_purge_frequency
    # schedule the first manual purge one interval from now
    self._purge_time = time.time() + self._cache_purge_frequency
def __init__(self, block_store, gossip):
    """
    :param block_store (dictionary) The block store shared with the journal
    :param gossip (gossip.Gossip) Broadcasts block and batch request to
        peers
    """
    self.gossip = gossip
    self.block_cache = BlockCache(block_store)
    self.batch_cache = TimedCache()
    # avoid throwing away the genesis block
    self.block_cache[NULL_BLOCK_IDENTIFIER] = None
    self._on_block_received = None
    self._on_batch_received = None
def handle(self, connection_id, message_content):
    """Drop duplicate or structurally invalid blocks; pass valid new ones.

    :param connection_id: id of the connection the message arrived on
        (unused here)
    :param message_content: (block, _) tuple; only the block is inspected
    :return HandlerResult: DROP for duplicates and invalid blocks,
        PASS otherwise
    """
    block, _ = message_content

    if block.header_signature in self._seen_cache:
        self.block_dropped_count.inc()
        return HandlerResult(status=HandlerStatus.DROP)

    if not is_valid_block(block):
        LOGGER.debug("requested block's signature is invalid: %s",
                     block.header_signature)
        return HandlerResult(status=HandlerStatus.DROP)

    # Record this block id as seen. The previous code replaced the whole
    # cache with a fresh TimedCache here, which wiped every previously
    # seen id and made the duplicate check above never fire.
    self._seen_cache[block.header_signature] = None
    return HandlerResult(status=HandlerStatus.PASS)
def test_access_update(self):
    """Reading a key refreshes its timestamp so it survives a purge."""
    cache = TimedCache(keep_time=1, purge_frequency=0)
    cache["test"] = "value"
    cache["test2"] = "value2"
    self.assertEqual(len(cache), 2)
    cache["test"] = "value"
    # backdate the entry past its keep_time, then read it so its
    # timestamp is refreshed
    cache.cache["test"].timestamp = cache.cache["test"].timestamp - 2
    cache["test"]
    # writing a value triggers the purge pass
    cache["test2"] = "value2"
    self.assertEqual(len(cache), 2)
    self.assertTrue("test" in cache)
    self.assertTrue("test2" in cache)
def test_access_update(self):
    """Reading a key refreshes its timestamp so it survives purge_expired."""
    cache = TimedCache(keep_time=1)
    cache["test"] = "value"
    cache["test2"] = "value2"
    self.assertEqual(len(cache), 2)
    cache["test"] = "value"
    # backdate the entry past its keep_time, then read it so its
    # timestamp is refreshed
    cache.cache["test"].timestamp = cache.cache["test"].timestamp - 2
    cache["test"]
    cache.purge_expired()
    self.assertEqual(len(cache), 2)
    self.assertTrue("test" in cache)
    self.assertTrue("test2" in cache)
def handle(self, connection_id, message_content):
    """Parse a gossip block response; drop duplicates and invalid blocks.

    :param connection_id: id of the connection the message arrived on
        (unused here)
    :param message_content: serialized GossipBlockResponse bytes
    :return HandlerResult: DROP for duplicates and invalid blocks,
        PASS otherwise
    """
    block_response_message = GossipBlockResponse()
    block_response_message.ParseFromString(message_content)

    block = Block()
    block.ParseFromString(block_response_message.content)

    if block.header_signature in self._seen_cache:
        self.block_dropped_count.inc()
        return HandlerResult(status=HandlerStatus.DROP)

    if not is_valid_block(block):
        LOGGER.debug("requested block's signature is invalid: %s",
                     block.header_signature)
        return HandlerResult(status=HandlerStatus.DROP)

    # Record this block id as seen. The previous code replaced the whole
    # cache with a fresh TimedCache here, which wiped every previously
    # seen id and made the duplicate check above never fire.
    self._seen_cache[block.header_signature] = None
    return HandlerResult(status=HandlerStatus.PASS)
def test_evict_expired(self):
    """Entries older than keep_time are removed by purge_expired()."""
    # Backdate the entry's timestamp rather than sleeping, so the test
    # does not have to wait for real expiry.
    cache = TimedCache(keep_time=1)
    cache["test"] = "value"
    cache["test2"] = "value2"
    self.assertEqual(len(cache), 2)
    # expire the first entry and purge; only it should be evicted
    cache.cache["test"].timestamp = cache.cache["test"].timestamp - 2
    cache.purge_expired()
    self.assertEqual(len(cache), 1)
    self.assertFalse("test" in cache)
    self.assertTrue("test2" in cache)
def __init__(self, network):
    """Store the network handle and create the challenge-payload cache."""
    self._network = network
    # cached challenge payloads age out after AUTHORIZATION_CACHE_TIMEOUT
    # seconds
    self._challenge_payload_cache = TimedCache(
        keep_time=AUTHORIZATION_CACHE_TIMEOUT)
def __init__(self, network, allowed_frequency=10):
    """Store the network handle and per-peer last-message cache.

    :param network: network interface this handler operates on
    :param allowed_frequency: presumably the minimum spacing (seconds)
        between repeated messages — confirm against handle()
    """
    self._network = network
    self._last_message = TimedCache()
    self._allowed_frequency = allowed_frequency
def __init__(self, responder, gossip):
    """Store the responder/gossip handles and the seen-request cache."""
    self._gossip = gossip
    self._responder = responder
    # ids of requests already handled; entries age out after
    # CACHE_KEEP_TIME seconds
    self._seen_requests = TimedCache(CACHE_KEEP_TIME)
def __init__(self, completer, cache_purge_frequency=30):
    """Initialize the completer reference and the pending-request cache.

    :param completer: completer consulted by this handler's other methods
    :param cache_purge_frequency: seconds between manual cache purges
    """
    self._lock = RLock()
    self.completer = completer
    # NOTE(review): cache_purge_frequency is passed as TimedCache's first
    # positional argument, which later revisions treat as keep_time —
    # confirm this is intentional.
    self.pending_requests = TimedCache(cache_purge_frequency)
    self._cache_purge_frequency = cache_purge_frequency
    # wall-clock time of the next scheduled purge
    self._purge_time = time.time() + self._cache_purge_frequency
def __init__(self, block_cache, transaction_committed,
             get_committed_batch_by_id, get_committed_batch_by_txn_id,
             get_chain_head, gossip, cache_keep_time=1200,
             cache_purge_frequency=30, requested_keep_time=300):
    """
    :param block_cache (dictionary) The block cache to use for getting
        and storing blocks
    :param transaction_committed (fn(transaction_id) -> bool) A function
        to determine if a transaction is committed.
    :param get_committed_batch_by_id (fn(batch_id) -> Batch) A function
        for retrieving a committed batch from a committed batch id.
    :param get_committed_batch_by_txn_id (fn(transaction_id) -> Batch) A
        function for retrieving a committed batch from a committed
        transaction id.
    :param get_chain_head (fn() -> Block) A function for getting the
        current chain head.
    :param gossip (gossip.Gossip) Broadcasts block and batch request to
        peers
    :param cache_keep_time (float) Time in seconds to keep values in
        TimedCaches.
    :param cache_purge_frequency (float) Time between purging the
        TimedCaches.
    :param requested_keep_time (float) Time in seconds to keep the ids of
        requested objects. WARNING this time should always be less than
        cache_keep_time or the validator can get into a state where it
        fails to make progress because it thinks it has already requested
        something that it is missing.
    """
    self._gossip = gossip
    self._batch_cache = TimedCache(cache_keep_time, cache_purge_frequency)
    self._block_cache = block_cache
    self._transaction_committed = transaction_committed
    self._get_committed_batch_by_id = get_committed_batch_by_id
    self._get_committed_batch_by_txn_id = get_committed_batch_by_txn_id
    self._get_chain_head = get_chain_head
    # avoid throwing away the genesis block
    self._block_cache[NULL_BLOCK_IDENTIFIER] = None
    self._seen_txns = TimedCache(cache_keep_time, cache_purge_frequency)
    self._incomplete_batches = TimedCache(cache_keep_time,
                                          cache_purge_frequency)
    self._incomplete_blocks = TimedCache(cache_keep_time,
                                         cache_purge_frequency)
    self._requested = TimedCache(requested_keep_time, cache_purge_frequency)
    self._on_block_received = None
    self._on_batch_received = None
    self._has_block = None
    self.lock = RLock()
    # Tracks how many times an unsatisfied dependency is found
    self._unsatisfied_dependency_count = COLLECTOR.counter(
        'unsatisfied_dependency_count', instance=self)
    # Tracks the length of the completer's _seen_txns
    self._seen_txns_length = COLLECTOR.gauge(
        'seen_txns_length', instance=self)
    self._seen_txns_length.set_value(0)
    # Tracks the length of the completer's _incomplete_blocks
    self._incomplete_blocks_length = COLLECTOR.gauge(
        'incomplete_blocks_length', instance=self)
    self._incomplete_blocks_length.set_value(0)
    # Tracks the length of the completer's _incomplete_batches
    self._incomplete_batches_length = COLLECTOR.gauge(
        'incomplete_batches_length', instance=self)
    self._incomplete_batches_length.set_value(0)
def __init__(self):
    # Remember recently handled ids so duplicates can be detected;
    # entries age out of the TimedCache automatically.
    self._seen_cache = TimedCache()
def __init__(self):
    # ids already validated; entries age out of the TimedCache
    self._seen_cache = TimedCache()
    # counts batches dropped because they were already validated
    self._batch_dropped_count = COLLECTOR.counter(
        'already_validated_batch_dropped_count', instance=self)
def __init__(self, block_store):
    """Create the batch/block caches and unset the received callbacks."""
    self.block_cache = BlockCache(block_store)
    self.batch_cache = TimedCache()
    self._on_block_received = None
    self._on_batch_received = None