def test_queue_random(self):
    '''It should add n and return n items randomly'''
    self.maxDiff = None
    q = BigDiskQueue(memory_size=10)
    n = 100
    num_puts = 0
    num_gets = 0

    q.put(num_puts)
    num_puts += 1

    rand = random.Random()
    rand.seed(0)
    l = []

    # Randomly interleave puts and gets until all n items have been retrieved.
    while num_gets < n:
        if num_puts < n and rand.randint(0, 1):
            q.put(num_puts)
            num_puts += 1

        if num_puts > num_gets and rand.random() < 0.3:
            l.append(q.get(timeout=1))
            num_gets += 1

    self.assertEqual(n, len(l))
    self.assertEqual(list(range(0, n)), list(sorted(l)))
def test_queue(self):
    '''It should add n and return n items'''
    q = BigDiskQueue(memory_size=10)
    n = 100

    for i in range(n):
        q.put(i)

    count = 0

    for i in range(n):
        item = q.get(timeout=1)  # @UnusedVariable
        count += 1

    self.assertEqual(count, n)
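# The tests above exercise only put(), get(timeout=) and the memory_size
# keyword. A minimal sketch of that same API used across threads is shown
# below; it is illustrative only -- the producer function and thread wiring
# are assumptions and not part of the test suite, and memory_size=10 simply
# mirrors the value used in the tests.
#
#     import threading
#
#     queue = BigDiskQueue(memory_size=10)
#
#     def producer():
#         # Items beyond memory_size are presumably spilled to disk.
#         for i in range(100):
#             queue.put(i)
#
#     thread = threading.Thread(target=producer)
#     thread.start()
#     items = [queue.get(timeout=1) for _ in range(100)]
#     thread.join()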
class Publisher(EventReactorMixin):
    '''Publishes values typically created by the client.'''

    REPUBLISH_CHECK_INTERVAL = 3600

    def __init__(self, event_reactor, dht_network, kvp_table, fn_task_slot):
        '''
        :type event_reactor: :class:`.EventReactor`
        :type dht_network: :class:`.DHTNetwork`
        :type kvp_table: :class:`.KVPTable`
        :param fn_task_slot: A slot that represents uploads.
        :type fn_task_slot: :class:`FnTaskSlot`
        '''
        EventReactorMixin.__init__(self, event_reactor)
        self._dht_network = dht_network
        self._kvp_table = kvp_table
        self._event_scheduler = EventScheduler(event_reactor)
        self._timer_id = EventID(self, 'Publish timer')
        self._schedule_id = EventID(self, 'Publish schedule')
        self._scheduled_kvpids = set()
        self._schedule_lock = threading.Lock()
        self._scan_event = threading.Event()
        self._publish_queue = BigDiskQueue()
        self._fn_task_slot = fn_task_slot

        self._event_reactor.register_handler(self._schedule_id,
            self._publish_cb)
        self._event_reactor.register_handler(self._timer_id, self._timer_cb)
        self._kvp_table.value_changed_observer.register(self._table_change_cb)
        self._event_scheduler.add_periodic(Publisher.REPUBLISH_CHECK_INTERVAL,
            self._timer_cb)
        self._scan_loop()
        self._publish_loop()

    @asynchronous(name='Publish loop')
    def _publish_loop(self):
        '''Dequeue key-value pair IDs and submit uploads to the task slot.'''
        while True:
            kvpid = self._publish_queue.get()

            _logger.debug('Publishing %s', kvpid)
            self._fn_task_slot.add(self._dht_network.store_value, kvpid.key,
                kvpid.index)

    def _schedule_for_publish(self, abs_time, kvpid):
        '''Schedule a publish event at the given absolute time, once per ID.'''
        with self._schedule_lock:
            if kvpid in self._scheduled_kvpids:
                return

            self._scheduled_kvpids.add(kvpid)
            self._event_scheduler.add_absolute(abs_time, self._schedule_id,
                kvpid)

    def _publish_cb(self, event_id, kvpid):
        '''Enqueue the scheduled key-value pair ID for the publish loop.'''
        self._publish_queue.put(kvpid)

    def _table_change_cb(self, *args):
        '''Wake the scan loop when the table changes.'''
        self._scan_event.set()

    def _timer_cb(self, event_id):
        '''Wake the scan loop on the periodic republish check.'''
        self._scan_event.set()

    @asynchronous(name='Publish scan loop')
    def _scan_loop(self):
        '''Scan the table for original records due for republishing.'''
        while True:
            self._scan_event.wait()

            _logger.debug('Scanning database for publishing')
            self._scan_event.clear()

            current_time = time.time()

            for kvpid in self._kvp_table.keys():
                kvp_record = self._kvp_table.record(kvpid)

                if not kvp_record.is_original:
                    continue

                if kvp_record.last_update == 0:
                    republish_time = current_time
                else:
                    republish_time = \
                        kvp_record.last_update + DHTNetwork.TIME_REPUBLISH

                next_interval = Publisher.REPUBLISH_CHECK_INTERVAL

                if republish_time - next_interval < current_time:
                    self._schedule_for_publish(republish_time, kvpid)
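# A hedged wiring sketch for Publisher. The collaborators are taken from the
# constructor signature above; how EventReactor, DHTNetwork, KVPTable and
# FnTaskSlot are actually constructed lives elsewhere in the project, so the
# names below are placeholders.
#
#     publisher = Publisher(event_reactor, dht_network, kvp_table,
#         fn_task_slot)
#
#     # Any change to the table fires value_changed_observer, which wakes the
#     # scan loop. Original records whose republish time falls within the next
#     # REPUBLISH_CHECK_INTERVAL are scheduled, and the publish loop then
#     # submits them via fn_task_slot.add(dht_network.store_value, key, index).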