def _get_internal_queue(self, *args, **kwargs):
    if not self._internal_q:
        self._internal_q = TrQueue()
    if not self._reader_thread:
        self._reader_thread = Thread(target=self._reader_daemon)
        self._reader_thread.daemon = True
        self._reader_thread.start()
    obj = self._internal_q.get(*args, **kwargs)
    # deserialize
    return pickle.loads(obj)

def __init__(self, session, wait_period, worker=None, task=None, offline_log_filename=None):
    super(BackgroundLogService, self).__init__(task=task, wait_period=wait_period)
    self._worker = worker
    self._task_id = task.id
    self._queue = TrQueue()
    self._flush = TrEvent()
    self._last_event = None
    self._offline_log_filename = offline_log_filename
    self.session = session
    self.counter = 1
    self._last_timestamp = 0

def __init__(self, task, async_enable, metrics, flush_frequency, flush_threshold):
    super(BackgroundReportService, self).__init__(
        task=task, wait_period=flush_frequency)
    self._flush_threshold = flush_threshold
    self._exit_event = TrEvent()
    self._queue = TrQueue()
    self._queue_size = 0
    self._res_waiting = Semaphore()
    self._metrics = metrics
    self._storage_uri = None
    self._async_enable = async_enable

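# Illustrative sketch only (an assumption, not the original implementation): how a
# flush_frequency / flush_threshold pair like the one stored above is typically used.
# flush_frequency is treated as a period in seconds (mirroring its use as wait_period),
# while flush_threshold triggers an early flush once enough items are queued. The class
# name ThresholdFlusher and its methods are invented for this example.
import time
from queue import Queue, Empty


class ThresholdFlusher(object):
    def __init__(self, flush_frequency, flush_threshold):
        self._queue = Queue()
        self._queue_size = 0
        self._flush_frequency = flush_frequency
        self._flush_threshold = flush_threshold
        self._last_flush = time.time()

    def add(self, item):
        self._queue.put(item)
        self._queue_size += 1
        # flush early once the backlog reaches the threshold
        if self._queue_size >= self._flush_threshold:
            self.flush()

    def tick(self):
        # called periodically by a background loop; flush if the period elapsed
        if time.time() - self._last_flush >= self._flush_frequency:
            self.flush()

    def flush(self):
        batch = []
        while True:
            try:
                batch.append(self._queue.get_nowait())
            except Empty:
                break
        self._queue_size = 0
        self._last_flush = time.time()
        self._send(batch)

    def _send(self, batch):
        # placeholder: a real implementation would report/upload the batch
        pass
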
def _init_reader_thread(self):
    if not self._internal_q:
        self._internal_q = TrQueue()
    if not self._reader_thread or not self._reader_thread.is_alive():
        # read before we start the thread
        self._reader_thread = Thread(target=self._reader_daemon)
        self._reader_thread.daemon = True
        self._reader_thread.start()
        # if we have waiting results
        # wait until thread is up and pushed some results
        while not self._reader_thread_started:
            sleep(0.2)
        # just in case make sure we pulled some stuff if we had any
        # todo: wait until a queue is not empty, but for some reason that might fail
        sleep(1.0)

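# Illustrative, self-contained sketch of the lazy reader-thread pattern used by
# _get_internal_queue() / _init_reader_thread() above. This is an assumption for
# clarity, not the original code: PickledQueue, _source_q, and _reader_daemon here
# are names invented for the example; the real _reader_daemon is not shown in this
# section. A daemon thread forwards pickled payloads from a source queue into a
# local buffer, and get() deserializes them.
import pickle
from queue import Queue
from threading import Thread
from time import sleep


class PickledQueue(object):
    def __init__(self, source_queue):
        self._source_q = source_queue      # queue holding pickled payloads
        self._internal_q = None            # lazily created local buffer
        self._reader_thread = None
        self._reader_thread_started = False

    def get(self, *args, **kwargs):
        self._init_reader_thread()
        obj = self._internal_q.get(*args, **kwargs)
        # deserialize
        return pickle.loads(obj)

    def _init_reader_thread(self):
        if not self._internal_q:
            self._internal_q = Queue()
        if not self._reader_thread or not self._reader_thread.is_alive():
            # daemon thread: it will not keep the process alive on exit
            self._reader_thread = Thread(target=self._reader_daemon)
            self._reader_thread.daemon = True
            self._reader_thread.start()
            # wait until the daemon signals it is up
            while not self._reader_thread_started:
                sleep(0.2)

    def _reader_daemon(self):
        self._reader_thread_started = True
        while True:
            obj = self._source_q.get()
            if obj is None:
                # assumed sentinel: stop forwarding and exit the daemon
                break
            self._internal_q.put(obj)

# Example producer side for the sketch: source_q.put(pickle.dumps(payload))
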
def __init__(self):
    self._queue = TrQueue()
    self._thread = Thread(target=self._worker)
    self._thread.daemon = True
    self._thread.start()

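# Illustrative sketch (an assumption, not the original implementation): a minimal
# background worker matching the constructor above - a daemon thread that drains a
# queue and invokes queued callables. The name BackgroundWorker, the post() helper,
# and the (func, args) item format are invented for this example only.
from queue import Queue
from threading import Thread


class BackgroundWorker(object):
    def __init__(self):
        self._queue = Queue()
        self._thread = Thread(target=self._worker)
        # daemon thread: it will not block interpreter shutdown
        self._thread.daemon = True
        self._thread.start()

    def post(self, func, *args):
        # enqueue a callable to be executed on the worker thread
        self._queue.put((func, args))

    def _worker(self):
        while True:
            item = self._queue.get()
            if item is None:
                # assumed sentinel for shutdown
                break
            func, args = item
            try:
                func(*args)
            except Exception:
                # keep the worker alive even if a queued call fails
                pass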