class Worker(object):
    """Pulls tasks off a Redis-backed queue and runs them in subprocesses."""

    def __init__(self, queue_name, concurrency=None):
        """Connect to Redis and set up the task queue, logger and concurrency.

        Arguments:
            queue_name: Name of the Redis queue to consume from.
            concurrency: Max subprocesses spawned per round; defaults to
                DEFAULT_CONCURRENCY when None.
        """
        self.redis = redis.Redis()
        self.queue = SimpleQueue(self.redis, queue_name)
        self.concurrency = concurrency if concurrency is not None else DEFAULT_CONCURRENCY
        self.logger = multiprocessing.get_logger()

    def process_one_task(self):
        """Dequeue and run a single task, logging the outcome."""
        proc = os.getpid()
        if self.queue.get_length() == 0:
            # Nothing queued; return quietly.
            return
        try:
            task = self.queue.dequeue()
            task.process_task()
        except Exception as e:
            # Keep the worker alive on task failure; record the error.
            self.logger.error(e)
        else:
            # Fix: report success only when the task did not raise.
            self.logger.info(f'Process {proc} completed successfully')

    def process_multiple_tasks(self):
        """Spawn up to `concurrency` processes per round until TERMINATE is set.

        NOTE(review): TERMINATE is a module-level flag presumably set by a
        signal handler — confirm against the rest of the file.
        """
        global TERMINATE
        while True:
            # Fix: use a fresh list every round.  The original accumulated
            # every Process ever started (unbounded growth) and re-joined
            # already-finished processes on each iteration.
            processes = []
            for _ in range(self.concurrency):
                if self.queue.get_length() > 0:
                    process = multiprocessing.Process(
                        target=self.process_one_task)
                    processes.append(process)
                    process.start()
            for process in processes:
                process.join()
            if TERMINATE:
                print('Exiting Gracefully')
                break
def __init__(self, scale, pre=10):
    """Hold a queue of times drawn from an exponential distribution.

    Arguments:
        - scale: Scale parameter of the exponential distribution. None or 0
          disables the distribution (huge times are produced instead).
        - pre: Predefined size of the queue. Default=10
    """
    self.scale = scale
    self.pre = pre
    self.queue = SimpleQueue(maxsize=pre + 1)
    self.v_put = vectorize(self.queue.put_nowait)
    # The exponential distribution is not defined for a rate of 0, so for a
    # scale of None or 0 the no-mutation draw function (huge times) is used.
    if self.scale not in (None, 0):
        self.draw_fct = random.exponential
    else:
        self.scale = 0
        self.draw_fct = no_mut
    # Pre-populate the queue with drawn values.
    self.fillup()
    # Rebuilt after unpickling too — vectorize objects cannot be pickled
    # (see the __setstate__ method below).
    self.v_get = vectorize(self.get_val)
def __setstate__(self, d):
    """Restore pickled state, rebuilding the unpicklable event queue."""
    if 'simple_queue_list' in d:
        # __getstate__ stored the queue contents as a plain list in
        # dequeue (FIFO) order; re-enqueue them front to back.
        items = d.pop('simple_queue_list')
        queue = SimpleQueue(maxsize=d['_dist_params']['size'])
        for item in items:
            queue.put_nowait(item)
        d['queue'] = queue
    self.__dict__.update(d)
    # Recreate the draw function — callables are not carried in the pickle.
    self._init_draw_fct(**self._dist_params)
def __setstate__(self, d):
    """Restore pickled state, rebuilding the queue and vectorized helpers.

    Bug fix: ``simple_queue_list`` is saved in dequeue (FIFO) order by
    ``__getstate__``, but this method refilled the queue by popping from
    the *end* of the list, reversing the event order on every unpickle
    (the newer variant compensates with ``[::-1]``).  Items are now
    re-enqueued front to back.
    """
    if 'simple_queue_list' in d:
        saved = d.pop('simple_queue_list')
        queue = SimpleQueue(maxsize=d['pre'] + 1)
        for item in saved:  # preserve the original FIFO order
            queue.put_nowait(item)
        d['queue'] = queue
    self.__dict__.update(d)
    # vectorize objects cannot be pickled, so rebuild them here.
    self.__dict__['v_put'] = vectorize(self.queue.put_nowait)
    self.__dict__['v_get'] = vectorize(self.get_val)
    if self.scale is None:
        # A scale of None means "no mutation": switch to the zero-rate
        # configuration with a fresh queue refilled by the no-op draw
        # function.
        self.scale = 0
        self.queue = SimpleQueue(maxsize=self.pre + 1)
        self.v_put = vectorize(
            self.queue.put_nowait
        )  # specific to the new queue, hence re-bound here
        self.draw_fct = no_mut
        self.fillup()
def __getstate__(self):
    """Return a picklable state dict, replacing the queue with a list.

    The queue object itself cannot be pickled, so it is drained into
    ``simple_queue_list`` (in dequeue order).  The live object's queue is
    then rebuilt so that pickling leaves ``self`` fully usable.
    """
    state = dict(self.__dict__)
    old_queue = state.pop('queue')
    drained = []
    try:
        while True:
            drained.append(old_queue.get_nowait())
    except Empty:
        pass
    state['simple_queue_list'] = drained
    # Rebuild the queue for the current object — draining emptied it.
    self._init_draw_fct(**self._dist_params)
    self.queue = SimpleQueue(maxsize=self._dist_params['size'])
    for item in drained:
        self.queue.put_nowait(item)
    return state
def __init__(self, queue_name, concurrency=None):
    """Set up the Redis connection, task queue, logger and concurrency.

    Arguments:
        queue_name: Name of the Redis queue to consume from.
        concurrency: Worker process count; DEFAULT_CONCURRENCY when None.
    """
    self.redis = redis.Redis()
    self.queue = SimpleQueue(self.redis, queue_name)
    # Fall back to the module-wide default only when nothing was requested.
    if concurrency is None:
        concurrency = DEFAULT_CONCURRENCY
    self.concurrency = concurrency
    self.logger = multiprocessing.get_logger()
def __init__(self, distribution_type='exp', **kwargs):
    """Configure the draw function, then build and pre-fill the queue.

    Arguments:
        distribution_type: Identifier of the distribution to draw from
            (default 'exp'); passed through state to _init_draw_fct.
        **kwargs: Distribution parameters forwarded to _init_draw_fct.
    """
    self.distribution_type = distribution_type
    self._init_draw_fct(**kwargs)
    # _init_draw_fct populates _dist_params, including the queue size.
    size = self._dist_params['size']
    self.queue = SimpleQueue(maxsize=size)
    # Populate the freshly created queue with drawn values.
    self.fillup()