def _process_queue(self):
    """Drain the work queue, feeding every available item to the mail parser.

    Blocks up to 5 seconds waiting for each next item; a miss is taken to
    mean the queue is empty and the loop ends.
    """
    while True:
        item = operations.retrieve_work_item(max_wait_time=5)
        if not item:
            break
        mail_parser.handle_work_item(self.processor, item)
        operations.close_work_item(item, True)
def _process_queue(self):
    """Drain the work queue: process every available item, then stop.

    Each item is handed to the mail parser and then closed/removed from
    the queue.  Blocks up to 3 seconds waiting for the next item; a miss
    means the queue is empty, which is logged before the loop ends.
    """
    while True:
        work_item = operations.retrieve_work_item(max_wait_time=3)
        if work_item:
            mail_parser.handle_work_item(self.processor, work_item)
            # Keyword argument instead of a bare positional True, for
            # readability and consistency with other call sites that use
            # close_work_item(..., delete_from_queue=True).
            operations.close_work_item(work_item, delete_from_queue=True)
        else:
            logging.info('no more work items')
            break
def _validate_consumer(self, asset, content):
    """Assert that exactly one queued work item matches *asset*/*content*.

    Pulls one item off the queue, checks its asset id and on-disk payload,
    deletes it, and then verifies the queue is empty.  Any unexpected
    leftover item is dumped to stdout before the final assertion fails.
    """
    work_item = operations.retrieve_work_item(max_wait_time=30)
    # assertTrue replaces the assert_ alias, which is deprecated and
    # removed in Python 3.12.
    self.assertTrue(work_item is not None)
    self.assertTrue(int(work_item['Asset-ID']) == asset.pk)
    # Context manager fixes the original's leaked file handle
    # (open(...).read() with no close).
    with open(work_item['Local-Path']) as payload:
        self.assertTrue(payload.read() == content)
    operations.close_work_item(work_item=work_item, delete_from_queue=True)
    work_item = operations.retrieve_work_item(max_wait_time=30)
    # Debug dump of any stray item; items()/print() replace the
    # Python-2-only iteritems()/print-statement forms.
    if work_item:
        for pair in work_item.items():
            print(' %s = %r' % pair)
    self.assertTrue(work_item is None)
def _process_queue(self):
    """Process queued work items until the queue is exhausted.

    Each retrieved item goes to the mail parser and is then closed and
    removed from the queue.  Waits up to 3 seconds per retrieval; an
    empty result ends the loop after an informational log line.

    NOTE(review): this definition appears byte-identical to another
    _process_queue in this file — presumably a copy in a different
    class; consider sharing one implementation.
    """
    while True:
        work_item = operations.retrieve_work_item(max_wait_time=3)
        if work_item:
            mail_parser.handle_work_item(self.processor, work_item)
            # Spell out the boolean flag; matches the keyword style used
            # by close_work_item callers elsewhere in this file.
            operations.close_work_item(work_item, delete_from_queue=True)
        else:
            logging.info('no more work items')
            break
def maybe_get_next(options):
    """Return the next job from the queue, or None when saturated.

    When ``options.max_concurrency`` is below 1 (presumably a counter of
    remaining job slots — confirm against the caller), sleep briefly and
    return None instead of fetching work.  Otherwise request another work
    item, honoring the shutdown interrupt and skipping the asset fetch.
    """
    saturated = options.max_concurrency < 1
    if saturated:
        time.sleep(2)
        return None
    return operations.retrieve_work_item(
        interrupt_func=must_shut_down,
        auto_get_asset=False,
    )