def run(self):
    """Run all pending jobs through a JobQueue.

    Returns True when there was nothing to do or every job completed;
    otherwise stores the leftover work in ``self.wqueue`` and returns False.
    """
    debug('RUN')
    # Fix for some broken saved jobs: entries may have been persisted as
    # their string repr rather than as dicts, so rebuild them.
    # HACK: eval() on stored data is dangerous if the job store can be
    # tampered with -- consider ast.literal_eval if jobs are plain literals.
    for i, j in enumerate(self.jobs):
        if isinstance(j, (str, np.string_)):  # isinstance, not type() ==
            self.jobs[i] = eval(j)

    # Check to see if anything is left to do (0 = not started, 'Q' = queued).
    work_to_do = any(j['status'] == 0 or j['status'] == 'Q'
                     for j in self.jobs)
    debug("WTD=%s" % work_to_do)
    if not work_to_do:
        return True

    # There is work to be done. Create a JobQueue and send stuff to it.
    # NOTE(review): only status-0 jobs are added here although 'Q' jobs
    # counted as work to do above -- confirm that is intended.
    jobq = JobQueue(self, limit=self.qlimit)
    for j in self.jobs:
        if j['status'] == 0:
            debug("adding job %s" % j['num'])
            jobq.add(j)

    # Tell the JobQueue to start running jobs; join() returns when the
    # queue has run everything, yielding the list of unfinished jobs.
    jobq.start()
    res = jobq.join()
    if res == []:
        return True
    self.wqueue = res
    return False
def test_valid(self):
    """A queue rebuilt over an uncapped collection is reported invalid."""
    queue = JobQueue(self.db)
    queue.db['jobqueue'].drop()
    queue._create(capped=False)
    self.assertFalse(queue.valid())
    # Re-creating over an existing collection must fail.
    with self.assertRaises(Exception):
        queue._create()
    queue.clear_queue()
def test_next(self):
    """next() raises on an empty queue and pops the one published job."""
    jq = JobQueue(self.db, collection_name=K.collection)
    # Empty queue: fetching the next job must raise.
    self.assertRaises(Exception, jq.next)
    job = {'message': 'hello world!'}
    jq.pub(job)
    row = jq.next()
    # assertEquals is a deprecated alias (removed in Python 3.12).
    self.assertEqual(row['data']['message'], 'hello world!')
    self.assertEqual(jq.queue_count(), 0)
def test_publish(self):
    """pub() enqueues a job; publishing with no backing queue raises."""
    jq = JobQueue(self.db, collection_name=K.collection)
    job = {'message': 'hello world!'}
    jq.pub(job)
    # assertEquals is a deprecated alias (removed in Python 3.12).
    self.assertEqual(jq.queue_count(), 1)
    jq.clear_queue()
    jq.q = None  # erase the queue so the next pub() has nothing to write to
    self.assertRaises(Exception, jq.pub, job)
def run(self):
    """Queue every pending job (status 0 or 'Q') and run the batch.

    Returns True when the queue drains with nothing left over.
    """
    jobq = JobQueue(self, limit=10, polltime=1)
    pending = [job for job in self.jobs
               if job['status'] == 0 or job['status'] == 'Q']
    for job in pending:
        debug("adding job %s" % job['num'])
        jobq.add(job)
    jobq.start()
    # join() yields the list of unfinished jobs; empty means success.
    return jobq.join() == []
def test_next(self):
    """next() raises on an empty queue and returns the published payload."""
    jq = JobQueue(self.db)
    # Empty queue: fetching the next job must raise.
    self.assertRaises(Exception, jq.next)
    job = {'message': 'hello world!'}
    jq.pub(job)
    row = jq.next()
    # assertEquals is a deprecated alias (removed in Python 3.12).
    self.assertEqual(row['data']['message'], 'hello world!')
    jq.clear_queue()
def test_jobqueue():
    """End-to-end: 1024 items pushed through an 8-worker JobQueue all arrive."""
    # Signal handlers are called with (signum, frame); the original zero-arg
    # lambda would raise TypeError instead of failing the test on timeout.
    signal.signal(signal.SIGALRM, lambda signum, frame: pytest.fail())
    signal.alarm(5)  # watchdog: fail if the queue does not drain in 5 s
    with pytest.raises(ValueError):
        JobQueue(0)
    source = list(range(1024))
    queue = Queue()
    jq = JobQueue(8)
    for n in source:
        jq.put(store, (n, queue))
    # Poll until every queued task has been consumed.
    while not jq.queue.empty():
        time.sleep(.1)
    signal.alarm(0)  # cancel the watchdog so it cannot fire in a later test
    assert (set(source) == set(queue.queue))
def __init__(self, id, logger, conf):
    """Wire up the job/done queues and the scheduler, then expose its state."""
    self.id = id
    self.logger = logger
    self.conf = conf

    # Pending and completed work live in two separate queues.
    self.job_queue = JobQueue(logger)
    self.done_queue = JobQueue(logger)

    # Fair-fit is the active policy; first-fit kept for reference.
    # self.scheduler = FirstFitScheduler(logger, conf)
    self.scheduler = FairFitScheduler(logger, conf)
    self.scheduler.attach_job_queue(self.job_queue)
    self.scheduler.attach_done_queue(self.done_queue)

    # Mirror the scheduler's collections on this object for direct access.
    self.users = self.scheduler.users
    self.resources = self.scheduler.resources
    self.servers = self.scheduler.servers
    self.peers = self.scheduler.peers
def test_iter(self):
    """Iterating the queue yields every published job and marks it WORKING."""
    NUM_JOBS = 3
    num_jobs_queued = [NUM_JOBS]

    def iterator_wait():
        # Count down once per wait; tell the iterator to stop after all
        # published jobs have been seen.
        num_jobs_queued[0] -= 1
        return num_jobs_queued[0] < 0

    jq = JobQueue(self.db, iterator_wait=iterator_wait,
                  collection_name=K.collection)
    for ii in range(1, NUM_JOBS + 1):
        job = {'message': 'I am # ' + str(ii)}
        jq.pub(job)
    num_jobs_done = 0
    for job in jq:
        num_jobs_done += 1
        record = jq.q.find_one({'_id': job['_id']})
        # assertEquals is a deprecated alias (removed in Python 3.12).
        self.assertEqual(record['status'], jq.WORKING)
    self.assertEqual(num_jobs_done, NUM_JOBS)
def test_valid(self):
    """A queue rebuilt over an uncapped collection is reported invalid."""
    queue = JobQueue(self.db, collection_name=K.collection)
    queue.db[K.collection].drop()
    queue._create(capped=False)
    self.assertFalse(queue.valid())
    # Re-creating over an existing collection must fail.
    with self.assertRaises(Exception):
        queue._create()
def test_init(self):
    """A fresh queue is valid, and creating it a second time raises."""
    queue = JobQueue(self.db, collection_name=K.collection)
    self.assertTrue(queue.valid())
    with self.assertRaises(Exception):
        queue._create()
# Optional debug logging for the jobs module.
if config.get('debug'):
    jobs.logger.addHandler(logging.StreamHandler())
    jobs.logger.setLevel(logging.DEBUG)

# --- PostgreSQL settings ---------------------------------------------------
postgresql = config.get('postgresql', {})
threads = postgresql.get('threads', 1)
pg_uri = postgresql.get('uri')

# --- Kafka settings --------------------------------------------------------
kafka = config.get('kafka', {})
topics = kafka.pop('topics', '')
if isinstance(topics, str):
    topics = (topics, )  # a single topic name becomes a one-element tuple
kafka['value_deserializer'] = json_deserialize
kafka.setdefault('auto_offset_reset', 'earliest')

print('Setting up PostgreSQL...')
pool = ThreadedConnectionPool(1, threads, pg_uri)
with db.get_conn(pool) as c:
    db.run_ddl(c)

print('Connecting to Kafka...')
consumer = KafkaConsumer(*topics, **kafka)
jq = JobQueue(threads)

# Main loop: hand every Kafka message to the worker pool for committing.
print('\nWebChecker-pg service is running\n')
for message in consumer:
    jq.put(jobs.commit_message, (pool, message))
def test_init(self):
    """A fresh queue is valid, and creating it a second time raises."""
    queue = JobQueue(self.db)
    self.assertTrue(queue.valid())
    with self.assertRaises(Exception):
        queue._create()
    queue.clear_queue()