def run_next():
    """Drive the job scheduler synchronously until nothing is ready.

    Each pass mirrors one scheduler tick: inspect the ready jobs, vet
    them against their dependencies, mark the accepted ones as tasked,
    cancel the rejected ones, spawn the accepted ones, and drain the
    progress they produced so newly-unblocked jobs become ready on the
    next pass.  Terminates when the ready set is empty.
    """
    while True:
        scheduler = self.job_scheduler
        collection = scheduler._job_collection
        ready = collection.ready_jobs

        log.info(
            "run_next: %d runnable jobs of (%d pending, %d tasked)" % (
                len(ready),
                len(collection.pending_jobs),
                len(collection.tasked_jobs),
            ))

        if not ready:
            break

        # Vet the ready jobs with a fresh dependency cache for this pass.
        accepted, rejected = scheduler._check_jobs(ready, DepCache())

        collection.update_many(accepted, "tasked")
        for job in rejected:
            scheduler._complete_job(job, False, True)
        for job in accepted:
            scheduler._spawn_job(job)

        # Flush progress so completed jobs can unblock their dependents
        # before the next iteration re-reads ready_jobs.
        self.drain_progress(skip_advance=True)
def rpc_local(fn_name, *args, **kwargs):
    """Invoke *fn_name* on the test daemon, emulating an RPC call.

    The return value is passed through a JSON dump/load cycle so the
    caller only ever sees JSON-safe values, just as it would over a
    real RPC transport.
    """
    method = getattr(test_daemon, fn_name)
    raw_result = method(*args, **kwargs)
    # Serialize/deserialize to give the response that special RPC flavor.
    retval = json.loads(json.dumps(raw_result))
    log.info("patch_daemon_rpc: %s(%s %s) -> %s" % (fn_name, args, kwargs, retval))
    return retval
def job_scheduler_queue_immediate(body):
    """Bypass the real queue: hand *body* straight to the scheduler's
    queue handler, so enqueued messages are processed synchronously."""
    log.info("job_scheduler_queue_immediate: %s" % body)
    job_scheduler_queue_handler.on_message(body)
def rpc_local(fn_name, *args, **kwargs):
    """Stand-in for RPC: call *fn_name* directly on the manager object
    and return its result unchanged (no serialization round-trip)."""
    target = getattr(manager, fn_name)
    result = target(*args, **kwargs)
    log.info("rpc_local: %s(%s %s) -> %s" % (fn_name, args, kwargs, result))
    return result