def test_job_depends():
    """Submitting a job that depends on a previously submitted job succeeds."""
    # Pick the sleep command appropriate for the host OS.
    if platform.platform().lower().startswith("linux"):
        sleep_command = f"bash {data_path}/sleepy.sh"
    else:
        sleep_command = f"{data_path}/sleepy.bat"

    first_spec = JobSpec(
        command=sleep_command,
        working_dir=pwd,
        log_file="test.log",
        priority=5,
        depends=[],
    )

    queue = JobQueue()
    submitted_ids = queue.submit([first_spec])

    # Second job declares a dependency on the ids returned by the first submit.
    dependent_spec = JobSpec(
        depends=submitted_ids,
        command=f'{data_path}/echo_it.bat "goodbye"',
        working_dir=pwd,
    )
    queue.submit([dependent_spec])
def test_prune():
    """prune() trims completed jobs down to the queue's completed_limit."""
    queue = JobQueue()
    queue.completed_limit = 10

    for _ in range(15):
        queue.submit([JobSpec(command=hello, working_dir=".", priority=10)])
    assert len(queue.queued_jobs) == 15
    assert len(queue.running_jobs) == 0
    assert len(queue.completed_jobs) == 0

    # Start every queued job.
    for _ in range(15):
        queue.on_job_started(queue.next_job().job_id)
    assert len(queue.queued_jobs) == 0
    assert len(queue.running_jobs) == 15
    assert len(queue.completed_jobs) == 0

    # Finish them all (snapshot the ids since the dict mutates as we go).
    for running_id in list(queue.running_jobs):
        queue.on_job_finished(running_id)
    assert len(queue.queued_jobs) == 0
    assert len(queue.running_jobs) == 0
    assert len(queue.completed_jobs) == 15

    # Pruning keeps only the 5 most recent beyond the limit of 10... i.e.
    # the completed list shrinks to 15 - 10 = 5 entries.
    queue.prune()
    assert len(queue.queued_jobs) == 0
    assert len(queue.running_jobs) == 0
    assert len(queue.completed_jobs) == 5
def test_priority():
    """The higher-priority submission is dequeued ahead of the earlier one."""
    queue = JobQueue()
    queue.submit([
        JobSpec(command=hello, working_dir=".", priority=10),
        JobSpec(command=goodbye, working_dir=".", priority=15),
    ])

    first = queue.next_job()
    queue.on_job_started(first.job_id)
    second = queue.next_job()
    queue.on_job_started(second.job_id)

    # Group 2 (priority 15) must come out before group 1 (priority 10).
    assert first.job_id == JobID(group=2, index=0)
    assert second.job_id == JobID(group=1, index=0)
def test_work_item():
    """A WorkItem wrapping a submitted job can be started and polled."""
    queue = JobQueue()
    spec = JobSpec(command="cmd.exe /C sleep 2.5", working_dir="d:/temp", cores=2)
    submitted_id = queue.submit([spec])[0]

    job, _ = queue.find_job(submitted_id)
    item = WorkItem(job=job, cores=[0, 1])
    print(item)
    item.start()

    # Poll at 0.5 s intervals until the item stops running (max 5 s).
    for tick in range(10):
        print(tick)
        time.sleep(0.5)
        print(item.get_status())
        if not item.is_running():
            break
    print("done")
def test_is_queued():
    """A submitted job is findable by id and retains its JobSpec fields."""
    queue = JobQueue()
    spec = JobSpec(
        command=f'{data_path}/echo_it.bat "hello"',
        working_dir=pwd,
        log_file="test.log",
        priority=5,
        depends=[],
    )

    new_id = queue.submit([spec])[0]
    # First submission to a fresh queue gets id "1.0".
    assert new_id == JobID.parse_obj("1.0")

    stored_job = queue.find_job(new_id)[0]
    assert stored_job.job_spec.command == spec.command
    assert stored_job.job_spec.priority == 5
    assert stored_job.job_spec.working_dir == pwd
    assert stored_job.job_spec.log_file == "test.log"
    assert stored_job.job_spec.depends == []
    assert len(queue.queued_jobs) == 1
def test_job_spec():
    """JobSpec keeps constructor values and defaults log_file to None."""
    spec = JobSpec(command="test", working_dir="/tmp", priority=10)
    assert spec.command == "test"
    assert spec.working_dir == "/tmp"
    assert spec.priority == 10
    assert spec.log_file is None
def test_save_and_read_queue():
    """save()/load() round-trips the queue; running jobs come back as queued."""
    queue = JobQueue(start_manager_thread=False)
    queue.completed_limit = 10

    for _ in range(15):
        queue.submit([JobSpec(command=hello, working_dir=".", priority=10)])
    assert len(queue.queued_jobs) == 15
    assert len(queue.running_jobs) == 0
    assert len(queue.completed_jobs) == 0

    # Start 10 of the 15 jobs.
    for _ in range(10):
        queue.on_job_started(queue.next_job().job_id)
    assert len(queue.queued_jobs) == 5
    assert len(queue.running_jobs) == 10
    assert len(queue.completed_jobs) == 0

    # Finish exactly 5 of the running jobs (snapshot ids before mutating).
    for running_id in list(queue.running_jobs)[:5]:
        queue.on_job_finished(running_id)
    assert len(queue.queued_jobs) == 5
    assert len(queue.running_jobs) == 5
    assert len(queue.completed_jobs) == 5

    queue.save()
    restored = JobQueue()
    restored.load()

    # Jobs that were running at save time are requeued on load.
    expected_queued = len(queue.queued_jobs) + len(queue.running_jobs)
    assert expected_queued == len(restored.queued_jobs)
    assert len(queue.completed_jobs) == len(restored.completed_jobs)
def test_pool():
    """DynamicProcessPool picks up queued jobs and drains them on join()."""
    queue = JobQueue()
    spec = JobSpec(command="cmd.exe /C sleep 2.5", working_dir="d:/temp", cores=1)
    queue.submit([spec] * 20)

    pool = DynamicProcessPool(queue, 20)
    pool._start_manager_thread()

    # Give the manager thread time to start some jobs.
    time.sleep(4)
    print(queue.running_jobs.values())
    assert len(queue.running_jobs) > 0

    # Wait for the sleep commands to finish, then drain the pool.
    time.sleep(10)
    pool.join(wait=True)
    print(queue.running_jobs.values())
    assert len(queue.running_jobs) == 0
def test_job_sorting():
    """Jobs sort by priority (highest first), ties broken by job id order."""
    # (priority, group, index) for each job, in submission order.
    params = [
        (5, 1, 0),
        (5, 2, 0),
        (4, 3, 0),
        (6, 4, 0),
        (5, 5, 1),
        (5, 5, 0),
    ]
    jobs = [
        Job(
            job_spec=JobSpec(command="", working_dir="", priority=prio),
            job_id=JobID(group=grp, index=idx),
        )
        for prio, grp, idx in params
    ]

    ordered = sorted(jobs)
    for job in ordered:
        print(job)

    # Priority 6 sorts first, priority 4 sorts last.
    assert ordered[0].job_id.group == 4
    assert ordered[-1].job_id.group == 3
def qsub(
    command,
    args,
    priority=1,
    logfile=None,
    log=False,
    depends=None,
    debug=False,
    walltime=None,
    cores=1,
    port=config.port,
    ip_address=config.ip_address,
    alternate_runner=False,
):
    """Submits one job to the queue.

    Builds a JobSpec from *command* and *args*, posts it to the server's
    /qsub endpoint, and prints the assigned job id(s) (or the raw response
    on failure).
    """
    command_str = command + " " + " ".join(f'"{arg}"' for arg in args)
    working_dir = encode_path(os.getcwd())

    # BUG FIX: depends defaults to None; the original called len(depends)
    # unconditionally, raising TypeError when no dependencies were given.
    if depends:
        # A single space-separated token expands to multiple dependency ids.
        if len(depends) == 1 and " " in depends[0]:
            depends = depends[0].split()
        depends = [JobID.parse_obj(d) for d in depends]
    else:
        depends = []

    if walltime:
        if ":" in walltime:
            # "HH:MM:SS" form — convert to seconds.
            # BUG FIX: the original computed the seconds but never assigned
            # them back, so the raw "HH:MM:SS" string was sent to JobSpec.
            hrs, minutes, sec = (int(part) for part in walltime.split(":"))
            walltime = float(sec + 60 * minutes + 3600 * hrs)
        else:
            walltime = float(walltime)

    # Derive a default log file name from the command when logging is on.
    if log and not logfile:
        logfile = str(Path(command).with_suffix(".lqts.log"))

    job_spec = JobSpec(
        command=command_str,
        working_dir=working_dir,
        log_file=logfile,
        priority=priority,
        depends=depends,
        walltime=walltime,
        cores=cores,
        alternate_runner=alternate_runner,
    )

    config.port = port
    config.ip_address = ip_address

    response = requests.post(f"{config.url}/qsub", json=[job_spec.dict()])

    if response.status_code == 200:
        if debug:
            print(response)
        json_data = response.json()
        if len(json_data) <= 20:
            # Small batches: print every assigned job id.
            print(" ".join(str(JobID(**item)) for item in json_data))
        else:
            # Large batches: print only the group number.
            print(JobID(**json_data[0]).group)
    else:
        print(response)
def _qsub_argfile(
    command,
    argfile,
    priority=1,
    log=False,
    depends=None,
    debug=False,
    submit_delay=0.0,
    cores=1,
    port=config.port,
    ip_address=config.ip_address,
    alternate_runner=False,
    walltime=None,
):
    """Submits one job per line of *argfile*, each running *command* with
    that line appended as its arguments.

    NOTE(review): submit_delay is accepted but currently unused — confirm
    whether a per-job delay was intended here.
    """
    job_specs = []
    working_dir = encode_path(os.getcwd())

    # BUG FIX: depends defaults to None; iterating None raised TypeError.
    depends = [JobID.parse(d) for d in depends] if depends else []

    with open(argfile) as f:
        for iline, argline in enumerate(f):
            command_str = f"{command} {argline}"

            if log:
                # One numbered log file per argfile line.
                log_file = str(Path(argfile).with_suffix(f".lqts.{iline:0>3}.log"))
            else:
                log_file = None

            js = JobSpec(
                command=command_str,
                working_dir=working_dir,
                log_file=log_file,
                priority=priority,
                depends=depends,
                # BUG FIX: walltime was accepted but silently dropped;
                # pass it through like qsub() does.
                walltime=walltime,
                cores=cores,
                alternate_runner=alternate_runner,
            )
            job_specs.append(js.dict())

    if debug:
        for js in job_specs:
            print(js)

    config.port = port
    config.ip_address = ip_address

    response = requests.post(f"{config.url}/qsub", json=job_specs)

    if response.status_code == 200:
        if debug:
            print(response)
        json_data = response.json()
        if len(json_data) <= 20:
            print(" ".join(str(JobID(**item)) for item in json_data))
        else:
            print(JobID(**json_data[0]).group)
    else:
        print(response)
def qsub_multi(
    commands,
    args,
    priority=1,
    logfile=None,
    depends=None,
    debug=False,
    log=False,
    cores=1,
    port=config.port,
    ip_address=config.ip_address,
    alternate_runner=False,
):
    """
    Submits multiple jobs to the queue. Use this if you have multiple
    commands that you wish to run and you can specify them with a glob
    pattern

        $ qsub mycommand*.bat -- --option1 --option2 value2

    NOTE(review): the logfile parameter is overwritten inside the loop
    (each command derives its own log name, or None) — confirm whether a
    caller-supplied logfile should ever be honored here.
    """
    from glob import iglob

    commands = iglob(commands)
    job_specs = []
    working_dir = encode_path(os.getcwd())

    # BUG FIX: depends defaults to None; iterating None raised TypeError.
    depends = [JobID.parse(d) for d in depends] if depends else []

    for command in commands:
        command_str = f"{command} " + " ".join(f'"{arg}"' for arg in args)

        # Each matched command gets its own log file (or none at all).
        if log:
            logfile = str(Path(command).with_suffix(".lqts.log"))
        else:
            logfile = None

        js = JobSpec(
            command=command_str,
            working_dir=working_dir,
            log_file=logfile,
            priority=priority,
            depends=depends,
            cores=cores,
            alternate_runner=alternate_runner,
        )
        job_specs.append(js.dict())

    if debug:
        for js in job_specs:
            print(js)

    config.port = port
    config.ip_address = ip_address

    response = requests.post(f"{config.url}/qsub", json=job_specs)

    if response.status_code == 200:
        if debug:
            print(response)
        json_data = response.json()
        if len(json_data) <= 20:
            print(" ".join(str(JobID(**item)) for item in json_data))
        else:
            print(JobID(**json_data[0]).group)
    else:
        print(response)
def qsub_cmulti(
    command,
    file_pattern,
    args,
    priority=1,
    logfile=None,
    depends=None,
    debug=False,
    log=False,
    cores=1,
    port=config.port,
    ip_address=config.ip_address,
    alternate_runner=False,
    changewd=False,
):
    """
    Submits multiple jobs to the queue. Runs **command** for each file in
    **files**. Pass in args.

        $ qsub mycommand.exe MyInputFile*.inp -- --do --it
               [-----------] [--------------]    [--------]
                  command      filepattern          args
    """
    from glob import iglob

    files = iglob(file_pattern)
    job_specs = []
    working_dir = encode_path(os.getcwd())

    # BUG FIX: depends defaults to None; iterating None raised TypeError.
    depends = [JobID.parse(d) for d in depends] if depends else []

    for f in files:
        if changewd:
            # Run each job from its input file's directory, using the
            # bare file name in the command.
            working_dir = str(Path(f).resolve().parent)
            f = Path(f).name

        command_str = f"{command} {f} " + " ".join(f'"{arg}"' for arg in args)

        # Each input file gets its own log file (or none at all).
        if log:
            logfile = str(Path(f).with_suffix(".lqts.log"))
        else:
            logfile = None

        js = JobSpec(
            command=command_str,
            working_dir=working_dir,
            log_file=logfile,
            priority=priority,
            depends=depends,
            cores=cores,
            alternate_runner=alternate_runner,
        )
        job_specs.append(js.dict())

    if debug:
        for js in job_specs:
            print(js)

    config.port = port
    config.ip_address = ip_address

    response = requests.post(f"{config.url}/qsub", json=job_specs)

    if response.status_code == 200:
        if debug:
            print(response)
        json_data = response.json()
        if len(json_data) <= 20:
            print(" ".join(str(JobID(**item)) for item in json_data))
        else:
            print(JobID(**json_data[0]).group)
    else:
        print(response)
def get_job_spec():
    """Return a simple high-priority JobSpec for use in tests."""
    return JobSpec(command=hello, working_dir=".", priority=10)
workercount=app.pool.max_workers) script_block = env.get_template("js_script_template.jinja").render() c = Counter([job.status.value for job in jobs]) for letter in "QDRC": if letter not in c: c[letter] = 0 summary_text = " ".join(f"{s}:{c}" for s, c in c.items()) page_text = page_template.render( page_title="Queue Status", navbar="", buttonbar=buttonbar, summary=summary_text, table=render_qstat_table(jobs, include_complete), script_block=script_block, ) return page_text if __name__ == "__main__": job = Job( job_id=JobID(group=1, index=2), job_spec=JobSpec(command="cmd /c echo hello", working_dir="/tmp"), ) print(render_qstat_page(jobs=[job]))