def test_priority():
    """Higher-priority jobs must be handed out by the queue first."""
    queue = JobQueue()
    lower = JobSpec(command=hello, working_dir=".", priority=10)
    higher = JobSpec(command=goodbye, working_dir=".", priority=15)
    queue.submit([lower, higher])

    first = queue.next_job()
    queue.on_job_started(first.job_id)
    second = queue.next_job()
    queue.on_job_started(second.job_id)

    # Group 2 carries priority 15, so it must run before group 1 (priority 10).
    assert first.job_id == JobID(group=2, index=0)
    assert second.job_id == JobID(group=1, index=0)
def qdel(job_ids, debug=False, port=config.port, ip_address=config.ip_address):
    """Delete jobs from the queue.

    Parameters
    ----------
    job_ids : list[str]
        Job ids ("group.index") or bare group numbers.  If empty, ids are
        read from standard input (digits only) when stdin is seekable.
    debug : bool
        Accepted for interface compatibility; not used in this function.
    port, ip_address
        Server address; copied onto the module-level ``config``.
    """
    if not job_ids and sys.stdin.seekable():
        # Get the job ids from standard input (e.g. piped from qsub/qstat).
        job_id_string = "".join(c for c in sys.stdin.read() if c in digits)
        job_ids = job_id_string.split()

    if not job_ids:
        # BUG FIX: message previously read "no jobs to wait on." — copied
        # from qwait; this command deletes jobs, it does not wait on them.
        print("no jobs to delete.")
        return

    config.port = port
    config.ip_address = ip_address

    # Expand bare group numbers into every JobID belonging to that group.
    input_job_ids = job_ids
    job_ids = []
    for job_id in list(input_job_ids):
        if "." not in job_id:
            # A job group was specified; ask the server for its members.
            response = requests.get(
                f"{config.url}/jobgroup?group_number={int(job_id)}")
            if response.status_code == 200:
                job_ids.extend([JobID(**item) for item in response.json()])
        else:
            job_ids.append(JobID.parse_obj(job_id))

    # De-duplicate before sending to the server.
    job_ids = [jid.dict() for jid in set(job_ids)]
    response = requests.post(f"{config.url}/qdel", json=job_ids)
    print("Jobs deleted: " + response.text)
def qpriority(priority=10, job_ids=None, port=config.port, ip_address=config.ip_address):
    """Change the priority of one or more jobs.

    Parameters
    ----------
    priority : int
        New priority value to assign to every listed job.
    job_ids : list[str] | None
        Job ids ("group.index") or bare group numbers.  If empty, ids are
        read from standard input (digits only) when stdin is seekable.
    port, ip_address
        Server address; copied onto the module-level ``config``.
    """
    if not job_ids and sys.stdin.seekable():
        # Get the job ids from standard input (e.g. piped from qsub/qstat).
        job_id_string = "".join(c for c in sys.stdin.read() if c in digits)
        job_ids = job_id_string.split()

    if not job_ids:
        # BUG FIX: message previously read "no jobs to wait on." — copied
        # from qwait; this command updates priorities, it does not wait.
        print("no jobs to update.")
        return

    config.port = port
    config.ip_address = ip_address

    # Expand bare group numbers into every JobID belonging to that group.
    input_job_ids = job_ids
    job_ids = []
    for job_id in list(input_job_ids):
        if "." not in job_id:
            # A job group was specified; ask the server for its members.
            response = requests.get(
                f"{config.url}/jobgroup?group_number={int(job_id)}")
            if response.status_code == 200:
                job_ids.extend([JobID(**item) for item in response.json()])
        else:
            job_ids.append(JobID.parse_obj(job_id))

    # De-duplicate before sending to the server.
    job_ids = [jid.dict() for jid in set(job_ids)]
    response = requests.post(
        f"{config.url}/qpriority",
        params={"priority": priority},
        json=job_ids,
    )
    print(response.text)
def test_job_sorting():
    """Sorting Jobs puts the highest priority first and the lowest last."""

    def make_job(priority, group, index=0):
        # Small builder to keep the fixture list readable.
        return Job(
            job_spec=JobSpec(command="", working_dir="", priority=priority),
            job_id=JobID(group=group, index=index),
        )

    jobs = [
        make_job(5, 1),
        make_job(5, 2),
        make_job(4, 3),
        make_job(6, 4),
        make_job(5, 5, index=1),
        make_job(5, 5, index=0),
    ]

    ordered = sorted(jobs)
    for job in ordered:
        print(job)

    # Priority 6 (group 4) sorts first; priority 4 (group 3) sorts last.
    assert ordered[0].job_id.group == 4
    assert ordered[-1].job_id.group == 3
def qsub(
    command,
    args,
    priority=1,
    logfile=None,
    log=False,
    depends=None,
    debug=False,
    walltime=None,
    cores=1,
    port=config.port,
    ip_address=config.ip_address,
    alternate_runner=False,
):
    """Submits one job to the queue.

    Parameters
    ----------
    command : str
        Executable to run; each entry of ``args`` is appended, quoted.
    priority : int
        Scheduling priority.
    logfile / log
        Explicit log path, or (when ``log`` is set and no path given) one
        derived from ``command`` with a ``.lqts.log`` suffix.
    depends
        Job ids this job must wait for; a single space-separated string of
        ids is also accepted.
    walltime
        A number of seconds, or an "H:M:S" string (converted to seconds).
    """
    command_str = command + " " + " ".join(f'"{arg}"' for arg in args)
    working_dir = encode_path(os.getcwd())

    # BUG FIX: the original evaluated len(depends) and depends[0] before the
    # truthiness check, raising TypeError whenever depends was left as None.
    if depends:
        if len(depends) == 1 and " " in depends[0]:
            # One quoted, space-separated string of ids -> split it apart.
            depends = depends[0].split()
        depends = [JobID.parse_obj(d) for d in depends]
    else:
        depends = []

    if walltime:
        if ":" in walltime:
            hrs, minutes, sec = [int(x) for x in walltime.split(":")]
            # BUG FIX: the converted value was previously computed into a
            # local ("seconds") and discarded, so "H:M:S" strings were
            # submitted to the server verbatim instead of as seconds.
            walltime = float(sec + 60 * minutes + hrs * 3600)
        else:
            walltime = float(walltime)

    if log and not logfile:
        logfile = str(Path(command).with_suffix(".lqts.log"))

    job_spec = JobSpec(
        command=command_str,
        working_dir=working_dir,
        log_file=logfile,
        priority=priority,
        depends=depends,
        walltime=walltime,
        cores=cores,
        alternate_runner=alternate_runner,
    )

    config.port = port
    config.ip_address = ip_address
    response = requests.post(f"{config.url}/qsub", json=[job_spec.dict()])

    if response.status_code == 200:
        if debug:
            print(response)
        json_data = response.json()
        if len(json_data) <= 20:
            # Few jobs: print every id.
            print(" ".join(str(JobID(**item)) for item in json_data))
        else:
            # Many jobs: just print the group number.
            print(JobID(**json_data[0]).group)
    else:
        print(response)
def _qsub_argfile(
    command,
    argfile,
    priority=1,
    log=False,
    depends=None,
    debug=False,
    submit_delay=0.0,
    cores=1,
    port=config.port,
    ip_address=config.ip_address,
    alternate_runner=False,
    walltime=None,
):
    """Submit one job per line of *argfile*, each line giving the arguments.

    NOTE(review): ``submit_delay`` and ``walltime`` are accepted but never
    used in this function — confirm whether they should throttle submission
    and be forwarded to JobSpec respectively.
    """
    job_specs = []
    working_dir = encode_path(os.getcwd())

    # BUG FIX: depends defaults to None; iterating it unguarded raised
    # TypeError whenever no dependencies were supplied.
    depends = [JobID.parse(d) for d in depends] if depends else []

    with open(argfile) as f:
        for iline, argline in enumerate(f):
            command_str = f"{command} {argline}"
            if log:
                # One log file per argfile line, zero-padded to stay unique.
                log_file = str(
                    Path(argfile).with_suffix(f".lqts.{iline:0>3}.log"))
            else:
                log_file = None

            js = JobSpec(
                command=command_str,
                working_dir=working_dir,
                log_file=log_file,
                priority=priority,
                depends=depends,
                cores=cores,
                alternate_runner=alternate_runner,
            )
            job_specs.append(js.dict())

    if debug:
        for js in job_specs:
            print(js)

    config.port = port
    config.ip_address = ip_address
    response = requests.post(f"{config.url}/qsub", json=job_specs)

    if response.status_code == 200:
        if debug:
            print(response)
        json_data = response.json()
        if len(json_data) <= 20:
            # Few jobs: print every id.
            print(" ".join(str(JobID(**item)) for item in json_data))
        else:
            # Many jobs: just print the group number.
            print(JobID(**json_data[0]).group)
    else:
        print(response)
def qsub_multi(
    commands,
    args,
    priority=1,
    logfile=None,
    depends=None,
    debug=False,
    log=False,
    cores=1,
    port=config.port,
    ip_address=config.ip_address,
    alternate_runner=False,
):
    """
    Submits multiple jobs to the queue.  Use this if you have multiple
    commands that you wish to run and you can specify them with a glob
    pattern.

    $ qsub mycommand*.bat -- --option1 --option2 value2
    """
    from glob import iglob

    job_specs = []
    working_dir = encode_path(os.getcwd())

    # BUG FIX: depends defaults to None; iterating it unguarded raised
    # TypeError whenever no dependencies were supplied.
    depends = [JobID.parse(d) for d in depends] if depends else []

    for command in iglob(commands):
        command_str = f"{command} " + " ".join(f'"{arg}"' for arg in args)
        if log:
            # Derive a log file from each matched command name.
            logfile = str(Path(command).with_suffix(".lqts.log"))
        else:
            logfile = None

        js = JobSpec(
            command=command_str,
            working_dir=working_dir,
            log_file=logfile,
            priority=priority,
            depends=depends,
            cores=cores,
            alternate_runner=alternate_runner,
        )
        job_specs.append(js.dict())

    if debug:
        for js in job_specs:
            print(js)

    config.port = port
    config.ip_address = ip_address
    response = requests.post(f"{config.url}/qsub", json=job_specs)

    if response.status_code == 200:
        if debug:
            print(response)
        json_data = response.json()
        if len(json_data) <= 20:
            # Few jobs: print every id.
            print(" ".join(str(JobID(**item)) for item in json_data))
        else:
            # Many jobs: just print the group number.
            print(JobID(**json_data[0]).group)
    else:
        print(response)
def qsub_cmulti(
    command,
    file_pattern,
    args,
    priority=1,
    logfile=None,
    depends=None,
    debug=False,
    log=False,
    cores=1,
    port=config.port,
    ip_address=config.ip_address,
    alternate_runner=False,
    changewd=False,
):
    """
    Submits multiple jobs to the queue.  Runs **command** for each file
    matching **file_pattern**, passing in ``args``.

        $ qsub mycommand.exe MyInputFile*.inp -- --do --it
               [-----------] [--------------]    [--------]
               command       filepattern         args
    """
    from glob import iglob

    job_specs = []
    working_dir = encode_path(os.getcwd())

    # BUG FIX: depends defaults to None; iterating it unguarded raised
    # TypeError whenever no dependencies were supplied.
    depends = [JobID.parse(d) for d in depends] if depends else []

    for f in iglob(file_pattern):
        if changewd:
            # Run each job from the directory containing its input file.
            working_dir = str(Path(f).resolve().parent)
            f = Path(f).name

        command_str = f"{command} {f} " + " ".join(f'"{arg}"' for arg in args)
        if log:
            # Derive a log file from each input file name.
            logfile = str(Path(f).with_suffix(".lqts.log"))
        else:
            logfile = None

        js = JobSpec(
            command=command_str,
            working_dir=working_dir,
            log_file=logfile,
            priority=priority,
            depends=depends,
            cores=cores,
            alternate_runner=alternate_runner,
        )
        job_specs.append(js.dict())

    if debug:
        for js in job_specs:
            print(js)

    config.port = port
    config.ip_address = ip_address
    response = requests.post(f"{config.url}/qsub", json=job_specs)

    if response.status_code == 200:
        if debug:
            print(response)
        json_data = response.json()
        if len(json_data) <= 20:
            # Few jobs: print every id.
            print(" ".join(str(JobID(**item)) for item in json_data))
        else:
            # Many jobs: just print the group number.
            print(JobID(**json_data[0]).group)
    else:
        print(response)
def test_job_id():
    """A default-constructed JobID starts at group 1, index 0."""
    default_id = JobID()
    assert (default_id.group, default_id.index) == (1, 0)
workercount=app.pool.max_workers) script_block = env.get_template("js_script_template.jinja").render() c = Counter([job.status.value for job in jobs]) for letter in "QDRC": if letter not in c: c[letter] = 0 summary_text = " ".join(f"{s}:{c}" for s, c in c.items()) page_text = page_template.render( page_title="Queue Status", navbar="", buttonbar=buttonbar, summary=summary_text, table=render_qstat_table(jobs, include_complete), script_block=script_block, ) return page_text if __name__ == "__main__": job = Job( job_id=JobID(group=1, index=2), job_spec=JobSpec(command="cmd /c echo hello", working_dir="/tmp"), ) print(render_qstat_page(jobs=[job]))
def qwait(
    job_ids=None, interval=5, port=config.port, ip_address=config.ip_address, verbose=0
):
    """Blocks until the specified jobs have completed.

    Polls the server every *interval* seconds and, once the first poll
    reveals how many jobs are outstanding, shows a tqdm progress bar.

    NOTE(review): ``verbose`` is accepted but never used here — confirm
    whether it should gate the progress output.
    """
    config.ip_address = ip_address
    config.port = port

    if not job_ids and sys.stdin.seekable():
        # Get the job ids from standard input (e.g. piped from qsub).
        job_id_string = "".join(c for c in sys.stdin.read() if c in digits)
        job_ids = job_id_string.split()

    if not job_ids:
        print("no jobs to wait on.")
        return

    # Parse the job ids, expanding bare group numbers into every JobID
    # belonging to that group.
    input_job_ids = job_ids
    job_ids = []
    for job_id in list(input_job_ids):
        if "." not in job_id:
            # A job group was specified; ask the server for its members.
            response = requests.get(
                f"{config.url}/jobgroup?group_number={int(job_id)}")
            if response.status_code == 200:
                job_ids.extend([JobID(**item) for item in response.json()])
        else:
            job_ids.append(JobID.parse_obj(job_id))

    job_ids = set(job_ids)

    t = None  # progress bar, created lazily after the first poll
    done_waiting = False
    num_jobs_left = None

    while not done_waiting:
        options = {"completed": False}
        response = requests.get(f"{config.url}/qstat", json=options)
        queued_or_running_job_ids = set(
            Job(**ujson.loads(item)).job_id for item in response.json()
        )
        waiting_on = job_ids.intersection(queued_or_running_job_ids)

        if len(waiting_on) > 0:
            time.sleep(interval)
            if t is None:
                if len(waiting_on) <= 20:
                    msg = str(waiting_on)
                else:
                    # Too many to list; show the count and the id range.
                    lsw = list(str(job) for job in sorted(waiting_on))
                    msg = f"{len(waiting_on)} jobs: {lsw[0]} - {lsw[-1]}"
                    del lsw
                print(f"Waiting on {msg}")
                t = tqdm.tqdm(total=len(waiting_on))
                num_jobs_left = len(waiting_on)
            else:
                # BUG FIX: local variable was misspelled "num_finshed".
                num_finished = num_jobs_left - len(waiting_on)
                t.update(num_finished)
                num_jobs_left = len(waiting_on)
        else:
            done_waiting = True

    # BUG FIX: complete and close the progress bar; previously it was left
    # open and never advanced to 100% when the final jobs finished.
    if t is not None:
        if num_jobs_left:
            t.update(num_jobs_left)
        t.close()