def qsub_multi(
    commands,
    args,
    priority=1,
    logfile=None,
    depends=None,
    debug=False,
    log=False,
    cores=1,
    port=config.port,
    ip_address=config.ip_address,
    alternate_runner=False,
):
    """
    Submit multiple jobs to the queue.

    Use this if you have multiple commands that you wish to run and you can
    specify them with a glob pattern::

        $ qsub mycommand*.bat -- --option1 --option2 value2

    Parameters
    ----------
    commands : str
        Glob pattern; each matching file becomes one job.
    args : sequence of str
        Extra arguments appended (quoted) to every command.
    priority : int
        Queue priority passed through to each JobSpec.
    logfile : str, optional
        Ignored; when ``log`` is True the log file name is derived from each
        command instead. Kept for interface compatibility.
    depends : sequence of str, optional
        Job IDs (parseable by ``JobID.parse``) each job depends on.
    debug : bool
        Print the job specs and the raw server response.
    log : bool
        Write a per-command ``.lqts.log`` file next to each command.
    cores : int
        Cores requested per job.
    port, ip_address :
        Override the server address in the module-level ``config``.
    alternate_runner : bool
        Forwarded to JobSpec.
    """
    from glob import iglob

    job_specs = []
    working_dir = encode_path(os.getcwd())
    # BUG FIX: `depends` defaults to None; iterating None raised TypeError
    # whenever no dependencies were given.
    depends = [JobID.parse(d) for d in (depends or [])]

    for command in iglob(commands):
        # Quote each extra argument so values containing spaces survive.
        command_str = f"{command} " + " ".join(f'"{arg}"' for arg in args)

        # NOTE(review): the `logfile` parameter is intentionally not used
        # here — the log name is derived per command.
        if log:
            job_log = str(Path(command).with_suffix(".lqts.log"))
        else:
            job_log = None

        js = JobSpec(
            command=command_str,
            working_dir=working_dir,
            log_file=job_log,
            priority=priority,
            depends=depends,
            cores=cores,
            alternate_runner=alternate_runner,
        )
        job_specs.append(js.dict())

    if debug:
        for js in job_specs:
            print(js)

    # Point the shared config at the requested server before submitting.
    config.port = port
    config.ip_address = ip_address

    response = requests.post(f"{config.url}/qsub", json=job_specs)

    if response.status_code == 200:
        if debug:
            print(response)
        json_data = response.json()  # parse once and reuse
        if len(json_data) <= 20:
            print(" ".join(str(JobID(**item)) for item in json_data))
        else:
            # Too many jobs to list individually; show the group id only.
            print(JobID(**json_data[0]).group)
    else:
        print(response)
def _qsub_argfile(
    command,
    argfile,
    priority=1,
    log=False,
    depends=None,
    debug=False,
    submit_delay=0.0,
    cores=1,
    port=config.port,
    ip_address=config.ip_address,
    alternate_runner=False,
    walltime=None,
):
    """
    Submit one job per line of an argument file.

    Each non-blank line of ``argfile`` is appended to ``command`` and
    submitted as its own job.

    Parameters
    ----------
    command : str
        Executable/command run for every line.
    argfile : str
        Path to a text file; each non-blank line supplies one job's args.
    priority : int
        Queue priority passed through to each JobSpec.
    log : bool
        Write a numbered ``.lqts.NNN.log`` file per line, named after argfile.
    depends : sequence of str, optional
        Job IDs (parseable by ``JobID.parse``) each job depends on.
    debug : bool
        Print the job specs and the raw server response.
    submit_delay : float
        Accepted but currently unused.  NOTE(review): confirm whether a
        per-job delay was intended here.
    cores : int
        Cores requested per job.
    port, ip_address :
        Override the server address in the module-level ``config``.
    alternate_runner : bool
        Forwarded to JobSpec.
    walltime :
        Accepted but currently unused; kept for interface compatibility.
    """
    job_specs = []
    working_dir = encode_path(os.getcwd())
    # BUG FIX: `depends` defaults to None; iterating None raised TypeError
    # whenever no dependencies were given.
    depends = [JobID.parse(d) for d in (depends or [])]

    with open(argfile) as f:
        for iline, argline in enumerate(f):
            # BUG FIX: strip the trailing newline (and surrounding
            # whitespace) so it is not embedded in the job command, and
            # skip blank lines rather than submitting argument-less jobs.
            argline = argline.strip()
            if not argline:
                continue

            command_str = f"{command} {argline}"

            if log:
                # Zero-padded per-line log file derived from the argfile name.
                log_file = str(Path(argfile).with_suffix(f".lqts.{iline:0>3}.log"))
            else:
                log_file = None

            js = JobSpec(
                command=command_str,
                working_dir=working_dir,
                log_file=log_file,
                priority=priority,
                depends=depends,
                cores=cores,
                alternate_runner=alternate_runner,
            )
            job_specs.append(js.dict())

    if debug:
        for js in job_specs:
            print(js)

    # Point the shared config at the requested server before submitting.
    config.port = port
    config.ip_address = ip_address

    response = requests.post(f"{config.url}/qsub", json=job_specs)

    if response.status_code == 200:
        if debug:
            print(response)
        json_data = response.json()  # parse once and reuse
        if len(json_data) <= 20:
            print(" ".join(str(JobID(**item)) for item in json_data))
        else:
            # Too many jobs to list individually; show the group id only.
            print(JobID(**json_data[0]).group)
    else:
        print(response)
def qsub_cmulti(command, file_pattern, args, priority=1, logfile=None,
                depends=None, debug=False, log=False, cores=1,
                port=config.port, ip_address=config.ip_address,
                alternate_runner=False, changewd=False):
    """
    Submit multiple jobs to the queue: run **command** once for each file
    matching **file_pattern**, passing **args** through::

        $ qsub mycommand.exe MyInputFile*.inp -- --do --it
               [-----------] [--------------]    [--------]
                  command      filepattern          args

    Parameters
    ----------
    command : str
        Executable/command run for every matched file.
    file_pattern : str
        Glob pattern selecting the input files.
    args : sequence of str
        Extra arguments appended (quoted) after the file name.
    priority : int
        Queue priority passed through to each JobSpec.
    logfile : str, optional
        Ignored; when ``log`` is True the log file name is derived from each
        input file instead. Kept for interface compatibility.
    depends : sequence of str, optional
        Job IDs (parseable by ``JobID.parse``) each job depends on.
    debug : bool
        Print the job specs and the raw server response.
    log : bool
        Write a per-file ``.lqts.log`` file next to each input file.
    cores : int
        Cores requested per job.
    port, ip_address :
        Override the server address in the module-level ``config``.
    alternate_runner : bool
        Forwarded to JobSpec.
    changewd : bool
        Run each job in its input file's directory, passing only the
        bare file name on the command line.
    """
    from glob import iglob

    job_specs = []
    working_dir = encode_path(os.getcwd())
    # BUG FIX: `depends` defaults to None; iterating None raised TypeError
    # whenever no dependencies were given.
    depends = [JobID.parse(d) for d in (depends or [])]

    for f in iglob(file_pattern):
        if changewd:
            # Job runs in the file's own directory; pass just the file name.
            working_dir = str(Path(f).resolve().parent)
            f = Path(f).name

        # Quote each extra argument so values containing spaces survive.
        command_str = f"{command} {f} " + " ".join(f'"{arg}"' for arg in args)

        # NOTE(review): the `logfile` parameter is intentionally not used
        # here — the log name is derived per input file.
        if log:
            job_log = str(Path(f).with_suffix(".lqts.log"))
        else:
            job_log = None

        js = JobSpec(
            command=command_str,
            working_dir=working_dir,
            log_file=job_log,
            priority=priority,
            depends=depends,
            cores=cores,
            alternate_runner=alternate_runner,
        )
        job_specs.append(js.dict())

    if debug:
        for js in job_specs:
            print(js)

    # Point the shared config at the requested server before submitting.
    config.port = port
    config.ip_address = ip_address

    response = requests.post(f"{config.url}/qsub", json=job_specs)

    if response.status_code == 200:
        if debug:
            print(response)
        json_data = response.json()  # parse once and reuse
        if len(json_data) <= 20:
            print(" ".join(str(JobID(**item)) for item in json_data))
        else:
            # Too many jobs to list individually; show the group id only.
            print(JobID(**json_data[0]).group)
    else:
        print(response)