def test_worker_one_online_job(xenon_server):
    """Feed a single serialized job to an interactive worker process and
    check that the streamed-back result decodes to the expected value."""
    workflow = mul(6, 7)
    node = next(iter(workflow._workflow.nodes.values()))
    registry = serial.base()
    encoded_job = registry.to_json(JobMessage(1234, node))

    machine = Machine(scheduler_adaptor='local')
    scheduler = machine.scheduler

    # Build the worker command line and wrap it in a Xenon job description.
    executable, arguments = XenonJobConfig().command_line()
    description = xenon.JobDescription(
        executable=str(executable), arguments=arguments)

    xjob, xstdout = scheduler.submit_interactive_job(
        description, [encoded_job.encode()])
    scheduler.wait_until_done(xjob)

    # The output stream yields message objects; collect all stdout chunks.
    result_json = ''.join(
        chunk.stdout.decode() for chunk in xstdout if chunk.stdout)
    assert len(result_json) > 0

    result = registry.from_json(result_json)
    assert result.status == 'done'
    assert result.key == 1234
    assert result.msg is None
    assert result.value == 42
    scheduler.close()
def test_machine_batch_job(xenon_server, tmpdir):
    """Run ``/bin/hostname`` as a batch job and verify its captured stdout
    matches this host's name."""
    machine = Machine(scheduler_adaptor='local')
    scheduler = machine.scheduler

    stdout_file = Path(str(tmpdir)) / 'hostname.txt'
    description = xenon.JobDescription(
        executable='/bin/hostname', stdout=str(stdout_file))

    job = scheduler.submit_batch_job(description)
    scheduler.wait_until_done(job)

    with stdout_file.open() as f:
        lines = [line.strip() for line in f]
    assert lines[0] == socket.gethostname()
    scheduler.close()
def __init__(self, *, queue_name=None, environment=None, time_out=1000,
             scheduler_arguments=None, **kwargs):
    """Build the job configuration and its Xenon job description.

    :param queue_name: scheduler queue to submit to (or ``None``).
    :param environment: environment variables for the remote job.
    :param time_out: timeout value stored on the config (milliseconds,
        presumably — TODO confirm against the consumer of ``time_out``).
    :param scheduler_arguments: extra arguments passed to the scheduler.
    :param kwargs: forwarded to the base configuration class.
    """
    super().__init__(**kwargs)
    self.time_out = time_out

    # Derive the worker command line from the (already initialised) base
    # configuration, then freeze it into a Xenon job description.
    executable, arguments = self.command_line()
    self.xenon_job_description = xenon.JobDescription(
        executable=str(executable),
        arguments=arguments,
        working_directory=str(self.working_dir),
        queue_name=queue_name,
        environment=environment,
        scheduler_arguments=scheduler_arguments)
def test_worker_ten_online_jobs(xenon_server):
    """Stream ten serialized jobs through one interactive worker and check
    that the ten results arrive in submission order."""
    registry = serial.base()

    def encode_job(workflow):
        # Serialize the single node of `workflow` as a newline-terminated
        # JSON job message (key 0 for every job).
        node = next(iter(workflow._workflow.nodes.values()))
        return (registry.to_json(JobMessage(0, node)) + '\n').encode()

    machine = Machine(scheduler_adaptor='local')
    scheduler = machine.scheduler

    executable, arguments = XenonJobConfig(verbose=True).command_line()
    description = xenon.JobDescription(
        executable=str(executable), arguments=arguments)

    xjob, xstdout = scheduler.submit_interactive_job(
        description, [encode_job(mul(10, i)) for i in range(10)])
    scheduler.wait_until_done(xjob)

    # Gather stdout chunks into one string; echo stderr for debugging.
    stdout_chunks = []
    for chunk in xstdout:
        if chunk.stdout:
            stdout_chunks.append(chunk.stdout.decode())
        if chunk.stderr:
            for line in chunk.stderr.decode().splitlines():
                print("remote:", line)
    result_json = ''.join(stdout_chunks)

    results = [registry.from_json(line) for line in result_json.splitlines()]
    print("results: ", end='')
    for r in results:
        print(r.value, end=' ')
    print()

    assert len(results) == 10
    for i, result in enumerate(results):
        assert result.status == 'done'
        assert result.key == 0
        assert result.msg is None
        assert result.value == i * 10
    scheduler.close()
def test_worker_one_batch_job(xenon_server, tmpdir):
    """Run one serialized job through the worker as a batch job, wired up
    via stdin/stdout files, and check the decoded result.

    Fix: the original wrote the input with
    ``print(..., file=infile.open('w'))``, which never closes the handle —
    whether the data is flushed to disk before the batch job reads the
    file then depends on garbage collection.  Both files are now accessed
    through context managers so the input is flushed deterministically.
    """
    tmpdir = Path(str(tmpdir))
    infile = tmpdir / 'infile.json'
    outfile = tmpdir / 'outfile.json'

    wf = add(1, 1)
    job = next(iter(wf._workflow.nodes.values()))
    job_message = JobMessage(42, job)
    registry = serial.base()

    # Close (and therefore flush) the input file before submitting the job.
    with infile.open('w') as f:
        print(registry.to_json(job_message), file=f)

    m = Machine(scheduler_adaptor='local')
    scheduler = m.scheduler
    job_config = XenonJobConfig()
    executable, arguments = job_config.command_line()
    job_description = xenon.JobDescription(
        executable=str(executable), arguments=arguments,
        stdin=str(infile), stdout=str(outfile))

    job = scheduler.submit_batch_job(job_description)
    scheduler.wait_until_done(job)

    with outfile.open() as f:
        result_json = [line.strip() for line in f]
    assert len(result_json) == 1

    result = registry.from_json(result_json[0])
    assert result.status == 'done'
    assert result.key == 42
    assert result.msg is None
    assert result.value == 2
    scheduler.close()
objects that contain a ``stdout`` and ``stderr`` field, containing a ``bytes`` object. Here we're only reading from ``stdout``.""" return s.next().stdout.decode().strip() # our input lines input_lines = [ "Zlfgvp aboyr tnf,", "Urnil lrg syrrgvat sebz tenfc,", "Oyhr yvxr oheavat vpr." ] # the job description, make sure you run the script from the examples # directory! job_description = xenon.JobDescription( executable='python3', arguments=['rot13.py']) # start the xenon-grpc server xenon.init() # on the local adaptor with xenon.Scheduler.create( adaptor='slurm', location='ssh://fs0.das5.cs.vu.nl/home/jhidding', certificate_credential=xenon.CertificateCredential( username='******', certfile='/home/johannes/.ssh/id_rsa'), properties={ 'xenon.adaptors.schedulers.ssh.strictHostKeyChecking': 'false' }) as scheduler:
"""The :py:meth:`submit_interactive_job()` method returns a stream of objects that contain a ``stdout`` and ``stderr`` field, containing a ``bytes`` object. Here we're only reading from ``stdout``.""" return s.next().stdout.decode().strip() # our input lines input_lines = [ "Zlfgvp aboyr tnf,", "Urnil lrg syrrgvat sebz tenfc,", "Oyhr yvxr oheavat vpr." ] # the job description, make sure you run the script from the examples # directory! job_description = xenon.JobDescription(executable='python', arguments=['rot13.py'], queue_name='multi') # start the xenon-grpc server xenon.init() # on the local adaptor with xenon.Scheduler.create(adaptor='local') as scheduler: input_queue, input_stream = make_input_stream() # submit an interactive job, this gets us the job-id and a stream yielding # job output from stdout and stderr. job, output_stream = scheduler.submit_interactive_job( description=job_description, stdin_stream=input_stream()) # next we feed the input_queue with messages