def load_config(pack):
    """Utility method loading the YAML configuration from inside a pack file.

    Decompresses the config.yml file from the tarball to a temporary
    file then loads it. Note that decompressing a single file is
    inefficient, thus calling this method can be slow.
    """
    tmp = Path.tempdir(prefix='reprozip_')
    try:
        # Loads info from package
        tar = tarfile.open(str(pack), 'r:*')
        try:
            f = tar.extractfile('METADATA/version')
            version = f.read()
            f.close()
            if version != b'REPROZIP VERSION 1\n':
                logging.critical("Unknown pack format")
                sys.exit(1)
            tar.extract('METADATA/config.yml', path=str(tmp))
        finally:
            # Close the tarball even if the version check or extraction
            # raises; the original leaked the TarFile on that path
            tar.close()
        configfile = tmp / 'METADATA/config.yml'
        ret = reprounzip.common.load_config(configfile, canonical=True)
    finally:
        tmp.rmtree()
    return ret
def graph(args):
    """graph subcommand.

    Reads in the trace sqlite3 database and writes out a graph in
    GraphViz DOT format.
    """
    if args.pack is not None:
        tmp = Path.tempdir(prefix='reprounzip_')
        try:
            tar = tarfile.open(args.pack, 'r:*')
            try:
                f = tar.extractfile('METADATA/version')
                version = f.read()
                f.close()
                if version != b'REPROZIP VERSION 1\n':
                    logging.critical("Unknown pack format")
                    sys.exit(1)
                try:
                    tar.extract('METADATA/config.yml', path=str(tmp))
                    tar.extract('METADATA/trace.sqlite3', path=str(tmp))
                except KeyError as e:
                    logging.critical("Error extracting from pack: %s",
                                     e.args[0])
                    # Abort here: the original fell through and called
                    # generate() on files that were never extracted
                    sys.exit(1)
            finally:
                # Always release the tarball handle (was never closed)
                tar.close()
            generate(Path(args.target[0]), tmp / 'METADATA', args.all_forks)
        finally:
            tmp.rmtree()
    else:
        generate(Path(args.target[0]), Path(args.dir), args.all_forks)
def wrapper(*args, **kwargs):
    """Run the wrapped function from inside a fresh temporary directory."""
    workdir = Path.tempdir(prefix='reprozip_tests_')
    try:
        with workdir.in_dir():
            return f(*args, **kwargs)
    finally:
        # Best-effort cleanup; the test may have made files read-only
        workdir.rmtree(ignore_errors=True)
def prepare_upload(self, files):
    """Prepare a temporary build directory before files are uploaded.

    Exits with an error if the Docker image has not been built yet.
    """
    if 'current_image' not in self.unpacked_info:
        # Bug fix: the original wrote to a bare `stderr` name, which is
        # a NameError; the sibling implementation uses sys.stderr
        sys.stderr.write("Image doesn't exist yet, have you run "
                         "setup/build?\n")
        sys.exit(1)
    self.build_directory = Path.tempdir(prefix='reprozip_build_')
    self.docker_copy = []
def prepare_upload(self, files):
    """Set up a temporary build directory before files are uploaded."""
    have_image = 'current_image' in self.unpacked_info
    if not have_image:
        sys.stderr.write("Image doesn't exist yet, have you run "
                         "setup/build?\n")
        sys.exit(1)
    self.docker_copy = []
    self.build_directory = Path.tempdir(prefix='reprozip_build_')
def download(self, remote_path, local_path):
    """Copy a single file out of the container into *local_path*."""
    # docker cp only accepts a destination directory, not a file
    # name (#4272), so stage through a temporary directory
    staging = Path.tempdir(prefix='reprozip_docker_output_')
    try:
        source = self.container + b':' + remote_path.path
        subprocess.check_call(['docker', 'cp', source, staging.path])
        (staging / remote_path.name).copyfile(local_path)
    finally:
        staging.rmtree()
def setUpClass(cls):
    """Builds a test hierarchy."""
    cls.tmp = Path.tempdir()
    cls.tmp.open('w', 'file').close()
    cls.tmp.open('w', u'r\xE9mi\'s file').close()
    subdir = cls.tmp.mkdir(u'r\xE9pertoire')
    subdir.open('w', 'file').close()
    subdir.mkdir('nested')
    # Symlinks are only available on POSIX; fall back to a plain file
    if issubclass(Path, PosixPath):
        (subdir / 'last').symlink('..')
    else:
        subdir.open('w', 'last').close()
def download(self, remote_path, local_path):
    """Fetch a file from the container, exiting on docker failure."""
    # docker cp can only target a directory, not a file name (#4272)
    outdir = Path.tempdir(prefix='reprozip_docker_output_')
    try:
        source = self.container + b':' + remote_path.path
        if subprocess.call(['docker', 'cp', source, outdir.path]) != 0:
            logging.critical("Can't get output file: %s", remote_path)
            sys.exit(1)
        (outdir / remote_path.name).copyfile(local_path)
    finally:
        outdir.rmtree()
def test_cwd(self):
    """Tests cwd, in_dir."""
    cwd = os.getcwd()
    if os.name == 'nt' and isinstance(cwd, bytes):
        cwd = cwd.decode('mbcs')
    elif os.name != 'nt' and isinstance(cwd, unicode):
        cwd = cwd.encode(sys.getfilesystemencoding())
    self.assertEqual(Path.cwd().path, cwd)
    # Resolve the temporary directory: on platforms where the temp
    # location is behind a symlink (e.g. macOS /tmp), os.getcwd()
    # returns the resolved path and the equality below would fail
    tmp = Path.tempdir().resolve()
    with tmp.in_dir():
        self.assertEqual(Path.cwd(), tmp)
    self.assertNotEqual(Path.cwd(), tmp)
    self.assertTrue(tmp.exists())
    tmp.rmdir()
    self.assertFalse(tmp.exists())
def test_cwd(self):
    """Tests cwd, in_dir."""
    expected = os.getcwd()
    if os.name == 'nt':
        if isinstance(expected, bytes):
            expected = expected.decode('mbcs')
    elif isinstance(expected, unicode):
        expected = expected.encode(sys.getfilesystemencoding())
    self.assertEqual(Path.cwd().path, expected)
    # resolve() so a symlinked temp location still compares equal
    tmp = Path.tempdir().resolve()
    with tmp.in_dir():
        self.assertEqual(Path.cwd(), tmp)
    self.assertNotEqual(Path.cwd(), tmp)
    self.assertTrue(tmp.exists())
    tmp.rmdir()
    self.assertFalse(tmp.exists())
def do_dot_test(self, expected, **kwargs):
    """Generates a DOT graph and compares it to *expected*.

    Passing expected=False asserts that generation raises UsageError.
    """
    graph.Process._id_gen = 0
    workdir = Path.tempdir(prefix='rpz_testgraph_')
    dot_file = workdir / 'graph.dot'
    try:
        try:
            graph.generate(dot_file,
                           self._trace / 'config.yml',
                           self._trace / 'trace.sqlite3',
                           **kwargs)
        except UsageError:
            # Only swallow the error when failure was the expectation
            if expected is not False:
                raise
        else:
            if expected is False:
                self.fail("DOT generation didn't fail as expected")
            with dot_file.open('r') as fp:
                self.assertEqual(expected, fp.read())
    finally:
        workdir.rmtree()
def test_make_dir_writable(self):
    """Tests make_dir_writable with read-only dir."""
    def assert_mode(expected_bits, path):
        # Compare only the owner permission bits
        self.assertEqual(oct((path.stat().st_mode & 0o0700) >> 6),
                         oct(expected_bits))

    tmp = Path.tempdir()
    try:
        target = tmp / 'some' / 'path'
        target.mkdir(parents=True)
        target.chmod(0o555)
        with make_dir_writable(target):
            assert_mode(7, tmp / 'some')
            assert_mode(7, target)
        # Outside the context manager the original mode is restored
        assert_mode(7, tmp / 'some')
        assert_mode(5, target)
    finally:
        (tmp / 'some').chmod(0o755)
        (tmp / 'some' / 'path').chmod(0o755)
        tmp.rmtree()
def do_json_test(self, expected, **kwargs):
    """Generates a JSON graph and compares the parsed result.

    Passing expected=False asserts that generation exits with an error.
    """
    graph.Process._id_gen = 0
    workdir = Path.tempdir(prefix='rpz_testgraph_')
    json_file = workdir / 'graph.json'
    try:
        try:
            graph.generate(json_file,
                           self._trace / 'config.yml',
                           self._trace / 'trace.sqlite3',
                           graph_format='json', **kwargs)
        except SystemExit:
            # Only swallow the exit when failure was the expectation
            if expected is not False:
                raise
        else:
            if expected is False:
                self.fail("JSON generation didn't fail as expected")
            with json_file.open('r', encoding='utf-8') as fp:
                self.assertEqual(expected, json.load(fp))
    finally:
        workdir.rmtree()
def do_vistrails(target):
    """Create a VisTrails workflow that runs the experiment.

    This is called from signals after an experiment has been setup by
    any unpacker.
    """
    record_usage(do_vistrails=True)
    unpacker = signals.unpacker
    dot_vistrails = Path('~/.vistrails').expand_user()

    runs, packages, other_files = load_config(target / 'config.yml',
                                              canonical=True)
    # One CLtools module is written per run
    for i, run in enumerate(runs):
        module_name = write_cltools_module(run, dot_vistrails)

    # Writes VisTrails workflow
    # NOTE(review): only the last run's `module_name`/`run` reach the
    # template below — confirm this single-workflow behavior is intended
    bundle = target / 'vistrails.vt'
    logging.info("Writing VisTrails workflow %s...", bundle)
    vtdir = Path.tempdir(prefix='reprounzip_vistrails_')
    try:
        with vtdir.open('w', 'vistrail',
                        encoding='utf-8', newline='\n') as fp:
            vistrail = VISTRAILS_TEMPLATE
            cmdline = ' '.join(shell_escape(arg) for arg in run['argv'])
            vistrail = vistrail.format(
                date='2014-11-12 15:31:18',
                unpacker=unpacker,
                directory=escape_xml(str(target.absolute())),
                cmdline=escape_xml(cmdline),
                module_name=module_name,
                run=i)
            fp.write(vistrail)

        # Zip the temporary directory into the .vt bundle
        with bundle.open('wb') as fp:
            z = zipfile.ZipFile(fp, 'w')
            with vtdir.in_dir():
                for path in Path('.').recursedir():
                    z.write(str(path))
            z.close()
    finally:
        vtdir.rmtree()
def test_rewrite(self):
    """Rewrites a file in place, converting its newlines."""
    tmp = Path.tempdir()
    try:
        # Create the original file with UNIX newlines
        target = tmp / 'unix.txt'
        with target.open('wb') as fp:
            fp.write(b"Some\ncontent\nin here\n")
        posix = issubclass(Path, PosixPath)
        if posix:
            target.chmod(0o755)

        # Rewrite it in place!
        with target.rewrite(read_newline='\n',
                            write_newline='\r\n') as (r, w):
            w.write(r.read())

        with target.open('rb') as fp:
            self.assertEqual(fp.read(), b"Some\r\ncontent\r\nin here\r\n")
        # Permissions must survive the rewrite on POSIX
        if posix:
            self.assertTrue(target.stat().st_mode & 0o100)
    finally:
        tmp.rmtree()
def do_vistrails(target):
    """Create a VisTrails workflow that runs the experiment.

    This is called from signals after an experiment has been setup by
    any unpacker.
    """
    record_usage(do_vistrails=True)
    unpacker = signals.unpacker
    dot_vistrails = Path('~/.vistrails').expand_user()

    runs, packages, other_files = load_config(target / 'config.yml',
                                              canonical=True)
    # One CLtools module is written per run
    for i, run in enumerate(runs):
        module_name = write_cltools_module(run, dot_vistrails)

    # Writes VisTrails workflow
    # NOTE(review): only the last run's `module_name`/`run` reach the
    # template below — confirm this single-workflow behavior is intended
    bundle = target / 'vistrails.vt'
    logging.info("Writing VisTrails workflow %s...", bundle)
    vtdir = Path.tempdir(prefix='reprounzip_vistrails_')
    try:
        with vtdir.open('w', 'vistrail',
                        encoding='utf-8', newline='\n') as fp:
            vistrail = VISTRAILS_TEMPLATE
            cmdline = ' '.join(shell_escape(arg) for arg in run['argv'])
            vistrail = vistrail.format(date='2014-11-12 15:31:18',
                                       unpacker=unpacker,
                                       directory=escape_xml(
                                           str(target.absolute())),
                                       cmdline=escape_xml(cmdline),
                                       module_name=module_name,
                                       run=i)
            fp.write(vistrail)

        # Zip the temporary directory into the .vt bundle
        with bundle.open('wb') as fp:
            z = zipfile.ZipFile(fp, 'w')
            with vtdir.in_dir():
                for path in Path('.').recursedir():
                    z.write(str(path))
            z.close()
    finally:
        vtdir.rmtree()
def post(self):
    """Handle a trace request from the notebook client.

    Derives a timestamped ``.rpz`` pack name next to the notebook,
    then launches ``reprozip_jupyter trace`` as a subprocess writing
    into a fresh temporary directory.
    """
    self._notebook_file = Path(self.get_body_argument('file'))
    name = self._notebook_file.unicodename
    # Strip the .ipynb extension before appending the timestamp
    if name.endswith('.ipynb'):
        name = name[:-6]
    name = u'%s_%s.rpz' % (name, datetime.now().strftime('%Y%m%d-%H%M%S'))
    self._pack_file = self._notebook_file.parent / name
    self.nbapp.log.info("reprozip: tracing request from client: file=%r",
                        self._notebook_file)
    self._tempdir = Path.tempdir()
    self.nbapp.log.info("reprozip: created temp directory %r",
                        self._tempdir)
    proc = Subprocess([sys.executable, '-c',
                       'from reprozip_jupyter.main import main; main()',
                       'trace', '--dont-save-notebook', '-d',
                       self._tempdir.path, self._notebook_file.path],
                      stdin=subprocess.PIPE)
    # No input is sent to the tracer; close stdin immediately
    proc.stdin.close()
    # _trace_done fires asynchronously when the subprocess exits
    proc.set_exit_callback(self._trace_done)
    self.nbapp.log.info("reprozip: started tracing...")
def functional_tests():
    """End-to-end exercise of the tej CLI against a live destination.

    The destination is read from the TEJ_DESTINATION environment
    variable; COVER optionally prefixes the command for coverage runs.
    """
    destination = os.environ['TEJ_DESTINATION']
    logging.info("Using TEJ_DESTINATION %s" % destination)

    if 'COVER' in os.environ:
        tej = os.environ['COVER'].split(' ') + [
            bytes(Path.cwd() / 'tej/__main__.py'), '-v', '-v']
    else:
        tej = ['tej', '-v', '-v']

    # Clean up leftovers from previous runs
    for path in ('~/.tej', '~/tej 2'):
        path = Path(path).expand_user()
        try:
            path.remove()
        except OSError:
            path.rmtree(ignore_errors=True)
    Path('~/tej 2').expand_user().mkdir()

    logging.info("Creating default queue")
    check_call(tej + ['setup', destination])
    assert Path('~/.tej').expand_user().is_dir()
    RemoteQueue(destination, '~/.tej').cleanup()
    assert not Path('~/.tej').expand_user().exists()

    logging.info("Creating a queue with a link")
    check_call(tej + ['setup', destination,
                      '--queue', 'tej 2/queue', '--make-link', 'tej 2/link'])
    assert Path('~/tej 2/queue').expand_user().is_dir()
    with Path('~/tej 2/link').expand_user().open('r') as fp:
        assert fp.read() == ('tejdir: %s\n' %
                             Path('~/tej 2/queue').expand_user())

    logging.info("Adding links")
    check_call(tej + ['setup', destination, '--only-links',
                      '--queue', '~/tej 2/queue',
                      '--make-link', '~/tej 2/link2'])
    with Path('~/tej 2/link2').expand_user().open('r') as fp:
        assert fp.read() == ('tejdir: %s\n' %
                             Path('~/tej 2/queue').expand_user())
    assert not Path('~/.tej').expand_user().exists()
    check_call(tej + ['setup', destination, '--only-links',
                      '--queue', '~/tej 2/queue', '--make-default-link'])
    with Path('~/.tej').expand_user().open('r') as fp:
        assert fp.read() == ('tejdir: %s\n' %
                             Path('~/tej 2/queue').expand_user())

    logging.info("Calling status for non-existent job")
    output = check_output(tej + ['status', destination,
                                 '--id', 'nonexistent'])
    assert output == b'not found\n'

    logging.info("Submitting a job")
    jobdir = Path.tempdir(prefix='tej-tests-')
    try:
        # The job blocks until ~/tej 2/job1done appears
        with jobdir.open('w', 'start.sh', newline='\n') as fp:
            fp.write('#!/bin/sh\n'
                     '[ -f dir1/data1 ] || exit 1\n'
                     '[ "$(cat dir2/dir3/data2)" = data2 ] || exit 2\n'
                     'echo "stdout here"\n'
                     'while ! [ -e ~/"tej 2/job1done" ]; do\n'
                     '    sleep 1\n'
                     'done\n'
                     'echo "job output" > job1results\n')
        with jobdir.mkdir('dir1').open('wb', 'data1') as fp:
            fp.write(b'data1\n')
        with jobdir.mkdir('dir2').mkdir('dir3').open('w', 'data2') as fp:
            fp.write('data2\n')
        job_id = check_output(tej + ['submit', destination, jobdir.path])
        job_id = job_id.rstrip().decode('ascii')
    finally:
        jobdir.rmtree()

    logging.info("Check status while forgetting job id")
    assert call(tej + ['status', destination]) != 0

    logging.info("Check status of running job")
    output = check_output(tej + ['status', destination, '--id', job_id])
    assert output == b'running\n'

    logging.info("Finish job")
    Path('~/tej 2/job1done').expand_user().open('w').close()
    time.sleep(2)

    logging.info("Check status of finished job")
    output = check_output(tej + ['status', destination, '--id', job_id])
    assert output == b'finished 0\n'

    logging.info("Download job results")
    destdir = Path.tempdir(prefix='tej-tests-')
    try:
        check_call(tej + ['download', destination, '--id', job_id,
                          'job1results', '_stdout'],
                   cwd=destdir.path)
        with destdir.open('r', 'job1results') as fp:
            assert fp.read() == 'job output\n'
        with destdir.open('r', '_stdout') as fp:
            assert fp.read() == 'stdout here\n'
    finally:
        destdir.rmtree()

    logging.info("List jobs")
    output = check_output(tej + ['list', destination])
    assert output == ('%s finished\n' % job_id).encode('ascii')

    logging.info("Kill already finished job")
    output = check_output(tej + ['kill', destination, '--id', job_id])
    assert output == b''

    logging.info("Remove finished job")
    check_call(tej + ['delete', destination, '--id', job_id])
    output = check_output(tej + ['status', destination, '--id', job_id])
    assert output == b'not found\n'

    logging.info("Submit another job")
    jobdir = Path.tempdir(prefix='tej-tests-')
    try:
        with jobdir.open('w', 'start.sh', newline='\n') as fp:
            fp.write('#!/bin/sh\n'
                     'sleep 20\n')
        job_id = make_unique_name()
        check_call(tej + ['submit', destination, '--id', job_id,
                          jobdir.path])
    finally:
        jobdir.rmtree()
    output = check_output(tej + ['status', destination, '--id', job_id])
    assert output == b'running\n'

    logging.info("Remove still running job")
    assert call(tej + ['delete', destination, '--id', job_id]) != 0

    logging.info("Kill running job")
    output = check_output(tej + ['kill', destination, '--id', job_id])
    output = check_output(tej + ['status', destination, '--id', job_id])
    assert re.match(b'finished [0-9]+\n', output)

    logging.info("Remove killed job")
    check_call(tej + ['delete', destination, '--id', job_id])
    output = check_output(tej + ['status', destination, '--id', job_id])
    assert output == b'not found\n'

    jobdir = Path.tempdir(prefix='tej-tests-')
    try:
        logging.info("Start remote command job")
        job_id = check_output(tej + ['submit', destination,
                                     '--script', 'echo "hi"',
                                     jobdir.path])
        job_id = job_id.rstrip().decode('ascii')
    finally:
        jobdir.rmtree()
    time.sleep(2)

    logging.info("Download remote command job output")
    destdir = Path.tempdir(prefix='tej-tests-')
    try:
        check_call(tej + ['download', destination, '--id', job_id,
                          '_stdout'],
                   cwd=destdir.path)
        with destdir.open('r', '_stdout') as fp:
            assert fp.read() == 'hi\n'
    finally:
        destdir.rmtree()

    logging.info("Remove finished job")
    output = check_output(tej + ['delete', destination, '--id', job_id])
    assert output == b''
    output = check_output(tej + ['status', destination, '--id', job_id])
    assert output == b'not found\n'

    RemoteQueue(destination, 'tej 2/link').cleanup()
    assert not Path('~/tej 2/link').expand_user().exists()
    assert not Path('~/tej 2/queue').expand_user().exists()
def setUpClass(cls):
    """Builds a synthetic trace database and config for the graph tests."""
    if sys.version_info < (2, 7, 3):
        raise unittest.SkipTest("Python version not supported by reprozip")
    cls._trace = Path.tempdir(prefix='rpz_testdb_')
    # Event tuples passed to make_database:
    #   ('proc', id, parent, is_thread)
    #   ('open', proc, path, is_directory, mode)
    #   ('exec', proc, file, workingdir, argv-with-NUL-separators)
    #   ('exit', proc)
    conn = make_database([
        ('proc', 0, None, False),
        ('open', 0, "/some/dir", True, FILE_WDIR),
        ('exec', 0, "/bin/sh", "/some/dir", "sh\0script_1\0"),
        ('open', 0, "/usr/share/1_one.pyc", False, FILE_READ),
        ('open', 0, "/some/dir/one", False, FILE_WRITE),
        ('exec', 0, "/usr/bin/python", "/some/dir", "python\0drive.py\0"),
        ('open', 0, "/some/dir/drive.py", False, FILE_READ),
        ('open', 0, "/some/dir/one", False, FILE_READ),
        ('open', 0, "/etc/2_two.cfg", False, FILE_READ),
        ('proc', 1, 0, False),
        ('open', 1, "/some/dir", True, FILE_WDIR),
        ('exec', 1, "/some/dir/experiment", "/some/dir", "experiment\0"),
        ('open', 1, "/some/dir/one", False, FILE_STAT),
        ('open', 1, "/usr/lib/2_one.so", False, FILE_READ),
        ('open', 1, "/some/dir/two", False, FILE_WRITE),
        ('exec', 0, "/usr/bin/wc", "/some/dir", "wc\0out.txt\0"),
        ('exit', 1),
        ('open', 0, "/some/dir/two", False, FILE_READ),
        ('exit', 0),
        ('proc', 2, None, False),
        ('open', 2, "/some/dir", True, FILE_WDIR),
        ('exec', 2, "/bin/sh", "/some/dir", "sh\0script_2\0"),
        ('proc', 3, 2, True),
        ('exit', 3),
        ('proc', 4, 2, False),
        ('open', 4, "/some/dir", True, FILE_WDIR),
        ('exec', 4, "/usr/bin/python", "/some/dir", "python\0-\0"),
        ('open', 4, "/some/dir/one", False, FILE_READ),
        ('open', 4, "/some/dir/thing", False, FILE_WRITE),
        ('exec', 2, "/some/dir/report", "/some/dir", "./report\0-v\0"),
        ('open', 2, "/some/dir/thing", False, FILE_READ),
        ('exit', 4),
        ('open', 2, "/some/dir/result", False, FILE_WRITE),
        ('exit', 2),
    ], cls._trace / 'trace.sqlite3')
    conn.close()
    # Companion configuration matching the trace above
    with (cls._trace / 'config.yml').open('w', encoding='utf-8') as fp:
        fp.write("""\
version: "1.1"
runs:
- id: first run
  architecture: x86_64
  argv: [sh, "script_1"]
  binary: /some/dir/one
  distribution: [debian, '8.0']
  environ: {USER: remram}
  exitcode: 0
  uid: 1000
  gid: 1000
  hostname: test
  workingdir: /user/dir
- architecture: x86_64
  argv: ["sh", "script_2"]
  binary: /some/dir/one
  distribution: [debian, '8.0']
  environ: {USER: remram}
  exitcode: 0
  uid: 1000
  gid: 1000
  hostname: test
  workingdir: /user/dir

inputs_outputs:
- name: important
  path: "/some/dir/one"
  written_by_runs: [0]
  read_by_runs: [1]

packages:
- name: pkg1
  version: "1.0"
  size: 10000
  packfiles: true
  files:
  - "/usr/share/1_one.py"
  - "/usr/share/1_two.py"
  - "/usr/bin/wc"
- name: pkg2
  version: "1.0"
  size: 10000
  packfiles: true
  files:
  - "/usr/lib/2_one.so"
  - "/etc/2_two.cfg"
  meta: {"section": "libs"}
- name: python
  version: "2.7"
  size: 5000000
  packfiles: true
  files:
  - "/usr/bin/python"
  meta: {"section": "python"}
- name: unused
  version: "0.1"
  size: 100
  packfiles: true
  files:
  - "/an/unused/file"

other_files:
- "/bin/sh"
- "/usr/share/1_one.pyc"
- "/some/dir/drive.py"
- "/some/dir/experiment"
- "/some/dir/report"
""")
def do_vistrails(target, pack=None, **kwargs):
    """Create a VisTrails workflow that runs the experiment.

    This is called from signals after an experiment has been setup by
    any unpacker.
    """
    record_usage(do_vistrails=True)
    config = load_config(target / 'config.yml', canonical=True)

    # Writes VisTrails workflow
    bundle = target / 'vistrails.vt'
    logging.info("Writing VisTrails workflow %s...", bundle)
    vtdir = Path.tempdir(prefix='reprounzip_vistrails_')
    ids = IdScope()
    try:
        with vtdir.open('w', 'vistrail',
                        encoding='utf-8', newline='\n') as fp:
            wf = Workflow(fp, ids)

            # Directory module, referring to this directory
            d = wf.add_module('%s:Directory' % rpz_id, rpz_version)
            wf.add_function(d, 'directory',
                            [(directory_sig, str(target.resolve()))])

            connect_from = d

            # One Run module per run, chained through the 'experiment'
            # port so they execute in order
            for i, run in enumerate(config.runs):
                inputs = sorted(n for n, f in iteritems(config.inputs_outputs)
                                if i in f.read_runs)
                outputs = sorted(n for n, f in iteritems(config.inputs_outputs)
                                 if i in f.write_runs)
                ports = itertools.chain((('input', p) for p in inputs),
                                        (('output', p) for p in outputs))

                # Run module
                r = wf.add_module('%s:Run' % rpz_id, rpz_version)
                wf.add_function(r, 'cmdline',
                                [(string_sig,
                                  ' '.join(shell_escape(arg)
                                           for arg in run['argv']))])
                wf.add_function(r, 'run_number', [(integer_sig, i)])

                # Port specs for input/output files
                for type_, name in ports:
                    wf.add_port_spec(r, name, type_, [file_pkg_mod])

                # Draw connection
                wf.connect(connect_from, experiment_sig, 'experiment',
                           r, experiment_sig, 'experiment')
                connect_from = r

            wf.close()

        # Zip the temporary directory into the .vt bundle
        with bundle.open('wb') as fp:
            z = zipfile.ZipFile(fp, 'w')
            with vtdir.in_dir():
                for path in Path('.').recursedir():
                    z.write(str(path))
            z.close()
    finally:
        vtdir.rmtree()
def functional_tests():
    """End-to-end exercise of the tej CLI against a live destination.

    The destination is read from the TEJ_DESTINATION environment
    variable; COVER optionally prefixes the command for coverage runs.
    """
    destination = os.environ['TEJ_DESTINATION']
    logging.info("Using TEJ_DESTINATION %s" % destination)

    if 'COVER' in os.environ:
        tej = os.environ['COVER'].split(' ') + [
            bytes(Path.cwd() / 'tej/__main__.py'), '-v', '-v']
    else:
        tej = ['tej', '-v', '-v']

    # Clean up leftovers from previous runs
    for path in ('~/.tej', '~/tej 2'):
        path = Path(path).expand_user()
        try:
            path.remove()
        except OSError:
            path.rmtree(ignore_errors=True)
    Path('~/tej 2').expand_user().mkdir()

    logging.info("Creating default queue")
    check_call(tej + ['setup', destination])
    assert Path('~/.tej').expand_user().is_dir()
    RemoteQueue(destination, '~/.tej').cleanup()
    assert not Path('~/.tej').expand_user().exists()

    logging.info("Creating a queue with a link")
    check_call(tej + ['setup', destination,
                      '--queue', 'tej 2/queue', '--make-link', 'tej 2/link'])
    assert Path('~/tej 2/queue').expand_user().is_dir()
    with Path('~/tej 2/link').expand_user().open('r') as fp:
        assert fp.read() == ('tejdir: %s\n' %
                             Path('~/tej 2/queue').expand_user())

    logging.info("Adding links")
    check_call(tej + ['setup', destination, '--only-links',
                      '--queue', '~/tej 2/queue',
                      '--make-link', '~/tej 2/link2'])
    with Path('~/tej 2/link2').expand_user().open('r') as fp:
        assert fp.read() == ('tejdir: %s\n' %
                             Path('~/tej 2/queue').expand_user())
    assert not Path('~/.tej').expand_user().exists()
    check_call(tej + ['setup', destination, '--only-links',
                      '--queue', '~/tej 2/queue', '--make-default-link'])
    with Path('~/.tej').expand_user().open('r') as fp:
        assert fp.read() == ('tejdir: %s\n' %
                             Path('~/tej 2/queue').expand_user())

    logging.info("Calling status for non-existent job")
    output = check_output(tej + ['status', destination,
                                 '--id', 'nonexistent'])
    assert output == b'not found\n'

    logging.info("Submitting a job")
    jobdir = Path.tempdir(prefix='tej-tests-')
    try:
        # The job blocks until ~/tej 2/job1done appears
        with jobdir.open('w', 'start.sh', newline='\n') as fp:
            fp.write('#!/bin/sh\n'
                     '[ -f dir1/data1 ] || exit 1\n'
                     '[ "$(cat dir2/dir3/data2)" = data2 ] || exit 2\n'
                     'echo "stdout here"\n'
                     'while ! [ -e ~/"tej 2/job1done" ]; do\n'
                     '    sleep 1\n'
                     'done\n'
                     'echo "job output" > job1results\n')
        with jobdir.mkdir('dir1').open('wb', 'data1') as fp:
            fp.write(b'data1\n')
        with jobdir.mkdir('dir2').mkdir('dir3').open('w', 'data2') as fp:
            fp.write('data2\n')
        job_id = check_output(tej + ['submit', destination, jobdir.path])
        job_id = job_id.rstrip().decode('ascii')
    finally:
        jobdir.rmtree()

    logging.info("Check status while forgetting job id")
    assert call(tej + ['status', destination]) != 0

    logging.info("Check status of running job")
    output = check_output(tej + ['status', destination, '--id', job_id])
    assert output == b'running\n'

    logging.info("Finish job")
    Path('~/tej 2/job1done').expand_user().open('w').close()
    time.sleep(2)

    logging.info("Check status of finished job")
    output = check_output(tej + ['status', destination, '--id', job_id])
    assert output == b'finished 0\n'

    logging.info("Download job results")
    destdir = Path.tempdir(prefix='tej-tests-')
    try:
        check_call(tej + ['download', destination, '--id', job_id,
                          'job1results', '_stdout'],
                   cwd=destdir.path)
        with destdir.open('r', 'job1results') as fp:
            assert fp.read() == 'job output\n'
        with destdir.open('r', '_stdout') as fp:
            assert fp.read() == 'stdout here\n'
    finally:
        destdir.rmtree()

    logging.info("List jobs")
    output = check_output(tej + ['list', destination])
    assert output == ('%s finished\n' % job_id).encode('ascii')

    logging.info("Kill already finished job")
    output = check_output(tej + ['kill', destination, '--id', job_id])
    assert output == b''

    logging.info("Remove finished job")
    check_call(tej + ['delete', destination, '--id', job_id])
    output = check_output(tej + ['status', destination, '--id', job_id])
    assert output == b'not found\n'

    logging.info("Submit another job")
    jobdir = Path.tempdir(prefix='tej-tests-')
    try:
        with jobdir.open('w', 'start.sh', newline='\n') as fp:
            fp.write('#!/bin/sh\n'
                     'sleep 20\n')
        job_id = make_unique_name()
        check_call(tej + ['submit', destination, '--id', job_id,
                          jobdir.path])
    finally:
        jobdir.rmtree()
    output = check_output(tej + ['status', destination, '--id', job_id])
    assert output == b'running\n'

    logging.info("Remove still running job")
    assert call(tej + ['delete', destination, '--id', job_id]) != 0

    logging.info("Kill running job")
    output = check_output(tej + ['kill', destination, '--id', job_id])
    output = check_output(tej + ['status', destination, '--id', job_id])
    assert re.match(b'finished [0-9]+\n', output)

    logging.info("Remove killed job")
    check_call(tej + ['delete', destination, '--id', job_id])
    output = check_output(tej + ['status', destination, '--id', job_id])
    assert output == b'not found\n'

    jobdir = Path.tempdir(prefix='tej-tests-')
    try:
        logging.info("Start remote command job")
        job_id = check_output(tej + ['submit', destination,
                                     '--script', 'echo "hi"',
                                     jobdir.path])
        job_id = job_id.rstrip().decode('ascii')
    finally:
        jobdir.rmtree()
    time.sleep(2)

    logging.info("Download remote command job output")
    destdir = Path.tempdir(prefix='tej-tests-')
    try:
        check_call(tej + ['download', destination, '--id', job_id,
                          '_stdout'],
                   cwd=destdir.path)
        with destdir.open('r', '_stdout') as fp:
            assert fp.read() == 'hi\n'
    finally:
        destdir.rmtree()

    logging.info("Remove finished job")
    output = check_output(tej + ['delete', destination, '--id', job_id])
    assert output == b''
    output = check_output(tej + ['status', destination, '--id', job_id])
    assert output == b'not found\n'

    RemoteQueue(destination, 'tej 2/link').cleanup()
    assert not Path('~/tej 2/link').expand_user().exists()
    assert not Path('~/tej 2/queue').expand_user().exists()
def functional_tests():
    """End-to-end exercise of the tej CLI against a live destination.

    The destination is read from the TEJ_DESTINATION environment
    variable.
    """
    destination = os.environ['TEJ_DESTINATION']
    logging.info("Using TEJ_DESTINATION %s" % destination)

    # Clean up leftovers from previous runs
    for path in ('~/.tej', '~/tej2'):
        path = Path(path).expand_user()
        try:
            path.remove()
        except OSError:
            path.rmtree(ignore_errors=True)
    Path('~/tej2').expand_user().mkdir()

    logging.info("Creating default queue")
    check_call(['tej', 'setup', destination])
    assert Path('~/.tej').expand_user().is_dir()
    Path('~/.tej').expand_user().rmtree()

    logging.info("Creating a queue with a link")
    check_call(['tej', 'setup', destination,
                '--queue', '~/tej2/queue', '--make-link', '~/tej2/link'])
    assert Path('~/tej2/queue').expand_user().is_dir()
    with Path('~/tej2/link').expand_user().open('r') as fp:
        assert fp.read() == 'tejdir: %s\n' % Path('~/tej2/queue').expand_user()

    logging.info("Adding links")
    check_call(['tej', 'setup', destination, '--only-links',
                '--queue', '~/tej2/queue', '--make-link', '~/tej2/link2'])
    with Path('~/tej2/link2').expand_user().open('r') as fp:
        assert fp.read() == 'tejdir: %s\n' % Path('~/tej2/queue').expand_user()
    assert not Path('~/.tej').expand_user().exists()
    check_call(['tej', 'setup', destination, '--only-links',
                '--queue', '~/tej2/queue', '--make-default-link'])
    with Path('~/.tej').expand_user().open('r') as fp:
        assert fp.read() == 'tejdir: %s\n' % Path('~/tej2/queue').expand_user()

    logging.info("Calling status for non-existent job")
    output = check_output(['tej', 'status', destination,
                           '--id', 'nonexistent'])
    assert output == b'not found\n'

    logging.info("Submitting a job")
    jobdir = Path.tempdir(prefix='tej-tests-')
    try:
        # The job blocks until ~/tej2/job1done appears
        with jobdir.open('w', 'start.sh', newline='\n') as fp:
            fp.write('#!/bin/sh\n'
                     '[ -f dir1/data1 ] || exit 1\n'
                     '[ "$(cat dir2/dir3/data2)" = data2 ] || exit 2\n'
                     'echo "stdout here"\n'
                     'while ! [ -e ~/tej2/job1done ]; do\n'
                     '    sleep 1\n'
                     'done\n'
                     'echo "job output" > job1results\n')
        with jobdir.mkdir('dir1').open('wb', 'data1') as fp:
            fp.write(b'data1\n')
        with jobdir.mkdir('dir2').mkdir('dir3').open('w', 'data2') as fp:
            fp.write('data2\n')
        job_id = check_output(['tej', 'submit', destination, jobdir.path])
        job_id = job_id.rstrip().decode('ascii')
    finally:
        jobdir.rmtree()

    logging.info("Check status of running job")
    output = check_output(['tej', 'status', destination, '--id', job_id])
    assert output == b'running\n'

    logging.info("Finish job")
    Path('~/tej2/job1done').expand_user().open('w').close()
    time.sleep(2)

    logging.info("Check status of finished job")
    output = check_output(['tej', 'status', destination, '--id', job_id])
    assert output == b'finished 0\n'

    logging.info("Download job results")
    destdir = Path.tempdir(prefix='tej-tests-')
    try:
        check_call(['tej', 'download', destination, '--id', job_id,
                    'job1results', '_stdout'],
                   cwd=destdir.path)
        with destdir.open('r', 'job1results') as fp:
            assert fp.read() == 'job output\n'
        with destdir.open('r', '_stdout') as fp:
            assert fp.read() == 'stdout here\n'
    finally:
        destdir.rmtree()

    logging.info("List jobs")
    output = check_output(['tej', 'list', destination])
    assert output == ('%s finished\n' % job_id).encode('ascii')

    logging.info("Remove finished job")
    check_call(['tej', 'delete', destination, '--id', job_id])
    output = check_output(['tej', 'status', destination, '--id', job_id])
    assert output == b'not found\n'
def setUpClass(cls):
    """Builds a synthetic trace database and config for the graph tests."""
    cls._trace = Path.tempdir(prefix='rpz_testdb_')
    # Event tuples passed to make_database:
    #   ('proc', id, parent, is_thread)
    #   ('open', proc, path, is_directory, mode)
    #   ('exec', proc, file, workingdir, argv-with-NUL-separators)
    #   ('exit', proc)
    conn = make_database([
        ('proc', 0, None, False),
        ('open', 0, "/some/dir", True, FILE_WDIR),
        ('exec', 0, "/bin/sh", "/some/dir", "sh\0script_1\0"),
        ('open', 0, "/usr/share/1_one.pyc", False, FILE_READ),
        ('open', 0, "/some/dir/one", False, FILE_WRITE),
        ('exec', 0, "/usr/bin/python", "/some/dir", "python\0drive.py\0"),
        ('open', 0, "/some/dir/drive.py", False, FILE_READ),
        ('open', 0, "/some/dir/one", False, FILE_READ),
        ('open', 0, "/etc/2_two.cfg", False, FILE_READ),
        ('proc', 1, 0, False),
        ('open', 1, "/some/dir", True, FILE_WDIR),
        ('exec', 1, "/some/dir/experiment", "/some/dir", "experiment\0"),
        ('open', 1, "/some/dir/one", False, FILE_STAT),
        ('open', 1, "/usr/lib/2_one.so", False, FILE_READ),
        ('open', 1, "/some/dir/two", False, FILE_WRITE),
        ('exec', 0, "/usr/bin/wc", "/some/dir", "wc\0out.txt\0"),
        ('exit', 1),
        ('open', 0, "/some/dir/two", False, FILE_READ),
        ('exit', 0),
        ('proc', 2, None, False),
        ('open', 2, "/some/dir", True, FILE_WDIR),
        ('exec', 2, "/bin/sh", "/some/dir", "sh\0script_2\0"),
        ('proc', 3, 2, True),
        ('exit', 3),
        ('proc', 4, 2, False),
        ('open', 4, "/some/dir", True, FILE_WDIR),
        ('exec', 4, "/usr/bin/python", "/some/dir", "python\0-\0"),
        ('open', 4, "/some/dir/one", False, FILE_READ),
        ('open', 4, "/some/dir/thing", False, FILE_WRITE),
        ('exec', 2, "/some/dir/report", "/some/dir", "./report\0-v\0"),
        ('open', 2, "/some/dir/thing", False, FILE_READ),
        ('exit', 4),
        ('open', 2, "/some/dir/result", False, FILE_WRITE),
        ('exit', 2),
    ], cls._trace / 'trace.sqlite3')
    conn.close()
    # Companion configuration matching the trace above
    with (cls._trace / 'config.yml').open('w', encoding='utf-8') as fp:
        fp.write("""\
version: "1.1"
runs:
- id: first run
  architecture: x86_64
  argv: [sh, "script_1"]
  binary: /some/dir/one
  distribution: [debian, '8.0']
  environ: {USER: remram}
  exitcode: 0
  uid: 1000
  gid: 1000
  hostname: test
  workingdir: /user/dir
- architecture: x86_64
  argv: ["sh", "script_2"]
  binary: /some/dir/one
  distribution: [debian, '8.0']
  environ: {USER: remram}
  exitcode: 0
  uid: 1000
  gid: 1000
  hostname: test
  workingdir: /user/dir

inputs_outputs:
- name: important
  path: "/some/dir/one"
  written_by_runs: [0]
  read_by_runs: [1]

packages:
- name: pkg1
  version: "1.0"
  size: 10000
  packfiles: true
  files:
  - "/usr/share/1_one.py"
  - "/usr/share/1_two.py"
  - "/usr/bin/wc"
- name: pkg2
  version: "1.0"
  size: 10000
  packfiles: true
  files:
  - "/usr/lib/2_one.so"
  - "/etc/2_two.cfg"
  meta: {"section": "libs"}
- name: python
  version: "2.7"
  size: 5000000
  packfiles: true
  files:
  - "/usr/bin/python"
  meta: {"section": "python"}
- name: unused
  version: "0.1"
  size: 100
  packfiles: true
  files:
  - "/an/unused/file"

other_files:
- "/bin/sh"
- "/usr/share/1_one.pyc"
- "/some/dir/drive.py"
- "/some/dir/experiment"
- "/some/dir/report"
""")
def setUp(self):
    """Gives each test its own scratch temporary directory."""
    self.tmpdir = Path.tempdir()