def mk_scheduler(
    self,
    tmp_path: Path,
    rules,
    include_trace_on_error: bool = True,
    max_workunit_verbosity: LogLevel = LogLevel.DEBUG,
) -> SchedulerSession:
    """Creates a SchedulerSession for a Scheduler with the given Rules installed."""
    build_root = tmp_path / "build_root"
    build_root.mkdir(parents=True, exist_ok=True)
    local_execution_root_dir = os.path.realpath(safe_mkdtemp())
    named_caches_dir = os.path.realpath(safe_mkdtemp())
    scheduler = Scheduler(
        ignore_patterns=[],
        use_gitignore=False,
        build_root=build_root.as_posix(),
        local_execution_root_dir=local_execution_root_dir,
        named_caches_dir=named_caches_dir,
        ca_certs_path=None,
        rules=rules,
        union_membership=UnionMembership({}),
        executor=self._executor,
        execution_options=DEFAULT_EXECUTION_OPTIONS,
        local_store_options=DEFAULT_LOCAL_STORE_OPTIONS,
        include_trace_on_error=include_trace_on_error,
    )
    return scheduler.new_session(
        build_id="buildid_for_test",
        max_workunit_level=max_workunit_verbosity,
    )
def setUp(self):
    self.origin = safe_mkdtemp()
    with pushd(self.origin):
        subprocess.check_call(['git', 'init', '--bare'])

    self.gitdir = safe_mkdtemp()
    self.worktree = safe_mkdtemp()

    self.readme_file = os.path.join(self.worktree, 'README')

    with environment_as(GIT_DIR=self.gitdir, GIT_WORK_TREE=self.worktree):
        self.init_repo('depot', self.origin)

        touch(self.readme_file)
        subprocess.check_call(['git', 'add', 'README'])
        subprocess.check_call(['git', 'commit', '-am', 'initial commit with decode -> \x81b'])
        subprocess.check_call(['git', 'tag', 'first'])
        subprocess.check_call(['git', 'push', '--tags', 'depot', 'master'])
        subprocess.check_call(['git', 'branch', '--set-upstream', 'master', 'depot/master'])

        with safe_open(self.readme_file, 'w') as readme:
            readme.write('Hello World.')
        subprocess.check_call(['git', 'commit', '-am', 'Update README.'])

    self.clone2 = safe_mkdtemp()
    with pushd(self.clone2):
        self.init_repo('origin', self.origin)
        subprocess.check_call(['git', 'pull', '--tags', 'origin', 'master:master'])

        with safe_open(os.path.realpath('README'), 'a') as readme:
            readme.write('--')
        subprocess.check_call(['git', 'commit', '-am', 'Update README 2.'])
        subprocess.check_call(['git', 'push', '--tags', 'origin', 'master'])

    self.git = Git(gitdir=self.gitdir, worktree=self.worktree)
def test_mkdtemp_setup_teardown(
    self, tempfile_mkdtemp, dirutil_safe_rmtree, os_getpid, atexit_register
):
    def faux_cleaner():
        pass

    DIR1, DIR2 = "fake_dir1__does_not_exist", "fake_dir2__does_not_exist"

    # Make sure other "pids" are not cleaned.
    dirutil._MKDTEMP_DIRS["fluffypants"].add("yoyo")

    tempfile_mkdtemp.side_effect = (DIR1, DIR2)
    os_getpid.return_value = "unicorn"
    try:
        assert DIR1 == dirutil.safe_mkdtemp(dir="1", cleaner=faux_cleaner)
        assert DIR2 == dirutil.safe_mkdtemp(dir="2", cleaner=faux_cleaner)
        assert "unicorn" in dirutil._MKDTEMP_DIRS
        assert {DIR1, DIR2} == dirutil._MKDTEMP_DIRS["unicorn"]
        dirutil._mkdtemp_atexit_cleaner()
        assert "unicorn" not in dirutil._MKDTEMP_DIRS
        assert {"yoyo"} == dirutil._MKDTEMP_DIRS["fluffypants"]
    finally:
        dirutil._MKDTEMP_DIRS.pop("unicorn", None)
        dirutil._MKDTEMP_DIRS.pop("fluffypants", None)
        dirutil._mkdtemp_unregister_cleaner()

    atexit_register.assert_called_once_with(faux_cleaner)
    assert os_getpid.called
    assert [
        unittest.mock.call(dir="1"),
        unittest.mock.call(dir="2"),
    ] == tempfile_mkdtemp.mock_calls
    assert sorted([unittest.mock.call(DIR1), unittest.mock.call(DIR2)]) == sorted(
        dirutil_safe_rmtree.mock_calls
    )
def test_mkdtemp_setup_teardown(self):
    def faux_cleaner():
        pass

    DIR1, DIR2 = "fake_dir1__does_not_exist", "fake_dir2__does_not_exist"

    self._mox.StubOutWithMock(atexit, "register")
    self._mox.StubOutWithMock(os, "getpid")
    self._mox.StubOutWithMock(tempfile, "mkdtemp")
    self._mox.StubOutWithMock(dirutil, "safe_rmtree")

    atexit.register(faux_cleaner)  # Ensure only called once.
    tempfile.mkdtemp(dir="1").AndReturn(DIR1)
    tempfile.mkdtemp(dir="2").AndReturn(DIR2)
    os.getpid().MultipleTimes().AndReturn("unicorn")
    dirutil.safe_rmtree(DIR1)
    dirutil.safe_rmtree(DIR2)

    # Make sure other "pids" are not cleaned.
    dirutil._MKDTEMP_DIRS["fluffypants"].add("yoyo")

    try:
        self._mox.ReplayAll()
        self.assertEquals(DIR1, dirutil.safe_mkdtemp(dir="1", cleaner=faux_cleaner))
        self.assertEquals(DIR2, dirutil.safe_mkdtemp(dir="2", cleaner=faux_cleaner))
        self.assertIn("unicorn", dirutil._MKDTEMP_DIRS)
        self.assertEquals({DIR1, DIR2}, dirutil._MKDTEMP_DIRS["unicorn"])
        dirutil._mkdtemp_atexit_cleaner()
        self.assertNotIn("unicorn", dirutil._MKDTEMP_DIRS)
        self.assertEquals({"yoyo"}, dirutil._MKDTEMP_DIRS["fluffypants"])
    finally:
        dirutil._MKDTEMP_DIRS.pop("unicorn", None)
        dirutil._MKDTEMP_DIRS.pop("fluffypants", None)
        dirutil._mkdtemp_unregister_cleaner()

    self._mox.VerifyAll()
def mk_scheduler(
    self,
    rules,
    include_trace_on_error: bool = True,
) -> SchedulerSession:
    """Creates a SchedulerSession for a Scheduler with the given Rules installed."""
    work_dir = self._create_work_dir()
    build_root = os.path.join(work_dir, "build_root")
    os.makedirs(build_root)

    local_store_dir = os.path.realpath(safe_mkdtemp())
    local_execution_root_dir = os.path.realpath(safe_mkdtemp())
    named_caches_dir = os.path.realpath(safe_mkdtemp())
    scheduler = Scheduler(
        native=self._native,
        ignore_patterns=[],
        use_gitignore=False,
        build_root=build_root,
        local_store_dir=local_store_dir,
        local_execution_root_dir=local_execution_root_dir,
        named_caches_dir=named_caches_dir,
        ca_certs_path=None,
        rules=rules,
        union_membership=UnionMembership({}),
        executor=self._executor,
        execution_options=DEFAULT_EXECUTION_OPTIONS,
        include_trace_on_error=include_trace_on_error,
    )
    return scheduler.new_session(build_id="buildid_for_test")
def test_mkdtemp_setup_teardown(
    self, tempfile_mkdtemp, dirutil_safe_rmtree, os_getpid, atexit_register
):
    def faux_cleaner():
        pass

    DIR1, DIR2 = "fake_dir1__does_not_exist", "fake_dir2__does_not_exist"

    # Make sure other "pids" are not cleaned.
    dirutil._MKDTEMP_DIRS["fluffypants"].add("yoyo")

    tempfile_mkdtemp.side_effect = (DIR1, DIR2)
    os_getpid.return_value = "unicorn"
    try:
        self.assertEqual(DIR1, dirutil.safe_mkdtemp(dir="1", cleaner=faux_cleaner))
        self.assertEqual(DIR2, dirutil.safe_mkdtemp(dir="2", cleaner=faux_cleaner))
        self.assertIn("unicorn", dirutil._MKDTEMP_DIRS)
        self.assertEqual({DIR1, DIR2}, dirutil._MKDTEMP_DIRS["unicorn"])
        dirutil._mkdtemp_atexit_cleaner()
        self.assertNotIn("unicorn", dirutil._MKDTEMP_DIRS)
        self.assertEqual({"yoyo"}, dirutil._MKDTEMP_DIRS["fluffypants"])
    finally:
        dirutil._MKDTEMP_DIRS.pop("unicorn", None)
        dirutil._MKDTEMP_DIRS.pop("fluffypants", None)
        dirutil._mkdtemp_unregister_cleaner()

    atexit_register.assert_called_once_with(faux_cleaner)
    self.assertTrue(os_getpid.called)
    self.assertEqual(
        [unittest.mock.call(dir="1"), unittest.mock.call(dir="2")], tempfile_mkdtemp.mock_calls
    )
    self.assertEqual(
        sorted([unittest.mock.call(DIR1), unittest.mock.call(DIR2)]),
        sorted(dirutil_safe_rmtree.mock_calls),
    )
def test_mkdtemp_setup_teardown(self, tempfile_mkdtemp, dirutil_safe_rmtree, os_getpid, atexit_register):
    def faux_cleaner():
        pass

    DIR1, DIR2 = 'fake_dir1__does_not_exist', 'fake_dir2__does_not_exist'

    # Make sure other "pids" are not cleaned.
    dirutil._MKDTEMP_DIRS['fluffypants'].add('yoyo')

    tempfile_mkdtemp.side_effect = (DIR1, DIR2)
    os_getpid.return_value = 'unicorn'
    try:
        self.assertEquals(DIR1, dirutil.safe_mkdtemp(dir='1', cleaner=faux_cleaner))
        self.assertEquals(DIR2, dirutil.safe_mkdtemp(dir='2', cleaner=faux_cleaner))
        self.assertIn('unicorn', dirutil._MKDTEMP_DIRS)
        self.assertEquals({DIR1, DIR2}, dirutil._MKDTEMP_DIRS['unicorn'])
        dirutil._mkdtemp_atexit_cleaner()
        self.assertNotIn('unicorn', dirutil._MKDTEMP_DIRS)
        self.assertEquals({'yoyo'}, dirutil._MKDTEMP_DIRS['fluffypants'])
    finally:
        dirutil._MKDTEMP_DIRS.pop('unicorn', None)
        dirutil._MKDTEMP_DIRS.pop('fluffypants', None)
        dirutil._mkdtemp_unregister_cleaner()

    atexit_register.assert_called_once_with(faux_cleaner)
    self.assertTrue(os_getpid.called)
    self.assertEqual([mock.call(dir='1'), mock.call(dir='2')], tempfile_mkdtemp.mock_calls)
    self.assertEqual([mock.call(DIR1), mock.call(DIR2)], dirutil_safe_rmtree.mock_calls)
def test_mkdtemp_setup_teardown(self, tempfile_mkdtemp, dirutil_safe_rmtree, os_getpid, atexit_register):
    def faux_cleaner():
        pass

    DIR1, DIR2 = 'fake_dir1__does_not_exist', 'fake_dir2__does_not_exist'

    # Make sure other "pids" are not cleaned.
    dirutil._MKDTEMP_DIRS['fluffypants'].add('yoyo')

    tempfile_mkdtemp.side_effect = (DIR1, DIR2)
    os_getpid.return_value = 'unicorn'
    try:
        self.assertEquals(DIR1, dirutil.safe_mkdtemp(dir='1', cleaner=faux_cleaner))
        self.assertEquals(DIR2, dirutil.safe_mkdtemp(dir='2', cleaner=faux_cleaner))
        self.assertIn('unicorn', dirutil._MKDTEMP_DIRS)
        self.assertEquals({DIR1, DIR2}, dirutil._MKDTEMP_DIRS['unicorn'])
        dirutil._mkdtemp_atexit_cleaner()
        self.assertNotIn('unicorn', dirutil._MKDTEMP_DIRS)
        self.assertEquals({'yoyo'}, dirutil._MKDTEMP_DIRS['fluffypants'])
    finally:
        dirutil._MKDTEMP_DIRS.pop('unicorn', None)
        dirutil._MKDTEMP_DIRS.pop('fluffypants', None)
        dirutil._mkdtemp_unregister_cleaner()

    atexit_register.assert_called_once_with(faux_cleaner)
    self.assertTrue(os_getpid.called)
    self.assertEqual([mock.call(dir='1'), mock.call(dir='2')], tempfile_mkdtemp.mock_calls)
    self.assertEqual([mock.call(DIR1), mock.call(DIR2)], dirutil_safe_rmtree.mock_calls)
def test_mkdtemp_setup_teardown(self):
    def faux_cleaner():
        pass

    DIR1, DIR2 = 'fake_dir1__does_not_exist', 'fake_dir2__does_not_exist'

    self._mox.StubOutWithMock(atexit, 'register')
    self._mox.StubOutWithMock(os, 'getpid')
    self._mox.StubOutWithMock(tempfile, 'mkdtemp')
    self._mox.StubOutWithMock(dirutil, 'safe_rmtree')

    atexit.register(faux_cleaner)  # Ensure only called once.
    tempfile.mkdtemp(dir='1').AndReturn(DIR1)
    tempfile.mkdtemp(dir='2').AndReturn(DIR2)
    os.getpid().MultipleTimes().AndReturn('unicorn')
    dirutil.safe_rmtree(DIR1)
    dirutil.safe_rmtree(DIR2)

    # Make sure other "pids" are not cleaned.
    dirutil._MKDTEMP_DIRS['fluffypants'].add('yoyo')

    try:
        self._mox.ReplayAll()
        self.assertEquals(DIR1, dirutil.safe_mkdtemp(dir='1', cleaner=faux_cleaner))
        self.assertEquals(DIR2, dirutil.safe_mkdtemp(dir='2', cleaner=faux_cleaner))
        self.assertIn('unicorn', dirutil._MKDTEMP_DIRS)
        self.assertEquals({DIR1, DIR2}, dirutil._MKDTEMP_DIRS['unicorn'])
        dirutil._mkdtemp_atexit_cleaner()
        self.assertNotIn('unicorn', dirutil._MKDTEMP_DIRS)
        self.assertEquals({'yoyo'}, dirutil._MKDTEMP_DIRS['fluffypants'])
    finally:
        dirutil._MKDTEMP_DIRS.pop('unicorn', None)
        dirutil._MKDTEMP_DIRS.pop('fluffypants', None)
        dirutil._mkdtemp_unregister_cleaner()

    self._mox.VerifyAll()
def test_mkdtemp_setup_teardown(self):
    def faux_cleaner():
        pass

    DIR1, DIR2 = 'fake_dir1__does_not_exist', 'fake_dir2__does_not_exist'

    self._mox.StubOutWithMock(atexit, 'register')
    self._mox.StubOutWithMock(os, 'getpid')
    self._mox.StubOutWithMock(tempfile, 'mkdtemp')
    self._mox.StubOutWithMock(dirutil, 'safe_rmtree')

    atexit.register(faux_cleaner)  # Ensure only called once.
    tempfile.mkdtemp(dir='1').AndReturn(DIR1)
    tempfile.mkdtemp(dir='2').AndReturn(DIR2)
    os.getpid().MultipleTimes().AndReturn('unicorn')
    dirutil.safe_rmtree(DIR1)
    dirutil.safe_rmtree(DIR2)

    # Make sure other "pids" are not cleaned.
    dirutil._MKDTEMP_DIRS['fluffypants'].add('yoyo')

    try:
        self._mox.ReplayAll()
        self.assertEquals(DIR1, dirutil.safe_mkdtemp(dir='1', cleaner=faux_cleaner))
        self.assertEquals(DIR2, dirutil.safe_mkdtemp(dir='2', cleaner=faux_cleaner))
        self.assertIn('unicorn', dirutil._MKDTEMP_DIRS)
        self.assertEquals({DIR1, DIR2}, dirutil._MKDTEMP_DIRS['unicorn'])
        dirutil._mkdtemp_atexit_cleaner()
        self.assertNotIn('unicorn', dirutil._MKDTEMP_DIRS)
        self.assertEquals({'yoyo'}, dirutil._MKDTEMP_DIRS['fluffypants'])
    finally:
        dirutil._MKDTEMP_DIRS.pop('unicorn', None)
        dirutil._MKDTEMP_DIRS.pop('fluffypants', None)
        dirutil._mkdtemp_unregister_cleaner()

    self._mox.VerifyAll()
def setUp(self):
    self.origin = safe_mkdtemp()
    with pushd(self.origin):
        subprocess.check_call(['git', 'init', '--bare'])

    self.gitdir = safe_mkdtemp()
    self.worktree = safe_mkdtemp()

    self.readme_file = os.path.join(self.worktree, 'README')

    with environment_as(GIT_DIR=self.gitdir, GIT_WORK_TREE=self.worktree):
        self.init_repo('depot', self.origin)

        touch(self.readme_file)
        subprocess.check_call(['git', 'add', 'README'])
        safe_mkdir(os.path.join(self.worktree, 'dir'))
        with open(os.path.join(self.worktree, 'dir', 'f'), 'w') as f:
            f.write("file in subdir")

        # Make some symlinks
        os.symlink('f', os.path.join(self.worktree, 'dir', 'relative-symlink'))
        os.symlink('no-such-file', os.path.join(self.worktree, 'dir', 'relative-nonexistent'))
        os.symlink('dir/f', os.path.join(self.worktree, 'dir', 'not-absolute\u2764'))
        os.symlink('../README', os.path.join(self.worktree, 'dir', 'relative-dotdot'))
        os.symlink('dir', os.path.join(self.worktree, 'link-to-dir'))
        os.symlink('README/f', os.path.join(self.worktree, 'not-a-dir'))
        os.symlink('loop1', os.path.join(self.worktree, 'loop2'))
        os.symlink('loop2', os.path.join(self.worktree, 'loop1'))

        subprocess.check_call(['git', 'add', 'README', 'dir', 'loop1', 'loop2',
                               'link-to-dir', 'not-a-dir'])
        subprocess.check_call(['git', 'commit', '-am', 'initial commit with decode -> \x81b'])
        self.initial_rev = subprocess.check_output(['git', 'rev-parse', 'HEAD']).strip()
        subprocess.check_call(['git', 'tag', 'first'])
        subprocess.check_call(['git', 'push', '--tags', 'depot', 'master'])
        subprocess.check_call(['git', 'branch', '--set-upstream-to', 'depot/master'])

        with safe_open(self.readme_file, 'w') as readme:
            readme.write('Hello World.\u2764')
        subprocess.check_call(['git', 'commit', '-am', 'Update README.'])
        self.current_rev = subprocess.check_output(['git', 'rev-parse', 'HEAD']).strip()

    self.clone2 = safe_mkdtemp()
    with pushd(self.clone2):
        self.init_repo('origin', self.origin)
        subprocess.check_call(['git', 'pull', '--tags', 'origin', 'master:master'])

        with safe_open(os.path.realpath('README'), 'a') as readme:
            readme.write('--')
        subprocess.check_call(['git', 'commit', '-am', 'Update README 2.'])
        subprocess.check_call(['git', 'push', '--tags', 'origin', 'master'])

    self.git = Git(gitdir=self.gitdir, worktree=self.worktree)
def launch_repl(self, targets):
    temp_dir = safe_mkdtemp()
    node_paths = self.context.products.get_data(NodePaths)
    package_json_path = os.path.join(temp_dir, 'package.json')
    package = {
        'name': self.SYNTHETIC_NODE_TARGET_NAME,
        'version': '0.0.0',
        'dependencies': {
            dep.package_name: self.render_npm_package_dependency(node_paths, dep)
            for dep in targets
        }
    }
    # json.dump emits text, so the file must be opened in text mode.
    with open(package_json_path, 'w') as fp:
        json.dump(package, fp, indent=2)

    args = self.get_passthru_args()
    node_repl = self.node_distribution.node_command(args=args)

    with pushd(temp_dir):
        result, npm_install = self.execute_npm(args=['install'],
                                               workunit_name=self.SYNTHETIC_NODE_TARGET_NAME)
        if result != 0:
            raise TaskError('npm install of synthetic REPL module failed:\n'
                            '\t{} failed with exit code {}'.format(npm_install, result))

        repl_session = node_repl.run()
        repl_session.wait()
def test_bundled_classpath(self):
    """This creates the following classpath

      basedir/libs/A.jar:basedir/libs/resources
    """
    RESOURCES = 'resources'
    LIB_DIR = 'libs'
    JAR_FILE = 'A.jar'

    basedir = safe_mkdtemp()
    lib_dir = os.path.join(basedir, LIB_DIR)
    resource_dir = os.path.join(lib_dir, RESOURCES)
    jar_file = os.path.join(lib_dir, JAR_FILE)

    for dir in (lib_dir, resource_dir):
        safe_mkdir(dir)
    touch(jar_file)

    classpath = [jar_file, resource_dir]

    with bundled_classpath(classpath) as bundled_cp:
        self.assertEquals(1, len(bundled_cp))
        bundled_jar = bundled_cp[0]
        self.assertTrue(os.path.exists(bundled_jar))

        with open_zip(bundled_jar) as synthetic_jar:
            self.assertListEqual([Manifest.PATH], synthetic_jar.namelist())
            # manifest should contain the absolute path of both jar and resource directory
            self.assertEquals('{}: {} {}/\n'.format(Manifest.CLASS_PATH,
                                                    os.path.realpath(jar_file),
                                                    os.path.realpath(resource_dir)),
                              synthetic_jar.read(Manifest.PATH).replace('\n ', ''))

    safe_rmtree(resource_dir)
def _create_config(self, config):
    with open(os.path.join(safe_mkdtemp(), 'test_config.ini'), 'w') as fp:
        for section, options in config.items():
            fp.write('[{}]\n'.format(section))
            for key, value in options.items():
                fp.write('{}: {}\n'.format(key, value))
    return Config.load(configpaths=[fp.name])
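# A minimal usage sketch of the helper above (section and option names are made up, and it
# assumes Config exposes a get(section, option) accessor): each top-level dict key becomes an
# INI [section] header and each nested key/value becomes a "key: value" line in the temporary
# test_config.ini that Config.load then reads back.
def test_config_roundtrip(self):
    config = self._create_config({
        'GLOBAL': {'pants_workdir': '/tmp/workdir'},
        'test': {'timeouts': 'true'},
    })
    self.assertEqual('/tmp/workdir', config.get('GLOBAL', 'pants_workdir'))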
def mk_scheduler(
    self,
    rules=None,
    union_rules=None,
    project_tree=None,
    work_dir=None,
    include_trace_on_error=True,
    should_report_workunits=False,
    execution_options=None,
):
    """Creates a SchedulerSession for a Scheduler with the given Rules installed."""
    rules = rules or []
    work_dir = work_dir or self._create_work_dir()
    project_tree = project_tree or self.mk_fs_tree(work_dir=work_dir)
    local_store_dir = os.path.realpath(safe_mkdtemp())
    if execution_options is not None:
        eo = asdict(DEFAULT_EXECUTION_OPTIONS)
        eo.update(execution_options)
        execution_options = ExecutionOptions(**eo)
    scheduler = Scheduler(
        native=self._native,
        ignore_patterns=project_tree.ignore_patterns,
        use_gitignore=False,
        build_root=project_tree.build_root,
        local_store_dir=local_store_dir,
        rules=rules,
        union_rules=union_rules,
        execution_options=execution_options or DEFAULT_EXECUTION_OPTIONS,
        include_trace_on_error=include_trace_on_error,
    )
    return scheduler.new_session(
        zipkin_trace_v2=False,
        build_id="buildid_for_test",
        should_report_workunits=should_report_workunits,
    )
def fake_interpreter(python_tag: str, abi_tag: str, version: Tuple[int, int, int]):
    interpreter_dir = safe_mkdtemp()
    binary = os.path.join(interpreter_dir, "python")
    values = dict(
        binary=binary,
        python_tag=python_tag,
        abi_tag=abi_tag,
        platform_tag="",
        version=version,
        supported_tags=[],
        env_markers={},
    )
    id_str = json.dumps(values)
    with open(binary, "w") as fp:
        fp.write(
            dedent(
                f"""
                #!{PythonInterpreter.get().binary}
                from __future__ import print_function
                print({id_str!r})
                """
            ).strip()
        )
    chmod_plus_x(binary)
    return PythonInterpreter.from_binary(binary)
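# A minimal usage sketch of the helper above (tag and version values are arbitrary examples,
# and it assumes PythonInterpreter exposes the parsed version tuple): the stub "python" binary
# just prints a canned identity blob, so the returned interpreter reports exactly the tags we
# faked without needing a real CPython 3.7 installed.
fake_py37 = fake_interpreter(python_tag="cp37", abi_tag="cp37m", version=(3, 7, 10))
assert fake_py37.version == (3, 7, 10)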
def launch_repl(self, targets):
    temp_dir = safe_mkdtemp()
    node_paths = self.context.products.get_data(NodePaths)
    package_json_path = os.path.join(temp_dir, 'package.json')
    package = {
        'name': self.SYNTHETIC_NODE_TARGET_NAME,
        'version': '0.0.0',
        'dependencies': {
            dep.package_name: self.render_npm_package_dependency(node_paths, dep)
            for dep in targets
        }
    }
    # json.dump emits text, so the file must be opened in text mode.
    with open(package_json_path, 'w') as fp:
        json.dump(package, fp, indent=2)

    args = self.get_passthru_args()
    node_repl = self.node_distribution.node_command(args=args)

    with pushd(temp_dir):
        result, npm_install = self.execute_npm(
            args=['install'], workunit_name=self.SYNTHETIC_NODE_TARGET_NAME)
        if result != 0:
            raise TaskError(
                'npm install of synthetic REPL module failed:\n'
                '\t{} failed with exit code {}'.format(npm_install, result))

        repl_session = node_repl.run()
        repl_session.wait()
def safe_classpath(classpath, synthetic_jar_dir, custom_name=None):
    """Bundles classpath into one synthetic jar that includes original classpath in its manifest.

    This is to ensure classpath length never exceeds platform ARG_MAX.

    :param list classpath: Classpath to be bundled.
    :param string synthetic_jar_dir: directory to store the synthetic jar, if `None` a temp
      directory will be provided and cleaned up upon process exit. Otherwise the synthetic jar
      will remain in the supplied directory, only for debugging purposes.
    :param custom_name: filename of the synthetic jar to be created.

    :returns: A classpath (singleton list with just the synthetic jar).
    :rtype: list of strings
    """
    if synthetic_jar_dir:
        safe_mkdir(synthetic_jar_dir)
    else:
        synthetic_jar_dir = safe_mkdtemp()

    # Quote the paths so that, if they contain reserved characters, they can be safely passed
    # to the JVM classloader.
    bundled_classpath = map(urllib.quote, relativize_classpath(classpath, synthetic_jar_dir))

    manifest = Manifest()
    manifest.addentry(Manifest.CLASS_PATH, ' '.join(bundled_classpath))

    with temporary_file(root_dir=synthetic_jar_dir, cleanup=False, suffix='.jar') as jar_file:
        with open_zip(jar_file, mode='w', compression=ZIP_STORED) as jar:
            jar.writestr(Manifest.PATH, manifest.contents())
        if custom_name:
            custom_path = os.path.join(synthetic_jar_dir, custom_name)
            safe_concurrent_rename(jar_file.name, custom_path)
            return [custom_path]
        else:
            return [jar_file.name]
def safe_classpath(classpath, synthetic_jar_dir, custom_name=None):
    """Bundles classpath into one synthetic jar that includes original classpath in its manifest.

    This is to ensure classpath length never exceeds platform ARG_MAX.

    :param list classpath: Classpath to be bundled.
    :param string synthetic_jar_dir: directory to store the synthetic jar, if `None` a temp
      directory will be provided and cleaned up upon process exit. Otherwise the synthetic jar
      will remain in the supplied directory, only for debugging purposes.
    :param custom_name: filename of the synthetic jar to be created.

    :returns: A classpath (singleton list with just the synthetic jar).
    :rtype: list of strings
    """
    if synthetic_jar_dir:
        safe_mkdir(synthetic_jar_dir)
    else:
        synthetic_jar_dir = safe_mkdtemp()

    # Quote the paths so that, if they contain reserved characters, they can be safely passed
    # to the JVM classloader.
    bundled_classpath = [parse.quote(cp) for cp in relativize_classpath(classpath, synthetic_jar_dir)]

    manifest = Manifest()
    manifest.addentry(Manifest.CLASS_PATH, ' '.join(bundled_classpath))

    with temporary_file(root_dir=synthetic_jar_dir, cleanup=False, suffix='.jar') as jar_file:
        with open_zip(jar_file, mode='w', compression=ZIP_STORED) as jar:
            jar.writestr(Manifest.PATH, manifest.contents())
        if custom_name:
            custom_path = os.path.join(synthetic_jar_dir, custom_name)
            safe_concurrent_rename(jar_file.name, custom_path)
            return [custom_path]
        else:
            return [jar_file.name]
def safe_classpath(classpath, synthetic_jar_dir):
    """Bundles classpath into one synthetic jar that includes original classpath in its manifest.

    This is to ensure classpath length never exceeds platform ARG_MAX.

    :param list classpath: Classpath to be bundled.
    :param string synthetic_jar_dir: directory to store the synthetic jar, if `None` a temp
      directory will be provided and cleaned up upon process exit. Otherwise the synthetic jar
      will remain in the supplied directory, only for debugging purposes.

    :returns: A classpath (singleton list with just the synthetic jar).
    :rtype: list of strings
    """
    if synthetic_jar_dir:
        safe_mkdir(synthetic_jar_dir)
    else:
        synthetic_jar_dir = safe_mkdtemp()

    bundled_classpath = relativize_classpath(classpath, synthetic_jar_dir)

    manifest = Manifest()
    manifest.addentry(Manifest.CLASS_PATH, ' '.join(bundled_classpath))

    with temporary_file(root_dir=synthetic_jar_dir, cleanup=False, suffix='.jar') as jar_file:
        with open_zip(jar_file, mode='w', compression=ZIP_STORED) as jar:
            jar.writestr(Manifest.PATH, manifest.contents())
        return [jar_file.name]
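# A minimal usage sketch of safe_classpath above (the paths and main class are made-up
# examples): the long original classpath collapses to a single synthetic jar whose manifest
# Class-Path points back at the real entries, keeping the java command line well under ARG_MAX.
original_cp = ['/repo/.pants.d/ivy/jars/guava.jar', '/repo/dist/classes/']
short_cp = safe_classpath(original_cp, synthetic_jar_dir=None)  # e.g. ['/tmp/<random>/tmpXYZ.jar']
java_cmd = ['java', '-cp', ':'.join(short_cp), 'com.example.Main']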
def execute_antlr_test(self, expected_package, target_workdir_fun=None):
    target = self.get_antlr_target()
    context = self.create_context()
    task = self.prepare_execute(context)
    target_workdir_fun = target_workdir_fun or (lambda x: safe_mkdtemp(dir=x))
    target_workdir = target_workdir_fun(task.workdir)

    # Generate code, then create a synthetic target.
    task.execute_codegen(target, target_workdir)
    syn_target = task._inject_synthetic_target(target, target_workdir)

    actual_sources = [s for s in Fileset.rglobs('*.java', root=target_workdir)]
    expected_sources = syn_target.sources_relative_to_source_root()
    self.assertEquals(set(expected_sources), set(actual_sources))

    # Check that the synthetic target has a valid source root and the generated sources have
    # the expected java package.
    def get_package(path):
        with open(path) as fp:
            for line in fp:
                match = self.PACKAGE_RE.match(line)
                if match:
                    return match.group('package_name')
            return None

    for source in syn_target.sources_relative_to_source_root():
        source_path = os.path.join(target_workdir, source)
        self.assertTrue(os.path.isfile(source_path),
                        "{0} is not the source root for {1}".format(target_workdir, source))
        self.assertEqual(expected_package, get_package(source_path))

    self.assertIn(syn_target, context.targets())

    return syn_target
def _generate_antlr_requirement(self, library):
    antlr_builder = functools.partial(
        PythonAntlrBuilder,
        ivy_bootstrapper=self._ivy_bootstrapper,
        workdir=safe_mkdtemp(dir=self.path(), prefix="antlr."),
    )
    return self._generate_requirement(library, antlr_builder)
def _do_test_duplication(self, targets, allow_dups, should_fail):
    task = self._create_dummy_task(target_roots=targets, strategy="isolated", allow_dups=allow_dups)
    target_workdirs = {t: safe_mkdtemp(dir=task.workdir) for t in targets}
    syn_targets = []

    # Generate and inject code for each target.
    def execute():
        for target in targets:
            target_workdir = target_workdirs[target]
            task.execute_codegen(target, target_workdir)
            task._handle_duplicate_sources(target, target_workdir)
            syn_targets.append(task._inject_synthetic_target(target, target_workdir))

    if should_fail:
        # If we're expected to fail, validate the resulting message.
        with self.assertRaises(SimpleCodegenTask.DuplicateSourceError) as cm:
            execute()
        should_contain = ["org/pantsbuild/example/ParentClass"]
        should_not_contain = ["org/pantsbuild/example/ChildClass"]
        message = str(cm.exception)
        for item in should_contain:
            self.assertTrue(item in message, 'Error message should contain "{}".'.format(item))
        for item in should_not_contain:
            self.assertFalse(item in message, 'Error message should not contain "{}".'.format(item))
    else:
        # Execute successfully.
        execute()

    return tuple(syn_targets)
def _do_test_duplication(self, targets, allow_dups, should_fail):
    task = self._create_dummy_task(target_roots=targets, allow_dups=allow_dups)
    target_workdirs = {t: safe_mkdtemp(dir=task.workdir) for t in targets}
    syn_targets = []

    # Generate and inject code for each target.
    def execute():
        for target in targets:
            target_workdir = target_workdirs[target]
            task.execute_codegen(target, target_workdir)
            task._handle_duplicate_sources(target, target_workdir)
            fingerprint = CacheKey("test", target.invalidation_hash())
            syn_targets.append(task._inject_synthetic_target(target, target_workdir, fingerprint))

    if should_fail:
        # If we're expected to fail, validate the resulting message.
        with self.assertRaises(SimpleCodegenTask.DuplicateSourceError) as cm:
            execute()
        should_contain = ['org/pantsbuild/example/ParentClass']
        should_not_contain = ['org/pantsbuild/example/ChildClass']
        message = str(cm.exception)
        for item in should_contain:
            self.assertTrue(item in message, 'Error message should contain "{}".'.format(item))
        for item in should_not_contain:
            self.assertFalse(item in message, 'Error message should not contain "{}".'.format(item))
    else:
        # Execute successfully.
        execute()

    return tuple(syn_targets)
def setUp(self):
    self.work_dir = safe_mkdtemp()
    self.addCleanup(safe_rmtree, self.work_dir)
    self.build_root = os.path.join(self.work_dir, 'build_root')
    shutil.copytree(os.path.join(os.path.dirname(__file__), 'examples/mapper_test'),
                    self.build_root)

    subjects = Subjects()
    self._goal = 'list'
    symbol_table_cls = TargetTable

    project_tree_key = subjects.put(FileSystemProjectTree(self.build_root))
    address_mapper_key = subjects.put(
        AddressMapper(symbol_table_cls=symbol_table_cls,
                      parser_cls=JsonParser,
                      build_pattern=r'.+\.BUILD.json$'))
    tasks = (
        create_fs_tasks(project_tree_key) +
        create_graph_tasks(address_mapper_key, symbol_table_cls)
    )
    self.scheduler = LocalScheduler({self._goal: UnhydratedStruct},
                                    tasks,
                                    subjects,
                                    symbol_table_cls)

    self.a_b = Address.parse('a/b')
    self.a_b_target = Target(name='b',
                             dependencies=['//d:e'],
                             configurations=['//a', Struct(embedded='yes')],
                             type_alias='target')
def _generate_thrift_requirement(self, library):
    thrift_builder = functools.partial(
        PythonThriftBuilder,
        thrift_binary_factory=self._thrift_binary_factory,
        workdir=safe_mkdtemp(dir=self.path(), prefix="thrift."),
    )
    return self._generate_requirement(library, thrift_builder)
def execute_antlr_test(self, expected_package, target_workdir_fun=None):
    target = self.get_antlr_target()
    context = self.create_context()
    task = self.prepare_execute(context)
    target_workdir_fun = target_workdir_fun or (lambda x: safe_mkdtemp(dir=x))
    # Do not use task.workdir here, because when calculating the hash for the synthetic target
    # we need source paths that are stable relative to the build root.
    target_workdir = target_workdir_fun(self.build_root)

    # Generate code, then create a synthetic target.
    task.execute_codegen(target, target_workdir)
    syn_target = task._inject_synthetic_target(target, target_workdir)

    actual_sources = [s for s in Fileset.rglobs('*.java', root=target_workdir)]
    expected_sources = syn_target.sources_relative_to_source_root()
    self.assertEquals(set(expected_sources), set(actual_sources))

    # Check that the synthetic target has a valid source root and the generated sources have
    # the expected java package.
    def get_package(path):
        with open(path) as fp:
            for line in fp:
                match = self.PACKAGE_RE.match(line)
                if match:
                    return match.group('package_name')
            return None

    for source in syn_target.sources_relative_to_source_root():
        source_path = os.path.join(target_workdir, source)
        self.assertTrue(os.path.isfile(source_path),
                        "{0} is not the source root for {1}".format(target_workdir, source))
        self.assertEqual(expected_package, get_package(source_path))

    self.assertIn(syn_target, context.targets())

    # Check that the output file locations match the package
    if expected_package is not None:
        expected_path_prefix = expected_package.replace('.', os.path.sep) + os.path.sep
        for source in syn_target.sources_relative_to_source_root():
            self.assertTrue(source.startswith(expected_path_prefix),
                            "{0} does not start with {1}".format(source, expected_path_prefix))

    # Check that empty directories have been removed
    for root, dirs, files in os.walk(target_workdir):
        for d in dirs:
            full_dir = os.path.join(root, d)
            self.assertTrue(os.listdir(full_dir),
                            "Empty directories should have been removed ({0})".format(full_dir))

    return syn_target
def test_custom_manifest_dir(self):
    with self._test_custom_manifest() as (jar, manifest_contents):
        basedir = safe_mkdtemp()
        with safe_open(os.path.join(basedir, "META-INF/MANIFEST.MF"), "wb") as fp:
            fp.write(manifest_contents)
        jar.write(basedir)
def mk_scheduler(
    self,
    rules=None,
    union_rules=None,
    project_tree=None,
    work_dir=None,
    include_trace_on_error=True,
    should_report_workunits=False,
):
    """Creates a SchedulerSession for a Scheduler with the given Rules installed."""
    rules = rules or []
    work_dir = work_dir or self._create_work_dir()
    project_tree = project_tree or self.mk_fs_tree(work_dir=work_dir)
    local_store_dir = os.path.realpath(safe_mkdtemp())
    scheduler = Scheduler(self._native,
                          project_tree,
                          local_store_dir,
                          rules,
                          union_rules,
                          DEFAULT_EXECUTION_OPTIONS,
                          include_trace_on_error=include_trace_on_error)
    return scheduler.new_session(
        zipkin_trace_v2=False,
        build_id="buildid_for_test",
        should_report_workunits=should_report_workunits)
def binary(self):
    """Load and return the path to the native engine binary."""
    lib_name = '{}.so'.format(NATIVE_ENGINE_MODULE)
    lib_path = os.path.join(safe_mkdtemp(), lib_name)
    with closing(pkg_resources.resource_stream(__name__, lib_name)) as input_fp:
        with open(lib_path, 'wb') as output_fp:
            output_fp.write(input_fp.read())
    return lib_path
def run_execute(self, target, recursive=False):
    self.set_options(recursive=recursive, interpreter=[])
    context = self.context(target_roots=[target])
    workdir = safe_mkdtemp(dir=self.build_root)
    setup_py = self.create_task(context, workdir)
    setup_py.run_one = MagicMock()
    setup_py.run_one.return_value = True
    setup_py.execute()
    yield setup_py
def create_python_file(self, file_content):
    if self.file_required:
        tmpdir = safe_mkdtemp()
        with open(os.path.join(tmpdir, 'file.py'), 'wb') as fp:
            fp.write(file_content)
            fp.close()
            return PythonFile.parse(fp.name)
    else:
        return PythonFile.from_statement(file_content)
def bdist(self, source_dir: Path) -> Path:
    """Generates a wheel from the setup.py project at `source_dir` and returns the wheel path."""
    dist_dir = safe_mkdtemp()
    return self._collect_distribution(
        source_dir=source_dir,
        setup_command=["bdist_wheel", "--dist-dir", dist_dir],
        dist_dir=Path(dist_dir),
    )
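# A minimal usage sketch of the bdist helper above, assuming an instance named dist_builder
# exposes it and that the (hypothetical) directory holds a setup.py project: the returned
# Path points at the freshly built wheel in a temporary dist directory.
wheel_path = dist_builder.bdist(Path("testprojects/src/python/example"))
assert wheel_path.suffix == ".whl"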
def create_python_file(self, file_content):
    if self.file_required:
        tmpdir = safe_mkdtemp()
        with open(os.path.join(tmpdir, "file.py"), "w") as fp:
            fp.write(file_content)
            fp.close()
            return PythonFile.parse("file.py", root=tmpdir)
    else:
        return PythonFile.from_statement(file_content)
def __init__(self, workdir, target, root_dir, target_suffix=None):
    self.target = target
    self.suffix = target_suffix or ""
    self.root = root_dir
    self.chroot = RelativeChroot(root_dir, os.path.join(workdir, "codegen"), target.name)
    codegen_root = safe_mkdtemp(dir=self.chroot.path(), prefix="codegen.")
    self.codegen_root = os.path.relpath(codegen_root, self.chroot.path())
    self.created_packages = set()
    self.created_namespace_packages = set()
def __init__(self) -> None:
    """Initialize the context with empty client params.

    This is the "connection uninitialized" state.
    """
    self._lock = threading.Lock()
    self._client_params: InitializeBuildParams | None = None
    self._notify_client: Callable[[BSPNotification], None] | None = None
    self.tempdir: Path = Path(safe_mkdtemp(prefix="bsp"))
def __init__(self, path=None):
    """Initialize the database.

    :param path: database directory location, if `None` a temporary location will be provided
      and cleaned up upon process exit.
    """
    self._path = path or safe_mkdtemp()
    self._env = lmdb.open(self._path,
                          map_size=self.MAX_DATABASE_SIZE,
                          metasync=False,
                          sync=False,
                          map_async=True,
                          writemap=self.USE_SPARSE_FILES)
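# A minimal sketch (not this class's actual read/write API) of how the lmdb environment opened
# above is typically used: keys and values are bytes, and every access goes through a
# transaction obtained from env.begin().
with self._env.begin(write=True) as txn:
    txn.put(b"fingerprint", b"serialized-entry")
with self._env.begin() as txn:
    value = txn.get(b"fingerprint")  # -> b"serialized-entry", or None if the key is absent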
def add_consolidated_bundle(self, context, tgt, files_dict):
    """Add a bundle to the classpath as if it has been consolidated already."""
    consolidated_classpath = context.products.get_data(
        "consolidated_classpath", init_func=ClasspathProducts.init_func(self.pants_workdir)
    )

    # Create a temporary directory under the target id, then dump all files.
    target_dir = os.path.join(self.test_workdir, tgt.id)
    safe_mkdir(target_dir)
    entry_path = safe_mkdtemp(dir=target_dir)
    classpath_dir = safe_mkdtemp(dir=target_dir)
    for rel_path, content in files_dict.items():
        safe_file_dump(os.path.join(entry_path, rel_path), content)

    # Create Jar to mimic consolidate classpath behavior.
    jarpath = os.path.join(classpath_dir, "output-0.jar")
    with self.task.open_jar(jarpath, overwrite=True, compressed=False) as jar:
        jar.write(entry_path)
    consolidated_classpath.add_for_target(tgt, [("default", jarpath)])
def add_to_runtime_classpath(self, context, tgt, files_dict):
    """Creates and adds the given files to the classpath for the given target under a temp path."""
    compile_classpath = context.products.get_data('runtime_classpath', ClasspathProducts)

    # Create a temporary directory under the target id, then dump all files.
    target_dir = os.path.join(self.test_workdir, tgt.id)
    safe_mkdir(target_dir)
    classpath_dir = safe_mkdtemp(dir=target_dir)
    for rel_path, content in files_dict.items():
        safe_file_dump(os.path.join(classpath_dir, rel_path), content)

    # Add to the classpath.
    compile_classpath.add_for_target(tgt, [('default', classpath_dir)])
def add_consolidated_bundle(self, context, tgt, files_dict):
    """Add a bundle to the classpath as if it has been consolidated already."""
    consolidated_classpath = context.products.get_data(
        'consolidated_classpath',
        init_func=ClasspathProducts.init_func(self.pants_workdir)
    )

    # Create a temporary directory under the target id, then dump all files.
    target_dir = os.path.join(self.test_workdir, tgt.id)
    safe_mkdir(target_dir)
    entry_path = safe_mkdtemp(dir=target_dir)
    classpath_dir = safe_mkdtemp(dir=target_dir)
    for rel_path, content in files_dict.items():
        safe_file_dump(os.path.join(entry_path, rel_path), content)

    # Create Jar to mimic consolidate classpath behavior.
    jarpath = os.path.join(classpath_dir, 'output-0.jar')
    with self.task.open_jar(jarpath, overwrite=True, compressed=False) as jar:
        jar.write(entry_path)
    consolidated_classpath.add_for_target(tgt, [('default', jarpath)])
def _extract_tar(self, tar_path, unpack_dir, filter_func=None):
    temp_unpack_dir = safe_mkdtemp()
    with self.context.new_workunit(name='tar-extract'):
        p = Popen(['tar', 'xzf', tar_path, '-C', temp_unpack_dir], stdout=PIPE, stderr=PIPE)
        out, err = p.communicate()
        if p.returncode != 0:
            raise self.TarExtractionError('Error unpacking tar file "{}" (code={}).\nStderr: {}'
                                          .format(tar_path, p.returncode, err))
    with self.context.new_workunit(name='filtered-copy'):
        copied = self._filtered_copy(temp_unpack_dir, unpack_dir, filter_func=filter_func)
        self.context.log.info('Copied {} extracted files.'.format(copied))
def __init__(self, workdir, target, root_dir, target_suffix=None):
    self.target = target
    self.suffix = target_suffix or ''
    self.root = root_dir
    self.chroot = RelativeChroot(root_dir, os.path.join(workdir, 'codegen'), target.name)
    codegen_root = safe_mkdtemp(dir=self.chroot.path(), prefix='codegen.')
    self.codegen_root = os.path.relpath(codegen_root, self.chroot.path())
    self.created_packages = set()
    self.created_namespace_packages = set()
def setUp(self):
    self.work_dir = safe_mkdtemp()
    self.addCleanup(safe_rmtree, self.work_dir)
    self.build_root = os.path.join(self.work_dir, 'build_root')
    shutil.copytree(os.path.join(os.path.dirname(__file__), 'examples/mapper_test'),
                    self.build_root)

    parser = partial(parse_json, symbol_table={'configuration': Configuration, 'target': Target})
    self.address_mapper = AddressMapper(build_root=self.build_root,
                                        build_pattern=r'.+\.BUILD.json$',
                                        parser=parser)
def setUp(self):
    self.basedir = safe_mkdtemp()

    self.file_list = ['a', 'b', 'c']
    self.file_tar = os.path.join(self.basedir, 'test.tar')

    with TarFile.open(self.file_tar, mode='w') as tar:
        for f in self.file_list:
            full_path = os.path.join(self.basedir, f)
            touch(full_path)
            tar.add(full_path, f)
            safe_delete(full_path)
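# A minimal sketch of a test built on the fixture above (the test body is illustrative, not
# taken from the original suite): the archive should list exactly the member names that were
# added, and each source file should have been deleted after being archived.
def test_archive_members(self):
    with TarFile.open(self.file_tar) as tar:
        self.assertEqual(sorted(self.file_list), sorted(tar.getnames()))
    for f in self.file_list:
        self.assertFalse(os.path.exists(os.path.join(self.basedir, f)))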
def fake_interpreter(id_str):
    interpreter_dir = safe_mkdtemp()
    binary = os.path.join(interpreter_dir, 'binary')
    with open(binary, 'w') as fp:
        fp.write(dedent("""
            #!{}
            from __future__ import print_function
            print({!r})
            """.format(PythonInterpreter.get().binary, id_str)).strip())
    chmod_plus_x(binary)
    return PythonInterpreter.from_binary(binary)
def setUp(self):
    super(JvmdocGenTest, self).setUp()
    self.workdir = safe_mkdtemp()

    self.t1 = self.make_target('t1')
    context = self.context(target_roots=[self.t1])
    self.targets = context.targets()

    self.populate_compile_classpath(context)

    self.task = self.create_task(context, self.workdir)
def execute_antlr_test(self, expected_package, target_workdir_fun=None):
    target = self.get_antlr_target()
    context = self.create_context()
    task = self.prepare_execute(context)
    target_workdir_fun = target_workdir_fun or (lambda x: safe_mkdtemp(dir=x))
    # Do not use task.workdir here, because when calculating the hash for the synthetic target
    # we need source paths that are stable relative to the build root.
    target_workdir = target_workdir_fun(self.build_root)
    vt = DummyVersionedTarget(target, target_workdir)

    # Generate code, then create a synthetic target.
    task.execute_codegen(target, target_workdir)
    sources = task._capture_sources((vt,))[0]
    syn_target = task._inject_synthetic_target(vt, sources)

    actual_sources = [s for s in Fileset.rglobs('*.java', root=target_workdir)]
    expected_sources = syn_target.sources_relative_to_source_root()
    self.assertEqual(set(expected_sources), set(actual_sources))

    # Check that the synthetic target has a valid source root and the generated sources have
    # the expected java package.
    def get_package(path):
        with open(path, 'r') as fp:
            for line in fp:
                match = self.PACKAGE_RE.match(line)
                if match:
                    return match.group('package_name')
            return None

    for source in syn_target.sources_relative_to_source_root():
        source_path = os.path.join(target_workdir, source)
        self.assertTrue(os.path.isfile(source_path),
                        "{0} is not the source root for {1}".format(target_workdir, source))
        self.assertEqual(expected_package, get_package(source_path))

    self.assertIn(syn_target, context.targets())

    # Check that the output file locations match the package
    if expected_package is not None:
        expected_path_prefix = expected_package.replace('.', os.path.sep) + os.path.sep
        for source in syn_target.sources_relative_to_source_root():
            self.assertTrue(source.startswith(expected_path_prefix),
                            "{0} does not start with {1}".format(source, expected_path_prefix))

    # Check that empty directories have been removed
    for root, dirs, files in os.walk(target_workdir):
        for d in dirs:
            full_dir = os.path.join(root, d)
            self.assertTrue(os.listdir(full_dir),
                            "Empty directories should have been removed ({0})".format(full_dir))

    return syn_target
def binary(self):
    """Load and return the path to the native engine binary."""
    lib_name = '{}.so'.format(NATIVE_ENGINE_MODULE)
    lib_path = os.path.join(safe_mkdtemp(), lib_name)
    with closing(pkg_resources.resource_stream(__name__, lib_name)) as input_fp:
        # NB: The header stripping code here must be coordinated with header insertion code in
        # build-support/bin/native/bootstrap_code.sh
        engine_version = input_fp.readline().strip()
        repo_version = input_fp.readline().strip()
        logger.debug('using {} built at {}'.format(engine_version, repo_version))
        with open(lib_path, 'wb') as output_fp:
            output_fp.write(input_fp.read())
    return lib_path
def setUp(self):
    super(JvmTaskTest, self).setUp()
    self.workdir = safe_mkdtemp()

    self.t1 = self.make_target('t1')
    self.t2 = self.make_target('t2')
    self.t3 = self.make_target('t3')
    context = self.context(target_roots=[self.t1, self.t2, self.t3])

    self.populate_compile_classpath(context)

    self.task = self.create_task(context, self.workdir)
def create_scheduler(rules, union_rules=None, validate=True, native=None):
    """Create a Scheduler."""
    native = native or init_native()
    return Scheduler(
        native,
        FileSystemProjectTree(os.getcwd()),
        './.pants.d',
        safe_mkdtemp(),
        rules,
        union_rules,
        execution_options=DEFAULT_EXECUTION_OPTIONS,
        validate=validate,
    )