def setUp(self):
    """Create a bare origin repo, a worktree clone with a tagged commit, and a second clone.

    The second clone (self.clone2) pushes a further README edit back to origin so tests can
    exercise divergent history; self.git wraps the primary worktree.
    """
    self.origin = safe_mkdtemp()
    with pushd(self.origin):
        # A bare repo serves as the shared remote for both clones.
        subprocess.check_call(['git', 'init', '--bare'])

    self.gitdir = safe_mkdtemp()
    self.worktree = safe_mkdtemp()
    self.readme_file = os.path.join(self.worktree, 'README')

    with environment_as(GIT_DIR=self.gitdir, GIT_WORK_TREE=self.worktree):
        self.init_repo('depot', self.origin)

        touch(self.readme_file)
        subprocess.check_call(['git', 'add', 'README'])
        subprocess.check_call(['git', 'commit', '-am', 'initial commit with decode -> \x81b'])
        subprocess.check_call(['git', 'tag', 'first'])
        subprocess.check_call(['git', 'push', '--tags', 'depot', 'master'])
        subprocess.check_call(['git', 'branch', '--set-upstream', 'master', 'depot/master'])

        with safe_open(self.readme_file, 'w') as readme:
            readme.write('Hello World.')
        subprocess.check_call(['git', 'commit', '-am', 'Update README.'])

    self.clone2 = safe_mkdtemp()
    with pushd(self.clone2):
        self.init_repo('origin', self.origin)
        subprocess.check_call(['git', 'pull', '--tags', 'origin', 'master:master'])

        with safe_open(os.path.realpath('README'), 'a') as readme:
            readme.write('--')
        subprocess.check_call(['git', 'commit', '-am', 'Update README 2.'])
        subprocess.check_call(['git', 'push', '--tags', 'origin', 'master'])

    self.git = Git(gitdir=self.gitdir, worktree=self.worktree)
def execute(self):
    """Run the configured script (npm run-script / yarn run) for the single root node target."""
    target = self.require_single_root_target()
    if not self.is_node_module(target):
        # Nothing to do for non-node_module roots.
        return

    node_paths = self.context.products.get_data(NodePaths)
    node_path = node_paths.node_path(target)
    package_manager = self.get_package_manager_for_target(target=target)

    if package_manager == self.node_distribution.PACKAGE_MANAGER_NPM:
        args = ['run-script', self.get_options().script_name, '--'] + self.get_passthru_args()
        with pushd(node_path):
            result, npm_run = self.execute_npm(args, workunit_labels=[WorkUnitLabel.RUN])
            if result != 0:
                raise TaskError('npm run script failed:\n'
                                '\t{} failed with exit code {}'.format(npm_run, result))
    elif package_manager == self.node_distribution.PACKAGE_MANAGER_YARNPKG:
        args = ['run', self.get_options().script_name, '--'] + self.get_passthru_args()
        with pushd(node_path):
            returncode, yarnpkg_run_command = self.execute_yarnpkg(
                args=args, workunit_labels=[WorkUnitLabel.RUN])
            if returncode != 0:
                raise TaskError('yarnpkg run script failed:\n'
                                '\t{} failed with exit code {}'.format(yarnpkg_run_command, returncode))
    else:
        raise RuntimeError('Unknown package manager: {}'.format(package_manager))
def _execute(self, all_targets):
    """Implements abstract TestRunnerTaskMixin._execute."""
    targets = self._get_test_targets()
    if not targets:
        return

    node_paths = self.context.products.get_data(NodePaths)

    for target in targets:
        # The module under test is the test target's first dependency.
        module = target.dependencies[0]
        node_path = node_paths.node_path(module)
        self.context.log.debug('Testing node module (first dependency): {}'.format(module))
        package_manager = self.get_package_manager_for_target(target=module)
        if package_manager == self.node_distribution.PACKAGE_MANAGER_NPM:
            args = ['run-script', target.script_name, '--'] + self.get_passthru_args()
            with pushd(node_path):
                self._currently_executing_test_targets = [target]
                result, npm_test_command = self.execute_npm(args, workunit_labels=[WorkUnitLabel.TEST])
                if result != 0:
                    raise TaskError('npm test script failed:\n'
                                    '\t{} failed with exit code {}'.format(npm_test_command, result))
        elif package_manager == self.node_distribution.PACKAGE_MANAGER_YARNPKG:
            args = ['run', target.script_name, '--'] + self.get_passthru_args()
            with pushd(node_path):
                self._currently_executing_test_targets = [target]
                result, npm_test_command = self.execute_yarnpkg(
                    args=args, workunit_labels=[WorkUnitLabel.TEST])
                if result != 0:
                    raise TaskError('npm test script failed:\n'
                                    '\t{} failed with exit code {}'.format(npm_test_command, result))
    self._currently_executing_test_targets = []
def setUp(self):
    """Create an origin repo plus a worktree containing files, symlinks, and a symlink loop.

    Records self.initial_rev / self.current_rev for the two commits made, and sets up a
    second clone (self.clone2) that pushes a further commit back to origin.
    """
    self.origin = safe_mkdtemp()
    with pushd(self.origin):
        subprocess.check_call(['git', 'init', '--bare'])

    self.gitdir = safe_mkdtemp()
    self.worktree = safe_mkdtemp()
    self.readme_file = os.path.join(self.worktree, 'README')

    with environment_as(GIT_DIR=self.gitdir, GIT_WORK_TREE=self.worktree):
        self.init_repo('depot', self.origin)

        touch(self.readme_file)
        subprocess.check_call(['git', 'add', 'README'])
        safe_mkdir(os.path.join(self.worktree, 'dir'))
        with open(os.path.join(self.worktree, 'dir', 'f'), 'w') as f:
            f.write("file in subdir")

        # Make some symlinks
        os.symlink('f', os.path.join(self.worktree, 'dir', 'relative-symlink'))
        os.symlink('no-such-file', os.path.join(self.worktree, 'dir', 'relative-nonexistent'))
        os.symlink('dir/f', os.path.join(self.worktree, 'dir', 'not-absolute\u2764'))
        os.symlink('../README', os.path.join(self.worktree, 'dir', 'relative-dotdot'))
        os.symlink('dir', os.path.join(self.worktree, 'link-to-dir'))
        os.symlink('README/f', os.path.join(self.worktree, 'not-a-dir'))
        # Symlink loop: loop1 -> loop2 -> loop1.
        os.symlink('loop1', os.path.join(self.worktree, 'loop2'))
        os.symlink('loop2', os.path.join(self.worktree, 'loop1'))

        subprocess.check_call(['git', 'add',
                               'README', 'dir', 'loop1', 'loop2', 'link-to-dir', 'not-a-dir'])
        subprocess.check_call(['git', 'commit', '-am', 'initial commit with decode -> \x81b'])
        self.initial_rev = subprocess.check_output(['git', 'rev-parse', 'HEAD']).strip()
        subprocess.check_call(['git', 'tag', 'first'])
        subprocess.check_call(['git', 'push', '--tags', 'depot', 'master'])
        subprocess.check_call(['git', 'branch', '--set-upstream-to', 'depot/master'])

        # BUG FIX: the file is opened in text mode ('w'), so write the unicode string
        # directly; writing the `.encode('utf-8')` bytes to a text-mode file raises
        # TypeError on Python 3.
        with safe_open(self.readme_file, 'w') as readme:
            readme.write('Hello World.\u2764')
        subprocess.check_call(['git', 'commit', '-am', 'Update README.'])
        self.current_rev = subprocess.check_output(['git', 'rev-parse', 'HEAD']).strip()

    self.clone2 = safe_mkdtemp()
    with pushd(self.clone2):
        self.init_repo('origin', self.origin)
        subprocess.check_call(['git', 'pull', '--tags', 'origin', 'master:master'])

        with safe_open(os.path.realpath('README'), 'a') as readme:
            readme.write('--')
        subprocess.check_call(['git', 'commit', '-am', 'Update README 2.'])
        subprocess.check_call(['git', 'push', '--tags', 'origin', 'master'])

    self.git = Git(gitdir=self.gitdir, worktree=self.worktree)
def test_nested_pushd(self):
    """pushd must restore the previous cwd when nested contexts exit, in LIFO order."""
    pre_cwd = os.getcwd()
    with temporary_dir() as tempdir1:
        with pushd(tempdir1):
            # realpath: temp dirs can sit behind symlinks (e.g. /tmp on macOS).
            # MODERNIZATION: assertEquals is a deprecated alias, removed in Python 3.12;
            # use assertEqual.
            self.assertEqual(os.path.realpath(tempdir1), os.getcwd())
            with temporary_dir(root_dir=tempdir1) as tempdir2:
                with pushd(tempdir2):
                    self.assertEqual(os.path.realpath(tempdir2), os.getcwd())
                self.assertEqual(os.path.realpath(tempdir1), os.getcwd())
            self.assertEqual(os.path.realpath(tempdir1), os.getcwd())
        self.assertEqual(pre_cwd, os.getcwd())
    self.assertEqual(pre_cwd, os.getcwd())
def test_via_pants_runner(self) -> None:
    """Build-root detection walks up from any subdirectory to the dir holding BUILD_ROOT."""
    with temporary_dir() as root:
        root = os.path.realpath(root)
        touch(os.path.join(root, "BUILD_ROOT"))

        with pushd(root):
            assert root == self.build_root.path

        self.build_root.reset()
        nested = os.path.join(root, "one", "two")
        safe_mkdir(nested)
        with pushd(nested):
            assert root == self.build_root.path
def test_nested_pushd(self):
    """pushd restores the previous cwd on exit, even when contexts are nested."""
    # FIX: dropped the unused `as path1` / `as path2` bindings; MODERNIZATION: assertEquals
    # is a deprecated alias removed in Python 3.12 — use assertEqual.
    pre_cwd = os.getcwd()
    with temporary_dir() as tempdir1:
        with pushd(tempdir1):
            self.assertEqual(os.path.realpath(tempdir1), os.getcwd())
            with temporary_dir(root_dir=tempdir1) as tempdir2:
                with pushd(tempdir2):
                    self.assertEqual(os.path.realpath(tempdir2), os.getcwd())
                self.assertEqual(os.path.realpath(tempdir1), os.getcwd())
            self.assertEqual(os.path.realpath(tempdir1), os.getcwd())
        self.assertEqual(pre_cwd, os.getcwd())
    self.assertEqual(pre_cwd, os.getcwd())
def test_via_pants_runner(self):
    """BuildRoot().path resolves to the marker-file directory from root and from a child dir."""
    with temporary_dir() as root:
        root = os.path.realpath(root)
        touch(os.path.join(root, 'pants'))

        with pushd(root):
            self.assertEqual(root, BuildRoot().path)

        BuildRoot().reset()
        nested = os.path.join(root, 'one', 'two')
        safe_mkdir(nested)
        with pushd(nested):
            self.assertEqual(root, BuildRoot().path)
def _execute(self, all_targets):
    """Implements abstract TestRunnerTaskMixin._execute."""
    targets = self._get_test_targets()
    if not targets:
        return

    node_paths = self.context.products.get_data(NodePaths)

    for target in targets:
        # The module under test is the test target's first dependency.
        module = target.dependencies[0]
        node_path = node_paths.node_path(module)
        self.context.log.debug('Testing node module (first dependency): {}'.format(module))
        package_manager = self.get_package_manager_for_target(target=module)
        if package_manager == self.node_distribution.PACKAGE_MANAGER_NPM:
            args = ['run-script', target.script_name, '--'] + self.get_passthru_args()
            with pushd(node_path):
                self._currently_executing_test_targets = [target]
                result, npm_test_command = self.execute_npm(
                    args,
                    node_paths=node_paths.all_node_paths,
                    workunit_name=target.address.reference(),
                    workunit_labels=[WorkUnitLabel.TEST])
                if result != 0:
                    raise TaskError('npm test script failed:\n'
                                    '\t{} failed with exit code {}'.format(npm_test_command, result))
        elif package_manager == self.node_distribution.PACKAGE_MANAGER_YARNPKG:
            args = ['run', target.script_name, '--'] + self.get_passthru_args()
            with pushd(node_path):
                self._currently_executing_test_targets = [target]
                result, npm_test_command = self.execute_yarnpkg(
                    args=args,
                    node_paths=node_paths.all_node_paths,
                    workunit_name=target.address.reference(),
                    workunit_labels=[WorkUnitLabel.TEST])
                if result != 0:
                    raise TaskError('npm test script failed:\n'
                                    '\t{} failed with exit code {}'.format(npm_test_command, result))
    self._currently_executing_test_targets = []
def launch_repl(self, targets):
    """Synthesize a package.json depending on `targets`, npm-install it, and start a node REPL."""
    temp_dir = safe_mkdtemp()
    node_paths = self.context.products.get_data(NodePaths)
    package_json_path = os.path.join(temp_dir, 'package.json')
    package = {
        'name': self.SYNTHETIC_NODE_TARGET_NAME,
        'version': '0.0.0',
        'dependencies': {
            dep.package_name: self.render_npm_package_dependency(node_paths, dep)
            for dep in targets
        }
    }
    # BUG FIX: json.dump emits str, so the file must be opened in text mode;
    # mode 'wb' raises TypeError on Python 3.
    with open(package_json_path, 'w') as fp:
        json.dump(package, fp, indent=2)

    args = self.get_passthru_args()
    node_repl = self.node_distribution.node_command(args=args)
    with pushd(temp_dir):
        result, npm_install = self.execute_npm(args=['install'],
                                               workunit_name=self.SYNTHETIC_NODE_TARGET_NAME)
        if result != 0:
            raise TaskError('npm install of synthetic REPL module failed:\n'
                            '\t{} failed with exit code {}'.format(npm_install, result))
        repl_session = node_repl.run()
        repl_session.wait()
def _run_javascriptstyle(self, target, bootstrap_dir, files, config=None, ignore_path=None,
                         other_args=None):
    """Invoke eslint (via yarnpkg) over `files`; returns (exit_code, command_line)."""
    args = ['eslint', '--']
    args.extend(['--config', config] if config else ['--no-eslintrc'])
    if ignore_path:
        args.extend(['--ignore-path', ignore_path])
    if self.fix:
        self.context.log.info('Autoformatting is enabled for javascriptstyle.')
        args.append('--fix')
    if self.get_options().color:
        args.append('--color')

    ignore_patterns = self._get_target_ignore_patterns(target)
    if ignore_patterns:
        # Wrap ignore-patterns in quotes to avoid conflict with shell glob pattern
        for pattern in ignore_patterns:
            args.extend(['--ignore-pattern', '"{}"'.format(pattern)])
    if other_args:
        args.extend(other_args)
    args.extend(files)

    with pushd(bootstrap_dir):
        result, yarn_run_command = self.execute_yarnpkg(
            args=args,
            workunit_name=target.address.reference(),
            workunit_labels=[WorkUnitLabel.PREP])
        self.context.log.debug('Javascript style command: {}'.format(yarn_run_command))
        return (result, yarn_run_command)
def test_rm_rf_no_such_file_not_an_error(self, file_name="./vanishing_file") -> None:
    """rm_rf swallows ENOENT from rmtree rather than raising."""
    with temporary_dir() as td:
        with pushd(td):
            with unittest.mock.patch("pants.util.dirutil.shutil.rmtree") as mock_rmtree:
                mock_rmtree.side_effect = OSError(errno.ENOENT, os.strerror(errno.ENOENT))
                touch(file_name)
                rm_rf(file_name)
def _create_dist(self, dist_tgt, dist_target_dir, setup_requires_pex,
                 snapshot_fingerprint, is_platform_specific):
    """Create a .whl file for the specified python_distribution target."""
    self._copy_sources(dist_tgt, dist_target_dir)

    setup_py_snapshot_version_argv = self._generate_snapshot_bdist_wheel_argv(
        snapshot_fingerprint, is_platform_specific)

    cmd = safe_shlex_join(setup_requires_pex.cmdline(setup_py_snapshot_version_argv))
    with self.context.new_workunit('setup.py', cmd=cmd, labels=[WorkUnitLabel.TOOL]) as workunit:
        # setup.py resolves paths relative to the dist dir, so run from there.
        with pushd(dist_target_dir):
            result = setup_requires_pex.run(args=setup_py_snapshot_version_argv,
                                            stdout=workunit.output('stdout'),
                                            stderr=workunit.output('stderr'))
            if result != 0:
                raise self.BuildLocalPythonDistributionsError(
                    "Installation of python distribution from target {target} into directory {into_dir} "
                    "failed (return value of run() was: {rc!r}).\n"
                    "The pex with any requirements is located at: {interpreter}.\n"
                    "The host system's compiler and linker were used.\n"
                    "The setup command was: {command}."
                    .format(target=dist_tgt,
                            into_dir=dist_target_dir,
                            rc=result,
                            interpreter=setup_requires_pex.path(),
                            command=setup_py_snapshot_version_argv))
def launch_repl(self, targets):
    """Build a synthetic package depending on `targets`, npm-install it, and start a node REPL."""
    with temporary_dir() as temp_dir:
        node_paths = self.context.products.get_data(NodePaths)
        package_json_path = os.path.join(temp_dir, 'package.json')
        package = {
            'name': self.SYNTHETIC_NODE_TARGET_NAME,
            'version': '0.0.0',
            'dependencies': {
                target.package_name: node_paths.node_path(target) if self.is_node_module(target)
                else target.version
                for target in targets
            }
        }
        # BUG FIX: json.dump emits str, so the file must be opened in text mode;
        # mode 'wb' raises TypeError on Python 3.
        with open(package_json_path, 'w') as fp:
            json.dump(package, fp, indent=2)

        args = self.get_passthru_args()
        node_repl = self.node_distribution.node_command(
            args=args, node_paths=node_paths.all_node_paths if node_paths else None)
        with pushd(temp_dir):
            # TODO: Expose npm command options via node subsystems.
            result, npm_install = self.execute_npm(['install', '--no-optional'],
                                                   workunit_name=self.SYNTHETIC_NODE_TARGET_NAME)
            if result != 0:
                raise TaskError('npm install of synthetic REPL module failed:\n'
                                '\t{} failed with exit code {}'.format(npm_install, result))
            repl_session = node_repl.run()
            repl_session.wait()
def resolve_target(self, node_task, target, results_dir, node_paths):
    """Install the target's node dependencies into results_dir using npm or yarnpkg."""
    self._copy_sources(target, results_dir)
    with pushd(results_dir):
        if not os.path.exists('package.json'):
            raise TaskError(
                'Cannot find package.json. Did you forget to put it in target sources?')

        package_manager = node_task.get_package_manager_for_target(target=target)
        if package_manager == node_task.node_distribution.PACKAGE_MANAGER_NPM:
            if os.path.exists('npm-shrinkwrap.json'):
                node_task.context.log.info('Found npm-shrinkwrap.json, will not inject package.json')
            else:
                node_task.context.log.warn(
                    'Cannot find npm-shrinkwrap.json. Did you forget to put it in target sources? '
                    'This package will fall back to inject package.json with pants BUILD dependencies '
                    'including node_remote_module and other node dependencies. However, this is '
                    'not fully supported.')
                self._emit_package_descriptor(node_task, target, results_dir, node_paths)
            result, npm_install = node_task.execute_npm(['install'],
                                                        workunit_name=target.address.reference(),
                                                        workunit_labels=[WorkUnitLabel.COMPILER])
            if result != 0:
                raise TaskError('Failed to resolve dependencies for {}:\n\t{} failed with exit code {}'
                                .format(target.address.reference(), npm_install, result))
        elif package_manager == node_task.node_distribution.PACKAGE_MANAGER_YARNPKG:
            if not os.path.exists('yarn.lock'):
                raise TaskError(
                    'Cannot find yarn.lock. Did you forget to put it in target sources?')
            returncode, yarnpkg_command = node_task.execute_yarnpkg(
                args=[],
                workunit_name=target.address.reference(),
                workunit_labels=[WorkUnitLabel.COMPILER])
            if returncode != 0:
                raise TaskError('Failed to resolve dependencies for {}:\n\t{} failed with exit code {}'
                                .format(target.address.reference(), yarnpkg_command, returncode))
def test_cmdline_only(self):
    """JvmRun in cmdline-only mode writes the java invocation to the requested file."""
    jvm_binary = self.make_target('src/java/com/pants:binary', JvmBinary, main="com.pants.Binary")
    jvm_run = self.prepare_task(args=['--test-only-write-cmd-line=a'],
                                targets=[jvm_binary],
                                build_graph=self.build_graph)
    round_manager = RoundManager(jvm_run.context)
    jvm_run.prepare(round_manager)
    self.populate_exclusive_groups(context=jvm_run.context, classpaths=['bob', 'fred'])
    with temporary_dir() as pwd:
        with pushd(pwd):
            cmdline_file = os.path.join(pwd, 'a')
            self.assertFalse(os.path.exists(cmdline_file))
            jvm_run.execute()
            self.assertTrue(os.path.exists(cmdline_file))
            with open(cmdline_file) as fp:
                contents = fp.read()
                expected_suffix = 'java -cp bob:fred com.pants.Binary'
                # MODERNIZATION: assertEquals is a deprecated alias removed in Python 3.12.
                self.assertEqual(expected_suffix, contents[-len(expected_suffix):])
def execute(self):
    """Run each node_module target's build script and register its outputs as products."""
    node_paths = self.context.products.get_data(NodePaths)
    runtime_classpath_product = self.context.products.get_data(
        'runtime_classpath',
        init_func=ClasspathProducts.init_func(self.get_options().pants_workdir))
    bundleable_js_product = self.context.products.get_data(
        'bundleable_js', init_func=lambda: defaultdict(MultipleRootedProducts))

    targets = self.context.targets(predicate=self.is_node_module)
    with self.invalidated(targets, invalidate_dependents=True) as invalidation_check:
        for vt in invalidation_check.all_vts:
            target = vt.target
            node_installed_path = node_paths.node_path(target)
            with pushd(node_installed_path):
                # Only rebuild when the target is invalid; products are registered for all vts.
                if not vt.valid:
                    self._run_build_script(
                        target, vt.results_dir, node_installed_path, node_paths.all_node_paths)
                if not target.payload.dev_dependency:
                    output_dir = self._get_output_dir(target, node_installed_path)
                    # Make sure that there is output generated.
                    if not os.path.exists(output_dir):
                        raise TaskError(
                            'Target {} has build script {} specified, but did not generate any output '
                            'at {}.\n'.format(
                                target.address.reference(), target.payload.build_script, output_dir))
                    absolute_symlink(output_dir,
                                     os.path.join(vt.results_dir, target.address.target_name))
                    bundleable_js_product[target].add_abs_paths(output_dir, [output_dir])
                    runtime_classpath_product.add_for_target(target, [('default', vt.results_dir)])
def _resolve_local_module(self, node_path, node_paths, node_module):
    """Copy a local node_module's sources into node_path, then npm install and dedupe there."""
    _copy_sources(buildroot=get_buildroot(), node_module=node_module, dest_dir=node_path)
    self._emit_package_descriptor(node_module, node_path, node_paths)

    with pushd(node_path):
        # TODO(John Sirois): Handle dev dependency resolution.
        result, npm_install = self.execute_npm(args=['install'],
                                               workunit_name=node_module.address.reference())
        if result != 0:
            raise TaskError(
                'Failed to resolve dependencies for {}:\n\t{} failed with exit code {}'
                .format(node_module.address.reference(), npm_install, result))

        # TODO(John Sirois): This will be part of install in npm 3.x, detect or control the npm
        # version we use and only conditionally execute this.
        result, npm_dedupe = self.execute_npm(args=['dedupe'],
                                              workunit_name=node_module.address.reference())
        if result != 0:
            raise TaskError(
                'Failed to dedupe dependencies for {}:\n\t{} failed with exit code {}'
                .format(node_module.address.reference(), npm_dedupe, result))
def test_execute_yarnpkg(self):
    """run_script via the yarnpkg package manager executes the named package.json script."""
    task = self.create_task(self.context())
    with temporary_dir() as chroot:
        proof = os.path.join(chroot, "proof")
        self.assertFalse(os.path.exists(proof))
        package = {
            "name": "pantsbuild.pants.test",
            "version": "0.0.0",
            "scripts": {
                # The script materializes `proof` so success is externally observable.
                "proof": f'echo "42" > {proof}',
            },
        }
        with open(os.path.join(chroot, "package.json"), "w") as fp:
            json.dump(package, fp)
        with pushd(chroot):
            returncode, _ = task.run_script(
                "proof",
                package_manager=task.node_distribution.get_package_manager(
                    package_manager="yarnpkg"),
                workunit_name="test",
            )
            self.assertEqual(0, returncode)
            self.assertTrue(os.path.exists(proof))
            with open(proof) as fp:
                self.assertEqual("42", fp.read().strip())
def launch_repl(self, targets):
    """Synthesize a package.json depending on `targets`, npm-install it, and start a node REPL."""
    temp_dir = safe_mkdtemp()
    node_paths = self.context.products.get_data(NodePaths)
    package_json_path = os.path.join(temp_dir, 'package.json')
    package = {
        'name': self.SYNTHETIC_NODE_TARGET_NAME,
        'version': '0.0.0',
        'dependencies': {
            dep.package_name: self.render_npm_package_dependency(node_paths, dep)
            for dep in targets
        }
    }
    # BUG FIX: json.dump emits str, so the file must be opened in text mode;
    # mode 'wb' raises TypeError on Python 3.
    with open(package_json_path, 'w') as fp:
        json.dump(package, fp, indent=2)

    args = self.get_passthru_args()
    node_repl = self.node_distribution.node_command(args=args)
    with pushd(temp_dir):
        result, npm_install = self.execute_npm(
            args=['install'], workunit_name=self.SYNTHETIC_NODE_TARGET_NAME)
        if result != 0:
            raise TaskError(
                'npm install of synthetic REPL module failed:\n'
                '\t{} failed with exit code {}'.format(npm_install, result))
        repl_session = node_repl.run()
        repl_session.wait()
def test_detect_worktree(tmp_path: Path, origin: PurePath, git: MutatingGitWorktree) -> None:
    """git_worktree() is detected from the clone root and any subdirectory, but not above it."""
    clone = tmp_path / "clone"
    clone.mkdir()
    with pushd(clone.as_posix()):
        init_repo("origin", origin)
        subprocess.check_call(["git", "pull", "--tags", "origin", "main:main"])

    def check_worktree(cwd: str, expected: PurePath | None):
        # Given a cwd relative to the worktree, tests that the worktree is detected as
        # 'expected'.
        probe_dir = clone / cwd
        probe_dir.mkdir(parents=True, exist_ok=True)
        with pushd(str(probe_dir)):
            if expected is None:
                assert git_worktree() is None
            else:
                worktree = git_worktree()
                assert worktree and expected == worktree.worktree

    check_worktree("..", None)
    check_worktree(".", clone)
    check_worktree("is", clone)
    check_worktree("is/a", clone)
    check_worktree("is/a/dir", clone)
def resolve_target(self, node_task, target, results_dir, node_paths):
    """Resolve a node_module target by installing its dependencies into results_dir."""
    self._copy_sources(target, results_dir)
    with pushd(results_dir):
        if not os.path.exists('package.json'):
            raise TaskError(
                'Cannot find package.json. Did you forget to put it in target sources?')
        if os.path.exists('package-lock.json'):
            node_task.context.log.info('Found package-lock.json, will not inject package.json')
        else:
            node_task.context.log.warn(
                'Cannot find package-lock.json. Did you forget to put it in target sources? '
                'This package will fall back to inject package.json with pants BUILD dependencies '
                'including node_remote_module and other node dependencies. However, this is '
                'not fully supported.')
            # Fallback path: synthesize/rewrite the descriptor from BUILD dependencies.
            self._emit_package_descriptor(node_task, target, results_dir, node_paths)
            self._rewrite_package_descriptor(node_task, target, results_dir, node_paths)
        result, command = node_task.install_module(
            target=target,
            install_optional=self.get_options().install_optional,
            workunit_name=target.address.reference(),
            workunit_labels=[WorkUnitLabel.COMPILER])
        if result != 0:
            raise TaskError('Failed to resolve dependencies for {}:\n\t{} failed with exit code {}'
                            .format(target.address.reference(), command, result))
def resolve_target(self, node_task, target, results_dir, node_paths):
    """Resolve a node_module target's dependencies into results_dir via its package manager."""
    self._copy_sources(target, results_dir)
    with pushd(results_dir):
        if not os.path.exists('package.json'):
            raise TaskError(
                'Cannot find package.json. Did you forget to put it in target sources?')
        # TODO: remove/remodel the following section when node_module dependency is fleshed out.
        package_manager = node_task.get_package_manager(target=target).name
        if package_manager == PACKAGE_MANAGER_NPM:
            if os.path.exists('npm-shrinkwrap.json'):
                node_task.context.log.info('Found npm-shrinkwrap.json, will not inject package.json')
            else:
                node_task.context.log.warn(
                    'Cannot find npm-shrinkwrap.json. Did you forget to put it in target sources? '
                    'This package will fall back to inject package.json with pants BUILD dependencies '
                    'including node_remote_module and other node dependencies. However, this is '
                    'not fully supported.')
                self._emit_package_descriptor(node_task, target, results_dir, node_paths)
        elif package_manager == PACKAGE_MANAGER_YARNPKG:
            if not os.path.exists('yarn.lock'):
                raise TaskError(
                    'Cannot find yarn.lock. Did you forget to put it in target sources?')
        result, command = node_task.install_module(
            target=target,
            install_optional=self.get_options().install_optional,
            workunit_name=target.address.reference(),
            workunit_labels=[WorkUnitLabel.COMPILER])
        if result != 0:
            raise TaskError('Failed to resolve dependencies for {}:\n\t{} failed with exit code {}'
                            .format(target.address.reference(), command, result))
def test_detect_worktree(self):
    """Git.detect_worktree finds the clone root from inside it, and nothing above it."""
    with temporary_dir() as _clone:
        with pushd(_clone):
            clone = os.path.realpath(_clone)
            self.init_repo("origin", self.origin)
            subprocess.check_call(["git", "pull", "--tags", "origin", "master:master"])

            def worktree_relative_to(cwd, expected):
                # Given a cwd relative to the worktree, tests that the worktree is detected
                # as 'expected'.
                orig_cwd = os.getcwd()
                try:
                    abs_cwd = os.path.join(clone, cwd)
                    if not os.path.isdir(abs_cwd):
                        os.mkdir(abs_cwd)
                    os.chdir(abs_cwd)
                    self.assertEqual(expected, Git.detect_worktree())
                finally:
                    os.chdir(orig_cwd)

            worktree_relative_to("..", None)
            worktree_relative_to(".", clone)
            worktree_relative_to("is", clone)
            worktree_relative_to("is/a", clone)
            worktree_relative_to("is/a/dir", clone)
def execute(self):
    """Run isort out-of-process over all invalidated python sources; fail on non-zero exit."""
    targets = self.get_targets(self.is_non_synthetic_python_target)
    with self.invalidated(targets=targets) as invalidation_check:
        if not invalidation_check.invalid_vts:
            logging.debug(self.NOOP_MSG_HAS_TARGET_BUT_NO_SOURCE)
            return

        invalid_tgts = [vt.target for vt in invalidation_check.invalid_vts]
        sources = self._calculate_isortable_python_sources(invalid_tgts)
        if not sources:
            logging.debug(self.NOOP_MSG_HAS_TARGET_BUT_NO_SOURCE)
            return

        isort = self.context.products.get_data(IsortPrep.tool_instance_cls)
        isort_subsystem = IsortPrep.tool_subsystem_cls.global_instance()
        args = [
            *self.get_passthru_args(),
            *isort_subsystem.get_args(),
            '--filter-files',
            *sources,
        ]

        # NB: We execute isort out of process to avoid unwanted side-effects from importing it:
        #   https://github.com/timothycrosley/isort/issues/456
        with pushd(get_buildroot()):
            workunit_factory = functools.partial(self.context.new_workunit,
                                                 name='run-isort',
                                                 labels=[WorkUnitLabel.TOOL, WorkUnitLabel.LINT])
            cmdline, exit_code = isort.run(workunit_factory, args)
            if exit_code != 0:
                raise TaskError(
                    f"Exited with return code {exit_code} while running `{cmdline}`.",
                    exit_code=exit_code
                )
def test_build_file_rev(self):
    # Test that the build_file_rev global option works. Because the
    # test framework does not yet support bootstrap options, this test
    # in fact just directly calls ScmBuildFile.set_rev.
    with pushd(self.root_dir):
        subprocess.check_call(['git', 'init'])
        subprocess.check_call(['git', 'config', 'user.email', '*****@*****.**'])
        subprocess.check_call(['git', 'config', 'user.name', 'Your Name'])
        subprocess.check_call(['git', 'add', '.'])
        # BUG FIX: a missing comma fused '-m' and the message into the single argv entry
        # '-minitial commit'; pass the flag and its value as separate arguments.
        subprocess.check_call(['git', 'commit', '-m', 'initial commit'])
        subprocess.check_call(['rm', '-rf', 'path-that-does-exist', 'grandparent', 'BUILD',
                               'BUILD.twitter'])

        my_buildfile = self.create_buildfile('grandparent/parent/BUILD')
        buildfile = self.create_buildfile('grandparent/parent/BUILD.twitter')
        # MODERNIZATION: assertEquals is a deprecated alias removed in Python 3.12.
        self.assertEqual(OrderedSet([buildfile]), OrderedSet(my_buildfile.siblings()))
        self.assertEqual(OrderedSet([my_buildfile]), OrderedSet(buildfile.siblings()))

        buildfile = self.create_buildfile('grandparent/parent/child2/child3/BUILD')
        self.assertEqual(OrderedSet(), OrderedSet(buildfile.siblings()))

        buildfiles = ScmBuildFile.scan_buildfiles(os.path.join(self.root_dir, 'grandparent'))
        self.assertEqual(OrderedSet([
            self.create_buildfile('grandparent/parent/BUILD'),
            self.create_buildfile('grandparent/parent/BUILD.twitter'),
            self.create_buildfile('grandparent/parent/child1/BUILD'),
            self.create_buildfile('grandparent/parent/child1/BUILD.twitter'),
            self.create_buildfile('grandparent/parent/child2/child3/BUILD'),
            self.create_buildfile('grandparent/parent/child5/BUILD'),
        ]), buildfiles)
def test_rm_rf_permission_error_raises(self, file_name="./perm_guarded_file") -> None:
    """rm_rf propagates non-ENOENT OSErrors (e.g. EACCES) from rmtree."""
    with temporary_dir() as td:
        with pushd(td):
            with unittest.mock.patch("pants.util.dirutil.shutil.rmtree") as mock_rmtree:
                with pytest.raises(OSError):
                    mock_rmtree.side_effect = OSError(errno.EACCES, os.strerror(errno.EACCES))
                    touch(file_name)
                    rm_rf(file_name)
def setup_cmdline_run(self, extra_jvm_options=None, **options):
    """Run the JvmRun task in command line only mode with the specified extra options.

    :returns: the command line string
    """
    # NB: We must set `--run-args=[]` because the unit test does not properly set up the
    # `RunOptions(GoalSubsystem)`.
    self.set_options(only_write_cmd_line="a", args=[], **options)
    jvm_binary = self.make_target(
        "src/java/org/pantsbuild:binary",
        JvmBinary,
        main="org.pantsbuild.Binary",
        extra_jvm_options=extra_jvm_options,
    )
    context = self.context(target_roots=[jvm_binary])
    jvm_run = self.create_task(context)
    self._cmdline_classpath = [os.path.join(self.pants_workdir, entry) for entry in ("bob", "fred")]
    self.populate_runtime_classpath(context=jvm_run.context, classpath=self._cmdline_classpath)

    with temporary_dir() as pwd:
        with pushd(pwd):
            cmdline_file = os.path.join(pwd, "a")
            self.assertFalse(os.path.exists(cmdline_file))
            jvm_run.execute()
            self.assertTrue(os.path.exists(cmdline_file))
            with open(cmdline_file, "r") as fp:
                contents = fp.read()
                yield contents
def run_tests(self, targets, args=None, fast=True, debug=False):
    """Build and run python tests for `targets` from the build root; returns the run result."""
    test_builder = PythonTestBuilder(self.context(),
                                     targets,
                                     args or [],
                                     fast=fast,
                                     debug=debug,
                                     interpreter=self._cache_current_interpreter())
    with pushd(self.build_root):
        return test_builder.run()
def execute(self):
    """Run each node_module target's build script and register its outputs as products."""
    node_paths = self.context.products.get_data(NodePaths)
    runtime_classpath_product = self.context.products.get_data(
        'runtime_classpath',
        init_func=ClasspathProducts.init_func(self.get_options().pants_workdir))
    bundleable_js_product = self.context.products.get_data(
        'bundleable_js', init_func=lambda: defaultdict(MultipleRootedProducts))

    targets = self.context.targets(predicate=self.is_node_module)
    with self.invalidated(targets, invalidate_dependents=True) as invalidation_check:
        for vt in invalidation_check.all_vts:
            target = vt.target
            node_installed_path = node_paths.node_path(target)
            with pushd(node_installed_path):
                # Only rebuild when the target is invalid; products are registered for all vts.
                if not vt.valid:
                    self._run_build_script(target, vt.results_dir, node_installed_path)
                if not target.payload.dev_dependency:
                    output_dir = self._get_output_dir(target, node_installed_path)
                    # Make sure that there is output generated.
                    if not os.path.exists(output_dir):
                        raise TaskError(
                            'Target {} has build script {} specified, but did not generate any output '
                            'at {}.\n'.format(
                                target.address.reference(), target.payload.build_script, output_dir))
                    absolute_symlink(output_dir,
                                     os.path.join(vt.results_dir, target.address.target_name))
                    bundleable_js_product[target].add_abs_paths(output_dir, [output_dir])
                    runtime_classpath_product.add_for_target(target, [('default', vt.results_dir)])
def test_combined_cache(self):
    """Make sure that the combined cache finds what it should and that it backfills."""
    httpd = None
    httpd_thread = None
    try:
        with temporary_dir() as http_root:
            with temporary_dir() as cache_root:
                with pushd(http_root):  # SimpleRESTHandler serves from the cwd.
                    httpd = SocketServer.TCPServer(('localhost', 0), SimpleRESTHandler)
                    port = httpd.server_address[1]
                    httpd_thread = Thread(target=httpd.serve_forever)
                    httpd_thread.start()
                    with temporary_dir() as artifact_root:
                        local = LocalArtifactCache(None, artifact_root, cache_root)
                        remote = RESTfulArtifactCache(MockLogger(), artifact_root,
                                                      'http://localhost:%d' % port)
                        combined = CombinedArtifactCache([local, remote])

                        key = CacheKey('muppet_key', 'fake_hash', 42)

                        with temporary_file(artifact_root) as f:
                            # Write the file.
                            f.write(TEST_CONTENT1)
                            path = f.name
                            f.close()

                            # No cache has key.
                            self.assertFalse(local.has(key))
                            self.assertFalse(remote.has(key))
                            self.assertFalse(combined.has(key))

                            # No cache returns key.
                            self.assertFalse(bool(local.use_cached_files(key)))
                            self.assertFalse(bool(remote.use_cached_files(key)))
                            self.assertFalse(bool(combined.use_cached_files(key)))

                            # Attempting to use key that no cache had should not change anything.
                            self.assertFalse(local.has(key))
                            self.assertFalse(remote.has(key))
                            self.assertFalse(combined.has(key))

                            # Add to only remote cache.
                            remote.insert(key, [path])

                            self.assertFalse(local.has(key))
                            self.assertTrue(remote.has(key))
                            self.assertTrue(combined.has(key))

                            # Successfully using via remote should NOT change local.
                            self.assertTrue(bool(remote.use_cached_files(key)))
                            self.assertFalse(local.has(key))

                            # Successfully using via combined SHOULD backfill local.
                            self.assertTrue(bool(combined.use_cached_files(key)))
                            self.assertTrue(local.has(key))
                            self.assertTrue(bool(local.use_cached_files(key)))
    finally:
        # Always stop the background HTTP server, even on assertion failure.
        if httpd:
            httpd.shutdown()
        if httpd_thread:
            httpd_thread.join()
def test_execute_yarnpkg(self):
    """run_script via the yarnpkg package manager executes the named package.json script."""
    task = self.create_task(self.context())
    with temporary_dir() as chroot:
        proof = os.path.join(chroot, 'proof')
        self.assertFalse(os.path.exists(proof))
        package = {
            'name': 'pantsbuild.pants.test',
            'version': '0.0.0',
            'scripts': {
                # The script materializes `proof` so success is externally observable.
                'proof': 'echo "42" > {}'.format(proof),
            },
        }
        with open(os.path.join(chroot, 'package.json'), 'w') as fp:
            json.dump(package, fp)
        with pushd(chroot):
            returncode, _ = task.run_script(
                'proof',
                package_manager=task.node_distribution.get_package_manager(
                    package_manager='yarnpkg'),
                workunit_name='test')
            self.assertEqual(0, returncode)
            self.assertTrue(os.path.exists(proof))
            with open(proof) as fp:
                self.assertEqual('42', fp.read().strip())
def launch_repl(self, targets):
    """Materialize a synthetic node module depending on `targets` and start a node REPL.

    :param targets: Node targets whose packages should be importable from the REPL.
    :raises TaskError: If the `npm install` of the synthetic module fails.
    """
    with temporary_dir() as temp_dir:
        node_paths = self.context.products.get_data(NodePaths)
        package_json_path = os.path.join(temp_dir, 'package.json')
        package = {
            'name': self.SYNTHETIC_NODE_TARGET_NAME,
            'version': '0.0.0',
            'dependencies': {
                # Local node modules resolve by filesystem path; others pin by version.
                target.package_name: node_paths.node_path(target) if self.is_node_module(target)
                                     else target.version
                for target in targets
            }
        }
        # BUG FIX: open in text mode. json.dump writes str, which raises TypeError on a
        # binary-mode ('wb') file under Python 3; 'w' works on both Python 2 and 3.
        with open(package_json_path, 'w') as fp:
            json.dump(package, fp, indent=2)
        args = self.get_passthru_args()
        node_repl = self.node_distribution.node_command(
            args=args, node_paths=node_paths.all_node_paths if node_paths else None)
        with pushd(temp_dir):
            # TODO: Expose npm command options via node subsystems.
            result, npm_install = self.execute_npm(['install', '--no-optional'],
                                                   workunit_name=self.SYNTHETIC_NODE_TARGET_NAME)
            if result != 0:
                raise TaskError('npm install of synthetic REPL module failed:\n'
                                '\t{} failed with exit code {}'.format(npm_install, result))
            # Block until the user exits the REPL session.
            repl_session = node_repl.run()
            repl_session.wait()
def execute(self):
    """Run isort (out of process) over all invalidated, isortable python sources."""
    targets = self.get_targets(self.is_non_synthetic_python_target)
    with self.invalidated(targets=targets) as invalidation_check:
        invalid_vts = invalidation_check.invalid_vts
        if not invalid_vts:
            logging.debug(self.NOOP_MSG_HAS_TARGET_BUT_NO_SOURCE)
            return
        sources = self._calculate_isortable_python_sources(
            [vt.target for vt in invalid_vts])
        if not sources:
            logging.debug(self.NOOP_MSG_HAS_TARGET_BUT_NO_SOURCE)
            return
        isort = self.context.products.get_data(IsortPrep.Isort)
        args = self.get_passthru_args() + sources
        # NB: We execute isort out of process to avoid unwanted side-effects from importing it:
        #   https://github.com/timothycrosley/isort/issues/456
        with pushd(get_buildroot()):
            workunit_factory = functools.partial(
                self.context.new_workunit,
                name='run-isort',
                labels=[WorkUnitLabel.TOOL, WorkUnitLabel.LINT])
            cmdline, exit_code = isort.run(workunit_factory, args)
            if exit_code != 0:
                raise TaskError('{} ... exited non-zero ({}).'.format(cmdline, exit_code),
                                exit_code=exit_code)
def test_symlink_remote_lib(self):
    """A fetched remote lib's source file should be symlinked into the gopath workspace."""
    with pushd(self.build_root):
        with temporary_dir() as d:
            SourceRoot.register('3rdparty')
            spec = '3rdparty/github.com/user/lib'
            remote_lib_src_dir = os.path.join(d, spec)
            # Simulate a previously-fetched remote library source tree containing one file.
            self.create_file(os.path.join(remote_lib_src_dir, 'file.go'))
            go_remote_lib = self.make_target(spec=spec, target_type=GoRemoteLibrary)
            context = self.context()
            go_remote_lib_src = context.products.get_data('go_remote_lib_src',
                                                          init_func=lambda: defaultdict(str))
            go_remote_lib_src[go_remote_lib] = remote_lib_src_dir
            ws_task = self.create_task(context)
            gopath = ws_task.get_gopath(go_remote_lib)
            ws_task._symlink_remote_lib(gopath, go_remote_lib, set())
            # The sources should appear under src/<import path> in the gopath...
            workspace_dir = os.path.join(gopath, 'src/github.com/user/lib')
            self.assertTrue(os.path.isdir(workspace_dir))
            # ...as a symlink pointing back at the fetched copy.
            link = os.path.join(workspace_dir, 'file.go')
            self.assertEqual(os.readlink(link), os.path.join(remote_lib_src_dir, 'file.go'))
def workspace(self, *buildfiles):
    """Yield a temporary, registered build root seeded with an empty file per given path."""
    with temporary_dir() as root_dir, BuildRoot().temporary(root_dir), pushd(root_dir):
        for rel_path in buildfiles:
            touch(os.path.join(root_dir, rel_path))
        yield os.path.realpath(root_dir)
def test_execute_yarnpkg(self):
    """End-to-end check that run_script via yarnpkg executes a package.json script."""
    task = self.create_task(self.context())
    with temporary_dir() as chroot:
        # The 'proof' script writes this sentinel; its presence shows the script really ran.
        proof = os.path.join(chroot, 'proof')
        self.assertFalse(os.path.exists(proof))
        package = {
            'name': 'pantsbuild.pants.test',
            'version': '0.0.0',
            'scripts': {
                'proof': 'echo "42" > {}'.format(proof)
            }
        }
        # json.dump emits str on Python 3 (requires 'w'); 'wb' is kept for Python 2.
        mode = 'w' if PY3 else 'wb'
        with open(os.path.join(chroot, 'package.json'), mode) as fp:
            json.dump(package, fp)
        with pushd(chroot):
            returncode, _ = task.run_script(
                'proof',
                package_manager=task.node_distribution.get_package_manager(package_manager='yarnpkg'),
                workunit_name='test')
            self.assertEqual(0, returncode)
            self.assertTrue(os.path.exists(proof))
            with open(proof) as fp:
                self.assertEqual('42', fp.read().strip())
def test_detect_worktree(self):
    """Git.detect_worktree should find the clone from any cwd inside it, and only inside it."""
    with temporary_dir() as _clone:
        with pushd(_clone):
            # realpath: the temp dir may live behind a symlink (e.g. /tmp on macOS);
            # NOTE(review): detection presumably reports real paths — confirm against Git.
            clone = os.path.realpath(_clone)
            self.init_repo('origin', self.origin)
            subprocess.check_call(
                ['git', 'pull', '--tags', 'origin', 'master:master'])

            def worktree_relative_to(cwd, expected):
                # Given a cwd relative to the worktree, tests that the worktree is detected as 'expected'.
                orig_cwd = os.getcwd()
                try:
                    abs_cwd = os.path.join(clone, cwd)
                    if not os.path.isdir(abs_cwd):
                        os.mkdir(abs_cwd)
                    os.chdir(abs_cwd)
                    actual = Git.detect_worktree()
                    self.assertEqual(expected, actual)
                finally:
                    # Always restore the original cwd so subsequent checks start clean.
                    os.chdir(orig_cwd)

            # Outside the clone: nothing detected; at and below the root: the clone itself.
            worktree_relative_to('..', None)
            worktree_relative_to('.', clone)
            worktree_relative_to('is', clone)
            worktree_relative_to('is/a', clone)
            worktree_relative_to('is/a/dir', clone)
def execute_codegen(self, target, target_workdir):
    """Generate python gRPC sources for `target` into `target_workdir`.

    :param target: The codegen target to compile.
    :param target_workdir: Destination directory for the generated sources.
    :raises TaskError: If the grpcio invocation exits non-zero.
    """
    args = self.build_args(target, target_workdir)
    # FIX: use lazy %-style logging args instead of f-strings so the formatting cost is
    # only paid when the log level is actually enabled.
    logging.debug("Executing grpcio code generation with args: [%s]", args)
    with pushd(get_buildroot()):
        workunit_factory = functools.partial(
            self.context.new_workunit,
            name='run-grpcio',
            labels=[WorkUnitLabel.TOOL, WorkUnitLabel.LINT])
        cmdline, exit_code = self._grpcio_binary.run(workunit_factory, args)
        if exit_code != 0:
            raise TaskError(f'{cmdline} ... exited non-zero ({exit_code}).',
                            exit_code=exit_code)
    # Create __init__.py in each subdirectory of the target directory so that setup_py recognizes
    # them as modules.
    target_workdir_path = Path(target_workdir)
    sources = [
        str(p.relative_to(target_workdir_path))
        for p in target_workdir_path.rglob("*.py")
    ]
    for missing_init in identify_missing_init_files(sources):
        Path(target_workdir_path, missing_init).touch()
    logging.info("Grpcio finished code generation into: [%s]", target_workdir)
def _execute(self, all_targets):
    """Implements abstract TestRunnerTaskMixin._execute."""
    targets = self._get_test_targets()
    if not targets:
        return
    node_paths = self.context.products.get_data(NodePaths)
    for target in targets:
        # The node module under test is taken to be the target's first dependency.
        node_module = target.dependencies[0]
        self.context.log.debug(
            'Testing node module (first dependency): {}'.format(node_module))
        with pushd(node_paths.node_path(node_module)):
            # NOTE(review): presumably consumed by the mixin for failure/timeout
            # attribution — confirm against TestRunnerTaskMixin.
            self._currently_executing_test_targets = [target]
            result, test_command = self.run_script(
                target.script_name,
                package_manager=self.get_package_manager(target=node_module),
                target=target,
                script_args=self.get_passthru_args(),
                node_paths=node_paths.all_node_paths,
                workunit_name=target.address.reference(),
                workunit_labels=[WorkUnitLabel.TEST])
            if result != 0:
                raise TaskError('test script failed:\n'
                                '\t{} failed with exit code {}'.format(test_command, result))
    # Clear the in-flight marker once all targets have run.
    self._currently_executing_test_targets = []
def execute_codegen(self, target, target_workdir):
    """Bundle `target` by running its `webpack` package script into the destination dir."""
    node_paths = self.context.products.get_data(NodePaths)
    if not node_paths:
        raise TaskError("No npm distribution was found!")
    output_dir = os.path.join(target_workdir, self.get_options().destination_dir, target.name)
    # Added "bail" to the args since webpack only returns failure on failed transpiling, treating
    # missing deps or syntax errors as soft errors. This resulted in Pants returning success
    # while the canary fails health check.
    script_args = list(self.webpack_subsystem.get_distribution_args() + [
        '--bail',
        '--output-path={}'.format(output_dir),
        '--env={}'.format(self.get_options().env),
    ])
    with pushd(node_paths.node_path(target)):
        result, command = self.run_script(
            'webpack',
            target=target,
            script_args=script_args,
            node_paths=node_paths.all_node_paths,
            workunit_name=target.address.reference(),
            workunit_labels=[WorkUnitLabel.RUN])
        if result != 0:
            raise TaskError('Run script failed:\n'
                            '\t{} failed with exit code {}'.format(command, result))
def test_symlink_remote_lib(self):
    """Every fetched remote-lib source file should get its own symlink in the gopath."""
    with pushd(self.build_root):
        with temporary_dir() as d:
            SourceRoot.register('3rdparty')
            spec = '3rdparty/github.com/user/lib'
            remote_lib_src_dir = os.path.join(d, spec)
            # Mixed Go and C/C++ files: linking should cover every fetched file, not just .go.
            remote_files = ['file.go', 'file.cc', 'file.hh']
            for remote_file in remote_files:
                self.create_file(
                    os.path.join(remote_lib_src_dir, remote_file))
            go_remote_lib = self.make_target(spec=spec, target_type=GoRemoteLibrary)
            context = self.context()
            go_remote_lib_src = context.products.get_data(
                'go_remote_lib_src', init_func=lambda: defaultdict(str))
            go_remote_lib_src[go_remote_lib] = remote_lib_src_dir
            ws_task = self.create_task(context)
            gopath = ws_task.get_gopath(go_remote_lib)
            ws_task._symlink_remote_lib(gopath, go_remote_lib, set())
            # The workspace dir materializes under src/<import path> in the gopath.
            workspace_dir = os.path.join(gopath, 'src/github.com/user/lib')
            self.assertTrue(os.path.isdir(workspace_dir))
            # Each source file is an individual symlink back to the fetched copy.
            for remote_file in remote_files:
                link = os.path.join(workspace_dir, remote_file)
                self.assertEqual(
                    os.readlink(link),
                    os.path.join(remote_lib_src_dir, remote_file))
def _run_javascriptstyle(self, target, bootstrap_dir, files, config=None, ignore_path=None,
                         other_args=None):
    """Build the eslint argv for `files` and run it from `bootstrap_dir`.

    :param target: Target whose ignore patterns are applied.
    :param bootstrap_dir: Directory containing the bootstrapped eslint install; cwd for the run.
    :param files: Source files to lint.
    :param config: Optional eslint config path; without it, --no-eslintrc is passed.
    :param ignore_path: Optional .eslintignore path.
    :param other_args: Optional extra eslint arguments, appended before the files.
    :returns: The result of run_cli for the eslint invocation.
    """
    args = []
    if config:
        args.extend(['--config', config])
    else:
        args.extend(['--no-eslintrc'])
    if ignore_path:
        args.extend(['--ignore-path', ignore_path])
    if self.fix:
        self.context.log.info('Autoformatting is enabled for javascriptstyle.')
        args.extend(['--fix'])
    if self.get_options().color:
        args.extend(['--color'])
    ignore_patterns = self._get_target_ignore_patterns(target)
    if ignore_patterns:
        # FIX: the former f'{ignore_args}' wrapper was a no-op — an f-string interpolation
        # adds no quotes (the old comment was misleading), and args are passed as an argv
        # list so no shell-glob quoting is needed. Pass each pattern through unchanged.
        args.extend([arg
                     for pattern in ignore_patterns
                     for arg in ['--ignore-pattern', pattern]])
    if other_args:
        args.extend(other_args)
    args.extend(files)
    with pushd(bootstrap_dir):
        return self.run_cli('eslint', args=args)
def execute_codegen(self, target, target_workdir):
    """Run the target's webpack build via `npm run-script`, emitting into the destination dir."""
    node_paths = self.context.products.get_data(NodePaths)
    if not node_paths:
        raise TaskError("No npm distribution was found!")
    node_path = node_paths.node_path(target)
    dest_dir = os.path.join(target_workdir, self.get_options().destination_dir)
    # NOTE(mateo): The target_workdir is the 'current' symlink and not respected by clean=True. Need to fix upstream.
    safe_mkdir(os.path.realpath(target_workdir), clean=True)
    # Added "bail" to the args since webpack only returns failure on failed transpiling, treating missing deps or
    # syntax errors as soft errors. This resulted in Pants returning success while the canary fails health check.
    args = [
        'run-script',
        'webpack',
        '--',  # Everything after '--' is forwarded to the webpack script itself.
        '--bail',
        '--output-path={}'.format(dest_dir),
        '--env=dist',
    ]
    with pushd(node_path):
        result, npm_run = self.execute_npm(
            args=args,
            workunit_labels=[WorkUnitLabel.RUN],
        )
        # Any non-zero exit code is a failure.
        if result:
            raise TaskError(
                dedent("""
                webpack command: \n\t{} failed with exit code {}
                """.format(' '.join(npm_run.cmd), result)))
def _create_dist(self, dist_tgt, dist_target_dir, setup_requires_pex, snapshot_fingerprint,
                 is_platform_specific):
    """Create a .whl file for the specified python_distribution target."""
    # Stage the target's sources into the build dir so setup.py can see them.
    self._copy_sources(dist_tgt, dist_target_dir)
    setup_py_snapshot_version_argv = self._generate_snapshot_bdist_wheel_argv(
        snapshot_fingerprint, is_platform_specific)
    # Human-readable command line recorded on the workunit for diagnostics.
    cmd = safe_shlex_join(
        setup_requires_pex.cmdline(setup_py_snapshot_version_argv))
    with self.context.new_workunit('setup.py', cmd=cmd,
                                   labels=[WorkUnitLabel.TOOL]) as workunit:
        # Run setup.py (inside the setup-requires pex) from within the staged dist dir,
        # streaming its output into the workunit.
        with pushd(dist_target_dir):
            result = setup_requires_pex.run(
                args=setup_py_snapshot_version_argv,
                stdout=workunit.output('stdout'),
                stderr=workunit.output('stderr'))
            if result != 0:
                raise self.BuildLocalPythonDistributionsError(
                    "Installation of python distribution from target {target} into directory {into_dir} "
                    "failed (return value of run() was: {rc!r}).\n"
                    "The pex with any requirements is located at: {interpreter}.\n"
                    "The host system's compiler and linker were used.\n"
                    "The setup command was: {command}.".format(
                        target=dist_tgt,
                        into_dir=dist_target_dir,
                        rc=result,
                        interpreter=setup_requires_pex.path(),
                        command=setup_py_snapshot_version_argv))
def _execute(self, all_targets):
    """Implements abstract TestRunnerTaskMixin._execute."""
    test_targets = self._get_test_targets()
    if not test_targets:
        return
    node_paths = self.context.products.get_data(NodePaths)
    for test_target in test_targets:
        # The node module under test is taken to be the target's first dependency.
        module_target = test_target.dependencies[0]
        self.context.log.debug(
            'Testing node module (first dependency): {}'.format(module_target))
        with pushd(node_paths.node_path(module_target)):
            self._currently_executing_test_targets = [test_target]
            returncode, command = self.run_script(
                test_target.script_name,
                package_manager=self.get_package_manager(target=module_target),
                target=test_target,
                script_args=self.get_passthru_args(),
                node_paths=node_paths.all_node_paths,
                workunit_name=test_target.address.reference(),
                workunit_labels=[WorkUnitLabel.TEST])
            if returncode != 0:
                raise TaskError('test script failed:\n'
                                '\t{} failed with exit code {}'.format(command, returncode))
    self._currently_executing_test_targets = []
def test_rm_rf_permission_error_raises(self, file_name='./perm_guarded_file'):
    """rm_rf should surface the OSError (EACCES) raised by the underlying rmtree."""
    access_denied = OSError(errno.EACCES, os.strerror(errno.EACCES))
    with temporary_dir() as td:
        with pushd(td):
            with mock.patch('pants.util.dirutil.shutil.rmtree') as mock_rmtree:
                with self.assertRaises(OSError):
                    mock_rmtree.side_effect = access_denied
                    touch(file_name)
                    rm_rf(file_name)
def launch_repl(self, targets):
    """Synthesize a node module depending on `targets`, install it, and start a node REPL."""
    with temporary_dir() as temp_dir:
        node_paths = self.context.products.get_data(NodePaths)
        package_json_path = os.path.join(temp_dir, 'package.json')
        package = {
            'name': self.SYNTHETIC_NODE_TARGET_NAME,
            'version': '0.0.0',
            'dependencies': {
                # Local node modules resolve by filesystem path; others pin by version.
                target.package_name: node_paths.node_path(target) if self.is_node_module(target)
                                     else target.version
                for target in targets
            }
        }
        # json.dump emits str on Python 3 (requires 'w'); 'wb' is kept for Python 2.
        mode = 'w' if PY3 else 'wb'
        with open(package_json_path, mode) as fp:
            json.dump(package, fp, indent=2)
        args = self.get_passthru_args()
        node_repl = self.node_distribution.node_command(
            args=args, node_paths=node_paths.all_node_paths if node_paths else None)
        with pushd(temp_dir):
            result, command = self.install_module(
                package_manager=self.node_distribution.get_package_manager(
                    package_manager=PACKAGE_MANAGER_NPM),
                workunit_name=self.SYNTHETIC_NODE_TARGET_NAME)
            if result != 0:
                raise TaskError('npm install of synthetic REPL module failed:\n'
                                '\t{} failed with exit code {}'.format(command, result))
            # Block until the user exits the REPL session.
            repl_session = node_repl.run()
            repl_session.wait()
def test_detect_worktree(self):
    """Git.detect_worktree should find the clone from any cwd inside it, and only inside it."""
    with temporary_dir() as _clone:
        with pushd(_clone):
            # realpath: the temp dir may live behind a symlink (e.g. /tmp on macOS);
            # NOTE(review): detection presumably reports real paths — confirm against Git.
            clone = os.path.realpath(_clone)
            self.init_repo('origin', self.origin)
            subprocess.check_call(['git', 'pull', '--tags', 'origin', 'master:master'])

            def worktree_relative_to(cwd, expected):
                """Given a cwd relative to the worktree, tests that the worktree is detected as 'expected'."""
                orig_cwd = os.getcwd()
                try:
                    abs_cwd = os.path.join(clone, cwd)
                    if not os.path.isdir(abs_cwd):
                        os.mkdir(abs_cwd)
                    os.chdir(abs_cwd)
                    actual = Git.detect_worktree()
                    self.assertEqual(expected, actual)
                finally:
                    # Always restore the original cwd so subsequent checks start clean.
                    os.chdir(orig_cwd)

            # Outside the clone: nothing detected; at and below the root: the clone itself.
            worktree_relative_to('..', None)
            worktree_relative_to('.', clone)
            worktree_relative_to('is', clone)
            worktree_relative_to('is/a', clone)
            worktree_relative_to('is/a/dir', clone)
def test_simple_pushd(self):
    """pushd should chdir into the target dir and restore the prior cwd on exit.

    FIX: assertEquals is a deprecated unittest alias; replaced with assertEqual.
    """
    pre_cwd = os.getcwd()
    with temporary_dir() as tempdir:
        with pushd(tempdir) as path:
            # The context yields the path it was given...
            self.assertEqual(tempdir, path)
            # ...and the process cwd is the (symlink-resolved) temp dir.
            self.assertEqual(os.path.realpath(tempdir), os.getcwd())
        # The cwd is restored as soon as the pushd context exits...
        self.assertEqual(pre_cwd, os.getcwd())
    # ...and stays restored after the temp dir is cleaned up.
    self.assertEqual(pre_cwd, os.getcwd())
def test_detect_worktree_somewhere_else(self):
    """detect_worktree(dir=...) should find a worktree only once the dir is a git repo.

    FIX: assertEquals is a deprecated unittest alias; replaced with
    assertIsNone/assertEqual.
    """
    with temporary_dir() as somewhere_else:
        with pushd(somewhere_else):
            # Not yet a git repo: no worktree should be detected.
            loc = Git.detect_worktree(dir=somewhere_else)
            self.assertIsNone(loc)
            subprocess.check_call(['git', 'init'])
            # Now the dir itself is the worktree root (realpath'd to resolve tmp symlinks).
            loc = Git.detect_worktree(dir=somewhere_else)
            self.assertEqual(os.path.realpath(somewhere_else), loc)
def run_tests(self, targets, *passthru_args, **options):
    """Run the tests in the specified targets, with the specified PytestRun task options."""
    self.set_options(**self._augment_options(options))
    with pushd(self.build_root):
        # invoke_tasks returns a result whose context callers inspect.
        return self.invoke_tasks(
            target_roots=targets,
            passthru_args=list(passthru_args),
        ).context
def _resolve(self, node_module, node_path, node_paths):
    """Stage the module's sources into its chroot and install its npm dependencies there."""
    _copy_sources(buildroot=get_buildroot(), node_module=node_module, dest_dir=node_path)
    self._emit_package_descriptor(node_module, node_path, node_paths)
    with pushd(node_path):
        # TODO(John Sirois): Handle dev dependency resolution.
        returncode, install_command = self.execute_npm(
            args=['install'], workunit_name=node_module.address.reference())
        if returncode != 0:
            raise TaskError(
                'Failed to resolve dependencies for {}:\n\t{} failed with exit code {}'.format(
                    node_module.address.reference(), install_command, returncode))
def run_tests(self, targets):
    """Create and execute a PytestRun task over `targets` from within the build root."""
    # When debugging a test failure it may be helpful to set 'level' to 'debug'.
    self.set_options(colors=False, level='info')
    context = self.context(target_roots=targets)
    task = self.create_task(context)
    with pushd(self.build_root):
        task.execute()
def resolve_target(self, node_task, target, results_dir, node_paths):
    """Stage the target's sources into results_dir and run `npm install` there."""
    self._copy_sources(target, results_dir)
    self._emit_package_descriptor(node_task, target, results_dir, node_paths)
    with pushd(results_dir):
        returncode, install_command = node_task.execute_npm(
            ['install'],
            workunit_name=target.address.reference(),
            workunit_labels=[WorkUnitLabel.COMPILER])
        if returncode != 0:
            raise TaskError(
                'Failed to resolve dependencies for {}:\n\t{} failed with exit code {}'.format(
                    target.address.reference(), install_command, returncode))