def init_repo(remote_name, remote):
    """Create a git repo in the cwd, with signing disabled, and attach `remote` as `remote_name`."""
    # TODO (peiyu) clean this up, use `git_util.initialize_repo`.
    setup_commands = (
        ['init'],
        ['config', 'user.email', '*****@*****.**'],
        ['config', 'user.name', 'Your Name'],
        ['config', 'commit.gpgSign', 'false'],
        ['remote', 'add', remote_name, remote],
    )
    for args in setup_commands:
        subprocess.check_call(['git'] + args)
def execute(self, test_output_file=None):
    """Run an isort pex over the isortable python sources of the target roots, unless skipped."""
    if self.options.skip:
        return
    sources = self._calculate_isortable_python_sources(self.context.target_roots)
    if not sources:
        logging.debug(self.NOOP_MSG_HAS_TARGET_BUT_NO_SOURCE)
        return

    isort_script = BinaryUtil.Factory.create().select_script(
        'scripts/isort', self.options.version, 'isort.pex')
    argv = [isort_script] + self.get_passthru_args() + sources
    rendered_argv = ' '.join(argv)
    logging.debug(rendered_argv)
    try:
        subprocess.check_call(argv,
                              cwd=get_buildroot(),
                              stderr=test_output_file,
                              stdout=test_output_file)
    except subprocess.CalledProcessError as e:
        raise TaskError('{} ... exited non-zero ({}).'.format(rendered_argv, e.returncode))
def initialize_repo(worktree, gitdir=None):
    """Initialize a git repository for the given `worktree`.

    NB: The given `worktree` must contain at least one file which will be committed to form an
    initial commit.

    :param string worktree: The path to the git work tree.
    :param string gitdir: An optional path to the `.git` dir to use.
    :returns: A `Git` repository object that can be used to interact with the repo.
    :rtype: :class:`pants.scm.git.Git`
    """
    @contextmanager
    def use_gitdir():
        # Use the caller-supplied gitdir when given, otherwise a throwaway temp dir.
        if gitdir:
            yield gitdir
        else:
            with temporary_dir() as d:
                yield d

    with use_gitdir() as git_dir, environment_as(GIT_DIR=git_dir, GIT_WORK_TREE=worktree):
        subprocess.check_call(['git', 'init'])
        subprocess.check_call(['git', 'config', 'user.email', '*****@*****.**'])
        subprocess.check_call(['git', 'config', 'user.name', 'Your Name'])
        # FIX: `git init` inherits the developer's global config, so with gpgSign enabled
        # globally the commit below would prompt for a signing key and hang or fail.
        # Disable signing locally, matching the other repo-initialization helpers here.
        subprocess.check_call(['git', 'config', 'commit.gpgSign', 'false'])
        subprocess.check_call(['git', 'add', '.'])
        subprocess.check_call(['git', 'commit', '-am', 'Add project files.'])

        yield Git(gitdir=git_dir, worktree=worktree)
def test_detect_worktree(self):
    """Git.detect_worktree() should find the enclosing worktree from any cwd inside it."""
    with temporary_dir() as _clone, pushd(_clone):
        clone = os.path.realpath(_clone)
        self.init_repo('origin', self.origin)
        subprocess.check_call(['git', 'pull', '--tags', 'origin', 'master:master'])

        def assert_worktree_from(rel_cwd, expected):
            # Given a cwd relative to the worktree, tests that the worktree is
            # detected as 'expected'. Always restores the original cwd.
            saved_cwd = os.getcwd()
            try:
                target_dir = os.path.join(clone, rel_cwd)
                if not os.path.isdir(target_dir):
                    os.mkdir(target_dir)
                os.chdir(target_dir)
                self.assertEqual(expected, Git.detect_worktree())
            finally:
                os.chdir(saved_cwd)

        # Order matters: each nested dir is created by its own probe before the next.
        for rel_cwd, expected in (('..', None),
                                  ('.', clone),
                                  ('is', clone),
                                  ('is/a', clone),
                                  ('is/a/dir', clone)):
            assert_worktree_from(rel_cwd, expected)
def test_refresh_with_conflict(self):
    """refresh() must raise on a conflicting local commit; leave_clean controls the end state."""
    with environment_as(GIT_DIR=self.gitdir, GIT_WORK_TREE=self.worktree):
        self.assertEqual(set(), self.git.changed_files())
        self.assertEqual({'README'}, self.git.changed_files(from_commit='HEAD^'))
        self.assertEqual({'README'}, self.git.changes_in('HEAD'))

        # Create a change on this branch that is incompatible with the change to master
        with open(self.readme_file, 'w') as readme:
            readme.write('Conflict')
        subprocess.check_call(['git', 'commit', '-am', 'Conflict'])

        # FIX: this block mixed the deprecated `assertEquals` alias with `assertEqual`;
        # use the canonical `assertEqual` consistently throughout.
        self.assertEqual(set(),
                         self.git.changed_files(include_untracked=True, from_commit='HEAD'))
        with self.assertRaises(Scm.LocalException):
            self.git.refresh(leave_clean=False)
        # The repo is dirty
        self.assertEqual({'README'},
                         self.git.changed_files(include_untracked=True, from_commit='HEAD'))

        with environment_as(GIT_DIR=self.gitdir, GIT_WORK_TREE=self.worktree):
            subprocess.check_call(['git', 'reset', '--hard', 'HEAD'])

        # Now try with leave_clean
        with self.assertRaises(Scm.LocalException):
            self.git.refresh(leave_clean=True)
        # The repo is clean
        self.assertEqual(set(),
                         self.git.changed_files(include_untracked=True, from_commit='HEAD'))
def commit_contents_to_files(content, *files):
    """Write `content` into each of `files` (worktree-relative), commit, and return HEAD's sha."""
    for rel_path in files:
        dest = os.path.join(self.worktree, rel_path)
        with safe_open(dest, 'w') as out:
            out.write(content)
    subprocess.check_call(['git', 'add', '.'])
    commit_msg = 'change {}'.format(files)
    subprocess.check_call(['git', 'commit', '-m', commit_msg])
    head = subprocess.check_output(['git', 'rev-parse', 'HEAD'])
    return head.strip()
def run_command(self, cmd, workunit):
    """Run `cmd`, streaming its output into the workunit; raise TaskError on a non-zero exit."""
    self.context.log.debug('Executing: {0}'.format(cmd))
    # TODO: capture stdout/stderr and redirect to log
    try:
        subprocess.check_call(cmd,
                              stdout=workunit.output('stdout'),
                              stderr=workunit.output('stderr'))
    except subprocess.CalledProcessError as e:
        raise TaskError('Execution failed: {0}'.format(e))
def mkremote(self, remote_name):
    """Yield a temp-dir-backed git remote registered as `remote_name`; deregister it afterwards."""
    with temporary_dir() as remote_path:
        add_cmd = ['git', 'remote', 'add', remote_name, remote_path]
        subprocess.check_call(add_cmd)
        try:
            yield remote_path
        finally:
            # Always detach the remote, even if the caller's block raised.
            subprocess.check_call(['git', 'remote', 'remove', remote_name])
def commit_contents_to_files(message, encoding, content, *files):
    """Write `content` to `files`, commit with `message` in `encoding`, and return HEAD's sha.

    The i18n.commitencoding setting is always removed again, even if the commit fails.
    """
    def git(*args):
        subprocess.check_call(['git'] + list(args))

    for rel_path in files:
        with safe_open(os.path.join(self.worktree, rel_path), 'w') as out:
            out.write(content)
    git('add', '.')
    git('config', '--local', '--add', 'i18n.commitencoding', encoding)
    git('config', '--local', 'commit.gpgSign', 'false')
    try:
        git('commit', '-m', message.encode(encoding))
    finally:
        git('config', '--local', '--unset-all', 'i18n.commitencoding')
    return subprocess.check_output(['git', 'rev-parse', 'HEAD']).strip()
def execute(self):
    """Publish a local sbt dist for every invalidated sbt-dist target root.

    Enforces at most one sbt dist per directory, then runs `sbt publishLocal`
    in each invalid target's base directory, streaming output into a workunit.
    """
    if self.get_options().skip:
        return
    sbt_dist_targets = self.context.targets(self.source_target_constraint.satisfied_by)
    jvm_dist_locator = DistributionLocator.cached()

    with self.invalidated(sbt_dist_targets, invalidate_dependents=True) as invalidation_check:
        # Check that we have at most one sbt dist per directory.
        seen_basedirs = {}
        for vt in invalidation_check.all_vts:
            base_dir = vt.target.address.spec_path
            if base_dir in seen_basedirs:
                prev_target = seen_basedirs[base_dir]
                raise self.PublishLocalSbtDistsError(
                    "multiple sbt dists defined in the same directory: current = {}, previous = {}"
                    .format(vt.target, prev_target))
            else:
                seen_basedirs[base_dir] = vt.target

        # Only invalid (changed) dists need republishing.
        for vt in invalidation_check.invalid_vts:
            base_dir = vt.target.address.spec_path
            with self.context.new_workunit(
                name='publish-local-sbt-dists',
                labels=[WorkUnitLabel.COMPILER],
            ) as workunit:
                # Pin the sbt version only when the subsystem configures one.
                sbt_version_args = ['-sbt-version', self._sbt.version] if self._sbt.version else []
                argv = ['sbt'] + sbt_version_args + [
                    '-java-home', jvm_dist_locator.home,
                    '-ivy', self._sbt.local_publish_repo,
                    '-batch',
                    'publishLocal',
                ]
                try:
                    subprocess.check_call(argv,
                                          cwd=os.path.join(get_buildroot(), base_dir),
                                          stdout=workunit.output('stdout'),
                                          stderr=workunit.output('stderr'))
                except OSError as e:
                    # Launch failure (e.g. sbt not on PATH).
                    workunit.set_outcome(WorkUnit.FAILURE)
                    raise self.PublishLocalSbtDistsError(
                        "Error invoking sbt with command {} for target {}: {}"
                        .format(argv, vt.target, e), e)
                except subprocess.CalledProcessError as e:
                    # sbt ran but the publish failed.
                    workunit.set_outcome(WorkUnit.FAILURE)
                    raise self.PublishLocalSbtDistsError(
                        "Error publishing local sbt dist with command {} for target {}. Exit code was: {}"
                        .format(argv, vt.target, e.returncode), e)
def execute(self):
    """Compile a small Java `Cwd` program, run it via runjava, and report which cwd it saw.

    Returns a (source, java_cwd, python_cwd) tuple: `source` names the Java entrypoint
    that ran ("nailMain" vs "main"), `java_cwd` is the cwd observed on the Java side,
    and `python_cwd` is the temporary cwd the Python side ran from.
    """
    cwd_code = os.path.join(self.workdir, 'Cwd.java')
    with open(cwd_code, 'w') as fp:
        fp.write(dedent("""
          import java.io.IOException;
          import java.nio.charset.Charset;
          import java.nio.file.Files;
          import java.nio.file.Paths;
          import java.util.Arrays;

          import com.martiansoftware.nailgun.NGContext;

          public class Cwd {
            public static void nailMain(NGContext context) throws IOException {
              String comm_file = context.getArgs()[0];
              String cwd = context.getWorkingDirectory();
              communicate(comm_file, cwd, "nailMain");
            }

            public static void main(String[] args) throws IOException {
              String comm_file = args[0];
              String cwd = System.getProperty("user.dir");
              communicate(comm_file, cwd, "main");
            }

            private static void communicate(String comm_file, String cwd, String source) throws IOException {
              Files.write(Paths.get(comm_file), Arrays.asList(source, cwd), Charset.forName("UTF-8"));
            }
          }
          """))
    javac = self.dist.binary('javac')
    nailgun_cp = self.tool_classpath('nailgun-server')
    classes_dir = os.path.join(self.workdir, 'classes')
    safe_mkdir(classes_dir)
    # -Werror so any compilation warning fails the setup early and loudly.
    subprocess.check_call([javac, '-cp', os.pathsep.join(nailgun_cp),
                           '-d', classes_dir, '-Werror', cwd_code])

    comm_file = os.path.join(self.workdir, 'comm_file')
    with temporary_dir() as python_cwd:
        with pushd(python_cwd):
            exit_code = self.runjava(nailgun_cp + [classes_dir], 'Cwd', args=[comm_file])
            if exit_code != 0:
                raise TaskError(exit_code=exit_code)
    # The Java program wrote two lines (entrypoint name, its cwd) to comm_file.
    with open(comm_file, 'rb') as fp:
        source, java_cwd = fp.read().strip().decode('utf-8').splitlines()
    return source, java_cwd, python_cwd
def _execute_buildozer_command(cls, buildozer_command):
    """Run buildozer; treat exit code 3 ("no changes") as a warning, other failures as errors."""
    try:
        subprocess.check_call(buildozer_command, cwd=get_buildroot())
    except subprocess.CalledProcessError as err:
        # buildozer exits 3 when the command succeeded but had nothing to change.
        if err.returncode != 3:
            raise TaskError('{} ... exited non-zero ({}).'.format(buildozer_command, err.returncode))
        logger.warn('{} ... no changes were made'.format(buildozer_command))
def init_repo(remote_name, remote):
    """Initialize a git repo in the cwd, with signing disabled and `remote` attached."""
    # TODO (peiyu) clean this up, use `git_util.initialize_repo`.
    def run_git(*args):
        subprocess.check_call(['git'] + list(args))

    run_git('init')
    run_git('config', 'user.email', '*****@*****.**')
    run_git('config', 'user.name', 'Your Name')
    run_git('config', 'commit.gpgSign', 'false')
    run_git('remote', 'add', remote_name, remote)
def execute(self, buildozer_command, spec, context=None):
    """Run the selected buildozer binary with `buildozer_command` against `spec`."""
    full_argv = [self.select(context), buildozer_command, spec]
    try:
        subprocess.check_call(full_argv, cwd=get_buildroot())
    except subprocess.CalledProcessError as err:
        # Exit code 3 means buildozer ran fine but had nothing to change.
        if err.returncode != 3:
            raise TaskError('{} ... exited non-zero ({}).'.format(buildozer_command, err.returncode))
        logger.warn('{} ... no changes were made'.format(buildozer_command))
def visualize_execution_graph(scheduler):
    """Dump the scheduler's graph to a persistent .dot file, render it to .svg, and open it."""
    with temporary_file_path(cleanup=False, suffix='.dot') as dot_file:
        scheduler.visualize_graph_to_file(dot_file)
        print('dot file saved to: {}'.format(dot_file))

    with temporary_file_path(cleanup=False, suffix='.svg') as image_file:
        # FIX: pass an argv list instead of `shell=True` with a string-interpolated
        # command, so temp paths containing spaces or shell metacharacters cannot
        # break (or inject into) the command line. Behavior is otherwise identical.
        subprocess.check_call(['dot', '-Tsvg', '-o{}'.format(image_file), dot_file])
        print('svg file saved to: {}'.format(image_file))
        desktop.ui_open(image_file)
def setUp(self):
    """Create a fresh git repo over the prepared fixture tree and commit everything."""
    super(ScmPantsIgnoreTest, self).setUp()
    self.prepare()
    subprocess.check_call(['git', 'init'])
    subprocess.check_call(['git', 'config', 'user.email', '*****@*****.**'])
    subprocess.check_call(['git', 'config', 'user.name', 'Your Name'])
    subprocess.check_call(['git', 'add', '.'])
    # FIX: the original passed "'-m' 'initial commit'" — adjacent string literals that
    # Python concatenates into the single argument '-minitial commit'. git happens to
    # accept the attached `-m<msg>` form, so behavior is unchanged, but pass the flag
    # and the message as two explicit argv entries.
    subprocess.check_call(['git', 'commit', '-m', 'initial commit'])
def _execute_buildozer_command(cls, buildozer_command):
    """Invoke buildozer, downgrading its "no changes" exit status (3) to a warning."""
    try:
        subprocess.check_call(buildozer_command, cwd=get_buildroot())
    except subprocess.CalledProcessError as err:
        if err.returncode == 3:
            # Exit status 3: successful run, nothing to modify.
            logger.warn('{} ... no changes were made'.format(buildozer_command))
            return
        raise TaskError('{} ... exited non-zero ({}).'.format(buildozer_command, err.returncode))
def test_changes_in(self):
    """Test finding changes in a diffspec.

    To some extent this is just testing functionality of git not pants, since all pants
    says is that it will pass the diffspec to git diff-tree, but this should serve to at
    least document the functionality we believe works.
    """
    with environment_as(GIT_DIR=self.gitdir, GIT_WORK_TREE=self.worktree):

        def commit_contents_to_files(content, *files):
            # Write `content` to each file, commit everything, and return the new HEAD sha.
            for path in files:
                with safe_open(os.path.join(self.worktree, path), 'w') as fp:
                    fp.write(content)
            subprocess.check_call(['git', 'add', '.'])
            subprocess.check_call(['git', 'commit', '-m', 'change {}'.format(files)])
            return subprocess.check_output(['git', 'rev-parse', 'HEAD']).strip()

        # We can get changes in HEAD or by SHA
        c1 = commit_contents_to_files('1', 'foo')
        self.assertEqual({'foo'}, self.git.changes_in('HEAD'))
        self.assertEqual({'foo'}, self.git.changes_in(c1))

        # Changes in new HEAD, from old-to-new HEAD, in old HEAD, or from old-old-head to new.
        commit_contents_to_files('2', 'bar')
        self.assertEqual({'bar'}, self.git.changes_in('HEAD'))
        self.assertEqual({'bar'}, self.git.changes_in('HEAD^..HEAD'))
        self.assertEqual({'foo'}, self.git.changes_in('HEAD^'))
        self.assertEqual({'foo'}, self.git.changes_in('HEAD~1'))
        self.assertEqual({'foo', 'bar'}, self.git.changes_in('HEAD^^..HEAD'))

        # New commit doesn't change results-by-sha
        self.assertEqual({'foo'}, self.git.changes_in(c1))

        # Files changed in multiple diffs within a range
        c3 = commit_contents_to_files('3', 'foo')
        self.assertEqual({'foo', 'bar'}, self.git.changes_in('{}..{}'.format(c1, c3)))

        # Changes in a tag
        subprocess.check_call(['git', 'tag', 'v1'])
        self.assertEqual({'foo'}, self.git.changes_in('v1'))

        # Introduce a new filename
        c4 = commit_contents_to_files('4', 'baz')
        self.assertEqual({'baz'}, self.git.changes_in('HEAD'))

        # Tag-to-sha
        self.assertEqual({'baz'}, self.git.changes_in('{}..{}'.format('v1', c4)))

        # We can get multiple changes from one ref
        commit_contents_to_files('5', 'foo', 'bar')
        self.assertEqual({'foo', 'bar'}, self.git.changes_in('HEAD'))
        self.assertEqual({'foo', 'bar', 'baz'}, self.git.changes_in('HEAD~4..HEAD'))
        self.assertEqual({'foo', 'bar', 'baz'}, self.git.changes_in('{}..HEAD'.format(c1)))
        self.assertEqual({'foo', 'bar', 'baz'}, self.git.changes_in('{}..{}'.format(c1, c4)))
def build_checker_wheel(root_dir):
    """Build a universal wheel for the checkstyle checker into `root_dir` and return its path."""
    target = Checkstyle._CHECKER_ADDRESS_SPEC
    pants_argv = [
        os.path.join(get_buildroot(), 'pants'),
        '--pants-distdir={}'.format(root_dir),
        'setup-py',
        '--run=bdist_wheel --universal',
        target,
    ]
    subprocess.check_call(pants_argv)

    # The wheel lands somewhere under root_dir; return the first one found.
    for dirpath, _, filenames in os.walk(root_dir):
        for filename in filenames:
            if filename.endswith('.whl'):
                return os.path.join(dirpath, filename)

    raise AssertionError('Failed to generate a wheel for {}'.format(target))
def commit_contents_to_files(message, encoding, content, *files):
    """Commit `content` written to `files`, using `message` encoded with `encoding`.

    Returns the sha of the resulting HEAD commit. The i18n.commitencoding
    setting is always cleared again, even if the commit itself fails.
    """
    for rel_path in files:
        file_path = os.path.join(self.worktree, rel_path)
        with safe_open(file_path, 'w') as out:
            out.write(content)
    subprocess.check_call(['git', 'add', '.'])
    subprocess.check_call(
        ['git', 'config', '--local', '--add', 'i18n.commitencoding', encoding])
    subprocess.check_call(['git', 'config', '--local', 'commit.gpgSign', 'false'])
    try:
        subprocess.check_call(['git', 'commit', '-m', message.encode(encoding)])
    finally:
        subprocess.check_call(
            ['git', 'config', '--local', '--unset-all', 'i18n.commitencoding'])
    return subprocess.check_output(['git', 'rev-parse', 'HEAD']).strip()
def _build_binary(self, ensime_binary_target_spec):
    """Invoke pants in a subprocess to build the ensime-gen binary, then cache the dist jar.

    :param ensime_binary_target_spec: address spec of the ensime-gen binary target.
    :raises: :class:`BootstrapEnsimeError` if the subprocess cannot be launched or exits non-zero.
    """
    # Quote each bootstrap config path so the bracketed list option parses intact.
    pants_config_files_args = ['"{}"'.format(f) for f in self._bootstrap_config_files]
    with temporary_dir() as tmpdir:
        cmd = [
            './pants',
            '--pants-config-files=[{}]'.format(','.join(pants_config_files_args)),
            '--pants-distdir={}'.format(tmpdir),
            'binary',
            ensime_binary_target_spec,
        ]
        env = self._get_subproc_env()

        with self.context.new_workunit(
            name='bootstrap-ensime-gen-subproc',
            labels=[WorkUnitLabel.COMPILER],
            # TODO: replace space join with safe_shlex_join() when #5493 is merged!
            cmd=' '.join(cmd),
        ) as workunit:
            try:
                subprocess.check_call(cmd,
                                      cwd=get_buildroot(),
                                      stdout=workunit.output('stdout'),
                                      stderr=workunit.output('stderr'),
                                      env=env)
            except OSError as e:
                # Launch failure (e.g. missing ./pants executable).
                workunit.set_outcome(WorkUnit.FAILURE)
                raise self.BootstrapEnsimeError(
                    "Error invoking pants for the ensime-gen binary with command {} from target {}: {}"
                    .format(cmd, ensime_binary_target_spec, e), e)
            except subprocess.CalledProcessError as e:
                # The build ran but failed.
                workunit.set_outcome(WorkUnit.FAILURE)
                raise self.BootstrapEnsimeError(
                    "Error generating the ensime-gen binary with command {} from target {}. "
                    "Exit code was: {}.".format(cmd, ensime_binary_target_spec, e.returncode), e)

        # Move the produced jar out of the temp distdir into the stable workdir cache.
        dist_jar = self._collect_dist_jar(tmpdir)
        jar_fname = os.path.basename(dist_jar)
        cached_jar_path = os.path.join(self.workdir, jar_fname)
        shutil.move(dist_jar, cached_jar_path)
def _run_zef_command(self, workunit_factory, argv):
    """Run a `zef` command with self.path_entries prepended to PATH.

    :param workunit_factory: optional context-manager factory used to stream output into a
                             workunit; when None, output is captured and logged at debug.
    :param argv: argument list appended after the `zef` executable.
    :raises: :class:`ZefException` if zef cannot be launched or exits non-zero.
    """
    subproc_env = os.environ.copy()
    subproc_env['PATH'] = create_path_env_var(self.path_entries, subproc_env, prepend=True)
    all_argv = ['zef'] + argv
    pretty_printed_argv = safe_shlex_join(all_argv)
    try:
        if workunit_factory:
            with workunit_factory(cmd=pretty_printed_argv) as workunit:
                return subprocess.check_call(all_argv,
                                             env=subproc_env,
                                             stdout=workunit.output('stdout'),
                                             stderr=workunit.output('stderr'))
        else:
            output = subprocess.check_output(all_argv, env=subproc_env)
            logger.debug("output from running zef command {!r} with env {!r}:\n{}"
                         .format(all_argv, subproc_env, output))
    except (OSError, subprocess.CalledProcessError) as e:
        # BUG FIX: OSError has no `returncode` attribute, so the original handler died
        # with AttributeError (masking the real failure) whenever the zef binary could
        # not be launched. Default the exit code to 1 in that case.
        raise self.ZefException(
            "Error with zef command '{}': {}".format(pretty_printed_argv, e),
            e, exit_code=getattr(e, 'returncode', 1))
def execute(self, test_output_file=None):
    """Run isort over the isortable sources of the non-synthetic python targets."""
    candidate_targets = self.get_targets(self.is_non_synthetic_python_target)
    sources = self._calculate_isortable_python_sources(candidate_targets)
    if not sources:
        logging.debug(self.NOOP_MSG_HAS_TARGET_BUT_NO_SOURCE)
        return

    argv = [Isort.global_instance().select(context=self.context)]
    argv.extend(self.get_passthru_args())
    argv.extend(sources)
    logging.debug(' '.join(argv))
    try:
        subprocess.check_call(argv,
                              cwd=get_buildroot(),
                              stderr=test_output_file,
                              stdout=test_output_file)
    except subprocess.CalledProcessError as e:
        raise TaskError('{} ... exited non-zero ({}).'.format(' '.join(argv), e.returncode))
def _execute_buildozer_script(self, command):
    """Run `command` through buildozer once per target root address."""
    for root in self.context.target_roots:
        address = root.address
        # NOTE(review): relies on the private `_spec_path`/`_target_name` attributes;
        # prefer the public address API if one is available — confirm.
        spec = '//{}:{}'.format(address._spec_path, address._target_name)
        buildozer_command = [self._executable, command, spec]
        try:
            subprocess.check_call(buildozer_command, cwd=get_buildroot())
        except subprocess.CalledProcessError as err:
            if err.returncode == 3:
                # NOTE(review): buildozer exit code 3 means "success, no changes";
                # the sibling wrappers in this codebase only warn in this case, so
                # raising here may be unintended — confirm before changing.
                raise TaskError('{} ... no changes were made'.format(buildozer_command))
            else:
                raise TaskError('{} ... exited non-zero ({}).'.format(
                    buildozer_command, err.returncode))
def test_detect_worktree_no_cwd(self):
    """detect_worktree(subdir=...) should locate the worktree without changing the cwd."""
    with temporary_dir() as _clone, pushd(_clone):
        clone = os.path.realpath(_clone)
        self.init_repo('origin', self.origin)
        subprocess.check_call(['git', 'pull', '--tags', 'origin', 'master:master'])

        def assert_detected(some_dir, expected):
            # Given a directory relative to the worktree, tests that the worktree is
            # detected as 'expected'.
            probe_dir = os.path.join(clone, some_dir)
            if not os.path.isdir(probe_dir):
                os.mkdir(probe_dir)
            self.assertEqual(expected, Git.detect_worktree(subdir=probe_dir))

        # Order matters: nested dirs are created by earlier probes.
        for some_dir, expected in (('..', None),
                                   ('.', clone),
                                   ('is', clone),
                                   ('is/a', clone),
                                   ('is/a/dir', clone)):
            assert_detected(some_dir, expected)
def execute(self, test_output_file=None):
    """Run isort (out of process) over the sources of the non-synthetic python targets."""
    isortable_targets = self.get_targets(self.is_non_synthetic_python_target)
    sources = self._calculate_isortable_python_sources(isortable_targets)
    if not sources:
        logging.debug(self.NOOP_MSG_HAS_TARGET_BUT_NO_SOURCE)
        return

    isort_script = Isort.global_instance().select(context=self.context)
    full_cmd = [isort_script] + self.get_passthru_args() + sources
    rendered_cmd = ' '.join(full_cmd)
    logging.debug(rendered_cmd)

    # NB: We execute isort out of process to avoid unwanted side-effects from importing it:
    #   https://github.com/timothycrosley/isort/issues/456
    try:
        subprocess.check_call(full_cmd,
                              cwd=get_buildroot(),
                              stderr=test_output_file,
                              stdout=test_output_file)
    except subprocess.CalledProcessError as e:
        raise TaskError('{} ... exited non-zero ({}).'.format(rendered_cmd, e.returncode))
def invoke_perl6(self, argv, perl6_env, workunit_factory=None):
    """Invoke the perl6 executable with `argv` under an environment derived from `perl6_env`.

    :param argv: arguments for the perl6 executable.
    :param perl6_env: mapping used to build the subprocess environment.
    :param workunit_factory: optional factory; when given, output streams into a workunit.
    :returns: the subprocess's return code (0, since check_call raises on failure).
    :raises: :class:`Perl6InvocationError` on launch failure or non-zero exit.
    """
    full_argv = [self._perl6_exe_filename] + list(argv)
    subproc_env = self._get_perl6_subproc_os_env(perl6_env)
    pretty_printed_argv = safe_shlex_join(full_argv)
    try:
        # FIX: corrected "comand" -> "command" in the debug message.
        logger.debug('running perl6 command {!r} with env {!r}'.format(full_argv, subproc_env))
        if workunit_factory:
            with workunit_factory(cmd=pretty_printed_argv) as workunit:
                # TODO: should we be catching KeyboardInterrupt or something?
                return subprocess.check_call(full_argv,
                                             env=subproc_env,
                                             stdout=workunit.output('stdout'),
                                             stderr=workunit.output('stderr'))
        else:
            return subprocess.check_call(full_argv, env=subproc_env)
    except (OSError, subprocess.CalledProcessError) as e:
        # BUG FIX: OSError carries no `returncode` attribute, so the original handler
        # raised AttributeError instead of Perl6InvocationError whenever the perl6
        # binary could not be launched. Default the exit code to 1 in that case.
        raise self.Perl6InvocationError(
            "Error with perl6 command '{}': {}".format(pretty_printed_argv, e),
            e, exit_code=getattr(e, 'returncode', 1))
def _execute_junit_runner(self, list_of_filename_content_tuples, create_some_resources=True,
                          target_name=None):
    """Compile the given java test sources, wire them into a context, and run JUnitRun.

    :param list_of_filename_content_tuples: (filename, java source) pairs to compile.
    :param create_some_resources: when True, also add a synthetic Resources target root.
    :param target_name: when given, use this existing target instead of creating one.
    """
    # Create the temporary base test directory
    test_rel_path = 'tests/java/org/pantsbuild/foo'
    test_abs_path = self.create_dir(test_rel_path)

    # Create the temporary classes directory under work dir
    test_classes_abs_path = self.create_workdir_dir(test_rel_path)

    test_java_file_abs_paths = []
    # Generate the temporary java test source code.
    for filename, content in list_of_filename_content_tuples:
        test_java_file_rel_path = os.path.join(test_rel_path, filename)
        test_java_file_abs_path = self.create_file(test_java_file_rel_path, content)
        test_java_file_abs_paths.append(test_java_file_abs_path)

    # Invoke ivy to resolve classpath for junit.
    classpath_file_abs_path = os.path.join(test_abs_path, 'junit.classpath')
    ivy_subsystem = global_subsystem_instance(IvySubsystem)
    distribution = DistributionLocator.cached(jdk=True)
    ivy = Bootstrapper(ivy_subsystem=ivy_subsystem).ivy()
    ivy.execute(args=['-cachepath', classpath_file_abs_path,
                      '-dependency', 'junit', 'junit-dep', '4.10'],
                executor=SubprocessExecutor(distribution=distribution))
    with open(classpath_file_abs_path) as fp:
        classpath = fp.read()

    # Now directly invoke javac to compile the test java code into classfiles that we can later
    # inject into a product mapping for JUnitRun to execute against.
    javac = distribution.binary('javac')
    subprocess.check_call(
        [javac, '-d', test_classes_abs_path, '-cp', classpath] + test_java_file_abs_paths)

    # If a target_name is specified create a target with it, otherwise create a junit_tests target.
    if target_name:
        target = self.target(target_name)
    else:
        target = self.create_library(test_rel_path, 'junit_tests', 'foo_test', ['FooTest.java'])

    target_roots = []
    if create_some_resources:
        # Create a synthetic resource target.
        target_roots.append(self.make_target('some_resources', Resources))
    target_roots.append(target)

    # Set the context with the two targets, one junit_tests target and
    # one synthetic resources target.
    # The synthetic resources target is to make sure we won't regress
    # in the future with bug like https://github.com/pantsbuild/pants/issues/508. Note
    # in that bug, the resources target must be the first one in the list.
    context = self.context(target_roots=target_roots)

    # Before we run the task, we need to inject the "runtime_classpath" with
    # the compiled test java classes that JUnitRun will know which test
    # classes to execute. In a normal run, this "runtime_classpath" will be
    # populated by java compilation step.
    self.populate_runtime_classpath(context=context, classpath=[test_classes_abs_path])

    # Finally execute the task.
    self.execute(context)
def _execute_junit_runner(self, list_of_filename_content_tuples, create_some_resources=True,
                          target_name=None):
    """Compile the supplied java test sources, set up a context, and execute JUnitRun.

    :param list_of_filename_content_tuples: (filename, java source) pairs to compile.
    :param create_some_resources: when True, also add a synthetic Resources target root.
    :param target_name: when given, use this existing target instead of creating one.
    """
    # Create the temporary base test directory
    test_rel_path = 'tests/java/org/pantsbuild/foo'
    test_abs_path = self.create_dir(test_rel_path)

    # Create the temporary classes directory under work dir
    test_classes_abs_path = self.create_workdir_dir(test_rel_path)

    test_java_file_abs_paths = []
    # Generate the temporary java test source code.
    for filename, content in list_of_filename_content_tuples:
        test_java_file_rel_path = os.path.join(test_rel_path, filename)
        test_java_file_abs_path = self.create_file(test_java_file_rel_path, content)
        test_java_file_abs_paths.append(test_java_file_abs_path)

    # Invoke ivy to resolve classpath for junit.
    classpath_file_abs_path = os.path.join(test_abs_path, 'junit.classpath')
    ivy_subsystem = global_subsystem_instance(IvySubsystem)
    distribution = DistributionLocator.cached(jdk=True)
    ivy = Bootstrapper(ivy_subsystem=ivy_subsystem).ivy()
    ivy.execute(args=['-cachepath', classpath_file_abs_path,
                      '-dependency', 'junit', 'junit-dep', '4.10'],
                executor=SubprocessExecutor(distribution=distribution))
    with open(classpath_file_abs_path) as fp:
        classpath = fp.read()

    # Now directly invoke javac to compile the test java code into classfiles that we can later
    # inject into a product mapping for JUnitRun to execute against.
    javac = distribution.binary('javac')
    subprocess.check_call(
        [javac, '-d', test_classes_abs_path, '-cp', classpath] + test_java_file_abs_paths)

    # If a target_name is specified create a target with it, otherwise create a junit_tests target.
    if target_name:
        target = self.target(target_name)
    else:
        target = self.create_library(test_rel_path, 'junit_tests', 'foo_test', ['FooTest.java'])

    target_roots = []
    if create_some_resources:
        # Create a synthetic resource target.
        target_roots.append(self.make_target('some_resources', Resources))
    target_roots.append(target)

    # Set the context with the two targets, one junit_tests target and
    # one synthetic resources target.
    # The synthetic resources target is to make sure we won't regress
    # in the future with bug like https://github.com/pantsbuild/pants/issues/508. Note
    # in that bug, the resources target must be the first one in the list.
    context = self.context(target_roots=target_roots)

    # Before we run the task, we need to inject the "runtime_classpath" with
    # the compiled test java classes that JUnitRun will know which test
    # classes to execute. In a normal run, this "runtime_classpath" will be
    # populated by java compilation step.
    self.populate_runtime_classpath(context=context, classpath=[test_classes_abs_path])

    # Finally execute the task.
    self.execute(context)
def test_build_file_rev(self):
    # Test that the build_file_rev global option works. Because the
    # test framework does not yet support bootstrap options, this test
    # in fact just directly calls ScmBuildFile.set_rev.
    with pushd(self.root_dir):
        subprocess.check_call(['git', 'init'])
        subprocess.check_call(['git', 'config', 'user.email', '*****@*****.**'])
        subprocess.check_call(['git', 'config', 'user.name', 'Your Name'])
        subprocess.check_call(['git', 'add', '.'])
        # FIX: the original passed "'-m' 'initial commit'" — adjacent string literals
        # concatenated by Python into '-minitial commit'. git happens to accept the
        # attached `-m<msg>` form, but pass flag and message as two argv entries.
        subprocess.check_call(['git', 'commit', '-m', 'initial commit'])
        subprocess.check_call(['rm', '-rf', 'path-that-does-exist',
                               'grandparent', 'BUILD', 'BUILD.twitter'])

        self._project_tree = ScmProjectTree(self.root_dir, Git(worktree=self.root_dir), 'HEAD')

        my_buildfile = self.create_buildfile('grandparent/parent/BUILD')
        buildfile = self.create_buildfile('grandparent/parent/BUILD.twitter')

        # FIX: replaced the deprecated `assertEquals` alias with `assertEqual`.
        self.assertEqual(OrderedSet([my_buildfile, buildfile]),
                         OrderedSet(self.get_build_files_family('grandparent/parent')))

        self.assertEqual(
            OrderedSet([self.create_buildfile('grandparent/parent/child2/child3/BUILD')]),
            OrderedSet(self.get_build_files_family('grandparent/parent/child2/child3')))

        buildfiles = self.scan_buildfiles('grandparent')
        self.assertEqual(
            OrderedSet([
                self.create_buildfile('grandparent/parent/BUILD'),
                self.create_buildfile('grandparent/parent/BUILD.twitter'),
                self.create_buildfile('grandparent/parent/child1/BUILD'),
                self.create_buildfile('grandparent/parent/child1/BUILD.twitter'),
                self.create_buildfile('grandparent/parent/child2/child3/BUILD'),
                self.create_buildfile('grandparent/parent/child5/BUILD'),
            ]),
            buildfiles)
def setUp(self):
    # Fixture layout: a bare `origin` repo, a primary worktree (with files, symlinks,
    # a symlink loop, and a tag) pushed to it, plus a second clone that pushes one
    # extra commit so the primary's branch point ends up behind the remote.
    self.origin = safe_mkdtemp()
    with pushd(self.origin):
        # A bare repo to act as the shared remote.
        subprocess.check_call(['git', 'init', '--bare'])

    self.gitdir = safe_mkdtemp()
    self.worktree = safe_mkdtemp()

    self.readme_file = os.path.join(self.worktree, 'README')

    with environment_as(GIT_DIR=self.gitdir, GIT_WORK_TREE=self.worktree):
        self.init_repo('depot', self.origin)

        touch(self.readme_file)
        subprocess.check_call(['git', 'add', 'README'])
        safe_mkdir(os.path.join(self.worktree, 'dir'))
        with open(os.path.join(self.worktree, 'dir', 'f'), 'w') as f:
            f.write("file in subdir")

        # Make some symlinks
        os.symlink('f', os.path.join(self.worktree, 'dir', 'relative-symlink'))
        os.symlink('no-such-file', os.path.join(self.worktree, 'dir', 'relative-nonexistent'))
        os.symlink('dir/f', os.path.join(self.worktree, 'dir', 'not-absolute\u2764'))
        os.symlink('../README', os.path.join(self.worktree, 'dir', 'relative-dotdot'))
        os.symlink('dir', os.path.join(self.worktree, 'link-to-dir'))
        os.symlink('README/f', os.path.join(self.worktree, 'not-a-dir'))
        # A deliberate symlink cycle: loop1 -> loop2 -> loop1.
        os.symlink('loop1', os.path.join(self.worktree, 'loop2'))
        os.symlink('loop2', os.path.join(self.worktree, 'loop1'))

        subprocess.check_call(['git', 'add', 'README', 'dir', 'loop1', 'loop2',
                               'link-to-dir', 'not-a-dir'])
        subprocess.check_call(['git', 'commit', '-am', 'initial commit with decode -> \x81b'])
        self.initial_rev = subprocess.check_output(['git', 'rev-parse', 'HEAD']).strip()
        subprocess.check_call(['git', 'tag', 'first'])
        subprocess.check_call(['git', 'push', '--tags', 'depot', 'master'])
        subprocess.check_call(['git', 'branch', '--set-upstream-to', 'depot/master'])

        # NOTE(review): writes `bytes` to a file opened in text mode — valid on
        # Python 2 only; under Python 3 this raises TypeError. Confirm the runtime.
        with safe_open(self.readme_file, 'w') as readme:
            readme.write('Hello World.\u2764'.encode('utf-8'))
        subprocess.check_call(['git', 'commit', '-am', 'Update README.'])

        self.current_rev = subprocess.check_output(['git', 'rev-parse', 'HEAD']).strip()

    self.clone2 = safe_mkdtemp()
    with pushd(self.clone2):
        self.init_repo('origin', self.origin)
        subprocess.check_call(['git', 'pull', '--tags', 'origin', 'master:master'])

        with safe_open(os.path.realpath('README'), 'a') as readme:
            readme.write('--')
        subprocess.check_call(['git', 'commit', '-am', 'Update README 2.'])
        subprocess.check_call(['git', 'push', '--tags', 'origin', 'master'])

    self.git = Git(gitdir=self.gitdir, worktree=self.worktree)
def test_integration(self):
    # End-to-end exercise of Git: status queries, changelog, remote discovery,
    # commit, rejected push + refresh, tagging, and reading state from a fresh clone.
    self.assertEqual(set(), self.git.changed_files())
    self.assertEqual({'README'}, self.git.changed_files(from_commit='HEAD^'))

    tip_sha = self.git.commit_id
    self.assertTrue(tip_sha)
    self.assertTrue(tip_sha in self.git.changelog())

    merge_base = self.git.merge_base()
    self.assertTrue(merge_base)
    self.assertTrue(merge_base in self.git.changelog())

    # server_url requires an `origin` remote, which does not exist yet.
    with self.assertRaises(Scm.LocalException):
        self.git.server_url

    with environment_as(GIT_DIR=self.gitdir, GIT_WORK_TREE=self.worktree):
        with self.mkremote('origin') as origin_uri:
            # We shouldn't be fooled by remotes with origin in their name.
            with self.mkremote('temp_origin'):
                origin_url = self.git.server_url
                self.assertEqual(origin_url, origin_uri)

    self.assertTrue(self.git.tag_name.startswith('first-'),
                    msg='un-annotated tags should be found')
    self.assertEqual('master', self.git.branch_name)

    def edit_readme():
        # Append to README so it shows up as a tracked change.
        with open(self.readme_file, 'a') as fp:
            fp.write('More data.')

    edit_readme()
    with open(os.path.join(self.worktree, 'INSTALL'), 'w') as untracked:
        untracked.write('make install')
    self.assertEqual({'README'}, self.git.changed_files())
    self.assertEqual({'README', 'INSTALL'}, self.git.changed_files(include_untracked=True))

    # Confirm that files outside of a given relative_to path are ignored
    self.assertEqual(set(), self.git.changed_files(relative_to='non-existent'))

    self.git.commit('API Changes.')
    try:
        # These changes should be rejected because our branch point from origin is 1 commit behind
        # the changes pushed there in clone 2.
        self.git.push()
    except Scm.RemoteException:
        # Recover: hard-reset onto the remote, refresh, and redo the edit.
        with environment_as(GIT_DIR=self.gitdir, GIT_WORK_TREE=self.worktree):
            subprocess.check_call(['git', 'reset', '--hard', 'depot/master'])
        self.git.refresh()
        edit_readme()

    self.git.commit('''API '"' " Changes.''')
    self.git.push()
    # HEAD is merged into master
    self.assertEqual(self.git.commit_date(self.git.merge_base()), self.git.commit_date('HEAD'))
    self.assertEqual(self.git.commit_date('HEAD'), self.git.commit_date('HEAD'))

    self.git.tag('second', message='''Tagged ' " Changes''')

    with temporary_dir() as clone:
        with pushd(clone):
            self.init_repo('origin', self.origin)
            subprocess.check_call(['git', 'pull', '--tags', 'origin', 'master:master'])

            with open(os.path.realpath('README')) as readme:
                self.assertEqual('--More data.', readme.read())

            git = Git()

            # Check that we can pick up committed and uncommitted changes.
            with safe_open(os.path.realpath('CHANGES'), 'w') as changes:
                changes.write('none')
            subprocess.check_call(['git', 'add', 'CHANGES'])
            self.assertEqual({'README', 'CHANGES'}, git.changed_files(from_commit='first'))

            self.assertEqual('master', git.branch_name)
            self.assertEqual('second', git.tag_name, msg='annotated tags should be found')
def _assert_subprocess_success(self, worktree, cmd, **kwargs):
    """Run `cmd` in `worktree` and assert it exited 0 (check_call raises on failure anyway)."""
    result = subprocess.check_call(cmd, cwd=worktree, **kwargs)
    self.assertEqual(0, result)
def initialize_repo(worktree, gitdir=None):
    """Initialize a git repository for the given `worktree`.

    NB: The given `worktree` must contain at least one file which will be committed to form an
    initial commit.

    :param string worktree: The path to the git work tree.
    :param string gitdir: An optional path to the `.git` dir to use.
    :returns: A `Git` repository object that can be used to interact with the repo.
    :rtype: :class:`pants.scm.git.Git`
    """
    @contextmanager
    def _gitdir_ctx():
        # Honor an explicit gitdir; otherwise hand out a throwaway temp dir.
        if gitdir:
            yield gitdir
        else:
            with temporary_dir() as tmp_gitdir:
                yield tmp_gitdir

    with _gitdir_ctx() as git_dir, environment_as(GIT_DIR=git_dir, GIT_WORK_TREE=worktree):
        subprocess.check_call(['git', 'init'])
        subprocess.check_call(['git', 'config', 'user.email', '*****@*****.**'])
        # TODO: This method inherits the global git settings, so if a developer has gpg signing
        # on, this will turn that off. We should probably just disable reading from the global
        # config somehow: https://git-scm.com/docs/git-config.
        subprocess.check_call(['git', 'config', 'commit.gpgSign', 'false'])
        subprocess.check_call(['git', 'config', 'user.name', 'Your Name'])
        subprocess.check_call(['git', 'add', '.'])
        subprocess.check_call(['git', 'commit', '-am', 'Add project files.'])

        yield Git(gitdir=git_dir, worktree=worktree)
def test_fetch_cmake_success(self):
    """Fetching cmake 3.9.5 via the script should exit 0 (check_call raises otherwise)."""
    exit_code = subprocess.check_call([self.script_path, 'cmake', '3.9.5'])
    self.assertEqual(0, exit_code)