def test_cached_conflicting(self):
    """An impossible version range (minimum > maximum) must raise Distribution.Error."""
    # Expose a 1.7.0_33 java on the PATH, then ask for an unsatisfiable range.
    with distribution(executables=exe('bin/java', '1.7.0_33')) as dist_root, \
         env(PATH=os.path.join(dist_root, 'bin')):
        with self.assertRaises(Distribution.Error):
            DistributionLocator.cached(minimum_version='1.7.0_00',
                                       maximum_version='1.6.0_50')
def test_cached_bad_input(self):
    """Non-string version arguments must be rejected with ValueError."""
    # A valid java is on the PATH, so the failure must come from argument
    # validation rather than from the distribution lookup itself.
    with distribution(executables=exe('bin/java', '1.7.0_33')) as dist_root, \
         env(PATH=os.path.join(dist_root, 'bin')):
        with self.assertRaises(ValueError):
            DistributionLocator.cached(minimum_version=1.7, maximum_version=1.8)
def is_missing_jvm(version):
    """Return True when no JVM in the `version`.* series can be located."""
    init_subsystem(DistributionLocator)
    upper_bound = '{}.9999'.format(version)
    try:
        DistributionLocator.cached(minimum_version=version,
                                   maximum_version=upper_bound)
    except DistributionLocator.Error:
        return True
    return False
def execute(self):
    """Open an interactive scala REPL over the transitive classpath of the target roots."""
    (accept_predicate, reject_predicate) = Target.lang_discriminator('java')
    targets = self.require_homogeneous_targets(accept_predicate, reject_predicate)
    if targets:
        tools_classpath = self.tool_classpath('scala-repl')
        # Let other pants invocations proceed while the user sits in the REPL.
        self.context.release_lock()
        with preserve_stty_settings():
            classpath = self.classpath(targets, cp=tools_classpath)

            # The scala repl requires -Dscala.usejavacp=true since Scala 2.8 when launching in the way
            # we do here (not passing -classpath as a program arg to scala.tools.nsc.MainGenericRunner).
            jvm_options = self.jvm_options
            if not any(opt.startswith('-Dscala.usejavacp=') for opt in jvm_options):
                jvm_options.append('-Dscala.usejavacp=true')

            print('')  # Start REPL output on a new line.
            try:
                # NOTE: We execute with no workunit, as capturing REPL output makes it very sluggish.
                DistributionLocator.cached().execute_java(classpath=classpath,
                                                          main=self.get_options().main,
                                                          jvm_options=jvm_options,
                                                          args=self.args)
            except KeyboardInterrupt:
                # TODO(John Sirois): Confirm with Steve Gury that finally does not work on mac and an
                # explicit catch of KeyboardInterrupt is required.
                pass
def preferred_jvm_distribution(self, platforms):
    """Returns a jvm Distribution with a version that should work for all the platforms."""
    if not platforms:
        return DistributionLocator.cached()
    # The highest target level across the requested platforms is the floor.
    min_version = max(p.target_level for p in platforms)
    if self._strict_jvm_version:
        max_version = Revision(*(min_version.components + [9999]))
    else:
        max_version = None
    return DistributionLocator.cached(minimum_version=min_version,
                                      maximum_version=max_version)
def is_missing_jvm(version):
    """Return True when no JVM in the `version`.* series can be located."""
    with subsystem_instance(DistributionLocator):
        upper_bound = '{}.9999'.format(version)
        try:
            DistributionLocator.cached(minimum_version=version,
                                       maximum_version=upper_bound)
        except DistributionLocator.Error:
            return True
        return False
def launch_repl(self, classpath):
    """Run the scala REPL over `classpath` as a foreground java process."""
    # Since Scala 2.8, launching via scala.tools.nsc.MainGenericRunner the way we do
    # here (classpath not passed as a program arg) requires -Dscala.usejavacp=true.
    jvm_options = self.jvm_options
    usejavacp_prefix = "-Dscala.usejavacp="
    if all(not opt.startswith(usejavacp_prefix) for opt in jvm_options):
        jvm_options.append(usejavacp_prefix + "true")
    # NOTE: We execute with no workunit, as capturing REPL output makes it very sluggish.
    dist = DistributionLocator.cached()
    dist.execute_java(
        classpath=classpath,
        main=self.get_options().main,
        jvm_options=jvm_options,
        args=self.args,
    )
def launch_repl(self, classpath):
    """Launch the scala REPL as a foreground java process on `classpath`."""
    # Scala >= 2.8 needs -Dscala.usejavacp=true when started via
    # scala.tools.nsc.MainGenericRunner without a -classpath program arg.
    opts = self.jvm_options
    already_set = any(o.startswith('-Dscala.usejavacp=') for o in opts)
    if not already_set:
        opts.append('-Dscala.usejavacp=true')
    # NOTE: We execute with no workunit, as capturing REPL output makes it very sluggish.
    dist = DistributionLocator.cached()
    dist.execute_java(classpath=classpath,
                      main=self.get_options().main,
                      jvm_options=opts,
                      args=self.args)
def set_distribution(self, minimum_version=None, maximum_version=None, jdk=False):
    """Locate and cache a JVM distribution matching the given constraints.

    :param minimum_version: Optional minimum JVM version (inclusive).
    :param maximum_version: Optional maximum JVM version (inclusive).
    :param bool jdk: If True, require a full JDK rather than a bare JRE.
    :raises TaskError: If no matching distribution can be located.
    """
    try:
        self._dist = DistributionLocator.cached(
            minimum_version=minimum_version, maximum_version=maximum_version, jdk=jdk
        )
    except DistributionLocator.Error as e:
        # Chain the locator error so the root cause is preserved in the traceback
        # (the original `raise TaskError(e)` discarded the exception chain context).
        raise TaskError(e) from e
def resolve_jars(self, targets):
    """Resolve 3rdparty jar dependencies for `targets` via Coursier.

    Returns a ClasspathProducts instance, or None when no confs were requested.
    """
    # TODO: Why is this computed directly here instead of taking from the actual product
    # computed by the {Ivy,Coursier}Resolve task?
    executor = SubprocessExecutor(DistributionLocator.cached())
    confs = []
    if self.get_options().libraries:
        confs.append("default")
    if self.get_options().libraries_sources:
        confs.append("sources")
    if self.get_options().libraries_javadocs:
        confs.append("javadoc")
    compile_classpath = None
    if confs:
        compile_classpath = ClasspathProducts(
            self.get_options().pants_workdir)
        CoursierMixin.resolve(
            self,
            targets,
            compile_classpath,
            sources=self.get_options().libraries_sources,
            javadoc=self.get_options().libraries_javadocs,
            executor=executor,
        )
    return compile_classpath
def runner(self, jvm_options=None, args=None, executor=None):
    """Creates an ivy commandline client runner for the given args.

    :param jvm_options: Optional sequence of jvm options for the ivy process.
    :param args: Optional sequence of args for the ivy commandline client.
    :param executor: Optional Executor; defaults to a SubprocessExecutor over the
      located jvm Distribution.
    :raises ValueError: If `executor` is not an Executor instance.
    """
    args = args or []
    jvm_options = jvm_options or []
    executor = executor or SubprocessExecutor(DistributionLocator.cached())
    if not isinstance(executor, Executor):
        raise ValueError(
            "The executor argument must be an Executor instance, given {} of type {}".format(
                executor, type(executor)
            )
        )
    if self._ivy_cache_dir and "-cache" not in args:
        # TODO(John Sirois): Currently this is a magic property to support hand-crafted <caches/> in
        # ivysettings.xml. Ideally we'd support either simple -caches or these hand-crafted cases
        # instead of just hand-crafted. Clean this up by taking over ivysettings.xml and generating
        # it from BUILD constructs.
        jvm_options += ["-Divy.cache.dir={}".format(self._ivy_cache_dir)]
    if self._ivy_settings and "-settings" not in args:
        args = ["-settings", self._ivy_settings] + args
    jvm_options += self._extra_jvm_options
    return executor.runner(
        classpath=self._classpath, main="org.apache.ivy.Main", jvm_options=jvm_options, args=args
    )
def create_scaladoc_command(self, classpath, gendir, *targets):
    """Build the scaladoc command line for `targets`, or None when there are no sources.

    :param classpath: Classpath entries for the code being documented.
    :param gendir: Output directory for generated docs.
    """
    sources = []
    for target in targets:
        sources.extend(target.sources_relative_to_buildroot())
        # TODO(Tejal Desai): pantsbuild/pants/65: Remove java_sources attribute for ScalaLibrary
        # A '.scala' owning target may not have java_sources, eg: junit_tests
        if hasattr(target, 'java_sources'):
            for java_target in target.java_sources:
                sources.extend(java_target.sources_relative_to_buildroot())
    if not sources:
        return None
    scala_platform = ScalaPlatform.global_instance()
    tool_classpath = [cp_entry.path for cp_entry in scala_platform.compiler_classpath_entries(
        self.context.products, self.context._scheduler)]
    args = ['-usejavacp', '-classpath', ':'.join(classpath), '-d', gendir]
    args.extend(self.args)
    args.extend(sources)
    java_executor = SubprocessExecutor(DistributionLocator.cached())
    runner = java_executor.runner(jvm_options=self.jvm_options,
                                  classpath=tool_classpath,
                                  main='scala.tools.nsc.ScalaDoc',
                                  args=args)
    return runner.command
def _bundle_and_run(self, bundle_args, classpath):
    """Bundle the shading test project, run its main class, and verify shaded names.

    :param bundle_args: Pants command prefix used to build the bundle.
    :param classpath: Classpath to run the bundled main class with.
    """
    self.assert_success(self.run_pants(['clean-all']))
    pants_command = list(bundle_args)
    pants_command.append(
        'testprojects/src/java/org/pantsbuild/testproject/shading:third')
    self.assert_success(self.run_pants(pants_command))

    main_class = 'org.pantsbuild.testproject.shading.Third'
    with subsystem_instance(DistributionLocator):
        executor = SubprocessExecutor(
            DistributionLocator.cached(minimum_version='1.7'))
        p = executor.spawn(classpath, main_class,
                           stdout=subprocess.PIPE,
                           stderr=subprocess.PIPE)
        out, err = p.communicate()
        self.assertEqual(0, p.returncode, err)
        # The test main prints a JSON mapping of simple names to (shaded) class names.
        class_names = json.loads(out.strip())
        self.assertEqual(
            {
                'Gson': 'moc.elgoog.nosg.Gson',
                'Third': 'org.pantsbuild.testproject.shading.Third',
                'Second': 'hello.org.pantsbuild.testproject.shading.Second',
            }, class_names)
def resolve_jars(self, targets):
    """Resolve 3rdparty jars for `targets` with whichever resolver is configured (ivy/coursier).

    Returns a ClasspathProducts instance, or None when no confs were requested.
    """
    # TODO: Why is this computed directly here instead of taking from the actual product
    # computed by the {Ivy,Coursier}Resolve task?
    executor = SubprocessExecutor(DistributionLocator.cached())
    confs = []
    if self.get_options().libraries:
        confs.append('default')
    if self.get_options().libraries_sources:
        confs.append('sources')
    if self.get_options().libraries_javadocs:
        confs.append('javadoc')
    compile_classpath = None
    if confs:
        compile_classpath = ClasspathProducts(
            self.get_options().pants_workdir)
        if JvmResolveSubsystem.global_instance().get_options(
        ).resolver == 'ivy':
            IvyTaskMixin.resolve(self,
                                 executor=executor,
                                 targets=targets,
                                 classpath_products=compile_classpath,
                                 confs=confs)
        else:
            CoursierMixin.resolve(
                self,
                targets,
                compile_classpath,
                sources=self.get_options().libraries_sources,
                javadoc=self.get_options().libraries_javadocs,
                executor=executor)
    return compile_classpath
def runner(self, jvm_options=None, args=None, executor=None):
    """Creates an ivy commandline client runner for the given args.

    :param jvm_options: Optional sequence of jvm options for the ivy process.
    :param args: Optional sequence of args for the ivy commandline client.
    :param executor: Optional Executor; defaults to a SubprocessExecutor over the
      located jvm Distribution.
    :raises ValueError: If `executor` is not an Executor instance.
    """
    args = args or []
    if self._ivy_settings and "-settings" not in args:
        args = ["-settings", self._ivy_settings] + args
    options = list(jvm_options) if jvm_options else []
    if self._ivy_resolution_cache_dir and "-cache" not in args:
        # TODO(John Sirois): Currently this is a magic property to support hand-crafted <caches/> in
        # ivysettings.xml. Ideally we'd support either simple -caches or these hand-crafted cases
        # instead of just hand-crafted. Clean this up by taking over ivysettings.xml and generating
        # it from BUILD constructs.
        options += [
            "-Divy.cache.dir={}".format(self._ivy_resolution_cache_dir)
        ]
    options += self._extra_jvm_options
    executor = executor or SubprocessExecutor(DistributionLocator.cached())
    if not isinstance(executor, Executor):
        raise ValueError(
            "The executor argument must be an Executor instance, given {} of type {}"
            .format(executor, type(executor)))
    return executor.runner(classpath=self._classpath,
                           main="org.apache.ivy.Main",
                           jvm_options=options,
                           args=args)
def distribution(self):
    """Locate (once) and return a JDK usable for Android builds."""
    if self._dist is not None:
        return self._dist
    # Currently no Java 8 for Android. I considered max=1.7.0_50. See comment in _render_args().
    self._dist = DistributionLocator.cached(minimum_version='1.6.0_00',
                                            maximum_version='1.7.0_99',
                                            jdk=True)
    return self._dist
def launch_repl(self, classpath):
    """Launch the scala REPL, passing the classpath directly (no synthetic jar)."""
    # Since Scala 2.8, starting scala.tools.nsc.MainGenericRunner without a
    # -classpath program arg requires -Dscala.usejavacp=true.
    jvm_options = self.jvm_options
    has_usejavacp = any(opt.startswith('-Dscala.usejavacp=') for opt in jvm_options)
    if not has_usejavacp:
        jvm_options.append('-Dscala.usejavacp=true')
    # NOTE: We execute with no workunit, as capturing REPL output makes it very sluggish.
    #
    # NOTE: Disable creating synthetic jar here because the classLoader used by REPL
    # does not load Class-Path from manifest.
    dist = DistributionLocator.cached()
    dist.execute_java(
        classpath=classpath,
        main=self.get_options().main,
        jvm_options=jvm_options,
        args=self.args,
        create_synthetic_jar=False)
def execute( self, jvm_options=None, args=None, executor=None, workunit_factory=None, workunit_name=None, workunit_labels=None, ): """Executes the ivy commandline client with the given args. Raises Ivy.Error if the command fails for any reason. :param executor: Java executor to run ivy with. """ # NB(gmalmquist): It should be OK that we can't declare a subsystem_dependency in this file # (because it's just a plain old object), because Ivy is only constructed by Bootstrapper, which # makes an explicit call to IvySubsystem.global_instance() in its constructor, which in turn has # a declared dependency on DistributionLocator. executor = executor or SubprocessExecutor(DistributionLocator.cached()) runner = self.runner(jvm_options=jvm_options, args=args, executor=executor) try: with self.resolution_lock(): result = util.execute_runner(runner, workunit_factory, workunit_name, workunit_labels) if result != 0: raise self.Error( "Ivy command failed with exit code {}{}".format( result, ": " + " ".join(args) if args else "")) except executor.Error as e: raise self.Error("Problem executing ivy: {}".format(e))
def _build_wsimport_cmd(self, target, target_workdir, url): distribution = DistributionLocator.cached(maximum_version="1.10.999", jdk=True) # Ported and trimmed down from: # https://java.net/projects/jax-ws-commons/sources/svn/content/trunk/ # jaxws-maven-plugin/src/main/java/org/jvnet/jax_ws_commons/jaxws/WsImportMojo.java?rev=1191 cmd = ["{}/bin/wsimport".format(distribution.real_home)] if self.get_options().ws_verbose: cmd.append("-verbose") if self.get_options().ws_quiet: cmd.append("-quiet") cmd.append("-Xnocompile") # Always let pants do the compiling work. cmd.extend(["-keep", "-s", os.path.abspath(target_workdir)]) cmd.extend(["-d", os.path.abspath(target_workdir)]) if target.payload.xjc_args: cmd.extend(("-B{}".format(a) if a.startswith("-") else a) for a in target.payload.xjc_args) cmd.append( "-B-no-header" ) # Don't let xjc write out a timestamp, because it'll break caching. cmd.extend(target.payload.extra_args) cmd.append(url) if self.get_options().level == "debug": cmd.append("-Xdebug") return cmd
def execute(self):
    """Run the Caliper benchmark tool over the targeted benchmark classes.

    :raises TaskError: If no benchmark targets are specified or the run exits non-zero.
    """
    targets = self.context.targets(predicate=self._is_benchmark)
    if not targets:
        raise TaskError('No jvm targets specified for benchmarking.')

    # For rewriting JDK classes to work, the JAR file has to be listed specifically in
    # the JAR manifest as something that goes in the bootclasspath.
    # The MANIFEST list a jar 'allocation.jar' this is why we have to rename it
    agent_tools_classpath = self.tool_classpath('benchmark-agent')
    agent_jar = agent_tools_classpath[0]
    allocation_jar = os.path.join(os.path.dirname(agent_jar), "allocation.jar")

    # TODO(Steve Gury): Find a solution to avoid copying the jar every run and being resilient
    # to version upgrade
    shutil.copyfile(agent_jar, allocation_jar)
    os.environ['ALLOCATION_JAR'] = str(allocation_jar)

    benchmark_tools_classpath = self.tool_classpath('benchmark-tool')

    # Collect a transitive classpath for the benchmark targets.
    classpath = self.classpath(targets, benchmark_tools_classpath)

    java_executor = SubprocessExecutor(DistributionLocator.cached())
    exit_code = execute_java(classpath=classpath,
                             main=self._CALIPER_MAIN,
                             jvm_options=self.jvm_options,
                             args=self.args,
                             workunit_factory=self.context.new_workunit,
                             workunit_name='caliper',
                             workunit_labels=[WorkUnitLabel.RUN],
                             executor=java_executor,
                             create_synthetic_jar=self.synthetic_classpath)
    if exit_code != 0:
        raise TaskError('java {} ... exited non-zero ({})'.format(self._CALIPER_MAIN, exit_code))
def execute(self):
    """Publish local sbt dists for all matching targets, one sbt invocation per dist.

    :raises PublishLocalSbtDistsError: On duplicate dists per directory or sbt failure.
    """
    if self.get_options().skip:
        return
    sbt_dist_targets = self.context.targets(
        self.source_target_constraint.satisfied_by)
    jvm_dist_locator = DistributionLocator.cached()
    with self.invalidated(
            sbt_dist_targets, invalidate_dependents=True) as invalidation_check:
        # Check that we have at most one sbt dist per directory.
        seen_basedirs = {}
        for vt in invalidation_check.all_vts:
            base_dir = vt.target.address.spec_path
            if base_dir in seen_basedirs:
                prev_target = seen_basedirs[base_dir]
                raise self.PublishLocalSbtDistsError(
                    "multiple sbt dists defined in the same directory: current = {}, previous = {}"
                    .format(vt.target, prev_target))
            else:
                seen_basedirs[base_dir] = vt.target
        # Only re-publish dists whose inputs changed.
        for vt in invalidation_check.invalid_vts:
            base_dir = vt.target.address.spec_path
            with self.context.new_workunit(
                    name='publish-local-sbt-dists',
                    labels=[WorkUnitLabel.COMPILER],
            ) as workunit:
                sbt_version_args = ['-sbt-version', self._sbt.version
                                    ] if self._sbt.version else []
                argv = ['sbt'] + sbt_version_args + [
                    '-java-home',
                    jvm_dist_locator.home,
                    '-ivy',
                    self._sbt.local_publish_repo,
                    '-batch',
                    'publishLocal',
                ]
                try:
                    subprocess.check_call(argv,
                                          cwd=os.path.join(
                                              get_buildroot(), base_dir),
                                          stdout=workunit.output('stdout'),
                                          stderr=workunit.output('stderr'))
                except OSError as e:
                    # sbt binary missing / not executable.
                    workunit.set_outcome(WorkUnit.FAILURE)
                    raise self.PublishLocalSbtDistsError(
                        "Error invoking sbt with command {} for target {}: {}"
                        .format(argv, vt.target, e), e)
                except subprocess.CalledProcessError as e:
                    workunit.set_outcome(WorkUnit.FAILURE)
                    raise self.PublishLocalSbtDistsError(
                        "Error publishing local sbt dist with command {} for target {}. Exit code was: {}"
                        .format(argv, vt.target, e.returncode), e)
def resolve_jars(self, targets):
    """Resolve 3rdparty jars for `targets` with whichever resolver is configured (ivy/coursier).

    Returns a ClasspathProducts instance, or None when no confs were requested.
    """
    # TODO: Why is this computed directly here instead of taking from the actual product
    # computed by the {Ivy,Coursier}Resolve task?
    executor = SubprocessExecutor(DistributionLocator.cached())
    confs = []
    if self.get_options().libraries:
        confs.append('default')
    if self.get_options().libraries_sources:
        confs.append('sources')
    if self.get_options().libraries_javadocs:
        confs.append('javadoc')
    compile_classpath = None
    if confs:
        compile_classpath = ClasspathProducts(self.get_options().pants_workdir)
        if JvmResolveSubsystem.global_instance().get_options().resolver == 'ivy':
            IvyTaskMixin.resolve(self,
                                 executor=executor,
                                 targets=targets,
                                 classpath_products=compile_classpath,
                                 confs=confs)
        else:
            CoursierMixin.resolve(self,
                                  targets,
                                  compile_classpath,
                                  sources=self.get_options().libraries_sources,
                                  javadoc=self.get_options().libraries_javadocs,
                                  executor=executor)
    return compile_classpath
def resolve_jars(self, targets):
    """Resolve 3rdparty jars for `targets` with whichever resolver is configured (ivy/coursier).

    Returns a ClasspathProducts instance, or None when no confs were requested.
    """
    executor = SubprocessExecutor(DistributionLocator.cached())
    confs = []
    if self.get_options().libraries:
        confs.append('default')
    if self.get_options().libraries_sources:
        confs.append('sources')
    if self.get_options().libraries_javadocs:
        confs.append('javadoc')
    compile_classpath = None
    if confs:
        compile_classpath = ClasspathProducts(
            self.get_options().pants_workdir)
        if JvmResolveSubsystem.global_instance().get_options(
        ).resolver == 'ivy':
            IvyTaskMixin.resolve(self,
                                 executor=executor,
                                 targets=targets,
                                 classpath_products=compile_classpath,
                                 confs=confs)
        else:
            # NOTE(review): unlike the ivy branch (and sibling variants of this
            # method), no executor is passed to the coursier resolve here — confirm
            # whether that is intentional.
            CoursierMixin.resolve(
                self,
                targets,
                compile_classpath,
                sources=self.get_options().libraries_sources,
                javadoc=self.get_options().libraries_javadocs)
    return compile_classpath
def setUp(self):
    """Wire up a Shader backed by a real JVM subprocess executor."""
    self.jarjar = '/not/really/jarjar.jar'
    with subsystem_instance(DistributionLocator):
        dist = DistributionLocator.cached()
        self.shader = Shader(jarjar_classpath=[self.jarjar],
                             executor=SubprocessExecutor(dist))
    self.output_jar = '/not/really/shaded.jar'
def locate_tools_jar():
    # Return the path(s) to tools.jar from a located JDK, or [] when only a JRE
    # is available.
    # NOTE(review): this references `self`, so it is presumably a closure nested
    # inside a method — confirm against the enclosing scope.
    try:
        return DistributionLocator.cached(jdk=True).find_libs(['tools.jar'])
    except DistributionLocator.Error:
        self.context.log.info('Failed to locate tools.jar. '
                              'Install a JDK to increase performance of Zinc.')
        return []
def execute( self, jvm_options=None, args=None, executor=None, workunit_factory=None, workunit_name=None, workunit_labels=None, ): """Executes the ivy commandline client with the given args. Raises Ivy.Error if the command fails for any reason. :param executor: Java executor to run ivy with. """ # NB(gmalmquist): It should be OK that we can't declare a subsystem_dependency in this file # (because it's just a plain old object), because Ivy is only constructed by Bootstrapper, which # makes an explicit call to IvySubsystem.global_instance() in its constructor, which in turn has # a declared dependency on DistributionLocator. executor = executor or SubprocessExecutor(DistributionLocator.cached()) runner = self.runner(jvm_options=jvm_options, args=args, executor=executor) try: with self.resolution_lock: result = util.execute_runner(runner, workunit_factory, workunit_name, workunit_labels) if result != 0: raise self.Error( "Ivy command failed with exit code {}{}".format(result, ": " + " ".join(args) if args else "") ) except executor.Error as e: raise self.Error("Problem executing ivy: {}".format(e))
def create_scaladoc_command(self, classpath, gendir, *targets):
    """Build the scaladoc command line for `targets`, or None when there are no sources.

    :param classpath: Classpath entries for the code being documented.
    :param gendir: Output directory for generated docs.
    """
    sources = []
    for target in targets:
        sources.extend(target.sources_relative_to_buildroot())
        # TODO(Tejal Desai): pantsbuild/pants/65: Remove java_sources attribute for ScalaLibrary
        # A '.scala' owning target may not have java_sources, eg: junit_tests
        if hasattr(target, 'java_sources'):
            for java_target in target.java_sources:
                sources.extend(java_target.sources_relative_to_buildroot())
    if not sources:
        return None
    scala_platform = ScalaPlatform.global_instance()
    tool_classpath = scala_platform.compiler_classpath(
        self.context.products)
    args = ['-usejavacp', '-classpath', ':'.join(classpath), '-d', gendir]
    args.extend(self.args)
    args.extend(sources)
    java_executor = SubprocessExecutor(DistributionLocator.cached())
    runner = java_executor.runner(jvm_options=self.jvm_options,
                                  classpath=tool_classpath,
                                  main='scala.tools.nsc.ScalaDoc',
                                  args=args)
    return runner.command
def resolve_jars(self, targets):
    """Resolve jar dependencies for `targets` via ivy, honoring resolve.ivy overrides.

    Returns a ClasspathProducts instance, or None when no confs were requested.
    """
    executor = SubprocessExecutor(DistributionLocator.cached())
    confs = []
    if self.get_options().libraries:
        confs.append('default')
    if self.get_options().libraries_sources:
        confs.append('sources')
    if self.get_options().libraries_javadocs:
        confs.append('javadoc')
    # TODO(gmalmquist): This is a terrible hack for backwards-compatibility with the pants-plugin.
    # Kill it ASAP, and update test_export_integration#test_export_jar_path_with_excludes_soft to
    # use the flag actually scoped for this task.
    export_options = self.get_options()
    try:
        ivy_options = self.context.options.for_scope('resolve.ivy')
    except OptionsError:
        # No resolve.ivy task installed, so continue silently.
        ivy_options = []
    for name in set.intersection(set(export_options), set(ivy_options)):
        if not ivy_options.is_default(name):
            setattr(export_options, name, RankedValue(RankedValue.FLAG, ivy_options[name]))
    confs = confs or export_options.confs
    compile_classpath = None
    if confs:
        compile_classpath = ClasspathProducts(self.get_options().pants_workdir)
        self.resolve(executor=executor,
                     targets=targets,
                     classpath_products=compile_classpath,
                     confs=confs)
    return compile_classpath
def resolve_jars(self, targets):
    """Resolve jar dependencies for `targets` via ivy, honoring resolve.ivy overrides.

    Returns a ClasspathProducts instance, or None when no confs were requested.
    """
    executor = SubprocessExecutor(DistributionLocator.cached())
    confs = []
    if self.get_options().libraries:
        confs.append('default')
    if self.get_options().libraries_sources:
        confs.append('sources')
    if self.get_options().libraries_javadocs:
        confs.append('javadoc')
    # TODO(gmalmquist): This is a terrible hack for backwards-compatibility with the pants-plugin.
    # Kill it ASAP, and update test_export_integration#test_export_jar_path_with_excludes_soft to
    # use the flag actually scoped for this task.
    export_options = self.get_options()
    try:
        ivy_options = self.context.options.for_scope('resolve.ivy')
    except OptionsError:
        # No resolve.ivy task installed, so continue silently.
        ivy_options = []
    for name in set.intersection(set(export_options), set(ivy_options)):
        if not ivy_options.is_default(name):
            setattr(export_options, name, RankedValue(RankedValue.FLAG, ivy_options[name]))
    confs = confs or export_options.confs
    compile_classpath = None
    if confs:
        compile_classpath = ClasspathProducts(self.get_options().pants_workdir)
        self.resolve(executor=executor,
                     targets=targets,
                     classpath_products=compile_classpath,
                     confs=confs,
                     extra_args=())
    return compile_classpath
def execute_junit_runner(self, content):
    """Compile `content` as FooTest.java and run it through the JUnitRun task."""
    # Create the temporary base test directory
    test_rel_path = 'tests/java/org/pantsbuild/foo'
    test_abs_path = os.path.join(self.build_root, test_rel_path)
    self.create_dir(test_rel_path)

    # Generate the temporary java test source code.
    test_java_file_rel_path = os.path.join(test_rel_path, 'FooTest.java')
    test_java_file_abs_path = os.path.join(self.build_root, test_java_file_rel_path)
    self.create_file(test_java_file_rel_path, content)

    # Invoke ivy to resolve classpath for junit.
    classpath_file_abs_path = os.path.join(test_abs_path, 'junit.classpath')
    with subsystem_instance(IvySubsystem) as ivy_subsystem:
        distribution = DistributionLocator.cached(jdk=True)
        ivy = Bootstrapper(ivy_subsystem=ivy_subsystem).ivy()
        ivy.execute(args=['-cachepath', classpath_file_abs_path,
                          '-dependency', 'junit', 'junit-dep', '4.10'],
                    executor=SubprocessExecutor(distribution=distribution))

    with open(classpath_file_abs_path) as fp:
        classpath = fp.read()

    # Now directly invoking javac to compile the test java code into java class
    # so later we can inject the class into products mapping for JUnitRun to execute
    # the test on.
    javac = distribution.binary('javac')
    subprocess.check_call(
        [javac, '-d', test_abs_path, '-cp', classpath, test_java_file_abs_path])

    # Create a java_tests target and a synthetic resource target.
    java_tests = self.create_library(test_rel_path, 'java_tests', 'foo_test', ['FooTest.java'])
    resources = self.make_target('some_resources', Resources)

    # Set the context with the two targets, one java_tests target and
    # one synthetic resources target.
    # The synthetic resources target is to make sure we won't regress
    # in the future with bug like https://github.com/pantsbuild/pants/issues/508. Note
    # in that bug, the resources target must be the first one in the list.
    context = self.context(target_roots=[resources, java_tests])

    # Before we run the task, we need to inject the "classes_by_target" with
    # the compiled test java classes that JUnitRun will know which test
    # classes to execute. In a normal run, this "classes_by_target" will be
    # populated by java compiling step.
    class_products = context.products.get_data(
        'classes_by_target', lambda: defaultdict(MultipleRootedProducts))
    java_tests_products = MultipleRootedProducts()
    java_tests_products.add_rel_paths(test_abs_path, ['FooTest.class'])
    class_products[java_tests] = java_tests_products

    # Also we need to add the FooTest.class's classpath to the compile_classpath
    # products data mapping so JUnitRun will be able to add that into the final
    # classpath under which the junit will be executed.
    self.populate_compile_classpath(context=context, classpath=[test_abs_path])

    # Finally execute the task.
    self.execute(context)
def preferred_jvm_distribution(cls, platforms, strict=False):
    """Returns a jvm Distribution with a version that should work for all the platforms.

    Any one of those distributions whose version is >= all requested platforms' versions
    can be returned unless strict flag is set.

    :param iterable platforms: An iterable of platform settings.
    :param bool strict: If true, only distribution whose version matches the minimum
      required version can be returned, i.e, the max target_level of all the requested
      platforms.
    :returns: Distribution one of the selected distributions.
    """
    if not platforms:
        return DistributionLocator.cached()
    # The highest target level across the requested platforms is the floor.
    min_version = max(p.target_level for p in platforms)
    if strict:
        max_version = Revision(*(min_version.components + [9999]))
    else:
        max_version = None
    return DistributionLocator.cached(minimum_version=min_version,
                                      maximum_version=max_version)
def create_analysis_tools(self):
    """Construct the AnalysisTools wired to the located JVM home and pants workdir."""
    java_home = DistributionLocator.cached().real_home
    return AnalysisTools(java_home,
                         ZincAnalysisParser(),
                         ZincAnalysis,
                         get_buildroot(),
                         self.get_options().pants_workdir)
def setUp(self):
    """Set up a Shader over a real JVM executor, excluding the javax binary package."""
    self.jarjar = '/not/really/jarjar.jar'
    init_subsystem(DistributionLocator)
    jvm_executor = SubprocessExecutor(DistributionLocator.cached())
    self.shader = Shader(jarjar_classpath=[self.jarjar],
                         executor=jvm_executor,
                         binary_package_excludes=['javax'])
    self.output_jar = '/not/really/shaded.jar'
def _compile_schema(self, args):
    """Run the JDK-internal xjc driver (from tools.jar) over `args`."""
    jdk = DistributionLocator.cached(jdk=True)
    classpath = jdk.find_libs(['tools.jar'])
    return self.runjava(classpath=classpath,
                        main='com.sun.tools.internal.xjc.Driver',
                        args=args,
                        workunit_name='xjc')
def execute(self):
    """Run a single JVM binary target in the foreground, or no-op for non-JVM targets."""
    # The called binary may block for a while, allow concurrent pants activity during this pants
    # idle period.
    #
    # TODO(John Sirois): refactor lock so that I can do:
    # with self.context.lock.yield():
    # - blocking code
    #
    # Currently re-acquiring the lock requires a path argument that was set up by the goal
    # execution engine. I do not want task code to learn the lock location.
    # http://jira.local.twitter.com/browse/AWESOME-1317
    target = self.require_single_root_target()

    working_dir = None
    cwd_opt = self.get_options().cwd
    if cwd_opt != _CWD_NOT_PRESENT:
        working_dir = self.get_options().cwd
        if not working_dir:
            working_dir = target.address.spec_path
    logger.debug("Working dir is {0}".format(working_dir))

    if isinstance(target, JvmApp):
        binary = target.binary
    else:
        binary = target

    # Some targets will not have extra_jvm_options in their payload,
    # so we can't access it with target.payload.extra_jvm_options
    extra_jvm_options = target.payload.get_field_value("extra_jvm_options")

    # We can't throw if binary isn't a JvmBinary, because perhaps we were called on a
    # python_binary, in which case we have to no-op and let python_run do its thing.
    # TODO(benjy): Some more elegant way to coordinate how tasks claim targets.
    if isinstance(binary, JvmBinary):
        jvm = DistributionLocator.cached()
        executor = CommandLineGrabber(
            jvm) if self.only_write_cmd_line else None
        self.context.release_lock()
        with self.context.new_workunit(name='run', labels=[WorkUnitLabel.RUN]):
            result = jvm.execute_java(
                classpath=self.classpath([target]),
                main=self.get_options().main or binary.main,
                executor=executor,
                jvm_options=self.jvm_options + extra_jvm_options,
                args=self.args,
                cwd=working_dir,
                synthetic_jar_dir=self.workdir,
                create_synthetic_jar=self.synthetic_classpath)

        if self.only_write_cmd_line:
            # Capture the command line instead of reporting the run's exit code.
            with safe_open(expand_path(self.only_write_cmd_line), 'w') as outfile:
                outfile.write(' '.join(executor.cmd))
        elif result != 0:
            raise TaskError('java {} ... exited non-zero ({})'.format(
                binary.main, result), exit_code=result)
def execute_junit_runner(self, content):
    """Compile `content` as FooTest.java and run it through the JUnitRun task."""
    # Create the temporary base test directory
    test_rel_path = 'tests/java/org/pantsbuild/foo'
    test_abs_path = self.create_dir(test_rel_path)

    # Generate the temporary java test source code.
    test_java_file_rel_path = os.path.join(test_rel_path, 'FooTest.java')
    test_java_file_abs_path = self.create_file(test_java_file_rel_path, content)

    # Create the temporary classes directory under work dir
    test_classes_abs_path = self.create_workdir_dir(test_rel_path)

    # Invoke ivy to resolve classpath for junit.
    classpath_file_abs_path = os.path.join(test_abs_path, 'junit.classpath')
    with subsystem_instance(IvySubsystem) as ivy_subsystem:
        distribution = DistributionLocator.cached(jdk=True)
        ivy = Bootstrapper(ivy_subsystem=ivy_subsystem).ivy()
        ivy.execute(args=[
            '-cachepath', classpath_file_abs_path, '-dependency', 'junit',
            'junit-dep', '4.10'
        ],
                    executor=SubprocessExecutor(distribution=distribution))

    with open(classpath_file_abs_path) as fp:
        classpath = fp.read()

    # Now directly invoking javac to compile the test java code into java class
    # so later we can inject the class into products mapping for JUnitRun to execute
    # the test on.
    javac = distribution.binary('javac')
    subprocess.check_call([
        javac, '-d', test_classes_abs_path, '-cp', classpath,
        test_java_file_abs_path
    ])

    # Create a java_tests target and a synthetic resource target.
    java_tests = self.create_library(test_rel_path, 'java_tests', 'foo_test', ['FooTest.java'])
    resources = self.make_target('some_resources', Resources)

    # Set the context with the two targets, one java_tests target and
    # one synthetic resources target.
    # The synthetic resources target is to make sure we won't regress
    # in the future with bug like https://github.com/pantsbuild/pants/issues/508. Note
    # in that bug, the resources target must be the first one in the list.
    context = self.context(target_roots=[resources, java_tests])

    # Before we run the task, we need to inject the "runtime_classpath" with
    # the compiled test java classes that JUnitRun will know which test
    # classes to execute. In a normal run, this "runtime_classpath" will be
    # populated by java compilation step.
    self.populate_runtime_classpath(context=context, classpath=[test_classes_abs_path])

    # Finally execute the task.
    self.execute(context)
def execute_junit_runner(self, content, create_some_resources=True, **kwargs):
  """Compile the given Java test source and run it through the JUnitRun task.

  :param string content: Java source for the FooTest.java test class to compile and run.
  :param bool create_some_resources: if True, also put a synthetic resources target first
    in the target roots (regression coverage for issues/508, see below).
  :param kwargs: may contain `target_name`, the address of an existing target to run
    instead of creating a fresh java_tests target.
  """
  # Create the temporary base test directory
  test_rel_path = 'tests/java/org/pantsbuild/foo'
  test_abs_path = self.create_dir(test_rel_path)

  # Generate the temporary java test source code.
  test_java_file_rel_path = os.path.join(test_rel_path, 'FooTest.java')
  test_java_file_abs_path = self.create_file(test_java_file_rel_path, content)

  # Create the temporary classes directory under work dir
  test_classes_abs_path = self.create_workdir_dir(test_rel_path)

  # Invoke ivy to resolve classpath for junit.
  classpath_file_abs_path = os.path.join(test_abs_path, 'junit.classpath')
  ivy_subsystem = global_subsystem_instance(IvySubsystem)
  # A JDK (not just a JRE) is required so javac is available below.
  distribution = DistributionLocator.cached(jdk=True)
  ivy = Bootstrapper(ivy_subsystem=ivy_subsystem).ivy()
  ivy.execute(args=['-cachepath', classpath_file_abs_path,
                    '-dependency', 'junit', 'junit-dep', '4.10'],
              executor=SubprocessExecutor(distribution=distribution))
  with open(classpath_file_abs_path) as fp:
    classpath = fp.read()

  # Now directly invoking javac to compile the test java code into java class
  # so later we can inject the class into products mapping for JUnitRun to execute
  # the test on.
  javac = distribution.binary('javac')
  subprocess.check_call(
    [javac, '-d', test_classes_abs_path, '-cp', classpath, test_java_file_abs_path])

  # If a target_name is specified create a target with it, otherwise create a java_tests target.
  if 'target_name' in kwargs:
    target = self.target(kwargs['target_name'])
  else:
    target = self.create_library(test_rel_path, 'java_tests', 'foo_test', ['FooTest.java'])

  target_roots = []
  if create_some_resources:
    # Create a synthetic resource target.
    target_roots.append(self.make_target('some_resources', Resources))
  target_roots.append(target)

  # Set the context with the two targets, one java_tests target and
  # one synthetic resources target.
  # The synthetic resources target is to make sure we won't regress
  # in the future with bug like https://github.com/pantsbuild/pants/issues/508. Note
  # in that bug, the resources target must be the first one in the list.
  context = self.context(target_roots=target_roots)

  # Before we run the task, we need to inject the "runtime_classpath" with
  # the compiled test java classes that JUnitRun will know which test
  # classes to execute. In a normal run, this "runtime_classpath" will be
  # populated by java compilation step.
  self.populate_runtime_classpath(context=context, classpath=[test_classes_abs_path])

  # Finally execute the task.
  self.execute(context)
def execute_tool(self, classpath, main, args=None):
  """Spawn a java tool and yield its stdout, asserting a clean and silent exit.

  :param list classpath: classpath entries for the tool.
  :param string main: fully qualified main class to run.
  :param list args: optional program arguments.
  """
  init_subsystem(DistributionLocator)
  executor = SubprocessExecutor(DistributionLocator.cached())
  process = executor.spawn(classpath, main, args=args,
                           stdout=subprocess.PIPE, stderr=subprocess.PIPE)
  out, err = process.communicate()
  self.assertEqual(0, process.returncode)
  # communicate() returns bytes; decode before comparing/yielding so the assertion
  # cannot spuriously fail on a str-vs-bytes mismatch under Python 3. This matches
  # the sibling execute_tool helper in this file that already decodes.
  self.assertEqual('', err.strip().decode())
  yield out.decode()
def _fallback_platform(self):
  """Derive platform settings from the locally cached default JVM.

  Used when no default jvm platform is configured; the resulting platform's name
  records that it came from the located distribution's version.
  """
  logger.warning('No default jvm platform is defined.')
  # Source and target levels are both pinned to the located JVM's version.
  level = JvmPlatform.parse_java_version(DistributionLocator.cached().version)
  return JvmPlatformSettings(
    level, level, [], name=f'(DistributionLocator.cached().version {level})')
def execute_tool(self, classpath, main, args=None):
  """Spawn a java tool and yield its decoded stdout, asserting a clean, silent exit.

  :param list classpath: classpath entries for the tool.
  :param string main: fully qualified main class to run.
  :param list args: optional program arguments.
  """
  init_subsystem(DistributionLocator)
  runner = SubprocessExecutor(DistributionLocator.cached())
  proc = runner.spawn(classpath, main, args=args,
                      stdout=subprocess.PIPE, stderr=subprocess.PIPE)
  stdout, stderr = proc.communicate()
  # The tool must exit 0 and write nothing to stderr.
  self.assertEqual(0, proc.returncode)
  self.assertEqual('', stderr.strip().decode())
  yield stdout.decode()
def test_shader_project(self):
    """Test that the binary target at the ``shading_project`` can be built and run.

    Explicitly checks that the classes end up with the correct shaded fully qualified
    classnames.
    """
    shading_project = "testprojects/src/java/org/pantsbuild/testproject/shading"
    # Build the shaded binary from a clean state.
    self.assert_success(self.run_pants(["clean-all"]))
    self.assert_success(self.run_pants(["binary", shading_project]))
    expected_classes = {
        # Explicitly excluded by a shading_exclude() rule.
        "org/pantsbuild/testproject/shadingdep/PleaseDoNotShadeMe.class",
        # Not matched by any rule, so stays the same.
        "org/pantsbuild/testproject/shading/Main.class",
        # Shaded with the target_id prefix, along with the default pants prefix.
        ("__shaded_by_pants__/org/pantsbuild/testproject/shadingdep/otherpackage/"
         "ShadeWithTargetId.class"),
        # Also shaded with the target_id prefix and default pants prefix, but for a different target
        # (so the target_id is different).
        ("__shaded_by_pants__/org/pantsbuild/testproject/shading/ShadeSelf.class"
         ),
        # All these are shaded by the same shading_relocate_package(), which is recursive by default.
        "__shaded_by_pants__/org/pantsbuild/testproject/shadingdep/subpackage/Subpackaged.class",
        "__shaded_by_pants__/org/pantsbuild/testproject/shadingdep/SomeClass.class",
        "__shaded_by_pants__/org/pantsbuild/testproject/shadingdep/Dependency.class",
        # Shaded by a shading_relocate() that completely renames the package and class name.
        "org/pantsbuild/testproject/foo/bar/MyNameIsDifferentNow.class",
    }
    path = os.path.join("dist", "shading.jar")
    init_subsystem(DistributionLocator)
    execute_java = DistributionLocator.cached(
        minimum_version="1.6").execute_java
    # The shaded jar must run under both the original and the relocated main classes.
    self.assertEqual(
        0,
        execute_java(classpath=[path],
                     main="org.pantsbuild.testproject.shading.Main"))
    self.assertEqual(
        0,
        execute_java(
            classpath=[path],
            main="org.pantsbuild.testproject.foo.bar.MyNameIsDifferentNow"
        ),
    )
    # Collect every .class entry in the jar and compare against the expected set.
    received_classes = set()
    with temporary_dir() as tempdir:
        ZIP.extract(path, tempdir, filter_func=lambda f: f.endswith(".class"))
        for root, dirs, files in os.walk(tempdir):
            for name in files:
                received_classes.add(
                    os.path.relpath(os.path.join(root, name), tempdir))
    self.assertEqual(expected_classes, received_classes)
def execute_codegen(self, target, target_workdir):
    """Run the Wire compiler for ``target``, emitting generated sources into ``target_workdir``.

    :raises TaskError: if the Wire compiler exits non-zero.
    """
    args = self.format_args_for_target(target, target_workdir)
    # Nothing to do when the target yields no compiler arguments.
    if not args:
        return
    returncode = DistributionLocator.cached().execute_java(
        classpath=self.tool_classpath("wire-compiler"),
        main="com.squareup.wire.WireCompiler",
        args=args,
    )
    if returncode != 0:
        raise TaskError("Wire compiler exited non-zero ({0})".format(returncode))
def assert_run_ant_version(classpath):
  """Run Ant's main class from `classpath` and assert it reports version 1.9.4.

  NOTE(review): relies on `self` from the enclosing scope — this is a nested test helper.
  """
  with subsystem_instance(DistributionLocator):
    runner = SubprocessExecutor(DistributionLocator.cached())
    proc = runner.spawn(classpath, 'org.apache.tools.ant.Main', args=['-version'],
                        stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout, stderr = proc.communicate()
    # Ant must exit cleanly, print the expected version banner, and stay silent on stderr.
    self.assertEqual(0, proc.returncode)
    self.assertTrue(stdout.strip().startswith('Apache Ant(TM) version 1.9.4'))
    self.assertEqual('', stderr.strip())
def execute(self):
  """Run the single root JVM binary target, or no-op if the target is not a JVM binary."""
  # The called binary may block for a while, allow concurrent pants activity during this pants
  # idle period.
  #
  # TODO(John Sirois): refactor lock so that I can do:
  # with self.context.lock.yield():
  #   - blocking code
  #
  # Currently re-acquiring the lock requires a path argument that was set up by the goal
  # execution engine. I do not want task code to learn the lock location.
  # http://jira.local.twitter.com/browse/AWESOME-1317
  target = self.require_single_root_target()

  # Resolve the working directory: explicit --cwd wins; a bare --cwd flag falls back to
  # the target's spec path; otherwise working_dir stays None.
  working_dir = None
  cwd_opt = self.get_options().cwd
  if cwd_opt != _CWD_NOT_PRESENT:
    working_dir = self.get_options().cwd
    if not working_dir:
      working_dir = target.address.spec_path
  logger.debug("Working dir is {0}".format(working_dir))

  # A JvmApp wraps the actual binary target.
  if isinstance(target, JvmApp):
    binary = target.binary
  else:
    binary = target

  # This task is installed in the "run" goal.
  # This means that, when invoked with ./pants run, it will run regardless of whether
  # the target is a jvm target.
  # As a result, not all targets passed here will have good defaults for extra_jvm_options
  extra_jvm_options = binary.payload.get_field_value("extra_jvm_options", [])

  # We can't throw if binary isn't a JvmBinary, because perhaps we were called on a
  # python_binary, in which case we have to no-op and let python_run do its thing.
  # TODO(benjy): Some more elegant way to coordinate how tasks claim targets.
  if isinstance(binary, JvmBinary):
    jvm = DistributionLocator.cached()
    # With --only-write-cmd-line the java command is captured instead of executed.
    executor = CommandLineGrabber(jvm) if self.only_write_cmd_line else None
    self.context.release_lock()
    with self.context.new_workunit(name='run', labels=[WorkUnitLabel.RUN]):
      result = jvm.execute_java(
        classpath=self.classpath([target]),
        main=self.get_options().main or binary.main,
        executor=executor,
        jvm_options=self.jvm_options + extra_jvm_options,
        args=self.args,
        cwd=working_dir,
        synthetic_jar_dir=self.workdir,
        create_synthetic_jar=self.synthetic_classpath
      )
      if self.only_write_cmd_line:
        with safe_open(expand_path(self.only_write_cmd_line), 'w') as outfile:
          outfile.write(' '.join(executor.cmd))
      elif result != 0:
        raise TaskError('java {} ... exited non-zero ({})'.format(binary.main, result),
                        exit_code=result)
def execute_codegen(self, target, target_workdir):
  """Generate sources for `target` into `target_workdir` via the Wire compiler.

  :raises TaskError: if the Wire compiler exits non-zero.
  """
  args = self.format_args_for_target(target, target_workdir)
  # No args means nothing to generate for this target.
  if not args:
    return
  exit_code = DistributionLocator.cached().execute_java(
    classpath=self.tool_classpath('wire-compiler'),
    main='com.squareup.wire.WireCompiler',
    args=args)
  if exit_code != 0:
    raise TaskError('Wire compiler exited non-zero ({0})'.format(exit_code))
def locate_tools_jar():
  """Return the located JDK's tools.jar libs, or [] (with an info log) when no JDK is found.

  NOTE(review): relies on `self` from the enclosing scope — this is a nested helper.
  """
  try:
    jdk = DistributionLocator.cached(jdk=True)
    return jdk.find_libs(['tools.jar'])
  except DistributionLocator.Error:
    # Best-effort: Zinc still works without tools.jar, just slower.
    self.context.log.info('Failed to locate tools.jar. Install a JDK to increase performance of Zinc.')
    return []
def create(cls, context, executor=None):
  """Creates and returns a new Shader.

  :param Executor executor: Optional java executor to run jarjar with.
  """
  # Default to a subprocess executor backed by the cached default JVM.
  if executor is None:
    executor = SubprocessExecutor(DistributionLocator.cached())
  jarjar_classpath = cls.global_instance().tool_classpath_from_products(
    context.products, 'jarjar', cls.options_scope)
  return Shader(jarjar_classpath, executor)
def execute_codegen(self, target, target_workdir):
  """Run the sake-wire codegen CLI for `target`, writing output under `target_workdir`.

  :raises TaskError: if the compiler exits non-zero.
  """
  args = self._format_args_for_target(target, target_workdir)
  if not args:
    # Nothing to generate for this target.
    return
  # NB(zundel): execute_java() will use nailgun by default. The main class here is the
  # sake-wire-codegen entry point rather than the stock Wire compiler.
  exit_code = DistributionLocator.cached().execute_java(
    classpath=self.tool_classpath('wire-compiler'),
    main='com.squareup.sake.wire.SakeWireCodegenCli',
    args=args)
  if exit_code != 0:
    raise TaskError('Wire compiler exited non-zero ({0})'.format(exit_code))
def create_javadoc_command(self, classpath, gendir, *targets):
  """Build the javadoc command line for the given targets, or None when there are no sources.

  :param classpath: classpath entries for the code under documentation.
  :param gendir: output directory for the generated docs.
  :param targets: targets whose sources should be documented.
  :returns: the full command (list) to run, or None if the targets contribute no sources.
  """
  sources = []
  for target in targets:
    sources.extend(target.sources_relative_to_buildroot())

  if not sources:
    return None

  # Without a JDK/tools.jar we have no javadoc tool and cannot proceed, so check/acquire early.
  jdk = DistributionLocator.cached(jdk=True)
  tool_classpath = jdk.find_libs(['tools.jar'])

  args = ['-quiet',
          '-encoding', 'UTF-8',
          '-notimestamp',
          '-use',
          '-Xmaxerrs', '10000',  # the default is 100
          '-Xmaxwarns', '10000',  # the default is 100
          '-d', gendir]

  # Always provide external linking for java API
  offlinelinks = {'http://download.oracle.com/javase/8/docs/api/'}

  def link(target):
    # Collect apidocs links from all jar dependencies for -linkoffline.
    for jar in target.jar_dependencies:
      if jar.apidocs:
        offlinelinks.add(jar.apidocs)

  for target in targets:
    target.walk(link, lambda t: isinstance(t, (JvmTarget, JarLibrary)))

  for link in offlinelinks:
    args.extend(['-linkoffline', link, link])

  args.extend(self.args)

  # Pass the (potentially very long) classpath via an @argfile to avoid command-length limits.
  javadoc_classpath_file = os.path.join(self.workdir,
                                        '{}.classpath'.format(os.path.basename(gendir)))
  with open(javadoc_classpath_file, 'w') as f:
    f.write('-classpath ')
    f.write(':'.join(classpath))
  args.extend(['@{}'.format(javadoc_classpath_file)])

  # Likewise pass the source file list via an @argfile.
  javadoc_sources_file = os.path.join(self.workdir,
                                      '{}.source.files'.format(os.path.basename(gendir)))
  with open(javadoc_sources_file, 'w') as f:
    f.write('\n'.join(sources))
  args.extend(['@{}'.format(javadoc_sources_file)])

  java_executor = SubprocessExecutor(jdk)
  runner = java_executor.runner(jvm_options=self.jvm_options,
                                classpath=tool_classpath,
                                main='com.sun.tools.javadoc.Main',
                                args=args)
  return runner.command
def execute_codegen(self, targets):
    """Invoke the Wire compiler once per target.

    Because the wire compiler has flags that try to reduce the amount of code emitted,
    invoking them all together will break if one target specifies a service_writer and
    another does not, or if one specifies roots and another does not.

    :raises TaskError: if any compiler invocation exits non-zero.
    """
    run_java = DistributionLocator.cached().execute_java
    for tgt in targets:
        tgt_args = self.format_args_for_target(tgt)
        if not tgt_args:
            continue
        exit_code = run_java(
            classpath=self.tool_classpath("wire-compiler"),
            main="com.squareup.wire.WireCompiler",
            args=tgt_args,
        )
        if exit_code != 0:
            raise TaskError("Wire compiler exited non-zero ({0})".format(exit_code))
def execute_junit_runner(self, content):
    """Compile the given Java test source and run it through the JUnitRun task.

    :param string content: Java source for the FooTest.java test class to compile and run.
    """
    # Create the temporary base test directory
    test_rel_path = "tests/java/org/pantsbuild/foo"
    test_abs_path = self.create_dir(test_rel_path)

    # Generate the temporary java test source code.
    test_java_file_rel_path = os.path.join(test_rel_path, "FooTest.java")
    test_java_file_abs_path = self.create_file(test_java_file_rel_path, content)

    # Create the temporary classes directory under work dir
    test_classes_abs_path = self.create_workdir_dir(test_rel_path)

    # Invoke ivy to resolve classpath for junit.
    classpath_file_abs_path = os.path.join(test_abs_path, "junit.classpath")
    with subsystem_instance(IvySubsystem) as ivy_subsystem:
        # A JDK (not just a JRE) is required so javac is available below.
        distribution = DistributionLocator.cached(jdk=True)
        ivy = Bootstrapper(ivy_subsystem=ivy_subsystem).ivy()
        ivy.execute(
            args=["-cachepath", classpath_file_abs_path,
                  "-dependency", "junit", "junit-dep", "4.10"],
            executor=SubprocessExecutor(distribution=distribution),
        )
    with open(classpath_file_abs_path) as fp:
        classpath = fp.read()

    # Now directly invoking javac to compile the test java code into java class
    # so later we can inject the class into products mapping for JUnitRun to execute
    # the test on.
    javac = distribution.binary("javac")
    subprocess.check_call(
        [javac, "-d", test_classes_abs_path, "-cp", classpath, test_java_file_abs_path])

    # Create a java_tests target and a synthetic resource target.
    java_tests = self.create_library(test_rel_path, "java_tests", "foo_test", ["FooTest.java"])
    resources = self.make_target("some_resources", Resources)

    # Set the context with the two targets, one java_tests target and
    # one synthetic resources target.
    # The synthetic resources target is to make sure we won't regress
    # in the future with bug like https://github.com/pantsbuild/pants/issues/508. Note
    # in that bug, the resources target must be the first one in the list.
    context = self.context(target_roots=[resources, java_tests])

    # Before we run the task, we need to inject the "runtime_classpath" with
    # the compiled test java classes that JUnitRun will know which test
    # classes to execute. In a normal run, this "runtime_classpath" will be
    # populated by java compilation step.
    self.populate_runtime_classpath(context=context, classpath=[test_classes_abs_path])

    # Finally execute the task.
    self.execute(context)
def resolve_jars(self, targets):
  """Resolve jar dependencies for the given targets.

  :param targets: targets that have dependencies to resolve
  :return: structure containing the path to resolved jars
  :rtype: ClasspathProducts
  """
  products = self.context.products.get_data(
    'classpath_products',
    init_func=ClasspathProducts.init_func(self.get_options().pants_workdir))
  # Resolve into the shared classpath products using a subprocess java executor.
  self.resolve(
    executor=SubprocessExecutor(DistributionLocator.cached()),
    targets=targets,
    classpath_products=products,
    confs=['default'],
    extra_args=())
  return products
def _bundle_and_run(self, bundle_args, classpath):
  """Bundle the shading 'third' testproject, run it, and verify the shaded class names it reports.

  :param list bundle_args: pants goal/flags to run before the target spec is appended.
  :param list classpath: classpath entries to run the bundled Third main class with.
  """
  self.assert_success(self.run_pants(['clean-all']))
  command = list(bundle_args)
  command.append('testprojects/src/java/org/pantsbuild/testproject/shading:third')
  self.assert_success(self.run_pants(command))

  with subsystem_instance(DistributionLocator):
    runner = SubprocessExecutor(DistributionLocator.cached(minimum_version='1.7'))
    process = runner.spawn(classpath, 'org.pantsbuild.testproject.shading.Third',
                           stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout, stderr = process.communicate()
    self.assertEqual(0, process.returncode, stderr)
    # Third prints a JSON map of simple name -> (possibly shaded) fully qualified name.
    expected = {
      'Gson': 'moc.elgoog.nosg.Gson',
      'Third': 'org.pantsbuild.testproject.shading.Third',
      'Second': 'hello.org.pantsbuild.testproject.shading.Second',
    }
    self.assertEqual(expected, json.loads(stdout.strip()))
def test_shader_project(self):
  """Test that the binary target at the ``shading_project`` can be built and run.

  Explicitly checks that the classes end up with the correct shaded fully qualified
  classnames.
  """
  shading_project = 'testprojects/src/java/org/pantsbuild/testproject/shading'
  # Build the shaded binary from a clean state.
  self.assert_success(self.run_pants(['clean-all']))
  self.assert_success(self.run_pants(['binary', shading_project]))

  expected_classes = {
    # Explicitly excluded by a shading_exclude() rule.
    'org/pantsbuild/testproject/shadingdep/PleaseDoNotShadeMe.class',
    # Not matched by any rule, so stays the same.
    'org/pantsbuild/testproject/shading/Main.class',
    # Shaded with the target_id prefix, along with the default pants prefix.
    ('__shaded_by_pants__/org/pantsbuild/testproject/shadingdep/otherpackage/'
     'ShadeWithTargetId.class'),
    # Also shaded with the target_id prefix and default pants prefix, but for a different target
    # (so the target_id is different).
    ('__shaded_by_pants__/org/pantsbuild/testproject/shading/ShadeSelf.class'),
    # All these are shaded by the same shading_relocate_package(), which is recursive by default.
    '__shaded_by_pants__/org/pantsbuild/testproject/shadingdep/subpackage/Subpackaged.class',
    '__shaded_by_pants__/org/pantsbuild/testproject/shadingdep/SomeClass.class',
    '__shaded_by_pants__/org/pantsbuild/testproject/shadingdep/Dependency.class',
    # Shaded by a shading_relocate() that completely renames the package and class name.
    'org/pantsbuild/testproject/foo/bar/MyNameIsDifferentNow.class',
  }

  path = os.path.join('dist', 'shading.jar')
  init_subsystem(DistributionLocator)
  execute_java = DistributionLocator.cached(minimum_version='1.6').execute_java
  # The shaded jar must run under both the original and the relocated main classes.
  self.assertEqual(0, execute_java(classpath=[path],
                                   main='org.pantsbuild.testproject.shading.Main'))
  self.assertEqual(0, execute_java(classpath=[path],
                                   main='org.pantsbuild.testproject.foo.bar.MyNameIsDifferentNow'))

  # Collect every .class entry in the jar and compare against the expected set.
  received_classes = set()
  with temporary_dir() as tempdir:
    ZIP.extract(path, tempdir, filter_func=lambda f: f.endswith('.class'))
    for root, dirs, files in os.walk(tempdir):
      for name in files:
        received_classes.add(os.path.relpath(os.path.join(root, name), tempdir))
  self.assertEqual(expected_classes, received_classes)