def dumped_chroot(self, targets):
  # Builds a PythonChroot for `targets` inside a temporary directory and
  # yields (pex_builder, python_chroot); the chroot is deleted on exit.
  # NOTE(review): presumably @contextmanager-decorated at the definition
  # site — confirm.
  python_repos = create_subsystem(PythonRepos)
  with subsystem_instance(IvySubsystem) as ivy_subsystem:
    ivy_bootstrapper = Bootstrapper(ivy_subsystem=ivy_subsystem)
    with subsystem_instance(ThriftBinary.Factory) as thrift_binary_factory:
      interpreter_cache = PythonInterpreterCache(self.python_setup, python_repos)
      interpreter_cache.setup()
      # Use the first interpreter matching the configured requirement.
      interpreters = list(interpreter_cache.matched_interpreters([
        self.python_setup.interpreter_requirement]))
      self.assertGreater(len(interpreters), 0)
      interpreter = interpreters[0]
      with temporary_dir() as chroot:
        pex_builder = PEXBuilder(path=chroot, interpreter=interpreter)
        python_chroot = PythonChroot(python_setup=self.python_setup,
                                     python_repos=python_repos,
                                     ivy_bootstrapper=ivy_bootstrapper,
                                     thrift_binary_factory=thrift_binary_factory.create,
                                     interpreter=interpreter,
                                     builder=pex_builder,
                                     targets=targets,
                                     platforms=['current'])
        try:
          python_chroot.dump()
          yield pex_builder, python_chroot
        finally:
          # Always clean up dumped chroot contents, even on test failure.
          python_chroot.delete()
def dumped_chroot(self, targets):
  # Variant of dumped_chroot using the newer `matches` cache API: builds a
  # PythonChroot for `targets` in a temp dir, yields
  # (pex_builder, python_chroot), and deletes the chroot on exit.
  python_repos = create_subsystem(PythonRepos)
  with subsystem_instance(IvySubsystem) as ivy_subsystem:
    ivy_bootstrapper = Bootstrapper(ivy_subsystem=ivy_subsystem)
    with subsystem_instance(ThriftBinary.Factory) as thrift_binary_factory:
      interpreter_cache = PythonInterpreterCache(self.python_setup, python_repos)
      interpreter_cache.setup()
      # Use the first interpreter matching the configured requirement.
      interpreters = list(interpreter_cache.matches(
        [self.python_setup.interpreter_requirement]))
      self.assertGreater(len(interpreters), 0)
      interpreter = interpreters[0]
      with temporary_dir() as chroot:
        pex_builder = PEXBuilder(path=chroot, interpreter=interpreter)
        python_chroot = PythonChroot(python_setup=self.python_setup,
                                     python_repos=python_repos,
                                     ivy_bootstrapper=ivy_bootstrapper,
                                     thrift_binary_factory=thrift_binary_factory.create,
                                     interpreter=interpreter,
                                     builder=pex_builder,
                                     targets=targets,
                                     platforms=['current'])
        try:
          python_chroot.dump()
          yield pex_builder, python_chroot
        finally:
          # Always clean up dumped chroot contents, even on test failure.
          python_chroot.delete()
def missing_jvm(version):
  """Return True when no JVM in the [version, version.9999] range can be located."""
  max_version = '{}.9999'.format(version)
  with subsystem_instance(DistributionLocator):
    try:
      DistributionLocator.locate(minimum_version=version, maximum_version=max_version)
    except DistributionLocator.Error:
      return True
    return False
def is_missing_jvm(version):
  """Return True when no cached JVM satisfies [version, version.9999]."""
  upper_bound = '{}.9999'.format(version)
  with subsystem_instance(DistributionLocator):
    try:
      DistributionLocator.cached(minimum_version=version, maximum_version=upper_bound)
    except DistributionLocator.Error:
      return True
    return False
def do_test_thrift(self, inspect_chroot=None):
  # Generator fixture: sets up a chain of thrift targets plus a python
  # binary depending on them, yields (binary, test_const) so callers can
  # mutate targets, then builds and runs the resulting pex and asserts its
  # output.  `inspect_chroot`, if given, is called with the built chroot path.
  #
  # TODO(benjy): This hacks around PythonChroot's dependency on source roots.
  # Most tests get SourceRoot functionality set up for them by their test context.
  # However PythonChroot isn't a task and doesn't use context. Rather it accesses source roots
  # directly via Target.target_base. Remove this when we have a better way.
  with subsystem_instance(SourceRootConfig):
    self.create_file(relpath='src/thrift/core/identifiers.thrift', contents=dedent("""
      namespace py core
      const string HELLO = "Hello"
      const string WORLD = "World!"
    """))
    core_const = self.make_target(spec='src/thrift/core',
                                  target_type=PythonThriftLibrary,
                                  sources=['identifiers.thrift'])

    self.create_file(relpath='src/thrift/test/const.thrift', contents=dedent("""
      namespace py test
      include "core/identifiers.thrift"
      const list<string> MESSAGE = [identifiers.HELLO, identifiers.WORLD]
    """))
    test_const = self.make_target(spec='src/thrift/test',
                                  target_type=PythonThriftLibrary,
                                  sources=['const.thrift'],
                                  dependencies=[core_const])

    self.create_file(relpath='src/python/test/main.py', contents=dedent("""
      from test.constants import MESSAGE
      def say_hello():
        print(' '.join(MESSAGE))
    """))
    binary = self.make_target(spec='src/python/test',
                              target_type=PythonBinary,
                              source='main.py',
                              dependencies=[test_const])

    # Hand control back to the caller before the chroot is dumped.
    yield binary, test_const

    with self.dumped_chroot([binary]) as (pex_builder, python_chroot):
      pex_builder.set_entry_point('test.main:say_hello')
      pex_builder.freeze()
      pex = python_chroot.pex()

      process = pex.run(blocking=False, stdout=subprocess.PIPE)
      stdout, _ = process.communicate()

      self.assertEqual(0, process.returncode)
      self.assertEqual('Hello World!', stdout.strip())

      if inspect_chroot:
        # Snap a clean copy of the chroot with just the chroots added files.
        chroot = pex_builder.clone().path()
        inspect_chroot(chroot)
def test_go_thrift_gen_simple(self):
  # Runs `gen` against a go thrift target and verifies exactly the expected
  # generated go files appear under the versioned gen output dir.
  with self.temporary_workdir() as workdir:
    args = ['gen', 'contrib/go/testprojects/src/thrift/thrifttest:fleem']
    pants_run = self.run_pants_with_workdir(args, workdir)
    self.assert_success(pants_run)
    with subsystem_instance(GoDistribution.Factory) as factory:
      go_dist = factory.create()
      # NOTE(review): return values are discarded — these calls appear to
      # only smoke-test that the go toolchain answers `go env`; confirm intent.
      go_dist.create_go_cmd('env', args=['GOOS']).check_output().strip()
      go_dist.create_go_cmd('env', args=['GOARCH']).check_output().strip()
      expected_files = {
        'src/go/thrifttest/duck/constants.go',
        'src/go/thrifttest/duck/ttypes.go',
      }
      # Fetch the hash for task impl version.
      go_thrift_contents = os.listdir(os.path.join(workdir, 'gen', 'go-thrift'))
      self.assertEqual(len(go_thrift_contents), 1)
      root = os.path.join(workdir, 'gen', 'go-thrift', go_thrift_contents[0],
                          'contrib.go.testprojects.src.thrift.thrifttest.fleem',
                          'current')
      self.assertEquals(sorted(expected_files), sorted(exact_files(root)))
def scala_platform_setup(self):
  # Fixture: configures a 'custom' 2.10 scala-platform and registers the jar
  # targets the platform tools resolve against, then yields to the test body.
  options = {
    'scala-platform': {
      'version': 'custom',
      'suffix_version': '2.10',
      'runtime_spec': '//:scala-library-custom',
    }
  }
  with subsystem_instance(ScalaPlatform, **options):
    self.make_target('//:scalastyle', JarLibrary,
                     jars=[JarDependency('org.scalastyle', 'scalastyle_2.10', '0.3.2')])
    self.make_target('//:scala-repl', JarLibrary,
                     jars=[
                       JarDependency(org='org.scala-lang', name='jline', rev='2.10.5'),
                       JarDependency(org='org.scala-lang', name='scala-compiler', rev='2.10.5')])
    self.make_target('//:scalac', JarLibrary,
                     jars=[JarDependency('org.scala-lang', 'scala-compiler', '2.10.5')])
    yield
def test_force_override(self):
  """Generated ivy.xml must mark the managed dep2 as forced with an override.

  Verifies the configurations/conf element, both dependency elements with
  their conf mappings, and the dependencies/override element for dep2.
  """
  jars = list(self.a.payload.jars)
  with temporary_file_path() as ivyxml:
    with subsystem_instance(JarDependencyManagement):
      IvyUtils.generate_ivy([self.a], jars=jars, excludes=[], ivyxml=ivyxml,
                            confs=['default'])

      doc = ET.parse(ivyxml).getroot()

      conf = self.find_single(doc, 'configurations/conf')
      self.assert_attributes(conf, name='default')

      dependencies = list(doc.findall('dependencies/dependency'))
      self.assertEqual(2, len(dependencies))

      dep1 = dependencies[0]
      self.assert_attributes(dep1, org='org1', name='name1', rev='rev1')
      conf = self.find_single(dep1, 'conf')
      self.assert_attributes(conf, name='default', mapped='default')

      dep2 = dependencies[1]
      self.assert_attributes(dep2, org='org2', name='name2', rev='rev2', force='true')
      # Bug fix: this previously re-inspected dep1's conf, leaving dep2's
      # conf element entirely unverified.
      conf = self.find_single(dep2, 'conf')
      self.assert_attributes(conf, name='default', mapped='default')

      override = self.find_single(doc, 'dependencies/override')
      self.assert_attributes(override, org='org2', module='name2', rev='rev2')
def test_preferred_jvm_distributions(self):
  # Two fake distributions back the 'java9999' platform; the export goal
  # should report both a strict and a non-strict preferred home for it.
  self.set_options_for_scope('jvm-platform',
                             default_platform='java9999',
                             platforms={
                               'java9999': {'target': '9999'},
                               'java10000': {'target': '10000'}
                             })
  with self.fake_distribution(version='9999') as strict_home:
    with self.fake_distribution(version='10000') as non_strict_home:
      self.set_options_for_scope('jvm-distributions',
                                 paths={
                                   normalize_os_name(get_os_name()): [strict_home, non_strict_home]
                                 })
      with subsystem_instance(DistributionLocator) as locator:
        locator._reset()  # Make sure we get a fresh read from the options set just above.
        self.addCleanup(locator._reset)  # And make sure we we clean up the values we cache.
        export_json = self.execute_export_json()
        self.assertEqual({'strict': strict_home, 'non_strict': non_strict_home},
                         export_json['preferred_jvm_distributions']['java9999'])
def pants_daemon_launcher(self, options=None):
  """Yield a PantsDaemonLauncher wired to this test's mock pantsd and watchman."""
  opts = options or {}
  with subsystem_instance(PantsDaemonLauncher.Factory, **opts) as factory:
    launcher = factory.create(None)
    launcher.pantsd = self.mock_pantsd
    launcher.watchman_launcher = self.mock_watchman_launcher
    yield launcher
def test_force_override(self):
  """Generated ivy.xml must mark the managed dep2 as forced with an override.

  Checks the single default conf, both dependency elements and their conf
  mappings, plus the dependencies/override element for dep2.
  """
  jars = list(self.a.payload.jars)
  with temporary_file_path() as ivyxml:
    with subsystem_instance(JarDependencyManagement):
      IvyUtils.generate_ivy([self.a], jars=jars, excludes=[], ivyxml=ivyxml,
                            confs=['default'])

      doc = ET.parse(ivyxml).getroot()

      conf = self.find_single(doc, 'configurations/conf')
      self.assert_attributes(conf, name='default')

      dependencies = list(doc.findall('dependencies/dependency'))
      self.assertEqual(2, len(dependencies))

      dep1 = dependencies[0]
      self.assert_attributes(dep1, org='org1', name='name1', rev='rev1')
      conf = self.find_single(dep1, 'conf')
      self.assert_attributes(conf, name='default', mapped='default')

      dep2 = dependencies[1]
      self.assert_attributes(dep2, org='org2', name='name2', rev='rev2', force='true')
      # Bug fix: this previously re-inspected dep1's conf, leaving dep2's
      # conf element entirely unverified.
      conf = self.find_single(dep2, 'conf')
      self.assert_attributes(conf, name='default', mapped='default')

      override = self.find_single(doc, 'dependencies/override')
      self.assert_attributes(override, org='org2', module='name2', rev='rev2')
def test_exported_antlr(self):
  # An exported antlr python library should execute through the task and
  # produce exactly the one expected synthetic target.
  self.create_file(relpath='src/antlr/exported/exported.g', contents=dedent("""
    grammar exported;
    options {
      language = Python;
    }
    WORD: ('a'..'z'|'A'..'Z'|'0'..'9'|'-'|'_')+;
    static: WORD;
  """))
  target = self.make_target(spec='src/antlr/exported',
                            target_type=PythonAntlrLibrary,
                            antlr_version='3.1.3',
                            sources=['exported.g'],
                            module='exported',
                            provides=PythonArtifact(name='test.exported', version='0.0.0'))

  # TODO(John Sirois): This hacks around a direct but undeclared dependency
  # `pants.java.distribution.distribution.Distribution` gained in
  # https://rbcommons.com/s/twitter/r/2657
  # Remove this once proper Subsystem dependency chains are re-established.
  with subsystem_instance(JVM):
    with self.run_execute(target) as created:
      self.assertEqual([target], created.keys())
def test_no_jvm_restriction(self):
  """With no platform restriction, `run` uses the default located JVM."""
  with subsystem_instance(DistributionLocator):
    distribution = DistributionLocator.locate()
    pants_run = self.run_pants(
      ['run', 'testprojects/src/java/org/pantsbuild/testproject/printversion'])
    self.assert_success(pants_run)
    # The test project prints its java.home; it must match the located JVM.
    self.assertIn('java.home:{}'.format(distribution.home), pants_run.stdout_data)
def test_ivy_classifiers(self):
  # Export a target with classified ivy deps and verify each classifier
  # (default/tests/javadoc/sources) maps to the expected ivy cache path.
  with self.temporary_workdir() as workdir:
    test_target = 'testprojects/tests/java/org/pantsbuild/testproject/ivyclassifier:ivyclassifier'
    json_data = self.run_export(test_target, workdir, load_libs=True)
    with subsystem_instance(IvySubsystem) as ivy_subsystem:
      ivy_cache_dir = ivy_subsystem.get_options().cache_dir
      avro_lib_info = json_data.get('libraries').get('org.apache.avro:avro:1.7.7')
      self.assertIsNotNone(avro_lib_info)
      self.assertEquals(avro_lib_info.get('default'),
                        os.path.join(ivy_cache_dir,
                                     'org.apache.avro/avro/jars/avro-1.7.7.jar'))
      self.assertEquals(avro_lib_info.get('tests'),
                        os.path.join(ivy_cache_dir,
                                     'org.apache.avro/avro/jars/avro-1.7.7-tests.jar'))
      self.assertEquals(avro_lib_info.get('javadoc'),
                        os.path.join(ivy_cache_dir,
                                     'org.apache.avro/avro/javadocs/avro-1.7.7-javadoc.jar'))
      self.assertEquals(avro_lib_info.get('sources'),
                        os.path.join(ivy_cache_dir,
                                     'org.apache.avro/avro/sources/avro-1.7.7-sources.jar'))
def test_force_override(self):
  """Generated ivy.xml must mark the managed dep2 as forced with an override.

  Verifies the single default conf, both dependency elements with their
  conf mappings, and the dependencies/override element for dep2.
  """
  jars = list(self.a.payload.jars)
  with temporary_file_path() as ivyxml:
    with subsystem_instance(JarDependencyManagement):
      IvyUtils.generate_ivy([self.a], jars=jars, excludes=[], ivyxml=ivyxml,
                            confs=["default"])

      doc = ET.parse(ivyxml).getroot()

      conf = self.find_single(doc, "configurations/conf")
      self.assert_attributes(conf, name="default")

      dependencies = list(doc.findall("dependencies/dependency"))
      self.assertEqual(2, len(dependencies))

      dep1 = dependencies[0]
      self.assert_attributes(dep1, org="org1", name="name1", rev="rev1")
      conf = self.find_single(dep1, "conf")
      self.assert_attributes(conf, name="default", mapped="default")

      dep2 = dependencies[1]
      self.assert_attributes(dep2, org="org2", name="name2", rev="rev2", force="true")
      # Bug fix: this previously re-inspected dep1's conf, leaving dep2's
      # conf element entirely unverified.
      conf = self.find_single(dep2, "conf")
      self.assert_attributes(conf, name="default", mapped="default")

      override = self.find_single(doc, "dependencies/override")
      self.assert_attributes(override, org="org2", module="name2", rev="rev2")
def test_export_jar_path(self):
  # Export a thrift-using target and verify commons-lang's default/javadoc/
  # sources artifacts resolve to the expected ivy cache paths.
  with self.temporary_workdir() as workdir:
    test_target = 'examples/tests/java/org/pantsbuild/example/usethrift:usethrift'
    json_data = self.run_export(test_target, workdir, load_libs=True)
    with subsystem_instance(IvySubsystem) as ivy_subsystem:
      ivy_cache_dir = ivy_subsystem.get_options().cache_dir
      common_lang_lib_info = json_data.get('libraries').get('commons-lang:commons-lang:2.5')
      self.assertIsNotNone(common_lang_lib_info)
      self.assertEquals(common_lang_lib_info.get('default'),
                        os.path.join(ivy_cache_dir,
                                     'commons-lang/commons-lang/jars/commons-lang-2.5.jar'))
      self.assertEquals(common_lang_lib_info.get('javadoc'),
                        os.path.join(ivy_cache_dir,
                                     'commons-lang/commons-lang/javadocs/commons-lang-2.5-javadoc.jar'))
      self.assertEquals(common_lang_lib_info.get('sources'),
                        os.path.join(ivy_cache_dir,
                                     'commons-lang/commons-lang/sources/commons-lang-2.5-sources.jar'))
def missing_jvm(version):
  """Return True when no JVM in the [version, version.9999] range can be located."""
  upper_bound = "{}.9999".format(version)
  with subsystem_instance(DistributionLocator):
    try:
      DistributionLocator.locate(minimum_version=version, maximum_version=upper_bound)
    except DistributionLocator.Error:
      return True
    return False
def _distribution_locator(**options):
  """Yield a DistributionLocator subsystem with no cached state.

  The locator is reset on entry (so it reads the supplied options fresh)
  and again on exit (so cached values do not leak into other tests).
  """
  with subsystem_instance(DistributionLocator, **options) as dist_locator:
    dist_locator._reset()
    try:
      yield dist_locator
    finally:
      dist_locator._reset()
def setUp(self):
  """Build a Shader around fake jarjar/output paths and a real JVM executor."""
  self.jarjar = '/not/really/jarjar.jar'
  with subsystem_instance(DistributionLocator):
    jvm_executor = SubprocessExecutor(DistributionLocator.cached())
    self.shader = Shader(jarjar_classpath=[self.jarjar], executor=jvm_executor)
  self.output_jar = '/not/really/shaded.jar'
def scala_platform_setup(self):
  # Fixture: configures a 'custom' 2.10 scala-platform and registers the jar
  # targets the platform tools resolve against, then yields to the test body.
  options = {
    'scala-platform': {
      'version': 'custom',
      'suffix_version': '2.10',
      'runtime_spec': '//:scala-library-custom',
    }
  }
  with subsystem_instance(ScalaPlatform, **options):
    self.make_target('//:scalastyle', JarLibrary, jars=[
      JarDependency('org.scalastyle', 'scalastyle_2.10', '0.3.2')
    ])
    self.make_target('//:scala-repl', JarLibrary, jars=[
      JarDependency(org='org.scala-lang', name='jline', rev='2.10.5'),
      JarDependency(org='org.scala-lang', name='scala-compiler', rev='2.10.5')
    ])
    self.make_target('//:scalac', JarLibrary, jars=[
      JarDependency('org.scala-lang', 'scala-compiler', '2.10.5')
    ])
    yield
def test_exported_antlr(self):
  # Same as the sibling antlr test, but explicitly registers the source root
  # (this test's context does not set one up for src/antlr).
  SourceRoot.register('src/antlr', PythonThriftLibrary)
  self.create_file(relpath='src/antlr/exported/exported.g', contents=dedent("""
    grammar exported;
    options {
      language = Python;
    }
    WORD: ('a'..'z'|'A'..'Z'|'0'..'9'|'-'|'_')+;
    static: WORD;
  """))
  target = self.make_target(spec='src/antlr/exported',
                            target_type=PythonAntlrLibrary,
                            antlr_version='3.1.3',
                            sources=['exported.g'],
                            module='exported',
                            provides=PythonArtifact(name='test.exported', version='0.0.0'))

  # TODO(John Sirois): This hacks around a direct but undeclared dependency
  # `pants.java.distribution.distribution.Distribution` gained in
  # https://rbcommons.com/s/twitter/r/2657
  # Remove this once proper Subsystem dependency chains are re-established.
  with subsystem_instance(JVM):
    with self.run_execute(target) as created:
      self.assertEqual([target], created.keys())
def _bundle_and_run(self, bundle_args, classpath):
  # Clean, bundle the shading `third` test project, then run its main class
  # on `classpath` and assert the (shaded) class-name mapping it reports.
  self.assert_success(self.run_pants(['clean-all']))
  pants_command = list(bundle_args)
  pants_command.append('testprojects/src/java/org/pantsbuild/testproject/shading:third')
  self.assert_success(self.run_pants(pants_command))

  main_class = 'org.pantsbuild.testproject.shading.Third'
  with subsystem_instance(DistributionLocator):
    executor = SubprocessExecutor(DistributionLocator.cached(minimum_version='1.7'))
    p = executor.spawn(classpath, main_class,
                       stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out, err = p.communicate()
    self.assertEqual(0, p.returncode, err)
    # The test project's Main prints a json object of simple name ->
    # fully-qualified (possibly shaded) class name.
    class_names = json.loads(out.strip())
    self.assertEqual({
      'Gson': 'moc.elgoog.nosg.Gson',
      'Third': 'org.pantsbuild.testproject.shading.Third',
      'Second': 'hello.org.pantsbuild.testproject.shading.Second',
    }, class_names)
def execute_junit_runner(self, content):
  """Compile `content` as FooTest.java and execute it through the JUnitRun task."""
  # Create the temporary base test directory
  test_rel_path = 'tests/java/org/pantsbuild/foo'
  test_abs_path = os.path.join(self.build_root, test_rel_path)
  self.create_dir(test_rel_path)

  # Generate the temporary java test source code.
  test_java_file_rel_path = os.path.join(test_rel_path, 'FooTest.java')
  test_java_file_abs_path = os.path.join(self.build_root, test_java_file_rel_path)
  self.create_file(test_java_file_rel_path, content)

  # Invoke ivy to resolve classpath for junit.
  classpath_file_abs_path = os.path.join(test_abs_path, 'junit.classpath')
  with subsystem_instance(IvySubsystem) as ivy_subsystem:
    distribution = DistributionLocator.cached(jdk=True)
    ivy = Bootstrapper(ivy_subsystem=ivy_subsystem).ivy()
    ivy.execute(args=['-cachepath', classpath_file_abs_path,
                      '-dependency', 'junit', 'junit-dep', '4.10'],
                executor=SubprocessExecutor(distribution=distribution))
  with open(classpath_file_abs_path) as fp:
    classpath = fp.read()

  # Now directly invoking javac to compile the test java code into java class
  # so later we can inject the class into products mapping for JUnitRun to execute
  # the test on.
  javac = distribution.binary('javac')
  subprocess.check_call(
    [javac, '-d', test_abs_path, '-cp', classpath, test_java_file_abs_path])

  # Create a java_tests target and a synthetic resource target.
  java_tests = self.create_library(test_rel_path, 'java_tests', 'foo_test', ['FooTest.java'])
  resources = self.make_target('some_resources', Resources)

  # Set the context with the two targets, one java_tests target and
  # one synthetic resources target.
  # The synthetic resources target is to make sure we won't regress
  # in the future with bug like https://github.com/pantsbuild/pants/issues/508. Note
  # in that bug, the resources target must be the first one in the list.
  context = self.context(target_roots=[resources, java_tests])

  # Before we run the task, we need to inject the "classes_by_target" with
  # the compiled test java classes that JUnitRun will know which test
  # classes to execute. In a normal run, this "classes_by_target" will be
  # populated by java compiling step.
  class_products = context.products.get_data(
    'classes_by_target', lambda: defaultdict(MultipleRootedProducts))
  java_tests_products = MultipleRootedProducts()
  java_tests_products.add_rel_paths(test_abs_path, ['FooTest.class'])
  class_products[java_tests] = java_tests_products

  # Also we need to add the FooTest.class's classpath to the compile_classpath
  # products data mapping so JUnitRun will be able to add that into the final
  # classpath under which the junit will be executed.
  self.populate_compile_classpath(context=context, classpath=[test_abs_path])

  # Finally execute the task.
  self.execute(context)
def execute_junit_runner(self, content):
  """Compile `content` as FooTest.java and execute it through the JUnitRun task.

  Variant using `Distribution.cached` directly rather than DistributionLocator.
  """
  # Create the temporary base test directory
  test_rel_path = 'tests/java/org/pantsbuild/foo'
  test_abs_path = os.path.join(self.build_root, test_rel_path)
  self.create_dir(test_rel_path)

  # Generate the temporary java test source code.
  test_java_file_rel_path = os.path.join(test_rel_path, 'FooTest.java')
  test_java_file_abs_path = os.path.join(self.build_root, test_java_file_rel_path)
  self.create_file(test_java_file_rel_path, content)

  # Invoke ivy to resolve classpath for junit.
  distribution = Distribution.cached(jdk=True)
  executor = SubprocessExecutor(distribution=distribution)
  classpath_file_abs_path = os.path.join(test_abs_path, 'junit.classpath')
  with subsystem_instance(IvySubsystem) as ivy_subsystem:
    ivy = Bootstrapper(ivy_subsystem=ivy_subsystem).ivy()
    ivy.execute(args=['-cachepath', classpath_file_abs_path,
                      '-dependency', 'junit', 'junit-dep', '4.10'],
                executor=executor)
  with open(classpath_file_abs_path) as fp:
    classpath = fp.read()

  # Now directly invoking javac to compile the test java code into java class
  # so later we can inject the class into products mapping for JUnitRun to execute
  # the test on.
  javac = distribution.binary('javac')
  subprocess.check_call(
    [javac, '-d', test_abs_path, '-cp', classpath, test_java_file_abs_path])

  # Create a java_tests target and a synthetic resource target.
  java_tests = self.create_library(test_rel_path, 'java_tests', 'foo_test', ['FooTest.java'])
  resources = self.make_target('some_resources', Resources)

  # Set the context with the two targets, one java_tests target and
  # one synthetic resources target.
  # The synthetic resources target is to make sure we won't regress
  # in the future with bug like https://github.com/pantsbuild/pants/issues/508. Note
  # in that bug, the resources target must be the first one in the list.
  context = self.context(target_roots=[resources, java_tests])

  # Before we run the task, we need to inject the "classes_by_target" with
  # the compiled test java classes that JUnitRun will know which test
  # classes to execute. In a normal run, this "classes_by_target" will be
  # populated by java compiling step.
  class_products = context.products.get_data(
    'classes_by_target', lambda: defaultdict(MultipleRootedProducts))
  java_tests_products = MultipleRootedProducts()
  java_tests_products.add_rel_paths(test_abs_path, ['FooTest.class'])
  class_products[java_tests] = java_tests_products

  # Also we need to add the FooTest.class's classpath to the compile_classpath
  # products data mapping so JUnitRun will be able to add that into the final
  # classpath under which the junit will be executed.
  self.populate_compile_classpath(context=context, classpath=[test_abs_path])

  # Finally execute the task.
  self.execute(context)
def pants_daemon_launcher(self, options=None):
  """Yield a launcher whose pantsd and watchman are replaced by test mocks."""
  with subsystem_instance(PantsDaemonLauncher.Factory, **(options or {})) as factory:
    pdl = factory.create(None)
    # Swap in the mocks so no real daemons are spawned.
    pdl.pantsd = self.mock_pantsd
    pdl.watchman_launcher = self.mock_watchman_launcher
    yield pdl
def visualize_build_request(build_root, goals, subjects):
  """Schedule a build request and render its execution graph."""
  with subsystem_instance(Native.Factory) as native_factory:
    scheduler = setup_json_scheduler(build_root, native_factory.create())
    request = scheduler.build_request(goals, subjects)
    # NB: Calls `schedule` independently of `execute`, in order to render a graph
    # before validating it.
    scheduler.schedule(request)
    visualize_execution_graph(scheduler)
def get_two_distributions():
  """Locate a 1.7 and a 1.8 JDK; return (java7, java8), or None if either is absent."""
  with subsystem_instance(DistributionLocator):
    try:
      java7 = DistributionLocator.locate(minimum_version='1.7',
                                         maximum_version='1.7.9999')
      java8 = DistributionLocator.locate(minimum_version='1.8',
                                         maximum_version='1.8.9999')
    except DistributionLocator.Error:
      return None
    return java7, java8
def get_two_distributions():
  """Return a (java7, java8) distribution pair, or None when either is unavailable."""
  with subsystem_instance(DistributionLocator):
    try:
      bounds = [('1.7', '1.7.9999'), ('1.8', '1.8.9999')]
      java7, java8 = [DistributionLocator.locate(minimum_version=lo, maximum_version=hi)
                      for lo, hi in bounds]
      return java7, java8
    except DistributionLocator.Error:
      return None
def _distribution_locator(**options):
  """Yield a DistributionLocator that starts and ends with empty caches."""
  with subsystem_instance(DistributionLocator, **options) as fresh_locator:
    fresh_locator._reset()  # Start from a clean slate.
    try:
      yield fresh_locator
    finally:
      fresh_locator._reset()  # Don't let cached values leak to other tests.
def test_no_jvm_restriction(self):
  """Running with no JVM restriction should report the default located JVM's home."""
  target_spec = 'testprojects/src/java/org/pantsbuild/testproject/printversion'
  with subsystem_instance(DistributionLocator):
    distribution = DistributionLocator.locate()
    run = self.run_pants(['run', target_spec])
    self.assert_success(run)
    self.assertIn('java.home:{}'.format(distribution.home), run.stdout_data)
def execute_junit_runner(self, content, create_some_resources=True, **kwargs):
  """Compile `content` as FooTest.java and execute it through the JUnitRun task.

  :param content: java source for the FooTest class.
  :param create_some_resources: when True, add a synthetic Resources target
    ahead of the test target in the target roots.
  :param kwargs: may carry `target_name` to run against an existing target
    instead of creating a java_tests library.
  """
  # Create the temporary base test directory
  test_rel_path = 'tests/java/org/pantsbuild/foo'
  test_abs_path = self.create_dir(test_rel_path)

  # Generate the temporary java test source code.
  test_java_file_rel_path = os.path.join(test_rel_path, 'FooTest.java')
  test_java_file_abs_path = self.create_file(test_java_file_rel_path, content)

  # Create the temporary classes directory under work dir
  test_classes_abs_path = self.create_workdir_dir(test_rel_path)

  # Invoke ivy to resolve classpath for junit.
  classpath_file_abs_path = os.path.join(test_abs_path, 'junit.classpath')
  with subsystem_instance(IvySubsystem) as ivy_subsystem:
    distribution = DistributionLocator.cached(jdk=True)
    ivy = Bootstrapper(ivy_subsystem=ivy_subsystem).ivy()
    ivy.execute(args=['-cachepath', classpath_file_abs_path,
                      '-dependency', 'junit', 'junit-dep', '4.10'],
                executor=SubprocessExecutor(distribution=distribution))
  with open(classpath_file_abs_path) as fp:
    classpath = fp.read()

  # Now directly invoking javac to compile the test java code into java class
  # so later we can inject the class into products mapping for JUnitRun to execute
  # the test on.
  javac = distribution.binary('javac')
  subprocess.check_call(
    [javac, '-d', test_classes_abs_path, '-cp', classpath, test_java_file_abs_path])

  # If a target_name is specified, create a target with it, otherwise create a java_tests target.
  if 'target_name' in kwargs:
    target = self.target(kwargs['target_name'])
  else:
    target = self.create_library(test_rel_path, 'java_tests', 'foo_test', ['FooTest.java'])

  target_roots = []
  if create_some_resources:
    # Create a synthetic resource target.
    target_roots.append(self.make_target('some_resources', Resources))
  target_roots.append(target)

  # Set the context with the two targets, one java_tests target and
  # one synthetic resources target.
  # The synthetic resources target is to make sure we won't regress
  # in the future with bug like https://github.com/pantsbuild/pants/issues/508. Note
  # in that bug, the resources target must be the first one in the list.
  context = self.context(target_roots=target_roots)

  # Before we run the task, we need to inject the "runtime_classpath" with
  # the compiled test java classes that JUnitRun will know which test
  # classes to execute. In a normal run, this "runtime_classpath" will be
  # populated by java compilation step.
  self.populate_runtime_classpath(context=context, classpath=[test_classes_abs_path])

  # Finally execute the task.
  self.execute(context)
def test_parse_proxy_string(self):
  """_parse_proxy_string splits a proxy URL into a (host, port) pair."""
  with subsystem_instance(IvySubsystem) as ivy_subsystem:
    cases = [
      ('http://example.com:1234', ('example.com', 1234)),
      ('http://secure-example.com:999', ('secure-example.com', 999)),
      # trailing slash is ok
      ('http://example.com:1234/', ('example.com', 1234)),
    ]
    for proxy_url, expected in cases:
      self.assertEquals(expected, ivy_subsystem._parse_proxy_string(proxy_url))
def assert_run_ant_version(classpath):
  # Runs Ant's Main with -version on `classpath` and asserts the expected
  # version banner with no stderr output.
  # NOTE(review): references `self` but takes no `self` parameter —
  # presumably a closure nested inside a test method capturing `self`
  # from the enclosing scope; confirm at the definition site.
  with subsystem_instance(DistributionLocator):
    executor = SubprocessExecutor(DistributionLocator.cached())
    process = executor.spawn(classpath, 'org.apache.tools.ant.Main', args=['-version'],
                             stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out, err = process.communicate()
    self.assertEqual(0, process.returncode)
    self.assertTrue(out.strip().startswith('Apache Ant(TM) version 1.9.4'))
    self.assertEqual('', err.strip())
def execute_tool(self, classpath, main, args=None):
  """Run `main` from `classpath` in a subprocess JVM, assert clean success, yield stdout."""
  with subsystem_instance(DistributionLocator):
    runner = SubprocessExecutor(DistributionLocator.cached())
    process = runner.spawn(classpath, main, args=args,
                           stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out, err = process.communicate()
    # A clean run exits 0 and writes nothing to stderr.
    self.assertEqual(0, process.returncode)
    self.assertEqual('', err.strip())
    yield out
def test_simple(self):
  """Bootstrapping ivy yields a usable Ivy with the bootstrap jar on disk."""
  with subsystem_instance(IvySubsystem) as ivy_subsystem:
    ivy = Bootstrapper(ivy_subsystem=ivy_subsystem).ivy()
    self.assertIsNotNone(ivy.ivy_cache_dir)
    self.assertIsNone(ivy.ivy_settings)
    expected_jar = os.path.join(ivy_subsystem.get_options().pants_bootstrapdir,
                                'tools', 'jvm', 'ivy', 'bootstrap.jar')
    self.assertTrue(os.path.exists(expected_jar))
def visualize_build_request(build_root, goals, subjects):
  """Reduce a build request once and render the resulting execution graph."""
  with subsystem_instance(Native.Factory) as native_factory:
    scheduler = setup_json_scheduler(build_root, native_factory.create())
    request = scheduler.build_request(goals, subjects)
    # NB: Calls `reduce` independently of `execute`, in order to render a graph
    # before validating it.
    LocalSerialEngine(scheduler, Storage.create()).reduce(request)
    visualize_execution_graph(scheduler)
def do_test_thrift(self, inspect_chroot=None):
  # Generator fixture: builds a chain of thrift targets plus a python binary
  # depending on them, yields (binary, test_const) for caller tweaks, then
  # builds the pex, runs it and asserts its output.  `inspect_chroot`, if
  # given, receives the built chroot path.
  #
  # TODO(benjy): This hacks around PythonChroot's dependency on source roots.
  # Most tests get SourceRoot functionality set up for them by their test context.
  # However PythonChroot isn't a task and doesn't use context. Rather it accesses source roots
  # directly via Target.target_base. Remove this when we have a better way.
  with subsystem_instance(SourceRootConfig):
    self.create_file(relpath='src/thrift/core/identifiers.thrift', contents=dedent("""
      namespace py core
      const string HELLO = "Hello"
      const string WORLD = "World!"
    """))
    core_const = self.make_target(spec='src/thrift/core',
                                  target_type=PythonThriftLibrary,
                                  sources=['identifiers.thrift'])

    self.create_file(relpath='src/thrift/test/const.thrift', contents=dedent("""
      namespace py test
      include "core/identifiers.thrift"
      const list<string> MESSAGE = [identifiers.HELLO, identifiers.WORLD]
    """))
    test_const = self.make_target(spec='src/thrift/test',
                                  target_type=PythonThriftLibrary,
                                  sources=['const.thrift'],
                                  dependencies=[core_const])

    self.create_file(relpath='src/python/test/main.py', contents=dedent("""
      from test.constants import MESSAGE
      def say_hello():
        print(' '.join(MESSAGE))
    """))
    binary = self.make_target(spec='src/python/test',
                              target_type=PythonBinary,
                              source='main.py',
                              dependencies=[test_const])

    # Hand control back to the caller before the chroot is dumped.
    yield binary, test_const

    with self.dumped_chroot([binary]) as (pex_builder, python_chroot):
      pex_builder.set_entry_point('test.main:say_hello')
      pex_builder.freeze()
      pex = python_chroot.pex()

      process = pex.run(blocking=False, stdout=subprocess.PIPE)
      stdout, _ = process.communicate()

      self.assertEqual(0, process.returncode)
      self.assertEqual('Hello World!', stdout.strip())

      if inspect_chroot:
        # Snap a clean copy of the chroot with just the chroots added files.
        chroot = pex_builder.clone().path()
        inspect_chroot(chroot)
def fetcher(self, import_path):
  # Yields the Fetcher selected for `import_path` under a config that maps
  # every import path to the ArchiveFetcher (with catch-all matchers and
  # two sample prefixes).
  with subsystem_instance(Fetchers, **{
      'go-fetchers': {
        'mapping': {'.*': 'ArchiveFetcher'},
      },
      'go-archive-fetcher': {
        'matchers': {'.*': ('', None, 0)},
        'prefixes': ['foo', 'bar/baz'],
      }}) as fetchers:
    yield fetchers.get_fetcher(import_path)
def custom_scala_platform_setup(self):
  # Fixture: registers only the scalastyle tool jar and switches the
  # scala-platform version to 'custom', then yields to the test body.
  with subsystem_instance(ScalaPlatform):
    # We don't need to specify :scalac or :scala-repl since they are never being fetched.
    self.make_target('//:scalastyle', JarLibrary,
                     jars=[JarDependency('org.scalastyle', 'scalastyle_2.10', '0.3.2')],
                     )
    self.set_options_for_scope(ScalaPlatform.options_scope, version='custom')
    yield
def scala_platform_setup(self):
  # Fixture: registers scala compiler/library jar targets and points the
  # scala-platform's scalac at them, then yields to the test body.
  with subsystem_instance(ScalaPlatform):
    self.make_target(':scala-compiler', JarLibrary,
                     jars=[JarDependency('org.scala-lang', 'scala-compiler', '2.10.5')])
    self.make_target(':scala-library', JarLibrary,
                     jars=[JarDependency('org.scala-lang', 'scala-library', '2.10.5')])
    self.set_options_for_scope(ScalaPlatform.options_scope, scalac=':scala-compiler')
    yield
def test_simple(self):
  """A default bootstrap produces a working Ivy and writes the bootstrap jar."""
  with subsystem_instance(IvySubsystem) as ivy_subsystem:
    bootstrapper = Bootstrapper(ivy_subsystem=ivy_subsystem)
    ivy = bootstrapper.ivy()
    # The default config carries a cache dir but no custom settings file.
    self.assertIsNotNone(ivy.ivy_cache_dir)
    self.assertIsNone(ivy.ivy_settings)
    bootstrapdir = ivy_subsystem.get_options().pants_bootstrapdir
    jar_path = os.path.join(bootstrapdir, 'tools', 'jvm', 'ivy', 'bootstrap.jar')
    self.assertTrue(os.path.exists(jar_path))
def scala_platform_setup(self):
  # Fixture: registers 2.10 and 2.11 variants of the scalastyle, compiler,
  # repl and library jar targets, then configures a 'custom' scala-platform
  # whose per-version tool options point at them, and yields to the test.
  with subsystem_instance(ScalaPlatform):
    self.make_target(':scalastyle', JarLibrary,
                     jars=[JarDependency('org.scalastyle', 'scalastyle_2.10', '0.3.2')]
                     )
    self.make_target(':scalastyle_211', JarLibrary,
                     jars=[JarDependency('org.scalastyle', 'scalastyle_2.11', '0.8.0')]
                     )
    self.make_target(':scala-compiler', JarLibrary,
                     jars=[JarDependency('org.scala-lang', 'scala-compiler', '2.10.5')])
    self.make_target(':scala-compiler_211', JarLibrary,
                     jars=[JarDependency('org.scala-lang', 'scala-compiler', '2.11.7')])
    self.make_target(':scala-repl', JarLibrary,
                     jars=[
                       JarDependency(org='org.scala-lang', name='jline', rev='2.10.5'),
                       JarDependency(org='org.scala-lang', name='scala-compiler', rev='2.10.5')])
    self.make_target(':scala-repl_211', JarLibrary,
                     jars=[
                       JarDependency(org='org.scala-lang', name='jline', rev='2.11.7'),
                       JarDependency(org='org.scala-lang', name='scala-compiler', rev='2.11.7')])
    self.make_target(':scala-library', JarLibrary,
                     jars=[JarDependency('org.scala-lang', 'scala-library', '2.10.5')])

    self.set_options_for_scope(ScalaPlatform.options_scope, scalac=':scala-compiler')

    # Scala Platform requires options to be defined for any registered tools in ScalaPlatform,
    # because all jvm tools are bootstrapped.
    self.set_options_for_scope(ScalaPlatform.options_scope, version='custom')
    self.set_options_for_scope(ScalaPlatform.options_scope, scalac_2_10=':scala-compiler')
    self.set_options_for_scope(ScalaPlatform.options_scope, scalac_2_11=':scala-compiler_211')
    self.set_options_for_scope(ScalaPlatform.options_scope, scala_2_10_repl=':scala-repl')
    self.set_options_for_scope(ScalaPlatform.options_scope, scala_2_11_repl=':scala-repl_211')
    self.set_options_for_scope(ScalaPlatform.options_scope, scala_repl=':scala-repl')
    self.set_options_for_scope(ScalaPlatform.options_scope, scalastyle_2_10=':scalastyle')
    self.set_options_for_scope(ScalaPlatform.options_scope, scalastyle_2_11=':scalastyle_211')
    yield
def test_shader_project(self):
  """Test that the binary target at the ``shading_project`` can be built and run.

  Explicitly checks that the classes end up with the correct shaded fully qualified
  classnames.
  """
  shading_project = 'testprojects/src/java/org/pantsbuild/testproject/shading'
  self.assert_success(self.run_pants(['clean-all']))
  self.assert_success(self.run_pants(['binary', shading_project]))

  expected_classes = {
    # Explicitly excluded by a shading_exclude() rule.
    'org/pantsbuild/testproject/shadingdep/PleaseDoNotShadeMe.class',
    # Not matched by any rule, so stays the same.
    'org/pantsbuild/testproject/shading/Main.class',
    # Shaded with the target_id prefix, along with the default pants prefix.
    ('__shaded_by_pants__/org/pantsbuild/testproject/shadingdep/otherpackage/'
     'ShadeWithTargetId.class'),
    # Also shaded with the target_id prefix and default pants prefix, but for a
    # different target (so the target_id is different).
    '__shaded_by_pants__/org/pantsbuild/testproject/shading/ShadeSelf.class',
    # All these are shaded by the same shading_relocate_package(), which is recursive
    # by default.
    '__shaded_by_pants__/org/pantsbuild/testproject/shadingdep/subpackage/Subpackaged.class',
    '__shaded_by_pants__/org/pantsbuild/testproject/shadingdep/SomeClass.class',
    '__shaded_by_pants__/org/pantsbuild/testproject/shadingdep/Dependency.class',
    # Shaded by a shading_relocate() that completely renames the package and class name.
    'org/pantsbuild/testproject/foo/bar/MyNameIsDifferentNow.class',
  }

  path = os.path.join('dist', 'shading.jar')
  with subsystem_instance(DistributionLocator):
    execute_java = DistributionLocator.cached(minimum_version='1.6').execute_java
    # Both the unshaded and fully-relocated entry points must run successfully.
    for main_class in ('org.pantsbuild.testproject.shading.Main',
                       'org.pantsbuild.testproject.foo.bar.MyNameIsDifferentNow'):
      self.assertEquals(0, execute_java(classpath=[path], main=main_class))

  received_classes = set()
  with temporary_dir() as tempdir:
    ZIP.extract(path, tempdir, filter_func=lambda f: f.endswith('.class'))
    for root, _, files in os.walk(tempdir):
      received_classes.update(os.path.relpath(os.path.join(root, name), tempdir)
                              for name in files)

  self.assertEqual(expected_classes, received_classes)
def test_unknown_kwargs(self):
  """A target kwarg configured as ignored is dropped instead of set on the target."""
  ignore_foobar = {
    Target.UnknownArguments.options_scope: {
      'ignored': {'Target': ['foobar']},
    },
  }
  with subsystem_instance(Target.UnknownArguments, **ignore_foobar):
    target = self.make_target('foo:bar', Target, foobar='barfoo')
    self.assertFalse(hasattr(target, 'foobar'))
def distribution(files=None, executables=None, java_home=None):
  """Yield a temporary distribution root populated with the given files and executables.

  Executable contents are rendered against the effective java home (``java_home``
  joined under the root when given, otherwise the root itself).
  """
  with subsystem_instance(DistributionLocator):
    with temporary_dir() as dist_root:
      for relpath in maybe_list(files or ()):
        touch(os.path.join(dist_root, relpath))
      effective_java_home = os.path.join(dist_root, java_home) if java_home else dist_root
      for executable in maybe_list(executables or (), expected_type=EXE):
        exe_path = os.path.join(dist_root, executable.relpath)
        with safe_open(exe_path, 'w') as fp:
          fp.write(executable.contents(effective_java_home))
        chmod_plus_x(exe_path)
      yield dist_root
def visualize_build_request(build_root, goals, subjects):
  """Render the execution graph for a build request over the given goals and subjects."""
  with subsystem_instance(Native.Factory) as native_factory:
    scheduler = setup_json_scheduler(build_root, native_factory.create())
    request = scheduler.build_request(goals, subjects)
    engine = LocalSerialEngine(scheduler, Storage.create())
    try:
      # NB: Calls `reduce` independently of `execute`, in order to render a graph
      # before validating it.
      engine.reduce(request)
      visualize_execution_graph(scheduler, request)
    finally:
      engine.close()
def test_jvm_meets_min_and_max_distribution(self):
  """`run` succeeds when min/max version bounds exactly match the located JVM."""
  with subsystem_instance(DistributionLocator):
    distribution = DistributionLocator.locate()
    version = str(distribution.version)
    run = self.run_pants(
      ['run', 'testprojects/src/java/org/pantsbuild/testproject/printversion'],
      config={'jvm-distributions': {'minimum_version': version,
                                    'maximum_version': version}})
    self.assert_success(run)
    # The run should have used exactly the distribution we pinned.
    self.assertIn('java.home:{}'.format(distribution.home), run.stdout_data)
def test_parse_proxy_string(self):
  """_parse_proxy_string yields a (host, port) pair; a trailing slash is tolerated."""
  with subsystem_instance(IvySubsystem) as ivy_subsystem:
    cases = [
      ('http://example.com:1234', ('example.com', 1234)),
      ('http://secure-example.com:999', ('secure-example.com', 999)),
      # trailing slash is ok
      ('http://example.com:1234/', ('example.com', 1234)),
    ]
    for proxy_string, expected in cases:
      self.assertEquals(expected, ivy_subsystem._parse_proxy_string(proxy_string))
def do_fetch(self, import_path, version_override=None, github_api_responses=None,
             expected_fetch=None):
  """Drive GopkgInFetcher.fetch against canned github api responses.

  Simulates a series of github api calls to list refs for the given import paths,
  and optionally asserts an expected (url, rev) call to the underlying fetcher.
  """
  with subsystem_instance(GopkgInFetcher) as fetcher:
    fetcher._do_get = mock.Mock(spec=fetcher._do_get)
    fetcher._do_get.side_effect = github_api_responses
    fetcher._do_fetch = mock.Mock(spec=fetcher._do_fetch)

    with temporary_dir() as dest:
      fetcher.fetch(import_path, dest, rev=version_override)
      if expected_fetch:
        url, rev = expected_fetch
        fetcher._do_fetch.assert_called_once_with(url, dest, rev)
def test_validate_live(self):
  """validate() rejects unsatisfiable version bounds and accepts satisfiable ones."""
  java_bin_dir = os.path.dirname(self.JAVA)

  def validate(**kwargs):
    # Helper: build a Distribution rooted at the live java binary and validate it.
    Distribution(bin_path=java_bin_dir, **kwargs).validate()

  with self.assertRaises(Distribution.Error):
    validate(minimum_version='999.9.9')
  with self.assertRaises(Distribution.Error):
    validate(maximum_version='0.0.1')

  validate()
  validate(minimum_version='1.3.1')
  validate(maximum_version='999.999.999')
  validate(minimum_version='1.3.1', maximum_version='999.999.999')

  with subsystem_instance(DistributionLocator):
    DistributionLocator.locate(jdk=False)
def distribution(files=None, executables=None, java_home=None):
  """Yield a temporary distribution root with DIST_ROOT exported in the environment.

  DIST_ROOT points at ``java_home`` under the root when given, otherwise at the
  root itself.
  """
  with subsystem_instance(DistributionLocator):
    with temporary_dir() as dist_root:
      env_dist_root = os.path.join(dist_root, java_home) if java_home else dist_root
      with environment_as(DIST_ROOT=env_dist_root):
        for relpath in maybe_list(files or ()):
          touch(os.path.join(dist_root, relpath))
        for executable in maybe_list(executables or (), expected_type=EXE):
          exe_path = os.path.join(dist_root, executable.relpath)
          with safe_open(exe_path, 'w') as fp:
            fp.write(executable.contents or '')
          chmod_plus_x(exe_path)
        yield dist_root
def _test_jvm_does_not_meet_distribution_requirements(self, min_version_arg=None,
                                                      max_version_arg=None,
                                                      min_version_option=None,
                                                      max_version_option=None):
  """Assert cached() raises when option-constrained JVMs cannot satisfy the args."""
  option_values = {
    'jvm-distributions': {
      'minimum_version': min_version_option,
      'maximum_version': max_version_option,
    },
  }
  with subsystem_instance(DistributionLocator, **option_values) as dist_loader:
    with self.assertRaises(Distribution.Error):
      dist_loader.cached(min_version_arg, max_version_arg, jdk=False)
def custom_scala_platform_setup(self):
  """Configure a 'custom' ScalaPlatform backed by a local scalastyle jar target, then yield."""
  with subsystem_instance(ScalaPlatform):
    # We don't need to specify :scalac or :scala-repl since they are never being fetched.
    scalastyle_jar = JarDependency('org.scalastyle', 'scalastyle_2.10', '0.3.2')
    self.make_target('//:scalastyle', JarLibrary, jars=[scalastyle_jar])
    self.set_options_for_scope(ScalaPlatform.options_scope, version='custom')
    yield
def execute_junit_runner(self, content):
  """Compile the given java test source and execute the JUnitRun task against it.

  :param content: java source text for a ``FooTest.java`` test class.
  """
  # Create the temporary base test directory
  test_rel_path = "tests/java/org/pantsbuild/foo"
  test_abs_path = self.create_dir(test_rel_path)

  # Generate the temporary java test source code.
  test_java_file_rel_path = os.path.join(test_rel_path, "FooTest.java")
  test_java_file_abs_path = self.create_file(test_java_file_rel_path, content)

  # Create the temporary classes directory under work dir
  test_classes_abs_path = self.create_workdir_dir(test_rel_path)

  # Invoke ivy to resolve classpath for junit.
  classpath_file_abs_path = os.path.join(test_abs_path, "junit.classpath")
  with subsystem_instance(IvySubsystem) as ivy_subsystem:
    distribution = DistributionLocator.cached(jdk=True)
    ivy = Bootstrapper(ivy_subsystem=ivy_subsystem).ivy()
    # -cachepath writes the resolved classpath to the given file for reading below.
    ivy.execute(
        args=["-cachepath", classpath_file_abs_path,
              "-dependency", "junit", "junit-dep", "4.10"],
        executor=SubprocessExecutor(distribution=distribution),
    )

  with open(classpath_file_abs_path) as fp:
    classpath = fp.read()

  # Now directly invoking javac to compile the test java code into java class
  # so later we can inject the class into products mapping for JUnitRun to execute
  # the test on.
  javac = distribution.binary("javac")
  subprocess.check_call(
      [javac, "-d", test_classes_abs_path, "-cp", classpath, test_java_file_abs_path])

  # Create a java_tests target and a synthetic resource target.
  java_tests = self.create_library(test_rel_path, "java_tests", "foo_test",
                                   ["FooTest.java"])
  resources = self.make_target("some_resources", Resources)

  # Set the context with the two targets, one java_tests target and
  # one synthetic resources target.
  # The synthetic resources target is to make sure we won't regress
  # in the future with bug like https://github.com/pantsbuild/pants/issues/508. Note
  # in that bug, the resources target must be the first one in the list.
  context = self.context(target_roots=[resources, java_tests])

  # Before we run the task, we need to inject the "runtime_classpath" with
  # the compiled test java classes that JUnitRun will know which test
  # classes to execute. In a normal run, this "runtime_classpath" will be
  # populated by java compilation step.
  self.populate_runtime_classpath(context=context, classpath=[test_classes_abs_path])

  # Finally execute the task.
  self.execute(context)
def requirements(cls, tools):
  """Return True iff ANDROID_HOME holds every required tool and a suitable JVM exists."""
  sdk_home = os.environ.get('ANDROID_HOME')
  if not sdk_home:
    return False
  android_sdk = os.path.abspath(sdk_home)
  if not all(os.path.isfile(os.path.join(android_sdk, tool)) for tool in tools):
    return False
  try:
    with subsystem_instance(DistributionLocator) as locator:
      locator.cached(minimum_version=cls.JAVA_MIN, maximum_version=cls.JAVA_MAX)
  except Distribution.Error:
    return False
  return True
def test_jvm_meets_min_and_max_distribution(self):
  """Running with min/max bounds pinned to the located JVM's version succeeds."""
  with subsystem_instance(DistributionLocator):
    distribution = DistributionLocator.locate()
    pinned = str(distribution.version)
    config = {
      'jvm-distributions': {
        'minimum_version': pinned,
        'maximum_version': pinned,
      },
    }
    target_spec = 'testprojects/src/java/org/pantsbuild/testproject/printversion'
    run = self.run_pants(['run', target_spec], config=config)
    self.assert_success(run)
    self.assertIn('java.home:{}'.format(distribution.home), run.stdout_data)
def _test_jvm_does_not_meet_distribution_requirements(self, min_version_arg=None,
                                                      max_version_arg=None,
                                                      min_version_option=None,
                                                      max_version_option=None):
  """Assert that cached() raises Distribution.Error under these version constraints."""
  jvm_options = {
    'minimum_version': min_version_option,
    'maximum_version': max_version_option,
  }
  with subsystem_instance(DistributionLocator,
                          **{'jvm-distributions': jvm_options}) as dist_loader:
    with self.assertRaises(Distribution.Error):
      dist_loader.cached(min_version_arg, max_version_arg, jdk=False)
def fetcher(self, import_path):
  """Yield the fetcher selected for import_path with every path mapped to ArchiveFetcher."""
  fetcher_options = {
    'go-fetchers': {
      'mapping': {'.*': 'ArchiveFetcher'},
    },
    'go-archive-fetcher': {
      'matchers': {'.*': ('', None, 0)},
      'prefixes': ['foo', 'bar/baz'],
    },
  }
  with subsystem_instance(Fetchers, **fetcher_options) as fetchers:
    yield fetchers.get_fetcher(import_path)