def _do_test_caching(self, *compiles):
  """Tests that the given compiles within the same workspace produce the given artifact counts.

  :param compiles: Compile fixtures, each carrying `srcfiles` (filename -> content),
    `config` (extra pants config to merge in), and `artifact_count` (the expected number
    of cache entries after that compile runs).
  """
  with temporary_dir() as cache_dir, \
      self.temporary_workdir() as workdir, \
      temporary_dir(root_dir=get_buildroot()) as src_dir:

    def complete_config(config):
      # Copy the input config and add cache settings. A plain copy-then-assign works on
      # both Python 2 and 3, unlike the original `dict(config.items() + [...])`, which
      # fails on Python 3 where `dict.items()` returns a view that does not support `+`.
      merged = dict(config)
      merged['cache.compile.zinc'] = {'write_to': [cache_dir], 'read_from': [cache_dir]}
      return merged

    buildfile = os.path.join(src_dir, 'BUILD')
    spec = os.path.join(src_dir, ':cachetest')
    artifact_dir = os.path.join(cache_dir,
                                ZincCompile.stable_name(),
                                '{}.cachetest'.format(os.path.basename(src_dir)))

    for c in compiles:
      # Clear the src directory and recreate the files.
      safe_mkdir(src_dir, clean=True)
      self.create_file(buildfile,
                       """java_library(name='cachetest', sources=rglobs('*.java', '*.scala'))""")
      for name, content in c.srcfiles.items():
        self.create_file(os.path.join(src_dir, name), content)

      # Compile, and confirm that we have the right count of artifacts.
      self.run_compile(spec, complete_config(c.config), workdir)
      # `assertEqual` rather than the deprecated `assertEquals` alias.
      self.assertEqual(c.artifact_count, len(os.listdir(artifact_dir)))
def _do_test_caching(self, *compiles):
  """Tests that the given compiles within the same workspace produce the given artifact counts.

  Each element of `compiles` supplies `srcfiles` (filename -> content), `config` (extra
  pants config), and `artifact_count` (expected cache entries after the compile).
  """
  with temporary_dir() as cache_dir, \
      self.temporary_workdir() as workdir, \
      temporary_dir(root_dir=get_buildroot()) as src_dir:

    def complete_config(config):
      # Copy the input config and add cache settings. Copy-then-assign is portable:
      # `dict(config.items() + [...])` only works on Python 2, because Python 3's
      # `dict.items()` returns a view object that cannot be concatenated with a list.
      updated = dict(config)
      updated['cache.compile.zinc'] = {
          'write_to': [cache_dir],
          'read_from': [cache_dir],
      }
      return updated

    buildfile = os.path.join(src_dir, 'BUILD')
    spec = os.path.join(src_dir, ':cachetest')
    artifact_dir = os.path.join(
        cache_dir,
        ZincCompile.stable_name(),
        '{}.cachetest'.format(os.path.basename(src_dir)))

    for c in compiles:
      # Clear the src directory and recreate the files.
      safe_mkdir(src_dir, clean=True)
      self.create_file(
          buildfile,
          """java_library(name='cachetest', sources=rglobs('*.java', '*.scala'))"""
      )
      for name, content in c.srcfiles.items():
        self.create_file(os.path.join(src_dir, name), content)

      # Compile, and confirm that we have the right count of artifacts.
      self.run_compile(spec, complete_config(c.config), workdir)
      # `assertEqual` rather than the deprecated `assertEquals` alias.
      self.assertEqual(c.artifact_count, len(os.listdir(artifact_dir)))
def test_leave_one(self):
  """Ensure that max-old of 1 removes all but one files"""
  with temporary_dir() as cache_dir:
    # Seed the target's cache directory with five stale entries.
    artifact_dir = os.path.join(
        cache_dir, ZincCompile.stable_name(),
        'testprojects.src.java.org.pantsbuild.testproject.unicode.main.main')
    for stale_entry in ('old_cache_test1', 'old_cache_test2', 'old_cache_test3',
                        'old_cache_test4', 'old_cache_test5'):
      touch(os.path.join(artifact_dir, stale_entry))

    config = {'cache.compile.zinc': {'write_to': [cache_dir]}}
    compile_args = ['compile.zinc',
                    'testprojects/src/java/org/pantsbuild/testproject/unicode/main',
                    '--cache-max-entries-per-target=1']

    java6_run = self.run_pants(self.create_platform_args(6) + compile_args, config=config)
    self.assert_success(java6_run)
    # One artifact for java 6
    self.assertEqual(len(os.listdir(artifact_dir)), 1)

    # Rerun for java 7
    java7_run = self.run_pants(self.create_platform_args(7) + compile_args, config)
    self.assert_success(java7_run)
    # One artifact for java 7
    self.assertEqual(len(os.listdir(artifact_dir)), 1)
def test_java_compile_produces_different_artifact_depending_on_java_version(self):
  # Ensure that running java compile with java 6 and then java 7
  # produces two different artifacts.
  with temporary_dir() as cache_dir:
    target = 'testprojects/src/java/org/pantsbuild/testproject/unicode/main'
    artifact_dir = os.path.join(
        cache_dir, ZincCompile.stable_name(),
        'testprojects.src.java.org.pantsbuild.testproject.unicode.main.main')
    config = {'cache.compile.zinc': {'write_to': [cache_dir]}}

    # Each java platform version should contribute its own cache entry.
    for platform_version, expected_entries in ((6, 1), (7, 2)):
      pants_run = self.run_pants(
          self.create_platform_args(platform_version) + ['compile.java', target],
          config)
      self.assert_success(pants_run)
      self.assertEqual(len(os.listdir(artifact_dir)), expected_entries)
def test_java_compile_produces_different_artifact_depending_on_java_version(self):
  # Ensure that running java compile with java 6 and then java 7
  # produces two different artifacts.
  with temporary_dir() as cache_dir:
    artifact_dir = os.path.join(
        cache_dir, ZincCompile.stable_name(),
        'testprojects.src.java.org.pantsbuild.testproject.unicode.main.main')
    config = {'cache.compile.zinc': {'write_to': [cache_dir]}}

    def compile_with_platform(java_version):
      # Run a full compile against the unicode test project for the given platform.
      result = self.run_pants(
          self.create_platform_args(java_version) +
          ['compile', 'testprojects/src/java/org/pantsbuild/testproject/unicode/main'],
          config)
      self.assert_success(result)

    compile_with_platform(6)
    # One artifact for java 6
    self.assertEqual(len(os.listdir(artifact_dir)), 1)

    compile_with_platform(7)
    # One artifact for java 6 and one for 7
    self.assertEqual(len(os.listdir(artifact_dir)), 2)
def test_java_compile_with_different_resolved_jars_produce_different_artifacts(self):
  # Since unforced dependencies resolve to the highest version including transitive jars,
  # We want to ensure that running java compile with binary incompatible libraries will
  # produces two different artifacts.
  with temporary_dir(root_dir=self.workdir_root()) as workdir, temporary_dir() as cache_dir:
    path_prefix = "testprojects/src/java/org/pantsbuild/testproject/jarversionincompatibility"
    dotted_path = path_prefix.replace(os.path.sep, ".")
    artifact_dir = os.path.join(cache_dir,
                                ZincCompile.stable_name(),
                                "{}.jarversionincompatibility".format(dotted_path))
    config = {
        "cache.compile.zinc": {"write_to": [cache_dir], "read_from": [cache_dir]},
        "compile.zinc": {"incremental_caching": True},
    }

    def compile_target(target_name):
      # Compile one target of the jar-version-incompatibility project in the shared workdir.
      run = self.run_pants_with_workdir(
          ["compile", "{}:{}".format(path_prefix, target_name)], workdir, config)
      self.assert_success(run)

    compile_target("only-15-directly")
    # One artifact for guava 15
    self.assertEqual(len(os.listdir(artifact_dir)), 1)

    # Rerun for guava 16
    compile_target("alongside-16")
    # One artifact for guava 15 and one for guava 16
    self.assertEqual(len(os.listdir(artifact_dir)), 2)
def test_nocache(self):
  """Targets labeled no_cache must not be cached, while sibling targets must be."""
  with temporary_dir() as cache_dir:
    cache_root = os.path.join(cache_dir, ZincCompile.stable_name())
    bad_artifact_dir = os.path.join(
        cache_root, 'testprojects.src.java.org.pantsbuild.testproject.nocache.nocache')
    good_artifact_dir = os.path.join(
        cache_root, 'testprojects.src.java.org.pantsbuild.testproject.nocache.cache_me')
    config = {'cache.compile.zinc': {'write_to': [cache_dir]}}

    pants_run = self.run_pants(
        ['compile', 'testprojects/src/java/org/pantsbuild/testproject/nocache::'],
        config)
    self.assert_success(pants_run)

    # The nocache target is labeled with no_cache so it should not be written to the
    # artifact cache.
    self.assertFalse(os.path.exists(bad_artifact_dir))
    # But cache_me should be written.
    self.assertEqual(len(os.listdir(good_artifact_dir)), 1)
def test_buildcache_leave_none(self):
  """Ensure that max-old of zero removes all files

  This test should ensure that conditional doesn't change to the simpler test of if max_old
  since we need to handle zero as well.
  """
  with temporary_dir() as cache_dir:
    artifact_dir = os.path.join(
        cache_dir,
        ZincCompile.stable_name(),
        "testprojects.src.java.org.pantsbuild.testproject.unicode.main.main",
    )
    # Pre-populate the cache with five stale entries.
    for index in (1, 2, 3, 4, 5):
      touch(os.path.join(artifact_dir, "old_cache_test{}".format(index)))

    config = {"cache.compile.zinc": {"write_to": [cache_dir]}}
    compile_args = [
        "compile.zinc",
        "testprojects/src/java/org/pantsbuild/testproject/unicode/main",
        "--cache-max-entries-per-target=0",
    ]

    pants_run = self.run_pants(self.create_platform_args(6) + compile_args, config=config)
    self.assert_success(pants_run)
    # Cache cleanup disabled for 0
    self.assertEqual(len(os.listdir(artifact_dir)), 6)

    # Rerun for java 7
    pants_run = self.run_pants(self.create_platform_args(7) + compile_args, config)
    self.assert_success(pants_run)
    # Cache cleanup disabled for 0
    self.assertEqual(len(os.listdir(artifact_dir)), 7)
def test_java_compile_reads_resource_mapping(self):
  """Ensure that if an annotation processor produces a resource-mapping, the cached
  artifact contains that resource mapping."""
  with temporary_dir() as cache_dir:
    artifact_dir = os.path.join(
        cache_dir, ZincCompile.stable_name(),
        'testprojects.src.java.org.pantsbuild.testproject.annotation.main.main')
    config = {'cache.compile.zinc': {'write_to': [cache_dir]}}

    pants_run = self.run_pants(
        ['compile', 'testprojects/src/java/org/pantsbuild/testproject/annotation/main'],
        config)
    self.assert_success(pants_run)

    self.assertTrue(os.path.exists(artifact_dir))
    artifacts = os.listdir(artifact_dir)
    self.assertEqual(len(artifacts), 1)

    with temporary_dir() as extract_dir:
      TarArchiver.extract(os.path.join(artifact_dir, artifacts[0]), extract_dir)
      # Collect every extracted file path; a set comprehension replaces the original
      # manual accumulate loop.
      all_files = {os.path.join(dirpath, name)
                   for dirpath, _, files in safe_walk(extract_dir)
                   for name in files}

      # Locate the report file on the classpath.
      report_file_name = 'deprecation_report.txt'
      reports = [f for f in all_files if f.endswith(report_file_name)]
      # `assertEqual` rather than the deprecated `assertEquals` alias.
      self.assertEqual(
          1, len(reports),
          'Expected exactly one {} file; got: {}'.format(report_file_name, all_files))

      with open(reports[0]) as fp:
        annotated_classes = [line.rstrip() for line in fp.read().splitlines()]
      self.assertEqual(
          {'org.pantsbuild.testproject.annotation.main.Main',
           'org.pantsbuild.testproject.annotation.main.Main$TestInnerClass'},
          set(annotated_classes))
def test_java_compile_reads_resource_mapping(self):
  # Ensure that if an annotation processor produces a resource-mapping,
  # the artifact contains that resource mapping.
  with temporary_dir() as cache_dir:
    artifact_dir = os.path.join(
        cache_dir,
        ZincCompile.stable_name(),
        "testprojects.src.java.org.pantsbuild.testproject.annotation.main.main",
    )
    config = {"cache.compile.zinc": {"write_to": [cache_dir]}}

    pants_run = self.run_pants(
        ["compile", "testprojects/src/java/org/pantsbuild/testproject/annotation/main"],
        config)
    self.assert_success(pants_run)

    self.assertTrue(os.path.exists(artifact_dir))
    cached_entries = os.listdir(artifact_dir)
    self.assertEqual(len(cached_entries), 1)

    with temporary_dir() as extract_dir:
      TarArchiver.extract(os.path.join(artifact_dir, cached_entries[0]), extract_dir)
      # Walk the extraction directory and record every file path found.
      all_files = set()
      for root, _, filenames in safe_walk(extract_dir):
        for filename in filenames:
          all_files.add(os.path.join(root, filename))

      # Locate the report file on the classpath.
      report_file_name = "deprecation_report.txt"
      matching_reports = [p for p in all_files if p.endswith(report_file_name)]
      self.assertEquals(
          1,
          len(matching_reports),
          "Expected exactly one {} file; got: {}".format(report_file_name, all_files))

      with open(matching_reports[0]) as fp:
        annotated_classes = [line.rstrip() for line in fp.read().splitlines()]
      self.assertEquals(
          {
              "org.pantsbuild.testproject.annotation.main.Main",
              "org.pantsbuild.testproject.annotation.main.Main$TestInnerClass",
          },
          set(annotated_classes))
def test_buildcache_leave_none(self):
  """Ensure that max-old of zero removes all files

  This test should ensure that conditional doesn't change to the simpler test of if max_old
  since we need to handle zero as well.
  """
  with temporary_dir() as cache_dir:
    artifact_dir = os.path.join(
        cache_dir, ZincCompile.stable_name(),
        'testprojects.src.java.org.pantsbuild.testproject.unicode.main.main')
    # Seed five pre-existing cache entries.
    for n in range(1, 6):
      touch(os.path.join(artifact_dir, 'old_cache_test{}'.format(n)))

    config = {'cache.compile.zinc': {'write_to': [cache_dir]}}
    base_args = ['compile.zinc',
                 'testprojects/src/java/org/pantsbuild/testproject/unicode/main',
                 '--cache-max-entries-per-target=0']

    java6_run = self.run_pants(self.create_platform_args(6) + base_args, config=config)
    self.assert_success(java6_run)
    # Cache cleanup disabled for 0
    self.assertEqual(len(os.listdir(artifact_dir)), 6)

    # Rerun for java 7
    java7_run = self.run_pants(self.create_platform_args(7) + base_args, config)
    self.assert_success(java7_run)
    # Cache cleanup disabled for 0
    self.assertEqual(len(os.listdir(artifact_dir)), 7)
def test_incremental_caching(self):
  """Tests that with --no-incremental-caching, we don't write incremental artifacts."""
  with temporary_dir() as cache_dir, \
      self.temporary_workdir() as workdir, \
      temporary_dir(root_dir=get_buildroot()) as src_dir:

    def config(incremental_caching):
      # Always read/write the local cache; only the incremental-caching flag varies.
      return {
          'cache.compile.zinc': {'write_to': [cache_dir], 'read_from': [cache_dir]},
          'compile.zinc': {'incremental_caching': incremental_caching},
      }

    srcfile = os.path.join(src_dir, 'A.java')
    buildfile = os.path.join(src_dir, 'BUILD')
    spec = os.path.join(src_dir, ':cachetest')
    artifact_dir = os.path.join(cache_dir,
                                ZincCompile.stable_name(),
                                '{}.cachetest'.format(os.path.basename(src_dir)))

    self.create_file(srcfile, """class A {}""")
    self.create_file(buildfile, """java_library(name='cachetest', sources=['A.java'])""")

    # Confirm that the result is one cached artifact.
    self.run_compile(spec, config(False), workdir)
    clean_artifacts = os.listdir(artifact_dir)
    # `assertEqual` rather than the deprecated `assertEquals` alias.
    self.assertEqual(1, len(clean_artifacts))

    # Modify the file, and confirm that artifacts haven't changed.
    self.create_file(srcfile, """final class A {}""")
    self.run_compile(spec, config(False), workdir)
    self.assertEqual(clean_artifacts, os.listdir(artifact_dir))

    # Modify again, this time with incremental and confirm that we have a second artifact.
    self.create_file(srcfile, """public final class A {}""")
    self.run_compile(spec, config(True), workdir)
    self.assertEqual(2, len(os.listdir(artifact_dir)))
def test_java_compile_with_different_resolved_jars_produce_different_artifacts(self):
  # Since unforced dependencies resolve to the highest version including transitive jars,
  # We want to ensure that running java compile with binary incompatible libraries will
  # produces two different artifacts.
  with temporary_dir(root_dir=self.workdir_root()) as workdir, temporary_dir() as cache_dir:
    path_prefix = 'testprojects/src/java/org/pantsbuild/testproject/jarversionincompatibility'
    dotted_path = path_prefix.replace(os.path.sep, '.')
    artifact_dir = os.path.join(cache_dir,
                                ZincCompile.stable_name(),
                                '{}.jarversionincompatibility'.format(dotted_path))
    config = {
        'cache.compile.zinc': {
            'write_to': [cache_dir],
            'read_from': [cache_dir],
        },
        'compile.zinc': {
            'incremental_caching': True,
        },
    }

    guava_15_run = self.run_pants_with_workdir(
        ['compile.java', '{}:only-15-directly'.format(path_prefix)], workdir, config)
    self.assert_success(guava_15_run)
    # One artifact for guava 15
    self.assertEqual(len(os.listdir(artifact_dir)), 1)

    # Rerun for guava 16
    guava_16_run = self.run_pants_with_workdir(
        ['compile.java', u'{}:alongside-16'.format(path_prefix)], workdir, config)
    self.assert_success(guava_16_run)
    # One artifact for guava 15 and one for guava 16
    self.assertEqual(len(os.listdir(artifact_dir)), 2)
def test_java_compile_with_different_resolved_jars_produce_different_artifacts(self):
  # Since unforced dependencies resolve to the highest version including transitive jars,
  # We want to ensure that running java compile with binary incompatible libraries will
  # produces two different artifacts.
  with self.temporary_workdir() as workdir, temporary_dir() as cache_dir:
    path_prefix = 'testprojects/src/java/org/pantsbuild/testproject/jarversionincompatibility'
    dotted_path = path_prefix.replace(os.path.sep, '.')
    artifact_dir = os.path.join(
        cache_dir, ZincCompile.stable_name(),
        '{}.jarversionincompatibility'.format(dotted_path))
    config = {
        'cache.compile.zinc': {'write_to': [cache_dir], 'read_from': [cache_dir]},
        'compile.zinc': {'incremental_caching': True},
    }

    # Compile against guava 15 only.
    first_run = self.run_pants_with_workdir(
        ['compile', '{}:only-15-directly'.format(path_prefix)], workdir, config)
    self.assert_success(first_run)
    # One artifact for guava 15
    self.assertEqual(len(os.listdir(artifact_dir)), 1)

    # Rerun for guava 16
    second_run = self.run_pants_with_workdir(
        ['compile', u'{}:alongside-16'.format(path_prefix)], workdir, config)
    self.assert_success(second_run)
    # One artifact for guava 15 and one for guava 16
    self.assertEqual(len(os.listdir(artifact_dir)), 2)
def test_incremental_caching(self):
  """Tests that with --no-incremental-caching, we don't write incremental artifacts."""
  with temporary_dir() as cache_dir, \
      temporary_dir(root_dir=self.workdir_root()) as workdir, \
      temporary_dir(root_dir=get_buildroot()) as src_dir:
    tool_name = 'zinc'

    def config(incremental_caching):
      # Always read/write the local cache; only the incremental-caching flag varies.
      return {
          'cache.compile.{}'.format(tool_name): {'write_to': [cache_dir],
                                                 'read_from': [cache_dir]},
          'compile.{}'.format(tool_name): {'incremental_caching': incremental_caching},
      }

    srcfile = os.path.join(src_dir, 'A.java')
    buildfile = os.path.join(src_dir, 'BUILD')
    spec = os.path.join(src_dir, ':cachetest')
    artifact_dir = os.path.join(cache_dir,
                                ZincCompile.stable_name(),
                                '{}.cachetest'.format(os.path.basename(src_dir)))

    self.create_file(srcfile, """class A {}""")
    self.create_file(buildfile, """java_library(name='cachetest', sources=['A.java'])""")

    # Confirm that the result is one cached artifact.
    self.run_compile(spec, config(False), workdir, tool_name)
    clean_artifacts = os.listdir(artifact_dir)
    # `assertEqual` rather than the deprecated `assertEquals` alias.
    self.assertEqual(1, len(clean_artifacts))

    # Modify the file, and confirm that artifacts haven't changed.
    self.create_file(srcfile, """final class A {}""")
    self.run_compile(spec, config(False), workdir, tool_name)
    self.assertEqual(clean_artifacts, os.listdir(artifact_dir))

    # Modify again, this time with incremental and confirm that we have a second artifact.
    self.create_file(srcfile, """public final class A {}""")
    self.run_compile(spec, config(True), workdir, tool_name)
    self.assertEqual(2, len(os.listdir(artifact_dir)))