def main_filespecs():
  """Visualize a build request for the filespec arguments given on the command line."""
  build_root, goals, args = pop_build_root_and_goals(
    '[build root path] [filespecs]*', sys.argv[1:])
  # All filespec arguments are interpreted relative to the build root.
  path_globs = PathGlobs.create('', include=args, exclude=[])
  visualize_build_request(build_root, goals, path_globs)
def main_filespecs():
  """Visualize a build request for the filespec arguments given on the command line."""
  build_root, goals, args = pop_build_root_and_goals(
    '[build root path] [filespecs]*', sys.argv[1:])
  # All filespec arguments are interpreted relative to the build root.
  path_globs = PathGlobs.create('', include=args, exclude=[])
  visualize_build_request(build_root, goals, path_globs)
def main_filespecs():
  """Visualize a build request for the filespec arguments given on the command line."""
  build_root, goals, args = pop_build_root_and_goals(
    '[build root path] [filespecs]*', sys.argv[1:])
  # One PathGlobs per argument, each relative to the build root.
  path_globs = [PathGlobs.create('', globs=[arg]) for arg in args]
  visualize_build_request(build_root, goals, path_globs)
def parse_address_family(address_mapper, directory):
  """Given an AddressMapper and a directory, return an AddressFamily.

  The AddressFamily may be empty, but it will not be None.
  """
  build_patterns = tuple(join(directory.path, p) for p in address_mapper.build_patterns)
  globs = PathGlobs.create('',
                           include=build_patterns,
                           exclude=address_mapper.build_ignore_patterns)
  # Snapshot the matched build files, then fetch their contents by digest.
  snapshot = yield Get(Snapshot, PathGlobs, globs)
  files_content = yield Get(FilesContent, DirectoryDigest, snapshot.directory_digest)

  if not files_content:
    raise ResolveError('Directory "{}" does not contain build files.'.format(directory.path))
  address_maps = [
    AddressMap.parse(fc.path, fc.content, address_mapper.parser)
    for fc in files_content.dependencies
  ]
  yield AddressFamily.create(directory.path, address_maps)
def path_globs(self):
  """Creates a `PathGlobs` object for the paths matched by these Sources.

  This field may be projected to request the content of the files for this
  Sources object.
  """
  excludes = self.excludes or []
  return PathGlobs.create(self.spec_path, include=self.filespecs, exclude=excludes)
def calculate_package_search_path(jvm_package_name, source_roots):
  """Return PathGlobs to match directories where the given JVMPackageName might exist."""
  # 'com.example.foo' -> 'com/example/foo' (using the platform separator).
  package_dir = jvm_package_name.name.replace('.', os_sep)
  candidate_dirs = [os_path_join(root, package_dir) for root in source_roots.srcroots]
  return PathGlobs.create('', include=candidate_dirs)
def path_globs(self):
  """Creates a `PathGlobs` object for the paths matched by these Sources.

  This field may be projected to request the content of the files for this
  Sources object.
  """
  return PathGlobs.create(self.spec_path,
                          files=self.files,
                          globs=self.globs,
                          rglobs=self.rglobs,
                          zglobs=self.zglobs)
def path_globs(self):
  """Creates a `PathGlobs` object for the paths matched by these Sources.

  This field may be projected to request the content of the files for this
  Sources object.
  """
  return PathGlobs.create(self.spec_path,
                          files=self.files,
                          globs=self.globs,
                          rglobs=self.rglobs,
                          zglobs=self.zglobs)
def test_gather_snapshot_of_pathglobs(self):
  """Requesting a Snapshot for a glob should archive exactly the matching files."""
  project_tree = self.mk_example_fs_tree()
  scheduler = self.mk_scheduler(project_tree=project_tree)
  empty_step_context = StepContext(node_builder=None,
                                   project_tree=project_tree,
                                   node_states=[],
                                   inline_nodes=False)
  request = scheduler.execution_request(
    [Snapshot], [PathGlobs.create('', globs=['fs_test/a/b/*'])])
  LocalSerialEngine(scheduler).reduce(request)

  root_entries = scheduler.root_entries(request).items()
  self.assertEquals(1, len(root_entries))
  state = self.assertFirstEntryIsReturn(root_entries, scheduler)
  snapshot = state.value
  self.assert_archive_files(['fs_test/a/b/1.txt', 'fs_test/a/b/2'],
                            snapshot, empty_step_context)
def test_gather_snapshot_of_pathglobs(self):
  """Requesting a Snapshot for a glob should archive exactly the matching files."""
  project_tree = self.mk_example_fs_tree()
  scheduler = self.mk_scheduler(project_tree=project_tree,
                                tasks=create_snapshot_tasks(project_tree))
  snapshot_archive_root = os.path.join(project_tree.build_root, '.snapshots')
  request = scheduler.execution_request(
    [Snapshot], [PathGlobs.create('', globs=['fs_test/a/b/*'])])
  LocalSerialEngine(scheduler).reduce(request)

  root_entries = scheduler.root_entries(request).items()
  self.assertEquals(1, len(root_entries))
  state = self.assertFirstEntryIsReturn(root_entries, scheduler)
  snapshot = state.value
  self.assert_archive_files(['fs_test/a/b/1.txt', 'fs_test/a/b/2'],
                            snapshot, snapshot_archive_root)
def test_gather_snapshot_of_pathglobs(self):
  """Requesting a Snapshot for a glob should archive exactly the matching files."""
  project_tree = self.mk_example_fs_tree()
  scheduler = self.mk_scheduler(project_tree=project_tree,
                                tasks=create_snapshot_tasks(project_tree))
  snapshot_archive_root = os.path.join(project_tree.build_root, '.snapshots')
  request = scheduler.execution_request(
    [Snapshot], [PathGlobs.create('', globs=['fs_test/a/b/*'])])
  LocalSerialEngine(scheduler).reduce(request)

  root_entries = scheduler.root_entries(request).items()
  self.assertEquals(1, len(root_entries))
  state = self.assertFirstEntryIsReturn(root_entries, scheduler)
  snapshot = state.value
  self.assert_archive_files(['fs_test/a/b/1.txt', 'fs_test/a/b/2'],
                            snapshot, snapshot_archive_root)
def test_integration_concat_with_snapshots_stdout(self):
  """Catting the matched files through /bin/cat should concatenate their contents."""
  scheduler = self.mk_scheduler_in_example_fs(create_cat_stdout_rules())
  cat_exe_req = CatExecutionRequest(
    ShellCat(BinaryLocation('/bin/cat')),
    PathGlobs.create('', include=['fs_test/a/b/*']))
  # Guard against accidental changes to the request's repr.
  self.assertEqual(
    repr(cat_exe_req),
    "CatExecutionRequest(shell_cat=ShellCat(binary_location=BinaryLocation(bin_path='/bin/cat')), path_globs=PathGlobs(include=(u'fs_test/a/b/*',), exclude=(), glob_match_error_behavior=GlobMatchErrorBehavior(failure_behavior=u'ignore')))"
  )

  results = self.execute(scheduler, Concatted, cat_exe_req)
  self.assertEqual(1, len(results))
  concatted = results[0]
  self.assertEqual(Concatted(str('one\ntwo\n')), concatted)
def _spec_to_globs(address_mapper, specs):
  """Given a Specs object, return a PathGlobs object for the build files that it matches."""
  patterns = set()
  for spec in specs.dependencies:
    spec_type = type(spec)
    if spec_type is DescendantAddresses:
      # Match build files in the directory and recursively below it.
      patterns.update(join(spec.directory, '**', pattern)
                      for pattern in address_mapper.build_patterns)
    elif spec_type in (SiblingAddresses, SingleAddress):
      patterns.update(join(spec.directory, pattern)
                      for pattern in address_mapper.build_patterns)
    elif spec_type is AscendantAddresses:
      # Match build files in the directory and every ancestor up to the build root.
      patterns.update(join(ancestor, pattern)
                      for pattern in address_mapper.build_patterns
                      for ancestor in _recursive_dirname(spec.directory))
    else:
      raise ValueError('Unrecognized Spec type: {}'.format(spec))
  return PathGlobs.create('', include=patterns, exclude=address_mapper.build_ignore_patterns)
def test_javac_version_example(self):
  """Invoking javac via the process-execution API should succeed and report its name."""
  sources = PathGlobs.create('', include=['inputs/src/java/simple/Simple.java'])
  scheduler = self.mk_scheduler_in_example_fs([
    ExecuteProcess.create_in(product_type=ExecuteProcessRequest,
                             input_selectors=(Select(Javac),),
                             input_conversion=process_request_from_java_sources),
    SingletonRule(Javac, Javac()),
  ])
  req = scheduler.product_request(ExecuteProcessRequest, [sources])
  request = scheduler.execution_request([ExecuteProcessResult], req)
  root_entries = scheduler.execute(request).root_products

  self.assertEquals(1, len(root_entries))
  state = self.assertFirstEntryIsReturn(root_entries, scheduler, request)
  result = state.value
  self.assertEqual(0, result.exit_code)
  # javac writes its banner to stderr.
  self.assertIn('javac', result.stderr)
def test_failed_output_conversion_propagates_throw(self):
  """A failing output conversion should surface as a Throw root entry."""
  scheduler = self.mk_scheduler_in_example_fs([
    # subject to files / product of subject to files for snapshot.
    SnapshottedProcess.create(product_type=Concatted,
                              binary_type=ShellCatToOutFile,
                              input_selectors=(Select(Snapshot),),
                              input_conversion=file_list_to_args_for_cat_with_snapshot_subjects_and_output_file,
                              output_conversion=fail_process_result),
    SingletonRule(ShellCatToOutFile, ShellCatToOutFile()),
  ])

  request = scheduler.execution_request(
    [Concatted], [PathGlobs.create('', include=['fs_test/a/b/*'])])
  root_entries = scheduler.execute(request).root_products
  self.assertEquals(1, len(root_entries))
  self.assertFirstEntryIsThrow(root_entries, in_msg='Failed in output conversion!')
def spec_to_globs(address_mapper, specs):
  """Given a Spec object, return a PathGlobs object for the build files that it matches."""
  patterns = set()
  for spec in specs.dependencies:
    spec_type = type(spec)
    if spec_type is DescendantAddresses:
      # Match build files in the directory and recursively below it.
      patterns.update(join(spec.directory, '**', pattern)
                      for pattern in address_mapper.build_patterns)
    elif spec_type in (SiblingAddresses, SingleAddress):
      patterns.update(join(spec.directory, pattern)
                      for pattern in address_mapper.build_patterns)
    elif spec_type is AscendantAddresses:
      # Match build files in the directory and every ancestor up to the build root.
      patterns.update(join(ancestor, pattern)
                      for pattern in address_mapper.build_patterns
                      for ancestor in _recursive_dirname(spec.directory))
    else:
      raise ValueError('Unrecognized Spec type: {}'.format(spec))
  return PathGlobs.create('', include=patterns, exclude=address_mapper.build_ignore_patterns)
def spec_to_globs(address_mapper, spec):
  """Given a Spec object, return a PathGlobs object for the build files that it matches."""
  spec_type = type(spec)
  if spec_type is DescendantAddresses:
    # Match build files in the directory and recursively below it.
    directory = spec.directory
    patterns = [join('**', pattern) for pattern in address_mapper.build_patterns]
  elif spec_type in (SiblingAddresses, SingleAddress):
    directory = spec.directory
    patterns = address_mapper.build_patterns
  elif spec_type is AscendantAddresses:
    # Match build files in the directory and every ancestor up to the build root.
    directory = ''
    patterns = [join(ancestor, pattern)
                for pattern in address_mapper.build_patterns
                for ancestor in _recursive_dirname(spec.directory)]
  else:
    raise ValueError('Unrecognized Spec type: {}'.format(spec))
  return PathGlobs.create(directory, include=patterns, exclude=[])
def test_failed_output_conversion_propagates_throw(self):
  """A failing output conversion should surface as a Throw root entry."""
  scheduler = self.mk_scheduler_in_example_fs([
    # subject to files / product of subject to files for snapshot.
    SnapshottedProcess.create(product_type=Concatted,
                              binary_type=ShellCatToOutFile,
                              input_selectors=(Select(Snapshot),),
                              input_conversion=file_list_to_args_for_cat_with_snapshot_subjects_and_output_file,
                              output_conversion=fail_process_result),
    SingletonRule(ShellCatToOutFile, ShellCatToOutFile()),
  ])

  request = scheduler.execution_request(
    [Concatted], [PathGlobs.create('', include=['fs_test/a/b/*'])])
  root_entries = scheduler.execute(request).root_products
  self.assertEquals(1, len(root_entries))
  self.assertFirstEntryIsThrow(root_entries, in_msg='Failed in output conversion!')
def test_failed_command_propagates_throw(self):
  """A process exiting non-zero should surface as a Throw root entry."""
  scheduler = self.mk_scheduler_in_example_fs([
    # subject to files / product of subject to files for snapshot.
    SnapshottedProcess.create(product_type=Concatted,
                              binary_type=ShellFailCommand,
                              input_selectors=tuple(),
                              input_conversion=empty_process_request,
                              output_conversion=fail_process_result),
    SingletonRule(ShellFailCommand, ShellFailCommand()),
  ])

  request = scheduler.execution_request(
    [Concatted], [PathGlobs.create('', include=['fs_test/a/b/*'])])
  root_entries = scheduler.execute(request).root_products
  self.assertEquals(1, len(root_entries))
  self.assertFirstEntryIsThrow(
    root_entries,
    in_msg='Running ShellFailCommand failed with non-zero exit code: 1')
def spec_to_globs(address_mapper, spec):
  """Given a Spec object, return a PathGlobs object for the build files that it matches."""
  spec_type = type(spec)
  if spec_type is DescendantAddresses:
    # Match build files in the directory and recursively below it.
    directory = spec.directory
    patterns = [join('**', pattern) for pattern in address_mapper.build_patterns]
  elif spec_type in (SiblingAddresses, SingleAddress):
    directory = spec.directory
    patterns = address_mapper.build_patterns
  elif spec_type is AscendantAddresses:
    # Match build files in the directory and every ancestor up to the build root.
    directory = ''
    patterns = [join(ancestor, pattern)
                for pattern in address_mapper.build_patterns
                for ancestor in _recursive_dirname(spec.directory)]
  else:
    raise ValueError('Unrecognized Spec type: {}'.format(spec))
  return PathGlobs.create(directory, include=patterns, exclude=[])
def test_failed_command_propagates_throw(self):
  """A process exiting non-zero should surface as a Throw root entry."""
  scheduler = self.mk_scheduler_in_example_fs([
    # subject to files / product of subject to files for snapshot.
    SnapshottedProcess.create(product_type=Concatted,
                              binary_type=ShellFailCommand,
                              input_selectors=tuple(),
                              input_conversion=empty_process_request,
                              output_conversion=fail_process_result),
    SingletonRule(ShellFailCommand, ShellFailCommand()),
  ])

  request = scheduler.execution_request(
    [Concatted], [PathGlobs.create('', include=['fs_test/a/b/*'])])
  root_entries = scheduler.execute(request).root_products
  self.assertEquals(1, len(root_entries))
  self.assertFirstEntryIsThrow(
    root_entries,
    in_msg='Running ShellFailCommand failed with non-zero exit code: 1')
def parse_address_family(address_mapper, directory):
  """Given an AddressMapper and a directory, return an AddressFamily.

  The AddressFamily may be empty, but it will not be None.
  """
  build_patterns = tuple(join(directory.path, p) for p in address_mapper.build_patterns)
  globs = PathGlobs.create('',
                           include=build_patterns,
                           exclude=address_mapper.build_ignore_patterns)
  files_content = yield Get(FilesContent, PathGlobs, globs)

  if not files_content:
    raise ResolveError('Directory "{}" does not contain build files.'.format(directory.path))
  address_maps = [
    AddressMap.parse(fc.path, fc.content, address_mapper.parser)
    for fc in files_content.dependencies
  ]
  yield AddressFamily.create(directory.path, address_maps)
def test_javac_compilation_example_rust_success(self):
  """Compiling a valid Java source through process execution should exit zero."""
  sources = PathGlobs.create('', include=['scheduler_inputs/src/java/simple/Simple.java'])
  scheduler = self.mk_scheduler_in_example_fs([
    ExecuteProcess.create_in(
      product_type=ExecuteProcessRequest,
      input_selectors=(Select(Javac), Select(Snapshot), Select(JavaOutputDir)),
      input_conversion=process_request_java_args_from_java_sources),
    SingletonRule(JavaOutputDir, JavaOutputDir('testing')),
    SingletonRule(Javac, Javac()),
  ])
  req = scheduler.product_request(ExecuteProcessRequest, [sources])
  request = scheduler.execution_request([ExecuteProcessResult], req)
  root_entries = scheduler.execute(request).root_products

  self.assertEquals(1, len(root_entries))
  state = self.assertFirstEntryIsReturn(root_entries, scheduler, request)
  execution_result = state.value
  self.assertEqual(0, execution_result.exit_code)
def test_integration_concat_with_snapshot_subjects_test(self):
  """Catting snapshot-derived files into an output file should concatenate them."""
  scheduler = self.mk_scheduler_in_example_fs([
    # subject to files / product of subject to files for snapshot.
    SnapshottedProcess.create(product_type=Concatted,
                              binary_type=ShellCatToOutFile,
                              input_selectors=(Select(Snapshot),),
                              input_conversion=file_list_to_args_for_cat_with_snapshot_subjects_and_output_file,
                              output_conversion=process_result_to_concatted_from_outfile),
    SingletonRule(ShellCatToOutFile, ShellCatToOutFile()),
  ])

  request = scheduler.execution_request(
    [Concatted], [PathGlobs.create('', include=['fs_test/a/b/*'])])
  root_entries = scheduler.execute(request).root_products
  self.assertEquals(1, len(root_entries))
  state = self.assertFirstEntryIsReturn(root_entries, scheduler, request)
  concatted = state.value
  self.assertEqual(Concatted('one\ntwo\n'), concatted)
def test_integration_concat_with_snapshot_subjects_test(self):
  """Catting snapshot-derived files into an output file should concatenate them."""
  scheduler = self.mk_scheduler_in_example_fs([
    # subject to files / product of subject to files for snapshot.
    SnapshottedProcess.create(product_type=Concatted,
                              binary_type=ShellCatToOutFile,
                              input_selectors=(Select(Snapshot),),
                              input_conversion=file_list_to_args_for_cat_with_snapshot_subjects_and_output_file,
                              output_conversion=process_result_to_concatted_from_outfile),
    SingletonRule(ShellCatToOutFile, ShellCatToOutFile()),
  ])

  request = scheduler.execution_request(
    [Concatted], [PathGlobs.create('', include=['fs_test/a/b/*'])])
  root_entries = scheduler.execute(request).root_products
  self.assertEquals(1, len(root_entries))
  state = self.assertFirstEntryIsReturn(root_entries, scheduler)
  concatted = state.value
  self.assertEqual(Concatted('one\ntwo\n'), concatted)
def test_javac_compilation_example_success(self):
  """Compiling a valid Java source should yield exactly one JavacCompileResult."""
  scheduler = self.mk_scheduler_in_example_fs(create_javac_compile_rules())
  request = JavacCompileRequest(
    BinaryLocation('/usr/bin/javac'),
    JavacSources(PathGlobs.create(
      '', include=['scheduler_inputs/src/java/simple/Simple.java'])))
  # Guard against accidental changes to the request's repr.
  self.assertEqual(
    repr(request),
    "JavacCompileRequest(binary_location=BinaryLocation(bin_path='/usr/bin/javac'), javac_sources=JavacSources(path_globs=PathGlobs(include=(u'scheduler_inputs/src/java/simple/Simple.java',), exclude=())))"
  )

  results = self.execute(scheduler, JavacCompileResult, request)
  self.assertEqual(1, len(results))
def test_integration_concat_with_snapshot_subjects_test(self):
  """Catting snapshot-derived files into an output file should concatenate them."""
  scheduler = self.mk_scheduler_in_example_fs([
    # subject to files / product of subject to files for snapshot.
    SnapshottedProcess(product_type=Concatted,
                       binary_type=ShellCatToOutFile,
                       input_selectors=(Select(Files),),
                       input_conversion=file_list_to_args_for_cat_with_snapshot_subjects_and_output_file,
                       output_conversion=process_result_to_concatted_from_outfile),
    [ShellCatToOutFile, [], ShellCatToOutFile],
  ])
  request = scheduler.execution_request(
    [Concatted], [PathGlobs.create('', globs=['fs_test/a/b/*'])])
  LocalSerialEngine(scheduler).reduce(request)

  root_entries = scheduler.root_entries(request).items()
  self.assertEquals(1, len(root_entries))
  state = self.assertFirstEntryIsReturn(root_entries, scheduler)
  concatted = state.value
  self.assertEqual(Concatted('one\ntwo\n'), concatted)
def test_javac_compilation_example(self):
  """Compiling Simple.java should produce a ClasspathEntry containing Simple.class."""
  sources = PathGlobs.create('', include=['scheduler_inputs/src/java/simple/Simple.java'])
  scheduler = self.mk_scheduler_in_example_fs([
    SnapshottedProcess.create(ClasspathEntry,
                              Javac,
                              (Select(Snapshot),
                               SelectLiteral(JavaOutputDir('build'), JavaOutputDir)),
                              java_sources_to_javac_args,
                              process_result_to_classpath_entry),
    [Javac, [], Javac],
  ])
  request = scheduler.execution_request([ClasspathEntry], [sources])
  LocalSerialEngine(scheduler).reduce(request)

  root_entries = scheduler.root_entries(request).items()
  self.assertEquals(1, len(root_entries))
  state = self.assertFirstEntryIsReturn(root_entries, scheduler)
  classpath_entry = state.value
  self.assertIsInstance(classpath_entry, ClasspathEntry)
  # The compiled class should exist on disk under the classpath entry.
  self.assertTrue(
    os.path.exists(os.path.join(classpath_entry.path, 'simple', 'Simple.class')))
def test_javac_compilation_example(self):
  """Compiling Simple.java should produce a ClasspathEntry containing Simple.class."""
  sources = PathGlobs.create('', include=['scheduler_inputs/src/java/simple/Simple.java'])
  scheduler = self.mk_scheduler_in_example_fs([
    SnapshottedProcess.create(ClasspathEntry,
                              Javac,
                              (Select(Snapshot), Select(JavaOutputDir)),
                              java_sources_to_javac_args,
                              process_result_to_classpath_entry),
    SingletonRule(JavaOutputDir, JavaOutputDir('build')),
    SingletonRule(Javac, Javac()),
  ])
  request = scheduler.execution_request([ClasspathEntry], [sources])
  root_entries = scheduler.execute(request).root_products

  self.assertEquals(1, len(root_entries))
  state = self.assertFirstEntryIsReturn(root_entries, scheduler)
  classpath_entry = state.value
  self.assertIsInstance(classpath_entry, ClasspathEntry)
  # The compiled class should exist on disk under the classpath entry.
  self.assertTrue(
    os.path.exists(os.path.join(classpath_entry.path, 'simple', 'Simple.class')))
def test_javac_compilation_example_failure(self):
  """Compiling a broken Java source should raise ProcessExecutionFailure."""
  scheduler = self.mk_scheduler_in_example_fs(create_javac_compile_rules())
  request = JavacCompileRequest(
    BinaryLocation('/usr/bin/javac'),
    JavacSources(PathGlobs.create(
      '', include=['scheduler_inputs/src/java/simple/Broken.java'])))
  # Guard against accidental changes to the request's repr.
  self.assertEqual(
    repr(request),
    "JavacCompileRequest(binary_location=BinaryLocation(bin_path='/usr/bin/javac'), javac_sources=JavacSources(path_globs=PathGlobs(include=(u'scheduler_inputs/src/java/simple/Broken.java',), exclude=())))"
  )

  with self.assertRaises(ProcessExecutionFailure) as cm:
    self.execute_raising_throw(scheduler, JavacCompileResult, request)
  e = cm.exception
  self.assertEqual(1, e.exit_code)
  self.assertIn("NOT VALID JAVA", e.stderr)
def test_javac_compilation_example(self):
  """Compiling Simple.java should produce a ClasspathEntry containing Simple.class."""
  sources = PathGlobs.create('', files=['scheduler_inputs/src/java/simple/Simple.java'])
  scheduler = self.mk_scheduler_in_example_fs([
    SnapshottedProcess.create(ClasspathEntry,
                              Javac,
                              (Select(Files), Select(Snapshot),
                               SelectLiteral(JavaOutputDir('build'), JavaOutputDir)),
                              java_sources_to_javac_args,
                              process_result_to_classpath_entry),
    [Javac, [], Javac],
  ])
  request = scheduler.execution_request([ClasspathEntry], [sources])
  LocalSerialEngine(scheduler).reduce(request)

  root_entries = scheduler.root_entries(request).items()
  self.assertEquals(1, len(root_entries))
  state = self.assertFirstEntryIsReturn(root_entries, scheduler)
  classpath_entry = state.value
  self.assertIsInstance(classpath_entry, ClasspathEntry)
  # The compiled class should exist on disk under the classpath entry.
  self.assertTrue(
    os.path.exists(os.path.join(classpath_entry.path, 'simple', 'Simple.class')))
def specs(filespecs):
  """Coerce `filespecs` into a PathGlobs, passing an existing PathGlobs through unchanged."""
  if isinstance(filespecs, PathGlobs):
    return filespecs
  return PathGlobs.create('', include=filespecs)
def specs(relative_to, *filespecs):
  """Create a PathGlobs matching the given filespecs relative to `relative_to`."""
  return PathGlobs.create(relative_to, include=filespecs)
def buildfile_path_globs_for_dir(address_mapper, directory):
  """Return BuildFileGlobs matching the mapper's build patterns within `directory`."""
  return BuildFileGlobs(PathGlobs.create(directory.path,
                                         include=address_mapper.build_patterns,
                                         exclude=()))
def specs(relative_to, *filespecs):
  """Create a PathGlobs matching the given filespecs relative to `relative_to`."""
  return PathGlobs.create(relative_to, include=filespecs)
def to_path_globs(self, relpath):
  """Return a PathGlobs for these patterns' included and excluded file globs.

  Both glob sets are interpreted relative to `relpath`.
  """
  return PathGlobs.create(relpath, self._file_globs, self._excluded_file_globs)
def descendant_addresses_to_globs(descendant_addresses):
  """Given a DescendantAddresses object, return a PathGlobs object for matching directories."""
  # Match the directory itself, its direct children, and all deeper descendants.
  return PathGlobs.create(Dirs, descendant_addresses.directory, globs=['.', '*', '**/*'])
def buildfile_path_globs_for_dir(address_mapper, directory):
  """Return BuildFileGlobs for build files in `directory`, honoring ignore patterns."""
  build_patterns = tuple(join(directory.path, p) for p in address_mapper.build_patterns)
  return BuildFileGlobs(PathGlobs.create('',
                                         include=build_patterns,
                                         exclude=address_mapper.build_ignore_patterns))
def path_globs(self):
  """Creates a `PathGlobs` object for the paths matched by these Sources.

  This field may be projected to request the content of the files for this
  Sources object.
  """
  excludes = self.excludes or []
  return PathGlobs.create(self.spec_path, include=self.filespecs, exclude=excludes)
def calculate_package_search_path(jvm_package_name, source_roots):
  """Return PathGlobs to match directories where the given JVMPackageName might exist."""
  # 'com.example.foo' -> 'com/example/foo' (using the platform separator).
  package_dir = jvm_package_name.name.replace('.', os_sep)
  candidate_dirs = [os_path_join(root, package_dir) for root in source_roots.srcroots]
  return PathGlobs.create('', include=candidate_dirs)
def descendant_addresses_to_globs(descendant_addresses):
  """Given a DescendantAddresses object, return a PathGlobs object for matching directories."""
  # Match the directory itself, its direct children, and all deeper descendants.
  return PathGlobs.create(Dirs, descendant_addresses.directory, globs=['.', '*', '**/*'])
def main_filespecs():
  """Visualize a build request for the filespec arguments given on the command line."""
  build_root, goals, args = pop_build_root_and_goals(
    '[build root path] [filespecs]*', sys.argv[1:])
  # One PathGlobs per argument, each relative to the build root.
  path_globs = [PathGlobs.create('', globs=[arg]) for arg in args]
  visualize_build_request(build_root, goals, path_globs)
def buildfile_path_globs_for_dir(address_mapper, directory):
  """Return BuildFileGlobs matching the mapper's build patterns within `directory`."""
  return BuildFileGlobs(PathGlobs.create(directory.path,
                                         include=address_mapper.build_patterns,
                                         exclude=()))