def test_build_file_forms(self):
    """Verify BuildFileAddress spec rendering for nested and root-level BUILD files."""
    with self.workspace('a/b/c/BUILD') as root_dir:
        build_file = BuildFile(FileSystemProjectTree(root_dir), relpath='a/b/c/BUILD')
        # With no explicit target_name, the address defaults to the directory basename ('c').
        self.assert_address('a/b/c', 'c', BuildFileAddress(build_file))
        self.assert_address('a/b/c', 'foo', BuildFileAddress(build_file, target_name='foo'))
        self.assertEqual('a/b/c:foo', BuildFileAddress(build_file, target_name='foo').spec)

    with self.workspace('BUILD') as root_dir:
        build_file = BuildFile(FileSystemProjectTree(root_dir), relpath='BUILD')
        self.assert_address('', 'foo', BuildFileAddress(build_file, target_name='foo'))
        # Addresses at the build root render with the '//' prefix.
        self.assertEqual('//:foo', BuildFileAddress(build_file, target_name='foo').spec)
def setup_legacy_graph(path_ignore_patterns):
    """Construct and return the components necessary for LegacyBuildGraph construction.

    :param list path_ignore_patterns: A list of path ignore patterns for FileSystemProjectTree,
                                      usually taken from the `--pants-ignore` global option.
    :returns: A LegacyGraphHelper wrapping (scheduler, engine, symbol_table_cls,
              LegacyBuildGraph) — note: a helper object, not a bare tuple.
    """
    build_root = get_buildroot()
    project_tree = FileSystemProjectTree(build_root, path_ignore_patterns)
    symbol_table_cls = LegacySymbolTable

    # Register "literal" subjects required for these tasks.
    # TODO: Replace with `Subsystems`.
    address_mapper = AddressMapper(symbol_table_cls=symbol_table_cls,
                                   parser_cls=LegacyPythonCallbacksParser)

    # Create a Scheduler containing graph and filesystem tasks, with no installed goals. The
    # LegacyBuildGraph will explicitly request the products it needs.
    tasks = (
        create_legacy_graph_tasks() +
        create_fs_tasks() +
        create_graph_tasks(address_mapper, symbol_table_cls)
    )
    scheduler = LocalScheduler(dict(), tasks, project_tree)
    engine = LocalSerialEngine(scheduler, Storage.create(debug=False))
    return LegacyGraphHelper(scheduler, engine, symbol_table_cls, LegacyBuildGraph)
def setup(options=None):
    """Bootstrap options (if not provided), parse target specs, and build a scheduler.

    :param options: Pre-parsed options; when falsy, options are bootstrapped here.
    :returns: A tuple of (scheduler, storage, options, spec_roots, symbol_table_cls).
    """
    if not options:
        options, _ = OptionsInitializer(OptionsBootstrapper()).setup()
    build_root = get_buildroot()
    cmd_line_spec_parser = CmdLineSpecParser(build_root)
    spec_roots = [cmd_line_spec_parser.parse_spec(spec) for spec in options.target_specs]
    storage = Storage.create(debug=False)
    project_tree = FileSystemProjectTree(build_root)
    symbol_table_cls = LegacyTable

    # Register "literal" subjects required for these tasks.
    # TODO: Replace with `Subsystems`.
    address_mapper = AddressMapper(symbol_table_cls=symbol_table_cls,
                                   parser_cls=LegacyPythonCallbacksParser)

    # Create a Scheduler containing graph and filesystem tasks, with no installed goals. The ExpGraph
    # will explicitly request the products it needs.
    tasks = (
        create_legacy_graph_tasks() +
        create_fs_tasks() +
        create_graph_tasks(address_mapper, symbol_table_cls)
    )

    return (
        LocalScheduler(dict(), tasks, storage, project_tree),
        storage,
        options,
        spec_roots,
        symbol_table_cls
    )
def setup_json_scheduler(build_root, native):
    """Return a build graph and scheduler configured for BLD.json files under the given build root.

    :rtype :class:`pants.engine.scheduler.LocalScheduler`
    """
    symbol_table = ExampleTable()

    # Register "literal" subjects required for these tasks.
    # TODO: Replace with `Subsystems`.
    address_mapper = AddressMapper(build_patterns=('BLD.json',),
                                   parser=JsonParser(symbol_table))

    work_dir = os_path_join(build_root, '.pants.d')
    project_tree = FileSystemProjectTree(build_root)

    # Maps command-line goal names to the engine product each one requests.
    goals = {
        'compile': Classpath,
        # TODO: to allow for running resolve alone, should split out a distinct 'IvyReport' product.
        'resolve': Classpath,
        'list': BuildFileAddresses,
        GenGoal.name(): GenGoal,
        'ls': Snapshot,
        'cat': FilesContent,
    }
    tasks = [
        # Codegen
        GenGoal.rule(),
        gen_apache_java_thrift,
        gen_apache_python_thrift,
        gen_scrooge_scala_thrift,
        gen_scrooge_java_thrift,
        SingletonRule(Scrooge, Scrooge(Address.parse('src/scala/scrooge')))
    ] + [
        # scala dependency inference
        reify_scala_sources,
        select_package_address,
        calculate_package_search_path,
        SingletonRule(SourceRoots, SourceRoots(('src/java','src/scala'))),
    ] + [
        # Remote dependency resolution
        ivy_resolve,
        select_rev,
    ] + [
        # Compilers
        isolate_resources,
        write_name_file,
        javac,
        scalac,
    ] + (
        create_graph_rules(address_mapper, symbol_table)
    ) + (
        create_fs_rules()
    )

    return LocalScheduler(work_dir, goals, tasks, project_tree, native)
def _get_project_tree(self, build_file_rev, pants_ignore):
    """Creates the project tree for build files for use in a given pants run.

    When a build_file_rev is given, BUILD files are read from that SCM revision;
    otherwise they are read directly from the local filesystem.
    """
    if not build_file_rev:
        return FileSystemProjectTree(self._root_dir, pants_ignore)
    return ScmProjectTree(self._root_dir, get_scm(), build_file_rev, pants_ignore)
def to_url(m):
    """Map a regex match of a path-like token to a reporting-server URL, or None.

    http(s) URLs pass through untouched; absolute paths are relativized to the
    buildroot; bare `dir:target` references are resolved to their BUILD file.
    Returns None when the token does not resolve to a file under the buildroot.
    """
    if m.group(1):
        return m.group(0)  # It's an http(s) url.
    path = m.group(0)

    if path.startswith('/'):
        path = os.path.relpath(path, buildroot)
    elif path.startswith('..'):
        # FIX: a relative path climbing out of the buildroot is definitely not a BUILD
        # file; without this guard we could emit /browse/ links to files outside the root.
        return None
    else:
        # See if it's a reference to a target in a BUILD file.
        parts = path.split(':')
        if len(parts) == 2:
            putative_dir = parts[0]
        else:
            putative_dir = path
        if os.path.isdir(os.path.join(buildroot, putative_dir)):
            build_files = list(BuildFile.get_build_files_family(
                FileSystemProjectTree(buildroot), putative_dir))
            if build_files:
                path = build_files[0].relpath
            else:
                return None

    if os.path.exists(os.path.join(buildroot, path)):
        # The reporting server serves file content at /browse/<path_from_buildroot>.
        return '/browse/{}'.format(path)
    else:
        return None
def test_context_aware_object_factories(self):
    """Verify that context-aware object factories register the expected target proxies."""
    # NOTE(review): the BUILD-file body below was reconstructed from a whitespace-mangled
    # source; line breaks are best-effort.
    contents = dedent("""
        create_java_libraries(base_name="create-java-libraries",
                              provides_java_name="test-java",
                              provides_scala_name="test-scala")
        make_lib("com.foo.test", "does_not_exists", "1.0")
        path_util("baz")
        """)
    self.create_file('3rdparty/BUILD', contents)

    build_file = BuildFile(FileSystemProjectTree(self.build_root), '3rdparty/BUILD')
    address_map = self.build_file_parser.parse_build_file(build_file)
    registered_proxies = set(address_map.values())

    self.assertEqual(len(registered_proxies), 3)
    targets_created = {}
    for target_proxy in registered_proxies:
        targets_created[target_proxy.addressed_name] = target_proxy.addressed_type

    # create_java_libraries fans out into one -java and one -scala target.
    self.assertEqual({'does_not_exists',
                      'create-java-libraries-scala',
                      'create-java-libraries-java'},
                     set(targets_created.keys()))
    self.assertEqual(targets_created['does_not_exists'], self.JarLibrary)
    self.assertEqual(targets_created['create-java-libraries-java'], self.JavaLibrary)
    self.assertEqual(targets_created['create-java-libraries-scala'], self.ScalaLibrary)

    # path_util records the paths it was handed.
    self.assertEqual({'3rdparty/baz'}, self._paths)
def test_sibling_build_files(self):
    """Targets in sibling BUILD files (BUILD, BUILD.foo, BUILD.bar) in one dir share a family."""
    self.add_to_build_file('BUILD', dedent(
        """
        fake(name="base",
             dependencies=[
               ':foo',
             ])
        """))
    self.add_to_build_file('BUILD.foo', dedent(
        """
        fake(name="foo",
             dependencies=[
               ':bat',
             ])
        """))
    self.add_to_build_file('./BUILD.bar', dedent(
        """
        fake(name="bat")
        """))

    bar_build_file = self.create_buildfile('BUILD.bar')
    base_build_file = self.create_buildfile('BUILD')
    foo_build_file = self.create_buildfile('BUILD.foo')

    address_map = self.build_file_parser.address_map_from_build_files(
        BuildFile.get_build_files_family(FileSystemProjectTree(self.build_root), "."))
    addresses = address_map.keys()

    # Each address records which sibling BUILD file defined it...
    self.assertEqual({bar_build_file.relpath, base_build_file.relpath, foo_build_file.relpath},
                     {address.rel_path for address in addresses})
    # ...but all specs resolve against the same (root) directory.
    self.assertEqual({'//:base', '//:foo', '//:bat'},
                     {address.spec for address in addresses})
def to_url(m):
    """Map a regex match of a path-like token to a reporting-server URL, or None.

    http(s) URLs pass through untouched; absolute paths are relativized to the
    buildroot; relative paths escaping the buildroot are rejected; bare
    `dir:target` references are resolved to their BUILD file.
    """
    if m.group(1):
        return m.group(0)  # It's an http(s) url.
    path = m.group(0)

    if path.startswith("/"):
        path = os.path.relpath(path, buildroot)
    elif path.startswith(".."):
        # The path is not located inside the buildroot, so it's definitely not a BUILD file.
        return None
    else:
        # The path is located in the buildroot: see if it's a reference to a target in a BUILD file.
        parts = path.split(":")
        if len(parts) == 2:
            putative_dir = parts[0]
        else:
            putative_dir = path
        if os.path.isdir(os.path.join(buildroot, putative_dir)):
            build_files = list(BuildFile.get_build_files_family(
                FileSystemProjectTree(buildroot), putative_dir))
            if build_files:
                path = build_files[0].relpath
            else:
                return None

    if os.path.exists(os.path.join(buildroot, path)):
        # The reporting server serves file content at /browse/<path_from_buildroot>.
        return "/browse/{}".format(path)
    else:
        return None
def create_fs_tasks(buildroot):
    """Creates tasks that consume the filesystem.

    These tasks are all considered "native", and should have their outputs re-validated
    for every build. TODO: They should likely get their own ProductGraph.Node type for
    efficiency/invalidation.

    :param buildroot: Path to the build root; used to construct the FileSystemProjectTree
                      that backs all of these filesystem operations.
    """
    fspt = FileSystemProjectTree(buildroot)
    return [
        # Unfiltered requests for subdirectories.
        (RecursiveSubDirectories,
         [Select(Path),
          SelectDependencies(RecursiveSubDirectories, DirectoryListing, field='directories')],
         recursive_subdirectories),
    ] + [
        # "Native" operations.
        (Paths, [SelectDependencies(Paths, PathGlobs)], merge_paths),
        (Paths, [SelectLiteral(fspt, ProjectTree), Select(PathGlob)], file_exists),
        (FilesContent, [SelectLiteral(fspt, ProjectTree), Select(Paths)], files_content),
        (DirectoryListing, [SelectLiteral(fspt, ProjectTree), Select(Path)], list_directory),
    ]
def test_sibling_build_files_duplicates(self):
    # This workspace is malformed, you can't shadow a name in a sibling BUILD file
    self.add_to_build_file('BUILD', dedent(
        """
        fake(name="base",
             dependencies=[
               ':foo',
             ])
        """))
    self.add_to_build_file('BUILD.foo', dedent(
        """
        fake(name="foo",
             dependencies=[
               ':bat',
             ])
        """))
    # BUILD.bar re-declares "base", conflicting with the sibling BUILD file above.
    self.add_to_build_file('./BUILD.bar', dedent(
        """
        fake(name="base")
        """))

    with self.assertRaises(BuildFileParser.SiblingConflictException):
        self.build_file_parser.address_map_from_build_files(
            BuildFile.get_build_files_family(FileSystemProjectTree(self.build_root), '.'))
def all_roots(self):
    """Return all known source roots.

    Returns a generator over (source root, list of langs, category) triples.

    Note: Requires a directory walk to match actual directories against patterns.
    However we don't descend into source roots, once found, so this should be fast in
    practice.
    Note: Does not follow symlinks.
    """
    project_tree = FileSystemProjectTree(get_buildroot(), self._options.pants_ignore)

    # Fixed roots are yielded only if they exist on disk, but are remembered either way
    # so the pattern walk below does not double-report them.
    fixed_roots = set()
    for root, langs, category in self._trie.fixed():
        if project_tree.exists(root):
            yield self._source_root_factory.create(root, langs, category)
        fixed_roots.add(root)

    for relpath, dirnames, _ in project_tree.walk("", topdown=True):
        match = self._trie.find(relpath)
        if match:
            # NOTE(review): this is a plain string-prefix test, so 'src' would also match a
            # fixed root named 'src2' — presumably acceptable here; confirm if exactness matters.
            if not any(fixed_root.startswith(relpath) for fixed_root in fixed_roots):
                yield match  # Found a source root not a prefix of any fixed roots.
            del dirnames[:]  # Don't continue to walk into it.
def get_project_tree(options):
    """Creates the project tree for build files for use in a given pants run.

    An SCM-backed tree is used when a build_file_rev is set; otherwise BUILD files
    are read from the local filesystem.
    """
    ignore_patterns = options.pants_ignore or []
    if not options.build_file_rev:
        return FileSystemProjectTree(get_buildroot(), ignore_patterns)
    return ScmProjectTree(get_buildroot(), get_scm(), options.build_file_rev, ignore_patterns)
def setUp(self): self.base_dir = tempfile.mkdtemp() # Seed a BUILD outside the build root that should not be detected touch(os.path.join(self.base_dir, "BUILD")) self.root_dir = os.path.join(self.base_dir, "root") self.touch("grandparent/parent/BUILD") self.touch("grandparent/parent/BUILD.twitter") # Tricky! This is a directory self.makedirs("grandparent/parent/BUILD.dir") self.makedirs("grandparent/BUILD") self.touch("BUILD") self.touch("BUILD.twitter") self.touch("grandparent/parent/child1/BUILD") self.touch("grandparent/parent/child1/BUILD.twitter") self.touch("grandparent/parent/child2/child3/BUILD") self.makedirs("grandparent/parent/child2/BUILD") self.makedirs("grandparent/parent/child4") self.touch("grandparent/parent/child5/BUILD") self.makedirs("path-that-does-exist") self.touch("path-that-does-exist/BUILD.invalid.suffix") # This exercises https://github.com/pantsbuild/pants/issues/1742 # Prior to that fix, BUILD directories were handled, but not if there was a valid BUILD file # sibling. self.makedirs("issue_1742/BUILD") self.touch("issue_1742/BUILD.sibling") self._project_tree = FileSystemProjectTree(self.root_dir) self.buildfile = self.create_buildfile("grandparent/parent/BUILD")
def setUp(self):
    """Scan the fixture project tree and verify it matches the expected exclusion set."""
    super(FilemapIntegrationTest, self).setUp()

    # BUILD files and dotfiles are ignored by the project tree itself.
    project_tree = FileSystemProjectTree(os.path.abspath(self.PATH_PREFIX), ['BUILD', '.*'])

    scan_set = set()
    for root, _dirs, files in project_tree.walk(''):
        scan_set.update({os.path.join(root, f) for f in files})

    # FIX: assertEquals is a deprecated alias of assertEqual (removed in modern unittest).
    self.assertEqual(scan_set, self.TEST_EXCLUDE_FILES)
def setup_legacy_graph(pants_ignore_patterns,
                       workdir,
                       build_root=None,
                       native=None,
                       symbol_table_cls=None,
                       build_ignore_patterns=None,
                       exclude_target_regexps=None,
                       subproject_roots=None):
    """Construct and return the components necessary for LegacyBuildGraph construction.

    :param list pants_ignore_patterns: A list of path ignore patterns for FileSystemProjectTree,
                                       usually taken from the '--pants-ignore' global option.
    :param str workdir: The pants workdir.
    :param str build_root: A path to be used as the build root. If None, then default is used.
    :param Native native: An instance of the native-engine subsystem.
    :param SymbolTable symbol_table_cls: A SymbolTable class to use for build file parsing, or None
                                         to use the default.
    :param list build_ignore_patterns: A list of paths ignore patterns used when searching for BUILD
                                       files, usually taken from the '--build-ignore' global option.
    :param list exclude_target_regexps: A list of regular expressions for excluding targets.
    :param list subproject_roots: Paths that correspond with embedded build roots under
                                  the current build root.
    :returns: A LegacyGraphHelper wrapping (scheduler, engine, symbol_table_cls,
              change_calculator) — note: a helper object, not a bare tuple.
    """
    build_root = build_root or get_buildroot()
    scm = get_scm()
    symbol_table_cls = symbol_table_cls or LegacySymbolTable

    project_tree = FileSystemProjectTree(build_root, pants_ignore_patterns)

    # Register "literal" subjects required for these tasks.
    # TODO: Replace with `Subsystems`.
    address_mapper = AddressMapper(symbol_table_cls=symbol_table_cls,
                                   parser_cls=LegacyPythonCallbacksParser,
                                   build_ignore_patterns=build_ignore_patterns,
                                   exclude_target_regexps=exclude_target_regexps,
                                   subproject_roots=subproject_roots)

    # Load the native backend.
    native = native or Native.Factory.global_instance().create()

    # Create a Scheduler containing graph and filesystem tasks, with no installed goals. The
    # LegacyBuildGraph will explicitly request the products it needs.
    tasks = (create_legacy_graph_tasks(symbol_table_cls) +
             create_fs_rules() +
             create_graph_rules(address_mapper, symbol_table_cls))

    # TODO: Do not use the cache yet, as it incurs a high overhead.
    scheduler = LocalScheduler(workdir, dict(), tasks, project_tree, native)
    engine = LocalSerialEngine(scheduler, use_cache=False)
    # Change calculation needs an SCM; without one it is simply unavailable.
    change_calculator = EngineChangeCalculator(scheduler, engine, symbol_table_cls, scm) if scm else None

    return LegacyGraphHelper(scheduler, engine, symbol_table_cls, change_calculator)
def setUp(self):
    """Scan the test-project file-set tree and verify the expected exclusion set."""
    super(FilemapIntegrationTest, self).setUp()
    self.path_prefix = 'testprojects/tests/python/pants/file_sets/'

    # BUILD files and dotfiles are ignored by the project tree itself.
    project_tree = FileSystemProjectTree(abspath(self.path_prefix), ['BUILD', '.*'])

    scan_set = set()
    for root, _dirs, files in project_tree.walk(''):
        scan_set.update({join(root, f) for f in files})

    # FIX: assertEquals is a deprecated alias of assertEqual (removed in modern unittest).
    self.assertEqual(scan_set, self.TEST_EXCLUDE_FILES)
def test_exclude_target_regexps(self):
    """Verify that exclude_target_regexps filters matching addresses out of a scan."""
    project_tree = FileSystemProjectTree(self.build_root)
    # Excludes any target whose spec matches `.*:b.*` (i.e. target names starting with 'b').
    address_mapper_with_exclude = BuildFileAddressMapper(
        self.build_file_parser, project_tree, exclude_target_regexps=[r'.*:b.*'])
    self.assert_scanned(['::'],
                        expected=[':root', 'a', 'a/b:c'],
                        address_mapper=address_mapper_with_exclude)
def test_raises_parse_error(self):
    """Malformed BUILD syntax raises ParseError wherever the error sits in the file."""
    # NOTE(review): the BUILD-file bodies below were reconstructed from a whitespace-mangled
    # source; line breaks are best-effort.
    self.add_to_build_file('BUILD', 'foo(name = = "baz")')
    build_file = BuildFile(FileSystemProjectTree(self.build_root), 'BUILD')
    with self.assertRaises(BuildFileParser.ParseError):
        self.build_file_parser.parse_build_file(build_file)

    # Test some corner cases for the context printing

    # Error at beginning of BUILD file
    build_file = self.add_to_build_file('begin/BUILD', dedent("""
        *?&INVALID! = 'foo'
        target(
          name='bar',
          dependencies= [
            ':baz',
          ],
        )
        """))
    with self.assertRaises(BuildFileParser.ParseError):
        self.build_file_parser.parse_build_file(build_file)

    # Error at end of BUILD file
    build_file = self.add_to_build_file('end/BUILD', dedent("""
        target(
          name='bar',
          dependencies= [
            ':baz',
          ],
        )
        *?&INVALID! = 'foo'
        """))
    with self.assertRaises(BuildFileParser.ParseError):
        self.build_file_parser.parse_build_file(build_file)

    # Error in the middle of BUILD file > 6 lines
    build_file = self.add_to_build_file('middle/BUILD', dedent("""
        target(
          name='bar',
        *?&INVALID! = 'foo'
          dependencies = [
            ':baz',
          ],
        )
        """))
    with self.assertRaises(BuildFileParser.ParseError):
        self.build_file_parser.parse_build_file(build_file)

    # Error in very short build file.
    build_file = self.add_to_build_file('short/BUILD', dedent("""
        target(name='bar', dependencies = [':baz'],)
        *?&INVALID! = 'foo'
        """))
    with self.assertRaises(BuildFileParser.ParseError):
        self.build_file_parser.parse_build_file(build_file)
def create_scheduler(rules, validate=True, native=None):
    """Create a Scheduler rooted at the current working directory.

    :param rules: The rules to install.
    :param validate: Whether the Scheduler should validate its rule graph.
    :param native: An optional native-engine instance; initialized on demand when omitted.
    """
    engine_native = native if native is not None else init_native()
    project_tree = FileSystemProjectTree(os.getcwd())
    return Scheduler(
        engine_native,
        project_tree,
        './.pants.d',
        rules,
        execution_options=DEFAULT_EXECUTION_OPTIONS,
        validate=validate,
    )
def create_scheduler(rules, validate=True):
    """Create a Scheduler rooted at the current working directory.

    Remote store/execution are disabled; the native engine is initialized fresh.
    """
    project_tree = FileSystemProjectTree(os.getcwd())
    return Scheduler(
        init_native(),
        project_tree,
        './.pants.d',
        rules,
        remote_store_server=None,
        remote_execution_server=None,
        validate=validate,
    )
def setup_json_scheduler(build_root, native):
    """Return a build graph and scheduler configured for BLD.json files under the given build root.

    :rtype :class:`pants.engine.scheduler.SchedulerSession`
    """
    symbol_table = ExampleTable()

    # Register "literal" subjects required for these rules.
    address_mapper = AddressMapper(build_patterns=('BLD.json',),
                                   parser=JsonParser(symbol_table))

    work_dir = os_path_join(build_root, '.pants.d')
    project_tree = FileSystemProjectTree(build_root)

    rules = [
        # Codegen
        GenGoal.rule(),
        gen_apache_java_thrift,
        gen_apache_python_thrift,
        gen_scrooge_scala_thrift,
        gen_scrooge_java_thrift,
        SingletonRule(Scrooge, Scrooge(Address.parse('src/scala/scrooge')))
    ] + [
        # scala dependency inference
        reify_scala_sources,
        select_package_address,
        calculate_package_search_path,
        SingletonRule(SourceRoots, SourceRoots(('src/java','src/scala'))),
    ] + [
        # Remote dependency resolution
        ivy_resolve,
        select_rev,
    ] + [
        # Compilers
        isolate_resources,
        write_name_file,
        javac,
        scalac,
    ] + (
        create_graph_rules(address_mapper, symbol_table)
    ) + (
        create_fs_rules()
    )

    # The two trailing None args disable the remote store and remote execution servers.
    scheduler = Scheduler(native, project_tree, work_dir, rules, DEFAULT_EXECUTION_OPTIONS, None, None)
    return scheduler.new_session()
def mk_fs_tree(self, build_root_src=None, ignore_patterns=None, work_dir=None):
    """Create a temporary FilesystemProjectTree.

    :param build_root_src: Optional directory to pre-populate from; otherwise, empty.
    :param ignore_patterns: Optional ignore patterns for the resulting tree.
    :param work_dir: Optional working directory to house the build root; created on demand.
    :returns: A FilesystemProjectTree.
    """
    if work_dir is None:
        work_dir = self._create_work_dir()
    build_root = os.path.join(work_dir, 'build_root')
    if build_root_src is None:
        os.makedirs(build_root)
    else:
        shutil.copytree(build_root_src, build_root, symlinks=True)
    return FileSystemProjectTree(build_root, ignore_patterns=ignore_patterns)
def setUp(self):
    """Scan the fixture project tree and verify it matches the expected exclusion set."""
    super().setUp()
    # BUILD files and dotfiles are ignored by the project tree itself.
    project_tree = FileSystemProjectTree(os.path.abspath(self.PATH_PREFIX), ['BUILD', '.*'])

    def should_ignore(filename):
        # Skip compiled artifacts and package markers.
        return filename.endswith(('.pyc', '__init__.py'))

    scan_set = set()
    for root, _dirs, files in project_tree.walk(''):
        scan_set.update(os.path.join(root, f) for f in files if not should_ignore(f))

    self.assertEqual(scan_set, self.TEST_EXCLUDE_FILES)
def mk_fs_tree(self, build_root_src=None):
    """Create a temporary FilesystemProjectTree.

    :param build_root_src: Optional directory to pre-populate from; otherwise, empty.
    :returns: A FilesystemProjectTree.
    """
    work_dir = safe_mkdtemp()
    self.addCleanup(safe_rmtree, work_dir)
    build_root = os.path.join(work_dir, 'build_root')
    if build_root_src is None:
        os.mkdir(build_root)
    else:
        shutil.copytree(build_root_src, build_root, symlinks=True)
    return FileSystemProjectTree(build_root)
def create_scheduler(rules, union_rules=None, validate=True, native=None):
    """Create a Scheduler rooted at the current working directory.

    :param rules: The rules to install.
    :param union_rules: Optional union rules to install alongside.
    :param validate: Whether the Scheduler should validate its rule graph.
    :param native: An optional native-engine instance; initialized on demand when omitted.
    """
    engine_native = native if native is not None else init_native()
    tree = FileSystemProjectTree(os.getcwd())
    return Scheduler(
        native=engine_native,
        ignore_patterns=tree.ignore_patterns,
        build_root=tree.build_root,
        local_store_dir="./.pants.d",
        rules=rules,
        union_rules=union_rules,
        execution_options=DEFAULT_EXECUTION_OPTIONS,
        validate=validate,
    )
def test_build_file_forms(self) -> None:
    """Verify BuildFileAddress spec rendering for nested and root-level BUILD files."""
    with self.workspace("a/b/c/BUILD") as root_dir:
        build_file = BuildFile(FileSystemProjectTree(root_dir), relpath="a/b/c/BUILD")
        # With no explicit target_name, the address defaults to the directory basename ("c").
        self.assert_address("a/b/c", "c", BuildFileAddress(build_file=build_file))
        self.assert_address(
            "a/b/c", "foo", BuildFileAddress(build_file=build_file, target_name="foo"))
        self.assertEqual(
            "a/b/c:foo", BuildFileAddress(build_file=build_file, target_name="foo").spec)

    with self.workspace("BUILD") as root_dir:
        build_file = BuildFile(FileSystemProjectTree(root_dir), relpath="BUILD")
        self.assert_address(
            "", "foo", BuildFileAddress(build_file=build_file, target_name="foo"))
        # Addresses at the build root render with the "//" prefix.
        self.assertEqual(
            "//:foo", BuildFileAddress(build_file=build_file, target_name="foo").spec)
def test_build_ignore_patterns(self): expected_specs = [':root', 'a', 'a:b', 'a/b', 'a/b:c'] # This bogus BUILD file gets in the way of parsing. self.add_to_build_file('some/dir', 'COMPLETELY BOGUS BUILDFILE)\n') with self.assertRaises(AddressLookupError): self.assert_scanned(['::'], expected=expected_specs) project_tree = FileSystemProjectTree(self.build_root) address_mapper_with_ignore = BuildFileAddressMapper( self.build_file_parser, project_tree, build_ignore_patterns=['some']) self.assert_scanned(['::'], expected=expected_specs, address_mapper=address_mapper_with_ignore)
def setUp(self):
    """Prepare BUILD-file fixture strings used by comment/formatting preservation tests."""
    super().setUp()
    self.project_tree = FileSystemProjectTree(self.build_root)

    # NOTE(review): the fixture strings below were reconstructed from a whitespace-mangled
    # source; exact line breaks/indentation are best-effort and may matter to these tests.
    self.complicated_dep_comments = dedent("""\
        target_type(
          # This comment should be okay
          name = 'no_bg_no_cry',  # Side comments here will stay
          # This comment should be okay
          dependencies = [
            # nbgbc_above1
            # nbgnc_above2
            'really/need/this:dep',  #nobgnc_side
            ':whitespace_above',
            ':only_side',#only_side
            #only_above
            ':only_above'
          ],
          # This comment is also fine
          thing = object()
          # And finally this comment survives
        )""")

    self.multi_target_build_string = dedent("""\
        # This comment should stay
        target_type(
          name = 'target_top',
          dependencies = [
            ':dep_a',
          ]
        )

        target_type(
          name = 'target_middle',
          dependencies = [
            ':dep_b',
          ]
        )

        # This comment should be okay
        target_type(
          name = 'target_bottom',
        )
        # Also this one though it's weird""")
def setUp(self):
    """
    :API: public
    """
    super(BaseTest, self).setUp()
    # Reset global registries so state does not leak between tests.
    Goal.clear()
    Subsystem.reset()

    self.real_build_root = BuildRoot().path

    # Run each test in its own throwaway build root.
    self.build_root = os.path.realpath(mkdtemp(suffix='_BUILD_ROOT'))
    self.subprocess_dir = os.path.join(self.build_root, '.pids')
    self.addCleanup(safe_rmtree, self.build_root)

    self.pants_workdir = os.path.join(self.build_root, '.pants.d')
    safe_mkdir(self.pants_workdir)

    self.options = defaultdict(dict)  # scope -> key-value mapping.
    self.options[''] = {
        'pants_workdir': self.pants_workdir,
        'pants_supportdir': os.path.join(self.build_root, 'build-support'),
        'pants_distdir': os.path.join(self.build_root, 'dist'),
        'pants_configdir': os.path.join(self.build_root, 'config'),
        'pants_subprocessdir': self.subprocess_dir,
        'cache_key_gen_version': '0-test',
    }
    self.options['cache'] = {
        'read_from': [],
        'write_to': [],
    }

    # Point the global BuildRoot singleton at the temp dir for the duration of the test.
    BuildRoot().path = self.build_root
    self.addCleanup(BuildRoot().reset)

    self._build_configuration = BuildConfiguration()
    self._build_configuration.register_aliases(self.alias_groups)
    self.build_file_parser = BuildFileParser(self._build_configuration, self.build_root)
    self.project_tree = FileSystemProjectTree(self.build_root)
    self.address_mapper = BuildFileAddressMapper(
        self.build_file_parser, self.project_tree,
        build_ignore_patterns=self.build_ignore_patterns)
    self.build_graph = MutableBuildGraph(address_mapper=self.address_mapper)