def setUp(self):
    """Scan the test project tree and sanity-check the scanned file set.

    Builds a FileSystemProjectTree rooted at PATH_PREFIX that ignores BUILD
    files and dotfiles, walks it, and verifies the discovered files match
    TEST_EXCLUDE_FILES before any test runs.
    """
    super(FilemapIntegrationTest, self).setUp()
    project_tree = FileSystemProjectTree(os.path.abspath(self.PATH_PREFIX), ['BUILD', '.*'])
    scan_set = set()
    for root, dirs, files in project_tree.walk(''):
        scan_set.update({os.path.join(root, f) for f in files})
    # `assertEquals` is a deprecated alias (removed in Python 3.12); use `assertEqual`.
    self.assertEqual(scan_set, self.TEST_EXCLUDE_FILES)
def setUp(self):
    """Scan the file_sets test project tree and sanity-check the scanned file set.

    Ignores only BUILD files (not dotfiles) and verifies the walked paths match
    TEST_EXCLUDE_FILES before any test runs.
    """
    super(FilemapIntegrationTest, self).setUp()
    self.path_prefix = 'testprojects/tests/python/pants/file_sets/'
    project_tree = FileSystemProjectTree(abspath(self.path_prefix), ['BUILD'])
    scan_set = set()
    for root, dirs, files in project_tree.walk(''):
        scan_set.update({join(root, f) for f in files})
    # `assertEquals` is a deprecated alias (removed in Python 3.12); use `assertEqual`.
    self.assertEqual(scan_set, self.TEST_EXCLUDE_FILES)
def setUp(self):
    """Scan the test project tree, skipping compiled artifacts, and verify the file set.

    Walks a FileSystemProjectTree rooted at PATH_PREFIX (ignoring BUILD files and
    dotfiles) and checks the result against TEST_EXCLUDE_FILES.
    """
    super(FilemapIntegrationTest, self).setUp()
    project_tree = FileSystemProjectTree(os.path.abspath(self.PATH_PREFIX), ['BUILD', '.*'])
    scan_set = set()

    def should_ignore(filename):
        # Renamed from `file`, which shadows the Python 2 builtin.
        # Compiled bytecode is an incidental artifact, not part of the expected set.
        return filename.endswith('.pyc')

    for root, dirs, files in project_tree.walk(''):
        scan_set.update({os.path.join(root, f) for f in files if not should_ignore(f)})
    self.assertEqual(scan_set, self.TEST_EXCLUDE_FILES)
def test_sibling_build_files(self):
    """Targets declared across sibling BUILD files (BUILD, BUILD.foo, BUILD.bar)
    in one directory are parsed into a single address family.
    """
    self.add_to_build_file('BUILD', dedent(
        """
        fake(name="base",
          dependencies=[
            ':foo',
          ])
        """))
    self.add_to_build_file('BUILD.foo', dedent(
        """
        fake(name="foo",
          dependencies=[
            ':bat',
          ])
        """))
    # A leading './' should be normalized away when the family is resolved.
    self.add_to_build_file('./BUILD.bar', dedent(
        """
        fake(name="bat")
        """))

    bar_build_file = self.create_buildfile('BUILD.bar')
    base_build_file = self.create_buildfile('BUILD')
    foo_build_file = self.create_buildfile('BUILD.foo')

    address_map = self.build_file_parser.address_map_from_build_files(
        BuildFile.get_build_files_family(FileSystemProjectTree(self.build_root), "."))
    addresses = address_map.keys()
    # Each address records the specific sibling BUILD file it was declared in.
    self.assertEqual({bar_build_file.relpath, base_build_file.relpath, foo_build_file.relpath},
                     {address.rel_path for address in addresses})
    # All three targets resolve against the buildroot ('//').
    self.assertEqual({'//:base', '//:foo', '//:bat'},
                     {address.spec for address in addresses})
def test_context_aware_object_factories(self):
    """Context-aware object factories invoked from a BUILD file can register targets.

    `create_java_libraries` fans out into a java and a scala library, `make_lib`
    creates a jar library, and `path_util` registers no target at all.
    """
    contents = dedent("""
        create_java_libraries(base_name="create-java-libraries",
                              provides_java_name="test-java",
                              provides_scala_name="test-scala")
        make_lib("com.foo.test", "does_not_exist", "1.0")
        path_util("baz")
    """)
    self.create_file('3rdparty/BUILD', contents)

    build_file = BuildFile(FileSystemProjectTree(self.build_root), '3rdparty/BUILD')
    address_map = self.build_file_parser.parse_build_file(build_file)
    registered_proxies = set(address_map.values())

    # Three targets total: one from make_lib, two from create_java_libraries.
    self.assertEqual(len(registered_proxies), 3)
    targets_created = {}
    for target_proxy in registered_proxies:
        targets_created[target_proxy.addressed_name] = target_proxy.addressed_type
    self.assertEqual({'does_not_exist',
                      'create-java-libraries-scala',
                      'create-java-libraries-java'},
                     set(targets_created.keys()))
    self.assertEqual(targets_created['does_not_exist'], self.JarLibrary)
    self.assertEqual(targets_created['create-java-libraries-java'], self.JavaLibrary)
    self.assertEqual(targets_created['create-java-libraries-scala'], self.ScalaLibrary)
def test_sibling_build_files_duplicates(self):
    """Duplicate target names across sibling BUILD files raise SiblingConflictException."""
    # This workspace is malformed, you can't shadow a name in a sibling BUILD file
    self.add_to_build_file('BUILD', dedent(
        """
        fake(name="base",
          dependencies=[
            ':foo',
          ])
        """))
    self.add_to_build_file('BUILD.foo', dedent(
        """
        fake(name="foo",
          dependencies=[
            ':bat',
          ])
        """))
    # 'base' is already declared in the sibling BUILD file above.
    self.add_to_build_file('./BUILD.bar', dedent(
        """
        fake(name="base")
        """))

    with self.assertRaises(BuildFileParser.SiblingConflictException):
        self.build_file_parser.address_map_from_build_files(
            BuildFile.get_build_files_family(FileSystemProjectTree(self.build_root), '.'))
def to_url(m):
    """Map a regex match of a path-like token to a reporting-server URL.

    Returns the matched text unchanged for http(s) URLs, '/browse/<relpath>'
    for paths (or 'dir:target' specs) that resolve inside the buildroot, and
    None for anything that cannot be resolved to a file under the buildroot.
    """
    if m.group(1):
        return m.group(0)  # It's an http(s) url.

    path = m.group(0)
    if path.startswith('..'):
        # The path is not located inside the buildroot, so it's definitely not a BUILD file.
        return None
    if path.startswith('/'):
        # Absolute path: re-express it relative to the buildroot.
        path = os.path.relpath(path, buildroot)
    else:
        # The path is located in the buildroot: see if it's a reference to a
        # target in a BUILD file, i.e. a 'dir:target' spec.
        parts = path.split(':')
        putative_dir = parts[0] if len(parts) == 2 else path
        if os.path.isdir(os.path.join(buildroot, putative_dir)):
            family = list(
                BuildFile.get_build_files_family(
                    FileSystemProjectTree(buildroot), putative_dir))
            if not family:
                return None
            path = family[0].relpath

    if os.path.exists(os.path.join(buildroot, path)):
        # The reporting server serves file content at /browse/<path_from_buildroot>.
        return '/browse/{}'.format(path)
    return None
def setup_json_scheduler(build_root, native):
    """Return a build graph and scheduler configured for BLD.json files under the given build root.

    :param build_root: Directory under which BLD.json build files are found.
    :param native: The native-engine instance backing the scheduler.
    :rtype :class:`pants.engine.scheduler.LocalScheduler`
    """
    symbol_table = ExampleTable()

    # Register "literal" subjects required for these tasks.
    # TODO: Replace with `Subsystems`.
    address_mapper = AddressMapper(build_patterns=('BLD.json', ),
                                   parser=JsonParser(symbol_table))

    work_dir = os_path_join(build_root, '.pants.d')
    project_tree = FileSystemProjectTree(build_root)

    # Command-line goal name -> product type the scheduler must produce for it.
    goals = {
      'compile': Classpath,
      # TODO: to allow for running resolve alone, should split out a distinct 'IvyReport' product.
      'resolve': Classpath,
      'list': BuildFileAddresses,
      GenGoal.name(): GenGoal,
      'ls': Snapshot,
      'cat': FilesContent,
    }
    tasks = [
      # Codegen
      GenGoal.rule(),
      gen_apache_java_thrift,
      gen_apache_python_thrift,
      gen_scrooge_scala_thrift,
      gen_scrooge_java_thrift,
      SingletonRule(Scrooge, Scrooge(Address.parse('src/scala/scrooge')))
    ] + [
      # scala dependency inference
      reify_scala_sources,
      extract_scala_imports,
      select_package_address,
      calculate_package_search_path,
      SingletonRule(SourceRoots, SourceRoots(('src/java', 'src/scala'))),
    ] + [
      # Remote dependency resolution
      ivy_resolve,
      select_rev,
    ] + [
      # Compilers
      isolate_resources,
      write_name_file,
      javac,
      scalac,
    ] + (
      create_graph_rules(address_mapper, symbol_table)
    ) + (
      create_fs_rules()
    )
    return LocalScheduler(work_dir, goals, tasks, project_tree, native, graph_lock=None)
def setup_legacy_graph(pants_ignore_patterns,
                       workdir,
                       build_root=None,
                       native=None,
                       symbol_table_cls=None,
                       build_ignore_patterns=None,
                       exclude_target_regexps=None,
                       subproject_roots=None):
    """Construct and return the components necessary for LegacyBuildGraph construction.

    :param list pants_ignore_patterns: A list of path ignore patterns for FileSystemProjectTree,
                                       usually taken from the '--pants-ignore' global option.
    :param str workdir: The pants workdir.
    :param str build_root: A path to be used as the build root. If None, then default is used.
    :param Native native: An instance of the native-engine subsystem.
    :param SymbolTable symbol_table_cls: A SymbolTable class to use for build file parsing, or None
                                         to use the default.
    :param list build_ignore_patterns: A list of paths ignore patterns used when searching for BUILD
                                       files, usually taken from the '--build-ignore' global option.
    :param list exclude_target_regexps: A list of regular expressions for excluding targets.
    :param list subproject_roots: Paths that correspond with embedded build roots
                                  under the current build root.
    :returns: A tuple of (scheduler, engine, symbol_table_cls, build_graph_cls).
    """
    build_root = build_root or get_buildroot()
    scm = get_scm()
    symbol_table_cls = symbol_table_cls or LegacySymbolTable

    project_tree = FileSystemProjectTree(build_root, pants_ignore_patterns)

    # Register "literal" subjects required for these tasks.
    # TODO: Replace with `Subsystems`.
    address_mapper = AddressMapper(symbol_table_cls=symbol_table_cls,
                                   parser_cls=LegacyPythonCallbacksParser,
                                   build_ignore_patterns=build_ignore_patterns,
                                   exclude_target_regexps=exclude_target_regexps,
                                   subproject_roots=subproject_roots)

    # Load the native backend.
    native = native or Native.Factory.global_instance().create()

    # Create a Scheduler containing graph and filesystem tasks, with no installed goals. The
    # LegacyBuildGraph will explicitly request the products it needs.
    tasks = (
      create_legacy_graph_tasks(symbol_table_cls) +
      create_fs_rules() +
      create_graph_rules(address_mapper, symbol_table_cls)
    )

    # TODO: Do not use the cache yet, as it incurs a high overhead.
    scheduler = LocalScheduler(workdir, dict(), tasks, project_tree, native)
    engine = LocalSerialEngine(scheduler, use_cache=False)
    # Change calculation requires an SCM; skip it when none is configured.
    change_calculator = EngineChangeCalculator(scheduler, engine, symbol_table_cls, scm) if scm else None

    return LegacyGraphHelper(scheduler, engine, symbol_table_cls, change_calculator)
def test_exclude_target_regexps(self):
    """An address mapper configured with exclude regexps omits matching targets from scans."""
    mapper_with_exclude = BuildFileAddressMapper(
        self.build_file_parser,
        FileSystemProjectTree(self.build_root),
        exclude_target_regexps=[r'.*:b.*'])
    # Targets whose spec matches '.*:b.*' (e.g. 'a:b' and 'a/b') are filtered out.
    self.assert_scanned(['::'],
                        expected=[':root', 'a', 'a/b:c'],
                        address_mapper=mapper_with_exclude)
def test_raises_parse_error(self):
    """Syntactically-invalid BUILD files raise ParseError, wherever the error sits.

    Exercises the error-context printing with the bad token at the beginning,
    end, and middle of a longer file, plus a very short file.
    """
    self.add_to_build_file('BUILD', 'foo(name = = "baz")')
    build_file = BuildFile(FileSystemProjectTree(self.build_root), 'BUILD')
    with self.assertRaises(BuildFileParser.ParseError):
        self.build_file_parser.parse_build_file(build_file)

    # Test some corner cases for the context printing: the invalid token placed
    # at the beginning, the end, the middle of a >6-line file, and in a very
    # short file.
    corner_cases = [
        ('begin/BUILD', dedent("""
          *?&INVALID! = 'foo'
          target(
            name='bar',
            dependencies= [
              ':baz',
            ],
          )
          """)),
        ('end/BUILD', dedent("""
          target(
            name='bar',
            dependencies= [
              ':baz',
            ],
          )
          *?&INVALID! = 'foo'
          """)),
        ('middle/BUILD', dedent("""
          target(
            name='bar',
            *?&INVALID! = 'foo'
            dependencies = [
              ':baz',
            ],
          )
          """)),
        ('short/BUILD', dedent("""
          target(name='bar', dependencies = [':baz'],)
          *?&INVALID! = 'foo'
          """)),
    ]
    for relpath, contents in corner_cases:
        bad_build_file = self.add_to_build_file(relpath, contents)
        with self.assertRaises(BuildFileParser.ParseError):
            self.build_file_parser.parse_build_file(bad_build_file)
def create_scheduler(rules, validate=True, native=None):
    """Create a Scheduler rooted at the current working directory.

    :param rules: The rules to install in the scheduler.
    :param validate: Whether to validate the rule graph on construction.
    :param native: An optional native-engine instance; created on demand if omitted.
    """
    engine_native = native or init_native()
    project_tree = FileSystemProjectTree(os.getcwd())
    return Scheduler(
        engine_native,
        project_tree,
        './.pants.d',
        rules,
        execution_options=DEFAULT_EXECUTION_OPTIONS,
        validate=validate,
    )
def test_build_file_forms(self):
    """Addresses form correctly from both nested and root-level BUILD files."""
    with self.workspace('a/b/c/BUILD') as root_dir:
        nested = BuildFile(FileSystemProjectTree(root_dir), relpath='a/b/c/BUILD')
        # With no explicit target name, the address defaults to the directory name.
        self.assert_address('a/b/c', 'c', BuildFileAddress(nested))
        self.assert_address('a/b/c', 'foo', BuildFileAddress(nested, target_name='foo'))
        self.assertEqual('a/b/c:foo', BuildFileAddress(nested, target_name='foo').spec)

    with self.workspace('BUILD') as root_dir:
        top_level = BuildFile(FileSystemProjectTree(root_dir), relpath='BUILD')
        self.assert_address('', 'foo', BuildFileAddress(top_level, target_name='foo'))
        # Root-level specs render with the '//' buildroot prefix.
        self.assertEqual('//:foo', BuildFileAddress(top_level, target_name='foo').spec)
def setup_json_scheduler(build_root, native):
    """Return a build graph and scheduler configured for BLD.json files under the given build root.

    :param build_root: Directory under which BLD.json build files are found.
    :param native: The native-engine instance backing the scheduler.
    :rtype :class:`pants.engine.scheduler.SchedulerSession`
    """
    symbol_table = ExampleTable()

    # Register "literal" subjects required for these rules.
    address_mapper = AddressMapper(build_patterns=('BLD.json',),
                                   parser=JsonParser(symbol_table))

    work_dir = os_path_join(build_root, '.pants.d')
    project_tree = FileSystemProjectTree(build_root)

    rules = [
      # Codegen
      GenGoal.rule(),
      gen_apache_java_thrift,
      gen_apache_python_thrift,
      gen_scrooge_scala_thrift,
      gen_scrooge_java_thrift,
      SingletonRule(Scrooge, Scrooge(Address.parse('src/scala/scrooge')))
    ] + [
      # scala dependency inference
      reify_scala_sources,
      select_package_address,
      calculate_package_search_path,
      SingletonRule(SourceRoots, SourceRoots(('src/java','src/scala'))),
    ] + [
      # Remote dependency resolution
      ivy_resolve,
      select_rev,
    ] + [
      # Compilers
      isolate_resources,
      write_name_file,
      javac,
      scalac,
    ] + (
      create_graph_rules(address_mapper, symbol_table)
    ) + (
      create_fs_rules()
    )

    # Positional None, None: no remote store or execution server configured.
    scheduler = Scheduler(native,
                          project_tree,
                          work_dir,
                          rules,
                          DEFAULT_EXECUTION_OPTIONS,
                          None,
                          None)
    return scheduler.new_session()
def create_scheduler(rules, validate=True):
    """Create a Scheduler over the current working directory with no remote servers.

    :param rules: The rules to install in the scheduler.
    :param validate: Whether to validate the rule graph on construction.
    """
    return Scheduler(
        init_native(),
        FileSystemProjectTree(os.getcwd()),
        './.pants.d',
        rules,
        remote_store_server=None,
        remote_execution_server=None,
        validate=validate,
    )
def mk_fs_tree(self, build_root_src=None, ignore_patterns=None, work_dir=None):
    """Create a temporary FilesystemProjectTree.

    :param build_root_src: Optional directory to pre-populate from; otherwise, empty.
    :param ignore_patterns: Optional ignore patterns for the resulting tree.
    :param work_dir: Optional parent directory; a fresh one is created if omitted.
    :returns: A FilesystemProjectTree.
    """
    base_dir = work_dir or self._create_work_dir()
    build_root = os.path.join(base_dir, 'build_root')
    if build_root_src is None:
        os.makedirs(build_root)
    else:
        # Preserve symlinks so the copied tree mirrors the source exactly.
        shutil.copytree(build_root_src, build_root, symlinks=True)
    return FileSystemProjectTree(build_root, ignore_patterns=ignore_patterns)
def create_scheduler(rules, union_rules=None, validate=True, native=None):
    """Create a Scheduler over the current working directory.

    :param rules: The rules to install in the scheduler.
    :param union_rules: Optional union rules to install.
    :param validate: Whether to validate the rule graph on construction.
    :param native: An optional native-engine instance; created on demand if omitted.
    """
    project_tree = FileSystemProjectTree(os.getcwd())
    return Scheduler(
        native=native or init_native(),
        ignore_patterns=project_tree.ignore_patterns,
        build_root=project_tree.build_root,
        local_store_dir="./.pants.d",
        rules=rules,
        union_rules=union_rules,
        execution_options=DEFAULT_EXECUTION_OPTIONS,
        validate=validate,
    )
def all_roots(self):
    """Return all known source roots.

    Returns a generator over (source root, list of langs, category) triples.

    Note: Requires a directory walk to match actual directories against patterns.
    However we don't descend into source roots, once found, so this should be fast in practice.
    Note: Does not follow symlinks.
    """
    project_tree = FileSystemProjectTree(get_buildroot(), self._options.pants_ignore)

    fixed_roots = set()
    for root, langs, category in self._trie.fixed():
        # Fixed roots are checked directly against the filesystem; no walk needed.
        if project_tree.exists(root):
            yield self._source_root_factory.create(root, langs, category)
        fixed_roots.add(root)

    for relpath, dirnames, _ in project_tree.walk("", topdown=True):
        match = self._trie.find(relpath)
        if match:
            # NOTE(review): this is a plain string-prefix test, so 'foo' would
            # also shadow under fixed root 'foobar' — presumably the configured
            # patterns never collide like that, but worth confirming.
            if not any(fixed_root.startswith(relpath) for fixed_root in fixed_roots):
                yield match  # Found a source root not a prefix of any fixed roots.
            del dirnames[:]  # Don't continue to walk into it.
def mk_fs_tree(self, build_root_src=None):
    """Create a temporary FilesystemProjectTree.

    :param build_root_src: Optional directory to pre-populate from; otherwise, empty.
    :returns: A FilesystemProjectTree.
    """
    temp_dir = safe_mkdtemp()
    self.addCleanup(safe_rmtree, temp_dir)
    build_root = os.path.join(temp_dir, 'build_root')
    if build_root_src is None:
        os.mkdir(build_root)
    else:
        # Preserve symlinks so the copied tree mirrors the source exactly.
        shutil.copytree(build_root_src, build_root, symlinks=True)
    return FileSystemProjectTree(build_root)
def test_build_file_forms(self) -> None:
    """Addresses form correctly from both nested and root-level BUILD files."""
    with self.workspace("a/b/c/BUILD") as root_dir:
        nested = BuildFile(FileSystemProjectTree(root_dir), relpath="a/b/c/BUILD")
        # With no explicit target name, the address defaults to the directory name.
        self.assert_address("a/b/c", "c", BuildFileAddress(build_file=nested))
        self.assert_address("a/b/c", "foo", BuildFileAddress(build_file=nested, target_name="foo"))
        self.assertEqual("a/b/c:foo", BuildFileAddress(build_file=nested, target_name="foo").spec)

    with self.workspace("BUILD") as root_dir:
        top_level = BuildFile(FileSystemProjectTree(root_dir), relpath="BUILD")
        self.assert_address("", "foo", BuildFileAddress(build_file=top_level, target_name="foo"))
        # Root-level specs render with the '//' buildroot prefix.
        self.assertEqual("//:foo", BuildFileAddress(build_file=top_level, target_name="foo").spec)
def test_build_ignore_patterns(self):
    """A BUILD file under an ignored path neither breaks parsing nor contributes targets."""
    expected_specs = [':root', 'a', 'a:b', 'a/b', 'a/b:c']

    # This bogus BUILD file gets in the way of parsing.
    self.add_to_build_file('some/dir', 'COMPLETELY BOGUS BUILDFILE)\n')
    with self.assertRaises(AddressLookupError):
        self.assert_scanned(['::'], expected=expected_specs)

    # With 'some' ignored, the bogus file is skipped and the scan succeeds.
    mapper_with_ignore = BuildFileAddressMapper(
        self.build_file_parser,
        FileSystemProjectTree(self.build_root),
        build_ignore_patterns=['some'])
    self.assert_scanned(['::'], expected=expected_specs, address_mapper=mapper_with_ignore)
def setUp(self):
    """Create an isolated build root, option defaults, and build-graph machinery for each test.

    :API: public
    """
    super(BaseTest, self).setUp()
    # Reset global registries so state never leaks between tests.
    Goal.clear()
    Subsystem.reset()

    self.real_build_root = BuildRoot().path
    # Each test gets a fresh, throwaway build root.
    self.build_root = os.path.realpath(mkdtemp(suffix='_BUILD_ROOT'))
    self.subprocess_dir = os.path.join(self.build_root, '.pids')
    self.addCleanup(safe_rmtree, self.build_root)

    self.pants_workdir = os.path.join(self.build_root, '.pants.d')
    safe_mkdir(self.pants_workdir)

    self.options = defaultdict(dict)  # scope -> key-value mapping.
    # Global-scope option defaults pointing all pants dirs inside the temp build root.
    self.options[''] = {
      'pants_workdir': self.pants_workdir,
      'pants_supportdir': os.path.join(self.build_root, 'build-support'),
      'pants_distdir': os.path.join(self.build_root, 'dist'),
      'pants_configdir': os.path.join(self.build_root, 'config'),
      'pants_subprocessdir': self.subprocess_dir,
      'cache_key_gen_version': '0-test',
    }
    self.options['cache'] = {
      'read_from': [],
      'write_to': [],
    }

    # Point the global BuildRoot singleton at the temp dir, restoring it afterwards.
    BuildRoot().path = self.build_root
    self.addCleanup(BuildRoot().reset)

    self._build_configuration = BuildConfiguration()
    self._build_configuration.register_aliases(self.alias_groups)
    self.build_file_parser = BuildFileParser(self._build_configuration, self.build_root)
    self.project_tree = FileSystemProjectTree(self.build_root)
    self.address_mapper = BuildFileAddressMapper(self.build_file_parser, self.project_tree,
                                                 build_ignore_patterns=self.build_ignore_patterns)
    self.build_graph = MutableBuildGraph(address_mapper=self.address_mapper)
def setUp(self):
    """Prepare shared BUILD-file fixture strings used by the formatting/rewrite tests."""
    super().setUp()
    self.project_tree = FileSystemProjectTree(self.build_root)
    # A dependencies list decorated with every comment placement the rewriter
    # must preserve: above, beside, and interleaved with entries.
    self.complicated_dep_comments = dedent("""\
        target_type(
          # This comment should be okay
          name = 'no_bg_no_cry',  # Side comments here will stay
          # This comment should be okay
          dependencies = [
            # nbgbc_above1
            # nbgnc_above2
            'really/need/this:dep', #nobgnc_side

            ':whitespace_above',
            ':only_side',#only_side
            #only_above
            ':only_above'
          ],
          # This comment is also fine
          thing = object()
          # And finally this comment survives
        )""")

    # Multiple targets in one BUILD file, with comments between targets.
    self.multi_target_build_string = dedent("""\
        # This comment should stay
        target_type(
          name = 'target_top',
          dependencies = [
            ':dep_a',
          ]
        )

        target_type(
          name = 'target_middle',
          dependencies = [
            ':dep_b',
          ]
        )

        # This comment should be okay
        target_type(
          name = 'target_bottom',
        )
        # Also this one though it's weird""")
def setup_legacy_graph(pants_ignore_patterns,
                       symbol_table_cls=None,
                       build_ignore_patterns=None,
                       exclude_target_regexps=None):
    """Construct and return the components necessary for LegacyBuildGraph construction.

    :param list pants_ignore_patterns: A list of path ignore patterns for FileSystemProjectTree,
                                       usually taken from the '--pants-ignore' global option.
    :param SymbolTable symbol_table_cls: A SymbolTable class to use for build file parsing, or None
                                         to use the default.
    :param list build_ignore_patterns: A list of paths ignore patterns used when searching for BUILD
                                       files, usually taken from the '--build-ignore' global option.
    :param list exclude_target_regexps: A list of regular expressions for excluding targets.
    :returns: A tuple of (scheduler, engine, symbol_table_cls, build_graph_cls).
    """
    build_root = get_buildroot()
    scm = get_scm()
    project_tree = FileSystemProjectTree(build_root, pants_ignore_patterns)
    symbol_table_cls = symbol_table_cls or LegacySymbolTable

    # Register "literal" subjects required for these tasks.
    # TODO: Replace with `Subsystems`.
    address_mapper = AddressMapper(symbol_table_cls=symbol_table_cls,
                                   parser_cls=LegacyPythonCallbacksParser,
                                   build_ignore_patterns=build_ignore_patterns,
                                   exclude_target_regexps=exclude_target_regexps)

    # Create a Scheduler containing graph and filesystem tasks, with no installed goals. The
    # LegacyBuildGraph will explicitly request the products it needs.
    tasks = (
      create_legacy_graph_tasks(symbol_table_cls) +
      create_fs_tasks() +
      create_graph_tasks(address_mapper, symbol_table_cls)
    )
    scheduler = LocalScheduler(dict(), tasks, project_tree)
    # TODO: Do not use the cache yet, as it incurs a high overhead.
    engine = LocalSerialEngine(scheduler, Storage.create(), use_cache=False)
    # Change calculation requires an SCM; skip it when none is configured.
    change_calculator = EngineChangeCalculator(engine, scm) if scm else None
    return LegacyGraphHelper(scheduler, engine, symbol_table_cls, change_calculator)
def do_test_exposed_context_aware_object(self, context_aware_object_factory):
    """Register the given factory under the alias 'george' and yield its exposed parse-global.

    Generator helper: after verifying that registration exposes only the
    context-aware factory, yields the 'george' parse-global (bound to a BUILD
    file in a temporary dir) for the caller to exercise.
    """
    self._register_aliases(context_aware_object_factories={'george': context_aware_object_factory})
    aliases = self.build_configuration.registered_aliases()
    # Only the context-aware factory should be registered; all other alias kinds stay empty.
    self.assertEqual({}, aliases.target_types)
    self.assertEqual({}, aliases.target_macro_factories)
    self.assertEqual({}, aliases.objects)
    self.assertEqual(dict(george=context_aware_object_factory),
                     aliases.context_aware_object_factories)

    with temporary_dir() as root:
        build_file_path = os.path.join(root, 'george', 'BUILD')
        touch(build_file_path)
        build_file = BuildFile(FileSystemProjectTree(root), 'george/BUILD')
        parse_state = self.build_configuration.initialize_parse_state(build_file)

        # An empty BUILD file registers no addressables but exposes the factory alias.
        self.assertEqual(0, len(parse_state.registered_addressable_instances))
        self.assertEqual(1, len(parse_state.parse_globals))
        yield parse_state.parse_globals['george']
def setUp(self):
    """Create an isolated build root, option defaults, and parsing machinery for each test.

    :API: public
    """
    super(BaseTest, self).setUp()
    # Avoid resetting the Runtracker here, as that is specific to fork'd process cleanup.
    clean_global_runtime_state(reset_runtracker=False, reset_subsystem=True)

    self.real_build_root = BuildRoot().path

    # Each test gets a fresh, throwaway build root.
    self.build_root = os.path.realpath(mkdtemp(suffix='_BUILD_ROOT'))
    self.subprocess_dir = os.path.join(self.build_root, '.pids')
    self.addCleanup(safe_rmtree, self.build_root)

    self.pants_workdir = os.path.join(self.build_root, '.pants.d')
    safe_mkdir(self.pants_workdir)

    self.options = defaultdict(dict)  # scope -> key-value mapping.
    # Global-scope option defaults pointing all pants dirs inside the temp build root.
    self.options[''] = {
      'pants_workdir': self.pants_workdir,
      'pants_supportdir': os.path.join(self.build_root, 'build-support'),
      'pants_distdir': os.path.join(self.build_root, 'dist'),
      'pants_configdir': os.path.join(self.build_root, 'config'),
      'pants_subprocessdir': self.subprocess_dir,
      'cache_key_gen_version': '0-test',
    }
    self.options['cache'] = {
      'read_from': [],
      'write_to': [],
    }

    # Point the global BuildRoot singleton at the temp dir, restoring it afterwards.
    BuildRoot().path = self.build_root
    self.addCleanup(BuildRoot().reset)

    self._build_configuration = BuildConfiguration()
    self._build_configuration.register_aliases(self.alias_groups)
    self.build_file_parser = BuildFileParser(self._build_configuration, self.build_root)
    self.project_tree = FileSystemProjectTree(self.build_root)
    self.reset_build_graph()
def setUp(self):
    """Create an isolated build root, option defaults, and build-graph machinery for each test."""
    super(BaseTest, self).setUp()
    # Reset global registries so state never leaks between tests.
    Goal.clear()
    Subsystem.reset()

    self.real_build_root = BuildRoot().path

    # Each test gets a fresh, throwaway build root.
    self.build_root = os.path.realpath(mkdtemp(suffix='_BUILD_ROOT'))
    self.addCleanup(safe_rmtree, self.build_root)

    self.pants_workdir = os.path.join(self.build_root, '.pants.d')
    safe_mkdir(self.pants_workdir)

    self.options = defaultdict(dict)  # scope -> key-value mapping.
    # Global-scope option defaults pointing all pants dirs inside the temp build root.
    self.options[''] = {
      'pants_workdir': self.pants_workdir,
      'pants_supportdir': os.path.join(self.build_root, 'build-support'),
      'pants_distdir': os.path.join(self.build_root, 'dist'),
      'pants_configdir': os.path.join(self.build_root, 'config'),
      'cache_key_gen_version': '0-test',
    }
    self.options['cache'] = {
      'read_from': [],
      'write_to': [],
    }

    # Point the global BuildRoot singleton at the temp dir, restoring it afterwards.
    BuildRoot().path = self.build_root
    self.addCleanup(BuildRoot().reset)

    # We need a pants.ini, even if empty. get_buildroot() uses its presence.
    self.create_file('pants.ini')
    self._build_configuration = BuildConfiguration()
    self._build_configuration.register_aliases(self.alias_groups)
    self.build_file_parser = BuildFileParser(self._build_configuration, self.build_root)
    self.address_mapper = BuildFileAddressMapper(self.build_file_parser,
                                                 FileSystemProjectTree(self.build_root))
    self.build_graph = BuildGraph(address_mapper=self.address_mapper)
def mk_scheduler(self,
                 tasks=None,
                 goals=None,
                 storage=None,
                 build_root_src=None,
                 symbol_table_cls=EmptyTable):
    """Creates a Scheduler with "native" tasks already included, and the given additional tasks.

    :param build_root_src: Optional directory to pre-populate the build root from.
    :returns: A (scheduler, storage, build_root) triple.
    """
    goals = goals or dict()
    storage = storage or Storage.create(in_memory=True)

    # Lay out a throwaway build root, cleaned up when the test finishes.
    temp_dir = safe_mkdtemp()
    self.addCleanup(safe_rmtree, temp_dir)
    build_root = os.path.join(temp_dir, 'build_root')
    if build_root_src is None:
        os.mkdir(build_root)
    else:
        shutil.copytree(build_root_src, build_root)

    # Append the built-in filesystem tasks to whatever the caller supplied.
    all_tasks = list(tasks or []) + create_fs_tasks()
    project_tree = FileSystemProjectTree(build_root)
    scheduler = LocalScheduler(goals, all_tasks, storage, project_tree)
    return scheduler, storage, build_root
def create_buildfile(self, path):
    """Construct a BuildFile for `path`, relative to this test's build root."""
    project_tree = FileSystemProjectTree(self.build_root)
    return BuildFile(project_tree, path)
def test_raises_execute_error(self):
    """Referencing an unknown alias in a BUILD file raises ExecuteError."""
    self.add_to_build_file('BUILD', 'undefined_alias(name="baz")')
    bad_build_file = BuildFile(FileSystemProjectTree(self.build_root), 'BUILD')
    with self.assertRaises(BuildFileParser.ExecuteError):
        self.build_file_parser.parse_build_file(bad_build_file)
def test_exposed_object(self):
    """A bare exposed object in a BUILD file registers no addressable targets."""
    self.add_to_build_file('BUILD', """fake_object""")
    parsed_map = self.build_file_parser.parse_build_file(
        BuildFile(FileSystemProjectTree(self.build_root), 'BUILD'))
    self.assertEqual(len(parsed_map), 0)
def get_project_tree(options):
    """Creates the project tree for build files for use in a given pants run.

    :param options: The parsed global options; `pants_ignore` supplies ignore patterns.
    """
    # A missing/None option means no ignore patterns.
    ignore_patterns = options.pants_ignore or []
    return FileSystemProjectTree(get_buildroot(), ignore_patterns)
def setup_legacy_graph(pants_ignore_patterns,
                       workdir,
                       build_file_imports_behavior,
                       build_root=None,
                       native=None,
                       build_file_aliases=None,
                       build_ignore_patterns=None,
                       exclude_target_regexps=None,
                       subproject_roots=None,
                       include_trace_on_error=True):
    """Construct and return the components necessary for LegacyBuildGraph construction.

    :param list pants_ignore_patterns: A list of path ignore patterns for FileSystemProjectTree,
                                       usually taken from the '--pants-ignore' global option.
    :param str workdir: The pants workdir.
    :param build_file_imports_behavior: How to behave if a BUILD file being parsed tries to use
                                        import statements. Valid values: "allow", "warn", "error".
    :type build_file_imports_behavior: string
    :param str build_root: A path to be used as the build root. If None, then default is used.
    :param Native native: An instance of the native-engine subsystem.
    :param build_file_aliases: BuildFileAliases to register.
    :type build_file_aliases: :class:`pants.build_graph.build_file_aliases.BuildFileAliases`
    :param list build_ignore_patterns: A list of paths ignore patterns used when searching for BUILD
                                       files, usually taken from the '--build-ignore' global option.
    :param list exclude_target_regexps: A list of regular expressions for excluding targets.
    :param list subproject_roots: Paths that correspond with embedded build roots
                                  under the current build root.
    :param bool include_trace_on_error: If True, when an error occurs, the error message will
                                        include the graph trace.
    :returns: A tuple of (scheduler, engine, symbol_table, build_graph_cls).
    """
    build_root = build_root or get_buildroot()
    scm = get_scm()
    if not build_file_aliases:
        # Fall back to the aliases registered by the full options/backend bootstrap.
        _, build_config = OptionsInitializer(OptionsBootstrapper()).setup(init_logging=False)
        build_file_aliases = build_config.registered_aliases()

    symbol_table = LegacySymbolTable(build_file_aliases)

    project_tree = FileSystemProjectTree(build_root, pants_ignore_patterns)

    # Register "literal" subjects required for these tasks.
    parser = LegacyPythonCallbacksParser(symbol_table,
                                         build_file_aliases,
                                         build_file_imports_behavior)
    address_mapper = AddressMapper(parser=parser,
                                   build_ignore_patterns=build_ignore_patterns,
                                   exclude_target_regexps=exclude_target_regexps,
                                   subproject_roots=subproject_roots)

    # Load the native backend.
    native = native or Native.create()

    # Create a Scheduler containing graph and filesystem tasks, with no installed goals. The
    # LegacyBuildGraph will explicitly request the products it needs.
    tasks = (
      create_legacy_graph_tasks(symbol_table) +
      create_fs_rules() +
      create_graph_rules(address_mapper, symbol_table) +
      create_process_rules()
    )

    scheduler = LocalScheduler(workdir,
                               dict(),
                               tasks,
                               project_tree,
                               native,
                               include_trace_on_error=include_trace_on_error)
    # Change calculation requires an SCM; skip it when none is configured.
    change_calculator = EngineChangeCalculator(scheduler, symbol_table, scm) if scm else None

    return LegacyGraphHelper(scheduler, symbol_table, change_calculator)