def setup_json_scheduler(build_root):
  """Construct the graph/scheduler pair used for BLD.json builds.

  :param build_root: The directory to search for BLD.json files under.
  :rtype: tuple of (:class:`pants.engine.exp.graph.Graph`,
                    :class:`pants.engine.exp.scheduler.LocalScheduler`)
  """
  # Alias -> type bindings made available to BLD.json files.
  aliases = {
      'apache_thrift_configuration': ApacheThriftConfiguration,
      'build_properties': BuildPropertiesConfiguration,
      'jar': Jar,
      'requirement': Requirement,
      'scrooge_configuration': ScroogeConfiguration,
      'sources': AddressableSources,
      'target': Target,
  }
  parser = functools.partial(parse_json, symbol_table=aliases)
  graph = Graph(AddressMapper(build_root=build_root,
                              build_pattern=r'^BLD.json$',
                              parser=parser))
  # NB: planner order preserved from the original registration.
  scheduler = LocalScheduler(graph,
                             Planners([ApacheThriftPlanner(),
                                       BuildPropertiesPlanner(),
                                       GlobalIvyResolvePlanner(),
                                       JavacPlanner(),
                                       ScalacPlanner(),
                                       ScroogePlanner(),
                                       UnpickleableInputsPlanner(),
                                       UnpickleableResultPlanner()]))
  return graph, scheduler
def create(self, build_pattern=None, parser_cls=None, inline=False):
  """Construct a LocalScheduler rooted at this test module's directory.

  :param build_pattern: Optional regex identifying BUILD files.
  :param parser_cls: Optional parser class for BUILD file contents.
  :param inline: Unused here; accepted for signature compatibility.
  """
  test_dir = os.path.dirname(__file__)
  address_mapper = AddressMapper(build_root=test_dir,
                                 symbol_table_cls=TestTable,
                                 build_pattern=build_pattern,
                                 parser_cls=parser_cls)
  goals = {self._goal: [self._product]}
  return LocalScheduler(goals, create_graph_tasks(address_mapper))
def setup(options=None):
  """Bootstrap options, storage and a scheduler for a pants run.

  :param options: Pre-parsed options; when falsy, options are bootstrapped here.
  :returns: A tuple of (scheduler, storage, options, spec_roots, symbol_table_cls).
  """
  if not options:
    options, _ = OptionsInitializer(OptionsBootstrapper()).setup()

  build_root = get_buildroot()
  spec_parser = CmdLineSpecParser(build_root)
  spec_roots = [spec_parser.parse_spec(s) for s in options.target_specs]

  storage = Storage.create(debug=False)
  project_tree = FileSystemProjectTree(build_root)
  symbol_table_cls = LegacyTable

  # Register "literal" subjects required for these tasks.
  # TODO: Replace with `Subsystems`.
  address_mapper = AddressMapper(symbol_table_cls=symbol_table_cls,
                                 parser_cls=LegacyPythonCallbacksParser)

  # Create a Scheduler containing graph and filesystem tasks, with no installed goals. The
  # ExpGraph will explicitly request the products it needs.
  tasks = (create_legacy_graph_tasks()
           + create_fs_tasks()
           + create_graph_tasks(address_mapper, symbol_table_cls))

  scheduler = LocalScheduler(dict(), tasks, storage, project_tree)
  return scheduler, storage, options, spec_roots, symbol_table_cls
def setUp(self):
  """Copy the example build root into a temp dir and wire a scheduler over it."""
  self.work_dir = safe_mkdtemp()
  self.addCleanup(safe_rmtree, self.work_dir)
  self.build_root = os.path.join(self.work_dir, 'build_root')
  example_root = os.path.join(os.path.dirname(__file__), 'examples/mapper_test')
  shutil.copytree(example_root, self.build_root)

  subjects = Subjects()
  self._goal = 'list'
  table_cls = TargetTable
  tree_key = subjects.put(FileSystemProjectTree(self.build_root))
  mapper_key = subjects.put(AddressMapper(symbol_table_cls=table_cls,
                                          parser_cls=JsonParser,
                                          build_pattern=r'.+\.BUILD.json$'))
  tasks = create_fs_tasks(tree_key) + create_graph_tasks(mapper_key, table_cls)
  self.scheduler = LocalScheduler({self._goal: UnhydratedStruct}, tasks, subjects, table_cls)

  # Fixture expectations shared by the tests.
  self.a_b = Address.parse('a/b')
  self.a_b_target = Target(name='b',
                           dependencies=['//d:e'],
                           configurations=['//a', Struct(embedded='yes')],
                           type_alias='target')
def create(self, build_pattern=None, parser_cls=None, inline=False):
  """Build a LocalScheduler over fs and graph tasks for this test's build root.

  :param build_pattern: Optional regex identifying BUILD files.
  :param parser_cls: Optional parser class for BUILD file contents.
  :param inline: Unused here; accepted for signature compatibility.
  """
  table_cls = TestTable
  mapper = AddressMapper(symbol_table_cls=table_cls,
                         build_pattern=build_pattern,
                         parser_cls=parser_cls)
  all_tasks = create_fs_tasks(self._build_root) + create_graph_tasks(mapper, table_cls)
  return LocalScheduler({self._goal: self._product}, table_cls, all_tasks)
def mk_scheduler(self, tasks=None, goals=None, storage=None, project_tree=None,
                 symbol_table_cls=EmptyTable):
  """Creates a Scheduler with "native" tasks already included, and the given additional tasks."""
  if not goals:
    goals = dict()
  if not storage:
    storage = Storage.create(in_memory=True)
  if not project_tree:
    project_tree = self.mk_fs_tree()
  # Append the filesystem tasks to whatever the caller supplied.
  all_tasks = list(tasks or []) + create_fs_tasks()
  return LocalScheduler(goals, all_tasks, storage, project_tree), storage
def setUp(self):
  """Copy the example build root into a temp dir and construct a scheduler over it."""
  self.work_dir = safe_mkdtemp()
  self.addCleanup(safe_rmtree, self.work_dir)
  self.build_root = os.path.join(self.work_dir, 'build_root')
  examples = os.path.join(os.path.dirname(__file__), 'examples/mapper_test')
  shutil.copytree(examples, self.build_root)

  self._goal = 'list'
  table_cls = TargetTable
  self.address_mapper = AddressMapper(symbol_table_cls=table_cls,
                                      parser_cls=JsonParser,
                                      build_pattern=r'.+\.BUILD.json$')
  combined_tasks = (create_fs_tasks(self.build_root)
                    + create_graph_tasks(self.address_mapper, table_cls))
  self.scheduler = LocalScheduler({self._goal: UnhydratedStruct}, table_cls, combined_tasks)

  # Fixture expectations shared by the tests.
  self.a_b = Address.parse('a/b')
  self.a_b_target = Target(name='b',
                           dependencies=['//d:e'],
                           configurations=['//a', Struct(embedded='yes')])
def mk_scheduler(self, tasks=None, goals=None, storage=None, build_root_src=None,
                 symbol_table_cls=EmptyTable):
  """Creates a Scheduler with "native" tasks already included, and the given additional tasks."""
  if not goals:
    goals = dict()
  if not storage:
    storage = Storage.create(in_memory=True)

  # Stand up a throwaway build root, optionally seeded from build_root_src.
  work_dir = safe_mkdtemp()
  self.addCleanup(safe_rmtree, work_dir)
  build_root = os.path.join(work_dir, 'build_root')
  if build_root_src is None:
    os.mkdir(build_root)
  else:
    shutil.copytree(build_root_src, build_root)

  all_tasks = list(tasks or []) + create_fs_tasks()
  scheduler = LocalScheduler(goals, all_tasks, storage, FileSystemProjectTree(build_root))
  return scheduler, storage, build_root
def setup_json_scheduler(build_root, debug=True):
  """Return a build graph and scheduler configured for BLD.json files under the given build root.

  :param build_root: The directory to search for BLD.json files under.
  :param debug: Passed through to `Storage.create`.
  :rtype: A tuple of :class:`pants.engine.exp.scheduler.LocalScheduler`,
          :class:`pants.engine.exp.storage.Storage`.
  """
  storage = Storage.create(debug=debug)
  symbol_table_cls = ExampleTable

  # Register "literal" subjects required for these tasks.
  # TODO: Replace with `Subsystems`.
  address_mapper = AddressMapper(symbol_table_cls=symbol_table_cls,
                                 build_pattern=r'^BLD.json$',
                                 parser_cls=JsonParser)
  source_roots = SourceRoots(('src/java','src/scala'))
  scrooge_tool_address = Address.parse('src/scala/scrooge')

  # Maps each goal name to the product type that goal requests from the scheduler.
  goals = {
    'compile': Classpath,
    # TODO: to allow for running resolve alone, should split out a distinct 'IvyReport' product.
    'resolve': Classpath,
    'list': Address,
    GenGoal.name(): GenGoal,
    'unpickleable': UnpickleableResult,
    'ls': File,
    'cat': FileContent,
  }
  # Task registrations: (product type, selectors, task function) triples,
  # grouped by pipeline stage below.
  tasks = [
    # Codegen
    GenGoal.signature(),
    (JavaSources,
     [Select(ThriftSources),
      SelectVariant(ApacheThriftJavaConfiguration, 'thrift')],
     gen_apache_thrift),
    (PythonSources,
     [Select(ThriftSources),
      SelectVariant(ApacheThriftPythonConfiguration, 'thrift')],
     gen_apache_thrift),
    (ScalaSources,
     [Select(ThriftSources),
      SelectVariant(ScroogeScalaConfiguration, 'thrift'),
      SelectLiteral(scrooge_tool_address, Classpath)],
     gen_scrooge_thrift),
    (JavaSources,
     [Select(ThriftSources),
      SelectVariant(ScroogeJavaConfiguration, 'thrift'),
      SelectLiteral(scrooge_tool_address, Classpath)],
     gen_scrooge_thrift),
  ] + [
    # scala dependency inference
    (ScalaSources,
     [Select(ScalaInferredDepsSources),
      SelectDependencies(Address, ImportedJVMPackages)],
     reify_scala_sources),
    (ImportedJVMPackages,
     [SelectProjection(FilesContent, PathGlobs, ('path_globs',), ScalaInferredDepsSources)],
     extract_scala_imports),
    (Address,
     [Select(JVMPackageName),
      SelectDependencies(AddressFamily, Dirs)],
     select_package_address),
    (PathGlobs,
     [Select(JVMPackageName),
      SelectLiteral(source_roots, SourceRoots)],
     calculate_package_search_path),
  ] + [
    # Remote dependency resolution
    (Classpath, [Select(Jar)], ivy_resolve),
    (Jar, [Select(ManagedJar), SelectVariant(ManagedResolve, 'resolve')], select_rev),
  ] + [
    # Compilers
    (Classpath, [Select(ResourceSources)], isolate_resources),
    (Classpath, [Select(BuildPropertiesConfiguration)], write_name_file),
    (Classpath, [Select(JavaSources), SelectDependencies(Classpath, JavaSources)], javac),
    (Classpath, [Select(ScalaSources), SelectDependencies(Classpath, ScalaSources)], scalac),
  ] + [
    # TODO
    (UnpickleableOutput, [], unpickleable_output),
    (UnpickleableResult, [Select(UnpickleableOutput)], unpickleable_input),
  ] + (
    create_graph_tasks(address_mapper, symbol_table_cls)
  ) + (
    create_fs_tasks()
  )
  project_tree = FileSystemProjectTree(build_root)
  # NOTE(review): the fifth LocalScheduler argument is passed as None here — presumably an
  # optional slot preceding the validator; confirm against the LocalScheduler signature.
  return LocalScheduler(goals,
                        tasks,
                        storage,
                        project_tree,
                        None,
                        GraphValidator(symbol_table_cls)), storage
def setup_json_scheduler(build_root):
  """Return a build graph and scheduler configured for BLD.json files under the given build root.

  :param build_root: The directory to search for BLD.json files under.
  :rtype: tuple of (:class:`pants.engine.exp.graph.Graph`,
                    :class:`pants.engine.exp.scheduler.LocalScheduler`)
  """
  # Alias -> type bindings made available to BLD.json files.
  symbol_table = {
    'apache_thrift_java_configuration': ApacheThriftJavaConfiguration,
    'apache_thrift_python_configuration': ApacheThriftPythonConfiguration,
    'jar': Jar,
    'managed_jar': ManagedJar,
    'managed_resolve': ManagedResolve,
    'requirement': Requirement,
    'scrooge_java_configuration': ScroogeJavaConfiguration,
    'scrooge_scala_configuration': ScroogeScalaConfiguration,
    'java': JavaSources,
    'python': PythonSources,
    'resources': ResourceSources,
    'scala': ScalaSources,
    'thrift': ThriftSources,
    'target': Target,
    'build_properties': BuildPropertiesConfiguration
  }
  json_parser = functools.partial(parse_json, symbol_table=symbol_table)
  graph = Graph(AddressMapper(build_root=build_root,
                              build_pattern=r'^BLD.json$',
                              parser=json_parser))

  # TODO(John Sirois): once the options system is plumbed, make the tool spec configurable.
  # It could also just be pointed at the scrooge jar at that point.
  scrooge_tool_address = Address.parse('src/scala/scrooge')

  # Maps each goal name to the list of product types that goal requests.
  products_by_goal = {
    'compile': [Classpath],
    # TODO: to allow for running resolve alone, should split out a distinct 'IvyReport' product.
    'resolve': [Classpath],
    'gen': [JavaSources, PythonSources, ResourceSources, ScalaSources],
    'unpickleable': [UnpickleableResult],
  }
  # Task registrations: (product type, selectors, task function) triples.
  tasks = [
    (JavaSources,
     [Select(ThriftSources),
      SelectVariant('thrift', ApacheThriftJavaConfiguration)],
     gen_apache_thrift),
    (PythonSources,
     [Select(ThriftSources),
      SelectVariant('thrift', ApacheThriftPythonConfiguration)],
     gen_apache_thrift),
    (ScalaSources,
     [Select(ThriftSources),
      SelectVariant('thrift', ScroogeScalaConfiguration),
      SelectAddress(scrooge_tool_address, Classpath)],
     gen_scrooge_thrift),
    (JavaSources,
     [Select(ThriftSources),
      SelectVariant('thrift', ScroogeJavaConfiguration),
      SelectAddress(scrooge_tool_address, Classpath)],
     gen_scrooge_thrift),
    (Classpath, [Select(Jar)], ivy_resolve),
    (Jar, [Select(ManagedJar), SelectVariant('resolve', ManagedResolve)], select_rev),
    (Classpath, [Select(ResourceSources)], isolate_resources),
    (Classpath, [Select(BuildPropertiesConfiguration)], write_name_file),
    (Classpath, [Select(JavaSources), SelectDependencies(Classpath, JavaSources)], javac),
    (Classpath, [Select(ScalaSources), SelectDependencies(Classpath, ScalaSources)], scalac),
    (UnpickleableOutput, [], unpickleable_output),
    (UnpickleableResult, [Select(UnpickleableOutput)], unpickleable_input),
  ]
  scheduler = LocalScheduler(graph, products_by_goal, tasks)
  return graph, scheduler
def setup_json_scheduler(build_root):
  """Return a build graph and scheduler configured for BLD.json files under the given build root.

  :param build_root: The directory to search for BLD.json files under.
  :rtype: :class:`pants.engine.exp.scheduler.LocalScheduler`
  """
  address_mapper = AddressMapper(build_root=build_root,
                                 symbol_table_cls=ExampleTable,
                                 build_pattern=r'^BLD.json$',
                                 parser_cls=JsonParser)

  # TODO(John Sirois): once the options system is plumbed, make the tool spec configurable.
  # It could also just be pointed at the scrooge jar at that point.
  scrooge_tool_address = Address.parse('src/scala/scrooge')

  # TODO: Placeholder for the SourceRoot subsystem.
  source_roots = SourceRoots(build_root, ('src/java', ))

  # Maps each goal name to the list of product types that goal requests.
  products_by_goal = {
    'compile': [Classpath],
    # TODO: to allow for running resolve alone, should split out a distinct 'IvyReport' product.
    'resolve': [Classpath],
    'gen': [JavaSources, PythonSources, ResourceSources, ScalaSources],
    'unpickleable': [UnpickleableResult],
  }
  # Task registrations: (product type, selectors, task function) triples,
  # grouped by pipeline stage below.
  tasks = [
    # Codegen
    (JavaSources,
     [Select(ThriftSources),
      SelectVariant(ApacheThriftJavaConfiguration, 'thrift')],
     gen_apache_thrift),
    (PythonSources,
     [Select(ThriftSources),
      SelectVariant(ApacheThriftPythonConfiguration, 'thrift')],
     gen_apache_thrift),
    (ScalaSources,
     [Select(ThriftSources),
      SelectVariant(ScroogeScalaConfiguration, 'thrift'),
      SelectLiteral(scrooge_tool_address, Classpath)],
     gen_scrooge_thrift),
    (JavaSources,
     [Select(ThriftSources),
      SelectVariant(ScroogeJavaConfiguration, 'thrift'),
      SelectLiteral(scrooge_tool_address, Classpath)],
     gen_scrooge_thrift),
  ] + [
    # scala dependency inference
    (ScalaSources,
     [Select(Address),
      Select(ScalaInferredDepsSources),
      SelectDependencies(Address, ImportedJVMPackages)],
     reify_scala_sources),
    (ImportedJVMPackages,
     [Select(Address),
      Select(ScalaInferredDepsSources),
      SelectLiteral(source_roots, SourceRoots)],
     extract_scala_imports),
    # TODO: The request for an AddressFamily for each member of a SearchPath will fail whenever
    # a member of the path doesn't exist. Need to allow for optional products and to then
    # request the AddressFamilies optionally here.
    (Address,
     [Select(JVMPackageName),
      SelectDependencies(AddressFamily, SearchPath)],
     select_package_address),
    (SearchPath,
     [Select(JVMPackageName),
      SelectLiteral(source_roots, SourceRoots)],
     calculate_package_search_path),
  ] + [
    # Remote dependency resolution
    (Classpath, [Select(Jar)], ivy_resolve),
    (Jar, [Select(ManagedJar), SelectVariant(ManagedResolve, 'resolve')], select_rev),
  ] + [
    # Compilers
    (Classpath, [Select(ResourceSources)], isolate_resources),
    (Classpath, [Select(BuildPropertiesConfiguration)], write_name_file),
    (Classpath, [Select(JavaSources), SelectDependencies(Classpath, JavaSources)], javac),
    (Classpath, [Select(ScalaSources), SelectDependencies(Classpath, ScalaSources)], scalac),
  ] + [
    # TODO
    (UnpickleableOutput, [], unpickleable_output),
    (UnpickleableResult, [Select(UnpickleableOutput)], unpickleable_input),
  ] + (create_graph_tasks(address_mapper))

  scheduler = LocalScheduler(products_by_goal, tasks)
  return scheduler
class AddressMapperTest(unittest.TestCase):
  """Tests address resolution over a JSON BUILD-file tree copied into a temp dir."""

  def setUp(self):
    self.work_dir = safe_mkdtemp()
    self.addCleanup(safe_rmtree, self.work_dir)
    self.build_root = os.path.join(self.work_dir, 'build_root')
    example_dir = os.path.join(os.path.dirname(__file__), 'examples/mapper_test')
    shutil.copytree(example_dir, self.build_root)

    subjects = Subjects()
    self._goal = 'list'
    table_cls = TargetTable
    tree_key = subjects.put(FileSystemProjectTree(self.build_root))
    mapper_key = subjects.put(AddressMapper(symbol_table_cls=table_cls,
                                            parser_cls=JsonParser,
                                            build_pattern=r'.+\.BUILD.json$'))
    task_list = create_fs_tasks(tree_key) + create_graph_tasks(mapper_key, table_cls)
    self.scheduler = LocalScheduler({self._goal: UnhydratedStruct}, task_list, subjects, table_cls)

    # Fixture expectations shared by the tests below.
    self.a_b = Address.parse('a/b')
    self.a_b_target = Target(name='b',
                             dependencies=['//d:e'],
                             configurations=['//a', Struct(embedded='yes')],
                             type_alias='target')

  def resolve(self, spec):
    """Execute the 'list' goal for the given spec and return the root product value."""
    request = self.scheduler.build_request(goals=[self._goal], subjects=[spec])
    result = LocalSerialEngine(self.scheduler).execute(request)
    if result.error:
      raise result.error
    # Expect a single root.
    state, = result.root_products.values()
    if type(state) is Throw:
      raise state.exc
    return state.value

  def resolve_multi(self, spec):
    """Resolve the spec and index the resulting structs by address."""
    by_address = {}
    for uhs in self.resolve(spec):
      by_address[uhs.address] = uhs.struct
    return by_address

  def test_no_address_no_family(self):
    spec = SingleAddress('a/c', None)

    # Should fail: does not exist.
    with self.assertRaises(ResolveError):
      self.resolve(spec)

    # Exists on disk, but not yet in memory.
    # NB: Graph invalidation not yet implemented.
    build_file = os.path.join(self.build_root, 'a/c/c.BUILD.json')
    with safe_open(build_file, 'w') as fp:
      fp.write('{"type_alias": "struct", "name": "c"}')
    with self.assertRaises(ResolveError):
      self.resolve(spec)

    # Success.
    self.scheduler.product_graph.clear()
    resolved = self.resolve(spec)
    self.assertEqual(1, len(resolved))
    self.assertEqual(Struct(name='c', type_alias='struct'), resolved[0].struct)

  def test_resolve(self):
    resolved = self.resolve(SingleAddress('a/b', None))
    self.assertEqual(1, len(resolved))
    self.assertEqual(self.a_b, resolved[0].address)

  @staticmethod
  def addr(spec):
    return Address.parse(spec)

  def test_walk_addressables(self):
    expected = {
      self.addr('//:root'): Struct(name='root', type_alias='struct'),
      self.addr('a/b:b'): self.a_b_target,
      self.addr('a/d:d'): Target(name='d', type_alias='target'),
      self.addr('a/d/e:e'): Target(name='e', type_alias='target'),
      self.addr('a/d/e:e-prime'): Struct(name='e-prime', type_alias='struct'),
    }
    self.assertEqual(expected, self.resolve_multi(DescendantAddresses('')))

  def test_walk_addressables_rel_path(self):
    expected = {
      self.addr('a/d:d'): Target(name='d', type_alias='target'),
      self.addr('a/d/e:e'): Target(name='e', type_alias='target'),
      self.addr('a/d/e:e-prime'): Struct(name='e-prime', type_alias='struct'),
    }
    self.assertEqual(expected, self.resolve_multi(DescendantAddresses('a/d')))

  @pytest.mark.xfail(reason='''Excludes are not implemented: expects excludes=['a/b', 'a/d/e'])''')
  def test_walk_addressables_path_excludes(self):
    expected = {
      self.addr('//:root'): Struct(name='root'),
      self.addr('a/d:d'): Target(name='d'),
    }
    self.assertEqual(expected, self.resolve_multi(DescendantAddresses('')))