def setup(options=None):
  """Wire up a LocalScheduler plus its supporting objects from pants options.

  :param options: Parsed pants options; when omitted they are bootstrapped here.
  :returns: A tuple of (scheduler, storage, options, spec_roots, symbol_table_cls).
  """
  if not options:
    options, _ = OptionsInitializer(OptionsBootstrapper()).setup()

  build_root = get_buildroot()
  spec_parser = CmdLineSpecParser(build_root)
  spec_roots = [spec_parser.parse_spec(s) for s in options.target_specs]

  storage = Storage.create(debug=False)
  # Ignore any dotfile below build_root except . itself
  project_tree = FileSystemProjectTree(build_root, ['.*', 'build-support/*.venv/'])

  symbol_table_cls = LegacyTable
  # Register "literal" subjects required for these tasks.
  # TODO: Replace with `Subsystems`.
  address_mapper = AddressMapper(symbol_table_cls=symbol_table_cls,
                                 parser_cls=LegacyPythonCallbacksParser)

  # Create a Scheduler containing graph and filesystem tasks, with no installed goals.
  # The ExpGraph will explicitly request the products it needs.
  tasks = (create_legacy_graph_tasks() +
           create_fs_tasks() +
           create_graph_tasks(address_mapper, symbol_table_cls))

  scheduler = LocalScheduler(dict(), tasks, storage, project_tree)
  return scheduler, storage, options, spec_roots, symbol_table_cls
def setup():
  """Wire up a LocalScheduler from target specs given on the command line.

  :returns: A tuple of (scheduler, storage, spec_roots, symbol_table_cls).
  """
  build_root = get_buildroot()
  spec_parser = CmdLineSpecParser(build_root)
  spec_roots = [spec_parser.parse_spec(arg) for arg in sys.argv[1:]]

  storage = Storage.create(debug=False)
  project_tree = FileSystemProjectTree(build_root)

  symbol_table_cls = LegacyTable
  # Register "literal" subjects required for these tasks.
  # TODO: Replace with `Subsystems`.
  address_mapper = AddressMapper(symbol_table_cls=symbol_table_cls,
                                 parser_cls=LegacyPythonCallbacksParser)

  # Create a Scheduler containing graph and filesystem tasks, with no installed goals.
  # The ExpGraph will explicitly request the products it needs.
  tasks = (create_legacy_graph_tasks() +
           create_fs_tasks() +
           create_graph_tasks(address_mapper, symbol_table_cls))

  # NOTE(review): the sibling `setup(options=None)` passes `storage` as the third
  # LocalScheduler argument, while this revision passes `symbol_table_cls` and leaves
  # `storage` out of scheduler construction — confirm which LocalScheduler signature
  # this revision targets.
  scheduler = LocalScheduler(dict(), tasks, symbol_table_cls, project_tree)
  return scheduler, storage, spec_roots, symbol_table_cls
def _process_initializer(node_builder, storage):
  """Picklable top-level function supplying a worker process's initial state.

  The states are returned as a tuple; both are `Closable`, so they can be cleaned
  up once processes are done.
  """
  cloned_storage = Storage.clone(storage)
  return (node_builder, cloned_storage)
def setUp(self):
  """Create an in-memory Storage plus a sample StepResult and StepRequest."""
  self.storage = Storage.create(in_memory=True)
  self.result = StepResult(state='something')
  deps = {'some dep': 'some state', 'another dep': 'another state'}
  self.request = StepRequest(step_id=123,
                             node='some node',
                             dependencies=deps,
                             project_tree='some project tree')
def setup(options=None):
  """Wire up a LocalScheduler plus its supporting objects from pants options.

  :param options: Parsed pants options; when omitted they are bootstrapped here.
  :returns: A tuple of (scheduler, storage, options, spec_roots, symbol_table_cls).
  """
  if not options:
    options, _ = OptionsInitializer(OptionsBootstrapper()).setup()

  build_root = get_buildroot()
  spec_parser = CmdLineSpecParser(build_root)
  spec_roots = [spec_parser.parse_spec(s) for s in options.target_specs]

  storage = Storage.create(debug=False)
  project_tree = FileSystemProjectTree(build_root)

  symbol_table_cls = LegacyTable
  # Register "literal" subjects required for these tasks.
  # TODO: Replace with `Subsystems`.
  address_mapper = AddressMapper(symbol_table_cls=symbol_table_cls,
                                 parser_cls=LegacyPythonCallbacksParser)

  # Create a Scheduler containing graph and filesystem tasks, with no installed goals.
  # The ExpGraph will explicitly request the products it needs.
  tasks = (create_legacy_graph_tasks() +
           create_fs_tasks() +
           create_graph_tasks(address_mapper, symbol_table_cls))

  scheduler = LocalScheduler(dict(), tasks, storage, project_tree)
  return scheduler, storage, options, spec_roots, symbol_table_cls
def setUp(self):
  """Set up the cache (backed by an in-memory Storage) as well as a request and result."""
  self.storage = Storage.create(in_memory=True)
  self.cache = Cache.create(storage=self.storage)
  deps = {'some dep': 'some state', 'another dep': 'another state'}
  request = StepRequest(step_id=123,
                        node='some node',
                        dependencies=deps,
                        project_tree='some project tree')
  self.result = StepResult(state='something')
  # The keyed form is what the cache is addressed by.
  self.keyed_request = self.storage.key_for_request(request)
def setUp(self):
  """Build the fixtures shared by these tests: storage, a result, and a request."""
  self.storage = Storage.create(in_memory=True)
  self.result = StepResult(state='something')
  dependency_states = {
    'some dep': 'some state',
    'another dep': 'another state',
  }
  self.request = StepRequest(step_id=123,
                             node='some node',
                             dependencies=dependency_states,
                             project_tree='some project tree')
def setUp(self):
  """Set up the cache as well as a request and result for the tests to share."""
  self.storage = Storage.create(in_memory=True)
  self.cache = Cache.create(storage=self.storage)
  dependency_states = {
    'some dep': 'some state',
    'another dep': 'another state',
  }
  request = StepRequest(step_id=123,
                        node='some node',
                        dependencies=dependency_states,
                        project_tree='some project tree')
  self.result = StepResult(state='something')
  # The keyed form is what the cache is addressed by.
  self.keyed_request = self.storage.key_for_request(request)
def __init__(self, scheduler, storage=None, cache=None):
  """
  :param scheduler: The local scheduler for creating execution graphs.
  :type scheduler: :class:`pants.engine.exp.scheduler.LocalScheduler`
  :param storage: The storage instance for serializables keyed by their hashes.
  :type storage: :class:`pants.engine.exp.storage.Storage`
  :param cache: The cache instance for storing execution results, by default it uses the same
    Storage instance if not specified.
  :type cache: :class:`pants.engine.exp.storage.Cache`
  """
  self._scheduler = scheduler
  self._storage = storage or Storage.create()
  # Fix: default the Cache to the Storage actually in use (self._storage), not the raw
  # `storage` argument — previously, when `storage` was None, the cache was built around
  # None instead of the freshly created Storage, contradicting the docstring.
  self._cache = cache or Cache.create(storage=self._storage)
def mk_scheduler(self,
                 tasks=None,
                 goals=None,
                 storage=None,
                 project_tree=None,
                 symbol_table_cls=EmptyTable):
  """Creates a Scheduler with "native" tasks already included, and the given additional tasks.

  :returns: A tuple of (scheduler, storage).
  """
  goals = goals or dict()
  storage = storage or Storage.create(in_memory=True)
  project_tree = project_tree or self.mk_fs_tree()
  # Filesystem tasks are always installed alongside whatever the caller supplied.
  all_tasks = list(tasks or []) + create_fs_tasks()
  scheduler = LocalScheduler(goals, all_tasks, storage, project_tree)
  return scheduler, storage
def test_storage(self):
  """Round-trip a value through Storage and verify key and type invariants."""
  with closing(Storage.create(in_memory=True)) as storage:
    key = storage.put(self.TEST_PATH)
    # assertEquals is a deprecated alias; use assertEqual.
    self.assertEqual(self.TEST_PATH, storage.get(key))
    # The deserialized blob is equal to, but not the same object as, the input data.
    self.assertFalse(storage.get(key) is self.TEST_PATH)

    # Any other keys won't exist in the subjects.
    self.assertNotEqual(self.TEST_KEY, key)

    with self.assertRaises(InvalidKeyError):
      # The get itself must raise; the original wrapped it in a redundant assertFalse
      # that could never execute its assertion.
      storage.get(self.TEST_KEY)

    # Verify key and value's types must match.
    key._type = str
    with self.assertRaises(ValueError):
      storage.get(key)
def setUp(self):
  """Set up a scheduler that supports address mapping over the example build root."""
  table_cls = TargetTable
  self.storage = Storage.create(in_memory=True)
  mapper = AddressMapper(symbol_table_cls=table_cls,
                         parser_cls=JsonParser,
                         build_pattern=r'.+\.BUILD.json$')
  graph_tasks = create_graph_tasks(mapper, table_cls)
  example_root = os.path.join(os.path.dirname(__file__), 'examples/mapper_test')
  self.scheduler, _, self.build_root = self.mk_scheduler(tasks=graph_tasks,
                                                         build_root_src=example_root,
                                                         storage=self.storage,
                                                         symbol_table_cls=table_cls)
  # A known address and its expected Target for assertions.
  self.a_b = Address.parse('a/b')
  self.a_b_target = Target(name='b',
                           dependencies=['//d:e'],
                           configurations=['//a', Struct(embedded='yes')],
                           type_alias='target')
def mk_scheduler(self,
                 tasks=None,
                 goals=None,
                 storage=None,
                 build_root_src=None,
                 symbol_table_cls=EmptyTable):
  """Creates a Scheduler with "native" tasks already included, and the given additional tasks.

  :returns: A tuple of (scheduler, storage, build_root).
  """
  goals = goals or dict()
  storage = storage or Storage.create(in_memory=True)

  # Stage a throwaway build root, optionally seeded from build_root_src.
  work_dir = safe_mkdtemp()
  self.addCleanup(safe_rmtree, work_dir)
  build_root = os.path.join(work_dir, 'build_root')
  if build_root_src is None:
    os.mkdir(build_root)
  else:
    shutil.copytree(build_root_src, build_root)

  # Filesystem tasks are always installed alongside whatever the caller supplied.
  all_tasks = list(tasks or []) + create_fs_tasks()
  project_tree = FileSystemProjectTree(build_root)
  scheduler = LocalScheduler(goals, all_tasks, storage, project_tree)
  return scheduler, storage, build_root
def mk_scheduler(self,
                 tasks=None,
                 goals=None,
                 storage=None,
                 build_root_src=None,
                 symbol_table_cls=EmptyTable):
  """Creates a Scheduler with "native" tasks already included, and the given additional tasks.

  :returns: A tuple of (scheduler, storage, build_root).
  """
  goals = goals or dict()
  storage = storage or Storage.create(in_memory=True)

  # Stage a throwaway build root, optionally seeded from build_root_src.
  work_dir = safe_mkdtemp()
  self.addCleanup(safe_rmtree, work_dir)
  build_root = os.path.join(work_dir, 'build_root')
  if build_root_src is None:
    os.mkdir(build_root)
  else:
    shutil.copytree(build_root_src, build_root)

  # Filesystem tasks are always installed alongside whatever the caller supplied.
  all_tasks = list(tasks or []) + create_fs_tasks()
  project_tree = FileSystemProjectTree(build_root)
  # NOTE(review): this revision passes `symbol_table_cls` (not `storage`) as the third
  # LocalScheduler argument, so `storage` is only returned, never installed — confirm
  # which LocalScheduler signature this revision targets.
  scheduler = LocalScheduler(goals, all_tasks, symbol_table_cls, project_tree)
  return scheduler, storage, build_root
def setUp(self):
  """Run the base-class setup, then create the in-memory Storage shared by these tests."""
  super(GraphTestBase, self).setUp()
  self.storage = Storage.create(in_memory=True)
def setup_json_scheduler(build_root, debug=True):
  """Return a build graph and scheduler configured for BLD.json files under the given build root.

  :rtype A tuple of :class:`pants.engine.exp.scheduler.LocalScheduler`,
    :class:`pants.engine.exp.storage.Storage`.
  """
  storage = Storage.create(debug=debug)
  symbol_table_cls = ExampleTable

  # Register "literal" subjects required for these tasks.
  # TODO: Replace with `Subsystems`.
  address_mapper = AddressMapper(symbol_table_cls=symbol_table_cls,
                                 build_pattern=r'^BLD.json$',
                                 parser_cls=JsonParser)
  source_roots = SourceRoots(('src/java','src/scala'))
  scrooge_tool_address = Address.parse('src/scala/scrooge')

  # Maps each goal name to the product type it requests from the graph.
  goals = {
      'compile': Classpath,
      # TODO: to allow for running resolve alone, should split out a distinct 'IvyReport' product.
      'resolve': Classpath,
      'list': Address,
      GenGoal.name(): GenGoal,
      'unpickleable': UnpickleableResult,
      'ls': Path,
      'cat': FileContent,
    }

  # Each task entry is a (product, selectors, task-function) tuple; registration order
  # groups tasks by concern.
  tasks = [
      # Codegen
      GenGoal.signature(),
      (JavaSources,
       [Select(ThriftSources),
        SelectVariant(ApacheThriftJavaConfiguration, 'thrift')],
       gen_apache_thrift),
      (PythonSources,
       [Select(ThriftSources),
        SelectVariant(ApacheThriftPythonConfiguration, 'thrift')],
       gen_apache_thrift),
      (ScalaSources,
       [Select(ThriftSources),
        SelectVariant(ScroogeScalaConfiguration, 'thrift'),
        SelectLiteral(scrooge_tool_address, Classpath)],
       gen_scrooge_thrift),
      (JavaSources,
       [Select(ThriftSources),
        SelectVariant(ScroogeJavaConfiguration, 'thrift'),
        SelectLiteral(scrooge_tool_address, Classpath)],
       gen_scrooge_thrift),
    ] + [
      # scala dependency inference
      (ScalaSources,
       [Select(ScalaInferredDepsSources),
        SelectDependencies(Address, ImportedJVMPackages)],
       reify_scala_sources),
      (ImportedJVMPackages,
       [SelectProjection(FilesContent, PathGlobs, ('path_globs',), ScalaInferredDepsSources)],
       extract_scala_imports),
      (Address,
       [Select(JVMPackageName),
        SelectDependencies(AddressFamily, Paths)],
       select_package_address),
      (PathGlobs,
       [Select(JVMPackageName),
        SelectLiteral(source_roots, SourceRoots)],
       calculate_package_search_path),
    ] + [
      # Remote dependency resolution
      (Classpath,
       [Select(Jar)],
       ivy_resolve),
      (Jar,
       [Select(ManagedJar),
        SelectVariant(ManagedResolve, 'resolve')],
       select_rev),
    ] + [
      # Compilers
      (Classpath,
       [Select(ResourceSources)],
       isolate_resources),
      (Classpath,
       [Select(BuildPropertiesConfiguration)],
       write_name_file),
      (Classpath,
       [Select(JavaSources),
        SelectDependencies(Classpath, JavaSources)],
       javac),
      (Classpath,
       [Select(ScalaSources),
        SelectDependencies(Classpath, ScalaSources)],
       scalac),
    ] + [
      # TODO
      (UnpickleableOutput,
       [],
       unpickleable_output),
      (UnpickleableResult,
       [Select(UnpickleableOutput)],
       unpickleable_input),
    ] + (
      create_graph_tasks(address_mapper, symbol_table_cls)
    ) + (
      create_fs_tasks()
    )

  project_tree = FileSystemProjectTree(build_root)
  # NOTE(review): LocalScheduler here receives `symbol_table_cls` as its third argument
  # (this revision's signature); `storage` is returned alongside but not installed.
  return LocalScheduler(goals, tasks, symbol_table_cls, project_tree), storage
def setup_json_scheduler(build_root, debug=True):
  """Return a build graph and scheduler configured for BLD.json files under the given build root.

  :rtype A tuple of :class:`pants.engine.exp.scheduler.LocalScheduler`,
    :class:`pants.engine.exp.storage.Storage`.
  """
  storage = Storage.create(debug=debug)
  symbol_table_cls = ExampleTable

  # Register "literal" subjects required for these tasks.
  # TODO: Replace with `Subsystems`.
  address_mapper = AddressMapper(symbol_table_cls=symbol_table_cls,
                                 build_pattern=r'^BLD.json$',
                                 parser_cls=JsonParser)
  source_roots = SourceRoots(('src/java','src/scala'))
  scrooge_tool_address = Address.parse('src/scala/scrooge')

  # Maps each goal name to the product type it requests from the graph.
  goals = {
      'compile': Classpath,
      # TODO: to allow for running resolve alone, should split out a distinct 'IvyReport' product.
      'resolve': Classpath,
      'list': Address,
      GenGoal.name(): GenGoal,
      'unpickleable': UnpickleableResult,
      'ls': File,
      'cat': FileContent,
    }

  # Each task entry is a (product, selectors, task-function) tuple; registration order
  # groups tasks by concern.
  tasks = [
      # Codegen
      GenGoal.signature(),
      (JavaSources,
       [Select(ThriftSources),
        SelectVariant(ApacheThriftJavaConfiguration, 'thrift')],
       gen_apache_thrift),
      (PythonSources,
       [Select(ThriftSources),
        SelectVariant(ApacheThriftPythonConfiguration, 'thrift')],
       gen_apache_thrift),
      (ScalaSources,
       [Select(ThriftSources),
        SelectVariant(ScroogeScalaConfiguration, 'thrift'),
        SelectLiteral(scrooge_tool_address, Classpath)],
       gen_scrooge_thrift),
      (JavaSources,
       [Select(ThriftSources),
        SelectVariant(ScroogeJavaConfiguration, 'thrift'),
        SelectLiteral(scrooge_tool_address, Classpath)],
       gen_scrooge_thrift),
    ] + [
      # scala dependency inference
      (ScalaSources,
       [Select(ScalaInferredDepsSources),
        SelectDependencies(Address, ImportedJVMPackages)],
       reify_scala_sources),
      (ImportedJVMPackages,
       [SelectProjection(FilesContent, PathGlobs, ('path_globs',), ScalaInferredDepsSources)],
       extract_scala_imports),
      (Address,
       [Select(JVMPackageName),
        SelectDependencies(AddressFamily, Dirs)],
       select_package_address),
      (PathGlobs,
       [Select(JVMPackageName),
        SelectLiteral(source_roots, SourceRoots)],
       calculate_package_search_path),
    ] + [
      # Remote dependency resolution
      (Classpath,
       [Select(Jar)],
       ivy_resolve),
      (Jar,
       [Select(ManagedJar),
        SelectVariant(ManagedResolve, 'resolve')],
       select_rev),
    ] + [
      # Compilers
      (Classpath,
       [Select(ResourceSources)],
       isolate_resources),
      (Classpath,
       [Select(BuildPropertiesConfiguration)],
       write_name_file),
      (Classpath,
       [Select(JavaSources),
        SelectDependencies(Classpath, JavaSources)],
       javac),
      (Classpath,
       [Select(ScalaSources),
        SelectDependencies(Classpath, ScalaSources)],
       scalac),
    ] + [
      # TODO
      (UnpickleableOutput,
       [],
       unpickleable_output),
      (UnpickleableResult,
       [Select(UnpickleableOutput)],
       unpickleable_input),
    ] + (
      create_graph_tasks(address_mapper, symbol_table_cls)
    ) + (
      create_fs_tasks()
    )

  project_tree = FileSystemProjectTree(build_root)
  # This revision installs `storage` in the scheduler and additionally supplies a
  # GraphValidator built over the symbol table.
  return LocalScheduler(goals, tasks, storage, project_tree, None, GraphValidator(symbol_table_cls)), storage
def setUp(self):
  """Build a scheduler over the example scheduler inputs, plus a cache and a known address."""
  build_root = os.path.join(os.path.dirname(__file__), 'examples', 'scheduler_inputs')
  self.scheduler, self.storage = setup_json_scheduler(build_root, debug=True)
  # NOTE(review): the cache is backed by a brand-new Storage rather than self.storage —
  # presumably deliberate isolation for these tests, but confirm it should not share storage.
  self.cache = Cache.create(Storage.create())
  self.java = Address.parse('src/java/codegen/simple')