Example #1
0
    def test_full_graph_for_planner_example(self):
        """Build the full rule graph for the planner example and sanity-check its size."""
        table = TargetTable()
        mapper = AddressMapper(JsonParser(table), '*.BUILD.json')
        graph_text = self.create_full_graph(create_graph_rules(mapper) + create_fs_rules())

        print('---diagnostic------')
        print(graph_text)
        print('/---diagnostic------')

        # Scan the rendered graph, bucketing lines by the section marker most
        # recently seen ("internal entries" wins over "root entries").
        seen_root_marker = False
        seen_internal_marker = False
        root_lines = []
        internal_lines = []
        for graph_line in graph_text.splitlines():
            if graph_line.startswith('  // root subject types:'):
                continue
            if graph_line.startswith('  // root entries'):
                seen_root_marker = True
            elif graph_line.startswith('  // internal entries'):
                seen_internal_marker = True
            elif seen_internal_marker:
                internal_lines.append(graph_line)
            elif seen_root_marker:
                root_lines.append(graph_line)

        self.assertTrue(6 < len(internal_lines))
        self.assertTrue(12 < len(root_lines))  # 2 lines per entry
Example #2
0
 def assert_digest(self, filespecs_or_globs, expected_files):
   """Snapshot the given specs and verify the digested file set and fingerprint."""
   with self.mk_project_tree() as project_tree:
     scheduler = self.mk_scheduler(rules=create_fs_rules(), project_tree=project_tree)
     snapshot = self.execute(scheduler, Snapshot, self.specs(filespecs_or_globs))[0]
     # Every expected file must have been digested (order-insensitive comparison).
     self.assertEqual(set(expected_files), set(snapshot.files))
     self.assertIsNotNone(snapshot.directory_digest.fingerprint)
Example #3
0
 def assert_content(self, filespecs_or_globs, expected_content):
     """Read file content for the given specs and compare against expectations."""
     with self.mk_project_tree() as tree:
         sched = self.mk_scheduler(rules=create_fs_rules(), project_tree=tree)
         actual = self.read_file_content(sched, filespecs_or_globs)
         self.assertEqual(expected_content, actual)
Example #4
0
def setup_json_scheduler(build_root, native):
    """Return a build graph and scheduler configured for BLD.json files under the given build root.

  :rtype :class:`pants.engine.scheduler.LocalScheduler`
  """

    # Symbol table of target types understood by the example BLD.json files.
    symbol_table_cls = ExampleTable

    # Register "literal" subjects required for these tasks.
    # TODO: Replace with `Subsystems`.
    address_mapper = AddressMapper(symbol_table_cls=symbol_table_cls,
                                   build_patterns=('BLD.json', ),
                                   parser_cls=JsonParser)

    # Scheduler metadata lives under the conventional pants workdir.
    work_dir = os_path_join(build_root, '.pants.d')
    project_tree = FileSystemProjectTree(build_root)

    # Mapping of goal name -> the product type that goal computes.
    goals = {
        'compile': Classpath,
        # TODO: to allow for running resolve alone, should split out a distinct 'IvyReport' product.
        'resolve': Classpath,
        'list': BuildFileAddresses,
        GenGoal.name(): GenGoal,
        'ls': Snapshot,
        'cat': FilesContent,
    }
    # Task rules grouped by concern; graph and filesystem rules are appended last.
    tasks = [
        # Codegen
        GenGoal.rule(),
        gen_apache_java_thrift,
        gen_apache_python_thrift,
        gen_scrooge_scala_thrift,
        gen_scrooge_java_thrift,
        SingletonRule(Scrooge, Scrooge(Address.parse('src/scala/scrooge')))
    ] + [
        # scala dependency inference
        reify_scala_sources,
        extract_scala_imports,
        select_package_address,
        calculate_package_search_path,
        SingletonRule(SourceRoots, SourceRoots(('src/java', 'src/scala'))),
    ] + [
        # Remote dependency resolution
        ivy_resolve,
        select_rev,
    ] + [
        # Compilers
        isolate_resources,
        write_name_file,
        javac,
        scalac,
    ] + (create_graph_rules(address_mapper,
                            symbol_table_cls)) + (create_fs_rules())

    return LocalScheduler(work_dir,
                          goals,
                          tasks,
                          project_tree,
                          native,
                          graph_lock=None)
Example #5
0
    def assert_mutated_directory_digest(
            self, mutation_function: Callable[[FileSystemProjectTree, str],
                                              Exception]):
        """Mutate 'a/' via `mutation_function` and assert the directory digest changes.

        `mutation_function` returns the AssertionError to raise if the digest
        never changes within the backoff window.
        """
        with self.mk_project_tree() as project_tree:
            scheduler = self.mk_scheduler(rules=create_fs_rules(), project_tree=project_tree)
            dir_path = "a/"
            dir_glob = f"{dir_path}*"
            original = self.execute_expecting_one_result(
                scheduler, Snapshot, self.path_globs([dir_glob])).value
            assert not original.is_empty
            pending_error = mutation_function(project_tree, dir_path)

            def snapshot_changed():
                # Re-snapshot; True once invalidation produced a different digest.
                current = self.execute_expecting_one_result(
                    scheduler, Snapshot, self.path_globs([dir_glob])).value
                assert not current.is_empty
                return current.directory_digest != original.directory_digest

            if not self.try_with_backoff(snapshot_changed):
                raise pending_error
Example #6
0
    def test_file_content_invalidated_after_parent_deletion(self) -> None:
        """Test that FileContent is invalidated after deleting parent directory.

        Fixes: the failure message was an f-string with no placeholders (lint F541)
        and read as a sentence fragment; it now interpolates the paths involved.
        """

        with self.mk_project_tree() as project_tree:
            scheduler = self.mk_scheduler(
                rules=create_fs_rules(),
                project_tree=project_tree,
            )
            fname = "a/b/1.txt"
            # read the original file so we have nodes to invalidate.
            original_content = self.read_file_content(scheduler, [fname])
            self.assertIn(fname, original_content)
            path_to_parent_dir = os.path.join(project_tree.build_root, "a/b/")
            shutil.rmtree(path_to_parent_dir)

            def assertion_fn():
                # True once the engine no longer returns content for the deleted file.
                new_content = self.read_file_content(scheduler, [fname])
                return new_content.get(fname) is None

            if not self.try_with_backoff(assertion_fn):
                raise AssertionError(
                    f"Deleted parent dir {path_to_parent_dir} but could still read "
                    f"{fname} from the original snapshot."
                )
Example #7
0
    def test_file_content_invalidated(self) -> None:
        """Test that we can update files and have the native engine invalidate previous operations
        on those files.

        Fixes: the failure message's second/third lines lacked the f-prefix, so
        `{path_to_fname}` and `{new_content[fname]}` were emitted literally — and
        `new_content` was out of scope there anyway (local to `assertion_fn`).
        The last-read content is now captured so the message can report it.
        """

        with self.mk_project_tree() as project_tree:
            scheduler = self.mk_scheduler(
                rules=create_fs_rules(),
                project_tree=project_tree,
            )
            fname = "4.txt"
            new_data = "rouf"
            # read the original file so we have a cached value.
            self.read_file_content(scheduler, [fname])
            path_to_fname = os.path.join(project_tree.build_root, fname)
            with open(path_to_fname, "w") as f:
                f.write(new_data)

            # Capture the most recent read so the failure message can report it.
            last_content = {}

            def assertion_fn():
                last_content[fname] = self.read_file_content(scheduler, [fname])[fname]
                if last_content[fname].decode("utf-8") == new_data:
                    # successfully read new data
                    return True
                return False

            if not self.try_with_backoff(assertion_fn):
                raise AssertionError(
                    f"New content {new_data} was not found in the FilesContent of the "
                    f"modified file {path_to_fname}, instead we found {last_content.get(fname)}"
                )
Example #8
0
  def setUp(self):
    """Build a scheduler wired for JSON BUILD-file address mapping over the mapper_test examples."""
    table = TargetTable()
    mapper = AddressMapper(parser=JsonParser(table),
                           build_patterns=('*.BUILD.json',))
    rules = create_fs_rules() + create_graph_rules(mapper, table)
    # TODO handle updating the rule graph when passed unexpected root selectors.
    # Adding the following task allows us to get around the fact that SelectDependencies
    # requests are not currently supported.
    dependencies_selector = SelectDependencies(UnhydratedStruct,
                                               BuildFileAddresses,
                                               field_types=(Address,),
                                               field='addresses')
    rules.append(TaskRule(UnhydratedStructs, [dependencies_selector], UnhydratedStructs))

    tree = self.mk_fs_tree(os.path.join(os.path.dirname(__file__), 'examples/mapper_test'))
    self.build_root = tree.build_root
    self.scheduler = self.mk_scheduler(rules=rules, project_tree=tree)

    # Fixture target used by the assertions in this suite.
    self.a_b = Address.parse('a/b')
    self.a_b_target = Target(name='b',
                             dependencies=['//d:e'],
                             configurations=['//a', Struct(embedded='yes')],
                             type_alias='target')
Example #9
0
 def test_snapshot_from_outside_buildroot_failure(self):
   """Capturing a snapshot from a nonexistent root raises, naming the missing path."""
   with temporary_dir() as temp_dir:
     scheduler = self.mk_scheduler(rules=create_fs_rules())
     missing_root = text_type(os.path.join(temp_dir, "doesnotexist"))
     with self.assertRaises(Exception) as cm:
       scheduler.capture_snapshots((PathGlobsAndRoot(PathGlobs(("*",), ()), missing_root),))
     self.assertIn("doesnotexist", str(cm.exception))
Example #10
0
  def setUp(self):
    """Build a scheduler wired for JSON BUILD-file address mapping over the mapper_test examples."""
    table_cls = TargetTable
    mapper = AddressMapper(symbol_table_cls=table_cls,
                           parser_cls=JsonParser,
                           build_patterns=('*.BUILD.json',))
    rules = create_fs_rules() + create_graph_rules(mapper, table_cls)
    # TODO handle updating the rule graph when passed unexpected root selectors.
    # Adding the following task allows us to get around the fact that SelectDependencies
    # requests are not currently supported.
    dependencies_selector = SelectDependencies(UnhydratedStruct,
                                               BuildFileAddresses,
                                               field_types=(Address,),
                                               field='addresses')
    rules.append(TaskRule(UnhydratedStructs, [dependencies_selector], UnhydratedStructs))

    tree = self.mk_fs_tree(os.path.join(os.path.dirname(__file__), 'examples/mapper_test'))
    self.build_root = tree.build_root
    self.scheduler = self.mk_scheduler(rules=rules, project_tree=tree)

    # Fixture target used by the assertions in this suite.
    self.a_b = Address.parse('a/b')
    self.a_b_target = Target(name='b',
                             dependencies=['//d:e'],
                             configurations=['//a', Struct(embedded='yes')],
                             type_alias='target')
Example #11
0
  def test_full_graph_for_planner_example(self):
    """Build the full rule graph for the planner example and check its exact size.

    Fixed: ``assertEquals`` is a deprecated alias of ``assertEqual``.
    """
    symbol_table_cls = TargetTable
    address_mapper = AddressMapper(symbol_table_cls, JsonParser, '*.BUILD.json')
    rules = create_graph_rules(address_mapper, symbol_table_cls) + create_fs_rules()

    rule_index = RuleIndex.create(rules)
    fullgraph_str = self.create_full_graph(rule_index)

    print('---diagnostic------')
    print(fullgraph_str)
    print('/---diagnostic------')

    # Bucket graph lines by the most recently seen section marker.
    in_root_rules = False
    in_all_rules = False
    all_rules = []
    root_rule_lines = []
    for line in fullgraph_str.splitlines():
      if line.startswith('  // root subject types:'):
        pass
      elif line.startswith('  // root entries'):
        in_root_rules = True
      elif line.startswith('  // internal entries'):
        in_all_rules = True
      elif in_all_rules:
        all_rules.append(line)
      elif in_root_rules:
        root_rule_lines.append(line)
      else:
        pass

    self.assertEqual(36, len(all_rules))
    self.assertEqual(66, len(root_rule_lines))  # 2 lines per entry
Example #12
0
    def test_full_graph_for_planner_example(self):
        """Build the full rule graph for the planner example and sanity-check its size."""
        mapper = AddressMapper(JsonParser(TARGET_TABLE), "*.BUILD.json")
        graph_text = self.create_full_graph(create_graph_rules(mapper) + create_fs_rules())

        print("---diagnostic------")
        print(graph_text)
        print("/---diagnostic------")

        # Bucket graph lines by the most recently seen section marker
        # ("internal entries" wins over "root entries").
        seen_root_marker = False
        seen_internal_marker = False
        root_lines = []
        internal_lines = []
        for graph_line in graph_text.splitlines():
            if graph_line.startswith("  // root subject types:"):
                continue
            if graph_line.startswith("  // root entries"):
                seen_root_marker = True
            elif graph_line.startswith("  // internal entries"):
                seen_internal_marker = True
            elif seen_internal_marker:
                internal_lines.append(graph_line)
            elif seen_root_marker:
                root_lines.append(graph_line)

        self.assertTrue(6 < len(internal_lines))
        self.assertTrue(12 < len(root_lines))  # 2 lines per entry
Example #13
0
 def assert_content(self, filespecs_or_globs, expected_content):
   """Snapshot the specs, fetch FilesContent for the digest, and compare path->content."""
   with self.mk_project_tree() as project_tree:
     scheduler = self.mk_scheduler(rules=create_fs_rules(), project_tree=project_tree)
     snapshot = self.execute_expecting_one_result(scheduler, Snapshot, self.specs(filespecs_or_globs)).value
     files_content = self.execute_expecting_one_result(scheduler, FilesContent, snapshot.directory_digest).value
     self.assertEqual(expected_content, {fc.path: fc.content for fc in files_content.dependencies})
Example #14
0
  def test_full_graph_for_planner_example(self):
    """Build the full rule graph for the planner example and sanity-check its size."""
    mapper = AddressMapper(JsonParser(TARGET_TABLE), '*.BUILD.json')
    graph_text = self.create_full_graph(create_graph_rules(mapper) + create_fs_rules())

    print('---diagnostic------')
    print(graph_text)
    print('/---diagnostic------')

    # Bucket graph lines by the most recently seen section marker
    # ("internal entries" wins over "root entries").
    seen_root_marker = False
    seen_internal_marker = False
    root_lines = []
    internal_lines = []
    for graph_line in graph_text.splitlines():
      if graph_line.startswith('  // root subject types:'):
        continue
      if graph_line.startswith('  // root entries'):
        seen_root_marker = True
      elif graph_line.startswith('  // internal entries'):
        seen_internal_marker = True
      elif seen_internal_marker:
        internal_lines.append(graph_line)
      elif seen_root_marker:
        root_lines.append(graph_line)

    self.assertTrue(6 < len(internal_lines))
    self.assertTrue(12 < len(root_lines))  # 2 lines per entry
Example #15
0
 def assert_walk_snapshot(self, field, filespecs_or_globs, paths, ignore_patterns=None, prepare=None):
   """Snapshot the specs and compare the named snapshot field to the expected paths."""
   with self.mk_project_tree(ignore_patterns=ignore_patterns) as project_tree:
     scheduler = self.mk_scheduler(rules=create_fs_rules(), project_tree=project_tree)
     if prepare:
       prepare(project_tree)
     snapshot = self.execute(scheduler, Snapshot, self.specs(filespecs_or_globs))[0]
     self.assertEqual(sorted(getattr(snapshot, field)), sorted(paths))
Example #16
0
 def test_multiple_snapshots_from_outside_buildroot(self):
     """Several PathGlobsAndRoot requests are captured in one call, in order."""
     with temporary_dir() as temp_dir:
         with open(os.path.join(temp_dir, "roland"), "w") as f:
             f.write("European Burmese")
         with open(os.path.join(temp_dir, "susannah"), "w") as f:
             f.write("I don't know")
         scheduler = self.mk_scheduler(rules=create_fs_rules())
         requests = (
             PathGlobsAndRoot(PathGlobs(["roland"]), temp_dir),
             PathGlobsAndRoot(PathGlobs(["susannah"]), temp_dir),
             PathGlobsAndRoot(PathGlobs(["doesnotexist"]), temp_dir),
         )
         snapshots = scheduler.capture_snapshots(requests)
         self.assertEqual(3, len(snapshots))
         roland_digest = Digest(
             "63949aa823baf765eff07b946050d76ec0033144c785a94d3ebd82baa931cd16", 80)
         susannah_digest = Digest(
             "d3539cfc21eb4bab328ca9173144a8e932c515b1b9e26695454eeedbc5a95f6f", 82)
         self.assert_snapshot_equals(snapshots[0], ["roland"], roland_digest)
         self.assert_snapshot_equals(snapshots[1], ["susannah"], susannah_digest)
         # The non-matching glob yields the empty digest rather than an error.
         self.assert_snapshot_equals(snapshots[2], [], EMPTY_DIRECTORY_DIGEST)
Example #17
0
 def test_snapshot_from_outside_buildroot_failure(self):
     """Capturing a snapshot from a nonexistent root raises, naming the missing path."""
     with temporary_dir() as temp_dir:
         scheduler = self.mk_scheduler(rules=create_fs_rules())
         missing_root = str(os.path.join(temp_dir, "doesnotexist"))
         with self.assertRaises(Exception) as cm:
             scheduler.capture_snapshots(
                 (PathGlobsAndRoot(PathGlobs(("*", ), ()), missing_root), ))
         self.assertIn("doesnotexist", str(cm.exception))
Example #18
0
  def create(self, build_patterns=None, parser=None):
    """Build a scheduler over the bundled 'examples' tree for the given BUILD config."""
    mapper = AddressMapper(build_patterns=build_patterns, parser=parser)
    all_rules = (create_fs_rules()
                 + create_graph_rules(mapper)
                 + [SingletonRule(SymbolTable, TestTable())])
    tree = self.mk_fs_tree(os.path.join(os.path.dirname(__file__), 'examples'))
    return self.mk_scheduler(rules=all_rules, project_tree=tree)
Example #19
0
 def assert_content(self, filespecs, expected_content):
     """Execute FilesContent for the given filespecs and compare path->content.

     Fixed: ``assertEquals`` is a deprecated alias of ``assertEqual``.
     """
     with self.mk_project_tree() as project_tree:
         scheduler = self.mk_scheduler(rules=create_fs_rules(),
                                       project_tree=project_tree)
         result = self.execute(scheduler, FilesContent,
                               self.specs('', *filespecs))[0]
         actual_content = {f.path: f.content for f in result.dependencies}
         self.assertEqual(expected_content, actual_content)
Example #20
0
  def setup_legacy_graph(pants_ignore_patterns,
                         workdir,
                         build_root=None,
                         native=None,
                         symbol_table_cls=None,
                         build_ignore_patterns=None,
                         exclude_target_regexps=None,
                         subproject_roots=None,
                         include_trace_on_error=True):
    """Construct and return the components necessary for LegacyBuildGraph construction.

    :param list pants_ignore_patterns: A list of path ignore patterns for FileSystemProjectTree,
                                       usually taken from the '--pants-ignore' global option.
    :param str workdir: The pants workdir.
    :param str build_root: A path to be used as the build root. If None, then default is used.
    :param Native native: An instance of the native-engine subsystem.
    :param SymbolTable symbol_table_cls: A SymbolTable class to use for build file parsing, or
                                         None to use the default.
    :param list build_ignore_patterns: A list of paths ignore patterns used when searching for BUILD
                                       files, usually taken from the '--build-ignore' global option.
    :param list exclude_target_regexps: A list of regular expressions for excluding targets.
    :param list subproject_roots: Paths that correspond with embedded build roots
                                  under the current build root.
    :param bool include_trace_on_error: If True, when an error occurs, the error message will
                include the graph trace.
    :returns: A tuple of (scheduler, engine, symbol_table_cls, build_graph_cls).
    """

    # Fall back to process-level defaults when the caller did not provide them.
    build_root = build_root or get_buildroot()
    scm = get_scm()
    symbol_table_cls = symbol_table_cls or LegacySymbolTable

    project_tree = FileSystemProjectTree(build_root, pants_ignore_patterns)

    # Register "literal" subjects required for these tasks.
    # TODO: Replace with `Subsystems`.
    address_mapper = AddressMapper(symbol_table_cls=symbol_table_cls,
                                   parser_cls=LegacyPythonCallbacksParser,
                                   build_ignore_patterns=build_ignore_patterns,
                                   exclude_target_regexps=exclude_target_regexps,
                                   subproject_roots=subproject_roots)

    # Load the native backend.
    native = native or Native.create()

    # Create a Scheduler containing graph and filesystem tasks, with no installed goals. The
    # LegacyBuildGraph will explicitly request the products it needs.
    tasks = (
      create_legacy_graph_tasks(symbol_table_cls) +
      create_fs_rules() +
      create_graph_rules(address_mapper, symbol_table_cls)
    )

    # TODO: Do not use the cache yet, as it incurs a high overhead.
    scheduler = LocalScheduler(workdir, dict(), tasks, project_tree, native, include_trace_on_error=include_trace_on_error)
    # Change calculation requires an SCM; without one it is disabled (None).
    change_calculator = EngineChangeCalculator(scheduler, symbol_table_cls, scm) if scm else None

    return LegacyGraphHelper(scheduler, symbol_table_cls, change_calculator)
Example #21
0
    def setup_legacy_graph(pants_ignore_patterns,
                           workdir,
                           build_root=None,
                           native=None,
                           symbol_table_cls=None,
                           build_ignore_patterns=None,
                           exclude_target_regexps=None,
                           subproject_roots=None):
        """Construct and return the components necessary for LegacyBuildGraph construction.

    :param list pants_ignore_patterns: A list of path ignore patterns for FileSystemProjectTree,
                                       usually taken from the '--pants-ignore' global option.
    :param str workdir: The pants workdir.
    :param str build_root: A path to be used as the build root. If None, then default is used.
    :param Native native: An instance of the native-engine subsystem.
    :param SymbolTable symbol_table_cls: A SymbolTable class to use for build file parsing, or
                                         None to use the default.
    :param list build_ignore_patterns: A list of paths ignore patterns used when searching for BUILD
                                       files, usually taken from the '--build-ignore' global option.
    :param list exclude_target_regexps: A list of regular expressions for excluding targets.
    :param list subproject_roots: Paths that correspond with embedded build roots
                                  under the current build root.
    :returns: A tuple of (scheduler, engine, symbol_table_cls, build_graph_cls).
    """

        # Fall back to process-level defaults when the caller did not provide them.
        build_root = build_root or get_buildroot()
        scm = get_scm()
        symbol_table_cls = symbol_table_cls or LegacySymbolTable

        project_tree = FileSystemProjectTree(build_root, pants_ignore_patterns)

        # Register "literal" subjects required for these tasks.
        # TODO: Replace with `Subsystems`.
        address_mapper = AddressMapper(
            symbol_table_cls=symbol_table_cls,
            parser_cls=LegacyPythonCallbacksParser,
            build_ignore_patterns=build_ignore_patterns,
            exclude_target_regexps=exclude_target_regexps,
            subproject_roots=subproject_roots)

        # Load the native backend.
        native = native or Native.Factory.global_instance().create()

        # Create a Scheduler containing graph and filesystem tasks, with no installed goals. The
        # LegacyBuildGraph will explicitly request the products it needs.
        tasks = (create_legacy_graph_tasks(symbol_table_cls) +
                 create_fs_rules() +
                 create_graph_rules(address_mapper, symbol_table_cls))

        # TODO: Do not use the cache yet, as it incurs a high overhead.
        scheduler = LocalScheduler(workdir, dict(), tasks, project_tree,
                                   native)
        engine = LocalSerialEngine(scheduler, use_cache=False)
        # Change calculation requires an SCM; without one it is disabled (None).
        change_calculator = EngineChangeCalculator(
            scheduler, engine, symbol_table_cls, scm) if scm else None

        return LegacyGraphHelper(scheduler, engine, symbol_table_cls,
                                 change_calculator)
Example #22
0
  def create(self, build_patterns=None, parser=None):
    """Build a scheduler over the bundled 'examples' tree for the given BUILD config."""
    mapper = AddressMapper(build_patterns=build_patterns, parser=parser)
    # The parser carries the symbol table used for graph-rule registration.
    table = mapper.parser.symbol_table
    all_rules = create_fs_rules() + create_graph_rules(mapper, table)
    tree = self.mk_fs_tree(os.path.join(os.path.dirname(__file__), 'examples'))
    return self.mk_scheduler(rules=all_rules, project_tree=tree)
Example #23
0
 def assert_walk_snapshot(
     self, field, filespecs_or_globs, paths, ignore_patterns=None, prepare=None
 ):
     """Snapshot the given globs and compare the requested snapshot field to `paths`."""
     with self.mk_project_tree(ignore_patterns=ignore_patterns) as tree:
         sched = self.mk_scheduler(rules=create_fs_rules(), project_tree=tree)
         if prepare:
             prepare(tree)
         snapshot = self.execute(sched, Snapshot, self.path_globs(filespecs_or_globs))[0]
         self.assertEqual(sorted(getattr(snapshot, field)), sorted(paths))
Example #24
0
  def create(self, build_patterns=None, parser=None):
    """Return a scheduler for the bundled 'examples' tree using the given BUILD config."""
    address_mapper = AddressMapper(build_patterns=build_patterns, parser=parser)
    # The parser carries the symbol table used for graph-rule registration.
    symbol_table = address_mapper.parser.symbol_table
    combined = create_fs_rules() + create_graph_rules(address_mapper, symbol_table)
    examples_dir = os.path.join(os.path.dirname(__file__), 'examples')
    return self.mk_scheduler(rules=combined, project_tree=self.mk_fs_tree(examples_dir))
Example #25
0
 def assert_digest(self, filespecs_or_globs, expected_files):
     """Snapshot the specs and verify the digested file set and fingerprint."""
     with self.mk_project_tree() as tree:
         sched = self.mk_scheduler(rules=create_fs_rules(), project_tree=tree)
         snapshot = self.execute(sched, Snapshot, self.specs(filespecs_or_globs))[0]
         # Confirm all expected files were digested.
         self.assertEqual(set(expected_files), set(snapshot.files))
         self.assertIsNotNone(snapshot.directory_digest.fingerprint)
Example #26
0
 def assert_content(self, filespecs_or_globs, expected_content):
     """Snapshot the specs, read FilesContent from the digest, and compare.

     Fixed: ``assertEquals`` is a deprecated alias of ``assertEqual``.
     """
     with self.mk_project_tree() as project_tree:
         scheduler = self.mk_scheduler(rules=create_fs_rules(),
                                       project_tree=project_tree)
         snapshot = self.execute_expecting_one_result(
             scheduler, Snapshot, self.specs(filespecs_or_globs)).value
         result = self.execute_expecting_one_result(
             scheduler, FilesContent, snapshot.directory_digest).value
         actual_content = {f.path: f.content for f in result.dependencies}
         self.assertEqual(expected_content, actual_content)
Example #27
0
  def assert_fsnodes(self, filespecs_or_globs, subject_product_pairs):
    """Walk the product graph for the request and check reachable FS nodes.

    Fixed: ``assertEquals`` is a deprecated alias of ``assertEqual``.
    NOTE(review): the ``type(n) is "..."`` comparison below is a placeholder that
    is always False (it compares a type to a str); it is left verbatim pending a
    real FS-intrinsic filter — confirm intent before changing it.
    """
    with self.mk_project_tree() as project_tree:
      scheduler = self.mk_scheduler(rules=create_fs_rules(), project_tree=project_tree)
      request = self.execute_request(scheduler, Snapshot, self.specs(filespecs_or_globs))

      # Validate that FilesystemNodes for exactly the given subjects are reachable under this
      # request.
      fs_nodes = [n for n, _ in scheduler.product_graph.walk(roots=request.roots)
                  if type(n) is "TODO: need a new way to filter for FS intrinsics"]
      self.assertEqual(set((n.subject, n.product) for n in fs_nodes), set(subject_product_pairs))
Example #28
0
 def test_snapshot_from_outside_buildroot(self):
   """A snapshot can be captured relative to an arbitrary root outside the buildroot."""
   with temporary_dir() as temp_dir:
     with open(os.path.join(temp_dir, "roland"), "w") as f:
       f.write("European Burmese")
     scheduler = self.mk_scheduler(rules=create_fs_rules())
     snapshot = scheduler.capture_snapshots(
       (PathGlobsAndRoot(PathGlobs(("*",), ()), temp_dir),))[0]
     expected_digest = Digest(
       "63949aa823baf765eff07b946050d76ec0033144c785a94d3ebd82baa931cd16", 80)
     self.assert_snapshot_equals(snapshot, ["roland"], expected_digest)
Example #29
0
  def create(self, build_patterns=None, parser=None):
    """Build a scheduler over the bundled 'examples' tree for the given BUILD config."""
    mapper = AddressMapper(build_patterns=build_patterns, parser=parser)

    @rule
    def symbol_table_singleton() -> SymbolTable:
      # Constant SymbolTable provider for the test graph.
      return TEST_TABLE

    all_rules = create_fs_rules() + create_graph_rules(mapper) + [symbol_table_singleton]
    tree = self.mk_fs_tree(os.path.join(os.path.dirname(__file__), 'examples'))
    return self.mk_scheduler(rules=all_rules, project_tree=tree)
Example #30
0
  def create(self, build_patterns=None, parser=None):
    """Build a scheduler over the bundled 'examples' tree for the given BUILD config."""
    mapper = AddressMapper(build_patterns=build_patterns, parser=parser)

    @rule(SymbolTable, [])
    def symbol_table_singleton():
      # Constant SymbolTable provider for the test graph.
      return TEST_TABLE

    all_rules = create_fs_rules() + create_graph_rules(mapper) + [symbol_table_singleton]
    tree = self.mk_fs_tree(os.path.join(os.path.dirname(__file__), 'examples'))
    return self.mk_scheduler(rules=all_rules, project_tree=tree)
Example #31
0
 def test_snapshot_from_outside_buildroot(self):
   """A snapshot can be captured relative to an arbitrary root outside the buildroot."""
   with temporary_dir() as temp_dir:
     with open(os.path.join(temp_dir, "roland"), "w") as f:
       f.write("European Burmese")
     scheduler = self.mk_scheduler(rules=create_fs_rules())
     request = PathGlobsAndRoot(PathGlobs(("*",), ()), text_type(temp_dir))
     snapshot = scheduler.capture_snapshots((request,))[0]
     expected_digest = Digest(
       text_type("63949aa823baf765eff07b946050d76ec0033144c785a94d3ebd82baa931cd16"), 80)
     self.assert_snapshot_equals(snapshot, ["roland"], expected_digest)
Example #32
0
def setup_json_scheduler(build_root, native):
  """Return a build graph and scheduler configured for BLD.json files under the given build root.

  :rtype :class:`pants.engine.scheduler.SchedulerSession`
  """

  # Symbol table of target types understood by the example BLD.json files.
  symbol_table = ExampleTable()

  # Register "literal" subjects required for these rules.
  address_mapper = AddressMapper(build_patterns=('BLD.json',),
                                 parser=JsonParser(symbol_table))

  # Scheduler metadata lives under the conventional pants workdir.
  work_dir = os_path_join(build_root, '.pants.d')
  project_tree = FileSystemProjectTree(build_root)

  # Rules grouped by concern; graph and filesystem rules are appended last.
  rules = [
      # Codegen
      GenGoal.rule(),
      gen_apache_java_thrift,
      gen_apache_python_thrift,
      gen_scrooge_scala_thrift,
      gen_scrooge_java_thrift,
      SingletonRule(Scrooge, Scrooge(Address.parse('src/scala/scrooge')))
    ] + [
      # scala dependency inference
      reify_scala_sources,
      select_package_address,
      calculate_package_search_path,
      SingletonRule(SourceRoots, SourceRoots(('src/java','src/scala'))),
    ] + [
      # Remote dependency resolution
      ivy_resolve,
      select_rev,
    ] + [
      # Compilers
      isolate_resources,
      write_name_file,
      javac,
      scalac,
    ] + (
      create_graph_rules(address_mapper, symbol_table)
    ) + (
      create_fs_rules()
    )

  # NOTE(review): the two trailing None arguments are optional Scheduler
  # parameters left unset here — confirm their meaning against Scheduler's signature.
  scheduler = Scheduler(native,
                        project_tree,
                        work_dir,
                        rules,
                        DEFAULT_EXECUTION_OPTIONS,
                        None,
                        None)
  return scheduler.new_session()
Example #33
0
 def mk_scheduler(self,
                  tasks=None,
                  goals=None,
                  project_tree=None,
                  work_dir=None):
   """Creates a Scheduler with "native" tasks already included, and the given additional tasks."""
   work_dir = work_dir or self._create_work_dir()
   project_tree = project_tree or self.mk_fs_tree(work_dir=work_dir)
   # Filesystem rules are always appended to whatever the caller supplied.
   all_tasks = list(tasks or []) + create_fs_rules()
   return LocalScheduler(work_dir, goals or dict(), all_tasks, project_tree, self._native)
Example #34
0
 def prime_store_with_roland_digest(self):
   """This method primes the store with a directory of a file named 'roland' and contents 'European Burmese'."""
   with temporary_dir() as temp_dir:
     with open(os.path.join(temp_dir, "roland"), "w") as f:
       f.write("European Burmese")
     scheduler = self.mk_scheduler(rules=create_fs_rules())
     request = PathGlobsAndRoot(PathGlobs(("*",), ()), text_type(temp_dir))
     snapshot = scheduler.capture_snapshots((request,))[0]
     expected_digest = DirectoryDigest(
       text_type("63949aa823baf765eff07b946050d76ec0033144c785a94d3ebd82baa931cd16"), 80)
     self.assert_snapshot_equals(snapshot, ["roland"], expected_digest)
Example #35
0
    def create(self, build_patterns=None, parser=None) -> SchedulerSession:
        """Build a SchedulerSession over the bundled examples tree for the given BUILD config."""
        address_mapper = AddressMapper(
            parser=parser, prelude_glob_patterns=(), build_patterns=build_patterns
        )

        @rule
        def symbol_table_singleton() -> SymbolTable:
            # Constant SymbolTable provider for the test graph.
            return TEST_TABLE

        all_rules = create_fs_rules() + create_graph_rules(address_mapper) + [symbol_table_singleton]
        examples_dir = os.path.join(os.path.dirname(__file__), "examples")
        tree = self.mk_fs_tree(examples_dir)
        return cast(SchedulerSession, self.mk_scheduler(rules=all_rules, project_tree=tree))
Example #36
0
def setup_json_scheduler(build_root, native):
  """Return a build graph and scheduler configured for BLD.json files under the given build root.

  :param build_root: Directory under which BLD.json build files are discovered.
  :param native: The native engine backend instance passed through to the Scheduler.
  :rtype :class:`pants.engine.scheduler.SchedulerSession`
  """

  symbol_table = ExampleTable()

  # Register "literal" subjects required for these rules.
  address_mapper = AddressMapper(build_patterns=('BLD.json',),
                                 parser=JsonParser(symbol_table))

  work_dir = os_path_join(build_root, '.pants.d')
  project_tree = FileSystemProjectTree(build_root)

  # The rule set is assembled in groups; ordering within the final list is preserved.
  rules = [
      # Codegen
      GenGoal.rule(),
      gen_apache_java_thrift,
      gen_apache_python_thrift,
      gen_scrooge_scala_thrift,
      gen_scrooge_java_thrift,
      SingletonRule(Scrooge, Scrooge(Address.parse('src/scala/scrooge')))
    ] + [
      # scala dependency inference
      reify_scala_sources,
      select_package_address,
      calculate_package_search_path,
      SingletonRule(SourceRoots, SourceRoots(('src/java','src/scala'))),
    ] + [
      # Remote dependency resolution
      ivy_resolve,
      select_rev,
    ] + [
      # Compilers
      isolate_resources,
      write_name_file,
      javac,
      scalac,
    ] + (
      create_graph_rules(address_mapper, symbol_table)
    ) + (
      create_fs_rules()
    )

  # NOTE(review): the two trailing None args look like optional remote store/execution
  # endpoints — confirm against the Scheduler constructor signature.
  scheduler = Scheduler(native,
                        project_tree,
                        work_dir,
                        rules,
                        DEFAULT_EXECUTION_OPTIONS,
                        None,
                        None)
  return scheduler.new_session()
Example #37
0
 def mk_scheduler(self,
                  tasks=None,
                  goals=None,
                  project_tree=None,
                  work_dir=None):
   """Build a LocalScheduler layering the given extra tasks on top of the core fs rules."""
   resolved_goals = goals or dict()
   resolved_work_dir = work_dir or self._create_work_dir()
   resolved_tree = project_tree or self.mk_fs_tree(work_dir=resolved_work_dir)
   all_tasks = list(tasks or []) + create_fs_rules()
   return LocalScheduler(resolved_work_dir, resolved_goals, all_tasks,
                         resolved_tree, self._native)
Example #38
0
 def assert_walk_snapshot(self,
                          field,
                          filespecs,
                          paths,
                          ignore_patterns=None):
     """Assert that expanding `filespecs` yields exactly `paths` in the Snapshot field `field`.

     :param field: Name of the Snapshot attribute to inspect (e.g. 'files').
     :param filespecs: Filespec patterns to expand against the project tree.
     :param paths: Expected paths, order-insensitive.
     :param ignore_patterns: Optional ignore patterns for the project tree.
     """
     with self.mk_project_tree(
             ignore_patterns=ignore_patterns) as project_tree:
         scheduler = self.mk_scheduler(rules=create_fs_rules(),
                                       project_tree=project_tree)
         result = self.execute(scheduler, Snapshot,
                               self.specs('', *filespecs))[0]
         # assertEquals is a deprecated alias (removed in Python 3.12); use assertEqual.
         self.assertEqual(sorted(p.path for p in getattr(result, field)),
                          sorted(paths))
Example #39
0
  def test_materialize_directories(self):
    """Materializing a previously-primed digest writes the expected file contents to disk."""
    # I tried passing in the digest of a file, but it didn't make it to the
    # rust code due to all of the checks we have in place (which is probably a good thing).
    self.prime_store_with_roland_digest()

    with temporary_dir() as temp_dir:
      dir_path = os.path.join(temp_dir, "containing_roland")
      # Digest of a directory containing the single 'roland' file primed above.
      digest = DirectoryDigest(
        text_type("63949aa823baf765eff07b946050d76ec0033144c785a94d3ebd82baa931cd16"),
        80
      )
      scheduler = self.mk_scheduler(rules=create_fs_rules())
      scheduler.materialize_directories((DirectoryToMaterialize(text_type(dir_path), digest),))

      created_file = os.path.join(dir_path, "roland")
      with open(created_file) as f:
        content = f.read()
        # assertEquals is a deprecated alias (removed in Python 3.12); use assertEqual.
        self.assertEqual(content, "European Burmese")
Example #40
0
  def setUp(self) -> None:
    """Create a scheduler rooted at the mapper_test example tree, plus canonical fixture targets."""
    # A scheduler that supports address mapping via JSON BUILD files.
    address_mapper = AddressMapper(parser=JsonParser(TARGET_TABLE),
                                   build_patterns=('*.BUILD.json',))

    # `unhydrated_structs` is added explicitly because the core engine does not otherwise use it.
    rules = [unhydrated_structs] + create_fs_rules() + create_graph_rules(address_mapper)

    examples_root = os.path.join(os.path.dirname(__file__), 'examples/mapper_test')
    project_tree = self.mk_fs_tree(examples_root)
    self.build_root = project_tree.build_root
    self.scheduler = self.mk_scheduler(rules=rules, project_tree=project_tree)

    # The target at 'a/b' as it is expected to hydrate from the example BUILD files.
    self.a_b = Address.parse('a/b')
    self.a_b_target = Target(address=self.a_b,
                             dependencies=['//a/d/e'],
                             configurations=['//a/d/e', Struct(embedded='yes')],
                             type_alias='target')
Example #41
0
  def setUp(self) -> None:
    """Set up a scheduler over the mapper_test examples, plus expected fixture targets."""
    # Set up a scheduler that supports address mapping.
    address_mapper = AddressMapper(parser=JsonParser(TARGET_TABLE),
                                   build_patterns=('*.BUILD.json',))

    # We add the `unhydrated_structs` rule because it is otherwise not used in the core engine.
    rules = [
        unhydrated_structs
      ] + create_fs_rules() + create_graph_rules(address_mapper)

    project_tree = self.mk_fs_tree(os.path.join(os.path.dirname(__file__), 'examples/mapper_test'))
    self.build_root = project_tree.build_root
    self.scheduler = self.mk_scheduler(rules=rules, project_tree=project_tree)

    # The target at 'a/b' as it is expected to hydrate from the example BUILD files.
    self.a_b = Address.parse('a/b')
    self.a_b_target = Target(address=self.a_b,
                             dependencies=['//a/d/e'],
                             configurations=['//a/d/e', Struct(embedded='yes')],
                             type_alias='target')
Example #42
0
    def test_merge_directories(self):
        """Merging digests: empty, single+empty, and two distinct singles compose as expected."""
        with temporary_dir() as temp_dir:
            with open(os.path.join(temp_dir, "roland"), "w") as f:
                f.write("European Burmese")
            with open(os.path.join(temp_dir, "susannah"), "w") as f:
                f.write("Not sure actually")
            scheduler = self.mk_scheduler(rules=create_fs_rules())
            (empty_snapshot, roland_snapshot, susannah_snapshot,
             both_snapshot) = (scheduler.capture_snapshots((
                 PathGlobsAndRoot(PathGlobs(("doesnotmatch", ), ()),
                                  text_type(temp_dir)),
                 PathGlobsAndRoot(PathGlobs(("roland", ), ()),
                                  text_type(temp_dir)),
                 PathGlobsAndRoot(PathGlobs(("susannah", ), ()),
                                  text_type(temp_dir)),
                 PathGlobsAndRoot(PathGlobs(("*", ), ()), text_type(temp_dir)),
             )))

            # BUG FIX: `(x)` is not a tuple — without the trailing comma,
            # merge_directories received a bare digest instead of a 1-tuple.
            empty_merged = scheduler.merge_directories(
                (empty_snapshot.directory_digest,))
            self.assertEqual(
                empty_snapshot.directory_digest,
                empty_merged,
            )

            # Merging with the empty digest is the identity.
            roland_merged = scheduler.merge_directories((
                roland_snapshot.directory_digest,
                empty_snapshot.directory_digest,
            ))
            self.assertEqual(
                roland_snapshot.directory_digest,
                roland_merged,
            )

            # Merging the two singles equals snapshotting both files at once.
            both_merged = scheduler.merge_directories((
                roland_snapshot.directory_digest,
                susannah_snapshot.directory_digest,
            ))

            self.assertEqual(both_snapshot.directory_digest, both_merged)
Example #43
0
 def test_multiple_snapshots_from_outside_buildroot(self):
   """Capturing several snapshots in one call yields one snapshot per glob/root pair."""
   with temporary_dir() as temp_dir:
     with open(os.path.join(temp_dir, "roland"), "w") as f:
       f.write("European Burmese")
     with open(os.path.join(temp_dir, "susannah"), "w") as f:
       f.write("I don't know")
     scheduler = self.mk_scheduler(rules=create_fs_rules())
     snapshots = scheduler.capture_snapshots((
       PathGlobsAndRoot(PathGlobs(("roland",), ()), text_type(temp_dir)),
       PathGlobsAndRoot(PathGlobs(("susannah",), ()), text_type(temp_dir)),
       PathGlobsAndRoot(PathGlobs(("doesnotexist",), ()), text_type(temp_dir)),
     ))
     self.assertEqual(3, len(snapshots))
     self.assert_snapshot_equals(snapshots[0], ["roland"], Digest(
       text_type("63949aa823baf765eff07b946050d76ec0033144c785a94d3ebd82baa931cd16"),
       80
     ))
     self.assert_snapshot_equals(snapshots[1], ["susannah"], Digest(
       text_type("d3539cfc21eb4bab328ca9173144a8e932c515b1b9e26695454eeedbc5a95f6f"),
       82
     ))
     # A glob that matches nothing yields the canonical empty digest, not an error.
     self.assert_snapshot_equals(snapshots[2], [], EMPTY_DIRECTORY_DIGEST)
Example #44
0
    def test_full_graph_for_planner_example(self):
        """The full rule graph for the planner example has the expected entry counts."""
        symbol_table_cls = TargetTable
        address_mapper = AddressMapper(symbol_table_cls, JsonParser,
                                       '*.BUILD.json')
        tasks = create_graph_rules(address_mapper,
                                   symbol_table_cls) + create_fs_rules()

        rule_index = RuleIndex.create(tasks)
        root_subject_types = {
            Address, PathGlobs, SingleAddress, SiblingAddresses,
            DescendantAddresses, AscendantAddresses
        }
        fullgraph_str = self.create_full_graph(root_subject_types, rule_index)

        print('---diagnostic------')
        print(fullgraph_str)
        print('/---diagnostic------')

        # Partition the rendered graph into its '// root entries' and
        # '// internal entries' sections by scanning the header comments.
        in_root_rules = False
        in_all_rules = False
        all_rules = []
        root_rule_lines = []
        for line in fullgraph_str.splitlines():
            if line.startswith('  // root subject types:'):
                pass
            elif line.startswith('  // root entries'):
                in_root_rules = True
            elif line.startswith('  // internal entries'):
                in_all_rules = True
            elif in_all_rules:
                all_rules.append(line)
            elif in_root_rules:
                root_rule_lines.append(line)
            else:
                pass

        # assertEquals is a deprecated alias (removed in Python 3.12); use assertEqual.
        self.assertEqual(31, len(all_rules))
        self.assertEqual(56, len(root_rule_lines))  # 2 lines per entry
Example #45
0
  def test_merge_directories(self):
    """Merging digests: empty, single+empty, and two distinct singles compose as expected."""
    with temporary_dir() as temp_dir:
      with open(os.path.join(temp_dir, "roland"), "w") as f:
        f.write("European Burmese")
      with open(os.path.join(temp_dir, "susannah"), "w") as f:
        f.write("Not sure actually")
      scheduler = self.mk_scheduler(rules=create_fs_rules())
      (empty_snapshot, roland_snapshot, susannah_snapshot, both_snapshot) = (
          scheduler.capture_snapshots((
            PathGlobsAndRoot(PathGlobs(("doesnotmatch",), ()), text_type(temp_dir)),
            PathGlobsAndRoot(PathGlobs(("roland",), ()), text_type(temp_dir)),
            PathGlobsAndRoot(PathGlobs(("susannah",), ()), text_type(temp_dir)),
            PathGlobsAndRoot(PathGlobs(("*",), ()), text_type(temp_dir)),
        ))
      )

      # BUG FIX: `(x)` is not a tuple — without the trailing comma,
      # merge_directories received a bare digest instead of a 1-tuple.
      # Also: assertEquals is deprecated (removed in Python 3.12); use assertEqual.
      empty_merged = scheduler.merge_directories((empty_snapshot.directory_digest,))
      self.assertEqual(
        empty_snapshot.directory_digest,
        empty_merged,
      )

      # Merging with the empty digest is the identity.
      roland_merged = scheduler.merge_directories((
        roland_snapshot.directory_digest,
        empty_snapshot.directory_digest,
      ))
      self.assertEqual(
        roland_snapshot.directory_digest,
        roland_merged,
      )

      # Merging the two singles equals snapshotting both files at once.
      both_merged = scheduler.merge_directories((
        roland_snapshot.directory_digest,
        susannah_snapshot.directory_digest,
      ))

      self.assertEqual(both_snapshot.directory_digest, both_merged)
Example #46
0
 def mk_scheduler_in_example_fs(self, rules):
   """Build a scheduler over the example fs tree with the given rules plus the core fs rules."""
   return self.mk_scheduler(rules=list(rules) + create_fs_rules(),
                            project_tree=self.mk_example_fs_tree())
Example #47
0
 def test_merge_zero_directories(self):
     """Merging an empty tuple of digests yields the canonical empty digest."""
     scheduler = self.mk_scheduler(rules=create_fs_rules())
     # Renamed from `dir`, which shadowed the `dir` builtin.
     merged = scheduler.merge_directories(())
     self.assertEqual(EMPTY_DIRECTORY_DIGEST, merged)
 def mk_scheduler_in_example_fs(self, rules):
   """Build a scheduler over the example fs tree, adding fs rules and the process-execution root rule."""
   combined = list(rules) + create_fs_rules() + [RootRule(ExecuteProcessRequest)]
   return self.mk_scheduler(rules=combined, project_tree=self.mk_example_fs_tree())
Example #49
0
 def assert_content(self, filespecs, expected_content):
   """Assert that digesting `filespecs` produces exactly `expected_content` (path -> bytes)."""
   with self.mk_project_tree() as project_tree:
     scheduler = self.mk_scheduler(rules=create_fs_rules(), project_tree=project_tree)
     result = self.execute(scheduler, FilesContent, self.specs('', *filespecs))[0]
     actual_content = {f.path: f.content for f in result.dependencies}
     # assertEquals is a deprecated alias (removed in Python 3.12); use assertEqual.
     self.assertEqual(expected_content, actual_content)
  def setup_legacy_graph(pants_ignore_patterns,
                         workdir,
                         build_file_imports_behavior,
                         build_root=None,
                         native=None,
                         build_file_aliases=None,
                         rules=None,
                         build_ignore_patterns=None,
                         exclude_target_regexps=None,
                         subproject_roots=None,
                         include_trace_on_error=True):
    """Construct and return the components necessary for LegacyBuildGraph construction.

    :param list pants_ignore_patterns: A list of path ignore patterns for FileSystemProjectTree,
                                       usually taken from the '--pants-ignore' global option.
    :param str workdir: The pants workdir.
    :param build_file_imports_behavior: How to behave if a BUILD file being parsed tries to use
      import statements. Valid values: "allow", "warn", "error".
    :type build_file_imports_behavior: string
    :param str build_root: A path to be used as the build root. If None, then default is used.
    :param Native native: An instance of the native-engine subsystem.
    :param build_file_aliases: BuildFileAliases to register.
    :type build_file_aliases: :class:`pants.build_graph.build_file_aliases.BuildFileAliases`
    :param list build_ignore_patterns: A list of paths ignore patterns used when searching for BUILD
                                       files, usually taken from the '--build-ignore' global option.
    :param list exclude_target_regexps: A list of regular expressions for excluding targets.
    :param list subproject_roots: Paths that correspond with embedded build roots
                                  under the current build root.
    :param bool include_trace_on_error: If True, when an error occurs, the error message will
                include the graph trace.
    :returns: A :class:`LegacyGraphHelper` wrapping the scheduler, symbol table, and
              change calculator.
    """

    build_root = build_root or get_buildroot()
    # SCM is optional; it is only used to build the change calculator below.
    scm = get_scm()

    if not build_file_aliases:
      build_file_aliases = EngineInitializer.get_default_build_file_aliases()

    if not rules:
      rules = []

    symbol_table = LegacySymbolTable(build_file_aliases)

    project_tree = FileSystemProjectTree(build_root, pants_ignore_patterns)

    # Register "literal" subjects required for these tasks.
    parser = LegacyPythonCallbacksParser(
      symbol_table,
      build_file_aliases,
      build_file_imports_behavior
    )
    address_mapper = AddressMapper(parser=parser,
                                   build_ignore_patterns=build_ignore_patterns,
                                   exclude_target_regexps=exclude_target_regexps,
                                   subproject_roots=subproject_roots)

    # Load the native backend.
    native = native or Native.create()

    # Create a Scheduler containing graph and filesystem tasks, with no installed goals. The
    # LegacyBuildGraph will explicitly request the products it needs.
    tasks = (
      create_legacy_graph_tasks(symbol_table) +
      create_fs_rules() +
      create_graph_rules(address_mapper, symbol_table) +
      create_process_rules() +
      rules
    )

    scheduler = LocalScheduler(workdir, dict(), tasks, project_tree, native, include_trace_on_error=include_trace_on_error)
    change_calculator = EngineChangeCalculator(scheduler, symbol_table, scm) if scm else None

    return LegacyGraphHelper(scheduler, symbol_table, change_calculator)
Example #51
0
 def test_merge_zero_directories(self):
   """Merging an empty tuple of digests yields the canonical empty digest."""
   scheduler = self.mk_scheduler(rules=create_fs_rules())
   # Renamed from `dir`, which shadowed the `dir` builtin.
   merged = scheduler.merge_directories(())
   self.assertEqual(EMPTY_DIRECTORY_DIGEST, merged)
Example #52
0
 def assert_walk_snapshot(self, field, filespecs, paths, ignore_patterns=None):
   """Assert that expanding `filespecs` yields exactly `paths` in the Snapshot field `field`."""
   with self.mk_project_tree(ignore_patterns=ignore_patterns) as project_tree:
     scheduler = self.mk_scheduler(rules=create_fs_rules(), project_tree=project_tree)
     result = self.execute(scheduler, Snapshot, self.specs('', *filespecs))[0]
     # assertEquals is a deprecated alias (removed in Python 3.12); use assertEqual.
     self.assertEqual(sorted(p.path for p in getattr(result, field)), sorted(paths))
Example #53
0
 def mk_configured_scheduler(self):
   """Build a scheduler over the build root with the fs and process-execution rules installed."""
   configured_rules = create_fs_rules() + create_process_rules()
   return self.mk_scheduler(
     rules=configured_rules,
     project_tree=FileSystemProjectTree(self.build_root),
     work_dir=self.pants_workdir
   )
Example #54
0
  def setup_legacy_graph_extended(
    pants_ignore_patterns,
    workdir,
    local_store_dir,
    build_file_imports_behavior,
    options_bootstrapper,
    build_configuration,
    build_root=None,
    native=None,
    glob_match_error_behavior=None,
    build_ignore_patterns=None,
    exclude_target_regexps=None,
    subproject_roots=None,
    include_trace_on_error=True,
    execution_options=None,
  ):
    """Construct and return the components necessary for LegacyBuildGraph construction.

    :param list pants_ignore_patterns: A list of path ignore patterns for FileSystemProjectTree,
                                       usually taken from the '--pants-ignore' global option.
    :param str workdir: The pants workdir.
    :param local_store_dir: The directory to use for storing the engine's LMDB store in.
    :param build_file_imports_behavior: How to behave if a BUILD file being parsed tries to use
      import statements. Valid values: "allow", "warn", "error".
    :type build_file_imports_behavior: string
    :param str build_root: A path to be used as the build root. If None, then default is used.
    :param Native native: An instance of the native-engine subsystem.
    :param options_bootstrapper: A `OptionsBootstrapper` object containing bootstrap options.
    :type options_bootstrapper: :class:`pants.options.options_bootstrapper.OptionsBootstrapper`
    :param build_configuration: The `BuildConfiguration` object to get build file aliases from.
    :type build_configuration: :class:`pants.build_graph.build_configuration.BuildConfiguration`
    :param glob_match_error_behavior: How to behave if a glob specified for a target's sources or
                                      bundles does not expand to anything.
    :type glob_match_error_behavior: :class:`pants.option.global_options.GlobMatchErrorBehavior`
    :param list build_ignore_patterns: A list of paths ignore patterns used when searching for BUILD
                                       files, usually taken from the '--build-ignore' global option.
    :param list exclude_target_regexps: A list of regular expressions for excluding targets.
    :param list subproject_roots: Paths that correspond with embedded build roots
                                  under the current build root.
    :param bool include_trace_on_error: If True, when an error occurs, the error message will
                include the graph trace.
    :param execution_options: Option values for (remote) process execution.
    :type execution_options: :class:`pants.option.global_options.ExecutionOptions`
    :returns: A LegacyGraphScheduler.
    """

    build_root = build_root or get_buildroot()
    build_configuration = build_configuration or BuildConfigInitializer.get(options_bootstrapper)
    bootstrap_options = options_bootstrapper.bootstrap_options.for_global_scope()

    build_file_aliases = build_configuration.registered_aliases()
    rules = build_configuration.rules()

    symbol_table = _legacy_symbol_table(build_file_aliases)

    project_tree = FileSystemProjectTree(build_root, pants_ignore_patterns)

    execution_options = execution_options or DEFAULT_EXECUTION_OPTIONS

    # Register "literal" subjects required for these rules.
    parser = LegacyPythonCallbacksParser(
      symbol_table,
      build_file_aliases,
      build_file_imports_behavior
    )
    address_mapper = AddressMapper(parser=parser,
                                   build_ignore_patterns=build_ignore_patterns,
                                   exclude_target_regexps=exclude_target_regexps,
                                   subproject_roots=subproject_roots)

    # Singleton rules closing over the values computed above, so the engine can
    # request them as products.
    @rule(GlobMatchErrorBehavior, [])
    def glob_match_error_behavior_singleton():
      return glob_match_error_behavior or GlobMatchErrorBehavior.ignore

    @rule(BuildConfiguration, [])
    def build_configuration_singleton():
      return build_configuration

    @rule(SymbolTable, [])
    def symbol_table_singleton():
      return symbol_table

    # Create a Scheduler containing graph and filesystem rules, with no installed goals. The
    # LegacyBuildGraph will explicitly request the products it needs.
    # NOTE: `rules` is rebound here; the configuration-provided rules are appended last.
    rules = (
      [
        RootRule(Console),
        glob_match_error_behavior_singleton,
        build_configuration_singleton,
        symbol_table_singleton,
      ] +
      create_legacy_graph_tasks() +
      create_fs_rules() +
      create_process_rules() +
      create_graph_rules(address_mapper) +
      create_options_parsing_rules() +
      structs_rules() +
      # TODO: This should happen automatically, but most tests (e.g. tests/python/pants_test/auth) fail if it's not here:
      python_test_runner.rules() +
      rules
    )

    goal_map = EngineInitializer._make_goal_map_from_rules(rules)

    union_rules = build_configuration.union_rules()

    # NOTE(review): unlike older variants, `native` is not defaulted via Native.create()
    # here — confirm callers always supply an instance.
    scheduler = Scheduler(
      native,
      project_tree,
      workdir,
      local_store_dir,
      rules,
      union_rules,
      execution_options,
      include_trace_on_error=include_trace_on_error,
      visualize_to_dir=bootstrap_options.native_engine_visualize_to,
    )

    return LegacyGraphScheduler(scheduler, build_file_aliases, goal_map)
Example #55
0
def setup_json_scheduler(build_root, native):
  """Return a build graph and scheduler configured for BLD.json files under the given build root.

  :param build_root: Directory under which BLD.json build files are discovered.
  :param native: The native engine backend instance passed through to the LocalScheduler.
  :rtype :class:`pants.engine.scheduler.LocalScheduler`
  """

  symbol_table = ExampleTable()

  # Register "literal" subjects required for these tasks.
  # TODO: Replace with `Subsystems`.
  address_mapper = AddressMapper(build_patterns=('BLD.json',),
                                 parser=JsonParser(symbol_table))

  work_dir = os_path_join(build_root, '.pants.d')
  project_tree = FileSystemProjectTree(build_root)

  # Maps goal name -> the product the scheduler must produce to satisfy it.
  goals = {
      'compile': Classpath,
      # TODO: to allow for running resolve alone, should split out a distinct 'IvyReport' product.
      'resolve': Classpath,
      'list': BuildFileAddresses,
      GenGoal.name(): GenGoal,
      'ls': Snapshot,
      'cat': FilesContent,
    }
  tasks = [
      # Codegen
      GenGoal.rule(),
      gen_apache_java_thrift,
      gen_apache_python_thrift,
      gen_scrooge_scala_thrift,
      gen_scrooge_java_thrift,
      SingletonRule(Scrooge, Scrooge(Address.parse('src/scala/scrooge')))
    ] + [
      # scala dependency inference
      reify_scala_sources,
      extract_scala_imports,
      select_package_address,
      calculate_package_search_path,
      SingletonRule(SourceRoots, SourceRoots(('src/java','src/scala'))),
    ] + [
      # Remote dependency resolution
      ivy_resolve,
      select_rev,
    ] + [
      # Compilers
      isolate_resources,
      write_name_file,
      javac,
      scalac,
    ] + (
      create_graph_rules(address_mapper, symbol_table)
    ) + (
      create_fs_rules()
    )

  return LocalScheduler(work_dir,
                        goals,
                        tasks,
                        project_tree,
                        native)
Example #56
0
  def setup_legacy_graph_extended(
    pants_ignore_patterns,
    workdir,
    build_file_imports_behavior,
    build_configuration,
    build_root=None,
    native=None,
    glob_match_error_behavior=None,
    rules=None,
    build_ignore_patterns=None,
    exclude_target_regexps=None,
    subproject_roots=None,
    include_trace_on_error=True,
    remote_store_server=None,
    remote_execution_server=None
  ):
    """Construct and return the components necessary for LegacyBuildGraph construction.

    :param list pants_ignore_patterns: A list of path ignore patterns for FileSystemProjectTree,
                                       usually taken from the '--pants-ignore' global option.
    :param str workdir: The pants workdir.
    :param build_file_imports_behavior: How to behave if a BUILD file being parsed tries to use
      import statements. Valid values: "allow", "warn", "error".
    :type build_file_imports_behavior: string
    :param str build_root: A path to be used as the build root. If None, then default is used.
    :param Native native: An instance of the native-engine subsystem.
    :param build_configuration: The `BuildConfiguration` object to get build file aliases from.
    :type build_configuration: :class:`pants.build_graph.build_configuration.BuildConfiguration`
    :param glob_match_error_behavior: How to behave if a glob specified for a target's sources or
                                      bundles does not expand to anything.
    :type glob_match_error_behavior: :class:`pants.option.global_options.GlobMatchErrorBehavior`
    :param list build_ignore_patterns: A list of paths ignore patterns used when searching for BUILD
                                       files, usually taken from the '--build-ignore' global option.
    :param list exclude_target_regexps: A list of regular expressions for excluding targets.
    :param list subproject_roots: Paths that correspond with embedded build roots
                                  under the current build root.
    :param bool include_trace_on_error: If True, when an error occurs, the error message will
                include the graph trace.
    :param rules: Additional rules to install; if falsy, falls back to the
                  BuildConfiguration's rules.
    :param remote_store_server: Passed through to the Scheduler for remote store access.
    :param remote_execution_server: Passed through to the Scheduler for remote execution.
    :returns: A LegacyGraphScheduler.
    """

    build_root = build_root or get_buildroot()
    build_configuration = build_configuration or BuildConfigInitializer.get(OptionsBootstrapper())
    build_file_aliases = build_configuration.registered_aliases()
    rules = rules or build_configuration.rules() or []

    symbol_table = LegacySymbolTable(build_file_aliases)

    project_tree = FileSystemProjectTree(build_root, pants_ignore_patterns)

    # Register "literal" subjects required for these rules.
    parser = LegacyPythonCallbacksParser(
      symbol_table,
      build_file_aliases,
      build_file_imports_behavior
    )
    address_mapper = AddressMapper(parser=parser,
                                   build_ignore_patterns=build_ignore_patterns,
                                   exclude_target_regexps=exclude_target_regexps,
                                   subproject_roots=subproject_roots)

    # Load the native backend.
    native = native or Native.create()

    # Create a Scheduler containing graph and filesystem rules, with no installed goals. The
    # LegacyBuildGraph will explicitly request the products it needs.
    # NOTE: `rules` is rebound here; the caller/configuration rules are appended last.
    rules = (
      [
        SingletonRule.from_instance(GlobMatchErrorBehavior.create(glob_match_error_behavior)),
        SingletonRule.from_instance(build_configuration),
      ] +
      create_legacy_graph_tasks(symbol_table) +
      create_fs_rules() +
      create_process_rules() +
      create_graph_rules(address_mapper, symbol_table) +
      create_options_parsing_rules() +
      rules
    )

    scheduler = Scheduler(
      native,
      project_tree,
      workdir,
      rules,
      remote_store_server,
      remote_execution_server,
      include_trace_on_error=include_trace_on_error,
    )

    return LegacyGraphScheduler(scheduler, symbol_table)