Code Example #1
    def setUp(self):
        build_root = os.path.join(os.path.dirname(__file__), 'examples',
                                  'scheduler_inputs')
        self.spec_parser = CmdLineSpecParser(build_root)
        self.scheduler = setup_json_scheduler(build_root, inline_nodes=False)
        self.pg = self.scheduler.product_graph
        self.engine = LocalSerialEngine(self.scheduler)

        self.guava = Address.parse('3rdparty/jvm:guava')
        self.thrift = Address.parse('src/thrift/codegen/simple')
        self.java = Address.parse('src/java/codegen/simple')
        self.java_simple = Address.parse('src/java/simple')
        self.java_multi = Address.parse('src/java/multiple_classpath_entries')
        self.no_variant_thrift = Address.parse(
            'src/java/codegen/selector:conflict')
        self.unconfigured_thrift = Address.parse(
            'src/thrift/codegen/unconfigured')
        self.resources = Address.parse('src/resources/simple')
        self.consumes_resources = Address.parse('src/java/consumes_resources')
        self.consumes_managed_thirdparty = Address.parse(
            'src/java/managed_thirdparty')
        self.managed_guava = Address.parse('3rdparty/jvm/managed:guava')
        self.managed_hadoop = Address.parse(
            '3rdparty/jvm/managed:hadoop-common')
        self.managed_resolve_latest = Address.parse(
            '3rdparty/jvm/managed:latest-hadoop')
        self.inferred_deps = Address.parse('src/scala/inferred_deps')
Code Example #2
  def execute_request(self, scheduler, product, *subjects):
    """Creates, runs, and returns an ExecutionRequest for the given product and subjects."""
    request = scheduler.execution_request([product], subjects)
    engine = LocalSerialEngine(scheduler)
    res = engine.execute(request)
    if res.error:
      raise res.error
    return request
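The helper above returns the request so the caller can read results back off the scheduler. Below is a minimal sketch of how a test might drive it; the scheduler setup, the `Classpath` product, the sample address, and the single-root assertion are borrowed from the other examples on this page purely for illustration and are not part of the original file.

  def test_execute_request_roundtrip(self):
    # Hypothetical usage of the execute_request helper above; the scheduler
    # fixture, product type, and subject address are assumptions.
    scheduler = setup_json_scheduler(self.build_root)
    subject = Address.parse('3rdparty/jvm:guava')
    request = self.execute_request(scheduler, Classpath, subject)
    # The scheduler can then be queried for the roots computed for the request.
    root_entries = scheduler.root_entries(request).items()
    self.assertEqual(1, len(root_entries))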
Code Example #3
    def execute_request(self, scheduler, product, *subjects):
        """Creates, runs, and returns an ExecutionRequest for the given product and subjects."""
        request = scheduler.execution_request([product], subjects)
        engine = LocalSerialEngine(scheduler)
        res = engine.execute(request)
        if res.error:
            raise res.error
        return request
Code Example #4
def visualize_build_request(build_root, goals, subjects):
    with subsystem_instance(Native.Factory) as native_factory:
        scheduler = setup_json_scheduler(build_root, native_factory.create())

        execution_request = scheduler.build_request(goals, subjects)
        # NB: Calls `reduce` independently of `execute`, in order to render a graph before validating it.
        engine = LocalSerialEngine(scheduler, Storage.create())
        engine.reduce(execution_request)
        visualize_execution_graph(scheduler)
Code Example #5
File: visualizer.py  Project: pombredanne/pants
def visualize_build_request(build_root, goals, subjects):
  with subsystem_instance(Native.Factory) as native_factory:
    scheduler = setup_json_scheduler(build_root, native_factory.create())

    execution_request = scheduler.build_request(goals, subjects)
    # NB: Calls `reduce` independently of `execute`, in order to render a graph before validating it.
    engine = LocalSerialEngine(scheduler, Storage.create())
    engine.reduce(execution_request)
    visualize_execution_graph(scheduler)
Code Example #6
File: visualizer.py  Project: CaitieM20/pants
def visualize_build_request(build_root, goals, subjects):
  scheduler = setup_json_scheduler(build_root)

  execution_request = scheduler.build_request(goals, subjects)
  # NB: Calls `reduce` independently of `execute`, in order to render a graph before validating it.
  engine = LocalSerialEngine(scheduler, Storage.create())
  try:
    engine.reduce(execution_request)
    visualize_execution_graph(scheduler, execution_request)
  finally:
    engine.close()
Code Example #7
File: test_graph.py  Project: neven7/pants
  def _populate(self, scheduler, address):
    """Perform an ExecutionRequest to parse the given Address into a Struct."""
    request = scheduler.execution_request([self._product], [address])
    LocalSerialEngine(scheduler).reduce(request)
    root_entries = scheduler.root_entries(request).items()
    self.assertEquals(1, len(root_entries))
    return root_entries[0]
Code Example #8
  def setup_legacy_graph(path_ignore_patterns):
    """Construct and return the components necessary for LegacyBuildGraph construction.

    :param list path_ignore_patterns: A list of path ignore patterns for FileSystemProjectTree,
                                      usually taken from the `--pants-ignore` global option.
    :returns: A tuple of (scheduler, engine, symbol_table_cls, build_graph_cls).
    """

    build_root = get_buildroot()
    project_tree = FileSystemProjectTree(build_root, path_ignore_patterns)
    symbol_table_cls = LegacySymbolTable

    # Register "literal" subjects required for these tasks.
    # TODO: Replace with `Subsystems`.
    address_mapper = AddressMapper(symbol_table_cls=symbol_table_cls,
                                   parser_cls=LegacyPythonCallbacksParser)

    # Create a Scheduler containing graph and filesystem tasks, with no installed goals. The
    # LegacyBuildGraph will explicitly request the products it needs.
    tasks = (
      create_legacy_graph_tasks() +
      create_fs_tasks() +
      create_graph_tasks(address_mapper, symbol_table_cls)
    )

    scheduler = LocalScheduler(dict(), tasks, project_tree)
    engine = LocalSerialEngine(scheduler, Storage.create(debug=False))

    return LegacyGraphHelper(scheduler, engine, symbol_table_cls, LegacyBuildGraph)
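A hedged sketch of consuming the helper returned above. The attribute names on LegacyGraphHelper are assumed to mirror the constructor arguments, and the requested product (`BuildFileAddresses`) and subject address are borrowed from other examples on this page purely for illustration.

# Hypothetical consumer of setup_legacy_graph(); attribute names on the
# returned LegacyGraphHelper are assumed from the constructor call above.
graph_helper = setup_legacy_graph(path_ignore_patterns=['.git/', 'dist/'])
scheduler, engine = graph_helper.scheduler, graph_helper.engine
request = scheduler.execution_request([BuildFileAddresses],
                                      [Address.parse('3rdparty/jvm:guava')])
result = engine.execute(request)
if result.error:
  raise result.error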
Code Example #9
File: engine_initializer.py  Project: lenucksi/pants
    def setup_legacy_graph(pants_ignore_patterns,
                           workdir,
                           build_root=None,
                           native=None,
                           symbol_table_cls=None,
                           build_ignore_patterns=None,
                           exclude_target_regexps=None,
                           subproject_roots=None):
        """Construct and return the components necessary for LegacyBuildGraph construction.

    :param list pants_ignore_patterns: A list of path ignore patterns for FileSystemProjectTree,
                                       usually taken from the '--pants-ignore' global option.
    :param str workdir: The pants workdir.
    :param str build_root: A path to be used as the build root. If None, then default is used.
    :param Native native: An instance of the native-engine subsystem.
    :param SymbolTable symbol_table_cls: A SymbolTable class to use for build file parsing, or
                                         None to use the default.
    :param list build_ignore_patterns: A list of paths ignore patterns used when searching for BUILD
                                       files, usually taken from the '--build-ignore' global option.
    :param list exclude_target_regexps: A list of regular expressions for excluding targets.
    :param list subproject_roots: Paths that correspond with embedded build roots
                                  under the current build root.
    :returns: A tuple of (scheduler, engine, symbol_table_cls, build_graph_cls).
    """

        build_root = build_root or get_buildroot()
        scm = get_scm()
        symbol_table_cls = symbol_table_cls or LegacySymbolTable

        project_tree = FileSystemProjectTree(build_root, pants_ignore_patterns)

        # Register "literal" subjects required for these tasks.
        # TODO: Replace with `Subsystems`.
        address_mapper = AddressMapper(
            symbol_table_cls=symbol_table_cls,
            parser_cls=LegacyPythonCallbacksParser,
            build_ignore_patterns=build_ignore_patterns,
            exclude_target_regexps=exclude_target_regexps,
            subproject_roots=subproject_roots)

        # Load the native backend.
        native = native or Native.Factory.global_instance().create()

        # Create a Scheduler containing graph and filesystem tasks, with no installed goals. The
        # LegacyBuildGraph will explicitly request the products it needs.
        tasks = (create_legacy_graph_tasks(symbol_table_cls) +
                 create_fs_rules() +
                 create_graph_rules(address_mapper, symbol_table_cls))

        # TODO: Do not use the cache yet, as it incurs a high overhead.
        scheduler = LocalScheduler(workdir, dict(), tasks, project_tree,
                                   native)
        engine = LocalSerialEngine(scheduler, use_cache=False)
        change_calculator = EngineChangeCalculator(
            scheduler, engine, symbol_table_cls, scm) if scm else None

        return LegacyGraphHelper(scheduler, engine, symbol_table_cls,
                                 change_calculator)
Code Example #10
    def _populate(self, scheduler, address):
        """Perform an ExecutionRequest to parse the given Address into a Struct."""
        # NB: requesting any of the possible types that come from parsing a BUILD file
        request = scheduler.execution_request([TestTable.constraint()],
                                              [address])
        LocalSerialEngine(scheduler).reduce(request)
        root_entries = scheduler.root_entries(request).items()
        self.assertEquals(1, len(root_entries))
        return root_entries[0]
Code Example #11
def visualize_build_request(build_root, goals, subjects):
  scheduler, storage = setup_json_scheduler(build_root)
  execution_request = scheduler.build_request(goals, subjects)
  # NB: Calls `reduce` independently of `execute`, in order to render a graph before validating it.
  engine = LocalSerialEngine(scheduler, storage)
  engine.start()
  try:
    engine.reduce(execution_request)
    visualize_execution_graph(scheduler, storage, execution_request)
  finally:
    engine.close()
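In the pants repository this visualizer is meant to be run as a small command-line tool. The wrapper below is an assumption about what such an entry point might look like; the argument order and the `main` name are not taken from the snippet above.

import sys

def main():
  # Hypothetical CLI wrapper around visualize_build_request(); the argument
  # handling here is an assumption, not part of the original visualizer.py.
  if len(sys.argv) < 3:
    sys.exit('usage: visualizer.py <build_root> <goal> [spec ...]')
  build_root, goal, subjects = sys.argv[1], sys.argv[2], sys.argv[3:]
  visualize_build_request(build_root, [goal], subjects)

if __name__ == '__main__':
  main()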
Code Example #12
File: test_mapper.py  Project: kageiit/pants
  def resolve(self, spec):
    request = self.scheduler.execution_request([UnhydratedStruct], [spec])
    result = LocalSerialEngine(self.scheduler, self.storage).execute(request)
    if result.error:
      raise result.error

    # Expect a single root.
    state, = result.root_products.values()
    if type(state) is Throw:
      raise state.exc
    return state.value
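A possible call site for the resolve() helper above. The spec string and the expectation that the resolved UnhydratedStruct carries an `address` attribute are illustrative assumptions, not taken from the original test file.

  def test_resolve_guava(self):
    # Hypothetical test using the resolve() helper above.
    struct = self.resolve('3rdparty/jvm:guava')
    self.assertEqual(Address.parse('3rdparty/jvm:guava'), struct.address)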
Code Example #13
  def resolve(self, spec):
    select = SelectDependencies(UnhydratedStruct, Addresses, field_types=(Address,))
    request = self.scheduler.selection_request([(select, spec)])
    result = LocalSerialEngine(self.scheduler).execute(request)
    if result.error:
      raise result.error

    # Expect a single root.
    state, = result.root_products.values()
    if type(state) is Throw:
      raise Exception(state.exc)
    return state.value
Code Example #14
  def test_gather_snapshot_of_pathglobs(self):
    project_tree = self.mk_example_fs_tree()
    scheduler = self.mk_scheduler(project_tree=project_tree, tasks=create_snapshot_tasks(project_tree))
    snapshot_archive_root = os.path.join(project_tree.build_root, '.snapshots')

    request = scheduler.execution_request([Snapshot],
                                          [PathGlobs.create('', globs=['fs_test/a/b/*'])])
    LocalSerialEngine(scheduler).reduce(request)

    root_entries = scheduler.root_entries(request).items()
    self.assertEquals(1, len(root_entries))
    state = self.assertFirstEntryIsReturn(root_entries, scheduler)
    snapshot = state.value
    self.assert_archive_files(['fs_test/a/b/1.txt', 'fs_test/a/b/2'], snapshot,
                              snapshot_archive_root)
Code Example #15
  def test_gather_snapshot_of_pathglobs(self):
    project_tree = self.mk_example_fs_tree()
    scheduler = self.mk_scheduler(project_tree=project_tree)
    empty_step_context = StepContext(node_builder=None, project_tree=project_tree, node_states=[], inline_nodes=False)

    request = scheduler.execution_request([Snapshot],
                                          [PathGlobs.create('', globs=['fs_test/a/b/*'])])
    LocalSerialEngine(scheduler).reduce(request)

    root_entries = scheduler.root_entries(request).items()
    self.assertEquals(1, len(root_entries))
    state = self.assertFirstEntryIsReturn(root_entries, scheduler)
    snapshot = state.value
    self.assert_archive_files(['fs_test/a/b/1.txt', 'fs_test/a/b/2'], snapshot,
                              empty_step_context)
Code Example #16
File: engine_initializer.py  Project: awiss/pants
    def setup_legacy_graph(pants_ignore_patterns,
                           symbol_table_cls=None,
                           build_ignore_patterns=None,
                           exclude_target_regexps=None):
        """Construct and return the components necessary for LegacyBuildGraph construction.

    :param list pants_ignore_patterns: A list of path ignore patterns for FileSystemProjectTree,
                                       usually taken from the '--pants-ignore' global option.
    :param SymbolTable symbol_table_cls: A SymbolTable class to use for build file parsing, or
                                         None to use the default.
    :param list build_ignore_patterns: A list of paths ignore patterns used when searching for BUILD
                                       files, usually taken from the '--build-ignore' global option.
    :param list exclude_target_regexps: A list of regular expressions for excluding targets.
    :returns: A tuple of (scheduler, engine, symbol_table_cls, build_graph_cls).
    """

        build_root = get_buildroot()
        scm = get_scm()
        project_tree = FileSystemProjectTree(build_root, pants_ignore_patterns)
        symbol_table_cls = symbol_table_cls or LegacySymbolTable

        # Register "literal" subjects required for these tasks.
        # TODO: Replace with `Subsystems`.
        address_mapper = AddressMapper(
            symbol_table_cls=symbol_table_cls,
            parser_cls=LegacyPythonCallbacksParser,
            build_ignore_patterns=build_ignore_patterns,
            exclude_target_regexps=exclude_target_regexps)

        # Create a Scheduler containing graph and filesystem tasks, with no installed goals. The
        # LegacyBuildGraph will explicitly request the products it needs.
        tasks = (create_legacy_graph_tasks(symbol_table_cls) +
                 create_fs_tasks() +
                 create_graph_tasks(address_mapper, symbol_table_cls))

        scheduler = LocalScheduler(dict(), tasks, project_tree)
        # TODO: Do not use the cache yet, as it incurs a high overhead.
        engine = LocalSerialEngine(scheduler,
                                   Storage.create(),
                                   use_cache=False)
        change_calculator = EngineChangeCalculator(engine,
                                                   scm) if scm else None

        return LegacyGraphHelper(scheduler, engine, symbol_table_cls,
                                 change_calculator)
Code Example #17
  def test_failed_command_propagates_throw(self):
    scheduler = self.mk_scheduler_in_example_fs([
      # subject to files / product of subject to files for snapshot.
      SnapshottedProcess.create(product_type=Concatted,
                                binary_type=ShellFailCommand,
                                input_selectors=tuple(),
                                input_conversion=empty_process_request,
                                output_conversion=fail_process_result),
      [ShellFailCommand, [], ShellFailCommand]
    ])

    request = scheduler.execution_request([Concatted],
                                          [PathGlobs.create('', include=['fs_test/a/b/*'])])
    LocalSerialEngine(scheduler).reduce(request)

    root_entries = scheduler.root_entries(request).items()
    self.assertEquals(1, len(root_entries))
    self.assertFirstEntryIsThrow(root_entries,
                                 in_msg='Running ShellFailCommand failed with non-zero exit code: 1')
Code Example #18
  def test_failed_output_conversion_propagates_throw(self):
    scheduler = self.mk_scheduler_in_example_fs([
      # subject to files / product of subject to files for snapshot.
      SnapshottedProcess.create(product_type=Concatted,
                                binary_type=ShellCatToOutFile,
                                input_selectors=(Select(Snapshot),),
                                input_conversion=file_list_to_args_for_cat_with_snapshot_subjects_and_output_file,
                                output_conversion=fail_process_result),
      [ShellCatToOutFile, [], ShellCatToOutFile]
    ])

    request = scheduler.execution_request([Concatted],
                                          [PathGlobs.create('', include=['fs_test/a/b/*'])])
    LocalSerialEngine(scheduler).reduce(request)

    root_entries = scheduler.root_entries(request).items()
    self.assertEquals(1, len(root_entries))
    self.assertFirstEntryIsThrow(root_entries,
                                 in_msg='Failed in output conversion!')
Code Example #19
File: test_scheduler.py  Project: pombredanne/pants
  def setUp(self):
    build_root = os.path.join(os.path.dirname(__file__), 'examples', 'scheduler_inputs')
    self.spec_parser = CmdLineSpecParser(build_root)
    self.scheduler = setup_json_scheduler(build_root, self._native)
    self.engine = LocalSerialEngine(self.scheduler)

    self.guava = Address.parse('3rdparty/jvm:guava')
    self.thrift = Address.parse('src/thrift/codegen/simple')
    self.java = Address.parse('src/java/codegen/simple')
    self.java_simple = Address.parse('src/java/simple')
    self.java_multi = Address.parse('src/java/multiple_classpath_entries')
    self.no_variant_thrift = Address.parse('src/java/codegen/selector:conflict')
    self.unconfigured_thrift = Address.parse('src/thrift/codegen/unconfigured')
    self.resources = Address.parse('src/resources/simple')
    self.consumes_resources = Address.parse('src/java/consumes_resources')
    self.consumes_managed_thirdparty = Address.parse('src/java/managed_thirdparty')
    self.managed_guava = Address.parse('3rdparty/jvm/managed:guava')
    self.managed_hadoop = Address.parse('3rdparty/jvm/managed:hadoop-common')
    self.managed_resolve_latest = Address.parse('3rdparty/jvm/managed:latest-hadoop')
    self.inferred_deps = Address.parse('src/scala/inferred_deps')
Code Example #20
  def test_integration_concat_with_snapshot_subjects_test(self):
    scheduler = self.mk_scheduler_in_example_fs([
      # subject to files / product of subject to files for snapshot.
      SnapshottedProcess.create(product_type=Concatted,
                                binary_type=ShellCatToOutFile,
                                input_selectors=(Select(Snapshot),),
                                input_conversion=file_list_to_args_for_cat_with_snapshot_subjects_and_output_file,
                                output_conversion=process_result_to_concatted_from_outfile),
      [ShellCatToOutFile, [], ShellCatToOutFile],
    ])

    request = scheduler.execution_request([Concatted],
                                          [PathGlobs.create('', include=['fs_test/a/b/*'])])
    LocalSerialEngine(scheduler).reduce(request)

    root_entries = scheduler.root_entries(request).items()
    self.assertEquals(1, len(root_entries))
    state = self.assertFirstEntryIsReturn(root_entries, scheduler)
    concatted = state.value

    self.assertEqual(Concatted('one\ntwo\n'), concatted)
Code Example #21
File: test_scheduler.py  Project: weian/pants
    def setUp(self):
        build_root = os.path.join(os.path.dirname(__file__), "examples", "scheduler_inputs")
        self.spec_parser = CmdLineSpecParser(build_root)
        self.scheduler, self.storage = setup_json_scheduler(build_root)
        self.pg = self.scheduler.product_graph
        self.engine = LocalSerialEngine(self.scheduler, self.storage)

        self.guava = Address.parse("3rdparty/jvm:guava")
        self.thrift = Address.parse("src/thrift/codegen/simple")
        self.java = Address.parse("src/java/codegen/simple")
        self.java_simple = Address.parse("src/java/simple")
        self.java_multi = Address.parse("src/java/multiple_classpath_entries")
        self.no_variant_thrift = Address.parse("src/java/codegen/selector:conflict")
        self.unconfigured_thrift = Address.parse("src/thrift/codegen/unconfigured")
        self.resources = Address.parse("src/resources/simple")
        self.consumes_resources = Address.parse("src/java/consumes_resources")
        self.consumes_managed_thirdparty = Address.parse("src/java/managed_thirdparty")
        self.managed_guava = Address.parse("3rdparty/jvm/managed:guava")
        self.managed_hadoop = Address.parse("3rdparty/jvm/managed:hadoop-common")
        self.managed_resolve_latest = Address.parse("3rdparty/jvm/managed:latest-hadoop")
        self.inferred_deps = Address.parse("src/scala/inferred_deps")
Code Example #22
  def test_javac_compilation_example(self):
    sources = PathGlobs.create('', include=['scheduler_inputs/src/java/simple/Simple.java'])

    scheduler = self.mk_scheduler_in_example_fs([
      SnapshottedProcess.create(ClasspathEntry,
                                Javac,
                                (Select(Snapshot), SelectLiteral(JavaOutputDir('build'), JavaOutputDir)),
                                java_sources_to_javac_args,
                                process_result_to_classpath_entry),
      [Javac, [], Javac]
    ])

    request = scheduler.execution_request(
      [ClasspathEntry],
      [sources])
    LocalSerialEngine(scheduler).reduce(request)

    root_entries = scheduler.root_entries(request).items()
    self.assertEquals(1, len(root_entries))
    state = self.assertFirstEntryIsReturn(root_entries, scheduler)
    classpath_entry = state.value
    self.assertIsInstance(classpath_entry, ClasspathEntry)
    self.assertTrue(os.path.exists(os.path.join(classpath_entry.path, 'simple', 'Simple.class')))
Code Example #23
File: test_engine.py  Project: pankajroark/pants
    def create_engine(self, root_subject_types, rules, include_trace_on_error):
        engine = LocalSerialEngine(
            self.scheduler(root_subject_types, rules),
            include_trace_on_error=include_trace_on_error)
        return engine
Code Example #24
File: test_scheduler.py  Project: pombredanne/pants
class SchedulerTest(unittest.TestCase):

  _native = init_native()

  def setUp(self):
    build_root = os.path.join(os.path.dirname(__file__), 'examples', 'scheduler_inputs')
    self.spec_parser = CmdLineSpecParser(build_root)
    self.scheduler = setup_json_scheduler(build_root, self._native)
    self.engine = LocalSerialEngine(self.scheduler)

    self.guava = Address.parse('3rdparty/jvm:guava')
    self.thrift = Address.parse('src/thrift/codegen/simple')
    self.java = Address.parse('src/java/codegen/simple')
    self.java_simple = Address.parse('src/java/simple')
    self.java_multi = Address.parse('src/java/multiple_classpath_entries')
    self.no_variant_thrift = Address.parse('src/java/codegen/selector:conflict')
    self.unconfigured_thrift = Address.parse('src/thrift/codegen/unconfigured')
    self.resources = Address.parse('src/resources/simple')
    self.consumes_resources = Address.parse('src/java/consumes_resources')
    self.consumes_managed_thirdparty = Address.parse('src/java/managed_thirdparty')
    self.managed_guava = Address.parse('3rdparty/jvm/managed:guava')
    self.managed_hadoop = Address.parse('3rdparty/jvm/managed:hadoop-common')
    self.managed_resolve_latest = Address.parse('3rdparty/jvm/managed:latest-hadoop')
    self.inferred_deps = Address.parse('src/scala/inferred_deps')

  def assert_select_for_subjects(self, walk, selector, subjects, variants=None):
    raise ValueError(walk)

  def build(self, build_request):
    """Execute the given request and return roots as a list of ((subject, product), value) tuples."""
    result = self.engine.execute(build_request)
    self.assertIsNone(result.error)
    return self.scheduler.root_entries(build_request).items()

  def request(self, goals, *subjects):
    return self.scheduler.build_request(goals=goals, subjects=subjects)

  def assert_root(self, root, subject, return_value):
    """Asserts that the given root has the given result."""
    self.assertEquals(subject, root[0][0])
    self.assertEquals(Return(return_value), root[1])

  def assert_root_failed(self, root, subject, msg_str):
    """Asserts that the root was a Throw result containing the given msg string."""
    self.assertEquals(subject, root[0][0])
    self.assertEquals(Throw, type(root[1]))
    self.assertIn(msg_str, str(root[1].exc))

  def test_compile_only_3rdparty(self):
    build_request = self.request(['compile'], self.guava)
    root, = self.build(build_request)
    self.assert_root(root, self.guava, Classpath(creator='ivy_resolve'))

  @unittest.skip('Skipped to expedite landing #3821; see: #4027.')
  def test_compile_only_3rdparty_internal(self):
    build_request = self.request(['compile'], '3rdparty/jvm:guava')
    root, = self.build(build_request)

    # Expect a SelectNode for each of the Jar/Classpath.
    self.assert_select_for_subjects(walk, Select(Jar), [self.guava])
    self.assert_select_for_subjects(walk, Select(Classpath), [self.guava])

  @unittest.skip('Skipped to expedite landing #3821; see: #4020.')
  def test_gen(self):
    build_request = self.request(['gen'], self.thrift)
    root, = self.build(build_request)

    # Root: expect the synthetic GenGoal product.
    self.assert_root(root, self.thrift, GenGoal("non-empty input to satisfy the Goal constructor"))

    variants = {'thrift': 'apache_java'}
    # Expect ThriftSources to have been selected.
    self.assert_select_for_subjects(walk, Select(ThriftSources), [self.thrift], variants=variants)
    # Expect an ApacheThriftJavaConfiguration to have been used via the default Variants.
    self.assert_select_for_subjects(walk, SelectVariant(ApacheThriftJavaConfiguration,
                                                        variant_key='thrift'),
                                    [self.thrift],
                                    variants=variants)

  @unittest.skip('Skipped to expedite landing #3821; see: #4020.')
  def test_codegen_simple(self):
    build_request = self.request(['compile'], self.java)
    root, = self.build(build_request)

    # The subgraph below 'src/thrift/codegen/simple' will be affected by its default variants.
    subjects = [self.guava, self.java, self.thrift]
    variant_subjects = [
        Jar(org='org.apache.thrift', name='libthrift', rev='0.9.2', type_alias='jar'),
        Jar(org='commons-lang', name='commons-lang', rev='2.5', type_alias='jar'),
        Address.parse('src/thrift:slf4j-api')]

    # Root: expect a DependenciesNode depending on a SelectNode with compilation via javac.
    self.assert_root(root, self.java, Classpath(creator='javac'))

    # Confirm that exactly the expected subjects got Classpaths.
    self.assert_select_for_subjects(walk, Select(Classpath), subjects)
    self.assert_select_for_subjects(walk, Select(Classpath), variant_subjects,
                                    variants={'thrift': 'apache_java'})

  def test_consumes_resources(self):
    build_request = self.request(['compile'], self.consumes_resources)
    root, = self.build(build_request)
    self.assert_root(root, self.consumes_resources, Classpath(creator='javac'))

  @unittest.skip('Skipped to expedite landing #3821; see: #4027.')
  def test_consumes_resources_internal(self):
    build_request = self.request(['compile'], self.consumes_resources)
    root, = self.build(build_request)

    # Confirm a classpath for the resources target and other subjects. We know that they are
    # reachable from the root (since it was involved in this walk).
    subjects = [self.resources,
                self.consumes_resources,
                self.guava]
    self.assert_select_for_subjects(walk, Select(Classpath), subjects)

  @unittest.skip('Skipped to expedite landing #3821; see: #4020.')
  def test_managed_resolve(self):
    """A managed resolve should consume a ManagedResolve and ManagedJars to produce Jars."""
    build_request = self.request(['compile'], self.consumes_managed_thirdparty)
    root, = self.build(build_request)

    # Validate the root.
    self.assert_root(root, self.consumes_managed_thirdparty, Classpath(creator='javac'))

    # Confirm that we produced classpaths for the managed jars.
    managed_jars = [self.managed_guava, self.managed_hadoop]
    self.assert_select_for_subjects(walk, Select(Classpath), [self.consumes_managed_thirdparty])
    self.assert_select_for_subjects(walk, Select(Classpath), managed_jars,
                                    variants={'resolve': 'latest-hadoop'})

    # Confirm that the produced jars had the appropriate versions.
    self.assertEquals({Jar('org.apache.hadoop', 'hadoop-common', '2.7.0'),
                       Jar('com.google.guava', 'guava', '18.0')},
                      {ret.value for node, ret in walk
                       if node.product == Jar})

  def test_dependency_inference(self):
    """Scala dependency inference introduces dependencies that do not exist in BUILD files."""
    build_request = self.request(['compile'], self.inferred_deps)
    root, = self.build(build_request)
    self.assert_root(root, self.inferred_deps, Classpath(creator='scalac'))

  @unittest.skip('Skipped to expedite landing #3821; see: #4027.')
  def test_dependency_inference_internal(self):
    """Scala dependency inference introduces dependencies that do not exist in BUILD files."""
    build_request = self.request(['compile'], self.inferred_deps)
    root, = self.build(build_request)

    # Confirm that we requested a classpath for the root and inferred targets.
    self.assert_select_for_subjects(walk, Select(Classpath), [self.inferred_deps, self.java_simple])

  def test_multiple_classpath_entries(self):
    """Multiple Classpath products for a single subject currently cause a failure."""
    build_request = self.request(['compile'], self.java_multi)
    root, = self.build(build_request)

    # Validate that the root failed.
    self.assert_root_failed(root, self.java_multi, "Conflicting values produced for")

  def test_descendant_specs(self):
    """Test that Addresses are produced via recursive globs of the 3rdparty/jvm directory."""
    spec = self.spec_parser.parse_spec('3rdparty/jvm::')
    selector = Select(BuildFileAddresses)
    build_request = self.scheduler.selection_request([(selector, spec)])
    ((subject, _), root), = self.build(build_request)

    # Validate the root.
    self.assertEqual(spec, subject)
    self.assertEqual(BuildFileAddresses, type(root.value))

    # Confirm that a few expected addresses are in the list.
    self.assertIn(self.guava, root.value.dependencies)
    self.assertIn(self.managed_guava, root.value.dependencies)
    self.assertIn(self.managed_resolve_latest, root.value.dependencies)

  def test_sibling_specs(self):
    """Test that sibling Addresses are parsed in the 3rdparty/jvm directory."""
    spec = self.spec_parser.parse_spec('3rdparty/jvm:')
    selector = Select(BuildFileAddresses)
    build_request = self.scheduler.selection_request([(selector,spec)])
    ((subject, _), root), = self.build(build_request)

    # Validate the root.
    self.assertEqual(spec, subject)
    self.assertEqual(BuildFileAddresses, type(root.value))

    # Confirm that an expected address is in the list.
    self.assertIn(self.guava, root.value.dependencies)
    # And that a subdirectory address is not.
    self.assertNotIn(self.managed_guava, root.value.dependencies)

  def test_scheduler_visualize(self):
    spec = self.spec_parser.parse_spec('3rdparty/jvm:')
    build_request = self.request(['list'], spec)
    self.build(build_request)

    with temporary_dir() as td:
      output_path = os.path.join(td, 'output.dot')
      self.scheduler.visualize_graph_to_file(output_path)
      with open(output_path, 'rb') as fh:
        graphviz_output = fh.read().strip()

    self.assertIn('digraph', graphviz_output)
    self.assertIn(' -> ', graphviz_output)
Code Example #25
    def serial_engine(self):
        with closing(LocalSerialEngine(self.scheduler)) as e:
            yield e
Code Example #26
    def test_serial_engine_simple(self):
        with closing(LocalSerialEngine(self.scheduler)) as engine:
            self.assert_engine(engine)
Code Example #27
File: test_engine.py  Project: kageiit/pants
    def test_serial_engine_simple(self):
        engine = LocalSerialEngine(self.scheduler, self.storage, self.cache)
        self.assert_engine(engine)
Code Example #28
    def serial_engine(self):
        yield LocalSerialEngine(self.scheduler)
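The `yield`-based helpers in the last few examples read like `@contextmanager` fixtures. Under that assumption, a caller might use them as sketched below; `assert_engine` is the check that appears in the neighbouring examples, and the test name is hypothetical.

    def test_serial_engine_via_fixture(self):
        # Hypothetical usage, assuming serial_engine() is decorated with
        # @contextlib.contextmanager as its `yield` suggests.
        with self.serial_engine() as engine:
            self.assert_engine(engine)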
Code Example #29
class SchedulerTest(unittest.TestCase):

    _native = init_native()

    def setUp(self):
        build_root = os.path.join(os.path.dirname(__file__), 'examples',
                                  'scheduler_inputs')
        self.spec_parser = CmdLineSpecParser(build_root)
        self.scheduler = setup_json_scheduler(build_root, self._native)
        self.engine = LocalSerialEngine(self.scheduler)

        self.guava = Address.parse('3rdparty/jvm:guava')
        self.thrift = Address.parse('src/thrift/codegen/simple')
        self.java = Address.parse('src/java/codegen/simple')
        self.java_simple = Address.parse('src/java/simple')
        self.java_multi = Address.parse('src/java/multiple_classpath_entries')
        self.no_variant_thrift = Address.parse(
            'src/java/codegen/selector:conflict')
        self.unconfigured_thrift = Address.parse(
            'src/thrift/codegen/unconfigured')
        self.resources = Address.parse('src/resources/simple')
        self.consumes_resources = Address.parse('src/java/consumes_resources')
        self.consumes_managed_thirdparty = Address.parse(
            'src/java/managed_thirdparty')
        self.managed_guava = Address.parse('3rdparty/jvm/managed:guava')
        self.managed_hadoop = Address.parse(
            '3rdparty/jvm/managed:hadoop-common')
        self.managed_resolve_latest = Address.parse(
            '3rdparty/jvm/managed:latest-hadoop')
        self.inferred_deps = Address.parse('src/scala/inferred_deps')

    def assert_select_for_subjects(self,
                                   walk,
                                   selector,
                                   subjects,
                                   variants=None):
        raise ValueError(walk)

    def build(self, build_request):
        """Execute the given request and return roots as a list of ((subject, product), value) tuples."""
        result = self.engine.execute(build_request)
        self.assertIsNone(result.error)
        return self.scheduler.root_entries(build_request).items()

    def request(self, goals, *subjects):
        return self.scheduler.build_request(goals=goals, subjects=subjects)

    def assert_root(self, root, subject, return_value):
        """Asserts that the given root has the given result."""
        self.assertEquals(subject, root[0][0])
        self.assertEquals(Return(return_value), root[1])

    def assert_root_failed(self, root, subject, msg_str):
        """Asserts that the root was a Throw result containing the given msg string."""
        self.assertEquals(subject, root[0][0])
        self.assertEquals(Throw, type(root[1]))
        self.assertIn(msg_str, str(root[1].exc))

    def test_compile_only_3rdparty(self):
        build_request = self.request(['compile'], self.guava)
        root, = self.build(build_request)
        self.assert_root(root, self.guava, Classpath(creator='ivy_resolve'))

    @unittest.skip('Skipped to expedite landing #3821; see: #4027.')
    def test_compile_only_3rdparty_internal(self):
        build_request = self.request(['compile'], '3rdparty/jvm:guava')
        root, = self.build(build_request)

        # Expect a SelectNode for each of the Jar/Classpath.
        self.assert_select_for_subjects(walk, Select(Jar), [self.guava])
        self.assert_select_for_subjects(walk, Select(Classpath), [self.guava])

    @unittest.skip('Skipped to expedite landing #3821; see: #4020.')
    def test_gen(self):
        build_request = self.request(['gen'], self.thrift)
        root, = self.build(build_request)

        # Root: expect the synthetic GenGoal product.
        self.assert_root(
            root, self.thrift,
            GenGoal("non-empty input to satisfy the Goal constructor"))

        variants = {'thrift': 'apache_java'}
        # Expect ThriftSources to have been selected.
        self.assert_select_for_subjects(walk,
                                        Select(ThriftSources), [self.thrift],
                                        variants=variants)
        # Expect an ApacheThriftJavaConfiguration to have been used via the default Variants.
        self.assert_select_for_subjects(walk,
                                        SelectVariant(
                                            ApacheThriftJavaConfiguration,
                                            variant_key='thrift'),
                                        [self.thrift],
                                        variants=variants)

    @unittest.skip('Skipped to expedite landing #3821; see: #4020.')
    def test_codegen_simple(self):
        build_request = self.request(['compile'], self.java)
        root, = self.build(build_request)

        # The subgraph below 'src/thrift/codegen/simple' will be affected by its default variants.
        subjects = [self.guava, self.java, self.thrift]
        variant_subjects = [
            Jar(org='org.apache.thrift',
                name='libthrift',
                rev='0.9.2',
                type_alias='jar'),
            Jar(org='commons-lang',
                name='commons-lang',
                rev='2.5',
                type_alias='jar'),
            Address.parse('src/thrift:slf4j-api')
        ]

        # Root: expect a DependenciesNode depending on a SelectNode with compilation via javac.
        self.assert_root(root, self.java, Classpath(creator='javac'))

        # Confirm that exactly the expected subjects got Classpaths.
        self.assert_select_for_subjects(walk, Select(Classpath), subjects)
        self.assert_select_for_subjects(walk,
                                        Select(Classpath),
                                        variant_subjects,
                                        variants={'thrift': 'apache_java'})

    def test_consumes_resources(self):
        build_request = self.request(['compile'], self.consumes_resources)
        root, = self.build(build_request)
        self.assert_root(root, self.consumes_resources,
                         Classpath(creator='javac'))

    @unittest.skip('Skipped to expedite landing #3821; see: #4027.')
    def test_consumes_resources_internal(self):
        build_request = self.request(['compile'], self.consumes_resources)
        root, = self.build(build_request)

        # Confirm a classpath for the resources target and other subjects. We know that they are
        # reachable from the root (since it was involved in this walk).
        subjects = [self.resources, self.consumes_resources, self.guava]
        self.assert_select_for_subjects(walk, Select(Classpath), subjects)

    @unittest.skip('Skipped to expedite landing #3821; see: #4020.')
    def test_managed_resolve(self):
        """A managed resolve should consume a ManagedResolve and ManagedJars to produce Jars."""
        build_request = self.request(['compile'],
                                     self.consumes_managed_thirdparty)
        root, = self.build(build_request)

        # Validate the root.
        self.assert_root(root, self.consumes_managed_thirdparty,
                         Classpath(creator='javac'))

        # Confirm that we produced classpaths for the managed jars.
        managed_jars = [self.managed_guava, self.managed_hadoop]
        self.assert_select_for_subjects(walk, Select(Classpath),
                                        [self.consumes_managed_thirdparty])
        self.assert_select_for_subjects(walk,
                                        Select(Classpath),
                                        managed_jars,
                                        variants={'resolve': 'latest-hadoop'})

        # Confirm that the produced jars had the appropriate versions.
        self.assertEquals(
            {
                Jar('org.apache.hadoop', 'hadoop-common', '2.7.0'),
                Jar('com.google.guava', 'guava', '18.0')
            }, {ret.value
                for node, ret in walk if node.product == Jar})

    def test_dependency_inference(self):
        """Scala dependency inference introduces dependencies that do not exist in BUILD files."""
        build_request = self.request(['compile'], self.inferred_deps)
        root, = self.build(build_request)
        self.assert_root(root, self.inferred_deps, Classpath(creator='scalac'))

    @unittest.skip('Skipped to expedite landing #3821; see: #4027.')
    def test_dependency_inference_internal(self):
        """Scala dependency inference introduces dependencies that do not exist in BUILD files."""
        build_request = self.request(['compile'], self.inferred_deps)
        root, = self.build(build_request)

        # Confirm that we requested a classpath for the root and inferred targets.
        self.assert_select_for_subjects(walk, Select(Classpath),
                                        [self.inferred_deps, self.java_simple])

    def test_multiple_classpath_entries(self):
        """Multiple Classpath products for a single subject currently cause a failure."""
        build_request = self.request(['compile'], self.java_multi)
        root, = self.build(build_request)

        # Validate that the root failed.
        self.assert_root_failed(root, self.java_multi,
                                "Conflicting values produced for")

    def test_descendant_specs(self):
        """Test that Addresses are produced via recursive globs of the 3rdparty/jvm directory."""
        spec = self.spec_parser.parse_spec('3rdparty/jvm::')
        selector = SelectDependencies(Address,
                                      Addresses,
                                      field_types=(Address, ))
        build_request = self.scheduler.selection_request([(selector, spec)])
        ((subject, _), root), = self.build(build_request)

        # Validate the root.
        self.assertEqual(spec, subject)
        self.assertEqual(tuple, type(root.value))

        # Confirm that a few expected addresses are in the list.
        self.assertIn(self.guava, root.value)
        self.assertIn(self.managed_guava, root.value)
        self.assertIn(self.managed_resolve_latest, root.value)

    def test_sibling_specs(self):
        """Test that sibling Addresses are parsed in the 3rdparty/jvm directory."""
        spec = self.spec_parser.parse_spec('3rdparty/jvm:')
        selector = SelectDependencies(Address,
                                      Addresses,
                                      field_types=(Address, ))
        build_request = self.scheduler.selection_request([(selector, spec)])
        ((subject, _), root), = self.build(build_request)

        # Validate the root.
        self.assertEqual(spec, subject)
        self.assertEqual(tuple, type(root.value))

        # Confirm that an expected address is in the list.
        self.assertIn(self.guava, root.value)
        # And that a subdirectory address is not.
        self.assertNotIn(self.managed_guava, root.value)

    def test_scheduler_visualize(self):
        spec = self.spec_parser.parse_spec('3rdparty/jvm:')
        build_request = self.request(['list'], spec)
        self.build(build_request)

        with temporary_dir() as td:
            output_path = os.path.join(td, 'output.dot')
            self.scheduler.visualize_graph_to_file(output_path)
            with open(output_path, 'rb') as fh:
                graphviz_output = fh.read().strip()

        self.assertIn('digraph', graphviz_output)
        self.assertIn(' -> ', graphviz_output)
Code Example #30
class SchedulerTest(unittest.TestCase):
    def setUp(self):
        build_root = os.path.join(os.path.dirname(__file__), 'examples',
                                  'scheduler_inputs')
        self.spec_parser = CmdLineSpecParser(build_root)
        self.scheduler = setup_json_scheduler(build_root, inline_nodes=False)
        self.pg = self.scheduler.product_graph
        self.engine = LocalSerialEngine(self.scheduler)

        self.guava = Address.parse('3rdparty/jvm:guava')
        self.thrift = Address.parse('src/thrift/codegen/simple')
        self.java = Address.parse('src/java/codegen/simple')
        self.java_simple = Address.parse('src/java/simple')
        self.java_multi = Address.parse('src/java/multiple_classpath_entries')
        self.no_variant_thrift = Address.parse(
            'src/java/codegen/selector:conflict')
        self.unconfigured_thrift = Address.parse(
            'src/thrift/codegen/unconfigured')
        self.resources = Address.parse('src/resources/simple')
        self.consumes_resources = Address.parse('src/java/consumes_resources')
        self.consumes_managed_thirdparty = Address.parse(
            'src/java/managed_thirdparty')
        self.managed_guava = Address.parse('3rdparty/jvm/managed:guava')
        self.managed_hadoop = Address.parse(
            '3rdparty/jvm/managed:hadoop-common')
        self.managed_resolve_latest = Address.parse(
            '3rdparty/jvm/managed:latest-hadoop')
        self.inferred_deps = Address.parse('src/scala/inferred_deps')

    def assert_select_for_subjects(self,
                                   walk,
                                   selector,
                                   subjects,
                                   variants=None):
        node_type = SelectNode

        variants = tuple(variants.items()) if variants else None
        self.assertEqual(
            {node_type(subject, variants, selector)
             for subject in subjects}, {
                 node
                 for node, _ in walk if node.product == selector.product
                 and isinstance(node, node_type) and node.variants == variants
             })

    def build_and_walk(self, build_request):
        """Build and then walk the given build_request, returning the walked graph as a list."""
        result = self.engine.execute(build_request)
        self.assertIsNone(result.error)
        return list(self.scheduler.product_graph.walk(build_request.roots))

    def request(self, goals, *addresses):
        return self.request_specs(
            goals, *[self.spec_parser.parse_spec(str(a)) for a in addresses])

    def request_specs(self, goals, *specs):
        return self.scheduler.build_request(goals=goals, subjects=specs)

    def assert_resolve_only(self, goals, root_specs, jars):
        build_request = self.request(goals, *root_specs)
        walk = self.build_and_walk(build_request)

        # Expect a SelectNode for each of the Jar/Classpath.
        self.assert_select_for_subjects(walk, Select(Jar), jars)
        self.assert_select_for_subjects(walk, Select(Classpath), jars)

    def assert_root(self, walk, node, return_value):
        """Asserts that the first Node in a walk was a DependenciesNode with the single given result."""
        root, root_state = walk[0]
        self.assertEquals(type(root), DependenciesNode)
        self.assertEquals(Return([return_value]), root_state)
        self.assertIn(
            (node, Return(return_value)),
            [(d, self.pg.state(d)) for d in self.pg.dependencies_of(root)])

    def assert_root_failed(self, walk, node, thrown_type):
        """Asserts that the first Node in a walk was a DependenciesNode with a Throw result."""
        root, root_state = walk[0]
        self.assertEquals(type(root), DependenciesNode)
        self.assertEquals(Throw, type(root_state))
        dependencies = [(d, self.pg.state(d))
                        for d in self.pg.dependencies_of(root)]
        self.assertIn(
            (node, thrown_type),
            [(k, type(v.exc)) for k, v in dependencies if type(v) is Throw])

    def test_type_error_on_unexpected_subject_type(self):
        with self.assertRaises(TypeError) as cm:
            self.scheduler.build_request(goals={}, subjects=['string'])
        self.assertEquals(
            "Unsupported root subject type: <type 'unicode'> for u'string'",
            str(cm.exception))

    def test_resolve(self):
        self.assert_resolve_only(goals=['resolve'],
                                 root_specs=['3rdparty/jvm:guava'],
                                 jars=[self.guava])

    def test_compile_only_3rdparty(self):
        self.assert_resolve_only(goals=['compile'],
                                 root_specs=['3rdparty/jvm:guava'],
                                 jars=[self.guava])

    def test_gen_noop(self):
        # TODO(John Sirois): Ask around - is this OK?
        # This is different than today.  There is a gen'able target reachable from the java target, but
        # the scheduler 'pull-seeding' has ApacheThriftPlanner stopping short since the subject it's
        # handed is not thrift.
        build_request = self.request(['gen'], self.java)
        walk = self.build_and_walk(build_request)

        self.assert_select_for_subjects(walk, Select(JavaSources,
                                                     optional=True),
                                        [self.java])

    def test_gen(self):
        build_request = self.request(['gen'], self.thrift)
        walk = self.build_and_walk(build_request)

        # Root: expect the synthetic GenGoal product.
        self.assert_root(
            walk, SelectNode(self.thrift, None, Select(GenGoal)),
            GenGoal("non-empty input to satisfy the Goal constructor"))

        variants = {'thrift': 'apache_java'}
        # Expect ThriftSources to have been selected.
        self.assert_select_for_subjects(walk,
                                        Select(ThriftSources), [self.thrift],
                                        variants=variants)
        # Expect an ApacheThriftJavaConfiguration to have been used via the default Variants.
        self.assert_select_for_subjects(walk,
                                        SelectVariant(
                                            ApacheThriftJavaConfiguration,
                                            variant_key='thrift'),
                                        [self.thrift],
                                        variants=variants)

    def test_codegen_simple(self):
        build_request = self.request(['compile'], self.java)
        walk = self.build_and_walk(build_request)

        # The subgraph below 'src/thrift/codegen/simple' will be affected by its default variants.
        subjects = [self.guava, self.java, self.thrift]
        variant_subjects = [
            Jar(org='org.apache.thrift',
                name='libthrift',
                rev='0.9.2',
                type_alias='jar'),
            Jar(org='commons-lang',
                name='commons-lang',
                rev='2.5',
                type_alias='jar'),
            Address.parse('src/thrift:slf4j-api')
        ]

        # Root: expect a DependenciesNode depending on a SelectNode with compilation via javac.
        self.assert_root(walk, SelectNode(self.java, None, Select(Classpath)),
                         Classpath(creator='javac'))

        # Confirm that exactly the expected subjects got Classpaths.
        self.assert_select_for_subjects(walk, Select(Classpath), subjects)
        self.assert_select_for_subjects(walk,
                                        Select(Classpath),
                                        variant_subjects,
                                        variants={'thrift': 'apache_java'})

    def test_consumes_resources(self):
        build_request = self.request(['compile'], self.consumes_resources)
        walk = self.build_and_walk(build_request)

        # Validate the root.
        self.assert_root(
            walk, SelectNode(self.consumes_resources, None, Select(Classpath)),
            Classpath(creator='javac'))

        # Confirm a classpath for the resources target and other subjects. We know that they are
        # reachable from the root (since it was involved in this walk).
        subjects = [self.resources, self.consumes_resources, self.guava]
        self.assert_select_for_subjects(walk, Select(Classpath), subjects)

    def test_managed_resolve(self):
        """A managed resolve should consume a ManagedResolve and ManagedJars to produce Jars."""
        build_request = self.request(['compile'],
                                     self.consumes_managed_thirdparty)
        walk = self.build_and_walk(build_request)

        # Validate the root.
        self.assert_root(
            walk,
            SelectNode(self.consumes_managed_thirdparty, None,
                       Select(Classpath)), Classpath(creator='javac'))

        # Confirm that we produced classpaths for the managed jars.
        managed_jars = [self.managed_guava, self.managed_hadoop]
        self.assert_select_for_subjects(walk, Select(Classpath),
                                        [self.consumes_managed_thirdparty])
        self.assert_select_for_subjects(walk,
                                        Select(Classpath),
                                        managed_jars,
                                        variants={'resolve': 'latest-hadoop'})

        # Confirm that the produced jars had the appropriate versions.
        self.assertEquals(
            {
                Jar('org.apache.hadoop', 'hadoop-common', '2.7.0'),
                Jar('com.google.guava', 'guava', '18.0')
            }, {
                ret.value
                for node, ret in walk
                if node.product == Jar and isinstance(node, SelectNode)
            })

    def test_dependency_inference(self):
        """Scala dependency inference introduces dependencies that do not exist in BUILD files."""
        build_request = self.request(['compile'], self.inferred_deps)
        walk = self.build_and_walk(build_request)

        # Validate the root.
        self.assert_root(
            walk, SelectNode(self.inferred_deps, None, Select(Classpath)),
            Classpath(creator='scalac'))

        # Confirm that we requested a classpath for the root and inferred targets.
        self.assert_select_for_subjects(walk, Select(Classpath),
                                        [self.inferred_deps, self.java_simple])

    def test_multiple_classpath_entries(self):
        """Multiple Classpath products for a single subject currently cause a failure."""
        build_request = self.request(['compile'], self.java_multi)
        walk = self.build_and_walk(build_request)

        # Validate that the root failed.
        self.assert_root_failed(
            walk, SelectNode(self.java_multi, None, Select(Classpath)),
            ConflictingProducersError)

    def test_descendant_specs(self):
        """Test that Addresses are produced via recursive globs of the 3rdparty/jvm directory."""
        spec = self.spec_parser.parse_spec('3rdparty/jvm::')
        build_request = self.request_specs(['list'], spec)
        walk = self.build_and_walk(build_request)

        # Validate the root.
        root, root_state = walk[0]
        root_value = root_state.value
        self.assertEqual(
            DependenciesNode(
                spec, None,
                SelectDependencies(Address, Addresses,
                                   field_types=(Address, ))), root)
        self.assertEqual(list, type(root_value))

        # Confirm that a few expected addresses are in the list.
        self.assertIn(self.guava, root_value)
        self.assertIn(self.managed_guava, root_value)
        self.assertIn(self.managed_resolve_latest, root_value)

    def test_sibling_specs(self):
        """Test that sibling Addresses are parsed in the 3rdparty/jvm directory."""
        spec = self.spec_parser.parse_spec('3rdparty/jvm:')
        build_request = self.request_specs(['list'], spec)
        walk = self.build_and_walk(build_request)

        # Validate the root.
        root, root_state = walk[0]
        root_value = root_state.value
        self.assertEqual(
            DependenciesNode(
                spec, None,
                SelectDependencies(Address, Addresses,
                                   field_types=(Address, ))), root)
        self.assertEqual(list, type(root_value))

        # Confirm that an expected address is in the list.
        self.assertIn(self.guava, root_value)
        # And that a subdirectory address is not.
        self.assertNotIn(self.managed_guava, root_value)

    def test_scheduler_visualize(self):
        spec = self.spec_parser.parse_spec('3rdparty/jvm:')
        build_request = self.request_specs(['list'], spec)
        self.build_and_walk(build_request)

        graphviz_output = '\n'.join(
            self.scheduler.product_graph.visualize(build_request.roots))

        with temporary_dir() as td:
            output_path = os.path.join(td, 'output.dot')
            self.scheduler.visualize_graph_to_file(build_request.roots,
                                                   output_path)
            with open(output_path, 'rb') as fh:
                graphviz_disk_output = fh.read().strip()

        self.assertEqual(graphviz_output, graphviz_disk_output)
        self.assertIn('digraph', graphviz_output)
        self.assertIn(' -> ', graphviz_output)
Code Example #31
0
class SchedulerTest(unittest.TestCase):
  def setUp(self):
    build_root = os.path.join(os.path.dirname(__file__), 'examples', 'scheduler_inputs')
    self.spec_parser = CmdLineSpecParser(build_root)
    self.scheduler = setup_json_scheduler(build_root, inline_nodes=False)
    self.pg = self.scheduler.product_graph
    self.engine = LocalSerialEngine(self.scheduler)

    self.guava = Address.parse('3rdparty/jvm:guava')
    self.thrift = Address.parse('src/thrift/codegen/simple')
    self.java = Address.parse('src/java/codegen/simple')
    self.java_simple = Address.parse('src/java/simple')
    self.java_multi = Address.parse('src/java/multiple_classpath_entries')
    self.no_variant_thrift = Address.parse('src/java/codegen/selector:conflict')
    self.unconfigured_thrift = Address.parse('src/thrift/codegen/unconfigured')
    self.resources = Address.parse('src/resources/simple')
    self.consumes_resources = Address.parse('src/java/consumes_resources')
    self.consumes_managed_thirdparty = Address.parse('src/java/managed_thirdparty')
    self.managed_guava = Address.parse('3rdparty/jvm/managed:guava')
    self.managed_hadoop = Address.parse('3rdparty/jvm/managed:hadoop-common')
    self.managed_resolve_latest = Address.parse('3rdparty/jvm/managed:latest-hadoop')
    self.inferred_deps = Address.parse('src/scala/inferred_deps')

  def assert_select_for_subjects(self, walk, selector, subjects, variants=None):
    node_type = SelectNode

    variants = tuple(variants.items()) if variants else None
    self.assertEqual({node_type(subject, variants, selector) for subject in subjects},
                     {node for node, _ in walk
                      if node.product == selector.product and isinstance(node, node_type) and node.variants == variants})

  def build_and_walk(self, build_request):
    """Build and then walk the given build_request, returning the walked graph as a list."""
    result = self.engine.execute(build_request)
    self.assertIsNone(result.error)
    return list(self.scheduler.product_graph.walk(build_request.roots))

  def request(self, goals, *addresses):
    return self.request_specs(goals, *[self.spec_parser.parse_spec(str(a)) for a in addresses])

  def request_specs(self, goals, *specs):
    return self.scheduler.build_request(goals=goals, subjects=specs)

  def assert_resolve_only(self, goals, root_specs, jars):
    build_request = self.request(goals, *root_specs)
    walk = self.build_and_walk(build_request)

    # Expect a SelectNode for each of the Jar/Classpath.
    self.assert_select_for_subjects(walk, Select(Jar), jars)
    self.assert_select_for_subjects(walk, Select(Classpath), jars)

  def assert_root(self, walk, node, return_value):
    """Asserts that the first Node in a walk was a DependenciesNode with the single given result."""
    root, root_state = walk[0]
    self.assertEqual(type(root), DependenciesNode)
    self.assertEqual(Return([return_value]), root_state)
    self.assertIn((node, Return(return_value)),
                  [(d, self.pg.state(d)) for d in self.pg.dependencies_of(root)])

  def assert_root_failed(self, walk, node, thrown_type):
    """Asserts that the first Node in a walk was a DependenciesNode with a Throw result."""
    root, root_state = walk[0]
    self.assertEqual(type(root), DependenciesNode)
    self.assertEqual(Throw, type(root_state))
    dependencies = [(d, self.pg.state(d)) for d in self.pg.dependencies_of(root)]
    self.assertIn((node, thrown_type), [(k, type(v.exc))
                                        for k, v in dependencies if type(v) is Throw])

  def test_resolve(self):
    self.assert_resolve_only(goals=['resolve'],
                             root_specs=['3rdparty/jvm:guava'],
                             jars=[self.guava])

  def test_compile_only_3rdparty(self):
    self.assert_resolve_only(goals=['compile'],
                             root_specs=['3rdparty/jvm:guava'],
                             jars=[self.guava])

  def test_gen_noop(self):
    # TODO(John Sirois): Ask around - is this OK?
    # This is different than today.  There is a gen'able target reachable from the java target, but
    # the scheduler 'pull-seeding' has ApacheThriftPlanner stopping short since the subject it's
    # handed is not thrift.
    build_request = self.request(['gen'], self.java)
    walk = self.build_and_walk(build_request)

    self.assert_select_for_subjects(walk, Select(JavaSources, optional=True), [self.java])

  def test_gen(self):
    build_request = self.request(['gen'], self.thrift)
    walk = self.build_and_walk(build_request)

    # Root: expect the synthetic GenGoal product.
    self.assert_root(walk,
                     SelectNode(self.thrift, None, Select(GenGoal)),
                     GenGoal("non-empty input to satisfy the Goal constructor"))

    variants = {'thrift': 'apache_java'}
    # Expect ThriftSources to have been selected.
    self.assert_select_for_subjects(walk, Select(ThriftSources), [self.thrift], variants=variants)
    # Expect an ApacheThriftJavaConfiguration to have been used via the default Variants.
    self.assert_select_for_subjects(walk, SelectVariant(ApacheThriftJavaConfiguration,
                                                        variant_key='thrift'),
                                    [self.thrift],
                                    variants=variants)

  def test_codegen_simple(self):
    build_request = self.request(['compile'], self.java)
    walk = self.build_and_walk(build_request)

    # The subgraph below 'src/thrift/codegen/simple' will be affected by its default variants.
    subjects = [
        self.guava,
        self.java,
        self.thrift]
    variant_subjects = [
        Jar(org='org.apache.thrift', name='libthrift', rev='0.9.2', type_alias='jar'),
        Jar(org='commons-lang', name='commons-lang', rev='2.5', type_alias='jar'),
        Address.parse('src/thrift:slf4j-api')]

    # Root: expect a DependenciesNode depending on a SelectNode with compilation via javac.
    self.assert_root(walk,
                     SelectNode(self.java, None, Select(Classpath)),
                     Classpath(creator='javac'))

    # Confirm that exactly the expected subjects got Classpaths.
    self.assert_select_for_subjects(walk, Select(Classpath), subjects)
    self.assert_select_for_subjects(walk, Select(Classpath), variant_subjects,
                                    variants={'thrift': 'apache_java'})

  def test_consumes_resources(self):
    build_request = self.request(['compile'], self.consumes_resources)
    walk = self.build_and_walk(build_request)

    # Validate the root.
    self.assert_root(walk,
                     SelectNode(self.consumes_resources, None, Select(Classpath)),
                     Classpath(creator='javac'))

    # Confirm a classpath for the resources target and other subjects. We know that they are
    # reachable from the root (since it was involved in this walk).
    subjects = [self.resources,
                self.consumes_resources,
                self.guava]
    self.assert_select_for_subjects(walk, Select(Classpath), subjects)

  def test_managed_resolve(self):
    """A managed resolve should consume a ManagedResolve and ManagedJars to produce Jars."""
    build_request = self.request(['compile'], self.consumes_managed_thirdparty)
    walk = self.build_and_walk(build_request)

    # Validate the root.
    self.assert_root(walk,
                     SelectNode(self.consumes_managed_thirdparty, None, Select(Classpath)),
                     Classpath(creator='javac'))

    # Confirm that we produced classpaths for the managed jars.
    managed_jars = [self.managed_guava,
                    self.managed_hadoop]
    self.assert_select_for_subjects(walk, Select(Classpath), [self.consumes_managed_thirdparty])
    self.assert_select_for_subjects(walk, Select(Classpath), managed_jars,
                                    variants={'resolve': 'latest-hadoop'})

    # Confirm that the produced jars had the appropriate versions.
    self.assertEqual({Jar('org.apache.hadoop', 'hadoop-common', '2.7.0'),
                      Jar('com.google.guava', 'guava', '18.0')},
                     {ret.value for node, ret in walk
                      if node.product == Jar and isinstance(node, SelectNode)})

  def test_dependency_inference(self):
    """Scala dependency inference introduces dependencies that do not exist in BUILD files."""
    build_request = self.request(['compile'], self.inferred_deps)
    walk = self.build_and_walk(build_request)

    # Validate the root.
    self.assert_root(walk,
                     SelectNode(self.inferred_deps, None, Select(Classpath)),
                     Classpath(creator='scalac'))

    # Confirm that we requested a classpath for the root and inferred targets.
    self.assert_select_for_subjects(walk, Select(Classpath), [self.inferred_deps, self.java_simple])

  def test_multiple_classpath_entries(self):
    """Multiple Classpath products for a single subject currently cause a failure."""
    build_request = self.request(['compile'], self.java_multi)
    walk = self.build_and_walk(build_request)

    # Validate that the root failed.
    self.assert_root_failed(walk,
                            SelectNode(self.java_multi, None, Select(Classpath)),
                            ConflictingProducersError)

  def test_descendant_specs(self):
    """Test that Addresses are produced via recursive globs of the 3rdparty/jvm directory."""
    spec = self.spec_parser.parse_spec('3rdparty/jvm::')
    build_request = self.request_specs(['list'], spec)
    walk = self.build_and_walk(build_request)

    # Validate the root.
    root, root_state = walk[0]
    root_value = root_state.value
    self.assertEqual(DependenciesNode(spec, None, SelectDependencies(Address, Addresses)), root)
    self.assertEqual(list, type(root_value))

    # Confirm that a few expected addresses are in the list.
    self.assertIn(self.guava, root_value)
    self.assertIn(self.managed_guava, root_value)
    self.assertIn(self.managed_resolve_latest, root_value)

  def test_sibling_specs(self):
    """Test that sibling Addresses are parsed in the 3rdparty/jvm directory."""
    spec = self.spec_parser.parse_spec('3rdparty/jvm:')
    build_request = self.request_specs(['list'], spec)
    walk = self.build_and_walk(build_request)

    # Validate the root.
    root, root_state = walk[0]
    root_value = root_state.value
    self.assertEqual(DependenciesNode(spec, None, SelectDependencies(Address, Addresses)), root)
    self.assertEqual(list, type(root_value))

    # Confirm that an expected address is in the list.
    self.assertIn(self.guava, root_value)
    # And that a subdirectory address is not.
    self.assertNotIn(self.managed_guava, root_value)

  def test_scheduler_visualize(self):
    spec = self.spec_parser.parse_spec('3rdparty/jvm:')
    build_request = self.request_specs(['list'], spec)
    self.build_and_walk(build_request)

    graphviz_output = '\n'.join(self.scheduler.product_graph.visualize(build_request.roots))

    with temporary_dir() as td:
      output_path = os.path.join(td, 'output.dot')
      self.scheduler.visualize_graph_to_file(build_request.roots, output_path)
      with open(output_path, 'rb') as fh:
        graphviz_disk_output = fh.read().strip()

    self.assertEqual(graphviz_output, graphviz_disk_output)
    self.assertIn('digraph', graphviz_output)
    self.assertIn(' -> ', graphviz_output)