Example #1
  def test_resolve_without_a_matching_target(self):
    graph_mock = mock.Mock()
    graph_mock.get_target = mock.Mock(return_value=None)
    graph_mock.inject_specs_closure = mock.Mock(return_value=[Address('a','different')])

    mapper = LegacyAddressMapper(graph_mock, '')
    with self.assertRaises(AddressMapper.BuildFileScanError):
      mapper.resolve(Address('a', 'address'))
Example #2
  def test_scan_specs_returns_ordered_set(self):
    address = Address('a', 'b')

    graph_mock = mock.Mock()
    graph_mock.inject_specs_closure = mock.Mock(return_value=[address, address])

    mapper = LegacyAddressMapper(graph_mock, '')
    self.assertEqual(OrderedSet([address]), mapper.scan_specs([SiblingAddresses('any')]))
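Note: the mocked inject_specs_closure deliberately yields the same Address twice, and the assertion pins down that scan_specs collapses duplicates while preserving order, which is what OrderedSet provides. A rough standard-library stand-in for that behaviour (not the Pants implementation) is order-preserving deduplication via dict.fromkeys:

# Order-preserving deduplication, roughly what OrderedSet([address, address]) gives.
# dicts preserve insertion order in Python 3.7+.
def dedupe(addresses):
    return list(dict.fromkeys(addresses))

assert dedupe(['a:b', 'a:b', 'c:d']) == ['a:b', 'c:d']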
Example #3
  def test_addresses_in_spec_path_wraps_error_in_buildfile_scan_error(self):
    graph_mock = mock.Mock()
    graph_mock.inject_specs_closure = mock.Mock(side_effect=LegacyBuildGraph.InvalidCommandLineSpecError('some msg'))

    mapper = LegacyAddressMapper(graph_mock, '')
    with self.assertRaises(AddressMapper.BuildFileScanError) as cm:
      mapper.addresses_in_spec_path('some/path')
    self.assertEqual('some msg', str(cm.exception))
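Note: the pattern in this test is exception translation: mock.Mock(side_effect=...) makes the injected graph raise, and the test asserts that the mapper re-raises it as AddressMapper.BuildFileScanError carrying the original message. A minimal self-contained sketch of the same pattern, with hypothetical Mapper/ScanError names standing in for the Pants types and ValueError standing in for the graph's own error:

import unittest
from unittest import mock


class ScanError(Exception):
    """Stand-in for AddressMapper.BuildFileScanError."""


class Mapper:
    """Hypothetical wrapper that translates graph errors into ScanError."""

    def __init__(self, graph):
        self._graph = graph

    def addresses_in_spec_path(self, spec_path):
        try:
            return list(self._graph.inject_specs_closure(spec_path))
        except ValueError as e:
            raise ScanError(str(e))


class MapperTest(unittest.TestCase):
    def test_wraps_error(self):
        graph_mock = mock.Mock()
        # side_effect makes the mocked call raise instead of returning a value.
        graph_mock.inject_specs_closure = mock.Mock(side_effect=ValueError('some msg'))

        mapper = Mapper(graph_mock)
        with self.assertRaises(ScanError) as cm:
            mapper.addresses_in_spec_path('some/path')
        self.assertEqual('some msg', str(cm.exception))


if __name__ == '__main__':
    unittest.main()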
Example #4
    def test_resolve_without_a_matching_target(self):
        graph_mock = mock.Mock()
        graph_mock.get_target = mock.Mock(return_value=None)
        graph_mock.inject_specs_closure = mock.Mock(
            return_value=[Address('a', 'different')])

        mapper = LegacyAddressMapper(graph_mock, '')
        with self.assertRaises(AddressMapper.BuildFileScanError):
            mapper.resolve(Address('a', 'address'))
Example #5
    def test_resolve_with_a_target(self):
        target = LegacyTarget(None, None)
        address = Address('a', 'a')

        graph_mock = mock.Mock()
        graph_mock.get_target = mock.Mock(return_value=target)

        mapper = LegacyAddressMapper(graph_mock, '')
        self.assertEqual((address, target), mapper.resolve(address))
Example #6
  def test_resolve_with_a_target(self):
    target = LegacyTarget(None, None)
    address = Address('a', 'a')

    graph_mock = mock.Mock()
    graph_mock.get_target = mock.Mock(return_value=target)

    mapper = LegacyAddressMapper(graph_mock, '')
    self.assertEqual((address, target), mapper.resolve(address))
Example #7
    def test_scan_specs_returns_ordered_set(self):
        address = Address('a', 'b')

        graph_mock = mock.Mock()
        graph_mock.inject_specs_closure = mock.Mock(
            return_value=[address, address])

        mapper = LegacyAddressMapper(graph_mock, '')
        self.assertEqual(OrderedSet([address]),
                         mapper.scan_specs([SiblingAddresses('any')]))
Example #8
    def test_addresses_in_spec_path_wraps_error_in_buildfile_scan_error(self):
        graph_mock = mock.Mock()
        graph_mock.inject_specs_closure = mock.Mock(
            side_effect=LegacyBuildGraph.InvalidCommandLineSpecError(
                'some msg'))

        mapper = LegacyAddressMapper(graph_mock, '')
        with self.assertRaises(AddressMapper.BuildFileScanError) as cm:
            mapper.addresses_in_spec_path('some/path')
        self.assertEqual('some msg', str(cm.exception))
Example #9
  def test_scan_addresses_with_root_specified(self):
    address = Address('a', 'b')

    graph_mock = mock.Mock()
    graph_mock.inject_specs_closure = mock.Mock(return_value=[address])

    mapper = LegacyAddressMapper(graph_mock, '/some/build/root')
    absolute_root_path = '/some/build/root/a'
    mapper.scan_addresses(absolute_root_path)

    graph_mock.inject_specs_closure.assert_called_with([DescendantAddresses('a')])
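Note: assert_called_with checks the arguments of the most recent call on a mock, so this test pins down that scan_addresses relativizes the absolute path against the build root before building a DescendantAddresses spec. A small standalone illustration of both halves, using a plain tuple as a placeholder spec rather than the Pants type:

import os
from unittest import mock

build_root = '/some/build/root'
absolute_path = '/some/build/root/a'
# The mapper presumably relativizes the path before constructing the spec.
rel = os.path.relpath(absolute_path, build_root)   # -> 'a'

graph_mock = mock.Mock()
graph_mock.inject_specs_closure([('DescendantAddresses', rel)])
# Passes only if the last call used exactly these arguments.
graph_mock.inject_specs_closure.assert_called_with([('DescendantAddresses', rel)])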
Example #10
    def test_scan_addresses_with_root_specified(self):
        address = Address('a', 'b')

        graph_mock = mock.Mock()
        graph_mock.inject_specs_closure = mock.Mock(return_value=[address])

        mapper = LegacyAddressMapper(graph_mock, '/some/build/root')
        absolute_root_path = '/some/build/root/a'
        mapper.scan_addresses(absolute_root_path)

        graph_mock.inject_specs_closure.assert_called_with(
            [DescendantAddresses('a')])
Example #11
    def test_other_throw_is_fail(self) -> None:
        # scan_addresses() should raise an error if the scheduler returns an error it can't ignore.
        class ThrowReturningScheduler:
            def execution_request(self, *args):
                pass

            def execute(self, *args):
                return [], [(("some-thing", None), Throw(Exception("just an exception")))]

        with temporary_dir() as build_root:
            mapper = LegacyAddressMapper(ThrowReturningScheduler(), build_root)

            with self.assertRaises(LegacyAddressMapper.BuildFileScanError) as cm:
                mapper.scan_addresses(os.path.join(build_root, "foo"))
            self.assertIn("just an exception", str(cm.exception))
Example #12
  def test_other_throw_is_fail(self):
    # scan_addresses() should raise an error if the scheduler returns an error it can't ignore.
    class ThrowReturningScheduler(object):
      def execution_request(self, *args):
        pass

      def execute(self, *args):
        return [], [(('some-thing', None), Throw(Exception('just an exception')))]

    with temporary_dir() as build_root:
      mapper = LegacyAddressMapper(ThrowReturningScheduler(), build_root)

      with self.assertRaises(LegacyAddressMapper.BuildFileScanError) as cm:
        mapper.scan_addresses(os.path.join(build_root, 'foo'))
      self.assertIn('just an exception', str(cm.exception))
Example #13
  def test_other_throw_is_fail(self):
    # scan_addresses() should raise an error if the scheduler returns an error it can't ignore.
    class ThrowReturningScheduler(object):
      def execution_request(self, *args):
        pass

      def execute(self, *args):
        return ExecutionResult(None, [(('some-thing', None), Throw(Exception('just an exception')))])

    with temporary_dir() as build_root:
      mapper = LegacyAddressMapper(ThrowReturningScheduler(), build_root)

      with self.assertRaises(LegacyAddressMapper.BuildFileScanError) as cm:
        mapper.scan_addresses(os.path.join(build_root, 'foo'))
      self.assertIn('just an exception', str(cm.exception))
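Note: these variants use a hand-written fake scheduler instead of mock.Mock because what matters is the exact shape of execute()'s return value: roots plus a list of (key, Throw) entries. A minimal self-contained sketch of the same fake-object pattern, with a hypothetical scan() consumer standing in for LegacyAddressMapper:

import unittest


class Throw:
    """Stand-in for the engine's Throw result wrapper."""

    def __init__(self, exc):
        self.exc = exc


class FakeScheduler:
    """Hand-rolled fake: simpler than a Mock when the return shape matters."""

    def execute(self, *args):
        return [], [(('some-thing', None), Throw(Exception('just an exception')))]


def scan(scheduler):
    # Hypothetical consumer that surfaces any Throw as a RuntimeError.
    _, throws = scheduler.execute()
    for _, throw in throws:
        raise RuntimeError(str(throw.exc))


class FakeSchedulerTest(unittest.TestCase):
    def test_throw_surfaces(self):
        with self.assertRaises(RuntimeError) as cm:
            scan(FakeScheduler())
        self.assertIn('just an exception', str(cm.exception))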
Example #14
    def create_build_graph(self,
                           target_roots,
                           build_root=None,
                           include_trace_on_error=True):
        """Construct and return a `BuildGraph` given a set of input specs.

    :param TargetRoots target_roots: The targets root of the request.
    :param string build_root: The build root.
    :returns: A tuple of (BuildGraph, AddressMapper).
    """
        logger.debug('target_roots are: %r', target_roots)
        graph = LegacyBuildGraph.create(
            self.scheduler,
            self.engine,
            self.symbol_table_cls,
            include_trace_on_error=include_trace_on_error)
        logger.debug('build_graph is: %s', graph)
        with self.scheduler.locked():
            # Ensure the entire generator is unrolled.
            for _ in graph.inject_specs_closure(target_roots.as_specs()):
                pass

        logger.debug('engine cache stats: %s', self.engine.cache_stats())
        address_mapper = LegacyAddressMapper(self.scheduler, self.engine,
                                             build_root or get_buildroot())
        logger.debug('address_mapper is: %s', address_mapper)
        return graph, address_mapper
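Note: the `for _ in graph.inject_specs_closure(...): pass` loop is not dead code. inject_specs_closure is a generator, so iterating it to exhaustion is what forces all of its injection side effects to actually run. A tiny demonstration of the idiom:

def inject(names):
    for name in names:
        print('injecting', name)   # the side effect we care about
        yield name

gen = inject(['a', 'b'])   # nothing printed yet: generators are lazy
for _ in gen:              # unrolling the generator runs every side effect
    pass
# An equivalent idiom for exhausting a generator: collections.deque(gen, maxlen=0)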
Example #15
async def find_owners(owners_request: OwnersRequest) -> Owners:
    sources_set = FrozenOrderedSet(owners_request.sources)
    dirs_set = FrozenOrderedSet(
        os.path.dirname(source) for source in sources_set)

    # Walk up the buildroot looking for targets that would conceivably claim changed sources.
    candidate_specs = tuple(AscendantAddresses(directory=d) for d in dirs_set)
    candidate_targets = await Get[HydratedTargets](
        AddressSpecs(candidate_specs))

    # Match the source globs against the expanded candidate targets.
    def owns_any_source(legacy_target: HydratedTarget) -> bool:
        """Given a `HydratedTarget` instance, check if it owns the given source file."""
        target_kwargs = legacy_target.adaptor.kwargs()

        # Handle `sources`-declaring targets.
        # NB: Deleted files can only be matched against the 'filespec' (ie, `PathGlobs`) for a target,
        # so we don't actually call `fileset.matches` here.
        # TODO: This matching logic should be implemented using the rust `fs` crate for two reasons:
        #  1) having two implementations isn't great
        #  2) we're expanding sources via HydratedTarget, but it isn't necessary to do that to match
        target_sources = target_kwargs.get("sources", None)
        return target_sources and any_matches_filespec(
            paths=sources_set, spec=target_sources.filespec)

    build_file_addresses = await MultiGet(
        Get[BuildFileAddress](Address, ht.adaptor.address)
        for ht in candidate_targets)
    owners = Addresses(
        ht.adaptor.address
        for ht, bfa in zip(candidate_targets, build_file_addresses)
        if LegacyAddressMapper.any_is_declaring_file(bfa, sources_set)
        or owns_any_source(ht))
    return Owners(owners)
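Note: the heart of the rule is matching the changed source paths against each candidate target's sources filespec; any_matches_filespec is Pants-internal. A rough standard-library approximation of the idea, assuming a filespec shaped like {'globs': [...]} (simplified; the real matcher also handles excludes and richer glob semantics):

import fnmatch

def roughly_matches_filespec(paths, spec):
    # Does any changed path match any glob declared by the target?
    globs = spec.get('globs', [])
    return any(fnmatch.fnmatch(path, glob) for path in paths for glob in globs)

# A changed file under src/python/foo matches that target's sources glob.
assert roughly_matches_filespec(
    paths={'src/python/foo/bar.py'},
    spec={'globs': ['src/python/foo/*.py']},
)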
Example #16
    def iter_target_addresses_for_sources(self, sources):
        """Bulk, iterable form of `target_addresses_for_source`."""
        # Walk up the buildroot looking for targets that would conceivably claim changed sources.
        sources_set = set(sources)
        subjects = [
            AscendantAddresses(directory=d)
            for d in self._unique_dirs_for_sources(sources_set)
        ]

        for hydrated_targets in self._engine.product_request(
                HydratedTargets, subjects):
            for hydrated_target in hydrated_targets.dependencies:
                legacy_address = hydrated_target.adaptor.address

                # Handle BUILD files.
                if any(
                        LegacyAddressMapper.is_declaring_file(
                            legacy_address, f) for f in sources_set):
                    yield legacy_address
                else:
                    # Handle claimed files.
                    target_files_iter = self._iter_owned_files_from_hydrated_target(
                        hydrated_target)
                    if any(source_file in sources_set
                           for source_file in target_files_iter):
                        # At least one file in this target's sources matches our changed sources - emit its address.
                        yield legacy_address
Example #17
 def create_address_mapper(self, build_root):
     work_dir = os.path.join(build_root, '.pants.d')
     scheduler = EngineInitializer.setup_legacy_graph(
         [],
         work_dir,
         build_file_imports_behavior='allow',
         build_root=build_root,
         native=self._native).scheduler
     return LegacyAddressMapper(scheduler.new_session(), build_root)
Example #18
async def find_owners(build_configuration: BuildConfiguration,
                      address_mapper: AddressMapper,
                      owners_request: OwnersRequest) -> BuildFileAddresses:
    sources_set = OrderedSet(owners_request.sources)
    dirs_set = OrderedSet(dirname(source) for source in sources_set)

    # Walk up the buildroot looking for targets that would conceivably claim changed sources.
    candidate_specs = tuple(AscendantAddresses(directory=d) for d in dirs_set)
    candidate_targets = await Get(HydratedTargets, Specs(candidate_specs))

    # Match the source globs against the expanded candidate targets.
    def owns_any_source(legacy_target):
        """Given a `HydratedTarget` instance, check if it owns the given source file."""
        target_kwargs = legacy_target.adaptor.kwargs()

        # Handle `sources`-declaring targets.
        # NB: Deleted files can only be matched against the 'filespec' (ie, `PathGlobs`) for a target,
        # so we don't actually call `fileset.matches` here.
        # TODO: This matching logic should be implemented using the rust `fs` crate for two reasons:
        #  1) having two implementations isn't great
        #  2) we're expanding sources via HydratedTarget, but it isn't necessary to do that to match
        target_sources = target_kwargs.get('sources', None)
        if target_sources and any_matches_filespec(sources_set,
                                                   target_sources.filespec):
            return True

        return False

    direct_owners = tuple(
        ht.adaptor.address for ht in candidate_targets
        if LegacyAddressMapper.any_is_declaring_file(
            ht.adaptor.address, sources_set) or owns_any_source(ht))

    # If the OwnersRequest does not require dependees, then we're done.
    if owners_request.include_dependees == 'none':
        return BuildFileAddresses(direct_owners)
    else:
        # Otherwise: find dependees.
        all_addresses = await Get(BuildFileAddresses,
                                  Specs((DescendantAddresses(''), )))
        all_hydrated_structs = await MultiGet(
            Get(HydratedStruct, Address, a.to_address())
            for a in all_addresses)
        all_structs = [hs.value for hs in all_hydrated_structs]

        bfa = build_configuration.registered_aliases()
        graph = _DependentGraph.from_iterable(
            target_types_from_build_file_aliases(bfa), address_mapper,
            all_structs)
        if owners_request.include_dependees == 'direct':
            return BuildFileAddresses(
                tuple(graph.dependents_of_addresses(direct_owners)))
        else:
            assert owners_request.include_dependees == 'transitive'
            return BuildFileAddresses(
                tuple(graph.transitive_dependents_of_addresses(direct_owners)))
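Note: include_dependees selects between the direct dependents of the owning targets and the full transitive closure computed by _DependentGraph. A toy standalone illustration of that difference, using a hand-built reverse-dependency map rather than the Pants graph:

# Toy reverse-dependency map: target -> targets that directly depend on it.
dependents = {
    'a': {'b'},
    'b': {'c'},
    'c': set(),
}

def transitive_dependents(roots):
    seen, frontier = set(), set(roots)
    while frontier:
        frontier = {d for t in frontier for d in dependents.get(t, ()) if d not in seen}
        seen |= frontier
    return seen

assert dependents['a'] == {'b'}                    # 'direct'
assert transitive_dependents({'a'}) == {'b', 'c'}  # 'transitive'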
Example #19
 def test_is_declaring_file(self):
   scheduler = mock.Mock()
   mapper = LegacyAddressMapper(scheduler, '')
   self.assertTrue(mapper.is_declaring_file(Address('path', 'name'), 'path/BUILD'))
   self.assertTrue(mapper.is_declaring_file(Address('path', 'name'), 'path/BUILD.suffix'))
   self.assertFalse(mapper.is_declaring_file(Address('path', 'name'), 'path/not_a_build_file'))
   self.assertFalse(mapper.is_declaring_file(Address('path', 'name'), 'differing-path/BUILD'))
   self.assertFalse(mapper.is_declaring_file(
     BuildFileAddress(target_name='name', rel_path='path/BUILD.new'),
     'path/BUILD'))
   self.assertTrue(mapper.is_declaring_file(
     BuildFileAddress(target_name='name', rel_path='path/BUILD'),
     'path/BUILD'))
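Note: taken together, these assertions spell out is_declaring_file's contract: a plain Address is declared by any BUILD* file in its own directory, while a BuildFileAddress is declared only by the exact BUILD file named in its rel_path. A simplified standalone sketch of that rule (the (spec_path, target_name) tuple and the rel_path attribute check below are illustrative stand-ins, not the Pants implementation):

import os

def is_declaring_file(address, file_path):
    # A BuildFileAddress-like object pins the exact BUILD file that declared it.
    rel_path = getattr(address, 'rel_path', None)
    if rel_path is not None:
        return file_path == rel_path
    # Otherwise any BUILD* file in the address's directory counts.
    spec_path, _name = address  # sketch: a plain (spec_path, target_name) pair
    return (os.path.dirname(file_path) == spec_path
            and os.path.basename(file_path).startswith('BUILD'))

assert is_declaring_file(('path', 'name'), 'path/BUILD.suffix')
assert not is_declaring_file(('path', 'name'), 'path/not_a_build_file')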
Example #20
 def test_is_declaring_file(self):
   scheduler = mock.Mock()
   mapper = LegacyAddressMapper(scheduler, None, '')
   self.assertTrue(mapper.is_declaring_file(Address('path', 'name'), 'path/BUILD'))
   self.assertTrue(mapper.is_declaring_file(Address('path', 'name'), 'path/BUILD.suffix'))
   self.assertFalse(mapper.is_declaring_file(Address('path', 'name'), 'path/not_a_build_file'))
   self.assertFalse(mapper.is_declaring_file(Address('path', 'name'), 'differing-path/BUILD'))
Example #21
    def create_build_graph(
        self, specs: Specs, build_root: Optional[str] = None,
    ) -> Tuple[LegacyBuildGraph, LegacyAddressMapper]:
        """Construct and return a `BuildGraph` given a set of input specs."""
        logger.debug("specs are: %r", specs)
        graph = LegacyBuildGraph.create(self.scheduler_session, self.build_file_aliases)
        logger.debug("build_graph is: %s", graph)
        # Ensure the entire generator is unrolled.
        for _ in graph.inject_roots_closure(specs.address_specs):
            pass

        address_mapper = LegacyAddressMapper(self.scheduler_session, build_root or get_buildroot())
        logger.debug("address_mapper is: %s", address_mapper)
        return graph, address_mapper
Example #22
 def test_is_declaring_file(self):
   scheduler = mock.Mock()
   mapper = LegacyAddressMapper(scheduler, '')
   self.assertTrue(mapper.is_declaring_file(Address('path', 'name'), 'path/BUILD'))
   self.assertTrue(mapper.is_declaring_file(Address('path', 'name'), 'path/BUILD.suffix'))
   self.assertFalse(mapper.is_declaring_file(Address('path', 'name'), 'path/not_a_build_file'))
   self.assertFalse(mapper.is_declaring_file(Address('path', 'name'), 'differing-path/BUILD'))
   self.assertFalse(mapper.is_declaring_file(
     BuildFileAddress(target_name='name', rel_path='path/BUILD.new'),
     'path/BUILD'))
   self.assertTrue(mapper.is_declaring_file(
     BuildFileAddress(target_name='name', rel_path='path/BUILD'),
     'path/BUILD'))
Example #23
 def test_is_declaring_file(self) -> None:
     scheduler = unittest.mock.Mock()
     mapper = LegacyAddressMapper(scheduler, "")
     self.assertTrue(mapper.is_declaring_file(Address("path", "name"), "path/BUILD"))
     self.assertTrue(mapper.is_declaring_file(Address("path", "name"), "path/BUILD.suffix"))
     self.assertFalse(mapper.is_declaring_file(Address("path", "name"), "path/not_a_build_file"))
     self.assertFalse(mapper.is_declaring_file(Address("path", "name"), "differing-path/BUILD"))
     self.assertFalse(
         mapper.is_declaring_file(
             BuildFileAddress(target_name="name", rel_path="path/BUILD.new"), "path/BUILD"
         )
     )
     self.assertTrue(
         mapper.is_declaring_file(
             BuildFileAddress(target_name="name", rel_path="path/BUILD"), "path/BUILD"
         )
     )
Example #24
def find_owners(build_configuration, address_mapper, owners_request):
  sources_set = OrderedSet(owners_request.sources)
  dirs_set = OrderedSet(dirname(source) for source in sources_set)

  # Walk up the buildroot looking for targets that would conceivably claim changed sources.
  candidate_specs = tuple(AscendantAddresses(directory=d) for d in dirs_set)
  candidate_targets = yield Get(HydratedTargets, Specs(candidate_specs))

  # Match the source globs against the expanded candidate targets.
  def owns_any_source(legacy_target):
    """Given a `HydratedTarget` instance, check if it owns the given source file."""
    target_kwargs = legacy_target.adaptor.kwargs()

    # Handle `sources`-declaring targets.
    # NB: Deleted files can only be matched against the 'filespec' (ie, `PathGlobs`) for a target,
    # so we don't actually call `fileset.matches` here.
    # TODO: This matching logic should be implemented using the rust `fs` crate for two reasons:
    #  1) having two implementations isn't great
    #  2) we're expanding sources via HydratedTarget, but it isn't necessary to do that to match
    target_sources = target_kwargs.get('sources', None)
    if target_sources and any_matches_filespec(sources_set, target_sources.filespec):
      return True

    return False

  direct_owners = tuple(ht.adaptor.address
                        for ht in candidate_targets
                        if LegacyAddressMapper.any_is_declaring_file(ht.adaptor.address, sources_set) or
                           owns_any_source(ht))

  # If the OwnersRequest does not require dependees, then we're done.
  if owners_request.include_dependees == 'none':
    yield BuildFileAddresses(direct_owners)
  else:
    # Otherwise: find dependees.
    all_addresses = yield Get(BuildFileAddresses, Specs((DescendantAddresses(''),)))
    all_structs = yield [Get(HydratedStruct, Address, a.to_address()) for a in all_addresses]
    all_structs = [s.value for s in all_structs]

    bfa = build_configuration.registered_aliases()
    graph = _DependentGraph.from_iterable(target_types_from_build_file_aliases(bfa),
                                          address_mapper,
                                          all_structs)
    if owners_request.include_dependees == 'direct':
      yield BuildFileAddresses(tuple(graph.dependents_of_addresses(direct_owners)))
    else:
      assert owners_request.include_dependees == 'transitive'
      yield BuildFileAddresses(tuple(graph.transitive_dependents_of_addresses(direct_owners)))
Example #25
  def iter_target_addresses_for_sources(self, sources):
    """Bulk, iterable form of `target_addresses_for_source`."""
    # Walk up the buildroot looking for targets that would conceivably claim changed sources.
    sources_set = set(sources)
    subjects = [AscendantAddresses(directory=d) for d in self._unique_dirs_for_sources(sources_set)]

    for hydrated_targets in self._engine.product_request(HydratedTargets, subjects):
      for hydrated_target in hydrated_targets.dependencies:
        legacy_address = hydrated_target.adaptor.address

        # Handle BUILD files.
        if any(LegacyAddressMapper.is_declaring_file(legacy_address, f) for f in sources_set):
          yield legacy_address
        else:
          if any(self._owns_source(source, hydrated_target) for source in sources_set):
            yield legacy_address
Example #26
  def create_build_graph(self, target_roots, build_root=None):
    """Construct and return a `BuildGraph` given a set of input specs.

    :param TargetRoots target_roots: The target roots of the request.
    :param string build_root: The build root.
    :returns: A tuple of (BuildGraph, AddressMapper).
    """
    logger.debug('target_roots are: %r', target_roots)
    graph = LegacyBuildGraph.create(self.scheduler_session, self.build_file_aliases)
    logger.debug('build_graph is: %s', graph)
    # Ensure the entire generator is unrolled.
    for _ in graph.inject_roots_closure(target_roots):
      pass

    address_mapper = LegacyAddressMapper(self.scheduler_session, build_root or get_buildroot())
    logger.debug('address_mapper is: %s', address_mapper)
    return graph, address_mapper
Example #27
  def iter_target_addresses_for_sources(self, sources):
    """Bulk, iterable form of `target_addresses_for_source`."""
    # Walk up the buildroot looking for targets that would conceivably claim changed sources.
    sources_set = set(sources)
    specs = tuple(AscendantAddresses(directory=d) for d in self._unique_dirs_for_sources(sources_set))

    # Uniqify all transitive hydrated targets.
    hydrated_target_to_address = {}
    hydrated_targets, = self._scheduler.product_request(HydratedTargets, [Specs(specs)])
    for hydrated_target in hydrated_targets.dependencies:
      if hydrated_target not in hydrated_target_to_address:
        hydrated_target_to_address[hydrated_target] = hydrated_target.adaptor.address

    for hydrated_target, legacy_address in six.iteritems(hydrated_target_to_address):
      # Handle BUILD files.
      if (LegacyAddressMapper.any_is_declaring_file(legacy_address, sources_set) or
          self._owns_any_source(sources_set, hydrated_target)):
        yield legacy_address
Example #28
  def iter_target_addresses_for_sources(self, sources):
    """Bulk, iterable form of `target_addresses_for_source`."""
    # Walk up the buildroot looking for targets that would conceivably claim changed sources.
    sources_set = set(sources)
    subjects = [AscendantAddresses(directory=d) for d in self._unique_dirs_for_sources(sources_set)]

    for legacy_target in self._engine.product_request(LegacyTarget, subjects):
      legacy_address = legacy_target.adaptor.address

      # Handle BUILD files.
      if any(LegacyAddressMapper.is_declaring_file(legacy_address, f) for f in sources_set):
        yield legacy_address
      else:
        # Handle claimed files.
        target_files_iter = self._iter_owned_files_from_legacy_target(legacy_target)
        if any(source_file in sources_set for source_file in target_files_iter):
          # At least one file in this target's sources matches our changed sources - emit its address.
          yield legacy_address
Example #29
    def iter_target_addresses_for_sources(self, sources):
        """Bulk, iterable form of `target_addresses_for_source`."""
        # Walk up the buildroot looking for targets that would conceivably claim changed sources.
        sources_set = set(sources)
        subjects = [
            AscendantAddresses(directory=d)
            for d in self._unique_dirs_for_sources(sources_set)
        ]

        for hydrated_targets in self._scheduler.product_request(
                HydratedTargets, subjects):
            for hydrated_target in hydrated_targets.dependencies:
                legacy_address = hydrated_target.adaptor.address

                # Handle BUILD files.
                if any(
                        LegacyAddressMapper.is_declaring_file(
                            legacy_address, f) for f in sources_set):
                    yield legacy_address
                else:
                    if any(
                            self._owns_source(source, hydrated_target)
                            for source in sources_set):
                        yield legacy_address
Example #30
 def create_address_mapper(self, build_root):
     work_dir = os.path.join(build_root, '.pants.d')
     scheduler, engine, _, _ = EngineInitializer.setup_legacy_graph(
         [], work_dir, build_root=build_root, native=self._native)
     return LegacyAddressMapper(scheduler, engine, build_root)
Example #31
 def create_address_mapper(self, build_root):
   scheduler, engine, _, _ = EngineInitializer.setup_legacy_graph([], build_root=build_root, native=self._native)
   return LegacyAddressMapper(scheduler, engine, build_root)