def compute_pantsd_invalidation_globs(buildroot, bootstrap_options):
    """Computes the merged value of the `--pantsd-invalidation-globs` option.

    Combines --pythonpath and --pants-config-files files that are in {buildroot} dir with those
    invalidation_globs provided by users.

    :param buildroot: Absolute path of the build root used to relativize absolute globs.
    :param bootstrap_options: Parsed bootstrap options providing `pythonpath`,
      `pants_config_files`, and `pantsd_invalidation_globs`.
    :returns: A sorted list of buildroot-relative glob strings (negated globs pass through).
    """
    invalidation_globs = set()
    # De-duplicate the combined inputs before processing.
    globs = set(
        sys.path
        + bootstrap_options.pythonpath
        + bootstrap_options.pants_config_files
        + bootstrap_options.pantsd_invalidation_globs
    )

    for glob in globs:
        # Negated globs are already relative patterns: pass them through verbatim.
        if glob.startswith("!"):
            invalidation_globs.add(glob)
            continue

        # Absolute paths are only watchable when they fall inside the buildroot.
        glob_relpath = fast_relpath_optional(glob, buildroot) if os.path.isabs(glob) else glob
        if glob_relpath:
            # Watch both the path itself and everything beneath it.
            invalidation_globs.update([glob_relpath, glob_relpath + "/**"])
        else:
            logger.debug(
                f"Changes to {glob}, outside of the buildroot, will not be invalidated."
            )
    # `sorted` already returns a new list; the previous `list(sorted(...))` wrapper was redundant.
    return sorted(invalidation_globs)
def add(absolute_path, include=False):
    """Record an ignore pattern (or a re-include pattern, when `include`) for `absolute_path`.

    Trailing separators are stripped so the pattern applies whether the path is a symlink
    or a directory; paths that fall outside the buildroot are silently skipped.
    """
    rel = fast_relpath_optional(absolute_path, buildroot)
    if not rel:
        return
    pattern = rel.rstrip(os.path.sep)
    marker = "!" if include else ""
    pants_ignore.append(f"{marker}/{pattern}")
def matching_address_families(
    self, address_families_dict: Dict[str, "AddressFamily"],
) -> List["AddressFamily"]:
    """Return the families whose namespace this spec's directory lies under.

    A family matches when `fast_relpath_optional(self.directory, ns)` yields a
    (possibly empty) relative path, i.e. is not None.
    """
    matches: List["AddressFamily"] = []
    for namespace, family in address_families_dict.items():
        if fast_relpath_optional(self.directory, namespace) is not None:
            matches.append(family)
    return matches
def add_ignore(absolute_path):
    """Add a buildroot-relative entry to `pants_ignore` for `absolute_path`.

    To ensure that the path is ignored regardless of whether it is a symlink or a directory, we
    strip trailing slashes (which would signal that we wanted to ignore only directories).
    Paths outside the buildroot are skipped.
    """
    maybe_rel_path = fast_relpath_optional(absolute_path, buildroot)
    # Exclude temp workdir from <pants_ignore>.
    # temp workdir is /path/to/<pants_workdir>/tmp/tmp<process_id>.pants.d
    # NB: the dots in ".pants.d" are now escaped so they match the literal suffix rather than
    # any character (the previous pattern "tmp/tmp(.+).pants.d" over-matched).
    if maybe_rel_path and not re.search(r"tmp/tmp(.+)\.pants\.d", maybe_rel_path):
        rel_path = maybe_rel_path.rstrip(os.path.sep)
        pants_ignore.append(f'/{rel_path}')
def compute_pantsd_invalidation_globs(buildroot, bootstrap_options, absolute_pidfile):
    """Computes the merged value of the `--pantsd-invalidation-globs` option.

    Combines --pythonpath and --pants-config-files files that are in {buildroot} dir with those
    invalidation_globs provided by users.
    """
    globs = OrderedSet()

    # Entries derived from the pidfile, sys.path, and file-like configuration may be
    # absolute paths; sanitize them to buildroot-relative globs where possible.
    for candidate in (
        absolute_pidfile,
        *sys.path,
        *bootstrap_options.pythonpath,
        *bootstrap_options.pants_config_files,
    ):
        relpath = fast_relpath_optional(candidate, buildroot) if os.path.isabs(candidate) else candidate
        if not relpath:
            logger.debug(
                f"Changes to {candidate}, outside of the buildroot, will not be invalidated."
            )
            continue
        # Watch both the entry itself and everything beneath it.
        globs.update([relpath, relpath + "/**"])

    # Explicitly specified globs are already relative, and are added verbatim.
    globs.update((
        "!*.pyc",
        "!__pycache__/",
        # TODO: This is a bandaid for https://github.com/pantsbuild/pants/issues/7022:
        # macros should be adapted to allow this dependency to be automatically detected.
        "requirements.txt",
        "3rdparty/**/requirements.txt",
        *bootstrap_options.pantsd_invalidation_globs,
    ))
    return list(globs)
def fast_relpath_collection(collection):
    """Relativize each path in `collection` against the buildroot where possible.

    Entries that do not relativize (including an empty relpath) are returned unchanged.
    """
    root = get_buildroot()
    result = []
    for entry in collection:
        # NB: `or entry` (not an `is None` check) preserves the original behavior for a
        # falsy "" relpath, which also falls back to the entry itself.
        result.append(fast_relpath_optional(entry, root) or entry)
    return result
def addresses_from_address_families(address_mapper, specs):
  """Given an AddressMapper and list of Specs, return matching BuildFileAddresses.

  Raises a AddressLookupError if:
    - there were no matching AddressFamilies, or
    - the Spec matches no addresses for SingleAddresses.
  """
  # NB: This is an old-style engine rule: products are requested from the engine by
  # yielding `Get(...)` requests from this generator.
  # Capture a Snapshot covering all paths for these Specs, then group by directory.
  snapshot = yield Get(Snapshot, PathGlobs, _spec_to_globs(address_mapper, specs))
  dirnames = set(dirname(f.stat.path) for f in snapshot.files)
  address_families = yield [Get(AddressFamily, Dir(d)) for d in dirnames]

  # NB: `@memoized` does not work on local functions.
  # Lazily builds (once) a namespace -> AddressFamily index, cached on the function object.
  def by_directory():
    if by_directory.cached is None:
      by_directory.cached = {af.namespace: af for af in address_families}
    return by_directory.cached
  by_directory.cached = None

  def raise_empty_address_family(spec):
    raise ResolveError('Path "{}" contains no BUILD files.'.format(spec.directory))

  # True when the address's spec string matches any of the mapper's exclude patterns.
  def exclude_address(address):
    if address_mapper.exclude_patterns:
      address_str = address.spec
      return any(p.search(address_str) is not None for p in address_mapper.exclude_patterns)
    return False

  addresses = []
  included = set()
  # Collects non-excluded addresses (optionally filtered by `predicate`) into `addresses`,
  # de-duplicating via `included`. Returns whether anything matched at all.
  def include(address_families, predicate=None):
    matched = False
    for af in address_families:
      for a in af.addressables.keys():
        if not exclude_address(a) and (predicate is None or predicate(a)):
          matched = True
          if a not in included:
            addresses.append(a)
            included.add(a)
    return matched

  # Dispatch on the concrete Spec type; exact type checks (not isinstance) are intentional
  # here since each Spec subtype has distinct matching semantics.
  for spec in specs.dependencies:
    if type(spec) is DescendantAddresses:
      # Match every family at or below spec.directory.
      matched = include(af for af in address_families if fast_relpath_optional(af.namespace, spec.directory) is not None)
      if not matched:
        raise AddressLookupError('Spec {} does not match any targets.'.format(spec))
    elif type(spec) is SiblingAddresses:
      address_family = by_directory().get(spec.directory)
      if not address_family:
        raise_empty_address_family(spec)
      include([address_family])
    elif type(spec) is SingleAddress:
      address_family = by_directory().get(spec.directory)
      if not address_family:
        raise_empty_address_family(spec)
      if not include([address_family], predicate=lambda a: a.target_name == spec.name):
        _raise_did_you_mean(address_family, spec.name)
    elif type(spec) is AscendantAddresses:
      # Match every family at or above spec.directory.
      include(af for af in address_families if fast_relpath_optional(spec.directory, af.namespace) is not None)
    else:
      raise ValueError('Unrecognized Spec type: {}'.format(spec))

  yield BuildFileAddresses(addresses)
def matches(self, path_from_buildroot):
    """True iff the buildroot-relative path, relativized to `rel_root`, is in `self.files`."""
    relative = fast_relpath_optional(path_from_buildroot, self.rel_root)
    if relative is None:
        return False
    return relative in self.files
def pathglob_for(filename):
    """Wrap `filename`, relativized to the buildroot, as a single-glob PathGlobsAndRoot."""
    root = get_buildroot()
    relpath = fast_relpath_optional(filename, root)
    return PathGlobsAndRoot(PathGlobs((relpath,)), text_type(root))
def addresses_from_address_families(address_mapper, specs):
  """Given an AddressMapper and list of Specs, return matching BuildFileAddresses.

  Raises a AddressLookupError if:
    - there were no matching AddressFamilies, or
    - the Spec matches no addresses for SingleAddresses.
  """
  # NB: This is an old-style engine rule: products are requested from the engine by
  # yielding `Get(...)` requests from this generator.
  # Capture a Snapshot covering all paths for these Specs, then group by directory.
  snapshot = yield Get(Snapshot, PathGlobs, _spec_to_globs(address_mapper, specs))
  dirnames = set(dirname(f.stat.path) for f in snapshot.files)
  address_families = yield [Get(AddressFamily, Dir(d)) for d in dirnames]

  # NB: `@memoized` does not work on local functions.
  # Lazily builds (once) a namespace -> AddressFamily index, cached on the function object.
  def by_directory():
    if by_directory.cached is None:
      by_directory.cached = {af.namespace: af for af in address_families}
    return by_directory.cached
  by_directory.cached = None

  def raise_empty_address_family(spec):
    raise ResolveError('Path "{}" contains no BUILD files.'.format(spec.directory))

  # True when the address's spec string matches any of the mapper's exclude patterns.
  def exclude_address(address):
    if address_mapper.exclude_patterns:
      address_str = address.spec
      return any(p.search(address_str) is not None for p in address_mapper.exclude_patterns)
    return False

  addresses = []
  included = set()
  # Collects non-excluded addresses (optionally filtered by `predicate`) into `addresses`,
  # de-duplicating via `included`. Returns whether anything matched at all.
  def include(address_families, predicate=None):
    matched = False
    for af in address_families:
      for a in af.addressables.keys():
        if not exclude_address(a) and (predicate is None or predicate(a)):
          matched = True
          if a not in included:
            addresses.append(a)
            included.add(a)
    return matched

  # Dispatch on the concrete Spec type; each subtype has distinct matching semantics.
  for spec in specs.dependencies:
    if type(spec) is DescendantAddresses:
      # Match every family at or below spec.directory.
      matched = include(
        af for af in address_families if fast_relpath_optional(af.namespace, spec.directory) is not None
      )
      if not matched:
        raise AddressLookupError('Spec {} does not match any targets.'.format(spec))
    elif type(spec) is SiblingAddresses:
      address_family = by_directory().get(spec.directory)
      if not address_family:
        raise_empty_address_family(spec)
      include([address_family])
    elif type(spec) is SingleAddress:
      address_family = by_directory().get(spec.directory)
      if not address_family:
        raise_empty_address_family(spec)
      if not include([address_family], predicate=lambda a: a.target_name == spec.name):
        _raise_did_you_mean(address_family, spec.name)
    elif type(spec) is AscendantAddresses:
      # Match every family at or above spec.directory.
      include(
        af for af in address_families if fast_relpath_optional(spec.directory, af.namespace) is not None
      )
    else:
      raise ValueError('Unrecognized Spec type: {}'.format(spec))

  yield BuildFileAddresses(addresses)
def matches(self, path_from_buildroot):
    """Whether `path_from_buildroot`, relativized to `rel_root`, names one of our files."""
    rel = fast_relpath_optional(path_from_buildroot, self.rel_root)
    return rel in self.files if rel is not None else False
def matching_address_families(
    self, address_families_dict: Mapping[str, "AddressFamily"]
) -> Tuple["AddressFamily", ...]:
    """Return the families whose namespace this spec's directory lies under.

    A family matches when `fast_relpath_optional(self.directory, ns)` is not None.
    """
    matched = []
    for namespace, family in address_families_dict.items():
        if fast_relpath_optional(self.directory, namespace) is not None:
            matched.append(family)
    return tuple(matched)
def addresses_from_address_families(address_mapper, specs):
  """Given an AddressMapper and list of Specs, return matching BuildFileAddresses.

  Raises a AddressLookupError if:
    - there were no matching AddressFamilies, or
    - the Spec matches no addresses for SingleAddresses.
  """
  # NB: This is an old-style engine rule: products are requested from the engine by
  # yielding `Get(...)` requests from this generator.
  # Capture a Snapshot covering all paths for these Specs, then group by directory.
  snapshot = yield Get(Snapshot, PathGlobs, _spec_to_globs(address_mapper, specs))
  dirnames = set(dirname(f.stat.path) for f in snapshot.files)
  address_families = yield [Get(AddressFamily, Dir(d)) for d in dirnames]

  # NB: `@memoized` does not work on local functions.
  # Lazily builds (once) a namespace -> AddressFamily index, cached on the function object.
  def by_directory():
    if by_directory.cached is None:
      by_directory.cached = {af.namespace: af for af in address_families}
    return by_directory.cached
  by_directory.cached = None

  def raise_empty_address_family(spec):
    raise ResolveError('Path "{}" does not contain any BUILD files.'.format(spec.directory))

  # NB: unlike the older variants of this rule, exclusion is keyed on the spec *string*
  # and sourced from `specs` itself rather than the address mapper.
  def exclude_address(spec):
    if specs.exclude_patterns:
      return any(p.search(spec) is not None for p in specs.exclude_patterns_memo())
    return False

  # Builds a per-tag predicate over a target's declared `tags` kwarg (stringified).
  def filter_for_tag(tag):
    return lambda t: tag in [str(t_tag) for t_tag in t.kwargs().get("tags", [])]

  include_target = wrap_filters(create_filters(specs.tags if specs.tags else '', filter_for_tag))

  addresses = []
  included = set()
  # Collects addresses passing the optional predicate, the tag filter, and the exclude
  # patterns into `addresses`, de-duplicating via `included`. Returns whether anything matched.
  def include(address_families, predicate=None):
    matched = False
    for af in address_families:
      for (a, t) in af.addressables.items():
        if (predicate is None or predicate(a)):
          if include_target(t) and (not exclude_address(a.spec)):
            matched = True
            if a not in included:
              addresses.append(a)
              included.add(a)
    return matched

  # Dispatch on the concrete Spec type; each subtype has distinct matching semantics.
  for spec in specs.dependencies:
    if type(spec) is DescendantAddresses:
      # Match every family at or below spec.directory.
      matched = include(
        af for af in address_families if fast_relpath_optional(af.namespace, spec.directory) is not None
      )
      if not matched:
        raise AddressLookupError('Spec {} does not match any targets.'.format(spec))
    elif type(spec) is SiblingAddresses:
      address_family = by_directory().get(spec.directory)
      if not address_family:
        raise_empty_address_family(spec)
      include([address_family])
    elif type(spec) is SingleAddress:
      address_family = by_directory().get(spec.directory)
      if not address_family:
        raise_empty_address_family(spec)
      # spec.name here is generally the root node specified on commandline. equality here implies
      # a root node i.e. node specified on commandline.
      if not include([address_family], predicate=lambda a: a.target_name == spec.name):
        if len(addresses) == 0:
          _raise_did_you_mean(address_family, spec.name)
    elif type(spec) is AscendantAddresses:
      # Match every family at or above spec.directory.
      include(
        af for af in address_families if fast_relpath_optional(spec.directory, af.namespace) is not None
      )
    else:
      raise ValueError('Unrecognized Spec type: {}'.format(spec))

  yield BuildFileAddresses(addresses)
def matches_target_residence_dir(self, residence_dir: str) -> bool:
    """True iff `residence_dir` lies at or below this spec's directory."""
    rel = fast_relpath_optional(residence_dir, self.directory)
    return rel is not None
async def build_local_dists(
    request: LocalDistsPexRequest,
) -> LocalDistsPex:
    """Build the in-repo python_distribution targets reachable from `request.addresses` into a
    PEX of wheels, and subtract the files those wheels provide from the request's sources."""
    transitive_targets = await Get(TransitiveTargets, TransitiveTargetsRequest(request.addresses))
    applicable_targets = [
        tgt for tgt in transitive_targets.closure if PythonDistributionFieldSet.is_applicable(tgt)
    ]

    # Build each applicable distribution's wheels concurrently.
    local_dists_wheels = await MultiGet(
        Get(LocalDistWheels, PythonDistributionFieldSet, PythonDistributionFieldSet.create(target))
        for target in applicable_targets
    )

    # The primary use-case of the "local dists" feature is to support consuming native extensions
    # as wheels without having to publish them first.
    # It doesn't seem very useful to consume locally-built sdists, and it makes it hard to
    # reason about possible sys.path collisions between the in-repo sources and whatever the
    # sdist will place on the sys.path when it's installed.
    # So for now we simply ignore sdists, with a warning if necessary.
    provided_files: set[str] = set()
    wheels: list[str] = []
    wheels_digests = []
    for local_dist_wheels in local_dists_wheels:
        wheels.extend(local_dist_wheels.wheel_paths)
        wheels_digests.append(local_dist_wheels.wheels_digest)
        provided_files.update(local_dist_wheels.provided_files)

    wheels_digest = await Get(Digest, MergeDigests(wheels_digests))

    dists_pex = await Get(
        Pex,
        PexRequest(
            output_filename="local_dists.pex",
            requirements=PexRequirements(wheels),
            interpreter_constraints=request.interpreter_constraints,
            additional_inputs=wheels_digest,
            internal_only=request.internal_only,
            additional_args=["--intransitive"],
        ),
    )

    if not wheels:
        # The source calculations below are not (always) cheap, so we skip them if no wheels were
        # produced. See https://github.com/pantsbuild/pants/issues/14561 for one possible approach
        # to sharing the cost of these calculations.
        return LocalDistsPex(dists_pex, request.sources)

    # We check source roots in reverse lexicographic order,
    # so we'll find the innermost root that matches.
    source_roots = sorted(request.sources.source_roots, reverse=True)
    remaining_sources = set(request.sources.source_files.files)
    unrooted_files_set = set(request.sources.source_files.unrooted_files)
    for source in request.sources.source_files.files:
        # Unrooted files cannot be provided by a wheel, so only rooted sources are subtracted.
        if source not in unrooted_files_set:
            for source_root in source_roots:
                source_relpath = fast_relpath_optional(source, source_root)
                # NOTE(review): there is no `break` after a successful removal; if nested source
                # roots both relativize `source` into `provided_files`, `remove` would raise
                # KeyError on the second hit — confirm nested-root inputs are impossible here.
                if source_relpath is not None and source_relpath in provided_files:
                    remaining_sources.remove(source)
    # Re-snapshot only the sources not provided by any locally built wheel.
    remaining_sources_snapshot = await Get(
        Snapshot,
        DigestSubset(
            request.sources.source_files.snapshot.digest, PathGlobs(sorted(remaining_sources))
        ),
    )
    subtracted_sources = PythonSourceFiles(
        SourceFiles(remaining_sources_snapshot, request.sources.source_files.unrooted_files),
        request.sources.source_roots,
    )

    return LocalDistsPex(dists_pex, subtracted_sources)
def matching_address_families(self, address_families_dict):
    """Return the families whose namespace this spec's directory lies under."""
    matched = []
    for namespace, family in address_families_dict.items():
        if fast_relpath_optional(self.directory, namespace) is not None:
            matched.append(family)
    return matched
def matches(self, tgt_residence_dir: str) -> bool:
    """True iff this spec's directory lies at or below `tgt_residence_dir`."""
    relpath = fast_relpath_optional(self.directory, tgt_residence_dir)
    return relpath is not None
def addresses_from_address_families(address_mapper, specs):
  """Given an AddressMapper and list of Specs, return matching BuildFileAddresses.

  Raises a AddressLookupError if:
    - there were no matching AddressFamilies, or
    - the Spec matches no addresses for SingleAddresses.
  """
  # NB: This is an old-style engine rule: products are requested from the engine by
  # yielding `Get(...)` requests from this generator.
  # Capture a Snapshot covering all paths for these Specs, then group by directory.
  snapshot = yield Get(Snapshot, PathGlobs, _spec_to_globs(address_mapper, specs))
  dirnames = set(dirname(f.stat.path) for f in snapshot.files)
  address_families = yield [Get(AddressFamily, Dir(d)) for d in dirnames]

  # NB: `@memoized` does not work on local functions.
  # Lazily builds (once) a namespace -> AddressFamily index, cached on the function object.
  def by_directory():
    if by_directory.cached is None:
      by_directory.cached = {af.namespace: af for af in address_families}
    return by_directory.cached
  by_directory.cached = None

  def raise_empty_address_family(spec):
    raise ResolveError('Path "{}" does not contain any BUILD files.'.format(spec.directory))

  # NB: exclusion is keyed on the spec *string* and sourced from `specs` itself.
  def exclude_address(spec):
    if specs.exclude_patterns:
      return any(p.search(spec) is not None for p in specs.exclude_patterns_memo())
    return False

  # Builds a per-tag predicate over a target's declared `tags` kwarg (stringified).
  def filter_for_tag(tag):
    return lambda t: tag in map(str, t.kwargs().get("tags", []))

  include_target = wrap_filters(create_filters(specs.tags if specs.tags else '', filter_for_tag))

  addresses = []
  included = set()
  # Collects addresses passing the optional predicate, the tag filter, and the exclude
  # patterns into `addresses`, de-duplicating via `included`. Returns whether anything matched.
  def include(address_families, predicate=None):
    matched = False
    for af in address_families:
      for (a, t) in af.addressables.items():
        if (predicate is None or predicate(a)):
          if include_target(t) and (not exclude_address(a.spec)):
            matched = True
            if a not in included:
              addresses.append(a)
              included.add(a)
    return matched

  # Dispatch on the concrete Spec type; each subtype has distinct matching semantics.
  for spec in specs.dependencies:
    if type(spec) is DescendantAddresses:
      # Match every family at or below spec.directory.
      matched = include(
        af for af in address_families if fast_relpath_optional(af.namespace, spec.directory) is not None
      )
      if not matched:
        raise AddressLookupError('Spec {} does not match any targets.'.format(spec))
    elif type(spec) is SiblingAddresses:
      address_family = by_directory().get(spec.directory)
      if not address_family:
        raise_empty_address_family(spec)
      include([address_family])
    elif type(spec) is SingleAddress:
      address_family = by_directory().get(spec.directory)
      if not address_family:
        raise_empty_address_family(spec)
      # spec.name here is generally the root node specified on commandline. equality here implies
      # a root node i.e. node specified on commandline.
      if not include([address_family], predicate=lambda a: a.target_name == spec.name):
        if len(addresses) == 0:
          _raise_did_you_mean(address_family, spec.name)
    elif type(spec) is AscendantAddresses:
      # Match every family at or above spec.directory.
      include(
        af for af in address_families if fast_relpath_optional(spec.directory, af.namespace) is not None
      )
    else:
      raise ValueError('Unrecognized Spec type: {}'.format(spec))

  yield BuildFileAddresses(addresses)
def matching_address_families(self, address_families_dict):
    """Return the families whose namespace this spec's directory lies under."""
    return [
        family
        for namespace, family in address_families_dict.items()
        if fast_relpath_optional(self.directory, namespace) is not None
    ]
async def build_local_dists(
    request: LocalDistsPexRequest,
) -> LocalDistsPex:
    """Package the in-repo python_distribution targets reachable from `request.addresses`,
    collect the wheel artifacts into a PEX, and subtract wheel-provided files from the
    request's sources. Targets producing no wheel fall back to plain sources (with a warning)."""
    transitive_targets = await Get(TransitiveTargets, TransitiveTargetsRequest(request.addresses))
    applicable_targets = [
        tgt for tgt in transitive_targets.closure if PythonDistributionFieldSet.is_applicable(tgt)
    ]
    python_dist_field_sets = [
        PythonDistributionFieldSet.create(target) for target in applicable_targets
    ]

    dists = await MultiGet(
        [Get(BuiltPackage, PackageFieldSet, field_set) for field_set in python_dist_field_sets]
    )

    # The primary use-case of the "local dists" feature is to support consuming native extensions
    # as wheels without having to publish them first.
    # It doesn't seem very useful to consume locally-built sdists, and it makes it hard to
    # reason about possible sys.path collisions between the in-repo sources and whatever the
    # sdist will place on the sys.path when it's installed.
    # So for now we simply ignore sdists, with a warning if necessary.
    provided_files = set()
    wheels = []

    all_contents = await MultiGet(Get(DigestContents, Digest, dist.digest) for dist in dists)
    for dist, contents, tgt in zip(dists, all_contents, applicable_targets):
        artifacts = {(a.relpath or "") for a in dist.artifacts}
        # A given local dist might build a wheel and an sdist (and maybe other artifacts -
        # we don't know what setup command was run...)
        # As long as there is a wheel, we can ignore the other artifacts.
        wheel = next((art for art in artifacts if art.endswith(".whl")), None)
        if wheel:
            wheel_content = next(content for content in contents if content.path == wheel)
            wheels.append(wheel)
            # Read the wheel's file listing so its contents can be subtracted from the sources.
            buf = BytesIO()
            buf.write(wheel_content.content)
            buf.seek(0)
            with zipfile.ZipFile(buf) as zf:
                provided_files.update(zf.namelist())
        else:
            logger.warning(
                f"Encountered a dependency on the {tgt.alias} target at {tgt.address.spec}, but "
                "this target does not produce a Python wheel artifact. Therefore this target's "
                "code will be used directly from sources, without a distribution being built, "
                "and therefore any native extensions in it will not be built.\n\n"
                f"See {doc_url('python-distributions')} for details on how to set up a {tgt.alias} "
                "target to produce a wheel."
            )

    dists_digest = await Get(Digest, MergeDigests([dist.digest for dist in dists]))
    # Keep only the wheel files from the merged build outputs.
    wheels_digest = await Get(Digest, DigestSubset(dists_digest, PathGlobs(["**/*.whl"])))

    dists_pex = await Get(
        Pex,
        PexRequest(
            output_filename="local_dists.pex",
            requirements=PexRequirements(wheels),
            interpreter_constraints=request.interpreter_constraints,
            additional_inputs=wheels_digest,
            internal_only=request.internal_only,
        ),
    )

    # We check source roots in reverse lexicographic order,
    # so we'll find the innermost root that matches.
    source_roots = list(reversed(sorted(request.sources.source_roots)))
    remaining_sources = set(request.sources.source_files.files)
    unrooted_files_set = set(request.sources.source_files.unrooted_files)
    for source in request.sources.source_files.files:
        # Unrooted files cannot be provided by a wheel, so only rooted sources are subtracted.
        if source not in unrooted_files_set:
            for source_root in source_roots:
                source_relpath = fast_relpath_optional(source, source_root)
                # NOTE(review): no `break` after removal; if nested source roots both relativize
                # `source` into `provided_files`, the second `remove` would raise KeyError —
                # confirm nested-root inputs are impossible here.
                if source_relpath is not None and source_relpath in provided_files:
                    remaining_sources.remove(source)
    # Re-snapshot only the sources not provided by any locally built wheel.
    remaining_sources_snapshot = await Get(
        Snapshot,
        DigestSubset(
            request.sources.source_files.snapshot.digest, PathGlobs(sorted(remaining_sources))
        ),
    )
    subtracted_sources = PythonSourceFiles(
        SourceFiles(remaining_sources_snapshot, request.sources.source_files.unrooted_files),
        request.sources.source_roots,
    )

    return LocalDistsPex(dists_pex, subtracted_sources)