def compute_value(
    cls,
    raw_value: Optional[Union[Iterable[str], Dict[str, Iterable[str]]]],
    *,
    address: Address,
) -> Optional[Union[Tuple[str, ...], FrozenDict[str, Tuple[str, ...]]]]:
    """Normalize the raw field value into a sorted tuple of strings, or a frozen
    mapping of platform -> sorted tuple of strings when a dict is supplied.

    :raises InvalidFieldTypeException: if the value is neither an iterable of
      strings nor a dict of platform -> iterable of strings.
    """
    computed = super().compute_value(raw_value, address=address)
    # Built eagerly so both the dict and the iterable branch can raise the same error.
    bad_type_error = InvalidFieldTypeException(
        address,
        cls.alias,
        computed,
        expected_type=(
            "either an iterable of strings or a dictionary of platforms to iterable of "
            "strings"
        ),
    )
    if isinstance(computed, dict):
        normalized = {}
        try:
            for platform, lib_names in computed.items():
                normalized[platform] = tuple(sorted(ensure_str_list(lib_names)))
        except ValueError:
            raise bad_type_error
        return FrozenDict(normalized)
    try:
        # Validation only; the tuple below is built from the original value.
        ensure_str_list(computed)
    except ValueError:
        raise bad_type_error
    return tuple(sorted(computed))
def _scrub_args(classpath, main, jvm_options, args):
    """Coerce executor inputs into string lists and validate the main class name.

    :returns: a 4-tuple of (classpath, main, jvm_options, args) with every list
      entry guaranteed to be a string.
    :raises ValueError: if main is not a non-empty string, or any list entry is
      not a string.
    """
    scrubbed_classpath = ensure_str_list(classpath, allow_single_str=True)
    if not isinstance(main, str) or not main:
        raise ValueError("A non-empty main classname is required, given: {}".format(main))
    scrubbed_jvm_options = ensure_str_list(jvm_options or (), allow_single_str=True)
    scrubbed_args = ensure_str_list(args or (), allow_single_str=True)
    return scrubbed_classpath, main, scrubbed_jvm_options, scrubbed_args
def run_export(
    self, test_target, workdir, load_libs=False, only_default=False, extra_args=None
):
    """Invoke `./pants export` for the given target(s) and return the parsed json output.

    :param string|list test_target: spec of the targets to run on.
    :param string workdir: working directory to run pants with.
    :param bool load_libs: whether to load external libraries (of any conf).
    :param bool only_default: if loading libraries, whether to only resolve the default conf,
      or to additionally resolve sources and javadocs.
    :param list extra_args: list of extra arguments for the pants invocation.
    :return: the json output of the console task.
    :rtype: dict
    """
    out_file = os.path.join(workdir, "export_out.txt")
    cmd = ["export", f"--output-file={out_file}", *ensure_str_list(test_target)]
    # Three cases: libs disabled entirely, all confs, or just the default conf.
    if load_libs:
        lib_flags = [] if only_default else self._confs_args
    else:
        lib_flags = ["--no-export-libraries"]
    pants_run = self.run_pants_with_workdir(cmd + lib_flags + (extra_args or []), workdir)
    self.assert_success(pants_run)
    self.assertTrue(
        os.path.exists(out_file),
        msg=f"Could not find export output file in {out_file}",
    )
    with open(out_file, "r") as fp:
        exported = json.load(fp)
    if not load_libs:
        # When libraries were not requested, the export must not contain any.
        self.assertIsNone(exported.get("libraries"))
    return exported
def __init__(self,
             module_name,
             libraries=None,
             include_patterns=None,
             exclude_patterns=None,
             compatibility=None,
             within_data_subdir=None,
             payload=None,
             **kwargs):
    """Target describing headers/libraries to extract from python wheels.

    :param str module_name: The name of the specific python module containing headers and/or
      libraries to extract (e.g. 'tensorflow').
    :param list libraries: addresses of python_requirement_library targets that specify the
      wheels you want to unpack
    :param list include_patterns: fileset patterns to include from the archive
    :param list exclude_patterns: fileset patterns to exclude from the archive. Exclude
      patterns are processed before include_patterns.
    :param compatibility: Python interpreter constraints used to create the pex for the
      requirement target. If unset, the default interpreter constraints are used. This
      argument is unnecessary unless the native code depends on libpython.
    :param bool within_data_subdir: If True, descend into '<name>-<version>.data/' when
      matching `include_patterns`. For python wheels which declare any non-code data, this is
      usually needed to extract that without manually specifying the relative path, including
      the package version. For example, when `data_files` is used in a setup.py,
      `within_data_subdir=True` will allow specifying `include_patterns` matching exactly what
      is specified in the setup.py.
    """
    # Deprecation: non-boolean values for within_data_subdir are being removed.
    deprecated_conditional(
        lambda: type(within_data_subdir) not in (bool, type(None)),
        removal_version="1.28.0.dev2",
        entity_description="A non-boolean value for `within_data_subdir`",
        hint_message=
        "The location of the .data subdirectory will be inferred from the module name!",
    )
    payload = payload or Payload()
    payload.add_fields({
        "library_specs": PrimitiveField(libraries or ()),
        "module_name": PrimitiveField(module_name),
        "include_patterns": PrimitiveField(include_patterns or ()),
        "exclude_patterns": PrimitiveField(exclude_patterns or ()),
        # A bare compatibility string is normalized to a singleton list.
        "compatibility": PrimitiveField(
            ensure_str_list(compatibility or (), allow_single_str=True)),
        "within_data_subdir": PrimitiveField(within_data_subdir),
        # TODO: consider supporting transitive deps like UnpackedJars!
        # TODO: consider supporting `platforms` as in PythonBinary!
    })
    super().__init__(payload=payload, **kwargs)
    # Checked after super().__init__ so self.address is available for the error message.
    if not libraries:
        raise self.ExpectedLibrariesError(
            "Expected non-empty libraries attribute for {spec}".format(
                spec=self.address.spec))
async def hydrate_sources(
        request: SourcesRequest,
        glob_match_error_behavior: GlobMatchErrorBehavior) -> SourcesResult:
    """Expand a target's sources field into a snapshot of matching files.

    Explicitly declared sources use `all_match` (every glob is expected to match),
    while default globs use `any_match` (best-effort). A field with neither an
    explicit value nor defaults yields an empty snapshot.
    """
    sources_field = request.field
    globs: Iterable[str]
    if sources_field.sanitized_raw_value is not None:
        # Explicit sources: every glob the user wrote should match something.
        globs = ensure_str_list(sources_field.sanitized_raw_value)
        conjunction = GlobExpansionConjunction.all_match
    else:
        if sources_field.default_globs is None:
            return SourcesResult(EMPTY_SNAPSHOT)
        # Fall back to the field's defaults; it's fine if only some (or none) match.
        globs = sources_field.default_globs
        conjunction = GlobExpansionConjunction.any_match
    snapshot = await Get[Snapshot](
        PathGlobs(
            # Globs are anchored under the owning target's directory.
            (sources_field.prefix_glob_with_address(glob) for glob in globs),
            conjunction=conjunction,
            glob_match_error_behavior=glob_match_error_behavior,
            # TODO(#9012): add line number referring to the sources field. When doing this, we'll
            # likely need to `await Get[BuildFileAddress](Address)`.
            description_of_origin=(
                f"{sources_field.address}'s `{sources_field.alias}` field"
                if glob_match_error_behavior != GlobMatchErrorBehavior.ignore
                else None),
        ))
    sources_field.validate_snapshot(snapshot)
    return SourcesResult(snapshot)
def __init__(self, address=None, payload=None, sources=None, setup_requires=None, **kwargs):
    """
    :param address: The Address that maps to this Target in the BuildGraph.
    :type address: :class:`pants.build_graph.address.Address`
    :param payload: The configuration encapsulated by this target. Also in charge of most
      fingerprinting details.
    :type payload: :class:`pants.base.payload.Payload`
    :param sources: Files to "include". Paths are relative to the BUILD file's directory.
      Must include setup.py.
    :type sources: :class:`twitter.common.dirutil.Fileset` or list of strings.
    :param list setup_requires: A list of python requirements to provide during the
      invocation of setup.py.
    """
    # PEP 8 idiom fix: `x not in y` instead of `not x in y` (behavior unchanged).
    if "setup.py" not in sources:
        raise TargetDefinitionException(
            self,
            "A file named setup.py must be in the same "
            "directory as the BUILD file containing this target.",
        )
    payload = payload or Payload()
    payload.add_fields(
        {"setup_requires": PrimitiveField(ensure_str_list(setup_requires or ()))}
    )
    super().__init__(address=address, payload=payload, sources=sources, **kwargs)
def __init__(
    self,
    identity,
    workdir,
    nailgun_classpath,
    distribution,
    startup_timeout=10,
    connect_timeout=10,
    connect_attempts=5,
    metadata_base_dir=None,
):
    """Set up a managed nailgun server process.

    :param identity: name used as this executor's identity and as the managed process name.
    :param str workdir: directory where the server's stdout/stderr files are written.
    :param nailgun_classpath: classpath for the nailgun server; a bare string is allowed and
      normalized to a list.
    :param distribution: the JVM distribution used to launch the server.
    :param startup_timeout: how long to wait for the server to start.
    :param connect_timeout: how long to wait when connecting.
    :param connect_attempts: number of connection attempts before giving up.
    :param metadata_base_dir: base dir for process-management metadata.
    """
    # Both bases are initialized explicitly (not via super()) because they take
    # distinct constructor arguments; keep this order.
    Executor.__init__(self, distribution=distribution)
    FingerprintedProcessManager.__init__(
        self,
        name=identity,
        process_name=self._PROCESS_NAME,
        metadata_base_dir=metadata_base_dir,
    )
    if not isinstance(workdir, str):
        raise ValueError(
            "Workdir must be a path string, not: {workdir}".format(workdir=workdir)
        )
    self._identity = identity
    self._workdir = workdir
    self._ng_stdout = os.path.join(workdir, "stdout")
    self._ng_stderr = os.path.join(workdir, "stderr")
    # A bare string classpath is tolerated and normalized to a list.
    self._nailgun_classpath = ensure_str_list(nailgun_classpath, allow_single_str=True)
    self._startup_timeout = startup_timeout
    self._connect_timeout = connect_timeout
    self._connect_attempts = connect_attempts
def sanitize_raw_value(cls, raw_value: Optional[Iterable[str]],
                       address: Address) -> Optional[Tuple[str, ...]]:
    """Return the sanitized value as a tuple of strings, or None when unset.

    Order is preserved; ValueError from non-string entries propagates.
    """
    sanitized = super().sanitize_raw_value(raw_value, address=address)
    if sanitized is None:
        return None
    as_strings = ensure_str_list(sanitized)
    return tuple(as_strings)
def sanitize_raw_value(cls, raw_value: Optional[Iterable[str]], *,
                       address: Address) -> Optional[Tuple[str, ...]]:
    """Return the sanitized value as a sorted tuple of strings, or None when unset.

    :raises InvalidFieldTypeException: if any entry is not a string.
    """
    sanitized = super().sanitize_raw_value(raw_value, address=address)
    if sanitized is None:
        return None
    try:
        # Validate entry types before sorting (sorting mixed types would TypeError).
        ensure_str_list(sanitized)
    except ValueError:
        raise InvalidFieldTypeException(
            address,
            cls.alias,
            sanitized,
            expected_type="an iterable of strings (e.g. a list of strings)",
        )
    return tuple(sorted(sanitized))
def __init__(self, classpath, ivy_settings=None, ivy_resolution_cache_dir=None,
             extra_jvm_options=None):
    """Configures an ivy wrapper for the ivy distribution at the given classpath.

    :param classpath: classpath entries for the ivy distribution (list of strings).
    :param ivy_settings: path to find settings.xml file
    :param ivy_resolution_cache_dir: path to store downloaded ivy artifacts
    :param extra_jvm_options: list of strings to add to command line when invoking Ivy
    :raises ValueError: if ivy_settings is set but not a string, or if
      ivy_resolution_cache_dir is not a string.
    """
    self._classpath = ensure_str_list(classpath)
    self._ivy_settings = ivy_settings
    if self._ivy_settings and not isinstance(self._ivy_settings, str):
        raise ValueError(
            "ivy_settings must be a string, given {} of type {}".format(
                self._ivy_settings, type(self._ivy_settings)))
    self._ivy_resolution_cache_dir = ivy_resolution_cache_dir
    # NOTE(review): unlike ivy_settings, this check is not None-guarded, so the
    # keyword default of None always fails it — the cache dir is effectively a
    # required argument. Confirm whether that is intended.
    if not isinstance(self._ivy_resolution_cache_dir, str):
        raise ValueError(
            "ivy_resolution_cache_dir must be a string, given {} of type {}"
            .format(self._ivy_resolution_cache_dir,
                    type(self._ivy_resolution_cache_dir)))
    self._extra_jvm_options = extra_jvm_options or []
    # Inter-process file lock stored inside the resolution cache dir.
    self._lock = OwnerPrintingInterProcessFileLock(
        os.path.join(self._ivy_resolution_cache_dir, "pants_ivy.file_lock"))
def append_classpath(self, classpath):
    """Specifies a Class-Path entry for this jar's manifest.

    Repeated calls accumulate: each new entry is appended after the classpath
    recorded so far.

    :param iterable classpath: a list of paths
    """
    new_entries = ensure_str_list(classpath)
    # Rebind rather than mutate in place, matching the original semantics.
    self._classpath = self._classpath + new_entries
def __init__(self,
             address=None,
             payload=None,
             sources=None,
             provides=None,
             compatibility=None,
             **kwargs):
    """
    :param dependencies: The addresses of targets that this target depends on.
      These dependencies may be ``python_library``-like targets (``python_library``,
      ``python_thrift_library``, ``python_antlr_library`` and so forth) or
      ``python_requirement_library`` targets.
    :type dependencies: list of strings
    :param sources: Files to "include". Paths are relative to the BUILD file's directory.
    :type sources: ``EagerFilesetWithSpec``
    :param provides: The `setup_py <#setup_py>`_ to publish that represents this target
      outside the repo.
    :param compatibility: either a string that represents interpreter compatibility for this
      target using the Requirement-style format, e.g. ``'CPython>=2.7,<3'`` (Select a CPython
      interpreter with version ``>=2.7`` AND version ``<3``) or a list of Requirement-style
      strings which will be OR'ed together. If the compatibility requirement is agnostic to
      interpreter class, using the example above, a Requirement-style compatibility
      constraint like '>=2.7,<3' (N.B.: not prefixed with CPython) can be used.
    """
    # NOTE(review): address is stashed on self before super().__init__ — presumably
    # needed by helpers (e.g. create_sources_field) invoked pre-super; confirm.
    self.address = address
    payload = payload or Payload()
    payload.add_fields({
        "sources": self.create_sources_field(sources, address.spec_path, key_arg="sources"),
        "provides": provides,
        # A bare compatibility string is normalized to a singleton list.
        "compatibility": PrimitiveField(
            ensure_str_list(compatibility or (), allow_single_str=True)),
    })
    super().__init__(address=address, payload=payload, **kwargs)
    if provides and not isinstance(provides, PythonArtifact):
        raise TargetDefinitionException(
            self,
            "Target must provide a valid pants setup_py object. Received a '{}' object instead."
            .format(provides.__class__.__name__),
        )
    self._provides = provides
    # Check that the compatibility requirements are well-formed.
    for req in self.payload.compatibility:
        try:
            PythonIdentity.parse_requirement(req)
        except ValueError as e:
            raise TargetDefinitionException(self, str(e))
def test_ensure_str_list() -> None:
    """Iterables of strings are accepted; a bare string needs allow_single_str;
    non-strings always raise."""
    assert ensure_str_list(("hello", "there")) == ["hello", "there"]
    assert ensure_str_list("hello", allow_single_str=True) == ["hello"]
    for bad_input in ("hello", 0, [0, 1]):
        with pytest.raises(ValueError):
            ensure_str_list(bad_input)  # type: ignore[arg-type]
def __init__(self, coverage=None, timeout=None, **kwargs):
    """
    :param coverage: the module(s) whose coverage should be generated, e.g. 'twitter.common.log'
      or ['twitter.common.log', 'twitter.common.http']
    :param int timeout: A timeout (in seconds) which covers the total runtime of all tests in
      this target. Only applied if `--test-pytest-timeouts` is set to True.
    """
    # Bug fix: the docstring allows a bare module-name string, but ensure_str_list
    # rejects a str unless allow_single_str=True is passed.
    self._coverage = (
        ensure_str_list(coverage, allow_single_str=True) if coverage is not None else []
    )
    self._timeout = timeout
    super().__init__(**kwargs)
def distribution(files=None, executables=None, java_home=None):
    """Yield a temporary distribution dir populated with the given files and executables.

    Plain files are touched empty; executables are written from their templated
    contents (rendered against the effective java home) and made executable.
    """
    with temporary_dir() as dist_root:
        for file_name in ensure_str_list(files or ()):
            touch(os.path.join(dist_root, file_name))
        for exe in ensure_list(executables or (), expected_type=EXE):
            exe_path = os.path.join(dist_root, exe.relpath)
            with safe_open(exe_path, "w") as fp:
                # java_home is interpreted relative to the dist root; unset means the root itself.
                effective_home = os.path.join(dist_root, java_home) if java_home else dist_root
                fp.write(exe.contents(effective_home))
            chmod_plus_x(exe_path)
        yield dist_root
def test_ensure_str_list(self) -> None:
    """A bare string is promoted to a singleton list; non-strings raise."""
    assert ensure_str_list("hello") == ["hello"]
    assert ensure_str_list(["hello", "there"]) == ["hello", "there"]
    for bad_input in (0, [0, 1]):
        with pytest.raises(ValueError):
            ensure_str_list(bad_input)  # type: ignore[arg-type]
def __init__(
    self,
    *,
    specified: str | Iterable[str] | None = None,
    option_name: str,
    check_existence: Iterable[str] = (),
    check_content: Mapping[str, bytes] = FrozenDict(),
) -> None:
    """Record which entries were specified for an option and which to verify.

    `specified` may be a bare string or an iterable; it is stored as a tuple in
    the given order. `check_existence` is stored sorted; `check_content` is
    frozen as given.
    """
    self.specified = tuple(ensure_str_list(specified or (), allow_single_str=True))
    self.option_name = option_name
    self.check_existence = tuple(sorted(check_existence))
    self.check_content = FrozenDict(check_content)
def distribution(files=None, executables=None, java_home=None, dist_dir=None):
    """Yield a temporary distribution dir (optionally under dist_dir) with the
    given files and executables materialized."""
    # NB attempt to include the java version in the tmp dir name for better test failure messages.
    exe_list = ensure_list(executables or (), expected_type=EXE)
    dist_prefix = "jvm_{}_".format(exe_list[0]._version) if exe_list else "jvm_na_"
    with temporary_dir(root_dir=dist_dir, prefix=dist_prefix) as dist_root:
        for file_name in ensure_str_list(files or ()):
            touch(os.path.join(dist_root, file_name))
        for exe in exe_list:
            exe_path = os.path.join(dist_root, exe.relpath)
            with safe_open(exe_path, "w") as fp:
                # java_home is interpreted relative to the dist root; unset means the root itself.
                effective_home = os.path.join(dist_root, java_home) if java_home else dist_root
                fp.write(exe.contents(effective_home))
            chmod_plus_x(exe_path)
        yield dist_root
def __call__(self, rel_path=None, mapper=None, relative_to=None, fileset=None):
    """Build the BundleProps describing an application bundle's files and path mapping.

    :param rel_path: Base path of the "source" file paths. By default, path of the BUILD file.
      Useful for assets that don't live in the source code repo.
    :param mapper: Function that takes a path string and returns a path string. Takes a path
      in the source tree, returns a path to use in the resulting bundle. By default, an
      identity mapper.
    :param string relative_to: Set up a simple mapping from source path to bundle path.
    :param fileset: The set of files to include in the bundle. A string filename or a list of
      filenames/globs.

    E.g., ``relative_to='common'`` removes that prefix from all files in the application
    bundle.
    """
    if fileset is None:
        raise ValueError(
            "In {}:\n Bare bundle() declarations without a `fileset=` parameter "
            "are no longer supported.".format(self._parse_context.rel_path)
        )
    # mapper and relative_to are mutually exclusive ways to derive the bundle mapping.
    if mapper and relative_to:
        raise ValueError("Must specify exactly one of 'mapper' or 'relative_to'")
    # A fileset is either a string or a list of file paths. All globs are expected to already
    # have been expanded.
    fileset = ensure_str_list(fileset, allow_single_str=True)
    # NOTE(review): internal invariant check (stripped under -O), not user-input validation —
    # glob hydration is expected to have happened upstream.
    assert all("*" not in fp for fp in fileset), (
        "All globs should have already been hydrated for the `bundle(fileset=)` field. "
        f"Given the fileset: {fileset}"
    )
    real_rel_path = rel_path or self._parse_context.rel_path
    if relative_to:
        # Strip the <buildroot>/<rel_path>/<relative_to> prefix from bundled paths.
        base = os.path.join(get_buildroot(), real_rel_path, relative_to)
        mapper = RelativeToMapper(base)
    else:
        # Default mapping is relative to the BUILD file's directory.
        mapper = mapper or RelativeToMapper(os.path.join(get_buildroot(), real_rel_path))
    return BundleProps(real_rel_path, mapper, fileset)
def compute_value(
    cls, raw_value: Optional[Dict[str, Iterable[str]]], *, address: Address
) -> Optional[FrozenDict[str, Tuple[str, ...]]]:
    """Validate and freeze a dict-of-string -> iterable-of-strings field value.

    :raises InvalidFieldTypeException: if the value is not a dict, a key is not a
      string, or any entry list contains a non-string.
    """
    computed = super().compute_value(raw_value, address=address)
    if computed is None:
        return None
    # One shared error for every failure mode below.
    bad_type_error = InvalidFieldTypeException(
        address,
        cls.alias,
        raw_value,
        expected_type="a dictionary of string -> an iterable of strings",
    )
    if not isinstance(computed, dict):
        raise bad_type_error
    frozen_items = {}
    for key, entries in computed.items():
        if not isinstance(key, str):
            raise bad_type_error
        try:
            frozen_items[key] = tuple(ensure_str_list(entries))
        except ValueError:
            raise bad_type_error
    return FrozenDict(frozen_items)
def __init__(self,
             sources=None,
             entry_point=None,
             inherit_path=False,  # pex option
             zip_safe=True,  # pex option
             always_write_cache=False,  # pex option
             ignore_errors=False,  # pex option
             shebang=None,  # pex option
             emit_warnings=None,  # pex option
             platforms=(),
             **kwargs):
    """
    :param string entry_point: the default entry point for this binary. if None, drops into
      the entry point that is defined by source. Something like "pants.bin.pants_exe:main",
      where "pants.bin.pants_exe" is the package name and "main" is the function name (if
      omitted, the module is executed directly, presuming it has a ``__main.py__``).
    :param sources: Zero or one source files. If more than one file is required, it should be
      put in a python_library which should be added to dependencies.
    :param inherit_path: inherit the sys.path of the environment that this binary runs in
    :param zip_safe: whether or not this binary is safe to run in compacted (zip-file) form
    :param always_write_cache: whether or not the .deps cache of this PEX file should always
      be written to disk.
    :param ignore_errors: should we ignore inability to resolve dependencies?
    :param str shebang: Use this shebang for the generated pex.
    :param bool emit_warnings: Whether or not to emit pex warnings.
    :param platforms: extra platforms to target when building this binary. If this is, e.g.,
      ``['current', 'linux-x86_64', 'macosx-10.4-x86_64']``, then when building the pex, then
      for any platform-dependent modules, Pants will include ``egg``\\s for Linux (64-bit
      Intel), Mac OS X (version 10.4 or newer), and the current platform (whatever is being
      used when making the PEX).
    """
    # The boolean default False is stored in the payload as its string form "false".
    if inherit_path is False:
        inherit_path = "false"
    payload = Payload()
    payload.add_fields({
        "entry_point": PrimitiveField(entry_point),
        "inherit_path": PrimitiveField(inherit_path),
        "zip_safe": PrimitiveField(bool(zip_safe)),
        "always_write_cache": PrimitiveField(bool(always_write_cache)),
        "ignore_errors": PrimitiveField(bool(ignore_errors)),
        # A single platform string is normalized to a one-element tuple.
        "platforms": PrimitiveField(
            tuple(ensure_str_list(platforms or [], allow_single_str=True))),
        "shebang": PrimitiveField(shebang),
        "emit_warnings": PrimitiveField(self.Defaults.should_emit_warnings(emit_warnings)),
    })
    super().__init__(sources=sources, payload=payload, **kwargs)
    # Exactly one way in: a single source or an explicit entry_point.
    if (not sources or not sources.files) and entry_point is None:
        raise TargetDefinitionException(
            self,
            "A python binary target must specify either a single source or entry_point."
        )
    # NOTE(review): this type check runs after `platforms` was already consumed by
    # ensure_str_list above, so invalid types likely raise there first — confirm whether
    # this branch is reachable.
    if not isinstance(platforms, (list, tuple)) and not isinstance(platforms, str):
        raise TargetDefinitionException(
            self, "platforms must be a list, tuple or str.")
    if sources and sources.files and entry_point:
        # When both a source and an entry_point are given, they must name the same module.
        entry_point_module = entry_point.split(":", 1)[0]
        entry_source = list(self.sources_relative_to_source_root())[0]
        source_entry_point = self.translate_source_path_to_py_module_specifier(
            entry_source)
        if entry_point_module != source_entry_point:
            raise TargetDefinitionException(
                self,
                "Specified both source and entry_point but they do not agree: {} vs {}"
                .format(source_entry_point, entry_point_module),
            )
def compatibility(self) -> Optional[List[str]]:
    """Return the normalized `compatibility` kwarg, or None when it was never supplied.

    A bare string is promoted to a singleton list.
    """
    try:
        raw_compatibility = self._kwargs["compatibility"]
    except KeyError:
        # Absent key (as opposed to an explicit value) means "unspecified".
        return None
    return ensure_str_list(raw_compatibility, allow_single_str=True)
def manifest_content(classpath):
    """Render a jar MANIFEST with the given Class-Path entries, encoded as bytes."""
    joined_classpath = " ".join(ensure_str_list(classpath, allow_single_str=True))
    # Manifest lines use CRLF terminators, with a blank line closing the main section.
    template = (
        "Manifest-Version: 1.0\r\n"
        "Class-Path: {}\r\n"
        "Created-By: org.pantsbuild.tools.jar.JarBuilder\r\n\r\n"
    )
    return template.format(joined_classpath).encode()
def sanitize_raw_value(
        self, raw_value: Optional[Iterable[str]]) -> Optional[Tuple[str, ...]]:
    """Coerce the raw value into a tuple of strings; None passes through unchanged."""
    return None if raw_value is None else tuple(ensure_str_list(raw_value))
def hydrate(
    self, raw_value: Optional[Union[str, Iterable[str]]], *, address: Address
) -> Optional[Tuple[str, ...]]:
    """Turn the raw field value into a tuple of strings, or None when unset.

    ValueError from non-string entries propagates to the caller.
    """
    if raw_value is None:
        return None
    hydrated = ensure_str_list(raw_value)
    return tuple(hydrated)