async def hydrate_dockerfile( request: GenerateDockerfileRequest) -> GeneratedSources:
    """Hydrate the Dockerfile for a Docker image target.

    The Dockerfile may come either from the `source` field (a file in the
    workspace) or from the `instructions` field (inline content) — exactly one
    of the two must be provided.

    Raises:
        InvalidFieldException: if both or neither of `source`/`instructions`
            are set.
    """
    target = request.protocol_target
    address = target.address
    instructions = target[DockerImageInstructionsField].value
    # Both provided: ambiguous, refuse.
    if instructions and request.protocol_sources.files:
        raise InvalidFieldException(
            f"The `{target.alias}` {address} provides both a Dockerfile with the `source` field, "
            "and Dockerfile contents with the `instructions` field, which is not supported.\n\n"
            "To fix, please either set `source=None` or `instructions=None`.")
    # Neither provided: nothing to hydrate, refuse.
    if not (instructions or request.protocol_sources.files):
        raise InvalidFieldException(
            f"The `{target.alias}` {address} does not specify any Dockerfile.\n\n"
            "Provide either the filename to a Dockerfile in your workspace as the `source` field "
            "value, or the Dockerfile content to the `instructions` field.")

    def dockerfile_path():
        # Synthesize a unique filename (e.g. `Dockerfile.<target>.<generated>`)
        # so generated Dockerfiles from different targets don't collide;
        # `filter(bool, ...)` drops a missing generated/target name.
        name_parts = [ "Dockerfile", address.target_name, address.generated_name ]
        return os.path.join(address.spec_path, ".".join(filter(bool, name_parts)))

    # With inline instructions, materialize them as a synthetic Dockerfile
    # (joined with a trailing newline); otherwise pass through the hydrated
    # `source` snapshot unchanged.
    output = (await Get(
        Snapshot,
        CreateDigest((FileContent(dockerfile_path(), "\n".join(
            [*instructions, ""]).encode()), )),
    ) if instructions else request.protocol_sources)
    return GeneratedSources(output)
def compute_value(cls, raw_value: Optional[str], *, address: Address) -> Optional[str]:
    """Validate and normalize an optional entry point string.

    Accepts `module_or_path` or `module_or_path:func`; strips surrounding
    whitespace and tolerates (with a warning) a dangling trailing colon.

    Raises:
        InvalidFieldException: if the value is blank, names no module, or
            contains more than one colon.
    """
    ep = super().compute_value(raw_value, address=address)
    entry_point = ep.strip() if ep is not None else None
    if not entry_point:
        raise InvalidFieldException(
            f"The entry point for {address} cannot be blank. It must indicate a Python module "
            "by name or path and an optional nullary function in that module separated by a "
            "colon, i.e.: module_name_or_path(':'function_name)?")
    module_or_path, sep, func = entry_point.partition(":")
    if not module_or_path:
        # e.g. `:func` — a module is mandatory.
        raise InvalidFieldException(
            f"The entry point for {address} must specify a module; given: {ep!r}"
        )
    # `partition` splits on the first colon, so any extra colon ends up in
    # `func`; the `module_or_path` check is defensive.
    if ":" in module_or_path or ":" in func:
        raise InvalidFieldException(
            f"The entry point for {address} can only contain one colon separating the entry "
            f"point's module from the entry point function in that module; given: {ep!r}"
        )
    if sep and not func:
        # e.g. `app.py:` — accept, but nudge the user to drop the colon.
        logger.warning(
            f"Assuming no entry point function and stripping trailing ':' from the entry point "
            f"{ep!r} declared in {address}. Consider deleting it to make it clear no entry "
            f"point function is intended.")
        return module_or_path
    return entry_point
def validate_resolved_files(self, files: Sequence[str]) -> None:
    """Check that all files are coming from the same directory.

    Raises:
        InvalidFieldException: if no files resolved, if any file sits at the
            build root (outside any directory), or if files span more than
            one top-level directory.
    """
    super().validate_resolved_files(files)
    if not files:
        raise InvalidFieldException(
            softwrap(f"""
            The `{self.alias}` field in target `{self.address}` must
            resolve to at least one file.
            """))
    # A path with a single component sits at the build root, i.e. outside
    # any sources directory.
    files_outside_dirs = [f for f in files if len(PurePath(f).parts) == 1]
    if files_outside_dirs:
        raise InvalidFieldException(
            softwrap(f"""
            The `{self.alias}` field in target `{self.address}` must be paths to
            files in a single sources directory. Individual files
            were found: {files_outside_dirs}
            """))
    # The first path component identifies each file's top-level directory;
    # all files must share a single one.
    directory_prefixes = {PurePath(f).parts[0] for f in files}
    if len(directory_prefixes) > 1:
        raise InvalidFieldException(
            softwrap(f"""
            The `{self.alias}` field in target `{self.address}` must be paths to
            files in a single sources directory. Multiple directories
            were found: {directory_prefixes}
            """))
def compute_value(cls, raw_value: Optional[str], *, address: Address) -> Optional[EntryPoint]:
    """Parse the raw value into an `EntryPoint`, requiring that one is set.

    Raises:
        InvalidFieldException: if no entry point was specified, or it fails
            to parse.
    """
    ep = super().compute_value(raw_value, address=address)
    if ep is None:
        raise InvalidFieldException(
            f"An entry point must be specified for {address}. It must indicate a Python module "
            "by name or path and an optional nullary function in that module separated by a "
            "colon, i.e.: module_name_or_path(':'function_name)?")
    try:
        return EntryPoint.parse(ep, provenance=f"for {address}")
    except ValueError as e:
        # Chain the original error so the underlying parse failure stays
        # visible in tracebacks (was previously swallowed as `__context__`).
        raise InvalidFieldException(str(e)) from e
def compute_value(
    cls, raw_value: Optional[Iterable[str]], *, address: Address
) -> Tuple[Requirement, ...]:
    """Parse the raw field value into a tuple of `Requirement` objects.

    Accepts an iterable of pip-style requirement strings or pre-parsed
    `Requirement` objects (so macros can avoid parsing twice).

    Raises:
        InvalidFieldTypeException: if the value is not an iterable of
            strings/`Requirement`s.
        InvalidFieldException: if a string fails to parse as a requirement.
    """
    value = super().compute_value(raw_value, address=address)
    invalid_type_error = InvalidFieldTypeException(
        address,
        cls.alias,
        value,
        expected_type="an iterable of pip-style requirement strings (e.g. a list)",
    )
    # A bare string is iterable but would be treated as per-character values.
    if isinstance(value, str) or not isinstance(value, collections.abc.Iterable):
        raise invalid_type_error
    result = []
    for v in value:
        # We allow passing a pre-parsed `Requirement`. This is intended for macros which might
        # have already parsed so that we can avoid parsing multiple times.
        if isinstance(v, Requirement):
            result.append(v)
        elif isinstance(v, str):
            try:
                parsed = Requirement.parse(v)
            except Exception as e:
                # Chain the parse error so the root cause stays visible.
                raise InvalidFieldException(
                    format_invalid_requirement_string_error(
                        v,
                        e,
                        description_of_origin=(
                            f"the '{cls.alias}' field for the target {address}"
                        ),
                    )
                ) from e
            result.append(parsed)
        else:
            raise invalid_type_error
    return tuple(result)
def compute_value(cls, raw_value: Optional[str], address: Address) -> str:
    """Validate that the handler names a function via a `:my_func` suffix."""
    value = cast(str, super().compute_value(raw_value, address))
    if ":" in value:
        return value
    raise InvalidFieldException(
        f"The `{cls.alias}` field in target at {address} must end in the "
        f"format `:my_handler_func`, but was {value}.")
def compute_value(cls, raw_value: Optional[str], address: Address) -> str:
    """Validate the runtime value against `PYTHON_RUNTIME_REGEX` (pythonXY)."""
    value = cast(str, super().compute_value(raw_value, address))
    if re.match(cls.PYTHON_RUNTIME_REGEX, value):
        return value
    raise InvalidFieldException(
        f"The `{cls.alias}` field in target at {address} must be of the form pythonXY, "
        f"but was {value}.")
def validate_resolved_files(self, files: Sequence[str]) -> None:
    """Check that the resolved files are exactly `go.mod` plus an optional `go.sum`."""
    super().validate_resolved_files(files)
    # `go.mod` itself is mandatory.
    if self.go_mod_path not in files:
        raise InvalidFieldException(
            f"The {repr(self.alias)} field in target {self.address} must include "
            f"{self.go_mod_path}, but only had: {list(files)}\n\n"
            f"Make sure that you're declaring the `{GoModTarget.alias}` target in the same "
            "directory as your `go.mod` file.")
    # Nothing beyond go.mod/go.sum is allowed.
    unexpected = set(files) - {self.go_mod_path, self.go_sum_path}
    if not unexpected:
        return
    raise InvalidFieldException(
        f"The {repr(self.alias)} field in target {self.address} must only include "
        f"`{self.go_mod_path}` and optionally {self.go_sum_path}, but had: "
        f"{sorted(unexpected)}\n\n"
        f"Make sure that you're declaring the `{GoModTarget.alias}` target in the same "
        f"directory as your `go.mod` file and that you don't override the `{self.alias}` "
        "field.")
def compute_value(cls, raw_value: Optional[int], *, address: Address) -> Optional[int]:
    """Ensure that a timeout, when provided, is a positive integer."""
    value = super().compute_value(raw_value, address=address)
    if value is None or value >= 1:
        return value
    raise InvalidFieldException(
        f"The value for the `timeout` field in target {address} must be > 0, but was "
        f"{value}.")
def compute_value(cls, raw_value: Optional[str], *, address: Address) -> str:
    """Validate the AWS Lambda runtime string (`pythonX.Y`)."""
    value = cast(str, super().compute_value(raw_value, address=address))
    if re.match(cls.PYTHON_RUNTIME_REGEX, value):
        return value
    raise InvalidFieldException(
        f"runtime field in python_awslambda target at {address.spec} must "
        f"be of the form pythonX.Y, but was {value}")
def compute_value(cls, raw_value: Optional[str], *, address: Address) -> str:
    """Require that the value is namespaced under `pantsbuild.pants.`."""
    value = cast(str, super().compute_value(raw_value, address=address))
    if value.startswith("pantsbuild.pants."):
        return value
    raise InvalidFieldException(
        f"The {repr(cls.alias)} in target {address} must start with `pantsbuild.pants`, "
        f"but was {value}.")
def compute_value(cls, raw_value: Optional[Iterable[str]], address: Address) -> Optional[Tuple[str, ...]]:
    """Require a non-empty value for this sources field."""
    value_or_default = super().compute_value(raw_value, address)
    if value_or_default:
        return value_or_default
    raise InvalidFieldException(
        f"The {repr(cls.alias)} field in target {address} must be set to files/globs in "
        f"the target's directory, but it was set to {repr(value_or_default)}."
    )
def compute_value(cls, raw_value: Optional[str], address: Address) -> Optional[str]:
    """Reject `file:` URLs, pointing users at the `jar` field instead."""
    value_or_default = super().compute_value(raw_value, address)
    if not value_or_default or not value_or_default.startswith("file:"):
        return value_or_default
    raise InvalidFieldException(
        f"The `{cls.alias}` field does not support `file:` URLS, but the target "
        f"{address} sets the field to `{value_or_default}`.\n\n"
        "Instead, use the `jar` field to specify the relative path to the local jar file."
    )
def validate_resolved_files(self, files: Sequence[str]) -> None:
    """Forbid `conftest.py` as the source of this target.

    `conftest.py` belongs in a `python_source` / `python_test_utils` target;
    erroring here gives the user a targeted hint.

    Raises:
        InvalidFieldException: if the single resolved file is `conftest.py`.
    """
    super().validate_resolved_files(files)
    # Guard against an empty resolution so we don't mask the real problem
    # with an IndexError from `files[0]`.
    if not files:
        return
    file = files[0]
    file_name = os.path.basename(file)
    if file_name == "conftest.py":
        raise InvalidFieldException(
            f"The {repr(self.alias)} field in target {self.address} should not be set to the "
            f"file 'conftest.py', but was set to {repr(self.value)}.\n\nInstead, use a "
            "`python_source` target or the target generator `python_test_utils`. You can run "
            f"`./pants tailor` after removing this target ({self.address}) to autogenerate a "
            "`python_test_utils` target.")
async def resolve_pex_entry_point(request: ResolvePexEntryPointRequest) -> ResolvedPexEntryPoint:
    """Resolve a `pex_binary` entry point, converting a file name to a module path.

    Returns the entry point unchanged if it is already a module path, or with
    its `.py` file name translated into a source-root-relative module path.

    Raises:
        InvalidFieldException: if a glob in the field matches more than one file.
    """
    ep_val = request.entry_point_field.value
    if ep_val is None:
        return ResolvedPexEntryPoint(None, file_name_used=False)
    address = request.entry_point_field.address

    # We support several different schemes:
    #  1) `path.to.module` => preserve exactly.
    #  2) `path.to.module:func` => preserve exactly.
    #  3) `app.py` => convert into `path.to.app`.
    #  4) `app.py:func` => convert into `path.to.app:func`.

    # If it's already a module (cases #1 and #2), simply use that. Otherwise, convert the file name
    # into a module path (cases #3 and #4).
    if not ep_val.module.endswith(".py"):
        return ResolvedPexEntryPoint(ep_val, file_name_used=False)

    # Use the engine to validate that the file exists and that it resolves to only one file.
    full_glob = os.path.join(address.spec_path, ep_val.module)
    entry_point_paths = await Get(
        Paths,
        PathGlobs(
            [full_glob],
            glob_match_error_behavior=GlobMatchErrorBehavior.error,
            description_of_origin=f"{address}'s `{request.entry_point_field.alias}` field",
        ),
    )
    # We will have already raised if the glob did not match, i.e. if there were no files. But
    # we need to check if they used a file glob (`*` or `**`) that resolved to >1 file.
    if len(entry_point_paths.files) != 1:
        raise InvalidFieldException(
            softwrap(
                f"""
                Multiple files matched for the `{request.entry_point_field.alias}`
                {ep_val.spec!r} for the target {address}, but only one file expected. Are you
                using a glob, rather than a file name?

                All matching files: {list(entry_point_paths.files)}.
                """
            )
        )
    entry_point_path = entry_point_paths.files[0]
    # Strip the source root so the file path can be rewritten as an importable
    # module path.
    source_root = await Get(
        SourceRoot,
        SourceRootRequest,
        SourceRootRequest.for_file(entry_point_path),
    )
    stripped_source_path = os.path.relpath(entry_point_path, source_root.path)
    module_base, _ = os.path.splitext(stripped_source_path)
    normalized_path = module_base.replace(os.path.sep, ".")
    return ResolvedPexEntryPoint(
        dataclasses.replace(ep_val, module=normalized_path), file_name_used=True
    )
def compute_value(cls, raw_value: Optional[str], address: Address) -> EntryPoint:
    """Parse the raw value into an `EntryPoint`, requiring a string value.

    Raises:
        InvalidFieldTypeException: if the value is not a string.
        InvalidFieldException: if the string fails to parse.
    """
    value = super().compute_value(raw_value, address)
    if not isinstance(value, str):
        raise InvalidFieldTypeException(address, cls.alias, value, expected_type="a string")
    try:
        return EntryPoint.parse(value, provenance=f"for {address}")
    except ValueError as e:
        # Chain the original error so the underlying parse failure stays
        # visible in tracebacks (was previously swallowed as `__context__`).
        raise InvalidFieldException(str(e)) from e
def compute_value(cls, raw_value: Optional[Iterable[str]], address: Address) -> Tuple[str, ...]:
    """Validate that every extension is `Fortran`-prefixed.

    This is deliberately arbitrary validation used to exercise the
    hydration/validation machinery.
    """
    value_or_default = super().compute_value(raw_value, address)
    bad_extensions = []
    for extension in value_or_default:
        if not extension.startswith("Fortran"):
            bad_extensions.append(extension)
    if bad_extensions:
        raise InvalidFieldException(
            f"The {repr(cls.alias)} field in target {address} expects all elements to be "
            f"prefixed by `Fortran`. Received {bad_extensions}.",
        )
    return tuple(value_or_default)
def compute_value(cls, raw_value: Optional[Iterable[str]], address: Address) -> Optional[Tuple[str, ...]]:
    """Require non-empty globs restricted to the target's own directory."""
    value_or_default = super().compute_value(raw_value, address)
    if not value_or_default:
        raise InvalidFieldException(
            f"The {repr(cls.alias)} field in target {address} must be set to files/globs in "
            f"the target's directory, but it was set to {repr(value_or_default)}."
        )
    # A `go_package` corresponds to exactly one directory, so recursive globs
    # and subdirectory paths are rejected.
    invalid_globs = []
    for glob in value_or_default or ():
        if "**" in glob or os.path.sep in glob:
            invalid_globs.append(glob)
    if invalid_globs:
        raise InvalidFieldException(
            f"The {repr(cls.alias)} field in target {address} must only have globs for the "
            f"target's directory, i.e. it cannot include values with `**` and `{os.path.sep}`, "
            f"but it was set to: {sorted(value_or_default)}")
    return value_or_default
def compute_value(cls, raw_value: Optional[Iterable[str]], address: Address) -> Tuple[str, ...]:
    """Reject banned extensions and append the class defaults to the result."""
    declared = super().compute_value(raw_value, address)
    banned = [extension for extension in declared if extension in cls.banned_extensions]
    if banned:
        raise InvalidFieldException(
            f"The {repr(cls.alias)} field in target {address} is using banned "
            f"extensions: {banned}")
    # Defaults always come after whatever the user specified.
    return (*declared, *cls.default_extensions)
async def create_python_binary_run_request(
        field_set: PythonBinaryFieldSet,
        python_binary_defaults: PythonBinaryDefaults) -> RunRequest:
    """Build a `RunRequest` that executes a python binary target via its PEX.

    Derives the entry point from `sources` when `entry_point` is unset, builds
    a source-less PEX plus the hydrated source files, and runs the PEX with
    `-m <entry_point>` and the source roots on `PEX_EXTRA_SYS_PATH`.

    Raises:
        InvalidFieldException: if neither `entry_point` nor `sources` yields
            an entry point.
    """
    entry_point = field_set.entry_point.value
    if entry_point is None:
        # TODO: This is overkill? We don't need to hydrate the sources and strip snapshots,
        # we only need the path relative to the source root.
        binary_sources = await Get(HydratedSources, HydrateSourcesRequest(field_set.sources))
        stripped_binary_sources = await Get(
            StrippedSourceFiles, SourceFiles(binary_sources.snapshot, ()))
        entry_point = PythonBinarySources.translate_source_file_to_entry_point(
            stripped_binary_sources.snapshot.files)
    if entry_point is None:
        raise InvalidFieldException(
            "You must either specify `sources` or `entry_point` for the target "
            f"{repr(field_set.address)} in order to run it, but both fields were undefined."
        )

    transitive_targets = await Get(TransitiveTargets, Addresses([field_set.address]))

    output_filename = f"{field_set.address.target_name}.pex"
    # The PEX excludes source files; they are merged in separately below so
    # they stay editable on disk while running.
    pex_request = Get(
        Pex,
        PexFromTargetsRequest(
            addresses=Addresses([field_set.address]),
            platforms=PexPlatforms.create_from_platforms_field(
                field_set.platforms),
            output_filename=output_filename,
            additional_args=field_set.generate_additional_args(
                python_binary_defaults),
            include_source_files=False,
        ),
    )
    sources_request = Get(
        PythonSourceFiles,
        PythonSourceFilesRequest(transitive_targets.closure, include_files=True),
    )
    pex, sources = await MultiGet(pex_request, sources_request)

    merged_digest = await Get(
        Digest,
        MergeDigests([pex.digest, sources.source_files.snapshot.digest]))
    return RunRequest(
        digest=merged_digest,
        binary_name=pex.output_filename,
        prefix_args=("-m", entry_point),
        # Make the (uncompiled-in) sources importable at runtime.
        env={"PEX_EXTRA_SYS_PATH": ":".join(sources.source_roots)},
    )
def compute_value(cls, raw_value: Optional[Iterable[str]], *, address: Address) -> Tuple[Requirement, ...]:
    """Parse the raw field value into a tuple of `Requirement` objects.

    Accepts pip-style requirement strings, pre-parsed `Requirement` objects,
    and (deprecated) `PythonRequirement` objects from `pants_requirement`.

    Raises:
        InvalidFieldTypeException: if the value is not an iterable of
            supported element types.
        InvalidFieldException: if a string fails to parse as a requirement.
    """
    value = super().compute_value(raw_value, address=address)
    invalid_type_error = InvalidFieldTypeException(
        address,
        cls.alias,
        value,
        expected_type="an iterable of pip-style requirement strings (e.g. a list)",
    )
    # A bare string (iterable of characters) or a single PythonRequirement is
    # not an acceptable top-level value.
    if isinstance(value, (str, PythonRequirement)) or not isinstance(
            value, collections.abc.Iterable):
        raise invalid_type_error
    result = []
    for v in value:
        # We allow passing a pre-parsed `Requirement`. This is intended for macros which might
        # have already parsed so that we can avoid parsing multiple times.
        if isinstance(v, Requirement):
            result.append(v)
        elif isinstance(v, str):
            try:
                parsed = Requirement.parse(v)
            except Exception as e:
                # Chain the parse error so the root cause stays visible.
                raise InvalidFieldException(
                    format_invalid_requirement_string_error(
                        v,
                        e,
                        description_of_origin=(
                            f"the '{cls.alias}' field for the target {address}"
                        ),
                    )) from e
            result.append(parsed)
        elif isinstance(v, PythonRequirement):
            # Deprecated `pants_requirement` path: warn and unwrap.
            extra_suggestions = ""
            if v.repository:
                extra_suggestions += (
                    f"\n\nInstead of setting 'repository={v.repository}`, add this to the "
                    "option `repos` in the `[python-repos]` options scope."
                )
            warn_or_error(
                removal_version="2.1.0.dev0",
                deprecated_entity_description="Using `pants_requirement`",
                hint=(
                    f"In the '{cls.alias}' field for {address}, use '{str(v.requirement)}' "
                    f"instead of 'pants_requirement('{str(v.requirement)}').{extra_suggestions}"
                ),
            )
            result.append(v.requirement)
        else:
            raise invalid_type_error
    return tuple(result)
def compute_value(cls, raw_value: Optional[str], address: Address) -> Optional[str]:
    """Disallow absolute paths, suggesting the equivalent relative spellings.

    Raises:
        InvalidFieldException: if the value starts with `/`.
    """
    value_or_default = super().compute_value(raw_value, address=address)
    if isinstance(value_or_default, str) and value_or_default.startswith("/"):
        # Offer both build-root-relative and BUILD-file-relative forms.
        val = value_or_default.strip("/")
        raise InvalidFieldException(
            softwrap(f"""
            The `{cls.alias}` field in target {address} must be a relative path, but
            was {value_or_default!r}. Use {val!r} for a path relative to the build
            root, or {'./' + val!r} for a path relative to the BUILD file (i.e.
            {os.path.join(address.spec_path, val)!r}).
            """))
    return value_or_default
def compute_value(cls, raw_value: Optional[str], address: Address) -> Optional[str]:
    """Validate an optional `pythonX.Y` runtime string.

    Raises:
        InvalidFieldException: if a value is set but does not match
            `PYTHON_RUNTIME_REGEX`.
    """
    value = super().compute_value(raw_value, address)
    if value is None:
        # The field is optional; nothing to validate.
        return None
    if not re.match(cls.PYTHON_RUNTIME_REGEX, value):
        raise InvalidFieldException(
            softwrap(
                f"""
                The `{cls.alias}` field in target at {address} must be of the form pythonX.Y,
                but was {value}.
                """
            )
        )
    return value
def validate_resolved_files(self, files: Sequence[str]) -> None:
    """Reject any `conftest.py` among the resolved files."""
    super().validate_resolved_files(files)
    # We don't technically need to error for `conftest.py` here because `PythonTestSourceField`
    # already validates this, but we get a better error message this way so that users don't
    # have to reason about generated targets.
    conftest_files = []
    for fp in files:
        if os.path.basename(fp) == "conftest.py":
            conftest_files.append(fp)
    if not conftest_files:
        return
    raise InvalidFieldException(
        f"The {repr(self.alias)} field in target {self.address} should not include the "
        f"file 'conftest.py', but included these: {conftest_files}.\n\nInstead, use a "
        "`python_source` target or the target generator `python_test_utils`. You can run "
        f"`./pants tailor` after removing the files from the {repr(self.alias)} field of "
        f"this target ({self.address}) to autogenerate a `python_test_utils` target."
    )
async def determine_main_pkg_for_go_binary(
    request: GoBinaryMainPackageRequest,
) -> GoBinaryMainPackage:
    """Determine the `main` Go package for a `go_binary` target.

    Uses the explicit `main` field when set (validating it points at a
    `go_first_party_package`); otherwise infers the single sibling
    `go_first_party_package` target in the binary's directory.

    Raises:
        InvalidFieldException: if `main` points at the wrong target type.
        ResolveError: if no, or more than one, sibling package target exists.
    """
    addr = request.field.address
    if request.field.value:
        wrapped_specified_tgt = await Get(
            WrappedTarget,
            AddressInput,
            AddressInput.parse(request.field.value, relative_to=addr.spec_path),
        )
        if not wrapped_specified_tgt.target.has_field(GoFirstPartyPackageSourcesField):
            raise InvalidFieldException(
                f"The {repr(GoBinaryMainPackageField.alias)} field in target {addr} must point to "
                "a `go_first_party_package` target, but was the address for a "
                f"`{wrapped_specified_tgt.target.alias}` target.\n\n"
                "Hint: you should normally not specify this field so that Pants will find the "
                "`go_first_party_package` target for you. (Pants generates "
                "`go_first_party_package` targets based on the `go_mod` target)."
            )
        return GoBinaryMainPackage(wrapped_specified_tgt.target.address)

    # No explicit `main`: look for exactly one sibling package target.
    candidate_targets = await Get(
        Targets, AddressSpecs([SiblingAddresses(addr.spec_path)]))
    relevant_pkg_targets = [
        tgt for tgt in candidate_targets
        if tgt.has_field(GoFirstPartyPackageSourcesField)
        and tgt.residence_dir == addr.spec_path
    ]
    if len(relevant_pkg_targets) == 1:
        return GoBinaryMainPackage(relevant_pkg_targets[0].address)

    wrapped_tgt = await Get(WrappedTarget, Address, addr)
    alias = wrapped_tgt.target.alias
    if not relevant_pkg_targets:
        raise ResolveError(
            f"The `{alias}` target {addr} requires that there is a `go_first_party_package` "
            f"target for its directory {addr.spec_path}, but none were found.\n\n"
            "Have you added a `go_mod` target (which will generate `go_first_party_package` "
            "targets)?")
    raise ResolveError(
        f"There are multiple `go_first_party_package` targets for the same directory of the "
        f"`{alias}` target {addr}: {addr.spec_path}. It is ambiguous what to use as the `main` "
        "package.\n\n"
        # Fix: close the backtick around `{addr}` (was "`{addr} or").
        f"To fix, please either set the `main` field for `{addr}` or remove these "
        "`go_first_party_package` targets so that only one remains: "
        f"{sorted(tgt.address.spec for tgt in relevant_pkg_targets)}")
async def resolve_python_aws_handler(
    request: ResolvePythonAwsHandlerRequest,
) -> ResolvedPythonAwsHandler:
    """Resolve an AWS Lambda handler, converting a file name to a module path.

    A handler of the form `path/to/file.py:func` is rewritten to
    `path.to.file:func` (relative to its source root); module-style handlers
    pass through unchanged.

    Raises:
        InvalidFieldException: if a glob in the field matches more than one file.
    """
    handler_val = request.field.value
    field_alias = request.field.alias
    address = request.field.address
    path, _, func = handler_val.partition(":")

    # If it's already a module, simply use that. Otherwise, convert the file name into a module
    # path.
    if not path.endswith(".py"):
        return ResolvedPythonAwsHandler(handler_val, file_name_used=False)

    # Use the engine to validate that the file exists and that it resolves to only one file.
    full_glob = os.path.join(address.spec_path, path)
    handler_paths = await Get(
        Paths,
        PathGlobs(
            [full_glob],
            glob_match_error_behavior=GlobMatchErrorBehavior.error,
            description_of_origin=f"{address}'s `{field_alias}` field",
        ),
    )
    # We will have already raised if the glob did not match, i.e. if there were no files. But
    # we need to check if they used a file glob (`*` or `**`) that resolved to >1 file.
    if len(handler_paths.files) != 1:
        raise InvalidFieldException(
            softwrap(
                f"""
                Multiple files matched for the `{field_alias}` {repr(handler_val)} for the target
                {address}, but only one file expected. Are you using a glob, rather than a file
                name?

                All matching files: {list(handler_paths.files)}.
                """
            )
        )
    handler_path = handler_paths.files[0]
    # Strip the source root so the file path can be rewritten as an importable
    # module path.
    source_root = await Get(
        SourceRoot,
        SourceRootRequest,
        SourceRootRequest.for_file(handler_path),
    )
    stripped_source_path = os.path.relpath(handler_path, source_root.path)
    module_base, _ = os.path.splitext(stripped_source_path)
    normalized_path = module_base.replace(os.path.sep, ".")
    return ResolvedPythonAwsHandler(f"{normalized_path}:{func}", file_name_used=True)
async def package_pex_binary(
    field_set: PexBinaryFieldSet,
    pex_binary_defaults: PexBinaryDefaults,
    global_options: GlobalOptions,
) -> BuiltPackage:
    """Package a `pex_binary` target into a `.pex` file.

    Derives the entry point from the target's single source file when the
    `entry_point` field is unset.

    Raises:
        InvalidFieldException: if `entry_point` is unset and the target does
            not have exactly one source file.
    """
    entry_point = field_set.entry_point.value
    if entry_point is None:
        # Infer the entry point from `sources`, which must resolve to exactly
        # one file.
        binary_source_paths = await Get(
            Paths, PathGlobs, field_set.sources.path_globs(FilesNotFoundBehavior.error))
        if len(binary_source_paths.files) != 1:
            raise InvalidFieldException(
                "No `entry_point` was set for the target "
                f"{repr(field_set.address)}, so it must have exactly one source, but it has "
                f"{len(binary_source_paths.files)}")
        entry_point_path = binary_source_paths.files[0]
        source_root = await Get(
            SourceRoot,
            SourceRootRequest,
            SourceRootRequest.for_file(entry_point_path),
        )
        entry_point = PexBinarySources.translate_source_file_to_entry_point(
            os.path.relpath(entry_point_path, source_root.path))
    output_filename = field_set.output_path.value_or_default(
        field_set.address,
        file_ending="pex",
        use_legacy_format=global_options.options.pants_distdir_legacy_paths,
    )
    two_step_pex = await Get(
        TwoStepPex,
        TwoStepPexFromTargetsRequest(
            PexFromTargetsRequest(
                addresses=[field_set.address],
                internal_only=False,
                entry_point=entry_point,
                platforms=PexPlatforms.create_from_platforms_field(
                    field_set.platforms),
                output_filename=output_filename,
                additional_args=field_set.generate_additional_args(
                    pex_binary_defaults),
            )),
    )
    return BuiltPackage(two_step_pex.pex.digest, (BuiltPackageArtifact(output_filename), ))
async def generate_targets_from_pex_binaries(
    request: GenerateTargetsFromPexBinaries,
    union_membership: UnionMembership,
) -> GeneratedTargets:
    """Generate one `pex_binary` target per entry point of a `pex_binaries` generator.

    Every field other than the entry points and `overrides` is inherited by
    each generated target; `overrides` entries take precedence and must all be
    consumed.

    Raises:
        InvalidFieldException: if `overrides` contains keys that match no
            entry point.
    """
    generator_addr = request.generator.address
    entry_points_field = request.generator[PexEntryPointsField].value or []
    overrides = request.require_unparametrized_overrides()
    inherited_fields = {
        field.alias: field.value
        for field in request.generator.field_values.values()
        if not isinstance(field, (PexEntryPointsField, OverridesField))
    }

    # Note that we don't check for overlap because it seems unlikely to be a problem.
    # If it does, we should add this check. (E.g. `path.to.app` and `path/to/app.py`)
    def create_pex_binary(entry_point_spec: str) -> PexBinary:
        return PexBinary(
            {
                PexEntryPointField.alias: entry_point_spec,
                **inherited_fields,
                # Note that overrides comes last to make sure that it indeed overrides.
                **overrides.pop(entry_point_spec, {}),
            },
            # ":" is a forbidden character in target names
            generator_addr.create_generated(entry_point_spec.replace(":", "-")),
            union_membership,
            residence_dir=generator_addr.spec_path,
        )

    pex_binaries = [
        create_pex_binary(entry_point) for entry_point in entry_points_field
    ]

    if overrides:
        # Fix: separate the unused-keys list from the tip with a blank line —
        # previously the two sentences ran together with no whitespace at all.
        raise InvalidFieldException(
            f"Unused key in the `overrides` field for {request.generator.address}: "
            f"{sorted(overrides)}\n\n"
            f"Tip: if you'd like to override a field's value for every `{PexBinary.alias}` target "
            "generated by this target, change the field directly on this target rather than using "
            "the `overrides` field.")

    return GeneratedTargets(request.generator, pex_binaries)
async def validate_python_dependencies( request: PythonValidateDependenciesRequest, python_setup: PythonSetup, ) -> ValidatedDependencies: dependencies = await MultiGet( Get( WrappedTarget, WrappedTargetRequest( d, description_of_origin=f"the dependencies of {request.field_set.address}" ), ) for d in request.dependencies ) # Validate that the ICs for dependencies are all compatible with our own. target_ics = request.field_set.interpreter_constraints.value_or_global_default(python_setup) non_subset_items = [] for dep in dependencies: if not dep.target.has_field(InterpreterConstraintsField): continue dep_ics = dep.target[InterpreterConstraintsField].value_or_global_default(python_setup) if not interpreter_constraints_contains( dep_ics, target_ics, python_setup.interpreter_universe ): non_subset_items.append(f"{dep_ics}: {dep.target.address}") if non_subset_items: raise InvalidFieldException( softwrap( f""" The target {request.field_set.address} has the `interpreter_constraints` {target_ics}, which are not a subset of the `interpreter_constraints` of some of its dependencies: {bullet_list(sorted(non_subset_items))} To fix this, you should likely adjust {request.field_set.address}'s `interpreter_constraints` to match the narrowest range in the above list. """ ) ) return ValidatedDependencies()
async def resolve_pex_entry_point(
        request: ResolvePexEntryPointRequest) -> ResolvedPexEntryPoint:
    """Resolve a PEX entry point, deriving it from `sources` when unset.

    Uses the explicit `entry_point` value when present; otherwise translates
    the target's single source file into a source-root-relative module path.

    Raises:
        InvalidFieldException: if `entry_point` is unset and the sources do
            not resolve to exactly one file.
    """
    if request.entry_point_field.value:
        return ResolvedPexEntryPoint(request.entry_point_field.value)
    binary_source_paths = await Get(
        Paths, PathGlobs, request.sources.path_globs(FilesNotFoundBehavior.error))
    if len(binary_source_paths.files) != 1:
        raise InvalidFieldException(
            "No `entry_point` was set for the target "
            f"{repr(request.sources.address)}, so it must have exactly one source, but it has "
            f"{len(binary_source_paths.files)}.")
    entry_point_path = binary_source_paths.files[0]
    # Strip the source root so the file path can be rewritten as an importable
    # module path.
    source_root = await Get(
        SourceRoot,
        SourceRootRequest,
        SourceRootRequest.for_file(entry_point_path),
    )
    stripped_source_path = os.path.relpath(entry_point_path, source_root.path)
    module_base, _ = os.path.splitext(stripped_source_path)
    return ResolvedPexEntryPoint(module_base.replace(os.path.sep, "."))